feat: add Dockerfile, deploy pipeline, Ollama infra job, weekly cron
CI / check (push) Successful in 13m8s
CI / test (push) Successful in 13m23s
CI / clippy (push) Successful in 12m46s
CI / deploy-infra (push) Has been skipped
CI / deploy (push) Has been skipped

This commit is contained in:
2026-04-25 19:22:51 +02:00
parent b6a5618f78
commit 72affaa0d8
5 changed files with 160 additions and 7 deletions
+102
View File
@@ -0,0 +1,102 @@
---
name: CI

on:
  push:
    branches: [ '**' ]
  pull_request:
    branches: [ master ]
  schedule:
    - cron: '0 3 * * 1'  # every Monday 03:00 UTC

jobs:
  check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: dtolnay/rust-toolchain@stable
      - uses: Swatinem/rust-cache@v2
      - run: cargo check --all-targets

  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: dtolnay/rust-toolchain@stable
      - uses: Swatinem/rust-cache@v2
      - run: cargo test

  clippy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: dtolnay/rust-toolchain@stable
        with:
          components: clippy
      - uses: Swatinem/rust-cache@v2
      - run: cargo clippy -- -D warnings

  deploy:
    runs-on: ubuntu-latest
    needs: [ check, test, clippy ]
    # Only deploy direct pushes to master that passed all three gates.
    if: github.ref == 'refs/heads/master' && github.event_name == 'push'
    steps:
      - uses: actions/checkout@v4
      - name: Install cross-compilation tools
        run: |
          sudo apt-get update
          sudo apt-get install -y gcc-aarch64-linux-gnu
      - uses: dtolnay/rust-toolchain@stable
        with:
          targets: aarch64-unknown-linux-gnu
      - uses: Swatinem/rust-cache@v2
      - name: Build ARM64
        run: cargo build --release --target aarch64-unknown-linux-gnu
        env:
          CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc
      - name: Setup SSH
        run: |
          mkdir -p ~/.ssh
          echo "${{ secrets.PI_SSH_KEY }}" > ~/.ssh/deploy_key
          chmod 600 ~/.ssh/deploy_key
          # NOTE(review): localhost:10022 assumes a tunnel/port-forward to the
          # Pi exists on the runner host — confirm against the runner setup.
          ssh-keyscan -p 10022 localhost >> ~/.ssh/known_hosts
      - name: Copy binary to Pi
        run: |
          scp -i ~/.ssh/deploy_key -P 10022 \
            target/aarch64-unknown-linux-gnu/release/nazarick \
            deploy@localhost:/opt/nazarick/nazarick.new
      - name: Restart nazarick on Pi
        # set -e is required: ssh returns only the LAST command's exit status,
        # so without it a failed mv would be silently ignored and the old
        # binary restarted as if the deploy had succeeded.
        run: |
          ssh -i ~/.ssh/deploy_key -p 10022 deploy@localhost '
            set -e
            mv /opt/nazarick/nazarick.new /opt/nazarick/target/release/nazarick
            cd /opt/nazarick
            docker compose restart nazarick
          '

  deploy-infra:
    runs-on: ubuntu-latest
    # Weekly maintenance run (cron above): refresh the Ollama image and model.
    if: github.event_name == 'schedule'
    steps:
      - name: Setup SSH
        run: |
          mkdir -p ~/.ssh
          echo "${{ secrets.PI_SSH_KEY }}" > ~/.ssh/deploy_key
          chmod 600 ~/.ssh/deploy_key
          ssh-keyscan -p 10022 localhost >> ~/.ssh/known_hosts
      - name: Update Ollama + pull latest Gemma
        # set -e so a failed image pull aborts instead of restarting the app
        # against a half-updated Ollama.
        run: |
          ssh -i ~/.ssh/deploy_key -p 10022 deploy@localhost '
            set -e
            cd /opt/nazarick
            docker compose pull ollama
            docker compose up -d ollama
            sleep 5  # crude wait for the Ollama API — TODO: poll instead
            docker exec ollama ollama pull gemma3:2b
            docker compose restart nazarick
          '
+19
View File
@@ -0,0 +1,19 @@
# Minimal runtime image for the cross-compiled ARM64 nazarick binary.
FROM debian:bookworm-slim

# ca-certificates enables outbound TLS; --no-install-recommends avoids
# pulling unneeded packages, and clearing the apt lists keeps the layer small.
RUN apt-get update \
    && apt-get install -y --no-install-recommends ca-certificates \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Path mirrors the Cargo output layout the binary expects at runtime:
#   current_exe() = /app/target/release/nazarick
#   3x .parent() -> /app/target/release -> /app/target -> /app (workspace_root)
RUN mkdir -p /app/target/release
COPY target/aarch64-unknown-linux-gnu/release/nazarick /app/target/release/nazarick

VOLUME ["/app/config", "/app/data"]
EXPOSE 8765

CMD ["/app/target/release/nazarick"]
+15 -6
View File
@@ -2,18 +2,27 @@
# ─── Models ───────────────────────────────────────────────────────────────────
[models.default]
provider = "openai_compat"
url = "http://localhost:11434/v1"
model = "gemma3:2b"
api_key = "ollama"
skill_format = "xml"
[models.summary]
provider = "openai_compat"
url = "http://localhost:11434"
model = "gemma3:2b"
api_key = "ollama"
skill_format = "xml"
max_summary_tokens = 2000
[models.openrouter-llama]
provider = "openai_compat"
url = "https://openrouter.ai/api/v1"
model = "meta-llama/llama-3.3-70b-instruct"
skill_format = "tool_use"
# SECURITY: this API key is committed to version control — revoke it and load
# it from an environment variable or secret store instead.
api_key = "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"
[models.summary]
provider = "openai_compat"
url = "http://localhost:11434"
model = "llama3.1:8b"
max_summary_tokens = 5000
skill_format = "xml"
# ─── Chat ─────────────────────────────────────────────────────────────────────
[chat]
+1 -1
View File
@@ -250,7 +250,7 @@ fn split_message(text: &str) -> Vec<String> {
        .unwrap_or(safe_max);
    chunks.push(remaining[..cut].trim().to_string());
-   remaining = &remaining[cut..].trim_start();
+   remaining = remaining[cut..].trim_start();
}
if !remaining.is_empty() {
+23
View File
@@ -0,0 +1,23 @@
---
services:
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    restart: unless-stopped
    network_mode: host
    volumes:
      # Persist downloaded models across container recreation.
      - /opt/nazarick/ollama:/root/.ollama

  nazarick:
    image: nazarick:latest
    container_name: nazarick
    restart: unless-stopped
    network_mode: host
    depends_on:
      - ollama
    volumes:
      # Binary path inside the container: /app/target/release/nazarick
      # 3x parent() -> workspace_root = /app
      # NOTE(review): this host mount shadows the binary COPY'd into the
      # image — the deploy job drops new builds into /opt/nazarick/target.
      - /opt/nazarick/target:/app/target
      - /opt/nazarick/config:/app/config
      - /opt/nazarick/data:/app/data
    working_dir: /app