# feat: add Dockerfile, deploy pipeline, Ollama infra job, weekly cron
# ─── Models ───────────────────────────────────────────────────────────────────

# Default model, served by a local Ollama instance through its
# OpenAI-compatible API (hence provider = "openai_compat").
[models.default]
provider = "openai_compat"
# Ollama exposes its OpenAI-compatible endpoint under the /v1 path.
url = "http://localhost:11434/v1"
model = "gemma3:2b"
# Ollama ignores the API key, but OpenAI-compatible clients require a
# non-empty value; "ollama" is the conventional placeholder.
api_key = "ollama"
skill_format = "xml"
||||
# NOTE(review): `[models.summary]` is defined twice in this file, which is
# invalid TOML 1.0 (a table may be defined only once). This earlier copy is
# superseded by the later `[models.summary]` definition further down, so it
# is commented out here — delete it once confirmed obsolete.
# [models.summary]
# provider = "openai_compat"
# url = "http://localhost:11434"
# model = "gemma3:2b"
# api_key = "ollama"
# skill_format = "xml"
# max_summary_tokens = 2000
||||
# Hosted fallback model via OpenRouter's OpenAI-compatible API.
[models.openrouter-llama]
provider = "openai_compat"
url = "https://openrouter.ai/api/v1"
model = "meta-llama/llama-3.3-70b-instruct"
skill_format = "tool_use"
# SECURITY: this OpenRouter API key is committed in plaintext and must be
# treated as compromised — rotate it and supply the replacement at deploy
# time. TOML has no env-var interpolation, so secret injection must be
# handled by the consuming application, not this file.
api_key = "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"
||||
# Summarization model: a larger local model with a bigger summary budget.
[models.summary]
provider = "openai_compat"
# Added the /v1 path for consistency with [models.default]: the same
# openai_compat provider targets the same local Ollama server, whose
# OpenAI-compatible API is served under /v1.
url = "http://localhost:11434/v1"
model = "llama3.1:8b"
# Ollama ignores the key, but OpenAI-compatible clients generally require a
# non-empty value; added for consistency with the other local entries.
api_key = "ollama"
# Upper bound on tokens generated for a summary.
max_summary_tokens = 5000
skill_format = "xml"
# ─── Chat ─────────────────────────────────────────────────────────────────────

# NOTE(review): no keys are visible under [chat] in this portion of the file;
# its settings may continue past the visible chunk — confirm before removing.
[chat]