Files
dot/modules/home/opencode.nix
2026-02-11 22:00:44 +01:00

70 lines
2.2 KiB
Nix

{ config, lib, ... }:
let
  cfg = config.dot.llm.opencode;
in
{
  options.dot.llm.opencode = {
    enableConfig = lib.mkOption {
      type = lib.types.bool;
      default = false;
      description = "Enable managed OpenCode configuration";
    };
  };

  config = lib.mkIf cfg.enableConfig {
    # Register the Langdock API key as a sops-managed secret.
    # The encrypted file comes from defaultSopsFile (declared in secrets.nix),
    # so no per-secret sopsFile is needed here.
    sops.secrets.langdock_api_key = { };

    # Render opencode.json at activation time via sops-nix templating:
    # config.sops.placeholder.* is replaced with the decrypted secret,
    # keeping the API key out of the world-readable Nix store.
    sops.templates."opencode/opencode.json" = {
      content = builtins.toJSON {
        "$schema" = "https://opencode.ai/config.json";
        model = "anthropic/claude-sonnet-4-5-20250929";
        small_model = "anthropic/claude-haiku-4-5-20251001";
        default_agent = "plan";
        enabled_providers = [
          "anthropic"
          "langdock-openai"
          "ollama"
        ];
        provider = {
          # OpenAI-compatible endpoint proxied through Langdock (EU region).
          langdock-openai = {
            npm = "@ai-sdk/openai-compatible";
            name = "Langdock OpenAI";
            options = {
              baseURL = "https://api.langdock.com/openai/eu/v1";
              apiKey = config.sops.placeholder.langdock_api_key;
            };
            models = {
              "gpt-5.2" = { name = "GPT-5.2"; };
              "gpt-5-mini" = { name = "GPT-5 mini"; };
            };
          };
          # Anthropic models, likewise routed through the Langdock EU proxy
          # and authenticated with the same secret.
          anthropic = {
            options = {
              baseURL = "https://api.langdock.com/anthropic/eu/v1";
              apiKey = config.sops.placeholder.langdock_api_key;
            };
            models = {
              "claude-opus-4-6-default" = { name = "Opus 4.6"; };
            };
          };
          # Local Ollama instance exposing an OpenAI-compatible API; no key.
          ollama = {
            npm = "@ai-sdk/openai-compatible";
            name = "Ollama (Local)";
            options = {
              baseURL = "http://192.168.11.23:11434/v1";
            };
            models = {
              "qwen3-coder:30b" = { name = "Qwen 3 Coder 30B"; };
              "codellama:34b-instruct" = { name = "CodeLlama 34B Instruct"; };
            };
          };
        };
      };
    };

    # Symlink the rendered template into the XDG config dir where OpenCode
    # expects it. mkOutOfStoreSymlink is required because the template lives
    # outside the store (it contains the substituted secret).
    xdg.configFile."opencode/opencode.json".source =
      config.lib.file.mkOutOfStoreSymlink
        config.sops.templates."opencode/opencode.json".path;
  };
}