{ config, lib, ... }:
let
  cfg = config.dot.llm.opencode;
in
{
  options.dot.llm.opencode = {
    enableConfig = lib.mkOption {
      type = lib.types.bool;
      default = false;
      description = "Enable managed OpenCode configuration";
    };
  };

  config = lib.mkIf cfg.enableConfig {
    # Declare the sops secret (uses defaultSopsFile from secrets.nix).
    sops.secrets.langdock-api-key = { };

    # Render OpenCode's config.json through a sops template so the API key is
    # substituted at activation time and never written to the Nix store.
    sops.templates."opencode/opencode.json" = {
      content = builtins.toJSON {
        "$schema" = "https://opencode.ai/config.json";
        model = "anthropic/claude-sonnet-4-5-20250929";
        small_model = "anthropic/claude-haiku-4-5-20251001";
        provider = {
          langdock-openai = {
            npm = "@ai-sdk/openai-compatible";
            name = "Langdock OpenAI";
            options = {
              baseURL = "https://api.langdock.com/openai/eu/v1";
              apiKey = config.sops.placeholder.langdock-api-key;
            };
            models = {
              # Attribute keys are model IDs; `name` is the display label.
              "gpt-5.2" = { name = "GPT-5.2"; };
              "gpt-4.1" = { name = "GPT-4.1"; };
              "gpt-5-mini" = { name = "GPT-5 mini"; };
            };
          };
          anthropic = {
            # Anthropic requests are proxied through Langdock's EU endpoint.
            options = {
              baseURL = "https://api.langdock.com/anthropic/eu/v1";
              apiKey = config.sops.placeholder.langdock-api-key;
            };
            models = {
              # Fixed: key and `name` were swapped relative to every other
              # provider (the key must be the model ID, `name` the label).
              "opus-4.6-default" = { name = "Opus 4.6"; };
            };
          };
          ollama = {
            npm = "@ai-sdk/openai-compatible";
            name = "Ollama (Local)";
            options = {
              # Local Ollama instance on the LAN; no API key required.
              baseURL = "http://192.168.11.23:11434/v1";
            };
            models = {
              "qwen3-coder:30b" = { name = "Qwen 3 Coder 30B"; };
              "codellama:34b-instruct" = { name = "CodeLlama 34B Instruct"; };
            };
          };
        };
      };
    };

    # Symlink the rendered template to ~/.config/opencode/opencode.json,
    # the location OpenCode reads its configuration from.
    xdg.configFile."opencode/opencode.json".source =
      config.sops.templates."opencode/opencode.json".path;
  };
}