# NixOS module: services.opencode — OpenCode AI coding-agent server.
{ config, pkgs, lib, ... }:

# OpenCode AI coding-agent server as a NixOS service.
# Wires OpenCode to a local llama.cpp backend (primary) and Ollama
# (secondary), registers two MCP servers (context7, duckduckgo), and
# installs the "Get Shit Done" OpenCode components on first activation.

let
  cfg = config.services.opencode;
in {
  options.services.opencode = {
    enable = lib.mkEnableOption "OpenCode AI Service";

    port = lib.mkOption {
      type = lib.types.port;
      default = 4099;
      description = "TCP port the OpenCode server listens on (also opened in the firewall).";
    };

    ollamaUrl = lib.mkOption {
      type = lib.types.str;
      default = "http://127.0.0.1:11434/v1";
      description = "OpenAI-compatible (/v1) base URL of the local Ollama instance.";
    };

    # New, backward-compatible: previously the user was hard-coded to
    # "gortium"; that remains the default.
    user = lib.mkOption {
      type = lib.types.str;
      default = "gortium";
      description = "Account both OpenCode units run as; /home/<user> must exist.";
    };
  };

  config = lib.mkIf cfg.enable (
    let
      # Home of the service user; GSD installs under <home>/.config/opencode.
      home = "/home/${cfg.user}";
    in
    {
      # npx-fetched binaries are dynamically linked against FHS paths;
      # nix-ld lets them run on NixOS.
      programs.nix-ld.enable = true;

      # System-wide OpenCode configuration; the server is pointed at it via
      # OPENCODE_CONFIG below so a per-user config cannot shadow it.
      environment.etc."opencode/opencode.json".text = builtins.toJSON {
        "$schema" = "https://opencode.ai/config.json";
        "model" = "devstral-2-small-llama_cpp";

        # MCP servers for web search and enhanced functionality.
        # context7:   remote HTTP server for up-to-date docs and code examples.
        # duckduckgo: local MCP server for web search capabilities.
        "mcp" = {
          "context7" = {
            "type" = "remote";
            "url" = "https://mcp.context7.com/mcp";
          };
          "duckduckgo" = {
            "type" = "local";
            "command" = [ "uvx" "duckduckgo-mcp-server" ];
            "environment" = {
              # uvx needs both the system profile and the user profile on PATH.
              "PATH" = "/run/current-system/sw/bin:${home}/.nix-profile/bin";
            };
          };
        };

        "provider" = {
          # Primary backend: llama.cpp exposing an OpenAI-compatible API.
          "llamacpp" = {
            "name" = "Llama.cpp (Local MI50)";
            "npm" = "@ai-sdk/openai-compatible";
            "options" = {
              "baseURL" = "http://localhost:8300/v1";
              # llama.cpp ignores the key, but the SDK requires one to be set.
              "apiKey" = "not-needed";
            };
            "models" = {
              "devstral-2-small-llama_cpp" = {
                "name" = "Devstral 2 small 24B Q8 (llama.cpp)";
                "tools" = true;
                "reasoning" = false;
              };
            };
          };
          # Secondary backend: Ollama at cfg.ollamaUrl.
          "ollama" = {
            "name" = "Ollama (Local)";
            "npm" = "@ai-sdk/openai-compatible";
            "options" = {
              "baseURL" = cfg.ollamaUrl;
              "headers" = { "Content-Type" = "application/json"; };
            };
            "models" = {
              "devstral-small-2:24b-128k" = {
                "name" = "Mistral Devstral Small 2 (Ollama)";
                "tools" = true;
                "reasoning" = false;
              };
            };
          };
        };
      };

      # One-shot installer for the "Get Shit Done" OpenCode components.
      systemd.services.opencode-gsd-install = {
        description = "Install Get Shit Done OpenCode Components";
        # FIX: `after` alone only orders against network-online.target if
        # something pulls it into the transaction; `wants` is required for
        # the "wait for network" intent to actually take effect.
        wants = [ "network-online.target" ];
        after = [ "network-online.target" ];
        wantedBy = [ "multi-user.target" ];

        path = with pkgs; [
          nodejs
          git
          coreutils
          bash
        ];

        serviceConfig = {
          Type = "oneshot";
          User = cfg.user;
          # Keep the unit "active" after success so the opencode service's
          # `requires` dependency on it is satisfied.
          RemainAfterExit = true;
          Environment = [
            "HOME=${home}"
            "SHELL=${pkgs.bash}/bin/bash"
            "PATH=${lib.makeBinPath [ pkgs.nodejs pkgs.git pkgs.bash pkgs.coreutils ]}"
          ];
        };

        script = ''
          # Install only once; restarting this unit re-runs the check.
          if [ ! -d "${home}/.config/opencode/gsd" ]; then
            echo "GSD not found. Installing..."
            ${pkgs.nodejs}/bin/npx -y github:dbachelder/get-shit-done-opencode --global --force
          else
            echo "GSD already installed. Skipping auto-reinstall."
            echo "To force update, run: sudo systemctl restart opencode-gsd-install.service"
          fi
        '';
      };

      # The OpenCode server itself.
      systemd.services.opencode = {
        description = "OpenCode AI Coding Agent Server";
        after = [ "network.target" "ai_stack.service" "opencode-gsd-install.service" ];
        requires = [ "ai_stack.service" "opencode-gsd-install.service" ];
        wantedBy = [ "multi-user.target" ];

        # Tools the agent shells out to at runtime.
        path = with pkgs; [
          bash
          coreutils
          nodejs
          git
          nix
          ripgrep
          fd
        ];

        serviceConfig = {
          Type = "simple";
          User = cfg.user;
          WorkingDirectory = "${home}/infra";
          ExecStart = "${pkgs.nodejs}/bin/npx -y opencode-ai serve --hostname 0.0.0.0 --port ${toString cfg.port}";
          Restart = "on-failure";
        };

        environment = {
          OLLAMA_BASE_URL = "http://127.0.0.1:11434";
          # GSD lives in ~/.config/opencode; ensure the server reads the
          # system-wide /etc config rather than a per-user one.
          OPENCODE_CONFIG = "/etc/opencode/opencode.json";
          HOME = home;
          NODE_PATH = "${pkgs.nodejs}/lib/node_modules";
        };
      };

      # Expose the server on the LAN (it binds 0.0.0.0 above).
      networking.firewall.allowedTCPPorts = [ cfg.port ];
    }
  );
}