{ config, pkgs, lib, ... }:

let
  cfg = config.services.opencode;
in {
  options.services.opencode = {
    enable = lib.mkEnableOption "OpenCode AI Service";

    port = lib.mkOption {
      type = lib.types.port;
      default = 4099;
      description = "TCP port the OpenCode server listens on.";
    };

    ollamaUrl = lib.mkOption {
      type = lib.types.str;
      default = "http://127.0.0.1:11434/v1";
      description = "Base URL of the OpenAI-compatible Ollama endpoint.";
    };
  };
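
  # Example of wiring this module up from a host configuration (illustrative
  # sketch; the values shown are the defaults defined above):
  #
  #   services.opencode = {
  #     enable = true;
  #     port = 4099;
  #     ollamaUrl = "http://127.0.0.1:11434/v1";
  #   };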

  config = lib.mkIf cfg.enable {
    programs.nix-ld.enable = true;

    # Context7 MCP server for web search capabilities
    # Provides up-to-date, version-specific documentation and code examples from source
    environment.etc."opencode/opencode.json".text = builtins.toJSON {
      "$schema" = "https://opencode.ai/config.json";
      # opencode addresses models as "<provider>/<model>", so the provider
      # prefix is included here.
      "model" = "llamacpp/devstral-2-small-llama_cpp";

      # MCP servers for web search and enhanced functionality
      # context7: remote HTTP server for up-to-date documentation and code examples
      # duckduckgo: local MCP server for web search capabilities
      "mcp" = {
        "context7" = {
          "type" = "remote";
          "url" = "https://mcp.context7.com/mcp";
        };
        "duckduckgo" = {
          "type" = "local";
          "command" = [ "uvx" "duckduckgo-mcp-server" ];
          "environment" = {
            "PATH" = "/run/current-system/sw/bin:/home/gortium/.nix-profile/bin";
          };
        };
      };

      "provider" = {
        "llamacpp" = {
          "name" = "Llama.cpp (Local MI50)";
          "npm" = "@ai-sdk/openai-compatible";
          "options" = {
            "baseURL" = "http://localhost:8300/v1";
            "apiKey" = "not-needed";
          };
          "models" = {
            "devstral-2-small-llama_cpp" = {
              "name" = "Devstral 2 small 24B Q8 (llama.cpp)";
              "tools" = true;
              "reasoning" = false;
            };
          };
        };

        "ollama" = {
          "name" = "Ollama (Local)";
          "npm" = "@ai-sdk/openai-compatible";
          "options" = {
            "baseURL" = cfg.ollamaUrl;
            "headers" = { "Content-Type" = "application/json"; };
          };
          "models" = {
            "devstral-small-2:24b-128k" = {
              "name" = "Mistral Devstral Small 2 (Ollama)";
              "tools" = true;
              "reasoning" = false;
            };
          };
        };
      };
    };
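
    # For reference, builtins.toJSON renders the attribute set above roughly
    # like this (abridged sketch; the real output is single-line JSON with
    # keys in alphabetical order):
    #
    #   {
    #     "$schema": "https://opencode.ai/config.json",
    #     "mcp": {
    #       "context7": { "type": "remote", "url": "https://mcp.context7.com/mcp" },
    #       "duckduckgo": { "type": "local", "command": [ "uvx", "duckduckgo-mcp-server" ], ... }
    #     },
    #     "model": "llamacpp/devstral-2-small-llama_cpp",
    #     "provider": { "llamacpp": { ... }, "ollama": { ... } }
    #   }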

    # Documentation for web search integration
    #
    # Web search capabilities are provided through the following MCP servers:
    #
    # 1. Context7 MCP: remote HTTP server at https://mcp.context7.com/mcp
    #    - Provides up-to-date documentation and code examples from source
    #    - Useful for library-specific queries and documentation lookup
    #    - Example query: "How to implement authentication in Next.js"
    #
    # 2. DuckDuckGo MCP: local MCP server (uvx duckduckgo-mcp-server)
    #    - Provides web search capabilities through the DuckDuckGo API
    #    - Useful for current events, news, and general web information
    #    - Example query: "Latest news about AI technology"
    #
    # Limitations:
    # - Context7 requires a valid API key for production use (a free tier is available)
    # - DuckDuckGo search is subject to rate limits and terms of service
    # - Web search results may vary based on current availability of the services
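
    # Quick manual smoke test for the local MCP server (illustrative; local
    # MCP servers speak newline-delimited JSON-RPC over stdio, so an
    # initialize request should produce a JSON reply):
    #
    #   echo '{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"protocolVersion":"2024-11-05","capabilities":{},"clientInfo":{"name":"probe","version":"0"}}}' \
    #     | uvx duckduckgo-mcp-server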

    # This service runs the GSD installer directly from the source
    systemd.services.opencode-gsd-install = {
      description = "Install Get Shit Done OpenCode Components";
      after = [ "network-online.target" ];
      # "after" alone only orders the units; "wants" actually pulls the target in
      wants = [ "network-online.target" ];
      wantedBy = [ "multi-user.target" ];

      path = with pkgs; [
        nodejs
        git
        coreutils
        bash
      ];

      serviceConfig = {
        Type = "oneshot";
        User = "gortium";
        RemainAfterExit = true;
        Environment = [
          "HOME=/home/gortium"
          "SHELL=${pkgs.bash}/bin/bash"
          "PATH=${lib.makeBinPath [ pkgs.nodejs pkgs.git pkgs.bash pkgs.coreutils ]}"
        ];
      };

      script = ''
        # Check if the GSD directory exists
        if [ ! -d "/home/gortium/.config/opencode/gsd" ]; then
          echo "GSD not found. Installing..."
          ${pkgs.nodejs}/bin/npx -y github:dbachelder/get-shit-done-opencode --global --force
        else
          echo "GSD already installed. Skipping auto-reinstall."
          echo "To force an update, remove ~/.config/opencode/gsd and restart opencode-gsd-install.service"
        fi
      '';
    };
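
    # Typical ways to inspect or re-run the one-shot installer (illustrative):
    #
    #   systemctl status opencode-gsd-install.service
    #   journalctl -u opencode-gsd-install.service
    #   sudo rm -rf /home/gortium/.config/opencode/gsd   # clears the install marker
    #   sudo systemctl restart opencode-gsd-install.service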

    systemd.services.opencode = {
      description = "OpenCode AI Coding Agent Server";
      after = [ "network.target" "ai_stack.service" "opencode-gsd-install.service" ];
      requires = [ "ai_stack.service" "opencode-gsd-install.service" ];
      wantedBy = [ "multi-user.target" ];

      path = with pkgs; [
        bash
        coreutils
        nodejs
        git
        nix
        ripgrep
        fd
      ];

      serviceConfig = {
        Type = "simple";
        User = "gortium";
        WorkingDirectory = "/home/gortium/infra";
        ExecStart = "${pkgs.nodejs}/bin/npx -y opencode-ai serve --hostname 0.0.0.0 --port ${toString cfg.port}";
        Restart = "on-failure";
      };

      environment = {
        OLLAMA_BASE_URL = "http://127.0.0.1:11434";
        # Important: GSD installs into ~/.config/opencode, so point the server
        # explicitly at the /etc-managed config generated above.
        OPENCODE_CONFIG = "/etc/opencode/opencode.json";
        HOME = "/home/gortium";
        NODE_PATH = "${pkgs.nodejs}/lib/node_modules";
      };
    };
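
    # Once the unit is active, a basic reachability check (illustrative; the
    # HTTP routes exposed depend on the installed opencode-ai version):
    #
    #   systemctl status opencode.service
    #   curl -sS http://127.0.0.1:4099/   # default port; see services.opencode.port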

    # ExecStart binds 0.0.0.0 above, so opening cfg.port here exposes the
    # server to the whole LAN, not just localhost.
    networking.firewall.allowedTCPPorts = [ cfg.port ];
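
    # If LAN-wide exposure is undesirable, a per-interface rule is a tighter
    # alternative (sketch; "lan0" is a placeholder interface name):
    #
    #   networking.firewall.interfaces."lan0".allowedTCPPorts = [ cfg.port ];
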
  };
}