Files
infra/modules/nixos/services/open_code_server.nix
Thierry Pouplier b59f8952ac feat(4-2): Test and document web search capabilities through MCP servers
- Started OpenCode service and verified it's running
- Tested Context7 web search functionality
- Tested DuckDuckGo web search functionality
- Documented web search integration in open_code_server.nix
- Updated ROADMAP and STATE with completion status
- Phase 4 complete, ready for Phase 5: TAK Server Integration
2026-01-01 14:30:42 -05:00

164 lines
5.4 KiB
Nix

{ config, pkgs, lib, ... }:
let
cfg = config.services.opencode;
in {
# Module options. FIX: both mkOptions previously lacked `description`,
# leaving them undocumented in the generated NixOS option docs.
options.services.opencode = {
  enable = lib.mkEnableOption "OpenCode AI Service";

  port = lib.mkOption {
    type = lib.types.port;
    default = 4099;
    description = "TCP port the OpenCode server listens on; also opened in the firewall when the service is enabled.";
  };

  ollamaUrl = lib.mkOption {
    type = lib.types.str;
    default = "http://127.0.0.1:11434/v1";
    description = "Base URL of the Ollama OpenAI-compatible API, consumed by the `ollama` provider entry in opencode.json.";
  };
};
config = lib.mkIf cfg.enable {
programs.nix-ld.enable = true;
# Context7 MCP server for web search capabilities
# Provides up-to-date, version-specific documentation and code examples from source
# Declaratively render /etc/opencode/opencode.json.
# Web-search MCP servers wired in here:
#   context7   - remote HTTP MCP (up-to-date docs and code examples)
#   duckduckgo - local MCP process launched via uvx (general web search)
environment.etc."opencode/opencode.json".text = builtins.toJSON {
  "$schema" = "https://opencode.ai/config.json";
  # NOTE(review): opencode configs elsewhere typically reference models as
  # "<provider>/<model>"; confirm this bare model id resolves as intended.
  "model" = "devstral-2-small-llama_cpp";
  "provider" = {
    "llamacpp" = {
      "name" = "Llama.cpp (Local MI50)";
      "npm" = "@ai-sdk/openai-compatible";
      "options" = {
        "baseURL" = "http://localhost:8300/v1";
        "apiKey" = "not-needed";
      };
      "models" = {
        "devstral-2-small-llama_cpp" = {
          "name" = "Devstral 2 small 24B Q8 (llama.cpp)";
          "tools" = true;
          "reasoning" = false;
        };
      };
    };
    "ollama" = {
      "name" = "Ollama (Local)";
      "npm" = "@ai-sdk/openai-compatible";
      "options" = {
        "baseURL" = cfg.ollamaUrl;
        "headers" = { "Content-Type" = "application/json"; };
      };
      "models" = {
        "devstral-small-2:24b-128k" = {
          "name" = "Mistral Devstral Small 2 (Ollama)";
          "tools" = true;
          "reasoning" = false;
        };
      };
    };
  };
  "mcp" = {
    "context7" = {
      "type" = "remote";
      "url" = "https://mcp.context7.com/mcp";
    };
    "duckduckgo" = {
      "type" = "local";
      "command" = [ "uvx" "duckduckgo-mcp-server" ];
      # uvx must see both the system profile and the user profile on PATH.
      "environment" = {
        "PATH" = "/run/current-system/sw/bin:/home/gortium/.nix-profile/bin";
      };
    };
  };
};
# Documentation for web search integration
# Web search capabilities are provided through the following MCP servers:
# 1. Context7 MCP: Remote HTTP server at https://mcp.context7.com/mcp
# - Provides up-to-date documentation and code examples from source
# - Useful for library-specific queries and documentation lookup
# - Example query: "How to implement authentication in Next.js"
#
# 2. DuckDuckGo MCP: Local MCP server (uvx duckduckgo-mcp-server)
# - Provides web search capabilities through DuckDuckGo API
# - Useful for current events, news, and general web information
# - Example query: "Latest news about AI technology"
#
# Limitations:
# - Context7 requires a valid API key for production use (free tier available)
# - DuckDuckGo search is subject to rate limits and terms of service
# - Web search results may vary based on current availability of services
# This service runs the GSD installer directly from the source
# One-shot installer for the "Get Shit Done" (GSD) OpenCode components.
# Fetches and runs the upstream installer via npx on first activation;
# later boots skip the install unless the unit is restarted manually.
systemd.services.opencode-gsd-install = {
  description = "Install Get Shit Done OpenCode Components";
  # FIX: `after` alone only ORDERS against network-online.target; without a
  # matching `wants`, the target is never pulled into the transaction and the
  # ordering is a no-op, so npx could run before the network is actually up.
  wants = [ "network-online.target" ];
  after = [ "network-online.target" ];
  wantedBy = [ "multi-user.target" ];
  path = with pkgs; [
    nodejs
    git
    coreutils
    bash
  ];
  serviceConfig = {
    Type = "oneshot";
    User = "gortium";
    # Stay "active" after the script exits so opencode.service's
    # Requires= dependency on this unit is satisfied.
    RemainAfterExit = true;
    Environment = [
      "HOME=/home/gortium"
      "SHELL=${pkgs.bash}/bin/bash"
      "PATH=${lib.makeBinPath [ pkgs.nodejs pkgs.git pkgs.bash pkgs.coreutils ]}"
    ];
  };
  script = ''
    # Check if the GSD directory exists
    if [ ! -d "/home/gortium/.config/opencode/gsd" ]; then
      echo "GSD not found. Installing..."
      ${pkgs.nodejs}/bin/npx -y github:dbachelder/get-shit-done-opencode --global --force
    else
      echo "GSD already installed. Skipping auto-reinstall."
      echo "To force update, run: sudo systemctl restart opencode-gsd-install.service"
    fi
  '';
};
# Long-running OpenCode server, started via npx inside the checked-out infra
# repo. Hard dependencies: the AI stack (model backend) and the one-shot GSD
# installer must both be active before this unit starts.
systemd.services.opencode = {
  description = "OpenCode AI Coding Agent Server";
  wantedBy = [ "multi-user.target" ];
  requires = [ "ai_stack.service" "opencode-gsd-install.service" ];
  after = [ "network.target" "ai_stack.service" "opencode-gsd-install.service" ];
  # Tools the agent shells out to at runtime.
  path = with pkgs; [ bash coreutils nodejs git nix ripgrep fd ];
  environment = {
    OLLAMA_BASE_URL = "http://127.0.0.1:11434";
    # GSD installs under ~/.config/opencode; point the server at the
    # declarative config rendered into /etc instead.
    OPENCODE_CONFIG = "/etc/opencode/opencode.json";
    HOME = "/home/gortium";
    NODE_PATH = "${pkgs.nodejs}/lib/node_modules";
  };
  serviceConfig = {
    Type = "simple";
    User = "gortium";
    WorkingDirectory = "/home/gortium/infra";
    ExecStart = "${pkgs.nodejs}/bin/npx -y opencode-ai serve --hostname 0.0.0.0 --port ${toString cfg.port}";
    Restart = "on-failure";
  };
};
networking.firewall.allowedTCPPorts = [ cfg.port ];
};
}