Progress dump before AI agent

This commit is contained in:
2026-04-04 04:57:47 -04:00
parent 58f7dd65f1
commit 13dbf18f67
18 changed files with 687 additions and 128 deletions

View File

@@ -20,11 +20,7 @@ in {
environment.etc."opencode/opencode.json".text = builtins.toJSON {
"$schema" = "https://opencode.ai/config.json";
"model" = "devstral-2-small-llama_cpp";
# MCP servers for web search and enhanced functionality
# context7: Remote HTTP server for up-to-date documentation and code examples
# duckduckgo: Local MCP server for web search capabilities
"model" = "nemotron-3-nano-llama_cpp";
"mcp" = {
"context7" = {
"type" = "remote";
@@ -46,6 +42,7 @@ in {
"options" = {
"baseURL" = "http://localhost:8300/v1";
"apiKey" = "not-needed";
"maxTokens" = 80000;
};
"models" = {
"devstral-2-small-llama_cpp" = {
@@ -53,6 +50,11 @@ in {
"tools" = true;
"reasoning" = false;
};
"nemotron-3-nano-llama_cpp" = {
"name" = "Nemotron 3 nano 30B Q8 (llama.cpp)";
"tools" = true;
"reasoning" = false;
};
};
};
"ollama" = {
@@ -76,6 +78,7 @@ in {
systemd.services.opencode-gsd-install = {
description = "Install Get Shit Done OpenCode Components";
after = [ "network-online.target" ];
wants = [ "network-online.target" ];
wantedBy = [ "multi-user.target" ];
path = with pkgs; [
nodejs
@@ -131,7 +134,6 @@ in {
environment = {
OLLAMA_BASE_URL = "http://127.0.0.1:11434";
# Important: GSD at ~/.config/opencode, so we ensure the server sees our /etc config
OPENCODE_CONFIG = "/etc/opencode/opencode.json";
HOME = "/home/gortium";
NODE_PATH = "${pkgs.nodejs}/lib/node_modules";