Progress dump before AI agent

This commit is contained in:
2026-04-04 04:57:47 -04:00
parent 58f7dd65f1
commit 13dbf18f67
18 changed files with 687 additions and 128 deletions

View File

@@ -1,7 +0,0 @@
{ pkgs, lib, config, ... }: {
  # Root aggregation module: delegates all configuration to the nixos tree.
  imports = [
    # ./home  # intentionally disabled; home module not wired in here
    ./nixos
  ];
}

View File

@@ -1,6 +0,0 @@
{ pkgs, lib, config, ... }: {
  # Bundle index: currently only the graphical-desktop bundle is enabled.
  imports = [ ./graphical-desktop.nix ];
}

View File

@@ -1,9 +0,0 @@
{ pkgs, lib, config, ... }: {
  # nixos tree index: aggregates the bundle, service, and filesystem modules.
  imports = [
    ./bundles
    # ./programs  # intentionally disabled in this tree
    ./services
    ./filesystem
  ];
}

View File

@@ -1,6 +0,0 @@
{ pkgs, lib, config, ... }: {
  # Filesystem index: currently only the hoardingcow mount definition.
  imports = [ ./hoardingcow-mount.nix ];
}

View File

@@ -1,6 +0,0 @@
{ pkgs, lib, config, ... }: {
  # Services index: delegates to the systemd service definitions.
  imports = [ ./systemd ];
}

View File

@@ -9,9 +9,15 @@ with lib;
path = mkOption { type = types.str; };
envFile = mkOption { type = types.nullOr types.path; default = null; };
ports = mkOption { type = types.listOf types.int; default = [ ]; };
# New option to pass raw systemd serviceConfig
serviceConfig = mkOption {
type = types.attrs;
default = { };
description = "Extra systemd serviceConfig options for this stack.";
};
};
});
default = {};
default = { };
};
config = {
@@ -23,28 +29,24 @@ with lib;
systemd.services = mapAttrs' (name: value: nameValuePair "${name}_stack" {
description = "Docker Compose stack: ${name}";
# Added 'docker.socket' to both after and wants to ensure the API is reachable
after = [ "network.target" "docker.service" "docker.socket" "agenix.service" ];
wants = [ "docker.socket" "agenix.service" ];
requires = [ "docker.service" ];
wantedBy = [ "multi-user.target" ];
serviceConfig = {
path = with pkgs; [ git docker docker-compose bash ];
# We merge the base config with the custom 'serviceConfig' from the submodule
serviceConfig = recursiveUpdate {
Type = "oneshot";
WorkingDirectory = value.path;
User = "root";
# This line forces the service to wait until the docker socket is actually responsive
ExecStartPre = "${pkgs.bash}/bin/bash -c 'while [ ! -S /var/run/docker.sock ]; do sleep 1; done'";
ExecStart = "${pkgs.docker-compose}/bin/docker-compose up -d --remove-orphans";
ExecStop = "${pkgs.docker-compose}/bin/docker-compose down";
RemainAfterExit = true;
# Ensure the environment file is passed correctly
EnvironmentFile = mkIf (value.envFile != null) [ value.envFile ];
};
} value.serviceConfig;
}) config.services.dockerStacks;
};
}

View File

@@ -20,11 +20,7 @@ in {
environment.etc."opencode/opencode.json".text = builtins.toJSON {
"$schema" = "https://opencode.ai/config.json";
"model" = "devstral-2-small-llama_cpp";
# MCP servers for web search and enhanced functionality
# context7: Remote HTTP server for up-to-date documentation and code examples
# duckduckgo: Local MCP server for web search capabilities
"model" = "nemotron-3-nano-llama_cpp";
"mcp" = {
"context7" = {
"type" = "remote";
@@ -46,6 +42,7 @@ in {
"options" = {
"baseURL" = "http://localhost:8300/v1";
"apiKey" = "not-needed";
"maxTokens" = 80000;
};
"models" = {
"devstral-2-small-llama_cpp" = {
@@ -53,6 +50,11 @@ in {
"tools" = true;
"reasoning" = false;
};
"nemotron-3-nano-llama_cpp" = {
"name" = "Nemotron 3 nano 30B Q8 (llama.cpp)";
"tools" = true;
"reasoning" = false;
};
};
};
"ollama" = {
@@ -76,6 +78,7 @@ in {
systemd.services.opencode-gsd-install = {
description = "Install Get Shit Done OpenCode Components";
after = [ "network-online.target" ];
wants = [ "network-online.target" ];
wantedBy = [ "multi-user.target" ];
path = with pkgs; [
nodejs
@@ -131,7 +134,6 @@ in {
environment = {
OLLAMA_BASE_URL = "http://127.0.0.1:11434";
# Important: GSD at ~/.config/opencode, so we ensure the server sees our /etc config
OPENCODE_CONFIG = "/etc/opencode/opencode.json";
HOME = "/home/gortium";
NODE_PATH = "${pkgs.nodejs}/lib/node_modules";