docs: initialize NixOS infrastructure with AI assistant

Creates PROJECT.md with vision and requirements.
Creates config.json with interactive workflow mode.
This commit is contained in:
2026-01-01 01:36:58 -05:00
parent 1210a44ecc
commit b54760f62b
8 changed files with 258 additions and 0 deletions

View File

@@ -0,0 +1,36 @@
{ config, lib, pkgs, ... }:

with lib;

{
  # Declarative docker-compose runner: every attribute declared under
  # services.myDockerStacks becomes a systemd unit named "<name>_stack"
  # that brings the stack up on boot and tears it down on stop.
  options.services.myDockerStacks = mkOption {
    type = types.attrsOf (types.submodule {
      options = {
        # Directory containing the stack's docker-compose.yml.
        path = mkOption { type = types.path; };
        # TCP ports to open in the firewall for this stack.
        # types.port (0-65535) instead of types.int catches typos at eval time.
        ports = mkOption { type = types.listOf types.port; default = []; };
      };
    });
    default = {};
    description = "Attribute set of docker-compose stacks to run.";
  };

  config = {
    # Generate one systemd service per declared stack.
    systemd.services = mapAttrs' (name: value: nameValuePair "${name}_stack" {
      description = "${name} via Docker Compose";
      after = [ "network-online.target" "docker.service" ];
      wants = [ "network-online.target" "docker.service" ];
      wantedBy = [ "multi-user.target" ];
      serviceConfig = {
        # BUG FIX: "docker-compose up -d" detaches and exits immediately.
        # Without Type=oneshot the default service type would treat the
        # exiting compose command as the main process; oneshot together with
        # RemainAfterExit=true keeps the unit "active" after start succeeds.
        Type = "oneshot";
        RemainAfterExit = true;
        WorkingDirectory = value.path;
        # Tear down any stale containers before (re)starting the stack.
        ExecStartPre = "${pkgs.docker-compose}/bin/docker-compose down";
        ExecStart = "${pkgs.docker-compose}/bin/docker-compose up -d";
        ExecStop = "${pkgs.docker-compose}/bin/docker-compose down";
      };
    }) config.services.myDockerStacks;

    # Automatically open the firewall ports declared by every stack.
    networking.firewall.allowedTCPPorts =
      flatten (mapAttrsToList (n: v: v.ports) config.services.myDockerStacks);
  };
}

View File

@@ -0,0 +1,37 @@
{ config, lib, pkgs, ... }:

with lib;

let
  cfg = config.services.systemd-fancon;

  # Render the user-supplied configuration text to a store path so the
  # daemon can be pointed at a stable file.
  configFile = pkgs.writeText "systemd-fancon.conf" cfg.config;
in
{
  options.services.systemd-fancon = {
    enable = mkEnableOption "systemd-fancon service for fan control";
    config = mkOption {
      type = types.lines;
      default = "";
      description = "Configuration for systemd-fancon.";
    };
  };

  config = mkIf cfg.enable {
    environment.systemPackages = with pkgs; [
      systemd-fancon
      lm_sensors
    ];

    # amdgpu exposes the hwmon interfaces the fan controller reads/writes.
    boot.kernelModules = [ "amdgpu" ];

    systemd.services.systemd-fancon = {
      description = "systemd-fancon service";
      wantedBy = [ "multi-user.target" ];
      after = [ "network-online.target" ];
      serviceConfig = {
        # BUG FIX: the original referenced cfg.configFile, an option that was
        # never declared, and placed a stray "configFile" attribute inside the
        # service definition (not a valid systemd unit option — module eval
        # fails). The rendered file now lives in the let-binding above.
        ExecStart = "${pkgs.systemd-fancon}/bin/systemd-fancon -c ${configFile}";
        Restart = "on-failure";
      };
    };
  };
}

View File

@@ -0,0 +1,31 @@
systemd.services.init-ollama-model = {
  description = "Initialize nemotron 3 with extra context in Ollama Docker";
  # Order after the ollama container AND pull it in: "after" alone only
  # orders the units, it does not start docker-ollama if nothing else does.
  after = [ "docker-ollama.service" ];
  wants = [ "docker-ollama.service" ];
  wantedBy = [ "multi-user.target" ];
  script = ''
    # Wait (bounded, ~2 minutes) for the Ollama API. The original looped
    # forever, which hung the unit indefinitely when the container never
    # became reachable; now we fail loudly instead.
    tries=0
    until ${pkgs.curl}/bin/curl -s http://localhost:11434/api/tags > /dev/null; do
      tries=$((tries + 1))
      if [ "$tries" -ge 60 ]; then
        echo "Ollama API did not become reachable in time" >&2
        exit 1
      fi
      sleep 2
    done

    # Create the 128k-context variant only if it is not already present in
    # the persistent volume (ollama state survives container restarts).
    if ! ${pkgs.docker}/bin/docker exec ollama ollama list | grep -q "nemotron-3-nano:30b-128k"; then
      echo "nemotron-3-nano:30b-128k not found, creating..."
      ${pkgs.docker}/bin/docker exec ollama sh -c 'cat <<EOF > /root/.ollama/nemotron-3-nano:30b-128k.modelfile
FROM nemotron-3-nano:30b
PARAMETER num_ctx 131072
PARAMETER num_predict 4096
PARAMETER repeat_penalty 1.1
EOF'
      ${pkgs.docker}/bin/docker exec ollama ollama create nemotron-3-nano:30b-128k -f /root/.ollama/nemotron-3-nano:30b-128k.modelfile
    else
      echo "nemotron-3-nano:30b-128k already exists, skipping creation."
    fi
  '';
  serviceConfig = {
    # One-time initialization job; stay "active" after success so it is not
    # re-run on every activation.
    Type = "oneshot";
    RemainAfterExit = true;
  };
};

View File

@@ -0,0 +1,67 @@
{ config, pkgs, lib, ... }:

let
  cfg = config.services.opencode;

  # OpenCode client configuration, rendered to /etc/opencode/opencode.json.
  # Points the agent at the local Ollama OpenAI-compatible endpoint.
  opencodeSettings = {
    "$schema" = "https://opencode.ai/config.json";
    model = "ollama/nemotron-3-nano:30b";
    provider.ollama = {
      name = "Ollama (Local)";
      npm = "@ai-sdk/openai-compatible";
      options.baseURL = cfg.ollamaUrl;
      # The exact model ID as seen in 'ollama list'
      models."nemotron-3-nano:30b".name = "NVIDIA Nemotron 3 Nano (30B)";
    };
  };
in
{
  options.services.opencode = {
    enable = lib.mkEnableOption "OpenCode AI Service";
    port = lib.mkOption {
      type = lib.types.port;
      default = 4099;
    };
    ollamaUrl = lib.mkOption {
      type = lib.types.str;
      default = "http://127.0.0.1:11434/v1";
    };
  };

  config = lib.mkIf cfg.enable {
    programs.nix-ld.enable = true;

    environment.etc."opencode/opencode.json".text =
      builtins.toJSON opencodeSettings;

    systemd.services.opencode = {
      description = "OpenCode AI Coding Agent Server";
      after = [ "network.target" "ai_stack.service" ];
      requires = [ "ai_stack.service" ];
      wantedBy = [ "multi-user.target" ];
      serviceConfig = {
        Type = "simple";
        User = "gortium";
        ExecStart = "${pkgs.nodejs}/bin/npx -y opencode-ai serve --hostname 0.0.0.0 --port ${toString cfg.port}";
        Restart = "on-failure";
        # Loads your ANTHROPIC_API_KEY etc from your single Agenix file
        # EnvironmentFile = config.age.secrets.opencode-secrets.path;
      };
      environment = {
        OLLAMA_BASE_URL = "http://127.0.0.1:11434";
        OPENCODE_CONFIG = "/etc/opencode/opencode.json";
        HOME = "/home/gortium";
      };
    };

    networking.firewall.allowedTCPPorts = [ cfg.port ];
  };
}