docs: initialize NixOS infrastructure with AI assistant
Creates PROJECT.md with vision and requirements. Creates config.json with interactive workflow mode.
modules/nixos/services/open_code_server.nix | 67 lines | New file
@@ -0,0 +1,67 @@
{ config, pkgs, lib, ... }:

let
  cfg = config.services.opencode;
in {
  options.services.opencode = {
    enable = lib.mkEnableOption "OpenCode AI Service";
    port = lib.mkOption {
      type = lib.types.port;
      default = 4099;
    };
    ollamaUrl = lib.mkOption {
      type = lib.types.str;
      default = "http://127.0.0.1:11434/v1";
    };
  };

  config = lib.mkIf cfg.enable {
    programs.nix-ld.enable = true;

    environment.etc."opencode/opencode.json".text = builtins.toJSON {
      "$schema" = "https://opencode.ai/config.json";

      "model" = "ollama/nemotron-3-nano:30b";

      "provider" = {
        "ollama" = {
          "name" = "Ollama (Local)";
          "npm" = "@ai-sdk/openai-compatible";
          "options" = {
            "baseURL" = cfg.ollamaUrl;
          };
          "models" = {
            # The exact model ID as seen in 'ollama list'
            "nemotron-3-nano:30b" = {
              "name" = "NVIDIA Nemotron 3 Nano (30B)";
            };
          };
        };
      };
    };

    systemd.services.opencode = {
      description = "OpenCode AI Coding Agent Server";
      after = [ "network.target" "ai_stack.service" ];
      requires = [ "ai_stack.service" ];
      wantedBy = [ "multi-user.target" ];

      serviceConfig = {
        Type = "simple";
        User = "gortium";
        ExecStart = "${pkgs.nodejs}/bin/npx -y opencode-ai serve --hostname 0.0.0.0 --port ${toString cfg.port}";
        Restart = "on-failure";
        # Loads your ANTHROPIC_API_KEY etc from your single Agenix file
        # EnvironmentFile = config.age.secrets.opencode-secrets.path;
      };

      environment = {
        OLLAMA_BASE_URL = "http://127.0.0.1:11434";
        OPENCODE_CONFIG = "/etc/opencode/opencode.json";
        HOME = "/home/gortium";
      };
    };

    networking.firewall.allowedTCPPorts = [ cfg.port ];
  };
}
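
For reference, a minimal sketch of how a host configuration could consume this module. The relative import path and the explicit option values are assumptions for illustration; both options already default to the values shown.

  # hosts/example/configuration.nix (hypothetical consumer of the module above)
  { ... }:
  {
    imports = [ ../../modules/nixos/services/open_code_server.nix ];

    services.opencode = {
      enable = true;
      port = 4099;                              # module default, shown only for visibility
      ollamaUrl = "http://127.0.0.1:11434/v1";  # Ollama's OpenAI-compatible endpoint
    };
  }

Because the module opens cfg.port in the firewall, the server should be reachable on the LAN once the service and its ai_stack.service dependency have started.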
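The commented-out EnvironmentFile line refers to an Agenix secret named opencode-secrets. A hedged sketch of the matching declaration, assuming agenix is already imported into the configuration and with the .age file path chosen purely for illustration:

  # Hypothetical Agenix wiring; secrets/opencode-secrets.age is an assumed path.
  age.secrets.opencode-secrets = {
    file = ../../secrets/opencode-secrets.age;  # age-encrypted env file, e.g. ANTHROPIC_API_KEY=...
    owner = "gortium";                          # matches the systemd service's User
  };

Uncommenting the EnvironmentFile line would then let systemd load those variables into the opencode service at start-up.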