Configurations for my servers and desktops
nix nixos flake dots dotfiles

feat: testing ollama stuff

+36 -11
+2 -9
flake.nix
··· 51 51 ]; 52 52 }; 53 53 54 - bluepill = nixpkgs-stable.lib.nixosSystem { 54 + bluepill = nixpkgs.lib.nixosSystem { 55 55 modules = [ 56 56 ./modules/common 57 57 ./modules/nixos 58 58 ./hosts/bluepill 59 59 agenix.nixosModules.default 60 - home-manager-stable.nixosModules.home-manager 61 - { 62 - nixpkgs.overlays = [ 63 - (final: prev: { 64 - tailscale = nixpkgs.legacyPackages.${prev.stdenv.hostPlatform.system}.tailscale; 65 - }) 66 - ]; 67 - } 60 + home-manager.nixosModules.home-manager 68 61 ]; 69 62 }; 70 63
+1
hosts/bluepill/default.nix
··· 6 6 ../../modules/nixos/services/mikusmp67.nix 7 7 ../../modules/nixos/services/navidrome.nix 8 8 ../../modules/nixos/services/newbeginnings7.nix 9 + ../../modules/nixos/services/ollama.nix 9 10 ../../modules/nixos/services/restic.nix 10 11 ../../modules/nixos/services/samba.nix 11 12 ../../modules/nixos/services/slskd.nix
+6 -1
hosts/bluepill/hardware.nix
··· 35 35 ]; 36 36 }; 37 37 38 - swapDevices = []; 38 + swapDevices = [ 39 + { 40 + device = "/var/lib/swapfile"; 41 + size = 32 * 1024; 42 + } 43 + ]; 39 44 40 45 nixpkgs.hostPlatform = lib.mkDefault "x86_64-linux"; 41 46 hardware.cpu.amd.updateMicrocode = lib.mkDefault config.hardware.enableRedistributableFirmware;
+1 -1
modules/common/default.nix
··· 13 13 nixpkgs.config.allowUnfree = true; 14 14 15 15 programs.fish.enable = true; 16 - environment.shells = [ pkgs.fish ]; 16 + environment.shells = [pkgs.fish]; 17 17 }
+26
modules/nixos/services/ollama.nix
··· 1 + {pkgs, ...}: { 2 + services.ollama = { 3 + enable = true; 4 + loadModels = ["deepseek-r1:latest" "deepseek-v2:latest" "qwen3-coder:latest" "qwen3-vl:latest" "qwen3:latest" "gemma3:latest" "nemotron-3-nano:latest" "functiongemma:latest" "olmo-3:latest" "gtp-oss:latest" "llama3:latest" "devstral-small-2:latest" "mistral:latest" "mistral-nemo:latest"]; 5 + syncModels = true; 6 + openFirewall = true; 7 + }; 8 + 9 + virtualisation = { 10 + docker.enable = true; 11 + oci-containers = { 12 + backend = "docker"; 13 + containers.open-webui = { 14 + image = "ghcr.io/open-webui/open-webui:main"; 15 + extraOptions = ["--network=host"]; 16 + environment = { 17 + OLLAMA_BASE_URL = "http://127.0.0.1:11434"; 18 + }; 19 + volumes = [ 20 + "/var/lib/open-webui:/app/backend/data" 21 + ]; 22 + autoStart = true; 23 + }; 24 + }; 25 + }; 26 + }