Major LLM rework

This commit is contained in:
2026-01-03 22:19:41 -05:00
parent 5101da4914
commit d0b66496a1
6 changed files with 78 additions and 61 deletions

View File

@@ -1,27 +1,33 @@
{
  # Ollama LLM server: listens on all interfaces and pre-pulls the model set
  # below on activation (syncModels keeps the local store matching this list).
  # NOTE(review): the rendered diff showed both the old nested form
  # (`services = { ollama = { ... }; };`) and the new flattened
  # `services.ollama = { ... };` with unbalanced braces; reconstructed here
  # as the flattened post-commit form.
  services.ollama = {
    user = "ollama";
    host = "0.0.0.0";
    enable = true;
    syncModels = true;
    loadModels = [
      "codellama:7b"
      "deepscaler:1.5b"
      "deepseek-r1:8b"
      "deepseek-r1:14b"
      "deepseek-r1:32b"
      "devstral-small-2:24b"
      "functiongemma:270m"
      "gemma3:12b"
      "gemma3:27b"
      "gpt-oss:20b"
      "llama3.1:8b"
      "llama3.2:1b"
      "llama3.2:3b"
      "llama2-uncensored:7b"
      "mistral-nemo:12b"
      "dolphin-mixtral:8x7b"
      "qwq:32b"
      "Qihoo360-Light-R1-32B"
      "magistral:24b"
      "ministral-3:14b"
      "nemotron-3-nano:30b"
      "qwen3-coder:30b"
      "qwen3-vl:32b"
      "qwen3:14b"
      "qwen3:30b"
    ];
    # Model store on the ZFS pool rather than the default state dir.
    models = "/zfs/models";
    openFirewall = true;
  };

  # open-webui left disabled here — presumably superseded by the standalone
  # open_webui.nix module added in this commit; verify before deleting.
  # services.open-webui = {
  #   enable = true;
  #   openFirewall = true;
  #   host = "0.0.0.0";
  # };
}

View File

@@ -2,11 +2,11 @@ let
vars = import ../vars.nix;
in
{
services = {
ollama = {
services.ollama = {
user = "ollama";
enable = true;
host = "0.0.0.0";
syncModels = true;
loadModels = [
"codellama:7b"
"deepscaler:1.5b"
@@ -34,7 +34,6 @@ in
models = vars.ollama;
openFirewall = true;
};
};
systemd.services = {
ollama.serviceConfig = {
Nice = 19;

View File

@@ -1,10 +1,7 @@
let
vars = import ../vars.nix;
in
{
services.open-webui = {
stateDir = "${vars.services}/open_webui/";
enable = true;
host = "0.0.0.0";
openFirewall = true;
environment = {
ANONYMIZED_TELEMETRY = "False";

View File

@@ -12,8 +12,9 @@
"${inputs.self}/common/optional/zerotier.nix"
./hardware.nix
./llms.nix
./syncthing.nix
./open_webui.nix
./qmk.nix
./syncthing.nix
inputs.nixos-hardware.nixosModules.framework-13-7040-amd
];

View File

@@ -3,8 +3,10 @@
user = "ollama";
enable = true;
host = "127.0.0.1";
syncModels = true;
loadModels = [
"codellama:7b"
"deepscaler:1.5b"
"deepseek-r1:14b"
"deepseek-r1:32b"
"deepseek-r1:8b"

View File

@@ -0,0 +1,12 @@
{
# Open WebUI front-end for a local Ollama instance.
services.open-webui = {
enable = true;
# Environment passed to the open-webui service process.
environment = {
# Opt out of all telemetry/analytics phoning home.
ANONYMIZED_TELEMETRY = "False";
DO_NOT_TRACK = "True";
SCARF_NO_ANALYTICS = "True";
# Point the UI at the Ollama API on localhost (default port 11434).
OLLAMA_API_BASE_URL = "http://127.0.0.1:11434";
# NOTE(review): login disabled — assumes access is restricted to a
# trusted network (LAN/VPN); confirm before exposing this host.
WEBUI_AUTH = "False";
};
};
}