mirror of
https://github.com/RichieCahill/dotfiles.git
synced 2026-04-21 06:39:09 -04:00
Compare commits
179 Commits
feature/re
...
feature/ad
| Author | SHA1 | Date | |
|---|---|---|---|
| 59cfc0d02f | |||
| 3914a1a7ab | |||
| b5ac770003 | |||
| 12e398514b | |||
| 69f9ef8187 | |||
| 1b171fcd3e | |||
| 16d938dc59 | |||
| 7ffb7b4a37 | |||
| c7fe44755f | |||
| bb9200860e | |||
| eb04f4a56d | |||
| b91f7c34e1 | |||
| 5b8e543226 | |||
| da48f62195 | |||
| 60f2ab1039 | |||
| c1de454005 | |||
| 391e37b746 | |||
|
|
27565173d4 | ||
| 0c0ed92cb4 | |||
|
|
cc9996d6fa | ||
| 102f36eb1b | |||
| 9ec988729b | |||
| 4e3c25afb4 | |||
| 0d482aca4b | |||
|
|
c624781d84 | ||
| f4996b71e4 | |||
| 58a29214d3 | |||
| c4171b56b5 | |||
| d6d48516ea | |||
| ae882ba578 | |||
| 100b8145e8 | |||
| e99cd8e54a | |||
| de9348432c | |||
| b1fa596f37 | |||
| 908bccb8dc | |||
| b8cc9c5772 | |||
| d62076a900 | |||
| 0c9bd40659 | |||
| f713b8d4fa | |||
| ddba7d1068 | |||
| 41aad90140 | |||
| 76cd6e1188 | |||
| 20ef02b0cc | |||
| c0e9f3f937 | |||
| 9e0a2810f5 | |||
| 5c488422a1 | |||
| 9d43704b64 | |||
| d5bc6e9c6e | |||
| 17cebe1a82 | |||
| f02a866b19 | |||
| 65c2bed046 | |||
| 26cf123357 | |||
| a7c0a58c9a | |||
| e89fb9fae1 | |||
| 44feda70c1 | |||
| 1bfdbfd785 | |||
| 6a09bc66b6 | |||
| 65fca5c8a4 | |||
| a6e2334999 | |||
| c5981e0e6c | |||
| 825672a450 | |||
| d2db0de371 | |||
| 8142582e4a | |||
| 3038e1c704 | |||
| 18de5bc12c | |||
| 4fa2141461 | |||
| 626bd70d67 | |||
| 8ed7eda020 | |||
| e9ae9478bf | |||
| c1ce7e0ac4 | |||
| d040b06869 | |||
| 04c41c6ac0 | |||
| 298adcce87 | |||
| ef25153c84 | |||
| 9416bbd00b | |||
| b8200af6d5 | |||
| afb62b97d1 | |||
| cf75f3d75a | |||
| 0f8a594545 | |||
| db37eb2f9e | |||
| 534d9110e2 | |||
| 86a1cac42c | |||
| d3452dfab5 | |||
| acab92ac9c | |||
|
|
f22a9d107a | ||
| e5e0f883b0 | |||
| 04bf6f2038 | |||
| d35ba60c69 | |||
| 1e85635e89 | |||
| 6423192ee7 | |||
|
|
a33aba3afc | ||
|
|
d4d481e4b2 | ||
| f092348736 | |||
| e6c3ae0bee | |||
| d1f4f21521 | |||
| 00a5536208 | |||
| b554325b13 | |||
| 357168695c | |||
| fc31447591 | |||
| eea620aa2f | |||
| 82d463bfd6 | |||
| ca8c9925ad | |||
| 45a31e6b4d | |||
| c4be520190 | |||
| 28cd7f3f6f | |||
|
|
879885dc3b | ||
| 5e03efa1e8 | |||
| 2ed08501fe | |||
| c36624cc55 | |||
| f074344ac8 | |||
|
|
b773eb2db7 | ||
|
|
6efcc9add1 | ||
| bff561946f | |||
| 1ec05d13da | |||
| 4686a85bcd | |||
| 3434b32fbe | |||
| 98d9efcde3 | |||
|
|
4286f39177 | ||
| 0fe439ceaf | |||
| 7a3c2026b3 | |||
| 0c9ce78c20 | |||
| c10a76babc | |||
| 113ca9c99a | |||
| 29f51bf116 | |||
| 9f3a2b2a4b | |||
|
|
8a4d021541 | ||
| 5225bf1732 | |||
| 7a5dea1c36 | |||
| 0956ea6f58 | |||
| 5d643e0cd1 | |||
| ed3805a89e | |||
| ce8de13734 | |||
| 68b7a2f80d | |||
| 62c3aa69fe | |||
| 470be2b761 | |||
| e1c5bd0f84 | |||
| 786b275f7c | |||
| 324721ff8d | |||
| 7137435703 | |||
| a7b336a7de | |||
| 529b03525b | |||
| d0364bdaad | |||
| 45ddf8bc54 | |||
|
|
6913c7046e | ||
| c4d2b0b8d4 | |||
| 27e939459b | |||
| 501a838be9 | |||
| 2325249687 | |||
|
|
45951f6525 | ||
| fd56fa66f0 | |||
| 1314298c0b | |||
| f0eee80c2d | |||
| c20b6d1da2 | |||
| 34a59f966d | |||
| a677046330 | |||
| 111afa1c6b | |||
| c5f2805e74 | |||
| 9a61d06f08 | |||
| 559ca7a45e | |||
| 03b636eb3a | |||
| a7f5d3c71d | |||
|
|
3bbf8dc7a6 | ||
|
|
1cd4084ec8 | ||
|
|
e65b4b696a | ||
|
|
20a4a8c2fc | ||
| f4348c2ab5 | |||
| 6eab8497ba | |||
|
|
da5cdb8f05 | ||
| 8aa9eea322 | |||
| febe7c7e53 | |||
| b1ca58b2f4 | |||
|
|
7ad4ccd5ca | ||
| c936501afb | |||
| 239d7833f6 | |||
| 8fb6ae41b9 | |||
| 80e0b03463 | |||
| 747e2700ed | |||
| 472f11e5b6 | |||
| d75493997e |
9
.agent/workflows/format_code.md
Normal file
9
.agent/workflows/format_code.md
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
---
|
||||||
|
description: Format code using treefmt
|
||||||
|
---
|
||||||
|
|
||||||
|
// turbo
|
||||||
|
1. Run treefmt
|
||||||
|
```bash
|
||||||
|
treefmt
|
||||||
|
```
|
||||||
4
.github/workflows/build_systems.yml
vendored
4
.github/workflows/build_systems.yml
vendored
@@ -15,12 +15,14 @@ jobs:
|
|||||||
matrix:
|
matrix:
|
||||||
system:
|
system:
|
||||||
- "bob"
|
- "bob"
|
||||||
|
- "brain"
|
||||||
- "jeeves"
|
- "jeeves"
|
||||||
|
- "leviathan"
|
||||||
- "rhapsody-in-green"
|
- "rhapsody-in-green"
|
||||||
|
continue-on-error: true
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: Build default package
|
- name: Build default package
|
||||||
run: "nixos-rebuild build --flake ./#${{ matrix.system }}"
|
run: "nixos-rebuild build --flake ./#${{ matrix.system }}"
|
||||||
- name: copy to nix-cache
|
- name: copy to nix-cache
|
||||||
run: nix copy --to ssh://jeeves .#nixosConfigurations.${{ matrix.system }}.config.system.build.toplevel
|
run: nix copy --to ssh://jeeves .#nixosConfigurations.${{ matrix.system }}.config.system.build.toplevel
|
||||||
|
|
||||||
|
|||||||
48
.github/workflows/fix_eval_warnings.yml
vendored
Normal file
48
.github/workflows/fix_eval_warnings.yml
vendored
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
name: Fix Evaluation Warnings
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_run:
|
||||||
|
workflows: ["build_systems"]
|
||||||
|
types:
|
||||||
|
- completed
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pull-requests: write
|
||||||
|
actions: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
analyze-and-fix:
|
||||||
|
runs-on: self-hosted
|
||||||
|
if: ${{ github.event.workflow_run.conclusion == 'success' || github.event.workflow_run.conclusion == 'failure' }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download logs
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ github.token }}
|
||||||
|
RUN_ID: ${{ github.event.workflow_run.id }}
|
||||||
|
run: |
|
||||||
|
gh run view $RUN_ID --log > build.log
|
||||||
|
|
||||||
|
- name: Run Fix Script
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ github.token }}
|
||||||
|
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||||
|
RUN_ID: ${{ github.event.workflow_run.id }}
|
||||||
|
PYTHONPATH: .
|
||||||
|
run: |
|
||||||
|
python3 python/tools/fix_eval_warnings.py build.log
|
||||||
|
|
||||||
|
- name: Create Pull Request
|
||||||
|
if: hashFiles('fix_suggestions.md') != ''
|
||||||
|
uses: peter-evans/create-pull-request@v6
|
||||||
|
with:
|
||||||
|
token: ${{ github.token }}
|
||||||
|
commit-message: "fix: automated evaluation warning fixes"
|
||||||
|
title: "fix: automated evaluation warning fixes"
|
||||||
|
body-path: fix_suggestions.md
|
||||||
|
branch: "auto-fix-eval-warnings-${{ github.event.workflow_run.id }}"
|
||||||
|
base: main
|
||||||
|
labels: "automated-fix"
|
||||||
29
.github/workflows/merge_flake_lock_update.yml
vendored
Normal file
29
.github/workflows/merge_flake_lock_update.yml
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
name: merge_flake_lock_update
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
schedule:
|
||||||
|
- cron: "0 2 * * 6"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
merge:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pull-requests: write
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: merge_flake_lock_update
|
||||||
|
run: |
|
||||||
|
pr_number=$(gh pr list --state open --author RichieCahill --label flake_lock_update --json number --jq '.[0].number')
|
||||||
|
echo "pr_number=$pr_number" >> $GITHUB_ENV
|
||||||
|
if [ -n "$pr_number" ]; then
|
||||||
|
gh pr merge "$pr_number" --rebase
|
||||||
|
else
|
||||||
|
echo "No open PR found with label flake_lock_update"
|
||||||
|
fi
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GH_TOKEN_FOR_UPDATES }}
|
||||||
19
.github/workflows/pytest.yml
vendored
Normal file
19
.github/workflows/pytest.yml
vendored
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
name: pytest
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
merge_group:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
pytest:
|
||||||
|
runs-on: self-hosted
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: Run tests
|
||||||
|
run: nix develop .#devShells.x86_64-linux.default -c pytest tests
|
||||||
3
.github/workflows/update-flake-lock.yml
vendored
3
.github/workflows/update-flake-lock.yml
vendored
@@ -2,7 +2,7 @@ name: update-flake-lock
|
|||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
schedule:
|
schedule:
|
||||||
- cron: "0 0 * * *"
|
- cron: "0 0 * * 6"
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
lockfile:
|
lockfile:
|
||||||
@@ -20,3 +20,4 @@ jobs:
|
|||||||
pr-labels: |
|
pr-labels: |
|
||||||
dependencies
|
dependencies
|
||||||
automated
|
automated
|
||||||
|
flake_lock_update
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -165,3 +165,4 @@ test.*
|
|||||||
|
|
||||||
# syncthing
|
# syncthing
|
||||||
.stfolder
|
.stfolder
|
||||||
|
fix_suggestions.md
|
||||||
|
|||||||
@@ -1,9 +1,13 @@
|
|||||||
|
# Generate AGE keys from SSH keys with:
|
||||||
|
# ssh-keygen -A
|
||||||
|
# nix-shell -p ssh-to-age --run 'cat /etc/ssh/ssh_host_ed25519_key.pub | ssh-to-age'
|
||||||
keys:
|
keys:
|
||||||
- &admin_richie age1u8zj599elqqvcmhxn8zuwrufsz8w8w366d3ayrljjejljt2q45kq8mxw9c # cspell:disable-line
|
- &admin_richie age1u8zj599elqqvcmhxn8zuwrufsz8w8w366d3ayrljjejljt2q45kq8mxw9c # cspell:disable-line
|
||||||
|
|
||||||
- &system_bob age1q47vup0tjhulkg7d6xwmdsgrw64h4ax3la3evzqpxyy4adsmk9fs56qz3y # cspell:disable-line
|
- &system_bob age1q47vup0tjhulkg7d6xwmdsgrw64h4ax3la3evzqpxyy4adsmk9fs56qz3y # cspell:disable-line
|
||||||
|
- &system_brain age1jhf7vm0005j60mjq63696frrmjhpy8kpc2d66mw044lqap5mjv4snmwvwm # cspell:disable-line
|
||||||
- &system_jeeves age13lmqgc3jvkyah5e3vcwmj4s5wsc2akctcga0lpc0x8v8du3fxprqp4ldkv # cspell:disable-line
|
- &system_jeeves age13lmqgc3jvkyah5e3vcwmj4s5wsc2akctcga0lpc0x8v8du3fxprqp4ldkv # cspell:disable-line
|
||||||
- &system_router age1xzxryqq63x65yuza9lmmkud7crjjxpnkdew070yhx6xn7xe4tdws5twxsv # cspell:disable-line
|
- &system_leviathan age1l272y8udvg60z7edgje42fu49uwt4x2gxn5zvywssnv9h2krms8s094m4k # cspell:disable-line
|
||||||
- &system_rhapsody age1ufnewppysaq2wwcl4ugngjz8pfzc5a35yg7luq0qmuqvctajcycs5lf6k4 # cspell:disable-line
|
- &system_rhapsody age1ufnewppysaq2wwcl4ugngjz8pfzc5a35yg7luq0qmuqvctajcycs5lf6k4 # cspell:disable-line
|
||||||
|
|
||||||
creation_rules:
|
creation_rules:
|
||||||
@@ -12,6 +16,7 @@ creation_rules:
|
|||||||
- age:
|
- age:
|
||||||
- *admin_richie
|
- *admin_richie
|
||||||
- *system_bob
|
- *system_bob
|
||||||
|
- *system_brain
|
||||||
- *system_jeeves
|
- *system_jeeves
|
||||||
- *system_router
|
- *system_leviathan
|
||||||
- *system_rhapsody
|
- *system_rhapsody
|
||||||
|
|||||||
27
.vscode/settings.json
vendored
27
.vscode/settings.json
vendored
@@ -2,6 +2,7 @@
|
|||||||
"cSpell.words": [
|
"cSpell.words": [
|
||||||
"aboutwelcome",
|
"aboutwelcome",
|
||||||
"acltype",
|
"acltype",
|
||||||
|
"addopts",
|
||||||
"addstr",
|
"addstr",
|
||||||
"advplyr",
|
"advplyr",
|
||||||
"ahci",
|
"ahci",
|
||||||
@@ -9,6 +10,7 @@
|
|||||||
"aiounifi",
|
"aiounifi",
|
||||||
"alsa",
|
"alsa",
|
||||||
"apiclient",
|
"apiclient",
|
||||||
|
"apscheduler",
|
||||||
"archlinux",
|
"archlinux",
|
||||||
"ashift",
|
"ashift",
|
||||||
"asrouter",
|
"asrouter",
|
||||||
@@ -114,6 +116,7 @@
|
|||||||
"httpchk",
|
"httpchk",
|
||||||
"hurlenko",
|
"hurlenko",
|
||||||
"hwloc",
|
"hwloc",
|
||||||
|
"ignorelist",
|
||||||
"INITDB",
|
"INITDB",
|
||||||
"iocharset",
|
"iocharset",
|
||||||
"ioit",
|
"ioit",
|
||||||
@@ -148,11 +151,15 @@
|
|||||||
"mixtral",
|
"mixtral",
|
||||||
"mklabel",
|
"mklabel",
|
||||||
"mkpart",
|
"mkpart",
|
||||||
|
"modbus",
|
||||||
|
"modbuss",
|
||||||
"modesetting",
|
"modesetting",
|
||||||
"mountpoint",
|
"mountpoint",
|
||||||
"mountpoints",
|
"mountpoints",
|
||||||
"mousewheel",
|
"mousewheel",
|
||||||
|
"mqtt",
|
||||||
"mtxr",
|
"mtxr",
|
||||||
|
"mypy",
|
||||||
"ncdu",
|
"ncdu",
|
||||||
"nemo",
|
"nemo",
|
||||||
"neofetch",
|
"neofetch",
|
||||||
@@ -184,6 +191,7 @@
|
|||||||
"overalljails",
|
"overalljails",
|
||||||
"overscroll",
|
"overscroll",
|
||||||
"overseerr",
|
"overseerr",
|
||||||
|
"paho",
|
||||||
"partitionwise",
|
"partitionwise",
|
||||||
"pbmode",
|
"pbmode",
|
||||||
"pciutils",
|
"pciutils",
|
||||||
@@ -211,9 +219,14 @@
|
|||||||
"pulseaudio",
|
"pulseaudio",
|
||||||
"punycode",
|
"punycode",
|
||||||
"pychromecast",
|
"pychromecast",
|
||||||
|
"pydocstyle",
|
||||||
|
"pyfakefs",
|
||||||
"pylance",
|
"pylance",
|
||||||
|
"pylint",
|
||||||
"pymetno",
|
"pymetno",
|
||||||
|
"pymodbus",
|
||||||
"pyownet",
|
"pyownet",
|
||||||
|
"pytest",
|
||||||
"qbit",
|
"qbit",
|
||||||
"qbittorrent",
|
"qbittorrent",
|
||||||
"qbittorrentvpn",
|
"qbittorrentvpn",
|
||||||
@@ -239,6 +252,7 @@
|
|||||||
"schemeless",
|
"schemeless",
|
||||||
"scrollback",
|
"scrollback",
|
||||||
"SECUREFOX",
|
"SECUREFOX",
|
||||||
|
"sessionmaker",
|
||||||
"sessionstore",
|
"sessionstore",
|
||||||
"shellcheck",
|
"shellcheck",
|
||||||
"signon",
|
"signon",
|
||||||
@@ -250,6 +264,7 @@
|
|||||||
"socialtracking",
|
"socialtracking",
|
||||||
"sonarr",
|
"sonarr",
|
||||||
"sponsorblock",
|
"sponsorblock",
|
||||||
|
"sqlalchemy",
|
||||||
"sqltools",
|
"sqltools",
|
||||||
"ssdp",
|
"ssdp",
|
||||||
"SSHOPTS",
|
"SSHOPTS",
|
||||||
@@ -261,6 +276,7 @@
|
|||||||
"tabmanager",
|
"tabmanager",
|
||||||
"tamasfe",
|
"tamasfe",
|
||||||
"TCPIP",
|
"TCPIP",
|
||||||
|
"testdisk",
|
||||||
"tiktok",
|
"tiktok",
|
||||||
"timonwong",
|
"timonwong",
|
||||||
"titlebar",
|
"titlebar",
|
||||||
@@ -270,6 +286,7 @@
|
|||||||
"topstories",
|
"topstories",
|
||||||
"treefmt",
|
"treefmt",
|
||||||
"twimg",
|
"twimg",
|
||||||
|
"typer",
|
||||||
"uaccess",
|
"uaccess",
|
||||||
"ublock",
|
"ublock",
|
||||||
"uiprotect",
|
"uiprotect",
|
||||||
@@ -285,6 +302,7 @@
|
|||||||
"usernamehw",
|
"usernamehw",
|
||||||
"userprefs",
|
"userprefs",
|
||||||
"vfat",
|
"vfat",
|
||||||
|
"victron",
|
||||||
"virt",
|
"virt",
|
||||||
"virtualisation",
|
"virtualisation",
|
||||||
"vpnpromourl",
|
"vpnpromourl",
|
||||||
@@ -296,6 +314,8 @@
|
|||||||
"wireshark",
|
"wireshark",
|
||||||
"Workqueues",
|
"Workqueues",
|
||||||
"xattr",
|
"xattr",
|
||||||
|
"xcursorgen",
|
||||||
|
"xdist",
|
||||||
"xhci",
|
"xhci",
|
||||||
"yazi",
|
"yazi",
|
||||||
"yubikey",
|
"yubikey",
|
||||||
@@ -307,5 +327,10 @@
|
|||||||
"zoxide",
|
"zoxide",
|
||||||
"zram",
|
"zram",
|
||||||
"zstd"
|
"zstd"
|
||||||
]
|
],
|
||||||
|
"python-envs.defaultEnvManager": "ms-python.python:system",
|
||||||
|
"python-envs.pythonProjects": [],
|
||||||
|
"python.testing.pytestArgs": ["tests"],
|
||||||
|
"python.testing.unittestEnabled": false,
|
||||||
|
"python.testing.pytestEnabled": true
|
||||||
}
|
}
|
||||||
|
|||||||
12
AGENTS.md
Normal file
12
AGENTS.md
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
## Dev environment tips
|
||||||
|
|
||||||
|
- use treefmt to format all files
|
||||||
|
- keep new code consistent with the existing style
|
||||||
|
|
||||||
|
### Python
|
||||||
|
|
||||||
|
- make code `ruff` compliant
|
||||||
|
- use pytest to test python code tests should be put in `tests` directory
|
||||||
|
- dont use global state
|
||||||
|
- use google style docstrings
|
||||||
|
- use typer over argparse
|
||||||
@@ -44,7 +44,10 @@
|
|||||||
# firmware update
|
# firmware update
|
||||||
fwupd.enable = true;
|
fwupd.enable = true;
|
||||||
|
|
||||||
snapshot_manager.enable = lib.mkDefault true;
|
snapshot_manager = {
|
||||||
|
enable = lib.mkDefault true;
|
||||||
|
PYTHONPATH = "${inputs.self}/";
|
||||||
|
};
|
||||||
|
|
||||||
zfs = {
|
zfs = {
|
||||||
trim.enable = lib.mkDefault true;
|
trim.enable = lib.mkDefault true;
|
||||||
|
|||||||
@@ -1,4 +1,10 @@
|
|||||||
{ lib, pkgs, ... }:
|
{ lib, pkgs, ... }:
|
||||||
|
let
|
||||||
|
libPath = pkgs.lib.makeLibraryPath [
|
||||||
|
pkgs.zlib
|
||||||
|
pkgs.stdenv.cc.cc.lib
|
||||||
|
];
|
||||||
|
in
|
||||||
{
|
{
|
||||||
programs.nix-ld = {
|
programs.nix-ld = {
|
||||||
enable = lib.mkDefault true;
|
enable = lib.mkDefault true;
|
||||||
@@ -15,6 +21,7 @@
|
|||||||
libxml2
|
libxml2
|
||||||
openssl
|
openssl
|
||||||
stdenv.cc.cc
|
stdenv.cc.cc
|
||||||
|
stdenv.cc.cc.lib
|
||||||
systemd
|
systemd
|
||||||
util-linux
|
util-linux
|
||||||
xz
|
xz
|
||||||
@@ -23,4 +30,9 @@
|
|||||||
zstd
|
zstd
|
||||||
];
|
];
|
||||||
};
|
};
|
||||||
|
|
||||||
|
environment = {
|
||||||
|
sessionVariables.LD_LIBRARY_PATH = lib.mkDefault libPath;
|
||||||
|
variables.LD_LIBRARY_PATH = lib.mkDefault libPath;
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,6 +2,6 @@
|
|||||||
{
|
{
|
||||||
environment.systemPackages = with pkgs; [
|
environment.systemPackages = with pkgs; [
|
||||||
git
|
git
|
||||||
python313
|
my_python
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
{
|
{
|
||||||
inputs,
|
|
||||||
pkgs,
|
pkgs,
|
||||||
lib,
|
lib,
|
||||||
config,
|
config,
|
||||||
@@ -11,33 +10,48 @@ in
|
|||||||
{
|
{
|
||||||
options = {
|
options = {
|
||||||
services.snapshot_manager = {
|
services.snapshot_manager = {
|
||||||
enable = lib.mkOption {
|
enable = lib.mkEnableOption "ZFS snapshot manager";
|
||||||
default = true;
|
|
||||||
example = true;
|
|
||||||
description = "Whether to enable k3s-net.";
|
|
||||||
type = lib.types.bool;
|
|
||||||
};
|
|
||||||
path = lib.mkOption {
|
path = lib.mkOption {
|
||||||
type = lib.types.path;
|
type = lib.types.path;
|
||||||
description = "Path that needs to be updated via git pull";
|
|
||||||
default = ./snapshot_config.toml;
|
default = ./snapshot_config.toml;
|
||||||
|
description = "Path to the snapshot_manager TOML config.";
|
||||||
|
};
|
||||||
|
PYTHONPATH = lib.mkOption {
|
||||||
|
type = lib.types.str;
|
||||||
|
description = ''
|
||||||
|
the PYTHONPATH to use for the snapshot_manager service.
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
EnvironmentFile = lib.mkOption {
|
||||||
|
type = lib.types.nullOr (lib.types.coercedTo lib.types.path toString lib.types.str);
|
||||||
|
default = null;
|
||||||
|
description = ''
|
||||||
|
Single environment file for the service (e.g. /etc/snapshot-manager/env).
|
||||||
|
Use a leading "-" to ignore if missing (systemd feature).
|
||||||
|
'';
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
config = lib.mkIf cfg.enable {
|
config = lib.mkIf cfg.enable {
|
||||||
systemd = {
|
systemd = {
|
||||||
services."snapshot_manager" = {
|
services.snapshot_manager = {
|
||||||
description = "ZFS Snapshot Manager";
|
description = "ZFS Snapshot Manager";
|
||||||
requires = [ "zfs-import.target" ];
|
requires = [ "zfs-import.target" ];
|
||||||
after = [ "zfs-import.target" ];
|
after = [ "zfs-import.target" ];
|
||||||
path = [ pkgs.zfs ];
|
path = [ pkgs.zfs ];
|
||||||
|
environment = {
|
||||||
|
PYTHONPATH = cfg.PYTHONPATH;
|
||||||
|
};
|
||||||
serviceConfig = {
|
serviceConfig = {
|
||||||
Type = "oneshot";
|
Type = "oneshot";
|
||||||
ExecStart = "${inputs.system_tools.packages.x86_64-linux.default}/bin/snapshot_manager --config-file='${cfg.path}'";
|
ExecStart = "${pkgs.my_python}/bin/python -m python.tools.snapshot_manager ${lib.escapeShellArg cfg.path}";
|
||||||
|
}
|
||||||
|
// lib.optionalAttrs (cfg.EnvironmentFile != null) {
|
||||||
|
EnvironmentFile = cfg.EnvironmentFile;
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
timers."snapshot_manager" = {
|
timers.snapshot_manager = {
|
||||||
wantedBy = [ "timers.target" ];
|
wantedBy = [ "timers.target" ];
|
||||||
timerConfig = {
|
timerConfig = {
|
||||||
OnBootSec = "15m";
|
OnBootSec = "15m";
|
||||||
|
|||||||
@@ -37,6 +37,8 @@
|
|||||||
TcpKeepAlive = "no";
|
TcpKeepAlive = "no";
|
||||||
X11Forwarding = lib.mkDefault false;
|
X11Forwarding = lib.mkDefault false;
|
||||||
KexAlgorithms = [
|
KexAlgorithms = [
|
||||||
|
"sntrup761x25519-sha512@openssh.com"
|
||||||
|
"mlkem768x25519-sha256"
|
||||||
"curve25519-sha256@libssh.org"
|
"curve25519-sha256@libssh.org"
|
||||||
"diffie-hellman-group-exchange-sha256"
|
"diffie-hellman-group-exchange-sha256"
|
||||||
];
|
];
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
{ pkgs, ... }:
|
{ pkgs, ... }:
|
||||||
{
|
{
|
||||||
boot = {
|
boot = {
|
||||||
kernelPackages = pkgs.linuxPackages_6_14;
|
kernelPackages = pkgs.linuxPackages_6_17;
|
||||||
zfs.package = pkgs.zfs_2_3;
|
zfs.package = pkgs.zfs_unstable;
|
||||||
};
|
};
|
||||||
|
|
||||||
hardware.bluetooth = {
|
hardware.bluetooth = {
|
||||||
|
|||||||
@@ -10,6 +10,9 @@
|
|||||||
authorizedKeys = config.users.users.richie.openssh.authorizedKeys.keys;
|
authorizedKeys = config.users.users.richie.openssh.authorizedKeys.keys;
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
availableKernelModules = [ "igb" ];
|
availableKernelModules = [
|
||||||
|
"igb"
|
||||||
|
"r8152"
|
||||||
|
];
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,10 +8,11 @@
|
|||||||
dataDir = "/home/richie/Syncthing";
|
dataDir = "/home/richie/Syncthing";
|
||||||
configDir = "/home/richie/.config/syncthing";
|
configDir = "/home/richie/.config/syncthing";
|
||||||
settings.devices = {
|
settings.devices = {
|
||||||
phone.id = "TBRULKD-7DZPGGZ-F6LLB7J-MSO54AY-7KLPBIN-QOFK6PX-W2HBEWI-PHM2CQI"; # cspell:disable-line
|
|
||||||
jeeves.id = "ICRHXZW-ECYJCUZ-I4CZ64R-3XRK7CG-LL2HAAK-FGOHD22-BQA4AI6-5OAL6AG"; # cspell:disable-line
|
|
||||||
ipad.id = "KI76T3X-SFUGV2L-VSNYTKR-TSIUV5L-SHWD3HE-GQRGRCN-GY4UFMD-CW6Z6AX"; # cspell:disable-line
|
|
||||||
bob.id = "CJIAPEJ-VO74RR4-F75VU6M-QNZAMYG-FYUJG7Y-6AT62HJ-355PRPL-PJFETAZ"; # cspell:disable-line
|
bob.id = "CJIAPEJ-VO74RR4-F75VU6M-QNZAMYG-FYUJG7Y-6AT62HJ-355PRPL-PJFETAZ"; # cspell:disable-line
|
||||||
|
brain.id = "SSCGIPI-IV3VYKB-TRNIJE3-COV4T2H-CDBER7F-I2CGHYA-NWOEUDU-3T5QAAN"; # cspell:disable-line
|
||||||
|
ipad.id = "KI76T3X-SFUGV2L-VSNYTKR-TSIUV5L-SHWD3HE-GQRGRCN-GY4UFMD-CW6Z6AX"; # cspell:disable-line
|
||||||
|
jeeves.id = "ICRHXZW-ECYJCUZ-I4CZ64R-3XRK7CG-LL2HAAK-FGOHD22-BQA4AI6-5OAL6AG"; # cspell:disable-line
|
||||||
|
phone.id = "TBRULKD-7DZPGGZ-F6LLB7J-MSO54AY-7KLPBIN-QOFK6PX-W2HBEWI-PHM2CQI"; # cspell:disable-line
|
||||||
rhapsody-in-green.id = "ASL3KC4-3XEN6PA-7BQBRKE-A7JXLI6-DJT43BY-Q4WPOER-7UALUAZ-VTPQ6Q4"; # cspell:disable-line
|
rhapsody-in-green.id = "ASL3KC4-3XEN6PA-7BQBRKE-A7JXLI6-DJT43BY-Q4WPOER-7UALUAZ-VTPQ6Q4"; # cspell:disable-line
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -5,5 +5,7 @@
|
|||||||
randomizedDelaySec = "1h";
|
randomizedDelaySec = "1h";
|
||||||
persistent = true;
|
persistent = true;
|
||||||
flake = "github:RichieCahill/dotfiles";
|
flake = "github:RichieCahill/dotfiles";
|
||||||
|
allowReboot = true;
|
||||||
|
dates = "Sat *-*-* 06:00:00";
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
4
docs/Gemfile
Normal file
4
docs/Gemfile
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
source "https://rubygems.org"
|
||||||
|
|
||||||
|
# The github-pages gem pins all compatible versions of Jekyll and its plugins
|
||||||
|
gem "github-pages", group: :jekyll_plugins
|
||||||
23
docs/_config.yml
Normal file
23
docs/_config.yml
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
title: "Richie Cahill"
|
||||||
|
description: "ALL THE CHAOS THAT I CANT DO AT WORK"
|
||||||
|
baseurl: "/dotfiles"
|
||||||
|
url: "https://richiecahill.github.io"
|
||||||
|
|
||||||
|
remote_theme: pages-themes/hacker@v0.2.0
|
||||||
|
plugins:
|
||||||
|
- jekyll-feed
|
||||||
|
- jekyll-remote-theme
|
||||||
|
- jekyll-seo-tag
|
||||||
|
- jekyll-sitemap
|
||||||
|
- jekyll-paginate
|
||||||
|
|
||||||
|
paginate: 5
|
||||||
|
paginate_path: "/page:num"
|
||||||
|
|
||||||
|
author:
|
||||||
|
name: "Richie Cahill"
|
||||||
|
email: "richie@tmmworkshop.com"
|
||||||
|
|
||||||
|
social_links:
|
||||||
|
github: "RichieCahill"
|
||||||
|
website: "https://tmmworkshop.com"
|
||||||
13
docs/_posts/2025-10-31-MONOREPO.md
Normal file
13
docs/_posts/2025-10-31-MONOREPO.md
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# The MONOREPO experiment
|
||||||
|
|
||||||
|
Im testing a [MONOREPO](https://en.wikipedia.org/wiki/Monorepo) because Phil said this was a bad idea. To that i say hold my beer.
|
||||||
|
|
||||||
|
In all seriousness, I Think that for a small dev team/solo dev. The simplicity is worth higher barer to entry. One of my most annoying processes was updating my system tools. I had to build my update in a feature branch and then merge it into my main branch. then go to my dotfiles create a feature branch update the system tools merge it into main.
|
||||||
|
|
||||||
|
It will be starting with my Nix Dotfiles Python tools and now my blog.
|
||||||
|
|
||||||
|
I will be reaching ot to phil on 2030-10-31 and 2035-10-31 to give him updates on the progress.
|
||||||
|
|
||||||
|
Known Issues:
|
||||||
|
|
||||||
|
- the python tests are running on the current derivation not the one the derivation im updating to.
|
||||||
17
docs/index.md
Normal file
17
docs/index.md
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
---
|
||||||
|
layout: default
|
||||||
|
title: "Welcome"
|
||||||
|
---
|
||||||
|
|
||||||
|
Welcome to my build logs, notes, and experiments.
|
||||||
|
|
||||||
|
You can read my latest posts below
|
||||||
|
|
||||||
|
<ul>
|
||||||
|
{% for post in site.posts %}
|
||||||
|
<li>
|
||||||
|
<a href="{{ post.url | relative_url }}">{{ post.title }}</a>
|
||||||
|
<small>— {{ post.date | date: "%Y-%m-%d" }}</small>
|
||||||
|
</li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
3
esphome/.gitignore
vendored
Normal file
3
esphome/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
# esphome
|
||||||
|
/.esphome/
|
||||||
|
/secrets.yaml
|
||||||
132
esphome/battery0.yml
Normal file
132
esphome/battery0.yml
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
esphome:
|
||||||
|
name: batteries
|
||||||
|
friendly_name: batteries
|
||||||
|
|
||||||
|
esp32:
|
||||||
|
board: esp32dev
|
||||||
|
framework:
|
||||||
|
type: arduino
|
||||||
|
|
||||||
|
logger:
|
||||||
|
|
||||||
|
api:
|
||||||
|
encryption:
|
||||||
|
key: !secret api_key
|
||||||
|
|
||||||
|
external_components:
|
||||||
|
- source: github://syssi/esphome-jk-bms@main
|
||||||
|
|
||||||
|
ota:
|
||||||
|
- platform: esphome
|
||||||
|
password: !secret ota_password
|
||||||
|
|
||||||
|
wifi:
|
||||||
|
ssid: !secret wifi_ssid
|
||||||
|
password: !secret wifi_password
|
||||||
|
fast_connect: on
|
||||||
|
|
||||||
|
captive_portal:
|
||||||
|
|
||||||
|
esp32_ble_tracker:
|
||||||
|
scan_parameters:
|
||||||
|
interval: 1100ms
|
||||||
|
window: 1100ms
|
||||||
|
active: true
|
||||||
|
|
||||||
|
ble_client:
|
||||||
|
- mac_address: "C8:47:80:29:0F:DB"
|
||||||
|
id: jk_ble0
|
||||||
|
|
||||||
|
jk_bms_ble:
|
||||||
|
- ble_client_id: jk_ble0
|
||||||
|
protocol_version: JK02_32S
|
||||||
|
throttle: 1s
|
||||||
|
id: jk_bms0
|
||||||
|
|
||||||
|
button:
|
||||||
|
- platform: jk_bms_ble
|
||||||
|
retrieve_settings:
|
||||||
|
name: "JK0 retrieve settings"
|
||||||
|
retrieve_device_info:
|
||||||
|
name: "JK0 retrieve device info"
|
||||||
|
|
||||||
|
sensor:
|
||||||
|
- platform: jk_bms_ble
|
||||||
|
jk_bms_ble_id: jk_bms0
|
||||||
|
total_voltage:
|
||||||
|
name: "JK0 Total Voltage"
|
||||||
|
state_of_charge:
|
||||||
|
name: "JK0 SoC"
|
||||||
|
charging_power:
|
||||||
|
name: "JK0 charging power"
|
||||||
|
discharging_power:
|
||||||
|
name: "JK0 discharging power"
|
||||||
|
temperature_sensor_1:
|
||||||
|
name: "JK0 Temp 1"
|
||||||
|
temperature_sensor_2:
|
||||||
|
name: "JK0 Temp 2"
|
||||||
|
balancing:
|
||||||
|
name: "JK0 balancing"
|
||||||
|
total_runtime:
|
||||||
|
name: "JK0 total runtime"
|
||||||
|
balancing_current:
|
||||||
|
name: "JK0 balancing current"
|
||||||
|
delta_cell_voltage:
|
||||||
|
name: "JK0 cell delta voltage"
|
||||||
|
average_cell_voltage:
|
||||||
|
name: "JK0 cell average voltage"
|
||||||
|
cell_voltage_1:
|
||||||
|
name: "JK0 cell voltage 1"
|
||||||
|
cell_voltage_2:
|
||||||
|
name: "JK0 cell voltage 2"
|
||||||
|
cell_voltage_3:
|
||||||
|
name: "JK0 cell voltage 3"
|
||||||
|
cell_voltage_4:
|
||||||
|
name: "JK0 cell voltage 4"
|
||||||
|
cell_voltage_5:
|
||||||
|
name: "JK0 cell voltage 5"
|
||||||
|
cell_voltage_6:
|
||||||
|
name: "JK0 cell voltage 6"
|
||||||
|
cell_voltage_7:
|
||||||
|
name: "JK0 cell voltage 7"
|
||||||
|
cell_voltage_8:
|
||||||
|
name: "JK0 cell voltage 8"
|
||||||
|
cell_resistance_1:
|
||||||
|
name: "JK0 cell resistance 1"
|
||||||
|
cell_resistance_2:
|
||||||
|
name: "JK0 cell resistance 2"
|
||||||
|
cell_resistance_3:
|
||||||
|
name: "JK0 cell resistance 3"
|
||||||
|
cell_resistance_4:
|
||||||
|
name: "JK0 cell resistance 4"
|
||||||
|
cell_resistance_5:
|
||||||
|
name: "JK0 cell resistance 5"
|
||||||
|
cell_resistance_6:
|
||||||
|
name: "JK0 cell resistance 6"
|
||||||
|
cell_resistance_7:
|
||||||
|
name: "JK0 cell resistance 7"
|
||||||
|
cell_resistance_8:
|
||||||
|
name: "JK0 cell resistance 8"
|
||||||
|
total_charging_cycle_capacity:
|
||||||
|
name: "JK0 total charging cycle capacity"
|
||||||
|
|
||||||
|
text_sensor:
|
||||||
|
- platform: jk_bms_ble
|
||||||
|
jk_bms_ble_id: jk_bms0
|
||||||
|
errors:
|
||||||
|
name: "JK0 Errors"
|
||||||
|
|
||||||
|
switch:
|
||||||
|
- platform: jk_bms_ble
|
||||||
|
jk_bms_ble_id: jk_bms0
|
||||||
|
charging:
|
||||||
|
name: "JK0 Charging"
|
||||||
|
discharging:
|
||||||
|
name: "JK0 Discharging"
|
||||||
|
balancer:
|
||||||
|
name: "JK0 Balancing"
|
||||||
|
|
||||||
|
- platform: ble_client
|
||||||
|
ble_client_id: jk_ble0
|
||||||
|
name: "JK0 enable bluetooth connection"
|
||||||
|
id: ble_client_switch0
|
||||||
132
esphome/battery1.yml
Normal file
132
esphome/battery1.yml
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
esphome:
|
||||||
|
name: battery1
|
||||||
|
friendly_name: battery1
|
||||||
|
|
||||||
|
esp32:
|
||||||
|
board: esp32dev
|
||||||
|
framework:
|
||||||
|
type: arduino
|
||||||
|
|
||||||
|
logger:
|
||||||
|
|
||||||
|
api:
|
||||||
|
encryption:
|
||||||
|
key: !secret api_key
|
||||||
|
|
||||||
|
external_components:
|
||||||
|
- source: github://syssi/esphome-jk-bms@main
|
||||||
|
|
||||||
|
ota:
|
||||||
|
- platform: esphome
|
||||||
|
password: !secret ota_password
|
||||||
|
|
||||||
|
wifi:
|
||||||
|
ssid: !secret wifi_ssid
|
||||||
|
password: !secret wifi_password
|
||||||
|
fast_connect: on
|
||||||
|
|
||||||
|
captive_portal:
|
||||||
|
|
||||||
|
esp32_ble_tracker:
|
||||||
|
scan_parameters:
|
||||||
|
interval: 1100ms
|
||||||
|
window: 1100ms
|
||||||
|
active: true
|
||||||
|
|
||||||
|
ble_client:
|
||||||
|
- mac_address: "C8:47:80:37:9D:DD"
|
||||||
|
id: jk_ble1
|
||||||
|
|
||||||
|
jk_bms_ble:
|
||||||
|
- ble_client_id: jk_ble1
|
||||||
|
protocol_version: JK02_32S
|
||||||
|
throttle: 1s
|
||||||
|
id: jk_bms1
|
||||||
|
|
||||||
|
button:
|
||||||
|
- platform: jk_bms_ble
|
||||||
|
retrieve_settings:
|
||||||
|
name: "JK1 retrieve settings"
|
||||||
|
retrieve_device_info:
|
||||||
|
name: "JK1 retrieve device info"
|
||||||
|
|
||||||
|
sensor:
|
||||||
|
- platform: jk_bms_ble
|
||||||
|
jk_bms_ble_id: jk_bms1
|
||||||
|
total_voltage:
|
||||||
|
name: "JK1 Total Voltage"
|
||||||
|
state_of_charge:
|
||||||
|
name: "JK1 SoC"
|
||||||
|
charging_power:
|
||||||
|
name: "JK1 charging power"
|
||||||
|
discharging_power:
|
||||||
|
name: "JK1 discharging power"
|
||||||
|
temperature_sensor_1:
|
||||||
|
name: "JK1 Temp 1"
|
||||||
|
temperature_sensor_2:
|
||||||
|
name: "JK1 Temp 2"
|
||||||
|
balancing:
|
||||||
|
name: "JK1 balancing"
|
||||||
|
total_runtime:
|
||||||
|
name: "JK1 total runtime"
|
||||||
|
balancing_current:
|
||||||
|
name: "JK1 balancing current"
|
||||||
|
delta_cell_voltage:
|
||||||
|
name: "JK1 cell delta voltage"
|
||||||
|
average_cell_voltage:
|
||||||
|
name: "JK1 cell average voltage"
|
||||||
|
cell_voltage_1:
|
||||||
|
name: "JK1 cell voltage 1"
|
||||||
|
cell_voltage_2:
|
||||||
|
name: "JK1 cell voltage 2"
|
||||||
|
cell_voltage_3:
|
||||||
|
name: "JK1 cell voltage 3"
|
||||||
|
cell_voltage_4:
|
||||||
|
name: "JK1 cell voltage 4"
|
||||||
|
cell_voltage_5:
|
||||||
|
name: "JK1 cell voltage 5"
|
||||||
|
cell_voltage_6:
|
||||||
|
name: "JK1 cell voltage 6"
|
||||||
|
cell_voltage_7:
|
||||||
|
name: "JK1 cell voltage 7"
|
||||||
|
cell_voltage_8:
|
||||||
|
name: "JK1 cell voltage 8"
|
||||||
|
cell_resistance_1:
|
||||||
|
name: "JK1 cell resistance 1"
|
||||||
|
cell_resistance_2:
|
||||||
|
name: "JK1 cell resistance 2"
|
||||||
|
cell_resistance_3:
|
||||||
|
name: "JK1 cell resistance 3"
|
||||||
|
cell_resistance_4:
|
||||||
|
name: "JK1 cell resistance 4"
|
||||||
|
cell_resistance_5:
|
||||||
|
name: "JK1 cell resistance 5"
|
||||||
|
cell_resistance_6:
|
||||||
|
name: "JK1 cell resistance 6"
|
||||||
|
cell_resistance_7:
|
||||||
|
name: "JK1 cell resistance 7"
|
||||||
|
cell_resistance_8:
|
||||||
|
name: "JK1 cell resistance 8"
|
||||||
|
total_charging_cycle_capacity:
|
||||||
|
name: "JK1 total charging cycle capacity"
|
||||||
|
|
||||||
|
text_sensor:
|
||||||
|
- platform: jk_bms_ble
|
||||||
|
jk_bms_ble_id: jk_bms1
|
||||||
|
errors:
|
||||||
|
name: "JK1 Errors"
|
||||||
|
|
||||||
|
switch:
|
||||||
|
- platform: jk_bms_ble
|
||||||
|
jk_bms_ble_id: jk_bms1
|
||||||
|
charging:
|
||||||
|
name: "JK1 Charging"
|
||||||
|
discharging:
|
||||||
|
name: "JK1 Discharging"
|
||||||
|
balancer:
|
||||||
|
name: "JK1 Balancing"
|
||||||
|
|
||||||
|
- platform: ble_client
|
||||||
|
ble_client_id: jk_ble1
|
||||||
|
name: "JK1 enable bluetooth connection"
|
||||||
|
id: ble_client_switch0
|
||||||
48
esphome/environment.yml
Normal file
48
esphome/environment.yml
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
esphome:
|
||||||
|
name: "environment"
|
||||||
|
friendly_name: "environment"
|
||||||
|
|
||||||
|
esp32:
|
||||||
|
board: esp32dev
|
||||||
|
framework:
|
||||||
|
type: arduino
|
||||||
|
|
||||||
|
i2c:
|
||||||
|
sda: GPIO21
|
||||||
|
scl: GPIO22
|
||||||
|
scan: True
|
||||||
|
id: bus_a
|
||||||
|
|
||||||
|
sensor:
|
||||||
|
- platform: aht10
|
||||||
|
i2c_id: bus_a
|
||||||
|
address: 0x38
|
||||||
|
variant: AHT20
|
||||||
|
temperature:
|
||||||
|
name: "environment Temperature"
|
||||||
|
id: aht10_temperature
|
||||||
|
humidity:
|
||||||
|
name: "environment Humidity"
|
||||||
|
id: aht10_humidity
|
||||||
|
update_interval: 5s
|
||||||
|
|
||||||
|
web_server:
|
||||||
|
port: 80
|
||||||
|
|
||||||
|
logger:
|
||||||
|
level: DEBUG
|
||||||
|
|
||||||
|
api:
|
||||||
|
encryption:
|
||||||
|
key: !secret api_key
|
||||||
|
|
||||||
|
ota:
|
||||||
|
- platform: esphome
|
||||||
|
password: !secret ota_password
|
||||||
|
|
||||||
|
wifi:
|
||||||
|
ssid: !secret wifi_ssid
|
||||||
|
password: !secret wifi_password
|
||||||
|
fast_connect: on
|
||||||
|
|
||||||
|
captive_portal:
|
||||||
1
file_sizes.txt.new
Normal file
1
file_sizes.txt.new
Normal file
File diff suppressed because one or more lines are too long
135
flake.lock
generated
135
flake.lock
generated
@@ -8,11 +8,11 @@
|
|||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"dir": "pkgs/firefox-addons",
|
"dir": "pkgs/firefox-addons",
|
||||||
"lastModified": 1750219402,
|
"lastModified": 1763697825,
|
||||||
"narHash": "sha256-b3y7V7db0VwLGtpcLRmT1Aa9dpAKoHQdem55UhgB/fw=",
|
"narHash": "sha256-AgCCcVPOi1tuzuW5/StlwqBjRWSX62oL97qWuxrq5UA=",
|
||||||
"owner": "rycee",
|
"owner": "rycee",
|
||||||
"repo": "nur-expressions",
|
"repo": "nur-expressions",
|
||||||
"rev": "a00ce73b626ed274fbfe9f51627861e140b08f6d",
|
"rev": "cefce78793603231be226fa77e7ad58e0e4899b8",
|
||||||
"type": "gitlab"
|
"type": "gitlab"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
@@ -29,11 +29,11 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1750275572,
|
"lastModified": 1763748372,
|
||||||
"narHash": "sha256-upC/GIlsIgtdtWRGd1obzdXWYQptNkfzZeyAFWgsgf0=",
|
"narHash": "sha256-AUc78Qv3sWir0hvbmfXoZ7Jzq9VVL97l+sP9Jgms+JU=",
|
||||||
"owner": "nix-community",
|
"owner": "nix-community",
|
||||||
"repo": "home-manager",
|
"repo": "home-manager",
|
||||||
"rev": "0f355844e54e4c70906b1ef5cc35a0047d666c04",
|
"rev": "d10a9b16b2a3ee28433f3d1c603f4e9f1fecb8e1",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
@@ -44,11 +44,11 @@
|
|||||||
},
|
},
|
||||||
"nixos-hardware": {
|
"nixos-hardware": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1750083401,
|
"lastModified": 1762847253,
|
||||||
"narHash": "sha256-ynqbgIYrg7P1fAKYqe8I/PMiLABBcNDYG9YaAP/d/C4=",
|
"narHash": "sha256-BWWnUUT01lPwCWUvS0p6Px5UOBFeXJ8jR+ZdLX8IbrU=",
|
||||||
"owner": "nixos",
|
"owner": "nixos",
|
||||||
"repo": "nixos-hardware",
|
"repo": "nixos-hardware",
|
||||||
"rev": "61837d2a33ccc1582c5fabb7bf9130d39fee59ad",
|
"rev": "899dc449bc6428b9ee6b3b8f771ca2b0ef945ab9",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
@@ -60,11 +60,11 @@
|
|||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1750134718,
|
"lastModified": 1763421233,
|
||||||
"narHash": "sha256-v263g4GbxXv87hMXMCpjkIxd/viIF7p3JpJrwgKdNiI=",
|
"narHash": "sha256-Stk9ZYRkGrnnpyJ4eqt9eQtdFWRRIvMxpNRf4sIegnw=",
|
||||||
"owner": "nixos",
|
"owner": "nixos",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "9e83b64f727c88a7711a2c463a7b16eedb69a84c",
|
"rev": "89c2b2330e733d6cdb5eae7b899326930c2c0648",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
@@ -76,11 +76,11 @@
|
|||||||
},
|
},
|
||||||
"nixpkgs-master": {
|
"nixpkgs-master": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1750291913,
|
"lastModified": 1763774007,
|
||||||
"narHash": "sha256-JW40+zIiDS+rZavb9IYdIN40/GmErO2+0+A66rM6/b8=",
|
"narHash": "sha256-PPeHfKA11P09kBkBD5pS3tIAFjnG5muHQnODQGTY87g=",
|
||||||
"owner": "nixos",
|
"owner": "nixos",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "ba92ab5dc0759a8740003ca34b5c1b888f4766d4",
|
"rev": "8a7cf7e9e18384533d9ecd0bfbcf475ac1dc497e",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
@@ -106,56 +106,6 @@
|
|||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"pyproject-build-systems": {
|
|
||||||
"inputs": {
|
|
||||||
"nixpkgs": [
|
|
||||||
"system_tools",
|
|
||||||
"nixpkgs"
|
|
||||||
],
|
|
||||||
"pyproject-nix": [
|
|
||||||
"system_tools",
|
|
||||||
"pyproject-nix"
|
|
||||||
],
|
|
||||||
"uv2nix": [
|
|
||||||
"system_tools",
|
|
||||||
"uv2nix"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"locked": {
|
|
||||||
"lastModified": 1744599653,
|
|
||||||
"narHash": "sha256-nysSwVVjG4hKoOjhjvE6U5lIKA8sEr1d1QzEfZsannU=",
|
|
||||||
"owner": "pyproject-nix",
|
|
||||||
"repo": "build-system-pkgs",
|
|
||||||
"rev": "7dba6dbc73120e15b558754c26024f6c93015dd7",
|
|
||||||
"type": "github"
|
|
||||||
},
|
|
||||||
"original": {
|
|
||||||
"owner": "pyproject-nix",
|
|
||||||
"repo": "build-system-pkgs",
|
|
||||||
"type": "github"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"pyproject-nix": {
|
|
||||||
"inputs": {
|
|
||||||
"nixpkgs": [
|
|
||||||
"system_tools",
|
|
||||||
"nixpkgs"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"locked": {
|
|
||||||
"lastModified": 1746540146,
|
|
||||||
"narHash": "sha256-QxdHGNpbicIrw5t6U3x+ZxeY/7IEJ6lYbvsjXmcxFIM=",
|
|
||||||
"owner": "pyproject-nix",
|
|
||||||
"repo": "pyproject.nix",
|
|
||||||
"rev": "e09c10c24ebb955125fda449939bfba664c467fd",
|
|
||||||
"type": "github"
|
|
||||||
},
|
|
||||||
"original": {
|
|
||||||
"owner": "pyproject-nix",
|
|
||||||
"repo": "pyproject.nix",
|
|
||||||
"type": "github"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"root": {
|
"root": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"firefox-addons": "firefox-addons",
|
"firefox-addons": "firefox-addons",
|
||||||
@@ -165,7 +115,6 @@
|
|||||||
"nixpkgs-master": "nixpkgs-master",
|
"nixpkgs-master": "nixpkgs-master",
|
||||||
"nixpkgs-stable": "nixpkgs-stable",
|
"nixpkgs-stable": "nixpkgs-stable",
|
||||||
"sops-nix": "sops-nix",
|
"sops-nix": "sops-nix",
|
||||||
"system_tools": "system_tools",
|
|
||||||
"systems": "systems"
|
"systems": "systems"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -176,11 +125,11 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1750119275,
|
"lastModified": 1763607916,
|
||||||
"narHash": "sha256-Rr7Pooz9zQbhdVxux16h7URa6mA80Pb/G07T4lHvh0M=",
|
"narHash": "sha256-VefBA1JWRXM929mBAFohFUtQJLUnEwZ2vmYUNkFnSjE=",
|
||||||
"owner": "Mic92",
|
"owner": "Mic92",
|
||||||
"repo": "sops-nix",
|
"repo": "sops-nix",
|
||||||
"rev": "77c423a03b9b2b79709ea2cb63336312e78b72e2",
|
"rev": "877bb495a6f8faf0d89fc10bd142c4b7ed2bcc0b",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
@@ -189,29 +138,6 @@
|
|||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"system_tools": {
|
|
||||||
"inputs": {
|
|
||||||
"nixpkgs": [
|
|
||||||
"nixpkgs"
|
|
||||||
],
|
|
||||||
"pyproject-build-systems": "pyproject-build-systems",
|
|
||||||
"pyproject-nix": "pyproject-nix",
|
|
||||||
"uv2nix": "uv2nix"
|
|
||||||
},
|
|
||||||
"locked": {
|
|
||||||
"lastModified": 1747501237,
|
|
||||||
"narHash": "sha256-woyaUwmZurfNTXBEFM6M7ueSd/Udixs+4DUInhL835c=",
|
|
||||||
"owner": "RichieCahill",
|
|
||||||
"repo": "system_tools",
|
|
||||||
"rev": "68ab5d1c17ac3fe2487f73dbbb4848bd2291139e",
|
|
||||||
"type": "github"
|
|
||||||
},
|
|
||||||
"original": {
|
|
||||||
"owner": "RichieCahill",
|
|
||||||
"repo": "system_tools",
|
|
||||||
"type": "github"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"systems": {
|
"systems": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1689347949,
|
"lastModified": 1689347949,
|
||||||
@@ -226,31 +152,6 @@
|
|||||||
"repo": "default-linux",
|
"repo": "default-linux",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
|
||||||
"uv2nix": {
|
|
||||||
"inputs": {
|
|
||||||
"nixpkgs": [
|
|
||||||
"system_tools",
|
|
||||||
"nixpkgs"
|
|
||||||
],
|
|
||||||
"pyproject-nix": [
|
|
||||||
"system_tools",
|
|
||||||
"pyproject-nix"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"locked": {
|
|
||||||
"lastModified": 1747441483,
|
|
||||||
"narHash": "sha256-W8BFXk5R0TuJcjIhcGoMpSOaIufGXpizK0pm+uTqynA=",
|
|
||||||
"owner": "pyproject-nix",
|
|
||||||
"repo": "uv2nix",
|
|
||||||
"rev": "582024dc64663e9f88d467c2f7f7b20d278349de",
|
|
||||||
"type": "github"
|
|
||||||
},
|
|
||||||
"original": {
|
|
||||||
"owner": "pyproject-nix",
|
|
||||||
"repo": "uv2nix",
|
|
||||||
"type": "github"
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"root": "root",
|
"root": "root",
|
||||||
|
|||||||
18
flake.nix
18
flake.nix
@@ -31,11 +31,6 @@
|
|||||||
inputs.nixpkgs.follows = "nixpkgs";
|
inputs.nixpkgs.follows = "nixpkgs";
|
||||||
};
|
};
|
||||||
|
|
||||||
system_tools = {
|
|
||||||
url = "github:RichieCahill/system_tools";
|
|
||||||
inputs.nixpkgs.follows = "nixpkgs";
|
|
||||||
};
|
|
||||||
|
|
||||||
sops-nix = {
|
sops-nix = {
|
||||||
url = "github:Mic92/sops-nix";
|
url = "github:Mic92/sops-nix";
|
||||||
inputs.nixpkgs.follows = "nixpkgs";
|
inputs.nixpkgs.follows = "nixpkgs";
|
||||||
@@ -59,6 +54,7 @@
|
|||||||
system:
|
system:
|
||||||
import nixpkgs {
|
import nixpkgs {
|
||||||
inherit system;
|
inherit system;
|
||||||
|
overlays = builtins.attrValues outputs.overlays;
|
||||||
config.allowUnfree = true;
|
config.allowUnfree = true;
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
@@ -77,6 +73,12 @@
|
|||||||
];
|
];
|
||||||
specialArgs = { inherit inputs outputs; };
|
specialArgs = { inherit inputs outputs; };
|
||||||
};
|
};
|
||||||
|
brain = lib.nixosSystem {
|
||||||
|
modules = [
|
||||||
|
./systems/brain
|
||||||
|
];
|
||||||
|
specialArgs = { inherit inputs outputs; };
|
||||||
|
};
|
||||||
jeeves = lib.nixosSystem {
|
jeeves = lib.nixosSystem {
|
||||||
modules = [
|
modules = [
|
||||||
./systems/jeeves
|
./systems/jeeves
|
||||||
@@ -89,6 +91,12 @@
|
|||||||
];
|
];
|
||||||
specialArgs = { inherit inputs outputs; };
|
specialArgs = { inherit inputs outputs; };
|
||||||
};
|
};
|
||||||
|
leviathan = lib.nixosSystem {
|
||||||
|
modules = [
|
||||||
|
./systems/leviathan
|
||||||
|
];
|
||||||
|
specialArgs = { inherit inputs outputs; };
|
||||||
|
};
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,15 +3,37 @@
|
|||||||
# When applied, the stable nixpkgs set (declared in the flake inputs) will be accessible through 'pkgs.stable'
|
# When applied, the stable nixpkgs set (declared in the flake inputs) will be accessible through 'pkgs.stable'
|
||||||
stable = final: _prev: {
|
stable = final: _prev: {
|
||||||
stable = import inputs.nixpkgs-stable {
|
stable = import inputs.nixpkgs-stable {
|
||||||
system = final.system;
|
system = final.stdenv.hostPlatform.system;
|
||||||
config.allowUnfree = true;
|
config.allowUnfree = true;
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
# When applied, the master nixpkgs set (declared in the flake inputs) will be accessible through 'pkgs.master'
|
# When applied, the master nixpkgs set (declared in the flake inputs) will be accessible through 'pkgs.master'
|
||||||
master = final: _prev: {
|
master = final: _prev: {
|
||||||
master = import inputs.nixpkgs-master {
|
master = import inputs.nixpkgs-master {
|
||||||
system = final.system;
|
system = final.stdenv.hostPlatform.system;
|
||||||
config.allowUnfree = true;
|
config.allowUnfree = true;
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
python-env = final: _prev: {
|
||||||
|
my_python = final.python313.withPackages (
|
||||||
|
ps: with ps; [
|
||||||
|
apprise
|
||||||
|
apscheduler
|
||||||
|
mypy
|
||||||
|
polars
|
||||||
|
psycopg
|
||||||
|
pyfakefs
|
||||||
|
pytest
|
||||||
|
pytest-cov
|
||||||
|
pytest-mock
|
||||||
|
pytest-xdist
|
||||||
|
requests
|
||||||
|
ruff
|
||||||
|
sqlalchemy
|
||||||
|
typer
|
||||||
|
types-requests
|
||||||
|
]
|
||||||
|
);
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
73
pyproject.toml
Normal file
73
pyproject.toml
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
[project]
|
||||||
|
name = "system_tools"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = ""
|
||||||
|
authors = [{ name = "Richie Cahill", email = "richie@tmmworkshop.com" }]
|
||||||
|
requires-python = "~=3.13.0"
|
||||||
|
readme = "README.md"
|
||||||
|
license = "MIT"
|
||||||
|
# these dependencies are a best effort and aren't guaranteed to work
|
||||||
|
dependencies = ["apprise", "apscheduler", "polars", "requests", "typer"]
|
||||||
|
|
||||||
|
[dependency-groups]
|
||||||
|
dev = [
|
||||||
|
"mypy",
|
||||||
|
"pyfakefs",
|
||||||
|
"pytest-cov",
|
||||||
|
"pytest-mock",
|
||||||
|
"pytest-xdist",
|
||||||
|
"pytest",
|
||||||
|
"ruff",
|
||||||
|
"types-requests",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.ruff]
|
||||||
|
|
||||||
|
target-version = "py313"
|
||||||
|
|
||||||
|
line-length = 120
|
||||||
|
|
||||||
|
lint.select = ["ALL"]
|
||||||
|
lint.ignore = [
|
||||||
|
"G004", # (PERM) This is a performers nit
|
||||||
|
"COM812", # (TEMP) conflicts when used with the formatter
|
||||||
|
"ISC001", # (TEMP) conflicts when used with the formatter
|
||||||
|
"S603", # (PERM) This is known to cause a false positive
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.ruff.lint.per-file-ignores]
|
||||||
|
|
||||||
|
"tests/**" = [
|
||||||
|
"S101", # (perm) pytest needs asserts
|
||||||
|
]
|
||||||
|
"python/random/**" = [
|
||||||
|
"T201", # (perm) I don't care about print statements dir
|
||||||
|
]
|
||||||
|
"python/testing/**" = [
|
||||||
|
"T201", # (perm) I don't care about print statements dir
|
||||||
|
"ERA001", # (perm) I don't care about print statements dir
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.ruff.lint.pydocstyle]
|
||||||
|
convention = "google"
|
||||||
|
|
||||||
|
[tool.ruff.lint.flake8-builtins]
|
||||||
|
builtins-ignorelist = ["id"]
|
||||||
|
|
||||||
|
[tool.ruff.lint.pylint]
|
||||||
|
max-args = 9
|
||||||
|
|
||||||
|
[tool.coverage.run]
|
||||||
|
source = ["python"]
|
||||||
|
|
||||||
|
[tool.coverage.report]
|
||||||
|
exclude_lines = [
|
||||||
|
"pragma: no cover",
|
||||||
|
"if TYPE_CHECKING:",
|
||||||
|
"raise NotImplementedError",
|
||||||
|
"if __name__ == \"__main__\":",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
addopts = "-n auto -ra"
|
||||||
|
# --cov=system_tools --cov-report=term-missing --cov-report=xml --cov-report=html --cov-branch
|
||||||
1
python/__init__.py
Normal file
1
python/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""Server Tools."""
|
||||||
72
python/common.py
Normal file
72
python/common.py
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
"""common."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import sys
|
||||||
|
from datetime import UTC, datetime
|
||||||
|
from os import getenv
|
||||||
|
from subprocess import PIPE, Popen
|
||||||
|
|
||||||
|
from apprise import Apprise
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def configure_logger(level: str = "INFO") -> None:
|
||||||
|
"""Configure the logger.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
level (str, optional): The logging level. Defaults to "INFO".
|
||||||
|
"""
|
||||||
|
logging.basicConfig(
|
||||||
|
level=level,
|
||||||
|
datefmt="%Y-%m-%dT%H:%M:%S%z",
|
||||||
|
format="%(asctime)s %(levelname)s %(filename)s:%(lineno)d - %(message)s",
|
||||||
|
handlers=[logging.StreamHandler(sys.stdout)],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def bash_wrapper(command: str) -> tuple[str, int]:
|
||||||
|
"""Execute a bash command and capture the output.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
command (str): The bash command to be executed.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Tuple[str, int]: A tuple containing the output of the command (stdout) as a string,
|
||||||
|
the error output (stderr) as a string (optional), and the return code as an integer.
|
||||||
|
"""
|
||||||
|
# This is a acceptable risk
|
||||||
|
process = Popen(command.split(), stdout=PIPE, stderr=PIPE)
|
||||||
|
output, error = process.communicate()
|
||||||
|
if error:
|
||||||
|
logger.error(f"{error=}")
|
||||||
|
return error.decode(), process.returncode
|
||||||
|
|
||||||
|
return output.decode(), process.returncode
|
||||||
|
|
||||||
|
|
||||||
|
def signal_alert(body: str, title: str = "") -> None:
|
||||||
|
"""Send a signal alert.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
body (str): The body of the alert.
|
||||||
|
title (str, optional): The title of the alert. Defaults to "".
|
||||||
|
"""
|
||||||
|
apprise_client = Apprise()
|
||||||
|
|
||||||
|
from_phone = getenv("SIGNAL_ALERT_FROM_PHONE")
|
||||||
|
to_phone = getenv("SIGNAL_ALERT_TO_PHONE")
|
||||||
|
if not from_phone or not to_phone:
|
||||||
|
logger.info("SIGNAL_ALERT_FROM_PHONE or SIGNAL_ALERT_TO_PHONE not set")
|
||||||
|
return
|
||||||
|
|
||||||
|
apprise_client.add(f"signal://localhost:8989/{from_phone}/{to_phone}")
|
||||||
|
|
||||||
|
apprise_client.notify(title=title, body=body)
|
||||||
|
|
||||||
|
|
||||||
|
def utcnow() -> datetime:
|
||||||
|
"""Get the current UTC time."""
|
||||||
|
return datetime.now(tz=UTC)
|
||||||
59
python/database.py
Normal file
59
python/database.py
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
"""database."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from sqlalchemy import inspect
|
||||||
|
from sqlalchemy.exc import NoInspectionAvailable
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def safe_insert(orm_objects: Sequence[object], session: Session) -> list[tuple[Exception, object]]:
|
||||||
|
"""Safer insert at allows for partial rollbacks.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
orm_objects (Sequence[object]): Tables to insert.
|
||||||
|
session (Session): Database session.
|
||||||
|
"""
|
||||||
|
if unmapped := [orm_object for orm_object in orm_objects if not _is_mapped_instance(orm_object)]:
|
||||||
|
error = f"safe_insert expects ORM-mapped instances {unmapped}"
|
||||||
|
raise TypeError(error)
|
||||||
|
return _safe_insert(orm_objects, session)
|
||||||
|
|
||||||
|
|
||||||
|
def _safe_insert(objects: Sequence[object], session: Session) -> list[tuple[Exception, object]]:
|
||||||
|
exceptions: list[tuple[Exception, object]] = []
|
||||||
|
try:
|
||||||
|
session.add_all(objects)
|
||||||
|
session.commit()
|
||||||
|
|
||||||
|
except Exception as error:
|
||||||
|
session.rollback()
|
||||||
|
|
||||||
|
objects_len = len(objects)
|
||||||
|
if objects_len == 1:
|
||||||
|
logger.exception(objects)
|
||||||
|
return [(error, objects[0])]
|
||||||
|
|
||||||
|
middle = objects_len // 2
|
||||||
|
exceptions.extend(_safe_insert(objects=objects[:middle], session=session))
|
||||||
|
exceptions.extend(_safe_insert(objects=objects[middle:], session=session))
|
||||||
|
return exceptions
|
||||||
|
|
||||||
|
|
||||||
|
def _is_mapped_instance(obj: object) -> bool:
|
||||||
|
"""Return True if `obj` is a SQLAlchemy ORM-mapped instance."""
|
||||||
|
try:
|
||||||
|
inspect(obj) # raises NoInspectionAvailable if not mapped
|
||||||
|
except NoInspectionAvailable:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return True
|
||||||
1
python/installer/__init__.py
Normal file
1
python/installer/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""installer."""
|
||||||
308
python/installer/__main__.py
Normal file
308
python/installer/__main__.py
Normal file
@@ -0,0 +1,308 @@
|
|||||||
|
"""Install NixOS on a ZFS pool."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import curses
|
||||||
|
import logging
|
||||||
|
import sys
|
||||||
|
from os import getenv
|
||||||
|
from pathlib import Path
|
||||||
|
from random import getrandbits
|
||||||
|
from subprocess import PIPE, Popen, run
|
||||||
|
from time import sleep
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from python.common import configure_logger
|
||||||
|
from python.installer.tui import draw_menu
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def bash_wrapper(command: str) -> str:
|
||||||
|
"""Execute a bash command and capture the output.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
command (str): The bash command to be executed.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Tuple[str, int]: A tuple containing the output of the command (stdout) as a string,
|
||||||
|
the error output (stderr) as a string (optional), and the return code as an integer.
|
||||||
|
"""
|
||||||
|
logger.debug(f"running {command=}")
|
||||||
|
# This is a acceptable risk
|
||||||
|
process = Popen(command.split(), stdout=PIPE, stderr=PIPE)
|
||||||
|
output, _ = process.communicate()
|
||||||
|
if process.returncode != 0:
|
||||||
|
error = f"Failed to run command {command=} return code {process.returncode=}"
|
||||||
|
raise RuntimeError(error)
|
||||||
|
|
||||||
|
return output.decode()
|
||||||
|
|
||||||
|
|
||||||
|
def partition_disk(disk: str, swap_size: int, reserve: int = 0) -> None:
|
||||||
|
"""Partition a disk.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
disk (str): The disk to partition.
|
||||||
|
swap_size (int): The size of the swap partition in GB.
|
||||||
|
minimum value is 1.
|
||||||
|
reserve (int, optional): The size of the reserve partition in GB. Defaults to 0.
|
||||||
|
minimum value is 0.
|
||||||
|
"""
|
||||||
|
logger.info(f"partitioning {disk=}")
|
||||||
|
swap_size = max(swap_size, 1)
|
||||||
|
reserve = max(reserve, 0)
|
||||||
|
|
||||||
|
bash_wrapper(f"blkdiscard -f {disk}")
|
||||||
|
|
||||||
|
if reserve > 0:
|
||||||
|
msg = f"Creating swap partition on {disk=} with size {swap_size=}GiB and reserve {reserve=}GiB"
|
||||||
|
logger.info(msg)
|
||||||
|
|
||||||
|
swap_start = swap_size + reserve
|
||||||
|
swap_partition = f"mkpart swap -{swap_start}GiB -{reserve}GiB "
|
||||||
|
else:
|
||||||
|
logger.info(f"Creating swap partition on {disk=} with size {swap_size=}GiB")
|
||||||
|
swap_start = swap_size
|
||||||
|
swap_partition = f"mkpart swap -{swap_start}GiB 100% "
|
||||||
|
|
||||||
|
logger.debug(f"{swap_partition=}")
|
||||||
|
|
||||||
|
create_partitions = (
|
||||||
|
f"parted --script --align=optimal {disk} -- "
|
||||||
|
"mklabel gpt "
|
||||||
|
"mkpart EFI 1MiB 4GiB "
|
||||||
|
f"mkpart root_pool 4GiB -{swap_start}GiB "
|
||||||
|
f"{swap_partition}"
|
||||||
|
"set 1 esp on"
|
||||||
|
)
|
||||||
|
bash_wrapper(create_partitions)
|
||||||
|
|
||||||
|
logger.info(f"{disk=} successfully partitioned")
|
||||||
|
|
||||||
|
|
||||||
|
def create_zfs_pool(pool_disks: Sequence[str], mnt_dir: str) -> None:
|
||||||
|
"""Create a ZFS pool.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
pool_disks (Sequence[str]): A tuple of disks to use for the pool.
|
||||||
|
mnt_dir (str): The mount directory.
|
||||||
|
"""
|
||||||
|
if len(pool_disks) <= 0:
|
||||||
|
error = "disks must be a tuple of at least length 1"
|
||||||
|
raise ValueError(error)
|
||||||
|
|
||||||
|
zpool_create = (
|
||||||
|
"zpool create "
|
||||||
|
"-o ashift=12 "
|
||||||
|
"-o autotrim=on "
|
||||||
|
f"-R {mnt_dir} "
|
||||||
|
"-O acltype=posixacl "
|
||||||
|
"-O canmount=off "
|
||||||
|
"-O dnodesize=auto "
|
||||||
|
"-O normalization=formD "
|
||||||
|
"-O relatime=on "
|
||||||
|
"-O xattr=sa "
|
||||||
|
"-O mountpoint=legacy "
|
||||||
|
"-O compression=zstd "
|
||||||
|
"-O atime=off "
|
||||||
|
"root_pool "
|
||||||
|
)
|
||||||
|
if len(pool_disks) == 1:
|
||||||
|
zpool_create += pool_disks[0]
|
||||||
|
else:
|
||||||
|
zpool_create += "mirror "
|
||||||
|
zpool_create += " ".join(pool_disks)
|
||||||
|
|
||||||
|
bash_wrapper(zpool_create)
|
||||||
|
zpools = bash_wrapper("zpool list -o name")
|
||||||
|
if "root_pool" not in zpools.splitlines():
|
||||||
|
logger.critical("Failed to create root_pool")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
def create_zfs_datasets() -> None:
|
||||||
|
"""Create ZFS datasets."""
|
||||||
|
bash_wrapper("zfs create -o canmount=noauto -o reservation=10G root_pool/root")
|
||||||
|
bash_wrapper("zfs create root_pool/home")
|
||||||
|
bash_wrapper("zfs create root_pool/var -o reservation=1G")
|
||||||
|
bash_wrapper("zfs create -o compression=zstd-9 -o reservation=10G root_pool/nix")
|
||||||
|
datasets = bash_wrapper("zfs list -o name")
|
||||||
|
|
||||||
|
expected_datasets = {
|
||||||
|
"root_pool/root",
|
||||||
|
"root_pool/home",
|
||||||
|
"root_pool/var",
|
||||||
|
"root_pool/nix",
|
||||||
|
}
|
||||||
|
missing_datasets = expected_datasets.difference(datasets.splitlines())
|
||||||
|
if missing_datasets:
|
||||||
|
logger.critical(f"Failed to create pools {missing_datasets}")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
def get_cpu_manufacturer() -> str:
|
||||||
|
"""Get the CPU manufacturer."""
|
||||||
|
output = bash_wrapper("cat /proc/cpuinfo")
|
||||||
|
|
||||||
|
id_vendor = {"AuthenticAMD": "amd", "GenuineIntel": "intel"}
|
||||||
|
|
||||||
|
for line in output.splitlines():
|
||||||
|
if "vendor_id" in line:
|
||||||
|
return id_vendor[line.split(": ")[1].strip()]
|
||||||
|
|
||||||
|
error = "Failed to get CPU manufacturer"
|
||||||
|
raise RuntimeError(error)
|
||||||
|
|
||||||
|
|
||||||
|
def get_boot_drive_id(disk: str) -> str:
|
||||||
|
"""Get the boot drive ID."""
|
||||||
|
output = bash_wrapper(f"lsblk -o UUID {disk}-part1")
|
||||||
|
return output.splitlines()[1]
|
||||||
|
|
||||||
|
|
||||||
|
def create_nix_hardware_file(mnt_dir: str, disks: Sequence[str], encrypt: str | None) -> None:
|
||||||
|
"""Create a NixOS hardware file."""
|
||||||
|
cpu_manufacturer = get_cpu_manufacturer()
|
||||||
|
|
||||||
|
devices = ""
|
||||||
|
if encrypt:
|
||||||
|
disk = disks[0]
|
||||||
|
|
||||||
|
devices = (
|
||||||
|
f' luks.devices."luks-root-pool-{disk.split("/")[-1]}-part2"'
|
||||||
|
"= {\n"
|
||||||
|
f' device = "{disk}-part2";\n'
|
||||||
|
" bypassWorkqueues = true;\n"
|
||||||
|
" allowDiscards = true;\n"
|
||||||
|
" };\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
host_id = format(getrandbits(32), "08x")
|
||||||
|
|
||||||
|
nix_hardware = (
|
||||||
|
"{ config, lib, modulesPath, ... }:\n"
|
||||||
|
"{\n"
|
||||||
|
' imports = [ (modulesPath + "/installer/scan/not-detected.nix") ];\n\n'
|
||||||
|
" boot = {\n"
|
||||||
|
" initrd = {\n"
|
||||||
|
' availableKernelModules = [ \n "ahci"\n "ehci_pci"\n "nvme"\n "sd_mod"\n'
|
||||||
|
' "usb_storage"\n "usbhid"\n "xhci_pci"\n ];\n'
|
||||||
|
" kernelModules = [ ];\n"
|
||||||
|
f" {devices}"
|
||||||
|
" };\n"
|
||||||
|
f' kernelModules = [ "kvm-{cpu_manufacturer}" ];\n'
|
||||||
|
" extraModulePackages = [ ];\n"
|
||||||
|
" };\n\n"
|
||||||
|
" fileSystems = {\n"
|
||||||
|
' "/" = lib.mkDefault {\n device = "root_pool/root";\n fsType = "zfs";\n };\n\n'
|
||||||
|
' "/home" = {\n device = "root_pool/home";\n fsType = "zfs";\n };\n\n'
|
||||||
|
' "/var" = {\n device = "root_pool/var";\n fsType = "zfs";\n };\n\n'
|
||||||
|
' "/nix" = {\n device = "root_pool/nix";\n fsType = "zfs";\n };\n\n'
|
||||||
|
' "/boot" = {\n'
|
||||||
|
f' device = "/dev/disk/by-uuid/{get_boot_drive_id(disks[0])}";\n'
|
||||||
|
' fsType = "vfat";\n options = [\n "fmask=0077"\n'
|
||||||
|
' "dmask=0077"\n ];\n };\n };\n\n'
|
||||||
|
" swapDevices = [ ];\n\n"
|
||||||
|
" networking.useDHCP = lib.mkDefault true;\n\n"
|
||||||
|
' nixpkgs.hostPlatform = lib.mkDefault "x86_64-linux";\n'
|
||||||
|
f" hardware.cpu.{cpu_manufacturer}.updateMicrocode = "
|
||||||
|
"lib.mkDefault config.hardware.enableRedistributableFirmware;\n"
|
||||||
|
f' networking.hostId = "{host_id}";\n'
|
||||||
|
"}\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
Path(f"{mnt_dir}/etc/nixos/hardware-configuration.nix").write_text(nix_hardware)
|
||||||
|
|
||||||
|
|
||||||
|
def install_nixos(mnt_dir: str, disks: Sequence[str], encrypt: str | None) -> None:
|
||||||
|
"""Install NixOS."""
|
||||||
|
bash_wrapper(f"mount -o X-mount.mkdir -t zfs root_pool/root {mnt_dir}")
|
||||||
|
bash_wrapper(f"mount -o X-mount.mkdir -t zfs root_pool/home {mnt_dir}/home")
|
||||||
|
bash_wrapper(f"mount -o X-mount.mkdir -t zfs root_pool/var {mnt_dir}/var")
|
||||||
|
bash_wrapper(f"mount -o X-mount.mkdir -t zfs root_pool/nix {mnt_dir}/nix")
|
||||||
|
|
||||||
|
for disk in disks:
|
||||||
|
bash_wrapper(f"mkfs.vfat -n EFI {disk}-part1")
|
||||||
|
|
||||||
|
# set up mirroring afterwards if more than one disk
|
||||||
|
boot_partition = (
|
||||||
|
f"mount -t vfat -o fmask=0077,dmask=0077,iocharset=iso8859-1,X-mount.mkdir {disks[0]}-part1 {mnt_dir}/boot"
|
||||||
|
)
|
||||||
|
bash_wrapper(boot_partition)
|
||||||
|
|
||||||
|
bash_wrapper(f"nixos-generate-config --root {mnt_dir}")
|
||||||
|
|
||||||
|
create_nix_hardware_file(mnt_dir, disks, encrypt)
|
||||||
|
|
||||||
|
run(("nixos-install", "--root", mnt_dir), check=True)
|
||||||
|
|
||||||
|
|
||||||
|
def installer(
|
||||||
|
disks: Sequence[str],
|
||||||
|
swap_size: int,
|
||||||
|
reserve: int,
|
||||||
|
encrypt_key: str | None,
|
||||||
|
) -> None:
|
||||||
|
"""Main."""
|
||||||
|
logger.info("Starting installation")
|
||||||
|
|
||||||
|
for disk in disks:
|
||||||
|
partition_disk(disk, swap_size, reserve)
|
||||||
|
|
||||||
|
test = Popen(("printf", f"'{encrypt_key}'"), stdout=PIPE)
|
||||||
|
if encrypt_key:
|
||||||
|
sleep(1)
|
||||||
|
for command in (
|
||||||
|
f"cryptsetup luksFormat --type luks2 {disk}-part2 -",
|
||||||
|
f"cryptsetup luksOpen {disk}-part2 luks-root-pool-{disk.split('/')[-1]}-part2 -",
|
||||||
|
):
|
||||||
|
run(command, check=True, stdin=test.stdout)
|
||||||
|
|
||||||
|
mnt_dir = "/tmp/nix_install" # noqa: S108
|
||||||
|
|
||||||
|
Path(mnt_dir).mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
if encrypt_key:
|
||||||
|
pool_disks = [f"/dev/mapper/luks-root-pool-{disk.split('/')[-1]}-part2" for disk in disks]
|
||||||
|
else:
|
||||||
|
pool_disks = [f"{disk}-part2" for disk in disks]
|
||||||
|
|
||||||
|
create_zfs_pool(pool_disks, mnt_dir)
|
||||||
|
|
||||||
|
create_zfs_datasets()
|
||||||
|
|
||||||
|
install_nixos(mnt_dir, disks, encrypt_key)
|
||||||
|
|
||||||
|
logger.info("Installation complete")
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
|
||||||
|
"""Main."""
|
||||||
|
configure_logger("DEBUG")
|
||||||
|
|
||||||
|
state = curses.wrapper(draw_menu)
|
||||||
|
|
||||||
|
encrypt_key = getenv("ENCRYPT_KEY")
|
||||||
|
|
||||||
|
logger.info("installing_nixos")
|
||||||
|
logger.info(f"disks: {state.selected_device_ids}")
|
||||||
|
logger.info(f"swap_size: {state.swap_size}")
|
||||||
|
logger.info(f"reserve: {state.reserve_size}")
|
||||||
|
logger.info(f"encrypted: {bool(encrypt_key)}")
|
||||||
|
|
||||||
|
sleep(3)
|
||||||
|
|
||||||
|
installer(
|
||||||
|
disks=state.get_selected_devices(),
|
||||||
|
swap_size=state.swap_size,
|
||||||
|
reserve=state.reserve_size,
|
||||||
|
encrypt_key=encrypt_key,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
498
python/installer/tui.py
Normal file
498
python/installer/tui.py
Normal file
@@ -0,0 +1,498 @@
|
|||||||
|
"""TUI module."""
|
||||||
|
|
||||||
|
from __future__ import annotations

import curses
import logging
import shlex
from collections import defaultdict
from subprocess import PIPE, Popen
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def bash_wrapper(command: str) -> str:
    """Execute a command and capture its stdout.

    Args:
        command (str): The command to execute. It is split on whitespace,
            so arguments containing spaces are not supported.

    Returns:
        str: The decoded stdout of the command.

    Raises:
        RuntimeError: If the command exits with a non-zero return code.
    """
    logger.debug(f"running {command=}")
    # This is an acceptable risk: the command string is operator-supplied,
    # not untrusted input, and it runs without a shell.
    process = Popen(command.split(), stdout=PIPE, stderr=PIPE)
    output, stderr = process.communicate()
    if process.returncode != 0:
        # Include stderr in the error — previously it was captured and
        # silently discarded, losing the actual failure reason.
        error = (
            f"Failed to run command {command=} return code {process.returncode=} "
            f"stderr={stderr.decode(errors='replace')!r}"
        )
        raise RuntimeError(error)

    return output.decode()
|
||||||
|
|
||||||
|
|
||||||
|
class Cursor:
    """Track a clamped (x, y) cursor position within a curses window."""

    def __init__(self) -> None:
        """Start at the origin with zero-sized bounds."""
        self.x_position = 0
        self.y_position = 0
        self.height = 0
        self.width = 0

    def set_height(self, height: int) -> None:
        """Record the window height used for clamping."""
        self.height = height

    def set_width(self, width: int) -> None:
        """Record the window width used for clamping."""
        self.width = width

    def x_bounce_check(self, cursor: int) -> int:
        """Clamp an x coordinate into [0, width - 1]."""
        return min(self.width - 1, max(0, cursor))

    def y_bounce_check(self, cursor: int) -> int:
        """Clamp a y coordinate into [0, height - 1]."""
        return min(self.height - 1, max(0, cursor))

    def set_x(self, x: int) -> None:
        """Move the cursor to column *x*, clamped to the window."""
        self.x_position = self.x_bounce_check(x)

    def set_y(self, y: int) -> None:
        """Move the cursor to row *y*, clamped to the window."""
        self.y_position = self.y_bounce_check(y)

    def get_x(self) -> int:
        """Return the current column."""
        return self.x_position

    def get_y(self) -> int:
        """Return the current row."""
        return self.y_position

    def move_up(self) -> None:
        """Shift the cursor one row up."""
        self.set_y(self.get_y() - 1)

    def move_down(self) -> None:
        """Shift the cursor one row down."""
        self.set_y(self.get_y() + 1)

    def move_left(self) -> None:
        """Shift the cursor one column left."""
        self.set_x(self.get_x() - 1)

    def move_right(self) -> None:
        """Shift the cursor one column right."""
        self.set_x(self.get_x() + 1)

    def navigation(self, key: int) -> None:
        """Dispatch an arrow-key press to the matching move method.

        Args:
            key (int): The curses key code; non-arrow keys are ignored.
        """
        if key == curses.KEY_DOWN:
            self.move_down()
        elif key == curses.KEY_UP:
            self.move_up()
        elif key == curses.KEY_RIGHT:
            self.move_right()
        elif key == curses.KEY_LEFT:
            self.move_left()
|
||||||
|
|
||||||
|
|
||||||
|
class State:
    """Mutable container for everything the TUI remembers between frames."""

    def __init__(self) -> None:
        """Start with no key pressed, a fresh cursor, and nothing selected."""
        self.key = 0
        self.cursor = Cursor()

        # Sizes are in GB; the show_* flags gate the inline text editors.
        self.swap_size = 0
        self.show_swap_input = False

        self.reserve_size = 0
        self.show_reserve_input = False

        # /dev/disk/by-id paths the user toggled on in the device menu.
        self.selected_device_ids: set[str] = set()

    def get_selected_devices(self) -> tuple[str, ...]:
        """Return the selected device ids as an immutable tuple."""
        return tuple(self.selected_device_ids)
|
||||||
|
|
||||||
|
|
||||||
|
def get_device(raw_device: str) -> dict[str, str]:
    """Parse one line of ``lsblk --pairs`` output into a device mapping.

    Args:
        raw_device (str): One KEY="value" pairs line emitted by lsblk.

    Returns:
        dict[str, str]: Mapping of lower-cased column name to its value.
    """
    device: dict[str, str] = {}
    # shlex honours the shell-style quoting lsblk emits, so values that
    # contain spaces (e.g. mountpoints or model strings) survive parsing;
    # the previous split(" ") approach crashed on them.
    for pair in shlex.split(raw_device):
        key, _, value = pair.partition("=")
        device[key.lower()] = value
    return device
|
||||||
|
|
||||||
|
|
||||||
|
def get_devices() -> list[dict[str, str]]:
    """Return one parsed mapping per line of ``lsblk --paths --pairs``."""
    # --bytes
    output = bash_wrapper("lsblk --paths --pairs")
    return [get_device(line) for line in output.splitlines()]
|
||||||
|
|
||||||
|
|
||||||
|
def set_color() -> None:
    """Initialise curses colors, pairing every color with the default background."""
    curses.start_color()
    curses.use_default_colors()
    # Pair 0 is reserved by curses, so pairs start at 1.
    for color in range(curses.COLORS):
        curses.init_pair(color + 1, color, -1)
|
||||||
|
|
||||||
|
|
||||||
|
def debug_menu(std_screen: curses.window, key: int) -> None:
    """Render debug info (window size, last key, color swatches) near the bottom.

    Args:
        std_screen (curses.window): The curses window.
        key (int): The key.
    """
    height, width = std_screen.getmaxyx()
    std_screen.addstr(height - 4, 0, f"Width: {width}, Height: {height}", curses.color_pair(5))

    # Key 0 means no input has been seen yet; messages are clipped to fit.
    message = "No key press detected..." if key == 0 else f"Last key pressed: {key}"
    std_screen.addstr(height - 3, 0, message[: width - 1])

    # One swatch per basic color pair.
    for pair in range(8):
        std_screen.addstr(height - 2, pair * 3, f"{pair}██", curses.color_pair(pair))
|
||||||
|
|
||||||
|
|
||||||
|
def get_text_input(std_screen: curses.window, prompt: str, y: int, x: int) -> str:
    """Get text input.

    Args:
        std_screen (curses.window): The curses window.
        prompt (str): The prompt.
        y (int): The y position.
        x (int): The x position.

    Returns:
        str: The input string; empty if the user pressed Escape.
    """
    esc_key = 27
    curses.echo()
    std_screen.addstr(y, x, prompt)
    input_str = ""
    while True:
        key = std_screen.getch()
        # Enter accepts whatever has been typed so far.
        if key == ord("\n"):
            break
        # Escape cancels the input entirely.
        if key == esc_key:
            input_str = ""
            break
        if key in (curses.KEY_BACKSPACE, ord("\b"), 127):
            input_str = input_str[:-1]
            # Trailing space overwrites the character just erased on screen.
            std_screen.addstr(y, x + len(prompt), input_str + " ")
        else:
            # NOTE(review): chr(key) is also applied to special keys (codes
            # > 255, e.g. arrows), appending non-printable characters —
            # confirm whether such keys should be filtered out.
            input_str += chr(key)
        std_screen.refresh()
    curses.noecho()
    return input_str
|
||||||
|
|
||||||
|
|
||||||
|
def swap_size_input(
    std_screen: curses.window,
    state: State,
    swap_offset: int,
) -> State:
    """Swap size input.

    Args:
        std_screen (curses.window): The curses window.
        state (State): The state object.
        swap_offset (int): The screen row the swap field is drawn on.

    Returns:
        State: The updated state object (also mutated in place).
    """
    swap_size_text = "Swap size (GB): "
    std_screen.addstr(swap_offset, 0, f"{swap_size_text}{state.swap_size}")
    # Enter while the cursor sits on this row opens the inline editor.
    if state.key == ord("\n") and state.cursor.get_y() == swap_offset:
        state.show_swap_input = True

    if state.show_swap_input:
        swap_size_str = get_text_input(std_screen, swap_size_text, swap_offset, 0)
        try:
            state.swap_size = int(swap_size_str)
            state.show_swap_input = False
        except ValueError:
            # Non-numeric input: tell the user, wait for a key, keep old value.
            std_screen.addstr(swap_offset, 0, "Invalid input. Press any key to continue.")
            std_screen.getch()
            state.show_swap_input = False

    return state
|
||||||
|
|
||||||
|
|
||||||
|
def reserve_size_input(
    std_screen: curses.window,
    state: State,
    reserve_offset: int,
) -> State:
    """Render the reserve-size row and, when activated, read a new value.

    Args:
        std_screen (curses.window): The curses window.
        state (State): The state object (also mutated in place).
        reserve_offset (int): The screen row the reserve field is drawn on.

    Returns:
        State: The updated state object.
    """
    prompt = "reserve size (GB): "
    std_screen.addstr(reserve_offset, 0, f"{prompt}{state.reserve_size}")

    # Enter while the cursor sits on this row switches the field into edit mode.
    if state.key == ord("\n") and state.cursor.get_y() == reserve_offset:
        state.show_reserve_input = True

    if state.show_reserve_input:
        raw_value = get_text_input(std_screen, prompt, reserve_offset, 0)
        try:
            state.reserve_size = int(raw_value)
        except ValueError:
            # Non-numeric input: tell the user, wait for a key, keep old value.
            std_screen.addstr(reserve_offset, 0, "Invalid input. Press any key to continue.")
            std_screen.getch()
        state.show_reserve_input = False

    return state
|
||||||
|
|
||||||
|
|
||||||
|
def status_bar(
    std_screen: curses.window,
    cursor: Cursor,
    width: int,
    height: int,
) -> None:
    """Draw the reverse-video status bar on the bottom row.

    Args:
        std_screen (curses.window): The curses window.
        cursor (Cursor): The cursor.
        width (int): The width.
        height (int): The height.
    """
    std_screen.attron(curses.A_REVERSE)
    std_screen.attron(curses.color_pair(3))

    # Named bar_text so the local no longer shadows this function.
    bar_text = f"Press 'q' to exit | STATUS BAR | Pos: {cursor.get_x()}, {cursor.get_y()}"
    std_screen.addstr(height - 1, 0, bar_text)
    # Pad the remainder of the row so the reverse-video band spans the screen.
    std_screen.addstr(height - 1, len(bar_text), " " * (width - len(bar_text) - 1))

    std_screen.attroff(curses.color_pair(3))
    std_screen.attroff(curses.A_REVERSE)
|
||||||
|
|
||||||
|
|
||||||
|
def get_device_id_mapping() -> dict[str, set[str]]:
    """Map each resolved block device path to its /dev/disk/by-id symlinks.

    Returns:
        dict[str, set[str]]: device path -> set of by-id symlink paths.
    """
    device_ids = bash_wrapper("find /dev/disk/by-id -type l").splitlines()

    device_id_mapping: dict[str, set[str]] = defaultdict(set)

    for device_id in device_ids:
        # Resolve the symlink to the real device node so ids group per device.
        device = bash_wrapper(f"readlink -f {device_id}").strip()
        device_id_mapping[device].add(device_id)

    return device_id_mapping
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_device_menu_padding(devices: list[dict[str, str]], column: str, padding: int = 0) -> int:
    """Calculate the column width needed to display *column* for all devices.

    Args:
        devices (list[dict[str, str]]): The devices.
        column (str): The device dict key to measure.
        padding (int, optional): Extra spaces added to the widest value. Defaults to 0.

    Returns:
        int: The widest value length plus padding. With no devices the
            result is just the padding (previously `max()` raised ValueError
            on an empty sequence).
    """
    return max((len(device[column]) for device in devices), default=0) + padding
|
||||||
|
|
||||||
|
|
||||||
|
def draw_device_ids(
    state: State,
    row_number: int,
    menu_start_x: int,
    std_screen: curses.window,
    menu_width: list[int],
    device_ids: set[str],
) -> tuple[State, int]:
    """Draw device IDs.

    Args:
        state (State): The state object.
        row_number (int): The row number to start drawing below.
        menu_start_x (int): The menu start x.
        std_screen (curses.window): The curses window.
        menu_width (list[int]): The x columns occupied by the menu.
        device_ids (set[str]): The device IDs.

    Returns:
        tuple[State, int]: The updated state object and the last row drawn.
    """
    for device_id in sorted(device_ids):
        row_number = row_number + 1
        # Bold-highlight the row under the cursor while the cursor is
        # horizontally inside the menu columns.
        if row_number == state.cursor.get_y() and state.cursor.get_x() in menu_width:
            std_screen.attron(curses.A_BOLD)
            # Space toggles selection of the highlighted device id.
            if state.key == ord(" "):
                if device_id not in state.selected_device_ids:
                    state.selected_device_ids.add(device_id)
                else:
                    state.selected_device_ids.remove(device_id)

        # Selected ids are drawn in color pair 7 regardless of cursor position.
        if device_id in state.selected_device_ids:
            std_screen.attron(curses.color_pair(7))

        std_screen.addstr(row_number, menu_start_x, f" {device_id}")

        # Unconditionally reset both attributes before the next row.
        std_screen.attroff(curses.color_pair(7))
        std_screen.attroff(curses.A_BOLD)

    return state, row_number
|
||||||
|
|
||||||
|
|
||||||
|
def draw_device_menu(
    std_screen: curses.window,
    devices: list[dict[str, str]],
    device_id_mapping: dict[str, set[str]],
    state: State,
    menu_start_y: int = 0,
    menu_start_x: int = 0,
) -> tuple[State, int]:
    """Draw the device menu and handle user input.

    Args:
        std_screen (curses.window): the curses window to draw on
        devices (list[dict[str, str]]): the list of devices to draw
        device_id_mapping (dict[str, set[str]]): mapping of device path to its by-id symlinks
        state (State): the state object to update
        menu_start_y (int, optional): the y position to start drawing the menu. Defaults to 0.
        menu_start_x (int, optional): the x position to start drawing the menu. Defaults to 0.

    Returns:
        tuple[State, int]: the updated state object and the last row drawn.
    """
    padding = 2

    # Size each column to its widest value plus the shared padding.
    name_padding = calculate_device_menu_padding(devices, "name", padding)
    size_padding = calculate_device_menu_padding(devices, "size", padding)
    type_padding = calculate_device_menu_padding(devices, "type", padding)
    mountpoints_padding = calculate_device_menu_padding(devices, "mountpoints", padding)

    device_header = (
        f"{'Name':{name_padding}}{'Size':{size_padding}}{'Type':{type_padding}}{'Mountpoints':{mountpoints_padding}}"
    )

    # The x columns the menu occupies; used to decide if the cursor is "in" it.
    menu_width = list(range(menu_start_x, len(device_header) + menu_start_x))

    std_screen.addstr(menu_start_y, menu_start_x, device_header, curses.color_pair(5))
    devises_list_start = menu_start_y + 1

    row_number = devises_list_start

    for device in devices:
        # NOTE(review): row_number is incremented before the first draw, so a
        # blank row is left under the header — confirm this is intentional.
        row_number = row_number + 1
        device_name = device["name"]
        device_row = (
            f"{device_name:{name_padding}}"
            f"{device['size']:{size_padding}}"
            f"{device['type']:{type_padding}}"
            f"{device['mountpoints']:{mountpoints_padding}}"
        )
        std_screen.addstr(row_number, menu_start_x, device_row)

        # Draw this device's selectable by-id entries beneath its row.
        state, row_number = draw_device_ids(
            state=state,
            row_number=row_number,
            menu_start_x=menu_start_x,
            std_screen=std_screen,
            menu_width=menu_width,
            device_ids=device_id_mapping[device_name],
        )

    return state, row_number
|
||||||
|
|
||||||
|
|
||||||
|
def draw_menu(std_screen: curses.window) -> State:
    """Draw the menu and handle user input.

    Args:
        std_screen (curses.window): the curses window to draw on

    Returns:
        State: the state object
    """
    # Clear and refresh the screen for a blank canvas
    std_screen.clear()
    std_screen.refresh()

    set_color()

    state = State()

    # Device discovery runs once, before the event loop starts.
    devices = get_devices()

    device_id_mapping = get_device_id_mapping()

    # Loop where k is the last character pressed
    while state.key != ord("q"):
        std_screen.clear()
        height, width = std_screen.getmaxyx()

        # Re-read the window size every frame so resizes clamp the cursor.
        state.cursor.set_height(height)
        state.cursor.set_width(width)

        state.cursor.navigation(state.key)

        state, device_menu_size = draw_device_menu(
            std_screen=std_screen,
            state=state,
            devices=devices,
            device_id_mapping=device_id_mapping,
        )

        swap_offset = device_menu_size + 2

        # Both input helpers mutate `state` in place, so their return values
        # do not need to be captured here.
        swap_size_input(
            std_screen=std_screen,
            state=state,
            swap_offset=swap_offset,
        )
        reserve_size_input(
            std_screen=std_screen,
            state=state,
            reserve_offset=swap_offset + 1,
        )

        status_bar(std_screen, state.cursor, width, height)

        debug_menu(std_screen, state.key)

        std_screen.move(state.cursor.get_y(), state.cursor.get_x())

        std_screen.refresh()

        # Block until the next key press; it is processed next iteration.
        state.key = std_screen.getch()

    return state
|
||||||
155
python/parallelize.py
Normal file
155
python/parallelize.py
Normal file
@@ -0,0 +1,155 @@
|
|||||||
|
"""Thing."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from multiprocessing import cpu_count
|
||||||
|
from typing import TYPE_CHECKING, Any, Literal, TypeVar
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Callable, Mapping, Sequence
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
R = TypeVar("R")
|
||||||
|
|
||||||
|
modes = Literal["normal", "early_error"]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ExecutorResults[R]:
|
||||||
|
"""Dataclass to store the results and exceptions of the parallel execution."""
|
||||||
|
|
||||||
|
results: list[R]
|
||||||
|
exceptions: list[BaseException]
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
"""Return a string representation of the object."""
|
||||||
|
return f"results={self.results} exceptions={self.exceptions}"
|
||||||
|
|
||||||
|
|
||||||
|
def _parallelize_base[R](
|
||||||
|
executor_type: type[ThreadPoolExecutor | ProcessPoolExecutor],
|
||||||
|
func: Callable[..., R],
|
||||||
|
kwargs_list: Sequence[Mapping[str, Any]],
|
||||||
|
max_workers: int | None,
|
||||||
|
progress_tracker: int | None,
|
||||||
|
mode: modes,
|
||||||
|
) -> ExecutorResults:
|
||||||
|
total_work = len(kwargs_list)
|
||||||
|
|
||||||
|
with executor_type(max_workers=max_workers) as executor:
|
||||||
|
futures = [executor.submit(func, **kwarg) for kwarg in kwargs_list]
|
||||||
|
|
||||||
|
results = []
|
||||||
|
exceptions = []
|
||||||
|
for index, future in enumerate(futures, 1):
|
||||||
|
if exception := future.exception():
|
||||||
|
logger.error(f"{future} raised {exception.__class__.__name__}")
|
||||||
|
exceptions.append(exception)
|
||||||
|
if mode == "early_error":
|
||||||
|
executor.shutdown(wait=False)
|
||||||
|
raise exception
|
||||||
|
continue
|
||||||
|
|
||||||
|
results.append(future.result())
|
||||||
|
|
||||||
|
if progress_tracker and index % progress_tracker == 0:
|
||||||
|
logger.info(f"Progress: {index}/{total_work}")
|
||||||
|
|
||||||
|
return ExecutorResults(results, exceptions)
|
||||||
|
|
||||||
|
|
||||||
|
def parallelize_thread[R](
    func: Callable[..., R],
    kwargs_list: Sequence[Mapping[str, Any]],
    max_workers: int | None = None,
    progress_tracker: int | None = None,
    mode: modes = "normal",
) -> ExecutorResults:
    """Generic function to run a function with multiple arguments in threads.

    Args:
        func (Callable[..., R]): Function to run in threads.
        kwargs_list (Sequence[Mapping[str, Any]]): List of dictionaries with the arguments for the function.
        max_workers (int | None, optional): Number of workers to use. Defaults to None
            (the executor chooses a limit based on the CPU count).
        progress_tracker (int | None, optional): Number of tasks to complete before logging progress.
        mode (modes, optional): Mode to use. Defaults to "normal".

    Returns:
        ExecutorResults: The collected results and exceptions.
    """
    return _parallelize_base(
        executor_type=ThreadPoolExecutor,
        func=func,
        kwargs_list=kwargs_list,
        max_workers=max_workers,
        progress_tracker=progress_tracker,
        mode=mode,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def parallelize_process[R](
    func: Callable[..., R],
    kwargs_list: Sequence[Mapping[str, Any]],
    max_workers: int | None = None,
    progress_tracker: int | None = None,
    mode: modes = "normal",
) -> ExecutorResults:
    """Generic function to run a function with multiple arguments in processes.

    Args:
        func (Callable[..., R]): Function to run in processes.
        kwargs_list (Sequence[Mapping[str, Any]]): List of dictionaries with the arguments for the function.
        max_workers (int | None, optional): Number of workers to use. Defaults to None
            (the executor chooses a limit); must not exceed the CPU count.
        progress_tracker (int | None, optional): Number of tasks to complete before logging progress.
        mode (modes, optional): Mode to use. Defaults to "normal".

    Returns:
        ExecutorResults: The collected results and exceptions.

    Raises:
        RuntimeError: If max_workers exceeds the number of CPUs.
    """
    if max_workers and max_workers > cpu_count():
        error = f"max_workers must be less than or equal to {cpu_count()}"
        raise RuntimeError(error)

    return process_executor_unchecked(
        func=func,
        kwargs_list=kwargs_list,
        max_workers=max_workers,
        progress_tracker=progress_tracker,
        mode=mode,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def process_executor_unchecked[R](
    func: Callable[..., R],
    kwargs_list: Sequence[Mapping[str, Any]],
    max_workers: int | None,
    progress_tracker: int | None,
    mode: modes = "normal",
) -> ExecutorResults:
    """Generic function to run a function with multiple arguments in parallel.

    Note: this function does not check if the number of workers is greater than the number of CPUs.
    This can cause the system to become unresponsive.

    Args:
        func (Callable[..., R]): Function to run in parallel.
        kwargs_list (Sequence[Mapping[str, Any]]): List of dictionaries with the arguments for the function.
        max_workers (int | None): Number of workers to use. None lets the executor decide.
        progress_tracker (int | None): Number of tasks to complete before logging progress.
        mode (modes, optional): Mode to use. Defaults to "normal".

    Returns:
        ExecutorResults: The collected results and exceptions.
    """
    return _parallelize_base(
        executor_type=ProcessPoolExecutor,
        func=func,
        kwargs_list=kwargs_list,
        max_workers=max_workers,
        progress_tracker=progress_tracker,
        mode=mode,
    )
|
||||||
1
python/random/__init__.py
Normal file
1
python/random/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""init."""
|
||||||
40
python/random/capasitor.py
Normal file
40
python/random/capasitor.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
"""capasitor."""
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_capacitor_capacity(voltage: float, farads: float) -> float:
    """Return the energy stored in a capacitor, in watt-hours.

    Args:
        voltage (float): Capacitor voltage in volts.
        farads (float): Capacitance in farads.

    Returns:
        float: Stored energy in watt-hours (E = C * V**2 / 2, joules / 3600).
    """
    # True division: the previous floor division (`//`) truncated the energy
    # to whole joules / whole watt-hours and returned 0.0 for any capacitor
    # storing less than 3600 J.
    joules = (farads * voltage**2) / 2
    return joules / 3600
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_pack_capacity(cells: int, cell_voltage: float, farads: float) -> float:
    """Return the watt-hours stored by a series pack of identical capacitors.

    Series-connecting *cells* capacitors multiplies the voltage by the cell
    count and divides the per-cell capacitance by it.
    """
    pack_voltage = cells * cell_voltage
    pack_farads = farads / cells
    return calculate_capacitor_capacity(pack_voltage, pack_farads)
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_pack_capacity2(cells: int, cell_voltage: float, farads: float, cell_cost: float) -> tuple[float, float]:
    """Return (pack watt-hours, total cell cost) for a series capacitor pack."""
    pack_energy = calculate_capacitor_capacity(cells * cell_voltage, farads / cells)
    total_cost = cell_cost * cells
    return pack_energy, total_cost
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """Main."""
    # 10 series cells of 2.7 V / 500 F each: 27 V pack, 50 F effective.
    watt_hours = calculate_pack_capacity(cells=10, cell_voltage=2.7, farads=500)
    print(f"{watt_hours=}")
    print(f"{watt_hours*16=}")
    # Single large cell for comparison.
    watt_hours = calculate_pack_capacity(cells=1, cell_voltage=2.7, farads=5000)
    print(f"{watt_hours=}")

    # Same pack shape with cost included.
    watt_hours, cost = calculate_pack_capacity2(
        cells=10,
        cell_voltage=2.7,
        farads=3000,
        cell_cost=11.60,
    )
    print(f"{watt_hours=}")
    print(f"{cost=}")


if __name__ == "__main__":
    main()
|
||||||
25
python/random/thing.py
Normal file
25
python/random/thing.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
"""thing."""
|
||||||
|
|
||||||
|
|
||||||
|
def caculat_batry_specs(
    cell_amp_hour: int,
    cell_voltage: float,
    cells_per_pack: int,
    packs: int,
) -> tuple[float, float]:
    """Return (total battery capacity in watt-hours, pack voltage in volts).

    NOTE(review): the function name is misspelled but kept unchanged for the
    module-level caller below.
    """
    pack_voltage = cell_voltage * cells_per_pack

    pack_watt_hours = pack_voltage * cell_amp_hour

    total_capacity = pack_watt_hours * packs
    return (
        total_capacity,
        pack_voltage,
    )
|
||||||
|
|
||||||
|
|
||||||
|
# Example: 3.2 V / 300 Ah cells, 8 cells per pack, 2 packs.
battry_capacity, pack_voltage = caculat_batry_specs(300, 3.2, 8, 2)
print(f"{battry_capacity=} {pack_voltage=}")
cost = 1700
# battry_capacity is in watt-hours, so convert to kWh before computing $/kWh
# (previously this printed the $/Wh figure under a $/kWh label).
print(f"$/kWh {cost / (battry_capacity / 1000)}")
|
||||||
196
python/random/voltage_drop.py
Normal file
196
python/random/voltage_drop.py
Normal file
@@ -0,0 +1,196 @@
|
|||||||
|
"""voltage_drop."""
|
||||||
|
|
||||||
|
import math
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
|
||||||
|
class TemperatureUnit(Enum):
    """Supported temperature input units."""

    CELSIUS = "c"
    FAHRENHEIT = "f"
    KELVIN = "k"


class Temperature:
    """A temperature, stored internally in degrees Celsius."""

    def __init__(
        self,
        temperature: float,
        unit: TemperatureUnit = TemperatureUnit.CELSIUS,
    ) -> None:
        """Convert *temperature* from *unit* to Celsius and store it.

        Args:
            temperature (float): The temperature value.
            unit (TemperatureUnit, optional): Unit of the value. Defaults to Celsius.
        """
        # Proper absolute conversions: the previous scale-only factors
        # (0.5556 for F, 1.8 for K) convert temperature *differences*, not
        # absolute temperatures, so e.g. 100 F came out as 55.6 C.
        if unit is TemperatureUnit.FAHRENHEIT:
            self.temperature = (temperature - 32.0) * 5.0 / 9.0
        elif unit is TemperatureUnit.KELVIN:
            self.temperature = temperature - 273.15
        else:
            self.temperature = temperature

    def __float__(self) -> float:
        """Return the temperature in degrees Celsius."""
        return self.temperature
|
||||||
|
|
||||||
|
|
||||||
|
class LengthUnit(Enum):
    """Supported length input units."""

    METERS = "m"
    FEET = "ft"
    INCHES = "in"


class Length:
    """A length, stored internally in meters."""

    def __init__(self, length: float, unit: LengthUnit) -> None:
        """Convert *length* from *unit* to meters and store it."""
        self.meters = self._convert_to_meters(length, unit)

    def _convert_to_meters(self, length: float, unit: LengthUnit) -> float:
        """Return *length* expressed in meters.

        Raises:
            ValueError: If *unit* has no known conversion factor.
        """
        # Meters per unit; locals renamed from `thing`/`test`, and the
        # truthiness check replaced with an explicit None check.
        meters_per_unit = {
            LengthUnit.METERS: 1,
            LengthUnit.FEET: 0.3048,
            LengthUnit.INCHES: 0.0254,
        }
        factor = meters_per_unit.get(unit)
        if factor is None:
            error = f"Unsupported unit: {unit}"
            raise ValueError(error)
        return length * factor

    def __float__(self) -> float:
        """Return the length in meters."""
        return self.meters

    def feet(self) -> float:
        """Return the length in feet."""
        return self.meters * 3.2808
|
||||||
|
|
||||||
|
|
||||||
|
class MaterialType(Enum):
    """Conductor material."""

    COPPER = "copper"
    ALUMINUM = "aluminum"
    CCA = "cca"
    SILVER = "silver"
    GOLD = "gold"


def get_material_resistivity(
    material: MaterialType,
    temperature: "Temperature | None" = None,
) -> float:
    """Return the resistivity of *material* in ohm-meters at *temperature*.

    Args:
        material (MaterialType): The conductor material.
        temperature (Temperature | None, optional): Conductor temperature.
            Defaults to 20 C, the reference temperature of the table below.

    Returns:
        float: Resistivity in ohm-meters.
    """
    if not temperature:
        temperature = Temperature(20.0)
    # (resistivity at 20 C in ohm-m, temperature coefficient per degree C)
    material_info = {
        MaterialType.COPPER: (1.724e-8, 0.00393),
        MaterialType.ALUMINUM: (2.908e-8, 0.00403),
        MaterialType.CCA: (2.577e-8, 0.00397),
        MaterialType.SILVER: (1.632e-8, 0.00380),
        MaterialType.GOLD: (2.503e-8, 0.00340),
    }

    base_resistivity, temp_coefficient = material_info[material]
    # The tabulated values are referenced to 20 C, so the linear correction
    # must use (T - 20); the previous `1 + a * T` form inflated resistivity
    # by ~8% even at the reference temperature itself.
    return base_resistivity * (1 + temp_coefficient * (float(temperature) - 20.0))
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_awg_diameter_mm(gauge: int) -> float:
    """Calculate wire diameter in millimeters for a given AWG gauge.

    Uses the standard AWG geometric progression, 92 ** ((36 - n) / 39),
    scaled by the AWG 36 diameter of 0.127 mm, rounded to 3 decimals.
    """
    exponent = (36 - gauge) / 39
    diameter_mm = 0.127 * 92**exponent
    return round(diameter_mm, 3)
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_wire_area_m2(gauge: int) -> float:
    """Calculate the area of a wire in square meters.

    Args:
        gauge (int): The AWG (American Wire Gauge) number of the wire

    Returns:
        float: The area of the wire in square meters
    """
    # Diameter is in mm, so /2000 yields the radius in meters.
    radius_m = calculate_awg_diameter_mm(gauge) / 2000
    return math.pi * radius_m**2
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_resistance_per_meter(gauge: int) -> float:
    """Calculate the resistance per meter of a wire.

    Note: always uses copper resistivity at the default temperature.

    Args:
        gauge (int): The AWG (American Wire Gauge) number of the wire

    Returns:
        float: The resistance per meter of the wire
    """
    resistivity = get_material_resistivity(MaterialType.COPPER)
    return resistivity / calculate_wire_area_m2(gauge)
|
||||||
|
|
||||||
|
|
||||||
|
def voltage_drop(
    gauge: int,
    material: MaterialType,
    length: Length,
    current_a: float,
) -> float:
    """Calculate the voltage drop of a wire.

    Args:
        gauge (int): The AWG (American Wire Gauge) number of the wire
        material (MaterialType): The type of conductor material (e.g., copper, aluminum)
        length (Length): The length of the wire in meters
        current_a (float): The current flowing through the wire in amperes

    Returns:
        float: The voltage drop of the wire in volts
    """
    ohms_per_meter = get_material_resistivity(material) / calculate_wire_area_m2(gauge)
    # Both conductors of the circuit carry the current, hence the factor
    # of two (round-trip length).
    round_trip_ohms = ohms_per_meter * float(length) * 2
    return round_trip_ohms * current_a
|
||||||
|
|
||||||
|
|
||||||
|
# Example: voltage drop across 20 ft of 10 AWG copper-clad aluminum at 20 A.
print(
    voltage_drop(
        gauge=10,
        material=MaterialType.CCA,
        length=Length(length=20, unit=LengthUnit.FEET),
        current_a=20,
    )
)
|
||||||
|
|
||||||
|
|
||||||
|
def max_wire_length(
    gauge: int,
    material: MaterialType,
    current_amps: float,
    voltage_drop: float = 0.3,
    temperature: Temperature | None = None,
) -> Length:
    """Calculate the maximum allowable wire length based on voltage drop criteria.

    Args:
        gauge (int): The AWG (American Wire Gauge) number of the wire
        material (MaterialType): The type of conductor material (e.g., copper, aluminum)
        current_amps (float): The current flowing through the wire in amperes
        voltage_drop (float, optional): Maximum allowable voltage drop in volts. Defaults to 0.3.
        temperature (Temperature | None, optional): The temperature of the wire.
            Defaults to 100 degrees Fahrenheit when not provided.

    Returns:
        Length: Maximum wire length in meters that maintains the specified voltage drop

    """
    # Compare with `is None` (not truthiness) so a Temperature that happens to
    # evaluate falsy (e.g. zero degrees) is not silently replaced.
    if temperature is None:
        temperature = Temperature(100.0, unit=TemperatureUnit.FAHRENHEIT)

    resistivity = get_material_resistivity(material, temperature)
    resistance_per_meter = resistivity / calculate_wire_area_m2(gauge)
    # V = IR, solve for length where V is the allowed voltage drop.
    # NOTE(review): unlike voltage_drop(), this does not double the length for
    # the round-trip conductor path — confirm that is intended.
    return Length(
        voltage_drop / (current_amps * resistance_per_meter),
        LengthUnit.METERS,
    )
|
||||||
|
|
||||||
|
|
||||||
|
# Ad-hoc demo: maximum 10 AWG CCA run length (in feet) at decreasing load currents.
for demo_current_amps in (20, 10, 5):
    print(max_wire_length(gauge=10, material=MaterialType.CCA, current_amps=demo_current_amps).feet())
|
||||||
1
python/system_tests/__init__.py
Normal file
1
python/system_tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""system_tests."""
|
||||||
99
python/system_tests/components.py
Normal file
99
python/system_tests/components.py
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
"""Validate Jeeves."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from copy import copy
|
||||||
|
from re import search
|
||||||
|
from time import sleep
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from python.common import bash_wrapper
|
||||||
|
from python.zfs import Zpool
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def zpool_tests(pool_names: Sequence[str], zpool_capacity_threshold: int = 90) -> list[str] | None:
    """Check the zpool health and capacity.

    Args:
        pool_names (Sequence[str]): A list of pool names to test.
        zpool_capacity_threshold (int, optional): The threshold for the zpool capacity. Defaults to 90.

    Returns:
        list[str] | None: A list of errors if any.

    """
    logger.info("Testing zpool")

    errors: list[str] = []
    for name in pool_names:
        pool = Zpool(name)
        # Anything other than ONLINE (DEGRADED, FAULTED, ...) is reported.
        if pool.health != "ONLINE":
            errors.append(f"{pool.name} is {pool.health}")
        if pool.capacity >= zpool_capacity_threshold:
            errors.append(f"{pool.name} is low on space")

    # `zpool upgrade` with no args reports whether any pool lacks feature flags.
    upgrade_status, _ = bash_wrapper("zpool upgrade")
    all_up_to_date = search(
        r"Every feature flags pool has all supported and requested features enabled.",
        upgrade_status,
    )
    if all_up_to_date is None:
        errors.append("ZPool out of date run `sudo zpool upgrade -a`")

    return errors
|
||||||
|
|
||||||
|
|
||||||
|
def systemd_tests(
    service_names: Sequence[str],
    max_retries: int = 30,
    retry_delay_secs: int = 1,
    retryable_statuses: Sequence[str] | None = None,
    valid_statuses: Sequence[str] | None = None,
) -> list[str] | None:
    """Tests a systemd services.

    Args:
        service_names (Sequence[str]): A list of service names to test.
        max_retries (int, optional): The maximum number of retries. Defaults to 30.
            minimum value is 1.
        retry_delay_secs (int, optional): The delay between retries in seconds. Defaults to 1.
            minimum value is 1.
        retryable_statuses (Sequence[str] | None, optional): A list of retryable statuses. Defaults to None.
        valid_statuses (Sequence[str] | None, optional): A list of valid statuses. Defaults to None.

    Returns:
        list[str] | None: A list of errors if any.

    """
    logger.info("Testing systemd service")

    # Clamp to sane minimums so the loop always runs at least once.
    max_retries = max(max_retries, 1)
    retry_delay_secs = max(retry_delay_secs, 1)
    last_try = max_retries - 1

    # Statuses keep the trailing newline emitted by `systemctl is-active`.
    if retryable_statuses is None:
        retryable_statuses = ("inactive\n", "activating\n")

    if valid_statuses is None:
        valid_statuses = ("active\n",)

    service_names_set = set(service_names)

    errors: set[str] = set()
    for retry in range(max_retries):
        if not service_names_set:
            break
        logger.info(f"Testing systemd service in {retry + 1} of {max_retries}")
        # Iterate a copy so validated services can be removed mid-loop.
        service_names_to_test = copy(service_names_set)
        for service_name in service_names_to_test:
            service_status, _ = bash_wrapper(f"systemctl is-active {service_name}")
            if service_status in valid_statuses:
                service_names_set.remove(service_name)
                continue
            if service_status in retryable_statuses and retry < last_try:
                continue
            errors.add(f"{service_name} is {service_status.strip()}")

        # Bug fix: the sleep used to run unconditionally at the end of every
        # pass, adding a pointless delay after the final retry and after all
        # services had already validated.
        if service_names_set and retry < last_try:
            sleep(retry_delay_secs)

    return list(errors)
|
||||||
66
python/system_tests/validate_system.py
Normal file
66
python/system_tests/validate_system.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
"""Validate {server_name}."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import sys
|
||||||
|
import tomllib
|
||||||
|
from os import environ
|
||||||
|
from pathlib import Path
|
||||||
|
from socket import gethostname
|
||||||
|
|
||||||
|
import typer
|
||||||
|
|
||||||
|
from python.common import configure_logger, signal_alert
|
||||||
|
from python.system_tests.components import systemd_tests, zpool_tests
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def load_config_data(config_file: Path) -> dict[str, list[str]]:
|
||||||
|
"""Load a TOML configuration file.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
config_file (Path): The path to the configuration file.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: The configuration data.
|
||||||
|
"""
|
||||||
|
return tomllib.loads(config_file.read_text())
|
||||||
|
|
||||||
|
|
||||||
|
def main(config_file: Path) -> None:
    """Validate this host: run the zpool and systemd checks listed in the config.

    Exits with status 1 (after signalling an alert) when any check fails.

    Args:
        config_file (Path): Path to a TOML file with optional "zpools" and
            "services" lists.
    """
    configure_logger(level=environ.get("LOG_LEVEL", "INFO"))

    server_name = gethostname()
    logger.info(f"Starting {server_name} validation")

    config_data = load_config_data(config_file)

    errors: list[str] = []
    try:
        if config_data.get("zpools") and (zpool_errors := zpool_tests(config_data["zpools"])):
            errors.extend(zpool_errors)

        if config_data.get("services") and (systemd_errors := systemd_tests(config_data["services"])):
            errors.extend(systemd_errors)

    except Exception as error:
        # Top-level boundary: log with traceback and fold into the error report.
        logger.exception(f"{server_name} validation failed")
        errors.append(f"{server_name} validation failed: {error}")

    if errors:
        # Bug fix: join outside the f-string — a backslash inside an f-string
        # expression ('\n'.join(...)) is a SyntaxError before Python 3.12.
        error_report = "\n".join(errors)
        logger.error(f"{server_name} validation failed: \n{error_report}")
        signal_alert(f"{server_name} validation failed {errors}")

        sys.exit(1)

    logger.info(f"{server_name} validation passed")
|
||||||
|
|
||||||
|
|
||||||
|
def cli() -> None:
    """Command-line entry point: wrap ``main`` in a Typer app and run it."""
    typer.run(main)
|
||||||
|
|
||||||
|
|
||||||
|
# Allow running this module directly as a script.
if __name__ == "__main__":
    cli()
|
||||||
1
python/testing/__init__.py
Normal file
1
python/testing/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""init."""
|
||||||
1
python/testing/logging/__init__.py
Normal file
1
python/testing/logging/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""init."""
|
||||||
11
python/testing/logging/bar.py
Normal file
11
python/testing/logging/bar.py
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
"""Bar."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def bar() -> None:
    """Emit two debug log records.

    One record includes the module name, the other does not; both are only
    formatted if DEBUG logging is actually enabled.
    """
    # Lazy %-style args defer string formatting until a handler emits the record.
    logger.debug("bar %s", __name__)
    logger.debug("bar")
|
||||||
20
python/testing/logging/configure_logger.py
Normal file
20
python/testing/logging/configure_logger.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
"""configure_logger."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def configure_logger(level: str = "INFO", test: str | None = None) -> None:
    """Configure the root logger to write timestamped records to stdout.

    Args:
        level (str, optional): The logging level. Defaults to "INFO".
        test (str | None, optional): An extra tag appended to every log line,
            identifying which caller configured logging. Defaults to None.

    """
    base_format = "%(asctime)s %(levelname)s %(filename)s:%(lineno)d - %(message)s"
    # Bug fix: only append the tag when one was given — the old code appended
    # the literal string " None" to every record when `test` was unset.
    # (Also fixes the "nesiseary" typo in the original comment.)
    log_format = f"{base_format} {test}" if test is not None else base_format
    logging.basicConfig(
        level=level,
        datefmt="%Y-%m-%dT%H:%M:%S%z",
        format=log_format,
        handlers=[logging.StreamHandler(sys.stdout)],
    )
|
||||||
17
python/testing/logging/foo.py
Normal file
17
python/testing/logging/foo.py
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
"""foo."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from python.testing.logging.bar import bar
|
||||||
|
from python.testing.logging.configure_logger import configure_logger
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def foo() -> None:
    """Configure DEBUG logging tagged "FOO", emit debug records, then call bar()."""
    configure_logger("DEBUG", "FOO")
    # Lazy %-style args, consistent with the rest of the logging demos.
    logger.debug("foo %s", __name__)
    logger.debug("foo")

    bar()
|
||||||
33
python/testing/logging/main.py
Normal file
33
python/testing/logging/main.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
"""main."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from python.testing.logging.bar import bar
|
||||||
|
from python.testing.logging.configure_logger import configure_logger
|
||||||
|
from python.testing.logging.foo import foo
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """Configure DEBUG logging and exercise the demo loggers (foo, then bar)."""
    configure_logger("DEBUG")
    # Removed a block of commented-out handler/formatter experiments: dead
    # code that re-implemented what configure_logger() already does.
    logger.debug("main")
    foo()
    bar()
|
||||||
|
|
||||||
|
|
||||||
|
# Allow running the logging demo directly as a script.
if __name__ == "__main__":
    main()
|
||||||
1
python/tools/__init__.py
Normal file
1
python/tools/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""Server Tools."""
|
||||||
161
python/tools/fix_eval_warnings.py
Executable file
161
python/tools/fix_eval_warnings.py
Executable file
@@ -0,0 +1,161 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""fix_eval_warnings."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import requests
|
||||||
|
import typer
|
||||||
|
|
||||||
|
from python.common import configure_logger
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class Config:
    """Configuration for the script.

    Attributes:
        github_token (str): GitHub token for API authentication.
        model_name (str): The name of the LLM model to use. Defaults to "gpt-4o".
        api_base (str): The base URL for the GitHub Models API.
            Defaults to "https://models.inference.ai.azure.com".

    """

    # Passed as a Bearer token to the chat-completions endpoint (see generate_fix).
    github_token: str
    model_name: str = "gpt-4o"
    api_base: str = "https://models.inference.ai.azure.com"
|
||||||
|
|
||||||
|
|
||||||
|
def get_log_content(run_id: str) -> None:
    """Fetch the logs for a specific workflow run.

    NOTE(review): currently a stub — it only logs the run ID. The intended
    download (via the GitHub API or the `gh` CLI, per the comments below) was
    never implemented; `main` reads warnings from a log file instead.

    Args:
        run_id (str): The run ID.

    """
    logger.info(f"Fetching logs for run ID: {run_id}")
    # List artifacts to find logs (or use jobs API)
    # For simplicity, we might need to use 'gh' cli in the workflow to download logs
    # But let's try to read from a file if passed as argument, which is easier for the workflow
|
||||||
|
|
||||||
|
|
||||||
|
def parse_warnings(log_file_path: Path) -> list[str]:
    """Parse the log file for evaluation warnings.

    Args:
        log_file_path (Path): The path to the log file.

    Returns:
        list[str]: A list of warning messages.

    """
    # errors="ignore" keeps parsing alive even if the log contains invalid UTF-8.
    with log_file_path.open(encoding="utf-8", errors="ignore") as log_file:
        return [line.strip() for line in log_file if "evaluation warning:" in line]
|
||||||
|
|
||||||
|
|
||||||
|
def generate_fix(warning_msg: str, config: Config) -> str | None:
    """Call GitHub Models to generate a fix for the warning.

    Sends one chat-completion request (30 s timeout, temperature 0.1) and
    returns the first choice's message content.

    Args:
        warning_msg (str): The warning message.
        config (Config): The configuration object.

    Returns:
        Optional[str]: The suggested fix, or None if the API call failed.

    """
    logger.info(f"Generating fix for: {warning_msg}")

    prompt = f"""
I encountered the following Nix evaluation warning:

`{warning_msg}`

Please explain what this warning means and suggest how to fix it in the Nix code.
If possible, provide the exact code change in a diff format or a clear description of what to change.
"""

    headers = {"Content-Type": "application/json", "Authorization": f"Bearer {config.github_token}"}

    payload = {
        "messages": [
            {"role": "system", "content": "You are an expert NixOS and Nix language developer."},
            {"role": "user", "content": prompt},
        ],
        "model": config.model_name,
        # Low temperature: we want deterministic, conservative fix suggestions.
        "temperature": 0.1,
    }

    try:
        response = requests.post(f"{config.api_base}/chat/completions", headers=headers, json=payload, timeout=30)
        response.raise_for_status()
        result = response.json()
        return result["choices"][0]["message"]["content"]  # type: ignore[no-any-return]
    # Broad catch is deliberate: any network/HTTP/shape failure degrades to
    # "no suggestion" rather than aborting the whole run.
    except Exception:
        logger.exception("Error calling LLM")
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def main(
    log_file: Path = typer.Argument(..., help="Path to the build log file"),  # noqa: B008
    model_name: str = typer.Option("gpt-4o", envvar="MODEL_NAME", help="LLM Model Name"),
) -> None:
    """Detect evaluation warnings in logs and suggest fixes using GitHub Models.

    Args:
        log_file (Path): Path to the build log file containing evaluation warnings.
        model_name (str): The name of the LLM model to use for generating fixes.
            Defaults to "gpt-4o", can be overridden by MODEL_NAME environment variable.

    """
    configure_logger()

    github_token = os.environ.get("GITHUB_TOKEN")
    if not github_token:
        logger.warning("GITHUB_TOKEN not set. LLM calls will fail.")

    config = Config(github_token=github_token or "", model_name=model_name)

    if not log_file.exists():
        logger.error(f"Log file not found: {log_file}")
        raise typer.Exit(code=1)

    warnings = parse_warnings(log_file)
    if not warnings:
        logger.info("No evaluation warnings found.")
        raise typer.Exit(code=0)

    logger.info(f"Found {len(warnings)} warnings.")

    # Process unique warnings to save tokens
    unique_warnings = list(set(warnings))

    fixes = []
    # Hoisted the token check out of the loop: a missing token is loop-invariant,
    # so the old per-warning "Skipping..." log spam is collapsed to one warning.
    if not config.github_token:
        logger.warning("Skipping LLM call due to missing GITHUB_TOKEN")
    else:
        for warning in unique_warnings:
            fix = generate_fix(warning, config)
            if fix:
                fixes.append(f"## Warning\n`{warning}`\n\n## Suggested Fix\n{fix}\n")

    # Output fixes to a markdown file for the PR body
    if fixes:
        # Explicit encoding so the report is stable regardless of the host locale.
        with Path("fix_suggestions.md").open("w", encoding="utf-8") as f:
            f.write("# Automated Fix Suggestions\n\n")
            f.write("\n---\n".join(fixes))
        logger.info("Fix suggestions written to fix_suggestions.md")
    else:
        logger.info("No fixes generated.")
|
||||||
|
|
||||||
|
|
||||||
|
# Register `main` as the single Typer command; run the app when executed directly.
app = typer.Typer()
app.command()(main)

if __name__ == "__main__":
    app()
|
||||||
144
python/tools/snapshot_manager.py
Normal file
144
python/tools/snapshot_manager.py
Normal file
@@ -0,0 +1,144 @@
|
|||||||
|
"""snapshot_manager."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import sys
|
||||||
|
import tomllib
|
||||||
|
from functools import cache
|
||||||
|
from pathlib import Path # noqa: TC003 This is required for the typer CLI
|
||||||
|
from re import compile as re_compile
|
||||||
|
from re import search
|
||||||
|
|
||||||
|
import typer
|
||||||
|
|
||||||
|
from python.common import configure_logger, signal_alert, utcnow
|
||||||
|
from python.zfs import Dataset, get_datasets
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def main(config_file: Path) -> None:
    """Main.

    Snapshot every dataset at the current 15-minute timestamp, then prune old
    snapshots for each dataset according to its retention config. Any failure
    triggers a signal alert; an unexpected exception exits with status 1.
    """
    configure_logger(level="DEBUG")
    logger.info("Starting snapshot_manager")

    try:
        time_stamp = get_time_stamp()

        for dataset in get_datasets():
            status = dataset.create_snapshot(time_stamp)
            logger.debug(f"{status=}")
            # create_snapshot returns the sentinel "snapshot created" on success;
            # anything else (already exists / failed) is alerted and pruning is skipped.
            if status != "snapshot created":
                msg = f"{dataset.name} failed to create snapshot {time_stamp}"
                logger.error(msg)
                signal_alert(msg)
                continue

            # Despite its name, this also deletes the surplus snapshots.
            get_snapshots_to_delete(dataset, get_count_lookup(config_file, dataset.name))
    except Exception:
        # Top-level boundary: log the traceback, alert, and fail the process.
        logger.exception("snapshot_manager failed")
        signal_alert("snapshot_manager failed")
        sys.exit(1)
    else:
        logger.info("snapshot_manager completed")
|
||||||
|
|
||||||
|
|
||||||
|
def get_count_lookup(config_file: Path, dataset_name: str) -> dict[str, int]:
    """Get the per-interval snapshot retention counts for a dataset.

    Args:
        config_file (Path): The path to the configuration file.
        dataset_name (str): The name of the dataset.

    Returns:
        dict[str, int]: The count lookup.

    """
    config_data = load_config_data(config_file)

    # Fall back to the config's "default" section (or the hard-coded defaults)
    # when the dataset has no section of its own.
    fallback_counts = get_default_config(config_data)
    return config_data.get(dataset_name, fallback_counts)
|
||||||
|
|
||||||
|
|
||||||
|
def get_default_config(config_data: dict[str, dict[str, int]]) -> dict[str, int]:
    """Get the default configuration.

    Args:
        config_data (dict[str, dict[str, int]]): The configuration data.

    Returns:
        dict[str, int]: The default configuration.

    """
    # Hard-coded fallback: one hour of 15-minute snapshots, half a day of
    # hourly snapshots, and no daily/monthly retention.
    fallback_counts = {"15_min": 4, "hourly": 12, "daily": 0, "monthly": 0}
    return config_data.get("default", fallback_counts)
|
||||||
|
|
||||||
|
|
||||||
|
@cache
|
||||||
|
def load_config_data(config_file: Path) -> dict[str, dict[str, int]]:
|
||||||
|
"""Load a TOML configuration file.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
config_file (Path): The path to the configuration file.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: The configuration data.
|
||||||
|
"""
|
||||||
|
return tomllib.loads(config_file.read_text())
|
||||||
|
|
||||||
|
|
||||||
|
def get_snapshots_to_delete(
    dataset: Dataset,
    count_lookup: dict[str, int],
) -> None:
    """Get snapshots to delete.

    Despite the name, this also performs the deletion: for each retention
    bucket it keeps the newest `count_lookup[bucket]` matching snapshots and
    deletes the rest, alerting on any deletion failure.

    Args:
        dataset (Dataset): the dataset
        count_lookup (dict[str, int]): the count lookup

    """
    snapshots = dataset.get_snapshots()

    if not snapshots:
        logger.info(f"{dataset.name} has no snapshots")
        return

    # Snapshot names follow the auto_YYYYMMDDHHMM pattern from get_time_stamp();
    # each regex selects one retention bucket:
    filters = (
        # minute 15/30/45 -> plain 15-minute snapshot
        ("15_min", re_compile(r"auto_\d{10}(?:15|30|45)")),
        # minute 00 with hour != 00 -> hourly snapshot
        ("hourly", re_compile(r"auto_\d{8}(?!00)\d{2}00")),
        # 00:00 on a day other than the 1st -> daily snapshot
        ("daily", re_compile(r"auto_\d{6}(?!01)\d{2}0000")),
        # 00:00 on the 1st of the month -> monthly snapshot
        ("monthly", re_compile(r"auto_\d{6}010000")),
    )

    for filter_name, snapshot_filter in filters:
        logger.debug(f"{filter_name=}\n{snapshot_filter=}")

        # Lexicographic sort == chronological sort for this fixed-width name format.
        filtered_snapshots = sorted(snapshot.name for snapshot in snapshots if search(snapshot_filter, snapshot.name))

        logger.debug(f"{filtered_snapshots=}")

        snapshots_wanted = count_lookup[filter_name]
        # Keep the newest `snapshots_wanted`; a count of 0 deletes every match.
        snapshots_being_deleted = filtered_snapshots[:-snapshots_wanted] if snapshots_wanted > 0 else filtered_snapshots

        logger.info(f"{snapshots_being_deleted} are being deleted")
        for snapshot in snapshots_being_deleted:
            # delete_snapshot returns an error string (e.g. dependent clones) or None.
            if error := dataset.delete_snapshot(snapshot):
                error_message = f"{dataset.name}@{snapshot} failed to delete: {error}"
                signal_alert(error_message)
                logger.error(error_message)
|
||||||
|
|
||||||
|
|
||||||
|
def get_time_stamp() -> str:
    """Return the current UTC time, floored to 15 minutes, as auto_YYYYMMDDHHMM."""
    now = utcnow()
    # Floor the minute to the nearest quarter hour (0, 15, 30, 45).
    floored = now.replace(minute=(now.minute // 15) * 15)
    return floored.strftime("auto_%Y%m%d%H%M")
|
||||||
|
|
||||||
|
|
||||||
|
def cli() -> None:
    """Command-line entry point: wrap ``main`` in a Typer app and run it."""
    typer.run(main)
|
||||||
|
|
||||||
|
|
||||||
|
# Allow running the snapshot manager directly as a script.
if __name__ == "__main__":
    cli()
|
||||||
11
python/zfs/__init__.py
Normal file
11
python/zfs/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
"""init."""
|
||||||
|
|
||||||
|
from python.zfs.dataset import Dataset, Snapshot, get_datasets
|
||||||
|
from python.zfs.zpool import Zpool
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"Dataset",
|
||||||
|
"Snapshot",
|
||||||
|
"Zpool",
|
||||||
|
"get_datasets",
|
||||||
|
]
|
||||||
214
python/zfs/dataset.py
Normal file
214
python/zfs/dataset.py
Normal file
@@ -0,0 +1,214 @@
|
|||||||
|
"""dataset."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
from datetime import UTC, datetime
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from python.common import bash_wrapper
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def _zfs_list(zfs_list: str) -> dict[str, Any]:
    """Run a `zfs list` JSON command and return its parsed output.

    Args:
        zfs_list (str): The full `zfs list ... -j` command line to execute.

    Returns:
        dict[str, Any]: The parsed JSON document.

    Raises:
        RuntimeError: If the output is not version 0.1 of the `zfs list` format.

    """
    raw_output, _ = bash_wrapper(zfs_list)

    parsed = json.loads(raw_output)

    version_info = parsed["output_version"]
    vers_major = version_info["vers_major"]
    vers_minor = version_info["vers_minor"]
    command = version_info["command"]

    # Only the (0, 1, "zfs list") schema is understood by the parsers below.
    if (vers_major, vers_minor, command) != (0, 1, "zfs list"):
        error = f"Datasets are not in the correct format {vers_major=} {vers_minor=} {command=}"
        raise RuntimeError(error)

    return parsed
|
||||||
|
|
||||||
|
|
||||||
|
class Snapshot:
|
||||||
|
"""Snapshot."""
|
||||||
|
|
||||||
|
def __init__(self, snapshot_data: dict[str, Any]) -> None:
|
||||||
|
"""__init__."""
|
||||||
|
properties = snapshot_data["properties"]
|
||||||
|
self.createtxg = int(snapshot_data["createtxg"])
|
||||||
|
self.creation = datetime.fromtimestamp(int(properties["creation"]["value"]), tz=UTC)
|
||||||
|
self.defer_destroy = properties["defer_destroy"]["value"]
|
||||||
|
self.guid = int(properties["guid"]["value"])
|
||||||
|
self.name = snapshot_data["name"].split("@")[1]
|
||||||
|
self.objsetid = int(properties["objsetid"]["value"])
|
||||||
|
self.referenced = int(properties["referenced"]["value"])
|
||||||
|
self.used = int(properties["used"]["value"])
|
||||||
|
self.userrefs = int(properties["userrefs"]["value"])
|
||||||
|
self.version = int(properties["version"]["value"])
|
||||||
|
self.written = int(properties["written"]["value"])
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
"""__repr__."""
|
||||||
|
return f"name={self.name} used={self.used} refer={self.referenced}"
|
||||||
|
|
||||||
|
|
||||||
|
class Dataset:
    """A ZFS filesystem dataset, with properties loaded from `zfs list -j`.

    Constructing a Dataset shells out to `zfs list <name> -pHj -o all` and
    copies every property into an attribute of the same name (integers are
    converted from their string "value"; everything else stays a string).
    """

    def __init__(self, name: str) -> None:
        """Load all properties for the dataset `name` via `zfs list`."""
        dataset_data = _zfs_list(f"zfs list {name} -pHj -o all")

        properties = dataset_data["datasets"][name]["properties"]

        # ACL / mount / feature settings (string-valued zfs properties).
        self.aclinherit = properties["aclinherit"]["value"]
        self.aclmode = properties["aclmode"]["value"]
        self.acltype = properties["acltype"]["value"]
        # Space accounting, in bytes (`-p` makes zfs emit parseable numbers).
        self.available = int(properties["available"]["value"])
        self.canmount = properties["canmount"]["value"]
        self.checksum = properties["checksum"]["value"]
        self.clones = properties["clones"]["value"]
        self.compression = properties["compression"]["value"]
        self.copies = int(properties["copies"]["value"])
        self.createtxg = int(properties["createtxg"]["value"])
        # Creation time is a Unix timestamp; stored timezone-aware in UTC.
        self.creation = datetime.fromtimestamp(int(properties["creation"]["value"]), tz=UTC)
        self.dedup = properties["dedup"]["value"]
        self.devices = properties["devices"]["value"]
        self.encryption = properties["encryption"]["value"]
        self.exec = properties["exec"]["value"]
        self.filesystem_limit = properties["filesystem_limit"]["value"]
        self.guid = int(properties["guid"]["value"])
        self.keystatus = properties["keystatus"]["value"]
        self.logbias = properties["logbias"]["value"]
        self.mlslabel = properties["mlslabel"]["value"]
        self.mounted = properties["mounted"]["value"]
        self.mountpoint = properties["mountpoint"]["value"]
        self.name = name
        self.quota = int(properties["quota"]["value"])
        self.readonly = properties["readonly"]["value"]
        self.recordsize = int(properties["recordsize"]["value"])
        self.redundant_metadata = properties["redundant_metadata"]["value"]
        self.referenced = int(properties["referenced"]["value"])
        self.refquota = int(properties["refquota"]["value"])
        self.refreservation = int(properties["refreservation"]["value"])
        self.reservation = int(properties["reservation"]["value"])
        self.setuid = properties["setuid"]["value"]
        self.sharenfs = properties["sharenfs"]["value"]
        self.snapdir = properties["snapdir"]["value"]
        self.snapshot_limit = properties["snapshot_limit"]["value"]
        self.sync = properties["sync"]["value"]
        self.used = int(properties["used"]["value"])
        self.usedbychildren = int(properties["usedbychildren"]["value"])
        self.usedbydataset = int(properties["usedbydataset"]["value"])
        self.usedbysnapshots = int(properties["usedbysnapshots"]["value"])
        self.version = int(properties["version"]["value"])
        self.volmode = properties["volmode"]["value"]
        self.volsize = properties["volsize"]["value"]
        self.vscan = properties["vscan"]["value"]
        self.written = int(properties["written"]["value"])
        self.xattr = properties["xattr"]["value"]

    def get_snapshots(self) -> list[Snapshot] | None:
        """Return this dataset's snapshots as Snapshot objects.

        NOTE(review): the annotation allows None but the body always returns a
        list (possibly empty) — confirm whether the `| None` is intentional.
        """
        snapshots_data = _zfs_list(f"zfs list -t snapshot -pHj {self.name} -o all")

        return [Snapshot(properties) for properties in snapshots_data["datasets"].values()]

    def create_snapshot(self, snapshot_name: str) -> str:
        """Creates a zfs snapshot.

        Returns a human-readable status string: "snapshot created" on success,
        otherwise a message saying the snapshot already exists or that the
        creation failed.

        Args:
            snapshot_name (str): a snapshot name
        """
        logger.debug(f"Creating {self.name}@{snapshot_name}")
        _, return_code = bash_wrapper(f"zfs snapshot {self.name}@{snapshot_name}")
        if return_code == 0:
            return "snapshot created"

        # Distinguish "already exists" from a genuine failure by re-listing.
        if snapshots := self.get_snapshots():
            snapshot_names = {snapshot.name for snapshot in snapshots}
            if snapshot_name in snapshot_names:
                return f"Snapshot {snapshot_name} already exists for {self.name}"

        return f"Failed to create snapshot {snapshot_name} for {self.name}"

    def delete_snapshot(self, snapshot_name: str) -> str | None:
        """Deletes a zfs snapshot.

        Returns None on success, the string "snapshot has dependent clones"
        when the snapshot cannot be destroyed because of clones, and raises
        RuntimeError for any other failure.

        Args:
            snapshot_name (str): a snapshot name
        """
        logger.debug(f"deleting {self.name}@{snapshot_name}")
        msg, return_code = bash_wrapper(f"zfs destroy {self.name}@{snapshot_name}")
        if return_code != 0:
            # Dependent clones are an expected, recoverable condition.
            if msg.startswith(f"cannot destroy '{self.name}@{snapshot_name}': snapshot has dependent clones"):
                return "snapshot has dependent clones"
            error = f"Failed to delete snapshot {snapshot_name=} for {self.name}"
            raise RuntimeError(error)
        return None

    def __repr__(self) -> str:
        """Multi-line dump of every loaded property (one `attr=value` per line)."""
        return (
            f"{self.aclinherit=}\n"
            f"{self.aclmode=}\n"
            f"{self.acltype=}\n"
            f"{self.available=}\n"
            f"{self.canmount=}\n"
            f"{self.checksum=}\n"
            f"{self.clones=}\n"
            f"{self.compression=}\n"
            f"{self.copies=}\n"
            f"{self.createtxg=}\n"
            f"{self.creation=}\n"
            f"{self.dedup=}\n"
            f"{self.devices=}\n"
            f"{self.encryption=}\n"
            f"{self.exec=}\n"
            f"{self.filesystem_limit=}\n"
            f"{self.guid=}\n"
            f"{self.keystatus=}\n"
            f"{self.logbias=}\n"
            f"{self.mlslabel=}\n"
            f"{self.mounted=}\n"
            f"{self.mountpoint=}\n"
            f"{self.name=}\n"
            f"{self.quota=}\n"
            f"{self.readonly=}\n"
            f"{self.recordsize=}\n"
            f"{self.redundant_metadata=}\n"
            f"{self.referenced=}\n"
            f"{self.refquota=}\n"
            f"{self.refreservation=}\n"
            f"{self.reservation=}\n"
            f"{self.setuid=}\n"
            f"{self.sharenfs=}\n"
            f"{self.snapdir=}\n"
            f"{self.snapshot_limit=}\n"
            f"{self.sync=}\n"
            f"{self.used=}\n"
            f"{self.usedbychildren=}\n"
            f"{self.usedbydataset=}\n"
            f"{self.usedbysnapshots=}\n"
            f"{self.version=}\n"
            f"{self.volmode=}\n"
            f"{self.volsize=}\n"
            f"{self.vscan=}\n"
            f"{self.written=}\n"
            f"{self.xattr=}\n"
        )
|
||||||
|
|
||||||
|
|
||||||
|
def get_datasets() -> list[Dataset]:
    """Build a :class:`Dataset` for every ZFS filesystem on the host.

    Returns:
        list[Dataset]: One entry per filesystem. Pool roots (names
        containing no "/") are skipped.
    """
    logger.info("Getting zfs list")

    raw_names, _ = bash_wrapper("zfs list -Hp -t filesystem -o name")

    datasets: list[Dataset] = []
    for dataset_name in raw_names.strip().split("\n"):
        # Pool roots have no "/" in their name; only child datasets are wrapped.
        if "/" not in dataset_name:
            continue
        datasets.append(Dataset(dataset_name))
    return datasets
|
||||||
86
python/zfs/zpool.py
Normal file
86
python/zfs/zpool.py
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
"""test."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from python.common import bash_wrapper
|
||||||
|
|
||||||
|
|
||||||
|
def _zpool_list(zfs_list: str) -> dict[str, Any]:
    """Run a ``zpool list`` command and return its parsed JSON output.

    Args:
        zfs_list: The full ``zpool list`` command line to execute; it
            must include ``-j`` so the output is JSON.

    Returns:
        dict[str, Any]: The parsed ``zpool list`` JSON document.

    Raises:
        RuntimeError: If the JSON is not ``zpool list`` output in the
            expected schema version (0.1).
    """
    raw_zpool_list_data, _ = bash_wrapper(zfs_list)

    zpool_list_data = json.loads(raw_zpool_list_data)

    # Guard against OpenZFS changing the JSON schema out from under us:
    # every -j document carries an "output_version" stanza.
    output_version = zpool_list_data["output_version"]
    vers_major = output_version["vers_major"]
    vers_minor = output_version["vers_minor"]
    command = output_version["command"]

    if vers_major != 0 or vers_minor != 1 or command != "zpool list":
        # Note: the previous message said "Datasets", a copy-paste from
        # the zfs-list variant; this function validates zpool output.
        error = f"zpool list output is not in the expected format {vers_major=} {vers_minor=} {command=}"
        raise RuntimeError(error)

    return zpool_list_data
|
||||||
|
|
||||||
|
|
||||||
|
class Zpool:
    """Snapshot of a single zpool's properties taken via ``zpool list``."""

    def __init__(
        self,
        name: str,
    ) -> None:
        """Load and cache the pool's properties.

        Args:
            name: The zpool name, e.g. ``"root_pool"``.
        """
        # -p gives parseable (raw numeric) values, -H drops headers,
        # -j emits JSON which _zpool_list validates and parses.
        zpool_data = _zpool_list(f"zpool list {name} -pHj -o all")

        properties = zpool_data["pools"][name]["properties"]

        self.name = name

        self.allocated = int(properties["allocated"]["value"])
        self.altroot = properties["altroot"]["value"]
        self.ashift = int(properties["ashift"]["value"])
        self.autoexpand = properties["autoexpand"]["value"]
        self.autoreplace = properties["autoreplace"]["value"]
        self.autotrim = properties["autotrim"]["value"]
        self.capacity = int(properties["capacity"]["value"])
        self.comment = properties["comment"]["value"]
        self.dedupratio = properties["dedupratio"]["value"]
        self.delegation = properties["delegation"]["value"]
        self.expandsize = properties["expandsize"]["value"]
        self.failmode = properties["failmode"]["value"]
        self.fragmentation = int(properties["fragmentation"]["value"])
        # With -p "free" is a raw byte count; parse it like allocated/size
        # (it was the only numeric property previously left as a string).
        self.free = int(properties["free"]["value"])
        self.freeing = int(properties["freeing"]["value"])
        self.guid = int(properties["guid"]["value"])
        self.health = properties["health"]["value"]
        self.leaked = int(properties["leaked"]["value"])
        self.readonly = properties["readonly"]["value"]
        self.size = int(properties["size"]["value"])

    def __repr__(self) -> str:
        """Return a newline-separated dump of every stored pool property."""
        return (
            f"{self.name=}\n"
            f"{self.allocated=}\n"
            f"{self.altroot=}\n"
            f"{self.ashift=}\n"
            f"{self.autoexpand=}\n"
            f"{self.autoreplace=}\n"
            f"{self.autotrim=}\n"
            f"{self.capacity=}\n"
            f"{self.comment=}\n"
            f"{self.dedupratio=}\n"
            f"{self.delegation=}\n"
            f"{self.expandsize=}\n"
            f"{self.failmode=}\n"
            f"{self.fragmentation=}\n"
            f"{self.free=}\n"  # previously omitted from the repr
            f"{self.freeing=}\n"
            f"{self.guid=}\n"
            f"{self.health=}\n"
            f"{self.leaked=}\n"
            f"{self.readonly=}\n"
            f"{self.size=}"
        )
|
||||||
@@ -9,6 +9,7 @@
|
|||||||
nix
|
nix
|
||||||
home-manager
|
home-manager
|
||||||
git
|
git
|
||||||
|
my_python
|
||||||
|
|
||||||
ssh-to-age
|
ssh-to-age
|
||||||
gnupg
|
gnupg
|
||||||
|
|||||||
@@ -1,21 +1,21 @@
|
|||||||
|
{ inputs, ... }:
|
||||||
{
|
{
|
||||||
imports = [
|
imports = [
|
||||||
../../users/richie
|
"${inputs.self}/users/richie"
|
||||||
../../users/gaming
|
"${inputs.self}/users/gaming"
|
||||||
../../common/global
|
"${inputs.self}/common/global"
|
||||||
../../common/optional/desktop.nix
|
"${inputs.self}/common/optional/desktop.nix"
|
||||||
../../common/optional/docker.nix
|
"${inputs.self}/common/optional/docker.nix"
|
||||||
../../common/optional/scanner.nix
|
"${inputs.self}/common/optional/scanner.nix"
|
||||||
../../common/optional/steam.nix
|
"${inputs.self}/common/optional/steam.nix"
|
||||||
../../common/optional/syncthing_base.nix
|
"${inputs.self}/common/optional/syncthing_base.nix"
|
||||||
../../common/optional/systemd-boot.nix
|
"${inputs.self}/common/optional/systemd-boot.nix"
|
||||||
../../common/optional/update.nix
|
"${inputs.self}/common/optional/update.nix"
|
||||||
../../common/optional/yubikey.nix
|
"${inputs.self}/common/optional/yubikey.nix"
|
||||||
../../common/optional/zerotier.nix
|
"${inputs.self}/common/optional/zerotier.nix"
|
||||||
../../common/optional/nvidia.nix
|
"${inputs.self}/common/optional/nvidia.nix"
|
||||||
./hardware.nix
|
./hardware.nix
|
||||||
./syncthing.nix
|
./syncthing.nix
|
||||||
./games.nix
|
|
||||||
./llms.nix
|
./llms.nix
|
||||||
];
|
];
|
||||||
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@
|
|||||||
"dotfiles" = {
|
"dotfiles" = {
|
||||||
path = "/home/richie/dotfiles";
|
path = "/home/richie/dotfiles";
|
||||||
devices = [
|
devices = [
|
||||||
|
"brain"
|
||||||
"jeeves"
|
"jeeves"
|
||||||
"rhapsody-in-green"
|
"rhapsody-in-green"
|
||||||
];
|
];
|
||||||
@@ -12,8 +13,9 @@
|
|||||||
id = "4ckma-gtshs"; # cspell:disable-line
|
id = "4ckma-gtshs"; # cspell:disable-line
|
||||||
path = "/home/richie/important";
|
path = "/home/richie/important";
|
||||||
devices = [
|
devices = [
|
||||||
"phone"
|
"brain"
|
||||||
"jeeves"
|
"jeeves"
|
||||||
|
"phone"
|
||||||
"rhapsody-in-green"
|
"rhapsody-in-green"
|
||||||
];
|
];
|
||||||
fsWatcherEnabled = true;
|
fsWatcherEnabled = true;
|
||||||
|
|||||||
39
systems/brain/default.nix
Normal file
39
systems/brain/default.nix
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
{ inputs, ... }:
|
||||||
|
{
|
||||||
|
imports = [
|
||||||
|
"${inputs.self}/users/richie"
|
||||||
|
"${inputs.self}/common/global"
|
||||||
|
"${inputs.self}/common/optional/docker.nix"
|
||||||
|
"${inputs.self}/common/optional/ssh_decrypt.nix"
|
||||||
|
"${inputs.self}/common/optional/syncthing_base.nix"
|
||||||
|
"${inputs.self}/common/optional/systemd-boot.nix"
|
||||||
|
"${inputs.self}/common/optional/update.nix"
|
||||||
|
"${inputs.self}/common/optional/zerotier.nix"
|
||||||
|
./docker
|
||||||
|
./hardware.nix
|
||||||
|
./programs.nix
|
||||||
|
./services
|
||||||
|
./syncthing.nix
|
||||||
|
inputs.nixos-hardware.nixosModules.framework-11th-gen-intel
|
||||||
|
];
|
||||||
|
|
||||||
|
networking = {
|
||||||
|
hostName = "brain";
|
||||||
|
hostId = "93a06c6e";
|
||||||
|
firewall.enable = true;
|
||||||
|
networkmanager.enable = true;
|
||||||
|
};
|
||||||
|
|
||||||
|
hardware.bluetooth = {
|
||||||
|
enable = true;
|
||||||
|
powerOnBoot = true;
|
||||||
|
};
|
||||||
|
|
||||||
|
services = {
|
||||||
|
openssh.ports = [ 129 ];
|
||||||
|
|
||||||
|
smartd.enable = true;
|
||||||
|
};
|
||||||
|
|
||||||
|
system.stateVersion = "25.05";
|
||||||
|
}
|
||||||
11
systems/brain/docker/default.nix
Normal file
11
systems/brain/docker/default.nix
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{ lib, ... }:
|
||||||
|
{
|
||||||
|
imports =
|
||||||
|
let
|
||||||
|
files = builtins.attrNames (builtins.readDir ./.);
|
||||||
|
nixFiles = builtins.filter (name: lib.hasSuffix ".nix" name && name != "default.nix") files;
|
||||||
|
in
|
||||||
|
map (file: ./. + "/${file}") nixFiles;
|
||||||
|
|
||||||
|
virtualisation.oci-containers.backend = "docker";
|
||||||
|
}
|
||||||
3
systems/brain/docker/docker_networks.md
Normal file
3
systems/brain/docker/docker_networks.md
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
# docker_networks
|
||||||
|
|
||||||
|
docker network create -d bridge web
|
||||||
71
systems/brain/hardware.nix
Normal file
71
systems/brain/hardware.nix
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
{
|
||||||
|
config,
|
||||||
|
lib,
|
||||||
|
modulesPath,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
{
|
||||||
|
imports = [ (modulesPath + "/installer/scan/not-detected.nix") ];
|
||||||
|
|
||||||
|
boot = {
|
||||||
|
initrd = {
|
||||||
|
availableKernelModules = [
|
||||||
|
"ahci"
|
||||||
|
"ehci_pci"
|
||||||
|
"nvme"
|
||||||
|
"sd_mod"
|
||||||
|
"uas"
|
||||||
|
"usb_storage"
|
||||||
|
"usbhid"
|
||||||
|
"xhci_pci"
|
||||||
|
];
|
||||||
|
kernelModules = [ ];
|
||||||
|
luks.devices."luks-root-pool-nvme-Samsung_SSD_990_PRO_2TB_S7KHNJ0Y121613P-part2" = {
|
||||||
|
device = "/dev/disk/by-id/nvme-Samsung_SSD_990_PRO_2TB_S7KHNJ0Y121613P-part2";
|
||||||
|
bypassWorkqueues = true;
|
||||||
|
allowDiscards = true;
|
||||||
|
keyFileSize = 4096;
|
||||||
|
keyFile = "/dev/disk/by-id/usb-USB_SanDisk_3.2Gen1_03021630090925173333-0:0";
|
||||||
|
fallbackToPassword = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
kernelModules = [ "kvm-intel" ];
|
||||||
|
extraModulePackages = [ ];
|
||||||
|
};
|
||||||
|
|
||||||
|
fileSystems = {
|
||||||
|
"/" = lib.mkDefault {
|
||||||
|
device = "root_pool/root";
|
||||||
|
fsType = "zfs";
|
||||||
|
};
|
||||||
|
|
||||||
|
"/home" = {
|
||||||
|
device = "root_pool/home";
|
||||||
|
fsType = "zfs";
|
||||||
|
};
|
||||||
|
|
||||||
|
"/var" = {
|
||||||
|
device = "root_pool/var";
|
||||||
|
fsType = "zfs";
|
||||||
|
};
|
||||||
|
|
||||||
|
"/nix" = {
|
||||||
|
device = "root_pool/nix";
|
||||||
|
fsType = "zfs";
|
||||||
|
};
|
||||||
|
|
||||||
|
"/boot" = {
|
||||||
|
device = "/dev/disk/by-uuid/12CE-A600";
|
||||||
|
fsType = "vfat";
|
||||||
|
options = [
|
||||||
|
"fmask=0077"
|
||||||
|
"dmask=0077"
|
||||||
|
];
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
swapDevices = [ ];
|
||||||
|
|
||||||
|
nixpkgs.hostPlatform = lib.mkDefault "x86_64-linux";
|
||||||
|
hardware.cpu.intel.updateMicrocode = lib.mkDefault config.hardware.enableRedistributableFirmware;
|
||||||
|
}
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
{ pkgs, ... }:
|
{ pkgs, ... }:
|
||||||
{
|
{
|
||||||
environment.systemPackages = with pkgs; [
|
environment.systemPackages = with pkgs; [
|
||||||
osu-lazer-bin
|
filebot
|
||||||
jellyfin-media-player
|
docker-compose
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
9
systems/brain/services/default.nix
Normal file
9
systems/brain/services/default.nix
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
{ lib, ... }:
|
||||||
|
{
|
||||||
|
imports =
|
||||||
|
let
|
||||||
|
files = builtins.attrNames (builtins.readDir ./.);
|
||||||
|
nixFiles = builtins.filter (name: lib.hasSuffix ".nix" name && name != "default.nix") files;
|
||||||
|
in
|
||||||
|
map (file: ./. + "/${file}") nixFiles;
|
||||||
|
}
|
||||||
82
systems/brain/services/home_assistant.nix
Normal file
82
systems/brain/services/home_assistant.nix
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
{
|
||||||
|
users = {
|
||||||
|
users.hass = {
|
||||||
|
isSystemUser = true;
|
||||||
|
group = "hass";
|
||||||
|
};
|
||||||
|
groups.hass = { };
|
||||||
|
};
|
||||||
|
|
||||||
|
services = {
|
||||||
|
home-assistant = {
|
||||||
|
enable = true;
|
||||||
|
openFirewall = true;
|
||||||
|
config = {
|
||||||
|
http = {
|
||||||
|
server_port = 8123;
|
||||||
|
server_host = [
|
||||||
|
"192.168.90.35"
|
||||||
|
"192.168.95.35"
|
||||||
|
"127.0.0.1"
|
||||||
|
];
|
||||||
|
};
|
||||||
|
homeassistant = {
|
||||||
|
time_zone = "America/New_York";
|
||||||
|
unit_system = "us_customary";
|
||||||
|
temperature_unit = "F";
|
||||||
|
packages = {
|
||||||
|
victron_modbuss = "!include ${./home_assistant/victron_modbuss.yaml}";
|
||||||
|
battery_sensors = "!include ${./home_assistant/battery_sensors.yaml}";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
recorder = {
|
||||||
|
db_url = "postgresql://@/hass";
|
||||||
|
auto_purge = true;
|
||||||
|
purge_keep_days = 3650;
|
||||||
|
db_retry_wait = 15;
|
||||||
|
};
|
||||||
|
assist_pipeline = { };
|
||||||
|
backup = { };
|
||||||
|
bluetooth = { };
|
||||||
|
config = { };
|
||||||
|
dhcp = { };
|
||||||
|
energy = { };
|
||||||
|
history = { };
|
||||||
|
homeassistant_alerts = { };
|
||||||
|
image_upload = { };
|
||||||
|
logbook = { };
|
||||||
|
media_source = { };
|
||||||
|
mobile_app = { };
|
||||||
|
ssdp = { };
|
||||||
|
sun = { };
|
||||||
|
webhook = { };
|
||||||
|
cloud = { };
|
||||||
|
zeroconf = { };
|
||||||
|
automation = "!include automations.yaml";
|
||||||
|
script = "!include scripts.yaml";
|
||||||
|
scene = "!include scenes.yaml";
|
||||||
|
group = "!include groups.yaml";
|
||||||
|
};
|
||||||
|
extraPackages =
|
||||||
|
python3Packages: with python3Packages; [
|
||||||
|
aioesphomeapi # for esphome
|
||||||
|
bleak-esphome # for esphome
|
||||||
|
esphome-dashboard-api # for esphome
|
||||||
|
forecast-solar # for solar forecast
|
||||||
|
gtts # not sure what wants this
|
||||||
|
jellyfin-apiclient-python # for jellyfin
|
||||||
|
paho-mqtt # for mqtt
|
||||||
|
psycopg2 # for postgresql
|
||||||
|
py-improv-ble-client # for esphome
|
||||||
|
pymodbus # for modbus
|
||||||
|
pyopenweathermap # for weather
|
||||||
|
];
|
||||||
|
extraComponents = [ "isal" ];
|
||||||
|
};
|
||||||
|
esphome = {
|
||||||
|
enable = true;
|
||||||
|
openFirewall = true;
|
||||||
|
address = "192.168.90.35";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
61
systems/brain/services/home_assistant/battery_sensors.yaml
Normal file
61
systems/brain/services/home_assistant/battery_sensors.yaml
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
sensor:
|
||||||
|
# Battery 0
|
||||||
|
- platform: integration
|
||||||
|
source: sensor.batteries_jk0_charging_power
|
||||||
|
name: "JK0 energy in"
|
||||||
|
unique_id: jk0_energy_in_kwh
|
||||||
|
unit_prefix: k
|
||||||
|
method: trapezoidal
|
||||||
|
round: 3
|
||||||
|
max_sub_interval:
|
||||||
|
minutes: 5
|
||||||
|
- platform: integration
|
||||||
|
source: sensor.batteries_jk0_charging_power
|
||||||
|
name: "JK0 energy out"
|
||||||
|
unique_id: jk0_energy_out_kwh
|
||||||
|
unit_prefix: k
|
||||||
|
method: trapezoidal
|
||||||
|
round: 3
|
||||||
|
max_sub_interval:
|
||||||
|
minutes: 5
|
||||||
|
|
||||||
|
# Battery 1
|
||||||
|
- platform: integration
|
||||||
|
source: sensor.battery1_jk1_charging_power
|
||||||
|
name: "JK1 energy in"
|
||||||
|
unique_id: jk1_energy_in_kwh
|
||||||
|
unit_prefix: k
|
||||||
|
method: trapezoidal
|
||||||
|
round: 3
|
||||||
|
max_sub_interval:
|
||||||
|
minutes: 5
|
||||||
|
- platform: integration
|
||||||
|
source: sensor.battery1_jk1_discharge_power
|
||||||
|
name: "JK1 energy out"
|
||||||
|
unique_id: jk1_energy_out_kwh
|
||||||
|
unit_prefix: k
|
||||||
|
method: trapezoidal
|
||||||
|
round: 3
|
||||||
|
max_sub_interval:
|
||||||
|
minutes: 5
|
||||||
|
|
||||||
|
utility_meter:
|
||||||
|
# Battery 0
|
||||||
|
jk0_energy_in_daily:
|
||||||
|
source: sensor.jk0_energy_in
|
||||||
|
name: "JK0 Energy In Daily"
|
||||||
|
cycle: daily
|
||||||
|
jk0_energy_out_daily:
|
||||||
|
source: sensor.jk0_energy_out
|
||||||
|
name: "JK0 Energy Out Daily"
|
||||||
|
cycle: daily
|
||||||
|
|
||||||
|
# Battery 1
|
||||||
|
jk1_energy_in_daily:
|
||||||
|
source: sensor.jk1_energy_in
|
||||||
|
name: "JK1 Energy In Daily"
|
||||||
|
cycle: daily
|
||||||
|
jk1_energy_out_daily:
|
||||||
|
source: sensor.jk1_energy_out
|
||||||
|
name: "JK1 Energy Out Daily"
|
||||||
|
cycle: daily
|
||||||
347
systems/brain/services/home_assistant/victron_modbuss.yaml
Normal file
347
systems/brain/services/home_assistant/victron_modbuss.yaml
Normal file
@@ -0,0 +1,347 @@
|
|||||||
|
modbus:
|
||||||
|
- name: victron_gx
|
||||||
|
type: tcp
|
||||||
|
host: 192.168.103.30
|
||||||
|
port: 502
|
||||||
|
timeout: 3
|
||||||
|
delay: 2
|
||||||
|
sensors:
|
||||||
|
# ---- SOLAR CHARGER (Unit ID 226) ----
|
||||||
|
- name: Solar Voltage
|
||||||
|
slave: 226
|
||||||
|
address: 776
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 0.01
|
||||||
|
precision: 2
|
||||||
|
unit_of_measurement: "V"
|
||||||
|
device_class: voltage
|
||||||
|
state_class: measurement
|
||||||
|
|
||||||
|
- name: Solar Amperage
|
||||||
|
slave: 226
|
||||||
|
address: 777
|
||||||
|
input_type: holding
|
||||||
|
data_type: int16
|
||||||
|
scale: 0.1
|
||||||
|
precision: 1
|
||||||
|
unit_of_measurement: "A"
|
||||||
|
device_class: current
|
||||||
|
state_class: measurement
|
||||||
|
|
||||||
|
- name: Solar Wattage
|
||||||
|
slave: 226
|
||||||
|
address: 789
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 0.1
|
||||||
|
unit_of_measurement: "W"
|
||||||
|
device_class: power
|
||||||
|
state_class: measurement
|
||||||
|
|
||||||
|
- name: Solar Yield Today
|
||||||
|
slave: 226
|
||||||
|
address: 784
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 0.1
|
||||||
|
precision: 3
|
||||||
|
unit_of_measurement: "kWh"
|
||||||
|
device_class: energy
|
||||||
|
state_class: total
|
||||||
|
|
||||||
|
# DC system
|
||||||
|
- name: DC Voltage
|
||||||
|
slave: 100
|
||||||
|
address: 840
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 0.1
|
||||||
|
precision: 2
|
||||||
|
unit_of_measurement: "V"
|
||||||
|
device_class: voltage
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: dc_voltage
|
||||||
|
|
||||||
|
- name: DC Wattage
|
||||||
|
slave: 100
|
||||||
|
address: 860
|
||||||
|
input_type: holding
|
||||||
|
data_type: int16
|
||||||
|
scale: 1
|
||||||
|
precision: 0
|
||||||
|
unit_of_measurement: "W"
|
||||||
|
device_class: power
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: dc_wattage
|
||||||
|
|
||||||
|
# GPS
|
||||||
|
- name: GPS Latitude
|
||||||
|
slave: 1
|
||||||
|
address: 2800
|
||||||
|
input_type: holding
|
||||||
|
data_type: int32
|
||||||
|
scale: 0.0000001
|
||||||
|
precision: 7
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: gps_latitude
|
||||||
|
|
||||||
|
- name: GPS Longitude
|
||||||
|
slave: 1
|
||||||
|
address: 2802
|
||||||
|
input_type: holding
|
||||||
|
data_type: int32
|
||||||
|
scale: 0.0000001
|
||||||
|
precision: 7
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: gps_longitude
|
||||||
|
|
||||||
|
- name: GPS Course
|
||||||
|
slave: 1
|
||||||
|
address: 2804
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 0.01
|
||||||
|
precision: 2
|
||||||
|
unit_of_measurement: "°"
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: gps_course
|
||||||
|
|
||||||
|
- name: GPS Speed
|
||||||
|
slave: 1
|
||||||
|
address: 2805
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 0.01
|
||||||
|
precision: 2
|
||||||
|
unit_of_measurement: "m/s"
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: gps_speed
|
||||||
|
|
||||||
|
- name: GPS Fix
|
||||||
|
slave: 1
|
||||||
|
address: 2806
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 1
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: gps_fix
|
||||||
|
|
||||||
|
- name: GPS Satellites
|
||||||
|
slave: 1
|
||||||
|
address: 2807
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 1
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: gps_satellites
|
||||||
|
|
||||||
|
- name: GPS Altitude
|
||||||
|
slave: 1
|
||||||
|
address: 2808
|
||||||
|
input_type: holding
|
||||||
|
data_type: int32
|
||||||
|
scale: 0.16
|
||||||
|
precision: 1
|
||||||
|
unit_of_measurement: "m"
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: gps_altitude
|
||||||
|
|
||||||
|
# ---- CHARGER (Unit ID 223) ----
|
||||||
|
- name: Charger Output 1 Voltage
|
||||||
|
slave: 223
|
||||||
|
address: 2307
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 0.01
|
||||||
|
precision: 2
|
||||||
|
unit_of_measurement: "V"
|
||||||
|
device_class: voltage
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: charger_output_1_voltage
|
||||||
|
|
||||||
|
- name: Charger Output 1 Current
|
||||||
|
slave: 223
|
||||||
|
address: 2308
|
||||||
|
input_type: holding
|
||||||
|
data_type: int16
|
||||||
|
scale: 0.1
|
||||||
|
precision: 1
|
||||||
|
unit_of_measurement: "A"
|
||||||
|
device_class: current
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: charger_output_1_current
|
||||||
|
|
||||||
|
- name: Charger Output 1 Temperature
|
||||||
|
slave: 223
|
||||||
|
address: 2309
|
||||||
|
input_type: holding
|
||||||
|
data_type: int16
|
||||||
|
scale: 0.1
|
||||||
|
precision: 1
|
||||||
|
unit_of_measurement: "°C"
|
||||||
|
device_class: temperature
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: charger_output_1_temperature
|
||||||
|
|
||||||
|
- name: Charger AC Current
|
||||||
|
slave: 223
|
||||||
|
address: 2314
|
||||||
|
input_type: holding
|
||||||
|
data_type: int16
|
||||||
|
scale: 0.1
|
||||||
|
precision: 1
|
||||||
|
unit_of_measurement: "A"
|
||||||
|
device_class: current
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: charger_ac_current
|
||||||
|
|
||||||
|
- name: Charger AC Current Limit
|
||||||
|
slave: 223
|
||||||
|
address: 2316
|
||||||
|
input_type: holding
|
||||||
|
data_type: int16
|
||||||
|
scale: 0.1
|
||||||
|
precision: 1
|
||||||
|
unit_of_measurement: "A"
|
||||||
|
device_class: current
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: charger_ac_current_limit
|
||||||
|
|
||||||
|
- name: Charger On Off Raw
|
||||||
|
slave: 223
|
||||||
|
address: 2317
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 1
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: charger_on_off_raw
|
||||||
|
|
||||||
|
- name: Charger Charge State Raw
|
||||||
|
slave: 223
|
||||||
|
address: 2318
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 1
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: charger_charge_state_raw
|
||||||
|
|
||||||
|
- name: Charger Error Code
|
||||||
|
slave: 223
|
||||||
|
address: 2319
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 1
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: charger_error_code
|
||||||
|
|
||||||
|
- name: Charger Relay State
|
||||||
|
slave: 223
|
||||||
|
address: 2320
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 1
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: charger_relay_state
|
||||||
|
|
||||||
|
- name: Charger Low Voltage Alarm
|
||||||
|
slave: 223
|
||||||
|
address: 2321
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 1
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: charger_low_voltage_alarm
|
||||||
|
|
||||||
|
- name: Charger High Voltage Alarm
|
||||||
|
slave: 223
|
||||||
|
address: 2322
|
||||||
|
input_type: holding
|
||||||
|
data_type: uint16
|
||||||
|
scale: 1
|
||||||
|
state_class: measurement
|
||||||
|
unique_id: charger_high_voltage_alarm
|
||||||
|
|
||||||
|
template:
|
||||||
|
- sensor:
|
||||||
|
- name: Charger On Off
|
||||||
|
state: >-
|
||||||
|
{% set v = states('sensor.charger_on_off_raw')|int %}
|
||||||
|
{{ {0:'Off',1:'On',2:'Error',3:'Unavailable'}.get(v, 'Unknown') }}
|
||||||
|
|
||||||
|
- name: Charger Charge State
|
||||||
|
state: >-
|
||||||
|
{% set v = states('sensor.charger_charge_state_raw')|int %}
|
||||||
|
{{ {
|
||||||
|
0:'Off',1:'Low Power',2:'Fault',3:'Bulk',4:'Absorption',5:'Float',
|
||||||
|
6:'Storage',7:'Equalize/Manual',8:'External Control'
|
||||||
|
}.get(v,'Unknown') }}
|
||||||
|
|
||||||
|
- name: "Charger DC Wattage"
|
||||||
|
unique_id: charger_dc_wattage
|
||||||
|
unit_of_measurement: "W"
|
||||||
|
device_class: power
|
||||||
|
state_class: measurement
|
||||||
|
state: >-
|
||||||
|
{% set v = states('sensor.charger_output_1_voltage')|float(0) %}
|
||||||
|
{% set a = states('sensor.charger_output_1_current')|float(0) %}
|
||||||
|
{{ (v * a) | round(1) }}
|
||||||
|
|
||||||
|
- binary_sensor:
|
||||||
|
- name: Charger Low Voltage Alarm Active
|
||||||
|
state: "{{ states('sensor.charger_low_voltage_alarm')|int == 2 }}"
|
||||||
|
- name: Charger High Voltage Alarm Active
|
||||||
|
state: "{{ states('sensor.charger_high_voltage_alarm')|int == 2 }}"
|
||||||
|
|
||||||
|
sensor:
|
||||||
|
- platform: integration
|
||||||
|
source: sensor.dc_wattage
|
||||||
|
name: DC System Energy
|
||||||
|
unit_prefix: k
|
||||||
|
round: 2
|
||||||
|
method: trapezoidal
|
||||||
|
max_sub_interval:
|
||||||
|
minutes: 5
|
||||||
|
|
||||||
|
- platform: integration
|
||||||
|
source: sensor.solar_wattage
|
||||||
|
name: Solar Yield
|
||||||
|
unit_prefix: k
|
||||||
|
round: 2
|
||||||
|
method: trapezoidal
|
||||||
|
max_sub_interval:
|
||||||
|
minutes: 5
|
||||||
|
|
||||||
|
- platform: integration
|
||||||
|
source: sensor.charger_dc_wattage
|
||||||
|
name: DC Charger Energy
|
||||||
|
unit_prefix: k
|
||||||
|
round: 2
|
||||||
|
method: trapezoidal
|
||||||
|
max_sub_interval:
|
||||||
|
minutes: 5
|
||||||
|
|
||||||
|
utility_meter:
|
||||||
|
dc_load_energy_daily:
|
||||||
|
source: sensor.dc_system_energy
|
||||||
|
cycle: daily
|
||||||
|
|
||||||
|
dc_load_energy_monthly:
|
||||||
|
source: sensor.dc_system_energy
|
||||||
|
cycle: monthly
|
||||||
|
|
||||||
|
solar_yield_daily:
|
||||||
|
source: sensor.solar_yield
|
||||||
|
cycle: daily
|
||||||
|
|
||||||
|
solar_yield_monthly:
|
||||||
|
source: sensor.solar_yield
|
||||||
|
cycle: monthly
|
||||||
|
|
||||||
|
charger_dc_wattage_daily:
|
||||||
|
source: sensor.dc_charger_energy
|
||||||
|
cycle: daily
|
||||||
|
|
||||||
|
charger_dc_wattage_monthly:
|
||||||
|
source: sensor.dc_charger_energy
|
||||||
|
cycle: monthly
|
||||||
6
systems/brain/services/jellyfin.nix
Normal file
6
systems/brain/services/jellyfin.nix
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
{
|
||||||
|
services.jellyfin = {
|
||||||
|
enable = true;
|
||||||
|
openFirewall = true;
|
||||||
|
};
|
||||||
|
}
|
||||||
151
systems/brain/services/postgress.nix
Normal file
151
systems/brain/services/postgress.nix
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
{ pkgs, ... }:
|
||||||
|
{
|
||||||
|
networking.firewall.allowedTCPPorts = [ 5432 ];
|
||||||
|
|
||||||
|
services.postgresql = {
|
||||||
|
enable = true;
|
||||||
|
package = pkgs.postgresql_17_jit;
|
||||||
|
enableTCPIP = true;
|
||||||
|
enableJIT = true;
|
||||||
|
|
||||||
|
authentication = pkgs.lib.mkOverride 10 ''
|
||||||
|
|
||||||
|
# admins
|
||||||
|
local all postgres trust
|
||||||
|
host all postgres 127.0.0.1/32 trust
|
||||||
|
host all postgres ::1/128 trust
|
||||||
|
|
||||||
|
local all richie trust
|
||||||
|
host all richie 127.0.0.1/32 trust
|
||||||
|
host all richie ::1/128 trust
|
||||||
|
host all richie 192.168.90.1/24 trust
|
||||||
|
host all richie 192.168.99.1/24 trust
|
||||||
|
|
||||||
|
#type database DBuser origin-address auth-method
|
||||||
|
local hass hass trust
|
||||||
|
|
||||||
|
# ipv4
|
||||||
|
host hass hass 192.168.90.1/24 trust
|
||||||
|
host hass hass 127.0.0.1/32 trust
|
||||||
|
|
||||||
|
# ipv6
|
||||||
|
host hass hass ::1/128 trust
|
||||||
|
'';
|
||||||
|
|
||||||
|
identMap = ''
|
||||||
|
# ArbitraryMapName systemUser DBUser
|
||||||
|
superuser_map root postgres
|
||||||
|
superuser_map postgres postgres
|
||||||
|
# Let other names login as themselves
|
||||||
|
superuser_map richie postgres
|
||||||
|
superuser_map hass hass
|
||||||
|
'';
|
||||||
|
ensureUsers = [
|
||||||
|
{
|
||||||
|
name = "postgres";
|
||||||
|
ensureClauses = {
|
||||||
|
superuser = true;
|
||||||
|
login = true;
|
||||||
|
createrole = true;
|
||||||
|
createdb = true;
|
||||||
|
replication = true;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
{
|
||||||
|
name = "richie";
|
||||||
|
ensureDBOwnership = true;
|
||||||
|
ensureClauses = {
|
||||||
|
superuser = true;
|
||||||
|
login = true;
|
||||||
|
createrole = true;
|
||||||
|
createdb = true;
|
||||||
|
replication = true;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
{
|
||||||
|
name = "hass";
|
||||||
|
ensureDBOwnership = true;
|
||||||
|
ensureClauses = {
|
||||||
|
login = true;
|
||||||
|
createrole = true;
|
||||||
|
createdb = true;
|
||||||
|
replication = true;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
];
|
||||||
|
ensureDatabases = [
|
||||||
|
"hass"
|
||||||
|
"richie"
|
||||||
|
];
|
||||||
|
# Thank you NotAShelf
|
||||||
|
# https://github.com/NotAShelf/nyx/blob/d407b4d6e5ab7f60350af61a3d73a62a5e9ac660/modules/core/roles/server/system/services/databases/postgresql.nix#L74
|
||||||
|
settings = {
|
||||||
|
# Connectivity;
|
||||||
|
max_connections = 100;
|
||||||
|
superuser_reserved_connections = 3;
|
||||||
|
|
||||||
|
# Memory Settings;
|
||||||
|
shared_buffers = "1024 MB";
|
||||||
|
work_mem = "32 MB";
|
||||||
|
maintenance_work_mem = "320 MB";
|
||||||
|
huge_pages = "off";
|
||||||
|
effective_cache_size = "2 GB";
|
||||||
|
effective_io_concurrency = 100; # concurrent IO only really activated if OS supports posix_fadvise function;
|
||||||
|
random_page_cost = 1.25; # speed of random disk access relative to sequential access (1.0);
|
||||||
|
|
||||||
|
# Monitoring;
|
||||||
|
shared_preload_libraries = "pg_stat_statements,auto_explain"; # per statement resource usage stats & log explain statements for slow queries
|
||||||
|
track_io_timing = "on"; # measure exact block IO times;
|
||||||
|
track_functions = "pl"; # track execution times of pl-language procedures if any;
|
||||||
|
# Replication;
|
||||||
|
wal_level = "replica"; # consider using at least "replica";
|
||||||
|
max_wal_senders = 0;
|
||||||
|
synchronous_commit = "on";
|
||||||
|
|
||||||
|
# Checkpointing: ;
|
||||||
|
checkpoint_timeout = "15 min";
|
||||||
|
checkpoint_completion_target = 0.9;
|
||||||
|
max_wal_size = "1024 MB";
|
||||||
|
min_wal_size = "512 MB";
|
||||||
|
|
||||||
|
# WAL writing;
|
||||||
|
wal_compression = "on";
|
||||||
|
wal_buffers = -1; # auto-tuned by Postgres till maximum of segment size (16MB by default);
|
||||||
|
wal_writer_delay = "200ms";
|
||||||
|
wal_writer_flush_after = "1MB";
|
||||||
|
|
||||||
|
# Background writer;
|
||||||
|
bgwriter_delay = "200ms";
|
||||||
|
bgwriter_lru_maxpages = 100;
|
||||||
|
bgwriter_lru_multiplier = 2.0;
|
||||||
|
bgwriter_flush_after = 0;
|
||||||
|
|
||||||
|
# Parallel queries: ;
|
||||||
|
max_worker_processes = 6;
|
||||||
|
max_parallel_workers_per_gather = 3;
|
||||||
|
max_parallel_maintenance_workers = 3;
|
||||||
|
max_parallel_workers = 6;
|
||||||
|
parallel_leader_participation = "on";
|
||||||
|
|
||||||
|
# Advanced features ;
|
||||||
|
enable_partitionwise_join = "on";
|
||||||
|
enable_partitionwise_aggregate = "on";
|
||||||
|
jit = "on";
|
||||||
|
|
||||||
|
jit_above_cost = 100000;
|
||||||
|
jit_inline_above_cost = 150000;
|
||||||
|
jit_optimize_above_cost = 500000;
|
||||||
|
|
||||||
|
# log slow queries
|
||||||
|
log_min_duration_statement = 100;
|
||||||
|
"auto_explain.log_min_duration" = 100;
|
||||||
|
|
||||||
|
# logging configuration
|
||||||
|
log_connections = true;
|
||||||
|
log_statement = "ddl";
|
||||||
|
logging_collector = true;
|
||||||
|
log_disconnections = true;
|
||||||
|
log_rotation_age = "14d";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
30
systems/brain/syncthing.nix
Normal file
30
systems/brain/syncthing.nix
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
{
|
||||||
|
networking.firewall.allowedTCPPorts = [ 8384 ];
|
||||||
|
|
||||||
|
services.syncthing = {
|
||||||
|
overrideFolders = false;
|
||||||
|
guiAddress = "192.168.90.35:8384";
|
||||||
|
settings = {
|
||||||
|
"dotfiles" = {
|
||||||
|
path = "/home/richie/dotfiles";
|
||||||
|
devices = [
|
||||||
|
"bob"
|
||||||
|
"jeeves"
|
||||||
|
"rhapsody-in-green"
|
||||||
|
];
|
||||||
|
fsWatcherEnabled = true;
|
||||||
|
};
|
||||||
|
"important" = {
|
||||||
|
id = "4ckma-gtshs"; # cspell:disable-line
|
||||||
|
path = "/home/richie/important";
|
||||||
|
devices = [
|
||||||
|
"bob"
|
||||||
|
"jeeves"
|
||||||
|
"phone"
|
||||||
|
"rhapsody-in-green"
|
||||||
|
];
|
||||||
|
fsWatcherEnabled = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -1,11 +1,18 @@
|
|||||||
|
{ inputs, ... }:
|
||||||
|
let
|
||||||
|
vars = import ./vars.nix;
|
||||||
|
in
|
||||||
{
|
{
|
||||||
imports = [
|
imports = [
|
||||||
../../users/richie
|
"${inputs.self}/users/richie"
|
||||||
../../common/global
|
"${inputs.self}/users/math"
|
||||||
../../common/optional/docker.nix
|
"${inputs.self}/users/dov"
|
||||||
../../common/optional/ssh_decrypt.nix
|
"${inputs.self}/common/global"
|
||||||
../../common/optional/syncthing_base.nix
|
"${inputs.self}/common/optional/docker.nix"
|
||||||
../../common/optional/zerotier.nix
|
"${inputs.self}/common/optional/ssh_decrypt.nix"
|
||||||
|
"${inputs.self}/common/optional/syncthing_base.nix"
|
||||||
|
"${inputs.self}/common/optional/update.nix"
|
||||||
|
"${inputs.self}/common/optional/zerotier.nix"
|
||||||
./docker
|
./docker
|
||||||
./services
|
./services
|
||||||
./hardware.nix
|
./hardware.nix
|
||||||
@@ -21,7 +28,12 @@
|
|||||||
|
|
||||||
smartd.enable = true;
|
smartd.enable = true;
|
||||||
|
|
||||||
snapshot_manager.path = ./snapshot_config.toml;
|
snapshot_manager = {
|
||||||
|
path = ./snapshot_config.toml;
|
||||||
|
EnvironmentFile = "${vars.secrets}/services/snapshot_manager";
|
||||||
|
};
|
||||||
|
|
||||||
|
zerotierone.joinNetworks = [ "a09acf02330d37b9" ];
|
||||||
};
|
};
|
||||||
|
|
||||||
system.stateVersion = "24.05";
|
system.stateVersion = "24.05";
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
{
|
{
|
||||||
config,
|
config,
|
||||||
pkgs,
|
pkgs,
|
||||||
lib,
|
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
|
|
||||||
|
|||||||
21
systems/jeeves/docker/signal-cli-rest-api.nix
Normal file
21
systems/jeeves/docker/signal-cli-rest-api.nix
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
let
|
||||||
|
vars = import ../vars.nix;
|
||||||
|
in
|
||||||
|
{
|
||||||
|
networking.firewall.allowedTCPPorts = [
|
||||||
|
8989
|
||||||
|
];
|
||||||
|
virtualisation.oci-containers.containers.signal_cli_rest_api = {
|
||||||
|
image = "bbernhard/signal-cli-rest-api:latest";
|
||||||
|
ports = [
|
||||||
|
"8989:8080"
|
||||||
|
];
|
||||||
|
volumes = [
|
||||||
|
"${vars.docker_configs}/signal-cli-config:/home/.local/share/signal-cli"
|
||||||
|
];
|
||||||
|
environment = {
|
||||||
|
MODE = "json-rpc";
|
||||||
|
};
|
||||||
|
autoStart = true;
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -1,4 +1,9 @@
|
|||||||
{ config, lib, ... }:
|
{
|
||||||
|
config,
|
||||||
|
lib,
|
||||||
|
outputs,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
|
||||||
with lib;
|
with lib;
|
||||||
|
|
||||||
@@ -64,11 +69,15 @@ in
|
|||||||
Host jeeves
|
Host jeeves
|
||||||
Port 629
|
Port 629
|
||||||
User github-runners
|
User github-runners
|
||||||
HostName 192.168.99.14
|
HostName jeeves
|
||||||
IdentityFile ${vars.secrets}/services/github-runners/id_ed25519_github-runners
|
IdentityFile ${vars.secrets}/services/github-runners/id_ed25519_github-runners
|
||||||
StrictHostKeyChecking no
|
StrictHostKeyChecking no
|
||||||
UserKnownHostsFile /dev/null
|
UserKnownHostsFile /dev/null
|
||||||
'';
|
'';
|
||||||
|
nixpkgs = {
|
||||||
|
overlays = builtins.attrValues outputs.overlays;
|
||||||
|
config.allowUnfree = true;
|
||||||
|
};
|
||||||
services.github-runners.${name} = {
|
services.github-runners.${name} = {
|
||||||
enable = true;
|
enable = true;
|
||||||
replace = true;
|
replace = true;
|
||||||
@@ -83,6 +92,7 @@ in
|
|||||||
nixos-rebuild
|
nixos-rebuild
|
||||||
openssh
|
openssh
|
||||||
treefmt
|
treefmt
|
||||||
|
my_python
|
||||||
];
|
];
|
||||||
};
|
};
|
||||||
users = {
|
users = {
|
||||||
|
|||||||
@@ -27,6 +27,7 @@ sudo zfs create -o recordsize=16k -o primarycache=metadata -o mountpoint=/zfs/me
|
|||||||
# scratch datasets
|
# scratch datasets
|
||||||
sudo zfs create -o recordsize=16k -o sync=disabled scratch/qbitvpn
|
sudo zfs create -o recordsize=16k -o sync=disabled scratch/qbitvpn
|
||||||
sudo zfs create -o recordsize=16k -o sync=disabled scratch/transmission
|
sudo zfs create -o recordsize=16k -o sync=disabled scratch/transmission
|
||||||
|
sudo zfs create -o recordsize=1M scratch/kafka
|
||||||
|
|
||||||
# storage datasets
|
# storage datasets
|
||||||
sudo zfs create -o recordsize=1M -o compression=zstd-19 storage/archive
|
sudo zfs create -o recordsize=1M -o compression=zstd-19 storage/archive
|
||||||
@@ -38,3 +39,4 @@ sudo zfs create -o compression=zstd-19 storage/syncthing
|
|||||||
sudo zfs create -o recordsize=1M -o compression=zstd-9 -o exec=off -o sync=disabled storage/qbitvpn
|
sudo zfs create -o recordsize=1M -o compression=zstd-9 -o exec=off -o sync=disabled storage/qbitvpn
|
||||||
sudo zfs create -o recordsize=1M -o compression=zstd-9 -o exec=off -o sync=disabled storage/transmission
|
sudo zfs create -o recordsize=1M -o compression=zstd-9 -o exec=off -o sync=disabled storage/transmission
|
||||||
sudo zfs create -o recordsize=1M -o compression=zstd-19 storage/library
|
sudo zfs create -o recordsize=1M -o compression=zstd-19 storage/library
|
||||||
|
sudo zfs create -o recordsize=1M -o compression=zstd-19 -o sync=disabled storage/ollama
|
||||||
|
|||||||
@@ -32,6 +32,7 @@ frontend ContentSwitching
|
|||||||
acl host_jellyfin hdr(host) -i jellyfin.tmmworkshop.com
|
acl host_jellyfin hdr(host) -i jellyfin.tmmworkshop.com
|
||||||
acl host_share hdr(host) -i share.tmmworkshop.com
|
acl host_share hdr(host) -i share.tmmworkshop.com
|
||||||
acl host_gcw hdr(host) -i gcw.tmmworkshop.com
|
acl host_gcw hdr(host) -i gcw.tmmworkshop.com
|
||||||
|
acl host_n8n hdr(host) -i n8n.tmmworkshop.com
|
||||||
|
|
||||||
use_backend audiobookshelf_nodes if host_audiobookshelf
|
use_backend audiobookshelf_nodes if host_audiobookshelf
|
||||||
use_backend cache_nodes if host_cache
|
use_backend cache_nodes if host_cache
|
||||||
@@ -40,6 +41,7 @@ frontend ContentSwitching
|
|||||||
use_backend jellyfin if host_jellyfin
|
use_backend jellyfin if host_jellyfin
|
||||||
use_backend share_nodes if host_share
|
use_backend share_nodes if host_share
|
||||||
use_backend gcw_nodes if host_gcw
|
use_backend gcw_nodes if host_gcw
|
||||||
|
use_backend n8n if host_n8n
|
||||||
|
|
||||||
backend audiobookshelf_nodes
|
backend audiobookshelf_nodes
|
||||||
mode http
|
mode http
|
||||||
@@ -55,7 +57,7 @@ backend filebrowser_nodes
|
|||||||
|
|
||||||
backend homeassistant_nodes
|
backend homeassistant_nodes
|
||||||
mode http
|
mode http
|
||||||
server server 127.0.0.1:8123
|
server server 192.168.90.35:8123
|
||||||
|
|
||||||
backend jellyfin
|
backend jellyfin
|
||||||
option httpchk
|
option httpchk
|
||||||
@@ -71,3 +73,7 @@ backend share_nodes
|
|||||||
backend gcw_nodes
|
backend gcw_nodes
|
||||||
mode http
|
mode http
|
||||||
server server 127.0.0.1:8092
|
server server 127.0.0.1:8092
|
||||||
|
|
||||||
|
backend n8n
|
||||||
|
mode http
|
||||||
|
server server 127.0.0.1:5678
|
||||||
|
|||||||
@@ -68,7 +68,7 @@ in
|
|||||||
jellyfin-apiclient-python
|
jellyfin-apiclient-python
|
||||||
psycopg2
|
psycopg2
|
||||||
pymetno
|
pymetno
|
||||||
pyownet
|
aio-ownet
|
||||||
rokuecp
|
rokuecp
|
||||||
uiprotect
|
uiprotect
|
||||||
wakeonlan
|
wakeonlan
|
||||||
|
|||||||
12
systems/jeeves/services/kafka.nix
Normal file
12
systems/jeeves/services/kafka.nix
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
let
|
||||||
|
vars = import ../vars.nix;
|
||||||
|
in
|
||||||
|
{
|
||||||
|
services.apache-kafka = {
|
||||||
|
enable = false;
|
||||||
|
settings = {
|
||||||
|
listeners = [ "PLAINTEXT://localhost:9092" ];
|
||||||
|
"log.dirs" = [ vars.kafka ];
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
38
systems/jeeves/services/llms.nix
Normal file
38
systems/jeeves/services/llms.nix
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
let
|
||||||
|
vars = import ../vars.nix;
|
||||||
|
in
|
||||||
|
{
|
||||||
|
services = {
|
||||||
|
ollama = {
|
||||||
|
user = "ollama";
|
||||||
|
enable = true;
|
||||||
|
host = "0.0.0.0";
|
||||||
|
loadModels = [
|
||||||
|
"codellama:7b"
|
||||||
|
"deepseek-r1:14b"
|
||||||
|
"deepseek-r1:32b"
|
||||||
|
"deepseek-r1:8b"
|
||||||
|
"gemma3:12b"
|
||||||
|
"gemma3:27b"
|
||||||
|
"gpt-oss:120b"
|
||||||
|
"gpt-oss:20b"
|
||||||
|
"qwen3:14b"
|
||||||
|
"qwen3:30b"
|
||||||
|
];
|
||||||
|
models = vars.ollama;
|
||||||
|
openFirewall = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
systemd.services = {
|
||||||
|
ollama.serviceConfig = {
|
||||||
|
Nice = 19;
|
||||||
|
IOSchedulingPriority = 7;
|
||||||
|
};
|
||||||
|
ollama-model-loader.serviceConfig = {
|
||||||
|
Nice = 19;
|
||||||
|
CPUWeight = 50;
|
||||||
|
IOSchedulingClass = "idle";
|
||||||
|
IOSchedulingPriority = 7;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -1,8 +1,10 @@
|
|||||||
|
{ pkgs, ... }:
|
||||||
let
|
let
|
||||||
vars = import ../vars.nix;
|
vars = import ../vars.nix;
|
||||||
in
|
in
|
||||||
{
|
{
|
||||||
services.nix-serve = {
|
services.nix-serve = {
|
||||||
|
package = pkgs.nix-serve-ng;
|
||||||
enable = true;
|
enable = true;
|
||||||
secretKeyFile = "${vars.secrets}/services/nix-cache/cache-priv-key.pem";
|
secretKeyFile = "${vars.secrets}/services/nix-cache/cache-priv-key.pem";
|
||||||
openFirewall = true;
|
openFirewall = true;
|
||||||
|
|||||||
@@ -48,6 +48,12 @@ in
|
|||||||
host gcw gcw 192.168.90.1/24 trust
|
host gcw gcw 192.168.90.1/24 trust
|
||||||
host gcw gcw 127.0.0.1/32 trust
|
host gcw gcw 127.0.0.1/32 trust
|
||||||
|
|
||||||
|
# math
|
||||||
|
local postgres math trust
|
||||||
|
host postgres math 127.0.0.1/32 trust
|
||||||
|
host postgres math ::1/128 trust
|
||||||
|
host postgres math 192.168.90.1/24 trust
|
||||||
|
|
||||||
'';
|
'';
|
||||||
|
|
||||||
identMap = ''
|
identMap = ''
|
||||||
@@ -110,13 +116,25 @@ in
|
|||||||
replication = true;
|
replication = true;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
{
|
||||||
|
name = "math";
|
||||||
|
ensureDBOwnership = true;
|
||||||
|
ensureClauses = {
|
||||||
|
login = true;
|
||||||
|
createrole = true;
|
||||||
|
createdb = true;
|
||||||
|
replication = true;
|
||||||
|
};
|
||||||
|
}
|
||||||
];
|
];
|
||||||
ensureDatabases = [
|
ensureDatabases = [
|
||||||
"gcw"
|
"gcw"
|
||||||
"hass"
|
"hass"
|
||||||
|
"math"
|
||||||
"megan"
|
"megan"
|
||||||
"mxr_dev"
|
"mxr_dev"
|
||||||
"mxr_prod"
|
"mxr_prod"
|
||||||
|
"n8n"
|
||||||
"richie"
|
"richie"
|
||||||
];
|
];
|
||||||
# Thank you NotAShelf
|
# Thank you NotAShelf
|
||||||
@@ -184,9 +202,10 @@ in
|
|||||||
|
|
||||||
# logging configuration
|
# logging configuration
|
||||||
log_connections = true;
|
log_connections = true;
|
||||||
log_statement = "all";
|
log_statement = "ddl";
|
||||||
logging_collector = true;
|
logging_collector = true;
|
||||||
log_disconnections = true;
|
log_disconnections = true;
|
||||||
|
log_rotation_age = "14d";
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
inputs,
|
|
||||||
pkgs,
|
pkgs,
|
||||||
|
inputs,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
let
|
let
|
||||||
@@ -22,10 +22,13 @@ in
|
|||||||
wantedBy = [ "multi-user.target" ];
|
wantedBy = [ "multi-user.target" ];
|
||||||
description = "validates startup";
|
description = "validates startup";
|
||||||
path = [ pkgs.zfs ];
|
path = [ pkgs.zfs ];
|
||||||
|
environment = {
|
||||||
|
PYTHONPATH = "${inputs.self}/";
|
||||||
|
};
|
||||||
serviceConfig = {
|
serviceConfig = {
|
||||||
EnvironmentFile = "${vars.secrets}/services/server-validation";
|
EnvironmentFile = "${vars.secrets}/services/server-validation";
|
||||||
Type = "oneshot";
|
Type = "oneshot";
|
||||||
ExecStart = "${inputs.system_tools.packages.x86_64-linux.default}/bin/validate_system --config-file='${./validate_system.toml}'";
|
ExecStart = "${pkgs.my_python}/bin/python -m python.system_tests.validate_system '${./validate_system.toml}'";
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -51,3 +51,45 @@ monthly = 12
|
|||||||
hourly = 12
|
hourly = 12
|
||||||
daily = 14
|
daily = 14
|
||||||
monthly = 2
|
monthly = 2
|
||||||
|
|
||||||
|
["media/services"]
|
||||||
|
15_min = 3
|
||||||
|
hourly = 12
|
||||||
|
daily = 14
|
||||||
|
monthly = 2
|
||||||
|
|
||||||
|
["media/home_assistant"]
|
||||||
|
15_min = 3
|
||||||
|
hourly = 12
|
||||||
|
daily = 14
|
||||||
|
monthly = 2
|
||||||
|
|
||||||
|
["scratch/qbitvpn"]
|
||||||
|
15_min = 0
|
||||||
|
hourly = 0
|
||||||
|
daily = 0
|
||||||
|
monthly = 0
|
||||||
|
|
||||||
|
["scratch/transmission"]
|
||||||
|
15_min = 0
|
||||||
|
hourly = 0
|
||||||
|
daily = 0
|
||||||
|
monthly = 0
|
||||||
|
|
||||||
|
["storage/qbitvpn"]
|
||||||
|
15_min = 0
|
||||||
|
hourly = 0
|
||||||
|
daily = 0
|
||||||
|
monthly = 0
|
||||||
|
|
||||||
|
["storage/transmission"]
|
||||||
|
15_min = 0
|
||||||
|
hourly = 0
|
||||||
|
daily = 0
|
||||||
|
monthly = 0
|
||||||
|
|
||||||
|
["storage/ollama"]
|
||||||
|
15_min = 0
|
||||||
|
hourly = 0
|
||||||
|
daily = 0
|
||||||
|
monthly = 0
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ in
|
|||||||
path = "/home/richie/dotfiles";
|
path = "/home/richie/dotfiles";
|
||||||
devices = [
|
devices = [
|
||||||
"bob"
|
"bob"
|
||||||
|
"brain"
|
||||||
"rhapsody-in-green"
|
"rhapsody-in-green"
|
||||||
];
|
];
|
||||||
fsWatcherEnabled = true;
|
fsWatcherEnabled = true;
|
||||||
@@ -23,7 +24,10 @@ in
|
|||||||
path = vars.notes;
|
path = vars.notes;
|
||||||
devices = [
|
devices = [
|
||||||
"rhapsody-in-green"
|
"rhapsody-in-green"
|
||||||
"davids-server"
|
{
|
||||||
|
name = "davids-server";
|
||||||
|
encryptionPasswordFile = "${vars.secrets}/services/syncthing/davids-server";
|
||||||
|
}
|
||||||
];
|
];
|
||||||
fsWatcherEnabled = true;
|
fsWatcherEnabled = true;
|
||||||
};
|
};
|
||||||
@@ -32,8 +36,9 @@ in
|
|||||||
path = "${vars.syncthing}/important";
|
path = "${vars.syncthing}/important";
|
||||||
devices = [
|
devices = [
|
||||||
"bob"
|
"bob"
|
||||||
"rhapsody-in-green"
|
"brain"
|
||||||
"phone"
|
"phone"
|
||||||
|
"rhapsody-in-green"
|
||||||
];
|
];
|
||||||
fsWatcherEnabled = true;
|
fsWatcherEnabled = true;
|
||||||
};
|
};
|
||||||
@@ -67,14 +72,20 @@ in
|
|||||||
path = "/home/richie/vault";
|
path = "/home/richie/vault";
|
||||||
devices = [
|
devices = [
|
||||||
"rhapsody-in-green"
|
"rhapsody-in-green"
|
||||||
"davids-server"
|
{
|
||||||
|
name = "davids-server";
|
||||||
|
encryptionPasswordFile = "${vars.secrets}/services/syncthing/davids-server";
|
||||||
|
}
|
||||||
];
|
];
|
||||||
fsWatcherEnabled = true;
|
fsWatcherEnabled = true;
|
||||||
};
|
};
|
||||||
"backup" = {
|
"backup" = {
|
||||||
path = "${vars.syncthing}/backup";
|
path = "${vars.syncthing}/backup";
|
||||||
devices = [
|
devices = [
|
||||||
"davids-server"
|
{
|
||||||
|
name = "davids-server";
|
||||||
|
encryptionPasswordFile = "${vars.secrets}/services/syncthing/davids-server";
|
||||||
|
}
|
||||||
];
|
];
|
||||||
fsWatcherEnabled = true;
|
fsWatcherEnabled = true;
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -17,5 +17,7 @@ in
|
|||||||
share = "${zfs_media}/share";
|
share = "${zfs_media}/share";
|
||||||
syncthing = "${zfs_storage}/syncthing";
|
syncthing = "${zfs_storage}/syncthing";
|
||||||
transmission = "${zfs_storage}/transmission";
|
transmission = "${zfs_storage}/transmission";
|
||||||
|
ollama = "${zfs_storage}/ollama";
|
||||||
transmission_scratch = "${zfs_scratch}/transmission";
|
transmission_scratch = "${zfs_scratch}/transmission";
|
||||||
|
kafka = "${zfs_scratch}/kafka";
|
||||||
}
|
}
|
||||||
|
|||||||
28
systems/leviathan/default.nix
Normal file
28
systems/leviathan/default.nix
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
{ inputs, ... }:
|
||||||
|
{
|
||||||
|
imports = [
|
||||||
|
"${inputs.self}/users/elise"
|
||||||
|
"${inputs.self}/users/richie"
|
||||||
|
"${inputs.self}/common/global"
|
||||||
|
"${inputs.self}/common/optional/desktop.nix"
|
||||||
|
"${inputs.self}/common/optional/steam.nix"
|
||||||
|
"${inputs.self}/common/optional/systemd-boot.nix"
|
||||||
|
"${inputs.self}/common/optional/update.nix"
|
||||||
|
"${inputs.self}/common/optional/zerotier.nix"
|
||||||
|
./hardware.nix
|
||||||
|
inputs.nixos-hardware.nixosModules.framework-13-7040-amd
|
||||||
|
];
|
||||||
|
|
||||||
|
networking = {
|
||||||
|
hostName = "leviathan";
|
||||||
|
hostId = "cb9b64d8";
|
||||||
|
firewall.enable = true;
|
||||||
|
networkmanager.enable = true;
|
||||||
|
};
|
||||||
|
|
||||||
|
services = {
|
||||||
|
openssh.ports = [ 332 ];
|
||||||
|
};
|
||||||
|
|
||||||
|
system.stateVersion = "25.05";
|
||||||
|
}
|
||||||
69
systems/leviathan/hardware.nix
Normal file
69
systems/leviathan/hardware.nix
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
{
|
||||||
|
config,
|
||||||
|
lib,
|
||||||
|
modulesPath,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
{
|
||||||
|
imports = [ (modulesPath + "/installer/scan/not-detected.nix") ];
|
||||||
|
|
||||||
|
boot = {
|
||||||
|
initrd = {
|
||||||
|
availableKernelModules = [
|
||||||
|
"ahci"
|
||||||
|
"ehci_pci"
|
||||||
|
"nvme"
|
||||||
|
"sd_mod"
|
||||||
|
"usb_storage"
|
||||||
|
"usbhid"
|
||||||
|
"xhci_pci"
|
||||||
|
];
|
||||||
|
kernelModules = [ ];
|
||||||
|
luks.devices."luks-root-pool-nvme-Samsung_SSD_970_EVO_Plus_1TB_S6S1NS0T617615W-part2" = {
|
||||||
|
device = "/dev/disk/by-id/nvme-Samsung_SSD_970_EVO_Plus_1TB_S6S1NS0T617615W-part2";
|
||||||
|
bypassWorkqueues = true;
|
||||||
|
allowDiscards = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
kernelModules = [ "kvm-amd" ];
|
||||||
|
extraModulePackages = [ ];
|
||||||
|
};
|
||||||
|
|
||||||
|
fileSystems = {
|
||||||
|
"/" = lib.mkDefault {
|
||||||
|
device = "root_pool/root";
|
||||||
|
fsType = "zfs";
|
||||||
|
};
|
||||||
|
|
||||||
|
"/home" = {
|
||||||
|
device = "root_pool/home";
|
||||||
|
fsType = "zfs";
|
||||||
|
};
|
||||||
|
|
||||||
|
"/var" = {
|
||||||
|
device = "root_pool/var";
|
||||||
|
fsType = "zfs";
|
||||||
|
};
|
||||||
|
|
||||||
|
"/nix" = {
|
||||||
|
device = "root_pool/nix";
|
||||||
|
fsType = "zfs";
|
||||||
|
};
|
||||||
|
|
||||||
|
"/boot" = {
|
||||||
|
device = "/dev/disk/by-uuid/12CE-A600";
|
||||||
|
fsType = "vfat";
|
||||||
|
options = [
|
||||||
|
"fmask=0077"
|
||||||
|
"dmask=0077"
|
||||||
|
];
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
swapDevices = [ ];
|
||||||
|
|
||||||
|
networking.useDHCP = lib.mkDefault true;
|
||||||
|
|
||||||
|
nixpkgs.hostPlatform = lib.mkDefault "x86_64-linux";
|
||||||
|
hardware.cpu.amd.updateMicrocode = lib.mkDefault config.hardware.enableRedistributableFirmware;
|
||||||
|
}
|
||||||
@@ -1,16 +1,17 @@
|
|||||||
{ inputs, ... }:
|
{ inputs, ... }:
|
||||||
{
|
{
|
||||||
imports = [
|
imports = [
|
||||||
../../users/richie
|
"${inputs.self}/users/richie"
|
||||||
../../common/global
|
"${inputs.self}/common/global"
|
||||||
../../common/optional/desktop.nix
|
"${inputs.self}/common/optional/desktop.nix"
|
||||||
../../common/optional/docker.nix
|
"${inputs.self}/common/optional/docker.nix"
|
||||||
../../common/optional/steam.nix
|
"${inputs.self}/common/optional/steam.nix"
|
||||||
../../common/optional/syncthing_base.nix
|
"${inputs.self}/common/optional/syncthing_base.nix"
|
||||||
../../common/optional/systemd-boot.nix
|
"${inputs.self}/common/optional/systemd-boot.nix"
|
||||||
../../common/optional/yubikey.nix
|
"${inputs.self}/common/optional/yubikey.nix"
|
||||||
../../common/optional/zerotier.nix
|
"${inputs.self}/common/optional/zerotier.nix"
|
||||||
./hardware.nix
|
./hardware.nix
|
||||||
|
./llms.nix
|
||||||
./syncthing.nix
|
./syncthing.nix
|
||||||
inputs.nixos-hardware.nixosModules.framework-13-7040-amd
|
inputs.nixos-hardware.nixosModules.framework-13-7040-amd
|
||||||
];
|
];
|
||||||
@@ -18,7 +19,10 @@
|
|||||||
networking = {
|
networking = {
|
||||||
hostName = "rhapsody-in-green";
|
hostName = "rhapsody-in-green";
|
||||||
hostId = "6404140d";
|
hostId = "6404140d";
|
||||||
firewall.enable = true;
|
firewall = {
|
||||||
|
enable = true;
|
||||||
|
allowedTCPPorts = [ ];
|
||||||
|
};
|
||||||
networkmanager.enable = true;
|
networkmanager.enable = true;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
30
systems/rhapsody-in-green/llms.nix
Normal file
30
systems/rhapsody-in-green/llms.nix
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
{
|
||||||
|
services.ollama = {
|
||||||
|
user = "ollama";
|
||||||
|
enable = true;
|
||||||
|
host = "127.0.0.1";
|
||||||
|
loadModels = [
|
||||||
|
"codellama:7b"
|
||||||
|
"deepseek-r1:14b"
|
||||||
|
"deepseek-r1:32b"
|
||||||
|
"deepseek-r1:8b"
|
||||||
|
"gemma3:12b"
|
||||||
|
"gemma3:27b"
|
||||||
|
"gpt-oss:20b"
|
||||||
|
"qwen3:14b"
|
||||||
|
"qwen3:30b"
|
||||||
|
];
|
||||||
|
};
|
||||||
|
systemd.services = {
|
||||||
|
ollama.serviceConfig = {
|
||||||
|
Nice = 19;
|
||||||
|
IOSchedulingPriority = 7;
|
||||||
|
};
|
||||||
|
ollama-model-loader.serviceConfig = {
|
||||||
|
Nice = 19;
|
||||||
|
CPUWeight = 50;
|
||||||
|
IOSchedulingClass = "idle";
|
||||||
|
IOSchedulingPriority = 7;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -3,8 +3,9 @@
|
|||||||
"dotfiles" = {
|
"dotfiles" = {
|
||||||
path = "/home/richie/dotfiles";
|
path = "/home/richie/dotfiles";
|
||||||
devices = [
|
devices = [
|
||||||
"jeeves"
|
|
||||||
"bob"
|
"bob"
|
||||||
|
"brain"
|
||||||
|
"jeeves"
|
||||||
];
|
];
|
||||||
fsWatcherEnabled = true;
|
fsWatcherEnabled = true;
|
||||||
};
|
};
|
||||||
@@ -21,6 +22,7 @@
|
|||||||
path = "/home/richie/important";
|
path = "/home/richie/important";
|
||||||
devices = [
|
devices = [
|
||||||
"bob"
|
"bob"
|
||||||
|
"brain"
|
||||||
"jeeves"
|
"jeeves"
|
||||||
"phone"
|
"phone"
|
||||||
];
|
];
|
||||||
|
|||||||
1
tests/__init__.py
Normal file
1
tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""Tests."""
|
||||||
6
tests/conftest.py
Normal file
6
tests/conftest.py
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
"""Fixtures for tests."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
PASSWORD = "password" # noqa: S105
|
||||||
|
TOKEN = "token" # noqa: S105
|
||||||
61
tests/test_common.py
Normal file
61
tests/test_common.py
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
"""test_common."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from os import environ
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from apprise import Apprise
|
||||||
|
|
||||||
|
from python.common import bash_wrapper, signal_alert, utcnow
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from pytest_mock import MockerFixture
|
||||||
|
|
||||||
|
|
||||||
|
def test_utcnow() -> None:
|
||||||
|
"""test_utcnow."""
|
||||||
|
utcnow()
|
||||||
|
|
||||||
|
|
||||||
|
def test_signal_alert(mocker: MockerFixture) -> None:
|
||||||
|
"""test_signal_alert."""
|
||||||
|
environ["SIGNAL_ALERT_FROM_PHONE"] = "1234567890"
|
||||||
|
environ["SIGNAL_ALERT_TO_PHONE"] = "0987654321"
|
||||||
|
|
||||||
|
mock_logger = mocker.patch("python.common.logger")
|
||||||
|
mock_apprise_client = mocker.MagicMock(spec=Apprise)
|
||||||
|
mocker.patch("python.common.Apprise", return_value=mock_apprise_client)
|
||||||
|
|
||||||
|
signal_alert("test")
|
||||||
|
|
||||||
|
mock_logger.info.assert_not_called()
|
||||||
|
mock_apprise_client.add.assert_called_once_with("signal://localhost:8989/1234567890/0987654321")
|
||||||
|
mock_apprise_client.notify.assert_called_once_with(title="", body="test")
|
||||||
|
|
||||||
|
|
||||||
|
def test_signal_alert_no_phones(mocker: MockerFixture) -> None:
|
||||||
|
"""test_signal_alert_no_phones."""
|
||||||
|
if "SIGNAL_ALERT_FROM_PHONE" in environ:
|
||||||
|
del environ["SIGNAL_ALERT_FROM_PHONE"]
|
||||||
|
if "SIGNAL_ALERT_TO_PHONE" in environ:
|
||||||
|
del environ["SIGNAL_ALERT_TO_PHONE"]
|
||||||
|
mock_logger = mocker.patch("python.common.logger")
|
||||||
|
signal_alert("test")
|
||||||
|
|
||||||
|
mock_logger.info.assert_called_once_with("SIGNAL_ALERT_FROM_PHONE or SIGNAL_ALERT_TO_PHONE not set")
|
||||||
|
|
||||||
|
|
||||||
|
def test_test_bash_wrapper() -> None:
|
||||||
|
"""test_test_bash_wrapper."""
|
||||||
|
stdout, returncode = bash_wrapper("echo test")
|
||||||
|
assert stdout == "test\n"
|
||||||
|
assert returncode == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_test_bash_wrapper_error() -> None:
|
||||||
|
"""test_test_bash_wrapper_error."""
|
||||||
|
expected_error = 2
|
||||||
|
stdout, returncode = bash_wrapper("ls /this/path/does/not/exist")
|
||||||
|
assert stdout == "ls: cannot access '/this/path/does/not/exist': No such file or directory\n"
|
||||||
|
assert returncode == expected_error
|
||||||
104
tests/test_components.py
Normal file
104
tests/test_components.py
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
"""test_components."""
|
||||||
|
|
||||||
|
from pytest_mock import MockerFixture
|
||||||
|
|
||||||
|
from python.system_tests.components import systemd_tests, zpool_tests
|
||||||
|
from python.zfs import Zpool
|
||||||
|
|
||||||
|
temp = "Every feature flags pool has all supported and requested features enabled.\n"
|
||||||
|
|
||||||
|
SYSTEM_TESTS_COMPONENTS = "python.system_tests.components"
|
||||||
|
|
||||||
|
|
||||||
|
def test_zpool_tests(mocker: MockerFixture) -> None:
|
||||||
|
"""test_zpool_tests."""
|
||||||
|
mock_zpool = mocker.MagicMock(spec=Zpool)
|
||||||
|
mock_zpool.health = "ONLINE"
|
||||||
|
mock_zpool.capacity = 70
|
||||||
|
mock_zpool.name = "Main"
|
||||||
|
mocker.patch(f"{SYSTEM_TESTS_COMPONENTS}.Zpool", return_value=mock_zpool)
|
||||||
|
mocker.patch(f"{SYSTEM_TESTS_COMPONENTS}.bash_wrapper", return_value=(temp, ""))
|
||||||
|
errors = zpool_tests(("Main",))
|
||||||
|
assert errors == []
|
||||||
|
|
||||||
|
|
||||||
|
def test_zpool_tests_out_of_date(mocker: MockerFixture) -> None:
|
||||||
|
"""test_zpool_tests_out_of_date."""
|
||||||
|
mock_zpool = mocker.MagicMock(spec=Zpool)
|
||||||
|
mock_zpool.health = "ONLINE"
|
||||||
|
mock_zpool.capacity = 70
|
||||||
|
mock_zpool.name = "Main"
|
||||||
|
mocker.patch(f"{SYSTEM_TESTS_COMPONENTS}.Zpool", return_value=mock_zpool)
|
||||||
|
mocker.patch(f"{SYSTEM_TESTS_COMPONENTS}.bash_wrapper", return_value=("", ""))
|
||||||
|
errors = zpool_tests(("Main",))
|
||||||
|
assert errors == ["ZPool out of date run `sudo zpool upgrade -a`"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_zpool_tests_out_of_space(mocker: MockerFixture) -> None:
|
||||||
|
"""test_zpool_tests_out_of_space."""
|
||||||
|
mock_zpool = mocker.MagicMock(spec=Zpool)
|
||||||
|
mock_zpool.health = "ONLINE"
|
||||||
|
mock_zpool.capacity = 100
|
||||||
|
mock_zpool.name = "Main"
|
||||||
|
mocker.patch(f"{SYSTEM_TESTS_COMPONENTS}.Zpool", return_value=mock_zpool)
|
||||||
|
mocker.patch(f"{SYSTEM_TESTS_COMPONENTS}.bash_wrapper", return_value=(temp, ""))
|
||||||
|
errors = zpool_tests(("Main",))
|
||||||
|
assert errors == ["Main is low on space"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_zpool_tests_offline(mocker: MockerFixture) -> None:
|
||||||
|
"""test_zpool_tests_offline."""
|
||||||
|
mock_zpool = mocker.MagicMock(spec=Zpool)
|
||||||
|
mock_zpool.health = "OFFLINE"
|
||||||
|
mock_zpool.capacity = 70
|
||||||
|
mock_zpool.name = "Main"
|
||||||
|
mocker.patch(f"{SYSTEM_TESTS_COMPONENTS}.Zpool", return_value=mock_zpool)
|
||||||
|
mocker.patch(f"{SYSTEM_TESTS_COMPONENTS}.bash_wrapper", return_value=(temp, ""))
|
||||||
|
errors = zpool_tests(("Main",))
|
||||||
|
assert errors == ["Main is OFFLINE"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_systemd_tests(mocker: MockerFixture) -> None:
|
||||||
|
"""test_systemd_tests."""
|
||||||
|
mocker.patch(
|
||||||
|
f"{SYSTEM_TESTS_COMPONENTS}.bash_wrapper",
|
||||||
|
side_effect=[
|
||||||
|
("inactive\n", ""),
|
||||||
|
("active\n", ""),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
errors = systemd_tests(("docker",))
|
||||||
|
assert errors == []
|
||||||
|
"""test_systemd_tests."""
|
||||||
|
|
||||||
|
|
||||||
|
def test_systemd_tests_multiple_negative_retries(mocker: MockerFixture) -> None:
|
||||||
|
"""test_systemd_tests_fail."""
|
||||||
|
mocker.patch(f"{SYSTEM_TESTS_COMPONENTS}.bash_wrapper", return_value=("active\n", ""))
|
||||||
|
errors = systemd_tests(("docker",), max_retries=-1, retry_delay_secs=-1)
|
||||||
|
assert errors == []
|
||||||
|
|
||||||
|
|
||||||
|
def test_systemd_tests_multiple_pass(mocker: MockerFixture) -> None:
|
||||||
|
"""test_systemd_tests_fail."""
|
||||||
|
mocker.patch(
|
||||||
|
f"{SYSTEM_TESTS_COMPONENTS}.bash_wrapper",
|
||||||
|
side_effect=[
|
||||||
|
("inactive\n", ""),
|
||||||
|
("activating\n", ""),
|
||||||
|
("active\n", ""),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
errors = systemd_tests(
|
||||||
|
("docker",),
|
||||||
|
retryable_statuses=("inactive\n", "activating\n"),
|
||||||
|
valid_statuses=("active\n",),
|
||||||
|
)
|
||||||
|
assert errors == []
|
||||||
|
|
||||||
|
|
||||||
|
def test_systemd_tests_fail(mocker: MockerFixture) -> None:
|
||||||
|
"""test_systemd_tests_fail."""
|
||||||
|
mocker.patch(f"{SYSTEM_TESTS_COMPONENTS}.bash_wrapper", return_value=("inactive\n", ""))
|
||||||
|
errors = systemd_tests(("docker",), max_retries=5)
|
||||||
|
assert errors == ["docker is inactive"]
|
||||||
67
tests/test_databasse.py
Normal file
67
tests/test_databasse.py
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
"""test_database."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from sqlalchemy import Integer, String, create_engine, select
|
||||||
|
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column, sessionmaker
|
||||||
|
|
||||||
|
from python.database import safe_insert
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Generator
|
||||||
|
|
||||||
|
|
||||||
|
class TestingBase(DeclarativeBase):
|
||||||
|
"""TestingBase."""
|
||||||
|
|
||||||
|
|
||||||
|
class Item(TestingBase):
|
||||||
|
"""Item."""
|
||||||
|
|
||||||
|
__tablename__ = "items"
|
||||||
|
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
|
||||||
|
name: Mapped[str] = mapped_column(String(50), nullable=False, unique=True)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def session() -> Generator[Session]:
|
||||||
|
"""Fresh in-memory DB + tables for each test."""
|
||||||
|
engine = create_engine("sqlite+pysqlite:///:memory:", echo=False, future=True)
|
||||||
|
TestingBase.metadata.create_all(engine)
|
||||||
|
with sessionmaker(bind=engine, expire_on_commit=False, future=True)() as s:
|
||||||
|
yield s
|
||||||
|
|
||||||
|
|
||||||
|
def test_partial_failure_unique_constraint(session: Session) -> None:
|
||||||
|
"""Duplicate name should fail only for the conflicting row; others commit."""
|
||||||
|
objs = [Item(name="a"), Item(name="b"), Item(name="a"), Item(name="c")]
|
||||||
|
failures = safe_insert(objs, session)
|
||||||
|
|
||||||
|
assert len(failures) == 1
|
||||||
|
exc, failed_obj = failures[0]
|
||||||
|
assert isinstance(exc, Exception)
|
||||||
|
assert isinstance(failed_obj, Item)
|
||||||
|
assert failed_obj.name == "a"
|
||||||
|
|
||||||
|
rows = session.scalars(select(Item.name)).all()
|
||||||
|
assert sorted(rows) == ["a", "b", "c"]
|
||||||
|
assert rows.count("a") == 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_all_good_inserts(session: Session) -> None:
    """No failures when all rows are valid."""
    valid_items = [Item(name="x"), Item(name="y")]
    insert_failures = safe_insert(valid_items, session)
    assert insert_failures == []

    # Both rows must be queryable after the insert.
    stored = session.scalars(select(Item.name).where(Item.name.in_(("x", "y")))).all()
    assert sorted(stored) == ["x", "y"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_unmapped_object_raises(session: Session) -> None:
    """Non-ORM instances should raise TypeError immediately."""
    not_an_orm_instance = object()
    with pytest.raises(TypeError):
        safe_insert([not_an_orm_instance], session)
|
||||||
75
tests/test_fix_eval_warnings.py
Normal file
75
tests/test_fix_eval_warnings.py
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
"""test_fix_eval_warnings."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from typer.testing import CliRunner
|
||||||
|
|
||||||
|
from python.tools.fix_eval_warnings import Config, app, generate_fix, parse_warnings
|
||||||
|
from tests.conftest import TOKEN
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from pyfakefs.fake_filesystem import FakeFilesystem
|
||||||
|
from pytest_mock import MockerFixture
|
||||||
|
|
||||||
|
# Shared Typer CLI runner; the tests below use it to invoke the app in-process.
runner = CliRunner()
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_warnings(fs: FakeFilesystem) -> None:
    """A single evaluation-warning line is extracted from a mixed build log."""
    build_log = Path("/build.log")
    log_contents = "Some output\nevaluation warning: 'system' is deprecated\nMore output"
    fs.create_file(build_log, contents=log_contents, encoding="utf-8")

    found = parse_warnings(build_log)

    assert len(found) == 1
    assert found[0] == "evaluation warning: 'system' is deprecated"
|
||||||
|
|
||||||
|
|
||||||
|
def test_generate_fix(mocker: MockerFixture) -> None:
    """generate_fix returns the model's suggestion from the mocked API response."""
    patched_post = mocker.patch("python.tools.fix_eval_warnings.requests.post")
    fake_response = mocker.MagicMock()
    payload = {"choices": [{"message": {"content": "Use stdenv.hostPlatform.system"}}]}
    fake_response.json.return_value = payload
    patched_post.return_value = fake_response

    cfg = Config(github_token=TOKEN)
    suggestion = generate_fix("evaluation warning: 'system' is deprecated", cfg)

    assert suggestion == "Use stdenv.hostPlatform.system"
    patched_post.assert_called_once()
|
||||||
|
|
||||||
|
|
||||||
|
def test_main(mocker: MockerFixture, fs: FakeFilesystem) -> None:
    """End-to-end CLI run: parses the log, writes suggestions, logs progress."""
    build_log = Path("/build.log")
    fs.create_file(
        build_log,
        contents="Some output\nevaluation warning: 'system' is deprecated\nMore output",
        encoding="utf-8",
    )

    mocker.patch("python.tools.fix_eval_warnings.generate_fix", return_value="Fixed it")
    fake_logger = mocker.patch("python.tools.fix_eval_warnings.logger")

    # We need to mock GITHUB_TOKEN env var or the script will warn/fail
    mocker.patch.dict("os.environ", {"GITHUB_TOKEN": TOKEN})

    outcome = runner.invoke(app, [str(build_log)])

    assert outcome.exit_code == 0
    # Verify logger calls instead of stdout, as CliRunner might not capture logging output correctly
    # when logging is configured to write to sys.stdout directly.
    logged = [str(call) for call in fake_logger.info.call_args_list]
    assert any("Found 1 warnings" in line for line in logged)
    assert any("Fix suggestions written to fix_suggestions.md" in line for line in logged)
    assert Path("fix_suggestions.md").exists()
|
||||||
123
tests/test_parallelize.py
Normal file
123
tests/test_parallelize.py
Normal file
@@ -0,0 +1,123 @@
|
|||||||
|
"""test_executors."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from concurrent.futures import Future, ThreadPoolExecutor
|
||||||
|
from typing import TYPE_CHECKING, Any
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from python.parallelize import _parallelize_base, parallelize_process, parallelize_thread
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Callable
|
||||||
|
|
||||||
|
from pytest_mock import MockerFixture
|
||||||
|
|
||||||
|
|
||||||
|
class MockFuture(Future):
    """A Future that is created already resolved with a fixed result.

    Lets MockPoolExecutor make submissions fully synchronous in tests.
    """

    def __init__(self, result: Any) -> None:  # noqa: ANN401
        """Initialize the future and immediately finish it with *result*.

        Args:
            result: The value ``result()`` should return.
        """
        super().__init__()
        # set_result() stores the value and transitions the future to FINISHED.
        # (The previous manual self._result / self._exception assignments were
        # redundant: Future.__init__ initializes both already.)
        self.set_result(result)

    def exception(self, timeout: float | None = None) -> BaseException | None:
        """Return the stored exception (always None here) without blocking."""
        # Fixed debug format: f"{timeout}=" logged only the value with a stray "=".
        logging.debug(f"{timeout=}")
        return self._exception

    def result(self, timeout: float | None = None) -> Any:  # noqa: ANN401
        """Return the stored result without blocking; *timeout* is ignored."""
        logging.debug(f"{timeout=}")
        return self._result
|
||||||
|
|
||||||
|
|
||||||
|
class MockPoolExecutor(ThreadPoolExecutor):
    """A ThreadPoolExecutor whose submissions run synchronously in the caller's thread."""

    # NOTE: the former __init__ override only forwarded to super().__init__ and
    # added nothing, so it has been removed; construction behavior is unchanged.

    def submit(self, fn: Callable[..., Any], /, *args: Any, **kwargs: Any) -> Future:  # noqa: ANN401
        """Run *fn* immediately and wrap its return value in an already-done future.

        Args:
            fn: The callable to execute.
            *args: The positional arguments to pass to the callable.
            **kwargs: The keyword arguments to pass to the callable.

        Returns:
            A Future instance representing the execution of the callable.
        """
        # Executing inline (no worker thread) makes test ordering deterministic.
        result = fn(*args, **kwargs)
        return MockFuture(result)
|
||||||
|
|
||||||
|
|
||||||
|
def add(a: int, b: int) -> int:
    """Return the sum of ``a`` and ``b``."""
    total = a + b
    return total
|
||||||
|
|
||||||
|
|
||||||
|
def test_parallelize_thread() -> None:
    """Thread executor returns all results and records no exceptions."""
    call_kwargs = [{"a": 1, "b": 2}, {"a": 3, "b": 4}]
    outcome = parallelize_thread(func=add, kwargs_list=call_kwargs, progress_tracker=1)
    assert outcome.results == [3, 7]
    assert not outcome.exceptions
|
||||||
|
|
||||||
|
|
||||||
|
def test_parallelize_thread_exception() -> None:
    """A failing call is captured in exceptions; good calls still succeed."""
    call_kwargs: list[dict[str, int | None]] = [{"a": 1, "b": 2}, {"a": 3, "b": None}]
    outcome = parallelize_thread(func=add, kwargs_list=call_kwargs)
    assert outcome.results == [3]
    expected = """[TypeError("unsupported operand type(s) for +: 'int' and 'NoneType'")]"""
    assert str(outcome.exceptions) == expected
|
||||||
|
|
||||||
|
|
||||||
|
def test_parallelize_process() -> None:
    """Process executor returns all results and records no exceptions."""
    call_kwargs = [{"a": 1, "b": 2}, {"a": 3, "b": 4}]
    outcome = parallelize_process(func=add, kwargs_list=call_kwargs)
    assert outcome.results == [3, 7]
    assert not outcome.exceptions
|
||||||
|
|
||||||
|
|
||||||
|
def test_parallelize_process_to_many_max_workers(mocker: MockerFixture) -> None:
    """Requesting more workers than CPUs raises a RuntimeError."""
    # Pretend the machine has a single CPU so max_workers=8 is always too many.
    mocker.patch(target="python.parallelize.cpu_count", return_value=1)

    with pytest.raises(RuntimeError, match="max_workers must be less than or equal to 1"):
        parallelize_process(func=add, kwargs_list=[{"a": 1, "b": 2}], max_workers=8)
|
||||||
|
|
||||||
|
|
||||||
|
def test_executor_results_repr() -> None:
    """repr() of the results object shows both results and exceptions."""
    outcome = parallelize_thread(func=add, kwargs_list=[{"a": 1, "b": 2}])
    assert repr(outcome) == "results=[3] exceptions=[]"
|
||||||
|
|
||||||
|
|
||||||
|
def test_early_error() -> None:
    """early_error mode re-raises the first failure instead of collecting it."""
    call_kwargs: list[dict[str, int | None]] = [{"a": 1, "b": 2}, {"a": 3, "b": None}]
    with pytest.raises(TypeError, match=r"unsupported operand type\(s\) for \+\: 'int' and 'NoneType'"):
        parallelize_thread(func=add, kwargs_list=call_kwargs, mode="early_error")
|
||||||
|
|
||||||
|
|
||||||
|
def test_mock_pool_executor() -> None:
    """_parallelize_base works with the synchronous mock executor."""
    outcome = _parallelize_base(
        executor_type=MockPoolExecutor,
        func=add,
        kwargs_list=[{"a": 1, "b": 2}, {"a": 3, "b": 4}],
        max_workers=None,
        progress_tracker=None,
        mode="normal",
    )
    assert repr(outcome) == "results=[3, 7] exceptions=[]"
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user