Compare commits

..

5 Commits

Author SHA1 Message Date
24b66745d1 updated test_test_bash_wrapper_error 2025-12-31 21:00:22 -05:00
3074e3c47e added busybox to nix_builder.nix 2025-12-31 17:06:38 -05:00
9aea31e841 added pytest_safe.yml 2025-12-31 16:55:05 -05:00
ec5975d663 added busybox 2025-12-31 16:34:21 -05:00
f5b22322d0 testing unshare 2025-12-31 16:14:20 -05:00
142 changed files with 523 additions and 15616 deletions

View File

@@ -1,30 +0,0 @@
name: fix_eval_warnings
on:
workflow_run:
workflows: ["build_systems"]
types: [completed]
jobs:
check-warnings:
if: >-
github.event.workflow_run.conclusion != 'cancelled' &&
github.event.workflow_run.head_branch == 'main' &&
(github.event.workflow_run.event == 'push' || github.event.workflow_run.event == 'schedule')
runs-on: self-hosted
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Fix eval warnings
env:
GH_TOKEN: ${{ secrets.GH_TOKEN_FOR_UPDATES }}
run: >-
nix develop .#devShells.x86_64-linux.default -c
python -m python.eval_warnings.main
--run-id "${{ github.event.workflow_run.id }}"
--repo "${{ github.repository }}"
--ollama-url "${{ secrets.OLLAMA_URL }}"
--run-url "${{ github.event.workflow_run.html_url }}"

19
.github/workflows/pytest_safe.yml vendored Normal file
View File

@@ -0,0 +1,19 @@
name: pytest_safe
on:
push:
branches:
- main
pull_request:
branches:
- main
merge_group:
jobs:
pytest:
runs-on: self-hosted
steps:
- uses: actions/checkout@v4
- name: Run tests
run: unshare --map-root-user --user --net -- pytest tests

4
.gitignore vendored
View File

@@ -165,7 +165,3 @@ test.*
# syncthing # syncthing
.stfolder .stfolder
# Frontend build output
frontend/dist/
frontend/node_modules/

View File

@@ -77,7 +77,6 @@
"esphome", "esphome",
"extest", "extest",
"fadvise", "fadvise",
"fastfetch",
"fastforwardteam", "fastforwardteam",
"FASTFOX", "FASTFOX",
"ffmpegthumbnailer", "ffmpegthumbnailer",
@@ -167,6 +166,7 @@
"mypy", "mypy",
"ncdu", "ncdu",
"nemo", "nemo",
"neofetch",
"nerdfonts", "nerdfonts",
"netdev", "netdev",
"netdevs", "netdevs",
@@ -287,7 +287,6 @@
"topstories", "topstories",
"treefmt", "treefmt",
"twimg", "twimg",
"typedmonarchmoney",
"typer", "typer",
"uaccess", "uaccess",
"ubiquiti", "ubiquiti",

View File

@@ -3,10 +3,3 @@
- use treefmt to format all files - use treefmt to format all files
- make python code ruff compliant - make python code ruff compliant
- use pytest to test python code - use pytest to test python code
- always use the minimum amount of complexity
- if judgment calls are easy to reverse make them. if not ask me first
- Match existing code style.
- Use builtin helpers getenv() over os.environ.get.
- Prefer single-purpose functions over “do everything” helpers.
- Avoid compatibility branches like PG_USER and POSTGRESQL_URL unless requested.
- Keep helpers only if reused or they simplify the code otherwise inline.

View File

@@ -33,8 +33,6 @@ in
]; ];
warn-dirty = false; warn-dirty = false;
flake-registry = ""; # disable global flake registries flake-registry = ""; # disable global flake registries
connect-timeout = 10;
fallback = true;
}; };
# Add each flake input as a registry and nix_path # Add each flake input as a registry and nix_path

36
flake.lock generated
View File

@@ -8,11 +8,11 @@
}, },
"locked": { "locked": {
"dir": "pkgs/firefox-addons", "dir": "pkgs/firefox-addons",
"lastModified": 1772824881, "lastModified": 1766762570,
"narHash": "sha256-NqX+JCA8hRV3GoYrsqnHB2IWKte1eQ8NK2WVbJkORcw=", "narHash": "sha256-Nevsj5NYurwp3I6nSMeh3uirwoinVSbCldqOXu4smms=",
"owner": "rycee", "owner": "rycee",
"repo": "nur-expressions", "repo": "nur-expressions",
"rev": "07e1616c9b13fe4794dad4bcc33cd7088c554465", "rev": "03d7d310ea91d6e4b47ed70aa86c781fcc5b38e1",
"type": "gitlab" "type": "gitlab"
}, },
"original": { "original": {
@@ -29,11 +29,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1772807318, "lastModified": 1766682973,
"narHash": "sha256-Qjw6ILt8cb2HQQpCmWNLMZZ63wEo1KjTQt+1BcQBr7k=", "narHash": "sha256-GKO35onS711ThCxwWcfuvbIBKXwriahGqs+WZuJ3v9E=",
"owner": "nix-community", "owner": "nix-community",
"repo": "home-manager", "repo": "home-manager",
"rev": "daa2c221320809f5514edde74d0ad0193ad54ed8", "rev": "91cdb0e2d574c64fae80d221f4bf09d5592e9ec2",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -44,11 +44,11 @@
}, },
"nixos-hardware": { "nixos-hardware": {
"locked": { "locked": {
"lastModified": 1771969195, "lastModified": 1766568855,
"narHash": "sha256-qwcDBtrRvJbrrnv1lf/pREQi8t2hWZxVAyeMo7/E9sw=", "narHash": "sha256-UXVtN77D7pzKmzOotFTStgZBqpOcf8cO95FcupWp4Zo=",
"owner": "nixos", "owner": "nixos",
"repo": "nixos-hardware", "repo": "nixos-hardware",
"rev": "41c6b421bdc301b2624486e11905c9af7b8ec68e", "rev": "c5db9569ac9cc70929c268ac461f4003e3e5ca80",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -60,11 +60,11 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1772624091, "lastModified": 1766651565,
"narHash": "sha256-QKyJ0QGWBn6r0invrMAK8dmJoBYWoOWy7lN+UHzW1jc=", "narHash": "sha256-QEhk0eXgyIqTpJ/ehZKg9IKS7EtlWxF3N7DXy42zPfU=",
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "80bdc1e5ce51f56b19791b52b2901187931f5353", "rev": "3e2499d5539c16d0d173ba53552a4ff8547f4539",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -76,11 +76,11 @@
}, },
"nixpkgs-master": { "nixpkgs-master": {
"locked": { "locked": {
"lastModified": 1772842888, "lastModified": 1766794443,
"narHash": "sha256-bQRYIwRb9xuEMHTLd5EzjHhYMKzbUbIo7abFV84iUjM=", "narHash": "sha256-Q8IyTQ3Lu8vX/iqO3U+E4pjLbP1NsqFih6uElf8OYrQ=",
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "af5157af67f118e13172750f63012f199b61e3a1", "rev": "088b069b8270ee36d83533c86b9f91d924d185d9",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -125,11 +125,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1772495394, "lastModified": 1766289575,
"narHash": "sha256-hmIvE/slLKEFKNEJz27IZ8BKlAaZDcjIHmkZ7GCEjfw=", "narHash": "sha256-BOKCwOQQIP4p9z8DasT5r+qjri3x7sPCOq+FTjY8Z+o=",
"owner": "Mic92", "owner": "Mic92",
"repo": "sops-nix", "repo": "sops-nix",
"rev": "1d9b98a29a45abe9c4d3174bd36de9f28755e3ff", "rev": "9836912e37aef546029e48c8749834735a6b9dad",
"type": "github" "type": "github"
}, },
"original": { "original": {

24
frontend/.gitignore vendored
View File

@@ -1,24 +0,0 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules
dist
dist-ssr
*.local
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

View File

@@ -1,73 +0,0 @@
# React + TypeScript + Vite
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
Currently, two official plugins are available:
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) (or [oxc](https://oxc.rs) when used in [rolldown-vite](https://vite.dev/guide/rolldown)) for Fast Refresh
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
## React Compiler
The React Compiler is not enabled on this template because of its impact on dev & build performances. To add it, see [this documentation](https://react.dev/learn/react-compiler/installation).
## Expanding the ESLint configuration
If you are developing a production application, we recommend updating the configuration to enable type-aware lint rules:
```js
export default defineConfig([
globalIgnores(['dist']),
{
files: ['**/*.{ts,tsx}'],
extends: [
// Other configs...
// Remove tseslint.configs.recommended and replace with this
tseslint.configs.recommendedTypeChecked,
// Alternatively, use this for stricter rules
tseslint.configs.strictTypeChecked,
// Optionally, add this for stylistic rules
tseslint.configs.stylisticTypeChecked,
// Other configs...
],
languageOptions: {
parserOptions: {
project: ['./tsconfig.node.json', './tsconfig.app.json'],
tsconfigRootDir: import.meta.dirname,
},
// other options...
},
},
])
```
You can also install [eslint-plugin-react-x](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-x) and [eslint-plugin-react-dom](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-dom) for React-specific lint rules:
```js
// eslint.config.js
import reactX from 'eslint-plugin-react-x'
import reactDom from 'eslint-plugin-react-dom'
export default defineConfig([
globalIgnores(['dist']),
{
files: ['**/*.{ts,tsx}'],
extends: [
// Other configs...
// Enable lint rules for React
reactX.configs['recommended-typescript'],
// Enable lint rules for React DOM
reactDom.configs.recommended,
],
languageOptions: {
parserOptions: {
project: ['./tsconfig.node.json', './tsconfig.app.json'],
tsconfigRootDir: import.meta.dirname,
},
// other options...
},
},
])
```

View File

@@ -1,23 +0,0 @@
import js from '@eslint/js'
import globals from 'globals'
import reactHooks from 'eslint-plugin-react-hooks'
import reactRefresh from 'eslint-plugin-react-refresh'
import tseslint from 'typescript-eslint'
import { defineConfig, globalIgnores } from 'eslint/config'
export default defineConfig([
globalIgnores(['dist']),
{
files: ['**/*.{ts,tsx}'],
extends: [
js.configs.recommended,
tseslint.configs.recommended,
reactHooks.configs.flat.recommended,
reactRefresh.configs.vite,
],
languageOptions: {
ecmaVersion: 2020,
globals: globals.browser,
},
},
])

View File

@@ -1,13 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>frontend</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

File diff suppressed because it is too large Load Diff

View File

@@ -1,31 +0,0 @@
{
"name": "frontend",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc -b && vite build",
"lint": "eslint .",
"preview": "vite preview"
},
"dependencies": {
"react": "^19.2.0",
"react-dom": "^19.2.0",
"react-router-dom": "^7.12.0"
},
"devDependencies": {
"@eslint/js": "^9.39.1",
"@types/node": "^24.10.1",
"@types/react": "^19.2.5",
"@types/react-dom": "^19.2.3",
"@vitejs/plugin-react": "^5.1.1",
"eslint": "^9.39.1",
"eslint-plugin-react-hooks": "^7.0.1",
"eslint-plugin-react-refresh": "^0.4.24",
"globals": "^16.5.0",
"typescript": "~5.9.3",
"typescript-eslint": "^8.46.4",
"vite": "^7.2.4"
}
}

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>

Before

Width:  |  Height:  |  Size: 1.5 KiB

View File

@@ -1,654 +0,0 @@
* {
box-sizing: border-box;
}
body {
margin: 0;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
background: var(--color-bg);
color: var(--color-text);
}
.app {
max-width: 1000px;
margin: 0 auto;
padding: 20px;
}
nav {
display: flex;
align-items: center;
gap: 20px;
padding: 15px 0;
border-bottom: 1px solid var(--color-border);
margin-bottom: 20px;
}
.theme-toggle {
margin-left: auto;
}
nav a {
color: var(--color-primary);
text-decoration: none;
font-weight: 500;
}
nav a:hover {
text-decoration: underline;
}
main {
background: var(--color-bg-card);
padding: 20px;
border-radius: 8px;
box-shadow: var(--shadow);
}
.header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 20px;
}
.header h1 {
margin: 0;
}
.btn {
display: inline-block;
padding: 8px 16px;
border: 1px solid var(--color-border);
border-radius: 4px;
background: var(--color-bg-card);
color: var(--color-text);
text-decoration: none;
cursor: pointer;
font-size: 14px;
margin-left: 8px;
}
.btn:hover {
background: var(--color-bg-hover);
}
.btn-primary {
background: var(--color-primary);
border-color: var(--color-primary);
color: white;
}
.btn-primary:hover {
background: var(--color-primary-hover);
}
.btn-danger {
background: var(--color-danger);
border-color: var(--color-danger);
color: white;
}
.btn-danger:hover {
background: var(--color-danger-hover);
}
.btn-small {
padding: 4px 8px;
font-size: 12px;
}
.btn:disabled {
opacity: 0.6;
cursor: not-allowed;
}
table {
width: 100%;
border-collapse: collapse;
}
th,
td {
padding: 12px;
text-align: left;
border-bottom: 1px solid var(--color-border-light);
}
th {
font-weight: 600;
background: var(--color-bg-muted);
}
tr:hover {
background: var(--color-bg-muted);
}
.error {
background: var(--color-bg-error);
color: var(--color-text-error);
padding: 10px;
border-radius: 4px;
margin-bottom: 20px;
}
.info-grid {
display: grid;
grid-template-columns: repeat(auto-fill, minmax(200px, 1fr));
gap: 10px;
margin-bottom: 20px;
}
.section {
margin-top: 30px;
padding-top: 20px;
border-top: 1px solid var(--color-border-light);
}
.section h3 {
margin-top: 0;
margin-bottom: 15px;
}
.section h4 {
margin: 15px 0 10px;
font-size: 14px;
color: var(--color-text-muted);
}
.section ul {
list-style: none;
padding: 0;
margin: 0;
}
.section li {
display: flex;
align-items: center;
gap: 10px;
padding: 8px 0;
border-bottom: 1px solid var(--color-border-lighter);
}
.tag {
display: inline-block;
background: var(--color-tag-bg);
padding: 2px 8px;
border-radius: 12px;
font-size: 12px;
color: var(--color-text-muted);
}
.add-form {
display: flex;
gap: 10px;
margin-top: 15px;
flex-wrap: wrap;
}
.add-form select,
.add-form input {
padding: 8px;
border: 1px solid var(--color-border);
border-radius: 4px;
min-width: 200px;
background: var(--color-bg-card);
color: var(--color-text);
}
.form-group {
margin-bottom: 20px;
}
.form-group label {
display: block;
font-weight: 500;
margin-bottom: 5px;
}
.form-group input,
.form-group textarea,
.form-group select {
width: 100%;
padding: 10px;
border: 1px solid var(--color-border);
border-radius: 4px;
font-size: 14px;
background: var(--color-bg-card);
color: var(--color-text);
}
.form-group textarea {
resize: vertical;
}
.form-row {
display: grid;
grid-template-columns: 1fr 1fr;
gap: 20px;
}
.checkbox-group {
display: flex;
flex-wrap: wrap;
gap: 15px;
}
.checkbox-label {
display: flex;
align-items: center;
gap: 5px;
cursor: pointer;
}
.form-actions {
display: flex;
gap: 10px;
margin-top: 30px;
padding-top: 20px;
border-top: 1px solid var(--color-border-light);
}
.need-list .header {
margin-bottom: 20px;
}
.need-form {
background: var(--color-bg-muted);
padding: 20px;
border-radius: 4px;
margin-bottom: 20px;
}
.need-items {
list-style: none;
padding: 0;
}
.need-items li {
display: flex;
justify-content: space-between;
align-items: flex-start;
padding: 15px;
border: 1px solid var(--color-border-light);
border-radius: 4px;
margin-bottom: 10px;
}
.need-info p {
margin: 5px 0 0;
color: var(--color-text-muted);
font-size: 14px;
}
a {
color: var(--color-primary);
}
a:hover {
text-decoration: underline;
}
/* Graph styles */
.graph-container {
width: 100%;
}
.graph-hint {
color: var(--color-text-muted);
font-size: 14px;
margin-bottom: 15px;
}
.selected-info {
margin-top: 15px;
padding: 15px;
background: var(--color-bg-muted);
border-radius: 8px;
}
.selected-info h3 {
margin: 0 0 10px;
}
.selected-info p {
margin: 5px 0;
color: var(--color-text-muted);
}
.legend {
margin-top: 20px;
padding: 15px;
background: var(--color-bg-muted);
border-radius: 8px;
}
.legend h4 {
margin: 0 0 10px;
font-size: 14px;
}
.legend-items {
display: flex;
flex-wrap: wrap;
gap: 15px;
}
.legend-item {
display: flex;
align-items: center;
gap: 8px;
font-size: 12px;
color: var(--color-text-muted);
}
.legend-line {
width: 30px;
border-radius: 2px;
}
/* Weight control styles */
.weight-control {
display: flex;
align-items: center;
gap: 8px;
font-size: 12px;
color: var(--color-text-muted);
}
.weight-control input[type="range"] {
width: 80px;
cursor: pointer;
}
.weight-value {
min-width: 20px;
text-align: center;
font-weight: 600;
}
.weight-display {
font-size: 12px;
color: var(--color-text-muted);
margin-left: auto;
}
/* ID Card Styles */
.id-card {
width: 100%;
}
.id-card-inner {
background: linear-gradient(135deg, #0a0a0f 0%, #1a1a2e 50%, #0a0a0f 100%);
background-image:
radial-gradient(white 1px, transparent 1px),
linear-gradient(135deg, #0a0a0f 0%, #1a1a2e 50%, #0a0a0f 100%);
background-size: 50px 50px, 100% 100%;
background-position: 0 0, 0 0;
color: #fff;
border-radius: 12px;
padding: 25px;
min-height: 500px;
position: relative;
overflow: hidden;
}
.id-card-header {
display: flex;
justify-content: space-between;
align-items: flex-start;
margin-bottom: 15px;
}
.id-card-header-left {
flex: 1;
}
.id-card-header-right {
display: flex;
flex-direction: column;
align-items: flex-end;
gap: 10px;
}
.id-card-title {
font-size: 2.5rem;
font-weight: 700;
margin: 0;
color: #fff;
text-shadow: 2px 2px 4px rgba(0,0,0,0.5);
}
.id-profile-pic {
width: 80px;
height: 80px;
border-radius: 8px;
object-fit: cover;
border: 2px solid rgba(255,255,255,0.3);
}
.id-profile-placeholder {
width: 80px;
height: 80px;
border-radius: 8px;
background: linear-gradient(135deg, #4ecdc4 0%, #44a8a0 100%);
display: flex;
align-items: center;
justify-content: center;
border: 2px solid rgba(255,255,255,0.3);
}
.id-profile-placeholder span {
font-size: 2rem;
font-weight: 700;
color: #fff;
text-shadow: 1px 1px 2px rgba(0,0,0,0.3);
}
.id-card-actions {
display: flex;
gap: 8px;
}
.id-card-actions .btn {
background: rgba(255,255,255,0.1);
border-color: rgba(255,255,255,0.3);
color: #fff;
}
.id-card-actions .btn:hover {
background: rgba(255,255,255,0.2);
}
.id-card-body {
display: grid;
grid-template-columns: 1fr 1.5fr;
gap: 30px;
}
.id-card-left {
display: flex;
flex-direction: column;
gap: 8px;
}
.id-field {
font-size: 1rem;
line-height: 1.4;
}
.id-field-block {
margin-top: 15px;
font-size: 0.95rem;
line-height: 1.5;
}
.id-label {
color: #4ecdc4;
font-weight: 500;
}
.id-card-right {
display: flex;
flex-direction: column;
gap: 20px;
}
.id-bio {
font-size: 0.9rem;
line-height: 1.6;
color: #e0e0e0;
}
.id-relationships {
margin-top: 10px;
}
.id-section-title {
font-size: 1.5rem;
margin: 0 0 15px;
color: #fff;
border-bottom: 1px solid rgba(255,255,255,0.2);
padding-bottom: 8px;
}
.id-rel-group {
margin-bottom: 12px;
font-size: 0.9rem;
line-height: 1.6;
}
.id-rel-label {
color: #a0a0a0;
}
.id-rel-group a {
color: #4ecdc4;
text-decoration: none;
}
.id-rel-group a:hover {
text-decoration: underline;
}
.id-rel-type {
color: #888;
font-size: 0.85em;
}
.id-card-warnings {
margin-top: 30px;
padding-top: 20px;
border-top: 1px solid rgba(255,255,255,0.2);
display: flex;
flex-wrap: wrap;
gap: 20px;
}
.id-warning {
display: flex;
align-items: center;
gap: 8px;
font-size: 0.9rem;
color: #ff6b6b;
}
.warning-dot {
width: 8px;
height: 8px;
background: #ff6b6b;
border-radius: 50%;
flex-shrink: 0;
}
.warning-desc {
color: #ccc;
}
/* Management section */
.id-card-manage {
margin-top: 20px;
background: var(--color-bg-muted);
border-radius: 8px;
padding: 15px;
}
.id-card-manage summary {
cursor: pointer;
font-weight: 600;
font-size: 1.1rem;
padding: 5px 0;
}
.id-card-manage[open] summary {
margin-bottom: 15px;
border-bottom: 1px solid var(--color-border-light);
padding-bottom: 10px;
}
.manage-section {
margin-bottom: 25px;
}
.manage-section h3 {
margin: 0 0 15px;
font-size: 1rem;
}
.manage-relationships {
display: flex;
flex-direction: column;
gap: 10px;
margin-bottom: 15px;
}
.manage-rel-item {
display: flex;
align-items: center;
gap: 12px;
padding: 10px;
background: var(--color-bg-card);
border-radius: 6px;
flex-wrap: wrap;
}
.manage-rel-item a {
font-weight: 500;
min-width: 120px;
}
.manage-needs-list {
list-style: none;
padding: 0;
margin: 0 0 15px;
}
.manage-needs-list li {
display: flex;
align-items: center;
gap: 12px;
padding: 10px;
background: var(--color-bg-card);
border-radius: 6px;
margin-bottom: 8px;
}
.manage-needs-list li .btn {
margin-left: auto;
}
/* Responsive adjustments */
@media (max-width: 768px) {
.id-card-body {
grid-template-columns: 1fr;
}
.id-card-title {
font-size: 1.8rem;
}
.id-card-header {
flex-direction: column;
gap: 15px;
}
}

View File

@@ -1,50 +0,0 @@
import { useEffect, useState } from "react";
import { Link, Route, Routes } from "react-router-dom";
import { ContactDetail } from "./components/ContactDetail";
import { ContactForm } from "./components/ContactForm";
import { ContactList } from "./components/ContactList";
import { NeedList } from "./components/NeedList";
import { RelationshipGraph } from "./components/RelationshipGraph";
import "./App.css";
function App() {
const [theme, setTheme] = useState<"light" | "dark">(() => {
return (localStorage.getItem("theme") as "light" | "dark") || "light";
});
useEffect(() => {
document.documentElement.setAttribute("data-theme", theme);
localStorage.setItem("theme", theme);
}, [theme]);
const toggleTheme = () => {
setTheme((prev) => (prev === "light" ? "dark" : "light"));
};
return (
<div className="app">
<nav>
<Link to="/contacts">Contacts</Link>
<Link to="/graph">Graph</Link>
<Link to="/needs">Needs</Link>
<button className="btn btn-small theme-toggle" onClick={toggleTheme}>
{theme === "light" ? "Dark" : "Light"}
</button>
</nav>
<main>
<Routes>
<Route path="/" element={<ContactList />} />
<Route path="/contacts" element={<ContactList />} />
<Route path="/contacts/new" element={<ContactForm />} />
<Route path="/contacts/:id" element={<ContactDetail />} />
<Route path="/contacts/:id/edit" element={<ContactForm />} />
<Route path="/graph" element={<RelationshipGraph />} />
<Route path="/needs" element={<NeedList />} />
</Routes>
</main>
</div>
);
}
export default App;

View File

@@ -1,105 +0,0 @@
import type {
Contact,
ContactCreate,
ContactListItem,
ContactRelationship,
ContactRelationshipCreate,
ContactRelationshipUpdate,
ContactUpdate,
GraphData,
Need,
NeedCreate,
} from "../types";
const API_BASE = "";
async function request<T>(
endpoint: string,
options?: RequestInit
): Promise<T> {
const response = await fetch(`${API_BASE}${endpoint}`, {
...options,
headers: {
"Content-Type": "application/json",
...options?.headers,
},
});
if (!response.ok) {
const error = await response.json().catch(() => ({}));
throw new Error(error.detail || `HTTP ${response.status}`);
}
return response.json();
}
export const api = {
// Needs
needs: {
list: () => request<Need[]>("/api/needs"),
get: (id: number) => request<Need>(`/api/needs/${id}`),
create: (data: NeedCreate) =>
request<Need>("/api/needs", {
method: "POST",
body: JSON.stringify(data),
}),
delete: (id: number) =>
request<{ deleted: boolean }>(`/api/needs/${id}`, { method: "DELETE" }),
},
// Contacts
contacts: {
list: (skip = 0, limit = 100) =>
request<ContactListItem[]>(`/api/contacts?skip=${skip}&limit=${limit}`),
get: (id: number) => request<Contact>(`/api/contacts/${id}`),
create: (data: ContactCreate) =>
request<Contact>("/api/contacts", {
method: "POST",
body: JSON.stringify(data),
}),
update: (id: number, data: ContactUpdate) =>
request<Contact>(`/api/contacts/${id}`, {
method: "PATCH",
body: JSON.stringify(data),
}),
delete: (id: number) =>
request<{ deleted: boolean }>(`/api/contacts/${id}`, { method: "DELETE" }),
// Contact-Need relationships
addNeed: (contactId: number, needId: number) =>
request<{ added: boolean }>(`/api/contacts/${contactId}/needs/${needId}`, {
method: "POST",
}),
removeNeed: (contactId: number, needId: number) =>
request<{ removed: boolean }>(`/api/contacts/${contactId}/needs/${needId}`, {
method: "DELETE",
}),
// Contact-Contact relationships
getRelationships: (contactId: number) =>
request<ContactRelationship[]>(`/api/contacts/${contactId}/relationships`),
addRelationship: (contactId: number, data: ContactRelationshipCreate) =>
request<ContactRelationship>(`/api/contacts/${contactId}/relationships`, {
method: "POST",
body: JSON.stringify(data),
}),
updateRelationship: (contactId: number, relatedContactId: number, data: ContactRelationshipUpdate) =>
request<ContactRelationship>(
`/api/contacts/${contactId}/relationships/${relatedContactId}`,
{
method: "PATCH",
body: JSON.stringify(data),
}
),
removeRelationship: (contactId: number, relatedContactId: number) =>
request<{ deleted: boolean }>(
`/api/contacts/${contactId}/relationships/${relatedContactId}`,
{ method: "DELETE" }
),
},
// Graph
graph: {
get: () => request<GraphData>("/api/graph"),
},
};

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 
13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 
44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>

Before

Width:  |  Height:  |  Size: 4.0 KiB

View File

@@ -1,456 +0,0 @@
import { useEffect, useState } from "react";
import { Link, useParams } from "react-router-dom";
import { api } from "../api/client";
import type { Contact, ContactListItem, Need, RelationshipTypeValue } from "../types";
import { RELATIONSHIP_TYPES } from "../types";
/**
 * Detail view for a single contact, rendered as an "ID card".
 * Loads the contact, the full needs list, and all other contacts on mount,
 * then lets the user manage the contact's needs/warnings and relationships
 * inline (each mutation re-fetches the contact for fresh server state).
 */
export function ContactDetail() {
  const { id } = useParams<{ id: string }>();
  const [contact, setContact] = useState<Contact | null>(null);
  const [allNeeds, setAllNeeds] = useState<Need[]>([]);
  const [allContacts, setAllContacts] = useState<ContactListItem[]>([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  // Selector state for the "add" forms; "" means nothing selected yet.
  const [newNeedId, setNewNeedId] = useState<number | "">("");
  const [newRelContactId, setNewRelContactId] = useState<number | "">("");
  const [newRelType, setNewRelType] = useState<RelationshipTypeValue | "">("");
  // Load contact + supporting lists whenever the route id changes.
  useEffect(() => {
    if (!id) return;
    Promise.all([
      api.contacts.get(Number(id)),
      api.needs.list(),
      api.contacts.list(),
    ])
      .then(([c, n, contacts]) => {
        setContact(c);
        setAllNeeds(n);
        // Exclude this contact itself from the relationship picker list.
        setAllContacts(contacts.filter((ct) => ct.id !== Number(id)));
      })
      .catch((err) => setError(err.message))
      .finally(() => setLoading(false));
  }, [id]);
  // Attach the selected need, then re-fetch the contact and reset the picker.
  const handleAddNeed = async () => {
    if (!contact || newNeedId === "") return;
    try {
      await api.contacts.addNeed(contact.id, Number(newNeedId));
      const updated = await api.contacts.get(contact.id);
      setContact(updated);
      setNewNeedId("");
    } catch (err) {
      setError(err instanceof Error ? err.message : "Failed to add need");
    }
  };
  // Detach a need from the contact and refresh.
  const handleRemoveNeed = async (needId: number) => {
    if (!contact) return;
    try {
      await api.contacts.removeNeed(contact.id, needId);
      const updated = await api.contacts.get(contact.id);
      setContact(updated);
    } catch (err) {
      setError(err instanceof Error ? err.message : "Failed to remove need");
    }
  };
  // Create a relationship to the selected contact with the selected type.
  const handleAddRelationship = async () => {
    if (!contact || newRelContactId === "" || newRelType === "") return;
    try {
      await api.contacts.addRelationship(contact.id, {
        related_contact_id: Number(newRelContactId),
        relationship_type: newRelType,
      });
      const updated = await api.contacts.get(contact.id);
      setContact(updated);
      setNewRelContactId("");
      setNewRelType("");
    } catch (err) {
      setError(
        err instanceof Error ? err.message : "Failed to add relationship"
      );
    }
  };
  // Delete the relationship to the given contact and refresh.
  const handleRemoveRelationship = async (relatedContactId: number) => {
    if (!contact) return;
    try {
      await api.contacts.removeRelationship(contact.id, relatedContactId);
      const updated = await api.contacts.get(contact.id);
      setContact(updated);
    } catch (err) {
      setError(
        err instanceof Error ? err.message : "Failed to remove relationship"
      );
    }
  };
  // Persist a new closeness weight (driven by the range slider below).
  const handleUpdateWeight = async (relatedContactId: number, newWeight: number) => {
    if (!contact) return;
    try {
      await api.contacts.updateRelationship(contact.id, relatedContactId, {
        closeness_weight: newWeight,
      });
      const updated = await api.contacts.get(contact.id);
      setContact(updated);
    } catch (err) {
      setError(
        err instanceof Error ? err.message : "Failed to update weight"
      );
    }
  };
  if (loading) return <div>Loading...</div>;
  if (error) return <div className="error">Error: {error}</div>;
  if (!contact) return <div>Contact not found</div>;
  // Needs not yet attached to this contact (offered in the add-need picker).
  const availableNeeds = allNeeds.filter(
    (n) => !contact.needs.some((cn) => cn.id === n.id)
  );
  // NOTE(review): allContacts excludes the current contact, so self-references
  // fall back to the "Contact #N" label — confirm that is acceptable.
  const getContactName = (contactId: number) => {
    const c = allContacts.find((ct) => ct.id === contactId);
    return c?.name || `Contact #${contactId}`;
  };
  // Map a relationship-type value to its human-readable display name.
  const getRelationshipDisplayName = (type: string) => {
    const rt = RELATIONSHIP_TYPES.find((r) => r.value === type);
    return rt?.displayName || type;
  };
  // Group relationships by category for display
  const groupRelationships = () => {
    const familial: typeof contact.related_to = [];
    const friends: typeof contact.related_to = [];
    const partners: typeof contact.related_to = [];
    const professional: typeof contact.related_to = [];
    const other: typeof contact.related_to = [];
    const familialTypes = ['parent', 'child', 'sibling', 'grandparent', 'grandchild', 'aunt_uncle', 'niece_nephew', 'cousin', 'in_law'];
    const friendTypes = ['best_friend', 'close_friend', 'friend', 'acquaintance', 'neighbor'];
    const partnerTypes = ['spouse', 'partner'];
    const professionalTypes = ['mentor', 'mentee', 'business_partner', 'colleague', 'manager', 'direct_report', 'client'];
    for (const rel of contact.related_to) {
      if (familialTypes.includes(rel.relationship_type)) {
        familial.push(rel);
      } else if (friendTypes.includes(rel.relationship_type)) {
        friends.push(rel);
      } else if (partnerTypes.includes(rel.relationship_type)) {
        partners.push(rel);
      } else if (professionalTypes.includes(rel.relationship_type)) {
        professional.push(rel);
      } else {
        other.push(rel);
      }
    }
    return { familial, friends, partners, professional, other };
  };
  const relationshipGroups = groupRelationships();
  // NOTE(review): fields gated with `&&` (e.g. contact.age) are hidden when
  // falsy, so an age of 0 would not render — confirm intended.
  return (
    <div className="id-card">
      <div className="id-card-inner">
        {/* Header with name and profile pic */}
        <div className="id-card-header">
          <div className="id-card-header-left">
            <h1 className="id-card-title">I.D.: {contact.name}</h1>
          </div>
          <div className="id-card-header-right">
            {contact.profile_pic ? (
              <img
                src={contact.profile_pic}
                alt={`${contact.name}'s profile`}
                className="id-profile-pic"
              />
            ) : (
              <div className="id-profile-placeholder">
                <span>{contact.name.charAt(0).toUpperCase()}</span>
              </div>
            )}
            <div className="id-card-actions">
              <Link to={`/contacts/${contact.id}/edit`} className="btn btn-small">
                Edit
              </Link>
              <Link to="/contacts" className="btn btn-small">
                Back
              </Link>
            </div>
          </div>
        </div>
        <div className="id-card-body">
          {/* Left column - Basic info */}
          <div className="id-card-left">
            {contact.legal_name && (
              <div className="id-field">Legal name: {contact.legal_name}</div>
            )}
            {contact.suffix && (
              <div className="id-field">Suffix: {contact.suffix}</div>
            )}
            {contact.gender && (
              <div className="id-field">Gender: {contact.gender}</div>
            )}
            {contact.age && (
              <div className="id-field">Age: {contact.age}</div>
            )}
            {contact.current_job && (
              <div className="id-field">Job: {contact.current_job}</div>
            )}
            {contact.social_structure_style && (
              <div className="id-field">Social style: {contact.social_structure_style}</div>
            )}
            {contact.self_sufficiency_score !== null && (
              <div className="id-field">Self-Sufficiency: {contact.self_sufficiency_score}</div>
            )}
            {contact.timezone && (
              <div className="id-field">Timezone: {contact.timezone}</div>
            )}
            {contact.safe_conversation_starters && (
              <div className="id-field-block">
                <span className="id-label">Safe con starters:</span> {contact.safe_conversation_starters}
              </div>
            )}
            {contact.topics_to_avoid && (
              <div className="id-field-block">
                <span className="id-label">Topics to avoid:</span> {contact.topics_to_avoid}
              </div>
            )}
            {contact.goals && (
              <div className="id-field-block">
                <span className="id-label">Goals:</span> {contact.goals}
              </div>
            )}
          </div>
          {/* Right column - Bio and Relationships */}
          <div className="id-card-right">
            {contact.bio && (
              <div className="id-bio">
                <span className="id-label">Bio:</span> {contact.bio}
              </div>
            )}
            <div className="id-relationships">
              <h2 className="id-section-title">Relationships</h2>
              {relationshipGroups.familial.length > 0 && (
                <div className="id-rel-group">
                  <span className="id-rel-label">Familial:</span>{" "}
                  {relationshipGroups.familial.map((rel, i) => (
                    <span key={rel.related_contact_id}>
                      <Link to={`/contacts/${rel.related_contact_id}`}>
                        {getContactName(rel.related_contact_id)}
                      </Link>
                      <span className="id-rel-type">({getRelationshipDisplayName(rel.relationship_type)})</span>
                      {i < relationshipGroups.familial.length - 1 && ", "}
                    </span>
                  ))}
                </div>
              )}
              {relationshipGroups.partners.length > 0 && (
                <div className="id-rel-group">
                  <span className="id-rel-label">Partners:</span>{" "}
                  {relationshipGroups.partners.map((rel, i) => (
                    <span key={rel.related_contact_id}>
                      <Link to={`/contacts/${rel.related_contact_id}`}>
                        {getContactName(rel.related_contact_id)}
                      </Link>
                      {i < relationshipGroups.partners.length - 1 && ", "}
                    </span>
                  ))}
                </div>
              )}
              {relationshipGroups.friends.length > 0 && (
                <div className="id-rel-group">
                  <span className="id-rel-label">Friends:</span>{" "}
                  {relationshipGroups.friends.map((rel, i) => (
                    <span key={rel.related_contact_id}>
                      <Link to={`/contacts/${rel.related_contact_id}`}>
                        {getContactName(rel.related_contact_id)}
                      </Link>
                      {i < relationshipGroups.friends.length - 1 && ", "}
                    </span>
                  ))}
                </div>
              )}
              {relationshipGroups.professional.length > 0 && (
                <div className="id-rel-group">
                  <span className="id-rel-label">Professional:</span>{" "}
                  {relationshipGroups.professional.map((rel, i) => (
                    <span key={rel.related_contact_id}>
                      <Link to={`/contacts/${rel.related_contact_id}`}>
                        {getContactName(rel.related_contact_id)}
                      </Link>
                      <span className="id-rel-type">({getRelationshipDisplayName(rel.relationship_type)})</span>
                      {i < relationshipGroups.professional.length - 1 && ", "}
                    </span>
                  ))}
                </div>
              )}
              {relationshipGroups.other.length > 0 && (
                <div className="id-rel-group">
                  <span className="id-rel-label">Other:</span>{" "}
                  {relationshipGroups.other.map((rel, i) => (
                    <span key={rel.related_contact_id}>
                      <Link to={`/contacts/${rel.related_contact_id}`}>
                        {getContactName(rel.related_contact_id)}
                      </Link>
                      <span className="id-rel-type">({getRelationshipDisplayName(rel.relationship_type)})</span>
                      {i < relationshipGroups.other.length - 1 && ", "}
                    </span>
                  ))}
                </div>
              )}
              {contact.related_from.length > 0 && (
                <div className="id-rel-group">
                  <span className="id-rel-label">Known by:</span>{" "}
                  {contact.related_from.map((rel, i) => (
                    <span key={rel.contact_id}>
                      <Link to={`/contacts/${rel.contact_id}`}>
                        {getContactName(rel.contact_id)}
                      </Link>
                      {i < contact.related_from.length - 1 && ", "}
                    </span>
                  ))}
                </div>
              )}
            </div>
          </div>
        </div>
        {/* Needs/Warnings at bottom */}
        {contact.needs.length > 0 && (
          <div className="id-card-warnings">
            {contact.needs.map((need) => (
              <div key={need.id} className="id-warning">
                <span className="warning-dot"></span>
                Warning: {need.name}
                {need.description && <span className="warning-desc"> - {need.description}</span>}
              </div>
            ))}
          </div>
        )}
      </div>
      {/* Management section (expandable) */}
      <details className="id-card-manage">
        <summary>Manage Contact</summary>
        <div className="manage-section">
          <h3>Manage Relationships</h3>
          <div className="manage-relationships">
            {contact.related_to.map((rel) => (
              <div key={rel.related_contact_id} className="manage-rel-item">
                <Link to={`/contacts/${rel.related_contact_id}`}>
                  {getContactName(rel.related_contact_id)}
                </Link>
                <span className="tag">{getRelationshipDisplayName(rel.relationship_type)}</span>
                <label className="weight-control">
                  <span>Closeness:</span>
                  <input
                    type="range"
                    min="1"
                    max="10"
                    value={rel.closeness_weight}
                    onChange={(e) => handleUpdateWeight(rel.related_contact_id, Number(e.target.value))}
                  />
                  <span className="weight-value">{rel.closeness_weight}</span>
                </label>
                <button
                  onClick={() => handleRemoveRelationship(rel.related_contact_id)}
                  className="btn btn-small btn-danger"
                >
                  Remove
                </button>
              </div>
            ))}
          </div>
          {allContacts.length > 0 && (
            <div className="add-form">
              <select
                value={newRelContactId}
                onChange={(e) =>
                  setNewRelContactId(
                    e.target.value ? Number(e.target.value) : ""
                  )
                }
              >
                <option value="">Select contact...</option>
                {allContacts.map((c) => (
                  <option key={c.id} value={c.id}>
                    {c.name}
                  </option>
                ))}
              </select>
              <select
                value={newRelType}
                onChange={(e) => setNewRelType(e.target.value as RelationshipTypeValue | "")}
              >
                <option value="">Select relationship type...</option>
                {RELATIONSHIP_TYPES.map((rt) => (
                  <option key={rt.value} value={rt.value}>
                    {rt.displayName}
                  </option>
                ))}
              </select>
              <button onClick={handleAddRelationship} className="btn btn-primary">
                Add Relationship
              </button>
            </div>
          )}
        </div>
        <div className="manage-section">
          <h3>Manage Needs/Warnings</h3>
          <ul className="manage-needs-list">
            {contact.needs.map((need) => (
              <li key={need.id}>
                <strong>{need.name}</strong>
                {need.description && <span> - {need.description}</span>}
                <button
                  onClick={() => handleRemoveNeed(need.id)}
                  className="btn btn-small btn-danger"
                >
                  Remove
                </button>
              </li>
            ))}
          </ul>
          {availableNeeds.length > 0 && (
            <div className="add-form">
              <select
                value={newNeedId}
                onChange={(e) =>
                  setNewNeedId(e.target.value ? Number(e.target.value) : "")
                }
              >
                <option value="">Select a need...</option>
                {availableNeeds.map((n) => (
                  <option key={n.id} value={n.id}>
                    {n.name}
                  </option>
                ))}
              </select>
              <button onClick={handleAddNeed} className="btn btn-primary">
                Add Need
              </button>
            </div>
          )}
        </div>
      </details>
    </div>
  );
}

View File

@@ -1,325 +0,0 @@
import { useEffect, useState } from "react";
import { useNavigate, useParams } from "react-router-dom";
import { api } from "../api/client";
import type { ContactCreate, Need } from "../types";
/**
 * Create/edit form for a contact. Edit mode is inferred from the presence of
 * a route `:id`; in that case the existing contact is loaded to pre-fill the
 * form. On submit, navigates to the contact's detail page.
 */
export function ContactForm() {
  const { id } = useParams<{ id: string }>();
  const navigate = useNavigate();
  const isEdit = Boolean(id);
  const [allNeeds, setAllNeeds] = useState<Need[]>([]);
  // Only show the loading state when we have an existing contact to fetch.
  const [loading, setLoading] = useState(isEdit);
  const [error, setError] = useState<string | null>(null);
  const [submitting, setSubmitting] = useState(false);
  // Single controlled-form state object; null means "field not provided".
  const [form, setForm] = useState<ContactCreate>({
    name: "",
    age: null,
    bio: null,
    current_job: null,
    gender: null,
    goals: null,
    legal_name: null,
    profile_pic: null,
    safe_conversation_starters: null,
    self_sufficiency_score: null,
    social_structure_style: null,
    ssn: null,
    suffix: null,
    timezone: null,
    topics_to_avoid: null,
    need_ids: [],
  });
  // Load the needs catalog, and in edit mode the contact being edited.
  useEffect(() => {
    const loadData = async () => {
      try {
        const needs = await api.needs.list();
        setAllNeeds(needs);
        if (id) {
          const contact = await api.contacts.get(Number(id));
          setForm({
            name: contact.name,
            age: contact.age,
            bio: contact.bio,
            current_job: contact.current_job,
            gender: contact.gender,
            goals: contact.goals,
            legal_name: contact.legal_name,
            profile_pic: contact.profile_pic,
            safe_conversation_starters: contact.safe_conversation_starters,
            self_sufficiency_score: contact.self_sufficiency_score,
            social_structure_style: contact.social_structure_style,
            ssn: contact.ssn,
            suffix: contact.suffix,
            timezone: contact.timezone,
            topics_to_avoid: contact.topics_to_avoid,
            need_ids: contact.needs.map((n) => n.id),
          });
        }
      } catch (err) {
        setError(err instanceof Error ? err.message : "Failed to load data");
      } finally {
        setLoading(false);
      }
    };
    loadData();
  }, [id]);
  // Create or update depending on mode; `submitting` guards the save button.
  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault();
    setSubmitting(true);
    setError(null);
    try {
      if (isEdit) {
        await api.contacts.update(Number(id), form);
        navigate(`/contacts/${id}`);
      } else {
        const created = await api.contacts.create(form);
        navigate(`/contacts/${created.id}`);
      }
    } catch (err) {
      setError(err instanceof Error ? err.message : "Save failed");
      setSubmitting(false);
    }
  };
  // Type-safe single-field updater for the form state.
  const updateField = <K extends keyof ContactCreate>(
    field: K,
    value: ContactCreate[K]
  ) => {
    setForm((prev) => ({ ...prev, [field]: value }));
  };
  // Toggle membership of a need id in form.need_ids (checkbox handler).
  const toggleNeed = (needId: number) => {
    setForm((prev) => ({
      ...prev,
      need_ids: prev.need_ids?.includes(needId)
        ? prev.need_ids.filter((id) => id !== needId)
        : [...(prev.need_ids || []), needId],
    }));
  };
  if (loading) return <div>Loading...</div>;
  // Empty text inputs are normalized back to null via `e.target.value || null`.
  return (
    <div className="contact-form">
      <h1>{isEdit ? "Edit Contact" : "New Contact"}</h1>
      {error && <div className="error">{error}</div>}
      <form onSubmit={handleSubmit}>
        <div className="form-group">
          <label htmlFor="name">Name *</label>
          <input
            id="name"
            type="text"
            value={form.name}
            onChange={(e) => updateField("name", e.target.value)}
            required
          />
        </div>
        <div className="form-row">
          <div className="form-group">
            <label htmlFor="legal_name">Legal Name</label>
            <input
              id="legal_name"
              type="text"
              value={form.legal_name || ""}
              onChange={(e) =>
                updateField("legal_name", e.target.value || null)
              }
            />
          </div>
          <div className="form-group">
            <label htmlFor="suffix">Suffix</label>
            <input
              id="suffix"
              type="text"
              value={form.suffix || ""}
              onChange={(e) => updateField("suffix", e.target.value || null)}
            />
          </div>
        </div>
        <div className="form-row">
          <div className="form-group">
            <label htmlFor="age">Age</label>
            <input
              id="age"
              type="number"
              value={form.age ?? ""}
              onChange={(e) =>
                updateField("age", e.target.value ? Number(e.target.value) : null)
              }
            />
          </div>
          <div className="form-group">
            <label htmlFor="gender">Gender</label>
            <input
              id="gender"
              type="text"
              value={form.gender || ""}
              onChange={(e) => updateField("gender", e.target.value || null)}
            />
          </div>
        </div>
        <div className="form-group">
          <label htmlFor="current_job">Current Job</label>
          <input
            id="current_job"
            type="text"
            value={form.current_job || ""}
            onChange={(e) =>
              updateField("current_job", e.target.value || null)
            }
          />
        </div>
        <div className="form-group">
          <label htmlFor="timezone">Timezone</label>
          <input
            id="timezone"
            type="text"
            value={form.timezone || ""}
            onChange={(e) => updateField("timezone", e.target.value || null)}
          />
        </div>
        <div className="form-group">
          <label htmlFor="profile_pic">Profile Picture URL</label>
          <input
            id="profile_pic"
            type="url"
            placeholder="https://example.com/photo.jpg"
            value={form.profile_pic || ""}
            onChange={(e) => updateField("profile_pic", e.target.value || null)}
          />
        </div>
        <div className="form-group">
          <label htmlFor="bio">Bio</label>
          <textarea
            id="bio"
            value={form.bio || ""}
            onChange={(e) => updateField("bio", e.target.value || null)}
            rows={3}
          />
        </div>
        <div className="form-group">
          <label htmlFor="goals">Goals</label>
          <textarea
            id="goals"
            value={form.goals || ""}
            onChange={(e) => updateField("goals", e.target.value || null)}
            rows={3}
          />
        </div>
        <div className="form-group">
          <label htmlFor="social_structure_style">Social Structure Style</label>
          <input
            id="social_structure_style"
            type="text"
            value={form.social_structure_style || ""}
            onChange={(e) =>
              updateField("social_structure_style", e.target.value || null)
            }
          />
        </div>
        <div className="form-group">
          <label htmlFor="self_sufficiency_score">
            Self-Sufficiency Score (1-10)
          </label>
          <input
            id="self_sufficiency_score"
            type="number"
            min="1"
            max="10"
            value={form.self_sufficiency_score ?? ""}
            onChange={(e) =>
              updateField(
                "self_sufficiency_score",
                e.target.value ? Number(e.target.value) : null
              )
            }
          />
        </div>
        <div className="form-group">
          <label htmlFor="safe_conversation_starters">
            Safe Conversation Starters
          </label>
          <textarea
            id="safe_conversation_starters"
            value={form.safe_conversation_starters || ""}
            onChange={(e) =>
              updateField("safe_conversation_starters", e.target.value || null)
            }
            rows={2}
          />
        </div>
        <div className="form-group">
          <label htmlFor="topics_to_avoid">Topics to Avoid</label>
          <textarea
            id="topics_to_avoid"
            value={form.topics_to_avoid || ""}
            onChange={(e) =>
              updateField("topics_to_avoid", e.target.value || null)
            }
            rows={2}
          />
        </div>
        <div className="form-group">
          <label htmlFor="ssn">SSN</label>
          <input
            id="ssn"
            type="text"
            value={form.ssn || ""}
            onChange={(e) => updateField("ssn", e.target.value || null)}
          />
        </div>
        {allNeeds.length > 0 && (
          <div className="form-group">
            <label>Needs/Accommodations</label>
            <div className="checkbox-group">
              {allNeeds.map((need) => (
                <label key={need.id} className="checkbox-label">
                  <input
                    type="checkbox"
                    checked={form.need_ids?.includes(need.id) || false}
                    onChange={() => toggleNeed(need.id)}
                  />
                  {need.name}
                </label>
              ))}
            </div>
          </div>
        )}
        <div className="form-actions">
          <button type="submit" className="btn btn-primary" disabled={submitting}>
            {submitting ? "Saving..." : "Save"}
          </button>
          <button
            type="button"
            className="btn"
            onClick={() => navigate(isEdit ? `/contacts/${id}` : "/contacts")}
          >
            Cancel
          </button>
        </div>
      </form>
    </div>
  );
}

View File

@@ -1,79 +0,0 @@
import { useEffect, useState } from "react";
import { Link } from "react-router-dom";
import { api } from "../api/client";
import type { ContactListItem } from "../types";
/**
 * Table view of all contacts with links to detail/edit pages and a
 * confirm-guarded delete action per row.
 */
export function ContactList() {
  const [contacts, setContacts] = useState<ContactListItem[]>([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  // Fetch the contact list once on mount.
  useEffect(() => {
    api.contacts
      .list()
      .then(setContacts)
      .catch((err) => setError(err.message))
      .finally(() => setLoading(false));
  }, []);
  // Delete after a browser confirm(); on success, drop the row locally
  // instead of re-fetching the whole list.
  const handleDelete = async (id: number) => {
    if (!confirm("Delete this contact?")) return;
    try {
      await api.contacts.delete(id);
      setContacts((prev) => prev.filter((c) => c.id !== id));
    } catch (err) {
      setError(err instanceof Error ? err.message : "Delete failed");
    }
  };
  if (loading) return <div>Loading...</div>;
  if (error) return <div className="error">Error: {error}</div>;
  return (
    <div className="contact-list">
      <div className="header">
        <h1>Contacts</h1>
        <Link to="/contacts/new" className="btn btn-primary">
          Add Contact
        </Link>
      </div>
      {contacts.length === 0 ? (
        <p>No contacts yet.</p>
      ) : (
        <table>
          <thead>
            <tr>
              <th>Name</th>
              <th>Job</th>
              <th>Timezone</th>
              <th>Actions</th>
            </tr>
          </thead>
          <tbody>
            {contacts.map((contact) => (
              <tr key={contact.id}>
                <td>
                  <Link to={`/contacts/${contact.id}`}>{contact.name}</Link>
                </td>
                <td>{contact.current_job || "-"}</td>
                <td>{contact.timezone || "-"}</td>
                <td>
                  <Link to={`/contacts/${contact.id}/edit`} className="btn">
                    Edit
                  </Link>
                  <button
                    onClick={() => handleDelete(contact.id)}
                    className="btn btn-danger"
                  >
                    Delete
                  </button>
                </td>
              </tr>
            ))}
          </tbody>
        </table>
      )}
    </div>
  );
}

View File

@@ -1,117 +0,0 @@
import { useEffect, useState } from "react";
import { api } from "../api/client";
import type { Need, NeedCreate } from "../types";
/**
 * List of needs/accommodations with an inline toggleable create form and a
 * confirm-guarded delete per item. Created/deleted items update local state
 * directly (no re-fetch).
 */
export function NeedList() {
  const [needs, setNeeds] = useState<Need[]>([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const [showForm, setShowForm] = useState(false);
  const [form, setForm] = useState<NeedCreate>({ name: "", description: null });
  const [submitting, setSubmitting] = useState(false);
  // Fetch the needs catalog once on mount.
  useEffect(() => {
    api.needs
      .list()
      .then(setNeeds)
      .catch((err) => setError(err.message))
      .finally(() => setLoading(false));
  }, []);
  // Create a need; on success append it, reset and hide the form.
  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault();
    if (!form.name.trim()) return;
    setSubmitting(true);
    try {
      const created = await api.needs.create(form);
      setNeeds((prev) => [...prev, created]);
      setForm({ name: "", description: null });
      setShowForm(false);
    } catch (err) {
      setError(err instanceof Error ? err.message : "Create failed");
    } finally {
      setSubmitting(false);
    }
  };
  // Delete after a browser confirm(); drop the item from local state.
  const handleDelete = async (id: number) => {
    if (!confirm("Delete this need?")) return;
    try {
      await api.needs.delete(id);
      setNeeds((prev) => prev.filter((n) => n.id !== id));
    } catch (err) {
      setError(err instanceof Error ? err.message : "Delete failed");
    }
  };
  if (loading) return <div>Loading...</div>;
  return (
    <div className="need-list">
      <div className="header">
        <h1>Needs / Accommodations</h1>
        <button
          onClick={() => setShowForm(!showForm)}
          className="btn btn-primary"
        >
          {showForm ? "Cancel" : "Add Need"}
        </button>
      </div>
      {error && <div className="error">{error}</div>}
      {showForm && (
        <form onSubmit={handleSubmit} className="need-form">
          <div className="form-group">
            <label htmlFor="name">Name *</label>
            <input
              id="name"
              type="text"
              value={form.name}
              onChange={(e) => setForm({ ...form, name: e.target.value })}
              placeholder="e.g., Light Sensitive, ADHD"
              required
            />
          </div>
          <div className="form-group">
            <label htmlFor="description">Description</label>
            <textarea
              id="description"
              value={form.description || ""}
              onChange={(e) =>
                setForm({ ...form, description: e.target.value || null })
              }
              placeholder="Optional description..."
              rows={2}
            />
          </div>
          <button type="submit" className="btn btn-primary" disabled={submitting}>
            {submitting ? "Creating..." : "Create"}
          </button>
        </form>
      )}
      {needs.length === 0 ? (
        <p>No needs defined yet.</p>
      ) : (
        <ul className="need-items">
          {needs.map((need) => (
            <li key={need.id}>
              <div className="need-info">
                <strong>{need.name}</strong>
                {need.description && <p>{need.description}</p>}
              </div>
              <button
                onClick={() => handleDelete(need.id)}
                className="btn btn-danger"
              >
                Delete
              </button>
            </li>
          ))}
        </ul>
      )}
    </div>
  );
}

View File

@@ -1,330 +0,0 @@
import { useEffect, useRef, useState } from "react";
import { api } from "../api/client";
import type { GraphData, GraphEdge, GraphNode } from "../types";
import { RELATIONSHIP_TYPES } from "../types";
// A graph node augmented with mutable force-simulation state: canvas
// position (x, y) and per-tick velocity (vx, vy).
interface SimNode extends GraphNode {
  x: number;
  y: number;
  vx: number;
  vy: number;
}
// A graph edge with direct references to its resolved endpoint SimNodes,
// so the simulation/draw loops avoid id lookups per frame.
interface SimEdge extends GraphEdge {
  sourceNode: SimNode;
  targetNode: SimNode;
}
/**
 * Force-directed relationship graph drawn on a <canvas>.
 * Loads graph data once, runs a simple repulsion/spring/center-pull
 * simulation via requestAnimationFrame, and supports drag-to-reposition
 * and click-to-select via raw mouse events.
 */
export function RelationshipGraph() {
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const [data, setData] = useState<GraphData | null>(null);
  const [error, setError] = useState<string | null>(null);
  const [loading, setLoading] = useState(true);
  const [selectedNode, setSelectedNode] = useState<SimNode | null>(null);
  // Simulation state lives in refs (not state) so the animation loop can
  // mutate it every frame without triggering React re-renders.
  const nodesRef = useRef<SimNode[]>([]);
  const edgesRef = useRef<SimEdge[]>([]);
  const dragNodeRef = useRef<SimNode | null>(null);
  const animationRef = useRef<number>(0);
  // Fetch the graph once on mount.
  useEffect(() => {
    api.graph.get()
      .then(setData)
      .catch((err) => setError(err.message))
      .finally(() => setLoading(false));
  }, []);
  // Build simulation state and run the animation loop.
  // NOTE(review): selectedNode is in the dependency list (draw() closes over
  // it), so selecting a node re-runs this effect and re-randomizes all node
  // positions — confirm that layout reset on selection is intended.
  useEffect(() => {
    if (!data || !canvasRef.current) return;
    const canvas = canvasRef.current;
    const maybeCtx = canvas.getContext("2d");
    if (!maybeCtx) return;
    const ctx: CanvasRenderingContext2D = maybeCtx;
    const width = canvas.width;
    const height = canvas.height;
    const centerX = width / 2;
    const centerY = height / 2;
    // Initialize nodes with random positions
    const nodes: SimNode[] = data.nodes.map((node) => ({
      ...node,
      x: centerX + (Math.random() - 0.5) * 300,
      y: centerY + (Math.random() - 0.5) * 300,
      vx: 0,
      vy: 0,
    }));
    nodesRef.current = nodes;
    const nodeMap = new Map(nodes.map((n) => [n.id, n]));
    // Create edges with node references
    const edges: SimEdge[] = data.edges
      .map((edge) => {
        const sourceNode = nodeMap.get(edge.source);
        const targetNode = nodeMap.get(edge.target);
        // Drop edges whose endpoints are missing from the node set.
        if (!sourceNode || !targetNode) return null;
        return { ...edge, sourceNode, targetNode };
      })
      .filter((e): e is SimEdge => e !== null);
    edgesRef.current = edges;
    // Force simulation parameters
    const repulsion = 5000;
    const springStrength = 0.05;
    const baseSpringLength = 150;
    const damping = 0.9;
    const centerPull = 0.01;
    // One simulation tick: recompute forces from scratch and move nodes.
    function simulate() {
      const nodes = nodesRef.current;
      const edges = edgesRef.current;
      // Reset forces
      for (const node of nodes) {
        node.vx = 0;
        node.vy = 0;
      }
      // Repulsion between all nodes
      for (let i = 0; i < nodes.length; i++) {
        for (let j = i + 1; j < nodes.length; j++) {
          const dx = nodes[j].x - nodes[i].x;
          const dy = nodes[j].y - nodes[i].y;
          // `|| 1` guards against division by zero for coincident nodes.
          const dist = Math.sqrt(dx * dx + dy * dy) || 1;
          const force = repulsion / (dist * dist);
          const fx = (dx / dist) * force;
          const fy = (dy / dist) * force;
          nodes[i].vx -= fx;
          nodes[i].vy -= fy;
          nodes[j].vx += fx;
          nodes[j].vy += fy;
        }
      }
      // Spring forces for edges - closer relationships = shorter springs
      // Weight is 1-10, normalize to 0-1 for calculations
      for (const edge of edges) {
        const dx = edge.targetNode.x - edge.sourceNode.x;
        const dy = edge.targetNode.y - edge.sourceNode.y;
        const dist = Math.sqrt(dx * dx + dy * dy) || 1;
        // Higher weight (1-10) = shorter ideal length
        // Normalize: weight 10 -> 0.5x length, weight 1 -> 1.4x length
        const normalizedWeight = edge.closeness_weight / 10;
        const idealLength = baseSpringLength * (1.5 - normalizedWeight);
        const displacement = dist - idealLength;
        const force = springStrength * displacement;
        const fx = (dx / dist) * force;
        const fy = (dy / dist) * force;
        edge.sourceNode.vx += fx;
        edge.sourceNode.vy += fy;
        edge.targetNode.vx -= fx;
        edge.targetNode.vy -= fy;
      }
      // Pull toward center
      for (const node of nodes) {
        node.vx += (centerX - node.x) * centerPull;
        node.vy += (centerY - node.y) * centerPull;
      }
      // Apply velocities with damping (skip dragged node)
      for (const node of nodes) {
        if (node === dragNodeRef.current) continue;
        node.x += node.vx * damping;
        node.y += node.vy * damping;
        // Keep within bounds
        node.x = Math.max(30, Math.min(width - 30, node.x));
        node.y = Math.max(30, Math.min(height - 30, node.y));
      }
    }
    // Edge color for a closeness weight: fixed blue hue, darker when closer.
    // (Duplicates module-level getEdgeColorCSS so the closure stays self-contained.)
    function getEdgeColor(weight: number): string {
      // Interpolate from light gray (distant) to dark blue (close)
      // weight is 1-10, normalize to 0-1
      const normalized = weight / 10;
      const hue = 220;
      const saturation = 70;
      const lightness = 80 - normalized * 40;
      return `hsl(${hue}, ${saturation}%, ${lightness}%)`;
    }
    // Render one frame: edges (with type labels) first, then nodes on top.
    function draw(context: CanvasRenderingContext2D) {
      const nodes = nodesRef.current;
      const edges = edgesRef.current;
      context.clearRect(0, 0, width, height);
      // Draw edges
      for (const edge of edges) {
        // Weight is 1-10, scale line width accordingly
        const lineWidth = 1 + (edge.closeness_weight / 10) * 3;
        context.strokeStyle = getEdgeColor(edge.closeness_weight);
        context.lineWidth = lineWidth;
        context.beginPath();
        context.moveTo(edge.sourceNode.x, edge.sourceNode.y);
        context.lineTo(edge.targetNode.x, edge.targetNode.y);
        context.stroke();
        // Draw relationship type label at midpoint
        const midX = (edge.sourceNode.x + edge.targetNode.x) / 2;
        const midY = (edge.sourceNode.y + edge.targetNode.y) / 2;
        context.fillStyle = "#666";
        context.font = "10px sans-serif";
        context.textAlign = "center";
        const typeInfo = RELATIONSHIP_TYPES.find(t => t.value === edge.relationship_type);
        const label = typeInfo?.displayName || edge.relationship_type;
        context.fillText(label, midX, midY - 5);
      }
      // Draw nodes
      for (const node of nodes) {
        const isSelected = node === selectedNode;
        const radius = isSelected ? 25 : 20;
        // Node circle
        context.beginPath();
        context.arc(node.x, node.y, radius, 0, Math.PI * 2);
        context.fillStyle = isSelected ? "#0066cc" : "#fff";
        context.fill();
        context.strokeStyle = "#0066cc";
        context.lineWidth = 2;
        context.stroke();
        // Node label
        context.fillStyle = isSelected ? "#fff" : "#333";
        context.font = "12px sans-serif";
        context.textAlign = "center";
        context.textBaseline = "middle";
        const name = node.name.length > 10 ? node.name.slice(0, 9) + "…" : node.name;
        context.fillText(name, node.x, node.y);
      }
    }
    // rAF loop: simulate then draw; handle is kept for cleanup.
    function animate() {
      simulate();
      draw(ctx);
      animationRef.current = requestAnimationFrame(animate);
    }
    animate();
    return () => {
      cancelAnimationFrame(animationRef.current);
    };
  }, [data, selectedNode]);
  // Mouse interaction handlers
  useEffect(() => {
    const canvas = canvasRef.current;
    if (!canvas) return;
    // Hit-test against a 20px radius (400 = 20^2, avoids the sqrt).
    function getNodeAtPosition(x: number, y: number): SimNode | null {
      for (const node of nodesRef.current) {
        const dx = x - node.x;
        const dy = y - node.y;
        if (dx * dx + dy * dy < 400) {
          return node;
        }
      }
      return null;
    }
    // Press on a node starts dragging it and selects it.
    function handleMouseDown(e: MouseEvent) {
      const rect = canvas!.getBoundingClientRect();
      const x = e.clientX - rect.left;
      const y = e.clientY - rect.top;
      const node = getNodeAtPosition(x, y);
      if (node) {
        dragNodeRef.current = node;
        setSelectedNode(node);
      }
    }
    // While dragging, pin the node to the cursor (simulate() skips it).
    function handleMouseMove(e: MouseEvent) {
      if (!dragNodeRef.current) return;
      const rect = canvas!.getBoundingClientRect();
      dragNodeRef.current.x = e.clientX - rect.left;
      dragNodeRef.current.y = e.clientY - rect.top;
    }
    // Release (or leaving the canvas) ends the drag.
    function handleMouseUp() {
      dragNodeRef.current = null;
    }
    canvas.addEventListener("mousedown", handleMouseDown);
    canvas.addEventListener("mousemove", handleMouseMove);
    canvas.addEventListener("mouseup", handleMouseUp);
    canvas.addEventListener("mouseleave", handleMouseUp);
    return () => {
      canvas.removeEventListener("mousedown", handleMouseDown);
      canvas.removeEventListener("mousemove", handleMouseMove);
      canvas.removeEventListener("mouseup", handleMouseUp);
      canvas.removeEventListener("mouseleave", handleMouseUp);
    };
  }, []);
  if (loading) return <p>Loading graph...</p>;
  if (error) return <div className="error">{error}</div>;
  if (!data) return <p>No data available</p>;
  return (
    <div className="graph-container">
      <div className="header">
        <h1>Relationship Graph</h1>
      </div>
      <p className="graph-hint">
        Drag nodes to reposition. Closer relationships have shorter, darker edges.
      </p>
      <canvas
        ref={canvasRef}
        width={900}
        height={600}
        style={{
          border: "1px solid var(--color-border)",
          borderRadius: "8px",
          background: "var(--color-bg)",
          cursor: "grab",
        }}
      />
      {selectedNode && (
        <div className="selected-info">
          <h3>{selectedNode.name}</h3>
          {selectedNode.current_job && <p>Job: {selectedNode.current_job}</p>}
          <a href={`/contacts/${selectedNode.id}`}>View details</a>
        </div>
      )}
      <div className="legend">
        <h4>Relationship Closeness (1-10)</h4>
        <div className="legend-items">
          <div className="legend-item">
            <span className="legend-line" style={{ background: getEdgeColorCSS(10), height: "4px" }}></span>
            <span>10 - Very Close (Spouse, Partner)</span>
          </div>
          <div className="legend-item">
            <span className="legend-line" style={{ background: getEdgeColorCSS(7), height: "3px" }}></span>
            <span>7 - Close (Family, Best Friend)</span>
          </div>
          <div className="legend-item">
            <span className="legend-line" style={{ background: getEdgeColorCSS(4), height: "2px" }}></span>
            <span>4 - Moderate (Friend, Colleague)</span>
          </div>
          <div className="legend-item">
            <span className="legend-line" style={{ background: getEdgeColorCSS(2), height: "1px" }}></span>
            <span>2 - Distant (Acquaintance)</span>
          </div>
        </div>
      </div>
    </div>
  );
}
/**
 * Maps a closeness weight (1-10) to an HSL color string for the legend.
 * Hue and saturation are fixed (blue, 70%); only lightness varies,
 * from 80% (weight ~0, light/distant) down to 40% (weight 10, dark/close).
 */
function getEdgeColorCSS(weight: number): string {
  // Lightness decreases linearly as the normalized weight rises.
  const lightness = 80 - (weight / 10) * 40;
  return `hsl(220, 70%, ${lightness}%)`;
}

View File

@@ -1,62 +0,0 @@
:root {
  /* Light theme (default) */
  /* Surfaces */
  --color-bg: #f5f5f5;
  --color-bg-card: #ffffff;
  --color-bg-hover: #f0f0f0;
  --color-bg-muted: #f9f9f9;
  --color-bg-error: #ffe0e0;
  /* Text */
  --color-text: #333333;
  --color-text-muted: #666666;
  --color-text-error: #cc0000;
  /* Borders */
  --color-border: #dddddd;
  --color-border-light: #eeeeee;
  --color-border-lighter: #f0f0f0;
  /* Accents */
  --color-primary: #0066cc;
  --color-primary-hover: #0055aa;
  --color-danger: #cc3333;
  --color-danger-hover: #aa2222;
  --color-tag-bg: #e0e0e0;
  --shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
  /* Base typography and rendering applied document-wide */
  font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
  line-height: 1.5;
  font-weight: 400;
  color: var(--color-text);
  background-color: var(--color-bg);
  font-synthesis: none;
  text-rendering: optimizeLegibility;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
}
/* Dark theme: overrides every color token from :root; activated by setting
   data-theme="dark" on an ancestor (e.g. <html>). */
[data-theme="dark"] {
  --color-bg: #1a1a1a;
  --color-bg-card: #2d2d2d;
  --color-bg-hover: #3d3d3d;
  --color-bg-muted: #252525;
  --color-bg-error: #4a2020;
  --color-text: #e0e0e0;
  --color-text-muted: #a0a0a0;
  --color-text-error: #ff6b6b;
  --color-border: #404040;
  --color-border-light: #353535;
  --color-border-lighter: #303030;
  --color-primary: #4da6ff;
  --color-primary-hover: #7dbfff;
  --color-danger: #ff6b6b;
  --color-danger-hover: #ff8a8a;
  --color-tag-bg: #404040;
  --shadow: 0 1px 3px rgba(0, 0, 0, 0.3);
}

View File

@@ -1,13 +0,0 @@
// Application entry point: mounts <App /> into the #root element,
// wrapped in StrictMode (double-render checks in dev) and BrowserRouter
// (client-side routing).
import { StrictMode } from "react";
import { createRoot } from "react-dom/client";
import { BrowserRouter } from "react-router-dom";
import App from "./App.tsx";
import "./index.css";
// Non-null assertion: index.html is expected to always contain #root.
createRoot(document.getElementById("root")!).render(
  <StrictMode>
    <BrowserRouter>
      <App />
    </BrowserRouter>
  </StrictMode>
);

View File

@@ -1,155 +0,0 @@
export interface Need {
id: number;
name: string;
description: string | null;
}
export interface NeedCreate {
name: string;
description?: string | null;
}
// Canonical relationship types with their UI labels and a default
// closeness weight on a 1-10 scale (10 = closest, e.g. spouse/partner;
// 2 = most distant). The list is `as const` so `value` narrows to a
// string-literal union (see RelationshipTypeValue).
export const RELATIONSHIP_TYPES = [
  { value: 'spouse', displayName: 'Spouse', defaultWeight: 10 },
  { value: 'partner', displayName: 'Partner', defaultWeight: 10 },
  { value: 'parent', displayName: 'Parent', defaultWeight: 9 },
  { value: 'child', displayName: 'Child', defaultWeight: 9 },
  { value: 'sibling', displayName: 'Sibling', defaultWeight: 9 },
  { value: 'best_friend', displayName: 'Best Friend', defaultWeight: 8 },
  { value: 'grandparent', displayName: 'Grandparent', defaultWeight: 7 },
  { value: 'grandchild', displayName: 'Grandchild', defaultWeight: 7 },
  { value: 'aunt_uncle', displayName: 'Aunt/Uncle', defaultWeight: 7 },
  { value: 'niece_nephew', displayName: 'Niece/Nephew', defaultWeight: 7 },
  { value: 'cousin', displayName: 'Cousin', defaultWeight: 7 },
  { value: 'in_law', displayName: 'In-Law', defaultWeight: 7 },
  { value: 'close_friend', displayName: 'Close Friend', defaultWeight: 6 },
  { value: 'friend', displayName: 'Friend', defaultWeight: 6 },
  { value: 'mentor', displayName: 'Mentor', defaultWeight: 5 },
  { value: 'mentee', displayName: 'Mentee', defaultWeight: 5 },
  { value: 'business_partner', displayName: 'Business Partner', defaultWeight: 5 },
  { value: 'colleague', displayName: 'Colleague', defaultWeight: 4 },
  { value: 'manager', displayName: 'Manager', defaultWeight: 4 },
  { value: 'direct_report', displayName: 'Direct Report', defaultWeight: 4 },
  { value: 'client', displayName: 'Client', defaultWeight: 4 },
  { value: 'acquaintance', displayName: 'Acquaintance', defaultWeight: 3 },
  { value: 'neighbor', displayName: 'Neighbor', defaultWeight: 3 },
  { value: 'ex', displayName: 'Ex', defaultWeight: 2 },
  { value: 'other', displayName: 'Other', defaultWeight: 2 },
] as const;
export type RelationshipTypeValue = typeof RELATIONSHIP_TYPES[number]['value'];
export interface ContactRelationship {
contact_id: number;
related_contact_id: number;
relationship_type: string;
closeness_weight: number;
}
export interface ContactRelationshipCreate {
related_contact_id: number;
relationship_type: RelationshipTypeValue;
closeness_weight?: number;
}
export interface ContactRelationshipUpdate {
relationship_type?: RelationshipTypeValue;
closeness_weight?: number;
}
export interface GraphNode {
id: number;
name: string;
current_job: string | null;
}
export interface GraphEdge {
source: number;
target: number;
relationship_type: string;
closeness_weight: number;
}
export interface GraphData {
nodes: GraphNode[];
edges: GraphEdge[];
}
export interface Contact {
id: number;
name: string;
age: number | null;
bio: string | null;
current_job: string | null;
gender: string | null;
goals: string | null;
legal_name: string | null;
profile_pic: string | null;
safe_conversation_starters: string | null;
self_sufficiency_score: number | null;
social_structure_style: string | null;
ssn: string | null;
suffix: string | null;
timezone: string | null;
topics_to_avoid: string | null;
needs: Need[];
related_to: ContactRelationship[];
related_from: ContactRelationship[];
}
export interface ContactListItem {
id: number;
name: string;
age: number | null;
bio: string | null;
current_job: string | null;
gender: string | null;
goals: string | null;
legal_name: string | null;
profile_pic: string | null;
safe_conversation_starters: string | null;
self_sufficiency_score: number | null;
social_structure_style: string | null;
ssn: string | null;
suffix: string | null;
timezone: string | null;
topics_to_avoid: string | null;
}
export interface ContactCreate {
name: string;
age?: number | null;
bio?: string | null;
current_job?: string | null;
gender?: string | null;
goals?: string | null;
legal_name?: string | null;
profile_pic?: string | null;
safe_conversation_starters?: string | null;
self_sufficiency_score?: number | null;
social_structure_style?: string | null;
ssn?: string | null;
suffix?: string | null;
timezone?: string | null;
topics_to_avoid?: string | null;
need_ids?: number[];
}
export interface ContactUpdate {
name?: string | null;
age?: number | null;
bio?: string | null;
current_job?: string | null;
gender?: string | null;
goals?: string | null;
legal_name?: string | null;
profile_pic?: string | null;
safe_conversation_starters?: string | null;
self_sufficiency_score?: number | null;
social_structure_style?: string | null;
ssn?: string | null;
suffix?: string | null;
timezone?: string | null;
topics_to_avoid?: string | null;
need_ids?: number[] | null;
}

View File

@@ -1,28 +0,0 @@
{
"compilerOptions": {
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
"target": "ES2022",
"useDefineForClassFields": true,
"lib": ["ES2022", "DOM", "DOM.Iterable"],
"module": "ESNext",
"types": ["vite/client"],
"skipLibCheck": true,
/* Bundler mode */
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"moduleDetection": "force",
"noEmit": true,
"jsx": "react-jsx",
/* Linting */
"strict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"erasableSyntaxOnly": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedSideEffectImports": true
},
"include": ["src"]
}

View File

@@ -1,7 +0,0 @@
{
"files": [],
"references": [
{ "path": "./tsconfig.app.json" },
{ "path": "./tsconfig.node.json" }
]
}

View File

@@ -1,26 +0,0 @@
{
"compilerOptions": {
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
"target": "ES2023",
"lib": ["ES2023"],
"module": "ESNext",
"types": ["node"],
"skipLibCheck": true,
/* Bundler mode */
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"moduleDetection": "force",
"noEmit": true,
/* Linting */
"strict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"erasableSyntaxOnly": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedSideEffectImports": true
},
"include": ["vite.config.ts"]
}

View File

@@ -1,11 +0,0 @@
import { defineConfig } from "vite";
import react from "@vitejs/plugin-react";
// Vite config: React plugin plus a dev-server proxy that forwards /api
// requests to the FastAPI backend (assumed to listen on localhost:8000).
export default defineConfig({
  plugins: [react()],
  server: {
    proxy: {
      "/api": "http://localhost:8000",
    },
  },
});

View File

@@ -16,18 +16,13 @@
}; };
python-env = final: _prev: { python-env = final: _prev: {
my_python = final.python314.withPackages ( my_python = final.python313.withPackages (
ps: with ps; [ ps: with ps; [
alembic
apprise apprise
apscheduler apscheduler
fastapi
fastapi-cli
httpx
mypy mypy
polars polars
psycopg psycopg
pydantic
pyfakefs pyfakefs
pytest pytest
pytest-cov pytest-cov
@@ -37,9 +32,7 @@
ruff ruff
scalene scalene
sqlalchemy sqlalchemy
sqlalchemy
textual textual
tinytuya
typer typer
types-requests types-requests
] ]

View File

@@ -7,7 +7,7 @@ requires-python = "~=3.13.0"
readme = "README.md" readme = "README.md"
license = "MIT" license = "MIT"
# these dependencies are a best effort and aren't guaranteed to work # these dependencies are a best effort and aren't guaranteed to work
dependencies = ["apprise", "apscheduler", "httpx", "polars", "pydantic", "pyyaml", "requests", "typer"] dependencies = ["apprise", "apscheduler", "polars", "requests", "typer"]
[dependency-groups] [dependency-groups]
dev = [ dev = [
@@ -40,30 +40,19 @@ lint.ignore = [
"tests/**" = [ "tests/**" = [
"S101", # (perm) pytest needs asserts "S101", # (perm) pytest needs asserts
] ]
"python/stuff/**" = [ "python/random/**" = [
"T201", # (perm) I don't care about print statements dir "T201", # (perm) I don't care about print statements dir
] ]
"python/testing/**" = [ "python/testing/**" = [
"T201", # (perm) I don't care about print statements dir "T201", # (perm) I don't care about print statements dir
"ERA001", # (perm) I don't care about print statements dir "ERA001", # (perm) I don't care about print statements dir
] ]
"python/splendor/**" = [ "python/splendor/**" = [
"S311", # (perm) there is no security issue here "S311", # (perm) there is no security issue here
"T201", # (perm) I don't care about print statements dir "T201", # (perm) I don't care about print statements dir
"PLR2004", # (temps) need to think about this "PLR2004", # (temps) need to think about this
] ]
"python/orm/**" = [
"TC003", # (perm) this creates issues because sqlalchemy uses these at runtime
]
"python/congress_tracker/**" = [
"TC003", # (perm) this creates issues because sqlalchemy uses these at runtime
]
"python/eval_warnings/**" = [
"S607", # (perm) gh and git are expected on PATH in the runner environment
]
"python/alembic/**" = [
"INP001", # (perm) this creates LSP issues for alembic
]
[tool.ruff.lint.pydocstyle] [tool.ruff.lint.pydocstyle]
convention = "google" convention = "google"

View File

@@ -1,109 +0,0 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = python/alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
file_template = %%(year)d_%%(month).2d_%%(day).2d-%%(slug)s_%%(rev)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
version_path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
revision_environment = true
[post_write_hooks]
hooks = dynamic_schema,ruff
dynamic_schema.type = dynamic_schema
ruff.type = ruff
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@@ -1,93 +0,0 @@
"""Alembic."""
from __future__ import annotations
import logging
import sys
from pathlib import Path
from typing import TYPE_CHECKING, Any, Literal
from alembic import context
from alembic.script import write_hooks
from python.common import bash_wrapper
from python.orm import RichieBase
from python.orm.base import get_postgres_engine
if TYPE_CHECKING:
from collections.abc import MutableMapping
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
target_metadata = RichieBase.metadata
logging.basicConfig(
level="DEBUG",
datefmt="%Y-%m-%dT%H:%M:%S%z",
format="%(asctime)s %(levelname)s %(filename)s:%(lineno)d - %(message)s",
handlers=[logging.StreamHandler(sys.stdout)],
)
@write_hooks.register("dynamic_schema")
def dynamic_schema(filename: str, _options: dict[Any, Any]) -> None:
    """Rewrite a freshly generated migration so its schema is resolved at runtime."""
    path = Path(filename)
    text = path.read_text()
    # Swap the hard-coded schema literal for the module-level `schema`
    # variable that the migration template defines.
    text = text.replace(f"schema='{RichieBase.schema_name}'", "schema=schema")
    text = text.replace(f"'{RichieBase.schema_name}.", "f'{schema}.")
    path.write_text(text)
@write_hooks.register("ruff")
def ruff_check_and_format(filename: str, _options: dict[Any, Any]) -> None:
    """Run ruff autofix and formatting on a freshly written migration file.

    Args:
        filename: Path of the migration file emitted by Alembic.
        _options: Hook options from alembic.ini (unused).
    """
    # Bug fix: the f-strings previously never interpolated `filename`, so
    # ruff was invoked against a literal placeholder instead of the file.
    bash_wrapper(f"ruff check --fix {filename}")
    bash_wrapper(f"ruff format {filename}")
def include_name(
    name: str | None,
    type_: Literal["schema", "table", "column", "index", "unique_constraint", "foreign_key_constraint"],
    _parent_names: MutableMapping[Literal["schema_name", "table_name", "schema_qualified_table_name"], str | None],
) -> bool:
    """Filter which database objects are included in autogenerate.

    Only the target metadata's schema is accepted; every non-schema object
    (tables, columns, constraints, ...) is always included.

    Args:
        name: Name of the object under consideration.
        type_: Kind of object being filtered.
        _parent_names: Names of the enclosing objects (unused).

    Returns:
        bool: True if the object should be included, False otherwise.
    """
    if type_ == "schema":
        return name == target_metadata.schema
    return True
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = get_postgres_engine()
    with connectable.connect() as connection:
        # Restrict autogenerate to the project schema (via include_name) and
        # keep Alembic's version table inside that same schema.
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            include_schemas=True,
            version_table_schema=RichieBase.schema_name,
            include_name=include_name,
        )
        with context.begin_transaction():
            context.run_migrations()
run_migrations_online()

View File

@@ -1,36 +0,0 @@
"""${message}.
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
from python.orm import RichieBase
if TYPE_CHECKING:
from collections.abc import Sequence
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: str | None = ${repr(down_revision)}
branch_labels: str | Sequence[str] | None = ${repr(branch_labels)}
depends_on: str | Sequence[str] | None = ${repr(depends_on)}
schema=RichieBase.schema_name
def upgrade() -> None:
"""Upgrade."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade."""
${downgrades if downgrades else "pass"}

View File

@@ -1,113 +0,0 @@
"""created contact api.
Revision ID: edd7dd61a3d2
Revises:
Create Date: 2026-01-11 15:45:59.909266
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
from python.orm import RichieBase
if TYPE_CHECKING:
from collections.abc import Sequence
# revision identifiers, used by Alembic.
revision: str = "edd7dd61a3d2"
down_revision: str | None = None
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
schema = RichieBase.schema_name
def upgrade() -> None:
"""Upgrade."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"contact",
sa.Column("name", sa.String(), nullable=False),
sa.Column("age", sa.Integer(), nullable=True),
sa.Column("bio", sa.String(), nullable=True),
sa.Column("current_job", sa.String(), nullable=True),
sa.Column("gender", sa.String(), nullable=True),
sa.Column("goals", sa.String(), nullable=True),
sa.Column("legal_name", sa.String(), nullable=True),
sa.Column("profile_pic", sa.String(), nullable=True),
sa.Column("safe_conversation_starters", sa.String(), nullable=True),
sa.Column("self_sufficiency_score", sa.Integer(), nullable=True),
sa.Column("social_structure_style", sa.String(), nullable=True),
sa.Column("ssn", sa.String(), nullable=True),
sa.Column("suffix", sa.String(), nullable=True),
sa.Column("timezone", sa.String(), nullable=True),
sa.Column("topics_to_avoid", sa.String(), nullable=True),
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
sa.Column("updated", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("pk_contact")),
schema=schema,
)
op.create_table(
"need",
sa.Column("name", sa.String(), nullable=False),
sa.Column("description", sa.String(), nullable=True),
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
sa.Column("updated", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("pk_need")),
schema=schema,
)
op.create_table(
"contact_need",
sa.Column("contact_id", sa.Integer(), nullable=False),
sa.Column("need_id", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(
["contact_id"],
[f"{schema}.contact.id"],
name=op.f("fk_contact_need_contact_id_contact"),
ondelete="CASCADE",
),
sa.ForeignKeyConstraint(
["need_id"], [f"{schema}.need.id"], name=op.f("fk_contact_need_need_id_need"), ondelete="CASCADE"
),
sa.PrimaryKeyConstraint("contact_id", "need_id", name=op.f("pk_contact_need")),
schema=schema,
)
op.create_table(
"contact_relationship",
sa.Column("contact_id", sa.Integer(), nullable=False),
sa.Column("related_contact_id", sa.Integer(), nullable=False),
sa.Column("relationship_type", sa.String(length=100), nullable=False),
sa.Column("closeness_weight", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(
["contact_id"],
[f"{schema}.contact.id"],
name=op.f("fk_contact_relationship_contact_id_contact"),
ondelete="CASCADE",
),
sa.ForeignKeyConstraint(
["related_contact_id"],
[f"{schema}.contact.id"],
name=op.f("fk_contact_relationship_related_contact_id_contact"),
ondelete="CASCADE",
),
sa.PrimaryKeyConstraint("contact_id", "related_contact_id", name=op.f("pk_contact_relationship")),
schema=schema,
)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("contact_relationship", schema=schema)
op.drop_table("contact_need", schema=schema)
op.drop_table("need", schema=schema)
op.drop_table("contact", schema=schema)
# ### end Alembic commands ###

View File

@@ -1 +0,0 @@
"""FastAPI applications."""

View File

@@ -1,16 +0,0 @@
"""FastAPI dependencies."""
from collections.abc import Iterator
from typing import Annotated
from fastapi import Depends, Request
from sqlalchemy.orm import Session
def get_db(request: Request) -> Iterator[Session]:
    """Yield a database session bound to the app-wide engine.

    Generator-based FastAPI dependency: a fresh session is opened per
    request from ``app.state.engine`` and closed when the handler returns.
    """
    with Session(request.app.state.engine) as session:
        yield session
DbSession = Annotated[Session, Depends(get_db)]

View File

@@ -1,117 +0,0 @@
"""FastAPI interface for Contact database."""
import logging
import shutil
import subprocess
import tempfile
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
from os import environ
from pathlib import Path
from typing import Annotated
import typer
import uvicorn
from fastapi import FastAPI
from python.api.routers import contact_router, create_frontend_router
from python.common import configure_logger
from python.orm.base import get_postgres_engine
logger = logging.getLogger(__name__)
def create_app(frontend_dir: Path | None = None) -> FastAPI:
    """Create and configure the FastAPI application.

    Args:
        frontend_dir: Directory of pre-built frontend assets. When given, a
            frontend router serving that directory is mounted in addition to
            the API routes.

    Returns:
        The configured FastAPI application.
    """
    @asynccontextmanager
    async def lifespan(app: FastAPI) -> AsyncIterator[None]:
        """Manage application lifespan."""
        # Create the engine once at startup and dispose of it on shutdown
        # so connections are released cleanly.
        app.state.engine = get_postgres_engine()
        yield
        app.state.engine.dispose()
    app = FastAPI(title="Contact Database API", lifespan=lifespan)
    app.include_router(contact_router)
    if frontend_dir:
        logger.info(f"Serving frontend from {frontend_dir}")
        frontend_router = create_frontend_router(frontend_dir)
        app.include_router(frontend_router)
    return app
def build_frontend(source_dir: Path | None, cache_dir: Path | None = None) -> Path | None:
"""Run npm build and copy output to a temp directory.
Works even if source_dir is read-only by copying to a temp directory first.
Args:
source_dir: Frontend source directory.
cache_dir: Optional npm cache directory for faster repeated builds.
Returns:
Path to frontend build directory, or None if no source_dir provided.
"""
if not source_dir:
return None
if not source_dir.exists():
error = f"Frontend directory {source_dir} does not exist"
raise FileExistsError(error)
logger.info("Building frontend from %s...", source_dir)
# Copy source to a writable temp directory
build_dir = Path(tempfile.mkdtemp(prefix="contact_frontend_build_"))
shutil.copytree(source_dir, build_dir, dirs_exist_ok=True)
env = dict(environ)
if cache_dir:
cache_dir.mkdir(parents=True, exist_ok=True)
env["npm_config_cache"] = str(cache_dir)
subprocess.run(["npm", "install"], cwd=build_dir, env=env, check=True) # noqa: S607
subprocess.run(["npm", "run", "build"], cwd=build_dir, env=env, check=True) # noqa: S607
dist_dir = build_dir / "dist"
if not dist_dir.exists():
error = f"Build output not found at {dist_dir}"
raise FileNotFoundError(error)
output_dir = Path(tempfile.mkdtemp(prefix="contact_frontend_"))
shutil.copytree(dist_dir, output_dir, dirs_exist_ok=True)
logger.info(f"Frontend built and copied to {output_dir}")
shutil.rmtree(build_dir)
return output_dir
def serve(
    host: Annotated[str, typer.Option("--host", "-h", help="Host to bind to")],
    frontend_dir: Annotated[
        Path | None,
        typer.Option(
            "--frontend-dir",
            "-f",
            help="Frontend source directory. If provided, runs npm build and serves from temp dir.",
        ),
    ] = None,
    port: Annotated[int, typer.Option("--port", "-p", help="Port to bind to")] = 8000,
    log_level: Annotated[str, typer.Option("--log-level", "-l", help="Log level")] = "INFO",
) -> None:
    """Start the Contact API server.

    Optionally builds the frontend first (when --frontend-dir is given) and
    serves the built assets alongside the API via uvicorn.
    """
    configure_logger(log_level)
    # Reuse the user's npm cache so repeated frontend builds are fast.
    # NOTE(review): assumes HOME is set in the environment; raises KeyError
    # otherwise — confirm the deployment environment guarantees it.
    cache_dir = Path(environ["HOME"]) / ".npm"
    serve_dir = build_frontend(frontend_dir, cache_dir=cache_dir)
    app = create_app(frontend_dir=serve_dir)
    uvicorn.run(app, host=host, port=port)
if __name__ == "__main__":
typer.run(serve)

View File

@@ -1,6 +0,0 @@
"""API routers."""
from python.api.routers.contact import router as contact_router
from python.api.routers.frontend import create_frontend_router
__all__ = ["contact_router", "create_frontend_router"]

View File

@@ -1,459 +0,0 @@
"""Contact API router."""
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.orm import selectinload
from python.api.dependencies import DbSession
from python.orm.contact import Contact, ContactRelationship, Need, RelationshipType
class NeedBase(BaseModel):
"""Base schema for Need."""
name: str
description: str | None = None
class NeedCreate(NeedBase):
"""Schema for creating a Need."""
class NeedResponse(NeedBase):
"""Schema for Need response."""
id: int
model_config = {"from_attributes": True}
class ContactRelationshipCreate(BaseModel):
"""Schema for creating a contact relationship."""
related_contact_id: int
relationship_type: RelationshipType
closeness_weight: int | None = None
class ContactRelationshipUpdate(BaseModel):
"""Schema for updating a contact relationship."""
relationship_type: RelationshipType | None = None
closeness_weight: int | None = None
class ContactRelationshipResponse(BaseModel):
"""Schema for contact relationship response."""
contact_id: int
related_contact_id: int
relationship_type: str
closeness_weight: int
model_config = {"from_attributes": True}
class RelationshipTypeInfo(BaseModel):
"""Information about a relationship type."""
value: str
display_name: str
default_weight: int
class GraphNode(BaseModel):
"""Node in the relationship graph."""
id: int
name: str
current_job: str | None = None
class GraphEdge(BaseModel):
"""Edge in the relationship graph."""
source: int
target: int
relationship_type: str
closeness_weight: int
class GraphData(BaseModel):
"""Complete graph data for visualization."""
nodes: list[GraphNode]
edges: list[GraphEdge]
class ContactBase(BaseModel):
"""Base schema for Contact."""
name: str
age: int | None = None
bio: str | None = None
current_job: str | None = None
gender: str | None = None
goals: str | None = None
legal_name: str | None = None
profile_pic: str | None = None
safe_conversation_starters: str | None = None
self_sufficiency_score: int | None = None
social_structure_style: str | None = None
ssn: str | None = None
suffix: str | None = None
timezone: str | None = None
topics_to_avoid: str | None = None
class ContactCreate(ContactBase):
"""Schema for creating a Contact."""
need_ids: list[int] = []
class ContactUpdate(BaseModel):
"""Schema for updating a Contact."""
name: str | None = None
age: int | None = None
bio: str | None = None
current_job: str | None = None
gender: str | None = None
goals: str | None = None
legal_name: str | None = None
profile_pic: str | None = None
safe_conversation_starters: str | None = None
self_sufficiency_score: int | None = None
social_structure_style: str | None = None
ssn: str | None = None
suffix: str | None = None
timezone: str | None = None
topics_to_avoid: str | None = None
need_ids: list[int] | None = None
class ContactResponse(ContactBase):
"""Schema for Contact response with relationships."""
id: int
needs: list[NeedResponse] = []
related_to: list[ContactRelationshipResponse] = []
related_from: list[ContactRelationshipResponse] = []
model_config = {"from_attributes": True}
class ContactListResponse(ContactBase):
"""Schema for Contact list response."""
id: int
model_config = {"from_attributes": True}
router = APIRouter(prefix="/api", tags=["contacts"])
@router.post("/needs", response_model=NeedResponse)
def create_need(need: NeedCreate, db: DbSession) -> Need:
    """Persist a new need and return it with its generated ID."""
    row = Need(name=need.name, description=need.description)
    db.add(row)
    db.commit()
    # Refresh so server-generated columns (id, timestamps) are populated.
    db.refresh(row)
    return row
@router.get("/needs", response_model=list[NeedResponse])
def list_needs(db: DbSession) -> list[Need]:
    """Return every need in the database."""
    rows = db.scalars(select(Need))
    return list(rows)
@router.get("/needs/{need_id}", response_model=NeedResponse)
def get_need(need_id: int, db: DbSession) -> Need:
    """Get a need by ID.

    Raises:
        HTTPException: 404 if no need with this ID exists.
    """
    need = db.get(Need, need_id)
    if not need:
        raise HTTPException(status_code=404, detail="Need not found")
    return need
@router.delete("/needs/{need_id}")
def delete_need(need_id: int, db: DbSession) -> dict[str, bool]:
    """Delete a need by ID; 404 when it does not exist."""
    target = db.get(Need, need_id)
    if target is None:
        raise HTTPException(status_code=404, detail="Need not found")
    db.delete(target)
    db.commit()
    return {"deleted": True}
@router.post("/contacts", response_model=ContactResponse)
def create_contact(contact: ContactCreate, db: DbSession) -> Contact:
    """Create a new contact.

    Any ``need_ids`` that match existing Need rows are attached to the new
    contact; IDs with no matching row are silently ignored.
    """
    need_ids = contact.need_ids
    contact_data = contact.model_dump(exclude={"need_ids"})
    db_contact = Contact(**contact_data)
    if need_ids:
        needs = list(db.scalars(select(Need).where(Need.id.in_(need_ids))).all())
        db_contact.needs = needs
    db.add(db_contact)
    db.commit()
    db.refresh(db_contact)
    return db_contact
@router.get("/contacts", response_model=list[ContactListResponse])
def list_contacts(
    db: DbSession,
    skip: int = 0,
    limit: int = 100,
) -> list[Contact]:
    """List all contacts with offset/limit pagination (default page: 100)."""
    return list(db.scalars(select(Contact).offset(skip).limit(limit)).all())
@router.get("/contacts/{contact_id}", response_model=ContactResponse)
def get_contact(contact_id: int, db: DbSession) -> Contact:
    """Get a contact by ID with all relationships.

    Eager-loads needs and both relationship directions via selectinload so
    serializing the response does not trigger per-collection lazy loads.

    Raises:
        HTTPException: 404 if no contact with this ID exists.
    """
    contact = db.scalar(
        select(Contact)
        .where(Contact.id == contact_id)
        .options(
            selectinload(Contact.needs),
            selectinload(Contact.related_to),
            selectinload(Contact.related_from),
        )
    )
    if not contact:
        raise HTTPException(status_code=404, detail="Contact not found")
    return contact
@router.patch("/contacts/{contact_id}", response_model=ContactResponse)
def update_contact(
    contact_id: int,
    contact: ContactUpdate,
    db: DbSession,
) -> Contact:
    """Update a contact by ID (partial update).

    Only fields the client explicitly sent are applied (``exclude_unset``).
    ``need_ids`` replaces the contact's need set when provided; when omitted
    or null, the existing associations are left untouched.

    Raises:
        HTTPException: 404 if no contact with this ID exists.
    """
    db_contact = db.get(Contact, contact_id)
    if not db_contact:
        raise HTTPException(status_code=404, detail="Contact not found")
    update_data = contact.model_dump(exclude_unset=True)
    need_ids = update_data.pop("need_ids", None)
    for key, value in update_data.items():
        setattr(db_contact, key, value)
    if need_ids is not None:
        # Unknown IDs simply match no rows and are dropped silently.
        needs = list(db.scalars(select(Need).where(Need.id.in_(need_ids))).all())
        db_contact.needs = needs
    db.commit()
    db.refresh(db_contact)
    return db_contact
@router.delete("/contacts/{contact_id}")
def delete_contact(contact_id: int, db: DbSession) -> dict[str, bool]:
    """Delete a contact by ID; 404 when it does not exist."""
    target = db.get(Contact, contact_id)
    if target is None:
        raise HTTPException(status_code=404, detail="Contact not found")
    db.delete(target)
    db.commit()
    return {"deleted": True}
@router.post("/contacts/{contact_id}/needs/{need_id}")
def add_need_to_contact(
    contact_id: int,
    need_id: int,
    db: DbSession,
) -> dict[str, bool]:
    """Add a need to a contact.

    Idempotent: if the association already exists, nothing is written and
    the response is the same.

    Raises:
        HTTPException: 404 if either the contact or the need is missing.
    """
    contact = db.get(Contact, contact_id)
    if not contact:
        raise HTTPException(status_code=404, detail="Contact not found")
    need = db.get(Need, need_id)
    if not need:
        raise HTTPException(status_code=404, detail="Need not found")
    if need not in contact.needs:
        contact.needs.append(need)
        db.commit()
    return {"added": True}
@router.delete("/contacts/{contact_id}/needs/{need_id}")
def remove_need_from_contact(
    contact_id: int,
    need_id: int,
    db: DbSession,
) -> dict[str, bool]:
    """Remove a need from a contact.

    Idempotent: if the association does not exist, nothing is written and
    the response is the same.

    Raises:
        HTTPException: 404 if either the contact or the need is missing.
    """
    contact = db.get(Contact, contact_id)
    if not contact:
        raise HTTPException(status_code=404, detail="Contact not found")
    need = db.get(Need, need_id)
    if not need:
        raise HTTPException(status_code=404, detail="Need not found")
    if need in contact.needs:
        contact.needs.remove(need)
        db.commit()
    return {"removed": True}
@router.post(
    "/contacts/{contact_id}/relationships",
    response_model=ContactRelationshipResponse,
)
def add_contact_relationship(
    contact_id: int,
    relationship: ContactRelationshipCreate,
    db: DbSession,
) -> ContactRelationship:
    """Add a relationship between two contacts.

    When no closeness_weight is supplied, the relationship type's default
    weight is used.

    Raises:
        HTTPException: 404 if either contact is missing; 400 for a
            self-relationship.
    """
    contact = db.get(Contact, contact_id)
    if not contact:
        raise HTTPException(status_code=404, detail="Contact not found")
    related_contact = db.get(Contact, relationship.related_contact_id)
    if not related_contact:
        raise HTTPException(status_code=404, detail="Related contact not found")
    if contact_id == relationship.related_contact_id:
        raise HTTPException(status_code=400, detail="Cannot relate contact to itself")
    # Use provided weight or default from relationship type
    weight = relationship.closeness_weight
    if weight is None:
        weight = relationship.relationship_type.default_weight
    # NOTE(review): inserting a duplicate (contact_id, related_contact_id)
    # pair is not checked here and will surface as a DB integrity error —
    # confirm whether a 409 response is wanted instead.
    db_relationship = ContactRelationship(
        contact_id=contact_id,
        related_contact_id=relationship.related_contact_id,
        relationship_type=relationship.relationship_type.value,
        closeness_weight=weight,
    )
    db.add(db_relationship)
    db.commit()
    db.refresh(db_relationship)
    return db_relationship
@router.get(
    "/contacts/{contact_id}/relationships",
    response_model=list[ContactRelationshipResponse],
)
def get_contact_relationships(
    contact_id: int,
    db: DbSession,
) -> list[ContactRelationship]:
    """Return every relationship touching a contact: outgoing first, then incoming.

    Raises:
        HTTPException: 404 when the contact does not exist.
    """
    if db.get(Contact, contact_id) is None:
        raise HTTPException(status_code=404, detail="Contact not found")
    outgoing_query = select(ContactRelationship).where(ContactRelationship.contact_id == contact_id)
    incoming_query = select(ContactRelationship).where(ContactRelationship.related_contact_id == contact_id)
    return [*db.scalars(outgoing_query).all(), *db.scalars(incoming_query).all()]
@router.patch(
    "/contacts/{contact_id}/relationships/{related_contact_id}",
    response_model=ContactRelationshipResponse,
)
def update_contact_relationship(
    contact_id: int,
    related_contact_id: int,
    update: ContactRelationshipUpdate,
    db: DbSession,
) -> ContactRelationship:
    """Partially update a relationship's type and/or closeness weight.

    Fields left as None in the request body are left untouched.

    Raises:
        HTTPException: 404 when no such relationship exists.
    """
    query = select(ContactRelationship).where(
        ContactRelationship.contact_id == contact_id,
        ContactRelationship.related_contact_id == related_contact_id,
    )
    relationship = db.scalar(query)
    if relationship is None:
        raise HTTPException(status_code=404, detail="Relationship not found")
    if update.relationship_type is not None:
        relationship.relationship_type = update.relationship_type.value
    if update.closeness_weight is not None:
        relationship.closeness_weight = update.closeness_weight
    db.commit()
    db.refresh(relationship)
    return relationship
@router.delete("/contacts/{contact_id}/relationships/{related_contact_id}")
def remove_contact_relationship(
    contact_id: int,
    related_contact_id: int,
    db: DbSession,
) -> dict[str, bool]:
    """Delete the directed relationship from one contact to another.

    Raises:
        HTTPException: 404 when no such relationship exists.
    """
    query = select(ContactRelationship).where(
        ContactRelationship.contact_id == contact_id,
        ContactRelationship.related_contact_id == related_contact_id,
    )
    relationship = db.scalar(query)
    if relationship is None:
        raise HTTPException(status_code=404, detail="Relationship not found")
    db.delete(relationship)
    db.commit()
    return {"deleted": True}
@router.get("/relationship-types")
def list_relationship_types() -> list[RelationshipTypeInfo]:
    """List all available relationship types with their default weights."""
    types: list[RelationshipTypeInfo] = []
    for relationship_type in RelationshipType:
        types.append(
            RelationshipTypeInfo(
                value=relationship_type.value,
                display_name=relationship_type.display_name,
                default_weight=relationship_type.default_weight,
            )
        )
    return types
@router.get("/graph")
def get_relationship_graph(db: DbSession) -> GraphData:
    """Return all contacts (nodes) and relationships (edges) for visualization."""
    nodes: list[GraphNode] = []
    for contact in db.scalars(select(Contact)).all():
        nodes.append(GraphNode(id=contact.id, name=contact.name, current_job=contact.current_job))
    edges: list[GraphEdge] = []
    for rel in db.scalars(select(ContactRelationship)).all():
        edges.append(
            GraphEdge(
                source=rel.contact_id,
                target=rel.related_contact_id,
                relationship_type=rel.relationship_type,
                closeness_weight=rel.closeness_weight,
            )
        )
    return GraphData(nodes=nodes, edges=edges)

View File

@@ -1,24 +0,0 @@
"""Frontend SPA router."""
from pathlib import Path
from fastapi import APIRouter
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
def create_frontend_router(frontend_dir: Path) -> APIRouter:
    """Create a router that serves the built frontend SPA.

    Static assets are mounted under /assets. Any other path serves the
    matching file when it exists inside ``frontend_dir``, and falls back to
    index.html so the SPA's client-side router can handle the route.

    Args:
        frontend_dir: Directory containing the built frontend (index.html etc.).

    Returns:
        Configured APIRouter.
    """
    router = APIRouter(tags=["frontend"])
    router.mount("/assets", StaticFiles(directory=frontend_dir / "assets"), name="assets")

    root = frontend_dir.resolve()

    @router.get("/{full_path:path}")
    async def serve_spa(full_path: str) -> FileResponse:
        """Serve React SPA for all non-API routes."""
        # Resolve and require containment so paths like "../secrets" cannot
        # escape the frontend directory; anything outside falls back to the SPA.
        file_path = (frontend_dir / full_path).resolve()
        if file_path.is_relative_to(root) and file_path.is_file():
            return FileResponse(file_path)
        return FileResponse(frontend_dir / "index.html")

    return router

View File

@@ -1 +0,0 @@
"""Detect Nix evaluation warnings from build logs and create PRs with LLM-suggested fixes."""

View File

@@ -1,449 +0,0 @@
"""Detect Nix evaluation warnings and create PRs with LLM-suggested fixes."""
from __future__ import annotations
import hashlib
import logging
import re
import subprocess
from dataclasses import dataclass
from io import BytesIO
from pathlib import Path
from typing import Annotated
from zipfile import ZipFile
import typer
from httpx import HTTPError, post
from python.common import configure_logger
logger = logging.getLogger(__name__)
@dataclass(frozen=True)
class EvalWarning:
    """A single Nix evaluation warning.

    Frozen so instances are hashable and can be deduplicated in sets.
    """

    # Build system the warning came from (derived from the "build-<system>" log name).
    system: str
    # Warning text with any leading timestamp stripped.
    message: str
@dataclass
class FileChange:
    """A file change suggested by the LLM."""

    # Repo-relative path of the file to patch.
    file_path: str
    # Exact text expected to exist in the file (replaced once).
    original: str
    # Replacement text.
    fixed: str
def run_cmd(cmd: list[str], *, check: bool = True) -> subprocess.CompletedProcess[str]:
    """Run a subprocess command and return the completed process.

    Args:
        cmd: Command and arguments.
        check: Raise CalledProcessError on non-zero exit when True.

    Returns:
        CompletedProcess with stdout/stderr captured as text.
    """
    logger.debug("Running: %s", " ".join(cmd))
    result = subprocess.run(cmd, capture_output=True, text=True, check=check)
    return result
def download_logs(run_id: str, repo: str) -> dict[str, str]:
    """Download build logs for a GitHub Actions run via the gh CLI.

    Args:
        run_id: The workflow run ID.
        repo: The GitHub repository (owner/repo).

    Returns:
        Mapping of zip entry names to text content, limited to build-*.txt entries.

    Raises:
        RuntimeError: If the gh API call fails.
    """
    result = subprocess.run(
        ["gh", "api", f"repos/{repo}/actions/runs/{run_id}/logs"],
        capture_output=True,
        check=False,
    )
    if result.returncode != 0:
        msg = f"Failed to download logs: {result.stderr.decode(errors='replace')}"
        raise RuntimeError(msg)
    with ZipFile(BytesIO(result.stdout)) as zip_file:
        return {
            name: zip_file.read(name).decode(errors="replace")
            for name in zip_file.namelist()
            if name.startswith("build-") and name.endswith(".txt")
        }
def parse_warnings(logs: dict[str, str]) -> set[EvalWarning]:
    """Parse Nix evaluation warnings from build log contents.

    Args:
        logs: Mapping of zip entry names (e.g. "build-bob/2_Build.txt") to text.

    Returns:
        Deduplicated set of warnings, one per (system, message) pair.
    """
    warning_pattern = re.compile(r"(?:^[\d\-T:.Z]+ )?(warning:|trace: warning:)")
    timestamp_prefix = re.compile(r"^[\d\-T:.Z]+ ")
    ignored_prefix = "warning: ignoring untrusted flake configuration setting"
    warnings: set[EvalWarning] = set()
    for name, content in sorted(logs.items()):
        system = name.split("/")[0].removeprefix("build-")
        for line in content.splitlines():
            if not warning_pattern.search(line):
                continue
            message = timestamp_prefix.sub("", line).strip()
            if message.startswith(ignored_prefix):
                continue
            logger.debug(f"Found warning: {line}")
            warnings.add(EvalWarning(system=system, message=message))
    logger.info("Found %d unique warnings", len(warnings))
    return warnings
def extract_referenced_files(warnings: set[EvalWarning]) -> dict[str, str]:
    """Extract file paths referenced in warnings and read their contents.

    Falls back to flake.nix when none of the referenced files exist on disk.

    Args:
        warnings: Parsed warnings.

    Returns:
        Mapping of repo-relative file paths to their contents.
    """
    warning_text = "\n".join(w.message for w in warnings)
    nix_store_path = re.compile(r"/nix/store/[^/]+-source/([^:]+\.nix)")
    repo_relative_path = re.compile(r"(?<![/\w])(systems|common|users|overlays)/[^:\s]+\.nix")
    paths = {match.group(1) for match in nix_store_path.finditer(warning_text)}
    paths.update(match.group(0) for match in repo_relative_path.finditer(warning_text))
    files = {p: Path(p).read_text() for p in sorted(paths) if Path(p).is_file()}
    if not files and Path("flake.nix").is_file():
        files["flake.nix"] = Path("flake.nix").read_text()
    logger.info("Extracted %d referenced files", len(files))
    return files
def compute_warning_hash(warnings: set[EvalWarning]) -> str:
    """Compute a short, order-independent hash of the warning set.

    Args:
        warnings: Warnings to fingerprint.

    Returns:
        First 8 hex characters of the SHA-256 of the sorted warning lines.
    """
    lines = sorted(f"[{w.system}] {w.message}" for w in warnings)
    digest = hashlib.sha256("\n".join(lines).encode()).hexdigest()
    return digest[:8]
def check_duplicate_pr(warning_hash: str) -> bool:
    """Check whether an open PR already exists for this warning hash.

    Args:
        warning_hash: Hash embedded in PR titles by create_pr.

    Returns:
        True if an open eval-warning-fix PR mentions the hash.

    Raises:
        RuntimeError: If the gh CLI call fails.
    """
    cmd = [
        "gh",
        "pr",
        "list",
        "--state",
        "open",
        "--label",
        "eval-warning-fix",
        "--json",
        "title",
        "--jq",
        ".[].title",
    ]
    result = run_cmd(cmd, check=False)
    if result.returncode != 0:
        msg = f"Failed to check for duplicate PRs: {result.stderr}"
        raise RuntimeError(msg)
    duplicate = any(warning_hash in title for title in result.stdout.splitlines())
    if duplicate:
        logger.info("Duplicate PR found for hash %s", warning_hash)
    return duplicate
def query_ollama(
    warnings: set[EvalWarning],
    files: dict[str, str],
    ollama_url: str,
) -> str | None:
    """Query Ollama for a fix suggestion.

    Builds a single prompt containing every warning plus the contents of the
    referenced files, then asks the model for a REASONING / CHANGES response
    (the CHANGES format is consumed by parse_changes).

    Args:
        warnings: Parsed warnings.
        files: Referenced file contents keyed by repo-relative path.
        ollama_url: Ollama API base URL.

    Returns:
        LLM response text, or None on any HTTP failure.
    """
    warning_text = "\n".join(f"[{w.system}] {w.message}" for w in warnings)
    file_context = "\n".join(f"--- FILE: {path} ---\n{content}\n--- END FILE ---" for path, content in files.items())
    # The conflict-style markers in this prompt must match exactly what
    # parse_changes looks for (<<<<<<< ORIGINAL / ======= / >>>>>>> FIXED).
    prompt = f"""You are a NixOS configuration expert. \
Analyze the following Nix evaluation warnings and suggest fixes.
## Warnings
{warning_text}
## Referenced Files
{file_context}
## Instructions
- Identify the root cause of each warning
- Provide the exact file changes needed to fix the warnings
- Output your response in two clearly separated sections:
1. **REASONING**: Brief explanation of what causes each warning and how to fix it
2. **CHANGES**: For each file that needs changes, output a block like:
FILE: path/to/file.nix
<<<<<<< ORIGINAL
the original lines to replace
=======
the replacement lines
>>>>>>> FIXED
- Only suggest changes for files that exist in the repository
- Do not add unnecessary complexity
- Preserve the existing code style
- If a warning comes from upstream nixpkgs and cannot be fixed in this repo, \
say so in REASONING and do not suggest changes"""
    try:
        response = post(
            f"{ollama_url}/api/generate",
            json={
                "model": "qwen3-coder:30b",
                "prompt": prompt,
                "stream": False,
                # Cap generation length so a runaway response cannot stall the job.
                "options": {"num_predict": 4096},
            },
            timeout=300,
        )
        response.raise_for_status()
    except HTTPError:
        logger.exception("Ollama request failed")
        return None
    return response.json().get("response")
def parse_changes(response: str) -> list[FileChange]:
    """Parse file changes from the **CHANGES** section of the LLM response.

    Expects blocks in the format:
    FILE: path/to/file.nix
    <<<<<<< ORIGINAL
    ...
    =======
    ...
    >>>>>>> FIXED

    Args:
        response: Raw LLM response text.

    Returns:
        List of parsed file changes; empty when no **CHANGES** section exists.
    """
    if "**CHANGES**" not in response:
        logger.warning("LLM response missing **CHANGES** section")
        return []
    # Everything after the first **CHANGES** marker is scanned line by line.
    changes_section = response.split("**CHANGES**", 1)[1]
    changes: list[FileChange] = []
    # Small state machine: `section` is None between blocks, "original" inside
    # the ORIGINAL half, and "fixed" inside the FIXED half of a block.
    current_file = ""
    section: str | None = None
    original_lines: list[str] = []
    fixed_lines: list[str] = []
    for line in changes_section.splitlines():
        stripped = line.strip()
        if stripped.startswith("FILE:"):
            current_file = stripped.removeprefix("FILE:").strip()
        elif stripped == "<<<<<<< ORIGINAL":
            section = "original"
            original_lines = []
        elif stripped == "=======" and section == "original":
            section = "fixed"
            fixed_lines = []
        elif stripped == ">>>>>>> FIXED" and section == "fixed":
            section = None
            # Only emit a change once a FILE: header has been seen.
            if current_file:
                changes.append(FileChange(current_file, "\n".join(original_lines), "\n".join(fixed_lines)))
        elif section == "original":
            original_lines.append(line)
        elif section == "fixed":
            fixed_lines.append(line)
    logger.info("Parsed %d file changes", len(changes))
    return changes
def apply_changes(changes: list[FileChange]) -> int:
    """Apply file changes to the working directory.

    Skips a change when its path escapes the working directory, the file is
    missing, or the original text no longer matches.

    Args:
        changes: Changes to apply.

    Returns:
        Number of changes successfully applied.
    """
    cwd = Path.cwd().resolve()
    applied = 0
    for change in changes:
        target = Path(change.file_path).resolve()
        if not target.is_relative_to(cwd):
            logger.warning("Path traversal blocked: %s", change.file_path)
            continue
        if not target.is_file():
            logger.warning("File not found: %s", change.file_path)
            continue
        content = target.read_text()
        if change.original not in content:
            logger.warning("Original text not found in %s", change.file_path)
            continue
        target.write_text(content.replace(change.original, change.fixed, 1))
        logger.info("Applied fix to %s", change.file_path)
        applied += 1
    return applied
def create_pr(
    warning_hash: str,
    warnings: set[EvalWarning],
    llm_response: str,
    run_url: str,
) -> None:
    """Create a git branch and PR with the applied fixes.

    Stages everything in the working tree, commits it on a branch named after
    the warning hash, force-pushes, and opens a labeled PR whose body quotes
    the warnings and the LLM's reasoning. Returns early without a PR when
    nothing was actually changed on disk.

    Args:
        warning_hash: Short hash for branch naming and deduplication.
        warnings: Original warnings for the PR body.
        llm_response: Full LLM response for extracting reasoning.
        run_url: URL to the triggering build run.
    """
    branch = f"fix/eval-warning-{warning_hash}"
    warning_text = "\n".join(f"[{w.system}] {w.message}" for w in warnings)
    if "**REASONING**" not in llm_response:
        logger.warning("LLM response missing **REASONING** section")
        reasoning = ""
    else:
        # Take the text between **REASONING** and **CHANGES**, capped at 50
        # lines so a rambling model cannot bloat the PR body.
        _, after = llm_response.split("**REASONING**", 1)
        reasoning = "\n".join(after.split("**CHANGES**", 1)[0].strip().splitlines()[:50])
    run_cmd(["git", "config", "user.name", "github-actions[bot]"])
    run_cmd(["git", "config", "user.email", "github-actions[bot]@users.noreply.github.com"])
    run_cmd(["git", "checkout", "-b", branch])
    run_cmd(["git", "add", "-A"])
    # `git diff --cached --quiet` exits 0 when nothing is staged.
    diff_result = run_cmd(["git", "diff", "--cached", "--quiet"], check=False)
    if diff_result.returncode == 0:
        logger.info("No file changes to commit")
        return
    run_cmd(["git", "commit", "-m", f"fix: resolve nix evaluation warnings ({warning_hash})"])
    run_cmd(["git", "push", "origin", branch, "--force"])
    body = f"""## Nix Evaluation Warnings
Detected in [build_systems run]({run_url}):
```
{warning_text}
```
## LLM Analysis (qwen3-coder:30b)
{reasoning}
---
*Auto-generated by fix_eval_warnings. Review carefully before merging.*"""
    run_cmd(
        [
            "gh",
            "pr",
            "create",
            "--title",
            f"fix: resolve nix eval warnings ({warning_hash})",
            "--label",
            "automated",
            "--label",
            "eval-warning-fix",
            "--body",
            body,
        ]
    )
    logger.info("PR created on branch %s", branch)
def main(
    run_id: Annotated[str, typer.Option("--run-id", help="GitHub Actions run ID")],
    repo: Annotated[str, typer.Option("--repo", help="GitHub repository (owner/repo)")],
    ollama_url: Annotated[str, typer.Option("--ollama-url", help="Ollama API base URL")],
    run_url: Annotated[str, typer.Option("--run-url", help="URL to the triggering build run")],
    log_level: Annotated[str, typer.Option("--log-level", "-l", help="Log level")] = "INFO",
) -> None:
    """Detect Nix evaluation warnings and create PRs with LLM-suggested fixes.

    Pipeline: download logs -> parse warnings -> dedupe by hash against open
    PRs -> gather referenced files -> query the LLM -> apply its changes ->
    open a PR. Returns quietly at any stage where there is nothing to do.
    """
    configure_logger(log_level)
    logs = download_logs(run_id, repo)
    warnings = parse_warnings(logs)
    if not warnings:
        return
    warning_hash = compute_warning_hash(warnings)
    if check_duplicate_pr(warning_hash):
        return
    files = extract_referenced_files(warnings)
    llm_response = query_ollama(warnings, files, ollama_url)
    if not llm_response:
        return
    changes = parse_changes(llm_response)
    applied = apply_changes(changes)
    if applied == 0:
        logger.info("No changes could be applied")
        return
    create_pr(warning_hash, warnings, llm_response, run_url)
if __name__ == "__main__":
    # CLI entry point: typer parses options and invokes main().
    typer.run(main)

View File

@@ -1 +0,0 @@
"""Tuya heater control service."""

View File

@@ -1,69 +0,0 @@
"""TinyTuya device controller for heater."""
import logging
import tinytuya
from python.heater.models import ActionResult, DeviceConfig, HeaterStatus
logger = logging.getLogger(__name__)
# DPS (Tuya data point) index mapping for the heater; keys are the string
# indices tinytuya uses in its "dps" payload.
DPS_POWER = "1"  # bool: on/off
DPS_SETPOINT = "101"  # int: target temp (read-only)
DPS_STATE = "102"  # str: "Stop", "Heat", etc.
DPS_UNKNOWN = "104"  # int: unknown
DPS_ERROR = "108"  # int: last error code
class HeaterController:
    """Controls a Tuya heater device via local network."""

    def __init__(self, config: DeviceConfig) -> None:
        """Initialize the tinytuya device handle.

        The short socket timeout and single retry make calls fail fast when
        the device is unreachable instead of blocking the API.
        """
        self.device = tinytuya.Device(config.device_id, config.ip, config.local_key)
        self.device.set_version(config.version)
        self.device.set_socketTimeout(0.5)
        self.device.set_socketRetryLimit(1)

    def status(self) -> HeaterStatus:
        """Get current heater status.

        Returns a powered-off status carrying the error payload in raw_dps
        when the device reports an error.
        """
        data = self.device.status()
        if "Error" in data:
            logger.error("Device error: %s", data)
            return HeaterStatus(power=False, raw_dps={"error": data["Error"]})
        dps = data.get("dps", {})
        return HeaterStatus(
            power=bool(dps.get(DPS_POWER, False)),
            setpoint=dps.get(DPS_SETPOINT),
            state=dps.get(DPS_STATE),
            error_code=dps.get(DPS_ERROR),
            raw_dps=dps,
        )

    def _set_power(self, *, power: bool, action: str) -> ActionResult:
        """Set the power DPS and wrap the outcome in an ActionResult.

        Shared by turn_on/turn_off to avoid duplicating the try/except wrapper.
        """
        try:
            self.device.set_value(index=DPS_POWER, value=power)
        except Exception as error:  # broad on purpose: report any device failure
            logger.exception("Failed to turn %s", action)
            return ActionResult(success=False, action=action, error=str(error))
        return ActionResult(success=True, action=action, power=power)

    def turn_on(self) -> ActionResult:
        """Turn heater on."""
        return self._set_power(power=True, action="on")

    def turn_off(self) -> ActionResult:
        """Turn heater off."""
        return self._set_power(power=False, action="off")

    def toggle(self) -> ActionResult:
        """Toggle heater power state based on the current status."""
        return self.turn_off() if self.status().power else self.turn_on()

View File

@@ -1,85 +0,0 @@
"""FastAPI heater control service."""
import logging
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
from typing import Annotated
import typer
import uvicorn
from fastapi import FastAPI, HTTPException
from python.common import configure_logger
from python.heater.controller import HeaterController
from python.heater.models import ActionResult, DeviceConfig, HeaterStatus
logger = logging.getLogger(__name__)
def create_app(config: DeviceConfig) -> FastAPI:
    """Create the FastAPI application for heater control."""

    @asynccontextmanager
    async def lifespan(app: FastAPI) -> AsyncIterator[None]:
        # Build the controller once at startup and share it via app state.
        app.state.controller = HeaterController(config)
        yield

    app = FastAPI(
        title="Heater Control API",
        description="Fast local control for Tuya heater",
        lifespan=lifespan,
    )

    def _checked(result: ActionResult) -> ActionResult:
        # Translate a failed device action into an HTTP 500.
        if not result.success:
            raise HTTPException(status_code=500, detail=result.error)
        return result

    @app.get("/status")
    def get_status() -> HeaterStatus:
        return app.state.controller.status()

    @app.post("/on")
    def heater_on() -> ActionResult:
        return _checked(app.state.controller.turn_on())

    @app.post("/off")
    def heater_off() -> ActionResult:
        return _checked(app.state.controller.turn_off())

    @app.post("/toggle")
    def heater_toggle() -> ActionResult:
        return _checked(app.state.controller.toggle())

    return app
def serve(
    host: Annotated[str, typer.Option("--host", "-h", help="Host to bind to")],
    port: Annotated[int, typer.Option("--port", "-p", help="Port to bind to")] = 8124,
    log_level: Annotated[str, typer.Option("--log-level", "-l", help="Log level")] = "INFO",
    device_id: Annotated[str | None, typer.Option("--device-id", envvar="TUYA_DEVICE_ID")] = None,
    device_ip: Annotated[str | None, typer.Option("--device-ip", envvar="TUYA_DEVICE_IP")] = None,
    local_key: Annotated[str | None, typer.Option("--local-key", envvar="TUYA_LOCAL_KEY")] = None,
) -> None:
    """Start the heater control API server.

    Raises:
        typer.Exit: With exit code 1 when device credentials are missing.
    """
    configure_logger(log_level)
    logger.info("Starting heater control API server")
    if not device_id or not device_ip or not local_key:
        # typer.Exit takes an integer exit code, not a message; previously the
        # error string was passed as the code. Log it and exit non-zero.
        logger.error("Must provide device ID, IP, and local key")
        raise typer.Exit(code=1)
    config = DeviceConfig(device_id=device_id, ip=device_ip, local_key=local_key)
    app = create_app(config)
    uvicorn.run(app, host=host, port=port)
if __name__ == "__main__":
    # CLI entry point: typer parses options and starts the server.
    typer.run(serve)

View File

@@ -1,31 +0,0 @@
"""Pydantic models for heater API."""
from pydantic import BaseModel, Field
class DeviceConfig(BaseModel):
    """Tuya device configuration."""

    device_id: str  # Tuya device ID
    ip: str  # local network address of the device
    local_key: str  # Tuya local encryption key
    version: float = 3.5  # Tuya protocol version
class HeaterStatus(BaseModel):
    """Current heater status."""

    power: bool  # whether the heater reports power on
    setpoint: int | None = None  # target temperature (read-only on device)
    state: str | None = None  # "Stop", "Heat", etc.
    error_code: int | None = None  # last error code reported by the device
    raw_dps: dict[str, object] = Field(default_factory=dict)  # full DPS payload
class ActionResult(BaseModel):
    """Result of a heater action."""

    success: bool  # whether the device call succeeded
    action: str  # which action was attempted ("on", "off", ...)
    power: bool | None = None  # resulting power state on success
    error: str | None = None  # error description on failure

View File

@@ -1,22 +0,0 @@
"""ORM package exports."""
from __future__ import annotations
from python.orm.base import RichieBase, TableBase
from python.orm.contact import (
Contact,
ContactNeed,
ContactRelationship,
Need,
RelationshipType,
)
__all__ = [
"Contact",
"ContactNeed",
"ContactRelationship",
"Need",
"RelationshipType",
"RichieBase",
"TableBase",
]

View File

@@ -1,80 +0,0 @@
"""Base ORM definitions."""
from __future__ import annotations
from datetime import datetime
from os import getenv
from typing import cast
from sqlalchemy import DateTime, MetaData, create_engine, func
from sqlalchemy.engine import URL, Engine
from sqlalchemy.ext.declarative import AbstractConcreteBase
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
class RichieBase(DeclarativeBase):
    """Base class for all ORM models."""

    # All tables live in this Postgres schema.
    schema_name = "main"
    # naming_convention gives constraints and indexes deterministic names
    # instead of backend-generated ones, keeping DDL stable across runs.
    metadata = MetaData(
        schema=schema_name,
        naming_convention={
            "ix": "ix_%(table_name)s_%(column_0_name)s",
            "uq": "uq_%(table_name)s_%(column_0_name)s",
            "ck": "ck_%(table_name)s_%(constraint_name)s",
            "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
            "pk": "pk_%(table_name)s",
        },
    )
class TableBase(AbstractConcreteBase, RichieBase):
    """Abstract concrete base for tables with IDs and timestamps."""

    __abstract__ = True

    # Surrogate integer primary key.
    id: Mapped[int] = mapped_column(primary_key=True)
    # Row creation time, set by the database via a now() server default.
    created: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
    )
    # Last-modified time: now() on insert, refreshed via onupdate on UPDATE.
    updated: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
    )
def get_connection_info() -> tuple[str, str, str, str, str | None]:
    """Read Postgres connection settings from the environment.

    POSTGRES_PASSWORD is optional; the other four variables are required.

    Returns:
        (database, host, port, username, password) tuple.

    Raises:
        ValueError: If any required variable is unset.
    """
    required = (
        getenv("POSTGRES_DB"),
        getenv("POSTGRES_HOST"),
        getenv("POSTGRES_PORT"),
        getenv("POSTGRES_USER"),
    )
    database, host, port, username = required
    if None in required:
        error = f"Missing environment variables for Postgres connection.\n{database=}\n{host=}\n{port=}\n{username=}\n"
        raise ValueError(error)
    password = getenv("POSTGRES_PASSWORD")
    return cast("tuple[str, str, str, str, str | None]", (database, host, port, username, password))
def get_postgres_engine(*, pool_pre_ping: bool = True) -> Engine:
    """Create a SQLAlchemy engine from environment variables.

    Args:
        pool_pre_ping: Validate pooled connections before handing them out.

    Returns:
        Engine for the configured Postgres database.
    """
    database, host, port, username, password = get_connection_info()
    connection_url = URL.create(
        drivername="postgresql+psycopg",
        username=username,
        password=password,
        host=host,
        port=int(port),
        database=database,
    )
    # pool_recycle avoids reusing connections older than 30 minutes.
    return create_engine(url=connection_url, pool_pre_ping=pool_pre_ping, pool_recycle=1800)

View File

@@ -1,168 +0,0 @@
"""Contact database models."""
from __future__ import annotations
from enum import Enum
from sqlalchemy import ForeignKey, String
from sqlalchemy.orm import Mapped, mapped_column, relationship
from python.orm.base import RichieBase, TableBase
class RelationshipType(str, Enum):
    """Relationship types with default closeness weights.

    Default weight is an integer 1-10 where 10 = closest relationship.
    Users can override this per-relationship in the UI.
    """

    SPOUSE = "spouse"
    PARTNER = "partner"
    PARENT = "parent"
    CHILD = "child"
    SIBLING = "sibling"
    BEST_FRIEND = "best_friend"
    GRANDPARENT = "grandparent"
    GRANDCHILD = "grandchild"
    AUNT_UNCLE = "aunt_uncle"
    NIECE_NEPHEW = "niece_nephew"
    COUSIN = "cousin"
    IN_LAW = "in_law"
    CLOSE_FRIEND = "close_friend"
    FRIEND = "friend"
    MENTOR = "mentor"
    MENTEE = "mentee"
    BUSINESS_PARTNER = "business_partner"
    COLLEAGUE = "colleague"
    MANAGER = "manager"
    DIRECT_REPORT = "direct_report"
    CLIENT = "client"
    ACQUAINTANCE = "acquaintance"
    NEIGHBOR = "neighbor"
    EX = "ex"
    OTHER = "other"

    @property
    def default_weight(self) -> int:
        """Return the default closeness weight (1-10) for this relationship type."""
        # Grouped by weight so related tiers read together.
        tiers = {
            10: (RelationshipType.SPOUSE, RelationshipType.PARTNER),
            9: (RelationshipType.PARENT, RelationshipType.CHILD, RelationshipType.SIBLING),
            8: (RelationshipType.BEST_FRIEND,),
            7: (
                RelationshipType.GRANDPARENT,
                RelationshipType.GRANDCHILD,
                RelationshipType.AUNT_UNCLE,
                RelationshipType.NIECE_NEPHEW,
                RelationshipType.COUSIN,
                RelationshipType.IN_LAW,
            ),
            6: (RelationshipType.CLOSE_FRIEND, RelationshipType.FRIEND),
            5: (RelationshipType.MENTOR, RelationshipType.MENTEE, RelationshipType.BUSINESS_PARTNER),
            4: (
                RelationshipType.COLLEAGUE,
                RelationshipType.MANAGER,
                RelationshipType.DIRECT_REPORT,
                RelationshipType.CLIENT,
            ),
            3: (RelationshipType.ACQUAINTANCE, RelationshipType.NEIGHBOR),
            2: (RelationshipType.EX, RelationshipType.OTHER),
        }
        for weight, members in tiers.items():
            if self in members:
                return weight
        # Every member is listed above; 5 is a neutral fallback for safety.
        return 5

    @property
    def display_name(self) -> str:
        """Return a human-readable display name."""
        return self.value.replace("_", " ").title()
class ContactNeed(RichieBase):
    """Association table: Contact <-> Need."""

    __tablename__ = "contact_need"

    # Composite primary key over both foreign keys; rows are removed by the
    # database when either side is deleted (ON DELETE CASCADE).
    contact_id: Mapped[int] = mapped_column(
        ForeignKey("main.contact.id", ondelete="CASCADE"),
        primary_key=True,
    )
    need_id: Mapped[int] = mapped_column(
        ForeignKey("main.need.id", ondelete="CASCADE"),
        primary_key=True,
    )
class ContactRelationship(RichieBase):
    """Association table: Contact <-> Contact with relationship type and weight."""

    __tablename__ = "contact_relationship"

    # Directed edge: (contact_id -> related_contact_id) forms the composite key.
    contact_id: Mapped[int] = mapped_column(
        ForeignKey("main.contact.id", ondelete="CASCADE"),
        primary_key=True,
    )
    related_contact_id: Mapped[int] = mapped_column(
        ForeignKey("main.contact.id", ondelete="CASCADE"),
        primary_key=True,
    )
    # Stores RelationshipType.value strings (see the enum in this module).
    relationship_type: Mapped[str] = mapped_column(String(100))
    # 1-10 closeness; 5 is the neutral default when no weight is given.
    closeness_weight: Mapped[int] = mapped_column(default=5)
class Contact(TableBase):
    """Contact model: a person plus their needs and relationships."""

    __tablename__ = "contact"

    name: Mapped[str]
    age: Mapped[int | None]
    bio: Mapped[str | None]
    current_job: Mapped[str | None]
    gender: Mapped[str | None]
    goals: Mapped[str | None]
    legal_name: Mapped[str | None]
    profile_pic: Mapped[str | None]
    safe_conversation_starters: Mapped[str | None]
    self_sufficiency_score: Mapped[int | None]
    social_structure_style: Mapped[str | None]
    ssn: Mapped[str | None]
    suffix: Mapped[str | None]
    timezone: Mapped[str | None]
    topics_to_avoid: Mapped[str | None]

    # Many-to-many needs via the contact_need association table.
    needs: Mapped[list[Need]] = relationship(
        "Need",
        secondary=ContactNeed.__table__,
        back_populates="contacts",
    )
    # Relationship rows where this contact is the source (contact_id side).
    related_to: Mapped[list[ContactRelationship]] = relationship(
        "ContactRelationship",
        foreign_keys=[ContactRelationship.contact_id],
        cascade="all, delete-orphan",
    )
    # Relationship rows where this contact is the target (related_contact_id side).
    related_from: Mapped[list[ContactRelationship]] = relationship(
        "ContactRelationship",
        foreign_keys=[ContactRelationship.related_contact_id],
        cascade="all, delete-orphan",
    )
class Need(TableBase):
    """Need/accommodation model (e.g., light sensitive, ADHD)."""

    __tablename__ = "need"

    name: Mapped[str]
    description: Mapped[str | None]
    # Contacts having this need, via the contact_need association table.
    contacts: Mapped[list[Contact]] = relationship(
        "Contact",
        secondary=ContactNeed.__table__,
        back_populates="needs",
    )

View File

@@ -1 +0,0 @@
"""Van weather service - fetches weather with masked GPS location."""

View File

@@ -1,254 +0,0 @@
"""Van weather service - fetches weather with masked GPS for privacy."""
import logging
import time
from datetime import UTC, datetime
from typing import Annotated, Any
import requests
import typer
from apscheduler.schedulers.blocking import BlockingScheduler
from python.common import configure_logger
from python.van_weather.models import Config, DailyForecast, HourlyForecast, Weather
# Map Pirate Weather icon names to Home Assistant weather conditions.
# Call sites fall back to "cloudy" for icons not listed here.
CONDITION_MAP = {
    "clear-day": "sunny",
    "clear-night": "clear-night",
    "rain": "rainy",
    "snow": "snowy",
    "sleet": "snowy-rainy",
    "wind": "windy",
    "fog": "fog",
    "cloudy": "cloudy",
    "partly-cloudy-day": "partlycloudy",
    "partly-cloudy-night": "partlycloudy",
}
logger = logging.getLogger(__name__)
def get_ha_state(url: str, token: str, entity_id: str) -> float:
    """Fetch an entity's state from Home Assistant and parse it as a float.

    Raises:
        requests.HTTPError: On a non-2xx response.
        ValueError: If the state is not numeric.
    """
    headers = {"Authorization": f"Bearer {token}"}
    response = requests.get(f"{url}/api/states/{entity_id}", headers=headers, timeout=30)
    response.raise_for_status()
    state = response.json()["state"]
    return float(state)
def parse_daily_forecast(data: dict[str, dict[str, Any]]) -> list[DailyForecast]:
    """Parse up to 8 days of daily forecast from a Pirate Weather response."""
    days = data.get("daily", {}).get("data", [])
    forecasts: list[DailyForecast] = []
    for day in days[:8]:
        time_stamp = day.get("time")
        if not time_stamp:
            # Entries without a timestamp cannot be placed on a timeline.
            continue
        forecasts.append(
            DailyForecast(
                date_time=datetime.fromtimestamp(time_stamp, tz=UTC).isoformat(),
                condition=CONDITION_MAP.get(day.get("icon", ""), "cloudy"),
                temperature=day.get("temperatureHigh"),
                templow=day.get("temperatureLow"),
                precipitation_probability=day.get("precipProbability"),
            )
        )
    return forecasts
def parse_hourly_forecast(data: dict[str, dict[str, Any]]) -> list[HourlyForecast]:
    """Parse up to 48 hours of hourly forecast from a Pirate Weather response."""
    hours = data.get("hourly", {}).get("data", [])
    forecasts: list[HourlyForecast] = []
    for hour in hours[:48]:
        time_stamp = hour.get("time")
        if not time_stamp:
            # Entries without a timestamp cannot be placed on a timeline.
            continue
        forecasts.append(
            HourlyForecast(
                date_time=datetime.fromtimestamp(time_stamp, tz=UTC).isoformat(),
                condition=CONDITION_MAP.get(hour.get("icon", ""), "cloudy"),
                temperature=hour.get("temperature"),
                precipitation_probability=hour.get("precipProbability"),
            )
        )
    return forecasts
def fetch_weather(api_key: str, lat: float, lon: float) -> Weather:
    """Fetch current conditions and forecasts from the Pirate Weather API."""
    endpoint = f"https://api.pirateweather.net/forecast/{api_key}/{lat},{lon}"
    response = requests.get(endpoint, params={"units": "us"}, timeout=30)
    response.raise_for_status()
    data = response.json()
    current = data.get("currently", {})
    return Weather(
        temperature=current.get("temperature"),
        feels_like=current.get("apparentTemperature"),
        humidity=current.get("humidity"),
        wind_speed=current.get("windSpeed"),
        wind_bearing=current.get("windBearing"),
        condition=CONDITION_MAP.get(current.get("icon", ""), "cloudy"),
        summary=current.get("summary"),
        pressure=current.get("pressure"),
        visibility=current.get("visibility"),
        daily_forecasts=parse_daily_forecast(data),
        hourly_forecasts=parse_hourly_forecast(data),
    )
def post_to_ha(url: str, token: str, weather: Weather) -> None:
    """Post weather data to Home Assistant, retrying on network failures.

    Retries up to 6 times with a 10s delay between attempts; gives up with a
    logged exception rather than raising, so the next scheduled run can retry.
    """
    max_retries = 6
    retry_delay = 10
    for attempt in range(1, max_retries + 1):
        try:
            _post_weather_data(url, token, weather)
        except requests.RequestException:
            if attempt == max_retries:
                logger.exception(f"Failed to post weather to HA after {max_retries} attempts")
                return
            logger.warning(f"Post to HA failed (attempt {attempt}/{max_retries}), retrying in {retry_delay}s")
            time.sleep(retry_delay)
        else:
            # Stop after the first successful post; previously the loop kept
            # re-posting the same data for every remaining attempt.
            return
def _post_weather_data(url: str, token: str, weather: Weather) -> None:
    """Post all weather data to Home Assistant. Raises on failure.

    Writes the current conditions as individual sensor entities, then the
    daily and hourly forecasts as sensors whose state is the entry count and
    whose "forecast" attribute holds the full list.

    Raises:
        requests.HTTPError: On any non-2xx response from Home Assistant.
    """
    headers = {"Authorization": f"Bearer {token}"}
    # Post current weather as individual sensors.
    sensors = {
        "sensor.van_weather_condition": {
            "state": weather.condition or "unknown",
            "attributes": {"friendly_name": "Van Weather Condition"},
        },
        "sensor.van_weather_temperature": {
            "state": weather.temperature,
            "attributes": {"unit_of_measurement": "°F", "device_class": "temperature"},
        },
        "sensor.van_weather_apparent_temperature": {
            "state": weather.feels_like,
            "attributes": {"unit_of_measurement": "°F", "device_class": "temperature"},
        },
        "sensor.van_weather_humidity": {
            # API humidity is a 0-1 fraction; HA expects a percentage.
            "state": int((weather.humidity or 0) * 100),
            "attributes": {"unit_of_measurement": "%", "device_class": "humidity"},
        },
        "sensor.van_weather_pressure": {
            "state": weather.pressure,
            "attributes": {"unit_of_measurement": "mbar", "device_class": "pressure"},
        },
        "sensor.van_weather_wind_speed": {
            "state": weather.wind_speed,
            "attributes": {"unit_of_measurement": "mph", "device_class": "wind_speed"},
        },
        "sensor.van_weather_wind_bearing": {
            "state": weather.wind_bearing,
            "attributes": {"unit_of_measurement": "°"},
        },
        "sensor.van_weather_visibility": {
            "state": weather.visibility,
            "attributes": {"unit_of_measurement": "mi"},
        },
    }
    for entity_id, data in sensors.items():
        # Skip sensors whose value is missing rather than posting null states.
        if data["state"] is not None:
            response = requests.post(f"{url}/api/states/{entity_id}", headers=headers, json=data, timeout=30)
            response.raise_for_status()
    # Post daily forecast as JSON attribute sensor.
    daily_forecast = [
        {
            "datetime": daily_forecast.date_time.isoformat(),
            "condition": daily_forecast.condition,
            "temperature": daily_forecast.temperature,
            "templow": daily_forecast.templow,
            # precipProbability is a 0-1 fraction; convert to percent.
            "precipitation_probability": int((daily_forecast.precipitation_probability or 0) * 100),
        }
        for daily_forecast in weather.daily_forecasts
    ]
    response = requests.post(
        f"{url}/api/states/sensor.van_weather_forecast_daily",
        headers=headers,
        json={"state": len(daily_forecast), "attributes": {"forecast": daily_forecast}},
        timeout=30,
    )
    response.raise_for_status()
    # Post hourly forecast as JSON attribute sensor.
    hourly_forecast = [
        {
            "datetime": hourly_forecast.date_time.isoformat(),
            "condition": hourly_forecast.condition,
            "temperature": hourly_forecast.temperature,
            "precipitation_probability": int((hourly_forecast.precipitation_probability or 0) * 100),
        }
        for hourly_forecast in weather.hourly_forecasts
    ]
    response = requests.post(
        f"{url}/api/states/sensor.van_weather_forecast_hourly",
        headers=headers,
        json={"state": len(hourly_forecast), "attributes": {"forecast": hourly_forecast}},
        timeout=30,
    )
    response.raise_for_status()
def update_weather(config: Config) -> None:
"""Fetch GPS, mask it, get weather, post to HA."""
lat = get_ha_state(config.ha_url, config.ha_token, config.lat_entity)
lon = get_ha_state(config.ha_url, config.ha_token, config.lon_entity)
masked_lat = round(lat, config.mask_decimals)
masked_lon = round(lon, config.mask_decimals)
logger.info(f"Masked location: {masked_lat}, {masked_lon}")
weather = fetch_weather(config.pirate_weather_api_key, masked_lat, masked_lon)
logger.info(f"Weather: {weather.temperature}°F, {weather.condition}")
post_to_ha(config.ha_url, config.ha_token, weather)
logger.info("Posted weather to HA")
def main(
ha_url: Annotated[str, typer.Option(envvar="HA_URL")],
ha_token: Annotated[str, typer.Option(envvar="HA_TOKEN")],
api_key: Annotated[str, typer.Option(envvar="PIRATE_WEATHER_API_KEY")],
interval: Annotated[int, typer.Option(help="Poll interval in seconds")] = 900,
log_level: Annotated[str, typer.Option()] = "INFO",
) -> None:
"""Fetch weather for van using masked GPS location."""
configure_logger(log_level)
config = Config(ha_url=ha_url, ha_token=ha_token, pirate_weather_api_key=api_key)
logger.info(f"Starting van weather service, polling every {interval}s")
scheduler = BlockingScheduler()
scheduler.add_job(
update_weather,
"interval",
seconds=interval,
args=[config],
next_run_time=datetime.now(UTC),
)
scheduler.start()
if __name__ == "__main__":
typer.run(main)

View File

@@ -1,61 +0,0 @@
"""Models for van weather service."""
from datetime import datetime
from pydantic import BaseModel, field_serializer
class Config(BaseModel):
"""Service configuration."""
ha_url: str
ha_token: str
pirate_weather_api_key: str
lat_entity: str = "sensor.gps_latitude"
lon_entity: str = "sensor.gps_longitude"
mask_decimals: int = 1 # ~11km accuracy
class DailyForecast(BaseModel):
"""Daily forecast entry."""
date_time: datetime
condition: str | None = None
temperature: float | None = None # High
templow: float | None = None # Low
precipitation_probability: float | None = None
@field_serializer("date_time")
def serialize_date_time(self, date_time: datetime) -> str:
"""Serialize datetime to ISO format."""
return date_time.isoformat()
class HourlyForecast(BaseModel):
"""Hourly forecast entry."""
date_time: datetime
condition: str | None = None
temperature: float | None = None
precipitation_probability: float | None = None
@field_serializer("date_time")
def serialize_date_time(self, date_time: datetime) -> str:
"""Serialize datetime to ISO format."""
return date_time.isoformat()
class Weather(BaseModel):
"""Weather data from Pirate Weather."""
temperature: float | None = None
feels_like: float | None = None
humidity: float | None = None
wind_speed: float | None = None
wind_bearing: float | None = None
condition: str | None = None
summary: str | None = None
pressure: float | None = None
visibility: float | None = None
daily_forecasts: list[DailyForecast] = []
hourly_forecasts: list[HourlyForecast] = []

View File

@@ -6,14 +6,14 @@
default = pkgs.mkShell { default = pkgs.mkShell {
NIX_CONFIG = "extra-experimental-features = nix-command flakes ca-derivations"; NIX_CONFIG = "extra-experimental-features = nix-command flakes ca-derivations";
nativeBuildInputs = with pkgs; [ nativeBuildInputs = with pkgs; [
nix
home-manager
git
my_python
ssh-to-age
gnupg
age age
busybox
git
gnupg
home-manager
my_python
nix
ssh-to-age
]; ];
}; };
} }

View File

@@ -2,6 +2,7 @@
{ {
imports = [ imports = [
"${inputs.self}/users/richie" "${inputs.self}/users/richie"
"${inputs.self}/users/gaming"
"${inputs.self}/common/global" "${inputs.self}/common/global"
"${inputs.self}/common/optional/desktop.nix" "${inputs.self}/common/optional/desktop.nix"
"${inputs.self}/common/optional/docker.nix" "${inputs.self}/common/optional/docker.nix"
@@ -26,6 +27,15 @@
}; };
services = { services = {
displayManager = {
enable = true;
autoLogin = {
user = "gaming";
enable = true;
};
defaultSession = "plasma";
};
openssh.ports = [ 262 ]; openssh.ports = [ 262 ];
snapshot_manager.path = ./snapshot_config.toml; snapshot_manager.path = ./snapshot_config.toml;

View File

@@ -1,42 +1,27 @@
{ {
services.ollama = { services = {
ollama = {
user = "ollama"; user = "ollama";
host = "0.0.0.0";
enable = true; enable = true;
syncModels = true;
loadModels = [ loadModels = [
"codellama:7b" "codellama:7b"
"deepscaler:1.5b" "deepseek-r1:8b"
"deepseek-r1:14b" "deepseek-r1:14b"
"deepseek-r1:32b" "deepseek-r1:32b"
"deepseek-r1:8b"
"devstral-small-2:24b"
"dolphin-llama3:70b"
"dolphin-llama3:8b"
"functiongemma:270m"
"gemma3:12b"
"gemma3:27b"
"gpt-oss:20b"
"huihui_ai/dolphin3-abliterated:8b"
"lfm2:24b"
"llama3.1:8b"
"llama3.2:1b"
"llama3.2:3b" "llama3.2:3b"
"magistral:24b" "llama2-uncensored:7b"
"ministral-3:14b" "mistral-nemo:12b"
"nemotron-3-nano:30b" "dolphin-mixtral:8x7b"
"glm-4.7-flash:q4_K_M" "qwq:32b"
"qwen3-coder:30b" "Qihoo360-Light-R1-32B"
"qwen3-vl:32b"
"qwen3:14b"
"qwen3.5:27b"
"qwen3.5:35b"
"translategemma:12b"
"translategemma:27b"
"translategemma:4b"
]; ];
models = "/zfs/models"; models = "/zfs/models";
openFirewall = true; openFirewall = true;
}; };
# open-webui = {
# enable = true;
# openFirewall = true;
# host = "0.0.0.0";
# };
};
} }

View File

@@ -1,33 +0,0 @@
{
pkgs,
inputs,
...
}:
{
networking.firewall.allowedTCPPorts = [ 8124 ];
systemd.services.heater-api = {
description = "Tuya Heater Control API";
after = [ "network.target" ];
wantedBy = [ "multi-user.target" ];
environment = {
PYTHONPATH = "${inputs.self}/";
};
serviceConfig = {
Type = "simple";
ExecStart = "${pkgs.my_python}/bin/python -m python.heater.main --host 0.0.0.0 --port 8124";
EnvironmentFile = "/etc/heater.env";
Restart = "on-failure";
RestartSec = "5s";
StandardOutput = "journal";
StandardError = "journal";
NoNewPrivileges = true;
ProtectSystem = "strict";
ProtectHome = "read-only";
PrivateTmp = true;
ReadOnlyPaths = [ "${inputs.self}" ];
};
};
}

View File

@@ -1,4 +1,3 @@
{ pkgs, ... }:
{ {
users = { users = {
users.hass = { users.hass = {
@@ -13,7 +12,14 @@
enable = true; enable = true;
openFirewall = true; openFirewall = true;
config = { config = {
http.server_port = 8123; http = {
server_port = 8123;
server_host = [
"192.168.90.35"
"192.168.95.35"
"127.0.0.1"
];
};
homeassistant = { homeassistant = {
time_zone = "America/New_York"; time_zone = "America/New_York";
unit_system = "us_customary"; unit_system = "us_customary";
@@ -21,9 +27,6 @@
packages = { packages = {
victron_modbuss = "!include ${./home_assistant/victron_modbuss.yaml}"; victron_modbuss = "!include ${./home_assistant/victron_modbuss.yaml}";
battery_sensors = "!include ${./home_assistant/battery_sensors.yaml}"; battery_sensors = "!include ${./home_assistant/battery_sensors.yaml}";
gps_location = "!include ${./home_assistant/gps_location.yaml}";
heater = "!include ${./home_assistant/heater.yaml}";
van_weather = "!include ${./home_assistant/van_weather_template.yaml}";
}; };
}; };
recorder = { recorder = {
@@ -69,17 +72,10 @@
py-improv-ble-client # for esphome py-improv-ble-client # for esphome
pymodbus # for modbus pymodbus # for modbus
pyopenweathermap # for weather pyopenweathermap # for weather
pymetno # for met.no weather
uiprotect # for ubiquiti integration uiprotect # for ubiquiti integration
unifi-discovery # for ubiquiti integration unifi-discovery # for ubiquiti integration
jsonpath # for rest sensors
typedmonarchmoney # for monarch
]; ];
extraComponents = [ "isal" ]; extraComponents = [ "isal" ];
customComponents = with pkgs.home-assistant-custom-components; [
pirate-weather
];
}; };
esphome = { esphome = {
enable = true; enable = true;

View File

@@ -1,122 +0,0 @@
title: Van Overview
views:
- title: Overview
path: overview
type: sections
sections:
# Battery Status Section
- type: grid
title: Battery Status
cards:
- type: gauge
entity: sensor.jk0_soc
name: Battery 0
min: 0
max: 100
severity:
green: 50
yellow: 20
red: 0
needle: true
- type: gauge
entity: sensor.jk1_soc
name: Battery 1
min: 0
max: 100
severity:
green: 50
yellow: 20
red: 0
needle: true
- type: entity
entity: sensor.jk0_total_voltage
name: Batt 0 Voltage
- type: entity
entity: sensor.jk1_total_voltage
name: Batt 1 Voltage
# Solar Production Section
- type: grid
title: Solar Production
cards:
- type: gauge
entity: sensor.solar_wattage
name: Solar Now
min: 0
max: 400
severity:
green: 100
yellow: 50
red: 0
needle: true
- type: entity
entity: sensor.solar_yield_today
name: Today's Yield
- type: entity
entity: sensor.estimated_energy_production_today
name: Forecast Today
- type: entity
entity: sensor.estimated_energy_production_tomorrow
name: Forecast Tomorrow
# Environment Section
- type: grid
title: Van Environment
cards:
- type: entity
entity: sensor.environment_temperature
name: Inside Temp
- type: entity
entity: sensor.environment_humidity
name: Inside Humidity
- type: entity
entity: sensor.current_weather_temperature
name: Outside Temp
- type: entity
entity: sensor.current_weather_humidity
name: Outside Humidity
- type: entity
entity: sensor.current_weather_condition
name: Weather
# Charger Status Section
- type: grid
title: Charger
cards:
- type: entity
entity: sensor.charger_dc_wattage
name: Charging Power
- type: entity
entity: sensor.charger_charge_state
name: Charge State
- type: entity
entity: sensor.charger_dc_wattage_daily
name: Charged Today
# Location Section
- type: grid
title: Location
cards:
- type: entity
entity: sensor.gps_location
name: Current Location
- type: entity
entity: sensor.gps_speed
name: Speed
- type: entity
entity: binary_sensor.gps_fix_available
name: GPS Fix

View File

@@ -1,107 +0,0 @@
automation:
- id: update_home_location_from_gps
alias: Update Home Location from GPS
description: Updates the Home zone location based on GPS coordinates from Modbus
trigger:
- platform: state
entity_id:
- sensor.gps_latitude
- sensor.gps_longitude
condition:
- condition: template
value_template: >-
{% set lat = states('sensor.gps_latitude')|float(0) %}
{% set lon = states('sensor.gps_longitude')|float(0) %}
{% set fix = states('sensor.gps_fix')|int(0) %}
{{ lat != 0 and lon != 0 and fix > 0 }}
action:
- service: homeassistant.set_location
data:
latitude: "{{ states('sensor.gps_latitude') }}"
longitude: "{{ states('sensor.gps_longitude') }}"
- id: update_home_location_on_startup
alias: Update Home Location on Startup
description: Sets home location from last known GPS coordinates on HA restart
trigger:
- platform: homeassistant
event: start
condition:
- condition: template
value_template: >-
{% set lat = states('sensor.gps_latitude')|float(0) %}
{% set lon = states('sensor.gps_longitude')|float(0) %}
{{ lat != 0 and lon != 0 }}
action:
- delay:
seconds: 10
- service: homeassistant.set_location
data:
latitude: "{{ states('sensor.gps_latitude') }}"
longitude: "{{ states('sensor.gps_longitude') }}"
- id: refresh_weather_hourly
alias: Refresh Weather Hourly
description: Forces weather to refresh hourly with current GPS location
trigger:
- platform: time_pattern
hours: "/1"
condition:
- condition: state
entity_id: binary_sensor.gps_fix_available
state: "on"
action:
- service: homeassistant.update_entity
target:
entity_id: weather.home
template:
- sensor:
- name: GPS Location
unique_id: gps_location
state: >-
{% set lat = states('sensor.gps_latitude')|float(0) %}
{% set lon = states('sensor.gps_longitude')|float(0) %}
{% if lat != 0 and lon != 0 %}
{{ lat }}, {{ lon }}
{% else %}
unavailable
{% endif %}
attributes:
latitude: "{{ states('sensor.gps_latitude') }}"
longitude: "{{ states('sensor.gps_longitude') }}"
speed: "{{ states('sensor.gps_speed') }}"
course: "{{ states('sensor.gps_course') }}"
altitude: "{{ states('sensor.gps_altitude') }}"
satellites: "{{ states('sensor.gps_satellites') }}"
fix: "{{ states('sensor.gps_fix') }}"
last_updated: "{{ now().isoformat() }}"
# Weather sensors based on current GPS location
- name: Current Weather Temperature
unique_id: current_weather_temperature
unit_of_measurement: "°F"
device_class: temperature
state: "{{ state_attr('weather.home', 'temperature') }}"
- name: Current Weather Humidity
unique_id: current_weather_humidity
unit_of_measurement: "%"
device_class: humidity
state: "{{ state_attr('weather.home', 'humidity') }}"
- name: Current Weather Condition
unique_id: current_weather_condition
state: "{{ states('weather.home') }}"
- name: Current Weather Wind Speed
unique_id: current_weather_wind_speed
unit_of_measurement: "mph"
device_class: wind_speed
state: "{{ state_attr('weather.home', 'wind_speed') }}"
- binary_sensor:
- name: GPS Fix Available
unique_id: gps_fix_available
device_class: connectivity
state: "{{ states('sensor.gps_fix')|int(0) > 0 }}"

View File

@@ -1,41 +0,0 @@
rest:
- resource: http://localhost:8124/status
scan_interval: 30
sensor:
- name: "Heater Setpoint"
unique_id: heater_setpoint
value_template: "{{ value_json.setpoint }}"
unit_of_measurement: "F"
device_class: temperature
- name: "Heater State"
unique_id: heater_state
value_template: "{{ value_json.state }}"
- name: "Heater Error Code"
unique_id: heater_error_code
value_template: "{{ value_json.error_code }}"
binary_sensor:
- name: "Heater Power"
unique_id: heater_power
value_template: "{{ value_json.power }}"
device_class: running
rest_command:
heater_on:
url: http://localhost:8124/on
method: POST
heater_off:
url: http://localhost:8124/off
method: POST
heater_toggle:
url: http://localhost:8124/toggle
method: POST
template:
- switch:
- unique_id: heater_switch
name: Heater
state: "{{ is_state('binary_sensor.heater_power', 'on') }}"
turn_on:
- action: rest_command.heater_on
turn_off:
- action: rest_command.heater_off

View File

@@ -1,19 +0,0 @@
template:
- weather:
- name: "Van Weather"
unique_id: van_weather_template
condition_template: "{{ states('sensor.van_weather_condition') }}"
temperature_template: "{{ states('sensor.van_weather_temperature') }}"
temperature_unit: "°F"
apparent_temperature_template: "{{ states('sensor.van_weather_apparent_temperature') }}"
humidity_template: "{{ states('sensor.van_weather_humidity') }}"
pressure_template: "{{ states('sensor.van_weather_pressure') }}"
pressure_unit: "mbar"
wind_speed_template: "{{ states('sensor.van_weather_wind_speed') }}"
wind_speed_unit: "mph"
wind_bearing_template: "{{ states('sensor.van_weather_wind_bearing') }}"
visibility_template: "{{ states('sensor.van_weather_visibility') }}"
visibility_unit: "mi"
forecast_daily_template: "{{ state_attr('sensor.van_weather_forecast_daily', 'forecast') }}"
forecast_hourly_template: "{{ state_attr('sensor.van_weather_forecast_hourly', 'forecast') }}"
attribution_template: "Powered by Pirate Weather"

View File

@@ -5,7 +5,6 @@ modbus:
port: 502 port: 502
timeout: 3 timeout: 3
delay: 2 delay: 2
lazy_error_count: 10
sensors: sensors:
# ---- SOLAR CHARGER (Unit ID 226) ---- # ---- SOLAR CHARGER (Unit ID 226) ----
- name: Solar Voltage - name: Solar Voltage
@@ -78,7 +77,7 @@ modbus:
# GPS # GPS
- name: GPS Latitude - name: GPS Latitude
slave: 100 slave: 1
address: 2800 address: 2800
input_type: holding input_type: holding
data_type: int32 data_type: int32
@@ -88,7 +87,7 @@ modbus:
unique_id: gps_latitude unique_id: gps_latitude
- name: GPS Longitude - name: GPS Longitude
slave: 100 slave: 1
address: 2802 address: 2802
input_type: holding input_type: holding
data_type: int32 data_type: int32
@@ -98,7 +97,7 @@ modbus:
unique_id: gps_longitude unique_id: gps_longitude
- name: GPS Course - name: GPS Course
slave: 100 slave: 1
address: 2804 address: 2804
input_type: holding input_type: holding
data_type: uint16 data_type: uint16
@@ -109,7 +108,7 @@ modbus:
unique_id: gps_course unique_id: gps_course
- name: GPS Speed - name: GPS Speed
slave: 100 slave: 1
address: 2805 address: 2805
input_type: holding input_type: holding
data_type: uint16 data_type: uint16
@@ -120,7 +119,7 @@ modbus:
unique_id: gps_speed unique_id: gps_speed
- name: GPS Fix - name: GPS Fix
slave: 100 slave: 1
address: 2806 address: 2806
input_type: holding input_type: holding
data_type: uint16 data_type: uint16
@@ -129,7 +128,7 @@ modbus:
unique_id: gps_fix unique_id: gps_fix
- name: GPS Satellites - name: GPS Satellites
slave: 100 slave: 1
address: 2807 address: 2807
input_type: holding input_type: holding
data_type: uint16 data_type: uint16
@@ -138,7 +137,7 @@ modbus:
unique_id: gps_satellites unique_id: gps_satellites
- name: GPS Altitude - name: GPS Altitude
slave: 100 slave: 1
address: 2808 address: 2808
input_type: holding input_type: holding
data_type: int32 data_type: int32

View File

@@ -1,8 +0,0 @@
{ pkgs, ... }:
{
services.nix-serve = {
package = pkgs.nix-serve-ng;
enable = true;
openFirewall = true;
};
}

View File

@@ -1,35 +0,0 @@
{
pkgs,
inputs,
...
}:
{
systemd.services.van-weather = {
description = "Van Weather Service";
after = [
"network.target"
"home-assistant.service"
];
requires = [ "home-assistant.service" ];
wantedBy = [ "multi-user.target" ];
environment = {
PYTHONPATH = "${inputs.self}/";
};
serviceConfig = {
Type = "simple";
ExecStart = "${pkgs.my_python}/bin/python -m python.van_weather.main";
EnvironmentFile = "/etc/van_weather.env";
Restart = "on-failure";
RestartSec = "5s";
StandardOutput = "journal";
StandardError = "journal";
NoNewPrivileges = true;
ProtectSystem = "strict";
ProtectHome = "read-only";
PrivateTmp = true;
ReadOnlyPaths = [ "${inputs.self}" ];
};
};
}

View File

@@ -0,0 +1,60 @@
{
config,
pkgs,
...
}:
let
vars = import ../vars.nix;
in
{
# environment.systemPackages = with pkgs; [ php.withExtensions ({ all, ... }: [ all.pdo_pgsql ]) ];
services.httpd = {
enable = true;
adminAddr = "webmaster@localhost";
enablePHP = true;
phpPackage = pkgs.php.withExtensions (
{ enabled, all }:
enabled
++ [
all.pdo
all.pdo_pgsql
]
);
extraModules = [ "rewrite" ];
virtualHosts.great_cloud_of_witnesses = {
hostName = "localhost";
listen = [
{
ip = "*";
port = 8092;
}
];
documentRoot = "${vars.services}/great_cloud_of_witnesses";
extraConfig = ''
<Directory "${vars.services}/great_cloud_of_witnesses">
AllowOverride All
Require all granted
</Directory>
'';
};
};
sops.secrets.gcw_password = {
sopsFile = ../../../users/secrets.yaml;
neededForUsers = true;
};
users = {
users.gcw = {
isSystemUser = true;
hashedPasswordFile = config.sops.secrets.gcw_password.path;
group = "gcw";
};
groups.gcw = { };
};
}

View File

@@ -61,7 +61,25 @@ in
"luks-root-pool-wwn-0x55cd2e4150f01556-part2" = "luks-root-pool-wwn-0x55cd2e4150f01556-part2" =
makeLuksSSD "/dev/disk/by-id/wwn-0x55cd2e4150f01556-part2"; makeLuksSSD "/dev/disk/by-id/wwn-0x55cd2e4150f01556-part2";
# Media pool
"luks-media_pool-nvme-INTEL_SSDPEK1A118GA_BTOC14120V2J118B-part1" =
makeLuksSSD "/dev/disk/by-id/nvme-INTEL_SSDPEK1A118GA_BTOC14120V2J118B-part1";
"luks-media_pool-nvme-INTEL_SSDPEK1A118GA_BTOC14120WAG118B-part1" =
makeLuksSSD "/dev/disk/by-id/nvme-INTEL_SSDPEK1A118GA_BTOC14120WAG118B-part1";
"luks-media_pool-nvme-INTEL_SSDPE2ME012T4_CVMD5130000G1P2HGN-part1" =
makeLuksSSD "/dev/disk/by-id/nvme-INTEL_SSDPE2ME012T4_CVMD5130000G1P2HGN-part1";
"luks-media_pool-nvme-INTEL_SSDPE2ME012T4_CVMD5130000U1P2HGN-part1" =
makeLuksSSD "/dev/disk/by-id/nvme-INTEL_SSDPE2ME012T4_CVMD5130000U1P2HGN-part1";
# Storage pool # Storage pool
"luks-storage_pool-nvme-Samsung_SSD_970_EVO_Plus_2TB_S6S2NS0T834822N-part1" =
makeLuksSSD "/dev/disk/by-id/nvme-Samsung_SSD_970_EVO_Plus_2TB_S6S2NS0T834822N-part1";
"luks-storage_pool-nvme-Samsung_SSD_970_EVO_Plus_2TB_S6S2NS0T834817F-part1" =
makeLuksSSD "/dev/disk/by-id/nvme-Samsung_SSD_970_EVO_Plus_2TB_S6S2NS0T834817F-part1";
"luks-storage_pool-nvme-INTEL_MEMPEK1W016GA_PHBT828104DF016D-part1" =
makeLuksSSD "/dev/disk/by-id/nvme-INTEL_MEMPEK1W016GA_PHBT828104DF016D-part1";
"luks-storage_pool-nvme-INTEL_MEMPEK1W016GA_PHBT828105A8016D-part1" =
makeLuksSSD "/dev/disk/by-id/nvme-INTEL_MEMPEK1W016GA_PHBT828105A8016D-part1";
"luks-storage_pool-wwn-0x5000cca23bc438dd-part1" = "luks-storage_pool-wwn-0x5000cca23bc438dd-part1" =
makeLuksDevice "/dev/disk/by-id/wwn-0x5000cca23bc438dd-part1"; makeLuksDevice "/dev/disk/by-id/wwn-0x5000cca23bc438dd-part1";
"luks-storage_pool-wwn-0x5000cca23bd035f5-part1" = "luks-storage_pool-wwn-0x5000cca23bd035f5-part1" =

View File

@@ -112,9 +112,8 @@ in
user = "github-runners"; user = "github-runners";
group = "github-runners"; group = "github-runners";
extraPackages = with pkgs; [ extraPackages = with pkgs; [
gitMinimal busybox
gh nixfmt-rfc-style
nixfmt
nixos-rebuild nixos-rebuild
treefmt treefmt
my_python my_python

View File

@@ -15,27 +15,27 @@ sudo zpool add storage -o ashift=12 logs mirror
sudo zpool create scratch -o ashift=12 -O acltype=posixacl -O atime=off -O dnodesize=auto -O xattr=sa -O compression=zstd -O encryption=aes-256-gcm -O keyformat=hex -O keylocation=file:///key -m /zfs/scratch sudo zpool create scratch -o ashift=12 -O acltype=posixacl -O atime=off -O dnodesize=auto -O xattr=sa -O compression=zstd -O encryption=aes-256-gcm -O keyformat=hex -O keylocation=file:///key -m /zfs/scratch
# media datasets # media datasets
sudo zfs create media/secure -o encryption=aes-256-gcm -o keyformat=hex -o keylocation=file:///root/zfs.key sudo zfs create -o compression=zstd-9 media/docker
sudo zfs create media/secure/docker -o compression=zstd-9 sudo zfs create -o compression=zstd-9 -o sync=disabled media/github-runners
sudo zfs create media/secure/github-runners -o compression=zstd-9 -o sync=disabled sudo zfs create -o copies=3 media/notes
sudo zfs create media/secure/home_assistant -o compression=zstd-19 sudo zfs create -o compression=zstd-9 media/plex
sudo zfs create media/secure/notes -o copies=2 sudo zfs create -o compression=zstd-9 media/services
sudo zfs create media/secure/postgres -o recordsize=16k -o primarycache=metadata sudo zfs create -o compression=zstd-19 media/home_assistant
sudo zfs create media/secure/services -o compression=zstd-9 sudo zfs create -o exec=off media/share
sudo zfs create media/secure/share -o mountpoint=/zfs/media/share -o exec=off sudo zfs create -o recordsize=16k -o primarycache=metadata -o mountpoint=/zfs/media/database/postgres media/postgres
# scratch datasets # scratch datasets
sudo zfs create scratch/kafka -o mountpoint=/zfs/scratch/kafka -o recordsize=1M sudo zfs create scratch/kafka -o mountpoint=/zfs/scratch/kafka -o recordsize=1M
sudo zfs create scratch/transmission -o mountpoint=/zfs/scratch/transmission -o recordsize=16k -o sync=disabled sudo zfs create scratch/transmission -o mountpoint=/zfs/scratch/transmission -o recordsize=16k -o sync=disabled
# storage datasets # storage datasets
sudo zfs create storage/ollama -o recordsize=1M -o compression=zstd-19 -o sync=disabled sudo zfs create -o recordsize=1M -o compression=zstd-19 storage/archive
sudo zfs create storage/secure -o encryption=aes-256-gcm -o keyformat=hex -o keylocation=file:///root/zfs.key sudo zfs create -o compression=zstd-19 storage/main
sudo zfs create storage/secure/archive -o recordsize=1M -o compression=zstd-19 sudo zfs create -o recordsize=16K -o compression=zstd-19 -o copies=2 storage/photos
sudo zfs create storage/secure/library -o recordsize=1M -o compression=zstd-19 sudo zfs create -o recordsize=1M -o compression=zstd-19 storage/plex
sudo zfs create storage/secure/main -o compression=zstd-19 sudo zfs create -o compression=zstd-19 -o copies=3 storage/secrets
sudo zfs create storage/secure/photos -o recordsize=16K -o compression=zstd-19 -o copies=2 sudo zfs create -o compression=zstd-19 storage/syncthing
sudo zfs create storage/secure/plex -o recordsize=1M -o compression=zstd-19 sudo zfs create -o recordsize=1M -o compression=zstd-9 -o exec=off -o sync=disabled storage/qbitvpn
sudo zfs create storage/secure/secrets -o compression=zstd-19 -o copies=3 sudo zfs create -o recordsize=1M -o compression=zstd-9 -o exec=off -o sync=disabled storage/transmission
sudo zfs create storage/secure/syncthing -o compression=zstd-19 sudo zfs create -o recordsize=1M -o compression=zstd-19 storage/library
sudo zfs create storage/secure/transmission -o recordsize=1M -o compression=zstd-9 -o exec=off -o sync=disabled sudo zfs create -o recordsize=1M -o compression=zstd-19 -o sync=disabled storage/ollama

View File

@@ -1,51 +0,0 @@
{
pkgs,
inputs,
...
}:
{
networking.firewall.allowedTCPPorts = [
8069
];
systemd.services.contact-api = {
description = "Contact Database API with Frontend";
after = [
"postgresql.service"
"network.target"
];
requires = [ "postgresql.service" ];
wantedBy = [ "multi-user.target" ];
path = [
pkgs.nodejs
pkgs.coreutils
pkgs.bash
];
environment = {
PYTHONPATH = "${inputs.self}";
POSTGRES_DB = "richie";
POSTGRES_HOST = "/run/postgresql";
POSTGRES_USER = "richie";
POSTGRES_PORT = "5432";
HOME = "/var/lib/contact-api";
};
serviceConfig = {
Type = "simple";
ExecStart = "${pkgs.my_python}/bin/python -m python.api.main --host 192.168.90.40 --port 8069 --frontend-dir ${inputs.self}/frontend";
StateDirectory = "contact-api";
Restart = "on-failure";
RestartSec = "5s";
StandardOutput = "journal";
StandardError = "journal";
# Security hardening
NoNewPrivileges = true;
ProtectSystem = "strict";
ProtectHome = "read-only";
PrivateTmp = true;
ReadOnlyPaths = [
"${inputs.self}"
];
};
};
}

View File

@@ -27,6 +27,7 @@ frontend ContentSwitching
# tmmworkshop.com # tmmworkshop.com
acl host_audiobookshelf hdr(host) -i audiobookshelf.tmmworkshop.com acl host_audiobookshelf hdr(host) -i audiobookshelf.tmmworkshop.com
acl host_cache hdr(host) -i cache.tmmworkshop.com acl host_cache hdr(host) -i cache.tmmworkshop.com
acl host_homeassistant hdr(host) -i homeassistant.tmmworkshop.com
acl host_jellyfin hdr(host) -i jellyfin.tmmworkshop.com acl host_jellyfin hdr(host) -i jellyfin.tmmworkshop.com
acl host_share hdr(host) -i share.tmmworkshop.com acl host_share hdr(host) -i share.tmmworkshop.com
acl host_gcw hdr(host) -i gcw.tmmworkshop.com acl host_gcw hdr(host) -i gcw.tmmworkshop.com
@@ -35,6 +36,7 @@ frontend ContentSwitching
use_backend audiobookshelf_nodes if host_audiobookshelf use_backend audiobookshelf_nodes if host_audiobookshelf
use_backend cache_nodes if host_cache use_backend cache_nodes if host_cache
use_backend homeassistant_nodes if host_homeassistant
use_backend jellyfin if host_jellyfin use_backend jellyfin if host_jellyfin
use_backend share_nodes if host_share use_backend share_nodes if host_share
use_backend gcw_nodes if host_gcw use_backend gcw_nodes if host_gcw
@@ -49,6 +51,10 @@ backend cache_nodes
mode http mode http
server server 127.0.0.1:5000 server server 127.0.0.1:5000
backend homeassistant_nodes
mode http
server server 192.168.90.35:8123
backend jellyfin backend jellyfin
option httpchk option httpchk
option forwardfor option forwardfor

View File

@@ -18,6 +18,11 @@ in
config = { config = {
http = { http = {
server_port = 8123; server_port = 8123;
server_host = [
"192.168.99.14"
"192.168.90.40"
"127.0.0.1"
];
use_x_forwarded_for = true; use_x_forwarded_for = true;
trusted_proxies = "127.0.0.1"; trusted_proxies = "127.0.0.1";
}; };

View File

@@ -6,6 +6,5 @@ in
enable = true; enable = true;
openFirewall = true; openFirewall = true;
dataDir = "${vars.services}/jellyfin"; dataDir = "${vars.services}/jellyfin";
cacheDir = "${vars.services}/jellyfin/cache";
}; };
} }

View File

@@ -2,41 +2,27 @@ let
vars = import ../vars.nix; vars = import ../vars.nix;
in in
{ {
services.ollama = { services = {
ollama = {
user = "ollama"; user = "ollama";
enable = true; enable = true;
host = "0.0.0.0"; host = "0.0.0.0";
syncModels = true;
loadModels = [ loadModels = [
"codellama:7b" "codellama:7b"
"deepscaler:1.5b"
"deepseek-r1:14b" "deepseek-r1:14b"
"deepseek-r1:32b" "deepseek-r1:32b"
"deepseek-r1:8b" "deepseek-r1:8b"
"devstral-small-2:24b"
"dolphin-llama3:70b"
"dolphin-llama3:8b"
"functiongemma:270m"
"gemma3:12b" "gemma3:12b"
"gemma3:27b" "gemma3:27b"
"gpt-oss:120b" "gpt-oss:120b"
"gpt-oss:20b" "gpt-oss:20b"
"lfm2:24b"
"llama3.1:70b"
"llama3.1:8b"
"llama3.2:1b"
"llama3.2:3b"
"magistral:24b"
"ministral-3:14b"
"nemotron-3-nano:30b"
"qwen3-coder:30b"
"qwen3-vl:32b"
"qwen3:14b" "qwen3:14b"
"qwen3.5:35b" "qwen3:30b"
]; ];
models = vars.ollama; models = vars.ollama;
openFirewall = true; openFirewall = true;
}; };
};
systemd.services = { systemd.services = {
ollama.serviceConfig = { ollama.serviceConfig = {
Nice = 19; Nice = 19;

View File

@@ -1,13 +0,0 @@
{
services.open-webui = {
enable = true;
host = "0.0.0.0";
openFirewall = true;
environment = {
ANONYMIZED_TELEMETRY = "False";
DO_NOT_TRACK = "True";
SCARF_NO_ANALYTICS = "True";
OLLAMA_API_BASE_URL = "http://127.0.0.1:11434";
};
};
}

View File

@@ -30,6 +30,18 @@ in
local hass hass trust local hass hass trust
local gitea gitea trust local gitea gitea trust
# megan
host megan megan 192.168.90.1/24 trust
host megan megan 127.0.0.1/32 trust
host gcw megan 192.168.90.1/24 trust
host gcw megan 127.0.0.1/32 trust
# gcw
local gcw gcw trust
host gcw gcw 192.168.90.1/24 trust
host gcw gcw 127.0.0.1/32 trust
# math # math
local postgres math trust local postgres math trust
host postgres math 127.0.0.1/32 trust host postgres math 127.0.0.1/32 trust
@@ -88,6 +100,26 @@ in
replication = true; replication = true;
}; };
} }
{
name = "megan";
ensureDBOwnership = true;
ensureClauses = {
login = true;
createrole = true;
createdb = true;
replication = true;
};
}
{
name = "gcw";
ensureDBOwnership = true;
ensureClauses = {
login = true;
createrole = true;
createdb = true;
replication = true;
};
}
{ {
name = "math"; name = "math";
ensureDBOwnership = true; ensureDBOwnership = true;
@@ -100,9 +132,13 @@ in
} }
]; ];
ensureDatabases = [ ensureDatabases = [
"gcw"
"hass" "hass"
"gitea" "gitea"
"math" "math"
"megan"
"mxr_dev"
"mxr_prod"
"n8n" "n8n"
"richie" "richie"
]; ];

View File

@@ -12,9 +12,8 @@
"${inputs.self}/common/optional/zerotier.nix" "${inputs.self}/common/optional/zerotier.nix"
./hardware.nix ./hardware.nix
./llms.nix ./llms.nix
./open_webui.nix
./qmk.nix
./syncthing.nix ./syncthing.nix
./qmk.nix
inputs.nixos-hardware.nixosModules.framework-13-7040-amd inputs.nixos-hardware.nixosModules.framework-13-7040-amd
]; ];
@@ -30,7 +29,6 @@
services = { services = {
openssh.ports = [ 922 ]; openssh.ports = [ 922 ];
flatpak.enable = true;
}; };
system.stateVersion = "24.05"; system.stateVersion = "24.05";

View File

@@ -3,16 +3,16 @@
user = "ollama"; user = "ollama";
enable = true; enable = true;
host = "127.0.0.1"; host = "127.0.0.1";
syncModels = true;
loadModels = [ loadModels = [
"deepscaler:1.5b" "codellama:7b"
"deepseek-r1:14b"
"deepseek-r1:32b"
"deepseek-r1:8b" "deepseek-r1:8b"
"gemma3:12b" "gemma3:12b"
"gemma3:27b" "gemma3:27b"
"gpt-oss:20b" "gpt-oss:20b"
"lfm2:24b"
"qwen3:14b" "qwen3:14b"
"qwen3.5:27b" "qwen3:30b"
]; ];
}; };
systemd.services = { systemd.services = {

View File

@@ -1,12 +0,0 @@
{
services.open-webui = {
enable = true;
environment = {
ANONYMIZED_TELEMETRY = "False";
DO_NOT_TRACK = "True";
SCARF_NO_ANALYTICS = "True";
OLLAMA_API_BASE_URL = "http://127.0.0.1:11434";
WEBUI_AUTH = "False";
};
};
}

View File

@@ -1,236 +0,0 @@
"""Tests for python/api modules."""
from __future__ import annotations
from pathlib import Path
from unittest.mock import MagicMock, patch
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from python.api.routers.contact import (
ContactBase,
ContactCreate,
ContactListResponse,
ContactRelationshipCreate,
ContactRelationshipResponse,
ContactRelationshipUpdate,
ContactUpdate,
GraphData,
GraphEdge,
GraphNode,
NeedBase,
NeedCreate,
NeedResponse,
RelationshipTypeInfo,
router,
)
from python.api.routers.frontend import create_frontend_router
from python.orm.contact import RelationshipType
# --- Pydantic schema tests ---
def test_need_base() -> None:
"""Test NeedBase schema."""
need = NeedBase(name="ADHD", description="Attention deficit")
assert need.name == "ADHD"
def test_need_create() -> None:
"""Test NeedCreate schema."""
need = NeedCreate(name="Light Sensitive")
assert need.name == "Light Sensitive"
assert need.description is None
def test_need_response() -> None:
"""Test NeedResponse schema."""
need = NeedResponse(id=1, name="ADHD", description="test")
assert need.id == 1
def test_contact_base() -> None:
"""Test ContactBase schema."""
contact = ContactBase(name="John")
assert contact.name == "John"
assert contact.age is None
assert contact.bio is None
def test_contact_create() -> None:
"""Test ContactCreate schema."""
contact = ContactCreate(name="John", need_ids=[1, 2])
assert contact.need_ids == [1, 2]
def test_contact_create_no_needs() -> None:
"""Test ContactCreate with no needs."""
contact = ContactCreate(name="John")
assert contact.need_ids == []
def test_contact_update() -> None:
"""Test ContactUpdate schema."""
update = ContactUpdate(name="Jane", age=30)
assert update.name == "Jane"
assert update.age == 30
def test_contact_update_partial() -> None:
"""Test ContactUpdate with partial data."""
update = ContactUpdate(age=25)
assert update.name is None
assert update.age == 25
def test_contact_list_response() -> None:
"""Test ContactListResponse schema."""
contact = ContactListResponse(id=1, name="John")
assert contact.id == 1
def test_contact_relationship_create() -> None:
"""Test ContactRelationshipCreate schema."""
rel = ContactRelationshipCreate(
related_contact_id=2,
relationship_type=RelationshipType.FRIEND,
)
assert rel.related_contact_id == 2
assert rel.closeness_weight is None
def test_contact_relationship_create_with_weight() -> None:
"""Test ContactRelationshipCreate with custom weight."""
rel = ContactRelationshipCreate(
related_contact_id=2,
relationship_type=RelationshipType.SPOUSE,
closeness_weight=10,
)
assert rel.closeness_weight == 10
def test_contact_relationship_update() -> None:
"""Test ContactRelationshipUpdate schema."""
update = ContactRelationshipUpdate(closeness_weight=8)
assert update.relationship_type is None
assert update.closeness_weight == 8
def test_contact_relationship_response() -> None:
"""Test ContactRelationshipResponse schema."""
resp = ContactRelationshipResponse(
contact_id=1,
related_contact_id=2,
relationship_type="friend",
closeness_weight=6,
)
assert resp.contact_id == 1
def test_relationship_type_info() -> None:
"""Test RelationshipTypeInfo schema."""
info = RelationshipTypeInfo(value="spouse", display_name="Spouse", default_weight=10)
assert info.value == "spouse"
def test_graph_node() -> None:
"""Test GraphNode schema."""
node = GraphNode(id=1, name="John", current_job="Dev")
assert node.id == 1
def test_graph_edge() -> None:
"""Test GraphEdge schema."""
edge = GraphEdge(source=1, target=2, relationship_type="friend", closeness_weight=6)
assert edge.source == 1
def test_graph_data() -> None:
"""Test GraphData schema."""
data = GraphData(
nodes=[GraphNode(id=1, name="John")],
edges=[GraphEdge(source=1, target=2, relationship_type="friend", closeness_weight=6)],
)
assert len(data.nodes) == 1
assert len(data.edges) == 1
# --- frontend router test ---
def test_create_frontend_router(tmp_path: Path) -> None:
"""Test create_frontend_router creates router."""
# Create required assets dir and index.html
assets_dir = tmp_path / "assets"
assets_dir.mkdir()
index = tmp_path / "index.html"
index.write_text("<html></html>")
router = create_frontend_router(tmp_path)
assert router is not None
# --- API main tests ---
def test_create_app() -> None:
"""Test create_app creates FastAPI app."""
with patch("python.api.main.get_postgres_engine"):
from python.api.main import create_app
app = create_app()
assert app is not None
assert app.title == "Contact Database API"
def test_create_app_with_frontend(tmp_path: Path) -> None:
"""Test create_app with frontend directory."""
assets_dir = tmp_path / "assets"
assets_dir.mkdir()
index = tmp_path / "index.html"
index.write_text("<html></html>")
with patch("python.api.main.get_postgres_engine"):
from python.api.main import create_app
app = create_app(frontend_dir=tmp_path)
assert app is not None
def test_build_frontend_none() -> None:
"""Test build_frontend with None returns None."""
from python.api.main import build_frontend
result = build_frontend(None)
assert result is None
def test_build_frontend_missing_dir() -> None:
"""Test build_frontend with missing directory raises."""
from python.api.main import build_frontend
with pytest.raises(FileExistsError):
build_frontend(Path("/nonexistent/path"))
# --- dependencies test ---
def test_db_session_dependency() -> None:
"""Test get_db dependency."""
from python.api.dependencies import get_db
mock_engine = create_engine("sqlite:///:memory:")
mock_request = MagicMock()
mock_request.app.state.engine = mock_engine
gen = get_db(mock_request)
session = next(gen)
assert isinstance(session, Session)
try:
next(gen)
except StopIteration:
pass

View File

@@ -1,469 +0,0 @@
"""Integration tests for API router using SQLite in-memory database."""
from __future__ import annotations
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from python.api.routers.contact import (
ContactCreate,
ContactRelationshipCreate,
ContactRelationshipUpdate,
ContactUpdate,
NeedCreate,
add_contact_relationship,
add_need_to_contact,
create_contact,
create_need,
delete_contact,
delete_need,
get_contact,
get_contact_relationships,
get_need,
get_relationship_graph,
list_contacts,
list_needs,
list_relationship_types,
RelationshipTypeInfo,
remove_contact_relationship,
remove_need_from_contact,
update_contact,
update_contact_relationship,
)
from python.orm.base import RichieBase
from python.orm.contact import Contact, ContactNeed, ContactRelationship, Need, RelationshipType
import pytest
def _create_db() -> Session:
"""Create in-memory SQLite database with schema."""
engine = create_engine("sqlite:///:memory:")
# Create tables without schema prefix for SQLite
RichieBase.metadata.create_all(engine, checkfirst=True)
return Session(engine)
@pytest.fixture
def db() -> Session:
"""Database session fixture."""
engine = create_engine("sqlite:///:memory:")
# SQLite doesn't support schemas, so we need to drop the schema reference
from sqlalchemy import MetaData
meta = MetaData()
for table in RichieBase.metadata.sorted_tables:
# Create table without schema
table.to_metadata(meta)
meta.create_all(engine)
session = Session(engine)
yield session
session.close()
# --- Need CRUD tests ---
def test_create_need(db: Session) -> None:
"""Test creating a need."""
need = create_need(NeedCreate(name="ADHD", description="Attention deficit"), db)
assert need.name == "ADHD"
assert need.id is not None
def test_list_needs(db: Session) -> None:
"""Test listing needs."""
create_need(NeedCreate(name="ADHD"), db)
create_need(NeedCreate(name="Light Sensitive"), db)
needs = list_needs(db)
assert len(needs) == 2
def test_get_need(db: Session) -> None:
"""Test getting a need by ID."""
created = create_need(NeedCreate(name="ADHD"), db)
need = get_need(created.id, db)
assert need.name == "ADHD"
def test_get_need_not_found(db: Session) -> None:
"""Test getting a need that doesn't exist."""
from fastapi import HTTPException
with pytest.raises(HTTPException) as exc_info:
get_need(999, db)
assert exc_info.value.status_code == 404
def test_delete_need(db: Session) -> None:
"""Test deleting a need."""
created = create_need(NeedCreate(name="ADHD"), db)
result = delete_need(created.id, db)
assert result == {"deleted": True}
def test_delete_need_not_found(db: Session) -> None:
"""Test deleting a need that doesn't exist."""
from fastapi import HTTPException
with pytest.raises(HTTPException) as exc_info:
delete_need(999, db)
assert exc_info.value.status_code == 404
# --- Contact CRUD tests ---
def test_create_contact(db: Session) -> None:
"""Test creating a contact."""
contact = create_contact(ContactCreate(name="John"), db)
assert contact.name == "John"
assert contact.id is not None
def test_create_contact_with_needs(db: Session) -> None:
"""Test creating a contact with needs."""
need = create_need(NeedCreate(name="ADHD"), db)
contact = create_contact(ContactCreate(name="John", need_ids=[need.id]), db)
assert len(contact.needs) == 1
def test_list_contacts(db: Session) -> None:
"""Test listing contacts."""
create_contact(ContactCreate(name="John"), db)
create_contact(ContactCreate(name="Jane"), db)
contacts = list_contacts(db)
assert len(contacts) == 2
def test_list_contacts_pagination(db: Session) -> None:
"""Test listing contacts with pagination."""
for i in range(5):
create_contact(ContactCreate(name=f"Contact {i}"), db)
contacts = list_contacts(db, skip=2, limit=2)
assert len(contacts) == 2
def test_get_contact(db: Session) -> None:
"""Test getting a contact by ID."""
created = create_contact(ContactCreate(name="John"), db)
contact = get_contact(created.id, db)
assert contact.name == "John"
def test_get_contact_not_found(db: Session) -> None:
"""Test getting a contact that doesn't exist."""
from fastapi import HTTPException
with pytest.raises(HTTPException) as exc_info:
get_contact(999, db)
assert exc_info.value.status_code == 404
def test_update_contact(db: Session) -> None:
"""Test updating a contact."""
created = create_contact(ContactCreate(name="John"), db)
updated = update_contact(created.id, ContactUpdate(name="Jane", age=30), db)
assert updated.name == "Jane"
assert updated.age == 30
def test_update_contact_with_needs(db: Session) -> None:
"""Test updating a contact's needs."""
need = create_need(NeedCreate(name="ADHD"), db)
created = create_contact(ContactCreate(name="John"), db)
updated = update_contact(created.id, ContactUpdate(need_ids=[need.id]), db)
assert len(updated.needs) == 1
def test_update_contact_not_found(db: Session) -> None:
"""Test updating a contact that doesn't exist."""
from fastapi import HTTPException
with pytest.raises(HTTPException) as exc_info:
update_contact(999, ContactUpdate(name="Jane"), db)
assert exc_info.value.status_code == 404
def test_delete_contact(db: Session) -> None:
"""Test deleting a contact."""
created = create_contact(ContactCreate(name="John"), db)
result = delete_contact(created.id, db)
assert result == {"deleted": True}
def test_delete_contact_not_found(db: Session) -> None:
"""Test deleting a contact that doesn't exist."""
from fastapi import HTTPException
with pytest.raises(HTTPException) as exc_info:
delete_contact(999, db)
assert exc_info.value.status_code == 404
# --- Need-Contact association tests ---
def test_add_need_to_contact(db: Session) -> None:
"""Test adding a need to a contact."""
need = create_need(NeedCreate(name="ADHD"), db)
contact = create_contact(ContactCreate(name="John"), db)
result = add_need_to_contact(contact.id, need.id, db)
assert result == {"added": True}
def test_add_need_to_contact_contact_not_found(db: Session) -> None:
"""Test adding need to nonexistent contact."""
from fastapi import HTTPException
need = create_need(NeedCreate(name="ADHD"), db)
with pytest.raises(HTTPException) as exc_info:
add_need_to_contact(999, need.id, db)
assert exc_info.value.status_code == 404
def test_add_need_to_contact_need_not_found(db: Session) -> None:
"""Test adding nonexistent need to contact."""
from fastapi import HTTPException
contact = create_contact(ContactCreate(name="John"), db)
with pytest.raises(HTTPException) as exc_info:
add_need_to_contact(contact.id, 999, db)
assert exc_info.value.status_code == 404
def test_remove_need_from_contact(db: Session) -> None:
"""Test removing a need from a contact."""
need = create_need(NeedCreate(name="ADHD"), db)
contact = create_contact(ContactCreate(name="John", need_ids=[need.id]), db)
result = remove_need_from_contact(contact.id, need.id, db)
assert result == {"removed": True}
def test_remove_need_from_contact_contact_not_found(db: Session) -> None:
"""Test removing need from nonexistent contact."""
from fastapi import HTTPException
with pytest.raises(HTTPException) as exc_info:
remove_need_from_contact(999, 1, db)
assert exc_info.value.status_code == 404
def test_remove_need_from_contact_need_not_found(db: Session) -> None:
"""Test removing nonexistent need from contact."""
from fastapi import HTTPException
contact = create_contact(ContactCreate(name="John"), db)
with pytest.raises(HTTPException) as exc_info:
remove_need_from_contact(contact.id, 999, db)
assert exc_info.value.status_code == 404
# --- Relationship tests ---
def test_add_contact_relationship(db: Session) -> None:
"""Test adding a relationship between contacts."""
c1 = create_contact(ContactCreate(name="John"), db)
c2 = create_contact(ContactCreate(name="Jane"), db)
rel = add_contact_relationship(
c1.id,
ContactRelationshipCreate(related_contact_id=c2.id, relationship_type=RelationshipType.FRIEND),
db,
)
assert rel.contact_id == c1.id
assert rel.related_contact_id == c2.id
def test_add_contact_relationship_default_weight(db: Session) -> None:
"""Test relationship uses default weight from type."""
c1 = create_contact(ContactCreate(name="John"), db)
c2 = create_contact(ContactCreate(name="Jane"), db)
rel = add_contact_relationship(
c1.id,
ContactRelationshipCreate(related_contact_id=c2.id, relationship_type=RelationshipType.SPOUSE),
db,
)
assert rel.closeness_weight == RelationshipType.SPOUSE.default_weight
def test_add_contact_relationship_custom_weight(db: Session) -> None:
"""Test relationship with custom weight."""
c1 = create_contact(ContactCreate(name="John"), db)
c2 = create_contact(ContactCreate(name="Jane"), db)
rel = add_contact_relationship(
c1.id,
ContactRelationshipCreate(related_contact_id=c2.id, relationship_type=RelationshipType.FRIEND, closeness_weight=8),
db,
)
assert rel.closeness_weight == 8
def test_add_contact_relationship_contact_not_found(db: Session) -> None:
"""Test adding relationship with nonexistent contact."""
from fastapi import HTTPException
c2 = create_contact(ContactCreate(name="Jane"), db)
with pytest.raises(HTTPException) as exc_info:
add_contact_relationship(
999,
ContactRelationshipCreate(related_contact_id=c2.id, relationship_type=RelationshipType.FRIEND),
db,
)
assert exc_info.value.status_code == 404
def test_add_contact_relationship_related_not_found(db: Session) -> None:
"""Test adding relationship with nonexistent related contact."""
from fastapi import HTTPException
c1 = create_contact(ContactCreate(name="John"), db)
with pytest.raises(HTTPException) as exc_info:
add_contact_relationship(
c1.id,
ContactRelationshipCreate(related_contact_id=999, relationship_type=RelationshipType.FRIEND),
db,
)
assert exc_info.value.status_code == 404
def test_add_contact_relationship_self(db: Session) -> None:
"""Test cannot relate contact to itself."""
from fastapi import HTTPException
c1 = create_contact(ContactCreate(name="John"), db)
with pytest.raises(HTTPException) as exc_info:
add_contact_relationship(
c1.id,
ContactRelationshipCreate(related_contact_id=c1.id, relationship_type=RelationshipType.FRIEND),
db,
)
assert exc_info.value.status_code == 400
def test_get_contact_relationships(db: Session) -> None:
"""Test getting relationships for a contact."""
c1 = create_contact(ContactCreate(name="John"), db)
c2 = create_contact(ContactCreate(name="Jane"), db)
add_contact_relationship(
c1.id,
ContactRelationshipCreate(related_contact_id=c2.id, relationship_type=RelationshipType.FRIEND),
db,
)
rels = get_contact_relationships(c1.id, db)
assert len(rels) == 1
def test_get_contact_relationships_not_found(db: Session) -> None:
"""Test getting relationships for nonexistent contact."""
from fastapi import HTTPException
with pytest.raises(HTTPException) as exc_info:
get_contact_relationships(999, db)
assert exc_info.value.status_code == 404
def test_update_contact_relationship(db: Session) -> None:
"""Test updating a relationship."""
c1 = create_contact(ContactCreate(name="John"), db)
c2 = create_contact(ContactCreate(name="Jane"), db)
add_contact_relationship(
c1.id,
ContactRelationshipCreate(related_contact_id=c2.id, relationship_type=RelationshipType.FRIEND),
db,
)
updated = update_contact_relationship(
c1.id,
c2.id,
ContactRelationshipUpdate(closeness_weight=9),
db,
)
assert updated.closeness_weight == 9
def test_update_contact_relationship_type(db: Session) -> None:
"""Test updating relationship type."""
c1 = create_contact(ContactCreate(name="John"), db)
c2 = create_contact(ContactCreate(name="Jane"), db)
add_contact_relationship(
c1.id,
ContactRelationshipCreate(related_contact_id=c2.id, relationship_type=RelationshipType.FRIEND),
db,
)
updated = update_contact_relationship(
c1.id,
c2.id,
ContactRelationshipUpdate(relationship_type=RelationshipType.BEST_FRIEND),
db,
)
assert updated.relationship_type == "best_friend"
def test_update_contact_relationship_not_found(db: Session) -> None:
"""Test updating nonexistent relationship."""
from fastapi import HTTPException
with pytest.raises(HTTPException) as exc_info:
update_contact_relationship(
999,
998,
ContactRelationshipUpdate(closeness_weight=5),
db,
)
assert exc_info.value.status_code == 404
def test_remove_contact_relationship(db: Session) -> None:
"""Test removing a relationship."""
c1 = create_contact(ContactCreate(name="John"), db)
c2 = create_contact(ContactCreate(name="Jane"), db)
add_contact_relationship(
c1.id,
ContactRelationshipCreate(related_contact_id=c2.id, relationship_type=RelationshipType.FRIEND),
db,
)
result = remove_contact_relationship(c1.id, c2.id, db)
assert result == {"deleted": True}
def test_remove_contact_relationship_not_found(db: Session) -> None:
"""Test removing nonexistent relationship."""
from fastapi import HTTPException
with pytest.raises(HTTPException) as exc_info:
remove_contact_relationship(999, 998, db)
assert exc_info.value.status_code == 404
# --- list_relationship_types ---
def test_list_relationship_types() -> None:
"""Test listing relationship types."""
types = list_relationship_types()
assert len(types) == len(RelationshipType)
assert all(isinstance(t, RelationshipTypeInfo) for t in types)
# --- graph tests ---
def test_get_relationship_graph(db: Session) -> None:
"""Test getting relationship graph."""
c1 = create_contact(ContactCreate(name="John"), db)
c2 = create_contact(ContactCreate(name="Jane"), db)
add_contact_relationship(
c1.id,
ContactRelationshipCreate(related_contact_id=c2.id, relationship_type=RelationshipType.FRIEND),
db,
)
graph = get_relationship_graph(db)
assert len(graph.nodes) == 2
assert len(graph.edges) == 1
def test_get_relationship_graph_empty(db: Session) -> None:
"""Test getting empty relationship graph."""
graph = get_relationship_graph(db)
assert len(graph.nodes) == 0
assert len(graph.edges) == 0

View File

@@ -1,66 +0,0 @@
"""Extended tests for python/api/main.py."""
from __future__ import annotations
from pathlib import Path
from unittest.mock import MagicMock, patch
import pytest
from python.api.main import build_frontend, create_app
def test_build_frontend_runs_npm(tmp_path: Path) -> None:
"""Test build_frontend runs npm commands."""
source_dir = tmp_path / "frontend"
source_dir.mkdir()
(source_dir / "package.json").write_text('{"name": "test"}')
dist_dir = tmp_path / "build" / "dist"
dist_dir.mkdir(parents=True)
(dist_dir / "index.html").write_text("<html></html>")
def mock_copytree(src: Path, dst: Path, dirs_exist_ok: bool = False) -> None:
if "dist" in str(src):
Path(dst).mkdir(parents=True, exist_ok=True)
(Path(dst) / "index.html").write_text("<html></html>")
with (
patch("python.api.main.subprocess.run") as mock_run,
patch("python.api.main.shutil.copytree") as mock_copy,
patch("python.api.main.shutil.rmtree"),
patch("python.api.main.tempfile.mkdtemp") as mock_mkdtemp,
):
# First mkdtemp for build dir, second for output dir
build_dir = str(tmp_path / "build")
output_dir = str(tmp_path / "output")
mock_mkdtemp.side_effect = [build_dir, output_dir]
# dist_dir exists check
with patch("pathlib.Path.exists", return_value=True):
result = build_frontend(source_dir, cache_dir=tmp_path / ".npm")
assert mock_run.call_count == 2 # npm install + npm run build
def test_build_frontend_no_dist(tmp_path: Path) -> None:
"""Test build_frontend raises when dist directory not found."""
source_dir = tmp_path / "frontend"
source_dir.mkdir()
(source_dir / "package.json").write_text('{"name": "test"}')
with (
patch("python.api.main.subprocess.run"),
patch("python.api.main.shutil.copytree"),
patch("python.api.main.tempfile.mkdtemp", return_value=str(tmp_path / "build")),
pytest.raises(FileNotFoundError, match="Build output not found"),
):
build_frontend(source_dir)
def test_create_app_includes_contact_router() -> None:
"""Test create_app includes contact router."""
app = create_app()
routes = [r.path for r in app.routes]
# Should have API routes
assert any("/api" in r for r in routes)

View File

@@ -1,61 +0,0 @@
"""Tests for api/main.py serve function and frontend router."""
from __future__ import annotations
from pathlib import Path
from unittest.mock import patch
import pytest
from python.api.main import build_frontend, create_app, serve
def test_build_frontend_none_source() -> None:
"""Test build_frontend returns None when no source dir."""
result = build_frontend(None)
assert result is None
def test_build_frontend_nonexistent_dir(tmp_path: Path) -> None:
"""Test build_frontend raises for nonexistent directory."""
with pytest.raises(FileExistsError):
build_frontend(tmp_path / "nonexistent")
def test_create_app_with_frontend(tmp_path: Path) -> None:
"""Test create_app with frontend directory."""
# Create a minimal frontend dir with assets
assets = tmp_path / "assets"
assets.mkdir()
(tmp_path / "index.html").write_text("<html></html>")
app = create_app(frontend_dir=tmp_path)
routes = [r.path for r in app.routes]
assert any("/api" in r for r in routes)
def test_serve_calls_uvicorn() -> None:
"""Test serve function calls uvicorn.run."""
with (
patch("python.api.main.uvicorn.run") as mock_run,
patch("python.api.main.build_frontend", return_value=None),
patch("python.api.main.configure_logger"),
patch.dict("os.environ", {"HOME": "/tmp"}),
):
serve(host="localhost", port=8000, log_level="INFO")
mock_run.assert_called_once()
def test_serve_with_frontend_dir(tmp_path: Path) -> None:
"""Test serve function with frontend dir."""
assets = tmp_path / "assets"
assets.mkdir()
(tmp_path / "index.html").write_text("<html></html>")
with (
patch("python.api.main.uvicorn.run") as mock_run,
patch("python.api.main.build_frontend", return_value=tmp_path),
patch("python.api.main.configure_logger"),
patch.dict("os.environ", {"HOME": "/tmp"}),
):
serve(host="localhost", frontend_dir=tmp_path, port=8000, log_level="INFO")
mock_run.assert_called_once()

View File

@@ -57,5 +57,5 @@ def test_test_bash_wrapper_error() -> None:
"""test_test_bash_wrapper_error.""" """test_test_bash_wrapper_error."""
expected_error = 2 expected_error = 2
stdout, returncode = bash_wrapper("ls /this/path/does/not/exist") stdout, returncode = bash_wrapper("ls /this/path/does/not/exist")
assert stdout == "ls: cannot access '/this/path/does/not/exist': No such file or directory\n" assert stdout == "ls: /this/path/does/not/exist: No such file or directory\n"
assert returncode == expected_error assert returncode == expected_error

View File

@@ -1,364 +0,0 @@
"""Tests for python/eval_warnings/main.py."""
from __future__ import annotations
import subprocess
from pathlib import Path
from typing import TYPE_CHECKING
from unittest.mock import MagicMock, patch
from zipfile import ZipFile
from io import BytesIO
import pytest
from python.eval_warnings.main import (
EvalWarning,
FileChange,
apply_changes,
compute_warning_hash,
check_duplicate_pr,
download_logs,
extract_referenced_files,
parse_changes,
parse_warnings,
query_ollama,
run_cmd,
create_pr,
)
if TYPE_CHECKING:
pass
def test_eval_warning_frozen() -> None:
"""Test EvalWarning is frozen dataclass."""
w = EvalWarning(system="test", message="warning: test msg")
assert w.system == "test"
assert w.message == "warning: test msg"
def test_file_change() -> None:
"""Test FileChange dataclass."""
fc = FileChange(file_path="test.nix", original="old", fixed="new")
assert fc.file_path == "test.nix"
def test_run_cmd() -> None:
"""Test run_cmd."""
result = run_cmd(["echo", "hello"])
assert result.stdout.strip() == "hello"
def test_run_cmd_check_false() -> None:
"""Test run_cmd with check=False."""
result = run_cmd(["ls", "/nonexistent"], check=False)
assert result.returncode != 0
def test_parse_warnings_basic() -> None:
"""Test parse_warnings extracts warnings."""
logs = {
"build-server1/2_Build.txt": "warning: test warning\nsome other line\ntrace: warning: another warning\n",
}
warnings = parse_warnings(logs)
assert len(warnings) == 2
def test_parse_warnings_ignores_untrusted_flake() -> None:
"""Test parse_warnings ignores untrusted flake settings."""
logs = {
"build-server1/2_Build.txt": "warning: ignoring untrusted flake configuration setting foo\n",
}
warnings = parse_warnings(logs)
assert len(warnings) == 0
def test_parse_warnings_strips_timestamp() -> None:
"""Test parse_warnings strips timestamps."""
logs = {
"build-server1/2_Build.txt": "2024-01-01T00:00:00.000Z warning: test msg\n",
}
warnings = parse_warnings(logs)
assert len(warnings) == 1
w = warnings.pop()
assert w.message == "warning: test msg"
assert w.system == "server1"
def test_parse_warnings_empty() -> None:
"""Test parse_warnings with no warnings."""
logs = {"build-server1/2_Build.txt": "all good\n"}
warnings = parse_warnings(logs)
assert len(warnings) == 0
def test_compute_warning_hash() -> None:
"""Test compute_warning_hash returns consistent 8-char hash."""
warnings = {EvalWarning(system="s1", message="msg1")}
h = compute_warning_hash(warnings)
assert len(h) == 8
# Same input -> same hash
assert compute_warning_hash(warnings) == h
def test_compute_warning_hash_different() -> None:
"""Test different warnings produce different hashes."""
w1 = {EvalWarning(system="s1", message="msg1")}
w2 = {EvalWarning(system="s1", message="msg2")}
assert compute_warning_hash(w1) != compute_warning_hash(w2)
def test_extract_referenced_files(tmp_path: Path) -> None:
"""Test extract_referenced_files reads existing files."""
nix_file = tmp_path / "test.nix"
nix_file.write_text("{ pkgs }: pkgs")
warnings = {EvalWarning(system="s1", message=f"warning: in /nix/store/abc-source/{nix_file}")}
# Won't find the file since it uses absolute paths resolved differently
files = extract_referenced_files(warnings)
# Result depends on actual file resolution
assert isinstance(files, dict)
def test_check_duplicate_pr_no_duplicate() -> None:
"""Test check_duplicate_pr when no duplicate exists."""
mock_result = MagicMock()
mock_result.returncode = 0
mock_result.stdout = "fix: resolve nix eval warnings (abcd1234)\nfix: other (efgh5678)\n"
with patch("python.eval_warnings.main.run_cmd", return_value=mock_result):
assert check_duplicate_pr("xxxxxxxx") is False
def test_check_duplicate_pr_found() -> None:
"""Test check_duplicate_pr when duplicate exists."""
mock_result = MagicMock()
mock_result.returncode = 0
mock_result.stdout = "fix: resolve nix eval warnings (abcd1234)\n"
with patch("python.eval_warnings.main.run_cmd", return_value=mock_result):
assert check_duplicate_pr("abcd1234") is True
def test_check_duplicate_pr_error() -> None:
"""Test check_duplicate_pr raises on error."""
mock_result = MagicMock()
mock_result.returncode = 1
mock_result.stderr = "gh error"
with (
patch("python.eval_warnings.main.run_cmd", return_value=mock_result),
pytest.raises(RuntimeError, match="Failed to check for duplicate PRs"),
):
check_duplicate_pr("test")
def test_parse_changes_basic() -> None:
"""Test parse_changes with valid response."""
response = """## **REASONING**
Some reasoning here.
## **CHANGES**
FILE: test.nix
<<<<<<< ORIGINAL
old line
=======
new line
>>>>>>> FIXED
"""
changes = parse_changes(response)
assert len(changes) == 1
assert changes[0].file_path == "test.nix"
assert changes[0].original == "old line"
assert changes[0].fixed == "new line"
def test_parse_changes_no_changes_section() -> None:
"""Test parse_changes with missing CHANGES section."""
response = "Some text without changes"
changes = parse_changes(response)
assert changes == []
def test_parse_changes_multiple() -> None:
"""Test parse_changes with multiple file changes."""
response = """**CHANGES**
FILE: file1.nix
<<<<<<< ORIGINAL
old1
=======
new1
>>>>>>> FIXED
FILE: file2.nix
<<<<<<< ORIGINAL
old2
=======
new2
>>>>>>> FIXED
"""
changes = parse_changes(response)
assert len(changes) == 2
def test_apply_changes(tmp_path: Path) -> None:
"""Test apply_changes applies changes to files."""
test_file = tmp_path / "test.nix"
test_file.write_text("old content here")
changes = [FileChange(file_path=str(test_file), original="old content", fixed="new content")]
with patch("python.eval_warnings.main.Path.cwd", return_value=tmp_path):
applied = apply_changes(changes)
assert applied == 1
assert "new content here" in test_file.read_text()
def test_apply_changes_file_not_found(tmp_path: Path) -> None:
"""Test apply_changes skips missing files."""
changes = [FileChange(file_path=str(tmp_path / "missing.nix"), original="old", fixed="new")]
with patch("python.eval_warnings.main.Path.cwd", return_value=tmp_path):
applied = apply_changes(changes)
assert applied == 0
def test_apply_changes_original_not_found(tmp_path: Path) -> None:
"""Test apply_changes skips if original text not in file."""
test_file = tmp_path / "test.nix"
test_file.write_text("different content")
changes = [FileChange(file_path=str(test_file), original="not found", fixed="new")]
with patch("python.eval_warnings.main.Path.cwd", return_value=tmp_path):
applied = apply_changes(changes)
assert applied == 0
def test_apply_changes_path_traversal(tmp_path: Path) -> None:
"""Test apply_changes blocks path traversal."""
changes = [FileChange(file_path="/etc/passwd", original="old", fixed="new")]
with patch("python.eval_warnings.main.Path.cwd", return_value=tmp_path):
applied = apply_changes(changes)
assert applied == 0
def test_query_ollama_success() -> None:
"""Test query_ollama returns response."""
warnings = {EvalWarning(system="s1", message="warning: test")}
files = {"test.nix": "{ pkgs }: pkgs"}
mock_response = MagicMock()
mock_response.json.return_value = {"response": "some fix suggestion"}
mock_response.raise_for_status.return_value = None
with patch("python.eval_warnings.main.post", return_value=mock_response):
result = query_ollama(warnings, files, "http://localhost:11434")
assert result == "some fix suggestion"
def test_query_ollama_failure() -> None:
"""Test query_ollama returns None on failure."""
from httpx import HTTPError
warnings = {EvalWarning(system="s1", message="warning: test")}
files = {}
with patch("python.eval_warnings.main.post", side_effect=HTTPError("fail")):
result = query_ollama(warnings, files, "http://localhost:11434")
assert result is None
def test_download_logs_success() -> None:
"""Test download_logs extracts build log files from zip."""
# Create a zip file in memory
buf = BytesIO()
with ZipFile(buf, "w") as zf:
zf.writestr("build-server1/2_Build.txt", "warning: test")
zf.writestr("other-file.txt", "not a build log")
zip_bytes = buf.getvalue()
mock_result = MagicMock()
mock_result.returncode = 0
mock_result.stdout = zip_bytes
with patch("python.eval_warnings.main.subprocess.run", return_value=mock_result):
logs = download_logs("12345", "owner/repo")
assert "build-server1/2_Build.txt" in logs
assert "other-file.txt" not in logs
def test_download_logs_failure() -> None:
"""Test download_logs raises on failure."""
mock_result = MagicMock()
mock_result.returncode = 1
mock_result.stderr = b"error"
with (
patch("python.eval_warnings.main.subprocess.run", return_value=mock_result),
pytest.raises(RuntimeError, match="Failed to download logs"),
):
download_logs("12345", "owner/repo")
def test_create_pr() -> None:
"""Test create_pr creates branch and PR."""
warnings = {EvalWarning(system="s1", message="warning: test")}
llm_response = "**REASONING**\nSome fix.\n**CHANGES**\nstuff"
mock_diff_result = MagicMock()
mock_diff_result.returncode = 1 # changes exist
call_count = 0
def mock_run_cmd(cmd: list[str], *, check: bool = True) -> MagicMock:
nonlocal call_count
call_count += 1
result = MagicMock()
result.returncode = 0
result.stdout = ""
if "diff" in cmd:
result.returncode = 1
return result
with patch("python.eval_warnings.main.run_cmd", side_effect=mock_run_cmd):
create_pr("abcd1234", warnings, llm_response, "https://example.com/run/1")
assert call_count > 0
def test_create_pr_no_changes() -> None:
    """create_pr should be a no-op when the working tree is clean."""

    def fake_run_cmd(cmd: list[str], *, check: bool = True) -> MagicMock:
        """Every command succeeds and the diff reports no changes."""
        completed = MagicMock()
        completed.returncode = 0
        completed.stdout = ""
        return completed

    found = {EvalWarning(system="s1", message="warning: test")}
    llm_response = "**REASONING**\nNo changes needed.\n**CHANGES**\n"
    with patch("python.eval_warnings.main.run_cmd", side_effect=fake_run_cmd):
        create_pr("abcd1234", found, llm_response, "https://example.com/run/1")
def test_create_pr_no_reasoning() -> None:
    """create_pr should tolerate an LLM response without a REASONING section."""

    def fake_run_cmd(cmd: list[str], *, check: bool = True) -> MagicMock:
        """Diff commands report changes (rc 1); everything else succeeds."""
        completed = MagicMock()
        completed.returncode = 1 if "diff" in cmd else 0
        completed.stdout = ""
        return completed

    found = {EvalWarning(system="s1", message="warning: test")}
    with patch("python.eval_warnings.main.run_cmd", side_effect=fake_run_cmd):
        create_pr("abcd1234", found, "No reasoning here", "https://example.com/run/1")

View File

@@ -1,77 +0,0 @@
"""Extended tests for python/eval_warnings/main.py."""
from __future__ import annotations
import os
from pathlib import Path
from unittest.mock import MagicMock, patch
from python.eval_warnings.main import (
EvalWarning,
extract_referenced_files,
)
def test_extract_referenced_files_nix_store_paths(tmp_path: Path) -> None:
    """Nix store paths in warnings should map back to repo-relative files."""
    target = tmp_path / "systems" / "test.nix"
    target.parent.mkdir()
    target.write_text("{ pkgs }: pkgs")

    found = {
        EvalWarning(
            system="s1",
            message="warning: in /nix/store/abc-source/systems/test.nix:5: deprecated",
        )
    }
    # extract_referenced_files resolves paths relative to the current directory.
    previous = os.getcwd()
    os.chdir(tmp_path)
    try:
        files = extract_referenced_files(found)
    finally:
        os.chdir(previous)

    assert "systems/test.nix" in files
    assert files["systems/test.nix"] == "{ pkgs }: pkgs"
def test_extract_referenced_files_no_files_found() -> None:
    """Warnings with no recognizable paths should still yield a dict."""
    found = {
        EvalWarning(
            system="s1",
            message="warning: something generic without file paths",
        )
    }
    result = extract_referenced_files(found)
    # The implementation may fall back to flake.nix; only the type is guaranteed.
    assert isinstance(result, dict)
def test_extract_referenced_files_repo_relative_paths(tmp_path: Path) -> None:
    """Repo-relative file paths in warnings should be read from disk."""
    target = tmp_path / "systems" / "foo" / "bar.nix"
    target.parent.mkdir(parents=True)
    target.write_text("{ config }: {}")

    found = {
        EvalWarning(
            system="s1",
            message="warning: in systems/foo/bar.nix:10: test",
        )
    }
    previous = os.getcwd()
    os.chdir(tmp_path)
    try:
        files = extract_referenced_files(found)
    finally:
        os.chdir(previous)

    assert "systems/foo/bar.nix" in files

View File

@@ -1,115 +0,0 @@
"""Tests for eval_warnings/main.py main() entry point."""
from __future__ import annotations
from unittest.mock import MagicMock, patch
def test_eval_warnings_main_no_warnings() -> None:
    """main() should exit quietly when no warnings are parsed from the logs."""
    from python.eval_warnings.main import main

    prefix = "python.eval_warnings.main."
    with (
        patch(prefix + "configure_logger"),
        patch(prefix + "download_logs", return_value="clean log"),
        patch(prefix + "parse_warnings", return_value=set()),
    ):
        main(
            run_id="123",
            repo="owner/repo",
            ollama_url="http://localhost:11434",
            run_url="http://example.com/run",
            log_level="INFO",
        )
def test_eval_warnings_main_duplicate_pr() -> None:
    """main() should stop before querying the LLM when a matching PR exists."""
    from python.eval_warnings.main import EvalWarning, main

    prefix = "python.eval_warnings.main."
    found = {EvalWarning(system="s1", message="test")}
    with (
        patch(prefix + "configure_logger"),
        patch(prefix + "download_logs", return_value="log"),
        patch(prefix + "parse_warnings", return_value=found),
        patch(prefix + "compute_warning_hash", return_value="abc123"),
        patch(prefix + "check_duplicate_pr", return_value=True),
    ):
        main(
            run_id="123",
            repo="owner/repo",
            ollama_url="http://localhost:11434",
            run_url="http://example.com/run",
        )
def test_eval_warnings_main_no_llm_response() -> None:
    """main() should stop when the LLM returns no response."""
    from python.eval_warnings.main import EvalWarning, main

    prefix = "python.eval_warnings.main."
    found = {EvalWarning(system="s1", message="test")}
    with (
        patch(prefix + "configure_logger"),
        patch(prefix + "download_logs", return_value="log"),
        patch(prefix + "parse_warnings", return_value=found),
        patch(prefix + "compute_warning_hash", return_value="abc123"),
        patch(prefix + "check_duplicate_pr", return_value=False),
        patch(prefix + "extract_referenced_files", return_value={}),
        patch(prefix + "query_ollama", return_value=None),
    ):
        main(
            run_id="123",
            repo="owner/repo",
            ollama_url="http://localhost:11434",
            run_url="http://example.com/run",
        )
def test_eval_warnings_main_no_changes_applied() -> None:
    """main() should not open a PR when zero changes are applied."""
    from python.eval_warnings.main import EvalWarning, main

    prefix = "python.eval_warnings.main."
    found = {EvalWarning(system="s1", message="test")}
    with (
        patch(prefix + "configure_logger"),
        patch(prefix + "download_logs", return_value="log"),
        patch(prefix + "parse_warnings", return_value=found),
        patch(prefix + "compute_warning_hash", return_value="abc123"),
        patch(prefix + "check_duplicate_pr", return_value=False),
        patch(prefix + "extract_referenced_files", return_value={}),
        patch(prefix + "query_ollama", return_value="some response"),
        patch(prefix + "parse_changes", return_value=[]),
        patch(prefix + "apply_changes", return_value=0),
    ):
        main(
            run_id="123",
            repo="owner/repo",
            ollama_url="http://localhost:11434",
            run_url="http://example.com/run",
        )
def test_eval_warnings_main_full_success() -> None:
    """main() should create exactly one PR on the full success path."""
    from python.eval_warnings.main import EvalWarning, main

    prefix = "python.eval_warnings.main."
    found = {EvalWarning(system="s1", message="test")}
    with (
        patch(prefix + "configure_logger"),
        patch(prefix + "download_logs", return_value="log"),
        patch(prefix + "parse_warnings", return_value=found),
        patch(prefix + "compute_warning_hash", return_value="abc123"),
        patch(prefix + "check_duplicate_pr", return_value=False),
        patch(prefix + "extract_referenced_files", return_value={}),
        patch(prefix + "query_ollama", return_value="response"),
        patch(prefix + "parse_changes", return_value=[{"file": "a.nix"}]),
        patch(prefix + "apply_changes", return_value=1),
        patch(prefix + "create_pr") as mock_pr,
    ):
        main(
            run_id="123",
            repo="owner/repo",
            ollama_url="http://localhost:11434",
            run_url="http://example.com/run",
        )
    mock_pr.assert_called_once()

View File

@@ -1,248 +0,0 @@
"""Tests for python/heater modules."""
from __future__ import annotations
import sys
from typing import TYPE_CHECKING
from unittest.mock import MagicMock, patch
from python.heater.models import ActionResult, DeviceConfig, HeaterStatus
if TYPE_CHECKING:
pass
# --- models tests ---
def test_device_config() -> None:
    """DeviceConfig stores its fields and defaults to protocol version 3.5."""
    cfg = DeviceConfig(device_id="abc123", ip="192.168.1.1", local_key="key123")
    assert cfg.device_id == "abc123"
    assert cfg.ip == "192.168.1.1"
    assert cfg.local_key == "key123"
    assert cfg.version == 3.5


def test_device_config_custom_version() -> None:
    """An explicit protocol version overrides the default."""
    cfg = DeviceConfig(device_id="abc", ip="10.0.0.1", local_key="key", version=3.3)
    assert cfg.version == 3.3


def test_heater_status_defaults() -> None:
    """Only `power` is required; all optional fields default to empty values."""
    status = HeaterStatus(power=True)
    assert status.power is True
    assert status.setpoint is None
    assert status.state is None
    assert status.error_code is None
    assert status.raw_dps == {}


def test_heater_status_full() -> None:
    """All optional fields are stored as given."""
    status = HeaterStatus(
        power=True,
        setpoint=72,
        state="Heat",
        error_code=0,
        raw_dps={"1": True, "101": 72},
    )
    assert status.power is True
    assert status.setpoint == 72
    assert status.state == "Heat"


def test_action_result_success() -> None:
    """A successful ActionResult carries no error."""
    outcome = ActionResult(success=True, action="on", power=True)
    assert outcome.success is True
    assert outcome.action == "on"
    assert outcome.power is True
    assert outcome.error is None


def test_action_result_failure() -> None:
    """A failed ActionResult records the error message."""
    outcome = ActionResult(success=False, action="on", error="Connection failed")
    assert outcome.success is False
    assert outcome.error == "Connection failed"
# --- controller tests (with mocked tinytuya) ---
def _get_controller_class() -> type:
    """Import HeaterController with a mocked tinytuya module."""
    mock_tinytuya = MagicMock()
    with patch.dict(sys.modules, {"tinytuya": mock_tinytuya}):
        # Force reimport so the controller module binds the mocked tinytuya.
        if "python.heater.controller" in sys.modules:
            del sys.modules["python.heater.controller"]
        from python.heater.controller import HeaterController
        return HeaterController


def _make_controller(mock_device: MagicMock):
    """Build a HeaterController whose tinytuya backend is `mock_device`.

    Factors out the identical mock/re-import boilerplate that every test
    below previously duplicated. The controller module is re-imported under
    a patched sys.modules so it captures the mock; the returned controller
    keeps that binding after the patch context exits.
    """
    mock_tinytuya = MagicMock()
    mock_tinytuya.Device.return_value = mock_device
    with patch.dict(sys.modules, {"tinytuya": mock_tinytuya}):
        if "python.heater.controller" in sys.modules:
            del sys.modules["python.heater.controller"]
        from python.heater.controller import HeaterController

        config = DeviceConfig(device_id="abc", ip="10.0.0.1", local_key="key")
        return HeaterController(config)


def test_heater_controller_status_success() -> None:
    """status() maps dps values onto HeaterStatus fields."""
    device = MagicMock()
    device.status.return_value = {"dps": {"1": True, "101": 72, "102": "Heat", "108": 0}}
    status = _make_controller(device).status()
    assert status.power is True
    assert status.setpoint == 72
    assert status.state == "Heat"


def test_heater_controller_status_error() -> None:
    """status() reports power off when the device returns an error payload."""
    device = MagicMock()
    device.status.return_value = {"Error": "Connection timeout"}
    status = _make_controller(device).status()
    assert status.power is False


def test_heater_controller_turn_on() -> None:
    """turn_on() reports success with power on."""
    device = MagicMock()
    device.set_value.return_value = None
    result = _make_controller(device).turn_on()
    assert result.success is True
    assert result.action == "on"
    assert result.power is True


def test_heater_controller_turn_on_error() -> None:
    """turn_on() captures device errors in the result."""
    device = MagicMock()
    device.set_value.side_effect = ConnectionError("timeout")
    result = _make_controller(device).turn_on()
    assert result.success is False
    assert "timeout" in result.error


def test_heater_controller_turn_off() -> None:
    """turn_off() reports success with power off."""
    device = MagicMock()
    device.set_value.return_value = None
    result = _make_controller(device).turn_off()
    assert result.success is True
    assert result.action == "off"
    assert result.power is False


def test_heater_controller_turn_off_error() -> None:
    """turn_off() captures device errors in the result."""
    device = MagicMock()
    device.set_value.side_effect = ConnectionError("timeout")
    result = _make_controller(device).turn_off()
    assert result.success is False


def test_heater_controller_toggle_on_to_off() -> None:
    """toggle() turns the heater off when it is currently on."""
    device = MagicMock()
    device.status.return_value = {"dps": {"1": True}}
    device.set_value.return_value = None
    result = _make_controller(device).toggle()
    assert result.success is True
    assert result.action == "off"


def test_heater_controller_toggle_off_to_on() -> None:
    """toggle() turns the heater on when it is currently off."""
    device = MagicMock()
    device.status.return_value = {"dps": {"1": False}}
    device.set_value.return_value = None
    result = _make_controller(device).toggle()
    assert result.success is True
    assert result.action == "on"

View File

@@ -1,43 +0,0 @@
"""Tests for python/heater/main.py."""
from __future__ import annotations
import sys
from unittest.mock import MagicMock, patch
from python.heater.models import ActionResult, DeviceConfig, HeaterStatus
def test_create_app() -> None:
    """create_app should produce a titled FastAPI application."""
    with patch.dict(sys.modules, {"tinytuya": MagicMock()}):
        sys.modules.pop("python.heater.controller", None)
        sys.modules.pop("python.heater.main", None)
        from python.heater.main import create_app

        cfg = DeviceConfig(device_id="abc", ip="10.0.0.1", local_key="key")
        application = create_app(cfg)
        assert application is not None
        assert application.title == "Heater Control API"
def test_serve_missing_params() -> None:
    """serve() must raise when device parameters are omitted.

    The previous version wrapped the call in try/except-pass, which also
    passed silently when serve() raised nothing; the expected exception is
    now mandatory.
    """
    import typer

    with patch.dict(sys.modules, {"tinytuya": MagicMock()}):
        sys.modules.pop("python.heater.controller", None)
        sys.modules.pop("python.heater.main", None)
        from python.heater.main import serve

        with patch("python.heater.main.configure_logger"):
            raised = False
            try:
                serve(host="0.0.0.0", port=8124, log_level="INFO")
            except (typer.Exit, SystemExit):
                raised = True
            assert raised, "serve() should exit when device params are missing"

View File

@@ -1,165 +0,0 @@
"""Extended tests for python/heater/main.py - FastAPI routes."""
from __future__ import annotations
import sys
from unittest.mock import MagicMock, patch
from python.heater.models import ActionResult, DeviceConfig, HeaterStatus
def _make_app(mock_device: MagicMock):
    """Create the heater FastAPI app with a controller bound to `mock_device`.

    Factors out the mock/re-import boilerplate previously duplicated in every
    test below. The lifespan handler normally sets app.state.controller; tests
    assign it manually.
    """
    mock_tinytuya = MagicMock()
    mock_tinytuya.Device.return_value = mock_device
    with patch.dict(sys.modules, {"tinytuya": mock_tinytuya}):
        sys.modules.pop("python.heater.controller", None)
        sys.modules.pop("python.heater.main", None)
        from python.heater.controller import HeaterController
        from python.heater.main import create_app

        config = DeviceConfig(device_id="abc", ip="10.0.0.1", local_key="key")
        app = create_app(config)
        app.state.controller = HeaterController(config)
        return app


def _endpoint(app, path: str):
    """Return the route handler for `path`, failing loudly if it is absent.

    The previous per-test for/break loops passed silently when the route was
    missing because the assertions inside the loop never ran.
    """
    for route in app.routes:
        if getattr(route, "path", None) == path:
            return route.endpoint
    raise AssertionError(f"route {path!r} not found")


def test_heater_app_routes() -> None:
    """The app exposes the four control endpoints."""
    app = _make_app(MagicMock())
    route_paths = {r.path for r in app.routes}
    assert {"/status", "/on", "/off", "/toggle"} <= route_paths


def test_heater_get_status_route() -> None:
    """/status returns the device's power state."""
    device = MagicMock()
    device.status.return_value = {"dps": {"1": True, "101": 72}}
    result = _endpoint(_make_app(device), "/status")()
    assert result.power is True


def test_heater_on_route() -> None:
    """/on reports success when the device accepts the command."""
    device = MagicMock()
    device.set_value.return_value = None
    result = _endpoint(_make_app(device), "/on")()
    assert result.success is True


def test_heater_off_route() -> None:
    """/off reports success when the device accepts the command."""
    device = MagicMock()
    device.set_value.return_value = None
    result = _endpoint(_make_app(device), "/off")()
    assert result.success is True


def test_heater_toggle_route() -> None:
    """/toggle reports success when status and set_value both succeed."""
    device = MagicMock()
    device.status.return_value = {"dps": {"1": True}}
    device.set_value.return_value = None
    result = _endpoint(_make_app(device), "/toggle")()
    assert result.success is True


def test_heater_on_route_failure() -> None:
    """/on raises HTTPException when the device call fails."""
    import pytest
    from fastapi import HTTPException

    device = MagicMock()
    device.set_value.side_effect = ConnectionError("fail")
    handler = _endpoint(_make_app(device), "/on")
    with pytest.raises(HTTPException):
        handler()

View File

@@ -1,103 +0,0 @@
"""Tests for heater/main.py serve function and lifespan."""
from __future__ import annotations
import sys
from unittest.mock import MagicMock, patch
import pytest
from click.exceptions import Exit
from python.heater.models import DeviceConfig
def test_serve_missing_params() -> None:
    """serve() raises typer Exit when device parameters are absent."""
    with patch.dict(sys.modules, {"tinytuya": MagicMock()}):
        sys.modules.pop("python.heater.controller", None)
        sys.modules.pop("python.heater.main", None)
        from python.heater.main import serve

        with pytest.raises(Exit):
            serve(host="localhost", port=8124, log_level="INFO")


def test_serve_with_params() -> None:
    """serve() hands off to uvicorn when the device is fully specified."""
    with patch.dict(sys.modules, {"tinytuya": MagicMock()}):
        sys.modules.pop("python.heater.controller", None)
        sys.modules.pop("python.heater.main", None)
        from python.heater.main import serve

        with patch("python.heater.main.uvicorn.run") as mock_run:
            serve(
                host="localhost",
                port=8124,
                log_level="INFO",
                device_id="abc",
                device_ip="10.0.0.1",
                local_key="key123",
            )
        mock_run.assert_called_once()
def test_heater_off_route_failure() -> None:
    """/off raises HTTPException when the device call fails.

    Uses next() to locate the route so a missing /off fails the test with
    StopIteration instead of passing silently (the previous for/break loop
    skipped the assertion entirely when the route was absent).
    """
    mock_tinytuya = MagicMock()
    device = MagicMock()
    device.set_value.side_effect = ConnectionError("fail")
    mock_tinytuya.Device.return_value = device
    with patch.dict(sys.modules, {"tinytuya": mock_tinytuya}):
        sys.modules.pop("python.heater.controller", None)
        sys.modules.pop("python.heater.main", None)
        from python.heater.controller import HeaterController
        from python.heater.main import create_app
        from fastapi import HTTPException

        config = DeviceConfig(device_id="abc", ip="10.0.0.1", local_key="key")
        app = create_app(config)
        app.state.controller = HeaterController(config)
        handler = next(r.endpoint for r in app.routes if getattr(r, "path", None) == "/off")
        with pytest.raises(HTTPException):
            handler()


def test_heater_toggle_route_failure() -> None:
    """/toggle raises HTTPException when set_value fails after status().

    Same next()-based lookup as above so a missing /toggle route cannot make
    the test pass silently.
    """
    mock_tinytuya = MagicMock()
    device = MagicMock()
    # toggle calls status() first, then set_value — only set_value fails.
    device.status.return_value = {"dps": {"1": True}}
    device.set_value.side_effect = ConnectionError("fail")
    mock_tinytuya.Device.return_value = device
    with patch.dict(sys.modules, {"tinytuya": mock_tinytuya}):
        sys.modules.pop("python.heater.controller", None)
        sys.modules.pop("python.heater.main", None)
        from python.heater.controller import HeaterController
        from python.heater.main import create_app
        from fastapi import HTTPException

        config = DeviceConfig(device_id="abc", ip="10.0.0.1", local_key="key")
        app = create_app(config)
        app.state.controller = HeaterController(config)
        handler = next(r.endpoint for r in app.routes if getattr(r, "path", None) == "/toggle")
        with pytest.raises(HTTPException):
            handler()

View File

@@ -1,191 +0,0 @@
"""Tests for python/installer modules."""
from __future__ import annotations
import curses
from unittest.mock import MagicMock, patch
import pytest
from python.installer.tui import (
Cursor,
State,
calculate_device_menu_padding,
get_device,
)
# --- Cursor tests ---
def test_cursor_init() -> None:
    """A fresh Cursor starts at the origin with zero bounds."""
    c = Cursor()
    assert (c.get_x(), c.get_y()) == (0, 0)
    assert (c.height, c.width) == (0, 0)


def _sized_cursor(height: int, width: int, x: int = 0, y: int = 0) -> Cursor:
    """Build a Cursor with the given bounds and initial position."""
    c = Cursor()
    c.set_height(height)
    c.set_width(width)
    c.set_x(x)
    c.set_y(y)
    return c


def test_cursor_set_height_width() -> None:
    """set_height/set_width store the screen bounds."""
    c = _sized_cursor(100, 200)
    assert c.height == 100
    assert c.width == 200


def test_cursor_bounce_check() -> None:
    """Bounce checks clamp coordinates into the valid range."""
    c = _sized_cursor(10, 20)
    assert c.x_bounce_check(-1) == 0
    assert c.x_bounce_check(25) == 19
    assert c.x_bounce_check(5) == 5
    assert c.y_bounce_check(-1) == 0
    assert c.y_bounce_check(15) == 9
    assert c.y_bounce_check(5) == 5


def test_cursor_set_x_y() -> None:
    """In-range set_x/set_y values are stored verbatim."""
    c = _sized_cursor(10, 20, x=5, y=3)
    assert (c.get_x(), c.get_y()) == (5, 3)


def test_cursor_set_x_y_bounds() -> None:
    """Out-of-range set_x/set_y values are clamped to the edges."""
    c = _sized_cursor(10, 20)
    c.set_x(-5)
    assert c.get_x() == 0
    c.set_y(100)
    assert c.get_y() == 9


def test_cursor_move_up() -> None:
    """move_up decrements y by one."""
    c = _sized_cursor(10, 20, y=5)
    c.move_up()
    assert c.get_y() == 4


def test_cursor_move_down() -> None:
    """move_down increments y by one."""
    c = _sized_cursor(10, 20, y=5)
    c.move_down()
    assert c.get_y() == 6


def test_cursor_move_left() -> None:
    """move_left decrements x by one."""
    c = _sized_cursor(10, 20, x=5)
    c.move_left()
    assert c.get_x() == 4


def test_cursor_move_right() -> None:
    """move_right increments x by one."""
    c = _sized_cursor(10, 20, x=5)
    c.move_right()
    assert c.get_x() == 6


def test_cursor_navigation() -> None:
    """Arrow keys move the cursor one cell in each direction."""
    c = _sized_cursor(10, 20, x=5, y=5)
    c.navigation(curses.KEY_UP)
    assert c.get_y() == 4
    c.navigation(curses.KEY_DOWN)
    assert c.get_y() == 5
    c.navigation(curses.KEY_LEFT)
    assert c.get_x() == 4
    c.navigation(curses.KEY_RIGHT)
    assert c.get_x() == 5


def test_cursor_navigation_unknown_key() -> None:
    """Unrecognized keys leave the position untouched."""
    c = _sized_cursor(10, 20, x=5, y=5)
    c.navigation(999)  # not an arrow key
    assert (c.get_x(), c.get_y()) == (5, 5)


# --- State tests ---
def test_state_init() -> None:
    """A fresh State has zeroed sizes and no selections."""
    s = State()
    assert s.key == 0
    assert s.swap_size == 0
    assert s.reserve_size == 0
    assert s.selected_device_ids == set()
    assert s.show_swap_input is False
    assert s.show_reserve_input is False


def test_state_get_selected_devices() -> None:
    """get_selected_devices returns the selection as a tuple."""
    s = State()
    s.selected_device_ids = {"/dev/sda", "/dev/sdb"}
    chosen = s.get_selected_devices()
    assert isinstance(chosen, tuple)
    assert set(chosen) == {"/dev/sda", "/dev/sdb"}


# --- get_device tests ---
def test_get_device() -> None:
    """get_device parses lsblk-style key="value" output into a dict."""
    raw = 'NAME="/dev/sda" SIZE="100G" TYPE="disk" MOUNTPOINTS=""'
    parsed = get_device(raw)
    assert parsed["name"] == "/dev/sda"
    assert parsed["size"] == "100G"
    assert parsed["type"] == "disk"


# --- calculate_device_menu_padding ---
def test_calculate_device_menu_padding() -> None:
    """Padding equals the longest field value plus the extra margin."""
    devices = [
        {"name": "/dev/sda", "size": "100G"},
        {"name": "/dev/nvme0n1", "size": "500G"},
    ]
    assert calculate_device_menu_padding(devices, "name", 2) == len("/dev/nvme0n1") + 2

View File

@@ -1,168 +0,0 @@
"""Extended tests for python/installer modules."""
from __future__ import annotations
from unittest.mock import MagicMock, patch
import pytest
from python.installer.__main__ import (
bash_wrapper,
create_zfs_pool,
get_cpu_manufacturer,
partition_disk,
)
from python.installer.tui import (
Cursor,
State,
bash_wrapper as tui_bash_wrapper,
get_device,
calculate_device_menu_padding,
)
# --- installer __main__ tests ---
def test_installer_bash_wrapper_success() -> None:
"""Test installer bash_wrapper on success."""
result = bash_wrapper("echo hello")
assert result.strip() == "hello"
def test_installer_bash_wrapper_error() -> None:
"""Test installer bash_wrapper raises on error."""
with pytest.raises(RuntimeError, match="Failed to run command"):
bash_wrapper("ls /nonexistent/path/that/does/not/exist")
def test_partition_disk() -> None:
"""Test partition_disk calls commands correctly."""
with patch("python.installer.__main__.bash_wrapper") as mock_bash:
partition_disk("/dev/sda", swap_size=8, reserve=0)
assert mock_bash.call_count == 2
def test_partition_disk_with_reserve() -> None:
"""Test partition_disk with reserve space."""
with patch("python.installer.__main__.bash_wrapper") as mock_bash:
partition_disk("/dev/sda", swap_size=8, reserve=10)
assert mock_bash.call_count == 2
def test_partition_disk_minimum_swap() -> None:
"""Test partition_disk enforces minimum swap size."""
with patch("python.installer.__main__.bash_wrapper") as mock_bash:
partition_disk("/dev/sda", swap_size=0, reserve=-1)
# swap_size should be clamped to 1, reserve to 0
assert mock_bash.call_count == 2
def test_create_zfs_pool_single_disk() -> None:
"""Test create_zfs_pool with single disk."""
with patch("python.installer.__main__.bash_wrapper") as mock_bash:
mock_bash.return_value = "NAME\nroot_pool\n"
create_zfs_pool(["/dev/sda-part2"], "/mnt")
assert mock_bash.call_count == 2
def test_create_zfs_pool_mirror() -> None:
"""Test create_zfs_pool with mirror disks."""
with patch("python.installer.__main__.bash_wrapper") as mock_bash:
mock_bash.return_value = "NAME\nroot_pool\n"
create_zfs_pool(["/dev/sda-part2", "/dev/sdb-part2"], "/mnt")
assert mock_bash.call_count == 2
def test_create_zfs_pool_no_disks() -> None:
"""Test create_zfs_pool raises with no disks."""
with pytest.raises(ValueError, match="disks must be a tuple"):
create_zfs_pool([], "/mnt")
def test_get_cpu_manufacturer_amd() -> None:
"""Test get_cpu_manufacturer with AMD CPU."""
output = "vendor_id\t: AuthenticAMD\nmodel name\t: AMD Ryzen 9\n"
with patch("python.installer.__main__.bash_wrapper", return_value=output):
assert get_cpu_manufacturer() == "amd"
def test_get_cpu_manufacturer_intel() -> None:
"""Test get_cpu_manufacturer with Intel CPU."""
output = "vendor_id\t: GenuineIntel\nmodel name\t: Intel Core i9\n"
with patch("python.installer.__main__.bash_wrapper", return_value=output):
assert get_cpu_manufacturer() == "intel"
def test_get_cpu_manufacturer_unknown() -> None:
"""Test get_cpu_manufacturer with unknown CPU raises."""
output = "model name\t: Unknown CPU\n"
with (
patch("python.installer.__main__.bash_wrapper", return_value=output),
pytest.raises(RuntimeError, match="Failed to get CPU manufacturer"),
):
get_cpu_manufacturer()
# --- tui bash_wrapper tests ---
def test_tui_bash_wrapper_success() -> None:
"""Test tui bash_wrapper success."""
result = tui_bash_wrapper("echo hello")
assert result.strip() == "hello"
def test_tui_bash_wrapper_error() -> None:
"""Test tui bash_wrapper raises on error."""
with pytest.raises(RuntimeError, match="Failed to run command"):
tui_bash_wrapper("ls /nonexistent/path/that/does/not/exist")
# --- Cursor boundary tests ---
def test_cursor_move_at_boundaries() -> None:
    """Moving up/left while at the origin keeps both coordinates at 0."""
    cursor = Cursor()
    cursor.set_height(10)
    cursor.set_width(20)
    cursor.set_x(0)
    cursor.set_y(0)
    cursor.move_up()
    assert cursor.get_y() == 0
    cursor.move_left()
    assert cursor.get_x() == 0
def test_cursor_move_at_max_boundaries() -> None:
    """Moving down/right at the far corner is clamped to width/height - 1."""
    cursor = Cursor()
    cursor.set_height(5)
    cursor.set_width(10)
    cursor.set_x(9)
    cursor.set_y(4)
    cursor.move_down()
    assert cursor.get_y() == 4
    cursor.move_right()
    assert cursor.get_x() == 9
# --- get_device additional ---
def test_get_device_with_mountpoint() -> None:
    """An lsblk record carrying a mountpoint is parsed into the dict."""
    record = 'NAME="/dev/sda1" SIZE="512M" TYPE="part" MOUNTPOINTS="/boot"'
    parsed = get_device(record)
    assert parsed["mountpoints"] == "/boot"
# --- State additional ---
def test_state_selected_devices_empty() -> None:
    """A freshly constructed State reports an empty device selection."""
    state = State()
    assert state.get_selected_devices() == ()

View File

@@ -1,50 +0,0 @@
"""Extended tests for python/installer/__main__.py."""
from __future__ import annotations
import sys
from unittest.mock import MagicMock, patch
import pytest
from python.installer.__main__ import (
create_zfs_datasets,
create_zfs_pool,
get_boot_drive_id,
partition_disk,
)
def test_create_zfs_datasets() -> None:
    """Four dataset creations plus one verification listing are issued."""
    with patch("python.installer.__main__.bash_wrapper") as fake_shell:
        fake_shell.return_value = (
            "NAME\nroot_pool\nroot_pool/root\nroot_pool/home\n"
            "root_pool/var\nroot_pool/nix\n"
        )
        create_zfs_datasets()
        assert fake_shell.call_count == 5  # 4 create + 1 list
def test_create_zfs_datasets_missing() -> None:
    """create_zfs_datasets exits when expected datasets are absent.

    The mocked ``bash_wrapper`` reports only the bare pool, so the
    post-creation verification should fail and raise ``SystemExit``.

    Note: the original test requested the ``monkeypatch`` fixture but
    never used it; the unused parameter has been removed.
    """
    with (
        patch("python.installer.__main__.bash_wrapper") as mock_bash,
        pytest.raises(SystemExit),
    ):
        mock_bash.return_value = "NAME\nroot_pool\n"
        create_zfs_datasets()
def test_create_zfs_pool_failure() -> None:
    """create_zfs_pool exits when the pool never appears in the listing.

    The mocked ``bash_wrapper`` returns a listing without ``root_pool``,
    so the verification step should raise ``SystemExit``.

    Note: the original test requested the ``monkeypatch`` fixture but
    never used it; the unused parameter has been removed.
    """
    with (
        patch("python.installer.__main__.bash_wrapper") as mock_bash,
        pytest.raises(SystemExit),
    ):
        mock_bash.return_value = "NAME\n"
        create_zfs_pool(["/dev/sda-part2"], "/mnt")
def test_get_boot_drive_id() -> None:
    """The UUID row of the mocked lsblk output is extracted verbatim."""
    fake_output = "UUID\nABCD-1234\n"
    with patch("python.installer.__main__.bash_wrapper", return_value=fake_output):
        assert get_boot_drive_id("/dev/sda") == "ABCD-1234"

View File

@@ -1,312 +0,0 @@
"""Additional tests for python/installer/__main__.py covering missing lines."""
from __future__ import annotations
from unittest.mock import MagicMock, call, patch
import pytest
from python.installer.__main__ import (
create_nix_hardware_file,
install_nixos,
installer,
main,
)
# --- create_nix_hardware_file (lines 167-218) ---
def test_create_nix_hardware_file_no_encrypt() -> None:
    """Without encryption the generated file omits every LUKS stanza."""
    with (
        patch("python.installer.__main__.get_cpu_manufacturer", return_value="amd"),
        patch("python.installer.__main__.get_boot_drive_id", return_value="ABCD-1234"),
        patch("python.installer.__main__.getrandbits", return_value=0xDEADBEEF),
        patch("python.installer.__main__.Path") as fake_path,
    ):
        create_nix_hardware_file("/mnt", ["/dev/sda"], encrypt=None)
        fake_path.assert_called_once_with("/mnt/etc/nixos/hardware-configuration.nix")
        content = fake_path.return_value.write_text.call_args[0][0]
        # CPU module, boot UUID, and hostId must all appear; no LUKS config.
        for fragment in ("kvm-amd", "ABCD-1234", "deadbeef"):
            assert fragment in content
        assert "luks" not in content
def test_create_nix_hardware_file_with_encrypt() -> None:
    """With an encryption key the file gains the full LUKS configuration."""
    with (
        patch("python.installer.__main__.get_cpu_manufacturer", return_value="intel"),
        patch("python.installer.__main__.get_boot_drive_id", return_value="EFGH-5678"),
        patch("python.installer.__main__.getrandbits", return_value=0x12345678),
        patch("python.installer.__main__.Path") as fake_path,
    ):
        create_nix_hardware_file("/mnt", ["/dev/sda"], encrypt="mykey")
        content = fake_path.return_value.write_text.call_args[0][0]
        expected_fragments = (
            "kvm-intel",
            "EFGH-5678",
            "12345678",
            "luks",
            "luks-root-pool-sda-part2",
            "bypassWorkqueues",
            "allowDiscards",
        )
        for fragment in expected_fragments:
            assert fragment in content
def test_create_nix_hardware_file_content_structure() -> None:
    """The generated Nix file carries every expected top-level section."""
    with (
        patch("python.installer.__main__.get_cpu_manufacturer", return_value="amd"),
        patch("python.installer.__main__.get_boot_drive_id", return_value="UUID-1234"),
        patch("python.installer.__main__.getrandbits", return_value=0xAABBCCDD),
        patch("python.installer.__main__.Path") as fake_path,
    ):
        create_nix_hardware_file("/mnt", ["/dev/sda"], encrypt=None)
        content = fake_path.return_value.write_text.call_args[0][0]
        expected_fragments = (
            "{ config, lib, modulesPath, ... }:",
            "boot =",
            "fileSystems",
            "root_pool/root",
            "root_pool/home",
            "root_pool/var",
            "root_pool/nix",
            "networking.hostId",
            "x86_64-linux",
        )
        for fragment in expected_fragments:
            assert fragment in content
# --- install_nixos (lines 221-241) ---
def test_install_nixos_single_disk() -> None:
    """A single-disk install mounts, formats, and runs nixos-install once."""
    with (
        patch("python.installer.__main__.bash_wrapper") as fake_shell,
        patch("python.installer.__main__.run") as fake_run,
        patch("python.installer.__main__.create_nix_hardware_file") as fake_hw,
    ):
        install_nixos("/mnt", ["/dev/sda"], encrypt=None)
        # 4 mounts + 1 mkfs.vfat + 1 boot mount + 1 nixos-generate-config
        assert fake_shell.call_count == 7
        fake_hw.assert_called_once_with("/mnt", ["/dev/sda"], None)
        fake_run.assert_called_once_with(("nixos-install", "--root", "/mnt"), check=True)
def test_install_nixos_multiple_disks() -> None:
    """Every disk gets its EFI partition formatted with mkfs.vfat."""
    with (
        patch("python.installer.__main__.bash_wrapper") as fake_shell,
        patch("python.installer.__main__.run") as fake_run,
        patch("python.installer.__main__.create_nix_hardware_file") as fake_hw,
    ):
        install_nixos("/mnt", ["/dev/sda", "/dev/sdb"], encrypt="key")
        # 4 mounts + 2 mkfs.vfat + 1 boot mount + 1 generate-config
        assert fake_shell.call_count == 8
        issued = [str(invocation) for invocation in fake_shell.call_args_list]
        # both disks must be formatted
        assert any("mkfs.vfat" in cmd and "sda" in cmd for cmd in issued)
        assert any("mkfs.vfat" in cmd and "sdb" in cmd for cmd in issued)
        fake_hw.assert_called_once_with("/mnt", ["/dev/sda", "/dev/sdb"], "key")
def test_install_nixos_mounts_zfs_datasets() -> None:
    """Each of the four ZFS datasets appears in some mount command."""
    with (
        patch("python.installer.__main__.bash_wrapper") as fake_shell,
        patch("python.installer.__main__.run"),
        patch("python.installer.__main__.create_nix_hardware_file"),
    ):
        install_nixos("/mnt", ["/dev/sda"], encrypt=None)
        issued = [str(invocation) for invocation in fake_shell.call_args_list]
        for dataset in ("root_pool/root", "root_pool/home", "root_pool/var", "root_pool/nix"):
            assert any(dataset in cmd for cmd in issued)
# --- installer (lines 244-280) ---
def test_installer_no_encrypt() -> None:
    """An unencrypted install pools the raw partition directly."""
    with (
        patch("python.installer.__main__.partition_disk") as fake_partition,
        patch("python.installer.__main__.Popen"),
        patch("python.installer.__main__.Path"),
        patch("python.installer.__main__.create_zfs_pool") as fake_pool,
        patch("python.installer.__main__.create_zfs_datasets") as fake_datasets,
        patch("python.installer.__main__.install_nixos") as fake_install,
    ):
        installer(
            disks=("/dev/sda",),
            swap_size=8,
            reserve=0,
            encrypt_key=None,
        )
        fake_partition.assert_called_once_with("/dev/sda", 8, 0)
        fake_pool.assert_called_once_with(["/dev/sda-part2"], "/tmp/nix_install")
        fake_datasets.assert_called_once()
        fake_install.assert_called_once_with("/tmp/nix_install", ("/dev/sda",), None)
def test_installer_with_encrypt() -> None:
    """Encryption adds cryptsetup calls and pools the mapper device."""
    with (
        patch("python.installer.__main__.partition_disk") as fake_partition,
        patch("python.installer.__main__.Popen"),
        patch("python.installer.__main__.sleep") as fake_sleep,
        patch("python.installer.__main__.run") as fake_run,
        patch("python.installer.__main__.Path"),
        patch("python.installer.__main__.create_zfs_pool") as fake_pool,
        patch("python.installer.__main__.create_zfs_datasets") as fake_datasets,
        patch("python.installer.__main__.install_nixos") as fake_install,
    ):
        installer(
            disks=("/dev/sda",),
            swap_size=8,
            reserve=10,
            encrypt_key="secret",
        )
        fake_partition.assert_called_once_with("/dev/sda", 8, 10)
        fake_sleep.assert_called_once_with(1)
        # one luksFormat plus one luksOpen
        assert fake_run.call_count == 2
        fake_pool.assert_called_once_with(
            ["/dev/mapper/luks-root-pool-sda-part2"],
            "/tmp/nix_install",
        )
        fake_datasets.assert_called_once()
        fake_install.assert_called_once_with("/tmp/nix_install", ("/dev/sda",), "secret")
def test_installer_multiple_disks_no_encrypt() -> None:
    """Two unencrypted disks are each partitioned and pooled together."""
    with (
        patch("python.installer.__main__.partition_disk") as fake_partition,
        patch("python.installer.__main__.Popen"),
        patch("python.installer.__main__.Path"),
        patch("python.installer.__main__.create_zfs_pool") as fake_pool,
        patch("python.installer.__main__.create_zfs_datasets"),
        patch("python.installer.__main__.install_nixos"),
    ):
        installer(
            disks=("/dev/sda", "/dev/sdb"),
            swap_size=4,
            reserve=0,
            encrypt_key=None,
        )
        assert fake_partition.call_count == 2
        fake_pool.assert_called_once_with(
            ["/dev/sda-part2", "/dev/sdb-part2"],
            "/tmp/nix_install",
        )
def test_installer_multiple_disks_with_encrypt() -> None:
    """Two encrypted disks each get partitioned, slept on, and cryptsetup'd."""
    with (
        patch("python.installer.__main__.partition_disk") as fake_partition,
        patch("python.installer.__main__.Popen"),
        patch("python.installer.__main__.sleep") as fake_sleep,
        patch("python.installer.__main__.run") as fake_run,
        patch("python.installer.__main__.Path"),
        patch("python.installer.__main__.create_zfs_pool") as fake_pool,
        patch("python.installer.__main__.create_zfs_datasets"),
        patch("python.installer.__main__.install_nixos"),
    ):
        installer(
            disks=("/dev/sda", "/dev/sdb"),
            swap_size=4,
            reserve=2,
            encrypt_key="key123",
        )
        assert fake_partition.call_count == 2
        assert fake_sleep.call_count == 2
        # 2 disks x (luksFormat + luksOpen) = 4 subprocess invocations
        assert fake_run.call_count == 4
        fake_pool.assert_called_once_with(
            ["/dev/mapper/luks-root-pool-sda-part2", "/dev/mapper/luks-root-pool-sdb-part2"],
            "/tmp/nix_install",
        )
# --- main (lines 283-299) ---
def test_main_calls_installer() -> None:
    """main wires the TUI state straight into one installer invocation."""
    tui_state = MagicMock()
    tui_state.selected_device_ids = {"/dev/disk/by-id/ata-DISK1"}
    tui_state.get_selected_devices.return_value = ("/dev/disk/by-id/ata-DISK1",)
    tui_state.swap_size = 8
    tui_state.reserve_size = 0
    with (
        patch("python.installer.__main__.configure_logger"),
        patch("python.installer.__main__.curses.wrapper", return_value=tui_state),
        patch("python.installer.__main__.getenv", return_value=None),
        patch("python.installer.__main__.sleep"),
        patch("python.installer.__main__.installer") as fake_installer,
    ):
        main()
        fake_installer.assert_called_once_with(
            disks=("/dev/disk/by-id/ata-DISK1",),
            swap_size=8,
            reserve=0,
            encrypt_key=None,
        )
def test_main_with_encrypt_key() -> None:
    """The encryption key from the environment is forwarded to installer."""
    tui_state = MagicMock()
    tui_state.selected_device_ids = {"/dev/disk/by-id/ata-DISK1"}
    tui_state.get_selected_devices.return_value = ("/dev/disk/by-id/ata-DISK1",)
    tui_state.swap_size = 16
    tui_state.reserve_size = 5
    with (
        patch("python.installer.__main__.configure_logger"),
        patch("python.installer.__main__.curses.wrapper", return_value=tui_state),
        patch("python.installer.__main__.getenv", return_value="my_encrypt_key"),
        patch("python.installer.__main__.sleep"),
        patch("python.installer.__main__.installer") as fake_installer,
    ):
        main()
        fake_installer.assert_called_once_with(
            disks=("/dev/disk/by-id/ata-DISK1",),
            swap_size=16,
            reserve=5,
            encrypt_key="my_encrypt_key",
        )
def test_main_calls_sleep() -> None:
    """main pauses for exactly 3 seconds before handing off to installer."""
    tui_state = MagicMock()
    tui_state.selected_device_ids = set()
    tui_state.get_selected_devices.return_value = ()
    tui_state.swap_size = 0
    tui_state.reserve_size = 0
    with (
        patch("python.installer.__main__.configure_logger"),
        patch("python.installer.__main__.curses.wrapper", return_value=tui_state),
        patch("python.installer.__main__.getenv", return_value=None),
        patch("python.installer.__main__.sleep") as fake_sleep,
        patch("python.installer.__main__.installer"),
    ):
        main()
        fake_sleep.assert_called_once_with(3)

Some files were not shown because too many files have changed in this diff Show More