Mirror of https://github.com/RichieCahill/dotfiles.git (synced 2026-04-21 06:39:09 -04:00)

Compare commits: main...feature/se (9 commits)

0ed7be036e
e4d5f342be
f8976f690f
d1d6a540e5
794dff4e13
21c4a38d81
31c6e2cb69
65dacb6089
b0ed4900e4
.vscode/settings.json (vendored, 2 lines changed)

@@ -40,6 +40,7 @@
 "cgroupdriver",
 "charliermarsh",
 "Checkpointing",
+"cloudflared",
 "codellama",
 "codezombiech",
 "compactmode",
@@ -307,7 +308,6 @@
 "usernamehw",
 "userprefs",
 "vaninventory",
-"vdev",
 "vfat",
 "victron",
 "virt",
@@ -34,7 +34,6 @@ in
 warn-dirty = false;
 flake-registry = ""; # disable global flake registries
 connect-timeout = 10;
-download-buffer-size = 536870912;
 fallback = true;
 };
@@ -12,7 +12,7 @@
 brain.id = "SSCGIPI-IV3VYKB-TRNIJE3-COV4T2H-CDBER7F-I2CGHYA-NWOEUDU-3T5QAAN"; # cspell:disable-line
 ipad.id = "KI76T3X-SFUGV2L-VSNYTKR-TSIUV5L-SHWD3HE-GQRGRCN-GY4UFMD-CW6Z6AX"; # cspell:disable-line
 jeeves.id = "ICRHXZW-ECYJCUZ-I4CZ64R-3XRK7CG-LL2HAAK-FGOHD22-BQA4AI6-5OAL6AG"; # cspell:disable-line
-phone.id = "JPVQKQW-CFXOJXT-Q5G5F3H-QIDHDRE-GKHPTQB-GXZUQSP-U7FR7F7-INP3AAH"; # cspell:disable-line
+phone.id = "TBRULKD-7DZPGGZ-F6LLB7J-MSO54AY-7KLPBIN-QOFK6PX-W2HBEWI-PHM2CQI"; # cspell:disable-line
 rhapsody-in-green.id = "ASL3KC4-3XEN6PA-7BQBRKE-A7JXLI6-DJT43BY-Q4WPOER-7UALUAZ-VTPQ6Q4"; # cspell:disable-line
 };
 };
flake.lock (generated, 36 lines changed)

@@ -8,11 +8,11 @@
 },
 "locked": {
 "dir": "pkgs/firefox-addons",
-"lastModified": 1776398575,
-"narHash": "sha256-WArU6WOdWxzbzGqYk4w1Mucg+bw/SCl6MoSp+/cZMio=",
+"lastModified": 1773979456,
+"narHash": "sha256-9kBMJ5IvxqNlkkj/swmE8uK1Sc7TL/LIRUI958m7uBM=",
 "owner": "rycee",
 "repo": "nur-expressions",
-"rev": "05815686caf4e3678f5aeb5fd36e567886ab0d30",
+"rev": "81e28f47ac18d9e89513929c77e711e657b64851",
 "type": "gitlab"
 },
 "original": {
@@ -29,11 +29,11 @@
 ]
 },
 "locked": {
-"lastModified": 1776454077,
-"narHash": "sha256-7zSUFWsU0+jlD7WB3YAxQ84Z/iJurA5hKPm8EfEyGJk=",
+"lastModified": 1774007980,
+"narHash": "sha256-FOnZjElEI8pqqCvB6K/1JRHTE8o4rer8driivTpq2uo=",
 "owner": "nix-community",
 "repo": "home-manager",
-"rev": "565e5349208fe7d0831ef959103c9bafbeac0681",
+"rev": "9670de2921812bc4e0452f6e3efd8c859696c183",
 "type": "github"
 },
 "original": {
@@ -44,11 +44,11 @@
 },
 "nixos-hardware": {
 "locked": {
-"lastModified": 1775490113,
-"narHash": "sha256-2ZBhDNZZwYkRmefK5XLOusCJHnoeKkoN95hoSGgMxWM=",
+"lastModified": 1774018263,
+"narHash": "sha256-HHYEwK1A22aSaxv2ibhMMkKvrDGKGlA/qObG4smrSqc=",
 "owner": "nixos",
 "repo": "nixos-hardware",
-"rev": "c775c2772ba56e906cbeb4e0b2db19079ef11ff7",
+"rev": "2d4b4717b2534fad5c715968c1cece04a172b365",
 "type": "github"
 },
 "original": {
@@ -60,11 +60,11 @@
 },
 "nixpkgs": {
 "locked": {
-"lastModified": 1776169885,
-"narHash": "sha256-l/iNYDZ4bGOAFQY2q8y5OAfBBtrDAaPuRQqWaFHVRXM=",
+"lastModified": 1773821835,
+"narHash": "sha256-TJ3lSQtW0E2JrznGVm8hOQGVpXjJyXY2guAxku2O9A4=",
 "owner": "nixos",
 "repo": "nixpkgs",
-"rev": "4bd9165a9165d7b5e33ae57f3eecbcb28fb231c9",
+"rev": "b40629efe5d6ec48dd1efba650c797ddbd39ace0",
 "type": "github"
 },
 "original": {
@@ -76,11 +76,11 @@
 },
 "nixpkgs-master": {
 "locked": {
-"lastModified": 1776469842,
-"narHash": "sha256-sqzM6PKMQoGk8Sl+uv2sbP1qiS2SPQhA2yn5zgZINMc=",
+"lastModified": 1774051532,
+"narHash": "sha256-d3CGMweyYIcPuTj5BKq+1Lx4zwlgL31nVtN647tOZKo=",
 "owner": "nixos",
 "repo": "nixpkgs",
-"rev": "025c852a89be820b3117f604c8ace42e9b4caa08",
+"rev": "8620c0b5cc8fbe76502442181be1d0514bc3a1b7",
 "type": "github"
 },
 "original": {
@@ -125,11 +125,11 @@
 ]
 },
 "locked": {
-"lastModified": 1776119890,
-"narHash": "sha256-Zm6bxLNnEOYuS/SzrAGsYuXSwk3cbkRQZY0fJnk8a5M=",
+"lastModified": 1773889674,
+"narHash": "sha256-+ycaiVAk3MEshJTg35cBTUa0MizGiS+bgpYw/f8ohkg=",
 "owner": "Mic92",
 "repo": "sops-nix",
-"rev": "d4971dd58c6627bfee52a1ad4237637c0a2fb0cd",
+"rev": "29b6519f3e0780452bca0ac0be4584f04ac16cc5",
 "type": "github"
 },
 "original": {
@@ -21,9 +21,9 @@
 alembic
 apprise
 apscheduler
+confluent-kafka
 fastapi
 fastapi-cli
-faster-whisper
 httpx
 mypy
 orjson
@@ -26,7 +26,6 @@ dependencies = [
 [project.scripts]
 database = "python.database_cli:app"
 van-inventory = "python.van_inventory.main:serve"
-whisper-transcribe = "python.tools.whisper.transcribe:main"
 
 [dependency-groups]
 dev = [
@@ -51,7 +50,6 @@ lint.ignore = [
     "COM812", # (TEMP) conflicts when used with the formatter
     "ISC001", # (TEMP) conflicts when used with the formatter
     "S603", # (PERM) This is known to cause a false positive
-    "S607", # (PERM) This is becoming a consistent annoyance
 ]
 
 [tool.ruff.lint.per-file-ignores]
@@ -80,7 +78,9 @@ lint.ignore = [
 "python/congress_tracker/**" = [
     "TC003", # (perm) this creates issues because sqlalchemy uses these at runtime
 ]
+"python/eval_warnings/**" = [
+    "S607", # (perm) gh and git are expected on PATH in the runner environment
+]
 "python/alembic/**" = [
     "INP001", # (perm) this creates LSP issues for alembic
 ]
@@ -46,7 +46,12 @@ ALREADY_ATTACHED_QUERY = text("""
 def upgrade() -> None:
     """Attach all weekly partition tables to the posts parent table."""
     connection = op.get_bind()
-    already_attached = {row[0] for row in connection.execute(ALREADY_ATTACHED_QUERY, {"parent": f"{schema}.posts"})}
+    already_attached = {
+        row[0]
+        for row in connection.execute(
+            ALREADY_ATTACHED_QUERY, {"parent": f"{schema}.posts"}
+        )
+    }
 
     for year in range(PARTITION_START_YEAR, PARTITION_END_YEAR + 1):
         for week in range(1, iso_weeks_in_year(year) + 1):
@@ -69,4 +74,7 @@ def downgrade() -> None:
     for year in range(PARTITION_START_YEAR, PARTITION_END_YEAR + 1):
         for week in range(1, iso_weeks_in_year(year) + 1):
             table_name = f"posts_{year}_{week:02d}"
-            op.execute(f"ALTER TABLE {schema}.posts DETACH PARTITION {schema}.{table_name}")
+            op.execute(
+                f"ALTER TABLE {schema}.posts "
+                f"DETACH PARTITION {schema}.{table_name}"
+            )
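Both hunks above loop over iso_weeks_in_year, which this compare view does not show. A minimal sketch of such a helper, assuming it returns the number of ISO weeks in a year (52 or 53), using the rule that December 28 always falls in a year's last ISO week:

# Hypothetical sketch of iso_weeks_in_year; the migration's actual helper is not shown in this diff.
from datetime import date


def iso_weeks_in_year(year: int) -> int:
    """Return the number of ISO weeks in a year (52 or 53)."""
    # December 28 is guaranteed to fall in the last ISO week of its year.
    return date(year, 12, 28).isocalendar()[1]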
@@ -1,153 +0,0 @@
-"""adding congress data.
-
-Revision ID: 83bfc8af92d8
-Revises: a1b2c3d4e5f6
-Create Date: 2026-03-27 10:43:02.324510
-
-"""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-import sqlalchemy as sa
-from alembic import op
-
-from python.orm import DataScienceDevBase
-
-if TYPE_CHECKING:
-    from collections.abc import Sequence
-
-# revision identifiers, used by Alembic.
-revision: str = "83bfc8af92d8"
-down_revision: str | None = "a1b2c3d4e5f6"
-branch_labels: str | Sequence[str] | None = None
-depends_on: str | Sequence[str] | None = None
-
-schema = DataScienceDevBase.schema_name
-
-
-def upgrade() -> None:
-    """Upgrade."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table(
-        "bill",
-        sa.Column("congress", sa.Integer(), nullable=False),
-        sa.Column("bill_type", sa.String(), nullable=False),
-        sa.Column("number", sa.Integer(), nullable=False),
-        sa.Column("title", sa.String(), nullable=True),
-        sa.Column("title_short", sa.String(), nullable=True),
-        sa.Column("official_title", sa.String(), nullable=True),
-        sa.Column("status", sa.String(), nullable=True),
-        sa.Column("status_at", sa.Date(), nullable=True),
-        sa.Column("sponsor_bioguide_id", sa.String(), nullable=True),
-        sa.Column("subjects_top_term", sa.String(), nullable=True),
-        sa.Column("id", sa.Integer(), nullable=False),
-        sa.Column("created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
-        sa.Column("updated", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
-        sa.PrimaryKeyConstraint("id", name=op.f("pk_bill")),
-        sa.UniqueConstraint("congress", "bill_type", "number", name="uq_bill_congress_type_number"),
-        schema=schema,
-    )
-    op.create_index("ix_bill_congress", "bill", ["congress"], unique=False, schema=schema)
-    op.create_table(
-        "legislator",
-        sa.Column("bioguide_id", sa.Text(), nullable=False),
-        sa.Column("thomas_id", sa.String(), nullable=True),
-        sa.Column("lis_id", sa.String(), nullable=True),
-        sa.Column("govtrack_id", sa.Integer(), nullable=True),
-        sa.Column("opensecrets_id", sa.String(), nullable=True),
-        sa.Column("fec_ids", sa.String(), nullable=True),
-        sa.Column("first_name", sa.String(), nullable=False),
-        sa.Column("last_name", sa.String(), nullable=False),
-        sa.Column("official_full_name", sa.String(), nullable=True),
-        sa.Column("nickname", sa.String(), nullable=True),
-        sa.Column("birthday", sa.Date(), nullable=True),
-        sa.Column("gender", sa.String(), nullable=True),
-        sa.Column("current_party", sa.String(), nullable=True),
-        sa.Column("current_state", sa.String(), nullable=True),
-        sa.Column("current_district", sa.Integer(), nullable=True),
-        sa.Column("current_chamber", sa.String(), nullable=True),
-        sa.Column("id", sa.Integer(), nullable=False),
-        sa.Column("created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
-        sa.Column("updated", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
-        sa.PrimaryKeyConstraint("id", name=op.f("pk_legislator")),
-        schema=schema,
-    )
-    op.create_index(op.f("ix_legislator_bioguide_id"), "legislator", ["bioguide_id"], unique=True, schema=schema)
-    op.create_table(
-        "bill_text",
-        sa.Column("bill_id", sa.Integer(), nullable=False),
-        sa.Column("version_code", sa.String(), nullable=False),
-        sa.Column("version_name", sa.String(), nullable=True),
-        sa.Column("text_content", sa.String(), nullable=True),
-        sa.Column("date", sa.Date(), nullable=True),
-        sa.Column("id", sa.Integer(), nullable=False),
-        sa.Column("created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
-        sa.Column("updated", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
-        sa.ForeignKeyConstraint(
-            ["bill_id"], [f"{schema}.bill.id"], name=op.f("fk_bill_text_bill_id_bill"), ondelete="CASCADE"
-        ),
-        sa.PrimaryKeyConstraint("id", name=op.f("pk_bill_text")),
-        sa.UniqueConstraint("bill_id", "version_code", name="uq_bill_text_bill_id_version_code"),
-        schema=schema,
-    )
-    op.create_table(
-        "vote",
-        sa.Column("congress", sa.Integer(), nullable=False),
-        sa.Column("chamber", sa.String(), nullable=False),
-        sa.Column("session", sa.Integer(), nullable=False),
-        sa.Column("number", sa.Integer(), nullable=False),
-        sa.Column("vote_type", sa.String(), nullable=True),
-        sa.Column("question", sa.String(), nullable=True),
-        sa.Column("result", sa.String(), nullable=True),
-        sa.Column("result_text", sa.String(), nullable=True),
-        sa.Column("vote_date", sa.Date(), nullable=False),
-        sa.Column("yea_count", sa.Integer(), nullable=True),
-        sa.Column("nay_count", sa.Integer(), nullable=True),
-        sa.Column("not_voting_count", sa.Integer(), nullable=True),
-        sa.Column("present_count", sa.Integer(), nullable=True),
-        sa.Column("bill_id", sa.Integer(), nullable=True),
-        sa.Column("id", sa.Integer(), nullable=False),
-        sa.Column("created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
-        sa.Column("updated", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
-        sa.ForeignKeyConstraint(["bill_id"], [f"{schema}.bill.id"], name=op.f("fk_vote_bill_id_bill")),
-        sa.PrimaryKeyConstraint("id", name=op.f("pk_vote")),
-        sa.UniqueConstraint("congress", "chamber", "session", "number", name="uq_vote_congress_chamber_session_number"),
-        schema=schema,
-    )
-    op.create_index("ix_vote_congress_chamber", "vote", ["congress", "chamber"], unique=False, schema=schema)
-    op.create_index("ix_vote_date", "vote", ["vote_date"], unique=False, schema=schema)
-    op.create_table(
-        "vote_record",
-        sa.Column("vote_id", sa.Integer(), nullable=False),
-        sa.Column("legislator_id", sa.Integer(), nullable=False),
-        sa.Column("position", sa.String(), nullable=False),
-        sa.ForeignKeyConstraint(
-            ["legislator_id"],
-            [f"{schema}.legislator.id"],
-            name=op.f("fk_vote_record_legislator_id_legislator"),
-            ondelete="CASCADE",
-        ),
-        sa.ForeignKeyConstraint(
-            ["vote_id"], [f"{schema}.vote.id"], name=op.f("fk_vote_record_vote_id_vote"), ondelete="CASCADE"
-        ),
-        sa.PrimaryKeyConstraint("vote_id", "legislator_id", name=op.f("pk_vote_record")),
-        schema=schema,
-    )
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    """Downgrade."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table("vote_record", schema=schema)
-    op.drop_index("ix_vote_date", table_name="vote", schema=schema)
-    op.drop_index("ix_vote_congress_chamber", table_name="vote", schema=schema)
-    op.drop_table("vote", schema=schema)
-    op.drop_table("bill_text", schema=schema)
-    op.drop_index(op.f("ix_legislator_bioguide_id"), table_name="legislator", schema=schema)
-    op.drop_table("legislator", schema=schema)
-    op.drop_index("ix_bill_congress", table_name="bill", schema=schema)
-    op.drop_table("bill", schema=schema)
-    # ### end Alembic commands ###
@@ -1,58 +0,0 @@
-"""adding LegislatorSocialMedia.
-
-Revision ID: 5cd7eee3549d
-Revises: 83bfc8af92d8
-Create Date: 2026-03-29 11:53:44.224799
-
-"""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-import sqlalchemy as sa
-from alembic import op
-
-from python.orm import DataScienceDevBase
-
-if TYPE_CHECKING:
-    from collections.abc import Sequence
-
-# revision identifiers, used by Alembic.
-revision: str = "5cd7eee3549d"
-down_revision: str | None = "83bfc8af92d8"
-branch_labels: str | Sequence[str] | None = None
-depends_on: str | Sequence[str] | None = None
-
-schema = DataScienceDevBase.schema_name
-
-
-def upgrade() -> None:
-    """Upgrade."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table(
-        "legislator_social_media",
-        sa.Column("legislator_id", sa.Integer(), nullable=False),
-        sa.Column("platform", sa.String(), nullable=False),
-        sa.Column("account_name", sa.String(), nullable=False),
-        sa.Column("url", sa.String(), nullable=True),
-        sa.Column("source", sa.String(), nullable=False),
-        sa.Column("id", sa.Integer(), nullable=False),
-        sa.Column("created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
-        sa.Column("updated", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
-        sa.ForeignKeyConstraint(
-            ["legislator_id"],
-            [f"{schema}.legislator.id"],
-            name=op.f("fk_legislator_social_media_legislator_id_legislator"),
-        ),
-        sa.PrimaryKeyConstraint("id", name=op.f("pk_legislator_social_media")),
-        schema=schema,
-    )
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    """Downgrade."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table("legislator_social_media", schema=schema)
-    # ### end Alembic commands ###
@@ -3,6 +3,7 @@
 from __future__ import annotations
 
 import logging
+import re
 import sys
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Literal
@@ -1,187 +0,0 @@
-"""removed ds table from richie DB.
-
-Revision ID: c8a794340928
-Revises: 6b275323f435
-Create Date: 2026-03-29 15:29:23.643146
-
-"""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects import postgresql
-
-from python.orm import RichieBase
-
-if TYPE_CHECKING:
-    from collections.abc import Sequence
-
-# revision identifiers, used by Alembic.
-revision: str = "c8a794340928"
-down_revision: str | None = "6b275323f435"
-branch_labels: str | Sequence[str] | None = None
-depends_on: str | Sequence[str] | None = None
-
-schema = RichieBase.schema_name
-
-
-def upgrade() -> None:
-    """Upgrade."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table("vote_record", schema=schema)
-    op.drop_index(op.f("ix_vote_congress_chamber"), table_name="vote", schema=schema)
-    op.drop_index(op.f("ix_vote_date"), table_name="vote", schema=schema)
-    op.drop_index(op.f("ix_legislator_bioguide_id"), table_name="legislator", schema=schema)
-    op.drop_table("legislator", schema=schema)
-    op.drop_table("vote", schema=schema)
-    op.drop_index(op.f("ix_bill_congress"), table_name="bill", schema=schema)
-    op.drop_table("bill", schema=schema)
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    """Downgrade."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table(
-        "vote",
-        sa.Column("congress", sa.INTEGER(), autoincrement=False, nullable=False),
-        sa.Column("chamber", sa.VARCHAR(), autoincrement=False, nullable=False),
-        sa.Column("session", sa.INTEGER(), autoincrement=False, nullable=False),
-        sa.Column("number", sa.INTEGER(), autoincrement=False, nullable=False),
-        sa.Column("vote_type", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("question", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("result", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("result_text", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("vote_date", sa.DATE(), autoincrement=False, nullable=False),
-        sa.Column("yea_count", sa.INTEGER(), autoincrement=False, nullable=True),
-        sa.Column("nay_count", sa.INTEGER(), autoincrement=False, nullable=True),
-        sa.Column("not_voting_count", sa.INTEGER(), autoincrement=False, nullable=True),
-        sa.Column("present_count", sa.INTEGER(), autoincrement=False, nullable=True),
-        sa.Column("bill_id", sa.INTEGER(), autoincrement=False, nullable=True),
-        sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
-        sa.Column(
-            "created",
-            postgresql.TIMESTAMP(timezone=True),
-            server_default=sa.text("now()"),
-            autoincrement=False,
-            nullable=False,
-        ),
-        sa.Column(
-            "updated",
-            postgresql.TIMESTAMP(timezone=True),
-            server_default=sa.text("now()"),
-            autoincrement=False,
-            nullable=False,
-        ),
-        sa.ForeignKeyConstraint(["bill_id"], [f"{schema}.bill.id"], name=op.f("fk_vote_bill_id_bill")),
-        sa.PrimaryKeyConstraint("id", name=op.f("pk_vote")),
-        sa.UniqueConstraint(
-            "congress",
-            "chamber",
-            "session",
-            "number",
-            name=op.f("uq_vote_congress_chamber_session_number"),
-            postgresql_include=[],
-            postgresql_nulls_not_distinct=False,
-        ),
-        schema=schema,
-    )
-    op.create_index(op.f("ix_vote_date"), "vote", ["vote_date"], unique=False, schema=schema)
-    op.create_index(op.f("ix_vote_congress_chamber"), "vote", ["congress", "chamber"], unique=False, schema=schema)
-    op.create_table(
-        "vote_record",
-        sa.Column("vote_id", sa.INTEGER(), autoincrement=False, nullable=False),
-        sa.Column("legislator_id", sa.INTEGER(), autoincrement=False, nullable=False),
-        sa.Column("position", sa.VARCHAR(), autoincrement=False, nullable=False),
-        sa.ForeignKeyConstraint(
-            ["legislator_id"],
-            [f"{schema}.legislator.id"],
-            name=op.f("fk_vote_record_legislator_id_legislator"),
-            ondelete="CASCADE",
-        ),
-        sa.ForeignKeyConstraint(
-            ["vote_id"], [f"{schema}.vote.id"], name=op.f("fk_vote_record_vote_id_vote"), ondelete="CASCADE"
-        ),
-        sa.PrimaryKeyConstraint("vote_id", "legislator_id", name=op.f("pk_vote_record")),
-        schema=schema,
-    )
-    op.create_table(
-        "legislator",
-        sa.Column("bioguide_id", sa.TEXT(), autoincrement=False, nullable=False),
-        sa.Column("thomas_id", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("lis_id", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("govtrack_id", sa.INTEGER(), autoincrement=False, nullable=True),
-        sa.Column("opensecrets_id", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("fec_ids", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("first_name", sa.VARCHAR(), autoincrement=False, nullable=False),
-        sa.Column("last_name", sa.VARCHAR(), autoincrement=False, nullable=False),
-        sa.Column("official_full_name", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("nickname", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("birthday", sa.DATE(), autoincrement=False, nullable=True),
-        sa.Column("gender", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("current_party", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("current_state", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("current_district", sa.INTEGER(), autoincrement=False, nullable=True),
-        sa.Column("current_chamber", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
-        sa.Column(
-            "created",
-            postgresql.TIMESTAMP(timezone=True),
-            server_default=sa.text("now()"),
-            autoincrement=False,
-            nullable=False,
-        ),
-        sa.Column(
-            "updated",
-            postgresql.TIMESTAMP(timezone=True),
-            server_default=sa.text("now()"),
-            autoincrement=False,
-            nullable=False,
-        ),
-        sa.PrimaryKeyConstraint("id", name=op.f("pk_legislator")),
-        schema=schema,
-    )
-    op.create_index(op.f("ix_legislator_bioguide_id"), "legislator", ["bioguide_id"], unique=True, schema=schema)
-    op.create_table(
-        "bill",
-        sa.Column("congress", sa.INTEGER(), autoincrement=False, nullable=False),
-        sa.Column("bill_type", sa.VARCHAR(), autoincrement=False, nullable=False),
-        sa.Column("number", sa.INTEGER(), autoincrement=False, nullable=False),
-        sa.Column("title", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("title_short", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("official_title", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("status", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("status_at", sa.DATE(), autoincrement=False, nullable=True),
-        sa.Column("sponsor_bioguide_id", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("subjects_top_term", sa.VARCHAR(), autoincrement=False, nullable=True),
-        sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
-        sa.Column(
-            "created",
-            postgresql.TIMESTAMP(timezone=True),
-            server_default=sa.text("now()"),
-            autoincrement=False,
-            nullable=False,
-        ),
-        sa.Column(
-            "updated",
-            postgresql.TIMESTAMP(timezone=True),
-            server_default=sa.text("now()"),
-            autoincrement=False,
-            nullable=False,
-        ),
-        sa.PrimaryKeyConstraint("id", name=op.f("pk_bill")),
-        sa.UniqueConstraint(
-            "congress",
-            "bill_type",
-            "number",
-            name=op.f("uq_bill_congress_type_number"),
-            postgresql_include=[],
-            postgresql_nulls_not_distinct=False,
-        ),
-        schema=schema,
-    )
-    op.create_index(op.f("ix_bill_congress"), "bill", ["congress"], unique=False, schema=schema)
-    # ### end Alembic commands ###
python/data_science/bluesky_ids.py (new file, 104 lines)

@@ -0,0 +1,104 @@
+"""Utilities for converting Bluesky identifiers to numeric database IDs.
+
+Handles DID-to-user_id hashing, TID-to-post_id decoding, and AT-URI parsing.
+"""
+
+from __future__ import annotations
+
+import hashlib
+
+TID_CHARSET = "234567abcdefghijklmnopqrstuvwxyz"
+_TID_LENGTH = 13
+_BIGINT_MASK = 0x7FFFFFFFFFFFFFFF
+_AT_URI_SEGMENT_COUNT = 3
+
+
+def did_to_user_id(did: str) -> int:
+    """Convert a DID string to a deterministic 63-bit integer for user_id.
+
+    Uses SHA-256, truncated to 63 bits (positive signed BigInteger range).
+    Collision probability is negligible at Bluesky's scale (~tens of millions of users).
+
+    Args:
+        did: A Bluesky DID string, e.g. "did:plc:abc123".
+
+    Returns:
+        A positive 63-bit integer suitable for BigInteger storage.
+    """
+    digest = hashlib.sha256(did.encode()).digest()
+    return int.from_bytes(digest[:8], "big") & _BIGINT_MASK
+
+
+def tid_to_integer(tid: str) -> int:
+    """Decode a Bluesky TID (base32-sortbase) into a 64-bit integer for post_id.
+
+    TIDs are 13-character, base32-sortbase encoded identifiers that encode a
+    microsecond timestamp plus a clock ID. They are globally unique by construction.
+
+    Args:
+        tid: A 13-character TID string, e.g. "3abc2defghijk".
+
+    Returns:
+        A positive integer suitable for BigInteger storage.
+
+    Raises:
+        ValueError: If the TID is malformed (wrong length or invalid characters).
+    """
+    if len(tid) != _TID_LENGTH:
+        message = f"TID must be {_TID_LENGTH} characters, got {len(tid)}: {tid!r}"
+        raise ValueError(message)
+
+    result = 0
+    for char in tid:
+        index = TID_CHARSET.find(char)
+        if index == -1:
+            message = f"Invalid character {char!r} in TID {tid!r}"
+            raise ValueError(message)
+        result = result * 32 + index
+    return result
+
+
+def parse_at_uri(uri: str) -> tuple[str, str, str]:
+    """Parse an AT-URI into its components.
+
+    Args:
+        uri: An AT-URI string, e.g. "at://did:plc:abc123/app.bsky.feed.post/3abc2defghijk".
+
+    Returns:
+        A tuple of (did, collection, rkey).
+
+    Raises:
+        ValueError: If the URI doesn't have the expected format.
+    """
+    stripped = uri.removeprefix("at://")
+    parts = stripped.split("/", maxsplit=2)
+    if len(parts) != _AT_URI_SEGMENT_COUNT:
+        message = f"Expected {_AT_URI_SEGMENT_COUNT} path segments in AT-URI, got {len(parts)}: {uri!r}"
+        raise ValueError(message)
+    return parts[0], parts[1], parts[2]
+
+
+def post_id_from_uri(uri: str) -> int:
+    """Extract and decode the post_id (TID) from an AT-URI.
+
+    Args:
+        uri: An AT-URI pointing to a post.
+
+    Returns:
+        The post_id as an integer.
+    """
+    _did, _collection, rkey = parse_at_uri(uri)
+    return tid_to_integer(rkey)
+
+
+def user_id_from_uri(uri: str) -> int:
+    """Extract and hash the user_id (DID) from an AT-URI.
+
+    Args:
+        uri: An AT-URI pointing to a post.
+
+    Returns:
+        The user_id as an integer.
+    """
+    did, _collection, _rkey = parse_at_uri(uri)
+    return did_to_user_id(did)
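Since bluesky_ids exposes pure functions, a short usage sketch (the URI reuses the hypothetical DID and TID from the docstrings above) shows how one AT-URI yields both numeric IDs:

from python.data_science.bluesky_ids import parse_at_uri, post_id_from_uri, user_id_from_uri

# Hypothetical AT-URI built from the docstring examples above.
uri = "at://did:plc:abc123/app.bsky.feed.post/3abc2defghijk"

did, collection, rkey = parse_at_uri(uri)  # ("did:plc:abc123", "app.bsky.feed.post", "3abc2defghijk")
post_id = post_id_from_uri(uri)  # tid_to_integer(rkey): base32-sortbase decode of the 13-char TID
user_id = user_id_from_uri(uri)  # did_to_user_id(did): first 8 bytes of SHA-256, masked to 63 bits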
python/data_science/bluesky_transform.py (new file, 143 lines)

@@ -0,0 +1,143 @@
+"""Transform Bluesky Jetstream messages into rows matching the Posts table schema."""
+
+from __future__ import annotations
+
+import json
+import logging
+from datetime import datetime
+
+from python.data_science.bluesky_ids import (
+    did_to_user_id,
+    post_id_from_uri,
+    tid_to_integer,
+    user_id_from_uri,
+)
+
+logger = logging.getLogger(__name__)
+
+INSTANCE = "bsky"
+POST_COLLECTION = "app.bsky.feed.post"
+EMBED_RECORD_TYPE = "app.bsky.embed.record"
+EMBED_RECORD_WITH_MEDIA_TYPE = "app.bsky.embed.recordWithMedia"
+
+
+def transform_jetstream_post(message: dict) -> dict:
+    """Transform a Jetstream commit message into a dict matching Posts table columns.
+
+    Expects a Jetstream message with kind=commit, operation=create,
+    collection=app.bsky.feed.post.
+
+    Args:
+        message: The full Jetstream JSON message.
+
+    Returns:
+        A dict with keys matching the Posts table columns.
+    """
+    did = message["did"]
+    commit = message["commit"]
+    record = commit["record"]
+
+    row: dict = {
+        "post_id": tid_to_integer(commit["rkey"]),
+        "user_id": did_to_user_id(did),
+        "instance": INSTANCE,
+        "date": datetime.fromisoformat(record["createdAt"]),
+        "text": record.get("text", ""),
+        "langs": _extract_langs(record),
+        "like_count": 0,
+        "reply_count": 0,
+        "repost_count": 0,
+        "reply_to": None,
+        "replied_author": None,
+        "thread_root": None,
+        "thread_root_author": None,
+        "repost_from": None,
+        "reposted_author": None,
+        "quotes": None,
+        "quoted_author": None,
+        "labels": _extract_labels(record),
+        "sent_label": None,
+        "sent_score": None,
+    }
+
+    _extract_reply_refs(record, row)
+    _extract_quote_refs(record, row)
+
+    return row
+
+
+def is_post_create(message: dict) -> bool:
+    """Check if a Jetstream message is a post creation event.
+
+    Args:
+        message: The full Jetstream JSON message.
+
+    Returns:
+        True if this is a create commit for app.bsky.feed.post.
+    """
+    if message.get("kind") != "commit":
+        return False
+    commit = message.get("commit", {})
+    return commit.get("operation") == "create" and commit.get("collection") == POST_COLLECTION
+
+
+def _extract_langs(record: dict) -> str | None:
+    """Extract langs array as a JSON string, or None if absent."""
+    langs = record.get("langs")
+    if langs is None:
+        return None
+    return json.dumps(langs)
+
+
+def _extract_labels(record: dict) -> str | None:
+    """Extract self-labels as a JSON string, or None if absent."""
+    labels_obj = record.get("labels")
+    if labels_obj is None:
+        return None
+    values = labels_obj.get("values", [])
+    if not values:
+        return None
+    label_strings = [label.get("val", "") for label in values]
+    return json.dumps(label_strings)
+
+
+def _extract_reply_refs(record: dict, row: dict) -> None:
+    """Populate reply_to, replied_author, thread_root, thread_root_author from record.reply."""
+    reply = record.get("reply")
+    if reply is None:
+        return
+
+    parent = reply.get("parent", {})
+    parent_uri = parent.get("uri")
+    if parent_uri:
+        row["reply_to"] = post_id_from_uri(parent_uri)
+        row["replied_author"] = user_id_from_uri(parent_uri)
+
+    root = reply.get("root", {})
+    root_uri = root.get("uri")
+    if root_uri:
+        row["thread_root"] = post_id_from_uri(root_uri)
+        row["thread_root_author"] = user_id_from_uri(root_uri)
+
+
+def _extract_quote_refs(record: dict, row: dict) -> None:
+    """Populate quotes and quoted_author from embed record references."""
+    embed = record.get("embed")
+    if embed is None:
+        return
+
+    embed_type = embed.get("$type", "")
+
+    if embed_type == EMBED_RECORD_TYPE:
+        _set_quote_from_record(embed.get("record", {}), row)
+    elif embed_type == EMBED_RECORD_WITH_MEDIA_TYPE:
+        inner_record = embed.get("record", {}).get("record", {})
+        _set_quote_from_record(inner_record, row)
+
+
+def _set_quote_from_record(record_ref: dict, row: dict) -> None:
+    """Set quotes and quoted_author from a record reference object."""
+    uri = record_ref.get("uri")
+    if uri and POST_COLLECTION in uri:
+        row["quotes"] = post_id_from_uri(uri)
+        row["quoted_author"] = user_id_from_uri(uri)
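A quick sketch of the transformer's happy path, using a hypothetical Jetstream commit message that carries only the fields transform_jetstream_post reads (all values illustrative):

from python.data_science.bluesky_transform import is_post_create, transform_jetstream_post

# Hypothetical Jetstream message; field values are made up for illustration.
message = {
    "did": "did:plc:abc123",
    "kind": "commit",
    "commit": {
        "operation": "create",
        "collection": "app.bsky.feed.post",
        "rkey": "3abc2defghijk",
        "record": {
            "createdAt": "2026-03-30T12:00:00.000Z",
            "text": "hello world",
            "langs": ["en"],
        },
    },
}

if is_post_create(message):
    row = transform_jetstream_post(message)
    # row["instance"] == "bsky"; row["langs"] == '["en"]'; all engagement counts start at 0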
python/data_science/firehose_consumer.py (new file, 203 lines)

@@ -0,0 +1,203 @@
+"""Kafka consumer that ingests Bluesky posts into the partitioned Posts table.
+
+Consumes Jetstream messages from Kafka, transforms them into Posts rows,
+and batch-inserts them into PostgreSQL with manual offset commits.
+
+Usage:
+    firehose-consumer
+    firehose-consumer --kafka-servers kafka:9092 --batch-size 500
+"""
+
+from __future__ import annotations
+
+import json
+import logging
+import signal
+from os import getenv
+from threading import Event
+from typing import Annotated
+
+import typer
+from confluent_kafka import Consumer, KafkaError, KafkaException
+from sqlalchemy.orm import Session
+
+from python.data_science.bluesky_transform import is_post_create, transform_jetstream_post
+from python.data_science.ingest_posts import ingest_batch
+from python.orm.common import get_postgres_engine
+from python.orm.data_science_dev.posts.failed_ingestion import FailedIngestion
+
+logger = logging.getLogger(__name__)
+
+DEFAULT_TOPIC = "bluesky.firehose.posts"
+DEFAULT_KAFKA_SERVERS = "localhost:9092"
+DEFAULT_GROUP_ID = "bluesky-posts-ingestor"
+DEFAULT_BATCH_SIZE = 500
+POLL_TIMEOUT_SECONDS = 5.0
+
+shutdown_event = Event()
+
+app = typer.Typer(help="Consume Bluesky posts from Kafka and ingest into PostgreSQL.")
+
+
+@app.command()
+def main(
+    kafka_servers: Annotated[str, typer.Option(help="Kafka bootstrap servers")] = "",
+    topic: Annotated[str, typer.Option(help="Kafka topic to consume from")] = "",
+    group_id: Annotated[str, typer.Option(help="Kafka consumer group ID")] = "",
+    batch_size: Annotated[int, typer.Option(help="Messages per DB insert batch")] = DEFAULT_BATCH_SIZE,
+) -> None:
+    """Consume Bluesky posts from Kafka and ingest into the partitioned posts table."""
+    logging.basicConfig(
+        level=logging.INFO,
+        format="%(asctime)s %(levelname)s %(message)s",
+        datefmt="%H:%M:%S",
+    )
+
+    servers = kafka_servers or getenv("KAFKA_BOOTSTRAP_SERVERS", DEFAULT_KAFKA_SERVERS)
+    topic_name = topic or getenv("BLUESKY_FIREHOSE_TOPIC", DEFAULT_TOPIC)
+    group = group_id or getenv("KAFKA_GROUP_ID", DEFAULT_GROUP_ID)
+
+    signal.signal(signal.SIGTERM, _handle_shutdown)
+    signal.signal(signal.SIGINT, _handle_shutdown)
+
+    consumer = _create_consumer(servers, group)
+    consumer.subscribe([topic_name])
+
+    engine = get_postgres_engine(name="DATA_SCIENCE_DEV")
+    total_inserted = 0
+
+    logger.info("Starting firehose consumer: topic=%s group=%s batch_size=%d", topic_name, group, batch_size)
+
+    try:
+        with Session(engine) as session:
+            while not shutdown_event.is_set():
+                inserted = _consume_batch(consumer, session, batch_size)
+                total_inserted += inserted
+                if inserted > 0:
+                    logger.info("Batch inserted %d rows (total: %d)", inserted, total_inserted)
+    except KafkaException:
+        logger.exception("Fatal Kafka error")
+    finally:
+        logger.info("Closing consumer (total inserted: %d)", total_inserted)
+        consumer.close()
+
+
+def _consume_batch(consumer: Consumer, session: Session, batch_size: int) -> int:
+    """Poll a batch of messages, transform, and insert into the database.
+
+    Args:
+        consumer: The Kafka consumer instance.
+        session: SQLAlchemy database session.
+        batch_size: Maximum number of messages to consume per batch.
+
+    Returns:
+        Number of rows successfully inserted.
+    """
+    messages = consumer.consume(num_messages=batch_size, timeout=POLL_TIMEOUT_SECONDS)
+    if not messages:
+        return 0
+
+    rows: list[dict] = []
+    for message in messages:
+        error = message.error()
+        if error is not None:
+            if error.code() == KafkaError._PARTITION_EOF:  # noqa: SLF001 — confluent-kafka exposes this as a pseudo-private constant; no public alternative exists
+                continue
+            logger.error("Consumer error: %s", error)
+            continue
+
+        row = _safe_transform(message.value(), session)
+        if row is not None:
+            rows.append(row)
+
+    if not rows:
+        consumer.commit(asynchronous=False)
+        return 0
+
+    inserted = ingest_batch(session, rows)
+    consumer.commit(asynchronous=False)
+    return inserted
+
+
+def _safe_transform(raw_value: bytes | None, session: Session) -> dict | None:
+    """Transform a Kafka message value into a Posts row, logging failures.
+
+    Args:
+        raw_value: Raw message bytes from Kafka.
+        session: SQLAlchemy session for logging failures.
+
+    Returns:
+        A transformed row dict, or None if transformation failed.
+    """
+    if raw_value is None:
+        return None
+
+    try:
+        message = json.loads(raw_value)
+    except (json.JSONDecodeError, UnicodeDecodeError):
+        logger.exception("Failed to decode Kafka message")
+        _log_failed_ingestion(session, raw_value, "JSON decode error")
+        return None
+
+    if not is_post_create(message):
+        return None
+
+    try:
+        return transform_jetstream_post(message)
+    except (KeyError, ValueError, TypeError):
+        logger.exception("Failed to transform Jetstream message")
+        _log_failed_ingestion(session, raw_value, "Transform error")
+        return None
+
+
+def _log_failed_ingestion(session: Session, raw_value: bytes, error: str) -> None:
+    """Log a failed ingestion to the FailedIngestion table.
+
+    Args:
+        session: SQLAlchemy session.
+        raw_value: The raw message bytes.
+        error: Description of the error.
+    """
+    try:
+        session.add(
+            FailedIngestion(
+                raw_line=raw_value.decode(errors="replace")[:10000],
+                error=error,
+            )
+        )
+        session.commit()
+    except Exception:
+        session.rollback()
+        logger.exception("Failed to log ingestion failure")
+
+
+def _create_consumer(servers: str, group: str) -> Consumer:
+    """Create a configured Kafka consumer.
+
+    Args:
+        servers: Kafka bootstrap servers string.
+        group: Consumer group ID.
+
+    Returns:
+        A configured confluent_kafka.Consumer.
+    """
+    config = {
+        "bootstrap.servers": servers,
+        "group.id": group,
+        "auto.offset.reset": "earliest",
+        "enable.auto.commit": False,
+        "max.poll.interval.ms": 300000,
+        "fetch.min.bytes": 1024,
+        "session.timeout.ms": 30000,
+    }
+    return Consumer(config)
+
+
+def _handle_shutdown(_signum: int, _frame: object) -> None:
+    """Signal handler to trigger graceful shutdown."""
+    logger.info("Shutdown signal received")
+    shutdown_event.set()
+
+
+if __name__ == "__main__":
+    app()
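Note that _consume_batch commits Kafka offsets only after ingest_batch returns, so a crash between the database insert and the offset commit replays the batch on restart: at-least-once delivery. That stays safe only if the insert is idempotent. A minimal sketch of what an idempotent ingest_batch could look like, assuming a PostgreSQL table object with a unique key on the post rows (the real python.data_science.ingest_posts module is not part of this diff):

# Hypothetical sketch; the actual ingest_batch in python/data_science/ingest_posts.py is not shown here.
from sqlalchemy import Table
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.orm import Session


def ingest_batch_sketch(session: Session, rows: list[dict], posts: Table) -> int:
    """Insert rows, silently skipping duplicates so replayed batches are harmless."""
    statement = insert(posts).on_conflict_do_nothing()
    result = session.execute(statement, rows)
    session.commit()
    return result.rowcount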
python/data_science/firehose_producer.py (new file, 230 lines)

@@ -0,0 +1,230 @@
+"""Bluesky Jetstream firehose to Kafka producer.
+
+Connects to the Bluesky Jetstream WebSocket API with zstd compression,
+filters for post creation events, and produces them to a Kafka topic.
+
+Usage:
+    firehose-producer
+    firehose-producer --kafka-servers kafka:9092 --topic bluesky.firehose.posts
+"""
+
+from __future__ import annotations
+
+import json
+import logging
+import signal
+from os import getenv
+from threading import Event
+from typing import Annotated
+
+import typer
+from compression import zstd
+from confluent_kafka import KafkaError, KafkaException, Producer
+from websockets.exceptions import ConnectionClosed
+from websockets.sync.client import connect
+
+logger = logging.getLogger(__name__)
+
+JETSTREAM_URL = "wss://jetstream2.us-east.bsky.network/subscribe"
+DEFAULT_TOPIC = "bluesky.firehose.posts"
+DEFAULT_KAFKA_SERVERS = "localhost:9092"
+POLL_INTERVAL = 100
+POST_COLLECTION = "app.bsky.feed.post"
+
+shutdown_event = Event()
+
+app = typer.Typer(help="Stream Bluesky firehose posts into Kafka.")
+
+
+@app.command()
+def main(
+    kafka_servers: Annotated[str, typer.Option(help="Kafka bootstrap servers")] = "",
+    topic: Annotated[str, typer.Option(help="Kafka topic to produce to")] = "",
+    collections: Annotated[str, typer.Option(help="Comma-separated collections to subscribe to")] = POST_COLLECTION,
+) -> None:
+    """Connect to Bluesky Jetstream and produce post events to Kafka."""
+    logging.basicConfig(
+        level=logging.INFO,
+        format="%(asctime)s %(levelname)s %(message)s",
+        datefmt="%H:%M:%S",
+    )
+
+    servers = kafka_servers or getenv("KAFKA_BOOTSTRAP_SERVERS", DEFAULT_KAFKA_SERVERS)
+    topic_name = topic or getenv("BLUESKY_FIREHOSE_TOPIC", DEFAULT_TOPIC)
+
+    signal.signal(signal.SIGTERM, _handle_shutdown)
+    signal.signal(signal.SIGINT, _handle_shutdown)
+
+    producer = _create_producer(servers)
+    cursor: int | None = None
+
+    logger.info("Starting firehose producer → %s on %s", topic_name, servers)
+
+    while not shutdown_event.is_set():
+        try:
+            cursor = _stream_loop(producer, topic_name, collections, cursor)
+        except (ConnectionClosed, OSError):
+            logger.exception("WebSocket disconnected, reconnecting")
+        except KafkaException:
+            logger.exception("Kafka error, reconnecting")
+
+        if not shutdown_event.is_set():
+            logger.info("Reconnecting in 5 seconds (cursor=%s)", cursor)
+            shutdown_event.wait(timeout=5)
+
+    logger.info("Shutting down, flushing producer")
+    producer.flush(timeout=30)
+    logger.info("Producer shutdown complete")
+
+
+def _stream_loop(
+    producer: Producer,
+    topic: str,
+    collections: str,
+    cursor: int | None,
+) -> int | None:
+    """Connect to Jetstream and stream messages to Kafka until disconnected.
+
+    Args:
+        producer: The Kafka producer instance.
+        topic: Kafka topic name.
+        collections: Comma-separated AT Protocol collections to subscribe to.
+        cursor: Optional microsecond timestamp to resume from.
+
+    Returns:
+        The last processed time_us cursor value.
+    """
+    url = _build_jetstream_url(collections, cursor)
+    logger.info("Connecting to %s", url)
+
+    message_count = 0
+    last_cursor = cursor
+
+    with connect(url, additional_headers={"Accept-Encoding": "zstd"}) as websocket:
+        logger.info("Connected to Jetstream")
+
+        while not shutdown_event.is_set():
+            try:
+                raw_frame = websocket.recv(timeout=10)
+            except TimeoutError:
+                producer.poll(0)
+                continue
+
+            text = _decode_frame(raw_frame)
+            message = json.loads(text)
+
+            time_us = message.get("time_us")
+            if time_us is not None:
+                last_cursor = time_us
+
+            if not _is_post_create(message):
+                continue
+
+            did = message.get("did", "")
+
+            try:
+                producer.produce(
+                    topic,
+                    key=did.encode(),
+                    value=text.encode() if isinstance(text, str) else text,
+                    callback=_delivery_callback,
+                )
+            except BufferError:
+                logger.warning("Producer buffer full, flushing")
+                producer.flush(timeout=10)
+                producer.produce(
+                    topic,
+                    key=did.encode(),
+                    value=text.encode() if isinstance(text, str) else text,
+                    callback=_delivery_callback,
+                )
+
+            message_count += 1
+            if message_count % POLL_INTERVAL == 0:
+                producer.poll(0)
+
+            if message_count % 10000 == 0:
+                logger.info("Produced %d messages (cursor=%s)", message_count, last_cursor)
+
+    return last_cursor
+
+
+def _build_jetstream_url(collections: str, cursor: int | None) -> str:
+    """Build the Jetstream WebSocket URL with query parameters.
+
+    Args:
+        collections: Comma-separated collection names.
+        cursor: Optional microsecond timestamp for resumption.
+
+    Returns:
+        The full WebSocket URL.
+    """
+    params = ["compress=true"]
+    for raw_collection in collections.split(","):
+        cleaned = raw_collection.strip()
+        if cleaned:
+            params.append(f"wantedCollections={cleaned}")
+    if cursor is not None:
+        params.append(f"cursor={cursor}")
+    return f"{JETSTREAM_URL}?{'&'.join(params)}"
+
+
+def _decode_frame(frame: str | bytes) -> str:
+    """Decode a WebSocket frame, decompressing zstd if binary.
+
+    Jetstream with compress=true sends zstd-compressed binary frames.
+
+    Args:
+        frame: Raw WebSocket frame data.
+
+    Returns:
+        The decoded JSON string.
+    """
+    if isinstance(frame, bytes):
+        return zstd.decompress(frame).decode()
+    return frame
+
+
+def _is_post_create(message: dict) -> bool:
+    """Check if a Jetstream message is a post creation commit."""
+    if message.get("kind") != "commit":
+        return False
+    commit = message.get("commit", {})
+    return commit.get("operation") == "create" and commit.get("collection") == POST_COLLECTION
+
+
+def _create_producer(servers: str) -> Producer:
+    """Create a configured Kafka producer.
+
+    Args:
+        servers: Kafka bootstrap servers string.
+
+    Returns:
+        A configured confluent_kafka.Producer.
+    """
+    config = {
+        "bootstrap.servers": servers,
+        "linger.ms": 50,
+        "batch.size": 65536,
+        "compression.type": "zstd",
+        "acks": "all",
+        "retries": 5,
+        "retry.backoff.ms": 500,
+    }
+    return Producer(config)
+
+
+def _delivery_callback(error: KafkaError | None, _message: object) -> None:
+    """Log delivery failures from the Kafka producer."""
+    if error is not None:
+        logger.error("Kafka delivery failed: %s", error)
+
+
+def _handle_shutdown(_signum: int, _frame: object) -> None:
+    """Signal handler to trigger graceful shutdown."""
+    logger.info("Shutdown signal received")
+    shutdown_event.set()
+
+
+if __name__ == "__main__":
+    app()
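The query string _build_jetstream_url produces is easy to verify by hand; with the default collection and a hypothetical resume cursor it yields:

from python.data_science.firehose_producer import _build_jetstream_url

# Cursor value is a hypothetical microsecond timestamp.
url = _build_jetstream_url("app.bsky.feed.post", cursor=1774051532000000)
assert url == (
    "wss://jetstream2.us-east.bsky.network/subscribe"
    "?compress=true&wantedCollections=app.bsky.feed.post&cursor=1774051532000000"
)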
@@ -1,613 +0,0 @@
"""Ingestion pipeline for loading congress data from unitedstates/congress JSON files.

Loads legislators, bills, votes, vote records, and bill text into the data_science_dev database.
Expects the parent directory to contain congress-tracker/ and congress-legislators/ as siblings.

Usage:
    ingest-congress /path/to/parent/
    ingest-congress /path/to/parent/ --congress 118
    ingest-congress /path/to/parent/ --congress 118 --only bills
"""

from __future__ import annotations

import logging
from pathlib import Path  # noqa: TC003 needed at runtime for typer CLI argument
from typing import TYPE_CHECKING, Annotated

import orjson
import typer
import yaml
from sqlalchemy import select
from sqlalchemy.orm import Session

from python.common import configure_logger
from python.orm.common import get_postgres_engine
from python.orm.data_science_dev.congress import Bill, BillText, Legislator, LegislatorSocialMedia, Vote, VoteRecord

if TYPE_CHECKING:
    from collections.abc import Iterator

    from sqlalchemy.engine import Engine

logger = logging.getLogger(__name__)

BATCH_SIZE = 10_000

app = typer.Typer(help="Ingest unitedstates/congress data into data_science_dev.")


@app.command()
def main(
    parent_dir: Annotated[
        Path,
        typer.Argument(help="Parent directory containing congress-tracker/ and congress-legislators/"),
    ],
    congress: Annotated[int | None, typer.Option(help="Only ingest a specific congress number")] = None,
    only: Annotated[
        str | None,
        typer.Option(help="Only run a specific step: legislators, legislators-social-media, bills, votes, bill-text"),
    ] = None,
) -> None:
    """Ingest congress data from unitedstates/congress JSON files."""
    configure_logger(level="INFO")

    data_dir = parent_dir / "congress-tracker/congress/data/"
    legislators_dir = parent_dir / "congress-legislators"

    if not data_dir.is_dir():
        typer.echo(f"Expected congress-tracker/ directory: {data_dir}", err=True)
        raise typer.Exit(code=1)

    if not legislators_dir.is_dir():
        typer.echo(f"Expected congress-legislators/ directory: {legislators_dir}", err=True)
        raise typer.Exit(code=1)

    engine = get_postgres_engine(name="DATA_SCIENCE_DEV")

    congress_dirs = _resolve_congress_dirs(data_dir, congress)
    if not congress_dirs:
        typer.echo("No congress directories found.", err=True)
        raise typer.Exit(code=1)

    logger.info("Found %d congress directories to process", len(congress_dirs))

    steps: dict[str, tuple] = {
        "legislators": (ingest_legislators, (engine, legislators_dir)),
        "legislators-social-media": (ingest_social_media, (engine, legislators_dir)),
        "bills": (ingest_bills, (engine, congress_dirs)),
        "votes": (ingest_votes, (engine, congress_dirs)),
        "bill-text": (ingest_bill_text, (engine, congress_dirs)),
    }

    if only:
        if only not in steps:
            typer.echo(f"Unknown step: {only}. Choose from: {', '.join(steps)}", err=True)
            raise typer.Exit(code=1)
        steps = {only: steps[only]}

    for step_name, (step_func, step_args) in steps.items():
        logger.info("=== Starting step: %s ===", step_name)
        step_func(*step_args)
        logger.info("=== Finished step: %s ===", step_name)

    logger.info("ingest-congress done")


def _resolve_congress_dirs(data_dir: Path, congress: int | None) -> list[Path]:
    """Find congress number directories under data_dir."""
    if congress is not None:
        target = data_dir / str(congress)
        return [target] if target.is_dir() else []
    return sorted(path for path in data_dir.iterdir() if path.is_dir() and path.name.isdigit())


def _flush_batch(session: Session, batch: list[object], label: str) -> int:
    """Add a batch of ORM objects to the session and commit. Returns count added."""
    if not batch:
        return 0
    session.add_all(batch)
    session.commit()
    count = len(batch)
    logger.info("Committed %d %s", count, label)
    batch.clear()
    return count
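
# Illustrative note (not part of the original file): the ingest steps below all
# follow the same batching pattern around _flush_batch, e.g.
#
#   batch.append(obj)
#   if len(batch) >= BATCH_SIZE:
#       total_inserted += _flush_batch(session, batch, "bills")
#   ...
#   total_inserted += _flush_batch(session, batch, "bills")  # final partial batch
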

# ---------------------------------------------------------------------------
# Legislators — loaded from congress-legislators YAML files
# ---------------------------------------------------------------------------


def ingest_legislators(engine: Engine, legislators_dir: Path) -> None:
    """Load legislators from congress-legislators YAML files."""
    legislators_data = _load_legislators_yaml(legislators_dir)
    logger.info("Loaded %d legislators from YAML files", len(legislators_data))

    with Session(engine) as session:
        existing_legislators = {
            legislator.bioguide_id: legislator for legislator in session.scalars(select(Legislator)).all()
        }
        logger.info("Found %d existing legislators in DB", len(existing_legislators))

        total_inserted = 0
        total_updated = 0
        for entry in legislators_data:
            bioguide_id = entry.get("id", {}).get("bioguide")
            if not bioguide_id:
                continue

            fields = _parse_legislator(entry)
            if existing := existing_legislators.get(bioguide_id):
                changed = False
                for field, value in fields.items():
                    if value is not None and getattr(existing, field) != value:
                        setattr(existing, field, value)
                        changed = True
                if changed:
                    total_updated += 1
            else:
                session.add(Legislator(bioguide_id=bioguide_id, **fields))
                total_inserted += 1

        session.commit()
        logger.info("Inserted %d new legislators, updated %d existing", total_inserted, total_updated)


def _load_legislators_yaml(legislators_dir: Path) -> list[dict]:
    """Load and combine legislators-current.yaml and legislators-historical.yaml."""
    legislators: list[dict] = []
    for filename in ("legislators-current.yaml", "legislators-historical.yaml"):
        path = legislators_dir / filename
        if not path.exists():
            logger.warning("Legislators file not found: %s", path)
            continue
        with path.open() as file:
            data = yaml.safe_load(file)
        if isinstance(data, list):
            legislators.extend(data)
    return legislators


def _parse_legislator(entry: dict) -> dict:
    """Extract Legislator fields from a congress-legislators YAML entry."""
    ids = entry.get("id", {})
    name = entry.get("name", {})
    bio = entry.get("bio", {})
    terms = entry.get("terms", [])
    latest_term = terms[-1] if terms else {}

    fec_ids = ids.get("fec")
    fec_ids_joined = ",".join(fec_ids) if isinstance(fec_ids, list) else fec_ids

    chamber = latest_term.get("type")
    chamber_normalized = {"rep": "House", "sen": "Senate"}.get(chamber, chamber)

    return {
        "thomas_id": ids.get("thomas"),
        "lis_id": ids.get("lis"),
        "govtrack_id": ids.get("govtrack"),
        "opensecrets_id": ids.get("opensecrets"),
        "fec_ids": fec_ids_joined,
        "first_name": name.get("first"),
        "last_name": name.get("last"),
        "official_full_name": name.get("official_full"),
        "nickname": name.get("nickname"),
        "birthday": bio.get("birthday"),
        "gender": bio.get("gender"),
        "current_party": latest_term.get("party"),
        "current_state": latest_term.get("state"),
        "current_district": latest_term.get("district"),
        "current_chamber": chamber_normalized,
    }
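
# Illustrative sketch (not part of the original file): a trimmed (hypothetical)
# congress-legislators YAML entry and what _parse_legislator extracts from it.
#
#   entry = {
#       "id": {"bioguide": "A000000", "govtrack": 400001, "fec": ["H8XX00000"]},
#       "name": {"first": "Jane", "last": "Doe", "official_full": "Jane Doe"},
#       "bio": {"birthday": "1970-01-01", "gender": "F"},
#       "terms": [{"type": "rep", "party": "Independent", "state": "VT", "district": 1}],
#   }
#   fields = _parse_legislator(entry)
#   fields["current_chamber"]  # -> "House" ("rep" is normalized)
#   fields["fec_ids"]          # -> "H8XX00000" (lists are comma-joined)
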

# ---------------------------------------------------------------------------
# Social Media — loaded from legislators-social-media.yaml
# ---------------------------------------------------------------------------

SOCIAL_MEDIA_PLATFORMS = {
    "twitter": "https://twitter.com/{account}",
    "facebook": "https://facebook.com/{account}",
    "youtube": "https://youtube.com/{account}",
    "instagram": "https://instagram.com/{account}",
    "mastodon": None,
}


def ingest_social_media(engine: Engine, legislators_dir: Path) -> None:
    """Load social media accounts from legislators-social-media.yaml."""
    social_media_path = legislators_dir / "legislators-social-media.yaml"
    if not social_media_path.exists():
        logger.warning("Social media file not found: %s", social_media_path)
        return

    with social_media_path.open() as file:
        social_media_data = yaml.safe_load(file)

    if not isinstance(social_media_data, list):
        logger.warning("Unexpected format in %s", social_media_path)
        return

    logger.info("Loaded %d entries from legislators-social-media.yaml", len(social_media_data))

    with Session(engine) as session:
        legislator_map = _build_legislator_map(session)
        existing_accounts = {
            (account.legislator_id, account.platform)
            for account in session.scalars(select(LegislatorSocialMedia)).all()
        }
        logger.info("Found %d existing social media accounts in DB", len(existing_accounts))

        total_inserted = 0
        total_updated = 0
        for entry in social_media_data:
            bioguide_id = entry.get("id", {}).get("bioguide")
            if not bioguide_id:
                continue

            legislator_id = legislator_map.get(bioguide_id)
            if legislator_id is None:
                continue

            social = entry.get("social", {})
            for platform, url_template in SOCIAL_MEDIA_PLATFORMS.items():
                account_name = social.get(platform)
                if not account_name:
                    continue

                url = url_template.format(account=account_name) if url_template else None

                if (legislator_id, platform) in existing_accounts:
                    total_updated += 1
                else:
                    session.add(
                        LegislatorSocialMedia(
                            legislator_id=legislator_id,
                            platform=platform,
                            account_name=str(account_name),
                            url=url,
                            source="https://github.com/unitedstates/congress-legislators",
                        )
                    )
                    existing_accounts.add((legislator_id, platform))
                    total_inserted += 1

        session.commit()
        logger.info("Inserted %d new social media accounts, updated %d existing", total_inserted, total_updated)


def _iter_voters(position_group: object) -> Iterator[dict]:
    """Yield voter dicts from a vote position group (handles list, single dict, or string)."""
    if isinstance(position_group, dict):
        yield position_group
    elif isinstance(position_group, list):
        for voter in position_group:
            if isinstance(voter, dict):
                yield voter


# ---------------------------------------------------------------------------
# Bills
# ---------------------------------------------------------------------------


def ingest_bills(engine: Engine, congress_dirs: list[Path]) -> None:
    """Load bill data.json files."""
    with Session(engine) as session:
        existing_bills = {(bill.congress, bill.bill_type, bill.number) for bill in session.scalars(select(Bill)).all()}
        logger.info("Found %d existing bills in DB", len(existing_bills))

        total_inserted = 0
        batch: list[Bill] = []
        for congress_dir in congress_dirs:
            bills_dir = congress_dir / "bills"
            if not bills_dir.is_dir():
                continue
            logger.info("Scanning bills from %s", congress_dir.name)
            for bill_file in bills_dir.rglob("data.json"):
                data = _read_json(bill_file)
                if data is None:
                    continue
                bill = _parse_bill(data, existing_bills)
                if bill is not None:
                    batch.append(bill)
                    if len(batch) >= BATCH_SIZE:
                        total_inserted += _flush_batch(session, batch, "bills")

        total_inserted += _flush_batch(session, batch, "bills")
        logger.info("Inserted %d new bills total", total_inserted)


def _parse_bill(data: dict, existing_bills: set[tuple[int, str, int]]) -> Bill | None:
    """Parse a bill data.json dict into a Bill ORM object, skipping existing."""
    raw_congress = data.get("congress")
    bill_type = data.get("bill_type")
    raw_number = data.get("number")
    if raw_congress is None or bill_type is None or raw_number is None:
        return None
    congress = int(raw_congress)
    number = int(raw_number)
    if (congress, bill_type, number) in existing_bills:
        return None

    sponsor_bioguide = None
    sponsor = data.get("sponsor")
    if sponsor:
        sponsor_bioguide = sponsor.get("bioguide_id")

    return Bill(
        congress=congress,
        bill_type=bill_type,
        number=number,
        title=data.get("short_title") or data.get("official_title"),
        title_short=data.get("short_title"),
        official_title=data.get("official_title"),
        status=data.get("status"),
        status_at=data.get("status_at"),
        sponsor_bioguide_id=sponsor_bioguide,
        subjects_top_term=data.get("subjects_top_term"),
    )


# ---------------------------------------------------------------------------
# Votes (and vote records)
# ---------------------------------------------------------------------------


def ingest_votes(engine: Engine, congress_dirs: list[Path]) -> None:
    """Load vote data.json files with their vote records."""
    with Session(engine) as session:
        legislator_map = _build_legislator_map(session)
        logger.info("Loaded %d legislators into lookup map", len(legislator_map))
        bill_map = _build_bill_map(session)
        logger.info("Loaded %d bills into lookup map", len(bill_map))
        existing_votes = {
            (vote.congress, vote.chamber, vote.session, vote.number) for vote in session.scalars(select(Vote)).all()
        }
        logger.info("Found %d existing votes in DB", len(existing_votes))

        total_inserted = 0
        batch: list[Vote] = []
        for congress_dir in congress_dirs:
            votes_dir = congress_dir / "votes"
            if not votes_dir.is_dir():
                continue
            logger.info("Scanning votes from %s", congress_dir.name)
            for vote_file in votes_dir.rglob("data.json"):
                data = _read_json(vote_file)
                if data is None:
                    continue
                vote = _parse_vote(data, legislator_map, bill_map, existing_votes)
                if vote is not None:
                    batch.append(vote)
                    if len(batch) >= BATCH_SIZE:
                        total_inserted += _flush_batch(session, batch, "votes")

        total_inserted += _flush_batch(session, batch, "votes")
        logger.info("Inserted %d new votes total", total_inserted)


def _build_legislator_map(session: Session) -> dict[str, int]:
    """Build a mapping of bioguide_id -> legislator.id."""
    return {legislator.bioguide_id: legislator.id for legislator in session.scalars(select(Legislator)).all()}


def _build_bill_map(session: Session) -> dict[tuple[int, str, int], int]:
    """Build a mapping of (congress, bill_type, number) -> bill.id."""
    return {(bill.congress, bill.bill_type, bill.number): bill.id for bill in session.scalars(select(Bill)).all()}


def _parse_vote(
    data: dict,
    legislator_map: dict[str, int],
    bill_map: dict[tuple[int, str, int], int],
    existing_votes: set[tuple[int, str, int, int]],
) -> Vote | None:
    """Parse a vote data.json dict into a Vote ORM object with records."""
    raw_congress = data.get("congress")
    chamber = data.get("chamber")
    raw_number = data.get("number")
    vote_date = data.get("date")
    if raw_congress is None or chamber is None or raw_number is None or vote_date is None:
        return None

    raw_session = data.get("session")
    if raw_session is None:
        return None

    congress = int(raw_congress)
    number = int(raw_number)
    session_number = int(raw_session)

    # Normalize chamber from "h"/"s" to "House"/"Senate"
    chamber_normalized = {"h": "House", "s": "Senate"}.get(chamber, chamber)

    if (congress, chamber_normalized, session_number, number) in existing_votes:
        return None

    # Resolve linked bill
    bill_id = None
    bill_ref = data.get("bill")
    if bill_ref:
        bill_key = (
            int(bill_ref.get("congress", congress)),
            bill_ref.get("type"),
            int(bill_ref.get("number", 0)),
        )
        bill_id = bill_map.get(bill_key)

    raw_votes = data.get("votes", {})
    vote_counts = _count_votes(raw_votes)
    vote_records = _build_vote_records(raw_votes, legislator_map)

    return Vote(
        congress=congress,
        chamber=chamber_normalized,
        session=session_number,
        number=number,
        vote_type=data.get("type"),
        question=data.get("question"),
        result=data.get("result"),
        result_text=data.get("result_text"),
        vote_date=vote_date[:10] if isinstance(vote_date, str) else vote_date,
        bill_id=bill_id,
        vote_records=vote_records,
        **vote_counts,
    )


def _count_votes(raw_votes: dict) -> dict[str, int]:
    """Count voters per position category, correctly handling dict and list formats."""
    yea_count = 0
    nay_count = 0
    not_voting_count = 0
    present_count = 0

    for position, position_group in raw_votes.items():
        voter_count = sum(1 for _ in _iter_voters(position_group))
        if position in ("Yea", "Aye"):
            yea_count += voter_count
        elif position in ("Nay", "No"):
            nay_count += voter_count
        elif position == "Not Voting":
            not_voting_count += voter_count
        elif position == "Present":
            present_count += voter_count

    return {
        "yea_count": yea_count,
        "nay_count": nay_count,
        "not_voting_count": not_voting_count,
        "present_count": present_count,
    }
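
# Illustrative sketch (not part of the original file): _count_votes, via
# _iter_voters, tolerates both shapes seen in unitedstates/congress vote JSON:
# a list of voter dicts or a single bare voter dict.
#
#   raw_votes = {
#       "Yea": [{"id": "A000000"}, {"id": "B000001"}],
#       "Nay": {"id": "C000002"},  # single voter serialized as a bare dict
#       "Not Voting": [],
#   }
#   _count_votes(raw_votes)
#   # -> {"yea_count": 2, "nay_count": 1, "not_voting_count": 0, "present_count": 0}
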

def _build_vote_records(raw_votes: dict, legislator_map: dict[str, int]) -> list[VoteRecord]:
    """Build VoteRecord objects from raw vote data."""
    records: list[VoteRecord] = []
    for position, position_group in raw_votes.items():
        for voter in _iter_voters(position_group):
            bioguide_id = voter.get("id")
            if not bioguide_id:
                continue
            legislator_id = legislator_map.get(bioguide_id)
            if legislator_id is None:
                continue
            records.append(
                VoteRecord(
                    legislator_id=legislator_id,
                    position=position,
                )
            )
    return records


# ---------------------------------------------------------------------------
# Bill Text
# ---------------------------------------------------------------------------


def ingest_bill_text(engine: Engine, congress_dirs: list[Path]) -> None:
    """Load bill text from text-versions directories."""
    with Session(engine) as session:
        bill_map = _build_bill_map(session)
        logger.info("Loaded %d bills into lookup map", len(bill_map))
        existing_bill_texts = {
            (bill_text.bill_id, bill_text.version_code) for bill_text in session.scalars(select(BillText)).all()
        }
        logger.info("Found %d existing bill text versions in DB", len(existing_bill_texts))

        total_inserted = 0
        batch: list[BillText] = []
        for congress_dir in congress_dirs:
            logger.info("Scanning bill texts from %s", congress_dir.name)
            for bill_text in _iter_bill_texts(congress_dir, bill_map, existing_bill_texts):
                batch.append(bill_text)
                if len(batch) >= BATCH_SIZE:
                    total_inserted += _flush_batch(session, batch, "bill texts")

        total_inserted += _flush_batch(session, batch, "bill texts")
        logger.info("Inserted %d new bill text versions total", total_inserted)


def _iter_bill_texts(
    congress_dir: Path,
    bill_map: dict[tuple[int, str, int], int],
    existing_bill_texts: set[tuple[int, str]],
) -> Iterator[BillText]:
    """Yield BillText objects for a single congress directory, skipping existing."""
    bills_dir = congress_dir / "bills"
    if not bills_dir.is_dir():
        return

    for bill_dir in bills_dir.rglob("text-versions"):
        if not bill_dir.is_dir():
            continue
        bill_key = _bill_key_from_dir(bill_dir.parent, congress_dir)
        if bill_key is None:
            continue
        bill_id = bill_map.get(bill_key)
        if bill_id is None:
            continue

        for version_dir in sorted(bill_dir.iterdir()):
            if not version_dir.is_dir():
                continue
            if (bill_id, version_dir.name) in existing_bill_texts:
                continue
            text_content = _read_bill_text(version_dir)
            version_data = _read_json(version_dir / "data.json")
            yield BillText(
                bill_id=bill_id,
                version_code=version_dir.name,
                version_name=version_data.get("version_name") if version_data else None,
                date=version_data.get("issued_on") if version_data else None,
                text_content=text_content,
            )


def _bill_key_from_dir(bill_dir: Path, congress_dir: Path) -> tuple[int, str, int] | None:
    """Extract (congress, bill_type, number) from directory structure."""
    congress = int(congress_dir.name)
    bill_type = bill_dir.parent.name
    name = bill_dir.name
    # Directory name is like "hr3590" — strip the type prefix to get the number
    number_str = name[len(bill_type) :]
    if not number_str.isdigit():
        return None
    return (congress, bill_type, int(number_str))
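
# Illustrative sketch (not part of the original file): the layout this parser
# assumes is data/<congress>/bills/<bill_type>/<bill_type><number>/, e.g.
#
#   _bill_key_from_dir(Path("data/111/bills/hr/hr3590"), Path("data/111"))
#   # -> (111, "hr", 3590)
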

def _read_bill_text(version_dir: Path) -> str | None:
    """Read bill text from a version directory, preferring .txt over .xml."""
    for extension in ("txt", "htm", "html", "xml"):
        candidates = list(version_dir.glob(f"document.{extension}"))
        if not candidates:
            candidates = list(version_dir.glob(f"*.{extension}"))
        if candidates:
            try:
                return candidates[0].read_text(encoding="utf-8")
            except Exception:
                logger.exception("Failed to read %s", candidates[0])
    return None


# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------


def _read_json(path: Path) -> dict | None:
    """Read and parse a JSON file, returning None on failure."""
    try:
        return orjson.loads(path.read_bytes())
    except FileNotFoundError:
        return None
    except Exception:
        logger.exception("Failed to parse %s", path)
        return None


if __name__ == "__main__":
    app()

@@ -1,14 +0,0 @@
"""init."""

from python.orm.data_science_dev.congress.bill import Bill, BillText
from python.orm.data_science_dev.congress.legislator import Legislator, LegislatorSocialMedia
from python.orm.data_science_dev.congress.vote import Vote, VoteRecord

__all__ = [
    "Bill",
    "BillText",
    "Legislator",
    "LegislatorSocialMedia",
    "Vote",
    "VoteRecord",
]

@@ -1,66 +0,0 @@
"""Bill model - legislation introduced in Congress."""

from __future__ import annotations

from datetime import date
from typing import TYPE_CHECKING

from sqlalchemy import ForeignKey, Index, UniqueConstraint
from sqlalchemy.orm import Mapped, mapped_column, relationship

from python.orm.data_science_dev.base import DataScienceDevTableBase

if TYPE_CHECKING:
    from python.orm.data_science_dev.congress.vote import Vote


class Bill(DataScienceDevTableBase):
    """Legislation with congress number, type, titles, status, and sponsor."""

    __tablename__ = "bill"

    congress: Mapped[int]
    bill_type: Mapped[str]
    number: Mapped[int]

    title: Mapped[str | None]
    title_short: Mapped[str | None]
    official_title: Mapped[str | None]

    status: Mapped[str | None]
    status_at: Mapped[date | None]

    sponsor_bioguide_id: Mapped[str | None]

    subjects_top_term: Mapped[str | None]

    votes: Mapped[list[Vote]] = relationship(
        "Vote",
        back_populates="bill",
    )
    bill_texts: Mapped[list[BillText]] = relationship(
        "BillText",
        back_populates="bill",
        cascade="all, delete-orphan",
    )

    __table_args__ = (
        UniqueConstraint("congress", "bill_type", "number", name="uq_bill_congress_type_number"),
        Index("ix_bill_congress", "congress"),
    )


class BillText(DataScienceDevTableBase):
    """Stores different text versions of a bill (introduced, enrolled, etc.)."""

    __tablename__ = "bill_text"

    bill_id: Mapped[int] = mapped_column(ForeignKey("main.bill.id", ondelete="CASCADE"))
    version_code: Mapped[str]
    version_name: Mapped[str | None]
    text_content: Mapped[str | None]
    date: Mapped[date | None]

    bill: Mapped[Bill] = relationship("Bill", back_populates="bill_texts")

    __table_args__ = (UniqueConstraint("bill_id", "version_code", name="uq_bill_text_bill_id_version_code"),)

@@ -1,66 +0,0 @@
"""Legislator model - members of Congress."""

from __future__ import annotations

from datetime import date
from typing import TYPE_CHECKING

from sqlalchemy import ForeignKey, Text
from sqlalchemy.orm import Mapped, mapped_column, relationship

from python.orm.data_science_dev.base import DataScienceDevTableBase

if TYPE_CHECKING:
    from python.orm.data_science_dev.congress.vote import VoteRecord


class Legislator(DataScienceDevTableBase):
    """Members of Congress with identification and current term info."""

    __tablename__ = "legislator"

    bioguide_id: Mapped[str] = mapped_column(Text, unique=True, index=True)

    thomas_id: Mapped[str | None]
    lis_id: Mapped[str | None]
    govtrack_id: Mapped[int | None]
    opensecrets_id: Mapped[str | None]
    fec_ids: Mapped[str | None]

    first_name: Mapped[str]
    last_name: Mapped[str]
    official_full_name: Mapped[str | None]
    nickname: Mapped[str | None]

    birthday: Mapped[date | None]
    gender: Mapped[str | None]

    current_party: Mapped[str | None]
    current_state: Mapped[str | None]
    current_district: Mapped[int | None]
    current_chamber: Mapped[str | None]

    social_media_accounts: Mapped[list[LegislatorSocialMedia]] = relationship(
        "LegislatorSocialMedia",
        back_populates="legislator",
        cascade="all, delete-orphan",
    )
    vote_records: Mapped[list[VoteRecord]] = relationship(
        "VoteRecord",
        back_populates="legislator",
        cascade="all, delete-orphan",
    )


class LegislatorSocialMedia(DataScienceDevTableBase):
    """Social media account linked to a legislator."""

    __tablename__ = "legislator_social_media"

    legislator_id: Mapped[int] = mapped_column(ForeignKey("main.legislator.id"))
    platform: Mapped[str]
    account_name: Mapped[str]
    url: Mapped[str | None]
    source: Mapped[str]

    legislator: Mapped[Legislator] = relationship(back_populates="social_media_accounts")

@@ -1,79 +0,0 @@
"""Vote model - roll call votes in Congress."""

from __future__ import annotations

from datetime import date
from typing import TYPE_CHECKING

from sqlalchemy import ForeignKey, Index, UniqueConstraint
from sqlalchemy.orm import Mapped, mapped_column, relationship

from python.orm.data_science_dev.base import DataScienceDevBase, DataScienceDevTableBase

if TYPE_CHECKING:
    from python.orm.data_science_dev.congress.bill import Bill
    from python.orm.data_science_dev.congress.legislator import Legislator
    from python.orm.data_science_dev.congress.vote import Vote


class VoteRecord(DataScienceDevBase):
    """Links a vote to a legislator with their position (Yea, Nay, etc.)."""

    __tablename__ = "vote_record"

    vote_id: Mapped[int] = mapped_column(
        ForeignKey("main.vote.id", ondelete="CASCADE"),
        primary_key=True,
    )
    legislator_id: Mapped[int] = mapped_column(
        ForeignKey("main.legislator.id", ondelete="CASCADE"),
        primary_key=True,
    )
    position: Mapped[str]

    vote: Mapped[Vote] = relationship("Vote", back_populates="vote_records")
    legislator: Mapped[Legislator] = relationship("Legislator", back_populates="vote_records")


class Vote(DataScienceDevTableBase):
    """Roll call votes with counts and optional bill linkage."""

    __tablename__ = "vote"

    congress: Mapped[int]
    chamber: Mapped[str]
    session: Mapped[int]
    number: Mapped[int]

    vote_type: Mapped[str | None]
    question: Mapped[str | None]
    result: Mapped[str | None]
    result_text: Mapped[str | None]

    vote_date: Mapped[date]

    yea_count: Mapped[int | None]
    nay_count: Mapped[int | None]
    not_voting_count: Mapped[int | None]
    present_count: Mapped[int | None]

    bill_id: Mapped[int | None] = mapped_column(ForeignKey("main.bill.id"))

    bill: Mapped[Bill | None] = relationship("Bill", back_populates="votes")
    vote_records: Mapped[list[VoteRecord]] = relationship(
        "VoteRecord",
        back_populates="vote",
        cascade="all, delete-orphan",
    )

    __table_args__ = (
        UniqueConstraint(
            "congress",
            "chamber",
            "session",
            "number",
            name="uq_vote_congress_chamber_session_number",
        ),
        Index("ix_vote_date", "vote_date"),
        Index("ix_vote_congress_chamber", "congress", "chamber"),
    )

@@ -2,15 +2,9 @@

 from __future__ import annotations

-from python.orm.data_science_dev.congress import Bill, BillText, Legislator, Vote, VoteRecord
 from python.orm.data_science_dev.posts import partitions  # noqa: F401 — registers partition classes in metadata
 from python.orm.data_science_dev.posts.tables import Posts

 __all__ = [
-    "Bill",
-    "BillText",
-    "Legislator",
     "Posts",
-    "Vote",
-    "VoteRecord",
 ]

@@ -3,6 +3,7 @@
 from __future__ import annotations

 from python.orm.richie.base import RichieBase, TableBase, TableBaseBig, TableBaseSmall
+from python.orm.richie.congress import Bill, Legislator, Vote, VoteRecord
 from python.orm.richie.contact import (
     Contact,
     ContactNeed,
@@ -12,13 +13,17 @@ from python.orm.richie.contact import (
 )

 __all__ = [
+    "Bill",
     "Contact",
     "ContactNeed",
     "ContactRelationship",
+    "Legislator",
     "Need",
     "RelationshipType",
     "RichieBase",
     "TableBase",
     "TableBaseBig",
     "TableBaseSmall",
+    "Vote",
+    "VoteRecord",
 ]

150
python/orm/richie/congress.py
Normal file
@@ -0,0 +1,150 @@
"""Congress Tracker database models."""

from __future__ import annotations

from datetime import date

from sqlalchemy import ForeignKey, Index, Text, UniqueConstraint
from sqlalchemy.orm import Mapped, mapped_column, relationship

from python.orm.richie.base import RichieBase, TableBase


class Legislator(TableBase):
    """Legislator model - members of Congress."""

    __tablename__ = "legislator"

    # Natural key - bioguide ID is the authoritative identifier
    bioguide_id: Mapped[str] = mapped_column(Text, unique=True, index=True)

    # Other IDs for cross-referencing
    thomas_id: Mapped[str | None]
    lis_id: Mapped[str | None]
    govtrack_id: Mapped[int | None]
    opensecrets_id: Mapped[str | None]
    fec_ids: Mapped[str | None]  # JSON array stored as string

    # Name info
    first_name: Mapped[str]
    last_name: Mapped[str]
    official_full_name: Mapped[str | None]
    nickname: Mapped[str | None]

    # Bio
    birthday: Mapped[date | None]
    gender: Mapped[str | None]  # M/F

    # Current term info (denormalized for query efficiency)
    current_party: Mapped[str | None]
    current_state: Mapped[str | None]
    current_district: Mapped[int | None]  # House only
    current_chamber: Mapped[str | None]  # rep/sen

    # Relationships
    vote_records: Mapped[list[VoteRecord]] = relationship(
        "VoteRecord",
        back_populates="legislator",
        cascade="all, delete-orphan",
    )


class Bill(TableBase):
    """Bill model - legislation introduced in Congress."""

    __tablename__ = "bill"

    # Composite natural key: congress + bill_type + number
    congress: Mapped[int]
    bill_type: Mapped[str]  # hr, s, hres, sres, hjres, sjres
    number: Mapped[int]

    # Bill info
    title: Mapped[str | None]
    title_short: Mapped[str | None]
    official_title: Mapped[str | None]

    # Status
    status: Mapped[str | None]
    status_at: Mapped[date | None]

    # Sponsor
    sponsor_bioguide_id: Mapped[str | None]

    # Subjects
    subjects_top_term: Mapped[str | None]

    # Relationships
    votes: Mapped[list[Vote]] = relationship(
        "Vote",
        back_populates="bill",
    )

    __table_args__ = (
        UniqueConstraint("congress", "bill_type", "number", name="uq_bill_congress_type_number"),
        Index("ix_bill_congress", "congress"),
    )


class Vote(TableBase):
    """Vote model - roll call votes in Congress."""

    __tablename__ = "vote"

    # Composite natural key: congress + chamber + session + number
    congress: Mapped[int]
    chamber: Mapped[str]  # house/senate
    session: Mapped[int]
    number: Mapped[int]

    # Vote details
    vote_type: Mapped[str | None]
    question: Mapped[str | None]
    result: Mapped[str | None]
    result_text: Mapped[str | None]

    # Timing
    vote_date: Mapped[date]

    # Vote counts (denormalized for efficiency)
    yea_count: Mapped[int | None]
    nay_count: Mapped[int | None]
    not_voting_count: Mapped[int | None]
    present_count: Mapped[int | None]

    # Related bill (optional - not all votes are on bills)
    bill_id: Mapped[int | None] = mapped_column(ForeignKey("main.bill.id"))

    # Relationships
    bill: Mapped[Bill | None] = relationship("Bill", back_populates="votes")
    vote_records: Mapped[list[VoteRecord]] = relationship(
        "VoteRecord",
        back_populates="vote",
        cascade="all, delete-orphan",
    )

    __table_args__ = (
        UniqueConstraint("congress", "chamber", "session", "number", name="uq_vote_congress_chamber_session_number"),
        Index("ix_vote_date", "vote_date"),
        Index("ix_vote_congress_chamber", "congress", "chamber"),
    )


class VoteRecord(RichieBase):
    """Association table: Vote <-> Legislator with position."""

    __tablename__ = "vote_record"

    vote_id: Mapped[int] = mapped_column(
        ForeignKey("main.vote.id", ondelete="CASCADE"),
        primary_key=True,
    )
    legislator_id: Mapped[int] = mapped_column(
        ForeignKey("main.legislator.id", ondelete="CASCADE"),
        primary_key=True,
    )
    position: Mapped[str]  # Yea, Nay, Not Voting, Present

    # Relationships
    vote: Mapped[Vote] = relationship("Vote", back_populates="vote_records")
    legislator: Mapped[Legislator] = relationship("Legislator", back_populates="vote_records")
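
A minimal usage sketch of the relationships defined above (not part of the diff; the engine setup and bioguide ID here are hypothetical):

    from sqlalchemy import select
    from sqlalchemy.orm import Session

    with Session(engine) as session:
        legislator = session.execute(
            select(Legislator).where(Legislator.bioguide_id == "A000000")
        ).scalar_one()
        for record in legislator.vote_records:
            print(record.vote.vote_date, record.vote.question, record.position)
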
@@ -63,9 +63,9 @@ class DeviceRegistry:
             return

         with Session(self.engine) as session:
-            device = session.scalars(
+            device = session.execute(
                 select(SignalDevice).where(SignalDevice.phone_number == phone_number)
-            ).one_or_none()
+            ).scalar_one_or_none()

             if device:
                 if device.safety_number != safety_number and device.trust_level != TrustLevel.BLOCKED:
@@ -99,9 +99,9 @@ class DeviceRegistry:
         Returns True if the device was found and verified.
         """
         with Session(self.engine) as session:
-            device = session.scalars(
+            device = session.execute(
                 select(SignalDevice).where(SignalDevice.phone_number == phone_number)
-            ).one_or_none()
+            ).scalar_one_or_none()

             if not device:
                 logger.warning(f"Cannot verify unknown device: {phone_number}")
@@ -139,9 +139,9 @@ class DeviceRegistry:
     def grant_role(self, phone_number: str, role: Role) -> bool:
         """Add a role to a device. Called by admin over SSH."""
         with Session(self.engine) as session:
-            device = session.scalars(
+            device = session.execute(
                 select(SignalDevice).where(SignalDevice.phone_number == phone_number)
-            ).one_or_none()
+            ).scalar_one_or_none()

             if not device:
                 logger.warning(f"Cannot grant role for unknown device: {phone_number}")
@@ -150,7 +150,7 @@ class DeviceRegistry:
             if any(record.name == role for record in device.roles):
                 return True

-            role_record = session.scalars(select(RoleRecord).where(RoleRecord.name == role)).one_or_none()
+            role_record = session.execute(select(RoleRecord).where(RoleRecord.name == role)).scalar_one_or_none()

             if not role_record:
                 logger.warning(f"Unknown role: {role}")
@@ -165,9 +165,9 @@ class DeviceRegistry:
     def revoke_role(self, phone_number: str, role: Role) -> bool:
         """Remove a role from a device. Called by admin over SSH."""
         with Session(self.engine) as session:
-            device = session.scalars(
+            device = session.execute(
                 select(SignalDevice).where(SignalDevice.phone_number == phone_number)
-            ).one_or_none()
+            ).scalar_one_or_none()

             if not device:
                 logger.warning(f"Cannot revoke role for unknown device: {phone_number}")
@@ -182,16 +182,16 @@ class DeviceRegistry:
     def set_roles(self, phone_number: str, roles: list[Role]) -> bool:
         """Replace all roles for a device. Called by admin over SSH."""
         with Session(self.engine) as session:
-            device = session.scalars(
+            device = session.execute(
                 select(SignalDevice).where(SignalDevice.phone_number == phone_number)
-            ).one_or_none()
+            ).scalar_one_or_none()

             if not device:
                 logger.warning(f"Cannot set roles for unknown device: {phone_number}")
                 return False

             role_names = [str(role) for role in roles]
-            records = session.scalars(select(RoleRecord).where(RoleRecord.name.in_(role_names))).all()
+            records = list(session.execute(select(RoleRecord).where(RoleRecord.name.in_(role_names))).scalars().all())
             device.roles = records
             session.commit()
             self._update_cache(phone_number, device)
@@ -203,7 +203,7 @@ class DeviceRegistry:
     def list_devices(self) -> list[SignalDevice]:
         """Return all known devices."""
         with Session(self.engine) as session:
-            return list(session.scalars(select(SignalDevice)).all())
+            return list(session.execute(select(SignalDevice)).scalars().all())

     def sync_identities(self) -> None:
         """Pull identity list from signal-cli and record any new ones."""
@@ -226,7 +226,9 @@ class DeviceRegistry:
     def _load_device(self, phone_number: str) -> SignalDevice | None:
         """Fetch a device by phone number (with joined roles)."""
         with Session(self.engine) as session:
-            return session.scalars(select(SignalDevice).where(SignalDevice.phone_number == phone_number)).one_or_none()
+            return session.execute(
+                select(SignalDevice).where(SignalDevice.phone_number == phone_number)
+            ).scalar_one_or_none()

     def _update_cache(self, phone_number: str, device: SignalDevice) -> None:
         """Refresh the cache entry for a device."""
@@ -242,9 +244,9 @@ class DeviceRegistry:
     def _set_trust(self, phone_number: str, level: str, log_msg: str | None = None) -> bool:
         """Update the trust level for a device."""
         with Session(self.engine) as session:
-            device = session.scalars(
+            device = session.execute(
                 select(SignalDevice).where(SignalDevice.phone_number == phone_number)
-            ).one_or_none()
+            ).scalar_one_or_none()

             if not device:
                 return False
@@ -267,7 +269,7 @@ def sync_roles(engine: Engine) -> None:
     expected = {role.value for role in Role}

     with Session(engine) as session:
-        existing = set(session.scalars(select(RoleRecord.name)).all())
+        existing = {record.name for record in session.execute(select(RoleRecord)).scalars().all()}

         to_add = expected - existing
         to_remove = existing - expected
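
The hunks above replace `session.scalars(...)` with `session.execute(...)` plus explicit scalar unwrapping. A minimal sketch of the equivalence in SQLAlchemy 2.x (not from the diff; `stmt` is a placeholder select()):

    device = session.scalars(stmt).one_or_none()           # old form
    device = session.execute(stmt).scalar_one_or_none()    # new form
    # execute() returns Row objects; .scalars() / .scalar_one_or_none() unwrap
    # the first column of each row, so both forms yield SignalDevice | None.
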
@@ -34,9 +34,8 @@ def main(config_file: Path) -> None:
                 logger.error(msg)
                 signal_alert(msg)
                 continue
-            count_lookup = get_count_lookup(config_file, dataset.name)
-            logger.info(f"using {count_lookup} for {dataset.name}")
-            get_snapshots_to_delete(dataset, count_lookup)
+            get_snapshots_to_delete(dataset, get_count_lookup(config_file, dataset.name))
     except Exception:
         logger.exception("snapshot_manager failed")
         signal_alert("snapshot_manager failed")
@@ -100,7 +99,6 @@ def get_snapshots_to_delete(
     """
     snapshots = dataset.get_snapshots()

-    logger.info(f"calculating snapshots for {dataset.name} to be deleted")
     if not snapshots:
         logger.info(f"{dataset.name} has no snapshots")
         return

@@ -1,17 +0,0 @@
FROM nvidia/cuda:12.4.1-cudnn-runtime-ubuntu22.04

ENV DEBIAN_FRONTEND=noninteractive \
    PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

RUN apt-get update \
    && apt-get install -y --no-install-recommends python3 python3-pip ffmpeg \
    && rm -rf /var/lib/apt/lists/*

RUN pip3 install --no-cache-dir --upgrade pip \
    && pip3 install --no-cache-dir faster-whisper requests

WORKDIR /app
COPY python/tools/whisper/inference.py /app/inference.py

ENTRYPOINT ["python3", "/app/inference.py"]

@@ -1,2 +0,0 @@
*
!python/tools/whisper/inference.py

@@ -1 +0,0 @@
"""Whisper transcription tools (host orchestrator and container entrypoint)."""

@@ -1,136 +0,0 @@
"""Container entrypoint that transcribes a directory of audio files with faster-whisper.

Run inside the whisper-transcribe docker image; segment timestamps are grouped
into one-minute buckets so the output reads as ``[HH:MM:00] text``.
"""

from __future__ import annotations

import argparse
import logging
from pathlib import Path

from faster_whisper import WhisperModel

logger = logging.getLogger(__name__)

AUDIO_EXTENSIONS = {".mp3", ".wav", ".m4a", ".flac", ".ogg", ".opus", ".mp4", ".mkv", ".webm", ".aac"}
BUCKET_SECONDS = 60
BEAM_SIZE = 5
SECONDS_PER_HOUR = 3600
SECONDS_PER_MINUTE = 60


def format_timestamp(total_seconds: float) -> str:
    """Render a whole-minute timestamp as ``HH:MM:00``.

    Args:
        total_seconds: Offset in seconds from the start of the audio.

    Returns:
        A zero-padded ``HH:MM:00`` string.
    """
    hours = int(total_seconds // SECONDS_PER_HOUR)
    minutes = int((total_seconds % SECONDS_PER_HOUR) // SECONDS_PER_MINUTE)
    return f"{hours:02d}:{minutes:02d}:00"
|
|
||||||
|
|
||||||
|
|
||||||
def transcribe_file(model: WhisperModel, audio_path: Path, output_path: Path) -> None:
|
|
||||||
"""Transcribe one audio file and write the bucketed transcript to disk.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
model: Loaded faster-whisper model.
|
|
||||||
audio_path: Source audio file.
|
|
||||||
output_path: Destination ``.txt`` path.
|
|
||||||
"""
|
|
||||||
logger.info("Transcribing %s", audio_path)
|
|
||||||
segments, info = model.transcribe(
|
|
||||||
str(audio_path),
|
|
||||||
language="en",
|
|
||||||
beam_size=BEAM_SIZE,
|
|
||||||
vad_filter=True,
|
|
||||||
)
|
|
||||||
logger.info("Duration %.1fs", info.duration)
|
|
||||||
|
|
||||||
buckets: dict[int, list[str]] = {}
|
|
||||||
for segment in segments:
|
|
||||||
bucket = int(segment.start // BUCKET_SECONDS)
|
|
||||||
buckets.setdefault(bucket, []).append(segment.text.strip())
|
|
||||||
|
|
||||||
lines = [f"[{format_timestamp(bucket * BUCKET_SECONDS)}] {' '.join(buckets[bucket])}" for bucket in sorted(buckets)]
|
|
||||||
output_path.write_text("\n\n".join(lines) + "\n", encoding="utf-8")
|
|
||||||
logger.info("Wrote %s", output_path)
|
|
||||||
|
|
||||||
|
|
||||||
def find_audio_files(input_directory: Path) -> list[Path]:
|
|
||||||
"""Collect every audio file under ``input_directory``.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
input_directory: Directory to walk recursively.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Sorted list of audio file paths.
|
|
||||||
"""
|
|
||||||
return sorted(
|
|
||||||
path for path in input_directory.rglob("*") if path.is_file() and path.suffix.lower() in AUDIO_EXTENSIONS
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def configure_container_logger() -> None:
|
|
||||||
"""Configure logging for the container (stdout, INFO)."""
|
|
||||||
logging.basicConfig(
|
|
||||||
level=logging.INFO,
|
|
||||||
format="%(asctime)s %(levelname)s %(message)s",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def parse_arguments() -> argparse.Namespace:
|
|
||||||
"""Parse CLI arguments for the container entrypoint.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Parsed argparse namespace.
|
|
||||||
"""
|
|
||||||
parser = argparse.ArgumentParser(description=__doc__)
|
|
||||||
parser.add_argument("--input", type=Path, default=Path("/audio"))
|
|
||||||
parser.add_argument("--output", type=Path, default=Path("/output"))
|
|
||||||
parser.add_argument("--model", default="large-v3")
|
|
||||||
parser.add_argument(
|
|
||||||
"--download-only",
|
|
||||||
action="store_true",
|
|
||||||
help="Download the model into the cache volume and exit without transcribing.",
|
|
||||||
)
|
|
||||||
return parser.parse_args()
|
|
||||||
|
|
||||||
|
|
||||||
def main() -> None:
|
|
||||||
"""Load the model, then either exit (download-only) or transcribe the directory."""
|
|
||||||
configure_container_logger()
|
|
||||||
arguments = parse_arguments()
|
|
||||||
|
|
||||||
logger.info("Loading model %s on CUDA", arguments.model)
|
|
||||||
model = WhisperModel(arguments.model, device="cuda", compute_type="float16")
|
|
||||||
|
|
||||||
if arguments.download_only:
|
|
||||||
logger.info("Model ready; exiting (download-only mode)")
|
|
||||||
return
|
|
||||||
|
|
||||||
arguments.output.mkdir(parents=True, exist_ok=True)
|
|
||||||
|
|
||||||
audio_files = find_audio_files(arguments.input)
|
|
||||||
if not audio_files:
|
|
||||||
logger.warning("No audio files found in %s", arguments.input)
|
|
||||||
return
|
|
||||||
|
|
||||||
logger.info("Found %d audio file(s)", len(audio_files))
|
|
||||||
for audio_path in audio_files:
|
|
||||||
relative = audio_path.relative_to(arguments.input)
|
|
||||||
output_path = arguments.output / relative.with_suffix(".txt")
|
|
||||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
|
||||||
if output_path.exists():
|
|
||||||
logger.info("Skip %s (already transcribed)", relative)
|
|
||||||
continue
|
|
||||||
transcribe_file(model, audio_path, output_path)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
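For reference, the entrypoint above defaults its mounts to /audio and /output and exposes --model and --download-only. A minimal sketch of a one-off manual run; the image tag, volume name, and cache path are taken from the runner script removed below, and the host paths are placeholders:

docker run --rm --device=nvidia.com/gpu=all --ipc=host \
  --volume /path/to/audio:/audio:ro \
  --volume /path/to/output:/output \
  --volume whisper-models:/root/.cache/huggingface \
  whisper-transcribe:latest --model large-v3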
@@ -1,167 +0,0 @@
-"""Build and run the whisper transcription docker container on demand.
-
-The container is started fresh for each invocation and removed on exit
-(``docker run --rm``). The model is cached in a named docker volume so
-only the first run pays the download cost.
-"""
-
-from __future__ import annotations
-
-import logging
-import subprocess
-from pathlib import Path
-from typing import Annotated
-
-import typer
-
-from python.common import configure_logger
-
-logger = logging.getLogger(__name__)
-
-
-class Config:
-    """Paths and names for the whisper-transcribe Docker workflow."""
-
-    image_tag = "whisper-transcribe:latest"
-    model_volume = "whisper-models"
-    repo_root = Path(__file__).resolve().parents[3]
-    dockerfile = Path(__file__).resolve().parent / "Dockerfile"
-    huggingface_cache = "/root/.cache/huggingface"
-
-
-def run_docker(arguments: list[str]) -> None:
-    """Run a docker subcommand, streaming output and raising on failure.
-
-    Args:
-        arguments: Arguments to pass to the ``docker`` binary.
-
-    Raises:
-        subprocess.CalledProcessError: If docker exits non-zero.
-    """
-    logger.info("docker %s", " ".join(arguments))
-    subprocess.run(["docker", *arguments], check=True)
-
-
-def build_image() -> None:
-    """Build the whisper-transcribe image using the repo root as build context."""
-    logger.info("Building image %s", Config.image_tag)
-    run_docker(
-        [
-            "build",
-            "--tag",
-            Config.image_tag,
-            "--file",
-            str(Config.dockerfile),
-            str(Config.repo_root),
-        ],
-    )
-
-
-def model_cache_present(model: str) -> bool:
-    """Check whether the given model is already downloaded in the cache volume.
-
-    Args:
-        model: faster-whisper model name (e.g. ``large-v3``).
-
-    Returns:
-        True if the HuggingFace cache directory for the model exists in the volume.
-    """
-    cache_directory = f"hub/models--Systran--faster-whisper-{model}"
-    completed = subprocess.run(
-        [
-            "docker",
-            "run",
-            "--rm",
-            "--volume",
-            f"{Config.model_volume}:/cache",
-            "alpine",
-            "test",
-            "-d",
-            f"/cache/{cache_directory}",
-        ],
-        check=False,
-    )
-    return completed.returncode == 0
-
-
-def download_model(model: str) -> None:
-    """Download the model into the cache volume and exit.
-
-    Args:
-        model: faster-whisper model name.
-    """
-    logger.info("Downloading model %s into volume %s", model, Config.model_volume)
-    run_docker(
-        [
-            "run",
-            "--rm",
-            "--device=nvidia.com/gpu=all",
-            "--ipc=host",
-            "--volume",
-            f"{Config.model_volume}:{Config.huggingface_cache}",
-            Config.image_tag,
-            "--model",
-            model,
-            "--download-only",
-        ],
-    )
-
-
-def transcribe(input_directory: Path, output_directory: Path, model: str) -> None:
-    """Run transcription on every audio file under ``input_directory``.
-
-    Args:
-        input_directory: Host path containing audio files (mounted read-only).
-        output_directory: Host path for ``.txt`` transcripts.
-        model: faster-whisper model name.
-    """
-    logger.info("Transcribing %s -> %s (model=%s)", input_directory, output_directory, model)
-    run_docker(
-        [
-            "run",
-            "--rm",
-            "--device=nvidia.com/gpu=all",
-            "--ipc=host",
-            "--volume",
-            f"{input_directory}:/audio:ro",
-            "--volume",
-            f"{output_directory}:/output",
-            "--volume",
-            f"{Config.model_volume}:{Config.huggingface_cache}",
-            Config.image_tag,
-            "--model",
-            model,
-        ],
-    )
-
-
-def main(
-    input_directory: Annotated[Path, typer.Argument(help="Directory of audio files to transcribe.")],
-    output_directory: Annotated[Path, typer.Argument(help="Directory to write .txt transcripts to.")],
-    model: Annotated[str, typer.Option(help="faster-whisper model name.")] = "large-v3",
-    *,
-    force_download: Annotated[
-        bool,
-        typer.Option("--force-download", help="Re-download the model even if already cached."),
-    ] = False,
-) -> None:
-    """Build the image, ensure the model is cached, then transcribe and stop."""
-    configure_logger()
-
-    resolved_input = input_directory.resolve(strict=True)
-    output_directory.mkdir(parents=True, exist_ok=True)
-    resolved_output = output_directory.resolve()
-
-    build_image()
-
-    if force_download or not model_cache_present(model):
-        download_model(model)
-    else:
-        logger.info("Model %s already cached in volume %s", model, Config.model_volume)
-
-    transcribe(resolved_input, resolved_output, model)
-    logger.info("Done. Container stopped.")
-
-
-if __name__ == "__main__":
-    typer.run(main)
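The removed runner's docstring captures the intended flow: build the image, prime the model cache once, then transcribe with a throwaway container. A rough manual equivalent, assembled from the arguments above; the Dockerfile location was relative to the deleted script, so the path here is an assumption:

docker build --tag whisper-transcribe:latest --file ./Dockerfile .
docker run --rm --device=nvidia.com/gpu=all --ipc=host \
  --volume whisper-models:/root/.cache/huggingface \
  whisper-transcribe:latest --model large-v3 --download-only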
@@ -1,10 +1,9 @@
-{ inputs, pkgs, ... }:
+{ inputs, ... }:
 {
   imports = [
-    "${inputs.self}/users/math"
     "${inputs.self}/users/richie"
-    "${inputs.self}/users/steve"
     "${inputs.self}/common/global"
+    "${inputs.self}/common/optional/desktop.nix"
     "${inputs.self}/common/optional/docker.nix"
     "${inputs.self}/common/optional/scanner.nix"
     "${inputs.self}/common/optional/steam.nix"
@@ -13,17 +12,13 @@
     "${inputs.self}/common/optional/update.nix"
     "${inputs.self}/common/optional/yubikey.nix"
     "${inputs.self}/common/optional/zerotier.nix"
+    "${inputs.self}/common/optional/brain_substituter.nix"
     "${inputs.self}/common/optional/nvidia.nix"
     ./hardware.nix
     ./syncthing.nix
     ./llms.nix
   ];

-  boot = {
-    kernelPackages = pkgs.linuxPackages_6_18;
-    zfs.package = pkgs.zfs_2_4;
-  };
-
   networking = {
     hostName = "bob";
     hostId = "7c678a41";
@@ -28,6 +28,7 @@
         allowDiscards = true;
         keyFileSize = 4096;
         keyFile = "/dev/disk/by-id/usb-Samsung_Flash_Drive_FIT_0374620080067131-0:0";
+        fallbackToPassword = true;
       };
     };
     kernelModules = [ "kvm-amd" ];
@@ -23,7 +23,6 @@
     "magistral:24b"
     "ministral-3:14b"
     "nemotron-3-nano:30b"
-    "nemotron-3-nano:4b"
     "nemotron-cascade-2:30b"
     "qwen3-coder:30b"
     "qwen3-embedding:0.6b"
@@ -42,13 +41,11 @@
     "qwen3:8b"
     "qwen3.5:27b"
     "qwen3.5:35b"
-    "qwen3.6:35b"
     "translategemma:12b"
     "translategemma:27b"
     "translategemma:4b"
-    "rinex20/translategemma3:12b"
   ];
-  models = "/zfs/storage/models";
+  models = "/zfs/models";
   openFirewall = true;
 };
 }
@@ -31,15 +31,5 @@
       ];
       fsWatcherEnabled = true;
     };
-    "recordings" = {
-      path = "/home/richie/recordings";
-      devices = [
-        "jeeves"
-        "phone"
-        "rhapsody-in-green"
-      ];
-      fsWatcherEnabled = true;
-    };
-
   };
 }
@@ -26,6 +26,7 @@
         allowDiscards = true;
         keyFileSize = 4096;
         keyFile = "/dev/disk/by-id/usb-USB_SanDisk_3.2Gen1_03021630090925173333-0:0";
+        fallbackToPassword = true;
       };
     };
     kernelModules = [ "kvm-intel" ];
@@ -4,10 +4,9 @@ let
 in
 {
   imports = [
-    "${inputs.self}/users/dov"
-    "${inputs.self}/users/math"
     "${inputs.self}/users/richie"
-    "${inputs.self}/users/steve"
+    "${inputs.self}/users/math"
+    "${inputs.self}/users/dov"
     "${inputs.self}/common/global"
     "${inputs.self}/common/optional/docker.nix"
     "${inputs.self}/common/optional/ssh_decrypt.nix"
@@ -16,7 +15,6 @@ in
     "${inputs.self}/common/optional/zerotier.nix"
     ./docker
     ./services
-    ./web_services
     ./hardware.nix
     ./networking.nix
     ./programs.nix
@@ -9,6 +9,7 @@ let
       inherit device;
       keyFileSize = 4096;
       keyFile = "/dev/disk/by-id/usb-XIAO_USB_Drive_24587CE29074-0:0";
+      fallbackToPassword = true;
     };
   makeLuksSSD =
     device:
@@ -21,8 +21,7 @@ sudo zfs create media/secure/docker -o compression=zstd-9
 sudo zfs create media/secure/github-runners -o compression=zstd-9 -o sync=disabled
 sudo zfs create media/secure/home_assistant -o compression=zstd-19
 sudo zfs create media/secure/notes -o copies=2
-sudo zfs create media/secure/postgres -o mountpoint=/zfs/media/database/postgres -o recordsize=16k -o primarycache=metadata
-sudo zfs create media/secure/postgres-wal -o mountpoint=/zfs/media/database/postgres-wal -o recordsize=32k -o primarycache=metadata -o special_small_blocks=32K -o compression=lz4 -o secondarycache=none -o logbias=latency
+sudo zfs create media/secure/postgres -o recordsize=16k -o primarycache=metadata
 sudo zfs create media/secure/services -o compression=zstd-9
 sudo zfs create media/secure/share -o mountpoint=/zfs/media/share -o exec=off
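To confirm the surviving dataset kept the tuned properties after dropping the WAL split, something like this sketch works on the host:

zfs get recordsize,primarycache,compression media/secure/postgres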
96 systems/jeeves/services/bluesky_firehose.nix Normal file
@@ -0,0 +1,96 @@
+{
+  pkgs,
+  inputs,
+  ...
+}:
+let
+  commonEnv = {
+    PYTHONPATH = "${inputs.self}";
+    KAFKA_BOOTSTRAP_SERVERS = "localhost:9092";
+    BLUESKY_FIREHOSE_TOPIC = "bluesky.firehose.posts";
+  };
+  commonServiceConfig = {
+    Type = "simple";
+    WorkingDirectory = "${inputs.self}";
+    User = "richie";
+    Group = "users";
+    Restart = "on-failure";
+    RestartSec = "10s";
+    StandardOutput = "journal";
+    StandardError = "journal";
+    NoNewPrivileges = true;
+    ProtectSystem = "strict";
+    ProtectHome = "read-only";
+    PrivateTmp = true;
+    ReadOnlyPaths = [ "${inputs.self}" ];
+  };
+in
+{
+  systemd.services.bluesky-firehose-topic-init = {
+    description = "Create Kafka topic for Bluesky firehose";
+    after = [ "apache-kafka.service" ];
+    requires = [ "apache-kafka.service" ];
+    wantedBy = [ "multi-user.target" ];
+
+    serviceConfig = {
+      Type = "oneshot";
+      RemainAfterExit = true;
+      ExecStart = pkgs.writeShellScript "create-bluesky-topic" ''
+        ${pkgs.apacheKafka}/bin/kafka-topics.sh \
+          --bootstrap-server localhost:9092 \
+          --create \
+          --if-not-exists \
+          --topic bluesky.firehose.posts \
+          --partitions 6 \
+          --replication-factor 1
+      '';
+    };
+  };
+
+  systemd.services.bluesky-firehose-producer = {
+    description = "Bluesky Jetstream to Kafka producer";
+    after = [
+      "network.target"
+      "apache-kafka.service"
+      "bluesky-firehose-topic-init.service"
+    ];
+    requires = [
+      "apache-kafka.service"
+      "bluesky-firehose-topic-init.service"
+    ];
+    wantedBy = [ "multi-user.target" ];
+
+    environment = commonEnv;
+
+    serviceConfig = commonServiceConfig // {
+      ExecStart = "${pkgs.my_python}/bin/python -m python.data_science.firehose_producer";
+    };
+  };
+
+  systemd.services.bluesky-firehose-consumer = {
+    description = "Bluesky Kafka to PostgreSQL consumer";
+    after = [
+      "network.target"
+      "apache-kafka.service"
+      "bluesky-firehose-topic-init.service"
+      "postgresql.service"
+    ];
+    requires = [
+      "apache-kafka.service"
+      "bluesky-firehose-topic-init.service"
+      "postgresql.service"
+    ];
+    wantedBy = [ "multi-user.target" ];
+
+    environment = commonEnv // {
+      DATA_SCIENCE_DEV_DB = "data_science_dev";
+      DATA_SCIENCE_DEV_USER = "richie";
+      DATA_SCIENCE_DEV_HOST = "/run/postgresql";
+      DATA_SCIENCE_DEV_PORT = "5432";
+    };
+
+    serviceConfig = commonServiceConfig // {
+      ExecStart = "${pkgs.my_python}/bin/python -m python.data_science.firehose_consumer";
+    };
+  };
+}
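Once apache-kafka.service is up, the oneshot's result can be checked with the same kafka-topics.sh the unit invokes, and the producer followed in the journal (a sketch; the binary ships in the apacheKafka package):

kafka-topics.sh --bootstrap-server localhost:9092 --describe --topic bluesky.firehose.posts
journalctl -u bluesky-firehose-producer -f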
17 systems/jeeves/services/cloud_flare_tunnel.nix Normal file
@@ -0,0 +1,17 @@
+{ pkgs, ... }:
+let
+  vars = import ../vars.nix;
+in
+{
+  systemd.services.cloud_flare_tunnel = {
+    description = "cloud_flare_tunnel proxies traffic through cloudflare";
+    after = [ "network.target" ];
+    wantedBy = [ "multi-user.target" ];
+    serviceConfig = {
+      Type = "simple";
+      EnvironmentFile = "${vars.secrets}/docker/cloud_flare_tunnel";
+      ExecStart = "${pkgs.cloudflared}/bin/cloudflared --no-autoupdate tunnel run";
+      Restart = "on-failure";
+    };
+  };
+}
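The unit keeps its credentials out of the Nix store via EnvironmentFile. Assuming a token-based tunnel, cloudflared reads TUNNEL_TOKEN from the environment, so the secrets file would look roughly like this (the value is a placeholder):

# contents of ${vars.secrets}/docker/cloud_flare_tunnel (sketch)
TUNNEL_TOKEN=<token from the Cloudflare dashboard>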
@@ -2,10 +2,7 @@ let
   vars = import ../vars.nix;
 in
 {
-  networking.firewall.allowedTCPPorts = [
-    6443
-    2223
-  ];
+  networking.firewall.allowedTCPPorts = [ 6443 ];

   services.gitea = {
     enable = true;
@@ -27,7 +24,7 @@ in
       ROOT_URL = "https://gitea.tmmworkshop.com/";
       HTTP_PORT = 6443;
       SSH_PORT = 2223;
-      SSH_LISTEN_PORT = 2223;
+      SSH_LISTEN_PORT = 2224;
       START_SSH_SERVER = true;
       PUBLIC_URL_DETECTION = "auto";
     };
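With 2223 no longer exposed and only 6443 open, Gitea's HTTP endpoint can still be sanity-checked from the host itself (a sketch):

curl -sI http://127.0.0.1:6443 | head -n 1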
@@ -6,7 +6,6 @@ global
 defaults
     log global
     mode http
-    option httplog
    retries 3
    maxconn 2000
    timeout connect 5s
@@ -23,37 +22,25 @@ defaults
 #Application Setup
 frontend ContentSwitching
     bind *:80 v4v6
-    bind *:443 v4v6 ssl crt /var/lib/acme/audiobookshelf.tmmworkshop.com/full.pem crt /var/lib/acme/cache.tmmworkshop.com/full.pem crt /var/lib/acme/jellyfin.tmmworkshop.com/full.pem crt /var/lib/acme/share.tmmworkshop.com/full.pem crt /var/lib/acme/gitea.tmmworkshop.com/full.pem
+    bind *:443 v4v6 ssl crt /zfs/storage/secrets/docker/cloudflare.pem
     mode http

-    # ACME challenge routing (must be first)
-    acl is_acme path_beg /.well-known/acme-challenge/
-    use_backend acme_challenge if is_acme
-
     # tmmworkshop.com
     acl host_audiobookshelf hdr(host) -i audiobookshelf.tmmworkshop.com
     acl host_cache hdr(host) -i cache.tmmworkshop.com
     acl host_jellyfin hdr(host) -i jellyfin.tmmworkshop.com
     acl host_share hdr(host) -i share.tmmworkshop.com
+    acl host_gcw hdr(host) -i gcw.tmmworkshop.com
+    acl host_n8n hdr(host) -i n8n.tmmworkshop.com
     acl host_gitea hdr(host) -i gitea.tmmworkshop.com

-    # Hosts allowed to serve plain HTTP (add entries to skip the HTTPS redirect)
-    acl allow_http hdr(host) -i __none__
-    # acl allow_http hdr(host) -i example.tmmworkshop.com
-
-    # Redirect all HTTP to HTTPS unless on the allow list or ACME challenge
-    http-request redirect scheme https code 301 if !{ ssl_fc } !allow_http !is_acme
-
     use_backend audiobookshelf_nodes if host_audiobookshelf
     use_backend cache_nodes if host_cache
     use_backend jellyfin if host_jellyfin
     use_backend share_nodes if host_share
+    use_backend gcw_nodes if host_gcw
+    use_backend n8n if host_n8n
     use_backend gitea if host_gitea

-backend acme_challenge
-    mode http
-    server acme 127.0.0.1:8402
-
 backend audiobookshelf_nodes
     mode http
     server server 127.0.0.1:8000
@@ -73,6 +60,14 @@ backend share_nodes
     mode http
     server server 127.0.0.1:8091

+backend gcw_nodes
+    mode http
+    server server 127.0.0.1:8092
+
+backend n8n
+    mode http
+    server server 127.0.0.1:5678
+
 backend gitea
     mode http
     server server 127.0.0.1:6443
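The new host ACLs can be exercised without touching DNS by pinning resolution in curl (a sketch; -k because the Cloudflare origin certificate is not in the local trust store):

curl -skI --resolve n8n.tmmworkshop.com:443:127.0.0.1 https://n8n.tmmworkshop.com/ | head -n 1
curl -skI --resolve gcw.tmmworkshop.com:443:127.0.0.1 https://gcw.tmmworkshop.com/ | head -n 1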
@@ -1,24 +0,0 @@
-{
-  services.hedgedoc = {
-    enable = true;
-    settings = {
-      host = "0.0.0.0";
-      port = 3000;
-      domain = "192.168.90.40";
-      urlAddPort = true;
-      protocolUseSSL = false;
-      db = {
-        dialect = "postgres";
-        database = "hedgedoc";
-        username = "hedgedoc";
-        host = "/run/postgresql";
-      };
-    };
-  };
-  networking.firewall.allowedTCPPorts = [ 3000 ];
-
-  systemd.services.hedgedoc = {
-    after = [ "postgresql.service" ];
-    requires = [ "postgresql.service" ];
-  };
-}
@@ -7,13 +7,6 @@ in
     settings = {
       listeners = [ "PLAINTEXT://localhost:9092" ];
       "log.dirs" = [ vars.kafka ];
-      "num.partitions" = 6;
-      "default.replication.factor" = 1;
-      "log.retention.hours" = 168;
-      "log.retention.bytes" = 10737418240;
-      "log.segment.bytes" = 1073741824;
-      "log.cleanup.policy" = "delete";
-      "auto.create.topics.enable" = false;
     };
   };
 }
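With the broker-level defaults gone, topic layout now comes from the topic-init unit above; any per-topic configuration overrides can be inspected with kafka-configs (a sketch):

kafka-configs.sh --bootstrap-server localhost:9092 --entity-type topics --entity-name bluesky.firehose.posts --describe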
@@ -5,10 +5,6 @@ in
 {
   networking.firewall.allowedTCPPorts = [ 5432 ];

-  # Symlink pg_wal to a ZFS dataset on the special (metadata) vdev for fast WAL writes
-  # this is required for systemd sandboxing
-  systemd.services.postgresql.serviceConfig.ReadWritePaths = [ "/zfs/media/database/postgres-wal" ];
-
   services.postgresql = {
     enable = true;
     package = pkgs.postgresql_17_jit;
@@ -37,9 +33,6 @@ in
     # signalbot
     local signalbot signalbot trust

-    # hedgedoc
-    local hedgedoc hedgedoc trust
-
     # math
     local postgres math trust
     host postgres math 127.0.0.1/32 trust
@@ -119,19 +112,11 @@ in
         login = true;
       };
     }
-    {
-      name = "hedgedoc";
-      ensureDBOwnership = true;
-      ensureClauses = {
-        login = true;
-      };
-    }
   ];
   ensureDatabases = [
     "data_science_dev"
     "hass"
     "gitea"
-    "hedgedoc"
     "math"
     "n8n"
     "richie"
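After dropping the hedgedoc role and database, the remaining databases can be listed over the local socket to confirm (a sketch):

psql -h /run/postgresql -U richie -d postgres -c '\l'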
@@ -1,6 +1,7 @@
 zpool = ["root_pool", "storage", "media"]
 services = [
     "audiobookshelf",
+    "cloud_flare_tunnel",
     "haproxy",
     "docker",
     "home-assistant",
@@ -4,7 +4,6 @@ hourly = 24
 daily = 0
 monthly = 0

-# root_pool
 ["root_pool/home"]
 15_min = 8
 hourly = 24
@@ -28,96 +27,57 @@ monthly = 0
 hourly = 24
 daily = 30
 monthly = 6
-# storage
-["storage/ollama"]
-15_min = 2
-hourly = 0
-daily = 0
-monthly = 0
-
-["storage/secure"]
-15_min = 0
-hourly = 0
-daily = 0
-monthly = 0
-
-["storage/secure/plex"]
+["storage/plex"]
 15_min = 6
 hourly = 2
 daily = 1
 monthly = 0

-["storage/secure/transmission"]
-15_min = 4
-hourly = 0
-daily = 0
-monthly = 0
+["media/plex"]
+15_min = 6
+hourly = 2
+daily = 1
+monthly = 0

-["storage/secure/secrets"]
+["media/notes"]
 15_min = 8
 hourly = 24
 daily = 30
 monthly = 12

-# media
-["media/temp"]
-15_min = 2
-hourly = 0
-daily = 0
-monthly = 0
-
-["media/secure"]
-15_min = 0
-hourly = 0
-daily = 0
-monthly = 0
-
-["media/secure/plex"]
-15_min = 6
-hourly = 2
-daily = 1
-monthly = 0
-
-["media/secure/postgres-wal"]
-15_min = 4
-hourly = 2
-daily = 0
-monthly = 0
-
-["media/secure/postgres"]
-15_min = 8
-hourly = 24
-daily = 7
-monthly = 0
-
-["media/secure/share"]
-15_min = 4
-hourly = 0
-daily = 0
-monthly = 0
-
-["media/secure/github-runners"]
-15_min = 6
-hourly = 2
-daily = 1
-monthly = 0
-
-["media/secure/notes"]
-15_min = 8
-hourly = 24
-daily = 30
-monthly = 12
-
-["media/secure/docker"]
+["media/docker"]
 15_min = 3
 hourly = 12
 daily = 14
 monthly = 2

-# scratch
+["media/services"]
+15_min = 3
+hourly = 12
+daily = 14
+monthly = 2
+
+["media/home_assistant"]
+15_min = 3
+hourly = 12
+daily = 14
+monthly = 2
+
 ["scratch/transmission"]
-15_min = 2
+15_min = 0
+hourly = 0
+daily = 0
+monthly = 0
+
+["storage/transmission"]
+15_min = 0
+hourly = 0
+daily = 0
+monthly = 0
+
+["storage/ollama"]
+15_min = 0
 hourly = 0
 daily = 0
 monthly = 0
@@ -89,16 +89,7 @@ in
       ];
       fsWatcherEnabled = true;
     };
-    "recordings" = {
-      path = "/home/richie/recordings";
-      devices = [
-        "bob"
-        "phone"
-        "rhapsody-in-green"
-      ];
-      fsWatcherEnabled = true;
-    };
-    # davids-server
+    #
     "davids-backup1" = {
       id = "8229p-8z3tm"; # cspell:disable-line
       path = "${vars.syncthing}/davids_backups/1";
@@ -1,62 +0,0 @@
-let
-  domains = [
-    "audiobookshelf"
-    "cache"
-    "gitea"
-    "jellyfin"
-    "share"
-  ];
-
-  makeCert = name: {
-    name = "${name}.tmmworkshop.com";
-    value = {
-      webroot = "/var/lib/acme/.challenges";
-      group = "acme";
-      reloadServices = [ "haproxy.service" ];
-    };
-  };
-
-  acmeServices = map (domain: "acme-${domain}.tmmworkshop.com.service") domains;
-in
-{
-  users.users.haproxy.extraGroups = [ "acme" ];
-
-  security.acme = {
-    acceptTerms = true;
-    defaults.email = "Richie@tmmworkshop.com";
-    certs = builtins.listToAttrs (map makeCert domains);
-  };
-
-  # Minimal nginx to serve ACME HTTP-01 challenge files for HAProxy
-  services.nginx = {
-    enable = true;
-    virtualHosts."acme-challenge" = {
-      listen = [
-        {
-          addr = "127.0.0.1";
-          port = 8402;
-        }
-      ];
-      locations."/.well-known/acme-challenge/" = {
-        root = "/var/lib/acme/.challenges";
-      };
-    };
-  };
-
-  # Ensure the challenge directory exists with correct permissions
-  systemd.tmpfiles.rules = [
-    "d /var/lib/acme/.challenges 0750 acme acme - -"
-    "d /var/lib/acme/.challenges/.well-known 0750 acme acme - -"
-    "d /var/lib/acme/.challenges/.well-known/acme-challenge 0750 acme acme - -"
-  ];
-
-  users.users.nginx.extraGroups = [ "acme" ];
-
-  # HAProxy needs certs to exist before it can bind :443.
-  # NixOS's acme module generates self-signed placeholders on first boot
-  # via acme-<domain>.service — just make HAProxy wait for them.
-  systemd.services.haproxy = {
-    after = acmeServices;
-    wants = acmeServices;
-  };
-}
@@ -1,9 +0,0 @@
-{ lib, ... }:
-{
-  imports =
-    let
-      files = builtins.attrNames (builtins.readDir ./.);
-      nixFiles = builtins.filter (name: lib.hasSuffix ".nix" name && name != "default.nix") files;
-    in
-    map (file: ./. + "/${file}") nixFiles;
-}
@@ -14,7 +14,6 @@
     ./llms.nix
     ./open_webui.nix
     ./qmk.nix
-    ./sunshine.nix
     ./syncthing.nix
     inputs.nixos-hardware.nixosModules.framework-13-7040-amd
   ];
@@ -24,19 +23,11 @@
     hostId = "6404140d";
     firewall = {
       enable = true;
-      allowedTCPPorts = [
-        8000
-        8080
-      ];
+      allowedTCPPorts = [ ];
     };
     networkmanager.enable = true;
   };

-  programs.appimage = {
-    enable = true;
-    binfmt = true; # allows *.AppImage to be run directly
-  };
-
   services = {
     openssh.ports = [ 922 ];
     flatpak.enable = true;
Binary file not shown.
@@ -8,8 +8,9 @@
     "deepscaler:1.5b"
     "deepseek-r1:8b"
     "gemma3:12b"
+    "gemma3:27b"
+    "gpt-oss:20b"
     "lfm2:24b"
-    "nemotron-3-nano:4b"
     "qwen3:14b"
     "qwen3.5:27b"
   ];
@@ -1,28 +0,0 @@
-{ pkgs, ... }:
-{
-  services.sunshine = {
-    enable = true;
-    openFirewall = true;
-    capSysAdmin = true;
-  };
-  environment.systemPackages = [ pkgs.kdePackages.libkscreen ];
-
-  boot = {
-    kernelParams = [
-      "drm.edid_firmware=DP-4:edid/virtual-display.bin"
-      "video=DP-4:e"
-    ];
-  };
-
-  hardware.firmware = [
-    (pkgs.runCommandLocal "virtual-display-edid"
-      {
-        compressFirmware = false;
-      }
-      ''
-        mkdir -p $out/lib/firmware/edid
-        cp ${./edid/virtual-display.bin} $out/lib/firmware/edid/virtual-display.bin
-      ''
-    )
-  ];
-}
@@ -55,15 +55,6 @@
       ];
       fsWatcherEnabled = true;
     };
-    "recordings" = {
-      path = "/home/richie/recordings";
-      devices = [
-        "bob"
-        "jeeves"
-        "phone"
-      ];
-      fsWatcherEnabled = true;
-    };
     "vault" = {
       path = "/home/richie/vault";
       devices = [
@@ -210,9 +210,9 @@ class TestContactCache:
         mock_session_cls.return_value.__exit__ = MagicMock(return_value=False)
         mock_device = MagicMock()
         mock_device.trust_level = TrustLevel.UNVERIFIED
-        mock_session.scalars.return_value.one_or_none.return_value = mock_device
+        mock_session.execute.return_value.scalar_one_or_none.return_value = mock_device
         registry.record_contact("+1234", "abc")
-        mock_session.scalars.assert_called_once()
+        mock_session.execute.assert_called_once()


 class TestLocationCommand:
@@ -1,7 +1,6 @@
 {
   programs.git = {
     enable = true;
-    signing.format = null;
     settings = {
       user = {
         email = "dov.kruger@gmail.com";
@@ -1,7 +1,6 @@
 {
   programs.git = {
     enable = true;
-    signing.format = null;
     settings = {
       user = {
         email = "DumbPuppy208@gmail.com";
@@ -1,7 +1,6 @@
 {
   programs.git = {
     enable = true;
-    signing.format = null;
     settings = {
       user = {
         email = "matthew.michal11@gmail.com";
@@ -1,5 +0,0 @@
-{
-  imports = [
-    ../home/global.nix
-  ];
-}
@@ -1,7 +1,6 @@
 {
   programs.git = {
     enable = true;
-    signing.format = null;
     settings = {
       user = {
         email = "Richie@tmmworkshop.com";
@@ -22,10 +22,9 @@
     chromium
     # dev tools
     claude-code
-    codex
     gparted
     jetbrains.datagrip
-    opencode
     proxychains
+    opencode
   ];
 }
@@ -20,15 +20,15 @@
   // turns off all sounds and announcements
   "accessibility.signals.terminalCommandFailed": {
     "sound": "off",
-    "announcement": "off",
+    "announcement": "off"
   },
   "accessibility.signals.terminalQuickFix": {
     "sound": "off",
-    "announcement": "off",
+    "announcement": "off"
   },
   "accessibility.signals.terminalBell": {
     "sound": "off",
-    "announcement": "off",
+    "announcement": "off"
   },

   // database settings
@@ -41,8 +41,8 @@
       "driver": "PostgreSQL",
       "name": "main",
       "database": "postgres",
-      "username": "richie",
-    },
+      "username": "richie"
+    }
   ],

   // formatters
@@ -55,7 +55,7 @@
   "[yaml]": { "editor.defaultFormatter": "redhat.vscode-yaml" },
   "[javascriptreact]": { "editor.defaultFormatter": "esbenp.prettier-vscode" },
   "[github-actions-workflow]": {
-    "editor.defaultFormatter": "redhat.vscode-yaml",
+    "editor.defaultFormatter": "redhat.vscode-yaml"
   },
   "[dockercompose]": {
     "editor.insertSpaces": true,
@@ -64,9 +64,9 @@
     "editor.quickSuggestions": {
       "other": true,
       "comments": false,
-      "strings": true,
+      "strings": true
     },
-    "editor.defaultFormatter": "redhat.vscode-yaml",
+    "editor.defaultFormatter": "redhat.vscode-yaml"
   },

   // spell check
@@ -78,8 +78,7 @@
     "Corvidae",
     "drivername",
     "fastapi",
-    "sandboxing",
-    "syncthing",
+    "syncthing"
   ],

   // nix
@@ -97,5 +96,5 @@
   // new
   "hediet.vscode-drawio.resizeImages": null,
   "hediet.vscode-drawio.appearance": "automatic",
-  "claudeCode.preferredLocation": "panel",
+  "claudeCode.preferredLocation": "panel"
 }
@@ -1,5 +1,6 @@
 {
   imports = [
     ../home/global.nix
+    ../home/gui
   ];
 }
@@ -1,44 +0,0 @@
-{
-  pkgs,
-  config,
-  ...
-}:
-let
-  ifTheyExist = groups: builtins.filter (group: builtins.hasAttr group config.users.groups) groups;
-in
-{
-
-  users = {
-    users.steve = {
-      isNormalUser = true;
-
-      shell = pkgs.zsh;
-      group = "steve";
-      openssh.authorizedKeys.keys = [
-        "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJH03VzDbUhzfhvwD+OsYh6GobODYaI9jdNdzWQoqFsp matth@Jove" # cspell:disable-line
-      ];
-      extraGroups = [
-        "audio"
-        "video"
-        "wheel"
-        "users"
-      ]
-      ++ ifTheyExist [
-        "dialout"
-        "docker"
-        "hass"
-        "libvirtd"
-        "networkmanager"
-        "plugdev"
-        "scanner"
-        "transmission"
-        "uaccess"
-        "wireshark"
-      ];
-      uid = 1005;
-    };
-
-    groups.steve.gid = 1005;
-  };
-  home-manager.users.steve = import ./systems/${config.networking.hostName}.nix;
-}
@@ -1,9 +0,0 @@
-{
-  imports = [
-    ./direnv.nix
-    ./git.nix
-    ./zsh.nix
-  ];
-
-  programs.starship.enable = true;
-}
@@ -1,8 +0,0 @@
-{
-
-  programs.direnv = {
-    enable = true;
-    enableZshIntegration = true;
-    nix-direnv.enable = true;
-  };
-}
@@ -1,15 +0,0 @@
-{
-  programs.git = {
-    enable = true;
-    signing.format = null;
-    settings = {
-      user = {
-        email = "matthew.michal11@gmail.com";
-        name = "Matthew Michal";
-      };
-      pull.rebase = true;
-      color.ui = true;
-    };
-    lfs.enable = true;
-  };
-}
@@ -1,28 +0,0 @@
-{
-  programs.zsh = {
-    enable = true;
-    syntaxHighlighting.enable = true;
-    history.size = 10000;
-    oh-my-zsh = {
-      enable = true;
-      plugins = [
-        "git"
-        "docker"
-        "docker-compose"
-        "colored-man-pages"
-        "rust"
-        "systemd"
-        "tmux"
-        "ufw"
-        "z"
-      ];
-    };
-    shellAliases = {
-      "lrt" = "eza --icons -lsnew";
-      "ls" = "eza";
-      "ll" = "eza --long --group";
-      "la" = "eza --all";
-
-    };
-  };
-}
@@ -1,22 +0,0 @@
-{ config, ... }:
-{
-  imports = [
-    ./cli
-    ./programs.nix
-    ./ssh_config.nix
-  ];
-
-  programs = {
-    home-manager.enable = true;
-    git.enable = true;
-  };
-
-  home = {
-    username = "steve";
-    homeDirectory = "/home/${config.home.username}";
-    stateVersion = "24.05";
-    sessionVariables = {
-      FLAKE = "$HOME/dotfiles";
-    };
-  };
-}
@@ -1,57 +0,0 @@
-{ pkgs, ... }:
-{
-  home.packages = with pkgs; [
-    # cli
-    bat
-    btop
-    eza
-    fd
-    ffmpegthumbnailer
-    fzf
-    git
-    gnupg
-    imagemagick
-    jq
-    ncdu
-    ouch
-    p7zip
-    poppler
-    rar
-    ripgrep
-    starship
-    tmux
-    unzip
-    yazi
-    zoxide
-    # system info
-    hwloc
-    lynis
-    pciutils
-    smartmontools
-    usbutils
-    # networking
-    iperf3
-    nmap
-    wget
-    # python
-    poetry
-    ruff
-    uv
-    # nodejs
-    nodejs
-    # Rust packages
-    trunk
-    wasm-pack
-    cargo-watch
-    cargo-generate
-    cargo-audit
-    cargo-update
-    # nix
-    nix-init
-    nix-output-monitor
-    nix-prefetch
-    nix-tree
-    nixfmt
-    treefmt
-  ];
-}
@@ -1,6 +0,0 @@
-{
-  programs.ssh = {
-    enable = true;
-    enableDefaultConfig = false;
-  };
-}
@@ -1,5 +0,0 @@
-{
-  imports = [
-    ../home/global.nix
-  ];
-}
@@ -1,5 +0,0 @@
-{
-  imports = [
-    ../home/global.nix
-  ];
-}