Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/sandbox-integration.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ jobs:
python-version: "3.11"

- name: Install slicks
run: pip install "slicks>=2.0.1"
run: pip install "slicks>=0.2.1"

- name: Discover sensors for September 2025
env:
Expand Down
4 changes: 4 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -219,3 +219,7 @@ installer/slackbot/*.jpeg

# Generated CSV data files
generated-days/

# Node dependencies
node_modules/
.claude/settings.local.json
10 changes: 9 additions & 1 deletion installer/data-downloader/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,12 @@ SENSOR_WINDOW_DAYS=7
SENSOR_LOOKBACK_DAYS=30
SCAN_INTERVAL_SECONDS=3600
VITE_API_BASE_URL=http://localhost:8000
ALLOWED_ORIGINS=http://localhost:3000,http://localhost:5173
ALLOWED_ORIGINS=http://localhost:3000,http://localhost:5173,https://daq.westernformularacing.org

# Health monitor (optional — defaults work for standard docker-compose stack)
INFLUXDB_HEALTH_DATABASE=monitoring
HEALTH_MONITOR_INTERVAL_SECONDS=60
HEALTH_MONITOR_INFLUXDB_CONTAINER=influxdb3
HEALTH_MONITOR_SCANNER_CONTAINER=data-downloader-scanner
HEALTH_MONITOR_SCANNER_API_URL=http://data-downloader-api:8000
HEALTH_MONITOR_INFLUXDB_VOLUME_SUFFIX=influxdb3-data
42 changes: 39 additions & 3 deletions installer/data-downloader/backend/app.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
from __future__ import annotations

from datetime import datetime
from datetime import datetime, timezone
import logging
from typing import List

import docker

from fastapi import BackgroundTasks, FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
Expand All @@ -25,6 +29,7 @@ class DataQueryPayload(BaseModel):

settings = get_settings()
service = DataDownloaderService(settings)
logger = logging.getLogger(__name__)

app = FastAPI(title="DAQ Data Downloader API")
app.add_middleware(
Expand All @@ -41,6 +46,36 @@ def healthcheck() -> dict:
return {"status": "ok"}


def _docker_container_running(container_name: str) -> bool:
    """Return True if the named Docker container is currently running.

    Returns False when no container with that name exists. Any other
    Docker/API failure is wrapped in RuntimeError so the caller can map
    it to an HTTP 503.
    """
    docker_client = None
    try:
        docker_client = docker.from_env()
        container = docker_client.containers.get(container_name)
        return bool(container.attrs.get("State", {}).get("Running", False))
    except docker.errors.NotFound:
        return False
    except Exception as e:
        raise RuntimeError(f"Docker inspection failed for {container_name}: {e}") from e
    finally:
        # A client per call is cheap, but it holds a requests session —
        # close it so each health probe does not leak a connection pool.
        if docker_client is not None:
            docker_client.close()


@app.get("/api/health-status")
def health_status() -> dict:
    """Report container health derived from live Docker inspection.

    Returns booleans for the influxdb3 and scanner containers, a UTC
    timestamp, and the last scan duration from the scanner status store.
    Raises HTTPException(503) when inspection or the status lookup fails.
    """
    try:
        status = service.get_scanner_status()
        checked_at = datetime.now(timezone.utc).isoformat()
        # NOTE(review): container names are hardcoded here while the env
        # exposes HEALTH_MONITOR_* container-name overrides — confirm intent.
        influx_running = _docker_container_running("influxdb3")
        scanner_running = _docker_container_running("data-downloader-scanner")
        return {
            "influxdb3": influx_running,
            "scanner": scanner_running,
            "last_updated": checked_at,
            "last_scan_duration_seconds": status.get("last_scan_duration_seconds"),
        }
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=503, detail=str(e))


@app.get("/api/seasons")
def list_seasons() -> List[dict]:
return service.get_seasons()
Expand Down Expand Up @@ -70,8 +105,9 @@ def save_note(key: str, payload: NotePayload, season: str | None = None) -> dict


@app.post("/api/scan")
def trigger_scan(background_tasks: BackgroundTasks) -> dict:
background_tasks.add_task(service.run_full_scan, "manual")
def trigger_scan(background_tasks: BackgroundTasks, season: str | None = None) -> dict:
    """Queue a full scan in the background, optionally scoped to one season.

    An empty/missing ``season`` scans all configured seasons (None is
    passed through to ``run_full_scan``).
    """
    background_tasks.add_task(
        service.run_full_scan, "manual", [season] if season else None
    )
    return {"status": "scheduled"}


Expand Down
15 changes: 8 additions & 7 deletions installer/data-downloader/backend/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,17 +13,18 @@ def _parse_origins(raw: str | None) -> List[str]:


class SeasonConfig(BaseModel):
name: str # e.g. "WFR25"
year: int # e.g. 2025
name: str # e.g. "WFR25"
year: int # e.g. 2025
database: str # e.g. "WFR25"
color: str | None = None # e.g. "222 76 153"
table: str # e.g. "WFR25" — InfluxDB table name inside the database
color: str | None = None


def _parse_seasons(raw: str | None) -> List[SeasonConfig]:
"""Parse SEASONS env var: "WFR25:2025:222 76 153,WFR26:2026:..."."""
if not raw:
# Default fallback if not set
return [SeasonConfig(name="WFR25", year=2025, database="WFR25", color="#DE4C99")]
return [SeasonConfig(name="WFR25", year=2025, database="WFR25", table="WFR25", color="#DE4C99")]

seasons = []
for part in raw.split(","):
Expand All @@ -45,13 +46,13 @@ def _parse_seasons(raw: str | None) -> List[SeasonConfig]:

color = parts[2] if len(parts) > 2 else None

# Assume DB name matches Season Name
seasons.append(SeasonConfig(name=name, year=year, database=name, color=color))
# DB and table name both match season name by convention (WFR25→WFR25, WFR26→WFR26)
seasons.append(SeasonConfig(name=name, year=year, database=name, table=name, color=color))
except ValueError:
continue

if not seasons:
return [SeasonConfig(name="WFR25", year=2025, database="WFR25")]
return [SeasonConfig(name="WFR25", year=2025, database="WFR25", table="WFR25")]

# Sort by year descending (newest first)
seasons.sort(key=lambda s: s.year, reverse=True)
Expand Down
5 changes: 3 additions & 2 deletions installer/data-downloader/backend/periodic_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,9 @@ async def run_worker():

while True:
try:
logging.info("Running scheduled scan...")
service.run_full_scan(source="periodic")
active_season = settings.seasons[0] # sorted descending by year; first = active
logging.info(f"Running scheduled scan for active season: {active_season.name}")
service.run_full_scan(source="periodic", season_names=[active_season.name])
logging.info("Finished scheduled scan.")

if daily_time:
Expand Down
3 changes: 2 additions & 1 deletion installer/data-downloader/backend/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,5 @@ fastapi==0.115.4
uvicorn[standard]==0.23.2
influxdb3-python==0.16.0
pydantic==2.9.2
slicks>=0.2.0
slicks>=0.2.1
docker>=7.0.0
20 changes: 13 additions & 7 deletions installer/data-downloader/backend/services.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,14 +80,15 @@ def get_seasons(self) -> List[dict]:
for s in self.settings.seasons
]

def run_full_scan(self, source: str = "manual") -> Dict[str, dict]:
def run_full_scan(self, source: str = "manual", season_names: list[str] | None = None) -> Dict[str, dict]:
self.status_repo.mark_start(source)
results = {}
errors = []

try:
# Sort seasons by year descending to ensure most recent is scanned first
sorted_seasons = sorted(self.settings.seasons, key=lambda s: s.year, reverse=True)
if season_names is not None:
sorted_seasons = [s for s in sorted_seasons if s.name in season_names]
for season in sorted_seasons:
try:
logger.info(f"Scanning season {season.name} (DB: {season.database})...")
Expand All @@ -97,7 +98,7 @@ def run_full_scan(self, source: str = "manual") -> Dict[str, dict]:
host=self.settings.influx_host,
token=self.settings.influx_token,
database=season.database,
table=f"{self.settings.influx_schema}.{self.settings.influx_table}",
table=f"{self.settings.influx_schema}.{season.table}",
year=season.year,
bin_size=self.settings.scanner_bin,
include_counts=self.settings.scanner_include_counts,
Expand All @@ -116,7 +117,7 @@ def run_full_scan(self, source: str = "manual") -> Dict[str, dict]:
token=self.settings.influx_token,
database=season.database,
schema=self.settings.influx_schema,
table=self.settings.influx_table,
table=season.table,
window_days=self.settings.sensor_window_days,
lookback_days=self.settings.sensor_lookback_days,
fallback_start=fallback_start,
Expand All @@ -136,13 +137,18 @@ def run_full_scan(self, source: str = "manual") -> Dict[str, dict]:
errors.append(f"{season.name}: {str(e)}")
# Continue scanning other seasons even if one fails

total_runs = sum(v["runs"] for v in results.values())
total_sensors = sum(v["sensors"] for v in results.values())
if errors:
self.status_repo.mark_finish(success=False, error="; ".join(errors))
else:
self.status_repo.mark_finish(success=True)
self.status_repo.mark_finish(
success=True,
runs_count=total_runs,
sensors_count=total_sensors,
)

return results

except Exception as exc:
self.status_repo.mark_finish(success=False, error=str(exc))
raise
Expand Down
35 changes: 31 additions & 4 deletions installer/data-downloader/backend/storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ def merge_scanned_runs(self, scanned: List[dict]) -> dict:

# Keep runs that vanished but still have notes to preserve manual metadata
for key, run in existing.items():
if key not in merged:
if key not in merged and run.get("note"):
merged[key] = run

runs_list = sorted(
Expand Down Expand Up @@ -158,6 +158,11 @@ def __init__(self, data_dir: Path):
"source": None,
"last_result": None,
"error": None,
"last_successful_job_timestamp": None,
"error_count": 0,
"last_scan_runs_count": None,
"last_scan_sensors_count": None,
"last_scan_duration_seconds": None,
}
self.store = JSONStore(data_dir / "scanner_status.json", default)

Expand All @@ -178,19 +183,41 @@ def mark_start(self, source: str) -> dict:
self.store.write(payload)
return payload

def mark_finish(self, success: bool, error: str | None = None) -> dict:
def mark_finish(
    self,
    success: bool,
    error: str | None = None,
    runs_count: int | None = None,
    sensors_count: int | None = None,
) -> dict:
    """Record completion of a scan in the status store and return the payload.

    On success: clears any stored error, stamps last_successful_job_timestamp,
    stores run/sensor counts when provided, and derives the scan duration from
    ``started_at``. On failure: stores the error message and increments
    ``error_count``. Always updates ``finished_at`` and ``updated_at``.
    """
    payload = self.store.read()
    now = now_iso()
    payload.update(
        {
            "scanning": False,
            "finished_at": now,
            "last_result": "success" if success else "error",
        }
    )
    if success:
        payload.pop("error", None)
        payload["last_successful_job_timestamp"] = now
        if runs_count is not None:
            payload["last_scan_runs_count"] = runs_count
        if sensors_count is not None:
            payload["last_scan_sensors_count"] = sensors_count
        started_at = payload.get("started_at")
        if started_at:
            try:
                duration = (
                    datetime.fromisoformat(now) - datetime.fromisoformat(started_at)
                ).total_seconds()
                payload["last_scan_duration_seconds"] = round(duration, 2)
            except ValueError:
                # Malformed timestamp — skip the duration rather than
                # failing the whole status update.
                pass
    else:
        payload["error"] = error or "scan failed"
        payload["error_count"] = payload.get("error_count", 0) + 1
    payload["updated_at"] = now
    self.store.write(payload)
    return payload
16 changes: 15 additions & 1 deletion installer/data-downloader/frontend/src/App.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ export default function App() {
const [noteDrafts, setNoteDrafts] = useState<Record<string, string>>({});
const [savingKey, setSavingKey] = useState<string | null>(null);
const [scanState, setScanState] = useState<ScanState>("idle");
const [scanSeason, setScanSeason] = useState<string>("");
const [downloaderSelection, setDownloaderSelection] = useState<DownloaderSelection | null>(null);
const [scannerStatus, setScannerStatus] = useState<ScannerStatus | null>(null);
const sensorsSectionRef = useRef<HTMLElement | null>(null);
Expand All @@ -43,6 +44,7 @@ export default function App() {
if (seasonsList.length > 0 && !currentSeason) {
currentSeason = seasonsList[0].name;
setSelectedSeason(currentSeason);
setScanSeason(currentSeason);
}
}

Expand Down Expand Up @@ -114,7 +116,7 @@ export default function App() {
updated_at: new Date().toISOString()
}));
try {
await triggerScan();
await triggerScan(scanSeason || undefined);
setScanState("success");
if (typeof window !== "undefined") {
window.setTimeout(() => {
Expand Down Expand Up @@ -259,6 +261,18 @@ export default function App() {
)}

<div className="actions">
{seasons.length > 1 && (
<select
value={scanSeason}
onChange={(e) => setScanSeason(e.target.value)}
disabled={scanButtonDisabled}
style={{ padding: "0.5rem", borderRadius: "4px", border: "1px solid #ccc", fontSize: "0.9rem" }}
>
{seasons.map(s => (
<option key={s.name} value={s.name}>{s.name}</option>
))}
</select>
)}
<button className="button" onClick={handleScan} disabled={scanButtonDisabled}>
{scanButtonLabel}
</button>
Expand Down
5 changes: 3 additions & 2 deletions installer/data-downloader/frontend/src/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,9 @@ export function fetchScannerStatus(): Promise<ScannerStatus> {
return request("/api/scanner-status");
}

export function triggerScan(): Promise<{ status: string }> {
return request("/api/scan", { method: "POST" });
export function triggerScan(season?: string): Promise<{ status: string }> {
  // Optionally scope the scan to a single season via a query parameter.
  const suffix = season ? `?season=${encodeURIComponent(season)}` : "";
  return request(`/api/scan${suffix}`, { method: "POST" });
}

export function updateNote(key: string, note: string, season?: string): Promise<RunRecord> {
Expand Down
23 changes: 23 additions & 0 deletions installer/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -199,6 +199,7 @@ services:
- "8000:8000"
volumes:
- ./data-downloader/data:/app/data
- /var/run/docker.sock:/var/run/docker.sock
restart: unless-stopped
networks:
- datalink
Expand Down Expand Up @@ -230,6 +231,28 @@ services:
data-downloader-api:
condition: service_started

health-monitor:
build: ./health-monitor
container_name: health-monitor
restart: unless-stopped
environment:
HEALTH_MONITOR_INTERVAL_SECONDS: "${HEALTH_MONITOR_INTERVAL_SECONDS:-60}"
INFLUXDB_URL: "${INFLUXDB_URL:-http://influxdb3:8181}"
INFLUXDB_ADMIN_TOKEN: "${INFLUXDB_ADMIN_TOKEN:-apiv3_dev-influxdb-admin-token}"
INFLUXDB_HEALTH_DATABASE: "${INFLUXDB_HEALTH_DATABASE:-monitoring}"
HEALTH_MONITOR_INFLUXDB_CONTAINER: "${HEALTH_MONITOR_INFLUXDB_CONTAINER:-influxdb3}"
HEALTH_MONITOR_SCANNER_CONTAINER: "${HEALTH_MONITOR_SCANNER_CONTAINER:-data-downloader-scanner}"
HEALTH_MONITOR_SCANNER_API_URL: "${HEALTH_MONITOR_SCANNER_API_URL:-http://data-downloader-api:8000}"
volumes:
- /var/run/docker.sock:/var/run/docker.sock
networks:
- datalink
depends_on:
influxdb3:
condition: service_healthy
data-downloader-api:
condition: service_started

data-downloader-frontend:
build:
context: ./data-downloader
Expand Down
Loading
Loading