working check in

Raika Furude 2025-11-24 15:32:12 -05:00
parent 9a50ce6f2a
commit b28b0d2fac
11 changed files with 952 additions and 941 deletions

View File

@@ -1,14 +1,12 @@
 """
 VitalLink Database Layer
 SQLite persistence for patients, vitals, and audit trail
-Enables replay, analysis, and incident investigation
 """
 import sqlite3
 import json
 from datetime import datetime
 from typing import List, Dict, Optional
-from contextlib import asynccontextmanager
 import aiosqlite

 # ============================================================================
@@ -16,23 +14,21 @@ import aiosqlite
 # ============================================================================
 SCHEMA_SQL = """
--- Patients table
 CREATE TABLE IF NOT EXISTS patients (
     patient_id TEXT PRIMARY KEY,
     band_id TEXT NOT NULL,
     first_name TEXT NOT NULL,
     last_name TEXT NOT NULL,
     dob TEXT NOT NULL,
-    symptoms TEXT, -- JSON array
+    symptoms TEXT,
     severity TEXT,
-    check_in_time TIMESTAMP NOT NULL,
-    discharge_time TIMESTAMP,
+    check_in_time TEXT NOT NULL,
+    discharge_time TEXT,
     current_tier TEXT DEFAULT 'NORMAL',
-    is_active BOOLEAN DEFAULT 1,
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+    is_active INTEGER DEFAULT 1,
+    created_at TEXT DEFAULT CURRENT_TIMESTAMP
 );
--- Vitals readings table (time-series data)
 CREATE TABLE IF NOT EXISTS vitals_readings (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
     patient_id TEXT NOT NULL,
@@ -44,63 +40,33 @@ CREATE TABLE IF NOT EXISTS vitals_readings (
     temp_c REAL,
     activity REAL,
     tier TEXT,
-    flags TEXT, -- JSON array
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    flags TEXT,
+    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
     FOREIGN KEY (patient_id) REFERENCES patients(patient_id)
 );
--- Triage assessments (audit trail)
-CREATE TABLE IF NOT EXISTS triage_assessments (
-    id INTEGER PRIMARY KEY AUTOINCREMENT,
-    patient_id TEXT NOT NULL,
-    assessment_time TIMESTAMP NOT NULL,
-    triage_level INTEGER,
-    tier_name TEXT,
-    priority_score REAL,
-    reasoning TEXT,
-    abnormalities TEXT, -- JSON array
-    wait_time_minutes INTEGER,
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    FOREIGN KEY (patient_id) REFERENCES patients(patient_id)
-);
--- Tier changes (for incident investigation)
 CREATE TABLE IF NOT EXISTS tier_changes (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
     patient_id TEXT NOT NULL,
-    change_time TIMESTAMP NOT NULL,
+    change_time TEXT NOT NULL,
     old_tier TEXT,
     new_tier TEXT,
     trigger_reason TEXT,
-    vitals_snapshot TEXT, -- JSON
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    vitals_snapshot TEXT,
+    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
     FOREIGN KEY (patient_id) REFERENCES patients(patient_id)
 );
--- System events (audit log)
 CREATE TABLE IF NOT EXISTS system_events (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
-    event_time TIMESTAMP NOT NULL,
-    event_type TEXT NOT NULL, -- 'patient_checkin', 'discharge', 'tier_change', 'alert', etc.
+    event_time TEXT NOT NULL,
+    event_type TEXT NOT NULL,
     patient_id TEXT,
     band_id TEXT,
-    details TEXT, -- JSON
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+    details TEXT,
+    created_at TEXT DEFAULT CURRENT_TIMESTAMP
 );
--- Wristband assignments (inventory tracking)
-CREATE TABLE IF NOT EXISTS wristband_assignments (
-    id INTEGER PRIMARY KEY AUTOINCREMENT,
-    band_id TEXT NOT NULL,
-    patient_id TEXT,
-    assigned_at TIMESTAMP,
-    released_at TIMESTAMP,
-    packet_count INTEGER DEFAULT 0,
-    band_type TEXT, -- 'real' or 'simulated'
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-);
--- Indexes for performance
 CREATE INDEX IF NOT EXISTS idx_vitals_patient ON vitals_readings(patient_id, timestamp);
 CREATE INDEX IF NOT EXISTS idx_vitals_timestamp ON vitals_readings(timestamp);
 CREATE INDEX IF NOT EXISTS idx_patients_active ON patients(is_active);
@@ -131,84 +97,69 @@ class VitalLinkDatabase:
         if self.conn:
             await self.conn.close()

-    # ========================================================================
-    # PATIENT OPERATIONS
-    # ========================================================================
-
     async def save_patient(self, patient_data: Dict):
         """Save new patient to database"""
-        # Convert datetime to ISO string if needed
         check_in_time = patient_data["check_in_time"]
         if isinstance(check_in_time, datetime):
             check_in_time = check_in_time.isoformat()

-        await self.conn.execute(
-            """
-            INSERT INTO patients (
-                patient_id, band_id, first_name, last_name, dob,
-                symptoms, severity, check_in_time, current_tier
-            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
-            """,
-            (
-                patient_data["patient_id"],
-                patient_data["band_id"],
-                patient_data["first_name"],
-                patient_data["last_name"],
-                patient_data["dob"],
-                json.dumps(patient_data["symptoms"]),
-                patient_data["severity"],
-                check_in_time,  # Now a string
-                patient_data.get("current_tier", "NORMAL"),
-            ),
-        )
-        await self.conn.commit()
-
-        # Log event with serializable data
-        await self.log_event(
-            "patient_checkin",
-            patient_data["patient_id"],
-            patient_data["band_id"],
-            {
-                "first_name": patient_data["first_name"],
-                "last_name": patient_data["last_name"],
-                "symptoms": patient_data["symptoms"],
-                "severity": patient_data["severity"],
-            },
-        )
-
-    async def update_patient_tier(self, patient_id: str, new_tier: str):
-        """Update patient's current tier"""
-        await self.conn.execute(
-            """
-            UPDATE patients SET current_tier = ? WHERE patient_id = ?
-            """,
-            (new_tier, patient_id),
-        )
-        await self.conn.commit()
-
-    async def discharge_patient(self, patient_id: str):
-        """Mark patient as discharged"""
-        await self.conn.execute(
-            """
-            UPDATE patients
-            SET is_active = 0, discharge_time = ?
-            WHERE patient_id = ?
-            """,
-            (datetime.now(), patient_id),
-        )
-        await self.conn.commit()
-
-        await self.log_event(
-            "discharge",
-            patient_id,
-            None,
-            {"discharge_time": datetime.now().isoformat()},
-        )
-
-    # ========================================================================
-    # VITALS OPERATIONS
-    # ========================================================================
+        # Check if patient already exists
+        cursor = await self.conn.execute(
+            "SELECT patient_id FROM patients WHERE patient_id = ?",
+            (patient_data["patient_id"],),
+        )
+        existing = await cursor.fetchone()
+
+        if existing:
+            # Update instead of insert
+            await self.conn.execute(
+                """
+                UPDATE patients SET
+                    band_id = ?, current_tier = ?
+                WHERE patient_id = ?
+                """,
+                (
+                    patient_data["band_id"],
+                    patient_data.get("current_tier", "NORMAL"),
+                    patient_data["patient_id"],
+                ),
+            )
+        else:
+            # Insert new patient
+            await self.conn.execute(
+                """
+                INSERT INTO patients (
+                    patient_id, band_id, first_name, last_name, dob,
+                    symptoms, severity, check_in_time, current_tier
+                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+                """,
+                (
+                    patient_data["patient_id"],
+                    patient_data["band_id"],
+                    patient_data["first_name"],
+                    patient_data["last_name"],
+                    patient_data["dob"],
+                    json.dumps(patient_data["symptoms"]),
+                    patient_data["severity"],
+                    check_in_time,
+                    patient_data.get("current_tier", "NORMAL"),
+                ),
+            )
+        await self.conn.commit()
+
+        if not existing:
+            await self.log_event(
+                "patient_checkin",
+                patient_data["patient_id"],
+                patient_data["band_id"],
+                {
+                    "first_name": patient_data["first_name"],
+                    "last_name": patient_data["last_name"],
+                    "symptoms": patient_data["symptoms"],
+                    "severity": patient_data["severity"],
+                },
+            )

     async def save_vitals(self, vitals_data: Dict):
         """Save vital signs reading"""
@@ -264,10 +215,6 @@ async def save_patient(self, patient_data: Dict):
             for row in rows
         ]

-    # ========================================================================
-    # TIER CHANGE TRACKING
-    # ========================================================================
-
     async def log_tier_change(
         self, patient_id: str, old_tier: str, new_tier: str, reason: str, vitals: Dict
     ):
@@ -281,7 +228,7 @@ async def save_patient(self, patient_data: Dict):
             """,
             (
                 patient_id,
-                datetime.now(),
+                datetime.now().isoformat(),
                 old_tier,
                 new_tier,
                 reason,
@@ -298,7 +245,7 @@ async def save_patient(self, patient_data: Dict):
         )

     async def get_tier_history(self, patient_id: str) -> List[Dict]:
-        """Get tier change history for incident review"""
+        """Get tier change history"""
         cursor = await self.conn.execute(
             """
             SELECT change_time, old_tier, new_tier, trigger_reason, vitals_snapshot
@@ -322,329 +269,52 @@ async def save_patient(self, patient_data: Dict):
             for row in rows
         ]

-    # ========================================================================
-    # SYSTEM EVENTS (Audit Trail)
-    # ========================================================================
-
     async def log_event(
         self,
         event_type: str,
         patient_id: Optional[str],
         band_id: Optional[str],
         details: Dict,
     ):
         """Log system event for audit trail"""
-        # Ensure details is JSON serializable
         serializable_details = {}
         for key, value in details.items():
             if isinstance(value, datetime):
                 serializable_details[key] = value.isoformat()
             else:
                 serializable_details[key] = value

         await self.conn.execute(
             """
             INSERT INTO system_events (
                 event_time, event_type, patient_id, band_id, details
             ) VALUES (?, ?, ?, ?, ?)
             """,
             (
-                datetime.now().isoformat(),  # Convert to string
+                datetime.now().isoformat(),
                 event_type,
                 patient_id,
                 band_id,
                 json.dumps(serializable_details),
             ),
         )
         await self.conn.commit()

-    async def get_events(
-        self,
-        event_type: Optional[str] = None,
-        patient_id: Optional[str] = None,
-        hours: int = 24,
-    ) -> List[Dict]:
-        """Get system events for analysis"""
-        query = "SELECT event_time, event_type, patient_id, band_id, details FROM system_events WHERE 1=1"
-        params = []
-        if event_type:
-            query += " AND event_type = ?"
-            params.append(event_type)
-        if patient_id:
-            query += " AND patient_id = ?"
-            params.append(patient_id)
-        query += " AND event_time > datetime('now', '-' || ? || ' hours')"
-        params.append(hours)
-        query += " ORDER BY event_time DESC LIMIT 1000"
-        cursor = await self.conn.execute(query, params)
-        rows = await cursor.fetchall()
-        return [
-            {
-                "event_time": row[0],
-                "event_type": row[1],
-                "patient_id": row[2],
-                "band_id": row[3],
-                "details": json.loads(row[4]) if row[4] else {},
-            }
-            for row in rows
-        ]
-
-    # ========================================================================
-    # ANALYTICS & REPLAY
-    # ========================================================================
-
-    async def get_session_summary(
-        self, start_time: datetime, end_time: datetime
-    ) -> Dict:
-        """Get summary statistics for a session (for incident review)"""
-        cursor = await self.conn.execute(
-            """
-            SELECT
-                COUNT(DISTINCT patient_id) as total_patients,
-                COUNT(*) as total_vitals,
-                AVG(hr_bpm) as avg_hr,
-                AVG(spo2) as avg_spo2,
-                AVG(temp_c) as avg_temp
-            FROM vitals_readings
-            WHERE timestamp BETWEEN ? AND ?
-            """,
-            (start_time.timestamp(), end_time.timestamp()),
-        )
-
-        row = await cursor.fetchone()
-
-        return {
-            "total_patients": row[0],
-            "total_vitals_recorded": row[1],
-            "average_hr": round(row[2], 1) if row[2] else 0,
-            "average_spo2": round(row[3], 1) if row[3] else 0,
-            "average_temp": round(row[4], 2) if row[4] else 0,
-        }
-
-    async def export_patient_data(self, patient_id: str) -> Dict:
-        """Export complete patient data for incident investigation"""
-        # Get patient info
-        cursor = await self.conn.execute(
-            """
-            SELECT * FROM patients WHERE patient_id = ?
-            """,
-            (patient_id,),
-        )
-        patient_row = await cursor.fetchone()
-        if not patient_row:
-            return None
-
-        # Get all vitals
-        vitals = await self.get_patient_vitals_history(patient_id, limit=10000)
-        # Get tier changes
-        tier_changes = await self.get_tier_history(patient_id)
-        # Get related events
-        events = await self.get_events(patient_id=patient_id, hours=24)
-
-        return {
-            "patient_id": patient_id,
-            "name": f"{patient_row[2]} {patient_row[3]}",
-            "dob": patient_row[4],
-            "symptoms": json.loads(patient_row[5]) if patient_row[5] else [],
-            "severity": patient_row[6],
-            "check_in_time": patient_row[7],
-            "discharge_time": patient_row[8],
-            "total_vitals": len(vitals),
-            "vitals_timeline": vitals,
-            "tier_changes": tier_changes,
-            "events": events,
-            "export_time": datetime.now().isoformat(),
-        }
-
-
-# ============================================================================
-# REPLAY SYSTEM
-# ============================================================================
-
-
-class VitalsReplaySystem:
-    """Replay historical vitals data for analysis"""
-
-    def __init__(self, db: VitalLinkDatabase):
-        self.db = db
-
-    async def replay_patient_session(self, patient_id: str, speed: float = 1.0):
-        """
-        Replay a patient's entire session
-        speed: 1.0 = real-time, 10.0 = 10x faster, 0.1 = slow motion
-        """
-        vitals = await self.db.get_patient_vitals_history(patient_id, limit=10000)
-        vitals.reverse()  # Chronological order
-
-        if not vitals:
-            print(f"No data found for patient {patient_id}")
-            return
-
-        print(f"\n{'=' * 80}")
-        print(f"REPLAYING SESSION: {patient_id} ({len(vitals)} readings)")
-        print(f"Speed: {speed}x | Press Ctrl+C to stop")
-        print(f"{'=' * 80}\n")
-
-        start_time = vitals[0]["timestamp"]
-
-        for i, reading in enumerate(vitals):
-            # Calculate delay
-            if i > 0:
-                time_diff = reading["timestamp"] - vitals[i - 1]["timestamp"]
-                await asyncio.sleep(time_diff / speed)
-
-            # Display reading
-            elapsed = reading["timestamp"] - start_time
-            tier_symbol = (
-                "🔴"
-                if reading["tier"] == "EMERGENCY"
-                else "🟡"
-                if reading["tier"] == "ALERT"
-                else "🟢"
-            )
-            print(
-                f"[{elapsed:7.1f}s] {tier_symbol} Seq={reading['seq']:3d} | "
-                f"HR={reading['hr_bpm']:3d} SpO2={reading['spo2']:2d}% "
-                f"Temp={reading['temp_c']:.1f}°C | {reading['tier']}"
-            )
-
-        print(f"\n{'=' * 80}")
-        print(f"Replay complete: {len(vitals)} readings")
-        print(f"{'=' * 80}\n")
-
-    async def analyze_critical_events(self, patient_id: str):
-        """Analyze critical tier changes and deterioration events"""
-        tier_changes = await self.db.get_tier_history(patient_id)
-
-        print(f"\n{'=' * 80}")
-        print(f"CRITICAL EVENT ANALYSIS: {patient_id}")
-        print(f"{'=' * 80}\n")
-
-        for change in tier_changes:
-            print(f"[{change['change_time']}]")
-            print(f"  {change['old_tier']} → {change['new_tier']}")
-            print(f"  Reason: {change['reason']}")
-            print(f"  Vitals: {change['vitals']}")
-            print()
-
-        print(f"{'=' * 80}\n")
-
-
-# ============================================================================
-# INTEGRATION HELPERS
-# ============================================================================
-
-
-async def init_database(db_path: str = "vitallink.db") -> VitalLinkDatabase:
-    """Initialize database for use in FastAPI"""
-    db = VitalLinkDatabase(db_path)
-    await db.initialize()
-    return db
-
-
-# ============================================================================
-# CLI TOOLS
-# ============================================================================
-
-
-async def cli_export_patient(patient_id: str, output_file: str = None):
-    """Export patient data to JSON file"""
-    db = VitalLinkDatabase()
-    await db.initialize()
-
-    data = await db.export_patient_data(patient_id)
-
-    if not data:
-        print(f"Patient {patient_id} not found")
-        return
-
-    if output_file:
-        with open(output_file, "w") as f:
-            json.dump(data, f, indent=2)
-        print(f"✓ Exported to {output_file}")
-    else:
-        print(json.dumps(data, indent=2))
-
-    await db.close()
-
-
-async def cli_replay_session(patient_id: str, speed: float = 1.0):
-    """Replay a patient session"""
-    db = VitalLinkDatabase()
-    await db.initialize()
-
-    replay = VitalsReplaySystem(db)
-    await replay.replay_patient_session(patient_id, speed)
-
-    await db.close()
-
-
-async def cli_analyze_incident(patient_id: str):
-    """Analyze critical events for a patient"""
-    db = VitalLinkDatabase()
-    await db.initialize()
-
-    replay = VitalsReplaySystem(db)
-    await replay.analyze_critical_events(patient_id)
-
-    # Also show vital trends
-    vitals = await db.get_patient_vitals_history(patient_id, limit=1000)
-    if vitals:
-        print("VITAL SIGN TRENDS:")
-        print(
-            f"  HR range: {min(v['hr_bpm'] for v in vitals)} - {max(v['hr_bpm'] for v in vitals)} bpm"
-        )
-        print(
-            f"  SpO2 range: {min(v['spo2'] for v in vitals)} - {max(v['spo2'] for v in vitals)}%"
-        )
-        print(
-            f"  Temp range: {min(v['temp_c'] for v in vitals):.1f} - {max(v['temp_c'] for v in vitals):.1f}°C"
-        )
-        print()
-
-    await db.close()
-
-
-if __name__ == "__main__":
-    import argparse
-    import asyncio
-
-    parser = argparse.ArgumentParser(description="VitalLink Database Tools")
-    parser.add_argument("--export", metavar="PATIENT_ID", help="Export patient data")
-    parser.add_argument("--replay", metavar="PATIENT_ID", help="Replay patient session")
-    parser.add_argument(
-        "--analyze", metavar="PATIENT_ID", help="Analyze critical events"
-    )
-    parser.add_argument(
-        "--speed", type=float, default=1.0, help="Replay speed multiplier"
-    )
-    parser.add_argument("--output", "-o", help="Output file for export")
-
-    args = parser.parse_args()
-
-    if args.export:
-        asyncio.run(cli_export_patient(args.export, args.output))
-    elif args.replay:
-        asyncio.run(cli_replay_session(args.replay, args.speed))
-    elif args.analyze:
-        asyncio.run(cli_analyze_incident(args.analyze))
-    else:
-        parser.print_help()
+    async def discharge_patient(self, patient_id: str):
+        """Mark patient as discharged"""
+        await self.conn.execute(
+            """
+            UPDATE patients
+            SET is_active = 0, discharge_time = ?
+            WHERE patient_id = ?
+            """,
+            (datetime.now().isoformat(), patient_id),
+        )
+        await self.conn.commit()
+
+        await self.log_event(
+            "discharge",
+            patient_id,
+            None,
+            {"discharge_time": datetime.now().isoformat()},
+        )

View File

@@ -1,4 +1,4 @@
-68221
-68237
-68245
-68281
+70573
+70588
+70601
+70667

File diff suppressed because it is too large

View File

@@ -1 +1 @@
-68221
+70573

View File

@@ -3,7 +3,7 @@
 > vite
-VITE v7.1.10 ready in 219 ms
+VITE v7.1.10 ready in 246 ms
 ➜ Local: http://localhost:5173/
 ➜ Network: use --host to expose

View File

@@ -1 +1 @@
-68245
+70601

View File

@@ -4,7 +4,7 @@
 Port 5173 is in use, trying another one...
-VITE v7.1.10 ready in 250 ms
+VITE v7.1.10 ready in 228 ms
 ➜ Local: http://localhost:5174/
 ➜ Network: use --host to expose

View File

@@ -1 +1 @@
-68281
+70667

View File

@@ -1 +1 @@
-68237
+70588

Binary file not shown.