Compare commits

..

5 Commits

Author SHA1 Message Date
c23d99726d Spacing 2025-07-31 19:16:08 -06:00
a83f0d0937 fix: append 'unknown' to history on timeout or client error
Co-authored-by: aider (gemini/gemini-2.5-pro-preview-05-06) <aider@aider.chat>
2025-07-31 19:14:38 -06:00
38ab26a659 feat: add GET /state handler to return determined state
Co-authored-by: aider (gemini/gemini-2.5-pro-preview-05-06) <aider@aider.chat>
2025-07-31 19:11:06 -06:00
d923a4ac61 feat: track history of last 3 predictions
Co-authored-by: aider (gemini/gemini-2.5-pro-preview-05-06) <aider@aider.chat>
2025-07-31 19:08:54 -06:00
dd53b40909 feat: log and save low-confidence predictions
Co-authored-by: aider (gemini/gemini-2.5-pro-preview-05-06) <aider@aider.chat>
2025-07-31 19:02:02 -06:00

View File

@@ -4,6 +4,7 @@ from aiohttp import web
import logging
import os
import io
from datetime import datetime
import torch
import torch.nn.functional as F
@@ -25,6 +26,9 @@ MODEL_PATH = 'garage_door_cnn.pth'
CLASS_NAMES = ['closed', 'open'] # From training, sorted alphabetically
POLL_INTERVAL_SECONDS = 10
REQUEST_TIMEOUT_SECONDS = 5
UNSURE_CONFIDENCE_THRESHOLD = 0.97
PREDICTION_HISTORY = []
PREDICTION_HISTORY_MAX_LENGTH = 3
# --- Model Inference ---
def get_prediction(model, image_bytes, device):
@@ -71,13 +75,46 @@ async def monitor_garage_door(app):
if result:
prediction, confidence = result
logging.debug(f"Garage door status: {prediction} (confidence: {confidence:.4f})")
# Update prediction history
if confidence >= UNSURE_CONFIDENCE_THRESHOLD:
PREDICTION_HISTORY.append(prediction)
else:
PREDICTION_HISTORY.append('unknown')
# Trim history if it's too long
if len(PREDICTION_HISTORY) > PREDICTION_HISTORY_MAX_LENGTH:
PREDICTION_HISTORY.pop(0)
if confidence < UNSURE_CONFIDENCE_THRESHOLD:
# Sanitize timestamp for use in filename
timestamp = datetime.now().isoformat().replace(':', '-')
filename = f"{timestamp}.jpg"
# Construct path and save file
unsure_dir = os.path.join('data', 'unsure', prediction)
os.makedirs(unsure_dir, exist_ok=True)
filepath = os.path.join(unsure_dir, filename)
with open(filepath, 'wb') as f:
f.write(image_bytes)
logging.info(f"Low confidence prediction: {prediction} ({confidence:.4f}). Saved for review: {filepath}")
else:
logging.error(f"Failed to fetch image. Status: {response.status}, Reason: {response.reason}")
except asyncio.TimeoutError:
logging.warning("Request to camera timed out.")
PREDICTION_HISTORY.append('unknown')
if len(PREDICTION_HISTORY) > PREDICTION_HISTORY_MAX_LENGTH:
PREDICTION_HISTORY.pop(0)
except aiohttp.ClientError as e:
logging.error(f"Client error during image fetch: {e}")
PREDICTION_HISTORY.append('unknown')
if len(PREDICTION_HISTORY) > PREDICTION_HISTORY_MAX_LENGTH:
PREDICTION_HISTORY.pop(0)
except asyncio.CancelledError:
logging.info("Monitoring task cancelled.")
break
@@ -92,6 +129,18 @@ async def handle_root(request):
"""Handler for the root GET request."""
return web.Response(text="hello world")
async def handle_state(request):
    """Return the consensus garage-door state as plain text.

    The state is 'open' or 'closed' only when the prediction history is
    full and unanimous; any shorter or mixed history yields 'unknown'.
    """
    history = PREDICTION_HISTORY
    state = "unknown"
    if len(history) == PREDICTION_HISTORY_MAX_LENGTH:
        # A full history collapses to a single-element set iff unanimous.
        distinct = set(history)
        if distinct == {"open"}:
            state = "open"
        elif distinct == {"closed"}:
            state = "closed"
    return web.Response(text=state)
async def on_startup(app):
"""Actions to perform on application startup."""
# Set up device
@@ -126,6 +175,7 @@ async def on_cleanup(app):
def main():
    """Assemble the aiohttp application and serve it on port 8081."""
    application = web.Application()
    # Register GET routes for the landing page and the state endpoint.
    for path, handler in (("/", handle_root), ("/state", handle_state)):
        application.router.add_get(path, handler)
    # Lifecycle hooks: model/monitor setup and teardown live elsewhere.
    application.on_startup.append(on_startup)
    application.on_cleanup.append(on_cleanup)
    web.run_app(application, port=8081)