fixed POST request
roadcast/app.py
@@ -1,6 +1,8 @@
from flask import Flask, request, jsonify
from dotenv import load_dotenv
from openmeteo_client import compute_index
from train import compute_index
from models import load_model
from models import MLP

# Load environment variables from .env file
load_dotenv()
@@ -9,6 +11,7 @@ app = Flask(__name__)
import os
import threading
import json
import numpy as np # added

# ML imports are lazy to avoid heavy imports on simple runs

@@ -60,69 +63,30 @@ def health():
    except Exception as e:
        return jsonify({'ok': False, 'error': str(e)}), 500

if __name__ == '__main__':
    # eager load model/artifacts at startup (best-effort)
    try:
        from openmeteo_inference import init_inference
        init_inference()
    except Exception:
        pass
    app.run(debug=True)

@app.route('/predict', methods=['POST', 'GET'])
def predict_endpoint():
    """Predict route between two points given source and destination with lat and lon.

    Expectation:
    - POST with JSON: {"source": {"lat": .., "lon": ..}, "destination": {"lat": .., "lon": ..}}
    - GET returns usage instructions for quick browser testing.
    GET uses an example payload; POST accepts JSON with 'source' and 'destination'.
    Both methods run the same model/index logic and return the same response format.
    """
    example_payload = {
        "source": {"lat": 38.9, "lon": -77.0},
        "destination": {"lat": 38.95, "lon": -77.02}
    }
    info = "This endpoint expects a POST with JSON body."

    info = "This endpoint expects a POST with JSON body (GET returns example payload)."
    note = (
        "Use POST to receive a prediction. Example: curl -X POST -H 'Content-Type: application/json' "
        "-d '{\"source\": {\"lat\": 38.9, \"lon\": -77.0}, \"destination\": {\"lat\": 38.95, \"lon\": -77.02}}' "
        "http://127.0.0.1:5000/predict"
    )

    # unify request data: GET -> example, POST -> request.json
    if request.method == 'GET':
        # Return the same structure as POST but without prediction
        # response_payload = {
        # "index": None,
        # "prediction": {},
        # "called_with": "GET",
        # "diagnostics": {},
        # "example": example_payload,
        # "info": info,
        # "note": note
        # }
        data = example_payload
    else:
        data = request.json or {}

        # For GET request, compute the road risk index using the example coordinates
        src_lat = example_payload['source']['lat']
        src_lon = example_payload['source']['lon']
        dst_lat = example_payload['destination']['lat']
        dst_lon = example_payload['destination']['lon']

        # Use the compute_index function to get the road risk index
        index = compute_index(src_lat, src_lon)

        # Prepare the response payload
        response_payload = {
            "index": index, # The computed index here
            "prediction": {},
            "called_with": "GET",
            "diagnostics": {},
            "example": example_payload,
            "info": info,
            "note": note
        }
        return jsonify(response_payload), 200

    # POST request logic
    data = request.json or {}
    source = data.get('source')
    destination = data.get('destination')
    if not source or not destination:
@@ -136,89 +100,81 @@ def predict_endpoint():
    except (TypeError, ValueError):
        return jsonify({"error": "invalid lat or lon values; must be numbers"}), 400

    # Ensure compute_reroute exists and is callable
    # load model (loader infers architecture from checkpoint)
    try:
        from openmeteo_client import compute_reroute
        model = load_model('model.pth', MLP)
    except Exception as e:
        return jsonify({
            "error": "compute_reroute not found in openmeteo_client",
            "detail": str(e),
            "hint": "Provide openmeteo_client.compute_reroute "
                    "(Open-Meteo does not need an API key)"
        }), 500
        return jsonify({"error": "model load failed", "detail": str(e)}), 500

    if not callable(compute_reroute):
        return jsonify({"error": "openmeteo_client.compute_reroute is not callable"}), 500

    def _extract_index(res):
        if res is None:
            return None
        if isinstance(res, (int, float)):
            return int(res)
        if isinstance(res, dict):
            for k in ('index', 'idx', 'cluster', 'cluster_idx', 'label_index', 'label_idx'):
                if k in res:
                    try:
                        return int(res[k])
                    except Exception:
                        return res[k]
        return None

    # Call compute_reroute (Open-Meteo requires no API key)
    # infer expected input dim from model first linear weight
    try:
        result = compute_reroute(src_lat, src_lon, dst_lat, dst_lon)
        called_with = "positional"
        input_dim = None
        for v in model.state_dict().values():
            if getattr(v, "dim", None) and v.dim() == 2:
                input_dim = int(v.shape[1])
                break
        if input_dim is None:
            input_dim = 2
    except Exception:
        input_dim = 2

        diagnostics = {"type": type(result).__name__}
    # build feature vector of correct length and populate lat/lon using preprocess meta if available
    feature_vector = np.zeros(int(input_dim), dtype=float)
    meta_path = os.path.join(os.getcwd(), 'preprocess_meta.npz')
    if os.path.exists(meta_path):
        try:
            diagnostics["repr"] = repr(result)[:1000]
            meta = np.load(meta_path, allow_pickle=True)
            cols = [str(x) for x in meta['feature_columns'].tolist()]
            means = meta.get('means')
            if means is not None and len(means) == input_dim:
                feature_vector[:] = means
            col_lower = [c.lower() for c in cols]
            if 'lat' in col_lower:
                feature_vector[col_lower.index('lat')] = src_lat
            elif 'latitude' in col_lower:
                feature_vector[col_lower.index('latitude')] = src_lat
            else:
                feature_vector[0] = src_lat
            if 'lon' in col_lower:
                feature_vector[col_lower.index('lon')] = src_lon
            elif 'longitude' in col_lower:
                feature_vector[col_lower.index('longitude')] = src_lon
            else:
                if input_dim > 1:
                    feature_vector[1] = src_lon
        except Exception:
            diagnostics["repr"] = "<unrepr-able>"

        # Normalize return types
        if isinstance(result, (list, tuple)):
            idx = None
            for el in result:
                idx = _extract_index(el)
                if idx is not None:
                    break
            prediction = {"items": list(result)}
            index = idx
        elif isinstance(result, dict):
            index = _extract_index(result)
            prediction = result
        elif isinstance(result, (int, float, str)):
            index = _extract_index(result)
            prediction = {"value": result}
        else:
            index = None
            prediction = {"value": result}

        response_payload = {
            "index": index,
            "prediction": prediction,
            "called_with": called_with,
            "diagnostics": diagnostics,
            "example": example_payload,
            "info": info,
            "note": note
        }

        # Add warning if no routing/index info found
        expected_keys = ('route', 'path', 'distance', 'directions', 'index', 'idx', 'cluster')
        if (not isinstance(prediction, dict) or not any(k in prediction for k in expected_keys)) and index is None:
            response_payload["warning"] = (
                "No routing/index information returned from compute_reroute. "
                "See diagnostics for details."
            )

        return jsonify(response_payload), 200
            feature_vector[:] = 0.0
            feature_vector[0] = src_lat
            if input_dim > 1:
                feature_vector[1] = src_lon
    else:
        feature_vector[0] = src_lat
        if input_dim > 1:
            feature_vector[1] = src_lon

    # compute index using model
    try:
        index = compute_index(model, feature_vector)
    except Exception as e:
        return jsonify({
            "error": "Error processing the request",
            "detail": str(e)
        }), 500
        return jsonify({"error": "compute_index failed", "detail": str(e)}), 500

    except Exception as e:
        return jsonify({"error": "compute_reroute invocation failed", "detail": str(e)}), 500
    response_payload = {
        "index": index,
        "prediction": {},
        "called_with": request.method,
        "diagnostics": {"input_dim": int(input_dim)},
        "example": example_payload,
        "info": info,
        "note": note
    }

    return jsonify(response_payload), 200

if __name__ == '__main__':
    # eager load model/artifacts at startup (best-effort)
    try:
        from openmeteo_inference import init_inference
        init_inference()
    except Exception:
        pass
    app.run(debug=True)
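
For quick verification, here is a minimal client-side sketch of the POST usage described in the endpoint's docstring and `note` string. It assumes the app from roadcast/app.py is running locally on Flask's default port (5000) and that the third-party `requests` package is installed; the payload simply reuses the example coordinates from the diff.

# Sketch: exercise the /predict endpoint changed in this commit.
# Assumptions: the Flask app is running at http://127.0.0.1:5000 and
# `requests` is installed (pip install requests).
import requests

payload = {
    "source": {"lat": 38.9, "lon": -77.0},
    "destination": {"lat": 38.95, "lon": -77.02},
}

# POST mirrors the curl command embedded in the endpoint's `note` string.
resp = requests.post("http://127.0.0.1:5000/predict", json=payload, timeout=30)
resp.raise_for_status()
body = resp.json()
print("index:", body.get("index"))              # model-derived index
print("diagnostics:", body.get("diagnostics"))  # e.g. {"input_dim": ...}

# A plain GET to the same URL should return the example payload and usage
# info without a request body, per the unified GET/POST handling above.
print(requests.get("http://127.0.0.1:5000/predict", timeout=30).json())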