From 295be1ed8ef2377894612bc142ef08ccdadcb715 Mon Sep 17 00:00:00 2001 From: default Date: Sun, 25 Jan 2026 17:36:15 +0000 Subject: [PATCH] Docker Update and Fixes --- .env.example | 18 + backend/requirements.txt | 1 + backend/src/rag/gemeni.py | 4 +- backend/src/routes/gemini.py | 46 ++- backend/src/routes/incidents.py | 329 +++++++++------- docker-compose.yml | 37 +- frontend/Dockerfile | 41 ++ frontend/server/index.js | 68 +++- .../src/lib/components/CameraScreen.svelte | 163 ++++---- .../src/lib/components/MobileHomePage.svelte | 352 +++++++++++++++--- .../lib/components/ParallaxLandscape.svelte | 4 +- .../src/lib/components/WebHomePage.svelte | 2 +- .../components/catalogue/CompanyModal.svelte | 4 +- frontend/src/lib/ts/api.ts | 52 +++ frontend/src/lib/ts/parallax/index.ts | 16 +- .../lib/ts/parallax/scenes/pollutedCity.ts | 6 +- frontend/src/routes/+layout.svelte | 9 +- frontend/src/routes/catalogue/+page.svelte | 6 +- frontend/src/routes/chat/+page.svelte | 2 +- frontend/src/routes/report/+page.svelte | 4 +- frontend/vite.config.js | 11 + 21 files changed, 886 insertions(+), 289 deletions(-) create mode 100644 .env.example create mode 100644 frontend/Dockerfile create mode 100644 frontend/src/lib/ts/api.ts diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..e2a1f37 --- /dev/null +++ b/.env.example @@ -0,0 +1,18 @@ +# Ethix Environment Configuration +# Copy this file to .env and fill in your values + +# Backend Configuration +SECRET_KEY=your_secret_key_here + +# Google Gemini API +GOOGLE_API_KEY=your_google_api_key + +# MongoDB Connection +MONGO_URI=mongodb+srv://user:password@cluster.mongodb.net/ethix + +# Ollama Configuration (optional - has default) +OLLAMA_HOST=https://ollama.website.co + +# Use MongoDB Atlas for vector storage (optional) +# Set to "true" to use Atlas instead of ChromaDB +ATLAS_VECTORS=false diff --git a/backend/requirements.txt b/backend/requirements.txt index 2ece870..b4dc8e3 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -12,3 +12,4 @@ flask-cors ollama chromadb-client pymongo +google-genai \ No newline at end of file diff --git a/backend/src/rag/gemeni.py b/backend/src/rag/gemeni.py index b0870ae..6fd7fc4 100644 --- a/backend/src/rag/gemeni.py +++ b/backend/src/rag/gemeni.py @@ -18,7 +18,7 @@ class GeminiClient : def ask (self ,prompt ,context =""): try : if context : - full_message =f"Use this information to answer: {context }\n\nQuestion: {prompt }" + full_message =f"Background Context:\n{context }\n\nUser Question: {prompt }" else : full_message =prompt @@ -26,7 +26,7 @@ class GeminiClient : model =self .model_name , contents =full_message , config ={ - 'system_instruction':'You are a concise sustainability assistant. Your responses must be a single short paragraph, maximum 6 sentences long. Do not use bullet points or multiple sections.' + 'system_instruction':'You are Ethix, an expert sustainability assistant. You have access to a database including Georgetown University sustainability reports. SEARCH THE PROVIDED CONTEXT CAREFULLY. If the context contains ANY information about Georgetown University or the user\'s query, matches, or partial matches, YOU MUST USE IT to answer. Ignore irrelevant parts of the context. If no context matches, provide general expert advice. Keep responses concise (max 6 sentences).' 
 }
 )
 return response .text
diff --git a/backend/src/routes/gemini.py b/backend/src/routes/gemini.py
index a1eddf9..25480a9 100644
--- a/backend/src/routes/gemini.py
+++ b/backend/src/routes/gemini.py
@@ -27,17 +27,53 @@ def ask():
     print(f"Generating embedding for prompt: {prompt}")
     query_embedding = get_embedding(prompt)
-    print("Searching ChromaDB for context...")
-    search_results = search_documents(query_embedding, num_results=15)
+    print("Searching Vector Database for context...")
+    search_results = search_documents(query_embedding, num_results=50)
+
+    # Special handling for Georgetown University queries to ensure those docs are included
+    # even if generic corporate reports outrank them in vector search.
+    if "georgetown" in prompt.lower():
+        try:
+            from src.mongo.connection import get_mongo_client
+            client = get_mongo_client()
+            # Use hardcoded DB name to match vector_store.py
+            db = client["ethix_vectors"]
+            collection = db["rag_documents"]
+
+            # Fetch docs with Georgetown or MAP_INFO in the filename/source
+            gt_docs = list(collection.find({"source": {"$regex": "Georgetown|MAP_INFO", "$options": "i"}}).limit(30))
+
+            if gt_docs:
+                print(f"Direct Match: Found {len(gt_docs)} Georgetown specific documents.")
+                for doc in gt_docs:
+                    # Normalize to match search_results format
+                    search_results.append({
+                        "text": doc.get("text", ""),
+                        "metadata": doc.get("metadata", {"source": doc.get("source", "Georgetown File")}),
+                        "score": 1.0  # High priority
+                    })
+        except Exception as e:
+            print(f"Error checking Georgetown docs: {e}")
+
     retrieved_context = ""
     if search_results:
-        print(f"Found {len(search_results)} documents.")
-        retrieved_context = "RELEVANT INFORMATION FROM DATABASE:\n"
+        print(f"Found {len(search_results)} documents (total).")
+        print("Sources found:")
+        seen_sources = set()
+
         for res in search_results:
             # Include metadata if useful, e.g. brand name or date
             meta = res.get('metadata', {})
-            source_info = f"[Source: {meta.get('type', 'doc')} - {meta.get('product_name', 'Unknown')}]"
+            source = meta.get('source', 'unknown')
+
+            # Possible improvement: deduplicate chunks whose text overlaps exactly.
+            # For now just append; the LLM can tolerate duplication.
+
+            if source not in seen_sources:
+                print(f"  - {source}")
+                seen_sources.add(source)
+
+            source_info = f"[Source: {meta.get('type', 'doc')} - {source}]"
             retrieved_context += f"{source_info}\n{res['text']}\n\n"
     else:
         print("No relevant documents found.")
diff --git a/backend/src/routes/incidents.py b/backend/src/routes/incidents.py
index 191840b..6cb6ce9 100644
--- a/backend/src/routes/incidents.py
+++ b/backend/src/routes/incidents.py
@@ -86,6 +86,7 @@ class GreenwashingAnalysis (BaseModel ):
 recommendations :str =Field (description ="What consumers should know about this case")
 key_claims :List [str ]=Field (description ="List of specific environmental claims made by the company")
 red_flags :List [str ]=Field (description ="List of red flags or concerning practices identified")
+alternatives :List [str ]=Field (description ="List of sustainable alternatives or better choices")
 
 class LogoDetection (BaseModel ):
@@ -124,8 +125,11 @@ Based on this information, determine if this is a valid case of greenwashing. Co
 2. Are their eco-friendly claims vague or unsubstantiated?
 3. Is there a disconnect between their marketing and actual practices?
 4. Are they using green imagery or terms without substance?
-
-Provide your analysis in the structured format requested."""
+5. Suggest better, clearer, or more sustainable alternatives if applicable.
+
+If the provided context includes university-specific information (e.g., Georgetown University), incorporate it into your recommendations where relevant (e.g., disposal instructions, local alternatives).
+
+Provide your analysis in the structured format requested."""
 
 def analyze_with_gemini (product_name :str ,user_description :str ,detected_brand :str ,
@@ -147,7 +151,7 @@ image_description :str ,context :str )->GreenwashingAnalysis :
 response =client .models .generate_content (
-model ="gemini-3-pro-preview",
+model ="gemini-2.0-flash-exp",
 contents =prompt ,
 config ={
 "response_mime_type":"application/json",
@@ -311,169 +315,240 @@ This incident has been documented for future reference and to help inform sustai
-@incidents_bp .route ('/submit',methods =['POST'])
-def submit_incident ():
+@incidents_bp.route('/submit', methods=['POST'])
+def submit_incident():
     """
     Submit a greenwashing incident report
 
     Expects JSON with:
     - product_name: Name of the product/company
-    - description: User's description of the misleading claim
+    - description: User's description
     - report_type: 'product' or 'company'
-    - image: Base64 encoded image (for product reports)
-    - pdf_data: Base64 encoded PDF (for company reports)
+    - image: Base64 encoded image
+    - user_id: ID of the user submitting
+    - is_public: Boolean, whether to make it public
     """
-    data =request .json
+    data = request.json
+    if not data:
+        return jsonify({"error": "No data provided"}), 400
 
-    if not data :
-        return jsonify ({"error":"No data provided"}),400
+    product_name = data.get('product_name', '').strip()
+    user_description = data.get('description', '').strip()
+    report_type = data.get('report_type', 'product')
+    image_base64 = data.get('image')
+    user_id = data.get('user_id', 'anonymous')
+    is_public = data.get('is_public', False)
 
-    product_name =data .get ('product_name','').strip ()
-    user_description =data .get ('description','').strip ()
-    report_type =data .get ('report_type','product')
-    image_base64 =data .get ('image')
+    if not product_name:
+        return jsonify({"error": "Product name is required"}), 400
+
+    # Description isn't strictly required if the image provides context, so default it rather than reject.
+    if not user_description:
+        user_description = "No description provided."
- if not product_name : - return jsonify ({"error":"Product name is required"}),400 + try: + detected_brand = "Unknown" + image_description = "No image provided" + environmental_claims = [] + compressed_image_base64 = None - if not user_description : - return jsonify ({"error":"Description is required"}),400 + if report_type == 'product' and image_base64: + try: + if ',' in image_base64: + image_base64 = image_base64.split(',')[1] - try : + image_bytes = base64.b64decode(image_base64) + + # Compress for storage + compressed_image_base64 = compress_image(image_bytes, max_width=600, quality=75) - detected_brand ="Unknown" - image_description ="No image provided" - environmental_claims =[] - compressed_image_base64 =None + # Analyze image with Ollama + image_analysis = analyze_image_with_ollama(image_bytes) + + if image_analysis.logos_detected: + detected_brand = image_analysis.logos_detected[0].brand + + image_description = image_analysis.description + environmental_claims = image_analysis.environmental_claims - if report_type =='product'and image_base64 : - try : + except Exception as e: + print(f"Image processing error: {e}") - if ','in image_base64 : - image_base64 =image_base64 .split (',')[1 ] + # RAG Search context + search_query = f"{product_name} {detected_brand} environmental claims sustainability greenwashing" + query_embedding = get_embedding(search_query) + search_results = search_documents(query_embedding, num_results=20) - image_bytes =base64 .b64decode (image_base64 ) + context = "" + for res in search_results: + context += f"--- Document ---\n{res['text'][:500]}\n\n" + if not context: + context = "No prior information found about this company in our database." - print ("Compressing image with OpenCV...") - compressed_image_base64 =compress_image (image_bytes ,max_width =600 ,quality =75 ) + if environmental_claims: + context += "\n--- Claims visible in submitted image ---\n" + context += "\n".join(f"- {claim}" for claim in environmental_claims) - - image_analysis =analyze_image_with_ollama (image_bytes ) - - if image_analysis .logos_detected : - detected_brand =image_analysis .logos_detected [0 ].brand - - image_description =image_analysis .description - environmental_claims =image_analysis .environmental_claims - - except Exception as e : - print (f"Image processing error: {e }") - - - - - search_query =f"{product_name } {detected_brand } environmental claims sustainability greenwashing" - query_embedding =get_embedding (search_query ) - search_results =search_documents (query_embedding ,num_results =5 ) - - context ="" - for res in search_results : - context +=f"--- Document ---\n{res ['text'][:500 ]}\n\n" - - if not context : - context ="No prior information found about this company in our database." 
- - - if environmental_claims : - context +="\n--- Claims visible in submitted image ---\n" - context +="\n".join (f"- {claim }"for claim in environmental_claims ) - - - analysis =analyze_with_gemini ( - product_name =product_name , - user_description =user_description , - detected_brand =detected_brand , - image_description =image_description , - context =context + # Main Analysis + analysis = analyze_with_gemini( + product_name=product_name, + user_description=user_description, + detected_brand=detected_brand, + image_description=image_description, + context=context ) + analysis_dict = analysis.model_dump() - analysis_dict =analysis .model_dump () - - - incident_data ={ - "product_name":product_name , - "user_description":user_description , - "detected_brand":detected_brand , - "image_description":image_description , - "environmental_claims":environmental_claims , - "analysis":analysis_dict , - "is_greenwashing":analysis .is_greenwashing , - "created_at":datetime .utcnow ().isoformat (), - "status":"confirmed"if analysis .is_greenwashing else "dismissed", - "report_type":report_type + incident_data = { + "product_name": product_name, + "user_description": user_description, + "detected_brand": detected_brand, + "image_description": image_description, + "environmental_claims": environmental_claims, + "analysis": analysis_dict, + "is_greenwashing": analysis.is_greenwashing, + "created_at": datetime.utcnow().isoformat(), + "status": "confirmed" if analysis.is_greenwashing else "dismissed", + "report_type": report_type, + "user_id": user_id, + "is_public": is_public } + if compressed_image_base64: + incident_data["image_base64"] = compressed_image_base64 - if compressed_image_base64 : - incident_data ["image_base64"]=compressed_image_base64 + # Save to MongoDB (All scans) + incident_id = save_to_mongodb(incident_data) - incident_id =None + # Save to Vector Store ONLY if Greenwashing AND Public + if analysis.is_greenwashing and is_public: + save_to_chromadb(incident_data, incident_id) - - if analysis .is_greenwashing : - - incident_id =save_to_mongodb (incident_data ) - - - save_to_chromadb (incident_data ,incident_id ) - - return jsonify ({ - "status":"success", - "is_greenwashing":analysis .is_greenwashing , - "incident_id":incident_id , - "analysis":analysis_dict , - "detected_brand":detected_brand , - "environmental_claims":environmental_claims + return jsonify({ + "status": "success", + "is_greenwashing": analysis.is_greenwashing, + "incident_id": incident_id, + "analysis": analysis_dict, + "detected_brand": detected_brand, + "environmental_claims": environmental_claims }) - except Exception as e : - import traceback - traceback .print_exc () - return jsonify ({ - "status":"error", - "message":str (e ) - }),500 + except Exception as e: + import traceback + traceback.print_exc() + return jsonify({ + "status": "error", + "message": str(e) + }), 500 -@incidents_bp .route ('/list',methods =['GET']) -def list_incidents (): - """Get all confirmed greenwashing incidents""" - try : - client =get_mongo_client () - db =client ["ethix"] - collection =db ["incidents"] +@incidents_bp.route('/list', methods=['GET']) +def list_incidents(): + """Get all PUBLIC confirmed greenwashing incidents""" + try: + client = get_mongo_client() + db = client["ethix"] + collection = db["incidents"] + + # Filter: Public AND Greenwashing + query = { + "is_greenwashing": True, + "is_public": True + # Note: Legacy records without 'is_public' might be hidden. 
+        # For migration, we might want to treat missing as True or update DB.
+        # Assuming strictly filtered for now.
+        }
+
+        incidents = list(collection.find(
+            query,
+            {"_id": 1, "product_name": 1, "detected_brand": 1,
+             "user_description": 1, "analysis": 1, "created_at": 1,
+             "image_base64": 1, "report_type": 1}
+        ).sort("created_at", -1).limit(50))
+
+        for inc in incidents:
+            inc["_id"] = str(inc["_id"])
+
+        return jsonify(incidents)
+
+    except Exception as e:
+        return jsonify({"error": str(e)}), 500
 
-        incidents =list (collection .find (
-        {"is_greenwashing":True },
-        {"_id":1 ,"product_name":1 ,"detected_brand":1 ,
-        "user_description":1 ,"analysis":1 ,"created_at":1 ,
-        "image_base64":1 ,"report_type":1 }
-        ).sort ("created_at",-1 ).limit (50 ))
+@incidents_bp.route('/history', methods=['GET'])
+def get_user_history():
+    """Get scan history for a specific user"""
+    user_id = request.args.get('user_id')
+    if not user_id:
+        return jsonify({"error": "user_id required"}), 400
+
+    try:
+        client = get_mongo_client()
+        db = client["ethix"]
+        collection = db["incidents"]
+
+        query = {"user_id": user_id}
+
+        incidents = list(collection.find(
+            query,
+            {"_id": 1, "product_name": 1, "detected_brand": 1,
+             "analysis": 1, "created_at": 1, "image_base64": 1, "is_public": 1}
+        ).sort("created_at", -1).limit(50))
+
+        for inc in incidents:
+            inc["_id"] = str(inc["_id"])
+
+        return jsonify(incidents)
+
+    except Exception as e:
+        return jsonify({"error": str(e)}), 500
 
-        for inc in incidents :
-        inc ["_id"]=str (inc ["_id"])
+@incidents_bp.route('/<incident_id>/visibility', methods=['PUT'])
+def update_visibility(incident_id):
+    """Update incident visibility (public/private)"""
+    try:
+        from bson import ObjectId
+        data = request.json
+        is_public = data.get('is_public')
+
+        if is_public is None:
+            return jsonify({"error": "is_public required"}), 400
 
-        return jsonify (incidents )
+        client = get_mongo_client()
+        db = client["ethix"]
+        collection = db["incidents"]
 
-    except Exception as e :
-        return jsonify ({"error":str (e )}),500
+        # 1. Update MongoDB
+        result = collection.update_one(
+            {"_id": ObjectId(incident_id)},
+            {"$set": {"is_public": is_public}}
+        )
+        if result.matched_count == 0:
+            return jsonify({"error": "Incident not found"}), 404
+
+        # 2. Sync with ChromaDB (Vector Store)
+        # We need the incident data to insert/delete
+        incident = collection.find_one({"_id": ObjectId(incident_id)})
+
+        if is_public and incident.get("is_greenwashing", False):
+            # If public and greenwashing -> Add to Chroma
+            save_to_chromadb(incident, incident_id)
+        else:
+            # If private OR not greenwashing -> Remove from Chroma
+            # We need delete functionality. delete_documents_by_source uses 'source' metadata.
+            from src.chroma.vector_store import delete_documents_by_source
+            delete_documents_by_source(f"incident_{incident_id}")
 
-@incidents_bp .route ('/<incident_id>',methods =['GET'])
+        return jsonify({"status": "success", "is_public": is_public})
+
+    except Exception as e:
+        import traceback
+        traceback.print_exc()
+        return jsonify({"error": str(e)}), 500
+
+@incidents_bp.route('/<incident_id>', methods=['GET'])
 def get_incident (incident_id ):
     """Get a specific incident by ID"""
     try :
diff --git a/docker-compose.yml b/docker-compose.yml
index 4b371c7..5080074 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,17 +1,38 @@
 version: '3.8'
 
 services:
-  # The Flask Backend
+  # The Flask Backend API
   api:
-    build: ./backend # Path to your Dockerfile
+    build: ./backend
     container_name: flask_api
     restart: always
     ports:
-      - "5000:5000" # Maps VM Port 5000 -> Container Port 5000
+      - "5000:5000"
     environment:
-      - SECRET_KEY=your_secret_key_here
-      # Add database URLs here later
+      - SECRET_KEY=${SECRET_KEY:-your_secret_key_here}
+      - GOOGLE_API_KEY=${GOOGLE_API_KEY}
+      - MONGO_URI=${MONGO_URI}
+      - OLLAMA_HOST=${OLLAMA_HOST:-https://ollama.sirblob.co}
+      - ATLAS_VECTORS=${ATLAS_VECTORS:-false}
+    networks:
+      - ethix-network
 
-  # (Optional) Add a database or cache here easily later
-  # redis:
-  #   image: redis:alpine
+  # The Frontend Server with API Proxy
+  frontend:
+    build: ./frontend
+    container_name: svelte_frontend
+    restart: always
+    ports:
+      - "3000:3000"
+    environment:
+      - NODE_ENV=production
+      - PORT=3000
+      - API_URL=http://api:5000
+    depends_on:
+      - api
+    networks:
+      - ethix-network
+
+networks:
+  ethix-network:
+    driver: bridge
diff --git a/frontend/Dockerfile b/frontend/Dockerfile
new file mode 100644
index 0000000..bccc3c1
--- /dev/null
+++ b/frontend/Dockerfile
@@ -0,0 +1,41 @@
+# Build stage
+FROM node:20-alpine AS builder
+
+WORKDIR /app
+
+# Copy package files
+COPY package.json ./
+
+# Install dependencies
+RUN npm install
+
+# Copy source code
+COPY . .
+ +# Build the SvelteKit app +RUN npm run build + +# Production stage +FROM node:20-alpine AS production + +WORKDIR /app + +# Copy package.json for production dependencies +COPY package.json ./ + +# Install only production dependencies +RUN npm install --omit=dev + +# Copy built files from builder +COPY --from=builder /app/build ./build +COPY --from=builder /app/server ./server + +# Expose port +EXPOSE 3000 + +# Set environment variables +ENV NODE_ENV=production +ENV PORT=3000 + +# Start the server +CMD ["node", "server/index.js"] diff --git a/frontend/server/index.js b/frontend/server/index.js index 22b0d12..f7700b1 100644 --- a/frontend/server/index.js +++ b/frontend/server/index.js @@ -9,18 +9,80 @@ const __dirname = path.dirname(__filename); const app = express(); const PORT = process.env.PORT || 3000; +// Backend API URL - use Docker service name or environment variable +const API_URL = process.env.API_URL || 'http://api:5000'; +// Enable compression app.use(compression()); +// Parse JSON bodies for proxied requests +app.use(express.json({ limit: '50mb' })); +app.use(express.urlencoded({ extended: true, limit: '50mb' })); +/** + * API Proxy middleware + * Forwards all /api/* requests to the backend service + */ +app.use('/api', async (req, res) => { + const targetUrl = `${API_URL}${req.originalUrl}`; + + try { + const fetchOptions = { + method: req.method, + headers: { + 'Content-Type': req.get('Content-Type') || 'application/json', + 'Accept': req.get('Accept') || 'application/json', + }, + }; + + // Forward body for POST/PUT/PATCH requests + if (['POST', 'PUT', 'PATCH'].includes(req.method) && req.body) { + fetchOptions.body = JSON.stringify(req.body); + } + + const response = await fetch(targetUrl, fetchOptions); + + // Get content type from response + const contentType = response.headers.get('content-type'); + + // Set response headers + res.status(response.status); + if (contentType) { + res.set('Content-Type', contentType); + } + + // Handle different response types + if (contentType && (contentType.includes('application/pdf') || + contentType.includes('text/plain') || + contentType.includes('application/octet-stream'))) { + // Binary/file responses + const buffer = await response.arrayBuffer(); + res.send(Buffer.from(buffer)); + } else { + // JSON responses + const data = await response.text(); + res.send(data); + } + } catch (error) { + console.error(`API Proxy Error [${req.method} ${req.originalUrl}]:`, error.message); + res.status(502).json({ + status: 'error', + message: 'Failed to connect to backend service', + details: error.message + }); + } +}); + +// Serve static files from build directory const buildPath = path.join(__dirname, '../build'); app.use(express.static(buildPath)); - -app.get(/.*/, (req, res) => { +// SPA fallback - serve index.html for all other routes +app.get('*', (req, res) => { res.sendFile(path.join(buildPath, 'index.html')); }); app.listen(PORT, '0.0.0.0', () => { - console.log(`Server is running on http://localhost:${PORT}`); + console.log(`Frontend server running on http://localhost:${PORT}`); + console.log(`API proxy target: ${API_URL}`); }); diff --git a/frontend/src/lib/components/CameraScreen.svelte b/frontend/src/lib/components/CameraScreen.svelte index 79c8446..c1c2cdc 100644 --- a/frontend/src/lib/components/CameraScreen.svelte +++ b/frontend/src/lib/components/CameraScreen.svelte @@ -19,6 +19,8 @@ let useCamera = $state(true); let fileInputElement = $state(); + let alternatives = $state([]); + onMount(() => { const initCamera = async () => { 
if (useCamera) { @@ -43,6 +45,36 @@ }; }); + async function analyzeImage(imageBase64: string) { + try { + const payload = { + product_name: "Scanned Item", + description: "Scanned via camera", + report_type: "product", + image: imageBase64, + user_id: localStorage.getItem("ethix_user_id") || "anonymous", + is_public: false, + }; + + const response = await fetch("/api/incidents/submit", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(payload), + }); + + const data = await response.json(); + + if (data.status === "success" && data.analysis) { + return data; + } else { + throw new Error(data.message || "Analysis failed"); + } + } catch (e) { + console.error("Analysis error:", e); + return null; + } + } + function playSuccessSound() { const audio = new Audio("/report completed.mp3"); audio.volume = 0.5; @@ -55,7 +87,7 @@ if (!file) return; const reader = new FileReader(); - reader.onload = (e) => { + reader.onload = async (e) => { const imageData = e.target?.result as string; capturedImage = imageData; analyzing = true; @@ -73,7 +105,6 @@ analyzing = false; showResult = true; resultTranslateY = 0; - playSuccessSound(); typeText(); }, 1200); }; @@ -88,25 +119,26 @@ canvas.width = videoElement.videoWidth; canvas.height = videoElement.videoHeight; const ctx = canvas.getContext("2d"); + if (ctx) { ctx.drawImage(videoElement, 0, 0); - capturedImage = canvas.toDataURL("image/png"); - } + const imageData = canvas.toDataURL("image/png"); + capturedImage = imageData; - setTimeout(() => { - const newItem = { - id: Date.now(), - title: "Plastic Bottle", - date: new Date().toLocaleString(), - impact: "High", - imageUri: capturedImage, - }; - - onScanComplete(newItem); + const result = await analyzeImage(imageData); + processResult(result, imageData); + } else { analyzing = false; + } + } + + function processResult(data: any, imageUri: string) { + analyzing = false; + if (!data) { + displayTitle = "Scan Failed"; + alternatives = []; showResult = true; resultTranslateY = 0; - playSuccessSound(); typeText(); }, 1200); } @@ -164,7 +196,26 @@ - - - - - - - + {#if alternatives && alternatives.length > 0} +

+ Sustainable Alternatives +

+
+ {#each alternatives as alt} +
+
+ + {#if alt.impact_reduction} + Better + {/if} +
+

{alt.name}

+
+ {/each} +
+ {:else if displayTitle !== "Scan Failed"} +

+ ✓ No greenwashing concerns found +

+ {:else} +

+ Could not analyze this image. Please try again. +

+ {/if} {/if} @@ -382,10 +416,11 @@ height: 80px; border-radius: 50%; background-color: white; - border: 6px solid rgba(255, 255, 255, 0.3); + border: 6px solid rgba(74, 222, 128, 0.5); cursor: pointer; transition: transform 0.2s; background-clip: padding-box; + box-shadow: 0 4px 12px rgba(0, 0, 0, 0.4); } .shutter-button:active { diff --git a/frontend/src/lib/components/MobileHomePage.svelte b/frontend/src/lib/components/MobileHomePage.svelte index b8687c8..6d90bfc 100644 --- a/frontend/src/lib/components/MobileHomePage.svelte +++ b/frontend/src/lib/components/MobileHomePage.svelte @@ -14,15 +14,174 @@ ]; interface ScanItem { - id: number; + id: string; name: string; date: string; severity: string; - image: string; + image: string | null; impact: string; + alternatives: string[]; + is_greenwashing: boolean; + is_public: boolean; } + let scanHistory = $state([]); let selectedScan = $state(null); + let userId = $state(""); + let fileInput: HTMLInputElement; + let isScanning = $state(false); + + onMount(() => { + let storedId = localStorage.getItem("ethix_user_id"); + if (!storedId) { + storedId = crypto.randomUUID(); + localStorage.setItem("ethix_user_id", storedId); + } + userId = storedId; + + fetchHistory(); + + // Listen for scan-complete event to refresh history + const handleScanComplete = () => { + fetchHistory(); + }; + window.addEventListener("scan-complete", handleScanComplete); + + return () => { + window.removeEventListener("scan-complete", handleScanComplete); + }; + }); + + async function fetchHistory() { + if (!userId) return; + try { + const res = await fetch( + `/api/incidents/history?user_id=${userId}`, + ); + const data = await res.json(); + if (Array.isArray(data)) { + scanHistory = data.map((item: any) => ({ + id: item._id, + name: item.product_name, + date: new Date(item.created_at).toLocaleDateString(), + severity: item.analysis?.severity || "Low", + image: item.image_base64 + ? `data:image/jpeg;base64,${item.image_base64}` + : null, + impact: item.analysis?.verdict || "No impact assessment", + alternatives: item.analysis?.alternatives || [], + is_greenwashing: item.analysis?.is_greenwashing || false, + is_public: item.is_public || false, + })); + } + } catch (e) { + console.error("Failed to fetch history", e); + } + } + + function triggerScan() { + if (isScanning) return; + fileInput.click(); + } + + async function handleFileSelect(e: Event) { + const target = e.target as HTMLInputElement; + if (!target.files || target.files.length === 0) return; + + const file = target.files[0]; + isScanning = true; + + const reader = new FileReader(); + reader.onload = async () => { + const base64String = reader.result as string; + await submitScan(base64String); + }; + reader.readAsDataURL(file); + } + + async function submitScan(imageBase64: string) { + try { + const payload = { + product_name: "Scanned Item", + description: "Scanned via mobile app", + report_type: "product", + image: imageBase64, + user_id: userId, + is_public: false, // Default private + }; + + const res = await fetch( + "/api/incidents/submit", + { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(payload), + }, + ); + + const data = await res.json(); + + if (data.status === "success") { + await fetchHistory(); + selectedScan = { + id: data.incident_id, + name: + data.detected_brand !== "Unknown" + ? 
data.detected_brand + : "Scanned Product", + date: "Just now", + severity: data.analysis.severity, + image: imageBase64, + impact: data.analysis.verdict, + alternatives: data.analysis.alternatives || [], + is_greenwashing: data.is_greenwashing, + is_public: false, + }; + } else { + alert("Analysis failed: " + (data.message || "Unknown error")); + } + } catch (e) { + console.error("Scan failed", e); + alert("Analysis failed. Please try again."); + } finally { + isScanning = false; + if (fileInput) fileInput.value = ""; + } + } + + async function toggleVisibility() { + if (!selectedScan) return; + const newState = !selectedScan.is_public; + + // Optimistic UI update for modal + selectedScan.is_public = newState; + + // Optimistic UI update for list + const histIndex = scanHistory.findIndex( + (s) => s.id === selectedScan!.id, + ); + if (histIndex !== -1) { + scanHistory[histIndex].is_public = newState; + } + + try { + await fetch( + `/api/incidents/${selectedScan.id}/visibility`, + { + method: "PUT", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ is_public: newState }), + }, + ); + } catch (e) { + // Revert on failure + selectedScan.is_public = !newState; + if (histIndex !== -1) { + scanHistory[histIndex].is_public = !newState; + } + alert("Failed to update visibility"); + } + } function openScan(scan: ScanItem) { selectedScan = scan; @@ -32,69 +191,83 @@ selectedScan = null; } - let greeting = $state("Hello"); - - onMount(() => { - const hour = new Date().getHours(); - if (hour < 12) greeting = "Good morning"; - else if (hour < 17) greeting = "Good afternoon"; - else greeting = "Good evening"; - }); - function getSeverityClass(severity: string): string { - return severity.toLowerCase(); + return severity?.toLowerCase() || "low"; }
+ +
- +
- 47 + {scanHistory.length} scans -
-
-
- -
- 32
- eco picks
+ Total Scans
+
+

Your Scans

+
+
+ {#if scanHistory.length === 0} +
+ No scans yet. Start by scanning a product! +
+ {/if} {#each scanHistory as item (item.id)}
e.key === "Escape" && closeScan()} onclick={closeScan} + onkeydown={(e) => e.key === "Escape" && closeScan()} > -
e.stopPropagation()} role="document" tabindex="-1" - onkeydown={(e) => e.key === "Escape" && closeScan()} - onclick={(e) => e.stopPropagation()} > -
+
- {selectedScan.date}
+ Scan Report
{#if selectedScan.image}
- {selectedScan.name[0]}
+ {selectedScan.name
+ ? selectedScan.name[0]
+ : "?"}
{/if} @@ -192,19 +377,64 @@

{selectedScan.name}

-

+ +

+
+ Severity: {selectedScan.severity} +
+
+ + + + +

{selectedScan.impact}

-
- Severity: {selectedScan.severity} -
+ + {#if selectedScan.alternatives && selectedScan.alternatives.length > 0} +
+

+ + Sustainable Alternatives +

+
+ {#each selectedScan.alternatives as alt}
+ + {alt}
+ {/each}
+
+ {/if}
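The toggleVisibility change above applies an optimistic update: the UI flips immediately and reverts only if the PUT request fails. Below is a minimal TypeScript sketch of that pattern against the /api/incidents/<incident_id>/visibility route added in this patch — the helper name, the local state, and the id value are illustrative, not part of the diff:

// Optimistic toggle with rollback, mirroring toggleVisibility in MobileHomePage.
async function setVisibility(incidentId: string, isPublic: boolean): Promise<void> {
  const res = await fetch(`/api/incidents/${incidentId}/visibility`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ is_public: isPublic }),
  });
  if (!res.ok) throw new Error(`Visibility update failed: ${res.status}`);
}

// Flip local state first so the UI responds instantly, then roll back on failure.
let isPublic = false;                  // illustrative local state
const next = !isPublic;
isPublic = next;                       // optimistic flip
setVisibility('66f0c0ffee', next).catch(() => {
  isPublic = !next;                    // rollback so the UI matches the server again
});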
diff --git a/frontend/src/lib/components/ParallaxLandscape.svelte b/frontend/src/lib/components/ParallaxLandscape.svelte index c7ca33a..f6b6ede 100644 --- a/frontend/src/lib/components/ParallaxLandscape.svelte +++ b/frontend/src/lib/components/ParallaxLandscape.svelte @@ -17,8 +17,8 @@ const PATH_CONFIG: Record = { "/": { sceneType: "transition", staticScene: false }, "/chat": { sceneType: "oilRig", staticScene: true }, - "/community": { sceneType: "forest", staticScene: true }, - "/report": { sceneType: "industrial", staticScene: true }, + "/goal": { sceneType: "pollutedCity", staticScene: true }, + "/report": { sceneType: "deforestation", staticScene: true }, "/catalogue": { sceneType: "transition", staticScene: false, diff --git a/frontend/src/lib/components/WebHomePage.svelte b/frontend/src/lib/components/WebHomePage.svelte index 5358f2c..b261f64 100644 --- a/frontend/src/lib/components/WebHomePage.svelte +++ b/frontend/src/lib/components/WebHomePage.svelte @@ -18,7 +18,7 @@ async function fetchStats() { try { - const res = await fetch("http://localhost:5000/api/reports/stats"); + const res = await fetch("/api/reports/stats"); const data = await res.json(); statsData = data; } catch (e) { diff --git a/frontend/src/lib/components/catalogue/CompanyModal.svelte b/frontend/src/lib/components/catalogue/CompanyModal.svelte index 9ec5894..e41d5c2 100644 --- a/frontend/src/lib/components/catalogue/CompanyModal.svelte +++ b/frontend/src/lib/components/catalogue/CompanyModal.svelte @@ -59,7 +59,7 @@ .toLowerCase() .endsWith(".txt")} @@ -74,7 +74,7 @@ />

Preview not available for this file type.

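The frontend diffs in this patch swap hard-coded http://localhost:5000 origins for relative /api paths (WebHomePage above; catalogue, chat, and report below), and the next file adds typed helpers in frontend/src/lib/ts/api.ts, although the components here still call fetch directly. A sketch of how those helpers could take over — the $lib import alias and the response shapes are assumptions, not defined in this diff:

import { apiGet, apiPost, reportViewUrl } from '$lib/ts/api';

// Same-origin /api requests; the Vite proxy (dev) or the Express proxy (prod)
// forwards them to the Flask backend, so no origin is hard-coded here.
const reports = await apiGet<Array<{ filename: string }>>('/reports/');
const answer = await apiPost<{ response: string }>('/gemini/ask', {
  prompt: 'Is this packaging claim greenwashing?',
});
const previewSrc = reportViewUrl(reports[0]?.filename ?? '');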
diff --git a/frontend/src/lib/ts/api.ts b/frontend/src/lib/ts/api.ts
new file mode 100644
index 0000000..a9b6c6d
--- /dev/null
+++ b/frontend/src/lib/ts/api.ts
@@ -0,0 +1,52 @@
+
+export const API_BASE = '/api';
+
+export function apiUrl(path: string): string {
+  // Ensure path starts with /
+  const normalizedPath = path.startsWith('/') ? path : `/${path}`;
+  return `${API_BASE}${normalizedPath}`;
+}
+
+export async function apiFetch<T>(
+  path: string,
+  options: RequestInit = {}
+): Promise<T> {
+  const url = apiUrl(path);
+  const response = await fetch(url, {
+    ...options,
+    headers: {
+      'Content-Type': 'application/json',
+      ...options.headers,
+    },
+  });
+
+  if (!response.ok) {
+    throw new Error(`API Error: ${response.status} ${response.statusText}`);
+  }
+
+  return response.json();
+}
+
+
+export async function apiGet<T>(path: string): Promise<T> {
+  return apiFetch<T>(path, { method: 'GET' });
+}
+
+
+export async function apiPost<T>(path: string, body: any): Promise<T> {
+  return apiFetch<T>(path, {
+    method: 'POST',
+    body: JSON.stringify(body),
+  });
+}
+
+export async function apiPut<T>(path: string, body: any): Promise<T> {
+  return apiFetch<T>(path, {
+    method: 'PUT',
+    body: JSON.stringify(body),
+  });
+}
+
+export function reportViewUrl(filename: string): string {
+  return `${API_BASE}/reports/view/${encodeURIComponent(filename)}`;
+}
diff --git a/frontend/src/lib/ts/parallax/index.ts b/frontend/src/lib/ts/parallax/index.ts
index d81bd12..3b05cdb 100644
--- a/frontend/src/lib/ts/parallax/index.ts
+++ b/frontend/src/lib/ts/parallax/index.ts
@@ -22,8 +22,9 @@ const SCENE_ELEMENTS: Record<SceneType, (dc: DrawContext) => void> = {
   pollutedCity: drawPollutedCityScene,
 };
 
-const CUSTOM_WATER_SCENES: SceneType[] = ['ocean', 'oilRig'];
-const NO_TERRAIN_SCENES: SceneType[] = ['ocean', 'oilRig'];
+const CUSTOM_WATER_SCENES: SceneType[] = ['ocean', 'oilRig', 'deforestation'];
+const NO_MOUNTAINS_SCENES: SceneType[] = ['ocean', 'oilRig', 'deforestation'];
+const NO_HILLS_SCENES: SceneType[] = ['ocean', 'oilRig'];
 
 export function drawLandscape(
   ctx: CanvasRenderingContext2D,
@@ -40,11 +41,16 @@ export function drawLandscape(
   drawSun(dc);
   drawClouds(dc);
 
-  const skipTerrain = NO_TERRAIN_SCENES.includes(sceneType) ||
-    (blendToScene && NO_TERRAIN_SCENES.includes(blendToScene) && (blendProgress ?? 0) > 0.5);
+  const skipMountains = NO_MOUNTAINS_SCENES.includes(sceneType) ||
+    (blendToScene && NO_MOUNTAINS_SCENES.includes(blendToScene) && (blendProgress ?? 0) > 0.5);
 
-  if (!skipTerrain) {
+  const skipHills = NO_HILLS_SCENES.includes(sceneType) ||
+    (blendToScene && NO_HILLS_SCENES.includes(blendToScene) && (blendProgress ?? 0) > 0.5);
+
+  if (!skipMountains) {
     drawMountains(dc);
+  }
+  if (!skipHills) {
     drawHills(dc);
   }
diff --git a/frontend/src/lib/ts/parallax/scenes/pollutedCity.ts b/frontend/src/lib/ts/parallax/scenes/pollutedCity.ts
index 65d694f..579474e 100644
--- a/frontend/src/lib/ts/parallax/scenes/pollutedCity.ts
+++ b/frontend/src/lib/ts/parallax/scenes/pollutedCity.ts
@@ -19,8 +19,10 @@ export function drawSmoggyBuildings(dc: DrawContext): void {
   for (let row = 0; row < windowRows; row++) {
     for (let col = 0; col < windowCols; col++) {
-      const isBroken = Math.random() > 0.85;
-      const isLit = Math.random() > 0.5;
+      // Deterministic "randomness" based on position to stop flashing
+      const seed = x + col * 13 + row * 71;
+      const isBroken = Math.abs(Math.sin(seed)) > 0.85;
+      const isLit = Math.cos(seed) > 0.1;
 
       if (!isBroken) {
         ctx.fillStyle = isLit ?
'rgba(255, 180, 100, 0.5)' : 'rgba(50, 50, 50, 0.8)'; diff --git a/frontend/src/routes/+layout.svelte b/frontend/src/routes/+layout.svelte index c80404b..aa8563f 100644 --- a/frontend/src/routes/+layout.svelte +++ b/frontend/src/routes/+layout.svelte @@ -32,7 +32,14 @@ function handleScanComplete(item: any) { recentItems = [item, ...recentItems]; + // Don't close camera here - let user view the result sheet first + // Camera will close when user clicks close button + } + + function handleCameraClose() { isCameraActive = false; + // Dispatch event to notify MobileHomePage to refresh + window.dispatchEvent(new CustomEvent('scan-complete')); } onMount(() => { @@ -147,7 +154,7 @@ {#if isCameraActive} (isCameraActive = false)} + onClose={handleCameraClose} onScanComplete={handleScanComplete} /> {/if} diff --git a/frontend/src/routes/catalogue/+page.svelte b/frontend/src/routes/catalogue/+page.svelte index 113b38b..ab2c2fe 100644 --- a/frontend/src/routes/catalogue/+page.svelte +++ b/frontend/src/routes/catalogue/+page.svelte @@ -66,7 +66,7 @@ async function fetchReports() { isLoading = true; try { - const res = await fetch("http://localhost:5000/api/reports/"); + const res = await fetch("/api/reports/"); const data = await res.json(); if (Array.isArray(data)) reports = data; } catch (e) { @@ -79,7 +79,7 @@ async function fetchIncidents() { isLoading = true; try { - const res = await fetch("http://localhost:5000/api/incidents/list"); + const res = await fetch("/api/incidents/list"); const data = await res.json(); if (Array.isArray(data)) incidents = data; } catch (e) { @@ -103,7 +103,7 @@ isLoading = true; try { const res = await fetch( - "http://localhost:5000/api/reports/search", + "/api/reports/search", { method: "POST", headers: { "Content-Type": "application/json" }, diff --git a/frontend/src/routes/chat/+page.svelte b/frontend/src/routes/chat/+page.svelte index a0b5905..10e756f 100644 --- a/frontend/src/routes/chat/+page.svelte +++ b/frontend/src/routes/chat/+page.svelte @@ -52,7 +52,7 @@ try { const response = await fetch( - "http://localhost:5000/api/gemini/ask", + "/api/gemini/ask", { method: "POST", headers: { "Content-Type": "application/json" }, diff --git a/frontend/src/routes/report/+page.svelte b/frontend/src/routes/report/+page.svelte index a2c8cd6..5a74c2f 100644 --- a/frontend/src/routes/report/+page.svelte +++ b/frontend/src/routes/report/+page.svelte @@ -130,7 +130,7 @@ if (reportType === "company") { // Use the new upload endpoint for company reports const response = await fetch( - "http://localhost:5000/api/reports/upload", + "/api/reports/upload", { method: "POST", headers: { @@ -170,7 +170,7 @@ } else { // Original product incident flow const response = await fetch( - "http://localhost:5000/api/incidents/submit", + "/api/incidents/submit", { method: "POST", headers: { diff --git a/frontend/vite.config.js b/frontend/vite.config.js index 26f0565..bcf2b5e 100644 --- a/frontend/vite.config.js +++ b/frontend/vite.config.js @@ -5,6 +5,9 @@ import tailwindcss from '@tailwindcss/vite' const host = process.env.TAURI_DEV_HOST; +// Backend API URL for development proxy +const API_TARGET = process.env.API_URL || 'http://localhost:5000'; + export default defineConfig(async () => ({ plugins: [sveltekit(), tailwindcss()], clearScreen: false, @@ -22,5 +25,13 @@ export default defineConfig(async () => ({ watch: { ignored: ["**/src-tauri/**"], }, + // Proxy API requests to backend during development + proxy: { + '/api': { + target: API_TARGET, + changeOrigin: true, + secure: false, + } + } 
}, }));
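With the pieces above in place, the browser always talks to a single origin, and /api/* is forwarded to Flask in both environments: the Vite dev server proxies to API_URL (default http://localhost:5000), while in Docker the Express server proxies to http://api:5000, where "api" resolves over the ethix-network bridge declared in docker-compose.yml. A condensed TypeScript sketch of the forwarding core — not the full implementation in frontend/server/index.js, and assuming a Node 18+ global fetch:

import express from 'express';

const app = express();
const API_URL = process.env.API_URL || 'http://api:5000'; // same default as server/index.js

app.use('/api', express.json(), async (req, res) => {
  // req.originalUrl keeps the /api prefix, matching the Flask route layout.
  const upstream = await fetch(`${API_URL}${req.originalUrl}`, {
    method: req.method,
    headers: { 'Content-Type': req.get('Content-Type') || 'application/json' },
    body: ['POST', 'PUT', 'PATCH'].includes(req.method) ? JSON.stringify(req.body) : undefined,
  });
  res.status(upstream.status).send(await upstream.text());
});

app.listen(Number(process.env.PORT) || 3000, '0.0.0.0');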