diff --git a/src/app/api/chat/route.js b/src/app/api/chat/route.js
index 448c954..55e51e0 100644
--- a/src/app/api/chat/route.js
+++ b/src/app/api/chat/route.js
@@ -1,17 +1,49 @@
 export async function POST(req) {
   try {
+    const token = process.env.FRIENDLI_TOKEN;
     const body = await req.json();
     const { query } = body;
 
-    const response = await fetch("https://api.friendli.ai/dedicated", {
-      method: "POST",
-      headers: {
-        "Content-Type": "application/json",
-        "Authorization": `Bearer ${process.env.FRIENDLI_TOKEN}`,
-        "X-Friendli-Team": process.env.TEAM_ID,
+    const headers = {
+      Authorization: "Bearer " + token,
+      "Content-Type": "application/json",
+    };
+
+    const requestBody = {
+      model: "meta-llama-3.3-70b-instruct",
+      messages: [
+        {
+          role: "system",
+          content:
+            "You\u0027re an assistant for possibly elderly or ill patients. Give responses to medical questions in a brief, understandable, and kind way.",
+        },
+        {
+          role: "user",
+          content: query,
+        },
+      ],
+      tools: [],
+      min_tokens: 0,
+      max_tokens: 2048,
+      temperature: 1,
+      top_p: 0.8,
+      frequency_penalty: 0,
+      stop: [],
+      response_format: null,
+      stream: false,
+      stream_options: {
+        include_usage: false,
       },
-      body: JSON.stringify({ id: "idv6w0upi158", query }),
-    });
+    };
+
+    const response = await fetch(
+      "https://api.friendli.ai/serverless/v1/chat/completions",
+      {
+        method: "POST",
+        headers,
+        body: JSON.stringify(requestBody),
+      }
+    );
 
     if (!response.ok) {
       const errorDetails = await response.text();
@@ -22,8 +54,11 @@ export async function POST(req) {
       );
     }
 
-    const data = await response.json();
-    return new Response(JSON.stringify({ answer: data.answer }), { status: 200 });
+    // The serverless endpoint is OpenAI-compatible: parse the JSON body and
+    // return only the assistant message, not the raw completion payload.
+    const data = await response.json();
+    const answer = data.choices?.[0]?.message?.content ?? "";
+    return new Response(JSON.stringify({ answer }), { status: 200 });
   } catch (error) {
     console.error("Backend error:", error);
     return new Response(