tunnel update

This commit is contained in:
2024-09-15 02:45:37 -04:00
parent 8ad6e77d4d
commit 2164f330ac
5 changed files with 1046 additions and 43 deletions

View File

@@ -1,14 +1,7 @@
import localtunnel from 'localtunnel';
import { tunnelmole } from 'tunnelmole';
// Expose the local Ollama server (port 11434) through a tunnelmole tunnel
// and print the assigned public URL.
//
// Fixes two defects in the previous version:
//  - `url` was declared inside an async IIFE but logged at module top level,
//    which threw a ReferenceError at startup.
//  - Two tunnels were opened (a leftover localtunnel on 11424 plus the
//    tunnelmole one); only the tunnelmole tunnel is needed.
// Top-level await is valid here because the file is an ES module (it uses
// `import` syntax).
const url = await tunnelmole({
  port: 11434,
});

// i.e. https://abcdefgjhij.tunnelmole.net
console.log(`Your localtunnel is available at ${url}`);

View File

@@ -32,6 +32,7 @@
"mongoose": "^8.6.2",
"ms": "^2.1.3",
"ollama": "^0.5.9",
"tunnelmole": "^2.2.14",
"uuid": "^10.0.0"
}
}

1058
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,10 +1,6 @@
import {Ollama} from 'ollama'
// Shared Ollama client for this module.
//
// Fixes a SyntaxError: `ollama` was declared twice with `const` in the same
// scope (a hard-coded tunnel host with a 'bypass-tunnel-reminder' header, and
// the env-based replacement). Keep only the env-based client so the host is
// configurable per deployment via AI_URL (e.g. the tunnel's public URL).
// NOTE(review): if AI_URL is unset this falls back to the ollama library's
// default host — confirm AI_URL is always provided in deployment config.
const ollama = new Ollama({ host: process.env.AI_URL });
// const response = await ollama.chat({
// model: 'llama3:latest',
@@ -66,9 +62,6 @@ const questions = [
// ]
export default class FDAI {
constructor(){
this.ai = new Ollama({ host: process.env.AI_URL });
}
async suggestFood(questionAmount, answers, restrictions) {
@@ -79,7 +72,7 @@ export default class FDAI {
}
const response = await ollama.generate({
model: 'llama3:latest',
model: 'meta-llama-3-1-8b-1',
format: "json",
prompt:
`Give one food suggestion for these question answers and then generate a recipe.

View File

@@ -28,6 +28,8 @@ export default class RecipeAPI extends APIRoute {
let aiResult = await ai.suggestFood(recipe.currentQuestion, recipe.answers, recipe.restrictions);
console.log(aiResult);
let suggestFood = JSON.parse(aiResult.response);
let result = await db.create(suggestFood);