Skip to content

Commit

Permalink
Merge pull request #189 from mikepsinn/develop
Browse files Browse the repository at this point in the history
Simplified text2measurements endpoint and used types from @/types/models
  • Loading branch information
mikepsinn authored Apr 26, 2024
2 parents 66cad26 + c170887 commit 322654d
Show file tree
Hide file tree
Showing 12 changed files with 271 additions and 2,300 deletions.
4 changes: 2 additions & 2 deletions apps/api-gateway/README.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# FDAi API Gateway
# Decentralized FDA API Gateway

_Digital Twin Application Programming Interface (DT-API) Specification_

Expand Down Expand Up @@ -66,7 +66,7 @@ fetch('https://app.quantimo.do/api/v3/variables?limit=100', options)

### Authentication

To use the FDAi API, you first need to get an access token as described below. Once you have the token, include it in any of the [API requests documented here](https://curedao.org/api-docs) using the `Authorization` header in the format `Bearer YOUR_TOKEN_HERE`.
To use the Decentralized FDA API, you first need to get an access token as described below. Once you have the token, include it in any of the [API requests documented here](https://curedao.org/api-docs) using the `Authorization` header in the format `Bearer YOUR_TOKEN_HERE`.

#### Option 1: Use Demo Data
If you don't have your own data yet, you can use the access token `demo` in the `Authorization` header in the format `Bearer demo`.
Expand Down
3 changes: 1 addition & 2 deletions apps/nextjs/app/api/chat-with-vision/route.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
// ./app/api/chat/route.ts
import OpenAI from 'openai';
import { OpenAIStream, StreamingTextResponse } from 'ai';

Expand Down Expand Up @@ -41,4 +40,4 @@ export async function POST(req: Request) {
const stream = OpenAIStream(response);
// Respond with the stream
return new StreamingTextResponse(stream);
}
}
2 changes: 1 addition & 1 deletion apps/nextjs/app/api/image2measurements/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ export async function POST(request: NextRequest) {
// Log the receipt of the image in base64 format
console.log('Received image in base64 format');

// Utilize the provided custom prompt or a default prompt if it's not provided.
// Use the provided custom prompt or a default prompt if it's not provided.
// This prompt guides the analysis of the image by OpenAI's model.
let promptText = `
Analyze the provided image and estimate the macro and micronutrient content of any food items, and extract data about any medications or nutritional supplements present. Return the results as an array of structured JSON data with the following format:
Expand Down
60 changes: 18 additions & 42 deletions apps/nextjs/app/api/text2measurements/route.ts
Original file line number Diff line number Diff line change
@@ -1,55 +1,31 @@
import { NextRequest, NextResponse } from 'next/server';
import {MeasurementSet} from "@/app/api/text2measurements/measurementSchema";
import {createJsonTranslator, createLanguageModel} from "typechat";
import fs from "fs";
import path from "path";

// Language model built from the process environment (see typechat's
// createLanguageModel for the variables it reads).
const model = createLanguageModel(process.env);

// Source text of the TypeChat measurement schema, loaded once at module
// init. Declared with `let` because processStatement appends datetime
// hints to it before translation.
let viewSchema = fs.readFileSync(path.join(__dirname, "measurementSchema.ts"), "utf8");

/**
 * Translates a natural-language statement into a structured MeasurementSet
 * using TypeChat.
 *
 * @param statement     Free-text user statement to translate.
 * @param localDateTime Optional current local datetime; when provided it is
 *                      appended to the schema as a hint so the model can fill
 *                      in startDateLocal (and time/end fields if specified).
 * @returns The translated MeasurementSet.
 * @throws Error when TypeChat fails to produce a valid translation.
 */
async function processStatement(statement: string, localDateTime?: string): Promise<MeasurementSet> {
  // Build a per-call copy of the schema. The original mutated the
  // module-level `viewSchema` (`viewSchema += ...`), so datetime hints
  // accumulated across requests and earlier requests' datetimes leaked
  // into later translations.
  let schema = viewSchema;
  if (localDateTime) {
    schema += "\n// Use the current local datetime " + localDateTime +
      " to determine startDateLocal. If specified, also determine startTimeLocal, endDateLocal, and endTimeLocal or just leave them null.";
  }
  const translator = createJsonTranslator(model, schema, "MeasurementSet");
  const response = await translator.translate(statement);
  if (!response.success) {
    console.error(response);
    throw new Error("Translation failed");
  }
  const measurementSet = response.data as MeasurementSet;
  // Surface any fragments the model could not classify for debugging.
  if (measurementSet.measurements.some((item) => item.itemType === "unknown")) {
    console.log("I didn't understand the following:");
    for (const item of measurementSet.measurements) {
      if (item.itemType === "unknown") console.log(item.text);
    }
  }
  return measurementSet;
}
import { text2measurements } from "@/lib/text2measurements";

/**
 * POST /api/text2measurements
 *
 * Accepts a JSON body of the form { statement: string, localDateTime?: string }
 * and returns the structured measurements extracted from the statement.
 *
 * Note: the diff residue here redeclared `measurements`, destructured an
 * unused `prompt`, and kept comments/logs copied from the image endpoint;
 * this is the cleaned-up text-processing handler.
 */
export async function POST(request: NextRequest) {
  // Extract the statement and optional local datetime from the request body.
  const { statement, localDateTime } = await request.json();

  try {
    // Translate the free-text statement into structured measurements.
    const measurements = await text2measurements(statement, localDateTime);
    // If you want to save them, uncomment await dfdaPOST('/v3/measurements', measurements, session?.user.id);

    // Return the extracted measurements in the response.
    return NextResponse.json({ success: true, measurements: measurements });
  } catch (error) {
    // Log and handle any errors encountered during the request to OpenAI
    console.error('Error sending request to OpenAI:', error);
    return NextResponse.json({ success: false, message: 'Error sending request to OpenAI' });
  }
}

/**
 * GET /api/text2measurements
 *
 * Reads `statement` (and optionally `localDateTime`) from the query string
 * and returns the structured measurements extracted from the statement.
 */
export async function GET(req: NextRequest) {
  // Collect every query parameter into a plain object and pick out the
  // two this endpoint understands.
  const { statement, localDateTime } = Object.fromEntries(
    new URL(req.url).searchParams
  ) as { statement: string; localDateTime?: string | null };

  try {
    const measurements = await text2measurements(statement, localDateTime);
    // To persist results, uncomment: await dfdaPOST('/v3/measurements', measurements, session?.user.id);
    return NextResponse.json({ success: true, measurements: measurements });
  } catch (error) {
    console.error('Error sending request to OpenAI:', error);
    return NextResponse.json({ success: false, message: 'Error sending request to OpenAI' });
  }
}
Loading

0 comments on commit 322654d

Please sign in to comment.