init make recipe from ingredients #7

Open
wants to merge 1 commit into main
7 changes: 6 additions & 1 deletion main.py
@@ -1 +1,6 @@
print("Hello, World!")
# main.py
from fastapi import FastAPI
from router import test_router

app = FastAPI()
app.include_router(test_router.router)
Binary file added router/__pycache__/test_router.cpython-311.pyc
Binary file not shown.
104 changes: 104 additions & 0 deletions router/test_router.py
@@ -0,0 +1,104 @@
from fastapi import APIRouter, Body
import base64
from openai import OpenAI

# Shared OpenAI client and router instance for all endpoints in this module.
client = OpenAI()
router = APIRouter()

def encode_image(image_path):
    # Read an image file from disk and return its base64-encoded contents.
    with open(image_path, "rb") as image_file:
        return base64.b64encode(image_file.read()).decode("utf-8")

def encode_frame(frame):
    # Base64-encode raw image bytes that are already held in memory.
    return base64.b64encode(frame).decode("utf-8")

def encode_images(image_paths, frames=False):
    # Build data URLs for the vision model, from raw frames or from file paths.
    if frames:
        return [f"data:image/jpeg;base64,{encode_frame(frame)}" for frame in image_paths]
    return [f"data:image/jpeg;base64,{encode_image(image_path)}" for image_path in image_paths]

def test_gpt():
    # Simple smoke test for the chat completion helper below.
    return chatgpt("What is the meaning of life?")

def chatgpt(query, model="gpt-4-1106-preview"):
    # Send a single user message to the chat model and return the reply text.
    response = client.chat.completions.create(
        messages=[
            {
                "role": "user",
                "content": query,
            }
        ],
        model=model,
    )
    return response.choices[0].message.content

@router.post("/recipe")
def recipe(data):
client = OpenAI()
model="gpt-4-1106-preview"
response = client.chat.completions.create(
model=model,
messages=[
{
"role": "user",
"content": data
}
],
max_tokens=300,
)

return response.choices[0].message.content


@router.post("/ingredients")
def ingredients():
client = OpenAI()

response = client.chat.completions.create(
model="gpt-4-vision-preview",
messages=[
{
"role": "user",
"content": [
{"type": "text", "text": "What’s in this image?"},
{
"type": "image_url",
"image_url": "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg",
},
],
}
],
max_tokens=300,
)

return response.choices[0].message.content


def vision_api(images, prompt):
    # Send a text prompt plus one or more image URLs (or data URLs produced by
    # encode_images) to the vision model and return the reply text.
    content = [{"type": "text", "text": prompt}]
    for image in images:
        content.append({
            "type": "image_url",
            "image_url": {"url": image},
        })
    response = client.chat.completions.create(
        model="gpt-4-vision-preview",
        messages=[
            {
                "role": "user",
                "content": content,
            }
        ],
        max_tokens=1000,
    )
    return response.choices[0].message.content


if __name__ == "__main__":
    response = test_gpt()
    print(response)
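
For context, a minimal sketch of how the currently unused helpers encode_images and vision_api are presumably meant to fit together; the file names and prompt below are hypothetical and assume local JPEG files on disk:

from router.test_router import encode_images, vision_api

# Hypothetical photos of ingredients; encode_images turns them into base64
# data URLs that vision_api forwards to the vision model alongside the prompt.
data_urls = encode_images(["fridge_photo.jpg", "pantry_photo.jpg"])
print(vision_api(data_urls, "List the ingredients visible in these photos."))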
4 changes: 4 additions & 0 deletions start.py
@@ -0,0 +1,4 @@
import uvicorn

if __name__ == "__main__":
    uvicorn.run("main:app", port=7001, reload=True)
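
And a minimal client-side sketch for exercising the two endpoints once start.py is running; it assumes the requests package is installed, OPENAI_API_KEY is set in the server's environment, and the ingredient string is a made-up example (port 7001 matches start.py):

import requests

BASE_URL = "http://localhost:7001"  # port configured in start.py

# /recipe takes the ingredient description as the JSON request body.
recipe = requests.post(f"{BASE_URL}/recipe", json="eggs, flour, milk, butter, sugar")
print(recipe.json())

# /ingredients currently takes no input and describes a hard-coded sample image.
ingredients = requests.post(f"{BASE_URL}/ingredients")
print(ingredients.json())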