Spaces:
Sleeping
Sleeping
Update models.py
Browse files
models.py
CHANGED
|
@@ -71,7 +71,7 @@ def get_generation_prompt(
|
|
| 71 |
# Contents should be images first, then the text prompt
|
| 72 |
contents = images + [analysis_prompt]
|
| 73 |
response = GEMINI_CLIENT.models.generate_content(
|
| 74 |
-
model='
|
| 75 |
contents=contents
|
| 76 |
)
|
| 77 |
expanded_prompt = response.text.strip()
|
|
@@ -86,7 +86,7 @@ def get_generation_prompt(
|
|
| 86 |
if not OPENAI_CLIENT:
|
| 87 |
return f"OpenAI API Key missing or client failed to initialize. Fallback prompt: Fusion of provided visual elements inspired by the prompt: {prompt}."
|
| 88 |
try:
|
| 89 |
-
# Prepare contents for
|
| 90 |
contents = [
|
| 91 |
{"type": "text", "text": analysis_prompt}
|
| 92 |
]
|
|
@@ -104,18 +104,18 @@ def get_generation_prompt(
|
|
| 104 |
})
|
| 105 |
|
| 106 |
response = OPENAI_CLIENT.chat.completions.create(
|
| 107 |
-
model="gpt-
|
| 108 |
messages=[
|
| 109 |
{"role": "user", "content": contents}
|
| 110 |
],
|
| 111 |
max_tokens=500
|
| 112 |
)
|
| 113 |
expanded_prompt = response.choices[0].message.content.strip()
|
| 114 |
-
print(f"
|
| 115 |
return expanded_prompt
|
| 116 |
except Exception as e:
|
| 117 |
print(f"GPT API Error: {e}")
|
| 118 |
-
return f"Error using
|
| 119 |
|
| 120 |
# Fallback if model is unrecognized
|
| 121 |
return f"Creative synthesis of the visual elements provided, inspired by the prompt: {prompt}. Ensure photorealistic quality."
|
|
|
|
| 71 |
# Contents should be images first, then the text prompt
|
| 72 |
contents = images + [analysis_prompt]
|
| 73 |
response = GEMINI_CLIENT.models.generate_content(
|
| 74 |
+
model='gemini-2.0-flash',
|
| 75 |
contents=contents
|
| 76 |
)
|
| 77 |
expanded_prompt = response.text.strip()
|
|
|
|
| 86 |
if not OPENAI_CLIENT:
|
| 87 |
return f"OpenAI API Key missing or client failed to initialize. Fallback prompt: Fusion of provided visual elements inspired by the prompt: {prompt}."
|
| 88 |
try:
|
| 89 |
+
# Prepare contents for gpt-image-1-low with base64 encoded images
|
| 90 |
contents = [
|
| 91 |
{"type": "text", "text": analysis_prompt}
|
| 92 |
]
|
|
|
|
| 104 |
})
|
| 105 |
|
| 106 |
response = OPENAI_CLIENT.chat.completions.create(
|
| 107 |
+
model="gpt-image-1-low",
|
| 108 |
messages=[
|
| 109 |
{"role": "user", "content": contents}
|
| 110 |
],
|
| 111 |
max_tokens=500
|
| 112 |
)
|
| 113 |
expanded_prompt = response.choices[0].message.content.strip()
|
| 114 |
+
print(f"gpt-image-1-low Analysis Output: {expanded_prompt}")
|
| 115 |
return expanded_prompt
|
| 116 |
except Exception as e:
|
| 117 |
print(f"GPT API Error: {e}")
|
| 118 |
+
return f"Error using gpt-image-1-low for analysis. Fallback prompt: Creative fusion of the three elements provided, inspired by the theme: {prompt}."
|
| 119 |
|
| 120 |
# Fallback if model is unrecognized
|
| 121 |
return f"Creative synthesis of the visual elements provided, inspired by the prompt: {prompt}. Ensure photorealistic quality."
|