 from app.config.config import settings
 from app.utils.uploader import ImageUploaderFactory
 from app.log.logger import get_openai_logger
+from app.utils.helpers import is_image_upload_configured
 
 logger = get_openai_logger()
 
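
The added import pulls in `is_image_upload_configured` from `app.utils.helpers`, which is not shown in this excerpt. A minimal sketch of what such a check might look like, assuming it keys off the same `settings.UPLOAD_PROVIDER` value that `ImageUploaderFactory` consumes below; the credential field name and the body are assumptions, not taken from the PR:

```python
# Hypothetical sketch only -- the real helper lives in app.utils.helpers
# and may check more providers and different credential fields.
from app.config.config import settings

def is_image_upload_configured() -> bool:
    # Assumed behavior: an uploader counts as configured when a provider is
    # selected and, for that provider, credentials are present.
    provider = getattr(settings, "UPLOAD_PROVIDER", None)
    if not provider:
        return False
    if provider == "smms":
        # Field name is illustrative, not taken from the PR.
        return bool(getattr(settings, "SMMS_SECRET_TOKEN", None))
    return True
```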
@@ -251,7 +252,22 @@ def _extract_result(
     return text, reasoning_content, tool_calls, thought
 
 
+def _has_inline_image_part(response: Dict[str, Any]) -> bool:
+    try:
+        for c in response.get("candidates", []):
+            for p in c.get("content", {}).get("parts", []):
+                if isinstance(p, dict) and ("inlineData" in p):
+                    return True
+    except Exception:
+        return False
+    return False
+
+
 def _extract_image_data(part: dict) -> str:
+    # Return empty string if no uploader is configured
+    if not is_image_upload_configured():
+        return ""
+
     image_uploader = None
     if settings.UPLOAD_PROVIDER == "smms":
         image_uploader = ImageUploaderFactory.create(
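
The new `_has_inline_image_part` helper from this hunk can be exercised on its own; the sample Gemini-style response below is illustrative, not taken from the PR:

```python
from typing import Any, Dict

def _has_inline_image_part(response: Dict[str, Any]) -> bool:
    # Same logic as the helper added in the hunk above: scan every
    # candidate's parts for an "inlineData" entry (Gemini's inline
    # base64 image payload).
    try:
        for c in response.get("candidates", []):
            for p in c.get("content", {}).get("parts", []):
                if isinstance(p, dict) and ("inlineData" in p):
                    return True
    except Exception:
        return False
    return False

# Illustrative Gemini-style response carrying one inline image part.
sample = {
    "candidates": [
        {"content": {"parts": [
            {"text": "Here is the chart."},
            {"inlineData": {"mimeType": "image/png", "data": "<base64>"}},
        ]}}
    ]
}

print(_has_inline_image_part(sample))              # True
print(_has_inline_image_part({"candidates": []}))  # False
```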
@@ -322,6 +338,10 @@ def _extract_tool_calls(
 def _handle_gemini_stream_response(
     response: Dict[str, Any], model: str, stream: bool
 ) -> Dict[str, Any]:
+    # Early return raw Gemini response if no uploader configured and contains inline images
+    if not is_image_upload_configured() and _has_inline_image_part(response):
+        return response
+
     text, reasoning_content, tool_calls, thought = _extract_result(
         response, model, stream=stream, gemini_format=True
     )
@@ -339,6 +359,10 @@ def _handle_gemini_stream_response(
 def _handle_gemini_normal_response(
     response: Dict[str, Any], model: str, stream: bool
 ) -> Dict[str, Any]:
+    # Early return raw Gemini response if no uploader configured and contains inline images
+    if not is_image_upload_configured() and _has_inline_image_part(response):
+        return response
+
     text, reasoning_content, tool_calls, thought = _extract_result(
         response, model, stream=stream, gemini_format=True
     )
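
Taken together, the two handler changes mean that when no image host is configured and the Gemini response carries inline image data, the handlers short-circuit and hand back the raw Gemini payload, rather than continuing into a conversion where `_extract_image_data` would return an empty string and drop the image. A small illustration of that guard, with the project helpers replaced by stand-ins (illustrative only, not from the PR):

```python
from typing import Any, Dict

# Stand-ins for the real project helpers, illustrative only: simulate a
# deployment where no image uploader is configured.
def is_image_upload_configured() -> bool:
    return False

def _has_inline_image_part(response: Dict[str, Any]) -> bool:
    # Compact equivalent of the detection logic added in this diff.
    return any(
        isinstance(p, dict) and "inlineData" in p
        for c in response.get("candidates", [])
        for p in c.get("content", {}).get("parts", [])
    )

def handle_gemini_response(response: Dict[str, Any]) -> Dict[str, Any]:
    # Mirrors the guard added to _handle_gemini_stream_response and
    # _handle_gemini_normal_response: pass the raw Gemini response through.
    if not is_image_upload_configured() and _has_inline_image_part(response):
        return response
    # ... otherwise the handlers continue with _extract_result(...) and the
    # OpenAI-format conversion exactly as before.
    return {"converted": True}

raw = {"candidates": [{"content": {"parts": [
    {"inlineData": {"mimeType": "image/png", "data": "<b64>"}}
]}}]}
print(handle_gemini_response(raw) is raw)  # True: returned unchanged
```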