LiamKhoaLe committed on
Commit
eaec621
·
1 Parent(s): 8bafa0f

Upd agent mcp fallback logics

Browse files
Files changed (2) hide show
  1. agent.py +34 -6
  2. app.py +31 -10
agent.py CHANGED
@@ -202,18 +202,46 @@ async def call_tool(name: str, arguments: dict) -> Sequence[TextContent | ImageC
202
 
203
  # Generate content using Gemini API
204
  try:
 
 
 
 
 
 
 
 
 
205
  # Use asyncio.to_thread to make the blocking call async
206
- # The API accepts contents as a list
207
- response = await asyncio.to_thread(
208
- gemini_client.models.generate_content,
209
- model=model,
210
- contents=gemini_contents
211
- )
 
 
212
 
213
  # Extract text from response
214
  if response and hasattr(response, 'text') and response.text:
 
215
  return [TextContent(type="text", text=response.text)]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
216
  else:
 
217
  return [TextContent(type="text", text="Error: No response from Gemini")]
218
 
219
  except Exception as e:
 
202
 
203
  # Generate content using Gemini API
204
  try:
205
+ # Get the model instance
206
+ gemini_model = gemini_client.models.get(model)
207
+
208
+ # Prepare generation config
209
+ generation_config = {
210
+ "temperature": temperature,
211
+ "max_output_tokens": GEMINI_MAX_OUTPUT_TOKENS
212
+ }
213
+
214
  # Use asyncio.to_thread to make the blocking call async
215
+ # The API accepts contents as a list and config as a separate parameter
216
+ def generate_sync():
217
+ return gemini_model.generate_content(
218
+ contents=gemini_contents,
219
+ config=generation_config
220
+ )
221
+
222
+ response = await asyncio.to_thread(generate_sync)
223
 
224
  # Extract text from response
225
  if response and hasattr(response, 'text') and response.text:
226
 + logger.info(f"✅ Gemini generated content successfully ({len(response.text)} chars)")
227
  return [TextContent(type="text", text=response.text)]
228
+ elif response and hasattr(response, 'candidates') and response.candidates:
229
+ # Try to extract text from candidates
230
+ text_parts = []
231
+ for candidate in response.candidates:
232
+ if hasattr(candidate, 'content') and hasattr(candidate.content, 'parts'):
233
+ for part in candidate.content.parts:
234
+ if hasattr(part, 'text'):
235
+ text_parts.append(part.text)
236
+ if text_parts:
237
+ text = ''.join(text_parts)
238
 + logger.info(f"✅ Gemini generated content successfully ({len(text)} chars)")
239
+ return [TextContent(type="text", text=text)]
240
+ else:
241
+ logger.warning("Gemini returned response but no text found")
242
+ return [TextContent(type="text", text="Error: No text in Gemini response")]
243
  else:
244
+ logger.warning("Gemini returned empty response")
245
  return [TextContent(type="text", text="Error: No response from Gemini")]
246
 
247
  except Exception as e:
app.py CHANGED
@@ -280,11 +280,13 @@ async def get_mcp_session():
280
  # Wait for the server to fully initialize
281
  # The server needs time to start up and be ready
282
  # Increased wait time and retries for better reliability
283
- await asyncio.sleep(5.0)
 
284
 
285
  # Verify the session works by listing tools with retries
286
- max_init_retries = 10
287
  tools_listed = False
 
288
  for init_attempt in range(max_init_retries):
289
  try:
290
  tools = await session.list_tools()
@@ -294,18 +296,32 @@ async def get_mcp_session():
294
  break
295
  except Exception as e:
296
  if init_attempt < max_init_retries - 1:
297
- wait_time = 1.0 * (init_attempt + 1) # Progressive wait: 1s, 2s, 3s...
298
  logger.debug(f"Initialization attempt {init_attempt + 1}/{max_init_retries} failed, waiting {wait_time}s before retry...")
299
  await asyncio.sleep(wait_time)
300
  else:
301
  logger.error(f"❌ Could not list tools after {max_init_retries} attempts: {e}")
302
  # Don't continue - if we can't list tools, the session is not usable
303
- await session.__aexit__(None, None, None)
304
- await stdio_ctx.__aexit__(None, None, None)
 
 
 
 
 
 
305
  return None
306
 
307
  if not tools_listed:
308
  logger.error("MCP server failed to initialize - tools could not be listed")
 
 
 
 
 
 
 
 
309
  return None
310
 
311
  # Store both the session and stdio context to keep them alive
@@ -383,14 +399,17 @@ async def call_agent(user_prompt: str, system_prompt: str = None, files: list =
383
  arguments["temperature"] = temperature
384
 
385
  logger.info(f"🔧 Calling Gemini MCP tool '{generate_tool.name}' for: {user_prompt[:100]}...")
 
386
  result = await session.call_tool(generate_tool.name, arguments=arguments)
387
 
388
  # Parse result
389
  if hasattr(result, 'content') and result.content:
390
  for item in result.content:
391
  if hasattr(item, 'text'):
392
- return item.text.strip()
393
- logger.warning("Gemini MCP returned empty or invalid result")
 
 
394
  return ""
395
  except Exception as e:
396
  logger.error(f"Gemini MCP call error: {e}")
@@ -756,7 +775,7 @@ async def search_web_mcp_tool(query: str, max_results: int = 5) -> list:
756
  search_tool.name,
757
  arguments={"query": query, "max_results": max_results}
758
  )
759
-
760
  # Parse result
761
  web_content = []
762
  if hasattr(result, 'content') and result.content:
@@ -1081,6 +1100,8 @@ def autonomous_reasoning(query: str, history: list) -> dict:
1081
  return reasoning
1082
  except Exception as e:
1083
  logger.error(f"❌ Error in nested async reasoning: {e}")
 
 
1084
  else:
1085
  reasoning = loop.run_until_complete(autonomous_reasoning_gemini(query))
1086
  if reasoning and reasoning.get("query_type") != "general_info":
@@ -1594,14 +1615,14 @@ def stream_chat(
1594
  web_sources = []
1595
  web_urls = [] # Store URLs for citations
1596
  if final_use_web_search:
1597
- logger.info("🌐 Performing web search...")
1598
  web_results = search_web(message, max_results=5)
1599
  if web_results:
1600
  logger.info(f"📊 Found {len(web_results)} web search results, now summarizing with Gemini MCP...")
1601
  web_summary = summarize_web_content(web_results, message)
1602
  if web_summary and len(web_summary) > 50: # Check if we got a real summary
1603
  logger.info(f"✅ Gemini MCP summarization successful ({len(web_summary)} chars)")
1604
- web_context = f"\n\nAdditional Web Sources:\n{web_summary}"
1605
  else:
1606
  logger.warning("⚠️ Gemini MCP summarization failed or returned empty, using raw results")
1607
  # Fallback: use first result's content
 
280
  # Wait for the server to fully initialize
281
  # The server needs time to start up and be ready
282
  # Increased wait time and retries for better reliability
283
+ logger.info("⏳ Waiting for MCP server to initialize...")
284
+ await asyncio.sleep(2.0) # Initial wait
285
 
286
  # Verify the session works by listing tools with retries
287
+ max_init_retries = 15
288
  tools_listed = False
289
+ tools = None
290
  for init_attempt in range(max_init_retries):
291
  try:
292
  tools = await session.list_tools()
 
296
  break
297
  except Exception as e:
298
  if init_attempt < max_init_retries - 1:
299
+ wait_time = 0.5 * (init_attempt + 1) # Progressive wait: 0.5s, 1s, 1.5s...
300
  logger.debug(f"Initialization attempt {init_attempt + 1}/{max_init_retries} failed, waiting {wait_time}s before retry...")
301
  await asyncio.sleep(wait_time)
302
  else:
303
  logger.error(f"❌ Could not list tools after {max_init_retries} attempts: {e}")
304
  # Don't continue - if we can't list tools, the session is not usable
305
+ try:
306
+ await session.__aexit__(None, None, None)
307
+ except:
308
+ pass
309
+ try:
310
+ await stdio_ctx.__aexit__(None, None, None)
311
+ except:
312
+ pass
313
  return None
314
 
315
  if not tools_listed:
316
  logger.error("MCP server failed to initialize - tools could not be listed")
317
+ try:
318
+ await session.__aexit__(None, None, None)
319
+ except:
320
+ pass
321
+ try:
322
+ await stdio_ctx.__aexit__(None, None, None)
323
+ except:
324
+ pass
325
  return None
326
 
327
  # Store both the session and stdio context to keep them alive
 
399
  arguments["temperature"] = temperature
400
 
401
  logger.info(f"🔧 Calling Gemini MCP tool '{generate_tool.name}' for: {user_prompt[:100]}...")
402
 + logger.info(f"📋 MCP Arguments: model={model}, temperature={temperature}, files={len(files) if files else 0}")
403
  result = await session.call_tool(generate_tool.name, arguments=arguments)
404
 
405
  # Parse result
406
  if hasattr(result, 'content') and result.content:
407
  for item in result.content:
408
  if hasattr(item, 'text'):
409
+ response_text = item.text.strip()
410
 + logger.info(f"✅ Gemini MCP returned response ({len(response_text)} chars)")
411
+ return response_text
412
+ logger.warning("⚠️ Gemini MCP returned empty or invalid result")
413
  return ""
414
  except Exception as e:
415
  logger.error(f"Gemini MCP call error: {e}")
 
775
  search_tool.name,
776
  arguments={"query": query, "max_results": max_results}
777
  )
778
+
779
  # Parse result
780
  web_content = []
781
  if hasattr(result, 'content') and result.content:
 
1100
  return reasoning
1101
  except Exception as e:
1102
  logger.error(f"❌ Error in nested async reasoning: {e}")
1103
+ import traceback
1104
+ logger.debug(traceback.format_exc())
1105
  else:
1106
  reasoning = loop.run_until_complete(autonomous_reasoning_gemini(query))
1107
  if reasoning and reasoning.get("query_type") != "general_info":
 
1615
  web_sources = []
1616
  web_urls = [] # Store URLs for citations
1617
  if final_use_web_search:
1618
+ logger.info("🌐 Performing web search (MCP if available, else direct API)...")
1619
  web_results = search_web(message, max_results=5)
1620
  if web_results:
1621
  logger.info(f"📊 Found {len(web_results)} web search results, now summarizing with Gemini MCP...")
1622
  web_summary = summarize_web_content(web_results, message)
1623
  if web_summary and len(web_summary) > 50: # Check if we got a real summary
1624
  logger.info(f"✅ Gemini MCP summarization successful ({len(web_summary)} chars)")
1625
+ web_context = f"\n\nAdditional Web Sources (summarized with Gemini MCP):\n{web_summary}"
1626
  else:
1627
  logger.warning("⚠️ Gemini MCP summarization failed or returned empty, using raw results")
1628
  # Fallback: use first result's content