Shreyas094 committed
Commit
6905d6d
1 Parent(s): 203d4cf

Update app.py

Files changed (1)
  1. app.py +16 -12
app.py CHANGED
@@ -37,10 +37,10 @@ MODELS = [
     "mistralai/Mixtral-8x7B-Instruct-v0.1",
     "@cf/meta/llama-3.1-8b-instruct",
     "mistralai/Mistral-Nemo-Instruct-2407",
-    "gpt-4o-mini",
-    "claude-3-haiku",
-    "llama-3.1-70b",
-    "mixtral-8x7b"
+    "duckduckgo/gpt-4o-mini",
+    "duckduckgo/claude-3-haiku",
+    "duckduckgo/llama-3.1-70b",
+    "duckduckgo/mixtral-8x7b"
 ]
 
 # Initialize LlamaParse
@@ -438,7 +438,7 @@ def respond(message, history, model, temperature, num_calls, use_web_search, sel
                 if not relevant_docs:
                     yield "No relevant information found in the selected documents. Please try selecting different documents or rephrasing your query."
                     return
-
+
                 context_str = "\n".join([doc.page_content for doc in relevant_docs])
                 logging.info(f"Context length: {len(context_str)}")
             else:
@@ -446,23 +446,27 @@ def respond(message, history, model, temperature, num_calls, use_web_search, sel
                 yield "No documents available. Please upload PDF documents to answer questions."
                 return
 
-            if model == "@cf/meta/llama-3.1-8b-instruct":
+            if model.startswith("duckduckgo/"):
+                # Use DuckDuckGo chat with context
+                for partial_response in get_response_from_duckduckgo(message, model, context_str, num_calls, temperature):
+                    yield partial_response
+            elif model == "@cf/meta/llama-3.1-8b-instruct":
                 # Use Cloudflare API
-                for response in get_response_from_cloudflare(prompt="", context=context_str, query=message, num_calls=num_calls, temperature=temperature, search_type="pdf"):
-                    yield response
+                for partial_response in get_response_from_cloudflare(prompt="", context=context_str, query=message, num_calls=num_calls, temperature=temperature, search_type="pdf"):
+                    yield partial_response
             else:
                 # Use Hugging Face API
-                for response in get_response_from_pdf(message, model, selected_docs, num_calls=num_calls, temperature=temperature):
-                    yield response
+                for partial_response in get_response_from_pdf(message, model, selected_docs, num_calls=num_calls, temperature=temperature):
+                    yield partial_response
     except Exception as e:
         logging.error(f"Error with {model}: {str(e)}")
         if "microsoft/Phi-3-mini-4k-instruct" in model:
             logging.info("Falling back to Mistral model due to Phi-3 error")
             fallback_model = "mistralai/Mistral-7B-Instruct-v0.3"
-            yield from respond(message, history, fallback_model, temperature, num_calls, selected_docs, use_web_search)
+            yield from respond(message, history, fallback_model, temperature, num_calls, selected_docs)
         else:
             yield f"An error occurred with the {model} model: {str(e)}. Please try again or select a different model."
-
+
 logging.basicConfig(level=logging.DEBUG)
 
 def get_response_from_cloudflare(prompt, context, query, num_calls=3, temperature=0.2, search_type="pdf"):
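
The new branch calls get_response_from_duckduckgo(message, model, context_str, num_calls, temperature), but that helper is not part of this diff. The following is a minimal sketch of what such a generator could look like, assuming the Space uses the duckduckgo_search package's DDGS.chat() client and strips the "duckduckgo/" prefix before passing the model name through; the prompt wording, retry handling, and error messages are illustrative, not the code from this commit.

# Hypothetical sketch only -- this helper is referenced by the diff but not shown in it.
# Assumes the duckduckgo_search package (pip install duckduckgo_search) and its DDGS.chat() client.
import logging

from duckduckgo_search import DDGS

def get_response_from_duckduckgo(message, model, context_str, num_calls=1, temperature=0.2):
    # MODELS entries look like "duckduckgo/gpt-4o-mini"; the backend expects just "gpt-4o-mini".
    backend_model = model.split("/", 1)[1]

    # DDGS.chat() takes a single prompt string, so the retrieved PDF context is folded into it.
    # temperature is accepted for signature compatibility with the other backends but is not
    # forwarded, since DDGS.chat() does not expose sampling controls.
    prompt = (
        f"Using the following context from the PDF documents:\n{context_str}\n\n"
        f"Answer the following user question: '{message}'"
    )

    for call in range(num_calls):
        try:
            # DDGS.chat() returns the whole reply as one string (no token streaming),
            # so each call is yielded as a single chunk.
            yield DDGS().chat(prompt, model=backend_model)
        except Exception as e:
            logging.error(f"DuckDuckGo chat call {call + 1}/{num_calls} failed: {e}")
            yield f"An error occurred with the DuckDuckGo backend: {e}. Please try again."

Keeping the "duckduckgo/" prefix inside MODELS lets respond route on model.startswith("duckduckgo/") without a separate lookup table; the Phi-3 fallback path is unaffected, since its check only matches Hugging Face model names.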