sounar committed on
Commit
882bd69
1 Parent(s): ebbafa5

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +45 -0
app.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

# Hugging Face Hub identifier of the medical chat model.
model_name = "ContactDoctor/Bio-Medical-MultiModal-Llama-3-8B-V1"

# Load the tokenizer and model once at import time so every request reuses
# the same in-memory instances.
# NOTE(review): the model load requires trust_remote_code=True; this custom
# multimodal repo may need it on the tokenizer as well — confirm.
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True)
10
# Define the function that turns user input into a model response.
def generate_response(input_text: str) -> str:
    """Generate a model reply for the given user text.

    Args:
        input_text: Free-form text (symptoms or a query) from the Gradio textbox.

    Returns:
        The decoded model output, or an ``"Error: ..."`` string if anything
        in tokenization/generation fails.
    """
    try:
        # Tokenize the input text.
        inputs = tokenizer(input_text, return_tensors="pt")

        # Inference only: disable autograd to save memory and time.
        with torch.no_grad():
            outputs = model.generate(
                inputs["input_ids"],
                # Forward the attention mask so generate() doesn't have to
                # guess which tokens are padding.
                attention_mask=inputs.get("attention_mask"),
                # Bound the *generated* length; max_length would count the
                # prompt tokens too and could starve long prompts of output.
                max_new_tokens=256,
                num_return_sequences=1,  # generate a single response
                # do_sample=True is required — temperature/top_p/top_k are
                # silently ignored under the default greedy decoding.
                do_sample=True,
                temperature=0.7,  # creativity vs. determinism
                top_p=0.9,        # nucleus sampling
                top_k=50,         # top-k sampling
            )

        # Decode and return the generated text.
        return tokenizer.decode(outputs[0], skip_special_tokens=True)

    except Exception as e:
        # Surface the failure to the UI instead of crashing the app.
        return f"Error: {str(e)}"
33
# Create the Gradio interface.
# NOTE: `enable_api=True` is not an accepted gr.Interface argument in
# Gradio 3+ (unknown kwargs raise TypeError at startup). The HTTP/client
# API is enabled by default, so dropping the argument keeps the intended
# "callable from external clients" behavior.
iface = gr.Interface(
    fn=generate_response,
    inputs="text",
    outputs="text",
    title="ContactDoctor Medical Assistant",
    description="Provide input symptoms or queries and get AI-powered medical advice.",
)

# Launch the Gradio app only when executed as a script.
if __name__ == "__main__":
    iface.launch()