Upload 2 files
- app.py +51 -0
- requirements.txt +6 -0
app.py
ADDED
@@ -0,0 +1,51 @@
import gradio as gr
from transformers import pipeline
from flask import Flask, request, jsonify
import os
from huggingface_hub import login

# Retrieve the Hugging Face token from environment variables
HF_TOKEN = os.getenv("HUGGINGFACE_TOKEN")

if HF_TOKEN is None:
    raise ValueError("Hugging Face token is not set in environment variables")

# Authenticate with Hugging Face
try:
    login(token=HF_TOKEN, add_to_git_credential=True)
except ValueError as e:
    print(f"Error during login: {e}")
    raise

# Load your model and tokenizer
model_id = "rish13/llm_for_advanced_materials"  # Replace with your model repo ID
model = pipeline('text-generation', model=model_id)

# Define Gradio interface
def generate_text(prompt):
    return model(prompt)[0]['generated_text']

gradio_interface = gr.Interface(fn=generate_text, inputs="text", outputs="text")

# Initialize Flask app
app = Flask(__name__)

@app.route('/search', methods=['POST'])
def predict_endpoint():
    data = request.json
    prompt = data.get('prompt', '')
    generated_text = generate_text(prompt)
    return jsonify({"result": generated_text})

@app.route('/')
def home():
    return "Welcome to the text generation API. Use the /search endpoint to generate text."

if __name__ == "__main__":
    # Run the Gradio interface in a separate thread
    from threading import Thread
    gradio_thread = Thread(target=lambda: gradio_interface.launch(share=False, inbrowser=True))
    gradio_thread.start()

    # Run the Flask app
    app.run(host='0.0.0.0', port=5000)
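Once the app is running, the Flask /search endpoint can be exercised with a small client script. The sketch below is illustrative, assuming the server is reachable at http://localhost:5000 and that the requests library is installed (it is not listed in requirements.txt); the prompt text is only an example.

import requests

# Hypothetical client for the /search endpoint defined in app.py.
# Assumes the Flask app is listening on localhost:5000.
response = requests.post(
    "http://localhost:5000/search",
    json={"prompt": "Graphene is a promising material because"},
)
response.raise_for_status()
print(response.json()["result"])  # generated text returned by the pipeline

Note that running Gradio in a background thread while Flask serves on port 5000 keeps both interfaces alive in one process, but a hosted Space typically exposes only one port to the outside, so the Flask API may only be reachable when the app is run locally.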
requirements.txt
ADDED
@@ -0,0 +1,6 @@
Flask
gradio
transformers
huggingface_hub
torch
python-dotenv