raoufjat committed
Commit
1d0505c
1 Parent(s): b2d11f9

Update app.py

Files changed (1)
  app.py +6 -51
app.py CHANGED
@@ -1,53 +1,8 @@
- from flask import Flask, request, jsonify
  from transformers import pipeline

- # Initialize Flask app
- app = Flask(__name__)
-
- # Load the Arabic-QwQ model (using Hugging Face pipeline for simplicity)
- model_pipeline = pipeline(
-     "text-generation",
-     model="Omartificial-Intelligence-Space/Arabic-QWQ-32B-Preview"
- )
-
- @app.route('/')
- def index():
-     """Root endpoint, can serve an HTML form if desired."""
-     return """
-     <h1>Arabic-QwQ Model Demo</h1>
-     <form action="/predict" method="post">
-         <label>Enter your prompt:</label><br>
-         <input type="text" name="prompt" required><br><br>
-         <input type="submit" value="Submit">
-     </form>
-     """
-
- @app.route('/predict', methods=["POST"])
- def predict():
-     """
-     Route for processing user input with the model.
-     - Accepts user input via POST request.
-     - Runs inference with Arabic-QwQ model.
-     - Returns response.
-     """
-     try:
-         # Extract user input
-         user_input = request.form.get("prompt")
-
-         # Perform model inference
-         output = model_pipeline(user_input, max_length=50, num_return_sequences=1)
-
-         # Return inference results
-         return jsonify({
-             "input": user_input,
-             "response": output[0]['generated_text'] if output else "No response generated"
-         })
-
-     except Exception as e:
-         # Handle errors gracefully
-         return jsonify({"error": str(e)}), 500
-
-
- # Run the app
- if __name__ == "__main__":
-     app.run(debug=True)

+ # Use a pipeline as a high-level helper
  from transformers import pipeline

+ messages = [
+     {"role": "user", "content": "Who are you?"},
+ ]
+ pipe = pipeline("text-generation", model="JackCloudman/Phi-4-jackterated", trust_remote_code=True)
+ pipe(messages)
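
For reference, a minimal sketch of exercising the new app.py logic and printing the model's reply. This assumes a recent transformers release that accepts chat-style message lists in the text-generation pipeline and enough memory to load the model; max_new_tokens is an illustrative value, not part of the commit.

# Sketch only: chat-style pipeline input and output format assume a recent
# Hugging Face transformers release; max_new_tokens is illustrative.
from transformers import pipeline

pipe = pipeline(
    "text-generation",
    model="JackCloudman/Phi-4-jackterated",
    trust_remote_code=True,
)

messages = [{"role": "user", "content": "Who are you?"}]

# With chat messages as input, the pipeline returns the conversation with the
# generated assistant turn appended as the last message.
result = pipe(messages, max_new_tokens=128)
print(result[0]["generated_text"][-1]["content"])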