# myalexa/app.py
import gradio as gr
import torch
from transformers import pipeline, AutoTokenizer
# Earlier variant using Phi-3 (kept for reference; uses the AutoTokenizer imported above):
# tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-128k-instruct")
# pipe = pipeline("text-generation", model="microsoft/Phi-3-mini-128k-instruct", trust_remote_code=True, tokenizer=tokenizer)
# gr.Interface.from_pipeline(
#     pipe,
#     title="TextGen",
#     description="Using pipeline with Phi3",
# ).launch(inbrowser=True)
# Build a text-generation pipeline with Llama 3 8B, loading weights in bfloat16
# and letting device_map="auto" place the model across available devices.
pipe = pipeline(
    "text-generation",
    model="meta-llama/Meta-Llama-3-8B",
    model_kwargs={"torch_dtype": torch.bfloat16},
    device_map="auto",
)
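
# Optional sanity check (a sketch, not part of the original app): query the pipeline
# directly before wiring up the UI. The prompt and max_new_tokens value are illustrative.
# print(pipe("Hello, my name is", max_new_tokens=32)[0]["generated_text"])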
# Wrap the pipeline in a simple Gradio interface and launch it.
gr.Interface.from_pipeline(
    pipe,
    title="TextGen",
    description="Using pipeline with Llama3",
).launch(inbrowser=True)