# Gradio text-generation demo backed by a Hugging Face transformers pipeline.
import gradio as gr
import torch
from transformers import pipeline, AutoTokenizer
# tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-128k-instruct")
# pipe = pipeline("text-generation", model="microsoft/Phi-3-mini-128k-instruct", trust_remote_code=True, tokenizer=tokenizer)
# gr.Interface.from_pipeline(pipe,
# title="TextGen",
# description="Using pipeline with Phi3",
# ).launch(inbrowser=True)
# Build a text-generation pipeline for Llama 3 8B.
# bfloat16 halves memory vs. float32; device_map="auto" lets accelerate
# place the model across available GPUs/CPU automatically.
pipe = pipeline(
    "text-generation",
    model="meta-llama/Meta-Llama-3-8B",
    model_kwargs={"torch_dtype": torch.bfloat16},
    device_map="auto",
)

# Wrap the pipeline in an auto-generated Gradio UI and open it in the browser.
gr.Interface.from_pipeline(
    pipe,
    title="TextGen",
    description="Using pipeline with Llama3",
).launch(inbrowser=True)