import streamlit as st
import torch
from transformers import pipeline

with st.sidebar:
    st.image("https://www.onepointltd.com/wp-content/uploads/2020/03/inno2.png")
    st.title("Campus Comrade Assistant")
    choice = st.radio("Navigation", ["Home/about", "Meal Suggester", "Accommodation available", "Assignment assist", "Events"])
    st.info("This application helps you accomplish both major and minor tasks efficiently 😉.")

if choice == "Home/about":
    st.title("Welcome to Campus Comrade Assistant")
    st.write("In this app you can book an event, get balanced-diet meal suggestions, find accommodation around you, and so much more. Welcome to your one-stop comrade assist app 🔥")

if choice == "Meal Suggester":
    st.title("Time to Eat")
    st.write("Feeling hungry? Let's get you a nice meal to silence that grumbling. In a healthy way, of course.")

    # Load the Zephyr-7B chat model as a text-generation pipeline.
    pipe = pipeline("text-generation", model="HuggingFaceH4/zephyr-7b-beta", torch_dtype=torch.bfloat16, device_map="auto")

    # Build a prompt with the model's chat template and generate a response.
    messages = [
        {
            "role": "system",
            "content": "You are a friendly chatbot who always responds in the style of a pirate",
        },
        {"role": "user", "content": "How many helicopters can a human eat in one sitting?"},
    ]
    prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    outputs = pipe(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
    st.write(outputs[0]["generated_text"])
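
    # A minimal sketch (an assumption, not part of the original flow): wrapping the
    # pipeline load in st.cache_resource avoids reloading the 7B model on every
    # Streamlit rerun. The helper name load_meal_model is illustrative only.
    @st.cache_resource
    def load_meal_model():
        return pipeline(
            "text-generation",
            model="HuggingFaceH4/zephyr-7b-beta",
            torch_dtype=torch.bfloat16,
            device_map="auto",
        )
    # Usage: replace the direct pipeline() call above with pipe = load_meal_model().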
if choice == "Accommodation available":
    st.title("Ready to Move In?")
    st.write("Find the best-value rooms around you. Live in a cozy and quiet environment, or on a lively, bustling street. You decide.")

if choice == "Assignment assist":
    st.title("Let's complete that assignment")

if choice == "Events":
    st.title("Collaborate and Jazz. Events around You")