import streamlit as st
from llama_cpp import Llama
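# Requires: pip install streamlit llama-cpp-python huggingface-hub
# (Llama.from_pretrained uses huggingface-hub to download the GGUF file from the Hub)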
# Load the model
llm = Llama.from_pretrained(
    repo_id="DavidAU/Command-R-01-200xq-Ultra-NEO-V1-35B-IMATRIX-GGUF",
    filename="CommandR-35B-NEO-V1-D_AU-IQ3_XS-0200xq-imat13.gguf",
)
# Generate a story from the user's prompt via the model's chat completion API
def generate_story(user_input):
    response = llm.create_chat_completion(
        messages=[
            {
                "role": "user",
                "content": user_input
            }
        ]
    )
    return response['choices'][0]['message']['content']
# Streamlit UI: greet the user, collect a prompt, and display the generated story
def start_storytelling():
    st.write("Welcome to the Storytelling bot.")
    story_prompt = st.text_input("What would you like your story to be about?")
    if story_prompt:
        story_part = generate_story(story_prompt)
        st.write("Here's the beginning of your story:")
        st.write(story_part)

# Start the app
start_storytelling()
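# To try this out locally (assuming the script is saved as app.py):
#   streamlit run app.py
# Note: the first run downloads a multi-gigabyte GGUF checkpoint from the Hugging Face Hub.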