DrBenjamin committed
Commit: 9654b35
1 Parent(s): 9ba53ed
added files
- AI_Demo.py +1 -1
- pages/💁 Open_Assistant.py +4 -1
AI_Demo.py
CHANGED
@@ -34,7 +34,7 @@ def predict_step(image):
 
 
 #### Models
-st.header('🤗
+st.header('🤗 Hugging Face Diffusers')
 st.write('State-of-the-art diffusion models for image, text and audio generation in PyTorch.')
 devices = ["mps", "cpu", "cuda"]
 device = st.selectbox(label = 'Select device', options = devices, index = 1, disabled = True)
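The change above only completes the previously truncated header string. For context, a minimal, hedged sketch of how the selected device could feed a Diffusers pipeline inside the same Streamlit page is shown below; the model id "runwayml/stable-diffusion-v1-5", the prompt widget, and the generate button are assumptions for illustration and are not part of AI_Demo.py.

# Hedged sketch (not in the commit): wiring the selected device into a
# Diffusers pipeline. Model id and widgets are illustrative assumptions.
import streamlit as st
import torch
from diffusers import StableDiffusionPipeline

devices = ["mps", "cpu", "cuda"]
device = st.selectbox(label = 'Select device', options = devices, index = 1)

# Load the pipeline and move it to the chosen device; half precision is only
# used on CUDA, since CPU/MPS generally expect float32.
pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5",
    torch_dtype = torch.float16 if device == "cuda" else torch.float32,
).to(device)

prompt = st.text_input('Prompt', 'An astronaut riding a horse on Mars')
if st.button('Generate'):
    image = pipe(prompt).images[0]   # run the denoising loop
    st.image(image)                  # show the result in the Streamlit page

In a real Streamlit app the pipeline load would normally be wrapped in st.cache_resource so the model is not reloaded on every script rerun.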
pages/💁 Open_Assistant.py
CHANGED
@@ -1,6 +1,6 @@
 ##### `💁 Open_Assistant.py`
-##### Chat Llm Streaming
 ##### https://huggingface.co/spaces/olivierdehaene/chat-llm-streaming/blob/main/README.md
+##### https://huggingface.co/OpenAssistant/oasst-sft-1-pythia-12b
 ##### Please reach out to ben@benbox.org for any questions
 #### Loading needed Python libraries
 import streamlit as st
@@ -8,6 +8,9 @@ import os
 from text_generation import Client, InferenceAPIClient
 from text_generation import InferenceAPIClient
 
+
+st.header('💁 Open Assistant LLM')
+st.write('This is the first iteration English supervised-fine-tuning (SFT) model of the Open-Assistant project. It is based on a Pythia 12B that was fine-tuned on ~22k human demonstrations of assistant conversations collected through the https://open-assistant.io/ human feedback web app before March 7, 2023.')
 client = InferenceAPIClient("OpenAssistant/oasst-sft-1-pythia-12b")
 text = client.generate("<|prompter|>Why is the sky blue?<|endoftext|><|assistant|>").generated_text
 st.write(text)
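The added lines above keep the single blocking client.generate() call. As a hedged sketch (not part of this commit), the same InferenceAPIClient could instead stream tokens into the page, in the spirit of the referenced chat-llm-streaming Space; the max_new_tokens value and the st.empty() placeholder below are assumptions.

# Hedged sketch (not in the commit): stream tokens from the same Inference API
# endpoint into Streamlit instead of waiting for the full completion.
import streamlit as st
from text_generation import InferenceAPIClient

client = InferenceAPIClient("OpenAssistant/oasst-sft-1-pythia-12b")
prompt = "<|prompter|>Why is the sky blue?<|endoftext|><|assistant|>"

placeholder = st.empty()   # a slot that is rewritten as tokens arrive
answer = ""
for response in client.generate_stream(prompt, max_new_tokens = 256):
    if not response.token.special:    # skip control tokens such as <|endoftext|>
        answer += response.token.text
        placeholder.write(answer)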