Loubna ben allal
committed on
Commit
•
9684d46
1
Parent(s):
b6ad8c5
update app
Browse files
app.py
CHANGED
@@ -37,20 +37,20 @@ tokenizer1 = load_tokenizer("lvwerra/codeparrot")
|
|
37 |
model1 = load_model("lvwerra/codeparrot")
|
38 |
tokenizer2 = load_tokenizer("facebook/incoder-1B")
|
39 |
model2 = load_model("facebook/incoder-1B")
|
40 |
-
|
41 |
-
|
42 |
pipelines = {}
|
43 |
-
for
|
44 |
-
if
|
45 |
-
pipelines[
|
46 |
-
elif
|
47 |
tokenizer = load_tokenizer("facebook/incoder-1B")
|
48 |
model = load_model("facebook/incoder-1B")
|
49 |
-
pipelines[
|
50 |
-
|
51 |
-
|
52 |
-
|
53 |
-
|
54 |
|
55 |
examples = load_examples()
|
56 |
example_names = [example["name"] for example in examples]
|
@@ -94,9 +94,7 @@ elif selected_task == "Code generation":
|
|
94 |
if st.button("Generate code!"):
|
95 |
with st.spinner("Generating code..."):
|
96 |
for model in selected_models:
|
97 |
-
|
98 |
-
|
99 |
-
|
100 |
-
|
101 |
-
st.markdown(f"{model}:")
|
102 |
-
st.code(generated_text)
|
|
|
37 |
model1 = load_model("lvwerra/codeparrot")
|
38 |
tokenizer2 = load_tokenizer("facebook/incoder-1B")
|
39 |
model2 = load_model("facebook/incoder-1B")
|
40 |
+
tokenizer3 = load_tokenizer("facebook/opt-1.3b")
|
41 |
+
model3 = load_model("facebook/opt-1.3b")
|
42 |
pipelines = {}
|
43 |
+
for element in models:
|
44 |
+
if element == "CodeParrot":
|
45 |
+
pipelines[element] = pipeline("text-generation", model=model1, tokenizer=tokenizer1)
|
46 |
+
elif element == "InCoder":
|
47 |
tokenizer = load_tokenizer("facebook/incoder-1B")
|
48 |
model = load_model("facebook/incoder-1B")
|
49 |
+
pipelines[element] = pipeline("text-generation", model=model2, tokenizer=tokenizer2)
|
50 |
+
else:
|
51 |
+
tokenizer = load_tokenizer("facebook/opt-1.3b")
|
52 |
+
model = load_model("facebook/opt-1.3b")
|
53 |
+
pipelines[element] = pipeline("text-generation", model=model3, tokenizer=tokenizer3)
|
54 |
|
55 |
examples = load_examples()
|
56 |
example_names = [example["name"] for example in examples]
|
|
|
# When the user clicks the button, run every selected model on the prompt
# and render each completion as a code block under the model's name.
if st.button("Generate code!"):
    with st.spinner("Generating code..."):
        for model in selected_models:
            # Look up the pre-built pipeline and sample one completion.
            outputs = pipelines[model](gen_prompt, **gen_kwargs)
            completion = outputs[0]['generated_text']
            st.markdown(f"{model}:")
            st.code(completion)
|
|
|