ChihChiu29 committed on
Commit
cd7f436
1 Parent(s): 8a76514

try again with large one

Files changed (2)
  1. main.py +4 -3
  2. tutorial.md +8 -0
main.py CHANGED
@@ -16,9 +16,10 @@ from transformers import T5Tokenizer, T5ForConditionalGeneration
  # model = AutoModelForSeq2SeqLM.from_pretrained("facebook/blenderbot-1B-distill")
  # tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-small")
  # model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-small")
- tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-base")
- model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-base")
-
+ # tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-base")
+ # model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-base")
+ tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-large")
+ model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-large")
 
  token_size_limit = 128
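For context, a minimal sketch of how the swapped-in google/flan-t5-large tokenizer/model pair could be exercised; the truncation to token_size_limit mirrors the limit defined in main.py, while the prompt text and generation settings here are assumptions, not part of the commit.

```python
# Sketch only: load the model chosen in this commit and generate a short reply.
from transformers import T5Tokenizer, T5ForConditionalGeneration

tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-large")
model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-large")

token_size_limit = 128  # same limit as in main.py

# Hypothetical prompt; main.py receives the message via the /reply endpoint instead.
inputs = tokenizer("hi", return_tensors="pt", truncation=True, max_length=token_size_limit)
output_ids = model.generate(inputs.input_ids, max_length=token_size_limit)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```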
tutorial.md CHANGED
@@ -28,3 +28,11 @@ Use the embedded address, for example:
  ```bash
  curl -X POST https://chihchiu29-mychatbot.hf.space/reply -H 'Content-Type: application/json' -d '{"msg": "hi"}'
  ```
+
+ ## Remove dangling images
+
+ From: https://github.com/fabric8io/docker-maven-plugin/issues/501
+
+ ```bash
+ docker rmi $(docker images -qa -f 'dangling=true')
+ ```