Teja-Gollapudi committed
Commit
7e830cb
1 Parent(s): ee4a95b

Update README.md

Files changed (1):
  1. README.md +4 -12
README.md CHANGED
@@ -60,21 +60,13 @@ Using Alpaca prompt template might generate better outputs for certain prompts a
  from transformers import pipeline
  model = pipeline(model="vmware/flan-t5-large-alpaca",device_map = 'auto')

- prompt_template = """Below is an instruction that describes a task. Write a response that appropriately completes the request.
-
- ### Instruction:
- {prompt}
-
- ### Response:
- """
-
- prompt = "Type your prompt herer"
-
-
- output = model(prompt_template.format(prompt), max_length=256, do_sample=True)
+ prompt_template = "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\n### Instruction:\n{instruction}\n\n### Response:"
+
+ prompt = "YOUR PROMPT HERE"
+
+ output = model(prompt_template.format(instruction= prompt), max_length=256, do_sample=True)
+
+ print(output)
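For reference, the README snippet as it reads after this commit can be assembled into a self-contained example along these lines. This is a minimal sketch, not part of the commit itself: the keyword-argument spacing is tidied, comments are added, and the placeholder instruction is illustrative only.

```python
from transformers import pipeline

# Load the instruction-tuned model; device_map='auto' lets accelerate place it on available devices.
model = pipeline(model="vmware/flan-t5-large-alpaca", device_map='auto')

# Alpaca-style prompt template introduced by this commit, with an {instruction} placeholder.
prompt_template = "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\n### Instruction:\n{instruction}\n\n### Response:"

prompt = "YOUR PROMPT HERE"  # replace with your own instruction

# Fill the template and generate a response.
output = model(prompt_template.format(instruction=prompt), max_length=256, do_sample=True)
print(output)
```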