g3casey committed on
Commit
14904a7
1 Parent(s): bd62b5b

Changing to smaller model - getting out of memory error

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -9,10 +9,10 @@ os.environ["CUDA_VISIBLE_DEVICES"] = "0"
 
 def summarize(text):
     # Setting to use the bart-large-cnn model for summarization
-    summarizer = pipeline("summarization")
+    # summarizer = pipeline("summarization")
 
     # To use the t5-base model for summarization:
-    # summarizer = pipeline("summarization", model="t5-base", tokenizer="t5-base", framework="tf")
+    summarizer = pipeline("summarization", model="t5-base", tokenizer="t5-base", framework="tf")
 
     summary_text = summarizer(text, max_length=100, min_length=5, do_sample=False)[0]['summary_text']
     print(f'Length of initial text: {len(text)}')
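
For context, here is a minimal sketch of app.py as it would look after this commit. Only the summarize() body and the os.environ line from the hunk header come from the diff above; the import order, the return statement, and the demo call at the bottom are assumptions, since the hunk is truncated at the print line.

import os

# From the hunk header: pin the app to GPU 0.
os.environ["CUDA_VISIBLE_DEVICES"] = "0"

from transformers import pipeline


def summarize(text):
    # The default summarization pipeline pulls in the large BART checkpoint,
    # which was triggering the out-of-memory error this commit works around:
    # summarizer = pipeline("summarization")

    # Use the smaller t5-base model instead (TensorFlow weights, per the diff):
    summarizer = pipeline("summarization", model="t5-base", tokenizer="t5-base", framework="tf")

    summary_text = summarizer(text, max_length=100, min_length=5, do_sample=False)[0]['summary_text']
    print(f'Length of initial text: {len(text)}')
    return summary_text  # assumed: the diff hunk ends before the function's return


if __name__ == "__main__":
    # Illustrative call only; the actual Space presumably wires summarize() into a UI.
    print(summarize("Paste a long article here to get a short summary back."))

Swapping the pipeline to t5-base trades some summary quality for a much smaller memory footprint, which matches the commit message's goal of avoiding the out-of-memory error.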