DD0101 committed on
Commit
7f9c127
1 Parent(s): 02758ba

adding arguments to predict()

Browse files
Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -73,6 +73,7 @@ parser.add_argument("--model_dir", default="/home/user/app/Customized_IDSF/Joint
 parser.add_argument("--batch_size", default=32, type=int, help="Batch size for prediction")
 parser.add_argument("--no_cuda", action="store_true", help="Avoid using CUDA when available")
 
+globals()['var'] = "an object"
 pred_config = parser.parse_args()
 
 # load model and args
@@ -87,11 +88,10 @@ slot_label_lst = get_slot_labels(args)
 pad_token_label_id = args.ignore_index
 tokenizer = load_tokenizer(args)
 
+#################### END IDSF #######################
 
 
 
-#################### END IDSF #######################
-
 def remove_disfluency(example, prediction):
     characters = list(example)
 
@@ -118,7 +118,7 @@ def ner(text):
 
 
     #################### IDSF #######################
-    prediction = lm.predict([fluency_sentence.strip().split()])
+    prediction = lm.predict([fluency_sentence.strip().split()], pred_config, args, tokenizer, pad_token_label_id)
     words, slot_preds, intent_pred = prediction[0][0], prediction[1][0], prediction[2][0]
 
     slot_tokens = []
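
For context, the updated call site suggests that predict() now receives the parsed pred_config, the saved training args, the tokenizer, and the pad token label id alongside the pre-tokenized sentence. Below is a minimal sketch of that calling convention only; the parameter names and placeholder body are assumptions taken from the call in app.py, not the actual Customized_IDSF implementation.

# Hypothetical sketch: illustrates the calling convention and the
# (words, slot_preds, intent_preds) return shape that app.py unpacks.
# The real predict() in Customized_IDSF is not shown in this diff.
def predict(sentences, pred_config, args, tokenizer, pad_token_label_id):
    # sentences: list of pre-tokenized utterances, e.g. [["turn", "on", "the", "lights"]]
    words = [list(sent) for sent in sentences]               # tokens echoed back, one list per sentence
    slot_preds = [["O"] * len(sent) for sent in sentences]   # one slot label per token (placeholder)
    intent_preds = ["UNK" for _ in sentences]                # one intent label per sentence (placeholder)
    return words, slot_preds, intent_preds

# Matches the call added in this commit:
# prediction = lm.predict([fluency_sentence.strip().split()], pred_config, args, tokenizer, pad_token_label_id)
# words, slot_preds, intent_pred = prediction[0][0], prediction[1][0], prediction[2][0]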