Update handler.py
handler.py CHANGED (+4 -2)
@@ -52,8 +52,8 @@ class EndpointHandler():
         result_array = []
         if (indices_array):
             for result_indices in indices_array:
-                text = self.tokenizer.decode(input_ids[result_indices[0]:result_indices[-1]
-                indices = [offset_mapping[result_indices[0]][0], offset_mapping[result_indices[-1]
+                text = self.tokenizer.decode(input_ids[result_indices[0]:result_indices[-1]])
+                indices = [offset_mapping[result_indices[0]][0], offset_mapping[result_indices[-1]][1]]
                 if text != "" and not text.isspace():
                     while True:
                         if text[0] == " ":
@@ -74,6 +74,7 @@ class EndpointHandler():
             if token_logits[2] > label_tolerance:
                 result_indices.append(index)
             else:
+                result_indices[1] += 1
                 labeled_result_indices.append(result_indices)
                 result_indices = []

@@ -95,6 +96,7 @@ class EndpointHandler():
                 result_indices.append(index)
             else:
                 # Check if backup result overlaps at all with any labeled result. If it does just ignore it
+                result_indices[1] += 1
                 overlaps_labeled_result = False
                 if (len(labeled_result_indices) > 0):
                     for index in result_indices: