Spaces: Sleeping

romnatall committed
Commit 26290c2 • 1 Parent(s): 6401a38
deploy

Browse files:
- app.py +48 -0
- images/Lera.png +0 -0
- images/olya.jpg +0 -0
- images/roma.jpg +0 -0
- pages/0film_reviev.py +30 -0
- pages/1toxic_messages.py +26 -0
- pages/2GPT.py +0 -0
- pages/anti_toxic/__pycache__/anti_toxic.cpython-312.pyc +0 -0
- pages/anti_toxic/anti_toxic.py +23 -0
- pages/film_review/model/__pycache__/model_logreg.cpython-312.pyc +0 -0
- pages/film_review/model/__pycache__/model_lstm.cpython-312.pyc +0 -0
- pages/film_review/model/model_logreg.joblib +3 -0
- pages/film_review/model/model_logreg.py +16 -0
- pages/film_review/model/model_logreg_vectorizer.pkl +3 -0
- pages/film_review/model/model_lstm.pt +3 -0
- pages/film_review/model/model_lstm.py +105 -0
- pages/film_review/notebook.ipynb +708 -0
- requirements.txt +5 -0
app.py
ADDED
@@ -0,0 +1,48 @@
+import streamlit as st
+from PIL import Image
+st.title("NLP project")
+
+description_show_options = ['main','film_review','toxic_messages','GPT','над проектом работали']
+description_show = st.sidebar.radio("Description", description_show_options)
+
+if description_show == 'над проектом работали':
+
+    st.title(" над проектом работали")
+    col1, col2, col3 = st.columns(3)
+    with col1:
+
+        romaimage = Image.open("images/roma.jpg")
+        st.image(romaimage, caption="Рома | cosplayNet enjoyer | DevOps", use_column_width=True)
+    with col2:
+        leraimage = Image.open("images/Lera.png")
+        st.image(leraimage, caption="Лера | UNet bender | Data Scientist", use_column_width=True)
+    with col3:
+        olyaimage = Image.open("images/olya.jpg")
+        st.image(olyaimage, caption="Бауржан | streamlit master | Frontender", use_column_width=True)
+elif description_show == 'GPT':
+    st.title("GPT")
+
+elif description_show == 'main':
+    st.title("main")
+
+elif description_show == 'film_review':
+    st.title("film_review")
+
+
+    # Weighted F1-score: 0.7069352925929284
+    # Classification Report:
+    #               precision    recall  f1-score   support
+
+    #          Bad       0.67      0.81      0.74       960
+    #      Neutral       0.65      0.50      0.56       922
+    #         Good       0.82      0.82      0.82       896
+
+    #     accuracy                           0.71      2778
+    #    macro avg       0.71      0.71      0.71      2778
+    # weighted avg       0.71      0.71      0.71      2778
+
+elif description_show == 'toxic_messages':
+    st.title("toxic_messages")
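The weighted F1 and per-class numbers hard-coded in the comments above follow the format of scikit-learn's classification_report. A minimal sketch of how such a report is typically produced; y_test and y_pred below are illustrative placeholders, not values from this commit, and only the class order (Bad=0, Neutral=1, Good=2) is taken from the notebook's label encoding:

from sklearn.metrics import classification_report, f1_score

# Placeholder labels/predictions for the three review classes (0=Bad, 1=Neutral, 2=Good).
y_test = [0, 1, 2, 2, 0, 1]
y_pred = [0, 1, 1, 2, 0, 2]

print("Weighted F1-score:", f1_score(y_test, y_pred, average="weighted"))
print(classification_report(y_test, y_pred, target_names=["Bad", "Neutral", "Good"]))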
images/Lera.png
ADDED
images/olya.jpg
ADDED
images/roma.jpg
ADDED
pages/0film_reviev.py
ADDED
@@ -0,0 +1,30 @@
+
+import re
+import streamlit as st
+import torch
+st.title("film_review")
+
+input_text = st.text_area("Enter your text")
+from pages.film_review.model.model_lstm import *
+from pages.film_review.model.model_logreg import *
+
+@st.cache_resource
+def get_model():
+    return torch.load("pages/film_review/model/model_lstm.pt", map_location=torch.device('cpu'))
+model = get_model()
+model.eval()
+dec = {0:'отрицательный',1:'нейтральный',2:'положительный'}
+
+if input_text:
+    with torch.no_grad():
+        ans = torch.nn.functional.softmax(model(input_text), dim=1)
+        idx = torch.argmax(ans, dim=1).item()
+        st.write(f'LSTM - отзыв: {dec[idx]}, уверенность: {round(ans[0][idx].item(), 2)}')
+
+    st.write(f'Logreg - отзыв: {dec[predict_tfidf(input_text)[0]]}')
+
+
+
+
+else:
+    st.write("No text entered")
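The page loads the pickled Lightning module with torch.load and calls it directly on a raw string, since forward() in model_lstm.py tokenizes strings itself; dec maps 0 to 'отрицательный' (negative), 1 to 'нейтральный' (neutral), 2 to 'положительный' (positive). A minimal sketch of the same inference outside Streamlit, assuming the requirements are installed and the rubert-tiny2 checkpoint used inside model_lstm.py can be downloaded at import time:

import torch
# Importing the module first mirrors the page's wildcard import, so that torch.load
# can resolve the MyModel class when unpickling the checkpoint.
import pages.film_review.model.model_lstm  # noqa: F401

model = torch.load("pages/film_review/model/model_lstm.pt", map_location="cpu")
model.eval()

dec = {0: 'отрицательный', 1: 'нейтральный', 2: 'положительный'}
with torch.no_grad():
    probs = torch.nn.functional.softmax(model("Отличный фильм, всем советую"), dim=1)
    idx = probs.argmax(dim=1).item()
print(dec[idx], round(probs[0][idx].item(), 2))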
pages/1toxic_messages.py
ADDED
@@ -0,0 +1,26 @@
+
+import re
+import streamlit as st
+import torch
+from pages.anti_toxic.anti_toxic import *
+
+st.title("toxic filtrer")
+
+input_text = st.text_area("Enter your text")
+
+
+dec = {0:'нормальный',1:'токсик'}
+
+if input_text:
+    with torch.no_grad():
+        ans = predict(input_text).tolist()
+
+    if ans[1] > 0.5:
+        st.write(f'{dec[1]}, уверенность {round(ans[1], 2)}')
+    else:
+        st.write(f'{dec[0]}, уверенность {round(ans[0], 2)}')
+
+
+
+else:
+    st.write("No text entered")
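predict() comes from pages/anti_toxic/anti_toxic.py (next file) and returns CatBoost class probabilities, so ans is [P(normal), P(toxic)] and the page thresholds the toxic probability at 0.5; dec maps 0 to 'нормальный' (normal) and 1 to 'токсик' (toxic). The same decision rule outside Streamlit, as a small sketch assuming the model file referenced in anti_toxic.py is present:

from pages.anti_toxic.anti_toxic import predict

probs = predict("пример комментария").tolist()  # [P(normal), P(toxic)]
label = "токсик" if probs[1] > 0.5 else "нормальный"
print(label, round(max(probs), 2))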
pages/2GPT.py
ADDED
File without changes
pages/anti_toxic/__pycache__/anti_toxic.cpython-312.pyc
ADDED
Binary file (1.74 kB)
pages/anti_toxic/anti_toxic.py
ADDED
@@ -0,0 +1,23 @@
+import torch
+from transformers import AutoTokenizer, AutoModelForSequenceClassification
+from catboost import CatBoostClassifier
+import torch.nn as nn
+import streamlit as st
+
+@st.cache_resource
+def load_model():
+    catboost_model = CatBoostClassifier(random_seed=42, eval_metric='Accuracy')
+    catboost_model.load_model("pages/anti_toxic/dont_be_toxic.cbm")
+    model_checkpoint = 'cointegrated/rubert-tiny-toxicity'
+    tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
+    model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint)
+    model.classifier = nn.Dropout(0)
+    model.dropout = nn.Dropout(0)
+    return catboost_model, tokenizer, model
+
+catboost_model, tokenizer, model = load_model()
+def predict(text):
+    t = tokenizer(text, return_tensors='pt', truncation=True, padding=True)
+    with torch.no_grad():
+        t = model(**t)[0].tolist()[0]
+    return catboost_model.predict_proba(t)
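Setting model.classifier and model.dropout to nn.Dropout(0) turns the rubert-tiny-toxicity classifier into a feature extractor: with an identity head, model(**t)[0] returns the pooled hidden vector instead of toxicity logits, and that vector is what the CatBoost model consumes. A small sketch of just the feature-extraction step, using the checkpoint named above (the exact hidden size printed depends on the checkpoint):

import torch
import torch.nn as nn
from transformers import AutoTokenizer, AutoModelForSequenceClassification

checkpoint = "cointegrated/rubert-tiny-toxicity"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(checkpoint)
model.classifier = nn.Dropout(0)  # identity head: the output becomes the pooled features
model.dropout = nn.Dropout(0)

with torch.no_grad():
    features = model(**tokenizer("пример текста", return_tensors="pt"))[0]
print(features.shape)  # (1, hidden_size) — the vector passed to CatBoost's predict_proba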
pages/film_review/model/__pycache__/model_logreg.cpython-312.pyc
ADDED
Binary file (949 Bytes)
pages/film_review/model/__pycache__/model_lstm.cpython-312.pyc
ADDED
Binary file (6.91 kB)
pages/film_review/model/model_logreg.joblib
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6e36ba8ccd4fd99dd6d91d6e22872fb714b7c40e152ad0ea2ab02e240637400f
+size 4391461
pages/film_review/model/model_logreg.py
ADDED
@@ -0,0 +1,16 @@
+from joblib import load
+from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer
+import pickle
+
+
+with open('pages/film_review/model/model_logreg_vectorizer.pkl', 'rb') as f:
+    vectorizer = pickle.load(f)
+
+
+# Load the model
+classifier = load('pages/film_review/model/model_logreg.joblib')
+
+def predict_tfidf(text):
+    text_review_vectorized = vectorizer.transform([text])
+    prediction = classifier.predict(text_review_vectorized)
+    return prediction
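predict_tfidf returns an array of integer class labels, which is why 0film_reviev.py indexes the result with [0] before looking it up in dec. A hedged sketch of how a vectorizer/classifier pair like this is typically fitted and serialized; the training texts, labels, and LogisticRegression settings below are placeholders, and only the label encoding (Bad=0, Neutral=1, Good=2) is taken from the notebook:

import pickle
from joblib import dump
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LogisticRegression

texts = ["отличный фильм", "скучно и затянуто", "обычное кино"]  # placeholder reviews
labels = [2, 0, 1]                                               # Good=2, Bad=0, Neutral=1

vectorizer = TfidfVectorizer()
X = vectorizer.fit_transform(texts)
classifier = LogisticRegression(max_iter=1000).fit(X, labels)

with open("model_logreg_vectorizer.pkl", "wb") as f:
    pickle.dump(vectorizer, f)
dump(classifier, "model_logreg.joblib")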
pages/film_review/model/model_logreg_vectorizer.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7fc763c85441e38ede135901e446e05332a807f8bc5264d15d18646746f5c19d
+size 7548801
pages/film_review/model/model_lstm.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5ca41a271e53df95eed8996bf8ed9ebe3be4df84726d9ce55319b7b7159de630
+size 14679450
pages/film_review/model/model_lstm.py
ADDED
@@ -0,0 +1,105 @@
+ATTENTION_SIZE = 10
+HIDDEN_SIZE = 300
+INPUT_SIZE = 312
+import torch
+from transformers import AutoTokenizer, AutoModel
+import torch.nn as nn
+
+class RomanAttention(nn.Module):
+    def __init__(self, hidden_size: int = HIDDEN_SIZE) -> None:
+        super().__init__()
+        self.clf = nn.Sequential(
+            nn.Linear(HIDDEN_SIZE, 512),
+            nn.ReLU(),
+            nn.Linear(512, 1),
+        )
+
+    def forward(self, hidden, final_hidden):
+        final_hidden = final_hidden.squeeze(0).unsqueeze(1)
+
+        cat = torch.concat((hidden, final_hidden), dim=1)
+        clf = self.clf(cat)
+        vals = torch.argsort(clf, descending=False, dim=1)
+        index = vals[:, :ATTENTION_SIZE].squeeze(2)
+        index1 = vals[:, ATTENTION_SIZE:].squeeze(2)
+        selected_values = cat[torch.arange(index.size(0)).unsqueeze(1), index]
+        select_clf = clf[torch.arange(index.size(0)).unsqueeze(1), index1]
+        unselected_values = cat[torch.arange(index.size(0)).unsqueeze(1), index1]*select_clf*select_clf
+        mean_unselected = torch.mean(unselected_values, dim=1)
+        return torch.cat((selected_values, mean_unselected.unsqueeze(1)), dim=1)
+
+
+import pytorch_lightning as lg
+
+
+m = AutoModel.from_pretrained("cointegrated/rubert-tiny2")
+emb = m.embeddings
+#emb.dropout=nn.Dropout(0)
+for param in emb.parameters():
+    param.requires_grad = False
+
+tokenizer = AutoTokenizer.from_pretrained("cointegrated/rubert-tiny2")
+def tokenize(text):
+    t = tokenizer(text, padding=True, truncation=True, pad_to_multiple_of=300, max_length=300)['input_ids']
+    if len(t) < 30:
+        t += [0]*(30-len(t))
+    return t
+
+
+class MyModel(lg.LightningModule):
+    def __init__(self):
+        super().__init__()
+
+        self.lstm = nn.LSTM(INPUT_SIZE, HIDDEN_SIZE, batch_first=True)
+        self.attn = RomanAttention(HIDDEN_SIZE)
+        self.clf = nn.Sequential(
+            nn.Linear(HIDDEN_SIZE*(ATTENTION_SIZE+1), 100),
+            nn.Dropout(),
+            nn.ReLU(),
+            nn.Linear(100, 3)
+        )
+
+        self.criterion = nn.CrossEntropyLoss()
+        self.optimizer = torch.optim.Adam(self.parameters(), lr=0.001)
+        self.early_stopping = lg.callbacks.EarlyStopping(
+            monitor='val_acc',
+            min_delta=0.01,
+            patience=2,
+            verbose=True,
+            mode='max'
+        )
+        self.verbose = False
+
+    def forward(self, x):
+        if type(x) == str:
+            x = torch.tensor([tokenize(x)])
+        embeddings = emb(x)
+        output, (h_n, c_n) = self.lstm(embeddings)
+        attention = self.attn(output, c_n)
+        out = attention  # torch.cat((output, attention), dim=1)
+        out = nn.Flatten()(out)
+        out_clf = self.clf(out)
+        return out_clf
+
+
+    def training_step(self, batch, batch_idx):
+        x, y = batch
+        y_pred = self(x)
+        loss = self.criterion(y_pred, y)
+
+        accuracy = (torch.argmax(y_pred, dim=1) == y).float().mean()
+        self.log('train_loss', loss, on_epoch=True, prog_bar=True)
+        self.log('train_accuracy', accuracy, on_epoch=True, prog_bar=True)
+        return loss
+
+    def validation_step(self, batch, batch_idx):
+        x, y = batch
+        y_pred = self(x)
+        loss = self.criterion(y_pred, y)
+        accuracy = (torch.argmax(y_pred, dim=1) == y).float().mean()
+        self.log('val_loss', loss, on_epoch=True, prog_bar=True)
+        self.log('val_accuracy', accuracy, on_epoch=True, prog_bar=True)
+        return loss
+
+    def configure_optimizers(self):
+        return self.optimizer
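For orientation, a minimal sketch of pushing one tokenized string through an untrained MyModel instance to see the shapes implied by the constants above: INPUT_SIZE=312 matches the rubert-tiny2 embedding width fed to the LSTM, HIDDEN_SIZE=300 is the LSTM width, and RomanAttention keeps ATTENTION_SIZE+1 = 11 vectors, giving the 3300-dimensional input expected by the classifier head. This assumes the requirements are installed and the rubert-tiny2 checkpoint downloads at import time:

import torch
from pages.film_review.model.model_lstm import MyModel, tokenize

model = MyModel().eval()                        # untrained weights, used only to inspect shapes
x = torch.tensor([tokenize("пример отзыва")])   # (1, seq_len) token ids, padded to a multiple of 300
with torch.no_grad():
    logits = model(x)                           # (1, 3): Bad / Neutral / Good logits
print(x.shape, logits.shape)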
pages/film_review/notebook.ipynb
ADDED
@@ -0,0 +1,708 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "code",
|
5 |
+
"execution_count": 83,
|
6 |
+
"metadata": {},
|
7 |
+
"outputs": [
|
8 |
+
{
|
9 |
+
"name": "stdout",
|
10 |
+
"output_type": "stream",
|
11 |
+
"text": [
|
12 |
+
"0\n",
|
13 |
+
"100\n",
|
14 |
+
"200\n",
|
15 |
+
"300\n",
|
16 |
+
"400\n",
|
17 |
+
"500\n",
|
18 |
+
"600\n",
|
19 |
+
"700\n",
|
20 |
+
"800\n",
|
21 |
+
"900\n",
|
22 |
+
"1000\n",
|
23 |
+
"1100\n",
|
24 |
+
"1200\n",
|
25 |
+
"1300\n",
|
26 |
+
"1400\n",
|
27 |
+
"1500\n",
|
28 |
+
"1600\n",
|
29 |
+
"1700\n",
|
30 |
+
"1800\n",
|
31 |
+
"1900\n",
|
32 |
+
"2000\n",
|
33 |
+
"2100\n",
|
34 |
+
"2200\n",
|
35 |
+
"2300\n",
|
36 |
+
"2400\n",
|
37 |
+
"2500\n",
|
38 |
+
"2600\n",
|
39 |
+
"2700\n",
|
40 |
+
"2800\n",
|
41 |
+
"2900\n",
|
42 |
+
"3000\n",
|
43 |
+
"3100\n",
|
44 |
+
"3200\n",
|
45 |
+
"3300\n",
|
46 |
+
"3400\n",
|
47 |
+
"3500\n",
|
48 |
+
"3600\n",
|
49 |
+
"3700\n",
|
50 |
+
"3800\n",
|
51 |
+
"3900\n",
|
52 |
+
"4000\n"
|
53 |
+
]
|
54 |
+
},
|
55 |
+
{
|
56 |
+
"ename": "KeyboardInterrupt",
|
57 |
+
"evalue": "",
|
58 |
+
"output_type": "error",
|
59 |
+
"traceback": [
|
60 |
+
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
61 |
+
"\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
|
62 |
+
"Cell \u001b[0;32mIn[83], line 47\u001b[0m\n\u001b[1;32m 44\u001b[0m \u001b[38;5;28mprint\u001b[39m(i)\n\u001b[1;32m 45\u001b[0m t\u001b[38;5;241m=\u001b[39mdf\u001b[38;5;241m.\u001b[39miloc[i][\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mX\u001b[39m\u001b[38;5;124m'\u001b[39m]\n\u001b[0;32m---> 47\u001b[0m t \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mtokenizer\u001b[49m\u001b[43m(\u001b[49m\u001b[43mt\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mreturn_tensors\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mpt\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43mtruncation\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpadding\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m[\u001b[38;5;241m0\u001b[39m]\u001b[38;5;241m.\u001b[39mtolist()[\u001b[38;5;241m0\u001b[39m]\n\u001b[1;32m 48\u001b[0m x\u001b[38;5;241m.\u001b[39mappend(t)\n\u001b[1;32m 49\u001b[0m y\u001b[38;5;241m.\u001b[39mappend(df\u001b[38;5;241m.\u001b[39miloc[i][\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mY\u001b[39m\u001b[38;5;124m'\u001b[39m])\n",
|
63 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
|
64 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n",
|
65 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/transformers/models/bert/modeling_bert.py:1564\u001b[0m, in \u001b[0;36mBertForSequenceClassification.forward\u001b[0;34m(self, input_ids, attention_mask, token_type_ids, position_ids, head_mask, inputs_embeds, labels, output_attentions, output_hidden_states, return_dict)\u001b[0m\n\u001b[1;32m 1556\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 1557\u001b[0m \u001b[38;5;124;03mlabels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):\u001b[39;00m\n\u001b[1;32m 1558\u001b[0m \u001b[38;5;124;03m Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,\u001b[39;00m\n\u001b[1;32m 1559\u001b[0m \u001b[38;5;124;03m config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If\u001b[39;00m\n\u001b[1;32m 1560\u001b[0m \u001b[38;5;124;03m `config.num_labels > 1` a classification loss is computed (Cross-Entropy).\u001b[39;00m\n\u001b[1;32m 1561\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 1562\u001b[0m return_dict \u001b[38;5;241m=\u001b[39m return_dict \u001b[38;5;28;01mif\u001b[39;00m return_dict \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconfig\u001b[38;5;241m.\u001b[39muse_return_dict\n\u001b[0;32m-> 1564\u001b[0m outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbert\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1565\u001b[0m \u001b[43m \u001b[49m\u001b[43minput_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1566\u001b[0m \u001b[43m \u001b[49m\u001b[43mattention_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mattention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1567\u001b[0m \u001b[43m \u001b[49m\u001b[43mtoken_type_ids\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtoken_type_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1568\u001b[0m \u001b[43m \u001b[49m\u001b[43mposition_ids\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mposition_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1569\u001b[0m \u001b[43m \u001b[49m\u001b[43mhead_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhead_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1570\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs_embeds\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minputs_embeds\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1571\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_attentions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moutput_attentions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1572\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_hidden_states\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moutput_hidden_states\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1573\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_dict\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_dict\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1574\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1576\u001b[0m pooled_output \u001b[38;5;241m=\u001b[39m outputs[\u001b[38;5;241m1\u001b[39m]\n\u001b[1;32m 1578\u001b[0m pooled_output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdropout(pooled_output)\n",
|
66 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
|
67 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n",
|
68 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/transformers/models/bert/modeling_bert.py:1013\u001b[0m, in \u001b[0;36mBertModel.forward\u001b[0;34m(self, input_ids, attention_mask, token_type_ids, position_ids, head_mask, inputs_embeds, encoder_hidden_states, encoder_attention_mask, past_key_values, use_cache, output_attentions, output_hidden_states, return_dict)\u001b[0m\n\u001b[1;32m 1004\u001b[0m head_mask \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mget_head_mask(head_mask, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconfig\u001b[38;5;241m.\u001b[39mnum_hidden_layers)\n\u001b[1;32m 1006\u001b[0m embedding_output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39membeddings(\n\u001b[1;32m 1007\u001b[0m input_ids\u001b[38;5;241m=\u001b[39minput_ids,\n\u001b[1;32m 1008\u001b[0m position_ids\u001b[38;5;241m=\u001b[39mposition_ids,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1011\u001b[0m past_key_values_length\u001b[38;5;241m=\u001b[39mpast_key_values_length,\n\u001b[1;32m 1012\u001b[0m )\n\u001b[0;32m-> 1013\u001b[0m encoder_outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mencoder\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1014\u001b[0m \u001b[43m \u001b[49m\u001b[43membedding_output\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1015\u001b[0m \u001b[43m \u001b[49m\u001b[43mattention_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mextended_attention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1016\u001b[0m \u001b[43m \u001b[49m\u001b[43mhead_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhead_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1017\u001b[0m \u001b[43m \u001b[49m\u001b[43mencoder_hidden_states\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mencoder_hidden_states\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1018\u001b[0m \u001b[43m \u001b[49m\u001b[43mencoder_attention_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mencoder_extended_attention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1019\u001b[0m \u001b[43m \u001b[49m\u001b[43mpast_key_values\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpast_key_values\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1020\u001b[0m \u001b[43m \u001b[49m\u001b[43muse_cache\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43muse_cache\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1021\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_attentions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moutput_attentions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1022\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_hidden_states\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moutput_hidden_states\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1023\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_dict\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_dict\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1024\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1025\u001b[0m sequence_output \u001b[38;5;241m=\u001b[39m encoder_outputs[\u001b[38;5;241m0\u001b[39m]\n\u001b[1;32m 1026\u001b[0m pooled_output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpooler(sequence_output) \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpooler \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m 
\u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n",
|
69 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
|
70 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n",
|
71 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/transformers/models/bert/modeling_bert.py:607\u001b[0m, in \u001b[0;36mBertEncoder.forward\u001b[0;34m(self, hidden_states, attention_mask, head_mask, encoder_hidden_states, encoder_attention_mask, past_key_values, use_cache, output_attentions, output_hidden_states, return_dict)\u001b[0m\n\u001b[1;32m 596\u001b[0m layer_outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_gradient_checkpointing_func(\n\u001b[1;32m 597\u001b[0m layer_module\u001b[38;5;241m.\u001b[39m\u001b[38;5;21m__call__\u001b[39m,\n\u001b[1;32m 598\u001b[0m hidden_states,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 604\u001b[0m output_attentions,\n\u001b[1;32m 605\u001b[0m )\n\u001b[1;32m 606\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 607\u001b[0m layer_outputs \u001b[38;5;241m=\u001b[39m \u001b[43mlayer_module\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 608\u001b[0m \u001b[43m \u001b[49m\u001b[43mhidden_states\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 609\u001b[0m \u001b[43m \u001b[49m\u001b[43mattention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 610\u001b[0m \u001b[43m \u001b[49m\u001b[43mlayer_head_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 611\u001b[0m \u001b[43m \u001b[49m\u001b[43mencoder_hidden_states\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 612\u001b[0m \u001b[43m \u001b[49m\u001b[43mencoder_attention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 613\u001b[0m \u001b[43m \u001b[49m\u001b[43mpast_key_value\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 614\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_attentions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 615\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 617\u001b[0m hidden_states \u001b[38;5;241m=\u001b[39m layer_outputs[\u001b[38;5;241m0\u001b[39m]\n\u001b[1;32m 618\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m use_cache:\n",
|
72 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
|
73 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n",
|
74 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/transformers/models/bert/modeling_bert.py:497\u001b[0m, in \u001b[0;36mBertLayer.forward\u001b[0;34m(self, hidden_states, attention_mask, head_mask, encoder_hidden_states, encoder_attention_mask, past_key_value, output_attentions)\u001b[0m\n\u001b[1;32m 485\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\n\u001b[1;32m 486\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 487\u001b[0m hidden_states: torch\u001b[38;5;241m.\u001b[39mTensor,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 494\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tuple[torch\u001b[38;5;241m.\u001b[39mTensor]:\n\u001b[1;32m 495\u001b[0m \u001b[38;5;66;03m# decoder uni-directional self-attention cached key/values tuple is at positions 1,2\u001b[39;00m\n\u001b[1;32m 496\u001b[0m self_attn_past_key_value \u001b[38;5;241m=\u001b[39m past_key_value[:\u001b[38;5;241m2\u001b[39m] \u001b[38;5;28;01mif\u001b[39;00m past_key_value \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m--> 497\u001b[0m self_attention_outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mattention\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 498\u001b[0m \u001b[43m \u001b[49m\u001b[43mhidden_states\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 499\u001b[0m \u001b[43m \u001b[49m\u001b[43mattention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 500\u001b[0m \u001b[43m \u001b[49m\u001b[43mhead_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 501\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_attentions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moutput_attentions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 502\u001b[0m \u001b[43m \u001b[49m\u001b[43mpast_key_value\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mself_attn_past_key_value\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 503\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 504\u001b[0m attention_output \u001b[38;5;241m=\u001b[39m self_attention_outputs[\u001b[38;5;241m0\u001b[39m]\n\u001b[1;32m 506\u001b[0m \u001b[38;5;66;03m# if decoder, the last output is tuple of self-attn cache\u001b[39;00m\n",
|
75 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
|
76 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n",
|
77 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/transformers/models/bert/modeling_bert.py:427\u001b[0m, in \u001b[0;36mBertAttention.forward\u001b[0;34m(self, hidden_states, attention_mask, head_mask, encoder_hidden_states, encoder_attention_mask, past_key_value, output_attentions)\u001b[0m\n\u001b[1;32m 417\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\n\u001b[1;32m 418\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 419\u001b[0m hidden_states: torch\u001b[38;5;241m.\u001b[39mTensor,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 425\u001b[0m output_attentions: Optional[\u001b[38;5;28mbool\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mFalse\u001b[39;00m,\n\u001b[1;32m 426\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tuple[torch\u001b[38;5;241m.\u001b[39mTensor]:\n\u001b[0;32m--> 427\u001b[0m self_outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mself\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 428\u001b[0m \u001b[43m \u001b[49m\u001b[43mhidden_states\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 429\u001b[0m \u001b[43m \u001b[49m\u001b[43mattention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 430\u001b[0m \u001b[43m \u001b[49m\u001b[43mhead_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 431\u001b[0m \u001b[43m \u001b[49m\u001b[43mencoder_hidden_states\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 432\u001b[0m \u001b[43m \u001b[49m\u001b[43mencoder_attention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 433\u001b[0m \u001b[43m \u001b[49m\u001b[43mpast_key_value\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 434\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_attentions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 435\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 436\u001b[0m attention_output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39moutput(self_outputs[\u001b[38;5;241m0\u001b[39m], hidden_states)\n\u001b[1;32m 437\u001b[0m outputs \u001b[38;5;241m=\u001b[39m (attention_output,) \u001b[38;5;241m+\u001b[39m self_outputs[\u001b[38;5;241m1\u001b[39m:] \u001b[38;5;66;03m# add attentions if we output them\u001b[39;00m\n",
|
78 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
|
79 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n",
|
80 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/transformers/models/bert/modeling_bert.py:355\u001b[0m, in \u001b[0;36mBertSelfAttention.forward\u001b[0;34m(self, hidden_states, attention_mask, head_mask, encoder_hidden_states, encoder_attention_mask, past_key_value, output_attentions)\u001b[0m\n\u001b[1;32m 352\u001b[0m attention_scores \u001b[38;5;241m=\u001b[39m attention_scores \u001b[38;5;241m+\u001b[39m attention_mask\n\u001b[1;32m 354\u001b[0m \u001b[38;5;66;03m# Normalize the attention scores to probabilities.\u001b[39;00m\n\u001b[0;32m--> 355\u001b[0m attention_probs \u001b[38;5;241m=\u001b[39m \u001b[43mnn\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfunctional\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msoftmax\u001b[49m\u001b[43m(\u001b[49m\u001b[43mattention_scores\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdim\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m-\u001b[39;49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 357\u001b[0m \u001b[38;5;66;03m# This is actually dropping out entire tokens to attend to, which might\u001b[39;00m\n\u001b[1;32m 358\u001b[0m \u001b[38;5;66;03m# seem a bit unusual, but is taken from the original Transformer paper.\u001b[39;00m\n\u001b[1;32m 359\u001b[0m attention_probs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdropout(attention_probs)\n",
|
81 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/torch/nn/functional.py:1858\u001b[0m, in \u001b[0;36msoftmax\u001b[0;34m(input, dim, _stacklevel, dtype)\u001b[0m\n\u001b[1;32m 1856\u001b[0m dim \u001b[38;5;241m=\u001b[39m _get_softmax_dim(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msoftmax\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28minput\u001b[39m\u001b[38;5;241m.\u001b[39mdim(), _stacklevel)\n\u001b[1;32m 1857\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m dtype \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m-> 1858\u001b[0m ret \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43minput\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msoftmax\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdim\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1859\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 1860\u001b[0m ret \u001b[38;5;241m=\u001b[39m \u001b[38;5;28minput\u001b[39m\u001b[38;5;241m.\u001b[39msoftmax(dim, dtype\u001b[38;5;241m=\u001b[39mdtype)\n",
|
82 |
+
"\u001b[0;31mKeyboardInterrupt\u001b[0m: "
|
83 |
+
]
|
84 |
+
}
|
85 |
+
],
|
86 |
+
"source": [
|
87 |
+
"import pandas as pd\n",
|
88 |
+
"import numpy as np\n",
|
89 |
+
"import matplotlib.pyplot as plt\n",
|
90 |
+
"import json\n",
|
91 |
+
"import catboost\n",
|
92 |
+
"from sklearn.calibration import LabelEncoder\n",
|
93 |
+
"from sklearn.model_selection import train_test_split\n",
|
94 |
+
"import torch\n",
|
95 |
+
"from transformers import AutoTokenizer, AutoModel\n",
|
96 |
+
"import torch.nn as nn\n",
|
97 |
+
"\n",
|
98 |
+
"\n",
|
99 |
+
"if not 'data' in globals():\n",
|
100 |
+
" with open('kinopoisk.jsonl', 'r') as json_file:\n",
|
101 |
+
" data = []\n",
|
102 |
+
" for line in json_file:\n",
|
103 |
+
" data.append(json.loads(line))\n",
|
104 |
+
"\n",
|
105 |
+
"from torch.utils.data import DataLoader, TensorDataset\n",
|
106 |
+
"\n",
|
107 |
+
"\n",
|
108 |
+
"\n",
|
109 |
+
"df = pd.DataFrame(data)\n",
|
110 |
+
"df['X'] = df['content']\n",
|
111 |
+
"encode={\"Good\":2,\"Bad\":0,\"Neutral\":1}\n",
|
112 |
+
"df['Y'] = df['grade3'].map(encode)\n",
|
113 |
+
"\n",
|
114 |
+
"\n",
|
115 |
+
"import torch\n",
|
116 |
+
"from transformers import AutoTokenizer, AutoModelForSequenceClassification\n",
|
117 |
+
"from catboost import CatBoostClassifier\n",
|
118 |
+
"import torch.nn as nn\n",
|
119 |
+
"\n",
|
120 |
+
"model_checkpoint = 'cointegrated/rubert-tiny-toxicity'\n",
|
121 |
+
"tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)\n",
|
122 |
+
"model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint)\n",
|
123 |
+
"model.classifier=nn.Dropout(0)\n",
|
124 |
+
"model.dropout = nn.Dropout(0)\n",
|
125 |
+
"\n",
|
126 |
+
"x,y=[],[]\n",
|
127 |
+
"# if 'train_X' not in globals():\n",
|
128 |
+
"for i in range(len(df)):\n",
|
129 |
+
" if i%100==0:\n",
|
130 |
+
" print(i)\n",
|
131 |
+
" t=df.iloc[i]['X']\n",
|
132 |
+
"\n",
|
133 |
+
" t = model(**tokenizer(t, return_tensors='pt',truncation=True, padding=True))[0].tolist()[0]\n",
|
134 |
+
" x.append(t)\n",
|
135 |
+
" y.append(df.iloc[i]['Y'])\n",
|
136 |
+
" \n",
|
137 |
+
"x = np.array(x)\n",
|
138 |
+
"y = np.array(y)\n",
|
139 |
+
"\n",
|
140 |
+
"\n",
|
141 |
+
"\n",
|
142 |
+
"X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=42)\n",
|
143 |
+
"\n",
|
144 |
+
"from sklearn.utils.class_weight import compute_class_weight\n",
|
145 |
+
"classes = np.unique(y)\n",
|
146 |
+
"weights = compute_class_weight(class_weight='balanced', classes=classes, y=y)\n",
|
147 |
+
"catboost = CatBoostClassifier( eval_metric='Accuracy',class_weights=weights)\n",
|
148 |
+
"catboost.fit(X_train , y_train, verbose=False,plot =True,eval_set=( X_test, y_test))\n",
|
149 |
+
"\n",
|
150 |
+
"catboost.save_model('filmreview.cbm')"
|
151 |
+
]
|
152 |
+
},
|
153 |
+
{
|
154 |
+
"cell_type": "code",
|
155 |
+
"execution_count": 81,
|
156 |
+
"metadata": {},
|
157 |
+
"outputs": [
|
158 |
+
{
|
159 |
+
"ename": "CatBoostError",
|
160 |
+
"evalue": "/src/catboost/catboost/libs/model/model_import_interface.h:19: Model file doesn't exist: catboost_model.cbm",
|
161 |
+
"output_type": "error",
|
162 |
+
"traceback": [
|
163 |
+
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
164 |
+
"\u001b[0;31mCatBoostError\u001b[0m Traceback (most recent call last)",
|
165 |
+
"Cell \u001b[0;32mIn[81], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m catboost_model \u001b[38;5;241m=\u001b[39m catboost\u001b[38;5;241m.\u001b[39mCatBoostClassifier(random_seed\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m42\u001b[39m,eval_metric\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mAccuracy\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[0;32m----> 2\u001b[0m \u001b[43mcatboost_model\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mload_model\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcatboost_model.cbm\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n",
|
166 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/catboost/core.py:3424\u001b[0m, in \u001b[0;36mCatBoost.load_model\u001b[0;34m(self, fname, format, stream, blob)\u001b[0m\n\u001b[1;32m 3421\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m CatBoostError(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mExactly one of fname/stream/blob arguments mustn\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mt be None\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 3423\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m fname \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m-> 3424\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_load_model\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mformat\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 3425\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m stream \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 3426\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_load_from_stream(stream)\n",
|
167 |
+
"File \u001b[0;32m~/anaconda3/envs/cv/lib/python3.12/site-packages/catboost/core.py:1899\u001b[0m, in \u001b[0;36m_CatBoostBase._load_model\u001b[0;34m(self, model_file, format)\u001b[0m\n\u001b[1;32m 1897\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m CatBoostError(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mInvalid fname type=\u001b[39m\u001b[38;5;132;01m{}\u001b[39;00m\u001b[38;5;124m: must be str() or pathlib.Path().\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mformat(\u001b[38;5;28mtype\u001b[39m(model_file)))\n\u001b[1;32m 1898\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_init_params \u001b[38;5;241m=\u001b[39m {}\n\u001b[0;32m-> 1899\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_object\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_load_model\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel_file\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mformat\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1900\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_set_trained_model_attributes()\n\u001b[1;32m 1901\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m key, value \u001b[38;5;129;01min\u001b[39;00m iteritems(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_get_params()):\n",
|
168 |
+
"File \u001b[0;32m_catboost.pyx:5202\u001b[0m, in \u001b[0;36m_catboost._CatBoost._load_model\u001b[0;34m()\u001b[0m\n",
|
169 |
+
"File \u001b[0;32m_catboost.pyx:5205\u001b[0m, in \u001b[0;36m_catboost._CatBoost._load_model\u001b[0;34m()\u001b[0m\n",
|
170 |
+
"\u001b[0;31mCatBoostError\u001b[0m: /src/catboost/catboost/libs/model/model_import_interface.h:19: Model file doesn't exist: catboost_model.cbm"
|
171 |
+
]
|
172 |
+
}
|
173 |
+
],
|
174 |
+
"source": [
|
175 |
+
"catboost_model = catboost.CatBoostClassifier(random_seed=42,eval_metric='Accuracy')\n",
|
176 |
+
"catboost_model.load_model(\"catboost_kino.cbm\")\n",
|
177 |
+
"tokenizer = AutoTokenizer.from_pretrained(\"cointegrated/rubert-tiny2\")\n",
|
178 |
+
"model = AutoModel.from_pretrained(\"cointegrated/rubert-tiny2\")\n",
|
179 |
+
"def embed_bert_cls(text, model, tokenizer):\n",
|
180 |
+
" t = tokenizer(text, padding=True, truncation=True, return_tensors='pt')\n",
|
181 |
+
" with torch.no_grad():\n",
|
182 |
+
" model_output = model(**{k: v.to(model.device) for k, v in t.items()})\n",
|
183 |
+
" embeddings = model_output.last_hidden_state[:, 0, :]\n",
|
184 |
+
" embeddings = torch.nn.functional.normalize(embeddings)\n",
|
185 |
+
" return embeddings[0].cpu().numpy()\n",
|
186 |
+
"\n",
|
187 |
+
"\n",
|
188 |
+
"def predict(text):\n",
|
189 |
+
" embeddings = embed_bert_cls(text, model, tokenizer)\n",
|
190 |
+
" return catboost_model.predict_proba(embeddings.reshape(1, -1))[0]\n"
|
191 |
+
]
|
192 |
+
},
|
193 |
+
{
|
194 |
+
"cell_type": "code",
|
195 |
+
"execution_count": 147,
|
196 |
+
"metadata": {},
|
197 |
+
"outputs": [
|
198 |
+
{
|
199 |
+
"data": {
|
200 |
+
"text/plain": [
|
201 |
+
"torch.int64"
|
202 |
+
]
|
203 |
+
},
|
204 |
+
"execution_count": 147,
|
205 |
+
"metadata": {},
|
206 |
+
"output_type": "execute_result"
|
207 |
+
}
|
208 |
+
],
|
209 |
+
"source": [
|
210 |
+
"dataiter = iter(train_loader)\n",
|
211 |
+
"sample_x, sample_y = next(dataiter)\n",
|
212 |
+
"sample_y.dtype "
|
213 |
+
]
|
214 |
+
},
|
215 |
+
{
|
216 |
+
"cell_type": "code",
|
217 |
+
"execution_count": 150,
|
218 |
+
"metadata": {},
|
219 |
+
"outputs": [
|
220 |
+
{
|
221 |
+
"data": {
|
222 |
+
"text/plain": [
|
223 |
+
"tensor([2, 2, 2, 1, 2, 2, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2,\n",
|
224 |
+
" 2, 2, 2, 2, 2, 2, 2, 2])"
|
225 |
+
]
|
226 |
+
},
|
227 |
+
"execution_count": 150,
|
228 |
+
"metadata": {},
|
229 |
+
"output_type": "execute_result"
|
230 |
+
}
|
231 |
+
],
|
232 |
+
"source": [
|
233 |
+
"from ast import mod\n",
|
234 |
+
"import pandas as pd\n",
|
235 |
+
"import numpy as np\n",
|
236 |
+
"\n",
|
237 |
+
"from sklearn.model_selection import train_test_split\n",
|
238 |
+
"\n",
|
239 |
+
"\n",
|
240 |
+
"\n",
|
241 |
+
"\n",
|
242 |
+
"\n",
|
243 |
+
"df = pd.read_csv('toxic.csv')\n",
|
244 |
+
"\n",
|
245 |
+
"x,y=[],[]\n",
|
246 |
+
"\n",
|
247 |
+
"if 'train_X' not in globals():\n",
|
248 |
+
" for i in range(len(df)):\n",
|
249 |
+
" if i%100==0:\n",
|
250 |
+
" print(i)\n",
|
251 |
+
" t=df.iloc[i]['comment']\n",
|
252 |
+
"\n",
|
253 |
+
" t = model(**tokenizer(t, return_tensors='pt',truncation=True, padding=True))[0].tolist()[0]\n",
|
254 |
+
" x.append(t)\n",
|
255 |
+
" y.append(df.iloc[i]['toxic'])\n",
|
256 |
+
"x = np.array(x)\n",
|
257 |
+
"y = np.array(y)\n",
|
258 |
+
"\n",
|
259 |
+
"train_X, test_X, train_y, test_y = train_test_split(x, y, test_size=0.2, random_state=42)\n",
|
260 |
+
"from sklearn.utils.class_weight import compute_class_weight\n",
|
261 |
+
"classes = np.unique(y)\n",
|
262 |
+
"weights = compute_class_weight(class_weight='balanced', classes=classes, y=y)\n",
|
263 |
+
"\n",
|
264 |
+
"\n",
|
265 |
+
"catboost = CatBoostClassifier( eval_metric='Accuracy',class_weights=weights)\n",
|
266 |
+
"catboost.fit(train_X, train_y, verbose=False,plot =True,eval_set=(test_X, test_y))\n",
|
267 |
+
"\n",
|
268 |
+
"#save\n",
|
269 |
+
"torch.save(catboost.state_dict(), 'model.pt')\n",
|
270 |
+
"\n",
|
271 |
+
"\n",
|
272 |
+
"import torch\n",
|
273 |
+
"from transformers import AutoTokenizer, AutoModelForSequenceClassification\n",
|
274 |
+
"from catboost import CatBoostClassifier\n",
|
275 |
+
"import torch.nn as nn\n",
|
276 |
+
"catboost_model = catboost.CatBoostClassifier(random_seed=42,eval_metric='Accuracy')\n",
|
277 |
+
"catboost_model.load_model(\"catboost_model.cbm\")\n",
|
278 |
+
"model_checkpoint = 'cointegrated/rubert-tiny-toxicity'\n",
|
279 |
+
"tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)\n",
|
280 |
+
"model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint)\n",
|
281 |
+
"model.classifier=nn.Dropout(0)\n",
|
282 |
+
"model.dropout = nn.Dropout(0)\n",
|
283 |
+
"\n",
|
284 |
+
"def predict(text):\n",
|
285 |
+
" t=tokenizer(text, return_tensors='pt',truncation=True, padding=True)\n",
|
286 |
+
" t = model(**t)[0].tolist()[0]\n",
|
287 |
+
" return t\n"
|
288 |
+
]
|
289 |
+
},
|
290 |
+
{
|
291 |
+
"cell_type": "code",
|
292 |
+
"execution_count": 149,
|
293 |
+
"metadata": {},
|
294 |
+
"outputs": [
|
295 |
+
{
|
296 |
+
"data": {
|
297 |
+
"text/plain": [
|
298 |
+
"torch.float32"
|
299 |
+
]
|
300 |
+
},
|
301 |
+
"execution_count": 149,
|
302 |
+
"metadata": {},
|
303 |
+
"output_type": "execute_result"
|
304 |
+
}
|
305 |
+
],
|
306 |
+
"source": [
|
307 |
+
"model(sample_x).dtype "
|
308 |
+
]
|
309 |
+
},
|
310 |
+
{
|
311 |
+
"cell_type": "code",
|
312 |
+
"execution_count": 43,
|
313 |
+
"metadata": {},
|
314 |
+
"outputs": [
|
315 |
+
{
|
316 |
+
"data": {
|
317 |
+
"text/plain": [
|
318 |
+
"tensor([[ 0.0038, -0.0042, -0.1281]], grad_fn=<AddmmBackward0>)"
|
319 |
+
]
|
320 |
+
},
|
321 |
+
"execution_count": 43,
|
322 |
+
"metadata": {},
|
323 |
+
"output_type": "execute_result"
|
324 |
+
}
|
325 |
+
],
|
326 |
+
"source": [
|
327 |
+
"model(t['input_ids'])"
|
328 |
+
]
|
329 |
+
},
|
330 |
+
{
|
331 |
+
"cell_type": "code",
|
332 |
+
"execution_count": null,
|
333 |
+
"metadata": {},
|
334 |
+
"outputs": [],
|
335 |
+
"source": []
|
336 |
+
},
|
337 |
+
{
|
338 |
+
"cell_type": "code",
|
339 |
+
"execution_count": 1,
|
340 |
+
"metadata": {},
|
341 |
+
"outputs": [
|
342 |
+
{
|
343 |
+
"data": {
|
344 |
+
"application/vnd.jupyter.widget-view+json": {
|
345 |
+
"model_id": "36c96d4a680b45329f6f5536ad04e38f",
|
346 |
+
"version_major": 2,
|
347 |
+
"version_minor": 0
|
348 |
+
},
|
349 |
+
"text/plain": [
|
350 |
+
"tokenizer_config.json: 0%| | 0.00/377 [00:00<?, ?B/s]"
|
351 |
+
]
|
352 |
+
},
|
353 |
+
"metadata": {},
|
354 |
+
"output_type": "display_data"
|
355 |
+
},
|
356 |
+
{
|
357 |
+
"data": {
|
358 |
+
"application/vnd.jupyter.widget-view+json": {
|
359 |
+
"model_id": "b20871e0bbeb4f249f96f8b678933712",
|
360 |
+
"version_major": 2,
|
361 |
+
"version_minor": 0
|
362 |
+
},
|
363 |
+
"text/plain": [
|
364 |
+
"vocab.txt: 0%| | 0.00/241k [00:00<?, ?B/s]"
|
365 |
+
]
|
366 |
+
},
|
367 |
+
"metadata": {},
|
368 |
+
"output_type": "display_data"
|
369 |
+
},
|
370 |
+
{
|
371 |
+
"data": {
|
372 |
+
"application/vnd.jupyter.widget-view+json": {
|
373 |
+
"model_id": "4fb9a55a45e04386aa1cfacc53b84bd6",
|
374 |
+
"version_major": 2,
|
375 |
+
"version_minor": 0
|
376 |
+
},
|
377 |
+
"text/plain": [
|
378 |
+
"tokenizer.json: 0%| | 0.00/468k [00:00<?, ?B/s]"
|
379 |
+
]
|
380 |
+
},
|
381 |
+
"metadata": {},
|
382 |
+
"output_type": "display_data"
|
383 |
+
},
|
384 |
+
{
|
385 |
+
"data": {
|
386 |
+
"application/vnd.jupyter.widget-view+json": {
|
387 |
+
"model_id": "37920dd7d41f4f19804848fcf1431b06",
|
388 |
+
"version_major": 2,
|
389 |
+
"version_minor": 0
|
390 |
+
},
|
391 |
+
"text/plain": [
|
392 |
+
"special_tokens_map.json: 0%| | 0.00/112 [00:00<?, ?B/s]"
|
393 |
+
]
|
394 |
+
},
|
395 |
+
"metadata": {},
|
396 |
+
"output_type": "display_data"
|
397 |
+
},
|
398 |
+
{
|
399 |
+
"data": {
|
400 |
+
"application/vnd.jupyter.widget-view+json": {
|
401 |
+
"model_id": "1200fc72cc22450d960480fa65e15234",
|
402 |
+
"version_major": 2,
|
403 |
+
"version_minor": 0
|
404 |
+
},
|
405 |
+
"text/plain": [
|
406 |
+
"config.json: 0%| | 0.00/957 [00:00<?, ?B/s]"
|
407 |
+
]
|
408 |
+
},
|
409 |
+
"metadata": {},
|
410 |
+
"output_type": "display_data"
|
411 |
+
},
|
412 |
+
{
|
413 |
+
"data": {
|
414 |
+
"application/vnd.jupyter.widget-view+json": {
|
415 |
+
"model_id": "7231e2ea8f6f469992d3d47d37e61c9a",
|
416 |
+
"version_major": 2,
|
417 |
+
"version_minor": 0
|
418 |
+
},
|
419 |
+
"text/plain": [
|
420 |
+
"model.safetensors: 0%| | 0.00/47.2M [00:00<?, ?B/s]"
|
421 |
+
]
|
422 |
+
},
|
423 |
+
"metadata": {},
|
424 |
+
"output_type": "display_data"
|
425 |
+
}
|
426 |
+
],
|
427 |
+
"source": []
|
428 |
+
},
|
429 |
+
{
|
430 |
+
"cell_type": "code",
|
431 |
+
"execution_count": 4,
|
432 |
+
"metadata": {},
|
433 |
+
"outputs": [],
|
434 |
+
"source": []
|
435 |
+
},
|
436 |
+
{
|
437 |
+
"cell_type": "code",
|
438 |
+
"execution_count": null,
|
439 |
+
"metadata": {},
|
440 |
+
"outputs": [],
|
441 |
+
"source": [
|
442 |
+
"model"
|
443 |
+
]
|
444 |
+
},
|
445 |
+
{
|
446 |
+
"cell_type": "code",
|
447 |
+
"execution_count": null,
|
448 |
+
"metadata": {},
|
449 |
+
"outputs": [],
|
450 |
+
"source": [
|
451 |
+
"from ast import mod\n",
|
452 |
+
"import pandas as pd\n",
|
453 |
+
"import numpy as np\n",
|
454 |
+
"\n",
|
455 |
+
"from sklearn.model_selection import train_test_split\n",
|
456 |
+
"\n",
|
457 |
+
"\n",
|
458 |
+
"\n",
|
459 |
+
"\n",
|
460 |
+
"\n",
|
461 |
+
"df = pd.read_csv('toxic.csv')\n",
|
462 |
+
"\n",
|
463 |
+
"x,y=[],[]\n",
|
464 |
+
"\n",
|
465 |
+
"if 'train_X' not in globals():\n",
|
466 |
+
" for i in range(len(df)):\n",
|
467 |
+
" if i%100==0:\n",
|
468 |
+
" print(i)\n",
|
469 |
+
" t=df.iloc[i]['comment']\n",
|
470 |
+
"\n",
|
471 |
+
" t = model(**tokenizer(t, return_tensors='pt',truncation=True, padding=True))[0].tolist()[0]\n",
|
472 |
+
" x.append(t)\n",
|
473 |
+
" y.append(df.iloc[i]['toxic'])\n",
|
474 |
+
"x = np.array(x)\n",
|
475 |
+
"y = np.array(y)\n",
|
476 |
+
"\n",
|
477 |
+
"train_X, test_X, train_y, test_y = train_test_split(x, y, test_size=0.2, random_state=42)\n",
|
478 |
+
"from sklearn.utils.class_weight import compute_class_weight\n",
|
479 |
+
"classes = np.unique(y)\n",
|
480 |
+
"weights = compute_class_weight(class_weight='balanced', classes=classes, y=y)\n",
|
481 |
+
"catboost = CatBoostClassifier( eval_metric='Accuracy',class_weights=weights)\n",
|
482 |
+
"catboost.fit(train_X, train_y, verbose=False,plot =True,eval_set=(test_X, test_y))\n",
|
483 |
+
"\n",
|
484 |
+
"#save\n",
|
485 |
+
"torch.save(catboost.state_dict(), 'model.pt')\n",
|
486 |
+
"\n",
|
487 |
+
"\n",
|
488 |
+
"import torch\n",
|
489 |
+
"from transformers import AutoTokenizer, AutoModelForSequenceClassification\n",
|
490 |
+
"from catboost import CatBoostClassifier\n",
|
491 |
+
"import torch.nn as nn\n",
|
492 |
+
"catboost_model = catboost.CatBoostClassifier(random_seed=42,eval_metric='Accuracy')\n",
|
493 |
+
"catboost_model.load_model(\"catboost_model.cbm\")\n",
|
494 |
+
"model_checkpoint = 'cointegrated/rubert-tiny-toxicity'\n",
|
495 |
+
"tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)\n",
|
496 |
+
"model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint)\n",
|
497 |
+
"model.classifier=nn.Dropout(0)\n",
|
498 |
+
"model.dropout = nn.Dropout(0)\n",
|
499 |
+
"\n",
|
500 |
+
"def predict(text):\n",
|
501 |
+
" t=tokenizer(text, return_tensors='pt',truncation=True, padding=True)\n",
|
502 |
+
" t = model(**t)[0].tolist()[0]\n",
|
503 |
+
" return t\n"
|
504 |
+
]
|
505 |
+
},
|
506 |
+
{
|
507 |
+
"cell_type": "code",
|
508 |
+
"execution_count": 34,
|
509 |
+
"metadata": {},
|
510 |
+
"outputs": [],
|
511 |
+
"source": [
|
512 |
+
"catboost.save_model('dont_be_toxic.cbm')"
|
513 |
+
]
|
514 |
+
},
|
515 |
+
{
|
516 |
+
"cell_type": "code",
|
517 |
+
"execution_count": 79,
|
518 |
+
"metadata": {},
|
519 |
+
"outputs": [
|
520 |
+
{
|
521 |
+
"data": {
|
522 |
+
"text/plain": [
|
523 |
+
"array([0.04576194, 0.95423806])"
|
524 |
+
]
|
525 |
+
},
|
526 |
+
"execution_count": 79,
|
527 |
+
"metadata": {},
|
528 |
+
"output_type": "execute_result"
|
529 |
+
}
|
530 |
+
],
|
531 |
+
"source": [
|
532 |
+
"import torch\n",
|
533 |
+
"from transformers import AutoTokenizer, AutoModelForSequenceClassification\n",
|
534 |
+
"from catboost import CatBoostClassifier\n",
|
535 |
+
"import torch.nn as nn\n",
|
536 |
+
"\n",
|
537 |
+
"catboost_model = CatBoostClassifier(random_seed=42,eval_metric='Accuracy')\n",
|
538 |
+
"catboost_model.load_model(\"../anti_toxic/dont_be_toxic.cbm\")\n",
|
539 |
+
"model_checkpoint = 'cointegrated/rubert-tiny-toxicity'\n",
|
540 |
+
"tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)\n",
|
541 |
+
"model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint)\n",
|
542 |
+
"model.classifier=nn.Dropout(0)\n",
|
543 |
+
"model.dropout = nn.Dropout(0)\n",
|
544 |
+
"\n",
|
545 |
+
"def predict(text):\n",
|
546 |
+
" t=tokenizer(text, return_tensors='pt',truncation=True, padding=True)\n",
|
547 |
+
" t = model(**t)[0].tolist()[0]\n",
|
548 |
+
" return catboost_model.predict_proba(t)\n"
|
549 |
+
]
|
550 |
+
},
|
551 |
+
{
|
552 |
+
"cell_type": "code",
|
553 |
+
"execution_count": 43,
|
554 |
+
"metadata": {},
|
555 |
+
"outputs": [
|
556 |
+
{
|
557 |
+
"ename": "IndexError",
|
558 |
+
"evalue": "invalid index to scalar variable.",
|
559 |
+
"output_type": "error",
|
560 |
+
"traceback": [
|
561 |
+
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
562 |
+
"\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)",
|
563 |
+
"Cell \u001b[0;32mIn[43], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mpredict\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mмяу\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n",
|
564 |
+
"Cell \u001b[0;32mIn[42], line 17\u001b[0m, in \u001b[0;36mpredict\u001b[0;34m(text)\u001b[0m\n\u001b[1;32m 15\u001b[0m t\u001b[38;5;241m=\u001b[39mtokenizer(text, return_tensors\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mpt\u001b[39m\u001b[38;5;124m'\u001b[39m,truncation\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m, padding\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[1;32m 16\u001b[0m t \u001b[38;5;241m=\u001b[39m model(\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mt)[\u001b[38;5;241m0\u001b[39m]\u001b[38;5;241m.\u001b[39mtolist()[\u001b[38;5;241m0\u001b[39m]\n\u001b[0;32m---> 17\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mcatboost_model\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpredict_proba\u001b[49m\u001b[43m(\u001b[49m\u001b[43mt\u001b[49m\u001b[43m)\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m]\u001b[49m\n",
|
565 |
+
"\u001b[0;31mIndexError\u001b[0m: invalid index to scalar variable."
|
566 |
+
]
|
567 |
+
}
|
568 |
+
],
|
569 |
+
"source": []
|
570 |
+
},
|
571 |
+
{
|
572 |
+
"cell_type": "code",
|
573 |
+
"execution_count": 33,
|
574 |
+
"metadata": {},
|
575 |
+
"outputs": [
|
576 |
+
{
|
577 |
+
"data": {
|
578 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAApIAAAIjCAYAAACwHvu2AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABT3UlEQVR4nO3de3zP9f//8ft72MHYCdusnAmTHIvl/LHMIRHSoppSDk2OOVXkUA05q4xPxRKVDlYU2cdiYbGmRZKQQ8WMHGbDNtv794ev9693m2yv9vZ+2/t27fK6XHo/X8/X8/V4vT8f9ejxfL6eb5PZbDYLAAAAKCIXewcAAACAWxOJJAAAAAwhkQQAAIAhJJIAAAAwhEQSAAAAhpBIAgAAwBASSQAAABhCIgkAAABDSCQBAABgCIkkgH904MABderUSd7e3jKZTIqNjS3W8Y8cOSKTyaTly5cX67i3svbt26t9+/b2DgMAbohEErgFHDp0SIMHD1bNmjXl7u4uLy8vtWrVSgsWLNClS5dseu+IiAjt2bNHr7zyilasWKHmzZvb9H4304ABA2QymeTl5VXg93jgwAGZTCaZTCbNnj27yOMfP35cU6ZMUUpKSjFECwCOp7S9AwDwz7744gs99NBDcnNz0+OPP64777xT2dnZ2rp1q8aOHau9e/dq6dKlNrn3pUuXlJiYqBdeeEHDhg2zyT2qVaumS5cuqUyZMjYZ/0ZKly6tixcvau3aterbt6/VuZUrV8rd3V2XL182NPbx48c1depUVa9eXY0bNy70dRs3bjR0PwC42UgkAQd2+PBhhYeHq1q1aoqPj1flypUt5yIjI3Xw4EF98cUXNrv/qVOnJEk+Pj42u4fJZJK7u7vNxr8RNzc3tWrVSu+//36+RHLVqlXq1q2bPvnkk5sSy8WLF1W2bFm5urrelPsBwL/F1DbgwGbNmqWMjAy9/fbbVknkNbVr19aIESMsn69cuaLp06erVq1acnNzU/Xq1fX8888rKyvL6rrq1avr/vvv19atW3XPPffI3d1dNWvW1LvvvmvpM2XKFFWrVk2SNHbsWJlMJlWvXl3S1Snha3//V1OmTJHJZLJqi4uLU+vWreXj46Ny5cqpbt26ev755y3nr7dGMj4+Xm3atJGnp6d8fHzUo0cP7du3r8D7HTx4UAMGDJCPj4+8vb31xBNP6OLFi9f/Yv+mX79+Wr9+vc6dO2dpS0pK0oEDB9SvX798/c+cOaPnnntODRs2VLly5eTl5aUuXbrohx9+sPTZvHmz7r77bknSE088YZkiv/ac7du315133qnk5GS1bdtWZcuWtXwvf18jGRERIXd393zPHxYWJl9fXx0/frzQzwoAxYlEEnBga9euVc2aNXXvvfcWqv9TTz2lyZMnq2nTppo3b57atWunqKgohYeH5+t78OBB9enTR/fdd5/mzJkjX19fDRgwQHv37pUk9erVS/PmzZMkPfLII1qxYoXmz59fpPj37t2r+++/X1lZWZo2bZrmzJmjBx54QNu2bfvH6/73v/8pLCxMaWlpmjJlikaPHq3t27erVatWOnLkSL7+ffv21YULFxQVFaW+fftq+fLlmjp1aqHj7NWrl0wmkz799FNL26pVq1SvXj01bdo0X/9ff/1VsbGxuv/++zV37lyNHTtWe/bsUbt27SxJXf369TVt2jRJ0qBBg7RixQqtWLFCbdu2tYzz559/qkuXLmrcuLHmz5+vDh06FBjfggULVKlSJUVERCg3N1eStGTJEm3cuFGLFi1SUFBQoZ8VAIqVGYBDOn/+vFmSuUePHoXqn5KSYpZkfuqpp6zan3vuObMkc3x8vKWtWrVqZknmhIQES1taWprZzc3NPGbMGEvb4cOHzZLMr732mtWYERER5mrVquWL4aWXXjL/9R8r8+bNM0synzp16rpxX7vHsmXLLG2NGzc2+/v7m//8809L2w8//GB2cXExP/744/nu9+STT1qN+eCDD5orVKhw3Xv+9Tk8PT3NZrPZ3KdPH3PHjh3NZrPZnJubaw4MDDRPnTq1wO/g8uXL5tzc3HzP4ebmZp42bZqlLSkpKd+zXdOuXTuzJHN0dHSB59q1a2fV9tVXX5klmV9++WXzr7/+ai5Xrpy5Z8+eN3xGALAlKpKAg0pPT5cklS9fvlD9v/zyS0nS6NGjrdrHjBkjSfnWUgYHB6tNmzaWz5UqVVLdunX166+/Go75766trfzss8+Ul5dXqGtOnDihlJQUDRgwQH5+fpb2u+66S/fdd5/lOf9qyJAhVp/btGmjP//80/IdFka/fv20efNmpaamKj4+XqmpqQVOa0tX11W6uFz9x2dubq7+/PNPy7T9rl27Cn1PNzc3PfHEE4Xq26lTJw0ePFjTpk1Tr1695O7uriVLlhT6XgBgCySSgIPy8vKSJF24cKFQ/Y8ePSoXFxfVrl3bqj0wMFA+Pj46evSoVXvVqlXzjeHr66uzZ88ajDi/hx9+WK1atdJTTz2lgIAAhYeHa/Xq1f+YVF6Ls27duvnO1a9fX6dPn1ZmZqZV+9+fxdfXV5KK9Cxdu3ZV+fLl9eGHH2rlypW6++67832X1+Tl5WnevHmqU6eO3NzcVLFiRVWqVEm7d+/W+fPnC33P2267rUgv1syePVt+fn5KSUnRwoUL5e/vX+hrAcAWSCQBB+Xl5aWgoCD9+OOPRbru7y+7XE+pUqUKbDebzYbvcW393jUeHh5KSEjQ//73Pz322GPavXu3Hn74Yd133335+v4b/+ZZrnFzc1OvXr0UExOjNWvWXLcaKUmvvvqqRo8erbZt2+q9997TV199pbi4ODVo0KDQlVfp6vdTFN9//73S0tIkSXv27CnStQBgCySSgAO7//77dejQISUmJt6wb7Vq1ZSXl6cDBw5YtZ88eVLnzp2zvIFdHHx9fa3ecL7m71VPSXJxcVHHjh01d+5c/fTTT3rllVcUHx+vr7/+usCxr8W5f//+fOd+/vlnVaxYUZ6env/uAa6jX79++v7773XhwoUCX1C65uOPP1aHDh309ttvKzw8XJ06dVJoaGi+76SwSX1hZGZm6oknnlBwcLAGDRqkWbNmKSkpqdjGBwAjSCQBBzZu3Dh5enrqqaee0smTJ/OdP3TokBYsWCDp6tSspHxvVs+dO1eS1K1bt2KLq1atWjp//rx2795taTtx4oTWrFlj1e/MmTP5rr22MffftyS6pnLlymrcuLFiYmKsErMff/xRGzdutDynLXTo0EHTp0/X66+/rsDAwOv2K1WqVL5q50cffaQ//vjDqu1awltQ0l1U48eP17FjxxQTE6O5c+eqevXqioiIuO73CAA3AxuSAw6sVq1aWrVqlR5++GHVr1/f6pdttm/fro8++kgDBgyQJDVq1EgRERFaunSpzp07p3bt2mnnzp2KiYlRz549r7u1jBHh4eEaP368HnzwQQ0fPlwXL17U4sWLdccdd1i9bDJt2jQlJCSoW7duqlatmtLS0vTmm2/q9ttvV+vWra87/muvvaYuXbooJCREAwcO1
KVLl7Ro0SJ5e3trypQpxfYcf+fi4qIXX3zxhv3uv/9+TZs2TU888YTuvfde7dmzRytXrlTNmjWt+tWqVUs+Pj6Kjo5W+fLl5enpqRYtWqhGjRpFiis+Pl5vvvmmXnrpJct2RMuWLVP79u01adIkzZo1q0jjAUBxoSIJOLgHHnhAu3fvVp8+ffTZZ58pMjJSEyZM0JEjRzRnzhwtXLjQ0vett97S1KlTlZSUpJEjRyo+Pl4TJ07UBx98UKwxVahQQWvWrFHZsmU1btw4xcTEKCoqSt27d88Xe9WqVfXOO+8oMjJSb7zxhtq2bav4+Hh5e3tfd/zQ0FBt2LBBFSpU0OTJkzV79my1bNlS27ZtK3ISZgvPP/+8xowZo6+++kojRozQrl279MUXX6hKlSpW/cqUKaOYmBiVKlVKQ4YM0SOPPKItW7YU6V4XLlzQk08+qSZNmuiFF16wtLdp00YjRozQnDlz9O233xbLcwFAUZnMRVmNDgAAAPwfKpIAAAAwhEQSAAAAhpBIAgAAwBASSQAAABhCIgkAAABDSCQBAABgCIkkAAAADCmRv2zj0WSYvUMAYCNnk163dwgAbMTdjlmJLXOHS9+X3H9uUZEEAACAISWyIgkAAFAkJmprRpBIAgAAmEz2juCWRPoNAAAAQ6hIAgAAMLVtCN8aAAAADKEiCQAAwBpJQ6hIAgAAwBAqkgAAAKyRNIRvDQAAAIZQkQQAAGCNpCEkkgAAAExtG8K3BgAAAEOoSAIAADC1bQgVSQAAABhCRRIAAIA1kobwrQEAAMAQKpIAAACskTSEiiQAAAAMoSIJAADAGklDSCQBAACY2jaE9BsAAACGUJEEAABgatsQvjUAAAAYQkUSAACAiqQhfGsAAAAwhIokAACAC29tG0FFEgAAAIZQkQQAAGCNpCF8awAAACaT7Y4iSkhIUPfu3RUUFCSTyaTY2Nh8ffbt26cHHnhA3t7e8vT01N13361jx45Zzl++fFmRkZGqUKGCypUrp969e+vkyZNWYxw7dkzdunVT2bJl5e/vr7Fjx+rKlStFipVEEgAAwIFkZmaqUaNGeuONNwo8f+jQIbVu3Vr16tXT5s2btXv3bk2aNEnu7u6WPqNGjdLatWv10UcfacuWLTp+/Lh69eplOZ+bm6tu3bopOztb27dvV0xMjJYvX67JkycXKVaT2Ww2G3tMx+XRZJi9QwBgI2eTXrd3CABsxN2OC+48QmfYbOxL/5tg+FqTyaQ1a9aoZ8+elrbw8HCVKVNGK1asKPCa8+fPq1KlSlq1apX69OkjSfr5559Vv359JSYmqmXLllq/fr3uv/9+HT9+XAEBAZKk6OhojR8/XqdOnZKrq2uh4qMiCQAAYENZWVlKT0+3OrKysgyNlZeXpy+++EJ33HGHwsLC5O/vrxYtWlhNfycnJysnJ0ehoaGWtnr16qlq1apKTEyUJCUmJqphw4aWJFKSwsLClJ6err179xY6HhJJAAAAG66RjIqKkre3t9URFRVlKMy0tDRlZGRoxowZ6ty5szZu3KgHH3xQvXr10pYtWyRJqampcnV1lY+Pj9W1AQEBSk1NtfT5axJ57fy1c4XFW9sAAAA2NHHiRI0ePdqqzc3NzdBYeXl5kqQePXpo1KhRkqTGjRtr+/btio6OVrt27f5dsEVEIgkAAGDD7X/c3NwMJ45/V7FiRZUuXVrBwcFW7fXr19fWrVslSYGBgcrOzta5c+esqpInT55UYGCgpc/OnTutxrj2Vve1PoXB1DYAAMAtwtXVVXfffbf2799v1f7LL7+oWrVqkqRmzZqpTJky2rRpk+X8/v37dezYMYWEhEiSQkJCtGfPHqWlpVn6xMXFycvLK1+S+k+oSAIAABjY79FWMjIydPDgQcvnw4cPKyUlRX5+fqpatarGjh2rhx9+WG3btlWHDh20YcMGrV27Vps3b5YkeXt7a+DAgRo9erT8/Pzk5eWlZ599ViEhIWrZsqUkqVOnTgoODtZjjz2mWbNmKTU1VS+++KIiIyOLVD0lkQQAAHCgX7b57rvv1KFDB8vna+srIyIitHz5cj344IOKjo5WVFSUhg8frrp16+qTTz5R69atLdfMmzdPLi4u6t27t7KyshQWFqY333zTcr5UqVJat26dhg4dqpCQEHl6eioiIkLTpk0rUqzsIwnglsI+kkDJZdd9JDvPtdnYlzaMvnGnWxQVSQAAAAea2r6VOE4dFwAAALcUKpIAAAAOtEbyVsK3BgAAAEOoSAIAALBG0hAqkgAAADCEiiQAAABrJA0hkQQAACCRNIRvDQAAAIZQkQQAAOBlG0OoSAIAAMAQKpIAAACskTSEbw0AAACGUJEEAABgjaQhVCQBAABgCBVJAAAA1kgaQiIJAADA1LYhpN8AAAAwhIokAABweiYqkoZQkQQAAIAhVCQBAIDToyJpDBVJAAAAGEJFEgAAgIKkIVQkAQAAYAgVSQAA4PRYI2kMiSQAAHB6JJLGMLUNAAAAQ6hIAgAAp0dF0hgqkgAAADCEiiQAAHB6VCSNoSIJAAAAQ6hIAgAAUJA0hIokAAAADKEiCQAAnB5rJI2hIgkAAABDqEgCAACnR0XSGBJJAADg9EgkjWFqGwAAAIZQkQQAAE6PiqQxVCQBAABgCBVJAAAACpKGUJEEAACAIVQkAQCA02ONpDFUJAEAAGAIFUkAAOD0qEgaQyIJAACcHomkMUxtAwAAwBASSQAAAJMNjyJKSEhQ9+7dFRQUJJPJpNjY2Ov2HTJkiEwmk+bPn2/VfubMGfXv319eXl7y8fHRwIEDlZGRYdVn9+7datOmjdzd3VWlShXNmjWryLGSSAIAADiQzMxMNWrUSG+88cY/9luzZo2+/fZbBQUF5TvXv39/7d27V3FxcVq3bp0SEhI0aNAgy/n09HR16tRJ1apVU3Jysl577TVNmTJFS5cuLVKsrJEEAABOz5HWSHbp0kVdunT5xz5//PGHnn32WX311Vfq1q2b1bl9+/Zpw4YNSkpKUvPmzSVJixYtUteuXTV79mwFBQVp5cqVys7O1jvvvCNXV1c1aNBAKSkpmjt3rlXCeSNUJAEAAGwoKytL6enpVkdWVpbh8fLy8vTYY49p7NixatCgQb7ziYmJ8vHxsSSRkhQaGioXFxft2LHD0qdt27ZydXW19AkLC9P+/ft19uzZQsdCIgkAAJyeyWSy2REVFSVvb2+rIyoqynCsM2fOVOnSpTV8+PACz6empsrf39+qrXTp0vLz81NqaqqlT0BAgFWfa5+v9SkMprYBAABsaOLEiRo9erRVm5ubm6GxkpOTtWDBAu3atcshpuPtlkimp6cXuq+Xl5cNIwEAAM7OlkmZm5ub4cTx77755hulpaWpatWqlrbc3FyNGTNG8+fP15EjRxQYGKi0tDSr665cuaIzZ84oMDBQkhQYGKiTJ09a9bn2+VqfwrBbIunj43PD/9HMZrNMJpNyc3NvUlQAAMAZOUJ1rzAee+wxhYaGWrWFhYXpscce0xNPPCFJCgkJ0blz55ScnKxmzZpJkuLj45WXl6cW
LVpY+rzwwgvKyclRmTJlJElxcXGqW7eufH19Cx2P3RLJr7/+2l63BgAAcFgZGRk6ePCg5fPhw4eVkpIiPz8/Va1aVRUqVLDqX6ZMGQUGBqpu3bqSpPr166tz5856+umnFR0drZycHA0bNkzh4eGWrYL69eunqVOnauDAgRo/frx+/PFHLViwQPPmzStSrHZLJNu1a2evWwMAAFhzoILkd999pw4dOlg+X1tfGRERoeXLlxdqjJUrV2rYsGHq2LGjXFxc1Lt3by1cuNBy3tvbWxs3blRkZKSaNWumihUravLkyUXa+keSTGaz2VykK2zo4sWLOnbsmLKzs63a77rrriKN49FkWHGGBcCBnE163d4hALARdzu+Ahw05FObjX08upfNxrY3h3hr+9SpU3riiSe0fv36As+zRhIAANjSrbJG0tE4xD6SI0eO1Llz57Rjxw55eHhow4YNiomJUZ06dfT555/bOzwAAAAUwCEqkvHx8frss8/UvHlzubi4qFq1arrvvvvk5eWlqKiofD/9AwAAUJyoSBrjEBXJzMxMyw7svr6+OnXqlCSpYcOG2rVrlz1DAwAAwHU4RCJZt25d7d+/X5LUqFEjLVmyRH/88Yeio6NVuXJlO0cHAABKOlv+RGJJ5hBT2yNGjNCJEyckSS+99JI6d+6slStXytXVtdCvuQMAABhWsvM9m3GIRPLRRx+1/H2zZs109OhR/fzzz6pataoqVqxox8gAAABwPQ6RSP5d2bJl1bRpU3uHAQAAnERJn4K2FYdIJM1msz7++GN9/fXXSktLU15entX5Tz+13SahAAAAMMYhEsmRI0dqyZIl6tChgwICAvivAgAAcFORexjjEInkihUr9Omnn6pr1672DgUAAACF5BCJpLe3t2rWrGnvMGAnrZrW0qjHQ9U0uKoqV/JW31FLtXbzbsv5S98X/NvKz89bo3nvbpIkjRsYpi5tGuiuO25X9pUrqtx2XL7+7e+5Qy89c78a1A5S5qVsrVy7Qy+9sVa5uXn5+gKwneTvkrT8nbe176cfderUKc1b+Ib+0zFUkpSTk6PXF87X1m8S9Pvvv6l8uXJqEXKvRowaI3//AMsYR44c1rzZs5Ty/S7l5OSozh11FfnsCN3ToqW9Hgu3OCqSxjjEPpJTpkzR1KlTdenSJXuHAjvw9HDTnl/+0MioDws8Xz10otUx6KX3lJeXpzWbUix9XMuU0qdx3+u/H39T4BgN77hNsYuGauP2n9TykRl6bMI76tauoV4e3sMWjwTgH1y6dFF169bVxBdfynfu8uXL+nnfTxo0ZKg+/OhTzV3wuo4cPqwRw4Za9Xv2mSHKzc3Vf9+J0fsffaq6devp2cghOv1/P2gB4OZwiIpk37599f7778vf31/Vq1dXmTJlrM7z6zYl28ZtP2njtp+ue/7knxesPndv31Bbkg7oyB9/Wtpejv5SkvRo9xYFjtGnU1P9eOC4opZukCT9+ttpvbAgVu/NfFKvLPlSGRez/u1jACik1m3aqXWbdgWeK1++vJa8tcyqbeILk9Q//CGdOH5clYOCdPbsGR07ekRTp7+iO+rWkySNGD1GH36wSgcPHlDFSpVs/gwoeahIGuMQiWRERISSk5P16KOP8rIN/pG/X3l1bn2nnp68okjXubmW1uWsHKu2S1k58nB3VZP6VfVN8oHiDBNAMcrIyJDJZFJ5Ly9Jko+Pr6rXqKG1n8WqXv1gubq66uPVH8qvQgUFBzewc7S4ZZF6GOIQieQXX3yhr776Sq1bty7ytVlZWcrKsq4mmfNyZXIpVVzhwYE82r2FLly8rNj4lCJdF7d9n4b166C+nZvp4427FFjBS88P6iJJqlzJywaRAigOWVlZmj93trp07aZy5cpJulo5WvrWco0c/ozuvaepXFxc5OfnpzeXvCUvb287Rww4F4dYI1mlShV5eRn7l3lUVJS8vb2tjisnk4s5QjiKx3u01Ifrv1NW9pUiXbfp25/1/PxYLXw+XOd3zNfuzybrq617JUl5eWZbhArgX8rJydHY0SNkNpv1wuSplnaz2axXX54qP78KWvbuSq384CN1+E+ohkcO0alTaXaMGLcyfmvbGIdIJOfMmaNx48bpyJEjRb524sSJOn/+vNVROqBZ8QcJu2vVpJbq1gjUsjXbDV2/8L14BbYdqzu6TtbtHSZY3gw//Pvp4gwTQDHIycnR2DEjdeL4cS156x1LNVKSdu74VglbNmvm7Hlq0rSZ6gc30AuTp8jdzV2fx8baL2jACTnE1Pajjz6qixcvqlatWipbtmy+l23OnDlz3Wvd3Nzk5uZm1ca0dskU0TNEyT8d055f/vhX45w4dV6S1Ldzc/124oy+//m34ggPQDG5lkQeO3pUby17Vz4+vlbnr+3w4fK3So/JxSSzme28YExJrxzaikMkkvPnz7d3CLAjTw9X1ary/9+yrH5bBd11x206m35Rv6WelSSV93RXr/uaaMLcNQWOUSXQV75eZVWlsq9KubjorjtukyQd+u2UMi9lS5JGPd5RG7fvU15ennp0bKznnrhPj457h6lt4Ca7mJmpY8eOWT7/8fvv+nnfPnl7e6tipUp6btRw7dv3kxa9sUR5ubmWLX28vb1VxtVVjRo3lpeXl158foIGD42Um7ubPv14tf74/Q+1adveTk8FOCeT2Wy2679Fc3JyNHjwYE2aNEk1atQoljE9mgwrlnFwc7RpVkcb3xqRr33F599q0EvvSZKe7NVKrz3XWzU6Pa/0jMv5+i6d+qgeeyD/RsSdnlpgeSN7/ZJn1bh+FbmVKa09v/yhV5au/8dth+CYziYVvEE9bh1JO3foqScez9f+QI8HNSRymLp26ljgdW8te1d333N1i6+9P+7RogXz9dPeH3XlSo5q1a6jwUOfue62Qrg1uNuxvFX7ufU2G/vg7C42G9ve7J5ISlf/KzMlJYVEEsANkUgCJReJ5K3HIV626dmzp2JZIA0AAOyEt7aNcYg1knXq1NG0adO0bds2NWvWTJ6enlbnhw8fbqfIAACAMyjh+Z7NOEQi+fbbb8vHx0fJyclKTrbeA9JkMpFIAgAAOCCHSCQPHz5s7xAAAIATK+lT0LbiEGsk/8psNssB3v8BAADADThMIvnuu++qYcOG8vDwkIeHh+666y6tWLHC3mEBAAAnYDLZ7ijJHGJqe+7cuZo0aZKGDRumVq1aSZK2bt2qIUOG6PTp0xo1apSdIwQAAMDfOUQiuWjRIi1evFiPP/7/N6h94IEH1KBBA02ZMoVEEgAA2JSLSwkvHdqIQ0xtnzhxQvfee2++9nvvvVcnTpywQ0QAAAC4EYdIJGvXrq3Vq1fna//www9Vp04dO0QEAACcCWskjXGIqe2pU6fq4YcfVkJCgmWN5LZt27Rp06YCE0wAAIDixPY/xjhERbJ3797asWOHKlSooNjYWMXGxqpixYrauXOnHnzwQXuHBwAAgAI4REVSkpo1a6aVK1faOwwAAOCEKEgaY9dE0sXF5YalZJPJpCtXrtykiAAAAFBYdk0k16xZc91ziYmJWrh
wofLy8m5iRAAAwBmxRtIYuyaSPXr0yNe2f/9+TZgwQWvXrlX//v01bdo0O0QGAACAG3GIl20k6fjx43r66afVsGFDXblyRSkpKYqJiVG1atXsHRoAACjhTCaTzY6SzO6J5Pnz5zV+/HjVrl1be/fu1aZNm7R27Vrdeeed9g4NAAAA/8CuU9uzZs3SzJkzFRgYqPfff7/AqW4AAABbK+GFQ5uxayI5YcIEeXh4qHbt2oqJiVFMTEyB/T799NObHBkAAHAmJX0K2lbsmkg+/vjj/A8HAABwi7JrIrl8+XJ73h4AAEASU9tG2f1lGwAAANyaHOYnEgEAAOyFpXbGUJEEAACAIVQkAQCA06MgaQwVSQAAAAeSkJCg7t27KygoSCaTSbGxsZZzOTk5Gj9+vBo2bChPT08FBQXp8ccf1/Hjx63GOHPmjPr37y8vLy/5+Pho4MCBysjIsOqze/dutWnTRu7u7qpSpYpmzZpV5FhJJAEAgNNzpJ9IzMzMVKNGjfTGG2/kO3fx4kXt2rVLkyZN0q5du/Tpp59q//79euCBB6z69e/fX3v37lVcXJzWrVunhIQEDRo0yHI+PT1dnTp1UrVq1ZScnKzXXntNU6ZM0dKlS4v2vZnNZnORn9DBeTQZZu8QANjI2aTX7R0CABtxt+OCu7tf2Wyzsbc+F6KsrCyrNjc3N7m5ud3wWpPJpDVr1qhnz57X7ZOUlKR77rlHR48eVdWqVbVv3z4FBwcrKSlJzZs3lyRt2LBBXbt21e+//66goCAtXrxYL7zwglJTU+Xq6irp6g/FxMbG6ueffy70s1GRBAAATs9kst0RFRUlb29vqyMqKqrYYj9//rxMJpN8fHwkSYmJifLx8bEkkZIUGhoqFxcX7dixw9Knbdu2liRSksLCwrR//36dPXu20PfmZRsAAOD0bLn9z8SJEzV69GirtsJUIwvj8uXLGj9+vB555BF5eXlJklJTU+Xv72/Vr3Tp0vLz81NqaqqlT40aNaz6BAQEWM75+voW6v4kkgAAADZU2GnsosrJyVHfvn1lNpu1ePHiYh+/MEgkAQCA07vVtv+5lkQePXpU8fHxlmqkJAUGBiotLc2q/5UrV3TmzBkFBgZa+pw8edKqz7XP1/oUBmskAQAAbiHXksgDBw7of//7nypUqGB1PiQkROfOnVNycrKlLT4+Xnl5eWrRooWlT0JCgnJycix94uLiVLdu3UJPa0skkgAAAA61/U9GRoZSUlKUkpIiSTp8+LBSUlJ07Ngx5eTkqE+fPvruu++0cuVK5ebmKjU1VampqcrOzpYk1a9fX507d9bTTz+tnTt3atu2bRo2bJjCw8MVFBQkSerXr59cXV01cOBA7d27Vx9++KEWLFiQby3nDb83tv8BcCth+x+g5LLn9j8hMxNsNnbi+LZF6r9582Z16NAhX3tERISmTJmS7yWZa77++mu1b99e0tUNyYcNG6a1a9fKxcVFvXv31sKFC1WuXDlL/927dysyMlJJSUmqWLGinn32WY0fP75IsZJIArilkEgCJZc9E8l7Z9kukdw+rmiJ5K2EqW0AAAAYwlvbAADA6dlyH8mSjEQSAAA4PfJIY5jaBgAAgCFUJAEAgNNjatsYKpIAAAAwhIokAABwelQkjaEiCQAAAEOoSAIAAKdHQdIYKpIAAAAwhIokAABweqyRNIZEEgAAOD3ySGOY2gYAAIAhVCQBAIDTY2rbGCqSAAAAMISKJAAAcHoUJI2hIgkAAABDqEgCAACn50JJ0hAqkgAAADCEiiQAAHB6FCSNIZEEAABOj+1/jGFqGwAAAIZQkQQAAE7PhYKkIVQkAQAAYAgVSQAA4PRYI2kMFUkAAAAYQkUSAAA4PQqSxlCRBAAAgCFUJAEAgNMziZKkESSSAADA6bH9jzFMbQMAAMAQKpIAAMDpsf2PMVQkAQAAYAgVSQAA4PQoSBpDRRIAAACGUJEEAABOz4WSpCFUJAEAAGAIFUkAAOD0KEgaQyIJAACcHtv/GMPUNgAAAAyhIgkAAJweBUljqEgCAADAECqSAADA6bH9jzFUJAEAAGAIFUkAAOD0qEcaQ0USAAAAhlCRBAAATo99JI2hIgkAAJyei8l2R1ElJCSoe/fuCgoKkslkUmxsrNV5s9msyZMnq3LlyvLw8FBoaKgOHDhg1efMmTPq37+/vLy85OPjo4EDByojI8Oqz+7du9WmTRu5u7urSpUqmjVrVpFjJZEEAABwIJmZmWrUqJHeeOONAs/PmjVLCxcuVHR0tHbs2CFPT0+FhYXp8uXLlj79+/fX3r17FRcXp3Xr1ikhIUGDBg2ynE9PT1enTp1UrVo1JScn67XXXtOUKVO0dOnSIsXK1DYAAHB6jjS13aVLF3Xp0qXAc2azWfPnz9eLL76oHj16SJLeffddBQQEKDY2VuHh4dq3b582bNigpKQkNW/eXJK0aNEide3aVbNnz1ZQUJBWrlyp7OxsvfPOO3J1dVWDBg2UkpKiuXPnWiWcN0JFEgAAwIaysrKUnp5udWRlZRka6/Dhw0pNTVVoaKilzdvbWy1atFBiYqIkKTExUT4+PpYkUpJCQ0Pl4uKiHTt2WPq0bdtWrq6ulj5hYWHav3+/zp49W+h4SCQBAIDTM5lsd0RFRcnb29vqiIqKMhRnamqqJCkgIMCqPSAgwHIuNTVV/v7+VudLly4tPz8/qz4FjfHXexQGU9sAAAA2NHHiRI0ePdqqzc3NzU7RFC8SSQAA4PRsuUbSzc2t2BLHwMBASdLJkydVuXJlS/vJkyfVuHFjS5+0tDSr665cuaIzZ85Yrg8MDNTJkyet+lz7fK1PYRQqkfz8888LPeADDzxQ6L4AAAAovBo1aigwMFCbNm2yJI7p6enasWOHhg4dKkkKCQnRuXPnlJycrGbNmkmS4uPjlZeXpxYtWlj6vPDCC8rJyVGZMmUkSXFxcapbt658fX0LHU+hEsmePXsWajCTyaTc3NxC3xwAAMARGNnv0VYyMjJ08OBBy+fDhw8rJSVFfn5+qlq1qkaOHKmXX35ZderUUY0aNTRp0iQFBQVZ8rX69eurc+fOevrppxUdHa2cnBwNGzZM4eHhCgoKkiT169dPU6dO1cCBAzV+/Hj9+OOPWrBggebNm1ekWAuVSObl5RVpUAAAgFuJI23/891336lDhw6Wz9fWV0ZERGj58uUaN26cMjMzNWjQIJ07d06tW7fWhg0b5O7ubrlm5cqVGjZsmDp27CgXFxf17t1bCxcutJz39vbWxo0bFRkZqWbNmqlixYqaPHlykbb+kSST2Ww2/8vndTgeTYbZOwQANnI26XV7hwDARtzt+ObGEx/ssdnYy8Ib2mxsezP0P1lmZqa2bNmiY8eOKTs72+rc8OHDiyUwAACAm8Vx6pG3liInkt9//726du2qixcvKjMzU35+fjp9+rTKli0rf39/EkkAAAAnUeQNyUeNGqXu3bvr7Nmz8vDw0LfffqujR4+qWbNmmj17ti1iBAAAsCkXk8lmR0lW5EQyJSVFY8aMkYuLi0qVKqWsrCxVqVJFs2
bN0vPPP2+LGAEAAOCAipxIlilTRi4uVy/z9/fXsWPHJF19++e3334r3ugAAABuAlv+RGJJVuQ1kk2aNFFSUpLq1Kmjdu3aafLkyTp9+rRWrFihO++80xYxAgAAwAEVuSL56quvWn6S55VXXpGvr6+GDh2qU6dOaenSpcUeIAAAgK2ZTCabHSVZkSuSzZs3t/y9v7+/NmzYUKwBAQAA4NZgx60/AQAAHEMJLxzaTJETyRo1avxjmfbXX3/9VwEBAADcbCV9mx5bKXIiOXLkSKvPOTk5+v7777VhwwaNHTu2uOICAACAgytyIjlixIgC29944w199913/zogAACAm42CpDFFfmv7erp06aJPPvmkuIYDAACAgyu2l20+/vhj+fn5FddwAAAAN01J36bHVgxtSP7XL9tsNis1NVWnTp3Sm2++WazBAQAAwHEVOZHs0aOHVSLp4uKiSpUqqX379qpXr16xBmfUb9/Mt3cIAGxk4TeH7B0CABsZ16GW3e5dbGv9nEyRE8kpU6bYIAwAAADcaoqcgJcqVUppaWn52v/880+VKlWqWIICAAC4mfiJRGOKXJE0m80FtmdlZcnV1fVfBwQAAHCzuZTsfM9mCp1ILly4UNLVjP2tt95SuXLlLOdyc3OVkJDgMGskAQAAYHuFTiTnzZsn6WpFMjo62moa29XVVdWrV1d0dHTxRwgAAGBjVCSNKXQiefjwYUlShw4d9Omnn8rX19dmQQEAAMDxFXmN5Ndff22LOAAAAOympL8UYytFfmu7d+/emjlzZr72WbNm6aGHHiqWoAAAAOD4ipxIJiQkqGvXrvnau3TpooSEhGIJCgAA4GZyMdnuKMmKnEhmZGQUuM1PmTJllJ6eXixBAQAAwPEVOZFs2LChPvzww3ztH3zwgYKDg4slKAAAgJvJZLLdUZIV+WWbSZMmqVevXjp06JD+85//SJI2bdqkVatW6eOPPy72AAEAAGzNpaRnfDZS5ESye/fuio2N1auvvqqPP/5YHh4eatSokeLj4+Xn52eLGAEAAOCAipxISlK3bt3UrVs3SVJ6erref/99Pffcc0pOTlZubm6xBggAAGBrRV7rB0n/4ntLSEhQRESEgoKCNGfOHP3nP//Rt99+W5yxAQAAwIEVqSKZmpqq5cuX6+2331Z6err69u2rrKwsxcbG8qINAAC4ZbFE0phCVyS7d++uunXravfu3Zo/f76OHz+uRYsW2TI2AAAAOLBCVyTXr1+v4cOHa+jQoapTp44tYwIAALipeGvbmEJXJLdu3aoLFy6oWbNmatGihV5//XWdPn3alrEBAADAgRU6kWzZsqX++9//6sSJExo8eLA++OADBQUFKS8vT3Fxcbpw4YIt4wQAALAZNiQ3pshvbXt6eurJJ5/U1q1btWfPHo0ZM0YzZsyQv7+/HnjgAVvECAAAYFP81rYx/2rbpLp162rWrFn6/fff9f777xdXTAAAALgFGNqQ/O9KlSqlnj17qmfPnsUxHAAAwE3FyzbGsJE7AAAADCmWiiQAAMCtjIKkMVQkAQAAYAgVSQAA4PRK+tvVtkJFEgAAAIZQkQQAAE7PJEqSRpBIAgAAp8fUtjFMbQMAAMAQEkkAAOD0HOUnEnNzczVp0iTVqFFDHh4eqlWrlqZPny6z2WzpYzabNXnyZFWuXFkeHh4KDQ3VgQMHrMY5c+aM+vfvLy8vL/n4+GjgwIHKyMgojq/KCokkAACAg5g5c6YWL16s119/Xfv27dPMmTM1a9YsLVq0yNJn1qxZWrhwoaKjo7Vjxw55enoqLCxMly9ftvTp37+/9u7dq7i4OK1bt04JCQkaNGhQscfLGkkAAOD0TA6yI/n27dvVo0cPdevWTZJUvXp1vf/++9q5c6ekq9XI+fPn68UXX1SPHj0kSe+++64CAgIUGxur8PBw7du3Txs2bFBSUpKaN28uSVq0aJG6du2q2bNnKygoqNjipSIJAABgQ1lZWUpPT7c6srKyCux77733atOmTfrll18kST/88IO2bt2qLl26SJIOHz6s1NRUhYaGWq7x9vZWixYtlJiYKElKTEyUj4+PJYmUpNDQULm4uGjHjh3F+mwkkgAAwOnZco1kVFSUvL29rY6oqKgC45gwYYLCw8NVr149lSlTRk2aNNHIkSPVv39/SVJqaqokKSAgwOq6gIAAy7nU1FT5+/tbnS9durT8/PwsfYoLU9sAAAA2NHHiRI0ePdqqzc3NrcC+q1ev1sqVK7Vq1So1aNBAKSkpGjlypIKCghQREXEzwi0SEkkAAOD0bLlE0s3N7bqJ49+NHTvWUpWUpIYNG+ro0aOKiopSRESEAgMDJUknT55U5cqVLdedPHlSjRs3liQFBgYqLS3NatwrV67ozJkzluuLC1PbAADA6bmYTDY7iuLixYtycbFOz0qVKqW8vDxJUo0aNRQYGKhNmzZZzqenp2vHjh0KCQmRJIWEhOjcuXNKTk629ImPj1deXp5atGhh9CsqEBVJAAAAB9G9e3e98sorqlq1qho0aKDvv/9ec+fO1ZNPPinp6tvlI0eO1Msvv6w6deqoRo0amjRpkoKCgtSzZ09JUv369dW5c2c9/fTTio6OVk5OjoYNG6bw8PBifWNbIpEEAABwmJ9IXLRokSZNmqRnnnlGaWlpCgoK0uDBgzV58mRLn3HjxikzM1ODBg3SuXPn1Lp1a23YsEHu7u6WPitXrtSwYcPUsWNHubi4qHfv3lq4cGGxx2sy/3Wr9BLidMYVe4cAwEbeSTpq7xAA2Mi4DrXsdu+FWw/bbOzhrWvYbGx7oyIJAACcnoPsR37L4WUbAAAAGEJFEgAAOD0XUZI0gookAAAADKEiCQAAnB5rJI0hkQQAAE7PUbb/udUwtQ0AAABDqEgCAACnV9SfMsRVVCQBAABgCBVJAADg9ChIGkNFEgAAAIZQkQQAAE6PNZLGUJEEAACAIVQkAQCA06MgaQyJJAAAcHpM0RrD9wYAAABDqEgCAACnZ2Ju2xAqkgAAADCEiiQAAHB61CONoSIJAAAAQ6hIAgAAp8eG5MZQkQQAAIAhVCQBAIDTox5pDIkkAABwesxsG8PUNgAAAAyhIgkAAJweG5IbQ0USAAAAhlCRBAAATo/KmjF8bwAAADCEiiQAAHB6rJE0hookAAAADKEiCQAAnB71SGOoSAIAAMAQKpIAAMDpsUbSGBJJAADg9JiiNYbvDQAAAIZQkQQAAE6PqW1jqEgCAADAECqSAADA6VGPNIaKJAAAAAyhIgkAAJweSySNoSIJAAAAQ6hIAgAAp+fCKklDSCQBAIDTY2rbGKa2AQAAYAgVSQAA4PRMTG0bQkUSAAAAhpBIAgAAp2cy2e4oqj/++EOPPvqoKlSoIA8PDzVs2FDfffed5bzZbNbkyZNVuXJleXh4KDQ0VAcOHLAa48yZM+rfv7+8vLzk4+OjgQMHKiMj499+TfmQSAIAADiIs2fPqlWrVipTpozWr1+vn376SXPmzJGvr6+lz6xZs7Rw4UJFR0drx44d8vT0VFhYmC5fvmzp079/f+3du
1dxcXFat26dEhISNGjQoGKP12Q2m83FPqqdnc64Yu8QANjIO0lH7R0CABsZ16GW3e69Ye8pm43duUGlQvedMGGCtm3bpm+++abA82azWUFBQRozZoyee+45SdL58+cVEBCg5cuXKzw8XPv27VNwcLCSkpLUvHlzSdKGDRvUtWtX/f777woKCvr3D/V/qEgCAADYUFZWltLT062OrKysAvt+/vnnat68uR566CH5+/urSZMm+u9//2s5f/jwYaWmpio0NNTS5u3trRYtWigxMVGSlJiYKB8fH0sSKUmhoaFycXHRjh07ivXZSCQBAIDTs+UayaioKHl7e1sdUVFRBcbx66+/avHixapTp46++uorDR06VMOHD1dMTIwkKTU1VZIUEBBgdV1AQIDlXGpqqvz9/a3Oly5dWn5+fpY+xYXtfwAAgNOz5YbkEydO1OjRo63a3NzcCuybl5en5s2b69VXX5UkNWnSRD/++KOio6MVERFhuyANoiIJAABgQ25ubvLy8rI6rpdIVq5cWcHBwVZt9evX17FjxyRJgYGBkqSTJ09a9Tl58qTlXGBgoNLS0qzOX7lyRWfOnLH0KS4kkgAAwOmZbPhXUbRq1Ur79++3avvll19UrVo1SVKNGjUUGBioTZs2Wc6np6drx44dCgkJkSSFhITo3LlzSk5OtvSJj49XXl6eWrRoYfQrKhBT2wAAAA5i1KhRuvfee/Xqq6+qb9++2rlzp5YuXaqlS5dKkkwmk0aOHKmXX35ZderUUY0aNTRp0iQFBQWpZ8+ekq5WMDt37qynn35a0dHRysnJ0bBhwxQeHl6sb2xLJJIAAABycZBfSLz77ru1Zs0aTZw4UdOmTVONGjU0f/589e/f39Jn3LhxyszM1KBBg3Tu3Dm1bt1aGzZskLu7u6XPypUrNWzYMHXs2FEuLi7q3bu3Fi5cWOzx2n0fyfPnzys3N1d+fn5W7WfOnFHp0qXl5eVV5DHZRxIoudhHEii57LmP5KafT9ts7I71KtpsbHuz+xrJ8PBwffDBB/naV69erfDwcDtEBAAAnI2jrJG81dg9kdyxY4c6dOiQr719+/bFvmkmAAAAio/d10hmZWXpypX8U9E5OTm6dOmSHSICAADOxpb7SJZkdq9I3nPPPZY3kf4qOjpazZo1s0NEAADA2TC1bYzdK5Ivv/yyQkND9cMPP6hjx46SpE2bNikpKUkbN260c3QAAAC4HrtXJFu1aqXExERVqVJFq1ev1tq1a1W7dm3t3r1bbdq0sXd4AADACbiYbHeUZHavSEpS48aNtXLlSnuHAQAAgCKwSyKZnp5u2R8yPT39H/sa2UcSAACgKEr6WkZbsUsi6evrqxMnTsjf318+Pj4yFfCqlNlslslkUm5urh0iBAAAwI3YJZGMj4+3/JJNfHx8gYkknNe77/xXW76O09Ejh+Xm5q6GdzXW0OGjVa16DUlS+vlzemvJG9r57XadTD0hXx9ftWnfUU8PfVblype3jNOqWYN8Y0999TWFhnW9ac8CwFpeXq6+X7dSB3d8rUvpZ1XW2091QkLVuOsjln8XHPl+m/YlfKk/jx1UVuYF9XxhkSpUsf7Fk5+/Wa9DOzfrz98OKufyJT06d7XcypazxyOhhCAVMcYuiWS7du0sf9++fXt7hAAHlrIrSb0eekT1GzRUbu4VLXl9gUZFPq2VH38uD4+yOn3qlE6fStOwkc+peo1aOnniuF6LmqbTp9P0yqz5VmM9/9LLanlva8vncuVZKgHY0+6vPta+LV+q7YDR8q1cTaePHtA3786Tq4enGvynhyQpJ+uyAms3UM1mbbT1vYJ/G/hKdpZub9BMtzdopu9il9/EJwDwV3Z/2WbKlCmaPHmyXFysXyA/f/68hgwZovfff99OkcFe5r5uva/oC1Nf0f2hbbR/309q3LS5atauo1dfW2A5f3uVqhr0zAhNmzReV65cUenS////1uXLe6lCxUo3LXYA/yzt159UrVFLVW14jySpfMUA/frdZp068oulT52WV7eCu3D65HXHubNjT0nSif27bRcsnAoFSWPsvv3P22+/rdatW+vXX3+1tG3evFkNGzbUoUOH7BgZHEVmxgVJkpeX93X7ZGRckKdnOaskUpLmzHxZXf/TSk89/rDWffapzGazTWMF8M/8awbr+M8pOn/yd0nSn7//qtSDP+n2Bs3tHBmcnYvJZLOjJLN7RXL37t0aPHiwGjdurDlz5uiXX37RggULNHbsWE2dOvWG12dlZSkrK8u6LaeU3NzcbBUybqK8vDwtmD1TdzVqopq16xTY59zZs1r+VrQe6PWQVftTQ4ap2d0t5O7uoZ3fbtOcGdN16eJFPfTIozcjdAAFaBT2kHIuX9THUwbLZHKR2Zyn5j0eV+0WHewdGgAD7J5I+vr6avXq1Xr++ec1ePBglS5dWuvXr7f8ys2NREVF5Us4x06cpHHPT7ZFuLjJ5sx4Wb8eOqDFb68o8HxmRobGjhiqGjVraeCgZ6zOPfH0UMvf31Gvvi5duqRVK5aRSAJ29GvyNzq082u1f3KcfIOq6s/fftWOj5aqrHcF1QkJtXd4cGIlu25oO3af2pakRYsWacGCBXrkkUdUs2ZNDR8+XD/88EOhrp04caLOnz9vdYwYM97GEeNmmDPzZW3fukWLliyTf0BgvvOZmZka/exglfX01KuzF6p0mTL/OF6DO+9S2slUZWdn2ypkADeQ9OnbuivsIdW6u538bquhOi07qkHHnvphw2p7hwbAALtXJDt37qzvvvtOMTEx6tOnjy5duqTRo0erZcuWmjp1qsaNG/eP17u5ueWbxs7OuGLLkGFjZrNZc2e9ooSvN+n1pcsVdNvt+fpkZmRo1LBBcnV11cy5rxdqKcOBX35WeS8vubq62iJsAIVwJTtLJpN1DcPF5eoUN2BXlCQNsXsimZubq927dysoKEiS5OHhocWLF+v+++/XU089dcNEEiXPnBnTFbfhS82Yu0hly5bVn6dPSZLKlSsvN3d3ZWZkaGTk08q6fFmTp89QZmaGMjMzJEk+vn4qVaqUtiZ8rTN//qk7GzaSq5urkr5N1Lvv/FePPDbAjk8GoGrDFkpZ/4E8/SrJt3I1/fnbIf34vzWqc28nS5+szAvKOJOmi+fOSJLlxRwPL1+V9b66B/HF82d0Kf2s0k8dlySd/eOIyrh7qJyfv9w8ywvAzWEyO/BrrKdPn1bFihWLfh0VyVtaQRuJS1f3hOz2wIPa9d1OPTv4iQL7fLx2oyoH3aZvt3+j6Nfn6/ffjklms26rUlUP9gnXAw/2ybfVFG4t7yQdtXcI+BeyL1/Urs9X6EjKdl2+cF5lvf1U8+52atKtn0qVvro85Zftcfrm3Xn5rm3SrZ+adr+6xnnX2vf0/Rer8vVp8/go3XHvfbZ9CNjMuA61btzJRnYcOm+zsVvUuv6uI7c6h0gkDx06pPnz52vfvn2SpODgYI0cOVI1a9Y0NB6JJFBykUgCJReJ5K3H7qWZr776SsHBwdq5c6fuuusu3XXXXdqxY4eCg4MVFxdn7/AA
AIATMJlsd5Rkdl8jOWHCBI0aNUozZszI1z5+/Hjddx9TFAAAwLZKeL5nM3avSO7bt08DBw7M1/7kk0/qp59+skNEAAAAKAy7J5KVKlVSSkpKvvaUlBT5+/vf/IAAAIDzMdnwKMHsNrU9bdo0Pffcc3r66ac1aNAg/frrr7r33nslSdu2bdPMmTM1evRoe4UHAACAG7DbW9ulSpXSiRMnVKlSJc2fP19z5szR8eNX9wMLCgrS2LFjNXz4cJkMrFLlrW2g5OKtbaDksudb298dTrfZ2M1reNlsbHuzW0XyWv5qMpk0atQojRo1ShcuXJAklS/PZrIAAACOzq5vbf+92kgCCQAA7KGkb9NjK3ZNJO+4444bTl2fOXPmJkUDAACAorBrIjl16lR5e5fc3d4BAMCtgYKkMXZNJMPDw9niBwAA2B+ZpCF220fSyNvYAAAAcBx2f2sbAADA3kyUJA2xWyKZl5dnr1sDAACgGNh1jSQAAIAjYMWdMXb/rW0AAADcmqhIAgAAp0dB0hgqkgAAADCEiiQAAAAlSUNIJAEAgNNj+x9jmNoGAACAIVQkAQCA02P7H2OoSAIAAMAQKpIAAMDpUZA0hookAAAADKEiCQAAQEnSECqSAAAAMIREEgAAOD2TDf/6N2bMmCGTyaSRI0da2i5fvqzIyEhVqFBB5cqVU+/evXXy5Emr644dO6Zu3bqpbNmy8vf319ixY3XlypV/FUtBSCQBAAAcUFJSkpYsWaK77rrLqn3UqFFau3atPvroI23ZskXHjx9Xr169LOdzc3PVrVs3ZWdna/v27YqJidHy5cs1efLkYo+RRBIAADg9k8l2hxEZGRnq37+//vvf/8rX19fSfv78eb399tuaO3eu/vOf/6hZs2ZatmyZtm/frm+//VaStHHjRv30009677331LhxY3Xp0kXTp0/XG2+8oezs7OL4uixIJAEAgNMz2fDIyspSenq61ZGVlfWP8URGRqpbt24KDQ21ak9OTlZOTo5Ve7169VS1alUlJiZKkhITE9WwYUMFBARY+oSFhSk9PV179+418vVcF4kkAACADUVFRcnb29vqiIqKum7/Dz74QLt27SqwT2pqqlxdXeXj42PVHhAQoNTUVEufvyaR185fO1ec2P4HAADAhtv/TJw4UaNHj7Zqc3NzK7Dvb7/9phEjRiguLk7u7u62C6qYUJEEAACwITc3N3l5eVkd10skk5OTlZaWpqZNm6p06dIqXbq0tmzZooULF6p06dIKCAhQdna2zp07Z3XdyZMnFRgYKEkKDAzM9xb3tc/X+hQXEkkAAOD0HGX7n44dO2rPnj1KSUmxHM2bN1f//v0tf1+mTBlt2rTJcs3+/ft17NgxhYSESJJCQkK0Z88epaWlWfrExcXJy8tLwcHBxfOF/R+mtgEAABxE+fLldeedd1q1eXp6qkKFCpb2gQMHavTo0fLz85OXl5eeffZZhYSEqGXLlpKkTp06KTg4WI899phmzZql1NRUvfjii4qMjLxuJdQoEkkAAOD0jG7TYw/z5s2Ti4uLevfuraysLIWFhenNN9+0nC9VqpTWrVunoUOHKiQkRJ6enoqIiNC0adOKPRaT2Ww2F/uodnY6o/h3bgfgGN5JOmrvEADYyLgOtex27/2pF202dt3AsjYb296oSAIAAKd3CxUkHQqJJAAAAJmkIby1DQAAAEOoSAIAAKdX1G16cBUVSQAAABhCRRIAADi9W2n7H0dCRRIAAACGUJEEAABOj4KkMVQkAQAAYAgVSQAAAEqShpBIAgAAp8f2P8YwtQ0AAABDqEgCAACnx/Y/xlCRBAAAgCFUJAEAgNOjIGkMFUkAAAAYQkUSAACAkqQhVCQBAABgCBVJAADg9NhH0hgSSQAA4PTY/scYprYBAABgCBVJAADg9ChIGkNFEgAAAIZQkQQAAE6PNZLGUJEEAACAIVQkAQAAWCVpCBVJAAAAGEJFEgAAOD3WSBpDIgkAAJweeaQxTG0DAADAECqSAADA6TG1bQwVSQAAABhCRRIAADg9E6skDaEiCQAAAEOoSAIAAFCQNISKJAAAAAyhIgkAAJweBUljSCQBAIDTY/sfY5jaBgAAgCFUJAEAgNNj+x9jqEgCAADAECqSAAAAFCQNoSIJAAAAQ6hIAgAAp0dB0hgqkgAAADCEiiQAAHB67CNpDIkkAABwemz/YwxT2wAAAA4iKipKd999t8qXLy9/f3/17NlT+/fvt+pz+fJlRUZGqkKFCipXrpx69+6tkydPWvU5duyYunXrprJly8rf319jx47VlStXij1eEkkAAOD0TCbbHUWxZcsWRUZG6ttvv1VcXJxycnLUqVMnZWZmWvqMGjVKa9eu1UcffaQtW7bo+PHj6tWrl+V8bm6uunXrpuzsbG3fvl0xMTFavny5Jk+eXFxfl4XJbDabi31UOzudUfwZNwDH8E7SUXuHAMBGxnWoZbd7n72Ya7OxfcuWMnztqVOn5O/vry1btqht27Y6f/68KlWqpFWrVqlPnz6SpJ9//ln169dXYmKiWrZsqfXr1+v+++/X8ePHFRAQIEmKjo7W+PHjderUKbm6uhbLc0lUJAEAAGwqKytL6enpVkdWVlahrj1//rwkyc/PT5KUnJysnJwchYaGWvrUq1dPVatWVWJioiQpMTFRDRs2tCSRkhQWFqb09HTt3bu3uB5LEokkAACATUVFRcnb29vqiIqKuuF1eXl5GjlypFq1aqU777xTkpSamipXV1f5+PhY9Q0ICFBqaqqlz1+TyGvnr50rTry1DQAAnJ4tt/+ZOHGiRo8ebdXm5uZ2w+siIyP1448/auvWrbYK7V8jkQQAALAhNze3QiWOfzVs2DCtW7dOCQkJuv322y3tgYGBys7O1rlz56yqkidPnlRgYKClz86dO63Gu/ZW97U+xYWpbQAA4PRMNvyrKMxms4YNG6Y1a9YoPj5eNWrUsDrfrFkzlSlTRps2bbK07d+/X8eOHVNISIgkKSQkRHv27FFaWpqlT1xcnLy8vBQcHPwvvqX8qEgCAACn5yi/bBMZGalVq1bps88+U/ny5S1rGr29veXh4SFvb28NHDhQo0ePlp+fn7y8vPTss88qJCRELVu2lCR16tRJwcHBeuyxxzRr1iylpqbqxRdfVGRkZJErozfC9j8Abils/wOUXPbc/if9cp7NxvZyL/wEsOk6Ge2yZcs0YMAASVc3JB8zZozef/99ZWVlKSwsTG+++abVtPXRo0c1dOhQbd68WZ6enoqIiNCMGTNUunTx1hBJJAHcUkgkgZLLnonkBRsmkuWLkEjeakrukwEAAMCmWCMJAADgIGskbzVUJAEAAGAIFUkAAOD0irpND66iIgkAAABDqEgCAACn5yj7SN5qqEgCAADAECqSAADA6VGQNIZEEgAAgEzSEKa2AQAAYAgVSQAA4PTY/scYKpIAAAAwhIokAABwemz/YwwVSQAAABhiMpvNZnsHARiVlZWlqKgoTZw4UW5ubvYOB0Ax4s834PhIJHFLS09Pl7e3t86fPy8vLy97hwOgGPH
nG3B8TG0DAADAEBJJAAAAGEIiCQAAAENIJHFLc3Nz00svvcRCfKAE4s834Ph42QYAAACGUJEEAACAISSSAAAAMIREEgAAAIaQSAIF2Lx5s0wmk86dO2fvUAAUgD+jgGMgkYTNDRgwQCaTSTNmzLBqj42NlclkslNUAGzNZDL94zFlyhTDY9977706ceKEvL29iy9gAEVGIombwt3dXTNnztTZs2eLbczs7OxiGwtA8Ttx4oTlmD9/vry8vKzannvuOcNju7q6KjAwkP8YBeyMRBI3RWhoqAIDAxUVFXXdPp988okaNGggNzc3Va9eXXPmzLE6X716dU2fPl2PP/64vLy8NGjQIC1fvlw+Pj5at26d6tatq7Jly6pPnz66ePGiYmJiVL16dfn6+mr48OHKzc21jLVixQo1b95c5cuXV2BgoPr166e0tDSbPT/gjAIDAy2Ht7e3TCaT5bO/v7/mzp2r22+/XW5ubmrcuLE2bNggSTKbzQoNDVVYWJiu7VB35swZ3X777Zo8ebKkgqe2t23bpvbt26ts2bLy9fVVWFhYsf7HK4D8SCRxU5QqVUqvvvqqFi1apN9//z3f+eTkZPXt21fh4eHas2ePpkyZokmTJmn58uVW/WbPnq1GjRrp+++/16RJkyRJFy9e1MKFC/XBBx9ow4YN2rx5sx588EF9+eWX+vLLL7VixQotWbJEH3/8sWWcnJwcTZ8+XT/88INiY2N15MgRDRgwwJZfAYC/WLBggebMmaPZs2dr9+7dCgsL0wMPPKADBw7IZDIpJiZGSUlJWrhwoSRpyJAhuu222yyJ5N+lpKSoY8eOCg4OVmJiorZu3aru3btb/QckABswAzYWERFh7tGjh9lsNptbtmxpfvLJJ81ms9m8Zs0a87X/C/br18983333WV03duxYc3BwsOVztWrVzD179rTqs2zZMrMk88GDBy1tgwcPNpctW9Z84cIFS1tYWJh58ODB140xKSnJLMlyzddff22WZD579mzRHxhAPsuWLTN7e3tbPgcFBZlfeeUVqz533323+ZlnnrF8Xr16tdnd3d08YcIEs6enp/mXX36xnPv7n9FHHnnE3KpVK5s+A4D8qEjippo5c6ZiYmK0b98+q/Z9+/apVatWVm2tWrXSgQMHrCoKzZs3zzdm2bJlVatWLcvngIAAVa9eXeXKlbNq++vUdXJysrp3766qVauqfPnyateunSTp2LFj/+4BAdxQenq6jh8/XuCf+b/+s+Ghhx7Sgw8+qBkzZmj27NmqU6fOdce8VpEEcHORSOKmatu2rcLCwjRx4kRD13t6euZrK1OmjNVnk8lUYFteXp4kKTMzU2FhYfLy8tLKlSuVlJSkNWvWSOIFHsCRXLx4UcnJySpVqpQOHDjwj309PDxuUlQA/opEEjfdjBkztHbtWiUmJlra6tevr23btln127Ztm+644w6VKlWqWO//888/688//9SMGTPUpk0b1atXjxdtgJvIy8tLQUFBBf6ZDw4OtnweM2aMXFxctH79ei1cuFDx8fHXHfOuu+7Spk2bbBYzgIKVtncAcD4NGzZU//79LYvopav/wrj77rs1ffp0Pfzww0pMTNTrr7+uN998s9jvX7VqVbm6umrRokUaMmSIfvzxR02fPr3Y7wPg+saOHauXXnpJtWrVUuPGjbVs2TKlpKRo5cqVkqQvvvhC77zzjhITE9W0aVONHTtWERER2r17t3x9ffONN3HiRDVs2FDPPPOMhgwZIldXV3399dd66KGHVLFixZv9eIDToCIJu5g2bZplqlmSmjZtqtWrV+uDDz7QnXfeqcmTJ2vatGk2eZO6UqVKWr58uT766CMFBwdb1l8BuHmGDx+u0aNHa8yYMWrYsKE2bNigzz//XHXq1NGpU6c0cOBATZkyRU2bNpUkTZ06VQEBARoyZEiB491xxx3auHGjfvjhB91zzz0KCQnRZ599ptKlqZcAtmQym/9vky4AAACgCKhIAgAAwBASSQAAABhCIgkAAABDSCQBAABgCIkkAAAADCGRBAAAgCEkkgAAADCERBIAAACGkEgCcFgDBgxQz549LZ/bt2+vkSNH3vQ4Nm/eLJPJpHPnzt30ewOAIyORBFBkAwYMkMlkkslkkqurq2rXrq1p06bpypUrNr3vp59+WujfRSf5AwDb40dIARjSuXNnLVu2TFlZWfryyy8VGRmpMmXKaOLEiVb9srOz5erqWiz39PPzK5ZxAADFg4okAEPc3NwUGBioatWqaejQoQoNDdXnn39umY5+5ZVXFBQUpLp160qSfvvtN/Xt21c+Pj7y8/NTjx49dOTIEct4ubm5Gj16tHx8fFShQgWNGzdOZrPZ6p5/n9rOysrS+PHjVaVKFbm5ual27dp6++23deTIEXXo0EGS5OvrK5PJpAEDBkiS8vLyFBUVpRo1asjDw0ONGjXSxx9/bHWfL7/8UnfccYc8PDzUoUMHqzgBAP8fiSSAYuHh4aHs7GxJ0qZNm7R//37FxcVp3bp1ysnJUVhYmMqXL69vvvlG27ZtU7ly5dS5c2fLNXPmzNHy5cv1zjvvaOvWrTpz5ozWrFnzj/d8/PHH9f7772vhwoXat2+flixZonLlyqlKlSr65JNPJEn79+/XiRMntGDBAklSVFSU3n33XUVHR2vv3r0aNWqUHn30UW3ZskXS1YS3V69e6t69u1JSUvTUU09pwoQJtvraAOCWxtQ2gH/FbDZr06ZN+uqrr/Tss8/q1KlT8vT01FtvvWWZ0n7vvfeUl5ent956SyaTSZK0bNky+fj4aPPmzerUqZPmz5+viRMnqlevXpKk6OhoffXVV9e97y+//KLVq1crLi5OoaGhkqSaNWtazl+bBvf395ePj4+kqxXMV199Vf/73/8UEhJiuWbr1q1asmSJ2rVrp8WLF6tWrVqaM2eOJKlu3bras2ePZs6cWYzfGgCUDCSSAAxZt26dypUrp5ycHOXl5alfv36aMmWKIiMj1bBhQ6t1kT/88IMOHjyo8uXLW41x+fJlHTp0SOfPn9eJEyfUokULy7nSpUurefPm+aa3r0lJSVGpUqXUrl27Qsd88OBBXbx4Uffdd59Ve3Z2tpo0aSJJ2rdvn1UckixJJwDAGokkAEM6dOigxYsXy9XVVUFBQSpd+v//48TT09Oqb0ZGhpo1a6aVK1fmG6dSpUqG7u/h4VHkazIyMiRJX3zxhW677Tarc25ubobiAABnRiIJwBBPT0/Vrl27UH2bNm2qDz/8UP7+/vLy8iqwT+XKlbVjxw61bdtWknTlyhUlJyeradOmBfZv2LCh8vLytGXLFsvU9l9dq4jm5uZa2oKDg+Xm5qZjx45dt5JZv359ff7551Zt33777Y0fEgCcEC/bALC5/v37q2LFiurRo4e++eYbHT58WJs3b9bw4cP1+++/S5JGjBihGTNmKDY2Vj///LOeeeaZf9wDsnr16oqIiNCTTz6p2NhYy5irV6+WJFWrVk0mk0nr1q3TqVOnlJGRofLly+u5557TqFGjFBMTo0OHDmnXrl1atGiRYm
JiJElDhgzRgQMHNHbsWO3fv1+rVq3S8uXLbf0VAcAtiUQSgM2VLVtWCQkJqlq1qnr16qX69etr4MCBunz5sqVCOWbMGD322GOKiIhQSEiIypcvrwcffPAfx128eLH69OmjZ555RvXq1dPTTz+tzMxMSdJtt92mqVOnasKECQoICNCwYcMkSdOnT9ekSZMUFRWl+vXrq3Pnzvriiy9Uo0YNSVLVqlX1ySefKDY2Vo0aNVJ0dLReffVVG347AHDrMpmvt5IdAAAA+AdUJAEAAGAIiSQAAAAMIZEEAACAISSSAAAAMIREEgAAAIaQSAIAAMAQEkkAAAAYQiIJAAAAQ0gkAQAAYAiJJAAAAAwhkQQAAIAh/w+XY9rUe4TJXwAAAABJRU5ErkJggg==",
|
579 |
+
"text/plain": [
|
580 |
+
"<Figure size 800x600 with 2 Axes>"
|
581 |
+
]
|
582 |
+
},
|
583 |
+
"metadata": {},
|
584 |
+
"output_type": "display_data"
|
585 |
+
},
|
586 |
+
{
|
587 |
+
"name": "stdout",
|
588 |
+
"output_type": "stream",
|
589 |
+
"text": [
|
590 |
+
"Weighted F1-score: 0.8761177534326371\n",
|
591 |
+
"Classification Report:\n",
|
592 |
+
" precision recall f1-score support\n",
|
593 |
+
"\n",
|
594 |
+
" Normal 0.88 0.93 0.91 1847\n",
|
595 |
+
" Toxic 0.86 0.78 0.82 1036\n",
|
596 |
+
"\n",
|
597 |
+
" accuracy 0.88 2883\n",
|
598 |
+
" macro avg 0.87 0.86 0.86 2883\n",
|
599 |
+
"weighted avg 0.88 0.88 0.88 2883\n",
|
600 |
+
"\n"
|
601 |
+
]
|
602 |
+
}
|
603 |
+
],
|
604 |
+
"source": [
|
605 |
+
"\n",
|
606 |
+
"import torch\n",
|
607 |
+
"\n",
|
608 |
+
"\n",
|
609 |
+
"\n",
|
610 |
+
"def ultrareport(all_preds, all_targets,classes):\n",
|
611 |
+
" import matplotlib.pyplot as plt\n",
|
612 |
+
" import seaborn as sns\n",
|
613 |
+
" from sklearn.metrics import confusion_matrix, classification_report, f1_score\n",
|
614 |
+
"\n",
|
615 |
+
" def plot_confusion_matrix(y_true, y_pred, classes):\n",
|
616 |
+
" cm = confusion_matrix(y_true, y_pred)\n",
|
617 |
+
" plt.figure(figsize=(8, 6))\n",
|
618 |
+
" sns.heatmap(cm, annot=True, fmt=\"d\", cmap=\"Blues\", xticklabels=classes, yticklabels=classes)\n",
|
619 |
+
" plt.xlabel('Predicted')\n",
|
620 |
+
" plt.ylabel('Actual')\n",
|
621 |
+
" plt.title('Confusion Matrix')\n",
|
622 |
+
" plt.show()\n",
|
623 |
+
"\n",
|
624 |
+
"\n",
|
625 |
+
" plot_confusion_matrix(all_targets, all_preds, classes)\n",
|
626 |
+
"\n",
|
627 |
+
" f1 = f1_score(all_targets, all_preds, average='weighted')\n",
|
628 |
+
" report = classification_report(all_targets, all_preds, target_names=classes)\n",
|
629 |
+
" print(\"Weighted F1-score:\", f1)\n",
|
630 |
+
" print(\"Classification Report:\")\n",
|
631 |
+
" print(report)\n",
|
632 |
+
"\n",
|
633 |
+
"classes = [\"Normal\", \"Toxic\"]\n",
|
634 |
+
"all_preds, all_targets = test_y, catboost.predict(test_X)\n",
|
635 |
+
"ultrareport(all_preds, all_targets,classes)\n"
|
636 |
+
]
|
637 |
+
},
|
638 |
+
{
|
639 |
+
"cell_type": "code",
|
640 |
+
"execution_count": 29,
|
641 |
+
"metadata": {},
|
642 |
+
"outputs": [
|
643 |
+
{
|
644 |
+
"data": {
|
645 |
+
"text/plain": [
|
646 |
+
"array([5. , 0.55555556])"
|
647 |
+
]
|
648 |
+
},
|
649 |
+
"execution_count": 29,
|
650 |
+
"metadata": {},
|
651 |
+
"output_type": "execute_result"
|
652 |
+
}
|
653 |
+
],
|
654 |
+
"source": [
|
655 |
+
"weights"
|
656 |
+
]
|
657 |
+
},
|
658 |
+
{
|
659 |
+
"cell_type": "code",
|
660 |
+
"execution_count": 25,
|
661 |
+
"metadata": {},
|
662 |
+
"outputs": [
|
663 |
+
{
|
664 |
+
"data": {
|
665 |
+
"text/plain": [
|
666 |
+
"count 14412.000000\n",
|
667 |
+
"mean 0.334860\n",
|
668 |
+
"std 0.471958\n",
|
669 |
+
"min 0.000000\n",
|
670 |
+
"25% 0.000000\n",
|
671 |
+
"50% 0.000000\n",
|
672 |
+
"75% 1.000000\n",
|
673 |
+
"max 1.000000\n",
|
674 |
+
"Name: toxic, dtype: float64"
|
675 |
+
]
|
676 |
+
},
|
677 |
+
"execution_count": 25,
|
678 |
+
"metadata": {},
|
679 |
+
"output_type": "execute_result"
|
680 |
+
}
|
681 |
+
],
|
682 |
+
"source": [
|
683 |
+
"df['toxic'].describe()"
|
684 |
+
]
|
685 |
+
}
|
686 |
+
],
|
687 |
+
"metadata": {
|
688 |
+
"kernelspec": {
|
689 |
+
"display_name": "cv",
|
690 |
+
"language": "python",
|
691 |
+
"name": "python3"
|
692 |
+
},
|
693 |
+
"language_info": {
|
694 |
+
"codemirror_mode": {
|
695 |
+
"name": "ipython",
|
696 |
+
"version": 3
|
697 |
+
},
|
698 |
+
"file_extension": ".py",
|
699 |
+
"mimetype": "text/x-python",
|
700 |
+
"name": "python",
|
701 |
+
"nbconvert_exporter": "python",
|
702 |
+
"pygments_lexer": "ipython3",
|
703 |
+
"version": "3.12.2"
|
704 |
+
}
|
705 |
+
},
|
706 |
+
"nbformat": 4,
|
707 |
+
"nbformat_minor": 2
|
708 |
+
}
|
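The last toxicity cell above glues cointegrated/rubert-tiny-toxicity (with its classifier head replaced by Dropout(0)) to the CatBoost model saved as dont_be_toxic.cbm and returns predict_proba on the raw features. The following is a minimal standalone sketch of that inference path, not the project's actual module: the file path, function name and example call are assumptions, and the feature vector is reshaped to 2-D so indexing the class probability does not hit the "invalid index to scalar variable" error shown in the failing cell above.

import torch
import torch.nn as nn
from catboost import CatBoostClassifier
from transformers import AutoTokenizer, AutoModelForSequenceClassification

CHECKPOINT = "cointegrated/rubert-tiny-toxicity"
tokenizer = AutoTokenizer.from_pretrained(CHECKPOINT)
model = AutoModelForSequenceClassification.from_pretrained(CHECKPOINT)
model.classifier = nn.Dropout(0)  # as in the notebook: strip the head so the forward
model.dropout = nn.Dropout(0)     # pass returns pooled features instead of class logits
model.eval()

catboost_model = CatBoostClassifier()
catboost_model.load_model("dont_be_toxic.cbm")  # path relative to the notebook (assumption)

def toxicity_proba(text: str) -> float:
    """Return the estimated probability that `text` is toxic."""
    with torch.no_grad():
        tokens = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
        features = model(**tokens).logits[0].numpy()
    # predict_proba on a bare 1-D vector returns a 1-D array, so [0][1] would index a
    # scalar; reshaping to (1, n_features) keeps the usual (n_samples, n_classes) shape.
    proba = catboost_model.predict_proba(features.reshape(1, -1))
    return float(proba[0][1])

print(toxicity_proba("пример сообщения"))  # illustrative call

The same reshape keeps the function batch-friendly: stacking several feature vectors into one 2-D array yields one probability row per message.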
requirements.txt
ADDED
@@ -0,0 +1,5 @@
1 |
+
Pillow==10.3.0
|
2 |
+
pytorch_lightning==2.2.1
|
3 |
+
streamlit==1.32.2
|
4 |
+
torch==2.2.2
|
5 |
+
transformers==4.39.3
|
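requirements.txt pins only what the Streamlit app itself imports (Pillow, pytorch_lightning, streamlit, torch, transformers), while the notebook above additionally uses catboost, pandas, numpy, scikit-learn, matplotlib and seaborn. A small, purely illustrative check along these lines (the package list is read off the notebook's imports; the script itself is an assumption, not part of the repo) can flag what is missing before re-running it:

import importlib.util

# Packages the notebook imports that are not listed in requirements.txt.
# "sklearn" is the import name of the scikit-learn distribution.
EXTRA_PACKAGES = ["catboost", "pandas", "numpy", "sklearn", "matplotlib", "seaborn"]

missing = [name for name in EXTRA_PACKAGES if importlib.util.find_spec(name) is None]
if missing:
    print("Missing notebook dependencies:", ", ".join(missing))
else:
    print("All extra notebook dependencies are importable.")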