sapthesh committed on
Commit
51caba6
·
verified ·
1 Parent(s): 6f9d080

Delete proxy_model.py

Browse files
Files changed (1) hide show
  1. proxy_model.py +0 -33
proxy_model.py DELETED
@@ -1,33 +0,0 @@
1
- import os
2
- import requests
3
- import torch
4
- from transformers import AutoTokenizer, AutoConfig, AutoModelForCausalLM
5
-
6
class RemoteModelProxy:
    """Wrap a Hugging Face causal-LM checkpoint behind a small
    classification-style interface over its output logits."""

    def __init__(self, model_id):
        """Load tokenizer, config, and model for ``model_id``.

        The ``quantization_config`` section is stripped from the config (if
        present) so the checkpoint loads without quantization support on
        this host.

        NOTE(review): ``trust_remote_code=True`` executes code shipped in
        the hub repository — only use with model ids you trust.
        """
        self.model_id = model_id
        self.tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)

        # Load the configuration and remove the quantization configuration.
        config = AutoConfig.from_pretrained(model_id, trust_remote_code=True)
        if hasattr(config, 'quantization_config'):
            del config.quantization_config

        self.config = config
        self.model = AutoModelForCausalLM.from_pretrained(
            model_id, config=self.config, trust_remote_code=True
        )

    def classify_text(self, text):
        """Tokenize ``text``, run one forward pass, and return the argmax
        over the final-position logits.

        Returns a dict with:
            "Predicted Class": int — argmax token/class id.
            "Probabilities":  list[float] — softmax over the final-position
                              logits (sums to 1).
        """
        inputs = self.tokenizer(text, return_tensors="pt", padding=True, truncation=True)
        # BUG FIX: the forward pass returns a ModelOutput object, not a
        # tensor; the original code passed that object straight into
        # torch.softmax, which raises TypeError. Extract .logits explicitly.
        # Inference only — no gradients needed.
        with torch.no_grad():
            outputs = self.model(**inputs)
        # Causal-LM logits are (batch, seq_len, vocab). Use the final
        # position so argmax(...).item() yields a single scalar even when
        # the input tokenizes to more than one token (the original .item()
        # call would have crashed for multi-token inputs).
        logits = outputs.logits[:, -1, :]
        probabilities = torch.softmax(logits, dim=-1).tolist()[0]
        predicted_class = torch.argmax(logits, dim=-1).item()
        return {
            "Predicted Class": predicted_class,
            "Probabilities": probabilities
        }
28
-
29
if __name__ == "__main__":
    # Smoke-test the proxy against a fixed checkpoint and print the result.
    proxy = RemoteModelProxy("deepseek-ai/DeepSeek-V3")
    print(proxy.classify_text("Your input text here"))