Update README.md
README.md CHANGED
@@ -50,3 +50,34 @@ model = AutoModelForCausalLM.from_pretrained("inetnuc/llama-3-8b-chat-nuclear")
inputs = tokenizer("what is the iaea approach for cyber security?", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
+
+
+## Files and Versions
+
+| File Name                        | Description                             |
+|----------------------------------|-----------------------------------------|
+| .gitattributes                   | Initial commit                          |
+| README.md                        | Model description and usage             |
+| adapter_config.json              | Configuration for the adapter           |
+| adapter_model.safetensors        | Finetuned adapter weights               |
+| config.json                      | Configuration for the base model        |
+| generation_config.json           | Generation configuration for the model  |
+| model-00001-of-00007.safetensors | Part of the base model weights          |
+| model-00002-of-00007.safetensors | Part of the base model weights          |
+| model-00003-of-00007.safetensors | Part of the base model weights          |
+| model-00004-of-00007.safetensors | Part of the base model weights          |
+| model-00005-of-00007.safetensors | Part of the base model weights          |
+| model-00006-of-00007.safetensors | Part of the base model weights          |
+| model-00007-of-00007.safetensors | Part of the base model weights          |
+| model.safetensors.index.json     | Index for the sharded model weights     |
+| special_tokens_map.json          | Special tokens mapping                  |
+| tokenizer.json                   | Tokenizer data                          |
+| tokenizer_config.json            | Configuration for the tokenizer         |
+
+
+## Model Card Authors
+MUSTAFA UMUT OZBEK
+
+## Contact
+https://www.linkedin.com/in/mustafaumutozbek/
+https://x.com/m_umut_ozbek
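The file list added above pairs a PEFT-style adapter (`adapter_config.json`, `adapter_model.safetensors`) with the sharded base-model weights. Below is a minimal sketch of how such an adapter could be applied on top of a separate base checkpoint with the `peft` library; the base-model id is a placeholder and the `PeftModel` route is an assumption, since the README's own snippet loads the repository directly with `AutoModelForCausalLM`.

```python
# Sketch only: apply the repo's PEFT adapter to a base checkpoint.
# Assumptions: the adapter follows the standard PEFT layout, and the
# base_id below is a placeholder -- the README itself loads the full
# repository directly with AutoModelForCausalLM.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "meta-llama/Meta-Llama-3-8B-Instruct"  # placeholder base checkpoint
adapter_id = "inetnuc/llama-3-8b-chat-nuclear"   # repo holding adapter_model.safetensors

tokenizer = AutoTokenizer.from_pretrained(adapter_id)
base_model = AutoModelForCausalLM.from_pretrained(base_id, torch_dtype="auto", device_map="auto")
model = PeftModel.from_pretrained(base_model, adapter_id)  # attach the finetuned adapter

inputs = tokenizer("what is the iaea approach for cyber security?", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

Loading the repository id directly, as in the usage snippet above, remains the documented path; the adapter route is only relevant if you want to keep the base weights and the finetuned delta separate.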