Josephgflowers committed on
Commit
121854f
1 Parent(s): 0fdc3d9

Upload LM.py

Files changed (1)
LM.py  +3 -3
LM.py CHANGED
@@ -165,10 +165,10 @@ class ModifiedLlamaDecoderLayer(nn.Module):
 # Load the pre-trained model

 # Load the configuration from the pre-trained model
-config = AutoConfig.from_pretrained('/home/joe/Music/220-agent')
+config = AutoConfig.from_pretrained('Josephgflowers/TinyLlama-v1.1-Cinders-World')

 # Load the pre-trained model
-pretrained_model = LlamaForCausalLM.from_pretrained('/home/joe/Music/220-agent')
+pretrained_model = LlamaForCausalLM.from_pretrained('Josephgflowers/TinyLlama-v1.1-Cinders-World')

 # Replace the decoder layers with modified layers
 for i in range(config.num_hidden_layers):
@@ -183,7 +183,7 @@ modified_model = pretrained_model
 # Save the model and tokenizer
 output_dir = "./saved_model"
 modified_model.save_pretrained(output_dir)
-tokenizer = AutoTokenizer.from_pretrained('/home/joe/Music/220-agent', legacy=False)
+tokenizer = AutoTokenizer.from_pretrained('Josephgflowers/TinyLlama-v1.1-Cinders-World', legacy=False)
 tokenizer.save_pretrained(output_dir)

 print(f"Model and tokenizer saved to {output_dir}")