update

modeling_eagle_chat.py  CHANGED  (+30, -0)
@@ -115,6 +115,36 @@ class Eagle2ChatModel(PreTrainedModel):
         self.conv_template = get_conv_template(self.template)
         self.system_message = self.conv_template.system_message
 
+        if config.use_backbone_lora:
+            self.wrap_backbone_lora(r=config.use_backbone_lora, lora_alpha=2 * config.use_backbone_lora)
+
+        if config.use_llm_lora:
+            self.wrap_llm_lora(r=config.use_llm_lora, lora_alpha=2 * config.use_llm_lora)
+
+    def wrap_backbone_lora(self, r=128, lora_alpha=256, lora_dropout=0.05):
+        lora_config = LoraConfig(
+            r=r,
+            target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2'],
+            lora_alpha=lora_alpha,
+            lora_dropout=lora_dropout,
+        )
+        self.vision_model = get_peft_model(self.vision_model, lora_config)
+        self.vision_model.print_trainable_parameters()
+
+    def wrap_llm_lora(self, r=128, lora_alpha=256, lora_dropout=0.05):
+        lora_config = LoraConfig(
+            r=r,
+            target_modules=['self_attn.q_proj', 'self_attn.k_proj', 'self_attn.v_proj', 'self_attn.o_proj',
+                            'mlp.gate_proj', 'mlp.down_proj', 'mlp.up_proj'],
+            lora_alpha=lora_alpha,
+            lora_dropout=lora_dropout,
+            task_type='CAUSAL_LM'
+        )
+        self.language_model = get_peft_model(self.language_model, lora_config)
+        self.language_model.enable_input_require_grads()
+        self.language_model.print_trainable_parameters()
+
+
     def forward(
         self,
         pixel_values: torch.FloatTensor,