# Source: src/axolotl/utils/lora_embeddings.py
# (originally committed by winglian — "Phi2 multipack (#1173)", commit 814aee6)
"""
helpers for lora embeddings
"""
def get_linear_embedding_layers(model_type):
    """
    Return the names of the linear embedding layers to target for LoRA,
    depending on the model architecture.

    :param model_type: model architecture identifier (e.g. "gpt_neox", "falcon")
    :return: list of layer names [input embedding, output head]
    """
    # Architectures with non-standard embedding layer names; anything not
    # listed here falls through to the common embed_tokens/lm_head pair.
    arch_specific = {
        "gpt_neox": ["embed_in", "embed_out"],
        "falcon": ["word_embeddings", "lm_head"],
    }
    return arch_specific.get(model_type, ["embed_tokens", "lm_head"])