id | author | sha | created_at | last_modified | disabled | downloads | downloads_all_time | gated | gguf | inference | likes | library_name | tags | pipeline_tag | mask_token | model_index | trending_score | architectures | bos_token_id | eos_token_id | hidden_act | hidden_size | initializer_range | intermediate_size | max_position_embeddings | model_type | num_attention_heads | num_hidden_layers | num_key_value_heads | rms_norm_eps | rope_theta | sliding_window | tie_word_embeddings | torch_dtype | transformers_version | use_cache | vocab_size | attention_bias | attention_dropout | head_dim | mlp_bias | pretraining_tp | rope_scaling |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
openaccess-ai-collective/manticore-13b-chat-pyg | null | null | "2023-05-22T16:21:57Z" | null | null | 1,610 | null | null | null | null | 30 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"en",
"dataset:anon8231489123/ShareGPT_Vicuna_unfiltered",
"dataset:ehartford/wizard_vicuna_70k_unfiltered",
"dataset:ehartford/WizardLM_alpaca_evol_instruct_70k_unfiltered",
"dataset:QingyiSi/Alpaca-CoT",
"dataset:teknium/GPT4-LLM-Cleaned",
"dataset:teknium/GPTeacher-General-Instruct",
"dataset:metaeval/ScienceQA_text_only",
"dataset:hellaswag",
"dataset:openai/summarize_from_feedback",
"dataset:riddle_sense",
"dataset:gsm8k",
"dataset:ewof/code-alpaca-instruct-unfiltered",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 2,048 | llama | 40 | 40 | null | 0.000001 | null | null | false | float16 | 4.28.0.dev0 | true | 32,000 | null | null | null | null | null | null |
tiiuae/falcon-40b | null | null | "2023-05-24T12:08:30Z" | null | null | 150,158 | null | null | null | null | 2,418 | transformers | [
"transformers",
"pytorch",
"safetensors",
"falcon",
"text-generation",
"custom_code",
"en",
"de",
"es",
"fr",
"dataset:tiiuae/falcon-refinedweb",
"arxiv:2205.14135",
"arxiv:1911.02150",
"arxiv:2101.00027",
"arxiv:2005.14165",
"arxiv:2104.09864",
"arxiv:2306.01116",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"FalconForCausalLM"
] | 11 | 11 | null | 8,192 | 0.02 | null | null | falcon | 128 | 60 | null | null | null | null | null | bfloat16 | 4.27.4 | true | 65,024 | null | 0 | null | null | null | null |
tiiuae/falcon-40b-instruct | null | null | "2023-05-25T10:14:36Z" | null | null | 128,990 | null | null | null | null | 1,172 | transformers | [
"transformers",
"pytorch",
"falcon",
"text-generation",
"custom_code",
"en",
"dataset:tiiuae/falcon-refinedweb",
"arxiv:2205.14135",
"arxiv:1911.02150",
"arxiv:2005.14165",
"arxiv:2104.09864",
"arxiv:2306.01116",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"FalconForCausalLM"
] | 11 | 11 | null | 8,192 | 0.02 | null | null | falcon | 128 | 60 | null | null | null | null | null | bfloat16 | 4.26.0 | true | 65,024 | null | 0 | null | null | null | null |
TheBloke/guanaco-7B-GPTQ | null | null | "2023-05-25T20:16:56Z" | null | null | 67 | null | null | null | null | 28 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"base_model:timdettmers/guanaco-7b",
"base_model:quantized:timdettmers/guanaco-7b",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.29.2 | true | 32,000 | null | null | null | null | null | null |
openbmb/cpm-bee-10b | null | null | "2023-05-26T17:52:46Z" | null | null | 81 | null | null | null | null | 169 | transformers | [
"transformers",
"pytorch",
"cpmbee",
"feature-extraction",
"text-generation",
"custom_code",
"en",
"zh",
"region:us"
] | text-generation | null | null | 1 | [
"CpmBeeForCausalLM"
] | null | null | null | 4,096 | null | null | null | cpmbee | 32 | 48 | null | null | null | null | null | null | null | null | 86,583 | null | null | null | null | null | null |
TheBloke/gorilla-7B-GPTQ | null | null | "2023-05-28T01:16:17Z" | null | null | 77 | null | null | null | null | 26 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:2305.15334",
"base_model:gorilla-llm/gorilla-7b-hf-delta-v0",
"base_model:quantized:gorilla-llm/gorilla-7b-hf-delta-v0",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.29.2 | true | 32,000 | null | null | null | null | null | null |
rustformers/bloomz-ggml | null | null | "2023-05-28T12:38:15Z" | null | null | 62 | null | null | null | null | 6 | transformers | [
"transformers",
"llm-rs",
"ggml",
"text-generation",
"ak",
"ar",
"as",
"bm",
"bn",
"ca",
"code",
"en",
"es",
"eu",
"fon",
"fr",
"gu",
"hi",
"id",
"ig",
"ki",
"kn",
"lg",
"ln",
"ml",
"mr",
"ne",
"nso",
"ny",
"or",
"pa",
"pt",
"rn",
"rw",
"sn",
"st",
"sw",
"ta",
"te",
"tn",
"ts",
"tum",
"tw",
"ur",
"vi",
"wo",
"xh",
"yo",
"zh",
"zu",
"dataset:bigscience/xP3",
"arxiv:2211.01786",
"license:bigscience-bloom-rail-1.0",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
James-WYang/BigTranslate | null | null | "2023-05-28T13:40:23Z" | null | null | 804 | null | null | null | null | 47 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"arxiv:2305.18098",
"license:lgpl-3.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LLaMAForCausalLM"
] | 0 | 1 | silu | 5,120 | 0.02 | 13,824 | null | llama | 40 | 40 | null | 0.000001 | null | null | false | float16 | 4.21.0 | false | 53,613 | null | null | null | null | null | null |
cognitivecomputations/Wizard-Vicuna-30B-Uncensored | null | null | "2023-05-30T01:08:00Z" | null | null | 2,012 | null | null | null | null | 146 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"uncensored",
"en",
"dataset:ehartford/wizard_vicuna_70k_unfiltered",
"license:other",
"model-index",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 6,656 | 0.02 | 17,920 | 2,048 | llama | 52 | 60 | null | 0.000001 | null | null | false | float32 | 4.28.1 | true | 32,000 | null | null | null | null | null | null |
TheBloke/Wizard-Vicuna-30B-Uncensored-GPTQ | null | null | "2023-05-30T03:11:00Z" | null | null | 875 | null | null | null | null | 563 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"uncensored",
"en",
"dataset:ehartford/wizard_vicuna_70k_unfiltered",
"base_model:cognitivecomputations/Wizard-Vicuna-30B-Uncensored",
"base_model:quantized:cognitivecomputations/Wizard-Vicuna-30B-Uncensored",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 6,656 | 0.02 | 17,920 | 2,048 | llama | 52 | 60 | null | 0.000001 | null | null | false | float16 | 4.30.0.dev0 | true | 32,000 | null | null | null | null | null | null |
wenge-research/yayi-7b | null | null | "2023-06-02T02:23:58Z" | null | null | 898 | null | null | null | null | 29 | transformers | [
"transformers",
"pytorch",
"bloom",
"text-generation",
"yayi",
"zh",
"en",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"BloomForCausalLM"
] | 1 | 2 | null | 4,096 | 0.02 | null | null | bloom | null | null | null | null | null | null | null | bfloat16 | 4.28.1 | false | 250,684 | null | 0 | null | null | 4 | null |
OpenAssistant/falcon-40b-sft-top1-560 | null | null | "2023-06-02T17:53:28Z" | null | null | 112 | null | null | null | null | 50 | transformers | [
"transformers",
"pytorch",
"RefinedWeb",
"text-generation",
"sft",
"custom_code",
"en",
"de",
"es",
"fr",
"dataset:OpenAssistant/oasst1",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"RWForCausalLM"
] | 11 | 11 | null | 8,192 | 0.02 | null | null | RefinedWeb | null | null | null | null | null | null | null | bfloat16 | 4.28.0.dev0 | true | 65,040 | null | 0 | null | null | null | null |
nomic-ai/gpt4all-falcon | null | null | "2023-06-02T18:15:37Z" | null | null | 196 | null | null | null | null | 50 | transformers | [
"transformers",
"pytorch",
"safetensors",
"RefinedWebModel",
"text-generation",
"custom_code",
"en",
"dataset:nomic-ai/gpt4all-j-prompt-generations",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"RWForCausalLM"
] | 11 | 11 | null | 4,544 | 0.02 | null | null | RefinedWebModel | null | null | null | null | null | null | null | bfloat16 | 4.28.1 | false | 65,024 | null | 0 | null | null | null | null |
PygmalionAI/metharme-1.3b | null | null | "2023-06-02T21:39:05Z" | null | null | 1,361 | null | null | null | null | 23 | transformers | [
"transformers",
"pytorch",
"safetensors",
"gpt_neox",
"text-generation",
"en",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"GPTNeoXForCausalLM"
] | 0 | 0 | gelu | 2,048 | 0.02 | 8,192 | 2,048 | gpt_neox | 16 | 24 | null | null | null | null | false | bfloat16 | 4.30.0.dev0 | true | 50,304 | null | null | null | null | null | null |
TheBloke/WizardLM-Uncensored-Falcon-40B-GPTQ | null | null | "2023-06-03T11:50:52Z" | null | null | 71 | null | null | null | null | 59 | transformers | [
"transformers",
"safetensors",
"RefinedWeb",
"text-generation",
"custom_code",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"RWForCausalLM"
] | 1 | 2 | null | 8,192 | 0.02 | null | null | RefinedWeb | null | null | null | null | null | null | null | float32 | 4.30.0.dev0 | true | 65,025 | null | 0 | null | null | null | null |
TheBloke/Nous-Hermes-13B-GPTQ | null | null | "2023-06-03T13:12:08Z" | null | null | 297 | null | null | null | null | 175 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"self-instruct",
"distillation",
"en",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 2,048 | llama | 40 | 40 | null | 0.000001 | null | null | false | bfloat16 | 4.29.2 | true | 32,001 | null | null | null | null | null | null |
Waterhorse/chessgpt-chat-v1 | null | null | "2023-06-03T21:18:08Z" | null | null | 220 | null | null | null | null | 10 | transformers | [
"transformers",
"pytorch",
"gpt_neox",
"text-generation",
"en",
"dataset:Waterhorse/chess_data",
"dataset:anon8231489123/ShareGPT_Vicuna_unfiltered",
"dataset:OpenAssistant/oasst1",
"dataset:vicgalle/alpaca-gpt4",
"arxiv:2306.09200",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"GPTNeoXForCausalLM"
] | 0 | 0 | gelu | 2,560 | 0.02 | 10,240 | 2,048 | gpt_neox | 32 | 32 | null | null | null | null | false | float16 | 4.28.1 | true | 50,432 | null | null | null | null | null | null |
IDEA-CCNL/Ziya-LLaMA-13B-v1.1 | null | null | "2023-06-07T02:32:30Z" | null | null | 61 | null | null | null | null | 52 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"en",
"zh",
"arxiv:2210.08590",
"license:gpl-3.0",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 2,048 | llama | 40 | 40 | null | 0.000001 | null | null | false | float16 | 4.29.0.dev0 | true | 39,424 | null | null | null | null | null | null |
openlm-research/open_llama_7b | null | null | "2023-06-07T08:54:38Z" | null | null | 47,176 | null | null | null | null | 126 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"dataset:togethercomputer/RedPajama-Data-1T",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.30.0.dev0 | true | 32,000 | null | null | null | null | null | null |
HuggingFaceH4/starchat-beta | null | null | "2023-06-07T11:23:47Z" | null | null | 1,574 | null | null | null | null | 262 | transformers | [
"transformers",
"pytorch",
"tensorboard",
"safetensors",
"gpt_bigcode",
"text-generation",
"generated_from_trainer",
"license:bigcode-openrail-m",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"GPTBigCodeForCausalLM"
] | 0 | 0 | null | null | 0.02 | null | null | gpt_bigcode | null | null | null | null | null | null | null | bfloat16 | 4.28.1 | true | 49,156 | null | null | null | null | null | null |
nmitchko/medguanaco-65b-GPTQ | null | null | "2023-06-07T17:34:55Z" | null | null | 50 | null | null | null | null | 4 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"medical",
"en",
"arxiv:2106.09685",
"arxiv:2303.14070",
"base_model:TheBloke/guanaco-65B-GPTQ",
"base_model:finetune:TheBloke/guanaco-65B-GPTQ",
"license:cc",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 22,016 | 2,048 | llama | 64 | 80 | null | 0.00001 | null | null | false | float16 | 4.29.2 | true | 32,000 | null | null | null | null | null | null |
mosaicml/mpt-30b-chat | null | null | "2023-06-09T20:01:17Z" | null | null | 1,889 | null | null | null | null | 203 | transformers | [
"transformers",
"pytorch",
"mpt",
"text-generation",
"Composer",
"MosaicML",
"llm-foundry",
"conversational",
"custom_code",
"dataset:camel-ai/code",
"dataset:ehartford/wizard_vicuna_70k_unfiltered",
"dataset:anon8231489123/ShareGPT_Vicuna_unfiltered",
"dataset:timdettmers/openassistant-guanaco",
"dataset:camel-ai/math",
"dataset:camel-ai/biology",
"dataset:camel-ai/chemistry",
"dataset:camel-ai/ai_society",
"dataset:jondurbin/airoboros-gpt4-1.2",
"dataset:LongConversations",
"dataset:camel-ai/physics",
"arxiv:2205.14135",
"arxiv:2108.12409",
"arxiv:2010.04245",
"license:cc-by-nc-sa-4.0",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"MPTForCausalLM"
] | null | null | null | null | null | null | null | mpt | null | null | null | null | null | null | null | bfloat16 | 4.30.2 | false | 50,432 | null | null | null | null | null | null |
Austism/chronos-hermes-13b | null | null | "2023-06-13T02:36:03Z" | null | null | 807 | null | null | null | null | 52 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"chatbot",
"storywriting",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 2,048 | llama | 40 | 40 | null | 0.000001 | null | null | false | float16 | 4.28.0 | true | 32,001 | null | null | null | null | null | null |
baichuan-inc/Baichuan-7B | null | null | "2023-06-13T07:47:16Z" | null | null | 16,648 | null | null | null | null | 833 | transformers | [
"transformers",
"pytorch",
"baichuan",
"text-generation",
"custom_code",
"zh",
"en",
"arxiv:1910.07467",
"arxiv:2009.03300",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"BaiChuanForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | baichuan | 32 | 32 | null | 0.000001 | null | null | false | float32 | 4.29.1 | true | 64,000 | null | null | null | null | null | null |
openlm-research/open_llama_13b | null | null | "2023-06-15T10:51:45Z" | null | null | 1,686 | null | null | null | null | 456 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"dataset:togethercomputer/RedPajama-Data-1T",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 2,048 | llama | 40 | 40 | null | 0.000001 | null | null | false | float16 | 4.30.0.dev0 | true | 32,000 | null | null | null | null | null | null |
javirandor/passgpt-10characters | null | null | "2023-06-15T15:46:33Z" | null | null | 906 | null | null | null | null | 3 | transformers | [
"transformers",
"pytorch",
"safetensors",
"gpt2",
"text-generation",
"passwords",
"cybersecurity",
"arxiv:2306.01545",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"GPT2LMHeadModel"
] | 0 | 2 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.29.2 | true | 99 | null | null | null | null | null | null |
lmsys/vicuna-7b-v1.3 | null | null | "2023-06-18T03:36:42Z" | null | null | 53,651 | null | null | null | null | 129 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"arxiv:2302.13971",
"arxiv:2306.05685",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.28.1 | true | 32,000 | null | null | null | null | null | null |
lmsys/vicuna-13b-v1.3 | null | null | "2023-06-18T03:38:59Z" | null | null | 8,661 | null | null | null | null | 194 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"arxiv:2302.13971",
"arxiv:2306.05685",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 2,048 | llama | 40 | 40 | null | 0.000001 | null | null | false | float16 | 4.28.1 | true | 32,000 | null | null | null | null | null | null |
mosaicml/mpt-7b-8k-instruct | null | null | "2023-06-18T22:32:42Z" | null | null | 1,006 | null | null | null | null | 26 | transformers | [
"transformers",
"pytorch",
"mpt",
"text-generation",
"Composer",
"MosaicML",
"llm-foundry",
"custom_code",
"dataset:competition_math",
"dataset:knkarthick/dialogsum",
"dataset:mosaicml/dolly_hhrlhf",
"dataset:duorc",
"dataset:emozilla/quality",
"dataset:scrolls/summ_screen_fd",
"dataset:spider",
"arxiv:2205.14135",
"arxiv:2108.12409",
"arxiv:2010.04245",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"MPTForCausalLM"
] | null | null | null | null | null | null | null | mpt | null | null | null | null | null | null | null | bfloat16 | 4.30.2 | false | 50,432 | null | null | null | null | null | null |
lmsys/vicuna-33b-v1.3 | null | null | "2023-06-21T06:29:44Z" | null | null | 4,767 | null | null | null | null | 288 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"arxiv:2302.13971",
"arxiv:2306.05685",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 6,656 | 0.02 | 17,920 | 2,048 | llama | 52 | 60 | null | 0.000001 | null | null | false | float16 | 4.28.1 | true | 32,000 | null | null | null | null | null | null |
medicalai/ClinicalGPT-base-zh | null | null | "2023-06-21T12:31:50Z" | null | null | 2,900 | null | null | null | null | 39 | transformers | [
"transformers",
"pytorch",
"bloom",
"text-generation",
"medical",
"license:afl-3.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"BloomForCausalLM"
] | 1 | 2 | null | 4,096 | 0.02 | null | null | bloom | null | null | null | null | null | null | null | bfloat16 | 4.29.0 | true | 250,880 | null | 0 | null | null | 1 | null |
mosaicml/mpt-30b-instruct | null | null | "2023-06-21T17:12:11Z" | null | null | 2,688 | null | null | null | null | 101 | transformers | [
"transformers",
"pytorch",
"mpt",
"text-generation",
"Composer",
"MosaicML",
"llm-foundry",
"custom_code",
"dataset:competition_math",
"dataset:knkarthick/dialogsum",
"dataset:mosaicml/dolly_hhrlhf",
"dataset:duorc",
"dataset:emozilla/quality",
"dataset:scrolls/summ_screen_fd",
"dataset:spider",
"dataset:gsm8k",
"dataset:allenai/qasper",
"arxiv:2205.14135",
"arxiv:2108.12409",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"MPTForCausalLM"
] | null | null | null | null | null | null | null | mpt | null | null | null | null | null | null | null | bfloat16 | 4.28.1 | false | 50,432 | null | null | null | null | null | null |
pankajmathur/orca_mini_13b | null | null | "2023-06-22T18:42:14Z" | null | null | 623 | null | null | null | null | 99 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"en",
"dataset:psmathur/alpaca_orca",
"dataset:psmathur/dolly-v2_orca",
"dataset:psmathur/WizardLM_Orca",
"arxiv:2306.02707",
"license:cc-by-nc-sa-4.0",
"model-index",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 2,048 | llama | 40 | 40 | null | 0.000001 | null | null | false | float32 | 4.29.1 | true | 32,000 | null | null | null | null | null | null |
mosaicml/mpt-7b-8k-chat | null | null | "2023-06-22T22:00:29Z" | null | null | 1,178 | null | null | null | null | 40 | transformers | [
"transformers",
"pytorch",
"mpt",
"text-generation",
"Composer",
"MosaicML",
"llm-foundry",
"conversational",
"custom_code",
"dataset:camel-ai/code",
"dataset:ehartford/wizard_vicuna_70k_unfiltered",
"dataset:anon8231489123/ShareGPT_Vicuna_unfiltered",
"dataset:timdettmers/openassistant-guanaco",
"dataset:camel-ai/math",
"dataset:camel-ai/biology",
"dataset:camel-ai/chemistry",
"dataset:camel-ai/ai_society",
"dataset:jondurbin/airoboros-gpt4-1.2",
"dataset:LongConversations",
"dataset:camel-ai/physics",
"arxiv:2205.14135",
"arxiv:2108.12409",
"arxiv:2010.04245",
"license:cc-by-nc-sa-4.0",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"MPTForCausalLM"
] | null | null | null | null | null | null | null | mpt | null | null | null | null | null | null | null | bfloat16 | 4.30.2 | false | 50,432 | null | null | null | null | null | null |
nnpy/opt-350m-instruct | null | null | "2023-06-25T15:01:09Z" | null | null | 121 | null | null | null | null | 4 | transformers | [
"transformers",
"pytorch",
"safetensors",
"opt",
"text-generation",
"dataset:openchat/openchat_sharegpt4_dataset",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"OPTForCausalLM"
] | 2 | 2 | null | 1,024 | null | null | 2,048 | opt | 16 | 24 | null | null | null | null | null | float32 | 4.30.1 | true | 50,272 | null | 0 | null | null | null | null |
openbmb/UltraLM-13b | null | null | "2023-06-26T06:43:47Z" | null | null | 1,580 | null | null | null | null | 71 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"dataset:stingning/ultrachat",
"arxiv:2305.14233",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 2,048 | llama | 40 | 40 | null | 0.000001 | null | null | false | float32 | 4.30.2 | true | 32,001 | null | null | null | null | null | null |
TheBloke/Wizard-Vicuna-13B-Uncensored-SuperHOT-8K-GPTQ | null | null | "2023-06-27T03:55:57Z" | null | null | 398 | null | null | null | null | 142 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"custom_code",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 8,192 | llama | 40 | 40 | null | 0.000001 | null | null | false | float16 | 4.30.0.dev0 | true | 32,000 | null | null | null | null | null | null |
Salesforce/xgen-7b-8k-base | null | null | "2023-06-28T00:57:54Z" | null | null | 2,224 | null | null | null | null | 319 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"arxiv:2309.03450",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 50,256 | 50,256 | silu | 4,096 | 0.02 | 11,008 | 8,192 | llama | 32 | 32 | null | 0.000001 | null | null | false | float32 | 4.29.2 | true | 51,200 | null | null | null | null | null | null |
Salesforce/xgen-7b-8k-inst | null | null | "2023-06-28T06:13:56Z" | null | null | 3,591 | null | null | null | null | 95 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"arxiv:2309.03450",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 50,256 | 50,256 | silu | 4,096 | 0.02 | 11,008 | 8,192 | llama | 32 | 32 | null | 0.000001 | null | null | false | float32 | 4.29.2 | true | 51,200 | null | null | null | null | null | null |
commaai/commavq-gpt2m | null | null | "2023-06-29T02:45:55Z" | null | null | 18 | null | null | null | null | 9 | transformers | [
"transformers",
"pytorch",
"onnx",
"gpt2",
"text-generation",
"unconditional-image-generation",
"dataset:commaai/commavq",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | unconditional-image-generation | null | null | 1 | [
"GPT2LMHeadModel"
] | null | null | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | false | null | 4.29.2 | true | 1,025 | null | null | null | null | null | null |
Writer/palmyra-med-20b | null | null | "2023-06-29T12:56:09Z" | null | null | 4,060 | null | null | null | null | 33 | transformers | [
"transformers",
"pytorch",
"gpt2",
"text-generation",
"medical",
"palmyra",
"en",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.008165 | null | null | gpt2 | null | null | null | null | null | null | null | float16 | 4.30.0.dev0 | false | 50,259 | null | null | null | null | null | null |
syzymon/long_llama_3b | null | null | "2023-06-30T13:23:07Z" | null | null | 115 | null | null | null | null | 120 | transformers | [
"transformers",
"pytorch",
"safetensors",
"longllama",
"text-generation",
"text-generation-inference",
"custom_code",
"dataset:togethercomputer/RedPajama-Data-1T",
"arxiv:2307.03170",
"arxiv:2305.16300",
"license:apache-2.0",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LongLlamaForCausalLM"
] | 1 | 2 | silu | 3,200 | 0.02 | 8,640 | 2,048 | longllama | 32 | 26 | null | 0.000001 | null | null | false | bfloat16 | 4.30.0 | true | 32,000 | null | null | null | null | null | null |
cognitivecomputations/dolphin-llama-13b | null | null | "2023-07-02T15:57:49Z" | null | null | 29 | null | null | null | null | 63 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"license:other",
"model-index",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 2,048 | llama | 40 | 40 | 40 | 0.000001 | null | null | false | float16 | 4.32.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
NumbersStation/nsql-350M | null | null | "2023-07-04T02:29:52Z" | null | null | 343 | null | null | null | null | 33 | transformers | [
"transformers",
"pytorch",
"codegen",
"text-generation",
"license:bsd-3-clause",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"CodeGenForCausalLM"
] | 1 | 50,256 | null | null | 0.02 | null | null | codegen | null | null | null | null | null | null | false | float32 | 4.28.1 | true | 51,200 | null | null | null | null | null | null |
projecte-aina/aguila-7b | null | null | "2023-07-05T13:29:04Z" | null | null | 6,183 | null | null | null | null | 57 | transformers | [
"transformers",
"pytorch",
"safetensors",
"RefinedWebModel",
"text-generation",
"aguila",
"falcon",
"spanish",
"catalan",
"custom_code",
"en",
"es",
"ca",
"model-index",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"RWForCausalLM"
] | 50,256 | 50,256 | null | 4,544 | 0.02 | null | 2,048 | RefinedWebModel | null | null | null | null | null | null | null | float16 | 4.35.2 | true | 50,257 | null | 0 | null | null | null | null |
internlm/internlm-7b | null | null | "2023-07-06T01:37:10Z" | null | null | 1,800 | null | null | null | null | 94 | transformers | [
"transformers",
"pytorch",
"internlm",
"feature-extraction",
"text-generation",
"custom_code",
"region:us"
] | text-generation | null | null | 1 | [
"InternLMForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | internlm | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.29.2 | true | 103,168 | null | null | null | null | null | null |
openlm-research/open_llama_7b_v2 | null | null | "2023-07-06T08:23:04Z" | null | null | 6,389 | null | null | null | null | 115 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"dataset:tiiuae/falcon-refinedweb",
"dataset:bigcode/starcoderdata",
"dataset:togethercomputer/RedPajama-Data-1T",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.30.1 | true | 32,000 | null | null | null | null | null | null |
Salesforce/codegen25-7b-multi_P | null | null | "2023-07-06T20:09:50Z" | null | null | 730 | null | null | null | null | 133 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"code",
"dataset:bigcode/starcoderdata",
"arxiv:2305.02309",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 50,256 | 50,256 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | 32 | 0.000001 | null | null | false | float32 | 4.29.2 | true | 51,200 | null | null | null | null | null | null |
TheBloke/Wizard-Vicuna-7B-Uncensored-SuperHOT-8K-GPTQ | null | null | "2023-07-06T22:21:20Z" | null | null | 97 | null | null | null | null | 16 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"custom_code",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 8,192 | llama | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.30.0.dev0 | true | 32,000 | null | null | null | null | null | null |
Maykeye/TinyLLama-v0 | null | null | "2023-07-08T04:50:15Z" | null | null | 141,273 | null | null | null | null | 26 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 64 | 0.02 | 256 | 2,048 | llama | 16 | 8 | null | 0.000001 | null | null | false | bfloat16 | 4.30.2 | true | 32,000 | null | null | null | null | null | null |
baichuan-inc/Baichuan-13B-Chat | null | null | "2023-07-08T05:58:27Z" | null | null | 4,467 | null | null | null | null | 629 | transformers | [
"transformers",
"pytorch",
"baichuan",
"text-generation",
"custom_code",
"zh",
"en",
"arxiv:2104.09864",
"arxiv:2108.12409",
"arxiv:2009.03300",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"BaichuanForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,696 | null | baichuan | 40 | 40 | null | 0.000001 | null | null | false | bfloat16 | 4.29.2 | true | 64,000 | null | null | null | null | null | null |
yhyhy3/open_llama_7b_v2_med_instruct | null | null | "2023-07-09T17:19:43Z" | null | null | 1,372 | null | null | null | null | 8 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"medical",
"code",
"en",
"dataset:ehartford/dolphin",
"dataset:LinhDuong/chatdoctor-200k",
"dataset:sahil2801/code_instructions_120k",
"dataset:medalpaca/medical_meadow_mediqa",
"dataset:kaiokendev/SuperCOT-dataset",
"dataset:tiiuae/falcon-refinedweb",
"dataset:bigcode/starcoderdata",
"dataset:togethercomputer/RedPajama-Data-1T",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.31.0.dev0 | false | 32,000 | null | null | null | null | null | null |
shikras/shikra-7b-delta-v1-0708 | null | null | "2023-07-10T15:57:55Z" | null | null | 16 | null | null | null | null | 3 | transformers | [
"transformers",
"pytorch",
"shikra",
"text-generation",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"ShikraLlamaForCausalLM"
] | 0 | 1 | silu | 4,096 | 0.02 | 11,008 | 2,048 | shikra | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.28.0.dev0 | false | 32,003 | null | null | null | null | null | null |
DAMO-NLP-MT/polylm-13b | null | null | "2023-07-13T13:48:44Z" | null | null | 787 | null | null | null | null | 52 | transformers | [
"transformers",
"pytorch",
"gpt2",
"text-generation",
"custom_code",
"zh",
"en",
"es",
"fr",
"pt",
"ru",
"de",
"it",
"ar",
"ja",
"ko",
"th",
"vi",
"id",
"nl",
"pl",
"tr",
"he",
"arxiv:2307.06018",
"arxiv:2104.09864",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"PolyLMHeadModel"
] | null | 2 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | null | 4.31.0 | true | 256,000 | null | null | null | null | null | null |
openlm-research/open_llama_3b_v2 | null | null | "2023-07-16T00:39:43Z" | null | null | 49,378 | null | null | null | null | 147 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"dataset:tiiuae/falcon-refinedweb",
"dataset:bigcode/starcoderdata",
"dataset:togethercomputer/RedPajama-Data-1T",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 3,200 | 0.02 | 8,640 | 2,048 | llama | 32 | 26 | null | 0.000001 | null | null | false | float16 | 4.31.0.dev0 | true | 32,000 | null | null | null | null | null | null |
WeOpenML/Alpaca-7B-v1 | null | null | "2023-07-16T12:19:28Z" | null | null | 826 | null | null | null | null | 2 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"arxiv:2306.05087",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | null | 0.000001 | null | null | false | bfloat16 | 4.28.1 | true | 32,001 | null | null | null | null | null | null |
squarelike/Gugugo-koen-1.3B-V0.95 | null | null | "2023-07-17T15:44:40Z" | null | null | 43 | null | null | null | null | 3 | transformers | [
"transformers",
"pytorch",
"safetensors",
"gpt_neox",
"text-generation",
"translation",
"en",
"ko",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | translation | null | null | 1 | [
"GPTNeoXForCausalLM"
] | 0 | 2 | gelu | 2,048 | 0.02 | 8,192 | 2,048 | gpt_neox | 16 | 24 | null | null | null | null | false | float16 | 4.31.0.dev0 | true | 30,080 | null | 0 | null | null | null | null |
usamakenway/Wizard-Vicuna-7B-Uncensored-SuperHOT-8K-AutoGPTQ | null | null | "2023-07-18T09:56:47Z" | null | null | 111 | null | null | null | null | 1 | transformers | [
"transformers",
"llama",
"text-generation",
"custom_code",
"license:other",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 8,192 | llama | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.30.0.dev0 | true | 32,000 | null | null | null | null | null | null |
TheBloke/Llama-2-7B-GGML | null | null | "2023-07-18T17:06:01Z" | null | null | 276 | null | null | null | null | 218 | transformers | [
"transformers",
"llama",
"facebook",
"meta",
"pytorch",
"llama-2",
"text-generation",
"en",
"arxiv:2307.09288",
"base_model:meta-llama/Llama-2-7b-hf",
"base_model:finetune:meta-llama/Llama-2-7b-hf",
"license:llama2",
"region:us"
] | text-generation | null | null | 1 | null | null | null | null | null | null | null | null | llama | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
TheBloke/Llama-2-7B-Chat-GGML | null | null | "2023-07-18T17:38:15Z" | null | null | 3,139 | null | null | null | null | 851 | transformers | [
"transformers",
"llama",
"facebook",
"meta",
"pytorch",
"llama-2",
"text-generation",
"en",
"arxiv:2307.09288",
"base_model:meta-llama/Llama-2-7b-chat-hf",
"base_model:finetune:meta-llama/Llama-2-7b-chat-hf",
"license:other",
"region:us"
] | text-generation | null | null | 1 | null | null | null | null | null | null | null | null | llama | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
TheBloke/Llama-2-13B-chat-GGML | null | null | "2023-07-18T18:03:26Z" | null | null | 507 | null | null | null | null | 693 | transformers | [
"transformers",
"llama",
"facebook",
"meta",
"pytorch",
"llama-2",
"text-generation",
"en",
"arxiv:2307.09288",
"base_model:meta-llama/Llama-2-13b-chat-hf",
"base_model:finetune:meta-llama/Llama-2-13b-chat-hf",
"license:other",
"region:us"
] | text-generation | null | null | 1 | null | null | null | null | null | null | null | null | llama | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
TheBloke/Llama-2-13B-chat-GPTQ | null | null | "2023-07-18T18:28:36Z" | null | null | 15,238 | null | null | null | null | 362 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"facebook",
"meta",
"pytorch",
"llama-2",
"en",
"arxiv:2307.09288",
"base_model:meta-llama/Llama-2-13b-chat-hf",
"base_model:quantized:meta-llama/Llama-2-13b-chat-hf",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | float16 | 4.30.2 | true | 32,000 | null | null | null | null | null | null |
daryl149/llama-2-7b-chat-hf | null | null | "2023-07-18T18:36:56Z" | null | null | 4,031 | null | null | null | null | 122 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.29.0.dev0 | true | 32,000 | null | null | null | null | null | null |
coreml-projects/Llama-2-7b-chat-coreml | null | null | "2023-07-18T19:20:20Z" | null | null | 398 | null | null | null | null | 134 | transformers | [
"transformers",
"coreml",
"llama",
"text-generation",
"meta",
"llama-2",
"license:other",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | null | null | null | null | null | llama | null | null | null | null | null | null | null | null | null | null | 32,000 | null | null | null | null | null | null |
NousResearch/Llama-2-7b-chat-hf | null | null | "2023-07-18T19:45:53Z" | null | null | 623,172 | null | null | null | null | 175 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"facebook",
"meta",
"llama-2",
"en",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | null | null | false | float16 | 4.31.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
TheBloke/Llama-2-70B-Chat-GPTQ | null | null | "2023-07-18T23:33:13Z" | null | null | 9,526 | null | null | null | null | 260 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"facebook",
"meta",
"pytorch",
"llama-2",
"en",
"arxiv:2307.09288",
"base_model:meta-llama/Llama-2-70b-chat-hf",
"base_model:quantized:meta-llama/Llama-2-70b-chat-hf",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 28,672 | 4,096 | llama | 64 | 80 | 8 | 0.00001 | null | null | false | float16 | 4.32.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
JackFram/llama-68m | null | null | "2023-07-19T02:20:03Z" | null | null | 489,527 | null | null | null | null | 23 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"en",
"dataset:wikipedia",
"arxiv:2305.09781",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 0 | 2 | silu | 768 | 0.02 | 3,072 | 2,048 | llama | 12 | 2 | null | 0.000001 | null | null | false | float32 | 4.30.0.dev0 | true | 32,000 | null | null | null | null | null | null |
TheBloke/Llama-2-70B-fp16 | null | null | "2023-07-19T02:21:20Z" | null | null | 1,183 | null | null | null | null | 47 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"facebook",
"meta",
"pytorch",
"llama-2",
"en",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 28,672 | 4,096 | llama | 64 | 80 | 8 | 0.00001 | null | null | false | float16 | 4.32.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
liuhaotian/llava-llama-2-7b-chat-lightning-lora-preview | null | null | "2023-07-19T07:59:12Z" | null | null | 211 | null | null | null | null | 11 | transformers | [
"transformers",
"llava",
"text-generation",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llava | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.30.2 | false | 32,000 | null | null | null | null | null | null |
Tap-M/Luna-AI-Llama2-Uncensored | null | null | "2023-07-19T09:16:29Z" | null | null | 1,188 | null | null | null | null | 139 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"license:cc-by-sa-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | null | 0.00001 | null | null | false | float32 | 4.28.1 | false | 32,000 | null | null | null | null | null | null |
beomi/llama-2-ko-7b | null | null | "2023-07-20T03:25:25Z" | null | null | 11,447 | null | null | null | null | 175 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"facebook",
"meta",
"llama-2",
"kollama",
"llama-2-ko",
"en",
"ko",
"doi:10.57967/hf/1098",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | 32 | 0.00001 | null | null | false | bfloat16 | 4.28.0.dev0 | true | 46,336 | null | null | null | null | 1 | null |
LinkSoul/Chinese-Llama-2-7b | null | null | "2023-07-20T08:23:15Z" | null | null | 1,969 | null | null | null | null | 308 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"zh",
"en",
"dataset:LinkSoul/instruction_merge_set",
"license:openrail",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | 32 | 0.00001 | null | null | false | float32 | 4.28.1 | false | 32,000 | null | null | null | null | null | null |
stabilityai/StableBeluga2 | null | null | "2023-07-20T15:09:28Z" | null | null | 1,492 | null | null | null | null | 882 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"en",
"dataset:conceptofmind/cot_submix_original",
"dataset:conceptofmind/flan2021_submix_original",
"dataset:conceptofmind/t0_submix_original",
"dataset:conceptofmind/niv2_submix_original",
"arxiv:2307.09288",
"arxiv:2306.02707",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 28,672 | 4,096 | llama | 64 | 80 | 8 | 0.00001 | null | null | false | float32 | 4.32.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
pe4enov/ruGPT-3.5-13B-8bit | null | null | "2023-07-20T17:06:07Z" | null | null | 8 | null | null | null | null | 7 | transformers | [
"transformers",
"pytorch",
"gpt2",
"text-generation",
"ru",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float16 | 4.32.0.dev0 | true | 50,272 | null | null | null | null | null | null |
NousResearch/Nous-Hermes-Llama2-13b | null | null | "2023-07-20T23:25:25Z" | null | null | 50,901 | null | null | null | null | 305 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"llama-2",
"self-instruct",
"distillation",
"synthetic instruction",
"en",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | bfloat16 | 4.32.0.dev0 | true | 32,032 | null | null | null | null | 1 | null |
line-corporation/japanese-large-lm-1.7b | null | null | "2023-07-21T00:46:33Z" | null | null | 874 | null | null | null | null | 26 | transformers | [
"transformers",
"pytorch",
"safetensors",
"gpt2",
"text-generation",
"ja",
"dataset:wikipedia",
"dataset:mc4",
"dataset:cc100",
"dataset:oscar",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"GPT2LMHeadModel"
] | 1 | 2 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | true | null | 4.28.1 | true | 51,200 | null | null | null | null | null | null |
HuggingFaceM4/idefics-9b-instruct | null | null | "2023-07-24T15:51:18Z" | null | null | 10,557 | null | null | null | null | 104 | transformers | [
"transformers",
"pytorch",
"safetensors",
"idefics",
"image-text-to-text",
"multimodal",
"text",
"image",
"image-to-text",
"text-generation",
"en",
"dataset:HuggingFaceM4/OBELICS",
"dataset:wikipedia",
"dataset:facebook/pmd",
"dataset:laion/laion2B-en",
"arxiv:2204.14198",
"arxiv:2306.16527",
"arxiv:2303.12733",
"arxiv:2302.05442",
"arxiv:1910.07467",
"arxiv:2204.02311",
"arxiv:2306.05425",
"arxiv:1808.10584",
"arxiv:2109.05014",
"arxiv:2307.06281",
"license:other",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"IdeficsForVisionText2Text"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | idefics | 32 | 32 | null | 0.000001 | null | null | false | bfloat16 | 4.28.0.dev0 | true | 32,000 | null | null | null | null | null | null |
WizardLMTeam/WizardLM-13B-V1.2 | null | null | "2023-07-25T13:51:28Z" | null | null | 1,375 | null | null | null | null | 225 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"arxiv:2304.12244",
"arxiv:2306.08568",
"arxiv:2308.09583",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | float16 | 4.29.2 | false | 32,000 | null | null | null | null | 2 | null |
stabilityai/StableBeluga-13B | null | null | "2023-07-27T02:54:21Z" | null | null | 9,950 | null | null | null | null | 114 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"en",
"dataset:conceptofmind/cot_submix_original",
"dataset:conceptofmind/flan2021_submix_original",
"dataset:conceptofmind/t0_submix_original",
"dataset:conceptofmind/niv2_submix_original",
"arxiv:2307.09288",
"arxiv:2306.02707",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | float16 | 4.32.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
CobraMamba/mamba-gpt-3b-v2 | null | null | "2023-07-27T06:06:44Z" | null | null | 821 | null | null | null | null | 16 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"gpt",
"llm",
"large language model",
"en",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 3,200 | 0.02 | 8,640 | 2,048 | llama | 32 | 26 | 32 | 0.000001 | null | null | false | float16 | 4.31.0 | true | 32,000 | null | null | null | null | 1 | null |
jondurbin/airoboros-l2-13b-gpt4-2.0 | null | null | "2023-07-27T09:21:05Z" | null | null | 827 | null | null | null | null | 16 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"dataset:jondurbin/airoboros-gpt4-2.0",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | bfloat16 | 4.31.0 | false | 32,000 | null | null | null | null | 1 | null |
dicta-il/dictalm-7b-instruct | null | null | "2023-07-28T07:43:53Z" | null | null | 544 | null | null | null | null | 13 | transformers | [
"transformers",
"pytorch",
"megatron_gpt",
"text-generation",
"custom_code",
"he",
"arxiv:2309.14568",
"license:cc-by-4.0",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"MegatronGPTForCausalLM"
] | 2 | 2 | gelu | 4,096 | 0.01 | 10,880 | 2,048 | megatron_gpt | 32 | 32 | null | null | null | null | false | float16 | 4.31.0 | true | 56,064 | null | 0 | null | null | null | null |
lmsys/vicuna-13b-v1.5 | null | null | "2023-07-29T04:44:46Z" | null | null | 68,870 | null | null | null | null | 209 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"arxiv:2307.09288",
"arxiv:2306.05685",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | float16 | 4.31.0 | true | 32,000 | null | null | null | null | 1 | null |
photonmz/llava-roco-8bit | null | null | "2023-07-29T13:32:48Z" | null | null | 25 | null | null | null | null | 14 | transformers | [
"transformers",
"pytorch",
"llava",
"text-generation",
"biology",
"medical",
"image-to-text",
"en",
"dataset:photonmz/roco-instruct-65k",
"license:afl-3.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | image-to-text | null | null | 1 | [
"LlavaLlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llava | 32 | 32 | 32 | 0.00001 | null | null | false | float16 | 4.31.0 | false | 32,004 | null | null | null | null | 1 | null |
upstage/SOLAR-0-70b-16bit | null | null | "2023-07-30T01:10:53Z" | null | null | 3,811 | null | null | null | null | 257 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"upstage",
"llama-2",
"instruct",
"instruction",
"en",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 28,672 | 4,096 | llama | 64 | 80 | 8 | 0.00001 | null | null | false | float16 | 4.31.0 | false | 32,000 | null | null | null | null | 1 | null |
rinna/bilingual-gpt-neox-4b | null | null | "2023-07-31T02:34:03Z" | null | null | 2,960 | null | null | null | null | 29 | transformers | [
"transformers",
"pytorch",
"safetensors",
"gpt_neox",
"text-generation",
"ja",
"en",
"dataset:mc4",
"dataset:cc100",
"dataset:wikipedia",
"dataset:EleutherAI/pile",
"dataset:togethercomputer/RedPajama-Data-1T",
"arxiv:2404.01657",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"GPTNeoXForCausalLM"
] | 2 | 3 | gelu | 2,816 | 0.02 | 11,264 | 2,048 | gpt_neox | 22 | 36 | null | null | null | null | false | float16 | null | true | 65,536 | null | 0.1 | null | null | null | null |
hfl/chinese-alpaca-2-7b | null | null | "2023-07-31T03:53:55Z" | null | null | 1,263 | null | null | null | null | 161 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"zh",
"en",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | null | null | false | float16 | 4.31.0 | true | 55,296 | null | null | null | null | 1 | null |
NumbersStation/nsql-llama-2-7B | null | null | "2023-07-31T22:58:50Z" | null | null | 407 | null | null | null | null | 79 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | null | null | false | float32 | 4.31.0 | true | 32,000 | null | null | null | null | 1 | null |
wesley7137/Llama-2-13B-Nous-Hermes-vicuna-uncensored-mastermod-spych | null | null | "2023-07-31T23:55:38Z" | null | null | 40 | null | null | null | null | 3 | transformers | [
"transformers",
"pytorch",
"tensorboard",
"opt",
"text-generation",
"autotrain",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"OPTForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | opt | 40 | 40 | 40 | 0.00001 | null | null | false | float16 | 4.32.0.dev0 | false | 32,032 | null | 0 | null | null | 1 | null |
TheBloke/airoboros-l2-13b-gpt4-m2.0-GPTQ | null | null | "2023-08-01T00:22:38Z" | null | null | 56 | null | null | null | null | 29 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"dataset:jondurbin/airoboros-gpt4-m2.0",
"base_model:jondurbin/airoboros-l2-13b-gpt4-m2.0",
"base_model:quantized:jondurbin/airoboros-l2-13b-gpt4-m2.0",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | bfloat16 | 4.31.0 | true | 32,000 | null | null | null | null | 1 | null |
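The "4-bit" and "gptq" tags on this entry mark pre-quantized weights. A hedged sketch of one common way to load such a repository through the transformers GPTQ integration follows; it assumes a recent transformers with optimum, a GPTQ backend (auto-gptq or gptqmodel), and accelerate installed, plus an available GPU, and it is not taken from the model card itself.

```python
# Hedged sketch: loading a GPTQ-quantized repo via the transformers integration.
# Assumes optimum, a GPTQ backend (auto-gptq / gptqmodel) and accelerate are installed;
# device_map="auto" places the 4-bit weights on whatever GPUs are visible.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "TheBloke/airoboros-l2-13b-gpt4-m2.0-GPTQ"
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo, device_map="auto")

prompt = "Write a short limerick about quantization."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```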
umaru97/gpt2-product-review-generation | null | null | "2023-08-02T12:15:07Z" | null | null | 25 | null | null | null | null | 1 | transformers | [
"transformers",
"pytorch",
"tensorboard",
"gpt2",
"text-generation",
"generated_from_trainer",
"base_model:openai-community/gpt2-medium",
"base_model:finetune:openai-community/gpt2-medium",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.31.0 | true | 50,257 | null | null | null | null | null | null |
Qwen/Qwen-7B | null | null | "2023-08-03T02:51:18Z" | null | null | 18,692 | null | null | null | null | 368 | transformers | [
"transformers",
"safetensors",
"qwen",
"text-generation",
"custom_code",
"zh",
"en",
"arxiv:2309.16609",
"license:other",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"QWenLMHeadModel"
] | null | null | null | 4,096 | 0.02 | 22,016 | 32,768 | qwen | 32 | 32 | null | null | null | null | false | null | 4.32.0 | true | 151,936 | null | null | null | null | null | null |
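Because the Qwen entries use a custom model_type ("qwen") and carry the "custom_code" tag, both the tokenizer and the model must be loaded with trust_remote_code=True. The sketch below works under that assumption; the device_map and generation settings are illustrative choices, not taken from the model card.

```python
# Hedged sketch: Qwen/Qwen-7B ships its own tokenizer and modeling code, so
# trust_remote_code=True is needed on both calls. Settings below are illustrative.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen-7B", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    "Qwen/Qwen-7B", trust_remote_code=True, device_map="auto"
)

inputs = tokenizer("The capital of Iceland is", return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(output[0]))
```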
Qwen/Qwen-7B-Chat | null | null | "2023-08-03T03:01:31Z" | null | null | 59,048 | null | null | null | null | 751 | transformers | [
"transformers",
"safetensors",
"qwen",
"text-generation",
"custom_code",
"zh",
"en",
"arxiv:2309.16609",
"arxiv:2305.08322",
"arxiv:2009.03300",
"arxiv:2305.05280",
"arxiv:2210.03629",
"license:other",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"QWenLMHeadModel"
] | null | null | null | 4,096 | 0.02 | 22,016 | 32,768 | qwen | 32 | 32 | null | null | null | null | false | null | 4.32.0 | true | 151,936 | null | null | null | null | null | null |
jarradh/llama2_70b_chat_uncensored | null | null | "2023-08-03T10:31:41Z" | null | null | 925 | null | null | null | null | 69 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"uncensored",
"wizard",
"vicuna",
"dataset:ehartford/wizard_vicuna_70k_unfiltered",
"arxiv:2305.14314",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 28,672 | 2,048 | llama | 64 | 80 | 8 | 0.00001 | null | null | false | float32 | 4.31.0 | true | 32,000 | null | null | null | null | 1 | null |
xinlai/LISA-13B-llama2-v0-explanatory | null | null | "2023-08-03T17:48:18Z" | null | null | 50 | null | null | null | null | 5 | transformers | [
"transformers",
"pytorch",
"llava",
"text-generation",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlavaLlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llava | 40 | 40 | null | 0.00001 | null | null | false | bfloat16 | 4.29.0 | false | 32,004 | null | null | null | null | null | null |
yulan-team/YuLan-Chat-2-13b-fp16 | null | null | "2023-08-04T04:12:11Z" | null | null | 1,402 | null | null | null | null | 14 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 8,192 | llama | 40 | 40 | null | 0.00001 | null | null | false | bfloat16 | 4.28.1 | true | 51,200 | null | null | null | null | null | null |
garage-bAInd/Platypus2-70B-instruct | null | null | "2023-08-04T22:20:35Z" | null | null | 2,721 | null | null | null | null | 174 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"en",
"dataset:garage-bAInd/Open-Platypus",
"dataset:Open-Orca/OpenOrca",
"arxiv:2308.07317",
"arxiv:2307.09288",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 28,672 | 4,096 | llama | 64 | 80 | 8 | 0.00001 | null | null | false | float16 | 4.31.0 | false | 32,000 | null | null | null | null | 1 | null |
cognitivecomputations/WizardLM-1.0-Uncensored-Llama2-13b | null | null | "2023-08-06T05:24:46Z" | null | null | 330 | null | null | null | null | 49 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"en",
"dataset:ehartford/WizardLM_evol_instruct_V2_196k_unfiltered_merged_split",
"license:llama2",
"model-index",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | float16 | 4.31.0 | false | 32,000 | null | null | null | null | 1 | null |
ckandemir/solidity-generator | null | null | "2023-08-07T00:32:18Z" | null | null | 26 | null | null | null | null | 9 | transformers | [
"transformers",
"pytorch",
"gpt2",
"text-generation",
"generated_from_trainer",
"en",
"dataset:mwritescode/slither-audited-smart-contracts",
"base_model:codeparrot/codeparrot-small",
"base_model:finetune:codeparrot/codeparrot-small",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.31.0 | true | 32,768 | null | null | null | null | null | null |
Orkhan/llama-2-7b-absa | null | null | "2023-08-07T20:02:57Z" | null | null | 702 | null | null | null | null | 10 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"code",
"en",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | null | null | false | float16 | 4.31.0 | true | 32,000 | null | null | null | null | 1 | null |