id | author | sha | created_at | last_modified | disabled | downloads | downloads_all_time | gated | gguf | inference | likes | library_name | tags | pipeline_tag | mask_token | model_index | trending_score | architectures | bos_token_id | eos_token_id | hidden_act | hidden_size | initializer_range | intermediate_size | max_position_embeddings | model_type | num_attention_heads | num_hidden_layers | num_key_value_heads | rms_norm_eps | rope_theta | sliding_window | tie_word_embeddings | torch_dtype | transformers_version | use_cache | vocab_size | attention_bias | attention_dropout | head_dim | mlp_bias | pretraining_tp | rope_scaling |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
gaussfer/ushur_faq_llm_merged_16bit | null | null | "2024-11-07T13:17:36Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"conversational",
"en",
"base_model:unsloth/mistral-7b-bnb-4bit",
"base_model:finetune:unsloth/mistral-7b-bnb-4bit",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 10,000 | 4,096 | false | bfloat16 | 4.46.2 | true | 32,000 | null | 0 | 128 | null | null | null |
AIFunOver/SmolLM2-360M-Instruct-openvino-fp16 | null | null | "2024-11-07T13:18:23Z" | null | null | 13 | null | null | null | null | 0 | transformers | [
"transformers",
"openvino",
"llama",
"text-generation",
"nncf",
"fp16",
"conversational",
"en",
"base_model:HuggingFaceTB/SmolLM2-360M-Instruct",
"base_model:finetune:HuggingFaceTB/SmolLM2-360M-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 960 | 0.02 | 2,560 | 8,192 | llama | 15 | 32 | 5 | 0.00001 | 100,000 | null | true | bfloat16 | 4.45.2 | true | 49,152 | false | 0 | 64 | false | 1 | null |
DoHyun83/llama-ins | null | null | "2024-11-07T13:19:59Z" | null | null | 45 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gguf",
"llama",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"en",
"base_model:unsloth/Meta-Llama-3.1-8B-Instruct-bnb-4bit",
"base_model:quantized:unsloth/Meta-Llama-3.1-8B-Instruct-bnb-4bit",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us",
"conversational"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 46,336 | false | 0 | null | false | 1 | null |
AIFunOver/SmolLM2-360M-Instruct-openvino-4bit | null | null | "2024-11-07T13:23:14Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"openvino",
"llama",
"text-generation",
"nncf",
"4-bit",
"conversational",
"en",
"base_model:HuggingFaceTB/SmolLM2-360M-Instruct",
"base_model:quantized:HuggingFaceTB/SmolLM2-360M-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 960 | 0.02 | 2,560 | 8,192 | llama | 15 | 32 | 5 | 0.00001 | 100,000 | null | true | bfloat16 | 4.45.2 | true | 49,152 | false | 0 | 64 | false | 1 | null |
vitus48683/Qwen2.5-7B-ko-quant-merge-v3 | null | null | "2024-11-07T13:38:07Z" | null | null | 14 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"krx",
"conversational",
"ko",
"base_model:Qwen/Qwen2.5-7B",
"base_model:merge:Qwen/Qwen2.5-7B",
"base_model:Qwen/Qwen2.5-7B-Instruct",
"base_model:merge:Qwen/Qwen2.5-7B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 131,072 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 151,665 | null | 0 | null | null | null | null |
AIFunOver/Qwen2.5-7B-Instruct-openvino-8bit | null | null | "2024-11-07T13:41:34Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"openvino",
"qwen2",
"text-generation",
"chat",
"nncf",
"8-bit",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-7B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-7B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.45.2 | true | 152,064 | null | 0 | null | null | null | null |
lamos38667/krx_Qwen2-7B-Instruct-d20241107 | null | null | "2024-11-07T13:52:53Z" | null | null | 12 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"qwen2",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"krx",
"sft",
"conversational",
"en",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | float16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
Sayankotor/RegularLlama | null | null | "2024-11-07T13:53:41Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.41.2 | false | 32,000 | false | 0 | null | false | 1 | null |
kotlarska2/gpt2_osobnosti | null | null | "2024-11-07T13:55:03Z" | null | null | 12 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
vitus48683/Qwen2.5-7B-ko-quant-merge-v4 | null | null | "2024-11-07T14:02:38Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"krx",
"conversational",
"ko",
"base_model:Qwen/Qwen2.5-7B",
"base_model:merge:Qwen/Qwen2.5-7B",
"base_model:Qwen/Qwen2.5-7B-Instruct",
"base_model:merge:Qwen/Qwen2.5-7B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 131,072 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 151,665 | null | 0 | null | null | null | null |
AIFunOver/Qwen2.5-7B-Instruct-openvino-fp16 | null | null | "2024-11-07T14:06:40Z" | null | null | 14 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"openvino",
"qwen2",
"text-generation",
"chat",
"nncf",
"fp16",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-7B-Instruct",
"base_model:finetune:Qwen/Qwen2.5-7B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.45.2 | true | 152,064 | null | 0 | null | null | null | null |
sinking8/justice_biased_gemma | null | null | "2024-11-07T14:15:30Z" | null | null | 163 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma",
"text-generation",
"llama-factory",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GemmaForCausalLM"
] | 2 | 1 | gelu | 2,048 | 0.02 | 16,384 | 8,192 | gemma | 8 | 18 | 1 | 0.000001 | 10,000 | null | null | bfloat16 | 4.44.2 | true | 256,000 | false | 0 | 256 | null | null | null |
kotlarska2/gpt2_osobnosti_2 | null | null | "2024-11-07T14:17:18Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
disi-unibo-nlp/mistral-SFT-medqa-medmcqa-triples-cot-2bs-2acc-3ep | null | null | "2024-11-07T14:30:02Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"conversational",
"en",
"base_model:unsloth/mistral-7b-instruct-v0.3-bnb-4bit",
"base_model:finetune:unsloth/mistral-7b-instruct-v0.3-bnb-4bit",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.1 | true | 32,768 | null | 0 | 128 | null | null | null |
toastloaf/gpt2-magpie-ultra | null | null | "2024-11-07T14:32:12Z" | null | null | 15 | null | null | null | null | 0 | transformers | [
"transformers",
"tensorboard",
"safetensors",
"gpt2",
"text-generation",
"autotrain",
"text-generation-inference",
"conversational",
"dataset:argilla/magpie-ultra-v0.1",
"base_model:openai-community/gpt2",
"base_model:finetune:openai-community/gpt2",
"license:other",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
Nisk36/FT_elyza_ELYZA-japanese-Llama-2-7b-instruct | null | null | "2024-11-07T14:43:15Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.000001 | 10,000 | null | false | bfloat16 | 4.45.2 | true | 32,000 | false | 0 | 128 | false | 1 | null |
teka38/Qwen0.5-finetuned | null | null | "2024-11-07T15:20:15Z" | null | null | 46 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 1,024 | 0.02 | 2,816 | 32,768 | qwen2 | 16 | 24 | 16 | 0.000001 | 1,000,000 | null | true | float32 | 4.44.2 | true | 151,936 | null | 0 | null | null | null | null |
AIFunOver/Qwen2.5-7B-Instruct-openvino-4bit | null | null | "2024-11-07T15:31:34Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"openvino",
"qwen2",
"text-generation",
"chat",
"nncf",
"4-bit",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-7B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-7B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.45.2 | true | 152,064 | null | 0 | null | null | null | null |
dareljones619/gemma-2b-sql-finetuned | null | null | "2024-11-07T15:53:28Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GemmaForCausalLM"
] | 2 | 1 | gelu | 2,048 | 0.02 | 16,384 | 8,192 | gemma | 8 | 18 | 1 | 0.000001 | 10,000 | null | null | float16 | 4.41.0 | true | 256,000 | false | 0 | 256 | null | null | null |
MaziyarPanahi/magnum-v4-12b-GGUF | null | null | "2024-11-07T16:12:29Z" | null | null | 111 | null | null | null | null | 0 | null | [
"gguf",
"mistral",
"quantized",
"2-bit",
"3-bit",
"4-bit",
"5-bit",
"6-bit",
"8-bit",
"GGUF",
"text-generation",
"base_model:anthracite-org/magnum-v4-12b",
"base_model:quantized:anthracite-org/magnum-v4-12b",
"region:us",
"imatrix",
"conversational"
] | text-generation | null | null | 0 | null | null | null | null | null | null | null | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
mergekit-community/mergekit-ties-mtbkpmt | null | null | "2024-11-07T16:12:54Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"arxiv:2306.01708",
"base_model:Qwen/Qwen2.5-7B",
"base_model:merge:Qwen/Qwen2.5-7B",
"base_model:Qwen/Qwen2.5-7B-Instruct",
"base_model:merge:Qwen/Qwen2.5-7B-Instruct",
"base_model:Qwen/Qwen2.5-Coder-7B-Instruct",
"base_model:merge:Qwen/Qwen2.5-Coder-7B-Instruct",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 131,072 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.45.1 | true | 151,665 | null | 0 | null | null | null | null |
zelk12/MT-Merge1-IF-gemma-2-MT1g1MT4g1-9B | null | null | "2024-11-07T16:16:48Z" | null | null | 15 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT1-Gen1-gemma-2-9B",
"base_model:merge:zelk12/MT1-Gen1-gemma-2-9B",
"base_model:zelk12/MT4-Gen1-gemma-2-9B",
"base_model:merge:zelk12/MT4-Gen1-gemma-2-9B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.45.1 | true | 256,000 | false | 0 | 256 | null | null | null |
mashleburneded/Mobiusv0.5 | null | null | "2024-11-07T16:24:57Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:mistralai/Codestral-22B-v0.1",
"base_model:merge:mistralai/Codestral-22B-v0.1",
"base_model:mistralai/Mistral-Small-Instruct-2409",
"base_model:merge:mistralai/Mistral-Small-Instruct-2409",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 6,144 | 0.02 | 16,384 | 32,768 | mistral | 48 | 40 | 8 | 0.00001 | 1,000,000 | null | false | float16 | 4.46.2 | true | 32,768 | null | 0 | 128 | null | null | null |
zelk12/MT-Merge1-BB-gemma-2-MT1g1MT5g1-9B | null | null | "2024-11-07T16:32:47Z" | null | null | 16 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT1-Gen1-gemma-2-9B",
"base_model:merge:zelk12/MT1-Gen1-gemma-2-9B",
"base_model:zelk12/MT5-Gen1-gemma-2-9B",
"base_model:merge:zelk12/MT5-Gen1-gemma-2-9B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.45.1 | true | 256,000 | false | 0 | 256 | null | null | null |
WaveCut/IlyaGusev-saiga_nemo_12b_sft_m9_d16_slerp-EXL2-4bpw | null | null | "2024-11-07T16:34:27Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"mergekit",
"merge",
"conversational",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"exl2",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 14,336 | 1,024,000 | mistral | 32 | 40 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.45.1 | true | 131,072 | null | 0 | 128 | null | null | null |
zelk12/MT-Merge1-MA-gemma-2-MT4g1MT1g1-9B | null | null | "2024-11-07T16:47:35Z" | null | null | 15 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT1-Gen1-gemma-2-9B",
"base_model:merge:zelk12/MT1-Gen1-gemma-2-9B",
"base_model:zelk12/MT4-Gen1-gemma-2-9B",
"base_model:merge:zelk12/MT4-Gen1-gemma-2-9B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.45.1 | true | 256,000 | false | 0 | 256 | null | null | null |
RyanYr/self-reflect_ministral8Bit_mMQA_dpo_iter1 | null | null | "2024-11-07T16:48:47Z" | null | null | 35 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"generated_from_trainer",
"trl",
"dpo",
"conversational",
"arxiv:2305.18290",
"base_model:mistralai/Ministral-8B-Instruct-2410",
"base_model:finetune:mistralai/Ministral-8B-Instruct-2410",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 12,288 | 32,768 | mistral | 32 | 36 | 8 | 0.00001 | 100,000,000 | 32,768 | false | bfloat16 | 4.45.2 | false | 131,073 | null | 0 | 128 | null | null | null |
Gnider/new_rugpt_test_rossiya | null | null | "2024-11-07T16:59:18Z" | null | null | 5 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 1 | 2 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.45.1 | true | 50,257 | null | null | null | null | null | null |
zelk12/MT-Merge1-GP-gemma-2-MT5g1MT3g1-9B | null | null | "2024-11-07T17:00:39Z" | null | null | 15 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT3-Gen1-gemma-2-9B",
"base_model:merge:zelk12/MT3-Gen1-gemma-2-9B",
"base_model:zelk12/MT5-Gen1-gemma-2-9B",
"base_model:merge:zelk12/MT5-Gen1-gemma-2-9B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.45.1 | true | 256,000 | false | 0 | 256 | null | null | null |
DeusImperator/EVA-Qwen2.5-32B-v0.2_exl2_4.6bpw | null | null | "2024-11-07T17:06:52Z" | null | null | 23 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"generated_from_trainer",
"conversational",
"dataset:anthracite-org/kalo-opus-instruct-22k-no-refusal",
"dataset:Nopm/Opus_WritingStruct",
"dataset:Gryphe/Sonnet3.5-SlimOrcaDedupCleaned",
"dataset:Gryphe/Sonnet3.5-Charcard-Roleplay",
"dataset:Gryphe/ChatGPT-4o-Writing-Prompts",
"dataset:Epiculous/Synthstruct-Gens-v1.1-Filtered-n-Cleaned",
"dataset:Epiculous/SynthRP-Gens-v1.1-Filtered-n-Cleaned",
"dataset:nothingiisreal/Reddit-Dirty-And-WritingPrompts",
"dataset:allura-org/Celeste-1.x-data-mixture",
"dataset:cognitivecomputations/dolphin-2.9.3",
"base_model:EVA-UNIT-01/EVA-Qwen2.5-32B-v0.2",
"base_model:quantized:EVA-UNIT-01/EVA-Qwen2.5-32B-v0.2",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"exl2",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | null | 151,643 | silu | 5,120 | 0.02 | 27,648 | 131,072 | qwen2 | 40 | 64 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.45.1 | false | 152,064 | null | 0 | null | null | null | null |
NotoriousH2/Qwen2.5_1.5B_Med_BlockExpansion | null | null | "2024-11-07T17:08:21Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 1,536 | 0.02 | 8,960 | 131,072 | qwen2 | 12 | 30 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.46.2 | true | 151,936 | null | 0 | null | null | null | null |
Sharat19/Chroma-Powered-RAG-System | null | null | "2024-11-07T17:09:03Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt_neo",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPTNeoForCausalLM"
] | 50,256 | 50,256 | null | 2,048 | 0.02 | null | 2,048 | gpt_neo | null | null | null | null | null | null | null | float32 | 4.44.2 | true | 50,257 | null | 0 | null | null | null | null |
zelk12/MT-Merge1-MU-gemma-2-MT4g1MT1g1-9B | null | null | "2024-11-07T17:11:31Z" | null | null | 15 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT1-Gen1-gemma-2-9B",
"base_model:merge:zelk12/MT1-Gen1-gemma-2-9B",
"base_model:zelk12/MT4-Gen1-gemma-2-9B",
"base_model:merge:zelk12/MT4-Gen1-gemma-2-9B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.45.1 | true | 256,000 | false | 0 | 256 | null | null | null |
MaziyarPanahi/MN-GRAND-Gutenberg-Lyra4-Lyra-12B-MADNESS-GGUF | null | null | "2024-11-07T17:26:01Z" | null | null | 105 | null | null | null | null | 0 | null | [
"gguf",
"quantized",
"2-bit",
"3-bit",
"4-bit",
"5-bit",
"6-bit",
"8-bit",
"GGUF",
"text-generation",
"base_model:DavidAU/MN-GRAND-Gutenberg-Lyra4-Lyra-12B-MADNESS",
"base_model:quantized:DavidAU/MN-GRAND-Gutenberg-Lyra4-Lyra-12B-MADNESS",
"region:us",
"imatrix",
"conversational"
] | text-generation | null | null | 0 | null | null | null | null | null | null | null | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
zelk12/MT-Merge1-MM-gemma-2-MT4g1MT2g1-9B | null | null | "2024-11-07T17:26:52Z" | null | null | 14 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT2-Gen1-gemma-2-9B",
"base_model:merge:zelk12/MT2-Gen1-gemma-2-9B",
"base_model:zelk12/MT4-Gen1-gemma-2-9B",
"base_model:merge:zelk12/MT4-Gen1-gemma-2-9B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.45.1 | true | 256,000 | false | 0 | 256 | null | null | null |
Grayx/rnd_26 | null | null | "2024-11-07T17:32:17Z" | null | null | 769 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 50,256 | 50,256 | silu | 1,408 | 0.02 | 4,340 | 2,048 | llama | 22 | 24 | 22 | 0.00001 | 10,000 | null | false | float32 | 4.44.1 | false | 50,257 | false | 0 | 64 | false | 1 | null |
Nisk36/finetuned-lmsys_vicuna-7b-v1.5 | null | null | "2024-11-07T17:37:59Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.45.2 | true | 32,000 | false | 0 | 128 | false | 1 | null |
Grayx/rnd_27 | null | null | "2024-11-07T17:39:16Z" | null | null | 771 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 50,256 | 50,256 | silu | 1,408 | 0.02 | 4,340 | 2,048 | llama | 22 | 24 | 22 | 0.00001 | 10,000 | null | false | float32 | 4.44.1 | false | 50,257 | false | 0 | 64 | false | 1 | null |
ryan98153/removeLLM-fine-tuned | null | null | "2024-11-07T17:48:25Z" | null | null | 26 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 960 | 0.02 | 2,560 | 2,048 | llama | 15 | 32 | 5 | 0.00001 | 10,000 | null | true | float32 | 4.44.2 | true | 49,152 | false | 0 | null | false | 1 | null |
zelk12/MT-Merge1-MAMU-gemma-2-9B | null | null | "2024-11-07T17:53:42Z" | null | null | 19 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT-Merge1-MA-gemma-2-MT4g1MT1g1-9B",
"base_model:merge:zelk12/MT-Merge1-MA-gemma-2-MT4g1MT1g1-9B",
"base_model:zelk12/MT-Merge1-MU-gemma-2-MT4g1MT1g1-9B",
"base_model:merge:zelk12/MT-Merge1-MU-gemma-2-MT4g1MT1g1-9B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.45.1 | true | 256,000 | false | 0 | 256 | null | null | null |
Gnider/rugpt3med_rossiya_6k | null | null | "2024-11-07T18:05:42Z" | null | null | 56 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 1 | 2 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.45.1 | true | 50,257 | null | null | null | null | null | null |
zelk12/MT-Merge1-MMG-gemma-2-9B | null | null | "2024-11-07T18:07:39Z" | null | null | 16 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT-Merge1-GP-gemma-2-MT5g1MT3g1-9B",
"base_model:merge:zelk12/MT-Merge1-GP-gemma-2-MT5g1MT3g1-9B",
"base_model:zelk12/MT-Merge1-MM-gemma-2-MT4g1MT2g1-9B",
"base_model:merge:zelk12/MT-Merge1-MM-gemma-2-MT4g1MT2g1-9B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.45.1 | true | 256,000 | false | 0 | 256 | null | null | null |
EmTpro01/CodeLlama-7b-java-16bit | null | null | "2024-11-07T18:14:58Z" | null | null | 18 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"sft",
"en",
"dataset:amztheory/alpaca-code-java",
"base_model:meta-llama/CodeLlama-7b-hf",
"base_model:finetune:meta-llama/CodeLlama-7b-hf",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 16,384 | llama | 32 | 32 | 32 | 0.00001 | 1,000,000 | null | false | float16 | 4.46.2 | true | 32,016 | false | 0 | 128 | false | 1 | null |
zelk12/MT-Merge1-BI-gemma-2-9B | null | null | "2024-11-07T18:15:45Z" | null | null | 14 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT-Merge1-BB-gemma-2-MT1g1MT5g1-9B",
"base_model:merge:zelk12/MT-Merge1-BB-gemma-2-MT1g1MT5g1-9B",
"base_model:zelk12/MT-Merge1-IF-gemma-2-MT1g1MT4g1-9B",
"base_model:merge:zelk12/MT-Merge1-IF-gemma-2-MT1g1MT4g1-9B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.45.1 | true | 256,000 | false | 0 | 256 | null | null | null |
zelk12/MT5-Gen1-MMGBI-gemma-2-9B | null | null | "2024-11-07T18:24:29Z" | null | null | 14 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT-Merge1-BI-gemma-2-9B",
"base_model:merge:zelk12/MT-Merge1-BI-gemma-2-9B",
"base_model:zelk12/MT-Merge1-MMG-gemma-2-9B",
"base_model:merge:zelk12/MT-Merge1-MMG-gemma-2-9B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.45.1 | true | 256,000 | false | 0 | 256 | null | null | null |
MaziyarPanahi/Mistral-Nemo-Kurdish-Instruct-GGUF | null | null | "2024-11-07T18:46:50Z" | null | null | 101 | null | null | null | null | 0 | null | [
"gguf",
"quantized",
"2-bit",
"3-bit",
"4-bit",
"5-bit",
"6-bit",
"8-bit",
"GGUF",
"text-generation",
"base_model:nazimali/Mistral-Nemo-Kurdish-Instruct",
"base_model:quantized:nazimali/Mistral-Nemo-Kurdish-Instruct",
"region:us",
"imatrix",
"conversational"
] | text-generation | null | null | 0 | null | null | null | null | null | null | null | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
RyanYr/self-reflect_ministral8Bit_mMQA_dpo_iter1_as16 | null | null | "2024-11-07T18:47:53Z" | null | null | 15 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"generated_from_trainer",
"trl",
"dpo",
"conversational",
"arxiv:2305.18290",
"base_model:mistralai/Ministral-8B-Instruct-2410",
"base_model:finetune:mistralai/Ministral-8B-Instruct-2410",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 12,288 | 32,768 | mistral | 32 | 36 | 8 | 0.00001 | 100,000,000 | 32,768 | false | bfloat16 | 4.45.2 | false | 131,073 | null | 0 | 128 | null | null | null |
Gnider/rugpt3med_rossia15k | null | null | "2024-11-07T18:56:44Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 1 | 2 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.45.1 | true | 50,264 | null | null | null | null | null | null |
xxhe/amazon-review-2023-dpo-mistral-7b-instruct-iter-1 | null | null | "2024-11-07T19:31:48Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | float16 | 4.46.1 | true | 32,768 | null | 0 | 128 | null | null | null |
Mario12355/merged_lora | null | null | "2024-11-07T19:55:18Z" | null | null | 33 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"sft",
"en",
"base_model:unsloth/llama-3-8b-Instruct-bnb-4bit",
"base_model:quantized:unsloth/llama-3-8b-Instruct-bnb-4bit",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.46.2 | true | 128,256 | false | 0 | 128 | false | 1 | null |
broalantap/GPT2-large-4-48000steps | null | null | "2024-11-07T20:08:53Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | bfloat16 | 4.41.2 | true | 50,321 | null | null | null | null | null | null |
cilantro9246/xhytn6p | null | null | "2024-11-07T20:09:12Z" | null | null | 92 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,257 | 100,257 | silu | 4,096 | 0.02 | 14,208 | 4,096 | llama | 32 | 28 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.2 | false | 100,288 | false | 0 | 128 | false | 1 | null |
C10X/qwen234 | null | null | "2024-11-07T20:12:27Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"base_model:C10X/model92",
"base_model:merge:C10X/model92",
"base_model:Qwen/Qwen2.5-3B-Instruct",
"base_model:merge:Qwen/Qwen2.5-3B-Instruct",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.45.1 | true | 151,936 | null | 0 | null | null | null | null |
Pearush/phi_moe_15 | null | null | "2024-11-07T20:15:36Z" | null | null | 17 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phimoe",
"text-generation",
"conversational",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"PhiMoEForCausalLM"
] | 1 | 32,000 | silu | 4,096 | 0.02 | 960 | 4,096 | phimoe | 32 | 32 | 8 | 0.00001 | 10,000 | 4,096 | false | bfloat16 | 4.41.2 | true | 32,064 | true | 0 | null | null | null | null |
MaziyarPanahi/calme-3.2-instruct-3b | null | null | "2024-11-07T20:28:42Z" | null | null | 19 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"chat",
"qwen",
"qwen2.5",
"finetune",
"french",
"english",
"conversational",
"fr",
"en",
"dataset:MaziyarPanahi/french_instruct_sharegpt",
"dataset:arcee-ai/EvolKit-20k",
"base_model:Qwen/Qwen2.5-3B",
"base_model:finetune:Qwen/Qwen2.5-3B",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.45.2 | true | 151,936 | null | 0 | null | null | null | null |
cilantro9246/kch22h8 | null | null | "2024-11-07T20:37:32Z" | null | null | 112 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,257 | 100,257 | silu | 4,096 | 0.02 | 14,208 | 4,096 | llama | 32 | 44 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.0 | false | 96,000 | false | 0 | null | false | 1 | null |
MaziyarPanahi/calme-3.3-instruct-3b | null | null | "2024-11-07T20:38:41Z" | null | null | 16 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"chat",
"qwen",
"qwen2.5",
"finetune",
"french",
"english",
"conversational",
"fr",
"en",
"dataset:MaziyarPanahi/french_instruct_sharegpt",
"dataset:arcee-ai/EvolKit-20k",
"base_model:Qwen/Qwen2.5-3B",
"base_model:finetune:Qwen/Qwen2.5-3B",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.45.2 | true | 151,936 | null | 0 | null | null | null | null |
AIFunOver/Llama-2-7b-hf-openvino-fp16 | null | null | "2024-11-07T20:44:53Z" | null | null | 2 | null | null | null | null | 0 | null | [
"pytorch",
"safetensors",
"openvino",
"llama",
"facebook",
"meta",
"llama-2",
"nncf",
"fp16",
"text-generation",
"en",
"base_model:meta-llama/Llama-2-7b-hf",
"base_model:finetune:meta-llama/Llama-2-7b-hf",
"license:llama2",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float16 | 4.45.2 | true | 32,000 | false | 0 | 128 | false | 1 | null |
ToastyPigeon/MS-Meadowlark-Alt-22B | null | null | "2024-11-07T20:48:14Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:Alfitaria/mistral-small-fujin-qlora",
"base_model:merge:Alfitaria/mistral-small-fujin-qlora",
"base_model:ToastyPigeon/mistral-small-springdragon-qlora",
"base_model:merge:ToastyPigeon/mistral-small-springdragon-qlora",
"base_model:unsloth/Mistral-Small-Instruct-2409",
"base_model:merge:unsloth/Mistral-Small-Instruct-2409",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 6,144 | 0.02 | 16,384 | 131,072 | mistral | 48 | 56 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.43.3 | true | 32,768 | null | 0 | 128 | null | null | null |
RyanYr/self-reflect_ministral8Bit_mMQA_dpo_iter1_as128 | null | null | "2024-11-07T20:55:21Z" | null | null | 14 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"generated_from_trainer",
"trl",
"dpo",
"conversational",
"arxiv:2305.18290",
"base_model:mistralai/Ministral-8B-Instruct-2410",
"base_model:finetune:mistralai/Ministral-8B-Instruct-2410",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 12,288 | 32,768 | mistral | 32 | 36 | 8 | 0.00001 | 100,000,000 | 32,768 | false | bfloat16 | 4.45.2 | false | 131,073 | null | 0 | 128 | null | null | null |
neody/nemma-100m | null | null | "2024-11-07T21:08:13Z" | null | null | 235 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"en",
"dataset:HuggingFaceFW/fineweb-edu",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | null | 512 | 0.02 | 1,024 | 3,096 | gemma2 | 8 | 32 | 4 | 0.000001 | 10,000 | 4,096 | null | float32 | 4.46.2 | true | 50,257 | false | 0 | 64 | null | null | null |
MaziyarPanahi/calme-3.2-instruct-3b-GGUF | null | null | "2024-11-07T21:18:56Z" | null | null | 327 | null | null | null | null | 0 | null | [
"gguf",
"mistral",
"quantized",
"2-bit",
"3-bit",
"4-bit",
"5-bit",
"6-bit",
"8-bit",
"GGUF",
"text-generation",
"base_model:MaziyarPanahi/calme-3.2-instruct-3b",
"base_model:quantized:MaziyarPanahi/calme-3.2-instruct-3b",
"region:us",
"conversational"
] | text-generation | null | null | 0 | null | null | null | null | null | null | null | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
bbunzeck/gpt-wee-small | null | null | "2024-11-07T21:42:42Z" | null | null | 75 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"gpt2",
"text-generation",
"en",
"dataset:nilq/babylm-10M",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 0 | 0 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.26.1 | true | 4,000 | null | null | null | null | null | null |
bbunzeck/gpt-wee-small-curriculum | null | null | "2024-11-07T21:44:05Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"gpt2",
"text-generation",
"en",
"dataset:nilq/babylm-10M",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 0 | 0 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.26.1 | true | 4,000 | null | null | null | null | null | null |
bbunzeck/gpt-wee-large | null | null | "2024-11-07T21:45:06Z" | null | null | 106 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"gpt2",
"text-generation",
"en",
"dataset:nilq/babylm-10M",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 0 | 0 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.26.1 | true | 16,000 | null | null | null | null | null | null |
MaziyarPanahi/calme-3.3-instruct-3b-GGUF | null | null | "2024-11-07T21:46:42Z" | null | null | 275 | null | null | null | null | 0 | null | [
"gguf",
"mistral",
"quantized",
"2-bit",
"3-bit",
"4-bit",
"5-bit",
"6-bit",
"8-bit",
"GGUF",
"text-generation",
"base_model:MaziyarPanahi/calme-3.3-instruct-3b",
"base_model:quantized:MaziyarPanahi/calme-3.3-instruct-3b",
"region:us",
"conversational"
] | text-generation | null | null | 0 | null | null | null | null | null | null | null | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
bbunzeck/gpt-wee-large-curriculum | null | null | "2024-11-07T21:47:16Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"gpt2",
"text-generation",
"en",
"dataset:nilq/babylm-10M",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 0 | 0 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.26.1 | true | 16,000 | null | null | null | null | null | null |
harishnair04/Gemma-medtr-2b-sft | null | null | "2024-11-07T21:50:25Z" | null | null | 22 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"trl",
"sft",
"quantization",
"4bit",
"LoRA",
"en",
"dataset:harishnair04/mtsamples",
"base_model:google/gemma-2-2b",
"base_model:quantized:google/gemma-2-2b",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 2,304 | 0.02 | 9,216 | 8,192 | gemma2 | 8 | 26 | 4 | 0.000001 | 10,000 | 4,096 | null | float32 | 4.47.0.dev0 | true | 256,000 | false | 0 | 256 | null | null | null |
MaziyarPanahi/calme-3.2-baguette-3b | null | null | "2024-11-07T21:52:55Z" | null | null | 19 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"chat",
"qwen",
"qwen2.5",
"finetune",
"french",
"english",
"conversational",
"fr",
"en",
"dataset:MaziyarPanahi/french_instruct_sharegpt",
"dataset:MaziyarPanahi/calme-legalkit-v0.2",
"base_model:Qwen/Qwen2.5-3B",
"base_model:finetune:Qwen/Qwen2.5-3B",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.45.2 | true | 151,665 | null | 0 | null | null | null | null |
Zekunli/qwen2.5-1.5b-lora-w-cot-w-cor | null | null | "2024-11-07T21:59:23Z" | null | null | 89 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 1,536 | 0.02 | 8,960 | 32,768 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.45.0.dev0 | true | 151,936 | null | 0 | null | null | null | null |
MaziyarPanahi/calme-3.3-baguette-3b | null | null | "2024-11-07T22:01:52Z" | null | null | 18 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"chat",
"qwen",
"qwen2.5",
"finetune",
"french",
"english",
"conversational",
"fr",
"en",
"dataset:MaziyarPanahi/french_instruct_sharegpt",
"dataset:MaziyarPanahi/calme-legalkit-v0.2",
"base_model:Qwen/Qwen2.5-3B",
"base_model:finetune:Qwen/Qwen2.5-3B",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.45.2 | true | 151,936 | null | 0 | null | null | null | null |
xrusnack/gpt2_cs_book_descriptions | null | null | "2024-11-07T22:03:02Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
MaziyarPanahi/calme-3.1-baguette-3b-GGUF | null | null | "2024-11-07T22:17:31Z" | null | null | 103,028 | null | null | null | null | 0 | null | [
"gguf",
"mistral",
"quantized",
"2-bit",
"3-bit",
"4-bit",
"5-bit",
"6-bit",
"8-bit",
"GGUF",
"text-generation",
"base_model:MaziyarPanahi/calme-3.1-baguette-3b",
"base_model:quantized:MaziyarPanahi/calme-3.1-baguette-3b",
"region:us",
"conversational"
] | text-generation | null | null | 0 | null | null | null | null | null | null | null | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
MaziyarPanahi/calme-3.2-qwenloi-3b | null | null | "2024-11-07T22:20:09Z" | null | null | 22 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"chat",
"qwen",
"qwen2.5",
"finetune",
"french",
"legal",
"loi",
"conversational",
"fr",
"en",
"dataset:MaziyarPanahi/calme-legalkit-v0.2",
"base_model:Qwen/Qwen2.5-3B",
"base_model:finetune:Qwen/Qwen2.5-3B",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.45.2 | true | 151,936 | null | 0 | null | null | null | null |
yjwon/mpg9_gemma9b_sft_dpo_beta5e-2_epoch4 | null | null | "2024-11-07T22:26:54Z" | null | null | 24 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.1 | true | 256,000 | false | 0 | 256 | null | null | null |
yjwon/mpg9_gemma9b_sft_dpo_beta5e-2_epoch3 | null | null | "2024-11-07T22:26:55Z" | null | null | 24 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.1 | true | 256,000 | false | 0 | 256 | null | null | null |
yjwon/mpg9_gemma9b_sft_dpo_beta5e-2_epoch5 | null | null | "2024-11-07T22:26:59Z" | null | null | 24 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.1 | true | 256,000 | false | 0 | 256 | null | null | null |
yjwon/mpg9_gemma9b_sft_dpo_beta5e-2_epoch1 | null | null | "2024-11-07T22:26:59Z" | null | null | 16 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.1 | true | 256,000 | false | 0 | 256 | null | null | null |
yjwon/mpg9_gemma9b_sft_dpo_beta5e-2_epoch2 | null | null | "2024-11-07T22:26:59Z" | null | null | 24 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.1 | true | 256,000 | false | 0 | 256 | null | null | null |
AIFunOver/Llama-2-13b-hf-openvino-8bit | null | null | "2024-11-07T22:34:30Z" | null | null | 2 | null | null | null | null | 0 | null | [
"pytorch",
"safetensors",
"openvino",
"llama",
"facebook",
"meta",
"llama-2",
"nncf",
"8-bit",
"text-generation",
"en",
"base_model:meta-llama/Llama-2-13b-hf",
"base_model:quantized:meta-llama/Llama-2-13b-hf",
"license:llama2",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.45.2 | true | 32,000 | false | 0 | 128 | false | 1 | null |
yjwon/mp_mistral7bv3_sft_dpo_beta5e-2_epoch1 | null | null | "2024-11-07T22:35:16Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mp_mistral7bv3_sft_dpo_beta5e-2_epoch2 | null | null | "2024-11-07T22:43:58Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
MaziyarPanahi/calme-3.2-baguette-3b-GGUF | null | null | "2024-11-07T22:45:32Z" | null | null | 103,033 | null | null | null | null | 0 | null | [
"gguf",
"mistral",
"quantized",
"2-bit",
"3-bit",
"4-bit",
"5-bit",
"6-bit",
"8-bit",
"GGUF",
"text-generation",
"base_model:MaziyarPanahi/calme-3.2-baguette-3b",
"base_model:quantized:MaziyarPanahi/calme-3.2-baguette-3b",
"region:us",
"conversational"
] | text-generation | null | null | 0 | null | null | null | null | null | null | null | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
yjwon/mp_mistral7bv3_sft_dpo_beta5e-2_epoch3 | null | null | "2024-11-07T22:53:02Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
twodigit/hades_9b3-checkpoint-150000 | null | null | "2024-11-07T22:55:21Z" | null | null | 12 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.43.4 | true | 256,000 | false | 0 | 256 | null | null | null |
yjwon/mp_mistral7bv3_sft_dpo_beta5e-2_epoch4 | null | null | "2024-11-07T22:56:31Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mp_mistral7bv3_sft_dpo_beta5e-2_epoch5 | null | null | "2024-11-07T23:01:04Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
twodigit/hades_9b3-checkpoint-180000 | null | null | "2024-11-07T23:02:11Z" | null | null | 9 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.43.4 | true | 256,000 | false | 0 | 256 | null | null | null |
MaziyarPanahi/calme-3.3-baguette-3b-GGUF | null | null | "2024-11-07T23:12:48Z" | null | null | 4,784 | null | null | null | null | 0 | null | [
"gguf",
"mistral",
"quantized",
"2-bit",
"3-bit",
"4-bit",
"5-bit",
"6-bit",
"8-bit",
"GGUF",
"text-generation",
"base_model:MaziyarPanahi/calme-3.3-baguette-3b",
"base_model:quantized:MaziyarPanahi/calme-3.3-baguette-3b",
"region:us",
"conversational"
] | text-generation | null | null | 0 | null | null | null | null | null | null | null | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
AIFunOver/Llama-2-13b-hf-openvino-fp16 | null | null | "2024-11-07T23:15:41Z" | null | null | 4 | null | null | null | null | 0 | null | [
"pytorch",
"safetensors",
"openvino",
"llama",
"facebook",
"meta",
"llama-2",
"nncf",
"fp16",
"text-generation",
"en",
"base_model:meta-llama/Llama-2-13b-hf",
"base_model:finetune:meta-llama/Llama-2-13b-hf",
"license:llama2",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.45.2 | true | 32,000 | false | 0 | 128 | false | 1 | null |
shadmantabib/embed | null | null | "2024-11-07T23:36:31Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"xlm-roberta",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"XLMRobertaForCausalLM"
] | 0 | 2 | gelu | 768 | 0.02 | 3,072 | 514 | xlm-roberta | 12 | 12 | null | null | null | null | null | float32 | 4.45.1 | true | 250,002 | null | null | null | null | null | null |
atrokhym/granite-3.0-2b-instruct-pirate | null | null | "2024-11-07T23:57:16Z" | null | null | 5 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"granite",
"text-generation",
"trl",
"sft",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"GraniteForCausalLM"
] | 0 | 0 | silu | 2,048 | 0.02 | 8,192 | 4,096 | granite | 32 | 40 | 8 | 0.00001 | 10,000 | null | true | float32 | 4.46.2 | true | 49,155 | false | 0.1 | null | false | null | null |
buddhist-nlp/gemma-2-mitra-it-8int | null | null | "2024-11-08T00:37:29Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | float32 | 4.46.2 | true | 256,000 | false | 0 | 256 | null | null | null |
yiran-wang3/ds_coder_adamw_iter1 | null | null | "2024-11-08T00:55:08Z" | null | null | 54 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"alignment-handbook",
"generated_from_trainer",
"trl",
"dpo",
"conversational",
"dataset:self-generate/ds_coder_original_cn_mining_oj_iter0-binarized",
"base_model:deepseek-ai/deepseek-coder-7b-instruct-v1.5",
"base_model:finetune:deepseek-ai/deepseek-coder-7b-instruct-v1.5",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,000 | 100,015 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 30 | 32 | 0.000001 | 10,000 | null | false | bfloat16 | 4.45.0 | true | 102,400 | false | 0 | 128 | false | 1 | null |
luaqi/sn29_back_v17 | null | null | "2024-11-08T01:14:03Z" | null | null | 189 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phi3",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Phi3ForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | phi3 | 32 | 48 | 32 | 0.00001 | 10,000 | 262,144 | false | bfloat16 | 4.44.2 | false | 32,064 | false | 0 | null | null | null | null |
AIFunOver/Llama-2-13b-hf-openvino-4bit | null | null | "2024-11-08T01:38:59Z" | null | null | 2 | null | null | null | null | 0 | null | [
"pytorch",
"safetensors",
"openvino",
"llama",
"facebook",
"meta",
"llama-2",
"nncf",
"4-bit",
"text-generation",
"en",
"base_model:meta-llama/Llama-2-13b-hf",
"base_model:quantized:meta-llama/Llama-2-13b-hf",
"license:llama2",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.45.2 | true | 32,000 | false | 0 | 128 | false | 1 | null |
Saxo/Linkbricks-Horizon-AI-Korean-Advanced-22B | null | null | "2024-11-08T01:41:22Z" | null | null | 13 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"ko",
"en",
"jp",
"cn",
"dataset:Saxo/ko_cn_translation_tech_social_science_linkbricks_single_dataset",
"dataset:Saxo/ko_jp_translation_tech_social_science_linkbricks_single_dataset",
"dataset:Saxo/en_ko_translation_tech_science_linkbricks_single_dataset_with_prompt_text_huggingface",
"dataset:Saxo/en_ko_translation_social_science_linkbricks_single_dataset_with_prompt_text_huggingface",
"dataset:Saxo/ko_aspect_sentiment_sns_mall_sentiment_linkbricks_single_dataset_with_prompt_text_huggingface",
"dataset:Saxo/ko_summarization_linkbricks_single_dataset_with_prompt_text_huggingface",
"dataset:Saxo/OpenOrca_cleaned_kor_linkbricks_single_dataset_with_prompt_text_huggingface",
"dataset:Saxo/ko_government_qa_total_linkbricks_single_dataset_with_prompt_text_huggingface_sampled",
"dataset:Saxo/ko-news-corpus-1",
"dataset:Saxo/ko-news-corpus-2",
"dataset:Saxo/ko-news-corpus-3",
"dataset:Saxo/ko-news-corpus-4",
"dataset:Saxo/ko-news-corpus-5",
"dataset:Saxo/ko-news-corpus-6",
"dataset:Saxo/ko-news-corpus-7",
"dataset:Saxo/ko-news-corpus-8",
"dataset:Saxo/ko-news-corpus-9",
"dataset:maywell/ko_Ultrafeedback_binarized",
"dataset:youjunhyeok/ko-orca-pair-and-ultrafeedback-dpo",
"dataset:lilacai/glaive-function-calling-v2-sharegpt",
"dataset:kuotient/gsm8k-ko",
"base_model:mistralai/Mistral-Small-Instruct-2409",
"base_model:finetune:mistralai/Mistral-Small-Instruct-2409",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 6,144 | 0.02 | 16,384 | 32,768 | mistral | 48 | 56 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.43.2 | true | 32,768 | null | 0 | 128 | null | null | null |
AIFunOver/Qwen2.5-Coder-7B-Instruct-openvino-8bit | null | null | "2024-11-08T02:18:21Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"openvino",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"nncf",
"8-bit",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-7B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-7B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.45.2 | true | 152,064 | null | 0 | null | null | null | null |
twosmoothslateslabs/Flora-Mistral-7B-v1.0 | null | null | "2024-11-08T02:24:45Z" | null | null | 45 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gguf",
"mistral",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:nbeerbower/Hermes2-Gutenberg2-Mistral-7B",
"base_model:quantized:nbeerbower/Hermes2-Gutenberg2-Mistral-7B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 32,000 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 10,000 | 4,096 | false | float16 | 4.44.1 | false | 32,002 | null | 0 | 128 | null | null | null |
yiran-wang3/ds_coder_adamw_iter2 | null | null | "2024-11-08T02:38:38Z" | null | null | 69 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"alignment-handbook",
"generated_from_trainer",
"trl",
"dpo",
"conversational",
"dataset:self-generate/ds_coder_sppo_hard_new_cn_mining_oj_iter1-binarized",
"base_model:yiran-wang3/ds_coder_adamw_iter1",
"base_model:finetune:yiran-wang3/ds_coder_adamw_iter1",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,000 | 100,015 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 30 | 32 | 0.000001 | 10,000 | null | false | bfloat16 | 4.45.0 | true | 102,400 | false | 0 | 128 | false | 1 | null |