Column types are given in parentheses; ⌀ marks columns that contain null values.

id (string, length 7–117) | author (string, 6 classes) | sha (null) | created_at (unknown) | last_modified (null) | disabled (null) | downloads (int64, 0–18.6M) | downloads_all_time (null) | gated (bool, 1 class) | gguf (null) | inference (null) | likes (int64, 0–4.77k) | library_name (string, 36 classes) | tags (sequence, length 1–430) | pipeline_tag (string, 32 classes) | mask_token (null) | model_index (null) | trending_score (int64, 0–132) | architectures (sequence, length 1–5, ⌀) | bos_token_id (int64, -1 to 256k, ⌀) | eos_token_id (int64, -1 to 256k, ⌀) | hidden_act (string, 15 classes) | hidden_size (int64, 1–20.5k, ⌀) | initializer_range (float64, 0–1, ⌀) | intermediate_size (int64, 1–98.3k, ⌀) | max_position_embeddings (int64, 8–1.05M, ⌀) | model_type (string, 530 classes) | num_attention_heads (int64, 1–5k, ⌀) | num_hidden_layers (int64, -1 to 8.93k, ⌀) | num_key_value_heads (int64, 1–160, ⌀) | rms_norm_eps (float64, 0–7, ⌀) | rope_theta (float64, 1k–1,000B, ⌀) | sliding_window (int64, 0–262k, ⌀) | tie_word_embeddings (bool, 2 classes) | torch_dtype (string, 8 classes) | transformers_version (string, 207 classes) | use_cache (bool, 2 classes) | vocab_size (int64, -1 to 5.03M, ⌀) | attention_bias (bool, 2 classes) | attention_dropout (float64, 0–0.5, ⌀) | head_dim (int64, 2–256, ⌀) | mlp_bias (bool, 2 classes) | pretraining_tp (int64, 0–8, ⌀) | rope_scaling (dict) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
mlx-community/Qwen2.5-Coder-14B-Instruct-4bit | null | null | "2024-11-11T18:29:38Z" | null | null | 25 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-14B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-14B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-14B-Instruct-8bit | null | null | "2024-11-11T18:29:42Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-14B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-14B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"8-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-3B-Instruct-8bit | null | null | "2024-11-11T18:30:03Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-3B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-3B-Instruct",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"8-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.43.1 | true | 151,936 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-3B-Instruct-bf16 | null | null | "2024-11-11T18:30:15Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-3B-Instruct",
"base_model:finetune:Qwen/Qwen2.5-Coder-3B-Instruct",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.43.1 | true | 151,936 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-3B-Instruct-4bit | null | null | "2024-11-11T18:30:22Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-3B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-3B-Instruct",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.43.1 | true | 151,936 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-0.5B-Instruct-4bit | null | null | "2024-11-11T18:30:45Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.43.1 | true | 151,936 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-0.5B-Instruct-8bit | null | null | "2024-11-11T18:30:51Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"8-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.43.1 | true | 151,936 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-0.5B-Instruct-bf16 | null | null | "2024-11-11T18:30:59Z" | null | null | 1 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"base_model:finetune:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.43.1 | true | 151,936 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-0.5B-bf16 | null | null | "2024-11-11T18:31:12Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-0.5B",
"base_model:finetune:Qwen/Qwen2.5-Coder-0.5B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.40.1 | true | 151,936 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-0.5B-8bit | null | null | "2024-11-11T18:31:18Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-0.5B",
"base_model:quantized:Qwen/Qwen2.5-Coder-0.5B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"8-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.40.1 | true | 151,936 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-0.5B-4bit | null | null | "2024-11-11T18:31:22Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-0.5B",
"base_model:quantized:Qwen/Qwen2.5-Coder-0.5B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.40.1 | true | 151,936 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-3B-4bit | null | null | "2024-11-11T18:31:28Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-3B",
"base_model:quantized:Qwen/Qwen2.5-Coder-3B",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.40.1 | true | 151,936 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-3B-8bit | null | null | "2024-11-11T18:31:32Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-3B",
"base_model:quantized:Qwen/Qwen2.5-Coder-3B",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"8-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.40.1 | true | 151,936 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-3B-bf16 | null | null | "2024-11-11T18:31:37Z" | null | null | 5 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-3B",
"base_model:finetune:Qwen/Qwen2.5-Coder-3B",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.40.1 | true | 151,936 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-14B-bf16 | null | null | "2024-11-11T18:31:48Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-14B",
"base_model:finetune:Qwen/Qwen2.5-Coder-14B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.00001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-14B-4bit | null | null | "2024-11-11T18:31:55Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-14B",
"base_model:quantized:Qwen/Qwen2.5-Coder-14B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.00001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-14B-8bit | null | null | "2024-11-11T18:32:00Z" | null | null | 1 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-14B",
"base_model:quantized:Qwen/Qwen2.5-Coder-14B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"8-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.00001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
rudradhar/gemma-finetuned-merged | null | null | "2024-11-11T18:46:33Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.44.2 | true | 256,000 | false | 0 | 256 | null | null | null |
nspyrou/llama-2-7b-accounting | null | null | "2024-11-11T19:08:51Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"dataset:algohype/accounting",
"arxiv:1910.09700",
"base_model:NousResearch/Llama-2-7b-chat-hf",
"base_model:finetune:NousResearch/Llama-2-7b-chat-hf",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float16 | 4.46.2 | true | 32,000 | false | 0 | 128 | false | 1 | null |
devlancer/for-fun-0123 | null | null | "2024-11-11T19:09:26Z" | null | null | 62 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,257 | 100,257 | silu | 4,096 | 0.02 | 14,208 | 4,096 | llama | 32 | 28 | 8 | 0.00001 | 500,000 | null | false | float32 | 4.44.0 | false | 100,288 | false | 0 | 128 | false | 1 | null |
2ndBestKiller/CaTinyLlama_M1_CA_TinyLlaama_M1 | null | null | "2024-11-11T19:22:32Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"trl",
"sft",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 5,632 | 2,048 | llama | 32 | 22 | 4 | 0.00001 | 10,000 | null | false | float32 | 4.45.0.dev0 | true | 32,001 | false | 0 | 64 | false | 1 | null |
theo77186/Qwen2.5-Coder-1.5B-Instruct-20241106 | null | null | "2024-11-11T19:31:42Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2309.00071",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-1.5B",
"base_model:finetune:Qwen/Qwen2.5-Coder-1.5B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 1,536 | 0.02 | 8,960 | 32,768 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.43.1 | true | 151,936 | null | 0 | null | null | null | null |
MartinKu/results_llama2_7b_whole_enron | null | null | "2024-11-11T19:36:24Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float32 | 4.46.2 | false | 32,000 | false | 0 | 128 | false | 1 | null |
BenevolenceMessiah/Qwen2.5-72B-Instruct-abliterated-2x-TIES-v1.0 | null | null | "2024-11-11T19:38:06Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"arxiv:2306.01708",
"base_model:abacusai/Dracarys2-72B-Instruct",
"base_model:merge:abacusai/Dracarys2-72B-Instruct",
"base_model:huihui-ai/Qwen2.5-72B-Instruct-abliterated",
"base_model:merge:huihui-ai/Qwen2.5-72B-Instruct-abliterated",
"base_model:rombodawg/Rombos-LLM-V2.5-Qwen-72b",
"base_model:merge:rombodawg/Rombos-LLM-V2.5-Qwen-72b",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 8,192 | 0.02 | 29,568 | 32,768 | qwen2 | 64 | 80 | 8 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 151,665 | null | 0 | null | null | null | null |
toastloaf/autotrain-gpt2-finetune-crab-4batches | null | null | "2024-11-11T19:48:52Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"tensorboard",
"safetensors",
"gpt2",
"text-generation",
"autotrain",
"text-generation-inference",
"conversational",
"dataset:THU-KEG/Crab-SFT",
"base_model:openai-community/gpt2",
"base_model:finetune:openai-community/gpt2",
"license:other",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
VLKVLK/media-file-recognizer-tiny-llama-1.1b | null | null | "2024-11-11T19:49:28Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"llama-factory",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 5,632 | 2,048 | llama | 32 | 22 | 4 | 0.00001 | 10,000 | null | false | bfloat16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
Mortie1/new-nlp-hw3-llama | null | null | "2024-11-11T20:00:22Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"LLaMa",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MyLLaMa"
] | null | null | null | null | null | null | null | LLaMa | null | null | null | null | null | null | null | float32 | 4.47.0.dev0 | null | null | null | null | null | null | null | null |
abhiramvad/codeparrot-ds | null | null | "2024-11-11T20:04:12Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"generated_from_trainer",
"base_model:openai-community/gpt2",
"base_model:finetune:openai-community/gpt2",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 0 | 0 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,000 | null | null | null | null | null | null |
pxyyy/rlhflow_mixture_clean_empty_round_with_dart_downsampled-20k-nolisa | null | null | "2024-11-11T20:28:03Z" | null | null | 57 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.2 | true | 128,256 | false | 0 | null | false | 1 | null |
Alexanders/custom-llama-hse-casual-lm3 | null | null | "2024-11-11T20:30:17Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LLaMaModelHub"
] | null | null | null | null | null | null | null | llama | null | null | null | null | null | null | null | float32 | 4.46.0 | null | 32,000 | null | null | null | null | null | null |
amirlandau/ByteCodeLLM_gemma2_hf_V1 | null | null | "2024-11-11T20:30:54Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 2,304 | 0.02 | 9,216 | 8,192 | gemma2 | 8 | 26 | 4 | 0.000001 | 10,000 | 4,096 | null | float16 | 4.46.2 | true | 256,000 | false | 0 | 256 | null | null | null |
Mortie1/new-nlp-hw3-llama1 | null | null | "2024-11-11T20:39:37Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"LLaMa",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MyLLaMa"
] | null | null | null | null | null | null | null | LLaMa | null | null | null | null | null | null | null | float32 | 4.47.0.dev0 | null | null | null | null | null | null | null | null |
pxyyy/rlhflow_mixture_clean_empty_round_with_dart_scalebiosampled-20k-nolisa | null | null | "2024-11-11T20:44:26Z" | null | null | 69 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.2 | true | 128,256 | false | 0 | null | false | 1 | null |
mrs83/Kurtis-SmolLM2-1.7B-Instruct | null | null | "2024-11-11T20:49:32Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"SmolLM2",
"text-generation-inference",
"question-answering",
"en",
"dataset:mrs83/kurtis_mental_health_final",
"base_model:HuggingFaceTB/SmolLM2-1.7B-Instruct",
"base_model:quantized:HuggingFaceTB/SmolLM2-1.7B-Instruct",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | question-answering | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 8,192 | 8,192 | llama | 32 | 24 | 32 | 0.00001 | 130,000 | null | true | bfloat16 | 4.46.2 | true | 49,152 | false | 0 | 64 | false | 1 | null |
IAidarI/llama-model | null | null | "2024-11-11T21:00:36Z" | null | null | 55 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama_0.001",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LLaMAModel"
] | null | null | null | null | null | null | null | llama_0.001 | null | null | null | null | null | null | null | float32 | 4.46.2 | null | 32,000 | null | null | null | null | null | null |
RyanYr/self-reflect_ministral8Bit_mg_dpo_psdp2 | null | null | "2024-11-11T21:08:07Z" | null | null | 61 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 12,288 | 32,768 | mistral | 32 | 36 | 8 | 0.00001 | 100,000,000 | 32,768 | false | float32 | 4.45.2 | false | 131,073 | null | 0 | 128 | null | null | null |
crisp-im/mirage-phi3-instruct-rank | null | null | "2024-11-11T21:10:29Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phi3",
"text-generation",
"llama-factory",
"conversational",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Phi3ForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 4,096 | phi3 | 32 | 32 | 32 | 0.00001 | 10,000 | 2,047 | false | bfloat16 | 4.44.2 | true | 32,064 | false | 0 | null | null | null | null |
Aurora-Gem/Opt_lora16_qwen2.5_7B_model_25k-1111-3 | null | null | "2024-11-11T21:16:22Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 4,096 | qwen2 | 28 | 28 | 4 | 0.000001 | 10,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
lalainy/ECE-PRYMMAL-7B-DAM-UNTRAINED-V1 | null | null | "2024-11-11T21:23:20Z" | null | null | 25 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 131,072 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | float32 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
k2rks/Qwen2.5-Coder-14B-Instruct-mlx-4bit | null | null | "2024-11-11T21:53:59Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-14B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-14B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
sahilgul/gemma2_9b_4bit_q | null | null | "2024-11-11T21:59:21Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | float16 | 4.45.1 | true | 256,000 | false | 0 | 256 | null | null | null |
toastloaf/autotrain-gpt2-finetune-dolly-1k | null | null | "2024-11-11T22:05:32Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"tensorboard",
"safetensors",
"gpt2",
"text-generation",
"autotrain",
"text-generation-inference",
"conversational",
"dataset:trl-internal-testing/dolly-chatml-sft",
"base_model:openai-community/gpt2",
"base_model:finetune:openai-community/gpt2",
"license:other",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
BlouseJury/Qwen2.5-Coder-32B-Instruct-EXL2-4.0bpw | null | null | "2024-11-11T22:15:04Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2309.00071",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-32B",
"base_model:finetune:Qwen/Qwen2.5-Coder-32B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 27,648 | 32,768 | qwen2 | 40 | 64 | 8 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
amirlandau/ByteCodeLLM_gemma2_hf_V2_full | null | null | "2024-11-11T22:25:35Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"GemmaForCausalLM"
] | 2 | 1 | gelu | 2,048 | 0.02 | 16,384 | 8,192 | gemma | 8 | 18 | 1 | 0.000001 | 10,000 | null | null | float16 | 4.46.2 | true | 256,000 | false | 0 | 256 | null | null | null |
AndreyRzhaksinskiy/CDS-starcoder2-7b-20241112 | null | null | "2024-11-11T23:07:50Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"starcoder2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Starcoder2ForCausalLM"
] | 0 | 0 | gelu_pytorch_tanh | 4,608 | 0.018042 | 18,432 | 16,384 | starcoder2 | 36 | 32 | 4 | null | 1,000,000 | 4,096 | null | float16 | 4.46.0 | true | 49,152 | null | 0.1 | null | null | null | null |
RyanYr/self-reflect_ministral8Bit_mg_bon_iter1 | null | null | "2024-11-11T23:35:37Z" | null | null | 12 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 12,288 | 32,768 | mistral | 32 | 36 | 8 | 0.00001 | 100,000,000 | 32,768 | false | float32 | 4.45.2 | false | 131,073 | null | 0 | 128 | null | null | null |
concept-unlearning/Llama-2-7b-hf_ft_lora_all_novels_v1_ft_npo_lora_HP_v1 | null | null | "2024-11-11T23:47:03Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.41.2 | false | 32,000 | false | 0 | null | false | 1 | null |
concept-unlearning/Llama-2-7b-hf_ft_lora_all_novels_v1_ft_ga_lora_HP_v3 | null | null | "2024-11-11T23:48:55Z" | null | null | 5 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.41.2 | false | 32,000 | false | 0 | null | false | 1 | null |
Onlydrinkwater/llama2-1B-50krope-scratch | null | null | "2024-11-12T00:02:09Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 8,192 | 4,096 | llama | 32 | 16 | 8 | 0.00001 | 500,000 | null | false | float32 | 4.46.1 | true | 32,000 | false | 0 | 64 | false | 1 | null |
concept-unlearning/Llama-2-7b-hf_ft_lora_all_novels_v1_ft_npo_gdr_lora_HP_v1 | null | null | "2024-11-12T00:21:24Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.41.2 | false | 32,000 | false | 0 | null | false | 1 | null |
danielhanchen/Qwen2.5-Coder-1.5B-bnb-4bit | null | null | "2024-11-12T00:24:41Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 1,536 | 0.02 | 8,960 | 32,768 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.44.2 | true | 151,936 | null | 0 | null | null | null | null |
concept-unlearning/Llama-2-7b-hf_ft_lora_all_novels_v1_ft_ga_klr_lora_HP_v1 | null | null | "2024-11-12T00:25:09Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.41.2 | false | 32,000 | false | 0 | null | false | 1 | null |
concept-unlearning/Llama-2-7b-hf_ft_lora_all_novels_v1_ft_ga_gdr_lora_HP_v2 | null | null | "2024-11-12T00:34:03Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.41.2 | false | 32,000 | false | 0 | null | false | 1 | null |
concept-unlearning/Llama-2-7b-hf_ft_lora_all_novels_v1_ft_npo_klr_lora_HP_v1 | null | null | "2024-11-12T00:35:35Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.41.2 | false | 32,000 | false | 0 | null | false | 1 | null |
unsloth/Qwen2.5-Coder-0.5B-bnb-4bit | null | null | "2024-11-12T00:56:16Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-0.5B",
"base_model:quantized:Qwen/Qwen2.5-Coder-0.5B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.44.2 | true | 151,936 | null | 0 | null | null | null | null |
unsloth/Qwen2.5-Coder-0.5B | null | null | "2024-11-12T00:56:36Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-0.5B",
"base_model:finetune:Qwen/Qwen2.5-Coder-0.5B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.44.2 | true | 151,936 | null | 0 | null | null | null | null |
unsloth/Qwen2.5-Coder-0.5B-Instruct | null | null | "2024-11-12T00:57:14Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"base_model:finetune:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.44.2 | true | 151,936 | null | 0 | null | null | null | null |
unsloth/Qwen2.5-Coder-0.5B-Instruct-bnb-4bit | null | null | "2024-11-12T00:57:50Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.44.2 | true | 151,936 | null | 0 | null | null | null | null |
unsloth/Qwen2.5-Coder-3B-bnb-4bit | null | null | "2024-11-12T01:00:03Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-3B",
"base_model:quantized:Qwen/Qwen2.5-Coder-3B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.44.2 | true | 151,936 | null | 0 | null | null | null | null |
unsloth/Qwen2.5-Coder-3B | null | null | "2024-11-12T01:01:05Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-3B",
"base_model:finetune:Qwen/Qwen2.5-Coder-3B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.44.2 | true | 151,936 | null | 0 | null | null | null | null |
concept-unlearning/Llama-2-7b-hf_ft_lora_all_novels_v1_ft_npo_lora_HICS_kmeans_llm2vec_2000_relative_p20_v1 | null | null | "2024-11-12T01:04:27Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.41.2 | false | 32,000 | false | 0 | null | false | 1 | null |
unsloth/Qwen2.5-Coder-3B-Instruct | null | null | "2024-11-12T01:07:22Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-3B-Instruct",
"base_model:finetune:Qwen/Qwen2.5-Coder-3B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.44.2 | true | 151,936 | null | 0 | null | null | null | null |
lazydok/Gemma2-9b-it-fin-v0.3 | null | null | "2024-11-12T01:07:48Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"conversational",
"en",
"base_model:unsloth/gemma-2-9b-it-bnb-4bit",
"base_model:finetune:unsloth/gemma-2-9b-it-bnb-4bit",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.0 | true | 256,000 | false | 0 | 256 | null | null | null |
concept-unlearning/Llama-2-7b-hf_ft_lora_all_novels_v1_ft_ga_HICS_kmeans_llm2vec_2000_relative_p20_v2 | null | null | "2024-11-12T01:08:37Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.41.2 | false | 32,000 | false | 0 | null | false | 1 | null |
unsloth/Qwen2.5-Coder-32B | null | null | "2024-11-12T01:17:13Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-32B",
"base_model:finetune:Qwen/Qwen2.5-Coder-32B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 5,120 | 0.02 | 27,648 | 32,768 | qwen2 | 40 | 64 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.44.2 | true | 152,064 | null | 0 | null | null | null | null |
ZhangShenao/gemma9b-sft-m | null | null | "2024-11-12T01:22:04Z" | null | null | 29 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"trl",
"sft",
"generated_from_trainer",
"conversational",
"dataset:generator",
"base_model:google/gemma-2-9b-it",
"base_model:finetune:google/gemma-2-9b-it",
"license:gemma",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.45.2 | false | 256,000 | false | 0 | 256 | null | null | null |
unsloth/Qwen2.5-Coder-3B-Instruct-bnb-4bit | null | null | "2024-11-12T01:22:31Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-3B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-3B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.44.2 | true | 151,936 | null | 0 | null | null | null | null |
concept-unlearning/Llama-2-7b-hf_ft_lora_all_novels_v1_ft_ga_gdr_lora_HICS_kmeans_llm2vec_2000_relative_p20_v1 | null | null | "2024-11-12T01:23:50Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.41.2 | false | 32,000 | false | 0 | null | false | 1 | null |
concept-unlearning/Llama-2-7b-hf_ft_lora_all_novels_v1_ft_npo_gdr_lora_HICS_kmeans_llm2vec_2000_relative_p20_v1 | null | null | "2024-11-12T01:23:56Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.41.2 | false | 32,000 | false | 0 | null | false | 1 | null |
concept-unlearning/Llama-2-7b-hf_ft_lora_all_novels_v1_ft_npo_klr_lora_HICS_kmeans_llm2vec_2000_relative_p20_v1 | null | null | "2024-11-12T01:28:13Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.41.2 | false | 32,000 | false | 0 | null | false | 1 | null |
concept-unlearning/Llama-2-7b-hf_ft_lora_all_novels_v1_ft_ga_klr_lora_HICS_kmeans_llm2vec_2000_relative_p20_v1 | null | null | "2024-11-12T01:34:36Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.41.2 | false | 32,000 | false | 0 | null | false | 1 | null |
unsloth/Qwen2.5-Coder-32B-bnb-4bit | null | null | "2024-11-12T01:42:13Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-32B",
"base_model:quantized:Qwen/Qwen2.5-Coder-32B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 5,120 | 0.02 | 27,648 | 32,768 | qwen2 | 40 | 64 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.44.2 | true | 152,064 | null | 0 | null | null | null | null |
rawsh/mirrorqwen2.5-0.5b-ORPO-1 | null | null | "2024-11-12T01:50:13Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"generated_from_trainer",
"trl",
"orpo",
"unsloth",
"arxiv:2403.07691",
"base_model:rawsh/mirrorqwen2.5-0.5b-SFT",
"base_model:finetune:rawsh/mirrorqwen2.5-0.5b-SFT",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | float16 | 4.46.2 | false | 151,936 | null | 0 | null | null | null | null |
pxyyy/rlhflow_mixture_clean_empty_round_with_dart_downsampled-600k-nolisa | null | null | "2024-11-12T01:55:58Z" | null | null | 59 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.2 | true | 128,256 | false | 0 | null | false | 1 | null |
unsloth/Qwen2.5-Coder-32B-Instruct-bnb-4bit | null | null | "2024-11-12T02:00:44Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-32B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-32B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 27,648 | 32,768 | qwen2 | 40 | 64 | 8 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.44.2 | true | 152,064 | null | 0 | null | null | null | null |
mnoukhov/SmolLM2-135M-tldr-sft | null | null | "2024-11-12T02:07:16Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"generated_from_trainer",
"trl",
"sft",
"conversational",
"base_model:HuggingFaceTB/SmolLM2-135M",
"base_model:finetune:HuggingFaceTB/SmolLM2-135M",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 0 | 0 | silu | 576 | 0.041667 | 1,536 | 8,192 | llama | 9 | 30 | 3 | 0.00001 | 100,000 | null | true | float32 | 4.46.2 | true | 49,152 | false | 0 | 64 | false | 1 | null |
NESPED-GEN/TinyLlama-text2SQL-alias-ourschema | null | null | "2024-11-12T02:18:40Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 5,632 | 2,048 | llama | 32 | 22 | 4 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
NESPED-GEN/TinyLlama-text2SQL-alias-indentacao-ourschema | null | null | "2024-11-12T02:25:03Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 5,632 | 2,048 | llama | 32 | 22 | 4 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
yjwon/mp_gemma9b_sft_dpo_beta2e-1_epoch2 | null | null | "2024-11-12T02:31:45Z" | null | null | 18 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.1 | true | 256,000 | false | 0 | 256 | null | null | null |
yjwon/mp_gemma9b_sft_dpo_beta2e-1_epoch4 | null | null | "2024-11-12T02:31:45Z" | null | null | 17 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.1 | true | 256,000 | false | 0 | 256 | null | null | null |
yjwon/mp_gemma9b_sft_dpo_beta2e-1_epoch5 | null | null | "2024-11-12T02:31:53Z" | null | null | 19 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.1 | true | 256,000 | false | 0 | 256 | null | null | null |
yjwon/mp_gemma9b_sft_dpo_beta2e-1_epoch1 | null | null | "2024-11-12T02:31:54Z" | null | null | 19 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.1 | true | 256,000 | false | 0 | 256 | null | null | null |
yjwon/mp_gemma9b_sft_dpo_beta2e-1_epoch3 | null | null | "2024-11-12T02:31:55Z" | null | null | 18 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.1 | true | 256,000 | false | 0 | 256 | null | null | null |
unsloth/Qwen2.5-Coder-14B-Instruct-bnb-4bit | null | null | "2024-11-12T02:37:23Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-14B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-14B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.44.2 | true | 152,064 | null | 0 | null | null | null | null |
AImused/cold40 | null | null | "2024-11-12T02:37:45Z" | null | null | 1,226 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,257 | 100,257 | silu | 4,096 | 0.02 | 14,208 | 4,096 | llama | 32 | 28 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.0 | false | 100,288 | false | 0 | 128 | false | 1 | null |
unsloth/Qwen2.5-Coder-14B-Instruct | null | null | "2024-11-12T03:01:12Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-14B-Instruct",
"base_model:finetune:Qwen/Qwen2.5-Coder-14B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.44.2 | true | 152,064 | null | 0 | null | null | null | null |
unsloth/Qwen2.5-Coder-14B-bnb-4bit | null | null | "2024-11-12T03:05:13Z" | null | null | 1 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-14B",
"base_model:quantized:Qwen/Qwen2.5-Coder-14B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.44.2 | true | 152,064 | null | 0 | null | null | null | null |
knguyennguyen/Phi-3-mini-4k-instruct | null | null | "2024-11-12T03:13:42Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phi3",
"text-generation",
"generated_from_trainer",
"custom_code",
"base_model:microsoft/Phi-3-mini-4k-instruct",
"base_model:finetune:microsoft/Phi-3-mini-4k-instruct",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Phi3ForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 4,096 | phi3 | 32 | 32 | 32 | 0.00001 | 10,000 | 2,047 | false | bfloat16 | 4.44.2 | true | 32,064 | false | 0 | null | null | null | null |
c01zaut/Qwen2.5-14B-Instruct-rk3588-1.1.1 | null | null | "2024-11-12T03:19:07Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"chat",
"conversational",
"en",
"arxiv:2309.00071",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-14B",
"base_model:finetune:Qwen/Qwen2.5-14B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
unsloth/Qwen2.5-Coder-14B | null | null | "2024-11-12T03:21:07Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-14B",
"base_model:finetune:Qwen/Qwen2.5-Coder-14B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.44.2 | true | 152,064 | null | 0 | null | null | null | null |
Zekunli/qwen2.5-1.5b-toolverifier-alpaca-ds | null | null | "2024-11-12T03:36:33Z" | null | null | 95 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 1,536 | 0.02 | 8,960 | 131,072 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.45.0.dev0 | true | 151,936 | null | 0 | null | null | null | null |
pxyyy/rlhflow_mixture_clean_empty_round_with_dart_scalebiosampled-600k-wlisa | null | null | "2024-11-12T03:47:19Z" | null | null | 9 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.2 | true | 128,256 | false | 0 | null | false | 1 | null |
pxyyy/rlhflow_mixture_clean_empty_round_with_dart_downsampled-600k-wlisa | null | null | "2024-11-12T03:55:33Z" | null | null | 61 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.2 | true | 128,256 | false | 0 | null | false | 1 | null |
yjwon/mpg27_mistral7bv3_sft_dpo_beta2e-1_epoch1 | null | null | "2024-11-12T04:04:57Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mpg27_mistral7bv3_sft_dpo_beta2e-1_epoch2 | null | null | "2024-11-12T04:08:56Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
ColdAsIce123/Phi3_Cold_v1 | null | null | "2024-11-12T04:12:55Z" | null | null | 18 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phi3",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Phi3ForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | phi3 | 32 | 48 | 32 | 0.00001 | 10,000 | 262,144 | false | bfloat16 | 4.44.0 | false | 32,064 | false | 0 | null | null | null | null |
yjwon/mpg27_mistral7bv3_sft_dpo_beta2e-1_epoch3 | null | null | "2024-11-12T04:16:17Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mpg27_mistral7bv3_sft_dpo_beta2e-1_epoch4 | null | null | "2024-11-12T04:19:27Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mpg27_mistral7bv3_sft_dpo_beta2e-1_epoch5 | null | null | "2024-11-12T04:22:28Z" | null | null | 1 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mpg27_mistral7bv3_sft_dpo_beta5e-2_epoch1 | null | null | "2024-11-12T04:26:08Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |