id | author | sha | created_at | last_modified | disabled | downloads | downloads_all_time | gated | gguf | inference | likes | library_name | tags | pipeline_tag | mask_token | model_index | trending_score | architectures | bos_token_id | eos_token_id | hidden_act | hidden_size | initializer_range | intermediate_size | max_position_embeddings | model_type | num_attention_heads | num_hidden_layers | num_key_value_heads | rms_norm_eps | rope_theta | sliding_window | tie_word_embeddings | torch_dtype | transformers_version | use_cache | vocab_size | attention_bias | attention_dropout | head_dim | mlp_bias | pretraining_tp | rope_scaling |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
SufficientPrune3897/magnum-v4-123b-exl2-2.65bpw | null | null | "2024-11-10T14:04:02Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"chat",
"conversational",
"en",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"exl2",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 12,288 | 0.02 | 28,672 | 131,072 | mistral | 96 | 88 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.45.0.dev0 | false | 32,768 | null | 0 | 128 | null | null | null |
win10/WhiteRabbitNeo-2.5-Qwen-2.5-Coder-12.3B | null | null | "2024-11-10T14:11:27Z" | null | null | 21 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:WhiteRabbitNeo/WhiteRabbitNeo-2.5-Qwen-2.5-Coder-7B",
"base_model:finetune:WhiteRabbitNeo/WhiteRabbitNeo-2.5-Qwen-2.5-Coder-7B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 128,245 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 48 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.45.2 | false | 152,064 | null | 0 | null | null | null | null |
win10/Qwen2.5-Math-12.3B-Instruct | null | null | "2024-11-10T14:16:43Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:Qwen/Qwen2.5-Math-7B-Instruct",
"base_model:finetune:Qwen/Qwen2.5-Math-7B-Instruct",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 4,096 | qwen2 | 28 | 48 | 4 | 0.000001 | 10,000 | null | false | bfloat16 | 4.45.2 | true | 152,064 | null | 0 | null | null | null | null |
t2ance/slim_full_pretrain_inner_45000 | null | null | "2024-11-10T14:31:24Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
0xayman/Qwen-1.5B-fc-v3.1 | null | null | "2024-11-10T14:31:41Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 1,536 | 0.02 | 8,960 | 32,768 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.46.2 | true | 151,936 | null | 0 | null | null | null | null |
win10/Qwen2.5-12.3B | null | null | "2024-11-10T14:32:58Z" | null | null | 13 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:Qwen/Qwen2.5-7B",
"base_model:finetune:Qwen/Qwen2.5-7B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 131,072 | qwen2 | 28 | 48 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.45.2 | true | 152,064 | null | 0 | null | null | null | null |
ihughes15234/phi35_tictactoe_dpo_firstonly_2epoch | null | null | "2024-11-10T14:42:03Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"conversational",
"en",
"base_model:ihughes15234/phi_3_5_mini_tictactoe1200",
"base_model:finetune:ihughes15234/phi_3_5_mini_tictactoe1200",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.46.2 | true | 32,064 | false | 0 | 96 | false | 1 | null |
Savoxism/codeparrot-ds | null | null | "2024-11-10T14:51:05Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"tensorboard",
"safetensors",
"gpt2",
"text-generation",
"generated_from_trainer",
"base_model:openai-community/gpt2",
"base_model:finetune:openai-community/gpt2",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 0 | 0 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.45.1 | true | 50,000 | null | null | null | null | null | null |
ICT3214-Group5/MD5_gpt_neo_v1.1.3 | null | null | "2024-11-10T15:01:29Z" | null | null | 12 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt_neo",
"text-generation",
"generated_from_trainer",
"base_model:EleutherAI/gpt-neo-125m",
"base_model:finetune:EleutherAI/gpt-neo-125m",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPTNeoForCausalLM"
] | 50,256 | 50,256 | null | 768 | 0.02 | null | 2,048 | gpt_neo | null | null | null | null | null | null | null | float32 | 4.46.1 | true | 50,257 | null | 0 | null | null | null | null |
t2ance/slim_full_pretrain_inner_50000 | null | null | "2024-11-10T15:09:10Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
michizavrel14/my_small_gpt2_hasek_dataset | null | null | "2024-11-10T15:32:13Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
msu-rcc-lair/RuadaptQwen-32B-instruct | null | null | "2024-11-10T15:33:44Z" | null | null | 0 | null | null | null | null | 0 | null | [
"safetensors",
"qwen2",
"text-generation",
"conversational",
"ru",
"dataset:IlyaGusev/saiga_scored",
"dataset:IlyaGusev/saiga_preferences",
"dataset:dichspace/darulm",
"license:apache-2.0",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | null | 147,077 | silu | 5,120 | 0.02 | 27,648 | 32,768 | qwen2 | 40 | 64 | 8 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.45.2 | true | 147,200 | null | 0 | null | null | null | null |
visdata/llama_mm_0 | null | null | "2024-11-10T15:34:26Z" | null | null | 113 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 50,256 | 50,256 | silu | 1,408 | 0.02 | 4,340 | 2,048 | llama | 22 | 24 | 22 | 0.00001 | 10,000 | null | false | bfloat16 | 4.46.2 | false | 50,257 | false | 0 | 64 | false | 1 | null |
visdata/llama_mm_1 | null | null | "2024-11-10T15:45:38Z" | null | null | 114 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 50,256 | 50,256 | silu | 1,408 | 0.02 | 4,340 | 2,048 | llama | 22 | 24 | 22 | 0.00001 | 10,000 | null | false | bfloat16 | 4.46.2 | false | 50,257 | false | 0 | 64 | false | 1 | null |
t2ance/slim_full_pretrain_inner_55000 | null | null | "2024-11-10T15:47:05Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
ICT3214-Group5/MD5_gpt_neo_v1.1.4 | null | null | "2024-11-10T16:00:09Z" | null | null | 5 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt_neo",
"text-generation",
"generated_from_trainer",
"base_model:EleutherAI/gpt-neo-125m",
"base_model:finetune:EleutherAI/gpt-neo-125m",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPTNeoForCausalLM"
] | 50,256 | 50,256 | null | 768 | 0.02 | null | 2,048 | gpt_neo | null | null | null | null | null | null | null | float32 | 4.46.1 | true | 50,257 | null | 0 | null | null | null | null |
ihughes15234/phi35_tictactoe_dpo1epoch_v3 | null | null | "2024-11-10T16:07:21Z" | null | null | 5 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"conversational",
"en",
"base_model:ihughes15234/phi35_tictactoe_dpo6epoch_v2",
"base_model:finetune:ihughes15234/phi35_tictactoe_dpo6epoch_v2",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.46.2 | true | 32,064 | false | 0 | 96 | false | 1 | null |
yejinkim/forget10_expert_epoch7 | null | null | "2024-11-10T16:20:03Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phi",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"PhiForCausalLM"
] | null | null | gelu_new | 2,048 | 0.02 | 8,192 | 2,048 | phi | 32 | 24 | 32 | null | 10,000 | null | false | float32 | 4.46.0.dev0 | false | 51,200 | null | 0 | null | null | null | null |
second-state/Qwen2.5-Coder-14B-Instruct-GGUF | null | null | "2024-11-10T16:22:23Z" | null | null | 230 | null | null | null | null | 0 | transformers | [
"transformers",
"gguf",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"en",
"base_model:Qwen/Qwen2.5-Coder-14B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-14B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
gaianet/Qwen2.5-Coder-14B-Instruct-GGUF | null | null | "2024-11-10T16:22:34Z" | null | null | 78 | null | null | null | null | 0 | transformers | [
"transformers",
"gguf",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"en",
"base_model:Qwen/Qwen2.5-Coder-14B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-14B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
SufficientPrune3897/magnum-v4-123b-exl2-RPCAL-2.6bpw | null | null | "2024-11-10T16:22:34Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"chat",
"conversational",
"en",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"exl2",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 12,288 | 0.02 | 28,672 | 131,072 | mistral | 96 | 88 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.45.0.dev0 | false | 32,768 | null | 0 | 128 | null | null | null |
t2ance/slim_full_pretrain_inner_60000 | null | null | "2024-11-10T16:25:00Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
RyanYr/self-reflect_ministral8Bit_mg_dpo_psdp1 | null | null | "2024-11-10T16:29:55Z" | null | null | 15 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"generated_from_trainer",
"trl",
"dpo",
"conversational",
"arxiv:2305.18290",
"base_model:RyanYr/self-reflect_ministral8Bit_mg_dpo_psdp2",
"base_model:finetune:RyanYr/self-reflect_ministral8Bit_mg_dpo_psdp2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 12,288 | 32,768 | mistral | 32 | 36 | 8 | 0.00001 | 100,000,000 | 32,768 | false | bfloat16 | 4.45.2 | false | 131,073 | null | 0 | 128 | null | null | null |
kikeavi36/Orpo_Qwen2.5-3B-Instruct-FT | null | null | "2024-11-10T16:30:06Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,644 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | float16 | 4.45.2 | true | 151,665 | null | 0 | null | null | null | null |
theprint/RuDolph-Hermes-7B | null | null | "2024-11-10T16:45:17Z" | null | null | 46 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:cognitivecomputations/dolphin-2.2.1-mistral-7b",
"base_model:merge:cognitivecomputations/dolphin-2.2.1-mistral-7b",
"base_model:teknium/OpenHermes-2.5-Mistral-7B",
"base_model:merge:teknium/OpenHermes-2.5-Mistral-7B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 32,000 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 10,000 | 4,096 | false | bfloat16 | 4.45.2 | false | 32,002 | null | 0 | 128 | null | null | null |
t2ance/slim_full_pretrain_inner_65000 | null | null | "2024-11-10T17:03:03Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
alea-institute/kl3m-003-1.7b | null | null | "2024-11-10T17:06:13Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt_neox",
"text-generation",
"kl3m",
"kl3m-003",
"legal",
"financial",
"enterprise",
"slm",
"en",
"license:cc-by-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPTNeoXForCausalLM"
] | null | null | gelu | 2,048 | 0.02 | 8,192 | 8,192 | gpt_neox | 32 | 32 | null | null | 10,000 | null | false | bfloat16 | 4.46.1 | true | 32,768 | true | 0 | null | null | null | null |
saipragatheeswarg/salary_predictor_gguf | null | null | "2024-11-10T17:06:34Z" | null | null | 66 | null | null | null | null | 0 | transformers | [
"transformers",
"gguf",
"llama",
"text-generation-inference",
"unsloth",
"text-generation",
"en",
"base_model:meta-llama/Llama-3.1-8B-Instruct",
"base_model:quantized:meta-llama/Llama-3.1-8B-Instruct",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | null | null | null | null | null | null | null | null | llama | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
tmickleydoyle/tinyllama-colorist-v0 | null | null | "2024-11-10T17:09:09Z" | null | null | 181 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 5,632 | 2,048 | llama | 32 | 22 | 4 | 0.00001 | 10,000 | null | false | float16 | 4.46.2 | true | 32,000 | false | 0 | 64 | false | 1 | null |
shashikanth-a/model | null | null | "2024-11-10T17:12:27Z" | null | null | 38 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"en",
"base_model:unsloth/llama-3-8b-bnb-4bit",
"base_model:finetune:unsloth/llama-3-8b-bnb-4bit",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.46.2 | true | 128,256 | false | 0 | 128 | false | 1 | null |
minhnguyent546/Med-Alpaca-2-7b-chat | null | null | "2024-11-10T17:13:13Z" | null | null | 17 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float16 | 4.42.3 | true | 32,000 | false | 0 | null | false | 1 | null |
Gopalatius/Gemma-2-9b-it-QA-ID-merged | null | null | "2024-11-10T17:23:50Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 256,000 | 256,001 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | float16 | 4.45.2 | true | 256,002 | false | 0 | 256 | null | null | null |
ihughes15234/phi35_tictactoe_dpo1epoch_v5 | null | null | "2024-11-10T17:32:57Z" | null | null | 26 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"conversational",
"en",
"base_model:ihughes15234/phi_3_5_mini_tictactoe1200",
"base_model:finetune:ihughes15234/phi_3_5_mini_tictactoe1200",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.46.2 | true | 32,064 | false | 0 | 96 | false | 1 | null |
pwork7/gemma7b_meta_math_1epoch_with_kn | null | null | "2024-11-10T17:40:48Z" | null | null | 11 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GemmaForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,072 | 0.02 | 24,576 | 8,192 | gemma | 16 | 28 | 16 | 0.000001 | 10,000 | null | null | bfloat16 | 4.43.3 | false | 256,000 | false | 0 | 256 | null | null | null |
ICT3214-Group5/MD5_gpt_neo_v1.1.5 | null | null | "2024-11-10T17:54:59Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt_neo",
"text-generation",
"generated_from_trainer",
"base_model:EleutherAI/gpt-neo-125m",
"base_model:finetune:EleutherAI/gpt-neo-125m",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPTNeoForCausalLM"
] | 50,256 | 50,256 | null | 768 | 0.02 | null | 2,048 | gpt_neo | null | null | null | null | null | null | null | float32 | 4.46.1 | true | 50,257 | null | 0 | null | null | null | null |
pxyyy/rlhflow_mixture_clean_empty_round_with_dart_scalebiosampled-600k | null | null | "2024-11-10T18:16:41Z" | null | null | 125 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.2 | true | 128,256 | false | 0 | null | false | 1 | null |
pwork7/gemma7b_meta_math_2epoch_with_kn | null | null | "2024-11-10T18:38:31Z" | null | null | 11 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GemmaForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,072 | 0.02 | 24,576 | 8,192 | gemma | 16 | 28 | 16 | 0.000001 | 10,000 | null | null | bfloat16 | 4.43.3 | false | 256,000 | false | 0 | 256 | null | null | null |
jacobhoffmann/TestGen_v2.1-codegemma-7b | null | null | "2024-11-10T18:54:17Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GemmaForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,072 | 0.02 | 24,576 | 8,192 | gemma | 16 | 28 | 16 | 0.000001 | 10,000 | null | null | float16 | 4.47.0.dev0 | true | 256,000 | false | 0 | 256 | null | null | null |
zelk12/MT-Gen2-IF-gemma-2-MTMMT1-9B | null | null | "2024-11-10T18:56:36Z" | null | null | 11 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT-Merge-gemma-2-9B",
"base_model:merge:zelk12/MT-Merge-gemma-2-9B",
"base_model:zelk12/MT1-gemma-2-9B",
"base_model:merge:zelk12/MT1-gemma-2-9B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.2 | true | 256,000 | false | 0 | 256 | null | null | null |
ihughes15234/phi35_tictactoe_dpo2epoch_v5 | null | null | "2024-11-10T18:58:50Z" | null | null | 26 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"conversational",
"en",
"base_model:ihughes15234/phi35_tictactoe_dpo1epoch_v5",
"base_model:finetune:ihughes15234/phi35_tictactoe_dpo1epoch_v5",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.46.2 | true | 32,064 | false | 0 | 96 | false | 1 | null |
zelk12/MT-Gen2-BB-gemma-2-MTMMT2-9B | null | null | "2024-11-10T19:08:18Z" | null | null | 11 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT-Merge-gemma-2-9B",
"base_model:merge:zelk12/MT-Merge-gemma-2-9B",
"base_model:zelk12/MT2-gemma-2-9B",
"base_model:merge:zelk12/MT2-gemma-2-9B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.2 | true | 256,000 | false | 0 | 256 | null | null | null |
kanishka/opt-babylm2-rewritten-clean-spacy-32k-earlystop-40epochs_seed-42_1e-3 | null | null | "2024-11-10T19:10:32Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"opt",
"text-generation",
"generated_from_trainer",
"dataset:kanishka/babylm2-rewritten-clean-spacy",
"model-index",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"OPTForCausalLM"
] | 1 | 1 | null | 768 | null | null | 256 | opt | 12 | 12 | null | null | null | null | null | float32 | 4.45.1 | true | 32,768 | null | 0 | null | null | null | null |
zelk12/MT-Gen2-MA-gemma-2-MT4RAv0.1t0.25-9B | null | null | "2024-11-10T19:18:14Z" | null | null | 9 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT4-gemma-2-9B",
"base_model:merge:zelk12/MT4-gemma-2-9B",
"base_model:zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.25",
"base_model:merge:zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.25",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.2 | true | 256,000 | false | 0 | 256 | null | null | null |
broalantap/GPT2-large-4-56000steps | null | null | "2024-11-10T19:28:16Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | bfloat16 | 4.41.2 | true | 50,321 | null | null | null | null | null | null |
zelk12/MT-Gen2-MUB-gemma-2-9B | null | null | "2024-11-10T20:04:49Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT-Gen2-BB-gemma-2-MTMMT2-9B",
"base_model:merge:zelk12/MT-Gen2-BB-gemma-2-MTMMT2-9B",
"base_model:zelk12/MT-Gen2-MU-gemma-2-MT1RAv0.1t0.25-9B",
"base_model:merge:zelk12/MT-Gen2-MU-gemma-2-MT1RAv0.1t0.25-9B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.2 | true | 256,000 | false | 0 | 256 | null | null | null |
Apel-sin/rewiz-qwen-2.5-14b | null | null | "2024-11-10T20:13:36Z" | null | null | 12 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"sft",
"theprint",
"rewiz",
"en",
"dataset:theprint/ReWiz",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 5,120 | 0.02 | 13,824 | 131,072 | qwen2 | 40 | 48 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.44.2 | true | 152,064 | null | 0 | null | null | null | null |
pwork7/gemma7b_meta_math_3epoch_with_kn | null | null | "2024-11-10T21:00:43Z" | null | null | 19 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GemmaForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,072 | 0.02 | 24,576 | 8,192 | gemma | 16 | 28 | 16 | 0.000001 | 10,000 | null | null | bfloat16 | 4.43.3 | false | 256,000 | false | 0 | 256 | null | null | null |
m7alek/MathQA | null | null | "2024-11-10T21:16:25Z" | null | null | 15 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma",
"text-generation",
"text-generation-inference",
"ar",
"en",
"dataset:m7alek/ninth_file",
"dataset:m7alek/Fifth_file",
"dataset:m7alek/eighth_file",
"base_model:nvidia/Llama-3.1-Nemotron-70B-Instruct-HF",
"base_model:quantized:nvidia/Llama-3.1-Nemotron-70B-Instruct-HF",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"GemmaForCausalLM"
] | 2 | 1 | gelu | 3,072 | 0.02 | 24,576 | 8,192 | gemma | 16 | 28 | 16 | 0.000001 | 10,000 | null | null | float32 | 4.39.3 | true | 256,000 | false | 0 | 256 | null | null | null |
sloshywings/Qwen2.5-Coder-7B-Instruct-Uncensored | null | null | "2024-11-10T21:23:13Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.44.2 | true | 152,064 | null | 0 | null | null | null | null |
GitBag/reasoning_rebel_iter_2_1731041913_eta_1e1_lr_3e-7_1731215964 | null | null | "2024-11-10T21:23:55Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.45.1 | true | 128,256 | false | 0 | 128 | false | 1 | null |
GitBag/reasoning_rebel_iter_2_1731041913_eta_1e2_lr_3e-7_1731238952 | null | null | "2024-11-10T21:31:13Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.45.1 | true | 128,256 | false | 0 | 128 | false | 1 | null |
thiagoads/llama-legalpt | null | null | "2024-11-10T21:34:51Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 512 | 0.02 | 2,048 | 512 | llama | 32 | 4 | 32 | 0.000001 | 10,000 | null | false | float32 | 4.46.0.dev0 | true | 29,794 | false | 0 | 16 | false | 1 | null |
GitBag/reasoning_rebel_iter_2_1731041913_eta_1e7_lr_3e-7_1731263911 | null | null | "2024-11-10T21:36:21Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.45.1 | true | 128,256 | false | 0 | 128 | false | 1 | null |
alea-institute/kl3m-002-170m-patent | null | null | "2024-11-10T21:36:40Z" | null | null | 15 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt_neox",
"text-generation",
"kl3m",
"kl3m-002",
"patent",
"all the patents",
"slm",
"en",
"license:cc-by-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPTNeoXForCausalLM"
] | 0 | 1 | gelu | 1,024 | 0.02 | 1,024 | 4,096 | gpt_neox | 16 | 16 | 8 | 0.000001 | 10,000 | null | false | float32 | 4.38.0 | false | 32,768 | true | 0 | null | null | null | null |
GitBag/reasoning_rebel_iter_2_1731041913_eta_1e3_lr_3e-7_1731243878 | null | null | "2024-11-10T21:40:40Z" | null | null | 5 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.45.1 | true | 128,256 | false | 0 | 128 | false | 1 | null |
VLKVLK/media-file-recognizer-v2 | null | null | "2024-11-10T21:41:17Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"llama-factory",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 1,536 | 0.02 | 8,960 | 32,768 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.44.2 | true | 151,936 | null | 0 | null | null | null | null |
GitBag/reasoning_rebel_iter_2_1731041913_eta_1e4_lr_3e-7_1731249005 | null | null | "2024-11-10T21:45:56Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.45.1 | true | 128,256 | false | 0 | 128 | false | 1 | null |
GitBag/reasoning_rebel_iter_2_1731041913_eta_1e6_lr_3e-7_1731258952 | null | null | "2024-11-10T21:51:07Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.45.1 | true | 128,256 | false | 0 | 128 | false | 1 | null |
GitBag/reasoning_rebel_iter_2_1731041913_eta_1e8_lr_3e-7_1731268881 | null | null | "2024-11-10T21:56:21Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.45.1 | true | 128,256 | false | 0 | 128 | false | 1 | null |
AlSamCur123/MinistralTextCompleteContinuedFine | null | null | "2024-11-10T21:56:51Z" | null | null | 15 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"sft",
"conversational",
"en",
"base_model:unsloth/mistral-7b-instruct-v0.3-bnb-4bit",
"base_model:finetune:unsloth/mistral-7b-instruct-v0.3-bnb-4bit",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | float16 | 4.46.2 | true | 32,768 | null | 0 | 128 | null | null | null |
GitBag/reasoning_rebel_iter_2_1731041913_eta_1e5_lr_3e-7_1731253987 | null | null | "2024-11-10T22:01:46Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.45.1 | true | 128,256 | false | 0 | 128 | false | 1 | null |
tanquangduong/Qwen2.5-0.5B-Instruct-TinyStories | null | null | "2024-11-10T22:25:01Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"sft",
"conversational",
"en",
"base_model:unsloth/Qwen2.5-0.5B",
"base_model:finetune:unsloth/Qwen2.5-0.5B",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.46.2 | true | 151,936 | null | 0 | null | null | null | null |
qgallouedec/tiny-BartForCausalLM | null | null | "2024-11-10T22:40:58Z" | null | null | 916 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"bart",
"text-generation",
"trl",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"BartForCausalLM"
] | 0 | 2 | null | null | null | null | 1,024 | bart | null | 12 | null | null | null | null | null | float32 | 4.47.0.dev0 | true | 50,270 | null | 0 | null | null | null | null |
mergekit-community/GutenBerg_Nyxora_magnum-v4-27b | null | null | "2024-11-10T22:47:49Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"arxiv:2203.05482",
"base_model:DazzlingXeno/GutenBerg_Nyxora",
"base_model:merge:DazzlingXeno/GutenBerg_Nyxora",
"base_model:anthracite-org/magnum-v4-27b",
"base_model:merge:anthracite-org/magnum-v4-27b",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 8 | gelu_pytorch_tanh | 4,608 | 0.02 | 36,864 | 8,192 | gemma2 | 32 | 46 | 16 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.2 | false | 256,000 | false | 0 | 128 | null | null | null |
qgallouedec/tiny-LlamaForCausalLM | null | null | "2024-11-10T23:20:15Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"trl",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8 | 0.02 | 32 | 2,048 | llama | 4 | 2 | 2 | 0.000001 | 10,000 | null | false | float32 | 4.47.0.dev0 | true | 32,023 | false | 0 | 2 | false | 1 | null |
KR-X-AI/krx-qwen2.5-7b-instruct-v3-m-e-m-s | null | null | "2024-11-10T23:28:44Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"conversational",
"en",
"base_model:KR-X-AI/krx-qwen2.5-7b-instruct-v2-m-e",
"base_model:finetune:KR-X-AI/krx-qwen2.5-7b-instruct-v2-m-e",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
yjwon/mp_mistral7bv3_sft_dpo_beta2e-1_epoch1 | null | null | "2024-11-10T23:37:40Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mp_mistral7bv3_sft_dpo_beta2e-1_epoch2 | null | null | "2024-11-10T23:42:21Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
rawsh/mirrorqwen2.5-0.5b-SimPO-1 | null | null | "2024-11-10T23:45:17Z" | null | null | 19 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"generated_from_trainer",
"trl",
"cpo",
"unsloth",
"arxiv:2401.08417",
"base_model:rawsh/mirrorqwen2.5-0.5b-SimPO-0",
"base_model:finetune:rawsh/mirrorqwen2.5-0.5b-SimPO-0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | float16 | 4.46.2 | false | 151,936 | null | 0 | null | null | null | null |
yjwon/mp_mistral7bv3_sft_dpo_beta2e-1_epoch3 | null | null | "2024-11-10T23:45:49Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mp_mistral7bv3_sft_dpo_beta2e-1_epoch4 | null | null | "2024-11-10T23:48:48Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mp_mistral7bv3_sft_dpo_beta2e-1_epoch5 | null | null | "2024-11-10T23:51:49Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mpg27_mistral7bv3_sft_dpo_beta1e-1_epoch1 | null | null | "2024-11-10T23:55:36Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mpg27_mistral7bv3_sft_dpo_beta1e-1_epoch2 | null | null | "2024-11-10T23:59:08Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
NESPED-GEN/TinyLlama-text2SQL-v0 | null | null | "2024-11-11T00:03:07Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 5,632 | 2,048 | llama | 32 | 22 | 4 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
yjwon/mpg27_mistral7bv3_sft_dpo_beta1e-1_epoch3 | null | null | "2024-11-11T00:03:46Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mpg27_mistral7bv3_sft_dpo_beta1e-1_epoch4 | null | null | "2024-11-11T00:06:45Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
NESPED-GEN/TinyLlama-text2SQL-indentacao | null | null | "2024-11-11T00:11:26Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 5,632 | 2,048 | llama | 32 | 22 | 4 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
yjwon/mpg27_mistral7bv3_sft_dpo_beta1e-1_epoch5 | null | null | "2024-11-11T00:22:55Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mpg27_mistral7bv3_sft_ogd_rms_epoch1 | null | null | "2024-11-11T00:29:52Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
NESPED-GEN/TinyLlama-text2SQL-alias | null | null | "2024-11-11T00:30:04Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 5,632 | 2,048 | llama | 32 | 22 | 4 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
yjwon/mpg27_mistral7bv3_sft_ogd_rms_epoch2 | null | null | "2024-11-11T00:33:12Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mpg27_mistral7bv3_sft_ogd_rms_epoch3 | null | null | "2024-11-11T00:38:08Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
NESPED-GEN/TinyLlama-text2SQL-alias-indentacao | null | null | "2024-11-11T00:39:07Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 5,632 | 2,048 | llama | 32 | 22 | 4 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
pavl0/LLaMA100M | null | null | "2024-11-11T00:41:05Z" | null | null | 9 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"pavl0_llama",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LLaMA"
] | null | null | null | null | null | null | null | pavl0_llama | null | null | null | null | null | null | null | float32 | 4.46.2 | null | 32,000 | null | null | null | null | null | null |
yjwon/mpg27_mistral7bv3_sft_ogd_rms_epoch4 | null | null | "2024-11-11T00:42:02Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mpg27_mistral7bv3_sft_ogd_rms_epoch5 | null | null | "2024-11-11T00:45:30Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
SingularityHJY/GPTNeoX-160M-Minipile | null | null | "2024-11-11T00:59:20Z" | null | null | 23 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt_neox",
"text-generation",
"generated_from_trainer",
"custom_code",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPTNeoXForCausalLM"
] | 0 | 0 | gelu | 768 | 0.02 | 3,072 | 2,048 | gpt_neox | 12 | 12 | null | null | 10,000 | null | false | bfloat16 | 4.45.0 | true | 50,304 | true | 0 | null | null | null | null |
Onlydrinkwater/llama2-1B-scratch | null | null | "2024-11-11T01:05:50Z" | null | null | 131 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 8,192 | 4,096 | llama | 32 | 16 | 8 | 0.00001 | 10,000 | null | false | float32 | 4.46.1 | true | 32,000 | false | 0 | 64 | false | 1 | null |
thiagoads/bitllama-legalpt | null | null | "2024-11-11T01:05:55Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 512 | 0.02 | 2,048 | 512 | llama | 32 | 4 | 32 | 0.000001 | 10,000 | null | false | float32 | 4.44.2 | true | 29,794 | false | 0 | null | false | 1 | null |
Zongxiong/Test2 | null | null | "2024-11-11T01:08:38Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float32 | 4.45.2 | true | 32,000 | false | 0 | 128 | false | 1 | null |
thiagoads/bitnet-legalpt | null | null | "2024-11-11T01:45:45Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 512 | 0.02 | 2,048 | 512 | llama | 32 | 4 | 32 | 0.000001 | 10,000 | null | false | float32 | 4.46.0.dev0 | true | 29,794 | false | 0 | 16 | false | 1 | null |
dnwns/jw-chrome-dataset-short | null | null | "2024-11-11T01:54:35Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | float32 | 4.46.2 | false | 128,256 | false | 0 | 128 | false | 1 | null |
Srijith-rkr/deepseek_SFT_history | null | null | "2024-11-11T02:46:47Z" | null | null | 44 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,000 | 100,015 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 30 | 32 | 0.000001 | 10,000 | null | false | bfloat16 | 4.46.2 | true | 102,400 | false | 0 | 128 | false | 1 | null |
0xayman/Qwen-1.5B-fc-v4 | null | null | "2024-11-11T02:47:36Z" | null | null | 41 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 1,536 | 0.02 | 8,960 | 32,768 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.46.2 | true | 151,936 | null | 0 | null | null | null | null |
annaaimeri/modelo-personalizado | null | null | "2024-11-11T02:52:50Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"generated_from_trainer",
"base_model:DeepESP/gpt2-spanish",
"base_model:finetune:DeepESP/gpt2-spanish",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
yejinkim/forget1_expert_epoch1 | null | null | "2024-11-11T02:57:52Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phi",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"PhiForCausalLM"
] | null | null | gelu_new | 2,048 | 0.02 | 8,192 | 2,048 | phi | 32 | 24 | 32 | null | 10,000 | null | false | float32 | 4.46.0.dev0 | false | 51,200 | null | 0 | null | null | null | null |
RyanYr/self-reflect_mistralSmallit_mg_dpo_iter1 | null | null | "2024-11-11T03:23:53Z" | null | null | 35 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"generated_from_trainer",
"trl",
"dpo",
"conversational",
"arxiv:2305.18290",
"base_model:mistralai/Mistral-Small-Instruct-2409",
"base_model:finetune:mistralai/Mistral-Small-Instruct-2409",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 6,144 | 0.02 | 16,384 | 32,768 | mistral | 48 | 56 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.45.2 | false | 32,769 | null | 0 | 128 | null | null | null |
kayfour/T3Q-Qwen2.5-7B-it-ko-safe2 | null | null | "2024-11-11T03:37:01Z" | null | null | 21 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
rawsh/mirrorqwen2.5-0.5b-SimPO-2 | null | null | "2024-11-11T03:47:46Z" | null | null | 12 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"generated_from_trainer",
"trl",
"cpo",
"unsloth",
"arxiv:2401.08417",
"base_model:rawsh/mirrorqwen2.5-0.5b-SimPO-1",
"base_model:finetune:rawsh/mirrorqwen2.5-0.5b-SimPO-1",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | float16 | 4.46.2 | false | 151,936 | null | 0 | null | null | null | null |