Commit 91ac9f7 committed by Nathan Habib
Parents (2): 1452f14 a460921

Merge branch 'main' of https://huggingface.co/datasets/open-llm-leaderboard/requests

HanNayeoniee/LHK_44_eval_request_False_float16_Original.json CHANGED
@@ -7,7 +7,7 @@
  "params": 10.732,
  "architectures": "LlamaForCausalLM",
  "weight_type": "Original",
- "status": "RUNNING",
+ "status": "FINISHED",
  "submitted_time": "2024-01-29T10:29:49Z",
  "model_type": "\ud83d\udd36 : fine-tuned on domain-specific datasets",
  "job_id": "1571183",
Qwen/Qwen2-beta-14B_eval_request_False_bfloat16_Original.json CHANGED
@@ -5,13 +5,13 @@
  "private": false,
  "precision": "bfloat16",
  "weight_type": "Original",
- "status": "PENDING",
+ "status": "RUNNING",
  "submitted_time": "2024-01-29T10:39:12Z",
  "model_type": "pretrained",
  "likes": 0,
  "params": 14.167,
  "license": "other",
  "json_filepath": "",
- "job_id": "1571438",
- "job_start_time": "2024-01-29T10:44:32.432381"
- }
+ "job_id": "1574110",
+ "job_start_time": "2024-01-29T14:23:11.524058"
+ }
SC99/Mistral-7B-summ-ia3-tuned-8h_eval_request_False_bfloat16_Adapter.json CHANGED
@@ -7,7 +7,7 @@
  "params": 7.0,
  "architectures": "?",
  "weight_type": "Adapter",
- "status": "RUNNING",
+ "status": "FINISHED",
  "submitted_time": "2024-01-29T08:57:48Z",
  "model_type": "\ud83d\udd36 : fine-tuned on domain-specific datasets",
  "job_id": "1570795",
SC99/Mistral-7B-summ-lora-tuned-8h_eval_request_False_bfloat16_Adapter.json CHANGED
@@ -7,7 +7,7 @@
  "params": 7.0,
  "architectures": "?",
  "weight_type": "Adapter",
- "status": "RUNNING",
+ "status": "FINISHED",
  "submitted_time": "2024-01-29T09:00:46Z",
  "model_type": "\ud83d\udd36 : fine-tuned on domain-specific datasets",
  "job_id": "1570811",
yanolja/Bookworm-10.7B-v0.4-DPO_eval_request_False_float16_Original.json ADDED
@@ -0,0 +1 @@
+ {"model": "yanolja/Bookworm-10.7B-v0.4-DPO", "base_model": "yanolja/KoSOLAR-10.7B-v0.2", "revision": "main", "private": false, "precision": "float16", "params": 10.805, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-01-29T14:23:35Z", "model_type": "\ud83d\udd36 : fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null}