Vexemous committed on
Commit
0136c38
1 Parent(s): 1d5013b

Upload tokenizer

Browse files
Files changed (2) hide show
  1. README.md +5 -5
  2. tokenizer.json +1 -6
README.md CHANGED
@@ -1,18 +1,19 @@
1
  ---
2
  license: apache-2.0
3
- base_model: t5-small
4
  tags:
5
  - generated_from_trainer
6
  datasets:
7
  - xsum
8
  metrics:
9
  - rouge
 
 
10
  model-index:
11
  - name: t5-small-finetuned-xsum
12
  results:
13
  - task:
14
- name: Sequence-to-sequence Language Modeling
15
  type: text2text-generation
 
16
  dataset:
17
  name: xsum
18
  type: xsum
@@ -20,10 +21,9 @@ model-index:
20
  split: train[:10%]
21
  args: default
22
  metrics:
23
- - name: Rouge1
24
- type: rouge
25
  value: 27.0616
26
- pipeline_tag: summarization
27
  ---
28
 
29
  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
 
1
  ---
2
  license: apache-2.0
 
3
  tags:
4
  - generated_from_trainer
5
  datasets:
6
  - xsum
7
  metrics:
8
  - rouge
9
+ base_model: t5-small
10
+ pipeline_tag: summarization
11
  model-index:
12
  - name: t5-small-finetuned-xsum
13
  results:
14
  - task:
 
15
  type: text2text-generation
16
+ name: Sequence-to-sequence Language Modeling
17
  dataset:
18
  name: xsum
19
  type: xsum
 
21
  split: train[:10%]
22
  args: default
23
  metrics:
24
+ - type: rouge
 
25
  value: 27.0616
26
+ name: Rouge1
27
  ---
28
 
29
  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
tokenizer.json CHANGED
@@ -1,11 +1,6 @@
1
  {
2
  "version": "1.0",
3
- "truncation": {
4
- "direction": "Right",
5
- "max_length": 128,
6
- "strategy": "LongestFirst",
7
- "stride": 0
8
- },
9
  "padding": null,
10
  "added_tokens": [
11
  {
 
1
  {
2
  "version": "1.0",
3
+ "truncation": null,
 
 
 
 
 
4
  "padding": null,
5
  "added_tokens": [
6
  {