Upload folder using huggingface_hub

Files changed:

- 1_Pooling/config.json +10 -0
- README.md +1106 -3
- config.json +24 -0
- config_sentence_transformers.json +10 -0
- model.safetensors +3 -0
- modules.json +14 -0
- sentence_bert_config.json +4 -0
- special_tokens_map.json +51 -0
- tokenizer.json +0 -0
- tokenizer_config.json +72 -0
- training_args.bin +3 -0
- vocab.txt +0 -0
1_Pooling/config.json
ADDED

{
  "word_embedding_dimension": 768,
  "pooling_mode_cls_token": true,
  "pooling_mode_mean_tokens": false,
  "pooling_mode_max_tokens": false,
  "pooling_mode_mean_sqrt_len_tokens": false,
  "pooling_mode_weightedmean_tokens": false,
  "pooling_mode_lasttoken": false,
  "include_prompt": true
}
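This pooling module selects the CLS token of the 768-dimensional MPNet token embeddings as the sentence embedding. As a rough sketch of how sentence-transformers assembles such a Transformer + Pooling stack from this kind of config (illustrative only; the repository already ships the saved modules, and the base-model name is taken from the README below):

```python
from sentence_transformers import SentenceTransformer, models

# Rebuild the two-module stack this config describes (a sketch, not the saved weights).
word_embedding_model = models.Transformer(
    "sentence-transformers/multi-qa-mpnet-base-dot-v1", max_seq_length=512
)
pooling_model = models.Pooling(
    word_embedding_model.get_word_embedding_dimension(),  # 768
    pooling_mode_cls_token=True,    # matches "pooling_mode_cls_token": true
    pooling_mode_mean_tokens=False,
    pooling_mode_max_tokens=False,
)
model = SentenceTransformer(modules=[word_embedding_model, pooling_model])
print(model)  # shows the same Transformer -> Pooling architecture as in the README
```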
README.md
CHANGED

---
language: []
library_name: sentence-transformers
tags:
- sentence-transformers
- sentence-similarity
- feature-extraction
- generated_from_trainer
- dataset_size:89218
- loss:MultipleNegativesRankingLoss
base_model: sentence-transformers/multi-qa-mpnet-base-dot-v1
datasets: []
metrics:
- cosine_accuracy@1
- cosine_accuracy@3
- cosine_accuracy@5
- cosine_accuracy@10
- cosine_precision@1
- cosine_precision@3
- cosine_precision@5
- cosine_precision@10
- cosine_recall@1
- cosine_recall@3
- cosine_recall@5
- cosine_recall@10
- cosine_ndcg@10
- cosine_mrr@10
- cosine_map@100
- dot_accuracy@1
- dot_accuracy@3
- dot_accuracy@5
- dot_accuracy@10
- dot_precision@1
- dot_precision@3
- dot_precision@5
- dot_precision@10
- dot_recall@1
- dot_recall@3
- dot_recall@5
- dot_recall@10
- dot_ndcg@10
- dot_mrr@10
- dot_map@100
widget:
- source_sentence: Pulmonary stenoses, brachytelephalangy, inner ear deafness
  sentences:
  - "This article needs more medical references for verification or relies too heavily\
    \ on primary sources. Please review the contents of the article and add the appropriate\
    \ references if you can. Unsourced or poorly sourced material may be challenged\
    \ and removed. \nFind sources: \"Chondropathy\" – news · newspapers · books ·\
    \ scholar · JSTOR (October 2020) \n \nChondropathy \nSpecialtyOrthopedics \
    \ \n \nChondropathy refers to a disease of the cartilage. It is frequently divided\
    \ into 5 grades, with 0-2 defined as normal and 3-4 defined as diseased.\n\n##\
    \ Contents\n\n * 1 Some common diseases affecting/involving the cartilage\n \
    \ * 2 Repairing articular cartilage damage\n * 3 References\n * 4 External links\n\
    \n## Some common diseases affecting/involving the cartilage[edit]"
  - 'A number sign (#) is used with this entry because of evidence that Keutel syndrome
    (KTLS) is caused by homozygous mutation in the gene encoding the human matrix
    Gla protein (MGP; 154870) on chromosome 12p12.


    Description


    Keutel syndrome is an autosomal recessive disorder characterized by multiple peripheral
    pulmonary stenoses, brachytelephalangy, inner ear deafness, and abnormal cartilage
    ossification or calcification (summary by Khosroshahi et al., 2014).


    Clinical Features'
  - '## Description


    Primary or spontaneous detachment of the retina occurs due to underlying ocular
    disease and often involves the vitreous as well as the retina. The precipitating
    event is formation of a retinal tear or hole, which permits fluid to accumulate
    under the sensory layers of the retina and creates an intraretinal cleavage that
    destroys the neurosensory process of visual reception. Vitreoretinal degeneration
    and tear formation are painless phenomena, and in most cases, significant vitreoretinal
    pathology is found only after detachment of the retina starts to cause loss of
    vision or visual field. Without surgical intervention, retinal detachment will
    almost inevitably lead to total blindness (summary by McNiel and McPherson, 1971).


    Clinical Features'
- source_sentence: APS, catastrophic, diagnostic criteria, treatment options
  sentences:
  - 'A number sign (#) is used with this entry because of evidence that myofibrillar
    myopathy-8 (MFM8) is caused by homozygous or compound heterozygous mutation in
    the PYROXD1 gene (617220) on chromosome 12p12.


    Description


    Myofibrillar myopathy-8 is an autosomal recessive myopathy characterized by childhood
    onset of slowly progressive proximal muscle weakness and atrophy resulting in
    increased falls, gait problems, and difficulty running or climbing stairs. Upper
    and lower limbs are affected, and some individuals develop distal muscle weakness
    and atrophy. Ambulation is generally preserved, and patients do not have significant
    respiratory compromise. Muscle biopsy shows a mix of myopathic features, including
    myofibrillar inclusions and sarcomeric disorganization (summary by O''Grady et
    al., 2016).


    For a general phenotypic description and a discussion of genetic heterogeneity
    of myofibrillar myopathy, see MFM1 (601419).


    Clinical Features'
  - "Rectal tenesmus \nSpecialtyGeneral surgery \n \nRectal tenesmus is a feeling\
    \ of incomplete defecation. It is the sensation of inability or difficulty to\
    \ empty the bowel at defecation, even if the bowel contents have already been\
    \ evacuated. Tenesmus indicates the feeling of a residue, and is not always correlated\
    \ with the actual presence of residual fecal matter in the rectum. It is frequently\
    \ painful and may be accompanied by involuntary straining and other gastrointestinal\
    \ symptoms. Tenesmus has both a nociceptive and a neuropathic component.\n\nVesical\
    \ tenesmus is a similar condition, experienced as a feeling of incomplete voiding\
    \ despite the bladder being empty.\n\nOften, rectal tenesmus is simply called\
    \ tenesmus. The term rectal tenesmus is a retronym to distinguish defecation-related\
    \ tenesmus from vesical tenesmus.[1]"
  - "This article needs additional citations for verification. Please help improve\
    \ this article by adding citations to reliable sources. Unsourced material may\
    \ be challenged and removed. \nFind sources: \"Catastrophic antiphospholipid\
    \ syndrome\" – news · newspapers · books · scholar · JSTOR (February 2018) (Learn\
    \ how and when to remove this template message) \n \nCatastrophic antiphospholipid\
    \ syndrome \nOther namesCatastrophic APS"
- source_sentence: Excess cholesterol, foam cells, gallbladder wall changes
  sentences:
  - "Cholesterolosis of gallbladder \nMicrograph of cholesterolosis of the gallbladder,\
    \ with an annotated foam cell. H&E stain. \nSpecialtyGastroenterology \n \n\
    In surgical pathology, strawberry gallbladder, more formally cholesterolosis of\
    \ the gallbladder and gallbladder cholesterolosis, is a change in the gallbladder\
    \ wall due to excess cholesterol.[1]\n\nThe name strawberry gallbladder comes\
    \ from the typically stippled appearance of the mucosal surface on gross examination,\
    \ which resembles a strawberry. Cholesterolosis results from abnormal deposits\
    \ of cholesterol esters in macrophages within the lamina propria (foam cells)\
    \ and in mucosal epithelium. The gallbladder may be affected in a patchy localized\
    \ form or in a diffuse form. The diffuse form macroscopically appears as a bright\
    \ red mucosa with yellow mottling (due to lipid), hence the term strawberry gallbladder.\
    \ It is not tied to cholelithiasis (gallstones) or cholecystitis (inflammation\
    \ of the gallbladder).[2]\n\n## Contents"
  - Meningococcal meningitis is an acute bacterial disease caused by Neisseria meningitides
    that presents usually, but not always, with a rash (non blanching petechial or
    purpuric rash), progressively developing signs of meningitis (fever, vomiting,
    headache, photophobia, and neck stiffness) and later leading to confusion, delirium
    and drowsiness. Neck stiffness and photophobia are often absent in infants and
    young children who may manifest nonspecific signs such as irritability, inconsolable
    crying, poor feeding, and a bulging fontanel. Meningococcal meningitis may also
    present as part of early or late onset sepsis in neonates. The disease is potentially
    fatal. Surviving patients may develop neurological sequelae that include sensorineural
    hearing loss, seizures, spasticity, attention deficits and intellectual disability.
  - "Retiform parapsoriasis \nSpecialtyDermatology \n \nRetiform parapsoriasis\
    \ is a cutaneous condition, considered to be a type of large-plaque parapsoriasis.[1]\
    \ It is characterized by widespread, ill-defined plaques on the skin, that have\
    \ a net-like or zebra-striped pattern.[2] Skin atrophy, a wasting away of the\
    \ cutaneous tissue, usually occurs within the area of these plaques.[1]\n\n##\
    \ See also[edit]\n\n * Parapsoriasis\n * Poikiloderma vasculare atrophicans\n\
    \ * List of cutaneous conditions\n\n## References[edit]\n\n 1. ^ a b Lambert\
    \ WC, Everett MA (Oct 1981). \"The nosology of parapsoriasis\". J. Am. Acad. Dermatol.\
    \ 5 (4): 373–95. doi:10.1016/S0190-9622(81)70100-2. PMID 7026622.\n 2. ^ Rapini,\
    \ Ronald P.; Bolognia, Jean L.; Jorizzo, Joseph L. (2007). Dermatology: 2-Volume\
    \ Set. St. Louis: Mosby. ISBN 1-4160-2999-0.\n\n## External links[edit]\n\nClassification\n\
    \nD\n\n * ICD-10: L41.5\n * ICD-9-CM: 696.2\n\n \n \n * v\n * t\n * e\n\
    \nPapulosquamous disorders \n \nPsoriasis\n\nPustular"
- source_sentence: Pulmonary hypoplasia, respiratory insufficiency, megaureter, hydronephrosis
  sentences:
  - 'A rare fetal lower urinary tract obstruction (LUTO) characterized by closure
    or failure to develop an opening in the urethra and resulting in obstructive uropathy
    presenting in utero as megacystis, oligohydramnios or anhydramnios, and potter
    sequence.


    ## Epidemiology


    Prevalence is unknown, but is higher in males than females.


    ## Clinical description


    Atresia of urethra often presents on routine antenatal ultrasound with megacystis,
    oligohydramnios or anhydramnios and sometimes urinary ascites. It may cause fetal
    death. In cases that survive to birth, additional symptoms include respiratory
    insufficiency due to pulmonary hypoplasia, megaureter, hydronephrosis and enlarged
    often cystic and functionally impaired/non-functional dysplastic kidneys as well
    as abdominal distention. Furthermore, a Potter sequence can be found due to oligo-
    or anhydramnios. Patients may present with patent urachus or vesicocutaneous fistula.


    ## Etiology'
  - X-linked distal spinal muscular atrophy type 3 is a rare distal hereditary motor
    neuropathy characterized by slowly progressive atrophy and weakness of distal
    muscles of hands and feet with normal deep tendon reflexes or absent ankle reflexes
    and minimal or no sensory loss, sometimes mild proximal weakness in the legs and
    feet and hand deformities in males.
  - 'A number sign (#) is used with this entry because Chudley-McCullough syndrome
    (CMCS) is caused by homozygous or compound heterozygous mutation in the GPSM2
    gene (609245) on chromosome 1p13.


    Description


    Chudley-McCullough syndrome is an autosomal recessive neurologic disorder characterized
    by early-onset sensorineural deafness and specific brain anomalies on MRI, including
    hypoplasia of the corpus callosum, enlarged cysterna magna with mild focal cerebellar
    dysplasia, and nodular heterotopia. Some patients have hydrocephalus. Psychomotor
    development is normal (summary by Alrashdi et al., 2011).


    Clinical Features'
- source_sentence: Thyroid-stimulating hormone receptor gene, chromosome 14q31, homozygous
    mutation
  sentences:
  - 'A number sign (#) is used with this entry because dermatofibrosarcoma protuberans
    is caused in most cases by a specific fusion of the COL1A1 gene (120150) with
    the PDGFB gene (190040); see 190040.0002.


    Description


    Dermatofibrosarcoma protuberans (DFSP) is an uncommon, locally aggressive, but
    rarely metastasizing tumor of the deep dermis and subcutaneous tissue. It typically
    presents during early or middle adult life and is most frequently located on the
    trunk and proximal extremities (Sandberg et al., 2003).


    Clinical Features


    DFSP was first described by Taylor (1890). Sirvent et al. (2003) stated that,
    because DFSP is relatively rare, grows slowly, and has a low level of aggressiveness,
    its clinical significance has been underestimated. In particular, they noted that
    the existence of pediatric cases has been overlooked.


    Gardner et al. (1998) described a father and son with dermatofibrosarcoma protuberans.
    The tumors arose at ages 43 and 14 years, respectively.'
  - "Visuospatial dysgnosia is a loss of the sense of \"whereness\" in the relation\
    \ of oneself to one's environment and in the relation of objects to each other.[1]\
    \ Visuospatial dysgnosia is often linked with topographical disorientation.\n\n\
    ## Contents\n\n * 1 Symptoms\n * 2 Lesion areas\n * 3 Case studies\n * 4 Therapies\n\
    \ * 5 References\n\n## Symptoms[edit]\n\nThe syndrome rarely presents itself\
    \ the same way in every patient. Some symptoms that occur may be:"
  - 'A number sign (#) is used with this entry because of evidence that congenital
    nongoitrous hypothyroidism-1 (CHNG1) is caused by homozygous or compound heterozygous
    mutation in the gene encoding the thyroid-stimulating hormone receptor (TSHR;
    603372) on chromosome 14q31.


    Description


    Resistance to thyroid-stimulating hormone (TSH; see 188540), a hallmark of congenital
    nongoitrous hypothyroidism, causes increased levels of plasma TSH and low levels
    of thyroid hormone. Only a subset of patients develop frank hypothyroidism; the
    remainder are euthyroid and asymptomatic (so-called compensated hypothyroidism)
    and are usually detected by neonatal screening programs (Paschke and Ludgate,
    1997).


    ### Genetic Heterogeneity of Congenital Nongoitrous Hypothyroidism'
pipeline_tag: sentence-similarity
model-index:
- name: SentenceTransformer based on sentence-transformers/multi-qa-mpnet-base-dot-v1
  results:
  - task:
      type: information-retrieval
      name: Information Retrieval
    dataset:
      name: Unknown
      type: unknown
    metrics:
    - type: cosine_accuracy@1
      value: 0.1900990099009901
      name: Cosine Accuracy@1
    - type: cosine_accuracy@3
      value: 0.5756875687568757
      name: Cosine Accuracy@3
    - type: cosine_accuracy@5
      value: 0.7932893289328933
      name: Cosine Accuracy@5
    - type: cosine_accuracy@10
      value: 0.8704070407040704
      name: Cosine Accuracy@10
    - type: cosine_precision@1
      value: 0.1900990099009901
      name: Cosine Precision@1
    - type: cosine_precision@3
      value: 0.19189585625229189
      name: Cosine Precision@3
    - type: cosine_precision@5
      value: 0.15865786578657867
      name: Cosine Precision@5
    - type: cosine_precision@10
      value: 0.08704070407040705
      name: Cosine Precision@10
    - type: cosine_recall@1
      value: 0.1900990099009901
      name: Cosine Recall@1
    - type: cosine_recall@3
      value: 0.5756875687568757
      name: Cosine Recall@3
    - type: cosine_recall@5
      value: 0.7932893289328933
      name: Cosine Recall@5
    - type: cosine_recall@10
      value: 0.8704070407040704
      name: Cosine Recall@10
    - type: cosine_ndcg@10
      value: 0.526584144074431
      name: Cosine Ndcg@10
    - type: cosine_mrr@10
      value: 0.41522683220700946
      name: Cosine Mrr@10
    - type: cosine_map@100
      value: 0.4194005014371134
      name: Cosine Map@100
    - type: dot_accuracy@1
      value: 0.188998899889989
      name: Dot Accuracy@1
    - type: dot_accuracy@3
      value: 0.5761826182618262
      name: Dot Accuracy@3
    - type: dot_accuracy@5
      value: 0.7954895489548955
      name: Dot Accuracy@5
    - type: dot_accuracy@10
      value: 0.8710671067106711
      name: Dot Accuracy@10
    - type: dot_precision@1
      value: 0.188998899889989
      name: Dot Precision@1
    - type: dot_precision@3
      value: 0.19206087275394204
      name: Dot Precision@3
    - type: dot_precision@5
      value: 0.15909790979097907
      name: Dot Precision@5
    - type: dot_precision@10
      value: 0.08710671067106711
      name: Dot Precision@10
    - type: dot_recall@1
      value: 0.188998899889989
      name: Dot Recall@1
    - type: dot_recall@3
      value: 0.5761826182618262
      name: Dot Recall@3
    - type: dot_recall@5
      value: 0.7954895489548955
      name: Dot Recall@5
    - type: dot_recall@10
      value: 0.8710671067106711
      name: Dot Recall@10
    - type: dot_ndcg@10
      value: 0.5265923432373186
      name: Dot Ndcg@10
    - type: dot_mrr@10
      value: 0.4149802896956161
      name: Dot Mrr@10
    - type: dot_map@100
      value: 0.41904239679820193
      name: Dot Map@100
---

# SentenceTransformer based on sentence-transformers/multi-qa-mpnet-base-dot-v1

This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [sentence-transformers/multi-qa-mpnet-base-dot-v1](https://huggingface.co/sentence-transformers/multi-qa-mpnet-base-dot-v1). It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.

## Model Details

### Model Description
- **Model Type:** Sentence Transformer
- **Base model:** [sentence-transformers/multi-qa-mpnet-base-dot-v1](https://huggingface.co/sentence-transformers/multi-qa-mpnet-base-dot-v1) <!-- at revision 3af7c6da5b3e1bea796ef6c97fe237538cbe6e7f -->
- **Maximum Sequence Length:** 512 tokens
- **Output Dimensionality:** 768 tokens
- **Similarity Function:** Dot Product
<!-- - **Training Dataset:** Unknown -->
<!-- - **Language:** Unknown -->
<!-- - **License:** Unknown -->

### Model Sources

- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)

### Full Model Architecture

```
SentenceTransformer(
  (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: MPNetModel
  (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
)
```

## Usage

### Direct Usage (Sentence Transformers)

First install the Sentence Transformers library:

```bash
pip install -U sentence-transformers
```

Then you can load this model and run inference.
```python
from sentence_transformers import SentenceTransformer

# Download from the 🤗 Hub
model = SentenceTransformer("sentence_transformers_model_id")
# Run inference
sentences = [
    'Thyroid-stimulating hormone receptor gene, chromosome 14q31, homozygous mutation',
    'A number sign (#) is used with this entry because of evidence that congenital nongoitrous hypothyroidism-1 (CHNG1) is caused by homozygous or compound heterozygous mutation in the gene encoding the thyroid-stimulating hormone receptor (TSHR; 603372) on chromosome 14q31.\n\nDescription\n\nResistance to thyroid-stimulating hormone (TSH; see 188540), a hallmark of congenital nongoitrous hypothyroidism, causes increased levels of plasma TSH and low levels of thyroid hormone. Only a subset of patients develop frank hypothyroidism; the remainder are euthyroid and asymptomatic (so-called compensated hypothyroidism) and are usually detected by neonatal screening programs (Paschke and Ludgate, 1997).\n\n### Genetic Heterogeneity of Congenital Nongoitrous Hypothyroidism',
    'Visuospatial dysgnosia is a loss of the sense of "whereness" in the relation of oneself to one\'s environment and in the relation of objects to each other.[1] Visuospatial dysgnosia is often linked with topographical disorientation.\n\n## Contents\n\n * 1 Symptoms\n * 2 Lesion areas\n * 3 Case studies\n * 4 Therapies\n * 5 References\n\n## Symptoms[edit]\n\nThe syndrome rarely presents itself the same way in every patient. Some symptoms that occur may be:',
]
embeddings = model.encode(sentences)
print(embeddings.shape)
# [3, 768]

# Get the similarity scores for the embeddings
similarities = model.similarity(embeddings, embeddings)
print(similarities.shape)
# [3, 3]
```
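Because the model was trained and evaluated with a dot-product objective, retrieval-style ranking can also be done explicitly with `util.dot_score`. A minimal sketch, assuming the same placeholder model id as above and made-up query/chunk strings:

```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("sentence_transformers_model_id")

query = "Pulmonary stenoses, brachytelephalangy, inner ear deafness"
chunks = [
    "Keutel syndrome is an autosomal recessive disorder characterized by ...",
    "Visuospatial dysgnosia is a loss of the sense of 'whereness' ...",
]

# Encode the query and candidate chunks, then rank chunks by raw dot-product score.
query_emb = model.encode(query, convert_to_tensor=True)
chunk_embs = model.encode(chunks, convert_to_tensor=True)
scores = util.dot_score(query_emb, chunk_embs)[0]

for chunk, score in sorted(zip(chunks, scores.tolist()), key=lambda pair: -pair[1]):
    print(f"{score:.3f}  {chunk[:60]}...")
```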

<!--
### Direct Usage (Transformers)

<details><summary>Click to see the direct usage in Transformers</summary>

</details>
-->

<!--
### Downstream Usage (Sentence Transformers)

You can finetune this model on your own dataset.

<details><summary>Click to expand</summary>

</details>
-->

<!--
### Out-of-Scope Use

*List how the model may foreseeably be misused and address what users ought not to do with the model.*
-->

## Evaluation

### Metrics

#### Information Retrieval

* Evaluated with [<code>InformationRetrievalEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.InformationRetrievalEvaluator)

| Metric              | Value     |
|:--------------------|:----------|
| cosine_accuracy@1   | 0.1901    |
| cosine_accuracy@3   | 0.5757    |
| cosine_accuracy@5   | 0.7933    |
| cosine_accuracy@10  | 0.8704    |
| cosine_precision@1  | 0.1901    |
| cosine_precision@3  | 0.1919    |
| cosine_precision@5  | 0.1587    |
| cosine_precision@10 | 0.087     |
| cosine_recall@1     | 0.1901    |
| cosine_recall@3     | 0.5757    |
| cosine_recall@5     | 0.7933    |
| cosine_recall@10    | 0.8704    |
| cosine_ndcg@10      | 0.5266    |
| cosine_mrr@10       | 0.4152    |
| cosine_map@100      | 0.4194    |
| dot_accuracy@1      | 0.189     |
| dot_accuracy@3      | 0.5762    |
| dot_accuracy@5      | 0.7955    |
| dot_accuracy@10     | 0.8711    |
| dot_precision@1     | 0.189     |
| dot_precision@3     | 0.1921    |
| dot_precision@5     | 0.1591    |
| dot_precision@10    | 0.0871    |
| dot_recall@1        | 0.189     |
| dot_recall@3        | 0.5762    |
| dot_recall@5        | 0.7955    |
| dot_recall@10       | 0.8711    |
| dot_ndcg@10         | 0.5266    |
| dot_mrr@10          | 0.415     |
| **dot_map@100**     | **0.419** |

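These figures come from an `InformationRetrievalEvaluator` run over held-out query/chunk pairs. The evaluation corpus itself is not published with the card, so the snippet below is only a sketch of how such an evaluator is typically wired up, with hypothetical ids and texts:

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import InformationRetrievalEvaluator

# Hypothetical evaluation data: map query ids and corpus ids to texts,
# and list which corpus ids are relevant for each query.
queries = {"q1": "Pulmonary stenoses, brachytelephalangy, inner ear deafness"}
corpus = {
    "d1": "Keutel syndrome is an autosomal recessive disorder ...",
    "d2": "Rectal tenesmus is a feeling of incomplete defecation ...",
}
relevant_docs = {"q1": {"d1"}}

evaluator = InformationRetrievalEvaluator(
    queries=queries,
    corpus=corpus,
    relevant_docs=relevant_docs,
    name="dev",
)
model = SentenceTransformer("sentence_transformers_model_id")
results = evaluator(model)  # accuracy/precision/recall/NDCG/MRR/MAP at several cut-offs
print(results)
```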

<!--
## Bias, Risks and Limitations

*What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.*
-->

<!--
### Recommendations

*What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.*
-->

## Training Details

### Training Dataset

#### Unnamed Dataset


* Size: 89,218 training samples
* Columns: <code>queries</code> and <code>chunks</code>
* Approximate statistics based on the first 1000 samples:
  |         | queries | chunks |
  |:--------|:--------|:-------|
  | type    | string  | string |
  | details | <ul><li>min: 7 tokens</li><li>mean: 18.07 tokens</li><li>max: 63 tokens</li></ul> | <ul><li>min: 5 tokens</li><li>mean: 161.59 tokens</li><li>max: 299 tokens</li></ul> |
* Samples:
  | queries | chunks |
  |:--------|:-------|
  | <code>Polyhydramnios, megalencephaly, symptomatic epilepsy</code> | <code>A number sign (#) is used with this entry because of evidence that polyhydramnios, megalencephaly, and symptomatic epilepsy (PMSE) is caused by homozygous mutation in the STRADA gene (608626) on chromosome 17q23.<br><br>Clinical Features</code> |
  | <code>Polyhydramnios, megalencephaly, STRADA gene mutation</code> | <code>A number sign (#) is used with this entry because of evidence that polyhydramnios, megalencephaly, and symptomatic epilepsy (PMSE) is caused by homozygous mutation in the STRADA gene (608626) on chromosome 17q23.<br><br>Clinical Features</code> |
  | <code>Megalencephaly, symptomatic epilepsy, chromosome 17q23</code> | <code>A number sign (#) is used with this entry because of evidence that polyhydramnios, megalencephaly, and symptomatic epilepsy (PMSE) is caused by homozygous mutation in the STRADA gene (608626) on chromosome 17q23.<br><br>Clinical Features</code> |
* Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
  ```json
  {
      "scale": 1,
      "similarity_fct": "dot_score"
  }
  ```
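The parameters above correspond to constructing the loss with in-batch negatives, an unscaled score (`scale=1`), and raw dot-product similarity instead of the default scaled cosine similarity. A minimal sketch of that construction:

```python
from sentence_transformers import SentenceTransformer, util
from sentence_transformers.losses import MultipleNegativesRankingLoss

model = SentenceTransformer("sentence-transformers/multi-qa-mpnet-base-dot-v1")

# Each (query, chunk) pair is scored against all other chunks in the batch as negatives.
loss = MultipleNegativesRankingLoss(model, scale=1, similarity_fct=util.dot_score)
```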

### Evaluation Dataset

#### Unnamed Dataset


* Size: 18,180 evaluation samples
* Columns: <code>queries</code> and <code>chunks</code>
* Approximate statistics based on the first 1000 samples:
  |         | queries | chunks |
  |:--------|:--------|:-------|
  | type    | string  | string |
  | details | <ul><li>min: 6 tokens</li><li>mean: 18.35 tokens</li><li>max: 82 tokens</li></ul> | <ul><li>min: 4 tokens</li><li>mean: 152.55 tokens</li><li>max: 312 tokens</li></ul> |
* Samples:
  | queries | chunks |
  |:--------|:-------|
  | <code>Weight loss, anorexia, fatigue, epigastric pain and discomfort</code> | <code>Undifferentiated carcinoma of stomach is a rare epithelial tumour of the stomach that lacks any features of differentiation beyond an epithelial phenotype. The presenting symptoms are usually vague and nonspecific, such as weight loss, anorexia, fatigue, epigastric pain and discomfort, heartburn and nausea, vomiting or hematemesis. Patients may also be asymptomatic. Ascites, jaundice, intestinal obstruction and peripheral lymphadenopathy indicate advanced stages and metastatic spread.</code> |
  | <code>Heartburn, nausea, vomiting, hematemesis</code> | <code>Undifferentiated carcinoma of stomach is a rare epithelial tumour of the stomach that lacks any features of differentiation beyond an epithelial phenotype. The presenting symptoms are usually vague and nonspecific, such as weight loss, anorexia, fatigue, epigastric pain and discomfort, heartburn and nausea, vomiting or hematemesis. Patients may also be asymptomatic. Ascites, jaundice, intestinal obstruction and peripheral lymphadenopathy indicate advanced stages and metastatic spread.</code> |
  | <code>Ascites, jaundice, intestinal obstruction, peripheral lymphadenopathy</code> | <code>Undifferentiated carcinoma of stomach is a rare epithelial tumour of the stomach that lacks any features of differentiation beyond an epithelial phenotype. The presenting symptoms are usually vague and nonspecific, such as weight loss, anorexia, fatigue, epigastric pain and discomfort, heartburn and nausea, vomiting or hematemesis. Patients may also be asymptomatic. Ascites, jaundice, intestinal obstruction and peripheral lymphadenopathy indicate advanced stages and metastatic spread.</code> |
* Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
  ```json
  {
      "scale": 1,
      "similarity_fct": "dot_score"
  }
  ```

### Training Hyperparameters
#### Non-Default Hyperparameters

- `eval_strategy`: steps
- `per_device_train_batch_size`: 32
- `per_device_eval_batch_size`: 32
- `learning_rate`: 2e-05
- `num_train_epochs`: 50
- `warmup_ratio`: 0.1
- `fp16`: True
- `load_best_model_at_end`: True
- `eval_on_start`: True
- `batch_sampler`: no_duplicates

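The non-default values listed above map directly onto the v3 training arguments. A sketch of how they could be passed to the trainer (the `output_dir` is a placeholder; everything else mirrors the list):

```python
from sentence_transformers import SentenceTransformerTrainingArguments
from sentence_transformers.training_args import BatchSamplers

args = SentenceTransformerTrainingArguments(
    output_dir="outputs",                       # placeholder
    eval_strategy="steps",
    per_device_train_batch_size=32,
    per_device_eval_batch_size=32,
    learning_rate=2e-5,
    num_train_epochs=50,
    warmup_ratio=0.1,
    fp16=True,
    load_best_model_at_end=True,
    eval_on_start=True,
    batch_sampler=BatchSamplers.NO_DUPLICATES,  # "no_duplicates" batch sampler
)
```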
577 |
+
#### All Hyperparameters
|
578 |
+
<details><summary>Click to expand</summary>
|
579 |
+
|
580 |
+
- `overwrite_output_dir`: False
|
581 |
+
- `do_predict`: False
|
582 |
+
- `eval_strategy`: steps
|
583 |
+
- `prediction_loss_only`: True
|
584 |
+
- `per_device_train_batch_size`: 32
|
585 |
+
- `per_device_eval_batch_size`: 32
|
586 |
+
- `per_gpu_train_batch_size`: None
|
587 |
+
- `per_gpu_eval_batch_size`: None
|
588 |
+
- `gradient_accumulation_steps`: 1
|
589 |
+
- `eval_accumulation_steps`: None
|
590 |
+
- `torch_empty_cache_steps`: None
|
591 |
+
- `learning_rate`: 2e-05
|
592 |
+
- `weight_decay`: 0.0
|
593 |
+
- `adam_beta1`: 0.9
|
594 |
+
- `adam_beta2`: 0.999
|
595 |
+
- `adam_epsilon`: 1e-08
|
596 |
+
- `max_grad_norm`: 1.0
|
597 |
+
- `num_train_epochs`: 50
|
598 |
+
- `max_steps`: -1
|
599 |
+
- `lr_scheduler_type`: linear
|
600 |
+
- `lr_scheduler_kwargs`: {}
|
601 |
+
- `warmup_ratio`: 0.1
|
602 |
+
- `warmup_steps`: 0
|
603 |
+
- `log_level`: passive
|
604 |
+
- `log_level_replica`: warning
|
605 |
+
- `log_on_each_node`: True
|
606 |
+
- `logging_nan_inf_filter`: True
|
607 |
+
- `save_safetensors`: True
|
608 |
+
- `save_on_each_node`: False
|
609 |
+
- `save_only_model`: False
|
610 |
+
- `restore_callback_states_from_checkpoint`: False
|
611 |
+
- `no_cuda`: False
|
612 |
+
- `use_cpu`: False
|
613 |
+
- `use_mps_device`: False
|
614 |
+
- `seed`: 42
|
615 |
+
- `data_seed`: None
|
616 |
+
- `jit_mode_eval`: False
|
617 |
+
- `use_ipex`: False
|
618 |
+
- `bf16`: False
|
619 |
+
- `fp16`: True
|
620 |
+
- `fp16_opt_level`: O1
|
621 |
+
- `half_precision_backend`: auto
|
622 |
+
- `bf16_full_eval`: False
|
623 |
+
- `fp16_full_eval`: False
|
624 |
+
- `tf32`: None
|
625 |
+
- `local_rank`: 0
|
626 |
+
- `ddp_backend`: None
|
627 |
+
- `tpu_num_cores`: None
|
628 |
+
- `tpu_metrics_debug`: False
|
629 |
+
- `debug`: []
|
630 |
+
- `dataloader_drop_last`: True
|
631 |
+
- `dataloader_num_workers`: 0
|
632 |
+
- `dataloader_prefetch_factor`: None
|
633 |
+
- `past_index`: -1
|
634 |
+
- `disable_tqdm`: False
|
635 |
+
- `remove_unused_columns`: True
|
636 |
+
- `label_names`: None
|
637 |
+
- `load_best_model_at_end`: True
|
638 |
+
- `ignore_data_skip`: False
|
639 |
+
- `fsdp`: []
|
640 |
+
- `fsdp_min_num_params`: 0
|
641 |
+
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
|
642 |
+
- `fsdp_transformer_layer_cls_to_wrap`: None
|
643 |
+
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
|
644 |
+
- `deepspeed`: None
|
645 |
+
- `label_smoothing_factor`: 0.0
|
646 |
+
- `optim`: adamw_torch
|
647 |
+
- `optim_args`: None
|
648 |
+
- `adafactor`: False
|
649 |
+
- `group_by_length`: False
|
650 |
+
- `length_column_name`: length
|
651 |
+
- `ddp_find_unused_parameters`: None
|
652 |
+
- `ddp_bucket_cap_mb`: None
|
653 |
+
- `ddp_broadcast_buffers`: False
|
654 |
+
- `dataloader_pin_memory`: True
|
655 |
+
- `dataloader_persistent_workers`: False
|
656 |
+
- `skip_memory_metrics`: True
|
657 |
+
- `use_legacy_prediction_loop`: False
|
658 |
+
- `push_to_hub`: False
|
659 |
+
- `resume_from_checkpoint`: None
|
660 |
+
- `hub_model_id`: None
|
661 |
+
- `hub_strategy`: every_save
|
662 |
+
- `hub_private_repo`: False
|
663 |
+
- `hub_always_push`: False
|
664 |
+
- `gradient_checkpointing`: False
|
665 |
+
- `gradient_checkpointing_kwargs`: None
|
666 |
+
- `include_inputs_for_metrics`: False
|
667 |
+
- `eval_do_concat_batches`: True
|
668 |
+
- `fp16_backend`: auto
|
669 |
+
- `push_to_hub_model_id`: None
|
670 |
+
- `push_to_hub_organization`: None
|
671 |
+
- `mp_parameters`:
|
672 |
+
- `auto_find_batch_size`: False
|
673 |
+
- `full_determinism`: False
|
674 |
+
- `torchdynamo`: None
|
675 |
+
- `ray_scope`: last
|
676 |
+
- `ddp_timeout`: 1800
|
677 |
+
- `torch_compile`: False
|
678 |
+
- `torch_compile_backend`: None
|
679 |
+
- `torch_compile_mode`: None
|
680 |
+
- `dispatch_batches`: None
|
681 |
+
- `split_batches`: None
|
682 |
+
- `include_tokens_per_second`: False
|
683 |
+
- `include_num_input_tokens_seen`: False
|
684 |
+
- `neftune_noise_alpha`: None
|
685 |
+
- `optim_target_modules`: None
|
686 |
+
- `batch_eval_metrics`: False
|
687 |
+
- `eval_on_start`: True
|
688 |
+
- `eval_use_gather_object`: False
|
689 |
+
- `batch_sampler`: no_duplicates
|
690 |
+
- `multi_dataset_batch_sampler`: proportional
|
691 |
+
|
692 |
+
</details>
|
693 |
+
|
694 |
+
### Training Logs
|
695 |
+
<details><summary>Click to expand</summary>
|
696 |
+
|
697 |
+
| Epoch | Step | Training Loss | loss | dot_map@100 |
|
698 |
+
|:-----------:|:--------:|:-------------:|:----------:|:-----------:|
|
699 |
+
| 0 | 0 | - | 1.1605 | 0.2419 |
|
700 |
+
| 0.1435 | 100 | 1.2016 | - | - |
|
701 |
+
| 0.2869 | 200 | 0.7627 | - | - |
|
702 |
+
| 0.4304 | 300 | 0.5559 | - | - |
|
703 |
+
| 0.5739 | 400 | 0.4541 | - | - |
|
704 |
+
| 0.7174 | 500 | 0.1451 | 0.3600 | 0.3913 |
|
705 |
+
| 0.8608 | 600 | 0.3841 | - | - |
|
706 |
+
| 1.0057 | 700 | 0.3334 | - | - |
|
707 |
+
| 1.1492 | 800 | 0.3898 | - | - |
|
708 |
+
| 1.2927 | 900 | 0.3576 | - | - |
|
709 |
+
| 1.4362 | 1000 | 0.3563 | 0.2719 | 0.4127 |
|
710 |
+
| 1.5796 | 1100 | 0.3186 | - | - |
|
711 |
+
| 1.7231 | 1200 | 0.098 | - | - |
|
712 |
+
| 1.8666 | 1300 | 0.3038 | - | - |
|
713 |
+
| 2.0115 | 1400 | 0.2629 | - | - |
|
714 |
+
| 2.1549 | 1500 | 0.3221 | 0.2579 | 0.4155 |
|
715 |
+
| 2.2984 | 1600 | 0.2936 | - | - |
|
716 |
+
| 2.4419 | 1700 | 0.2867 | - | - |
|
717 |
+
| 2.5854 | 1800 | 0.2614 | - | - |
|
718 |
+
| 2.7288 | 1900 | 0.0716 | - | - |
|
719 |
+
| 2.8723 | 2000 | 0.2655 | 0.2546 | 0.4152 |
|
720 |
+
| 3.0172 | 2100 | 0.2187 | - | - |
|
721 |
+
| 3.1607 | 2200 | 0.2623 | - | - |
|
722 |
+
| 3.3042 | 2300 | 0.2462 | - | - |
|
723 |
+
| 3.4476 | 2400 | 0.2363 | - | - |
|
724 |
+
| 3.5911 | 2500 | 0.213 | 0.2866 | 0.4227 |
|
725 |
+
| 3.7346 | 2600 | 0.0487 | - | - |
|
726 |
+
| 3.8780 | 2700 | 0.222 | - | - |
|
727 |
+
| 4.0230 | 2800 | 0.1851 | - | - |
|
728 |
+
| 4.1664 | 2900 | 0.224 | - | - |
|
729 |
+
| 4.3099 | 3000 | 0.2111 | 0.2562 | 0.4215 |
|
730 |
+
| 4.4534 | 3100 | 0.1984 | - | - |
|
731 |
+
| 4.5968 | 3200 | 0.1707 | - | - |
|
732 |
+
| 4.7403 | 3300 | 0.0331 | - | - |
|
733 |
+
| 4.8838 | 3400 | 0.1896 | - | - |
|
734 |
+
| 5.0287 | 3500 | 0.1548 | 0.2643 | 0.4151 |
|
735 |
+
| 5.1722 | 3600 | 0.19 | - | - |
|
736 |
+
| 5.3156 | 3700 | 0.1656 | - | - |
|
737 |
+
| 5.4591 | 3800 | 0.1626 | - | - |
|
738 |
+
| 5.6026 | 3900 | 0.1303 | - | - |
|
739 |
+
| 5.7461 | 4000 | 0.0264 | 0.2952 | 0.4186 |
|
740 |
+
| 5.8895 | 4100 | 0.1563 | - | - |
|
741 |
+
| 6.0344 | 4200 | 0.1286 | - | - |
|
742 |
+
| 6.1779 | 4300 | 0.1436 | - | - |
|
743 |
+
| 6.3214 | 4400 | 0.1352 | - | - |
|
744 |
+
| 6.4648 | 4500 | 0.1344 | 0.2668 | 0.4218 |
|
745 |
+
| 6.6083 | 4600 | 0.1069 | - | - |
|
746 |
+
| 6.7518 | 4700 | 0.0171 | - | - |
|
747 |
+
| 6.8953 | 4800 | 0.1246 | - | - |
|
748 |
+
| 7.0402 | 4900 | 0.1074 | - | - |
|
749 |
+
| 7.1836 | 5000 | 0.1192 | 0.2837 | 0.4166 |
|
750 |
+
| 7.3271 | 5100 | 0.1176 | - | - |
|
751 |
+
| 7.4706 | 5200 | 0.111 | - | - |
|
752 |
+
| 7.6141 | 5300 | 0.0889 | - | - |
|
753 |
+
| 7.7575 | 5400 | 0.0202 | - | - |
|
754 |
+
| 7.9010 | 5500 | 0.1059 | 0.2797 | 0.4166 |
|
755 |
+
| 8.0459 | 5600 | 0.0854 | - | - |
|
756 |
+
| 8.1894 | 5700 | 0.0989 | - | - |
|
757 |
+
| 8.3329 | 5800 | 0.0963 | - | - |
|
758 |
+
| 8.4763 | 5900 | 0.0967 | - | - |
|
759 |
+
| 8.6198 | 6000 | 0.0635 | 0.2974 | 0.4223 |
|
760 |
+
| 8.7633 | 6100 | 0.0215 | - | - |
|
761 |
+
| 8.9067 | 6200 | 0.0897 | - | - |
|
762 |
+
| 9.0516 | 6300 | 0.0693 | - | - |
|
763 |
+
| 9.1951 | 6400 | 0.0913 | - | - |
|
764 |
+
| 9.3386 | 6500 | 0.0883 | 0.2812 | 0.4171 |
|
765 |
+
| 9.4821 | 6600 | 0.0849 | - | - |
|
766 |
+
| 9.6255 | 6700 | 0.0525 | - | - |
|
767 |
+
| 9.7690 | 6800 | 0.0196 | - | - |
|
768 |
+
| 9.9125 | 6900 | 0.0799 | - | - |
|
769 |
+
| 10.0574 | 7000 | 0.0603 | 0.2899 | 0.4132 |
|
770 |
+
| 10.2009 | 7100 | 0.0816 | - | - |
|
771 |
+
| 10.3443 | 7200 | 0.0771 | - | - |
|
772 |
+
| 10.4878 | 7300 | 0.0746 | - | - |
|
773 |
+
| 10.6313 | 7400 | 0.0373 | - | - |
|
774 |
+
| **10.7747** | **7500** | **0.0181** | **0.3148** | **0.419** |
|
775 |
+
| 10.9182 | 7600 | 0.0702 | - | - |
|
776 |
+
| 11.0631 | 7700 | 0.0531 | - | - |
|
777 |
+
| 11.2066 | 7800 | 0.0671 | - | - |
|
778 |
+
| 11.3501 | 7900 | 0.0742 | - | - |
|
779 |
+
| 11.4935 | 8000 | 0.0728 | 0.2878 | 0.4177 |
|
780 |
+
| 11.6370 | 8100 | 0.0331 | - | - |
|
781 |
+
| 11.7805 | 8200 | 0.0206 | - | - |
|
782 |
+
| 11.9240 | 8300 | 0.0605 | - | - |
|
783 |
+
| 12.0689 | 8400 | 0.05 | - | - |
|
784 |
+
| 12.2123 | 8500 | 0.06 | 0.3169 | 0.4180 |
|
785 |
+
| 12.3558 | 8600 | 0.0613 | - | - |
|
786 |
+
| 12.4993 | 8700 | 0.0649 | - | - |
|
787 |
+
| 12.6428 | 8800 | 0.0257 | - | - |
|
788 |
+
| 12.7862 | 8900 | 0.0184 | - | - |
|
789 |
+
| 12.9297 | 9000 | 0.055 | 0.3107 | 0.4189 |
|
790 |
+
| 13.0746 | 9100 | 0.0417 | - | - |
|
791 |
+
| 13.2181 | 9200 | 0.0537 | - | - |
|
792 |
+
| 13.3615 | 9300 | 0.0558 | - | - |
|
793 |
+
| 13.5050 | 9400 | 0.0619 | - | - |
|
794 |
+
| 13.6485 | 9500 | 0.0217 | 0.3140 | 0.4173 |
|
795 |
+
| 13.7920 | 9600 | 0.0257 | - | - |
|
796 |
+
| 13.9354 | 9700 | 0.0398 | - | - |
|
797 |
+
| 14.0803 | 9800 | 0.041 | - | - |
|
798 |
+
| 14.2238 | 9900 | 0.0451 | - | - |
|
799 |
+
| 14.3673 | 10000 | 0.0485 | 0.3085 | 0.4188 |
|
800 |
+
| 14.5108 | 10100 | 0.0565 | - | - |
|
801 |
+
| 14.6542 | 10200 | 0.0159 | - | - |
|
802 |
+
| 14.7977 | 10300 | 0.0258 | - | - |
|
803 |
+
| 14.9412 | 10400 | 0.0364 | - | - |
|
804 |
+
| 15.0861 | 10500 | 0.0368 | 0.3144 | 0.4163 |
|
805 |
+
| 15.2296 | 10600 | 0.0447 | - | - |
|
806 |
+
| 15.3730 | 10700 | 0.0479 | - | - |
|
807 |
+
| 15.5165 | 10800 | 0.0535 | - | - |
|
808 |
+
| 15.6600 | 10900 | 0.0139 | - | - |
|
809 |
+
| 15.8034 | 11000 | 0.0257 | 0.3149 | 0.4151 |
|
810 |
+
| 15.9469 | 11100 | 0.0324 | - | - |
|
811 |
+
| 16.0918 | 11200 | 0.0374 | - | - |
|
812 |
+
| 16.2353 | 11300 | 0.0339 | - | - |
|
813 |
+
| 16.3788 | 11400 | 0.0423 | - | - |
|
814 |
+
| 16.5222 | 11500 | 0.0512 | 0.3209 | 0.4164 |
|
815 |
+
| 16.6657 | 11600 | 0.0121 | - | - |
|
816 |
+
| 16.8092 | 11700 | 0.0245 | - | - |
|
817 |
+
| 16.9527 | 11800 | 0.0323 | - | - |
|
818 |
+
| 17.0976 | 11900 | 0.0321 | - | - |
|
819 |
+
| 17.2410 | 12000 | 0.034 | 0.3211 | 0.4140 |
|
820 |
+
| 17.3845 | 12100 | 0.0387 | - | - |
|
821 |
+
| 17.5280 | 12200 | 0.0482 | - | - |
|
822 |
+
| 17.6714 | 12300 | 0.0096 | - | - |
|
823 |
+
| 17.8149 | 12400 | 0.0252 | - | - |
|
824 |
+
| 17.9584 | 12500 | 0.0299 | 0.3169 | 0.4170 |
|
825 |
+
| 18.1033 | 12600 | 0.0351 | - | - |
|
826 |
+
| 18.2468 | 12700 | 0.032 | - | - |
|
827 |
+
| 18.3902 | 12800 | 0.0348 | - | - |
|
828 |
+
| 18.5337 | 12900 | 0.0452 | - | - |
|
829 |
+
| 18.6772 | 13000 | 0.0076 | 0.3273 | 0.4158 |
|
830 |
+
| 18.8207 | 13100 | 0.0241 | - | - |
|
831 |
+
| 18.9641 | 13200 | 0.0277 | - | - |
|
832 |
+
| 19.1090 | 13300 | 0.0331 | - | - |
|
833 |
+
| 19.2525 | 13400 | 0.0264 | - | - |
|
834 |
+
| 19.3960 | 13500 | 0.0311 | 0.3272 | 0.4151 |
|
835 |
+
| 19.5395 | 13600 | 0.0437 | - | - |
|
836 |
+
| 19.6829 | 13700 | 0.0049 | - | - |
|
837 |
+
| 19.8264 | 13800 | 0.0263 | - | - |
|
838 |
+
| 19.9699 | 13900 | 0.0231 | - | - |
|
839 |
+
| 20.1148 | 14000 | 0.0303 | 0.3293 | 0.4200 |
|
840 |
+
| 20.2582 | 14100 | 0.0229 | - | - |
|
841 |
+
| 20.4017 | 14200 | 0.032 | - | - |
|
842 |
+
| 20.5452 | 14300 | 0.0395 | - | - |
|
843 |
+
| 20.6887 | 14400 | 0.0045 | - | - |
|
844 |
+
| 20.8321 | 14500 | 0.0244 | 0.3202 | 0.4144 |
|
845 |
+
| 20.9756 | 14600 | 0.0219 | - | - |
|
846 |
+
| 21.1205 | 14700 | 0.0291 | - | - |
|
847 |
+
| 21.2640 | 14800 | 0.0212 | - | - |
|
848 |
+
| 21.4075 | 14900 | 0.029 | - | - |
|
849 |
+
| 21.5509 | 15000 | 0.0357 | 0.3312 | 0.4147 |
|
850 |
+
| 21.6944 | 15100 | 0.0025 | - | - |
|
851 |
+
| 21.8379 | 15200 | 0.0252 | - | - |
|
852 |
+
| 21.9813 | 15300 | 0.0229 | - | - |
|
853 |
+
| 22.1263 | 15400 | 0.0261 | - | - |
|
854 |
+
| 22.2697 | 15500 | 0.0198 | 0.3392 | 0.4123 |
|
855 |
+
| 22.4132 | 15600 | 0.0259 | - | - |
|
856 |
+
| 22.5567 | 15700 | 0.0343 | - | - |
|
857 |
+
| 22.7001 | 15800 | 0.0022 | - | - |
|
858 |
+
| 22.8436 | 15900 | 0.0237 | - | - |
|
859 |
+
| 22.9871 | 16000 | 0.0199 | 0.3346 | 0.4146 |
|
860 |
+
| 23.1320 | 16100 | 0.0263 | - | - |
|
861 |
+
| 23.2755 | 16200 | 0.0173 | - | - |
|
862 |
+
| 23.4189 | 16300 | 0.0276 | - | - |
|
863 |
+
| 23.5624 | 16400 | 0.03 | - | - |
|
864 |
+
| 23.7059 | 16500 | 0.0022 | 0.3430 | 0.4195 |
|
865 |
+
| 23.8494 | 16600 | 0.0253 | - | - |
|
866 |
+
| 23.9928 | 16700 | 0.0182 | - | - |
|
867 |
+
| 24.1377 | 16800 | 0.0216 | - | - |
|
868 |
+
| 24.2812 | 16900 | 0.0194 | - | - |
|
869 |
+
| 24.4247 | 17000 | 0.0242 | 0.3335 | 0.4132 |
|
870 |
+
| 24.5681 | 17100 | 0.0289 | - | - |
|
871 |
+
| 24.7116 | 17200 | 0.0013 | - | - |
|
872 |
+
| 24.8551 | 17300 | 0.0253 | - | - |
|
873 |
+
| 24.9986 | 17400 | 0.0137 | - | - |
|
874 |
+
| 25.1435 | 17500 | 0.0219 | 0.3481 | 0.4118 |
|
875 |
+
| 25.2869 | 17600 | 0.017 | - | - |
|
876 |
+
| 25.4304 | 17700 | 0.0261 | - | - |
|
877 |
+
| 25.5739 | 17800 | 0.0298 | - | - |
|
878 |
+
| 25.7174 | 17900 | 0.0013 | - | - |
|
879 |
+
| 25.8608 | 18000 | 0.0257 | 0.3407 | 0.4160 |
|
880 |
+
| 26.0057 | 18100 | 0.014 | - | - |
|
881 |
+
| 26.1492 | 18200 | 0.0215 | - | - |
|
882 |
+
| 26.2927 | 18300 | 0.0161 | - | - |
|
883 |
+
| 26.4362 | 18400 | 0.0228 | - | - |
|
884 |
+
| 26.5796 | 18500 | 0.0246 | 0.3404 | 0.4131 |
|
885 |
+
| 26.7231 | 18600 | 0.0017 | - | - |
|
886 |
+
| 26.8666 | 18700 | 0.0244 | - | - |
|
887 |
+
| 27.0115 | 18800 | 0.0124 | - | - |
|
888 |
+
| 27.1549 | 18900 | 0.019 | - | - |
|
889 |
+
| 27.2984 | 19000 | 0.0151 | 0.3451 | 0.4139 |
|
890 |
+
| 27.4419 | 19100 | 0.0216 | - | - |
|
891 |
+
| 27.5854 | 19200 | 0.0255 | - | - |
|
892 |
+
| 27.7288 | 19300 | 0.0016 | - | - |
|
893 |
+
| 27.8723 | 19400 | 0.0251 | - | - |
|
894 |
+
| 28.0172 | 19500 | 0.0133 | 0.3416 | 0.4109 |
|
895 |
+
| 28.1607 | 19600 | 0.016 | - | - |
|
896 |
+
| 28.3042 | 19700 | 0.0186 | - | - |
|
897 |
+
| 28.4476 | 19800 | 0.0185 | - | - |
|
898 |
+
| 28.5911 | 19900 | 0.0225 | - | - |
|
899 |
+
| 28.7346 | 20000 | 0.0009 | 0.3463 | 0.4144 |
|
900 |
+
| 28.8780 | 20100 | 0.0249 | - | - |
|
901 |
+
| 29.0230 | 20200 | 0.0132 | - | - |
|
902 |
+
| 29.1664 | 20300 | 0.0145 | - | - |
|
903 |
+
| 29.3099 | 20400 | 0.0174 | - | - |
|
904 |
+
| 29.4534 | 20500 | 0.0172 | 0.3425 | 0.4092 |
|
905 |
+
| 29.5968 | 20600 | 0.0235 | - | - |
|
906 |
+
| 29.7403 | 20700 | 0.0009 | - | - |
|
907 |
+
| 29.8838 | 20800 | 0.0242 | - | - |
|
908 |
+
| 30.0287 | 20900 | 0.0128 | - | - |
|
909 |
+
| 30.1722 | 21000 | 0.0133 | 0.3482 | 0.4131 |
|
910 |
+
| 30.3156 | 21100 | 0.0158 | - | - |
|
911 |
+
| 30.4591 | 21200 | 0.0226 | - | - |
|
912 |
+
| 30.6026 | 21300 | 0.0188 | - | - |
|
913 |
+
| 30.7461 | 21400 | 0.0009 | - | - |
|
914 |
+
| 30.8895 | 21500 | 0.0249 | 0.3483 | 0.4132 |
|
915 |
+
| 31.0344 | 21600 | 0.0116 | - | - |
|
916 |
+
| 31.1779 | 21700 | 0.0117 | - | - |
|
917 |
+
| 31.3214 | 21800 | 0.0162 | - | - |
|
918 |
+
| 31.4648 | 21900 | 0.0184 | - | - |
|
919 |
+
| 31.6083 | 22000 | 0.0178 | 0.3390 | 0.4145 |
|
920 |
+
| 31.7518 | 22100 | 0.0012 | - | - |
|
921 |
+
| 31.8953 | 22200 | 0.0215 | - | - |
|
922 |
+
| 32.0402 | 22300 | 0.014 | - | - |
|
923 |
+
| 32.1836 | 22400 | 0.0105 | - | - |
|
924 |
+
| 32.3271 | 22500 | 0.0131 | 0.3556 | 0.4144 |
|
925 |
+
| 32.4706 | 22600 | 0.0199 | - | - |
|
926 |
+
| 32.6141 | 22700 | 0.0158 | - | - |
|
927 |
+
| 32.7575 | 22800 | 0.0018 | - | - |
|
928 |
+
| 32.9010 | 22900 | 0.0236 | - | - |
|
929 |
+
| 33.0459 | 23000 | 0.0131 | 0.3480 | 0.4136 |
|
930 |
+
| 33.1894 | 23100 | 0.0121 | - | - |
|
931 |
+
| 33.3329 | 23200 | 0.0164 | - | - |
| 33.4763 | 23300 | 0.0209 | - | - |
| 33.6198 | 23400 | 0.0119 | - | - |
| 33.7633 | 23500 | 0.0029 | 0.3575 | 0.4180 |
| 33.9067 | 23600 | 0.0201 | - | - |
| 34.0516 | 23700 | 0.0121 | - | - |
| 34.1951 | 23800 | 0.0109 | - | - |
| 34.3386 | 23900 | 0.0132 | - | - |
| 34.4821 | 24000 | 0.0203 | 0.3446 | 0.4141 |
| 34.6255 | 24100 | 0.0087 | - | - |
| 34.7690 | 24200 | 0.0032 | - | - |
| 34.9125 | 24300 | 0.0182 | - | - |
| 35.0574 | 24400 | 0.0116 | - | - |
| 35.2009 | 24500 | 0.0105 | 0.3587 | 0.4117 |
| 35.3443 | 24600 | 0.018 | - | - |
| 35.4878 | 24700 | 0.0194 | - | - |
| 35.6313 | 24800 | 0.0076 | - | - |
| 35.7747 | 24900 | 0.0029 | - | - |
| 35.9182 | 25000 | 0.0167 | 0.3529 | 0.4156 |
| 36.0631 | 25100 | 0.0105 | - | - |
| 36.2066 | 25200 | 0.0097 | - | - |
| 36.3501 | 25300 | 0.0165 | - | - |
| 36.4935 | 25400 | 0.0187 | - | - |
| 36.6370 | 25500 | 0.0062 | 0.3517 | 0.4173 |
| 36.7805 | 25600 | 0.0034 | - | - |
| 36.9240 | 25700 | 0.0173 | - | - |
| 37.0689 | 25800 | 0.0091 | - | - |
| 37.2123 | 25900 | 0.0093 | - | - |
| 37.3558 | 26000 | 0.0152 | 0.3605 | 0.4147 |
| 37.4993 | 26100 | 0.0193 | - | - |
| 37.6428 | 26200 | 0.0065 | - | - |
| 37.7862 | 26300 | 0.0036 | - | - |
| 37.9297 | 26400 | 0.017 | - | - |
| 38.0746 | 26500 | 0.009 | 0.3627 | 0.4178 |
| 38.2181 | 26600 | 0.0087 | - | - |
| 38.3615 | 26700 | 0.0129 | - | - |
| 38.5050 | 26800 | 0.0199 | - | - |
| 38.6485 | 26900 | 0.0047 | - | - |
| 38.7920 | 27000 | 0.0104 | 0.3535 | 0.4191 |
| 38.9354 | 27100 | 0.0106 | - | - |
| 39.0803 | 27200 | 0.0083 | - | - |
| 39.2238 | 27300 | 0.0091 | - | - |
| 39.3673 | 27400 | 0.0143 | - | - |
| 39.5108 | 27500 | 0.018 | 0.3586 | 0.4137 |
| 39.6542 | 27600 | 0.0055 | - | - |
| 39.7977 | 27700 | 0.0097 | - | - |
| 39.9412 | 27800 | 0.0111 | - | - |
| 40.0861 | 27900 | 0.0091 | - | - |
| 40.2296 | 28000 | 0.009 | 0.3540 | 0.4166 |
| 40.3730 | 28100 | 0.0145 | - | - |
| 40.5165 | 28200 | 0.0165 | - | - |
| 40.6600 | 28300 | 0.0041 | - | - |
| 40.8034 | 28400 | 0.009 | - | - |
| 40.9469 | 28500 | 0.0091 | 0.3541 | 0.4159 |
| 41.0918 | 28600 | 0.0106 | - | - |
| 41.2353 | 28700 | 0.0064 | - | - |
| 41.3788 | 28800 | 0.0125 | - | - |
| 41.5222 | 28900 | 0.0172 | - | - |
| 41.6657 | 29000 | 0.0028 | 0.3550 | 0.4151 |
| 41.8092 | 29100 | 0.0097 | - | - |
| 41.9527 | 29200 | 0.0086 | - | - |
| 42.0976 | 29300 | 0.0099 | - | - |
| 42.2410 | 29400 | 0.0064 | - | - |
| 42.3845 | 29500 | 0.0127 | 0.3619 | 0.4150 |
| 42.5280 | 29600 | 0.0157 | - | - |
| 42.6714 | 29700 | 0.0025 | - | - |
| 42.8149 | 29800 | 0.0095 | - | - |
| 42.9584 | 29900 | 0.0087 | - | - |
| 43.1033 | 30000 | 0.0094 | 0.3591 | 0.4153 |
| 43.2468 | 30100 | 0.007 | - | - |
| 43.3902 | 30200 | 0.0114 | - | - |
| 43.5337 | 30300 | 0.0166 | - | - |
| 43.6772 | 30400 | 0.0023 | - | - |
| 43.8207 | 30500 | 0.01 | 0.3582 | 0.4172 |
| 43.9641 | 30600 | 0.0097 | - | - |
| 44.1090 | 30700 | 0.01 | - | - |
| 44.2525 | 30800 | 0.007 | - | - |
| 44.3960 | 30900 | 0.0106 | - | - |
| 44.5395 | 31000 | 0.0164 | 0.3626 | 0.4151 |
| 44.6829 | 31100 | 0.0017 | - | - |
| 44.8264 | 31200 | 0.0113 | - | - |
| 44.9699 | 31300 | 0.0081 | - | - |
| 45.1148 | 31400 | 0.0095 | - | - |
| 45.2582 | 31500 | 0.0061 | 0.3669 | 0.4152 |
| 45.4017 | 31600 | 0.0111 | - | - |
| 45.5452 | 31700 | 0.0157 | - | - |
| 45.6887 | 31800 | 0.0015 | - | - |
| 45.8321 | 31900 | 0.0109 | - | - |
| 45.9756 | 32000 | 0.0085 | 0.3595 | 0.4139 |
| 46.1205 | 32100 | 0.0096 | - | - |
| 46.2640 | 32200 | 0.0062 | - | - |
| 46.4075 | 32300 | 0.0111 | - | - |
| 46.5509 | 32400 | 0.017 | - | - |
| 46.6944 | 32500 | 0.0013 | 0.3631 | 0.4154 |
| 46.8379 | 32600 | 0.0123 | - | - |
| 46.9813 | 32700 | 0.0076 | - | - |
| 47.1263 | 32800 | 0.0088 | - | - |
| 47.2697 | 32900 | 0.0065 | - | - |
| 47.4132 | 33000 | 0.0116 | 0.3656 | 0.4148 |
| 47.5567 | 33100 | 0.0142 | - | - |
| 47.7001 | 33200 | 0.0009 | - | - |
| 47.8436 | 33300 | 0.0101 | - | - |
| 47.9871 | 33400 | 0.0069 | - | - |
| 48.1320 | 33500 | 0.0087 | 0.3643 | 0.4160 |
| 48.2755 | 33600 | 0.005 | - | - |
| 48.4189 | 33700 | 0.0118 | - | - |
| 48.5624 | 33800 | 0.0147 | - | - |
| 48.7059 | 33900 | 0.0008 | - | - |
| 48.8494 | 34000 | 0.0115 | 0.3632 | 0.4158 |
| 48.9928 | 34100 | 0.006 | - | - |
| 49.1377 | 34200 | 0.0089 | - | - |
| 49.2812 | 34300 | 0.0063 | - | - |
| 49.4247 | 34400 | 0.0126 | - | - |
| 49.5681 | 34500 | 0.0142 | 0.3643 | 0.4157 |
| 49.7116 | 34600 | 0.0008 | - | - |
| 49.8551 | 34700 | 0.0137 | - | - |
| 49.9986 | 34800 | 0.0044 | 0.3148 | 0.4190 |

* The bold row denotes the saved checkpoint.
</details>

### Framework Versions
- Python: 3.11.9
- Sentence Transformers: 3.0.1
- Transformers: 4.43.3
- PyTorch: 2.3.1+cu121
- Accelerate: 0.30.1
- Datasets: 2.19.2
- Tokenizers: 0.19.1

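A quick way to check a local environment against these versions before loading the model (a minimal sketch, not part of the original card):

```python
# Minimal sketch: print installed versions to compare with "Framework Versions" above.
import torch
import transformers
import sentence_transformers

print("Sentence Transformers:", sentence_transformers.__version__)  # card lists 3.0.1
print("Transformers:", transformers.__version__)                    # card lists 4.43.3
print("PyTorch:", torch.__version__)                                # card lists 2.3.1+cu121
```
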
## Citation

### BibTeX

#### Sentence Transformers
```bibtex
@inproceedings{reimers-2019-sentence-bert,
    title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
    author = "Reimers, Nils and Gurevych, Iryna",
    booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
    month = "11",
    year = "2019",
    publisher = "Association for Computational Linguistics",
    url = "https://arxiv.org/abs/1908.10084",
}
```

#### MultipleNegativesRankingLoss
```bibtex
@misc{henderson2017efficient,
    title={Efficient Natural Language Response Suggestion for Smart Reply},
    author={Matthew Henderson and Rami Al-Rfou and Brian Strope and Yun-hsuan Sung and Laszlo Lukacs and Ruiqi Guo and Sanjiv Kumar and Balint Miklos and Ray Kurzweil},
    year={2017},
    eprint={1705.00652},
    archivePrefix={arXiv},
    primaryClass={cs.CL}
}
```

<!--
## Glossary

*Clearly define terms in order to be accessible across audiences.*
-->

<!--
## Model Card Authors

*Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.*
-->

<!--
## Model Card Contact

*Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.*
-->
config.json
ADDED
@@ -0,0 +1,24 @@
{
  "_name_or_path": "sentence-transformers/multi-qa-mpnet-base-dot-v1",
  "architectures": [
    "MPNetModel"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "mpnet",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "relative_attention_num_buckets": 32,
  "torch_dtype": "float32",
  "transformers_version": "4.43.3",
  "vocab_size": 30527
}
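
A hedged aside (not part of the upload): the backbone that this config.json declares can also be loaded on its own with Hugging Face Transformers. `model_dir` below is a hypothetical local folder containing the uploaded files, not a name from this repository.

```python
# Minimal sketch: load the MPNet backbone described by config.json.
# "model_dir" is a placeholder path, not a name from this repository.
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("model_dir")  # MPNetTokenizer (see tokenizer_config.json)
backbone = AutoModel.from_pretrained("model_dir")       # MPNetModel: 12 layers, hidden_size 768

inputs = tokenizer("A short test sentence.", return_tensors="pt")
hidden_states = backbone(**inputs).last_hidden_state    # shape: (1, seq_len, 768)
print(hidden_states.shape)
```
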
config_sentence_transformers.json
ADDED
@@ -0,0 +1,10 @@
{
  "__version__": {
    "sentence_transformers": "3.0.1",
    "transformers": "4.43.3",
    "pytorch": "2.3.1+cu121"
  },
  "prompts": {},
  "default_prompt_name": null,
  "similarity_fn_name": "dot"
}
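
Since `similarity_fn_name` is `"dot"`, `SentenceTransformer.similarity()` scores embedding pairs with the dot product rather than cosine similarity. A minimal sketch, assuming the uploaded files sit in a hypothetical local folder `model_dir`:

```python
# Minimal sketch: dot-product similarity as configured in config_sentence_transformers.json.
# "model_dir" is a placeholder path containing this model's files.
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("model_dir")
embeddings = model.encode([
    "How do I reset my password?",
    "Steps for changing an account password",
])
scores = model.similarity(embeddings[:1], embeddings[1:])  # dot product, per similarity_fn_name
print(scores)
```
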
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b200d6a9cf9cd40c890fbd1e368001ecbce535dc2d15cab248ae261d600cec06
size 437967672
modules.json
ADDED
@@ -0,0 +1,14 @@
[
  {
    "idx": 0,
    "name": "0",
    "path": "",
    "type": "sentence_transformers.models.Transformer"
  },
  {
    "idx": 1,
    "name": "1",
    "path": "1_Pooling",
    "type": "sentence_transformers.models.Pooling"
  }
]
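
modules.json describes a two-stage pipeline: a `Transformer` module at the repository root followed by a `Pooling` module loaded from `1_Pooling`. `SentenceTransformer(...)` assembles this automatically, but as a rough sketch (again using the hypothetical folder `model_dir`) the same pipeline can be built by hand:

```python
# Minimal sketch: assemble the pipeline that modules.json describes.
# "model_dir" is a placeholder local path with the uploaded files.
from sentence_transformers import SentenceTransformer, models

word_embedding_model = models.Transformer("model_dir")      # idx 0: token-level encoder
pooling_model = models.Pooling.load("model_dir/1_Pooling")  # idx 1: pooling settings from 1_Pooling
model = SentenceTransformer(modules=[word_embedding_model, pooling_model])

print(model.encode("hello world").shape)  # (768,)
```
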
sentence_bert_config.json
ADDED
@@ -0,0 +1,4 @@
{
  "max_seq_length": 512,
  "do_lower_case": false
}
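
For context, `max_seq_length: 512` means inputs longer than 512 tokens are truncated at encoding time. A small sketch, assuming `model` was loaded as in the sketches above:

```python
# Minimal sketch: inspect or tighten the sequence-length cap from sentence_bert_config.json.
print(model.max_seq_length)  # 512; longer inputs are truncated
model.max_seq_length = 256   # optional: lower the cap for faster encoding of short texts
```
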
special_tokens_map.json
ADDED
@@ -0,0 +1,51 @@
{
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "cls_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "mask_token": {
    "content": "<mask>",
    "lstrip": true,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<pad>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "sep_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "[UNK]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json
ADDED
The diff for this file is too large to render. See raw diff.
tokenizer_config.json
ADDED
@@ -0,0 +1,72 @@
{
  "added_tokens_decoder": {
    "0": {
      "content": "<s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<pad>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "3": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "104": {
      "content": "[UNK]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "30526": {
      "content": "<mask>",
      "lstrip": true,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": true,
  "cls_token": "<s>",
  "do_lower_case": true,
  "eos_token": "</s>",
  "mask_token": "<mask>",
  "max_length": 250,
  "model_max_length": 512,
  "pad_to_multiple_of": null,
  "pad_token": "<pad>",
  "pad_token_type_id": 0,
  "padding_side": "right",
  "sep_token": "</s>",
  "stride": 0,
  "strip_accents": null,
  "tokenize_chinese_chars": true,
  "tokenizer_class": "MPNetTokenizer",
  "truncation_side": "right",
  "truncation_strategy": "longest_first",
  "unk_token": "[UNK]"
}
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f1af92278d7ba86efaea01f144fd976c7562e1da5cea8c92e07b25963350f618
size 5624
vocab.txt
ADDED
The diff for this file is too large to render. See raw diff.