{
  "architectures": [
    "CoherenceMomentumModel"
  ],
  "contrastive_loss_weight": 0.85,
  "margin": 0.1,
  "max_len": 600,
  "model_size": "base",
  "momentum_coefficient": 0.9999999,
  "num_negs": 5,
  "num_rank_negs": 50,
  "queue_size": 1000,
  "torch_dtype": "float32",
  "transformers_version": "4.21.1"
}
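
For reference, these hyperparameters can be read back in Python. A minimal sketch, assuming the file above is saved locally as config.json (the path is illustrative, not taken from this repo); CoherenceMomentumModel is not a stock transformers architecture, so this only exposes the fields and does not build the model:

import json
from transformers import PretrainedConfig

# Read the raw JSON directly (path "config.json" is an assumption).
with open("config.json") as f:
    raw = json.load(f)
print(raw["momentum_coefficient"])  # 0.9999999

# Or wrap it in a generic transformers config object to access
# the same values as attributes.
config = PretrainedConfig.from_json_file("config.json")
print(config.num_negs, config.num_rank_negs, config.queue_size)  # 5 50 1000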