Wrong `position_ids` in text encoder fp16 variant
#1, opened by eliphatfs
The `position_ids` buffer stored in `pytorch_model.fp16.bin` is corrupted. For a CLIP text encoder it should simply be the integer range `[[0, 1, ..., 76]]` (i.e. `torch.arange(77)` with shape `(1, 77)`), but the fp16 variant contains garbage values:

```python
>>> import torch
>>> a = torch.load('pytorch_model.fp16.bin')
>>> a['text_model.embeddings.position_ids']
tensor([[ 8444249301319680, 8725724278030336, 9007199254740992,
9288674231451648, 9570149208162304, 9851624184872960,
10133099161583616, 10414574138294272, 10696049115004928,
10977524091715584, 11258999068426240, 11540474045136896,
11821949021847552, 12103423998558208, 12384898975268864,
12666373951979520, 12947848928690176, 13229323905400832,
13510798882111488, 13792273858822144, 14073748835532800,
14355223812243456, 14636698788954112, 14918173765664768,
15199648742375424, 15481123719086080, 15762598695796736,
16044073672507392, 16325548649218048, 16607023625928704,
16888498602639360, 17169973579350016, 17451448556060672,
17732923532771328, 18014398509481984, 18295873486192640,
18577348462903296, 18858823439613952, 19140298416324608,
19421773393035264, 19703248369745920, 19984723346456576,
20266198323167232, 20547673299877888, 20829148276588544,
21110623253299200, 21392098230009856, 1072419661267599360,
-8173729707676034109, 1596812233387381019, -7972154969943076967,
-7403480456281942721, -8181168058474554682, 1342942162511304923,
909329312444713969, -7522248136750953471, 1588526436168139634,
-7784176620914764754, 1325493393941206319, 1759520036330017771,
1247239510812957021, 1772329200857749871, 1404859622153097035,
-7356894681119385560, 894413786494275126, -7143671278802397751,
-7241769942592547601, -8086757745401361447, 1763898182184829285,
-7537731811784324500, 940012558736919008, -7457098875370928110,
923964022557153547, 1495354434782761292, -8761869511253616922,
        -7751670551540885576,  1602881520329496742]])
```
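
For anyone who needs a local workaround until the checkpoint is fixed upstream, here is a minimal sketch that rebuilds the buffer and re-saves the state dict. It assumes the standard CLIP text encoder sequence length of 77 and int64 dtype for the buffer; the output filename is just an example:

```python
import torch

# Load the corrupted fp16 state dict onto the CPU.
sd = torch.load('pytorch_model.fp16.bin', map_location='cpu')

# position_ids is a non-learned buffer: it should be the integer range
# 0..76 with shape (1, 77) and int64 dtype, so rebuild it from scratch
# instead of trusting the stored values.
sd['text_model.embeddings.position_ids'] = torch.arange(77, dtype=torch.long).unsqueeze(0)

# Save under a new name (hypothetical filename) to keep the original intact.
torch.save(sd, 'pytorch_model.fp16.fixed.bin')
```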