luqiyi committed on
Commit
225e567
1 Parent(s): 36bed93

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +1 -1
README.md CHANGED
@@ -19,7 +19,7 @@ Here is an example of GPTDynamics:
19
  {"id": 201, "loss_trajectory": [{"step": 1, "loss": 2.661651134490967}, {"step": 2, "loss": 2.3306431770324707}, {"step": 3, "loss": 2.03875732421875}, {"step": 4, "loss": 2.03875732421875}, {"step": 5, "loss": 1.743143916130066}, {"step": 6, "loss": 1.4888012409210205}, {"step": 7, "loss": 1.2995624542236328}, {"step": 8, "loss": 1.154435396194458}, {"step": 9, "loss": 1.0413002967834473}, {"step": 10, "loss": 0.944778323173523}, {"step": 11, "loss": 0.944778323173523}, {"step": 12, "loss": 0.8778289556503296}, {"step": 13, "loss": 0.8155273795127869}, {"step": 14, "loss": 0.7719510793685913}, {"step": 15, "loss": 0.743318498134613}, {"step": 16, "loss": 0.7230879068374634}, {"step": 17, "loss": 0.7014121413230896}, {"step": 18, "loss": 0.6848206520080566}, {"step": 19, "loss": 0.6771003007888794}, {"step": 20, "loss": 0.6715677976608276}, {"step": 21, "loss": 0.6617311239242554}, {"step": 22, "loss": 0.6589836478233337}, {"step": 23, "loss": 0.6560938358306885}, {"step": 24, "loss": 0.6462780833244324}, {"step": 25, "loss": 0.6388468146324158}, {"step": 26, "loss": 0.6293094754219055}, {"step": 27, "loss": 0.6265830993652344}, {"step": 28, "loss": 0.6162292957305908}, {"step": 29, "loss": 0.6083053946495056}, {"step": 30, "loss": 0.6056196093559265}, {"step": 31, "loss": 0.6099292039871216}, {"step": 32, "loss": 0.6157264709472656}, {"step": 33, "loss": 0.6204148530960083}, {"step": 34, "loss": 0.6296204924583435}, {"step": 35, "loss": 0.6403841376304626}, {"step": 36, "loss": 0.652870774269104}, {"step": 37, "loss": 0.6713826656341553}, {"step": 38, "loss": 0.6812401413917542}, {"step": 39, "loss": 0.6874089241027832}, {"step": 40, "loss": 0.6968488097190857}, {"step": 41, "loss": 0.7042997479438782}, {"step": 42, "loss": 0.7002748847007751}, {"step": 43, "loss": 0.6977438926696777}, {"step": 44, "loss": 0.6954635977745056}, {"step": 45, "loss": 0.6966844201087952}, {"step": 46, "loss": 0.695155143737793}, {"step": 47, "loss": 0.6946768760681152}, {"step": 48, 
"loss": 0.6923564076423645}, {"step": 49, "loss": 0.6908800601959229}, {"step": 50, "loss": 0.6927938461303711}, {"step": 51, "loss": 0.6945635676383972}, {"step": 52, "loss": 0.6978188157081604}, {"step": 53, "loss": 0.7048851251602173}, {"step": 54, "loss": 0.7114452123641968}, {"step": 55, "loss": 0.7197942137718201}, {"step": 56, "loss": 0.7273781299591064}, {"step": 57, "loss": 0.7309868931770325}, {"step": 58, "loss": 0.7392228245735168}, {"step": 59, "loss": 0.7478148341178894}, {"step": 60, "loss": 0.7554481029510498}, {"step": 61, "loss": 0.7621862292289734}, {"step": 62, "loss": 0.7660795450210571}, {"step": 63, "loss": 0.7729960083961487}, {"step": 64, "loss": 0.7787044644355774}, {"step": 65, "loss": 0.7865316271781921}, {"step": 66, "loss": 0.7893784046173096}, {"step": 67, "loss": 0.7897890210151672}, {"step": 68, "loss": 0.7911185622215271}, {"step": 69, "loss": 0.7901228666305542}, {"step": 70, "loss": 0.786424994468689}, {"step": 71, "loss": 0.7833899855613708}, {"step": 72, "loss": 0.7841241359710693}, {"step": 73, "loss": 0.7885948419570923}, {"step": 74, "loss": 0.7922827005386353}, {"step": 75, "loss": 0.7996699213981628}, {"step": 76, "loss": 0.8086601495742798}, {"step": 77, "loss": 0.8154159784317017}, {"step": 78, "loss": 0.8235976696014404}, {"step": 79, "loss": 0.8295583724975586}, {"step": 80, "loss": 0.8354929685592651}, {"step": 81, "loss": 0.8384872674942017}, {"step": 82, "loss": 0.8431093692779541}, {"step": 83, "loss": 0.8491389155387878}, {"step": 84, "loss": 0.85647052526474}, {"step": 85, "loss": 0.8622291684150696}, {"step": 86, "loss": 0.8699511289596558}, {"step": 87, "loss": 0.8779494762420654}, {"step": 88, "loss": 0.8841904997825623}, {"step": 89, "loss": 0.8887885808944702}, {"step": 90, "loss": 0.8933967351913452}, {"step": 91, "loss": 0.89702308177948}, {"step": 92, "loss": 0.9009832739830017}, {"step": 93, "loss": 0.9048527479171753}, {"step": 94, "loss": 0.9068139791488647}, {"step": 95, "loss": 0.9083170294761658}, 
{"step": 96, "loss": 0.9079004526138306}]}
20
  }
21
  ```
22
- ## Citation Information
23
  ```
24
  @article{liu2024training,
25
  title={On Training Data Influence of GPT Models},
 
19
  {"id": 201, "loss_trajectory": [{"step": 1, "loss": 2.661651134490967}, {"step": 2, "loss": 2.3306431770324707}, {"step": 3, "loss": 2.03875732421875}, {"step": 4, "loss": 2.03875732421875}, {"step": 5, "loss": 1.743143916130066}, {"step": 6, "loss": 1.4888012409210205}, {"step": 7, "loss": 1.2995624542236328}, {"step": 8, "loss": 1.154435396194458}, {"step": 9, "loss": 1.0413002967834473}, {"step": 10, "loss": 0.944778323173523}, {"step": 11, "loss": 0.944778323173523}, {"step": 12, "loss": 0.8778289556503296}, {"step": 13, "loss": 0.8155273795127869}, {"step": 14, "loss": 0.7719510793685913}, {"step": 15, "loss": 0.743318498134613}, {"step": 16, "loss": 0.7230879068374634}, {"step": 17, "loss": 0.7014121413230896}, {"step": 18, "loss": 0.6848206520080566}, {"step": 19, "loss": 0.6771003007888794}, {"step": 20, "loss": 0.6715677976608276}, {"step": 21, "loss": 0.6617311239242554}, {"step": 22, "loss": 0.6589836478233337}, {"step": 23, "loss": 0.6560938358306885}, {"step": 24, "loss": 0.6462780833244324}, {"step": 25, "loss": 0.6388468146324158}, {"step": 26, "loss": 0.6293094754219055}, {"step": 27, "loss": 0.6265830993652344}, {"step": 28, "loss": 0.6162292957305908}, {"step": 29, "loss": 0.6083053946495056}, {"step": 30, "loss": 0.6056196093559265}, {"step": 31, "loss": 0.6099292039871216}, {"step": 32, "loss": 0.6157264709472656}, {"step": 33, "loss": 0.6204148530960083}, {"step": 34, "loss": 0.6296204924583435}, {"step": 35, "loss": 0.6403841376304626}, {"step": 36, "loss": 0.652870774269104}, {"step": 37, "loss": 0.6713826656341553}, {"step": 38, "loss": 0.6812401413917542}, {"step": 39, "loss": 0.6874089241027832}, {"step": 40, "loss": 0.6968488097190857}, {"step": 41, "loss": 0.7042997479438782}, {"step": 42, "loss": 0.7002748847007751}, {"step": 43, "loss": 0.6977438926696777}, {"step": 44, "loss": 0.6954635977745056}, {"step": 45, "loss": 0.6966844201087952}, {"step": 46, "loss": 0.695155143737793}, {"step": 47, "loss": 0.6946768760681152}, {"step": 48, 
"loss": 0.6923564076423645}, {"step": 49, "loss": 0.6908800601959229}, {"step": 50, "loss": 0.6927938461303711}, {"step": 51, "loss": 0.6945635676383972}, {"step": 52, "loss": 0.6978188157081604}, {"step": 53, "loss": 0.7048851251602173}, {"step": 54, "loss": 0.7114452123641968}, {"step": 55, "loss": 0.7197942137718201}, {"step": 56, "loss": 0.7273781299591064}, {"step": 57, "loss": 0.7309868931770325}, {"step": 58, "loss": 0.7392228245735168}, {"step": 59, "loss": 0.7478148341178894}, {"step": 60, "loss": 0.7554481029510498}, {"step": 61, "loss": 0.7621862292289734}, {"step": 62, "loss": 0.7660795450210571}, {"step": 63, "loss": 0.7729960083961487}, {"step": 64, "loss": 0.7787044644355774}, {"step": 65, "loss": 0.7865316271781921}, {"step": 66, "loss": 0.7893784046173096}, {"step": 67, "loss": 0.7897890210151672}, {"step": 68, "loss": 0.7911185622215271}, {"step": 69, "loss": 0.7901228666305542}, {"step": 70, "loss": 0.786424994468689}, {"step": 71, "loss": 0.7833899855613708}, {"step": 72, "loss": 0.7841241359710693}, {"step": 73, "loss": 0.7885948419570923}, {"step": 74, "loss": 0.7922827005386353}, {"step": 75, "loss": 0.7996699213981628}, {"step": 76, "loss": 0.8086601495742798}, {"step": 77, "loss": 0.8154159784317017}, {"step": 78, "loss": 0.8235976696014404}, {"step": 79, "loss": 0.8295583724975586}, {"step": 80, "loss": 0.8354929685592651}, {"step": 81, "loss": 0.8384872674942017}, {"step": 82, "loss": 0.8431093692779541}, {"step": 83, "loss": 0.8491389155387878}, {"step": 84, "loss": 0.85647052526474}, {"step": 85, "loss": 0.8622291684150696}, {"step": 86, "loss": 0.8699511289596558}, {"step": 87, "loss": 0.8779494762420654}, {"step": 88, "loss": 0.8841904997825623}, {"step": 89, "loss": 0.8887885808944702}, {"step": 90, "loss": 0.8933967351913452}, {"step": 91, "loss": 0.89702308177948}, {"step": 92, "loss": 0.9009832739830017}, {"step": 93, "loss": 0.9048527479171753}, {"step": 94, "loss": 0.9068139791488647}, {"step": 95, "loss": 0.9083170294761658}, 
{"step": 96, "loss": 0.9079004526138306}]}
20
  }
21
  ```
22
+ # Citation Information
23
  ```
24
  @article{liu2024training,
25
  title={On Training Data Influence of GPT Models},