mihaimasala commited on
Commit
d3506e7
1 Parent(s): 46be52b

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +23 -8
README.md CHANGED
@@ -542,8 +542,8 @@ Use the code below to get started with the model.
542
  ```python
543
  from transformers import AutoTokenizer, AutoModelForCausalLM
544
 
545
- tokenizer = AutoTokenizer.from_pretrained("OpenLLM-Ro/RoLlama2-7b-Instruct")
546
- model = AutoModelForCausalLM.from_pretrained("OpenLLM-Ro/RoLlama2-7b-Instruct")
547
 
548
  instruction = "Care este cel mai înalt vârf muntos din România?"
549
  chat = [
@@ -578,16 +578,18 @@ print(tokenizer.decode(outputs[0]))
578
  <td>RoLlama2-7b-Instruct-2024-05-14</td><td><center><strong>45.71</strong></center></td><td><center>43.66</center></td><td><center>39.70</center></td><td><center><strong>70.34</strong></center></td><td><center>57.36</center></td><td><center><strong>18.78</strong></center></td><td><center>44.44</center></td>
579
  </tr>
580
  <tr>
581
- <td><em>RoLlama2-7b-Instruct-2024-10-09</em></td><td><center><em>44.50</em></center></td><td><center><em><strong>44.73</strong></em></center></td><td><center><em><strong>40.39</strong></em></center></td><td><center><em>63.67</em></center></td><td><center><em><strong>59.12</strong></em></center></td><td><center><em>13.29</em></center></td><td><center><em><strong>45.78</strong></em></center></td>
 
 
 
582
  </tr>
583
  </tbody>
584
  </table>
585
 
586
-
587
  ## Downstream tasks
588
 
589
 
590
- <table>
591
  <tbody>
592
  <tr>
593
  <td></td>
@@ -621,6 +623,9 @@ print(tokenizer.decode(outputs[0]))
621
  <tr>
622
  <td><em>RoLlama2-7b-Instruct-2024-10-09</em></td><td><center><em><strong>97.66</strong></em></center></td><td><center><em>62.41</em></center></td><td><center><em>97.97</em></center></td><td><center><em>60.89</em></center></td><td><center><em>27.13</em></center></td><td><center><em>19.39</em></center></td><td><center><em><strong>27.63</strong></em></center></td><td><center><em>39.75</em></center></td>
623
  </tr>
 
 
 
624
  </tbody>
625
  </table>
626
 
@@ -659,9 +664,13 @@ print(tokenizer.decode(outputs[0]))
659
  <tr>
660
  <td><em>RoLlama2-7b-Instruct-2024-10-09</em></td><td><center><em><strong>45.71</strong></em></center></td><td><center><em><strong>65.08</strong></em></center></td><td><center><em>59.24</em></center></td><td><center><em>74.25</em></center></td><td><center><em>59.69</em></center></td><td><center><em>57.16</em></center></td><td><center><em><strong>84.66</strong></em></center></td><td><center><em><strong>85.07</strong></em></center></td>
661
  </tr>
 
 
 
662
  </tbody>
663
  </table>
664
 
 
665
  ## Romanian MT-Bench
666
 
667
  <table>
@@ -680,13 +689,15 @@ print(tokenizer.decode(outputs[0]))
680
  <td>RoLlama2-7b-Instruct-2024-05-14</td><td><center>3.86</center></td><td><center>4.67</center></td><td><center>3.04</center></td><td><center><strong>160/160</strong></center></td>
681
  </tr>
682
  <tr>
683
- <td><em>RoLlama2-7b-Instruct-2024-10-09</em></td><td><center><em><strong>4.43</strong></em></center></td><td><center><em><strong>4.92</strong></em></center></td><td><center><em><strong>3.94</strong></em></center></td><td><center><em><strong>160/160</strong></em></center></td>
 
 
 
684
  </tr>
685
  </tbody>
686
  </table>
687
 
688
 
689
-
690
  ## RoCulturaBench
691
 
692
 
@@ -704,7 +715,10 @@ print(tokenizer.decode(outputs[0]))
704
  <td>RoLlama2-7b-Instruct-2024-05-14</td><td><center>3.77</center></td><td><center><strong>100/100</strong></center></td>
705
  </tr>
706
  <tr>
707
- <td><em>RoLlama2-7b-Instruct-2024-10-09</em></td><td><center><em><strong>4.08</strong></em></center></td><td><center><em><strong>100/100</strong></em></center></td>
 
 
 
708
  </tr>
709
  </tbody>
710
  </table>
@@ -712,6 +726,7 @@ print(tokenizer.decode(outputs[0]))
712
 
713
 
714
 
 
715
  ## RoLlama2 Model Family
716
 
717
  | Model | Link |
 
542
  ```python
543
  from transformers import AutoTokenizer, AutoModelForCausalLM
544
 
545
+ tokenizer = AutoTokenizer.from_pretrained("OpenLLM-Ro/RoLlama2-7b-Instruct-2024-10-09")
546
+ model = AutoModelForCausalLM.from_pretrained("OpenLLM-Ro/RoLlama2-7b-Instruct-2024-10-09")
547
 
548
  instruction = "Care este cel mai înalt vârf muntos din România?"
549
  chat = [
 
578
  <td>RoLlama2-7b-Instruct-2024-05-14</td><td><center><strong>45.71</strong></center></td><td><center>43.66</center></td><td><center>39.70</center></td><td><center><strong>70.34</strong></center></td><td><center>57.36</center></td><td><center><strong>18.78</strong></center></td><td><center>44.44</center></td>
579
  </tr>
580
  <tr>
581
+ <td><em>RoLlama2-7b-Instruct-2024-10-09</em></td><td><center><em>44.50</em></center></td><td><center><em><strong>44.73</strong></em></center></td><td><center><em><strong>40.39</strong></em></center></td><td><center><em>63.67</em></center></td><td><center><em>59.12</em></center></td><td><center><em>13.29</em></center></td><td><center><em><strong>45.78</strong></em></center></td>
582
+ </tr>
583
+ <tr>
584
+ <td>RoLlama2-7b-Instruct-DPO-2024-10-09</td><td><center>43.20</center></td><td><center>44.24</center></td><td><center>38.39</center></td><td><center>62.57</center></td><td><center><strong>59.20</strong></center></td><td><center>15.72</center></td><td><center>39.07</center></td>
585
  </tr>
586
  </tbody>
587
  </table>
588
 
 
589
  ## Downstream tasks
590
 
591
 
592
+ <table>
593
  <tbody>
594
  <tr>
595
  <td></td>
 
623
  <tr>
624
  <td><em>RoLlama2-7b-Instruct-2024-10-09</em></td><td><center><em><strong>97.66</strong></em></center></td><td><center><em>62.41</em></center></td><td><center><em>97.97</em></center></td><td><center><em>60.89</em></center></td><td><center><em>27.13</em></center></td><td><center><em>19.39</em></center></td><td><center><em><strong>27.63</strong></em></center></td><td><center><em>39.75</em></center></td>
625
  </tr>
626
+ <tr>
627
+ <td>RoLlama2-7b-Instruct-DPO-2024-10-09</td><td><center>97.31</center></td><td><center>60.56</center></td><td><center>-</center></td><td><center>-</center></td><td><center>26.56</center></td><td><center>21.68</center></td><td><center>-</center></td><td><center>-</center></td>
628
+ </tr>
629
  </tbody>
630
  </table>
631
 
 
664
  <tr>
665
  <td><em>RoLlama2-7b-Instruct-2024-10-09</em></td><td><center><em><strong>45.71</strong></em></center></td><td><center><em><strong>65.08</strong></em></center></td><td><center><em>59.24</em></center></td><td><center><em>74.25</em></center></td><td><center><em>59.69</em></center></td><td><center><em>57.16</em></center></td><td><center><em><strong>84.66</strong></em></center></td><td><center><em><strong>85.07</strong></em></center></td>
666
  </tr>
667
+ <tr>
668
+ <td>RoLlama2-7b-Instruct-DPO-2024-10-09</td><td><center>35.78</center></td><td><center>59.31</center></td><td><center>-</center></td><td><center>-</center></td><td><center>61.22</center></td><td><center>58.41</center></td><td><center>-</center></td><td><center>-</center></td>
669
+ </tr>
670
  </tbody>
671
  </table>
672
 
673
+
674
  ## Romanian MT-Bench
675
 
676
  <table>
 
689
  <td>RoLlama2-7b-Instruct-2024-05-14</td><td><center>3.86</center></td><td><center>4.67</center></td><td><center>3.04</center></td><td><center><strong>160/160</strong></center></td>
690
  </tr>
691
  <tr>
692
+ <td><em>RoLlama2-7b-Instruct-2024-10-09</em></td><td><center><em>4.43</em></center></td><td><center><em>4.92</em></center></td><td><center><em>3.94</em></center></td><td><center><em><strong>160/160</strong></em></center></td>
693
+ </tr>
694
+ <tr>
695
+ <td>RoLlama2-7b-Instruct-DPO-2024-10-09</td><td><center><strong>4.61</strong></center></td><td><center><strong>5.15</strong></center></td><td><center><strong>4.06</strong></center></td><td><center><strong>160/160</strong></center></td>
696
  </tr>
697
  </tbody>
698
  </table>
699
 
700
 
 
701
  ## RoCulturaBench
702
 
703
 
 
715
  <td>RoLlama2-7b-Instruct-2024-05-14</td><td><center>3.77</center></td><td><center><strong>100/100</strong></center></td>
716
  </tr>
717
  <tr>
718
+ <td><em>RoLlama2-7b-Instruct-2024-10-09</em></td><td><center><em>4.08</em></center></td><td><center><em><strong>100/100</strong></em></center></td>
719
+ </tr>
720
+ <tr>
721
+ <td>RoLlama2-7b-Instruct-DPO-2024-10-09</td><td><center><strong>4.80</strong></center></td><td><center><strong>100/100</strong></center></td>
722
  </tr>
723
  </tbody>
724
  </table>
 
726
 
727
 
728
 
729
+
730
  ## RoLlama2 Model Family
731
 
732
  | Model | Link |