elsayedissa committed
Commit: 08cf7b9
Parent(s): a40d74c

Training in progress, step 8000
Files changed:
- last-checkpoint/optimizer.pt (+1 -1)
- last-checkpoint/pytorch_model.bin (+1 -1)
- last-checkpoint/rng_state.pth (+1 -1)
- last-checkpoint/scaler.pt (+1 -1)
- last-checkpoint/scheduler.pt (+1 -1)
- last-checkpoint/trainer_state.json (+252 -3)
- pytorch_model.bin (+1 -1)
- runs/Dec26_19-57-19_gpu07.cyverse.org/events.out.tfevents.1672109970.gpu07.cyverse.org.126369.0 (+2 -2)
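These "Training in progress, step N" commits are characteristic of the transformers Trainer pushing each saved checkpoint to the Hub during training. A minimal sketch of training arguments that would produce this cadence; every value below is an illustrative assumption except max_steps and the 25-step logging interval, which appear in trainer_state.json further down:

# Sketch: Trainer setup that yields periodic "Training in progress, step N"
# commits. Values are assumptions unless noted; not read from this repository.
from transformers import Seq2SeqTrainingArguments

training_args = Seq2SeqTrainingArguments(
    output_dir="whisper-large-v2-spanish",  # assumed working/repo directory
    max_steps=25000,              # matches "max_steps" in trainer_state.json
    logging_steps=25,             # matches the 25-step log entries below
    save_steps=1000,              # assumed checkpoint cadence
    evaluation_strategy="steps",  # assumed; produces the eval_* log entries
    eval_steps=1000,              # assumed eval cadence (evals at 7000, 8000)
    push_to_hub=True,             # each checkpoint save becomes a Hub commit
)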
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:63d424da125909272adbcd6febb158fb464d384fcc7052b7279506ac6f5969ac
 size 12347192855
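All of the binary files in this commit are tracked with Git LFS, so each diff only touches the three-line pointer file (version, oid, size); the ~12 GB optimizer blob itself lives in LFS storage, addressed by its sha256. A minimal sketch for checking a downloaded blob against its pointer, with hypothetical local paths:

# Sketch: parse a Git LFS pointer file and verify a downloaded blob against
# its recorded sha256 oid and byte size. Paths are hypothetical examples.
import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_path):
    """Read the key/value lines of a Git LFS pointer file into a dict."""
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields  # {"version": ..., "oid": "sha256:...", "size": "..."}

def verify_blob(pointer_path, blob_path):
    """Return True if blob_path matches the pointer's oid and size."""
    fields = parse_lfs_pointer(pointer_path)
    blob = Path(blob_path)
    digest = hashlib.sha256()
    with blob.open("rb") as f:  # stream in chunks: these files can be many GB
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return (fields["oid"] == "sha256:" + digest.hexdigest()
            and int(fields["size"]) == blob.stat().st_size)

# Usage with hypothetical paths:
# verify_blob("optimizer.pt.pointer", "optimizer.pt")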
last-checkpoint/pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:00f04d4ca24866b389dc2df4457f672ba5912421795c884ce9a09b3caf5317e1
 size 6173655480
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:fbfadcfc0728788eb8464114731524e59e0eeea3de78fb600f90df25108582a6
 size 14575
last-checkpoint/scaler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:2033db9ad36b6f8a091d260383726ce676844e1562b9251b94e781e66324da99
 size 557
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:72ca6c784ba72f382b1bcb047ab8308017dc36fd5ba0484d4a1d02017086a52b
 size 627
last-checkpoint/trainer_state.json CHANGED
@@ -1,8 +1,8 @@
 {
 "best_metric": 0.11536111854119403,
 "best_model_checkpoint": "/storage/elsayedissa/whisper-large-v2-spanish/checkpoint-1000",
-"epoch": 0.
-"global_step":
+"epoch": 0.27769099934048386,
+"global_step": 8000,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -1749,11 +1749,260 @@
 "eval_steps_per_second": 0.063,
 "eval_wer": 0.10483351660871203,
 "step": 7000
+},
+{
+"epoch": 0.24,
+"learning_rate": 7.339591836734694e-06,
+"loss": 0.2032,
+"step": 7025
+},
+{
+"epoch": 0.24,
+"learning_rate": 7.329387755102041e-06,
+"loss": 0.1564,
+"step": 7050
+},
+{
+"epoch": 0.25,
+"learning_rate": 7.319183673469389e-06,
+"loss": 0.1912,
+"step": 7075
+},
+{
+"epoch": 0.25,
+"learning_rate": 7.309387755102041e-06,
+"loss": 0.2008,
+"step": 7100
+},
+{
+"epoch": 0.25,
+"learning_rate": 7.299183673469389e-06,
+"loss": 0.1955,
+"step": 7125
+},
+{
+"epoch": 0.25,
+"learning_rate": 7.288979591836736e-06,
+"loss": 0.1671,
+"step": 7150
+},
+{
+"epoch": 0.25,
+"learning_rate": 7.278775510204082e-06,
+"loss": 0.178,
+"step": 7175
+},
+{
+"epoch": 0.25,
+"learning_rate": 7.268571428571429e-06,
+"loss": 0.1683,
+"step": 7200
+},
+{
+"epoch": 0.25,
+"learning_rate": 7.258367346938776e-06,
+"loss": 0.1977,
+"step": 7225
+},
+{
+"epoch": 0.25,
+"learning_rate": 7.248163265306123e-06,
+"loss": 0.159,
+"step": 7250
+},
+{
+"epoch": 0.25,
+"learning_rate": 7.23795918367347e-06,
+"loss": 0.205,
+"step": 7275
+},
+{
+"epoch": 0.25,
+"learning_rate": 7.2277551020408164e-06,
+"loss": 0.1717,
+"step": 7300
+},
+{
+"epoch": 0.25,
+"learning_rate": 7.217551020408163e-06,
+"loss": 0.1921,
+"step": 7325
+},
+{
+"epoch": 0.26,
+"learning_rate": 7.207346938775511e-06,
+"loss": 0.1541,
+"step": 7350
+},
+{
+"epoch": 0.26,
+"learning_rate": 7.197142857142858e-06,
+"loss": 0.189,
+"step": 7375
+},
+{
+"epoch": 0.26,
+"learning_rate": 7.186938775510205e-06,
+"loss": 0.1533,
+"step": 7400
+},
+{
+"epoch": 0.26,
+"learning_rate": 7.176734693877551e-06,
+"loss": 0.2029,
+"step": 7425
+},
+{
+"epoch": 0.26,
+"learning_rate": 7.166530612244899e-06,
+"loss": 0.1711,
+"step": 7450
+},
+{
+"epoch": 0.26,
+"learning_rate": 7.1563265306122455e-06,
+"loss": 0.1906,
+"step": 7475
+},
+{
+"epoch": 0.26,
+"learning_rate": 7.146122448979592e-06,
+"loss": 0.1598,
+"step": 7500
+},
+{
+"epoch": 0.26,
+"learning_rate": 7.135918367346939e-06,
+"loss": 0.1761,
+"step": 7525
+},
+{
+"epoch": 0.26,
+"learning_rate": 7.125714285714286e-06,
+"loss": 0.1641,
+"step": 7550
+},
+{
+"epoch": 0.26,
+"learning_rate": 7.115510204081634e-06,
+"loss": 0.1971,
+"step": 7575
+},
+{
+"epoch": 0.26,
+"learning_rate": 7.10530612244898e-06,
+"loss": 0.138,
+"step": 7600
+},
+{
+"epoch": 0.26,
+"learning_rate": 7.095102040816327e-06,
+"loss": 0.1919,
+"step": 7625
+},
+{
+"epoch": 0.27,
+"learning_rate": 7.0848979591836745e-06,
+"loss": 0.1698,
+"step": 7650
+},
+{
+"epoch": 0.27,
+"learning_rate": 7.074693877551021e-06,
+"loss": 0.1832,
+"step": 7675
+},
+{
+"epoch": 0.27,
+"learning_rate": 7.064489795918368e-06,
+"loss": 0.1643,
+"step": 7700
+},
+{
+"epoch": 0.27,
+"learning_rate": 7.054285714285714e-06,
+"loss": 0.1919,
+"step": 7725
+},
+{
+"epoch": 0.27,
+"learning_rate": 7.044081632653061e-06,
+"loss": 0.1675,
+"step": 7750
+},
+{
+"epoch": 0.27,
+"learning_rate": 7.0338775510204085e-06,
+"loss": 0.1848,
+"step": 7775
+},
+{
+"epoch": 0.27,
+"learning_rate": 7.023673469387756e-06,
+"loss": 0.1878,
+"step": 7800
+},
+{
+"epoch": 0.27,
+"learning_rate": 7.013469387755103e-06,
+"loss": 0.2086,
+"step": 7825
+},
+{
+"epoch": 0.27,
+"learning_rate": 7.00326530612245e-06,
+"loss": 0.1665,
+"step": 7850
+},
+{
+"epoch": 0.27,
+"learning_rate": 6.993061224489797e-06,
+"loss": 0.2067,
+"step": 7875
+},
+{
+"epoch": 0.27,
+"learning_rate": 6.982857142857143e-06,
+"loss": 0.1687,
+"step": 7900
+},
+{
+"epoch": 0.28,
+"learning_rate": 6.97265306122449e-06,
+"loss": 0.1953,
+"step": 7925
+},
+{
+"epoch": 0.28,
+"learning_rate": 6.962448979591837e-06,
+"loss": 0.1844,
+"step": 7950
+},
+{
+"epoch": 0.28,
+"learning_rate": 6.952244897959184e-06,
+"loss": 0.1728,
+"step": 7975
+},
+{
+"epoch": 0.28,
+"learning_rate": 6.942040816326531e-06,
+"loss": 0.1722,
+"step": 8000
+},
+{
+"epoch": 0.28,
+"eval_loss": 0.18894970417022705,
+"eval_runtime": 31292.0854,
+"eval_samples_per_second": 0.992,
+"eval_steps_per_second": 0.062,
+"eval_wer": 0.10428864386126549,
+"step": 8000
 }
 ],
 "max_steps": 25000,
 "num_train_epochs": 1,
-"total_flos": 2.
+"total_flos": 2.717658906624e+20,
 "trial_name": null,
 "trial_params": null
 }
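The entries appended above follow the Trainer's log_history schema: one object per logging step carrying loss and learning_rate, plus an evaluation object carrying eval_wer and related metrics every 1000 steps. A minimal sketch for reading the curves back out of a local copy of this file:

# Sketch: extract training loss and eval WER from a trainer_state.json like
# the one diffed above. The file path assumes a local checkout.
import json

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

train_log = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
eval_log = [(e["step"], e["eval_wer"]) for e in state["log_history"] if "eval_wer" in e]

print("global_step:", state["global_step"], "epoch:", state["epoch"])
print("last train loss:", train_log[-1])  # (8000, 0.1722) at this commit
print("last eval WER:  ", eval_log[-1])   # (8000, 0.10428...) at this commit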
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:00f04d4ca24866b389dc2df4457f672ba5912421795c884ce9a09b3caf5317e1
 size 6173655480
runs/Dec26_19-57-19_gpu07.cyverse.org/events.out.tfevents.1672109970.gpu07.cyverse.org.126369.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:076eb2bada8a4a94fa9d189e543bfecad75db25a7e323324e16521b9cba64240
+size 57173
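The tfevents file carries the same scalars in TensorBoard's binary format. A minimal sketch for reading it back, assuming the runs/ directory has been fetched locally and the tensorboard package is installed; the scalar tag names are the Trainer's usual ones and are assumptions here:

# Sketch: read scalars back out of the updated event file. Assumes a local
# copy of the runs/ directory and an installed tensorboard package; the tag
# name "train/loss" is an assumption, so check acc.Tags() first.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Dec26_19-57-19_gpu07.cyverse.org")
acc.Reload()                  # parse all events from disk

print(acc.Tags()["scalars"])  # list the scalar tags actually present
for event in acc.Scalars("train/loss"):
    print(event.step, event.value)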