w2v2-ssl-checkpoints (583 files)
checksums.txt | 56.45kB |
README.md | 1.50kB |
ssl/0gpu/w2v2.ssl.step_000000000000.loss_7444.54.init.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000005000.loss_7188.81.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000010000.loss_7168.55.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000015000.loss_6682.07.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000020000.loss_6497.93.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000025000.loss_6277.31.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000030000.loss_6025.61.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000035000.loss_6039.42.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000040000.loss_5957.96.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000045000.loss_5917.45.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000050000.loss_5837.13.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000055000.loss_5936.26.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000060000.loss_5911.98.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000065000.loss_5874.27.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000070000.loss_5764.21.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000075000.loss_5782.19.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000080000.loss_5732.64.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000085000.loss_5677.32.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000090000.loss_5672.40.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000095000.loss_5656.57.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000100000.loss_5628.42.progress.ckpt | 380.24MB |
ssl/0gpu/w2v2.ssl.step_000000105000.loss_5621.47.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000110000.loss_5648.58.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000115000.loss_5636.35.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000120000.loss_5526.06.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000125000.loss_5616.91.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000130000.loss_5487.45.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000135000.loss_5437.77.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000140000.loss_5409.28.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000145000.loss_5473.95.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000150000.loss_5476.54.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000155000.loss_5467.03.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000160000.loss_5481.95.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000165000.loss_5477.53.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000170000.loss_5473.69.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000175000.loss_5467.65.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000180000.loss_5338.09.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000185000.loss_5399.08.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000190000.loss_5356.92.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000195000.loss_5276.18.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000200000.loss_5269.03.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000205000.loss_5299.19.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000210000.loss_5332.16.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000215000.loss_5357.34.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000220000.loss_5365.65.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000225000.loss_5278.49.progress.ckpt | 380.25MB |
ssl/0gpu/w2v2.ssl.step_000000230000.loss_5246.24.progress.ckpt | 380.25MB |
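The checkpoint filenames encode the training step (zero-padded to 12 digits), the loss at that step, and a tag (`init` for the starting checkpoint, `progress` for intermediate ones). The helper below is a minimal, illustrative sketch for indexing that metadata from a downloaded copy; the regular expression is inferred from the names above and the function name is an assumption, not part of the dataset.

```python
import re
from pathlib import Path

# Naming scheme inferred from the listing above:
#   w2v2.ssl.step_<12-digit step>.loss_<loss>.<tag>.ckpt
CKPT_RE = re.compile(
    r"w2v2\.ssl\.step_(?P<step>\d{12})\.loss_(?P<loss>\d+\.\d+)\.(?P<tag>init|progress)\.ckpt"
)

def index_checkpoints(root):
    """Return sorted (step, loss, tag, path) tuples for checkpoints under `root`."""
    entries = []
    for path in Path(root).rglob("*.ckpt"):
        match = CKPT_RE.fullmatch(path.name)
        if match is None:
            continue  # skip files that do not follow the naming scheme
        entries.append((int(match["step"]), float(match["loss"]), match["tag"], path))
    return sorted(entries)

if __name__ == "__main__":
    for step, loss, tag, path in index_checkpoints("ssl/0gpu"):
        print(f"step={step:>9d}  loss={loss:8.2f}  tag={tag}  {path.name}")
```

Sorting by step makes it easy to plot the loss trajectory across pre-training or to pick the checkpoint closest to a given training budget.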
Type: Dataset
Tags: self-supervised learning, wav2vec 2.0
Bibtex:
@misc{vaessen2024w2v2sslcheckpoints,
  title    = {w2v2-ssl-checkpoints},
  author   = {Nik Vaessen and David A. van Leeuwen},
  year     = {2024},
  url      = {https://github.com/nikvaessen/w2v2-batch-size},
  abstract = {This is a companion dataset to the paper "The Effect of Batch Size on Contrastive Self-Supervised Speech Representation Learning". We provide the progression checkpoints saved during pre-training (every 5k steps) with various batch sizes.},
  keywords = {self-supervised learning, wav2vec 2.0},
  license  = {MIT}
}
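The listing does not document the serialization format of the `.ckpt` files. Assuming they are standard PyTorch-serialized checkpoints produced by the companion w2v2-batch-size code, a quick inspection sketch might look like the following; the key name `state_dict` is an assumption, not a documented field.

```python
import torch

# Assumption: the .ckpt files are regular PyTorch-serialized dictionaries.
# Load on CPU so no GPU is needed just to inspect the contents.
# Note: recent PyTorch versions default to weights_only=True; full checkpoints
# containing optimizer or trainer state may require weights_only=False.
ckpt = torch.load(
    "ssl/0gpu/w2v2.ssl.step_000000230000.loss_5246.24.progress.ckpt",
    map_location="cpu",
)

# Print the top-level keys to discover how the checkpoint is organized
# (e.g. model weights, optimizer state, step counter).
print(type(ckpt), list(ckpt)[:10] if isinstance(ckpt, dict) else None)

# If a state dict is present under a conventional key, summarize its tensors.
state = ckpt.get("state_dict", ckpt) if isinstance(ckpt, dict) else None
if isinstance(state, dict):
    n_params = sum(t.numel() for t in state.values() if torch.is_tensor(t))
    print(f"{len(state)} entries, ~{n_params / 1e6:.1f}M parameters")
```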