@inproceedings{4162b84c7a8a4a3888eb51f713945e95,
title = "BitNet B1.58 Reloaded: State-of-the-Art Performance Also on Smaller Networks",
abstract = "Recently proposed methods for 1-bit and 1.58-bit quantization-aware training investigate the performance and behavior of these methods in the context of large language models, finding state-of-the-art performance for models with more than 3B parameters. In this work, we investigate 1.58-bit quantization for small language and vision models ranging from 100K to 48M parameters. We introduce a variant of BitNet b1.58 which allows relying on the median rather than the mean in the quantization process. Through extensive experiments, we investigate the performance of 1.58-bit models obtained through quantization-aware training. We further investigate the robustness of 1.58-bit quantization-aware training to changes in the learning rate and to regularization through weight decay, finding different patterns for small language and vision models than previously reported for large language models. Our results show that 1.58-bit quantization-aware training provides state-of-the-art performance for small language models when hidden layer sizes are doubled, and reaches or even surpasses state-of-the-art performance for small vision models of identical size. Ultimately, we demonstrate that 1.58-bit quantization-aware training is a viable and promising approach for training smaller deep learning networks as well, facilitating the deployment of such models in low-resource use cases and encouraging future research.",
keywords = "Deep learning, Green machine learning, Image classification, Quantization-Aware training, Small language models",
author = "Jacob Nielsen and Peter Schneider-Kamp",
year = "2024",
doi = "10.1007/978-3-031-66705-3_20",
language = "English",
isbn = "9783031667046",
series = "Communications in Computer and Information Science",
publisher = "Springer Science+Business Media",
pages = "301--315",
editor = "Ana Fred and Allel Hadjali and Oleg Gusikhin and Carlo Sansone",
booktitle = "Deep Learning Theory and Applications - 5th International Conference, DeLTA 2024, Proceedings",
address = "United States",
note = "5th International Conference on Deep Learning Theory and Applications, DeLTA 2024; Conference date: 10 July 2024 through 11 July 2024",
}
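
For reference, a minimal sketch of the fake-quantization step the abstract alludes to, following the published BitNet b1.58 recipe (scale by the mean absolute weight, then round-and-clip to the ternary set {-1, 0, +1}) with the median swapped in as the abstract's variant suggests. The function name, the epsilon floor on the scale, and the straight-through-estimator detail are illustrative assumptions, not taken from the paper:

import torch

def quantize_ternary(w: torch.Tensor, use_median: bool = False) -> torch.Tensor:
    """Fake-quantize weights to {-1, 0, +1} for quantization-aware training.

    use_median=False follows BitNet b1.58 (absmean scaling); use_median=True
    is a sketch of the median-based variant described in the abstract.
    """
    # Per-tensor scale from the absolute weights; the floor avoids division by zero.
    scale = w.abs().median() if use_median else w.abs().mean()
    scale = scale.clamp(min=1e-5)
    # Round-and-clip to the ternary set, then rescale to the weight's magnitude.
    w_q = (w / scale).round().clamp(-1, 1) * scale
    # Straight-through estimator: the forward pass sees w_q, gradients flow to w.
    return w + (w_q - w).detach()

At inference time only the ternary values and one scale per tensor need to be stored, which is what makes 1.58-bit models attractive for the low-resource deployments the abstract mentions.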