cicdatopea committed
Commit 7c97270 • 1 Parent(s): ee9e320
Update README.md
README.md CHANGED
@@ -2,6 +2,8 @@
 license: apache-2.0
 datasets:
 - NeelNanda/pile-10k
+base_model:
+- Qwen/Qwen2-7B
 ---
 
 ---
@@ -142,4 +144,4 @@ The license on this model does not constitute legal advice. We are not responsib
 
 @article{cheng2023optimize, title={Optimize weight rounding via signed gradient descent for the quantization of llms}, author={Cheng, Wenhua and Zhang, Weiwei and Shen, Haihao and Cai, Yiyang and He, Xin and Lv, Kaokao and Liu, Yi}, journal={arXiv preprint arXiv:2309.05516}, year={2023} }
 
-[arxiv](https://arxiv.org/abs/2309.05516) [github](https://github.com/intel/auto-round)
+[arxiv](https://arxiv.org/abs/2309.05516) [github](https://github.com/intel/auto-round)
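For reference, applying the frontmatter hunk above would leave the model-card metadata reading roughly as follows. This is a sketch only: it assumes the card opens with a `---` fence immediately before line 2 and that no other fields sit in this range.

```yaml
---
license: apache-2.0
datasets:
- NeelNanda/pile-10k
base_model:
- Qwen/Qwen2-7B
---
```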