README.md
+2 −2 (2 additions & 2 deletions)
@@ -333,7 +333,7 @@ If you find our LLaMA-Adapter code and paper useful, please kindly cite:
 ```bash
 @article{zhang2023llamaadapter,
 title = {LLaMA-Adapter: Efficient Fine-tuning of Language Models with Zero-init Attention},
-author={Zhang, Renrui and Han, Jiaming and Zhou, Aojun and Hu, Xiangfei and Yan, Shilin and Lu, Pan and Li, Hongsheng and Gao, Peng and Qiao Yu},
+author={Zhang, Renrui and Han, Jiaming and Zhou, Aojun and Hu, Xiangfei and Yan, Shilin and Lu, Pan and Li, Hongsheng and Gao, Peng and Qiao, Yu},
 journal={arXiv preprint arXiv:2303.16199},
 year={2023}
 }
@@ -343,7 +343,7 @@ If you find our LLaMA-Adapter V2 code and paper useful, please kindly cite:
 ```bash
 @article{gao2023llamaadapterv2,
 title = {LLaMA-Adapter V2: Parameter-Efficient Visual Instruction Model},
-author={Gao, Peng and Han, Jiaming and Zhang, Renrui and Lin, Ziyi and Geng, Shijie and Zhou, Aojun and Zhang, Wei and Lu, Pan and He, Conghui and Yue, Xiangyu and Li, Hongsheng and Qiao Yu},
+author={Gao, Peng and Han, Jiaming and Zhang, Renrui and Lin, Ziyi and Geng, Shijie and Zhou, Aojun and Zhang, Wei and Lu, Pan and He, Conghui and Yue, Xiangyu and Li, Hongsheng and Qiao, Yu},