MatteoOmenetti
committed on
Update README.md
Browse files
README.md
CHANGED
@@ -29,4 +29,34 @@ The **Code Formula Model** processes an image of a code snippet or formula at 12
|
|
29 |
version = {1.0.0},
|
30 |
year = {2024}
|
31 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
32 |
```
|
|
|
29 |
version = {1.0.0},
|
30 |
year = {2024}
|
31 |
}
|
32 |
+
|
33 |
+
@misc{kirillov2023segany,
  title         = {Segment Anything},
  author        = {Kirillov, Alexander and Mintun, Eric and Ravi, Nikhila and Mao, Hanzi and Rolland, Chloe and Gustafson, Laura and Xiao, Tete and Whitehead, Spencer and Berg, Alexander C. and Lo, Wan-Yen and Doll{\'a}r, Piotr and Girshick, Ross},
  year          = {2023},
  eprint        = {2304.02643},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV},
}
|
39 |
+
|
40 |
+
@misc{zhang2022opt,
  title         = {{OPT}: Open Pre-trained Transformer Language Models},
  author        = {Susan Zhang and Stephen Roller and Naman Goyal and Mikel Artetxe and Moya Chen and Shuohui Chen and Christopher Dewan and Mona Diab and Xian Li and Xi Victoria Lin and Todor Mihaylov and Myle Ott and Sam Shleifer and Kurt Shuster and Daniel Simig and Punit Singh Koura and Anjali Sridhar and Tianlu Wang and Luke Zettlemoyer},
  year          = {2022},
  eprint        = {2205.01068},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CL},
}
|
48 |
+
|
49 |
+
@misc{wei2023vary,
  title         = {Vary: Scaling up the Vision Vocabulary for Large Vision-Language Models},
  author        = {Wei, Haoran and Kong, Lingyu and Chen, Jinyue and Zhao, Liang and Ge, Zheng and Yang, Jinrong and Sun, Jianjian and Han, Chunrui and Zhang, Xiangyu},
  year          = {2023},
  eprint        = {2312.06109},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV},
}
|
55 |
+
|
56 |
+
@misc{wei2024small,
  title         = {Small Language Model Meets with Reinforced Vision Vocabulary},
  author        = {Wei, Haoran and Kong, Lingyu and Chen, Jinyue and Zhao, Liang and Ge, Zheng and Yu, En and Sun, Jianjian and Han, Chunrui and Zhang, Xiangyu},
  year          = {2024},
  eprint        = {2401.12503},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV},
}
|
62 |
```
|