Update README.md
README.md (changed)
@@ -47,11 +47,10 @@ InLegalBERT beats LegalBERT as well as all other baselines/variants we have used
 ### Citation
 ```
 @inproceedings{paul-2022-pretraining,
-  doi = {10.48550/ARXIV.2209.06049},
   url = {https://arxiv.org/abs/2209.06049},
   author = {Paul, Shounak and Mandal, Arpan and Goyal, Pawan and Ghosh, Saptarshi},
   title = {Pre-trained Language Models for the Legal Domain: A Case Study on Indian Law},
-  booktitle = {Proceedings of ICAIL 2023}
+  booktitle = {Proceedings of 19th International Conference on Artificial Intelligence and Law - ICAIL 2023},
   year = {2023},
 }
 ```