Update README.md
README.md
CHANGED
@@ -46,14 +46,13 @@ InLegalBERT beats LegalBERT as well as all other baselines/variants we have used
 
 ### Citation
 ```
-@
+@inproceedings{paul-2022-pretraining,
 doi = {10.48550/ARXIV.2209.06049},
 url = {https://arxiv.org/abs/2209.06049},
 author = {Paul, Shounak and Mandal, Arpan and Goyal, Pawan and Ghosh, Saptarshi},
 title = {Pre-trained Language Models for the Legal Domain: A Case Study on Indian Law},
-
+booktitle = {Proceedings of ICAIL 2023},
 year = {2023},
-copyright = {Creative Commons Attribution 4.0 International}
 }
 ```
 