update citation
Browse files
README.md
CHANGED
@@ -19,6 +19,8 @@ This model is released under the [Creative Commons 4.0 International License](ht
|
|
19 |
20 | #### Reference
21 |
|
|
|
|
|
22 | ```
23 | @InProceedings{sugimoto_nlp2023_jmedroberta,
24 |     author = "杉本海人 and 壹岐太一 and 知田悠生 and 金沢輝一 and 相澤彰子",
|
@@ -29,6 +31,18 @@ This model is released under the [Creative Commons 4.0 International License](ht
|
|
29 | }
30 | ```
31 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
32 | ## Datasets used for pre-training
33 |
34 | - abstracts (train: 1.6GB (10M sentences), validation: 0.2GB (1.3M sentences))
|
|
19 |
20 | #### Reference
21 |
22 | +Ja:
23 | +
24 | ```
|
25 | @InProceedings{sugimoto_nlp2023_jmedroberta,
26 |     author = "杉本海人 and 壹岐太一 and 知田悠生 and 金沢輝一 and 相澤彰子",
|
|
31 | }
32 | ```
33 |
34 | +En:
35 | +
36 | +```
37 | +@InProceedings{sugimoto_nlp2023_jmedroberta,
38 | +    author = "Sugimoto, Kaito and Iki, Taichi and Chida, Yuki and Kanazawa, Teruhito and Aizawa, Akiko",
39 | +    title = "J{M}ed{R}o{BERT}a: a Japanese Pre-trained Language Model on Academic Articles in Medical Sciences (in Japanese)",
40 | +    booktitle = "Proceedings of the 29th Annual Meeting of the Association for Natural Language Processing",
41 | +    year = "2023",
42 | +    url = "https://www.anlp.jp/proceedings/annual_meeting/2023/pdf_dir/P3-1.pdf"
43 | +}
44 | +```
45 | +
|
46 | ## Datasets used for pre-training
47 |
48 | - abstracts (train: 1.6GB (10M sentences), validation: 0.2GB (1.3M sentences))