Commit 0463357 · Update README.md
Parent: ce7704b
README.md (changed):
@@ -30,8 +30,19 @@ model = AutoModelForMaskedLM.from_pretrained("jannikskytt/MeDa-Bert")
 ### Citing
 
 ```
-
-
-
-
+@inproceedings{pedersen-etal-2023-meda,
+    title = "{M}e{D}a-{BERT}: A medical {D}anish pretrained transformer model",
+    author = "Pedersen, Jannik and
+      Laursen, Martin and
+      Vinholt, Pernille and
+      Savarimuthu, Thiusius Rajeeth",
+    booktitle = "Proceedings of the 24th Nordic Conference on Computational Linguistics (NoDaLiDa)",
+    month = may,
+    year = "2023",
+    address = "T{\'o}rshavn, Faroe Islands",
+    publisher = "University of Tartu Library",
+    url = "https://aclanthology.org/2023.nodalida-1.31",
+    pages = "301--307",
+    abstract = "This paper introduces a medical Danish BERT-based language model (MeDa-BERT) and medical Danish word embeddings. The word embeddings and MeDa-BERT were pretrained on a new medical Danish corpus consisting of 133M tokens from medical Danish books and text from the internet. The models showed improved performance over general-domain models on medical Danish classification tasks. The medical word embeddings and MeDa-BERT are publicly available.",
+}
 ```
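For reference, the hunk header above carries the README's own usage line, `model = AutoModelForMaskedLM.from_pretrained("jannikskytt/MeDa-Bert")`. Below is a minimal sketch of that usage with the Hugging Face `transformers` library; the Danish example sentence and the assumption that a matching tokenizer is published under the same model ID are illustrative additions, not part of this commit.

```python
# Minimal sketch: load MeDa-BERT for masked-language-modeling.
# Assumes a tokenizer is available under the same model ID (not stated in the commit).
from transformers import AutoModelForMaskedLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("jannikskytt/MeDa-Bert")
model = AutoModelForMaskedLM.from_pretrained("jannikskytt/MeDa-Bert")

# Fill a masked token in a (hypothetical) Danish sentence.
text = f"Patienten fik ordineret {tokenizer.mask_token}."
inputs = tokenizer(text, return_tensors="pt")
outputs = model(**inputs)

# Decode the highest-scoring prediction at the masked position.
mask_index = (inputs.input_ids == tokenizer.mask_token_id).nonzero(as_tuple=True)[1]
predicted_id = outputs.logits[0, mask_index].argmax(dim=-1)
print(tokenizer.decode(predicted_id))
```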