# Provenance (scraped file-viewer header, preserved as comments):
# repo path: MEIRa/conf/experiment/onto_pseudo_static.yaml
# author: KawshikManikantan
# commit: upload_trial (98e2ea5)
# size: 644 Bytes
# @package _global_
# Experiment config: OntoNotes with pseudo-singletons and a static memory module.
# NOTE(review): the previous header described "a joint model for Ontonotes,
# Litbank, and Preco" with downsampling, but only the `ontonotes` dataset is
# selected in `defaults` and `datasets` below — confirm which description is
# correct before relying on this comment.
# Model name in CRAC 2021: longdoc^S Joint + PS 30K  # TODO confirm vs. paper
defaults:
  - override /datasets: ontonotes
  - override /trainer: train.yaml
  - override /model: model.yaml

trainer:
  log_frequency: 500        # log training metrics every 500 steps
  max_evals: 20             # stop after at most 20 evaluation rounds
  eval_per_k_steps: null    # step-based eval cadence disabled
  patience: 10              # early-stopping patience, measured in evals

model:
  doc_encoder:
    # Canonical lowercase boolean (was `True`; yamllint `truthy` rule).
    add_speaker_tokens: true
  memory:
    type: "static"          # static (non-learned-update) memory variant

datasets:
  ontonotes:
    # Pseudo-singleton mentions file; "30" matches the 30K in the model name.
    singleton_file: ontonotes/ment_singletons_longformer_speaker/30.jsonlines