# Load the Grammarly CoEdIT-large text-editing model (a fine-tuned FLAN-T5)
# from the Hugging Face Hub. The first call downloads and caches the
# tokenizer/weights; subsequent runs load from the local cache.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

MODEL_ID = "grammarly/coedit-large"  # single source of truth for the checkpoint name

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_ID)