---
datasets:
- osunlp/SMolInstruct
language:
- en
base_model:
- mistralai/Mistral-7B-Instruct-v0.3
tags:
- chemistry
library_name: mlx
---
Mistral-7B-Instruct-v0.3 fine-tuned with LoRA (via MLX) on SMolInstruct's property prediction instruction dataset and HoneyBee's instruction dataset.
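
Since the card lists `library_name: mlx`, the model can be loaded with `mlx_lm`. Below is a minimal inference sketch; the repository id is a placeholder for this model's actual Hub path, and the prompt is only an illustrative property-prediction-style query, not the exact SMolInstruct template.

```python
# Minimal sketch, assuming `pip install mlx-lm`.
# "<this-model-repo>" is a placeholder for this model's Hugging Face path.
from mlx_lm import load, generate

model, tokenizer = load("<this-model-repo>")

# Illustrative property-prediction-style prompt (aspirin SMILES); the exact
# instruction format used in training comes from SMolInstruct / HoneyBee.
messages = [
    {"role": "user", "content": "Predict the LogP of CC(=O)OC1=CC=CC=C1C(=O)O."}
]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)

response = generate(model, tokenizer, prompt=prompt, verbose=True)
```
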
**LoRA Config Parameters**

```yaml
train: true
fine_tune_type: lora
seed: 0
num_layers: 8
batch_size: 2
iters: 1000
val_batches: 25
learning_rate: 1e-5
steps_per_report: 10
steps_per_eval: 200
resume_adapter_file: null
adapter_path: "adapters"
save_every: 100
test: false
test_batches: 100
max_seq_length: 2048
grad_checkpoint: false
lora_parameters:
  keys: ["self_attn.q_proj", "self_attn.v_proj"]
  rank: 8
  alpha: 8
  dropout: 0.0
  scale: 20.0
```
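
These parameters follow the YAML config format of `mlx_lm`'s LoRA trainer, so a comparable run can presumably be launched with `mlx_lm.lora --config <file>`. Below is a minimal sketch of attaching the adapters produced by such a run (written to `adapters/` per `adapter_path`) to the base model; it assumes the adapter files are available locally.

```python
# Sketch: load the base model together with the LoRA adapters produced by
# this config (adapter_path: "adapters"); assumes the adapter files exist
# locally. The adapters can also be fused into the base weights with
# mlx_lm's fuse utility.
from mlx_lm import load, generate

model, tokenizer = load(
    "mistralai/Mistral-7B-Instruct-v0.3",
    adapter_path="adapters",
)

response = generate(model, tokenizer, prompt="<your prompt>", verbose=True)
```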