from pathlib import Path
import click
import torch
from sklearn.metrics import f1_score
from torch.utils import data
from utils import *
from model import createDeepLabv3
from trainer import train_model
@click.command()
@click.option("--data-directory",
              required=True,
              help="Specify the data directory.")
@click.option("--exp_directory",
              required=True,
              help="Specify the experiment directory.")
@click.option(
    "--epochs",
    default=25,
    type=int,
    help="Specify the number of epochs you want to run the experiment for.")
@click.option("--batch-size",
              default=4,
              type=int,
              help="Specify the batch size for the dataloader.")
def main(data_directory, exp_directory, epochs, batch_size):
    # Create the DeepLabv3-ResNet101 model, pretrained on a subset of COCO
    # train2017 (the 20 categories that are present in the Pascal VOC dataset)
    model = createDeepLabv3()
    model.train()
    data_directory = Path(data_directory)
    # Create the experiment directory if not present
    exp_directory = Path(exp_directory)
    if not exp_directory.exists():
        exp_directory.mkdir()

    # Specify the loss function
    criterion = torch.nn.MSELoss(reduction='mean')
    # Specify the optimizer with a lower learning rate
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)
    # Specify the evaluation metrics
    metrics = {'f1_score': f1_score, 'iou': iou}

    # Create the dataloader
    dataloaders = get_dataloader_single_folder(
        data_directory, batch_size=batch_size)
    # Train the model
    _ = train_model(model,
                    criterion,
                    dataloaders,
                    optimizer,
                    bpath=exp_directory,
                    metrics=metrics,
                    num_epochs=epochs)

    # Save the trained model
    torch.save(model, exp_directory / 'weights.pt')


if __name__ == "__main__":
    main()
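For reference, a typical invocation of this script looks like the line below. The file and directory names are hypothetical, and the data directory must match whatever layout get_dataloader_single_folder expects:

python main.py --data-directory ./data --exp_directory ./experiment --epochs 25 --batch-size 4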
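createDeepLabv3 is imported from the local model module, which is not shown on this page. Helpers like this are commonly thin wrappers around torchvision's COCO-pretrained DeepLabV3-ResNet101 with the classifier head replaced; the sketch below is an assumption rather than the actual model.py, and the single output channel is a placeholder for a one-class segmentation mask.

from torchvision import models
from torchvision.models.segmentation.deeplabv3 import DeepLabHead


def createDeepLabv3(outputchannels=1):
    # Load DeepLabV3 with a ResNet-101 backbone, pretrained on COCO train2017
    model = models.segmentation.deeplabv3_resnet101(pretrained=True,
                                                    progress=True)
    # The ResNet-101 backbone feeds 2048 channels into the classifier head;
    # replace the head so it predicts `outputchannels` output maps
    model.classifier = DeepLabHead(2048, outputchannels)
    return model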