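# Convert the original ResNet-34 cancer-classification checkpoint into three
# redistributable formats: a plain PyTorch state dict, a safetensors file,
# and a TorchScript module.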
from safetensors.torch import save_file
import torch
from torchvision.models import resnet34

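# Original checkpoint; the filename appears to record the training settings
# and the validation accuracy of the run that produced it.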
model_path = "RESNET_34_cancer_350px_lr_1e-2_decay_5_jitter_val6slides_harder_tcga_none_0403_0204_0.9826153355179645_16.t7"

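# The checkpoint is a pickled dict whose "model" entry holds the network,
# presumably wrapped in (Distributed)DataParallel, hence the ".module" below.
# Note: PyTorch >= 2.6 defaults torch.load to weights_only=True, so loading a
# pickled module there would additionally require weights_only=False.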
orig_model = torch.load(model_path, map_location="cpu")
state_dict = orig_model["model"].module.state_dict()

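# BatchNorm layers gained a "num_batches_tracked" buffer in PyTorch 0.4.1;
# checkpoints written by older versions lack it, so every such buffer in
# torchvision's ResNet-34 has to be added back by hand.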
keys_missing = [
    "bn1.num_batches_tracked",
    "layer1.0.bn1.num_batches_tracked",
    "layer1.0.bn2.num_batches_tracked",
    "layer1.1.bn1.num_batches_tracked",
    "layer1.1.bn2.num_batches_tracked",
    "layer1.2.bn1.num_batches_tracked",
    "layer1.2.bn2.num_batches_tracked",
    "layer2.0.bn1.num_batches_tracked",
    "layer2.0.bn2.num_batches_tracked",
    "layer2.0.downsample.1.num_batches_tracked",
    "layer2.1.bn1.num_batches_tracked",
    "layer2.1.bn2.num_batches_tracked",
    "layer2.2.bn1.num_batches_tracked",
    "layer2.2.bn2.num_batches_tracked",
    "layer2.3.bn1.num_batches_tracked",
    "layer2.3.bn2.num_batches_tracked",
    "layer3.0.bn1.num_batches_tracked",
    "layer3.0.bn2.num_batches_tracked",
    "layer3.0.downsample.1.num_batches_tracked",
    "layer3.1.bn1.num_batches_tracked",
    "layer3.1.bn2.num_batches_tracked",
    "layer3.2.bn1.num_batches_tracked",
    "layer3.2.bn2.num_batches_tracked",
    "layer3.3.bn1.num_batches_tracked",
    "layer3.3.bn2.num_batches_tracked",
    "layer3.4.bn1.num_batches_tracked",
    "layer3.4.bn2.num_batches_tracked",
    "layer3.5.bn1.num_batches_tracked",
    "layer3.5.bn2.num_batches_tracked",
    "layer4.0.bn1.num_batches_tracked",
    "layer4.0.bn2.num_batches_tracked",
    "layer4.0.downsample.1.num_batches_tracked",
    "layer4.1.bn1.num_batches_tracked",
    "layer4.1.bn2.num_batches_tracked",
    "layer4.2.bn1.num_batches_tracked",
    "layer4.2.bn2.num_batches_tracked",
]

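# Sanity check: none of these buffers should already exist in the old state dict.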
assert not any(
    key in state_dict.keys() for key in keys_missing
), "key present that should be missing"

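# Add the missing buffers as zero counters. The counter only affects BatchNorm
# when momentum=None (cumulative averaging), so zero is a safe default here.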
for key in keys_missing:
    state_dict[key] = torch.as_tensor(0)

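# Write the patched weights in both plain PyTorch and safetensors formats.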
torch.save(state_dict, "pytorch_model.pt")
save_file(state_dict, "model.safetensors")

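# Rebuild the architecture (torchvision ResNet-34 with a 5-class head) and
# confirm that the patched state dict loads without missing or unexpected keys.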
model = resnet34(weights=None)
model.fc = torch.nn.Linear(model.fc.in_features, out_features=5, bias=True)
model.load_state_dict(state_dict)

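# Export a TorchScript version as well; example_inputs lets scripting infer
# the argument types of forward().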
model_jit = torch.jit.script(model, example_inputs=[(torch.ones(1, 3, 224, 224),)])
torch.jit.save(model_jit, "torchscript_model.bin")