276U 320P
- anydoor/image_log/train/conditioning_gs-000000_e-000000_b-000000.png +2 -2
- anydoor/image_log/train/control_gs-000000_e-000000_b-000000.png +2 -2
- anydoor/image_log/train/reconstruction_gs-000000_e-000000_b-000000.png +2 -2
- anydoor/image_log/train/samples_cfg_scale_9.00_gs-000000_e-000000_b-000000.png +2 -2
- anydoor/run_train_anydoor.py +1 -1
- strip_state.py +2 -2
anydoor/image_log/train/conditioning_gs-000000_e-000000_b-000000.png
CHANGED (Git LFS image; old and new versions stored as LFS pointers)
anydoor/image_log/train/control_gs-000000_e-000000_b-000000.png
CHANGED (Git LFS image; old and new versions stored as LFS pointers)
anydoor/image_log/train/reconstruction_gs-000000_e-000000_b-000000.png
CHANGED (Git LFS image; old and new versions stored as LFS pointers)
anydoor/image_log/train/samples_cfg_scale_9.00_gs-000000_e-000000_b-000000.png
CHANGED (Git LFS image; old and new versions stored as LFS pointers)
anydoor/run_train_anydoor.py
CHANGED
@@ -42,7 +42,7 @@ if save_memory:
 # accumulate_grad_batches=1
 
 # Configs
-resume_path = '/workspace/train-wefadoor-master/anydoor/
+resume_path = '/workspace/train-wefadoor-master/anydoor/step_300k_u.ckpt'
 batch_size = 8
 logger_freq = 1000
 learning_rate = 1e-5
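For reference, a minimal sketch of how a stripped checkpoint at a resume_path like this can be loaded. The nn.Linear stand-in model and the strict=False flag are illustrative assumptions; the actual model construction in run_train_anydoor.py is not shown in this diff:

import torch
import torch.nn as nn

# Hypothetical stand-in for the AnyDoor model that run_train_anydoor.py builds.
model = nn.Linear(4, 4)

resume_path = '/workspace/train-wefadoor-master/anydoor/step_300k_u.ckpt'
ckpt = torch.load(resume_path, map_location='cpu')
# Tolerate both a full Lightning checkpoint and an already-stripped state dict.
state_dict = ckpt.get('state_dict', ckpt)
missing, unexpected = model.load_state_dict(state_dict, strict=False)
print(f'{len(missing)} missing keys, {len(unexpected)} unexpected keys')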
strip_state.py
CHANGED
@@ -1,10 +1,10 @@
 import torch
 
 # Load the checkpoint
-checkpoint = torch.load('/workspace/train-wefadoor-master/anydoor/
+checkpoint = torch.load('/workspace/train-wefadoor-master/anydoor/lightning_logs/version_4/checkpoints/300k_u.ckpt', map_location='cpu')
 
 # Extract the state dictionary
 state_dict = checkpoint['state_dict'] if 'state_dict' in checkpoint else checkpoint
 
 # Save the state dictionary to a new checkpoint
-torch.save(state_dict, '/workspace/train-wefadoor-master/anydoor/
+torch.save(state_dict, '/workspace/train-wefadoor-master/anydoor/lightning_logs/version_4/checkpoints/300k_u.ckpt')
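As committed, strip_state.py loads from and saves to the same file, so the full Lightning checkpoint (optimizer and scheduler state, global step) is overwritten in place by the bare state dict. A non-destructive variant is sketched below; the destination filename step_300k_u.ckpt is an assumption, chosen to match the resume_path set in run_train_anydoor.py above:

import torch

src = '/workspace/train-wefadoor-master/anydoor/lightning_logs/version_4/checkpoints/300k_u.ckpt'
# Assumed output path; mirrors the resume_path used by run_train_anydoor.py.
dst = '/workspace/train-wefadoor-master/anydoor/step_300k_u.ckpt'

checkpoint = torch.load(src, map_location='cpu')
# Lightning nests the weights under 'state_dict'; a bare state dict passes through.
state_dict = checkpoint['state_dict'] if 'state_dict' in checkpoint else checkpoint
torch.save(state_dict, dst)  # leaves the full checkpoint at `src` intact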