parquet-converter committed on
Commit 52a3adc
1 Parent(s): e6f928c

Update parquet files

README.md DELETED
@@ -1,3 +0,0 @@
- ---
- license: openrail
- ---

RobertLau--decoder_json/json-train.parquet ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ea8e59d8e490c4fac123c171371183ea4a5eacbaa7ffac9a9b4c894a5948fbad
+ size 38836

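The added file is a Git LFS pointer to the auto-converted Parquet shard (38,836 bytes), not the data itself. As a minimal sketch, and assuming the dataset repository id is RobertLau/decoder_json with the converted files on the usual refs/convert/parquet revision (both inferred from the directory name above, not stated in the commit), the shard could be fetched and inspected like this:

```python
# Sketch: download the converted Parquet shard and peek at it.
# Assumptions (not stated in this commit): repo id "RobertLau/decoder_json",
# dataset repo type, and the parquet-converter bot's "refs/convert/parquet" revision.
from huggingface_hub import hf_hub_download
import pandas as pd

path = hf_hub_download(
    repo_id="RobertLau/decoder_json",
    filename="RobertLau--decoder_json/json-train.parquet",
    repo_type="dataset",
    revision="refs/convert/parquet",
)

df = pd.read_parquet(path)
print(df.shape)   # column layout depends on the source JSON, so inspect before use
print(df.head())
```
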
decoder_config.json DELETED
@@ -1,119 +0,0 @@
- {
-     "seed": 1,
-
-     "decoder": {
-         "unets": [
-             {
-                 "dim": 256,
-                 "cond_dim": 512,
-                 "image_embed_dim": 768,
-                 "text_embed_dim": 768,
-                 "cond_on_text_encodings": false,
-                 "channels": 3,
-                 "dim_mults": [1, 2, 3, 4],
-                 "num_resnet_blocks": 4,
-                 "attn_heads": 8,
-                 "attn_dim_head": 64,
-                 "sparse_attn": true,
-                 "memory_efficient": true,
-                 "self_attn": [false, true, true, true]
-             },
-             {
-                 "dim": 256,
-                 "cond_dim": 256,
-                 "image_embed_dim": 768,
-                 "cond_on_text_encodings": false,
-                 "channels": 3,
-                 "dim_mults": [1, 2, 3, 4],
-                 "num_resnet_blocks": 3,
-                 "sparse_attn": false,
-                 "memory_efficient": false,
-                 "self_attn": [false, false, false, false]
-             }
-         ],
-         "image_sizes": [64, 256],
-         "channels": 3,
-         "timesteps": 30,
-         "loss_type": "l2",
-         "beta_schedule": ["cosine", "cosine"],
-         "learned_variance": false
-     },
-     "data": {
-         "webdataset_base_url": "pipe:aws s3 cp --quiet s3://s-datasets/laion-aesthetic/data/laion2B-en-aesthetic/{}.tar -",
-         "img_embeddings_url": "s3://s-datasets/laion-aesthetic/ordered_embeddings/",
-         "num_workers": 6,
-         "batch_size": 3,
-         "start_shard": 0,
-         "end_shard": 5247,
-         "shard_width": 5,
-         "index_width": 4,
-         "splits": {
-             "train": 0.75,
-             "val": 0.15,
-             "test": 0.1
-         },
-         "shuffle_train": false,
-         "resample_train": true,
-         "preprocessing": {
-             "RandomResizedCrop": {
-                 "size": [256, 256],
-                 "scale": [0.75, 1.0],
-                 "ratio": [1.0, 1.0]
-             },
-             "ToTensor": true
-         }
-     },
-     "train": {
-         "epochs": 1000,
-         "lr": 1.2e-4,
-         "wd": 0.0,
-         "max_grad_norm": 0.5,
-         "save_every_n_samples": 400000,
-         "n_sample_images": 10,
-         "device": "cuda:0",
-         "epoch_samples": 2000000,
-         "validation_samples": 60000,
-         "use_ema": true,
-         "ema_beta": 0.9999,
-         "save_all": false,
-         "save_latest": true,
-         "save_best": true,
-         "unet_training_mask": [false, true]
-     },
-     "evaluate": {
-         "n_evaluation_samples": 30,
-         "FID": {
-             "feature": 64
-         },
-         "LPIPS": {
-             "net_type": "vgg",
-             "reduction": "mean"
-         }
-     },
-     "tracker": {
-         "data_path": "/fsx/aidan/upsamplers/.tracker-upsampling",
-         "overwrite_data_path": true,
-
-         "log": {
-             "log_type": "wandb",
-
-             "wandb_entity": "veldrovive",
-             "wandb_project": "upsamplers",
-
-             "verbose": true
-         },
-
-         "save": [{
-             "save_to": "wandb"
-         },{
-             "save_to": "huggingface",
-             "huggingface_repo": "Veldrovive/upsamplers",
-
-             "save_latest_to": "latest.pth",
-             "save_best_to": "best.pth",
-             "save_meta_to": "./",
-
-             "save_type": "model"
-         }]
-     }
- }
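
The deleted decoder_config.json reads like a training configuration for a two-stage diffusion decoder (a 64 px base U-Net and a 256 px upsampler, with unet_training_mask indicating that only the second U-Net is trained), apparently in the style of dalle2-pytorch's train_decoder configs. As a minimal sketch, assuming a local copy of the file with exactly the contents shown above, its key settings can be summarised with the standard library alone:

```python
import json

# Sketch: read a local copy of the (now deleted) decoder_config.json,
# assuming it contains exactly the JSON shown in the diff above.
with open("decoder_config.json") as f:
    cfg = json.load(f)

unets = cfg["decoder"]["unets"]
print(f"{len(unets)} U-Nets, target image sizes {cfg['decoder']['image_sizes']}")

# unet_training_mask marks which U-Net is actually optimised:
# [false, true] means only the 256 px upsampler U-Net is trained.
for i, (unet, trained) in enumerate(zip(unets, cfg["train"]["unet_training_mask"])):
    print(f"unet {i}: dim={unet['dim']}, dim_mults={unet['dim_mults']}, trained={trained}")

print("lr:", cfg["train"]["lr"], "| EMA beta:", cfg["train"]["ema_beta"])
```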