nousr committed
Commit
0cfed93
1 Parent(s): c1aa7b1

Upload prior/prior_config.json with huggingface_hub

Files changed (1)
  1. prior/prior_config.json +88 -0
prior/prior_config.json ADDED
@@ -0,0 +1,88 @@
+ {
+   "prior": {
+     "clip": {
+       "make": "openai",
+       "model": "ViT-L/14"
+     },
+     "net": {
+       "dim": 768,
+       "depth": 12,
+       "num_timesteps": 1000,
+       "num_time_embeds": 1,
+       "num_image_embeds": 1,
+       "num_text_embeds": 1,
+       "dim_head": 64,
+       "heads": 12,
+       "ff_mult": 4,
+       "norm_out": true,
+       "attn_dropout": 0.05,
+       "ff_dropout": 0.05,
+       "final_proj": true,
+       "normformer": true,
+       "rotary_emb": true
+     },
+     "image_embed_dim": 768,
+     "image_size": 224,
+     "image_channels": 3,
+     "timesteps": 1000,
+     "sample_timesteps": 64,
+     "cond_drop_prob": 0.0,
+     "loss_type": "l2",
+     "predict_x_start": true,
+     "beta_schedule": "cosine",
+     "condition_on_text_encodings": true
+   },
+   "data": {
+     "batch_size": 256,
+     "num_data_points": 250000000,
+     "eval_every_seconds": 1800,
+     "image_url": "s3://s-datasets/laion-aesthetic/embeddings/img_emb",
+     "meta_url": "s3://s-datasets/laion-aesthetic/embeddings/metadata",
+     "splits": {
+       "train": 0.9,
+       "val": 2.4e-05,
+       "test": 0.09997599999999998
+     }
+   },
+   "train": {
+     "epochs": 5,
+     "lr": 1.1e-4,
+     "wd": 6.02e-2,
+     "max_grad_norm": 0.5,
+     "use_ema": true,
+     "ema_beta": 0.9999,
+     "amp": false,
+     "save_every_seconds": 3600,
+     "eval_timesteps": [64, 250, 1000]
+   },
+   "tracker": {
+     "data_path": ".prior-updates",
+     "overwrite_data_path": true,
+     "log": {
+       "log_type": "wandb",
+       "wandb_entity": "nousr_laion",
+       "wandb_project": "dalle2_diffusion_prior",
+       "wandb_resume": false,
+       "verbose": true
+     },
+     "load": {
+       "resume": false
+     },
+     "save": [
+       {
+         "save_to": "local",
+         "save_type": "checkpoint",
+         "save_latest_to": ".prior-updates/latest_checkpoint.pth",
+         "save_best_to": ".prior-updates/best_checkpoint.pth"
+       },
+       {
+         "save_to": "huggingface",
+         "huggingface_repo": "laion/DALLE2-PyTorch",
+         "save_meta_to": "prior/",
+         "save_latest_to": "prior/latest.pth",
+         "save_best_to": "prior/best.pth",
+         "save_type": "model"
+       }
+     ]
+   }
+ }