Upload 2 files
- bria_scheduler.json.out +11 -0
- bria_transformer.json.out +18 -0
bria_scheduler.json.out
ADDED
@@ -0,0 +1,11 @@
+{
+  "_class_name": "FlowMatchEulerDiscreteScheduler",
+  "_diffusers_version": "0.30.0.dev0",
+  "base_image_seq_len": 256,
+  "base_shift": 0.5,
+  "max_image_seq_len": 4096,
+  "max_shift": 1.15,
+  "num_train_timesteps": 1000,
+  "shift": 1.0,
+  "use_dynamic_shifting": false
+}
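For reference, a minimal sketch (not part of this commit) of how a dump like this can be turned back into a scheduler object via diffusers' FlowMatchEulerDiscreteScheduler.from_config. It assumes the .json.out file is a plain diffusers scheduler config; the local file path is an assumption.

    import json
    from diffusers import FlowMatchEulerDiscreteScheduler

    # Hypothetical local path to the dumped config shown above.
    with open("bria_scheduler.json.out") as f:
        config = json.load(f)

    # from_config ignores the bookkeeping keys (_class_name, _diffusers_version)
    # and builds the scheduler with the remaining parameters.
    scheduler = FlowMatchEulerDiscreteScheduler.from_config(config)
    print(scheduler.config.num_train_timesteps)  # 1000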
bria_transformer.json.out
ADDED
@@ -0,0 +1,18 @@
+{
+  "_class_name": "FluxTransformer2DModel",
+  "_diffusers_version": "0.30.0.dev0",
+  "_name_or_path": "/home/eyal/.cache/huggingface/hub/models--black-forest-labs--FLUX.1-dev/snapshots/01aa605f2c300568dd6515476f04565a954fcb59/transformer",
+  "attention_head_dim": 128,
+  "axes_dims_rope": [
+    16,
+    56,
+    56
+  ],
+  "guidance_embeds": false,
+  "in_channels": 64,
+  "joint_attention_dim": 4096,
+  "num_attention_heads": 16,
+  "num_layers": 22,
+  "num_single_layers": 10,
+  "patch_size": 1
+}
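Similarly, a minimal sketch (not part of this commit) showing how the transformer dump could be inspected and re-instantiated with FluxTransformer2DModel.from_config. The local file path is an assumption, and the resulting module is randomly initialized; actual weights would still have to be loaded separately.

    import json
    from diffusers import FluxTransformer2DModel

    # Hypothetical local path to the dumped config shown above.
    with open("bria_transformer.json.out") as f:
        config = json.load(f)

    # Hidden size implied by the config: 16 heads x 128 dims per head = 2048.
    inner_dim = config["num_attention_heads"] * config["attention_head_dim"]
    print(inner_dim)

    # Builds a randomly initialized model with 22 double-stream and
    # 10 single-stream blocks, matching the architecture in the config.
    transformer = FluxTransformer2DModel.from_config(config)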