{
  "_class_name": "FluxTransformer2DModel",
  "_diffusers_version": "0.30.0.dev0",
  "_name_or_path": "/home/eyal/.cache/huggingface/hub/models--black-forest-labs--FLUX.1-dev/snapshots/01aa605f2c300568dd6515476f04565a954fcb59/transformer",
  "attention_head_dim": 128,
  "axes_dims_rope": [
    16,
    56,
    56
  ],
  "guidance_embeds": false,
  "in_channels": 64,
  "joint_attention_dim": 4096,
  "num_attention_heads": 16,
  "num_layers": 22,
  "num_single_layers": 10,
  "patch_size": 1
}