maxin-cn committed
Commit 4f16b68 · verified · 1 Parent(s): cee3c11

Update transformer/config.json

Files changed (1)
  1. transformer/config.json +0 -7
transformer/config.json CHANGED
@@ -4,24 +4,17 @@
  "activation_fn": "gelu-approximate",
  "attention_bias": true,
  "attention_head_dim": 72,
- "attention_type": "default",
  "caption_channels": 4096,
  "cross_attention_dim": 1152,
- "double_self_attention": false,
  "dropout": 0.0,
  "in_channels": 4,
  "norm_elementwise_affine": false,
  "norm_eps": 1e-06,
- "norm_num_groups": 32,
  "norm_type": "ada_norm_single",
  "num_attention_heads": 16,
  "num_embeds_ada_norm": 1000,
  "num_layers": 28,
- "num_vector_embeds": null,
- "only_cross_attention": false,
  "out_channels": 8,
  "patch_size": 2,
  "sample_size": 64,
- "upcast_attention": false,
- "use_linear_projection": false
  }
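
For anyone applying this cleanup to a local copy of the file, the sketch below (Python; the relative path `transformer/config.json` and the script itself are illustrative, not part of the commit) checks that the seven keys deleted here are really gone. It also spot-checks one relation visible in the remaining values: num_attention_heads × attention_head_dim = 16 × 72 = 1152, which equals cross_attention_dim.

```python
import json

# Hedged sanity check for the trimmed config; the path is an assumption
# about where a local clone keeps the file.
with open("transformer/config.json") as f:
    cfg = json.load(f)

# The seven keys removed by this commit (+0 -7).
removed = {
    "attention_type",
    "double_self_attention",
    "norm_num_groups",
    "num_vector_embeds",
    "only_cross_attention",
    "upcast_attention",
    "use_linear_projection",
}
stale = removed & cfg.keys()
assert not stale, f"stale keys still present: {sorted(stale)}"

# 16 heads * 72 dims per head = 1152, matching cross_attention_dim.
inner_dim = cfg["num_attention_heads"] * cfg["attention_head_dim"]
assert inner_dim == cfg["cross_attention_dim"] == 1152

print(f"config OK: {len(cfg)} keys, inner_dim={inner_dim}")
```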
 