jcaip committed
Commit fff016c (verified)
1 Parent(s): 86e0a4d

Update config.json

Files changed (1)
  1. config.json +1 -47
config.json CHANGED
@@ -26,53 +26,7 @@
  "intermediate_size_mlp": 16384,
  "layer_types": [
  "chunked_attention",
- "chunked_attention",
- "chunked_attention",
- "full_attention",
- "chunked_attention",
- "chunked_attention",
- "chunked_attention",
- "full_attention",
- "chunked_attention",
- "chunked_attention",
- "chunked_attention",
- "full_attention",
- "chunked_attention",
- "chunked_attention",
- "chunked_attention",
- "full_attention",
- "chunked_attention",
- "chunked_attention",
- "chunked_attention",
- "full_attention",
- "chunked_attention",
- "chunked_attention",
- "chunked_attention",
- "full_attention",
- "chunked_attention",
- "chunked_attention",
- "chunked_attention",
- "full_attention",
- "chunked_attention",
- "chunked_attention",
- "chunked_attention",
- "full_attention",
- "chunked_attention",
- "chunked_attention",
- "chunked_attention",
- "full_attention",
- "chunked_attention",
- "chunked_attention",
- "chunked_attention",
- "full_attention",
- "chunked_attention",
- "chunked_attention",
- "chunked_attention",
- "full_attention",
- "chunked_attention",
- "chunked_attention",
- "chunked_attention",
- "full_attention"
+ "chunked_attention"
  ],
  "max_position_embeddings": 10485760,
  "model_type": "llama4_text",