lhallee committed on
Commit
ffd710b
·
verified ·
1 Parent(s): c5845c6

Upload E1ForMaskedLM

Browse files
Files changed (1) hide show
  1. config.json +8 -1
config.json CHANGED
@@ -2,6 +2,13 @@
2
  "architectures": [
3
  "E1ForMaskedLM"
4
  ],
 
 
 
 
 
 
 
5
  "bos_token_id": 1,
6
  "clip_qkv": 8,
7
  "dtype": "bfloat16",
@@ -26,7 +33,7 @@
26
  "rope_theta_global": 500000.0,
27
  "rope_theta_within_seq": 10000.0,
28
  "tie_word_embeddings": false,
29
- "transformers_version": "4.57.1",
30
  "use_cache": true,
31
  "vocab_size": 34
32
  }
 
2
  "architectures": [
3
  "E1ForMaskedLM"
4
  ],
5
+ "auto_map": {
6
+ "AutoConfig": "modeling_e1.E1Config",
7
+ "AutoModel": "modeling_e1.E1Model",
8
+ "AutoModelForMaskedLM": "modeling_e1.E1ForMaskedLM",
9
+ "AutoModelForSequenceClassification": "modeling_e1.E1ForSequenceClassification",
10
+ "AutoModelForTokenClassification": "modeling_e1.E1ForTokenClassification"
11
+ },
12
  "bos_token_id": 1,
13
  "clip_qkv": 8,
14
  "dtype": "bfloat16",
 
33
  "rope_theta_global": 500000.0,
34
  "rope_theta_within_seq": 10000.0,
35
  "tie_word_embeddings": false,
36
+ "transformers_version": "4.57.3",
37
  "use_cache": true,
38
  "vocab_size": 34
39
  }