vikp committed
Commit b2c3dad
Parent: c9cbc49

Upload FlashGPTNeoXForCausalLM

Files changed (2)
  1. config.json +4 -4
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,7 +1,7 @@
 {
-  "_name_or_path": "cleaner_model_v3.pth",
+  "_name_or_path": "EleutherAI/pythia-1.4b-deduped",
   "architectures": [
-    "GPTNeoXForCausalLM"
+    "FlashGPTNeoXForCausalLM"
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 0,
@@ -15,8 +15,8 @@
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 2048,
   "model_type": "gpt_neox",
-  "num_attention_heads": 8,
-  "num_hidden_layers": 16,
+  "num_attention_heads": 16,
+  "num_hidden_layers": 24,
   "pad_token_id": 1,
   "rope_scaling": {
     "factor": 4.0,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a75b5234fd44205510c2e89514fb943c936ca1a532fa443078ea362f6d635fa5
-size 4047197149
+oid sha256:b4cff9f2a3a6a3e04bc58f56ebdbf39e2ac88ec28f7c4e8aa19b711b71bc0b11
+size 5658700617
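
pytorch_model.bin is tracked with Git LFS, so only the pointer file changes: the new blob's sha256 oid and its size (5,658,700,617 bytes, roughly 5.3 GB). A quick integrity check of a downloaded copy against the new pointer; the local file path is an assumption:

```python
# Verify a downloaded pytorch_model.bin against the LFS pointer above.
# The oid and size come from the new pointer; the path is an assumption.
import hashlib
from pathlib import Path

EXPECTED_OID = "b4cff9f2a3a6a3e04bc58f56ebdbf39e2ac88ec28f7c4e8aa19b711b71bc0b11"
EXPECTED_SIZE = 5658700617  # bytes

path = Path("pytorch_model.bin")
assert path.stat().st_size == EXPECTED_SIZE, "size mismatch"

sha = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)
assert sha.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")
```
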