wdavies committed on
Commit
b59e188
verified
1 Parent(s): e7d4609

Upload DistilBertForQuestionAnswering

Browse files
Files changed (2) hide show
  1. config.json +2 -11
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,28 +1,19 @@
1
  {
2
- "_name_or_path": "question_trained",
3
  "activation": "gelu",
4
  "architectures": [
5
- "DistilBertForSequenceClassification"
6
  ],
7
  "attention_dropout": 0.1,
8
  "dim": 768,
9
  "dropout": 0.1,
10
  "hidden_dim": 3072,
11
- "id2label": {
12
- "0": "NEGATIVE",
13
- "1": "POSITIVE"
14
- },
15
  "initializer_range": 0.02,
16
- "label2id": {
17
- "NEGATIVE": 0,
18
- "POSITIVE": 1
19
- },
20
  "max_position_embeddings": 512,
21
  "model_type": "distilbert",
22
  "n_heads": 12,
23
  "n_layers": 6,
24
  "pad_token_id": 0,
25
- "problem_type": "single_label_classification",
26
  "qa_dropout": 0.1,
27
  "seq_classif_dropout": 0.2,
28
  "sinusoidal_pos_embds": false,
 
1
  {
2
+ "_name_or_path": "extract_question_trained",
3
  "activation": "gelu",
4
  "architectures": [
5
+ "DistilBertForQuestionAnswering"
6
  ],
7
  "attention_dropout": 0.1,
8
  "dim": 768,
9
  "dropout": 0.1,
10
  "hidden_dim": 3072,
 
 
 
 
11
  "initializer_range": 0.02,
 
 
 
 
12
  "max_position_embeddings": 512,
13
  "model_type": "distilbert",
14
  "n_heads": 12,
15
  "n_layers": 6,
16
  "pad_token_id": 0,
 
17
  "qa_dropout": 0.1,
18
  "seq_classif_dropout": 0.2,
19
  "sinusoidal_pos_embds": false,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fd211fd255966d9236fbc74abda1272aec42d245a6a8e71c775ffad4bb2c4323
3
- size 267832560
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4446803cadd61e23a75b12e99a0396036fc38d16ff39a9e7cded592266af3abf
3
+ size 265470032