aframson committed
Commit: 427fa15
Parent(s): 12d4029
Files changed (1): config.json (+4 -4)

config.json CHANGED
@@ -8,17 +8,17 @@
     "AutoModelForCausalLM": "modelLM.OBILanguageModel",
     "AutoModelForQuestionAnswering": "modelLM.OBILanguageModel"
   },
-  "batch_size": 5,
-  "block_size": 20,
+  "batch_size": 6,
+  "block_size": 6,
   "device": "cpu",
   "eval_interval": 100,
   "hidden_dropout_prob": 0.1,
   "hidden_size": 4,
   "learning_rate": 0.001,
-  "max_iters": 900,
+  "max_iters": 500,
   "num_attention_heads": 2,
   "num_hidden_layers": 2,
   "torch_dtype": "float32",
   "transformers_version": "4.30.2",
-  "vocab_size": 142211
+  "vocab_size": 5000
 }
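
For context, a minimal sketch of how a config with this auto_map would typically be loaded through transformers. The repo id aframson/obi-model is a hypothetical placeholder, and trust_remote_code=True is assumed because the config maps the auto classes to modelLM.OBILanguageModel, a custom class shipped inside the repo:

from transformers import AutoConfig, AutoModelForCausalLM

# Hypothetical repo id; substitute the actual Hub path of this model.
repo_id = "aframson/obi-model"

# trust_remote_code=True lets transformers import modelLM.OBILanguageModel
# from the repo, as declared in the config's auto_map.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)

# After this commit, the config should carry the updated hyperparameters.
assert config.vocab_size == 5000   # was 142211
assert config.block_size == 6      # was 20
assert config.max_iters == 500     # was 900

model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

Note that with hidden_size 4, the token embedding table dominates the parameter count, so shrinking vocab_size from 142211 to 5000 cuts that table from roughly 569k to 20k parameters.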