Charles1973 committed
Commit 4d7a2e1 · 1 Parent(s): c8f45a3
Files changed (1)
  1. app.py +12 -12
app.py CHANGED
@@ -8,18 +8,18 @@
  # %%time

  # Run fine-tuning
- python ./transformers/examples/pytorch/language-modeling/run_clm.py \
- --model_name_or_path=rinna/japanese-gpt2-medium \
- --train_file=natsumesouseki.txt \
- --validation_file=natsumesouseki.txt \
- --do_train \
- --do_eval \
- --num_train_epochs=3 \
- --save_steps=5000 \
- --save_total_limit=3 \
- --per_device_train_batch_size=1 \
- --per_device_eval_batch_size=1 \
- --output_dir=output/
+ # python ./transformers/examples/pytorch/language-modeling/run_clm.py \
+ # --model_name_or_path=rinna/japanese-gpt2-medium \
+ # --train_file=natsumesouseki.txt \
+ # --validation_file=natsumesouseki.txt \
+ # --do_train \
+ # --do_eval \
+ # --num_train_epochs=3 \
+ # --save_steps=5000 \
+ # --save_total_limit=3 \
+ # --per_device_train_batch_size=1 \
+ # --per_device_eval_batch_size=1 \
+ # --output_dir=output/

  from transformers import T5Tokenizer, AutoModelForCausalLM
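Note: the lines disabled above are a raw shell command, which is not valid Python syntax, so leaving them active would prevent app.py from running at all; commenting them out avoids that. If the fine-tuning step were meant to stay callable from inside the script rather than being disabled, one alternative (not what this commit does) would be to launch run_clm.py as a child process. A minimal sketch under that assumption, reusing the exact arguments shown in the diff:

    import subprocess

    # Hypothetical alternative to the commented-out shell line: run the
    # Transformers CLM fine-tuning script from within app.py.
    # The script path, model name, data file, and flags simply mirror the
    # command in the diff; they are assumptions about the local setup.
    cmd = [
        "python", "./transformers/examples/pytorch/language-modeling/run_clm.py",
        "--model_name_or_path=rinna/japanese-gpt2-medium",
        "--train_file=natsumesouseki.txt",
        "--validation_file=natsumesouseki.txt",
        "--do_train",
        "--do_eval",
        "--num_train_epochs=3",
        "--save_steps=5000",
        "--save_total_limit=3",
        "--per_device_train_batch_size=1",
        "--per_device_eval_batch_size=1",
        "--output_dir=output/",
    ]

    # check=True raises CalledProcessError if the fine-tuning run exits non-zero.
    subprocess.run(cmd, check=True)

The import of T5Tokenizer and AutoModelForCausalLM that follows in the diff suggests the script goes on to load a tokenizer and model; if the fine-tuned weights were the goal, from_pretrained would presumably be pointed at the output/ directory produced by this run.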