{
  "model_params": {
    "decoder": {
      "resblock_dilation_sizes": [[1, 3, 5], [1, 3, 5], [1, 3, 5]],
      "resblock_kernel_sizes": [3, 7, 11],
      "type": "hifigan",
      "upsample_initial_channel": 512,
      "upsample_kernel_sizes": [20, 10, 6, 4],
      "upsample_rates": [10, 5, 3, 2]
    },
    "diffusion": {
      "dist": {
        "estimate_sigma_data": true,
        "mean": -3.0,
        "sigma_data": 0.2,
        "std": 1.0
      },
      "embedding_mask_proba": 0.1,
      "transformer": {
        "head_features": 64,
        "multiplier": 2,
        "num_heads": 8,
        "num_layers": 3
      }
    },
    "dim_in": 64,
    "dropout": 0.2,
    "hidden_dim": 512,
    "max_conv_dim": 512,
    "max_dur": 50,
    "multispeaker": false,
    "n_layer": 3,
    "n_mels": 80,
    "n_token": 178,
    "slm": {
      "hidden": 768,
      "initial_channel": 64,
      "model": "microsoft/wavlm-base-plus",
      "nlayers": 13,
      "sr": 16000
    },
    "style_dim": 128
  },
  "training_config": {
    "epochs": 8,
    "batch_size": 2,
    "max_len": 630,
    "optimizer": {
      "bert_lr": 1e-05,
      "ft_lr": 0.0001,
      "lr": 0.0001
    },
    "loss_params": {
      "diff_epoch": 1,
      "joint_epoch": 110,
      "lambda_F0": 1.0,
      "lambda_ce": 20.0,
      "lambda_diff": 1.0,
      "lambda_dur": 1.0,
      "lambda_gen": 1.0,
      "lambda_mel": 5.0,
      "lambda_mono": 1.0,
      "lambda_norm": 1.0,
      "lambda_s2s": 1.0,
      "lambda_slm": 1.0,
      "lambda_sty": 1.0
    }
  },
  "preprocess_params": {
    "spect_params": {
      "hop_length": 300,
      "n_fft": 2048,
      "win_length": 1200
    },
    "sr": 24000
  },
  "data_params": {
    "OOD_data": "Data/OOD_texts.txt",
    "min_length": 50,
    "root_path": "Data/wavs",
    "train_data": "Data/train_list.txt",
    "val_data": "Data/val_list.txt"
  },
  "model_state": {
    "epoch": 7,
    "iterations": 7856,
    "val_loss": 0.4541005790233612
  },
  "training_metrics": {
    "train_loss": [],
    "val_loss": [56.0, 24.0, 35.0, 43.0, 22.0, 13.0, 21.0, 5.0, 53.0, 45.0, 41.0, 25.0, 3.0, 44.0, 40.0, 18.0, 17.0, 0.0, 9.0, 52.0],
    "dur_loss": [0.502, 0.482, 0.476, 0.466, 0.473, 0.464, 0.464, 0.459, 0.467, 0.47, 0.463, 0.458, 0.498, 0.483, 0.472, 0.473, 0.465, 0.469, 0.459, 0.454],
    "F0_loss": [1.759, 1.681, 1.706, 1.622, 1.67, 1.749, 1.749, 1.72, 1.733, 1.71, 1.702, 1.661, 1.743, 1.683, 1.642, 1.678, 1.703, 1.679, 1.635, 1.733],
    "epochs": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]
  }
}