forked from Ghadjeres/DeepBach
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathdeepBach.py
95 lines (84 loc) · 2.88 KB
/
deepBach.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
"""
@author: Gaetan Hadjeres
"""
import click
from DatasetManager.chorale_dataset import ChoraleDataset
from DatasetManager.dataset_manager import DatasetManager
from DatasetManager.metadata import FermataMetadata, TickMetadata, KeyMetadata
from DeepBach.model_manager import DeepBach
@click.command()
@click.option('--note_embedding_dim', default=20,
              help='size of the note embeddings')
@click.option('--meta_embedding_dim', default=20,
              help='size of the metadata embeddings')
@click.option('--num_layers', default=2,
              help='number of layers of the LSTMs')
@click.option('--lstm_hidden_size', default=256,
              help='hidden size of the LSTMs')
@click.option('--dropout_lstm', default=0.5,
              help='amount of dropout between LSTM layers')
@click.option('--linear_hidden_size', default=256,
              help='hidden size of the Linear layers')
@click.option('--batch_size', default=256,
              help='training batch size')
@click.option('--num_epochs', default=5,
              help='number of training epochs')
@click.option('--train', is_flag=True,
              help='train the specified model for num_epochs')
@click.option('--num_iterations', default=500,
              help='number of parallel pseudo-Gibbs sampling iterations')
@click.option('--sequence_length_ticks', default=64,
              help='length of the generated chorale (in ticks)')
def main(note_embedding_dim,
         meta_embedding_dim,
         num_layers,
         lstm_hidden_size,
         dropout_lstm,
         linear_hidden_size,
         batch_size,
         num_epochs,
         train,
         num_iterations,
         sequence_length_ticks,
         ):
    """Train (or load) a DeepBach model on the Bach chorales dataset,
    then generate a chorale via parallel pseudo-Gibbs sampling.

    With --train, fits the model for ``num_epochs``; otherwise the
    previously saved weights are loaded via ``deepbach.load()``.
    The generated score is printed as text and opened in the default
    music21 viewer.
    """
    # Dataset: 4-voice Bach chorales, 4 ticks per quarter note, with
    # fermata / tick-position / key metadata attached to each frame.
    dataset_manager = DatasetManager()
    metadatas = [
        FermataMetadata(),
        TickMetadata(subdivision=4),
        KeyMetadata()
    ]
    chorale_dataset_kwargs = {
        'voice_ids': [0, 1, 2, 3],
        'metadatas': metadatas,
        'sequences_size': 8,
        'subdivision': 4
    }
    bach_chorales_dataset: ChoraleDataset = dataset_manager.get_dataset(
        name='bach_chorales',
        **chorale_dataset_kwargs
    )
    dataset = bach_chorales_dataset

    deepbach = DeepBach(
        dataset=dataset,
        note_embedding_dim=note_embedding_dim,
        meta_embedding_dim=meta_embedding_dim,
        num_layers=num_layers,
        lstm_hidden_size=lstm_hidden_size,
        dropout_lstm=dropout_lstm,
        linear_hidden_size=linear_hidden_size
    )

    if train:
        deepbach.train(batch_size=batch_size,
                       num_epochs=num_epochs)
    else:
        deepbach.load()

    # Only move the model to GPU when CUDA is actually available;
    # the original unconditional .cuda() crashed on CPU-only machines.
    import torch
    if torch.cuda.is_available():
        deepbach.cuda()

    print('Generation')
    score, tensor_chorale, tensor_metadata = deepbach.generation(
        num_iterations=num_iterations,
        sequence_length_ticks=sequence_length_ticks,
    )
    score.show('txt')
    score.show()


if __name__ == '__main__':
    main()