// Magenta.js MusicRNN drum-continuation demo with Tone.js playback.
// music_rnn_generate
// Seed pattern for the drum RNN: a short groove written as compact
// [pitch, startStep] pairs and expanded into Magenta NoteSequence notes.
// Every hit lasts exactly one quantized step (end = start + 1).
let DRUMS = {
  notes: [
    [36, 0], [38, 0], [42, 0], [46, 0],
    [42, 2], [42, 3],
    [42, 4], [50, 4],
    [36, 6], [38, 6], [42, 6], [45, 6],
    [36, 8], [42, 8], [46, 8],
    [42, 10], [48, 10], [50, 10]
  ].map(function(hit) {
    return {
      pitch: hit[0],
      quantizedStartStep: hit[1],
      quantizedEndStep: hit[1] + 1,
      isDrum: true
    };
  }),
  quantizationInfo: {stepsPerQuarter: 4},
  tempos: [{time: 0, qpm: 120}],
  totalQuantizedSteps: 11
};
// MIDI notes for Drums
// 36: Bass Drum 1
// 38: Acoustic Snare
// 42: Closed Hi-Hat
// 45: Low Tom
// 46: Open Hi-Hat
// 48: Hi-Mid Tom
// 50: High Tom
// Map General MIDI drum pitches to 505/707 sample files, then route the
// players to the master output. Keys are sorted numerically; each entry
// is annotated with its GM drum name.
let kit = new Tone.Players({
  "36": "samples/505/kick.mp3",         // Bass Drum 1
  "38": "samples/505/snare.mp3",        // Acoustic Snare
  "42": "samples/505/hh.mp3",           // Closed Hi-Hat
  "45": "samples/707/Tom-707-Low.mp3",  // Low Tom
  "46": "samples/505/hho.mp3",          // Open Hi-Hat
  "48": "samples/707/Tom-707-Mid.mp3",  // Hi-Mid Tom
  "49": "samples/707/Crash-707.mp3",    // Crash Cymbal 1
  "50": "samples/707/Tom-707-Hi.mp3",   // High Tom
  "51": "samples/707/Ride-707.mp3"      // Ride Cymbal 1
});
kit.toMaster();
// Load the pre-trained drum-kit checkpoint into a MusicRNN model.
let music_rnn = new mm.MusicRNN('https://storage.googleapis.com/magentadata/js/checkpoints/music_rnn/drum_kit_rnn');
// Number of quantized steps to generate beyond the seed pattern.
let rnn_steps = 20;
// Sampling temperature: >1 is more random, <1 is more conservative.
let rnn_temperature = 1.5;
// initialize() is asynchronous (it downloads the checkpoint), so the
// generation call must wait for it — the original code fired
// continueSequence immediately and raced the download. Chain the
// promises and surface any failure instead of leaving it unhandled.
music_rnn
  .initialize()
  .then(function() {
    return music_rnn.continueSequence(DRUMS, rnn_steps, rnn_temperature);
  })
  .then(play)
  .catch(function(err) {
    console.error('Drum generation failed:', err);
  });
// Schedule a generated NoteSequence on the Tone.js transport and play it
// through the sampler kit.
// sample: quantized NoteSequence (as returned by MusicRNN.continueSequence).
function play(sample){
  console.log(sample);
  // Derive seconds-per-step from the sequence itself instead of the
  // original hard-coded 120 bpm assumption (0.5 * 0.25); fall back to
  // 120 qpm / 4 steps-per-quarter when the fields are absent.
  let qpm = (sample.tempos && sample.tempos.length) ? sample.tempos[0].qpm : 120;
  let stepsPerQuarter = (sample.quantizationInfo && sample.quantizationInfo.stepsPerQuarter) || 4;
  let secondsPerStep = (60 / qpm) / stepsPerQuarter;
  // Tone.Part expects each event object to carry a 'time' property.
  // Use a scoped loop variable — `for (note of ...)` leaked an implicit
  // global `note`.
  for (let note of sample.notes) {
    note.time = note.quantizedStartStep * secondsPerStep;
  }
  let part = new Tone.Part(function(time, value){
    // Start the sample at the transport-scheduled `time`; the original
    // called start() with no argument, which played "now" and jittered.
    kit.get(value.pitch).start(time);
    console.log(value);
  }, sample.notes).start(0);
  Tone.Transport.start();
}