diff --git a/app/initializers/catch-errors.js b/app/initializers/catch-errors.js index 3d4bec34..3d0e9511 100644 --- a/app/initializers/catch-errors.js +++ b/app/initializers/catch-errors.js @@ -1,13 +1,13 @@ import Ember from 'ember'; -// all uncaught errors will be caught here -// you can use `message` to make sure it's the error you're looking for -// returning true overrides the default window behaviour -window.onerror = function(message, file, lineNumber, columnNumber, error) { - console.warn(message, error && error.stack); - window.error = error; - return true; -}; +// // all uncaught errors will be caught here +// // you can use `message` to make sure it's the error you're looking for +// // returning true overrides the default window behaviour +// window.onerror = function(message, file, lineNumber, columnNumber, error) { +// console.warn(message, error && error.stack); +// window.error = error; +// return true; +// }; export default { name: 'CatchErrors', diff --git a/app/lib/soundtouch.js b/app/lib/soundtouch.js index 476950e8..1860d291 100644 --- a/app/lib/soundtouch.js +++ b/app/lib/soundtouch.js @@ -40,7 +40,7 @@ FifoSampleBuffer.prototype.clear = function() { // // TODO(TECHDEBT): window.BUFFER_SIZE set by mix builder window.MAX_BUFFER_SIZE = 16384; -window.BUFFER_SIZE = MAX_BUFFER_SIZE / 8; +window.BUFFER_SIZE = window.MAX_BUFFER_SIZE / 8; const SAMPLE_DRIFT_TOLERANCE = 512; export function SoundtouchBufferSource(buffer) { @@ -69,17 +69,19 @@ SoundtouchBufferSource.prototype = { } }; -export function createSoundtouchNode({ audioContext, filter, startTime, offsetTime, endTime, defaultTempo, defaultPitch }) { - console.log('createSoundtouchNode') +// TODO(TRACKMULTIGRID): audioBpm not a constant +export function createSoundtouchNode({ audioContext, filter, startTime, offsetTime, endTime, audioBpm, defaultPitch }) { const channelCount = 2; const windowBufferSize = window.BUFFER_SIZE; - if (!(audioContext && filter - && isValidNumber(startTime) && 
isValidNumber(offsetTime) && isValidNumber(endTime))) { + if (!(audioContext && filter && + isValidNumber(startTime) && isValidNumber(offsetTime) && isValidNumber(endTime))) { Ember.Logger.warn('Must provide all params to createSoundtouchNode', endTime); return; } + audioBpm = isValidNumber(audioBpm) ? audioBpm : 128; // TODO(TECHDEBT): share default bpm + const samples = new Float32Array(windowBufferSize * channelCount); const sampleRate = audioContext.sampleRate || 44100; const startSample = ~~(offsetTime * sampleRate); @@ -101,11 +103,15 @@ export function createSoundtouchNode({ audioContext, filter, startTime, offsetTi const l = outputs[0][0]; const r = outputs[0][1]; - // naively take first pitch and tempo values for this sample + // naively take first pitch value for this sample const pitch = parameters.pitch && parameters.pitch[0]; - const tempo = parameters.tempo && parameters.tempo[0]; const soundtouch = filter.pipe; + // TODO(MULTIGRID): need to minimize dspBufLength. + // is it possible to align dspBufLength with automation clip TICKs? + const syncBpm = parameters.bpm && parameters.bpm[0]; + const tempo = (isValidNumber(syncBpm) && isValidNumber(audioBpm)) ? (syncBpm / audioBpm) : 1; + if (isValidNumber(pitch)) { soundtouch.pitchSemitones = pitch; } @@ -158,14 +164,11 @@ export function createSoundtouchNode({ audioContext, filter, startTime, offsetTi r[i] = (samples[filterFrame * 2 + 1] * isPlaying[i]) || 0; filterFrame += isPlaying[i]; } - }; + } defaultPitch = parseFloat(defaultPitch); defaultPitch = isValidNumber(defaultPitch) ? defaultPitch : 0; - defaultTempo = parseFloat(defaultTempo); - defaultTempo = isValidNumber(defaultTempo) ? 
defaultTempo : 1; - const node = new AudioWorkerNode(audioContext, onaudioprocess, { numberOfInputs: 2, numberOfOutputs: 2, @@ -176,8 +179,8 @@ export function createSoundtouchNode({ audioContext, filter, startTime, offsetTi defaultValue: defaultPitch, }, { - name: 'tempo', - defaultValue: defaultTempo, + name: 'bpm', + defaultValue: 1, }, { name: 'isPlaying', diff --git a/app/lib/web-audio/soundtouch-node.js b/app/lib/web-audio/soundtouch-node.js index be3faa82..d2f8814a 100644 --- a/app/lib/web-audio/soundtouch-node.js +++ b/app/lib/web-audio/soundtouch-node.js @@ -20,8 +20,8 @@ export default Ember.ObjectProxy.extend( node: null, // set by `start` method, unset by `disconnect` outputNode: null, - // TODO(V2): TODO(MULTIGRID): tempo, transpose dynamic - start(startTime, offsetTime, endTime, tempo, transpose) { + // TODO(V2): transpose dynamic + start(startTime, offsetTime, endTime, audioBpm, transpose) { // Ember.Logger.log('currentTime', this.get('audioContext.currentTime')); // Ember.Logger.log('startSource', startTime, offsetTime); this.stop(); @@ -36,7 +36,7 @@ export default Ember.ObjectProxy.extend( startTime, offsetTime, endTime, - defaultTempo: tempo, + audioBpm, defaultPitch: transpose, }); this.set('node', node); diff --git a/app/lib/web-audio/track-source-node.js b/app/lib/web-audio/track-source-node.js index 09aed47f..39a403c5 100644 --- a/app/lib/web-audio/track-source-node.js +++ b/app/lib/web-audio/track-source-node.js @@ -3,7 +3,6 @@ import Ember from 'ember'; import BufferSourceNode from './buffer-source-node'; import RequireAttributes from 'linx/lib/require-attributes'; -// TODO(REFACTOR): create base track FX chain + audio source node + soundtouch node export default BufferSourceNode.extend({ // params @@ -17,142 +16,3 @@ export default BufferSourceNode.extend({ return ''; }, }); - - -/* global SimpleFilter:true */ - -// import SoundTouch from 'linx/lib/soundtouch'; -// import { WebAudioBufferSource, getWebAudioNode } from 
'linx/lib/soundtouch'; - - // TODO(REFACTOR) move into fx chain - // updateTempo: function() { - // var wavesurfer = this.get('wavesurfer'); - // var tempo = this.get('tempo'); - // if (wavesurfer) { - // wavesurfer.setTempo(tempo); - // } - // }.observes('wavesurfer', 'tempo'), - - // updatePitch: function() { - // var wavesurfer = this.get('wavesurfer'); - // var pitch = this.get('pitch'); - // if (wavesurfer) { - // wavesurfer.setPitch(pitch); - // } - // }.observes('wavesurfer', 'pitch'), - - // updateVolume: function() { - // var wavesurfer = this.get('wavesurfer'); - // var volume = this.get('volume'); - // if (wavesurfer) { - - // // TODO(EASY): remove this check, only for two-way binding to input - // try { - // volume = parseFloat(volume); - // } catch(e) {} - - // if (typeof volume !== 'number' || !volume) { - // volume = 0; - // } - - // wavesurfer.setVolume(volume); - // } - // }.observes('wavesurfer', 'volume'), - - -// TODO(REFACTOR): figure this out -// -// Wavesurfer + SoundTouch Integration -// - -// Wavesurfer.setTempo = function(tempo) { -// this.backend.setTempo(tempo); -// }; - -// Wavesurfer.setPitch = function(pitch) { -// this.backend.setPitch(pitch); -// }; - -// Wavesurfer.WebAudio.setTempo = function(tempo) { -// // Ember.Logger.log("setting tempo", tempo); -// if (typeof tempo !== 'number' || !tempo) { -// tempo = 1; -// } - -// // update startPosition and lastPlay for new tempo -// this.startPosition += this.getPlayedTime(); -// this.lastPlay = this.ac.currentTime; - -// this.linxTempo = this.playbackRate = tempo; - -// // update soundtouch tempo -// var soundtouch = this.soundtouch; -// if (soundtouch) { -// soundtouch.tempo = tempo; -// } -// }; - -// Wavesurfer.WebAudio.setPitch = function(pitch) { -// // Ember.Logger.log("setting pitch", pitch); - -// // TODO: remove this check, only for two-way binding to input -// try { -// pitch = parseFloat(pitch); -// } catch(e) { - -// } -// if (typeof pitch !== 'number') { -// pitch = 0; -// 
} - -// this.linxPitch = pitch; - -// // update soundtouch pitch -// var soundtouch = this.soundtouch; -// if (soundtouch) { -// soundtouch.pitchSemitones = pitch; -// } -// }; - -// // 'play' is equivalent to 'create and connect soundtouch source' -// Wavesurfer.WebAudio.play = function(start, end) { -// if (!this.isPaused()) { -// this.pause(); -// } - -// var adjustedTime = this.seekTo(start, end); -// start = adjustedTime.start; -// end = adjustedTime.end; -// this.scheduledPause = end; -// var startSample = ~~(start * this.ac.sampleRate); - -// // init soundtouch -// this.soundtouch = new SoundTouch(); -// this.setPitch(this.linxPitch); -// this.setTempo(this.linxTempo); - -// // hook up soundtouch node -// this.soundtouchSource = new WebAudioBufferSource(this.buffer); -// this.soundtouchFilter = new SimpleFilter(this.soundtouchSource, this.soundtouch); -// this.soundtouchFilter.sourcePosition = startSample; -// this.soundtouchNode = getWebAudioNode(this.ac, this.soundtouchFilter); -// this.soundtouchNode.connect(this.analyser); - -// this.setState(this.PLAYING_STATE); -// this.fireEvent('play'); -// }; - -// // 'pause' is equivalent to 'disconnect soundtouch source' -// Wavesurfer.WebAudio.pause = function() { -// this.scheduledPause = null; -// this.startPosition += this.getPlayedTime(); - -// this.soundtouchNode && this.soundtouchNode.disconnect(); - -// this.setState(this.PAUSED_STATE); -// }; - -// // turn into no-op -// Wavesurfer.WebAudio.createSource = function() {}; - -// export default Wavesurfer; diff --git a/app/mixins/playable-arrangement.js b/app/mixins/playable-arrangement.js index 4a44aeea..fb7519e5 100644 --- a/app/mixins/playable-arrangement.js +++ b/app/mixins/playable-arrangement.js @@ -6,7 +6,7 @@ import withDefault from 'linx/lib/computed/with-default'; import Metronome from './playable-arrangement/metronome'; import WebAudioMergerNode from 'linx/lib/web-audio/merger-node'; import computedObject from 'linx/lib/computed/object'; -import 
BeatGrid from 'linx/models/track/audio-meta/beat-grid'; +import BeatGrid from './playable-arrangement/beat-grid'; import { flatten, isValidNumber } from 'linx/lib/utils'; import GainNode from 'linx/lib/web-audio/gain-node'; @@ -15,7 +15,14 @@ export default Ember.Mixin.create( RequireAttributes('clips', 'audioContext'), ReadinessMixin('isPlayableArrangementReady'), { - // params + // required params + clips: null, + bpmScale: null, // or beatGrid + audioContext: null, + + // optional params + outputNode: Ember.computed.reads('audioContext.destination'), + playpause(beat) { this.get('metronome').playpause(beat); }, @@ -44,20 +51,17 @@ export default Ember.Mixin.create( this.get('metronome').seekToBeat(beat); }, - // optional params - outputNode: Ember.computed.reads('audioContext.destination'), - bpm: 128.0, - isPlaying: Ember.computed.reads('metronome.isPlaying'), + duration: Ember.computed.reads('beatGrid.duration'), metronome: computedObject(Metronome, { 'audioContext': 'audioContext', - 'arrangement': 'this' + 'beatGrid': 'beatGrid', }), beatGrid: computedObject(BeatGrid, { - duration: 'duration', - bpm: 'bpm', + bpmScale: 'bpmScale', + beatCount: 'beatCount', timeSignature: 'timeSignature', }), @@ -83,17 +87,11 @@ export default Ember.Mixin.create( return this.get('beatCount') / this.get('timeSignature'); }), - // duration of arrangement in [s] - // TODO(MULTIGRID) - duration: Ember.computed('metronome.bpm', 'beatCount', function() { - return this.get('metronome').getDuration(0, this.get('beatCount')); - }), - getRemainingDuration() { - const metronome = this.get('metronome'); + const beatGrid = this.get('beatGrid'); const beatCount = this.get('beatCount'); const currentBeat = this.getCurrentBeat(); - return metronome.getDuration(currentBeat, beatCount - currentBeat); + return beatGrid.getDuration(currentBeat, beatCount - currentBeat); }, getCurrentBeat() { diff --git a/app/mixins/playable-arrangement/automatable-clip/control.js 
b/app/mixins/playable-arrangement/automatable-clip/control.js index 5e36b920..25853065 100644 --- a/app/mixins/playable-arrangement/automatable-clip/control.js +++ b/app/mixins/playable-arrangement/automatable-clip/control.js @@ -39,20 +39,9 @@ export default function(audioParamPath) { // optional params description: '', - // isSuspended: false, + valueScale: Ember.computed(() => d3.scale.identity()), defaultValue: 0, - - // TODO(TECHDEBT): share more cleanly - valueScale: Ember.computed('type', function() { - switch (this.get('type')) { - case CONTROL_TYPE_DELAY_CUTOFF: - case CONTROL_TYPE_FILTER_HIGHPASS_CUTOFF: - case CONTROL_TYPE_FILTER_LOWPASS_CUTOFF: - return d3.scale.log().domain([20, 22050]).range([0, 1]); - default: - return d3.scale.identity(); - } - }), + // isSuspended: false, _initClipListeners: Ember.on('init', function() { const clip = this.get('clip'); diff --git a/app/mixins/playable-arrangement/beat-grid.js b/app/mixins/playable-arrangement/beat-grid.js new file mode 100644 index 00000000..56a8f35f --- /dev/null +++ b/app/mixins/playable-arrangement/beat-grid.js @@ -0,0 +1,213 @@ +import Ember from 'ember'; +import d3 from 'd3'; + +import LinearScale from 'linx/lib/linear-scale'; +import QuantizeScale from 'linx/lib/quantize-scale'; +import computedObject from 'linx/lib/computed/object'; +import { timeToBeatUtil, bpmToSpb, isValidNumber } from 'linx/lib/utils'; + +export const BAR_QUANTIZATION = 'bar'; +export const BEAT_QUANTIZATION = 'beat'; +export const TICK_QUANTIZATION = 'tick'; +export const MS10_QUANTIZATION = '10ms'; +export const MS1_QUANTIZATION = '1ms'; +export const SAMPLE_QUANTIZATION = 'sample'; + +export const TICKS_PER_BEAT = 120; + + +export default Ember.Object.extend({ + + // required params + bpmScale: null, + beatCount: null, + + // optional params + timeSignature: 4, + + // calculate duration from integral of bpmScale curve + duration: Ember.computed('bpmScale', 'beatCount', function() { + const bpmScale = 
this.get('bpmScale'); +    const beatCount = this.get('beatCount'); +    const domain = bpmScale.domain(); + +    // add durations from each linear interval +    return [0].concat(domain).concat([beatCount]).reduce((duration, startBeat, i, beats) => { +      if (i === beats.length - 1) { +        return duration; +      } + +      const endBeat = beats[i + 1]; + +      const startBpm = bpmScale(startBeat); +      const endBpm = bpmScale(endBeat); + +      const intervalBeatCount = endBeat - startBeat; +      const averageBpm = (startBpm + endBpm) / 2.0; +      const minutes = intervalBeatCount / averageBpm; +      const seconds = minutes * 60; + +      return duration + seconds; +    }, 0); +  }), + +  timeToBeat(time) { +    return this.get('beatScale').invert(time); +  }, + +  beatToTime(beat) { +    return this.get('beatScale')(beat); +  }, + +  beatToBar(beat) { +    return beat / this.get('timeSignature'); +  }, + +  barToBeat(bar) { +    return bar * this.get('timeSignature'); +  }, + +  timeToBar(time) { +    return this.get('barScale').invert(time); +  }, + +  barToTime(bar) { +    return this.get('barScale')(bar); +  }, + +  quantizeBeat(beat) { +    return this.get('quantizeBeatScale').invert(beat); +  }, + +  quantizeBar(bar) { +    return this.get('quantizeBarScale').invert(bar); +  }, + +  beatToQuantizedBar(beat) { +    return this.quantizeBar(this.beatToBar(beat)); +  }, + +  beatToQuantizedDownbeat(beat) { +    return this.barToBeat(this.beatToQuantizedBar(beat)); +  }, + +  timeToQuantizedBeat(time) { +    return this.quantizeBeat(this.timeToBeat(time)); +  }, + +  timeToQuantizedBar(time) { +    return this.quantizeBar(this.timeToBar(time)); +  }, + +  // returns time duration of given beat interval +  getDuration(startBeat, endBeat) { +    const startTime = this.beatToTime(startBeat); +    const endTime = this.beatToTime(endBeat); +    return endTime - startTime; +  }, + +  // returns beat count of given time interval +  getBeatCount(startTime, endTime) { +    const startBeat = this.timeToBeat(startTime); +    const endBeat = this.timeToBeat(endTime); +    return endBeat - startBeat; +  }, + +  // Beat Scale +  // domain is beats [b] + 
// range is time [s] +  beatScaleDomain: Ember.computed('beatCount', 'bpmScale', function() { +    const { beatCount, bpmScale } = this.getProperties('beatCount', 'bpmScale'); +    return [0].concat(bpmScale.domain()).concat([beatCount]); +  }), +  beatScaleRange: Ember.computed('beatScaleDomain', 'bpmScale', function() { +    const bpmScale = this.get('bpmScale'); const domain = this.get('beatScaleDomain'); let time = 0; return domain.map((beat, i) => { if (i > 0) { const prevBeat = domain[i - 1]; const averageBpm = (bpmScale(prevBeat) + bpmScale(beat)) / 2.0; time += ((beat - prevBeat) / averageBpm) * 60; } return time; }); +  }), +  beatScale: Ember.computed('beatScaleDomain', 'beatScaleRange', function() { +    return d3.scale.linear() +      .domain(this.get('beatScaleDomain') || []) +      .range(this.get('beatScaleRange') || []); +  }), +  quantizeBeatScale: Ember.computed('beatScale', function() { +    const beatScale = this.get('beatScale'); + +    return d3.scale.linear() +      .domain(beatScale.domain()) +      .rangeRound(beatScale.range()) +  }), + +  // Bar Scale +  // domain is beats [b] +  // range is time [s] +  barScaleDomain: Ember.computed('beatScaleDomain', 'timeSignature', function() { +    const { beatScale, timeSignature } = this.getProperties('beatScale', 'timeSignature'); + +    return beatScale.domain().map((n) => n / timeSignature); +  }), +  barScale: Ember.computed('barScaleDomain', 'beatScaleRange', function() { +    return d3.scale.linear() +      .domain(this.get('barScaleDomain') || []) +      .range(this.get('beatScaleRange') || []); +  }), +  quantizeBarScale: Ember.computed('barScale', function() { +    const barScale = this.get('barScale'); + +    return d3.scale.linear() +      .domain(barScale.domain()) +      .rangeRound(barScale.range()) +  }), + +  toString() { +    return ''; +  }, +}); + +// provides dynamically updating beat grid properties +// supports constants and paths +function beatGridPropertyGenerator(beatGridFunctionName) { +  return function(beatGridPath, unitOrPath) { +    const isPath = !isValidNumber(unitOrPath); + +    const getUnit = function(context) { +      return isPath ? context.get(unitOrPath) : unitOrPath; +    } + +    return Ember.computed(`${beatGridPath}.beatScale`, isPath ? 
unitOrPath : '', { + get() { + const unit = getUnit(this); + const beatGrid = this.get(beatGridPath); + + return beatGrid && beatGrid[beatGridFunctionName](unit); + }, + + // TODO(TECHDEBT): this only works for timeToBeat + set(key, beat) { + // Ember.Logger.log(`set ${beatGridFunctionName}`, beat); + Ember.assert('Must set `${beatGridFunctionName} to valid number', isValidNumber(beat)); + + const beatGrid = this.get(beatGridPath); + const time = beatGrid && beatGrid.beatToTime(beat); + + this.set(unitOrPath, time); + + return beat; + }, + }); + }; +} + +// beat | time +export const computedBeatToTime = beatGridPropertyGenerator('beatToTime'); +export const computedTimeToBeat = beatGridPropertyGenerator('timeToBeat'); + +// beat | bar +export const computedBeatToBar = beatGridPropertyGenerator('beatToBar'); +export const computedBarToBeat = beatGridPropertyGenerator('barToBeat'); + +// bar | time +export const computedBarToTime = beatGridPropertyGenerator('barToTime'); +export const computedTimeToBar = beatGridPropertyGenerator('timeToBar'); + +// quantize +export const computedQuantizeBeat = beatGridPropertyGenerator('quantizeBeat'); +export const computedQuantizeBar = beatGridPropertyGenerator('quantizeBar'); diff --git a/app/mixins/playable-arrangement/clip.js b/app/mixins/playable-arrangement/clip.js index 8737a518..6b1a73b7 100644 --- a/app/mixins/playable-arrangement/clip.js +++ b/app/mixins/playable-arrangement/clip.js @@ -29,6 +29,7 @@ export default Ember.Mixin.create(Ember.Evented, { session: Ember.inject.service(), fakeAudioContext: Ember.computed.reads('session.audioContext'), fakeMetronome: Ember.computed(function() { + // TODO(MULTIGRID) fix this return Metronome.create({ arrangement: { bpm: 128 } }); @@ -53,12 +54,12 @@ export default Ember.Mixin.create(Ember.Evented, { // returns absolute start time of this clip from metronome's frame of reference getAbsoluteStartTime() { - return this.get('metronome').beatToTime(this.get('startBeat')); + return 
this.get('metronome').beatToAbsTime(this.get('startBeat')); }, // returns absolute start time of this clip from metronome's frame of reference getAbsoluteEndTime() { - return this.get('metronome').beatToTime(this.get('endBeat')); + return this.get('metronome').beatToAbsTime(this.get('endBeat')); }, // returns absolute time from metronome's frame of reference @@ -67,9 +68,8 @@ export default Ember.Mixin.create(Ember.Evented, { }, // duration of clip in [s] - // TODO(MULTIGRID) - duration: Ember.computed('metronome.bpm', 'startBeat', 'beatCount', function() { - return this.get('metronome').getDuration(this.get('startBeat'), this.get('beatCount')); + duration: Ember.computed('arrangement.beatGrid', 'startBeat', 'endBeat', function() { + return this.get('arrangement.beatGrid').getDuration(this.get('startBeat'), this.get('endBeat')); }), endBeat: add('startBeat', 'beatCount'), @@ -97,7 +97,7 @@ export default Ember.Mixin.create(Ember.Evented, { // TODO(REFACTOR): turn isValid into validness mixin? 
isValid: Ember.computed.and('isValidStartBeat', 'isValidEndBeat', 'isValidBeatCount'), - clipScheduleDidChange: Ember.observer('isValid', 'isDisabled', 'startBeat', 'beatCount', 'duration', 'metronome.{absSeekTime,isPlaying,bpm}', function() { + clipScheduleDidChange: Ember.observer('isValid', 'isDisabled', 'startBeat', 'beatCount', 'duration', 'metronome.{absSeekTime,isPlaying}', 'arrangement.beatGrid', function() { this.set('isScheduled', this.get('metronome.isPlaying')); Ember.run.once(this, 'triggerScheduleEvents'); }), diff --git a/app/mixins/playable-arrangement/metronome.js b/app/mixins/playable-arrangement/metronome.js index 6f6c0189..5571f488 100644 --- a/app/mixins/playable-arrangement/metronome.js +++ b/app/mixins/playable-arrangement/metronome.js @@ -3,22 +3,20 @@ import Ember from 'ember'; import _ from 'npm:underscore'; import RequireAttributes from 'linx/lib/require-attributes'; -import { beatToTime, timeToBeat, clamp, isNumber, isValidNumber } from 'linx/lib/utils'; +import { clamp, isNumber, isValidNumber } from 'linx/lib/utils'; import Clock from 'linx/lib/clock'; // Holds rhythym based on clock -// TODO(REFACTOR): TODO(MULTIGRID): refactor metronome to have a beatgrid? 
export default Ember.Object.extend( Ember.Evented, { // required params audioContext: null, - arrangement: null, + beatGrid: null, // params seekBeat: 0, // [b] last seeked beat absSeekTime: 0, // [s] time of last seek in clock frame of reference - bpm: 128.0, isPlaying: false, // clock: Ember.computed('audioContext', function() { @@ -44,24 +42,9 @@ export default Ember.Object.extend( // returns WAAclock event // callbackAtBeat(callback, beat) { - // return this.callbackAtTime(callback, this.beatToTime(beat)); + // return this.callbackAtTime(callback, this.beatToAbsTime(beat)); // }, - // TODO(V2): clean this up - _updateBpm: Ember.observer('arrangement.bpm', function() { - const bpm = this.get('arrangement.bpm'); - - if (isValidNumber(bpm)) { - this.seekToBeat(this.getCurrentBeat()); - this.set('bpm', bpm); - } - }).on('init'), - - // TODO(MULTIRGID) TODO(REFACTOR) - getDuration(startBeat, beatCount) { - return beatToTime(beatCount, this.get('bpm')); - }, - seekToBeat(beat) { // Ember.Logger.log("metronome seekToBeat", beat); @@ -111,12 +94,11 @@ export default Ember.Object.extend( } }, - // TODO(MULTIGRID): turn into beatgrid // returns absolute time at which given beat will occur in audioContext - beatToTime(beat) { + beatToAbsTime(beat) { beat -= this.getCurrentBeat(); - return this.getAbsTime() + beatToTime(beat, this.get('bpm')); + return this.getAbsTime() + this.get('beatGrid').beatToTime(beat); }, // Returns current metronome beat @@ -137,7 +119,11 @@ export default Ember.Object.extend( }, _getPlayedBeats() { - return timeToBeat(this._getPlayedTime(), this.get('bpm')); + const beatGrid = this.get('beatGrid'); + const startBeat = this.get('seekBeat'); + const startTime = beatGrid.beatToTime(startBeat); + const endTime = startTime + this._getPlayedTime(); + return beatGrid.getBeatCount(startTime, endTime); }, destroy() { diff --git a/app/mixins/playable-arrangement/track-clip.js b/app/mixins/playable-arrangement/track-clip.js index e8f89a52..1ecec8d2 100644 
--- a/app/mixins/playable-arrangement/track-clip.js +++ b/app/mixins/playable-arrangement/track-clip.js @@ -1,9 +1,10 @@ import Ember from 'ember'; +import d3 from 'd3'; + import RequireAttributes from 'linx/lib/require-attributes'; import AutomatableClipMixin from './automatable-clip'; import PlayableClipMixin from './clip'; -import TrackSourceNode from 'linx/lib/web-audio/track-source-node'; import GainNode from 'linx/lib/web-audio/gain-node'; import TunaDelayNode from 'linx/lib/web-audio/tuna/delay-node'; import TunaFilterNode from 'linx/lib/web-audio/tuna/filter-node'; @@ -23,6 +24,7 @@ import { import { default as AutomatableClipControlMixin, CONTROL_TYPE_VOLUME, + CONTROL_TYPE_BPM, CONTROL_TYPE_PITCH, CONTROL_TYPE_DELAY_WET, CONTROL_TYPE_DELAY_CUTOFF, @@ -30,7 +32,10 @@ import { CONTROL_TYPE_FILTER_LOWPASS_CUTOFF } from './automatable-clip/control'; -// TODO(CLEANUP): nest under track-clip/controls/gain? +function _createFilterCutoffScale() { + return d3.scale.log().domain([20, 22050]).range([0, 1]); +} + const TrackVolumeControl = Ember.Object.extend( AutomatableClipControlMixin('trackVolumeNode.gain'), { @@ -38,6 +43,15 @@ const TrackVolumeControl = Ember.Object.extend( defaultValue: 1, }); +const TrackBpmControl = Ember.Object.extend( + AutomatableClipControlMixin('soundtouchNode.bpm'), { + + type: CONTROL_TYPE_BPM, + defaultValue: 128, // TODO(TECHDEBT): bpm default constant used in many places + + // TODO(MULTIGRID): does this need a valueScale other than identity? 
+}); + const TrackPitchControl = Ember.Object.extend( AutomatableClipControlMixin('soundtouchNode.pitch'), { @@ -57,6 +71,7 @@ const TrackDelayCutoffControl = Ember.Object.extend( type: CONTROL_TYPE_DELAY_CUTOFF, defaultValue: 2000, + valueScale: Ember.computed(() => _createFilterCutoffScale()), }); const TrackHighpassFilterCutoffControl = Ember.Object.extend( @@ -64,6 +79,7 @@ const TrackHighpassFilterCutoffControl = Ember.Object.extend( type: CONTROL_TYPE_FILTER_HIGHPASS_CUTOFF, defaultValue: 20, + valueScale: Ember.computed(() => _createFilterCutoffScale()), }); const TrackLowpassFilterCutoffControl = Ember.Object.extend( @@ -71,6 +87,7 @@ const TrackLowpassFilterCutoffControl = Ember.Object.extend( type: CONTROL_TYPE_FILTER_LOWPASS_CUTOFF, defaultValue: 22050, + valueScale: Ember.computed(() => _createFilterCutoffScale()), }); @@ -95,6 +112,7 @@ export default Ember.Mixin.create( controls: Ember.computed(function() { return [ TrackVolumeControl.create({ clip: this }), + TrackBpmControl.create({ clip: this }), TrackPitchControl.create({ clip: this }), TrackDelayWetControl.create({ clip: this }), TrackDelayCutoffControl.create({ clip: this }), @@ -147,21 +165,14 @@ export default Ember.Mixin.create( return audioBeatGrid.beatToTime(audioStartBeat + clipBeat); }, - // TODO(V2): dynamic tempo - audioScheduleDidChange: Ember.observer('audioBinary.isReady', 'audioStartBeat', 'audioBeatCount', 'tempo', 'transpose', 'gain', function() { + // TODO(TRACKMULTIGRID): audioBpm not constant + audioScheduleDidChange: Ember.observer('audioBinary.isReady', 'audioStartBeat', 'audioBeatCount', 'audioBpm', 'transpose', 'gain', function() { Ember.run.once(this, 'startSource'); }).on('schedule'), - tempo: Ember.computed('syncBpm', 'audioBpm', function() { - const syncBpm = this.get('syncBpm'); - const audioBpm = this.get('audioBpm'); - - return (isValidNumber(syncBpm) && isValidNumber(audioBpm)) ? 
(syncBpm / audioBpm) : 1; - }), - startSource() { if (this.get('isScheduled') && this.get('audioBinary.isReady')) { - const { tempo, transpose } = this.getProperties('tempo', 'transpose'); + const { audioBpm, transpose } = this.getProperties('audioBpm', 'transpose'); // if starting in past, start now instead let startTime = Math.max(this.getAbsoluteTime(), this.getAbsoluteStartTime()); let offsetTime = this.getCurrentAudioTime(); @@ -173,9 +184,9 @@ export default Ember.Mixin.create( offsetTime = 0; } - Ember.Logger.log('startTrack', this.get('track.title'), startTime, offsetTime, endTime, tempo, transpose); + Ember.Logger.log('startTrack', this.get('track.title'), startTime, offsetTime, endTime, transpose); const node = this.get('soundtouchNode'); - node && node.start(startTime, offsetTime, endTime, tempo, transpose); + node && node.start(startTime, offsetTime, endTime, audioBpm, transpose); } else { this.stopSource(); } @@ -190,15 +201,6 @@ export default Ember.Mixin.create( // // Web Audio Nodes // - // TODO(REFACTOR): how to distinguish between track gain, fx gain, arrangement gain? - // TODO(REFACTOR): set GainControl.defaultValue based on track.audioMeta.loudness - // that might mean making a specific TrackGainNode? 
- // trackSourceNode: computedObject(TrackSourceNode, { - // 'audioContext': 'audioContext', - // 'track': 'track', - // 'outputNode': 'outputNode.content', - // }), - soundtouchNode: computedObject(SoundtouchNode, { 'audioContext': 'audioContext', 'audioBuffer': 'audioBinary.audioBuffer', diff --git a/app/models/mix.js b/app/models/mix.js index 22575660..3febe6a9 100644 --- a/app/models/mix.js +++ b/app/models/mix.js @@ -2,11 +2,13 @@ import Ember from 'ember'; import DS from 'ember-data'; import _ from 'npm:underscore'; +import d3 from 'd3'; import CrudMixin from 'linx/mixins/models/crud'; import OrderedHasManyMixin from 'linx/mixins/models/ordered-has-many'; import PlayableArrangementMixin from 'linx/mixins/playable-arrangement'; +import { flatten, isValidNumber } from 'linx/lib/utils'; export default DS.Model.extend( CrudMixin, @@ -18,12 +20,21 @@ export default DS.Model.extend( _mixItems: DS.hasMany('mix/item', { async: true }), title: DS.attr('string'), - bpm: DS.attr('number', { defaultValue: 128 }), timeSignature: DS.attr('number', { defaultValue: 4.0 }), + // DEPRECATED, from pre-multigrid + bpm: DS.attr('number', { defaultValue: 128 }), + // implement playable-arrangement session: Ember.inject.service(), audioContext: Ember.computed.reads('session.audioContext'), + bpmScale: Ember.computed('_bpmControlPoints.@each.{beat,value}', function() { + const bpmControlPoints = this.get('_bpmControlPoints'); + return d3.scale.linear() + .domain(bpmControlPoints.mapBy('beat')) + .range(bpmControlPoints.mapBy('value')) + .clamp(true); + }), tracks: Ember.computed.mapBy('items', 'track.content'), transitions: Ember.computed.mapBy('items', 'transition.content'), @@ -32,6 +43,41 @@ export default DS.Model.extend( transitionClips: Ember.computed.mapBy('items', 'transitionClip'), clips: Ember.computed.uniq('trackClips', 'transitionClips'), + _transitionBpmClips: Ember.computed.mapBy('transitions', 'bpmClip'), + _bpmControlPointsRelationships: 
Ember.computed.mapBy('_transitionBpmClips', 'controlPoints'), +  _bpmControlPoints: Ember.computed('_bpmControlPointsRelationships.[]', 'bpm', function() { +    const bpmControlPointsRelationships = this.get('_bpmControlPointsRelationships'); + +    // if we dont have any bpm control points, fake it with base mix bpm +    if (Ember.isEmpty(bpmControlPointsRelationships)) { +      const mixBpm = this.get('bpm'); + +      // because the bpmScale is clamped, this sets a constant bpm +      return [ +        { +          beat: 0, +          value: mixBpm, +        }, +        { +          beat: 1, +          value: mixBpm, +        }, +      ]; +    } else { +      return flatten(bpmControlPointsRelationships).map((controlPoint) => { +        // offset control point by transitionClip startBeat +        const transitionStartBeat = controlPoint.get('automationClip.transition.transitionClip.startBeat'); +        const beat = isValidNumber(transitionStartBeat) ? +          controlPoint.get('beat') + transitionStartBeat : controlPoint.get('beat'); + +        return { +          beat, +          value: controlPoint.get('value'), +        }; +      }); +    } +  }), +   trackAt(index) { const item = this.objectAt(index); return item && item.get('track.content'); @@ -55,7 +101,6 @@ export default DS.Model.extend( return item.setTrack(track); }); -      // TODO(REFACTOR2): possible bug with items being proxies return Ember.RSVP.all(items).then((items) => { return this.replace(index, 0, items); }); diff --git a/app/models/mix/transition.js b/app/models/mix/transition.js index a64576a9..281e4a4b 100644 --- a/app/models/mix/transition.js +++ b/app/models/mix/transition.js @@ -9,6 +9,7 @@ import DependentRelationshipMixin from 'linx/mixins/models/dependent-relationshi import { CONTROL_TYPE_VOLUME, +  CONTROL_TYPE_BPM, CONTROL_TYPE_DELAY_WET, CONTROL_TYPE_DELAY_CUTOFF, CONTROL_TYPE_FILTER_HIGHPASS_CUTOFF, @@ -20,6 +21,7 @@ export default DS.Model.extend( PlayableArrangementMixin, DependentRelationshipMixin('fromTrackAutomationClips'), DependentRelationshipMixin('toTrackAutomationClips'), +  DependentRelationshipMixin('bpmClip'), 
ReadinessMixin('isTransitionReady'), { title: DS.attr('string'), @@ -27,6 +29,7 @@ export default DS.Model.extend( beatCount: DS.attr('number', { defaultValue: 16 }), transitionClip: DS.belongsTo('mix/transition-clip'), + bpmClip: DS.belongsTo('mix/transition/automation-clip'), fromTrackClip: Ember.computed.reads('transitionClip.fromTrackClip'), toTrackClip: Ember.computed.reads('transitionClip.toTrackClip'), @@ -36,8 +39,13 @@ export default DS.Model.extend( // implementing PlayableArrangement audioContext: Ember.computed.reads('transitionClip.audioContext'), outputNode: Ember.computed.reads('transitionClip.outputNode.content'), - clips: Ember.computed.uniq('fromTrackAutomationClips', 'toTrackAutomationClips'), - bpm: Ember.computed.reads('transitionClip.mix.bpm'), + clips: Ember.computed.uniq('fromTrackAutomationClips', 'toTrackAutomationClips', '_bpmClips'), + beatGrid: Ember.computed.reads('transitionClip.mix.beatGrid'), + + _bpmClips: Ember.computed('bpmClip', function() { + return [this.get('bpmClip')].filter((clip) => !!clip); + }), + // optimizes this transition, with given constraints // TODO(REFACTOR2): rethink this. convert to ember-concurrency @@ -112,7 +120,12 @@ export default DS.Model.extend( toTrackLowpassCutoffClip ]; - const clips = fromTrackAutomationClips.concat(toTrackAutomationClips); + const bpmClip = store.createRecord('mix/transition/automation-clip', { + controlType: CONTROL_TYPE_BPM, + transition: this, + }); + + const clips = fromTrackAutomationClips.concat(toTrackAutomationClips).concat([bpmClip]); // TODO(TECHDEBT): save automation clips BEFORE adding items. 
otherwise, we get a weird bug // where control points are removed from relationship while saving, if only one has changed @@ -194,8 +207,21 @@ export default DS.Model.extend( this.set('fromTrackClip.delayBypass', true); } + // TODO(TRACKMULTIGRID): needs update + bpmClip.addControlPoints([ + { + beat: 0, + value: this.get('fromTrackClip.track.audioMeta.bpm') + }, + { + beat: beatCount, + value: this.get('toTrackClip.track.audioMeta.bpm') + } + ]); + this.get('fromTrackAutomationClips').addObjects(fromTrackAutomationClips); this.get('toTrackAutomationClips').addObjects(toTrackAutomationClips); + this.set('bpmClip', bpmClip); this.set('beatCount', beatCount); return this; @@ -219,7 +245,8 @@ export default DS.Model.extend( destroyAutomationClips() { return Ember.RSVP.all([ this.destroyFromTrackAutomationClips(), - this.destroyToTrackAutomationClips() + this.destroyToTrackAutomationClips(), + this.get('bpmClip').then((clip) => clip && clip.destroyRecord()) ]); }, }); diff --git a/app/models/track.js b/app/models/track.js index 01f389f7..641d0370 100644 --- a/app/models/track.js +++ b/app/models/track.js @@ -117,7 +117,7 @@ export default DS.Model.extend( this.get('audioMeta.content').setProperties({ barGridTime, - tempo: parseFloat(meta.overall_tempo_straight), + bpm: parseFloat(meta.overall_tempo_straight), timeSignature: parseInt(meta.clicks_per_bar), }); diff --git a/app/models/track/audio-meta.js b/app/models/track/audio-meta.js index dfa16b24..32867133 100644 --- a/app/models/track/audio-meta.js +++ b/app/models/track/audio-meta.js @@ -110,11 +110,11 @@ export default DS.Model.extend( duration: 'duration', bpm: 'bpm', timeSignature: 'timeSignature', - // TODO(MULTIGRID): make this use real thing + // TODO(TRACKMULTIGRID): make this use real thing barGridTime: 0, }), - // TODO(MULTIGRID): adapt for multiple grid markers. Piecewise-Scale? or a long domain/range? + // TODO(TRACKMULTIGRID): adapt for multiple grid markers. Piecewise-Scale? or a long domain/range? 
nudge(value) { Ember.assert('Cannot nudge AudioMeta.barGridTime without numeric value', isValidNumber(value)); diff --git a/app/models/track/audio-meta/beat-grid.js b/app/models/track/audio-meta/beat-grid.js index 2d53e522..c2aae774 100644 --- a/app/models/track/audio-meta/beat-grid.js +++ b/app/models/track/audio-meta/beat-grid.js @@ -123,13 +123,13 @@ export default Ember.Object.extend({ 'range': 'quantizeBarScaleRange', }), - // TODO(MULTIGRID): this will depend on the grid markers and bpm + // TODO(TRACKMULTIGRID): this will depend on the grid markers and bpm beatCount: Ember.computed('duration', 'bpm', function() { return timeToBeat(this.get('duration'), this.get('bpm')); }), // the time of the first actual beat in the raw audio file - // TODO(MULTIGRID): this supposes a constant bpm in the audio file + // TODO(TRACKMULTIGRID): this supposes a constant bpm in the audio file firstBarOffset: Ember.computed('barGridTime', 'bpm', 'timeSignature', function() { const bpm = this.get('bpm'); const timeSignature = this.get('timeSignature'); diff --git a/tests/unit/models/audio-meta/beat-grid-test.js b/tests/unit/models/audio-meta/beat-grid-test.js index 1715fbeb..5fb7d747 100644 --- a/tests/unit/models/audio-meta/beat-grid-test.js +++ b/tests/unit/models/audio-meta/beat-grid-test.js @@ -78,7 +78,6 @@ describe('BeatGrid', function() { }); }); - // TODO(MULTIGRID): rework describe('#nudge', function() { let nudgeAmount = 0.005, previousStart; diff --git a/tests/unit/models/track/audio-meta-test.js b/tests/unit/models/track/audio-meta-test.js index d3ecbb0d..e3bfee11 100644 --- a/tests/unit/models/track/audio-meta-test.js +++ b/tests/unit/models/track/audio-meta-test.js @@ -61,7 +61,6 @@ describe('AudioMetaModel', function() { lastWholeBeat: 776, lastWholeBar: 194, - // TODO(MULTIGRID): this needs to change 'sortedGridMarkers.length': 1, 'sortedSectionMarkers.length': function() { return analysis.get('confidentSections.length'); }, });