using NAudio.Utils;

namespace QuikDawEditor.SampleProviders;

public class TrackSampleProvider : ISampleProvider
{
    //internal float leftMultiplier, rightMultiplier, m_pan;
    //internal IPanStrategy m_panStrategy;

    public string RelRes() { ReleaseResources(); return "Resources released"; }

    internal Track myTrack;

    public TrackSampleProvider()
    {
        //Pan = 0;
        //m_panStrategy = new SinPanStrategy();
        //multipliers = m_panStrategy.GetMultipliers(Pan);
        m_waveFormat = DefaultPlayWaveFormat;
        args = new StreamVolumeEventArgs() { MaxSampleValues = maxSamples };
        //overflowFactor = (WaveFormat.SampleRate / 1000D) / 2;
    }

    private WaveFormat m_waveFormat;
    private float[] sourceBuffer;
    //private const int maxInputs = 1024;
    private float[] maxSamples = new float[2];

    public WaveFormat WaveFormat { get { return m_waveFormat; } }

    //double overflowFactor;

    internal float currClipGainFactor = 1;
    internal Clip currClip;
    internal double currentPlayingPos;
    private bool clipWasChanged = false;
    //private List lastFloatPair = new List();

    public int Read(float[] buffer, int offset, int count)
    {
        clipWasChanged = false;
        if (!projPlayer.IsProjectPlaying) { Array.Clear(buffer, 0, count); return count; } // Don't read any clip data if not playing

        int outputSamples = 0;
        Clip currClip = myTrack.GetCurrentPlayingClip;
        if (myTrack.LastCurrentClip == null) myTrack.LastCurrentClip = currClip;

        //To ensure that the playing position value doesn't change while in this Read method
        currentPlayingPos = projPlayer.CurrentPlayingPosMS;

        if (currClip != null)
        {
            sourceBuffer = BufferHelpers.Ensure(sourceBuffer, count);
            if (currClip.GetIndexInTrack != myTrack.LastCurrentClip.GetIndexInTrack)
            {
                //Change clips during midplay
                if (currClip.IsAudioClip)
                {
                    //if (currClip.ClipLeftMs != myTrack.LastCurrentClip.ClipRightMs)
                    //{
                    //Debug.WriteLine("Reset clip: " + currClip.GetIndexInTrack.ToString());
                    currClip.ResetMe(currentPlayingPos);
                    //projPlayer.StopAntiClickOn = true;
                    clipWasChanged = true;
                    //}
                }
                if (currClip.IsMidiClip) myTrack.LastCurrentClip.StopAllPlayingMidiNotes();
                myTrack.LastCurrentClip = currClip;
            }
            //else
            //    currClip.BeginningClipPlay = false;

            lock (myTrack.Clips)
            {
                if (!currClip.IsMuted && !currClip.IsOrphanClip && currClip.ClipLeftMs <= currentPlayingPos)
                //if (!currClip.IsMuted && !currClip.IsOrphanClip && currClip.ClipLeftMs < currentPlayingPos)
                {
                    int samplesRead = currClip.IsAudioClip ? currClip.myClipSampleProvider.Read(sourceBuffer, 0, count) : count;
                    int outIndex = offset;
                    float prevClipGainFac = currClipGainFactor;

                    //__________________GAIN POINT AUTOMATION_____________________________________________________________________
                    try
                    {
                        //can be further refactored and optimized...
                        double ClipRelTimeMs = currentPlayingPos - currClip.ClipVirtualStartMs;
                        GainPoint gp1 = currClip.GainPoints.LastOrDefault(gp => gp.sourcePointms < ClipRelTimeMs) ?? currClip.GainPoints.FirstOrDefault();
                        GainPoint gp2 = currClip.GainPoints.FirstOrDefault(gp => gp.sourcePointms > ClipRelTimeMs) ?? currClip.GainPoints.LastOrDefault();
                        currClipGainFactor = (gp1 == null || gp2 == null) ? 1 : GetGainValBetweenGainPoints(ClipRelTimeMs, gp1, gp2);
                        if (float.IsNaN(currClipGainFactor)) currClipGainFactor = gp1.GainValue;
                        //Debug.WriteLine("curr CLipgainFac=" + currClipGainFactor.ToString());
                    }
                    catch
                    {
                        currClipGainFactor = 1;
                        Debug.WriteLine("Error getting clip gain value between gain points...");
                    }
                    currClipGainFactor *= currClip.ClipGainFactor;
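
                    //GetGainValBetweenGainPoints is defined elsewhere in the project; purely as an illustrative sketch
                    //(assuming GainPoint exposes the sourcePointms and GainValue members used above), a linear
                    //interpolation between the two surrounding gain points would look roughly like this:
                    //
                    //float GainLerp(double relMs, GainPoint a, GainPoint b)
                    //{
                    //    if (b.sourcePointms == a.sourcePointms) return a.GainValue;          //avoid divide-by-zero on coincident points
                    //    double frac = (relMs - a.sourcePointms) / (b.sourcePointms - a.sourcePointms);
                    //    return (float)(a.GainValue + (b.GainValue - a.GainValue) * frac);    //blend from a toward b by position between the points
                    //}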

                    //if (Math.Abs(currClipGainFactor - prevClipGainFac) >= 0.1)
                    //{
                    //    //Debug.WriteLine("curr - prev=" + (currClipGainFactor - prevClipGainFac).ToString());
                    //    prevClipGainFac = currClipGainFactor;
                    //}
                    //else
                    //    currClipGainFactor = prevClipGainFac;
                    //_____________________________________________________________________________________________________________

                    //Send midi information to VSTi here
                    if (currClip.IsMidiClip)
                    {
                        //Array.Clear(sourceBuffer, 0, count); //???
                        currClip.SendClipMidiData(currentPlayingPos);
                    }

                    if (currClip.IsAudioClip)
                    {
                        //********PITCH SHIFT*************************************************
                        if (currClip.PitchChangeValue != 0)
                            sourceBuffer = currClip.PitchShiftedSamples(sourceBuffer, offset, samplesRead);
                        //*******************************************************************

                        //Set track buffer to be whatever the current clip buffer is, modified by clip automation gain factor
                        //(ClipSampleProvider always supplies stereo samples)
                        for (int inIndex = 0; inIndex <= samplesRead - 2; inIndex += 2)
                        {
                            if (inIndex >= outputSamples)
                            {
                                if (currClip.IsReversed)
                                {
                                    buffer[outIndex] = sourceBuffer[inIndex + 1] * currClipGainFactor;
                                    buffer[outIndex + 1] = sourceBuffer[inIndex] * currClipGainFactor;
                                }
                                else
                                {
                                    buffer[outIndex] = sourceBuffer[inIndex] * currClipGainFactor;
                                    buffer[outIndex + 1] = sourceBuffer[inIndex + 1] * currClipGainFactor;
                                }
                            }
                            //else
                            //{
                            //    //for future clip overlapping play?
                            //    buffer[outIndex] += sourceBuffer[inIndex] * currClipGainFactor;
                            //    buffer[outIndex + 1] += sourceBuffer[inIndex + 1] * currClipGainFactor;
                            //}
                            //Debug.WriteLine("buffer=" + buffer[outIndex].ToString());
                            //if (buffer[outIndex] != 0f)
                            //    Debug.WriteLine("outputBuffer = " + buffer[outIndex]);
                            outIndex += 2;
                        }
                    }
                    outputSamples = Math.Max(samplesRead, outputSamples);
                }
            }
        }
        else
        {
            //currClip is null -- stop any notes that were playing as clip ends
            if (myTrack.IsMidiTrack)
            {
                //Debug.WriteLine("clip was null");
                if (myTrack.LastCurrentClip != null)
                {
                    myTrack.LastCurrentClip.StopAllPlayingMidiNotes();
                    myTrack.LastCurrentClip = null;
                    currClipGainFactor = 0;
                    return count;
                }
                ClearBuffer(ref buffer, 0, count);
            }
            outputSamples = count;
        }

        if (outputSamples == 0) // To return silence
        {
            outputSamples = count;
            ClearBuffer(ref buffer, 0, count);
            if (!(maxSamples[0] == 0))
            {
                maxSamples[0] = 0;
                StreamVolume?.Invoke(this, args);
            }
            return outputSamples;
        }
        return outputSamples;
    }

    //float antiClickFac = 1;
    int samplesRead = 0;
    int volDisplayTime = 0;

    public int ReadSubmix(float[] buffer, int offset, int count)
    {
        volDisplayTime += 1;
        if (volDisplayTime == 5) volDisplayTime = 0; //only update vol meters every 5th time (50 ms)

        //if (!projPlayer.IsProjectPlaying) { Array.Clear(buffer, 0, count); return count; } //don't need to clear submix track because it has no data

        int outputSamples = 0;
        sourceBuffer = BufferHelpers.Ensure(sourceBuffer, count);
        double CurrentPlayingPosMs = projPlayer.CurrentPlayingPosMS;

        lock (myTrack.SubTracks)
        {
            foreach (Track t in myTrack.SubTracks)
            {   //******************************************
                samplesRead = t.myTrackSampleProvider.Read(buffer, offset, count);

                //bool hasCurrentClip = t.GetCurrentPlayingClip != null;
                //if (t.GetsPlayed)
                //{
                //    //Have to clear array because all tracks are always playing (if not muted or passed last clip) (DO NOT REMOVE: THIS IS ABSOLUTELY NECESSARY FOR AUDIO TRACKS)
                //    Array.Clear(sourceBuffer, 0, sourceBuffer.Length);
                //    int outIndex = offset;
                //    if (t.IsSubmixTrack)
                //        samplesRead = t.myTrackSampleProvider.ReadSubmix(sourceBuffer, 0, count);
                //    else
                //    {
                //        samplesRead = t.myTrackSampleProvider.Read(sourceBuffer, 0, count);
                //        int rsamps = samplesRead;
                //        if (t.IsMidiTrack) //this method also applies the current clip gain factor to VSTi samples
                //            samplesRead = t.GetVstiSamples(ref sourceBuffer, samplesRead, projPlayer.blockSize);
                //    }
                //    t.ProcessTrackSamples(ref sourceBuffer, offset, samplesRead, projPlayer.blockSize, CurrentPlayingPosMs, hasCurrentClip || t.IsSubmixTrack);
                //
                //    //For track audio output meter display
                //    if (!IsMixingDown)
                //        if (!t.IsRecordingArmed)
                //            if (volDisplayTime == 0)
                //                Dispatcher.CurrentDispatcher.Invoke(() =>
                //                {
                //                    t.TrackOutputVolume = GetAverageSamples(sourceBuffer);
                //                }, DispatcherPriority.Render);
                //
                //    for (int inIndex = 0; inIndex <= samplesRead - 2; inIndex += 2)
                //    {
                //        if (inIndex >= outputSamples)
                //        {
                //            buffer[outIndex] = sourceBuffer[inIndex];
                //            buffer[outIndex + 1] = sourceBuffer[inIndex + 1];
                //        }
                //        else
                //        {
                //            buffer[outIndex] += sourceBuffer[inIndex];
                //            buffer[outIndex + 1] += sourceBuffer[inIndex + 1];
                //        }
                //        outIndex += 2;
                //    }

                outputSamples = Math.Max(samplesRead, outputSamples);
                //}

                //    //double clipOutputVolumeB = GetAverageSamples(buffer);
                //    //if (Double.IsNaN(clipOutputVolumeB))
                //    //    Debug.WriteLine("CLIP volume after GAINFACTOR bad: " + clipOutputVolumeB.ToString());
            }
        }

        if (outputSamples == 0) // To return silence
        {
            outputSamples = count;
            ClearBuffer(ref buffer, 0, count);
            if (!(maxSamples[0] == 0))
            {
                maxSamples[0] = 0;
                StreamVolume?.Invoke(this, args);
            }
        }
        return outputSamples;
    }

    private StreamVolumeEventArgs args;
    //public int SamplesPerNotification { get { return m_SamplesPerNotification; } set { m_SamplesPerNotification = value; } }
    //private int m_SamplesPerNotification = 1000;

    public event EventHandler StreamVolume;

    private void ClearBuffer(ref float[] buffer, int offset, int count)
    {
        for (int n = 0; n < count; n++) buffer[n + offset] = 0;
    }

    private float voldiff = 0;
    private float voldir;
}

////Simplistic "balance" control - treating the mono input as if it was stereo. In the centre, both channels are at
////full volume; the opposite channel decays linearly as the balance is turned to one side.
//public class StereoBalanceStrategy : IPanStrategy
//{
//    public StereoSamplePair GetMultipliers(float pan)
//    {
//        float leftChannel = pan <= 0 ? 1.0F : (1 - pan) / 2.0F;
//        float rightChannel = pan >= 0 ? 1.0F : (pan + 1) / 2.0F;
//        return new StereoSamplePair() { Left = leftChannel, Right = rightChannel };
//    }
//}

///// Square Root Pan, thanks to Yuval Naveh
//public class SquareRootPanStrategy : IPanStrategy
//{
//    public StereoSamplePair GetMultipliers(float pan)
//    {
//        // -1..+1 -> 1..0
//        float normPan = (-pan + 1) / 2;
//        float leftChannel = Convert.ToSingle(Math.Sqrt(normPan));
//        float rightChannel = Convert.ToSingle(Math.Sqrt(1 - normPan));
//        return new StereoSamplePair() { Left = leftChannel, Right = rightChannel };
//    }
//}

//public class SinPanStrategy : IPanStrategy
//{
//    private const float HalfPi = 1.570796F;
//    public StereoSamplePair GetMultipliers(float pan)
//    {
//        // -1..+1 -> 1..0
//        float normPan = (-pan + 1) / 2;
//        float leftChannel = Convert.ToSingle(Math.Sin(normPan * HalfPi));
//        float rightChannel = Convert.ToSingle(Math.Cos(normPan * HalfPi));
//        return new StereoSamplePair() { Left = leftChannel, Right = rightChannel };
//    }
//}

//public class LinearPanStrategy : IPanStrategy
//{
//    /// Gets the left and right channel multipliers for this pan value
//    public StereoSamplePair GetMultipliers(float pan)
//    {
//        // -1..+1 -> 1..0
//        float normPan = (-pan + 1) / 2;
//        float leftChannel = normPan;
//        float rightChannel = 1 - normPan;
//        return new StereoSamplePair() { Left = leftChannel, Right = rightChannel };
//    }
//}

public class StreamVolumeEventArgs : EventArgs
{
    private float[] m_MaxSampleValues;
    public float[] MaxSampleValues { get { return m_MaxSampleValues; } set { m_MaxSampleValues = value; } }
}
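
//Illustrative only, not part of the editor: a consumer could subscribe to the StreamVolume event above to drive a
//simple output meter. This sketch assumes a hypothetical UpdateMeter(left, right) UI helper and an already-configured
//provider instance; since the event is declared as a plain EventHandler, the args are pattern-matched back to
//StreamVolumeEventArgs before reading the per-channel peak values.
//
//trackSampleProvider.StreamVolume += (sender, e) =>
//{
//    if (e is StreamVolumeEventArgs sv && sv.MaxSampleValues.Length >= 2)
//        UpdateMeter(sv.MaxSampleValues[0], sv.MaxSampleValues[1]);    //left / right peak values
//};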