using NAudio.Utils;
using NAudio.Wave;
using QuikDawEditor.EditingClasses;
using System;
using System.Windows.Threading;
using static QuikDawEditor.EDITING.MiscMethods;
using static QuikDawEditor.EDITING.StaticProperties;
using static QuikDawEditor.EDITING.AudioMethods;
using System.Diagnostics;
using QuikDawEditor.MiscClasses;

namespace QuikDawEditor.SampleProviders;

public class MasterMixingSampleProvider : ISampleProvider
{
    private WaveFormat m_waveFormat;
    private float[] sourceBuffer;
    private int channels = 2; // Always create a stereo output
    private float[] maxSamples = new float[2];
    private float thissampleValue;
    private int sampleCount = 0;
    private const int maxInputs = 1024;

    public string RelRes()
    {
        ReleaseResources();
        return "Resources released";
    }

    public MasterMixingSampleProvider(WaveFormat waveFormat, Project podep)
    {
        m_waveFormat = waveFormat;
        maxSamples = new float[channels];
        args = new StreamVolumeEventArgs() { MaxSampleValues = maxSamples };
        //m_SamplesPerNotification = 2000;
    }

    public void ResetAllClips(double playposMS)
    {
        foreach (Track t in editingProject.GetAllTracksInProject)
            if (t.trackType != TrackType.Submix)
                t.ResetClips(playposMS);
    }

    public WaveFormat WaveFormat { get { return m_waveFormat; } }

    int samplesRead;
    int MeterUpdateCycle = 7;
    int volDisplayTime = 0;
    //public bool Mute;

    public int Read(float[] buffer, int offset, int count)
    {
        //if (Mute) { Array.Clear(buffer, 0, buffer.Length); return count; } // add global project mute function?

        // Only update the volume meters every Nth read (update frequency = MeterUpdateCycle x processing rate in ms)
        volDisplayTime += 1;
        if (volDisplayTime == MeterUpdateCycle) volDisplayTime = 0;

        int outputSamples = 0;
        sourceBuffer = BufferHelpers.Ensure(sourceBuffer, count);
        Array.Clear(sourceBuffer); // Clear, or else this buffer will still contain the previous track's samples

        // Read the play position once here so it applies to all samples read in this call
        double MasterTimeMs = projPlayer.CurrentPlayingPosMS;

        lock (sourceBuffer)
        {
            int outIndex = offset;
            lock (editingProject.Tracks)
            {
                // Cycle through each input source, backwards
                for (int sourceindex = editingProject.Tracks.Count - 1; sourceindex > -1; sourceindex -= 1)
                {
                    Track thisTrack = editingProject.Tracks[sourceindex];
                    bool hasCurrentClip = thisTrack.GetCurrentPlayingClip != null;
                    if (thisTrack.GetsPlayed)
                    {
                        if (thisTrack.IsSubmixTrack)
                        {
                            samplesRead = thisTrack.myTrackSampleProvider.ReadSubmix(sourceBuffer, 0, count);
                        }
                        else
                        {
                            // This method also applies the current clip gain factor to VSTi samples
                            samplesRead = thisTrack.myTrackSampleProvider.Read(sourceBuffer, 0, count);
                            if (thisTrack.IsMidiTrack)
                            {
                                //Debug.WriteLine("samples read= " + sourceBuffer.Length);
                                // Have to clear the array (DO NOT REMOVE: THIS IS ABSOLUTELY NECESSARY FOR MIDI TRACKS)
                                Array.Clear(sourceBuffer, 0, sourceBuffer.Length);
                                samplesRead = thisTrack.GetVstiSamples(ref sourceBuffer, samplesRead, projPlayer.blockSize);
                            }
                        }
                        outIndex = offset;

                        // TODO: should skip processing of empty audio tracks
                        // (empty MIDI tracks must still be processed for input sound if armed)

                        // Get processed track samples (effects, volume/pan automation, track slider volume)
                        thisTrack.ProcessTrackSamples(ref sourceBuffer, offset, samplesRead, projPlayer.blockSize, MasterTimeMs, hasCurrentClip || thisTrack.IsSubmixTrack);

                        // Track output meter display
                        if (!IsMixingDown)
                            if (!thisTrack.IsRecordingArmed)
                                if (volDisplayTime == 0)
                                    Dispatcher.CurrentDispatcher.Invoke(() => { thisTrack.TrackOutputVolume = GetAverageSamples(sourceBuffer); }, DispatcherPriority.Render);

                        if (!Double.IsNaN(thisTrack.TrackOutputVolume))
                        {
                            // Add track samples to the master buffer (track mixing takes place here)
                            for (int inIndex = 0; inIndex <= samplesRead - 2; inIndex += 2)
                            {
                                if (inIndex >= outputSamples)
                                {
                                    // First track to reach this sample position: copy
                                    buffer[outIndex] = sourceBuffer[inIndex];
                                    buffer[outIndex + 1] = sourceBuffer[inIndex + 1];
                                    //Debugging
                                    //if (buffer[outIndex] != 0f && outIndex % 1000 == 0)
                                    //    Debug.WriteLine("Track: " + thisTrack.TrackName + ":::: MASTERbuffer[outindex]=" + buffer[0]);
                                }
                                else
                                {
                                    // Mix with existing data
                                    buffer[outIndex] += sourceBuffer[inIndex];
                                    buffer[outIndex + 1] += sourceBuffer[inIndex + 1];
                                }
                                outIndex += 2;
                            }
                            outputSamples = Math.Max(samplesRead, outputSamples);
                        }
                        else
                        {
                            // Error in track sound data
                            Array.Clear(sourceBuffer, 0, sourceBuffer.Length);
                            //Debug.WriteLine("Couldn't read track #" + thisTrack.GetTrackIndex.ToString() + ": " + thisTrack.TrackOutputVolume.ToString());
                        }
                    }
                }
            }
        }

        // Apply master FX and automation
        editingProject.MasterTrack.ProcessMasterTrackSamples(ref buffer, offset, outputSamples, projPlayer.blockSize, MasterTimeMs);
        editingProject.MasterTrack.AddMetronome(ref buffer, offset, ref outputSamples, MasterTimeMs);

        // Master audio output meter display
        if (!IsMixingDown)
            if (volDisplayTime == 0)
                Dispatcher.CurrentDispatcher.Invoke(() => { editingProject.MasterTrack.TrackOutputVolume = GetAverageSamples(buffer); }, DispatcherPriority.Background);

        // Return silence if nothing was read
        if (outputSamples == 0)
        {
            outputSamples = count;
            ClearBuffer(ref buffer, 0, count);
            if (maxSamples[0] != 0)
            {
                maxSamples[0] = 0;
                StreamVolume?.Invoke(this, args);
            }
        }
        return outputSamples;
    }

    private void ClearBuffer(ref float[] buffer, int offset, int count)
    {
        for (int n = 0; n < count; n++)
            buffer[n + offset] = 0;
    }

    public bool ReadFully { get { return m_ReadFully; } set { m_ReadFully = value; } }
    private bool m_ReadFully;

    private StreamVolumeEventArgs args;
    //public int SamplesPerNotification { get { return m_SamplesPerNotification; } set { m_SamplesPerNotification = value; } }
    //private int m_SamplesPerNotification;

    public event EventHandler StreamVolume;

    public class StreamVolumeEventArgs : EventArgs
    {
        public float[] MaxSampleValues { get { return m_MaxSampleValues; } set { m_MaxSampleValues = value; } }
        private float[] m_MaxSampleValues = new float[2];
    }
}
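// ---------------------------------------------------------------------------
// Hypothetical usage sketch (illustration only, not part of the project
// source): one way a MasterMixingSampleProvider could be wired into an NAudio
// output device. The 44.1 kHz stereo float format and the use of
// "editingProject" (exposed via this file's static usings) as the Project
// argument are assumptions.
// ---------------------------------------------------------------------------
public static class MasterMixerUsageSketch
{
    public static WaveOutEvent StartPlayback()
    {
        // Stereo IEEE-float format to match the provider's two-channel output
        var masterMixer = new MasterMixingSampleProvider(
            WaveFormat.CreateIeeeFloatWaveFormat(44100, 2), editingProject);

        // NAudio's Init extension method accepts any ISampleProvider
        var outputDevice = new WaveOutEvent();
        outputDevice.Init(masterMixer);
        outputDevice.Play();
        return outputDevice; // caller is responsible for stopping/disposing
    }
}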