using NAudio.Dsp;
using NAudio.Wave;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Text.Json.Serialization;
using System.Windows;
using System.Windows.Media;
using System.Windows.Threading;
using static QuikDawEditor.EDITING.StaticProperties;
using static QuikDawEditor.EDITING.MiscMethods;
using QuikDawEditor.SampleProviders;
using QuikDawEditor.VST;
using QuikDawEditor.MiscClasses;

namespace QuikDawEditor.EditingClasses;

/// <summary>
/// The project's master output track. During playback it applies, in order:
/// 8-band peaking EQ, an optional Haas (right-channel delay) effect, the VST
/// effect chain, volume automation and the master volume; it can also mix in
/// the metronome click. Implements INotifyPropertyChanged for WPF bindings.
/// </summary>
public class MasterTrack : INotifyPropertyChanged
{
    public event PropertyChangedEventHandler PropertyChanged;

    private void NotifyPropertyChanged([CallerMemberName] string propertyName = "")
    {
        PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
    }

    // Level samples collected for the UI volume meter.
    internal List<float> VolumeMonitorList = new List<float>();

    //For Haas effect, use 5 ms, = (5/200 ms = .025) * 8820 = approx. 220 samples delayed
    internal List<float> HaasBuffer = new List<float>();

    public bool ReceivingAdjustment = false;

    private double _HaasDelay = 0;
    /// <summary>
    /// Haas delay in milliseconds. Setting it re-creates the right-channel delay
    /// buffer pre-filled with <c>delayedSamples</c> zeros (silence) so the first
    /// reads come out delayed by that many samples.
    /// </summary>
    public double HaasDelay
    {
        get { return _HaasDelay; }
        set
        {
            _HaasDelay = value;
            NotifyPropertyChanged(nameof(HaasDelay));
            this.HaasBuffer = new List<float>(new float[delayedSamples]);
        }
    }

    // Milliseconds -> per-channel sample count at 44.1 kHz.
    private int delayedSamples { get { return (int)(_HaasDelay * 44.1D); } }

    public LinearGradientBrush TrackBackgroundBrush
    {
        get
        {
            Color leftColor = Colors.Gainsboro;
            Color midColor = Colors.Gray;
            Color rightColor = Colors.DimGray;
            LinearGradientBrush _TrackBackgroundBrush = new LinearGradientBrush()
            {
                GradientStops = new GradientStopCollection()
                {
                    new GradientStop(leftColor, 0.0),
                    new GradientStop(midColor, 0.4),
                    new GradientStop(rightColor, 1.0)
                }
            };
            // Side effect: keep every automation lane's background in sync with the track brush.
            foreach (AutomationLane autoclip in this.AutomationLanes)
                autoclip.AutoClipBackgroundBrush = new LinearGradientBrush(
                    _TrackBackgroundBrush.GradientStops, new Point(0, 1), new Point(0, 0));
            return _TrackBackgroundBrush;
        }
    }

    // The master track can never be record-armed.
    public bool IsRecordingArmed { get { return false; } }

    public string VolumeDBString { get { return Math.Round(VolumeDB, 2).ToString(); } }

    public LinearGradientBrush MasterTrackAutoClipsBrush
    {
        get { return new LinearGradientBrush(Colors.LightGray, Colors.Gray, new Point(0, 1), new Point(0, 0)); }
    }

    private float _Volume = 0;
    /// <summary>Linear master gain (1 = unity); surfaced to the UI via VolumeDB/VolumeDBString.</summary>
    public float Volume
    {
        get { return _Volume; }
        set
        {
            _Volume = value;
            NotifyPropertyChanged(nameof(VolumeDB));
            // FIX: the rounded dB text is derived from Volume too and was never refreshed.
            NotifyPropertyChanged(nameof(VolumeDBString));
        }
    }

    public float relativeAutoTrackVolume = 1;

    private Visibility _VolumeTextVisibility = Visibility.Hidden;
    [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
    public Visibility VolumeTextVisibility
    {
        get { return _VolumeTextVisibility; }
        set { _VolumeTextVisibility = value; NotifyPropertyChanged(nameof(VolumeTextVisibility)); }
    }

    /// <summary>Master volume in dB; -36 dB and below maps to linear 0 (silence).</summary>
    [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
    public double VolumeDB
    {
        get { return this.Volume == 0 ? -36 : 20 * Math.Log10(this.Volume * relativeAutoTrackVolume); }
        set { Volume = value <= -36 ? 0 : (float)Math.Pow(10, value / 20); }
    }

    private double _TrackOutputVolume = 0;
    [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
    public double TrackOutputVolume
    {
        get { return _TrackOutputVolume; }
        set { _TrackOutputVolume = value; NotifyPropertyChanged(nameof(TrackOutputDB)); }
    }

    public double TrackOutputDB
    {
        get { return TrackOutputVolume == 0 ? -36D : 20D * Math.Log10(TrackOutputVolume); }
    }

    private bool _IsTrackCollapsed = false;
    public bool IsTrackCollapsed
    {
        get { return _IsTrackCollapsed; }
        set { _IsTrackCollapsed = value; NotifyPropertyChanged(nameof(TrackVisibility)); }
    }

    public Visibility TrackVisibility
    {
        get { return IsTrackCollapsed ? Visibility.Collapsed : Visibility.Visible; }
    }

    private double _TrackYScale = 1;
    public double TrackYScale
    {
        get { return _TrackYScale; }
        set
        {
            _TrackYScale = value;
            NotifyPropertyChanged(nameof(TrackYScale));
            NotifyPropertyChanged(nameof(ReverseTrackYScale));
        }
    }

    public double ReverseTrackYScale { get { return 1 / _TrackYScale; } }

    public string MetronomeImage
    {
        get { return _MetronomeOn ? "/EDITING/images/MetronomeOn.png" : "/EDITING/images/MetronomeOff.png"; }
    }

    private bool _MetronomeOn = false;
    public bool MetronomeOn
    {
        get { return _MetronomeOn; }
        set { _MetronomeOn = value; NotifyPropertyChanged(nameof(MetronomeImage)); }
    }

    // True while the EQ filters are being rebuilt; ProcessMasterTrackSamples skips EQ then.
    public bool SettingFilters = false;

    public int SelectedAutomationLaneIndex { get; set; } = 0;

    private bool _IsTrackEQActive = false;
    public bool IsTrackEQActive
    {
        get { return _IsTrackEQActive; }
        set { _IsTrackEQActive = value; NotifyPropertyChanged(nameof(IsTrackEQActive)); }
    }

    private bool _IsHaasDelayActive = false;
    public bool IsHaasDelayActive
    {
        get { return _IsHaasDelayActive; }
        set { _IsHaasDelayActive = value; NotifyPropertyChanged(nameof(IsHaasDelayActive)); }
    }

    public ObservableCollection<AutomationLane> AutomationLanes { get; set; } = new ObservableCollection<AutomationLane>();
    public EqualizerBand[] EQBands { get; set; } = new EqualizerBand[8];
    public ObservableCollection<ActiveVstPlugin> MasterTrackEffectVsts { get; set; } = new ObservableCollection<ActiveVstPlugin>();
    public IntPair FxWinLocation { get; set; } = new IntPair() { int1 = 400, int2 = 300 };

    /// <summary>Deserialization constructor: rebuilds filters from the saved EQ bands.</summary>
    [JsonConstructor]
    public MasterTrack(EqualizerBand[] EQBands)
    {
        this.EQBands = EQBands;
        bandCount = EQBands.Length;
        filters = new BiQuadFilter[2, EQBands.Length];
        CreateFilters();
        InitializeAutomationLanes();
    }

    /// <summary>Default constructor: unity volume and a flat 8-band EQ (100 Hz – 9.6 kHz).</summary>
    public MasterTrack()
    {
        Volume = 1;
        EQBands = new EqualizerBand[]
        {
            new EqualizerBand { Bandwidth = 0.8f, Frequency = 100, Gain = 0 },
            new EqualizerBand { Bandwidth = 0.8f, Frequency = 200, Gain = 0 },
            new EqualizerBand { Bandwidth = 0.8f, Frequency = 400, Gain = 0 },
            new EqualizerBand { Bandwidth = 0.8f, Frequency = 800, Gain = 0 },
            new EqualizerBand { Bandwidth = 0.8f, Frequency = 1200, Gain = 0 },
            new EqualizerBand { Bandwidth = 0.8f, Frequency = 2400, Gain = 0 },
            new EqualizerBand { Bandwidth = 0.8f, Frequency = 4800, Gain = 0 },
            new EqualizerBand { Bandwidth = 0.8f, Frequency = 9600, Gain = 0 },
        };
        bandCount = EQBands.Length;
        filters = new BiQuadFilter[2, EQBands.Length];
        CreateFilters();
        InitializeAutomationLanes();
    }

    // Shared constructor tail (was duplicated verbatim in both constructors):
    // ensure a default "Volume" lane exists with flat left/right edge points and
    // mark every lane as belonging to the master track.
    private void InitializeAutomationLanes()
    {
        double setAutoWidthMs = 10000; // dummy initializer - actually automation clips widths are bound to the Mastertrack width
        if (AutomationLanes.Count == 0)
        {
            AutomationLanes.Add(new AutomationLane("Volume", 0, 2, null));
            foreach (AutomationLane autoclip in AutomationLanes)
            {
                if (autoclip.autoPoints.Count == 0)
                    autoclip.autoPoints = new ObservableCollection<AutoPoint>()
                    {
                        new AutoPoint() { sourcePointms = 0, AutoValue = 1, IsLeftEdgeAutoPoint = true },
                        new AutoPoint() { sourcePointms = setAutoWidthMs, AutoValue = 1, IsRightEdgeAutoPoint = true }
                    };
                autoclip.NotifyAutoPointsChanged();
            }
        }
        foreach (AutomationLane autoclip in AutomationLanes)
            autoclip.IsMasterTrackAutoClip = true;
    }

    internal ClipSampleProvider MetronomeCSP;
    public WaveFormat WaveFormat = DefaultPlayWaveFormat;

    // [channel, band] peaking filters; rebuilt by CreateFilters when bands change.
    public readonly BiQuadFilter[,] filters;
    private bool updated;
    private int channels = 2;
    public int bandCount = 0;

    /// <summary>
    /// Runs <paramref name="outputSamples"/> interleaved samples starting at
    /// <paramref name="offset"/> through the per-channel EQ filter chain, in place.
    /// Returns the same array for convenience.
    /// </summary>
    public float[] EqualizedSamples(float[] samplesToEqualize, int offset, int outputSamples)
    {
        if (updated)
        {
            CreateFilters();
            updated = false;
        }
        for (int n = 0; n < outputSamples; n++)
        {
            int ch = n % channels; // even = left, odd = right (interleaved stereo)
            for (int band = 0; band < bandCount; band++)
                samplesToEqualize[offset + n] = filters[ch, band].Transform(samplesToEqualize[offset + n]);
        }
        return samplesToEqualize;
    }

    // (Re)creates or retunes one peaking-EQ filter per channel per band.
    private void CreateFilters()
    {
        for (int bandIndex = 0; bandIndex < bandCount; bandIndex++)
        {
            var band = EQBands[bandIndex];
            for (int n = 0; n < 2; n++)
            {
                if (filters[n, bandIndex] == null)
                    filters[n, bandIndex] = BiQuadFilter.PeakingEQ(WaveFormat.SampleRate, band.Frequency, band.Bandwidth, band.Gain);
                else
                    filters[n, bandIndex].SetPeakingEq(WaveFormat.SampleRate, band.Frequency, band.Bandwidth, band.Gain);
            }
        }
    }

    public void UpdateFilters()
    {
        updated = true;
        CreateFilters();
    }

    /// <summary>
    /// Re-opens each saved VST effect plugin context and restores its parameters.
    /// Must run on the UI dispatcher or the plugin context is not affected.
    /// </summary>
    public void InitializeVsts()
    {
        foreach (ActiveVstPlugin vplugin in this.MasterTrackEffectVsts)
        {
            if (vplugin.ReturnedWithoutError)
            {
                //Create vst contexts and set their parameters from saved data
                Application.Current.Dispatcher.Invoke(() =>
                {
                    //must be invoke or else doesn't affect the plugin context
                    try
                    {
                        vplugin.OpenActivePluginContext();
                        for (int paramno = 0; paramno < vplugin.myContext.PluginInfo.ParameterCount; paramno++)
                            vplugin.myContext.PluginCommandStub.Commands.SetParameter(paramno, vplugin.VstParameters[paramno].ParameterValue);
                    }
                    catch (Exception ex)
                    {
                        Debug.WriteLine("error loading Effect vst:\n" + ex.Message);
                        vplugin.ReturnedWithoutError = false;
                    }
                }, DispatcherPriority.Normal);
            }
        }
    }

    float trackAutomationFactor = 1;

    /// <summary>
    /// Applies the full master chain to <paramref name="sourceBuffer"/> in place:
    /// EQ, Haas delay, VST effects, then volume automation x master volume.
    /// <paramref name="samplesRead"/> is the count of valid interleaved samples.
    /// </summary>
    internal void ProcessMasterTrackSamples(ref float[] sourceBuffer, int offset, int samplesRead, int blockSize, double MasterTimeMs)
    {
        //EQ
        if (IsTrackEQActive)
            if (!SettingFilters)
                sourceBuffer = EqualizedSamples(sourceBuffer, offset, samplesRead);

        //HAAS DELAY: push right-channel samples through a FIFO so the right
        //channel lags the left by delayedSamples.
        if (IsHaasDelayActive)
        {
            for (int bno = 0; bno < samplesRead; bno += 2)
                HaasBuffer.Add(sourceBuffer[bno + 1]);
            try
            {
                // FIX: bound by samplesRead (was sourceBuffer.Length) so the reads
                // stay balanced with the writes above; otherwise a partial read
                // drains the FIFO and throws every subsequent frame.
                for (int bno = 0; bno < samplesRead; bno += 2)
                {
                    sourceBuffer[bno + 1] = HaasBuffer[0];
                    HaasBuffer.RemoveAt(0);
                }
            }
            //catch { HaasBuffer.Clear(); }
            catch { } // best-effort: FIFO may underrun right after the delay length changes
        }

        //SEND TO VST PLUGINS
        for (int vstno = 0; vstno < MasterTrackEffectVsts.Count; vstno++)
        {
            ActiveVstPlugin vplugin = MasterTrackEffectVsts[vstno];
            if (vplugin.IsActive && !vplugin.causedRunningError)
            {
                // NOTE(review): each chunk consumes blockSize * 2 floats but the
                // loop advances by blockSize, so consecutive chunks overlap by
                // half — confirm whether the step should be blockSize * 2.
                for (int sampsSent = 0; sampsSent <= samplesRead - blockSize * 2; sampsSent += blockSize)
                {
                    // De-interleave into the plugin's per-channel input buffers.
                    //for (int n = 0; n < blockSize * 2; n += m_waveFormat.Channels)
                    for (int n = 0; n < blockSize * 2; n += 2)
                    {
                        vplugin.inputBuffers[0][n / 2] = sourceBuffer[sampsSent + n];
                        vplugin.inputBuffers[1][n / 2] = sourceBuffer[sampsSent + n + 1];
                    }
                    try
                    {
                        vplugin.myContext.PluginCommandStub.Commands.ProcessReplacing(vplugin.inputBuffers, vplugin.outputBuffers);
                    }
                    catch (Win32Exception ex)
                    {
                        Debug.WriteLine("UNHANDLED---unhandled::::::::::::::::\n" + ex.Message);
                        vplugin.causedRunningError = true; // stop sending to this plugin
                    }
                    // Re-interleave the processed output.
                    ////for (int n = 0; n < blockSize * 2; n += m_waveFormat.Channels)
                    for (int n = 0; n < blockSize * 2; n += 2)
                    {
                        sourceBuffer[sampsSent + n] = vplugin.outputBuffers[0][n / 2];
                        sourceBuffer[sampsSent + n + 1] = vplugin.outputBuffers[1][n / 2];
                    }
                }
            }
        }

        //VOLUME AUTOMATION: lane 0 is the master "Volume" lane created in the constructors.
        try
        {
            trackAutomationFactor = CalculateValBetweenAutoPoints(MasterTimeMs, AutomationLanes[0]);
        }
        catch (Exception ex)
        {
            trackAutomationFactor = 1; // fall back to unity gain
            Debug.WriteLine("Couldn't get master gain factor\n" + ex.Message);
        }

        float trackVolFactorL = Volume * trackAutomationFactor;
        float trackVolFactorR = Volume * trackAutomationFactor;
        for (int inIndex = 0; inIndex <= samplesRead - 2; inIndex += 2)
        {
            sourceBuffer[inIndex] *= trackVolFactorL;
            sourceBuffer[inIndex + 1] *= trackVolFactorR;
        }
    }

    /// <summary>
    /// Mixes the metronome click into <paramref name="buffer"/> when enabled for the
    /// current transport state, and restarts the click sample on each beat boundary.
    /// </summary>
    internal void AddMetronome(ref float[] buffer, int offset, ref int outputSamples, double MasterTimeMs)
    {
        float[] sourceBuffer = new float[buffer.Length];
        if (editingProject.MasterTrack.MetronomeOn) //current metronomeon setting overrides app settings
        {
            if ((projPlayer.IsProjectPlaying && QuikDawEditor.Properties.Settings.Default.MetronomePlay)
                || (projPlayer.IsProjectRecording && QuikDawEditor.Properties.Settings.Default.MetronomeRecord))
            {
                var metronomeVol = QuikDawEditor.Properties.Settings.Default.MetronomeVol; // hoisted out of the loop
                int outIndex = 0;
                int samplesRead = MetronomeCSP.Read(sourceBuffer, 0, sourceBuffer.Length);
                for (int inIndex = 0; inIndex <= samplesRead - 2; inIndex += 2)
                {
                    buffer[outIndex] += sourceBuffer[inIndex] * metronomeVol;
                    buffer[outIndex + 1] += sourceBuffer[inIndex + 1] * metronomeVol;
                    outIndex += 2;
                }
                // Rewind the click sample when the master time crosses a beat boundary.
                if (MasterTimeMs % MillisecondsPerBeat < projPlayer.ProcessingRateMs)
                    MetronomeCSP.myClipStream.clipWaveStreamNormal.Position = 0;
                outputSamples = Math.Max(samplesRead, outputSamples);
            }
        }
    }
}