using NAudio.Wave;
using System;
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Windows;
using System.Windows.Threading;
using static QuikDawEditor.EDITING.MiscMethods;
using static QuikDawEditor.EDITING.AudioMethods;
using static QuikDawEditor.EDITING.StaticProperties;
using QuikDawEditor.EditingClasses;
using QuikDawEditor.Undo;
using QuikDawEditor.MiscClasses;

namespace QuikDawEditor;

public partial class ProjectPlayer
{
    internal void ArmForAudioRecording(int RecInputNo)
    {
        if (!AudioDeviceOK) { MessageBox.Show("Audio device not available"); return; }

        //Must dispose and create a new AsioOut if recording audio
        StopPlayTimer();
        IsProjectPlaying = false;
        playAsioOut.Stop();
        playAsioOut.Dispose();
        playAsioOut = new AsioOut(SelectedAudioOutIndex);
        bufferedSampleProvider.ClearBuffer();
        playAsioOut.InputChannelOffset = Properties.Settings.Default.RecordInIndex;
        playAsioOut.InitRecordAndPlayback(bufferedSampleProvider.ToWaveProvider16(), 1, 44100);
        playAsioOut.AudioAvailable += AsioOut_AudioAvailable;
        playAsioOut.Play();
        StartPlayTimer();

        //For absolute minimal latency pass-through monitoring, copy the input directly to the
        //OutputBuffers in the AudioAvailable event and set WrittenToOutputBuffers = true.
        //That way a BufferedWaveProvider is not needed (a rough sketch is at the end of this file).
    }

    internal void DisarmForAudioRecording()
    {
        //Must dispose and create a new AsioOut
        UnarmAudio();
        playAsioOut?.Stop();
        playAsioOut?.Dispose();
        try { CreateNewAsioOut(); }
        catch { MessageBox.Show("Audio device not available..."); }
    }

    internal void UnarmAudio()
    {
        StopPlayTimer();
        IsProjectPlaying = false;
        playAsioOut.AudioAvailable -= AsioOut_AudioAvailable;
    }

    WaveFileWriter recordingWFWriter;
    string recordingFileName;

    private bool _IsProjectRecording = false;
    public bool IsProjectRecording
    {
        get { return _IsProjectRecording; }
        set
        {
            //Start or stop midi/audio track recording
            switch (editingProject.GetRecordingTrack.trackType)
            {
                case TrackType.Audio:
                    if (value)
                    {
                        //RECORDING
                        recordingFileName = ProjectRecordingDirectory + "\\" + "NewRecording_" +
                                            DateTime.Now.ToString().Replace(":", "-").Replace("/", "-") + ".wav";
                        recordingWFWriter = new WaveFileWriter(recordingFileName, new WaveFormat(44100, 1));
                        InitializeAudioRecordingClip();
                    }
                    _IsProjectRecording = value;
                    _IsProjectPlaying = value;
                    if (!_IsProjectRecording)
                    {
                        recordingWFWriter?.Close();
                        recordingWFWriter?.Dispose();
                        ConvertRecordedWaveToMp3AndAddClip(recordingFileName);
                    }
                    break;

                case TrackType.Midi:
                    if (value)
                    {
                        InitializeMidiRecordingClip();
                        QDMidiNote mnotex = new QDMidiNote(); //necessary for some reason!!!!!?
                    }
                    else StopRecordingMidiAndMakeClip();
                    _IsProjectRecording = value;
                    _IsProjectPlaying = value;
                    break;
            }

            NotifyPropertyChanged(nameof(RecButImgSource));
            NotifyPropertyChanged(nameof(IsProjectRecording));
            NotifyPropertyChanged(nameof(RecordButEnabled));
            NotifyPropertyChanged(nameof(ControlsDPCoverVisible));
        }
    }

    private void InitializeAudioRecordingClip()
    {
        //MessageBox.Show("recordingtrackindex=" + editingProject.RecordingTrack.trackIndex.ToString());
        RecordStartPosMS = CurrentPlayingPosMS;
        //Back the play position up by the configured lead-in measures before recording starts
        CurrentPlayingPosMS -= Properties.Settings.Default.LeadInMeasures * editingProject.BeatsPerMeasure * MillisecondsPerBeat;
        ChangePlayPos(CurrentPlayingPosMS);
        Track recTrack = editingProject.GetRecordingTrack;
        recTrack.audioRecordingClip.ClipWidthMs = 0;
        recTrack.audioRecordingClip.ClipVirtualStartMs = RecordStartPosMS;
        recTrack.audioRecordingClip.IsVisible = true;
        recTrack.audioRecordingClip.UpdateRecordingClipTransform();
    }

    private void ConvertRecordedWaveToMp3AndAddClip(string waveFileName)
    {
        string mp3FileName = waveFileName.Replace(".wav", ".mp3");
        ConvertWaveToMP3(waveFileName, mp3FileName);
        string movedmp3FileName = EditingProjectClipsDirectory + "\\" + Path.GetFileName(mp3FileName);
        File.Move(mp3FileName, movedmp3FileName);
        mp3FileName = movedmp3FileName;

        Track recTrack = editingProject.GetRecordingTrack;
        recTrack.audioRecordingClip.IsVisible = false;

        //Limit the new clip's width so it does not overlap the first existing clip to its right
        Clip firstClipRight = recTrack.Clips.FirstOrDefault(clp => clp.ClipLeftMs >= recTrack.audioRecordingClip.ClipVirtualStartMs);
        double addingClipWidthMs = firstClipRight == null ?
            recTrack.audioRecordingClip.ClipWidthMs :
            Math.Min(recTrack.audioRecordingClip.ClipWidthMs, firstClipRight.ClipLeftMs - recTrack.audioRecordingClip.ClipVirtualStartMs);
        AddNewAudioClipToTrack(Path.GetFileName(mp3FileName), recTrack, recTrack.audioRecordingClip.ClipVirtualStartMs, addingClipWidthMs, false, false);
        recTrack.audioRecordingClip.recordingWavePoints.Clear();
        editingProject.NeedsSaving = true;
    }

    double timeSegment = 0;

    private void AsioOut_AudioAvailable(object sender, AsioAudioAvailableEventArgs e)
    {
        //e.SamplesPerBuffer is typically 512 here
        var samples = new float[e.SamplesPerBuffer * e.InputBuffers.Length];
        e.GetAsInterleavedSamples(samples);
        //Debug.WriteLine("#samps=" + samples.Length.ToString());

        timeSegment += 11.8; //approximate duration of one 512-sample buffer at 44100 Hz (512/44100 s, roughly 11.6 ms)
        if (timeSegment >= 50)
        {
            //Throttled to roughly every 50 ms: update the level meter and the growing recording-clip waveform
            timeSegment = 0;
            float avVal = GetAverageSamples(samples);
            //Convert the average amplitude to a rough 0..1 display value on a log scale
            double percVal = Math.Abs(Math.Log(avVal) / 5);
            percVal = 1D - percVal - 0.25D;
            percVal = Double.IsNaN(percVal) ? 0 : Math.Max(0, percVal);
            Dispatcher.CurrentDispatcher.Invoke(() =>
            {
                Track recTrack = editingProject.GetRecordingTrack;
                if (recTrack != null)
                    try { recTrack.TrackOutputVolume = avVal; }
                    catch { Debug.WriteLine("recording track was null in audioavailable method"); }

                if (IsProjectRecording && CurrentPlayingPosMS >= RecordStartPosMS) //if (IsProjectRecording)
                {
                    recTrack.audioRecordingClip.ClipWidthMs = CurrentPlayingPosMS - RecordStartPosMS;
                    recTrack.audioRecordingClip.recordingWavePoints.Add(new Point(recTrack.audioRecordingClip.ClipWidthMs, percVal));
                    recTrack.audioRecordingClip.UpdateRecordingWavePoints();
                }
            }, DispatcherPriority.Background);
        }

        //Write the captured samples to the wave file once the play position has passed the record start
        if (IsProjectRecording && CurrentPlayingPosMS >= RecordStartPosMS) //if (IsProjectRecording)
            if (recordingWFWriter != null) recordingWFWriter.WriteSamples(samples, 0, samples.Length);
    }
}
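
//Sketch only (not wired up anywhere): a minimal pass-through monitoring handler along the lines of
//the comment at the end of ArmForAudioRecording. It copies the raw ASIO input buffers straight to
//the output buffers and sets WrittenToOutputBuffers = true, so no buffered provider is involved.
//The handler name, the scratch-buffer field, and the 4-bytes-per-sample assumption are illustrative,
//not part of the project; to use it you would subscribe it to playAsioOut.AudioAvailable instead of
//(or alongside) AsioOut_AudioAvailable.
public partial class ProjectPlayer
{
    //Reused scratch buffer for the pass-through copy (sized on first use)
    private byte[] passThroughScratch;

    private void AsioOut_AudioAvailable_PassThroughSketch(object sender, AsioAudioAvailableEventArgs e)
    {
        //Assumption: 4 bytes per sample (e.g. AsioSampleType.Int32LSB or Float32LSB); check e.AsioSampleType before relying on this
        int bytesPerBuffer = e.SamplesPerBuffer * 4;
        if (passThroughScratch == null || passThroughScratch.Length < bytesPerBuffer)
            passThroughScratch = new byte[bytesPerBuffer];

        int channels = Math.Min(e.InputBuffers.Length, e.OutputBuffers.Length);
        for (int ch = 0; ch < channels; ch++)
        {
            //Copy one channel of raw input straight to the corresponding output buffer
            System.Runtime.InteropServices.Marshal.Copy(e.InputBuffers[ch], passThroughScratch, 0, bytesPerBuffer);
            System.Runtime.InteropServices.Marshal.Copy(passThroughScratch, 0, e.OutputBuffers[ch], bytesPerBuffer);
        }

        //Tell NAudio the output buffers are already filled so it does not overwrite them
        e.WrittenToOutputBuffers = true;
    }
}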