|
Great work. It's .NET Framework 2.0, and today it could use some improvements, but the idea and effort are fantastic.
|
|
|
|
|
Why is it showing "Audio input device polling stopped"? It's stuck with this message. Please help — it's urgent.
|
|
|
|
|
Hello everybody,
Can anyone please help me with the following:
I used the open source code from mr. Morton to create a sound activated recorder.
The recorder works fine on some laptops/PCs; however, on others the quality of the recorded file is very bad, as if the sound is 'chopped'.
I placed an example on https://www.youtube.com/watch?v=G4HqlUGcqMU&feature=youtu.be
Is this about some bad entered buffering/sampling parameter, or is it a bigger problem?
I would be relieved if someone can help me figure this out.
Jef
This is the code:
// Form load: initializes recorder state and starts capturing when an input device exists;
// otherwise shows a localized "no audio device" error.
Quote: private void RecordForm_Load(object sender, EventArgs e)
{
// Feature flags and session state (fields declared elsewhere on the form).
_isPlayer = true; // audio output for testing
_isTest = false; // signal generation for testing
_isSaving = false;
_isShown = true;
_recordings = new List<string>();
_current = 0;
exit = false;
gedaan = false; // Dutch: "done" — set when shutting down
replaying = false;
// waveInGetNumDevs() == 0 means no capture device is installed.
if (WaveNative.waveInGetNumDevs() != 0)
{
if (_isPlayer == true)
_streamOut = new FifoStream();
_audioFrame = new AudioFrame(_isTest);
_audioFrame.IsDetectingEvents = true;
_audioFrame.AmplitudeThreshold = Properties.Settings.Default.AmpTreshold;
// The UI spinner shows the threshold in thousands.
numericUpDown1.Value = _audioFrame.AmplitudeThreshold / 1000;
_streamMemory = new MemoryStream();
Start();
buttonLive.BackColor = Color.Red;
}
else
{
noinput = true;
// Localized error message; 'taal' (Dutch: "language") selects the locale.
string message = "";
string title = "";
switch (taal)
{
case "NL":
message = "Geen audio-apparaat gedetecteerd ...";
title = "Fout";
break;
case "FR":
message = "Aucun appareil audio détecté ...";
title = "Erreur";
break;
case "EN":
message = "No audio device detected ...";
title = "Error";
break;
case "DE":
// NOTE(review): the German case is empty, so "DE" shows an empty MessageBox.
break;
}
MessageBox.Show(message, title, MessageBoxButtons.OK, MessageBoxIcon.Error);
}
}
// (Re)creates the recorder (and optional monitor player) at 44.1 kHz, 16-bit stereo.
private void Start()
{
Stop();
try
{
_waveFormat = new WaveFormat(44100, 16, 2);
// -1 = WAVE_MAPPER (default device); 8192*2-byte buffers, 3 buffers in the ring.
_recorder = new WaveInRecorder(-1, _waveFormat, 8192 * 2, 3, new BufferDoneEventHandler(DataArrived));
if (_isPlayer == true)
_player = new WaveOutPlayer(-1, _waveFormat, 8192 * 2, 3, new BufferFillEventHandler(Filler));
}
catch (Exception ex)
{
// NOTE(review): the exception is swallowed silently, so a failed device open
// leaves _recorder null with no feedback to the user.
//_recorder = null;
//_player = null;
//GC.Collect();
//Start();
}
}
// Disposes the recorder and (optionally) the player, then drops any queued playback data.
// The finally blocks guarantee the fields are cleared even when Dispose throws.
private void Stop()
{
if (_recorder != null)
try
{
//_recorder = null;
_recorder.Dispose();
}
finally
{
_recorder = null;
}
if (_isPlayer == true)
{
if (_player != null)
try
{
// NOTE(review): Dispose(true) is a project-specific overload — confirm its semantics.
_player.Dispose(true);
}
finally
{
_player = null;
}
// NOTE(review): _streamOut is assumed non-null here; it is only created in Load.
_streamOut.Flush(); // clear all pending data
}
}
// Playback callback: copies queued capture data into the wave-out buffer,
// or silence when not enough audio has been queued yet.
private void Filler(IntPtr data, int size)
{
    if (!_isPlayer)
        return;
    // Grow the scratch buffer when the device requests more than is allocated.
    if (_playerBuffer == null || _playerBuffer.Length < size)
        _playerBuffer = new byte[size];
    if (_streamOut.Length >= size)
    {
        _streamOut.Read(_playerBuffer, 0, size);
    }
    else
    {
        // Underrun: emit silence for this period.
        Array.Clear(_playerBuffer, 0, _playerBuffer.Length);
    }
    System.Runtime.InteropServices.Marshal.Copy(_playerBuffer, 0, data, size);
}
// Capture callback (runs on the recorder's worker thread): buffers audio, runs
// amplitude-event detection, drives playback/UI state, and writes finished clips to disk.
private void DataArrived(IntPtr data, int size)
{
if (!gedaan)
{
// While saving, append the raw captured bytes to the in-memory WAV body.
if (_isSaving == true)
{
byte[] recBuffer = new byte[size];
System.Runtime.InteropServices.Marshal.Copy(data, recBuffer, 0, size);
_streamMemory.Write(recBuffer, 0, recBuffer.Length);
}
// Reuse the analysis buffer; reallocate only when the callback size changes.
if (_recorderBuffer == null || _recorderBuffer.Length != size)
_recorderBuffer = new byte[size];
if (_recorderBuffer != null)
{
System.Runtime.InteropServices.Marshal.Copy(data, _recorderBuffer, 0, size);
// Echo the captured audio to the monitor output when enabled.
if (_isPlayer == true)
_streamOut.Write(_recorderBuffer, 0, _recorderBuffer.Length);
try
{
// Runs the amplitude/event analysis; sets _audioFrame.IsEventActive.
_audioFrame.Process(ref _recorderBuffer);
}
catch (Exception eks)
{
// NOTE(review): analysis failures are swallowed silently.
}
if (_audioFrame.IsEventActive == true)
{
//beeps afspringen (Dutch: trigger the beeps)
foreach (Form form in Application.OpenForms)
{
if (form.GetType() == typeof(MainForm))
{
//nog te testen (Dutch: still to be tested)
((MainForm)form).wordtOpgeroepen = true;
}
}
//player afspringen (Dutch: stop the player)
// Sound detected while replaying: abort playback and return to the live view.
try
{
controls.stop();
player.close();
_current = _recordings.Count();
replaying = false;
}
catch
{
}
buttonBack.BackColor = Control.DefaultBackColor;
buttonForward.BackColor = Control.DefaultBackColor;
buttonLive.BackColor = Color.Red;
if (_recorder != null)
{
//sh*t happens
// Cross-thread label update; Invoke can throw while the form is closing.
try
{
label1.Invoke(new UpdateLabelCallback(this.UpdateLabel));
}
catch (Exception ez)
{
}
}
// First detection starts a new clip; subsequent ones extend it.
if (_isSaving == false)
{
_sampleFilename = DateTime.Now.ToString("yyyy-MM-dd (HHmmss)") + ".wav";
_timeLastDetection = DateTime.Now;
_isSaving = true;
}
else
{
_timeLastDetection = DateTime.Now;
foreach (Form form in Application.OpenForms)
{
if (form.GetType() == typeof(MainForm))
{
((MainForm)form).updateRecBool();
}
}
}
//Invoke(new MethodInvoker(AmplitudeEvent));
}
else
{
// No event: after >1 s of silence, clear the "being called" flag on the main form.
if (!replaying)
{
if (DateTime.Now.Subtract(_timeLastDetection).Seconds > 1)
{
foreach (Form form in Application.OpenForms)
{
if (form.GetType() == typeof(MainForm))
{
//nog te testen
((MainForm)form).wordtOpgeroepen = false;
}
}
}
}
//
}
//1 is seconds to save
// NOTE(review): TimeSpan.Seconds wraps at 60; TotalSeconds would be the robust choice.
if (_isSaving == true && DateTime.Now.Subtract(_timeLastDetection).Seconds > 1)
{
// Ensure the output directory exists before writing the clip.
bool isExists = System.IO.Directory.Exists(Properties.Settings.Default.RecPath);
if (!isExists)
{
System.IO.Directory.CreateDirectory(Properties.Settings.Default.RecPath);
}
// Wrap the buffered PCM in a WAV stream and flush it to a timestamped file.
byte[] waveBuffer = new byte[16];
_streamWave = WaveStream.CreateStream(_streamMemory, _waveFormat);
waveBuffer = new byte[_streamWave.Length - _streamWave.Position];
_streamWave.Read(waveBuffer, 0, waveBuffer.Length);
//if (Properties.Settings.Default.SettingOutputPath != "")
_streamFile = new FileStream(Properties.Settings.Default.RecPath + "\\" + _sampleFilename, FileMode.Create);
/*else
_streamFile = new FileStream(_sampleFilename, FileMode.Create);*/
_streamFile.Write(waveBuffer, 0, waveBuffer.Length);
if (_streamWave != null) { _streamWave.Close(); }
if (_streamFile != null) { _streamFile.Close(); }
_streamMemory = new MemoryStream();
_isSaving = false;
_recordings.Add(Properties.Settings.Default.RecPath + "\\" + _sampleFilename);
_current = _recordings.Count();
try
{
label1.Invoke(new UpdateLabelCallback(this.UpdateLabel));
}
catch (Exception dddd)
{
}
// Cap the recording list at 999 entries.
if (_current == 999)
{
_current = 0;
_recordings.Clear();
}
//Invoke(new MethodInvoker(FileSavedEvent));
}
// Repaint the live waveform view.
_audioFrame.RenderTimeDomainLeft(ref pictureBoxTimeDomainLeft);
}
}
else
{
// Shutting down ('gedaan' = Dutch "done"): clear the main-form flag, best effort.
try
{
foreach (Form form in Application.OpenForms)
{
if (form.GetType() == typeof(MainForm))
{
//nog te testen
((MainForm)form).wordtOpgeroepen = false;
}
}
}
catch (Exception ekzep)
{
}
}
}
|
|
|
|
|
The program did not find my sound card. I use Windows 8.1 and Realtek sound card. Message: No input device detected.
The program fails at this code:
if (WaveNative.waveInGetNumDevs () == 0)
What is the solution?
Thank you.
|
|
|
|
|
// Stops capture and tears down the polling thread and the waveIn handle.
public void Dispose()
{
if (m_Thread != null)
try
{
// Signal the polling thread to leave its loop.
m_Finished = true;
// waveInReset returns all pending buffers to the application.
if (m_WaveIn != IntPtr.Zero)
WaveNative.waveInReset(m_WaveIn);
// NOTE(review): this variant omits WaitForAllBuffers(); Join() can still hang
// if a buffer callback never fires — see the surrounding discussion.
m_Thread.Join();
m_DoneProc = null;
FreeBuffers();
if (m_WaveIn != IntPtr.Zero)
WaveNative.waveInClose(m_WaveIn);
}
finally
{
// Always clear the fields, even if teardown threw.
m_Thread = null;
m_WaveIn = IntPtr.Zero;
}
GC.SuppressFinalize(this);
}
|
|
|
|
|
if (m_WaveIn != IntPtr.Zero)
WaveNative.waveInReset(m_WaveIn);
WaitForAllBuffers();
---> m_Thread.Abort();
m_Thread.Join();
m_DoneProc = null;
FreeBuffers();
if (m_WaveIn != IntPtr.Zero)
WaveNative.waveInClose(m_WaveIn);
and
// Workaround posted by the commenter: the wait loop is disabled so Dispose()
// no longer blocks on in-flight buffers (the reported hang on exit).
private void WaitForAllBuffers()
{
WaveInBuffer Buf = m_Buffers;
//while (Buf.NextBuffer != m_Buffers)
//{
// Buf.WaitFor();
// Buf = Buf.NextBuffer;
//}
}
then it works.
Can you help me reduce the draw area?
I want to draw only up to 500 Hz, not the complete spectrum.
Thank you
|
|
|
|
|
Hello everybody,
maybe you can help me. How can I reduce the frequency range — for example, to show only up to 5000 Hz?
Thank you for answering!!!
|
|
|
|
|
While this is an extremely useful example it could be a lot better.
There is a lot of copypasta going on, instead of having separate left and right, you'd be better to create single usercontrol and reuse that.
You're throwing ref around like candy, in many places it's not needed at all, ideally in a high level language like C# you should avoid ref and out, and use return.
There are a lot of two- and three-letter variable names, which can make it hard to understand what's going on (specifically the Fourier transform, which I'm trying to understand).
Finally changing the ArrayList in _fftLeftSpect and _fftRightSpect to a regular List<double[]> Type is far more efficient because you're adding to the beginning and removing from the end at each Process()
All that being said, I have found this extremely useful, and have (and still am) learned a lot from it, Thankyou for releasing it.
|
|
|
|
|
At run time the maximum frequency shows lowest and the minimum frequency shows highest. Why is that? Are max and min decibel values?
|
|
|
|
|
Yes, min and max are decibel variables.
|
|
|
|
|
//FormMain.cs
/* Copyright (C) 2008 Jeff Morton (jeffrey.raymond.morton@gmail.com)
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using System.IO;
namespace SoundCatcher
{
/// <summary>
/// Main window: captures audio via WaveInRecorder, runs amplitude-event detection
/// through AudioFrame, renders the visualizations, and saves detected events as
/// .wav clips. Fixes over the original: null-safe exception logging in Start()
/// (ex.InnerException is often null and crashed the error path), a null guard on
/// _streamOut.Flush() in Stop(), TimeSpan.TotalSeconds instead of Seconds (which
/// wraps at 60) in DataArrived(), and short-circuit && in FormMain_SizeChanged.
/// </summary>
public partial class FormMain : Form
{
    // --- Audio pipeline state -------------------------------------------------
    private WaveInRecorder _recorder;        // waveIn capture wrapper
    private byte[] _recorderBuffer;          // scratch copy of the latest capture buffer
    private WaveOutPlayer _player;           // optional monitoring output
    private byte[] _playerBuffer;            // scratch buffer for the playback callback
    private WaveFormat _waveFormat;
    private AudioFrame _audioFrame;          // amplitude-event detection + rendering
    private FifoStream _streamOut;           // capture -> playback queue (used when _isPlayer)
    private MemoryStream _streamMemory;      // accumulates raw PCM while an event is being saved
    private Stream _streamWave;              // WAV-framed view over _streamMemory
    private FileStream _streamFile;          // destination .wav file
    private bool _isPlayer = false; // audio output for testing
    private bool _isTest = false; // signal generation for testing
    private bool _isSaving = false;          // true while a detected event is being recorded
    private bool _isShown = true;            // false while the form is minimized to the tray
    private string _sampleFilename;          // file name of the clip currently being saved
    private DateTime _timeLastDetection;     // timestamp of the most recent amplitude event

    public FormMain()
    {
        InitializeComponent();
    }

    /// <summary>
    /// Reports input-device availability and, when a device exists, wires up the
    /// analysis frame from the saved settings and starts polling.
    /// </summary>
    private void FormMain_Load(object sender, EventArgs e)
    {
        if (WaveNative.waveInGetNumDevs() == 0)
        {
            textBoxConsole.AppendText(DateTime.Now.ToString() + " : No audio input devices detected\r\n");
        }
        else
        {
            textBoxConsole.AppendText(DateTime.Now.ToString() + " : Audio input device detected\r\n");
            if (_isPlayer == true)
                _streamOut = new FifoStream();
            _audioFrame = new AudioFrame(_isTest);
            _audioFrame.IsDetectingEvents = Properties.Settings.Default.SettingIsDetectingEvents;
            _audioFrame.AmplitudeThreshold = Properties.Settings.Default.SettingAmplitudeThreshold;
            _streamMemory = new MemoryStream();
            Start();
        }
    }

    /// <summary>Repaints all visualizations after a resize.</summary>
    private void FormMain_Resize(object sender, EventArgs e)
    {
        if (_audioFrame != null)
        {
            _audioFrame.RenderTimeDomainLeft(ref pictureBoxTimeDomainLeft);
            _audioFrame.RenderFrequencyDomainLeft(ref pictureBoxFrequencyDomainLeft, Properties.Settings.Default.SettingSamplesPerSecond);
            _audioFrame.RenderSpectrogramLeft(ref pictureBoxSpectrogramLeft);
            if (Properties.Settings.Default.SettingChannels == 2)
            {
                _audioFrame.RenderTimeDomainRight(ref pictureBoxTimeDomainRight);
                _audioFrame.RenderFrequencyDomainRight(ref pictureBoxFrequencyDomainRight, Properties.Settings.Default.SettingSamplesPerSecond);
                _audioFrame.RenderSpectrogramRight(ref pictureBoxSpectrogramRight);
            }
        }
    }

    /// <summary>Minimizing the form hides it to the notification area.</summary>
    private void FormMain_SizeChanged(object sender, EventArgs e)
    {
        // FIX: was the non-short-circuit '&'; '&&' is the idiomatic boolean form.
        if (_isShown && this.WindowState == FormWindowState.Minimized)
        {
            foreach (Form f in this.MdiChildren)
            {
                f.WindowState = FormWindowState.Normal;
            }
            this.ShowInTaskbar = false;
            this.Visible = false;
            notifyIcon1.Visible = true;
            _isShown = false;
        }
    }

    /// <summary>
    /// Stops capture, flushes any in-progress recording to disk, and closes all streams.
    /// </summary>
    private void FormMain_FormClosing(object sender, FormClosingEventArgs e)
    {
        Stop();
        if (_isSaving == true)
        {
            // Persist the partially recorded clip before shutting down.
            byte[] waveBuffer = new byte[Properties.Settings.Default.SettingBitsPerSample];
            _streamWave = WaveStream.CreateStream(_streamMemory, _waveFormat);
            waveBuffer = new byte[_streamWave.Length - _streamWave.Position];
            _streamWave.Read(waveBuffer, 0, waveBuffer.Length);
            if (Properties.Settings.Default.SettingOutputPath != "")
                _streamFile = new FileStream(Properties.Settings.Default.SettingOutputPath + "\\" + _sampleFilename, FileMode.Create);
            else
                _streamFile = new FileStream(_sampleFilename, FileMode.Create);
            _streamFile.Write(waveBuffer, 0, waveBuffer.Length);
            _isSaving = false;
        }
        // Close every stream; the finally blocks clear the fields even if Close() throws.
        if (_streamOut != null)
            try
            {
                _streamOut.Close();
            }
            finally
            {
                _streamOut = null;
            }
        if (_streamWave != null)
            try
            {
                _streamWave.Close();
            }
            finally
            {
                _streamWave = null;
            }
        if (_streamFile != null)
            try
            {
                _streamFile.Close();
            }
            finally
            {
                _streamFile = null;
            }
        if (_streamMemory != null)
            try
            {
                _streamMemory.Close();
            }
            finally
            {
                _streamMemory = null;
            }
    }

    /// <summary>Restores the window from the notification area.</summary>
    private void notifyIcon1_MouseDoubleClick(object sender, MouseEventArgs e)
    {
        notifyIcon1.Visible = false;
        this.Visible = true;
        this.ShowInTaskbar = true;
        this.WindowState = FormWindowState.Normal;
        _isShown = true;
    }

    private void aboutToolStripMenuItem_Click(object sender, EventArgs e)
    {
        FormAboutDialog form = new FormAboutDialog();
        form.Show();
    }

    private void exitToolStripMenuItem_Click(object sender, EventArgs e)
    {
        this.Close();
    }

    /// <summary>Applies detection options without restarting the audio pipeline.</summary>
    private void optionsToolStripMenuItem_Click(object sender, EventArgs e)
    {
        FormOptionsDialog form = new FormOptionsDialog();
        if (form.ShowDialog() == DialogResult.OK)
        {
            _audioFrame.IsDetectingEvents = form.IsDetectingEvents;
            _audioFrame.AmplitudeThreshold = form.AmplitudeThreshold;
        }
    }

    /// <summary>
    /// Applies format settings: stops the pipeline, flushes any in-progress clip,
    /// closes all streams, then rebuilds everything from the new settings.
    /// </summary>
    private void settingsToolStripMenuItem_Click(object sender, EventArgs e)
    {
        FormSettingsDialog form = new FormSettingsDialog();
        if (form.ShowDialog() == DialogResult.OK)
        {
            Stop();
            if (_isSaving == true)
            {
                byte[] waveBuffer = new byte[Properties.Settings.Default.SettingBitsPerSample];
                _streamWave = WaveStream.CreateStream(_streamMemory, _waveFormat);
                waveBuffer = new byte[_streamWave.Length - _streamWave.Position];
                _streamWave.Read(waveBuffer, 0, waveBuffer.Length);
                // NOTE(review): unlike FormClosing, this path ignores SettingOutputPath
                // and writes to the working directory — kept as-is for compatibility.
                _streamFile = new FileStream(_sampleFilename, FileMode.Create);
                _streamFile.Write(waveBuffer, 0, waveBuffer.Length);
                _isSaving = false;
            }
            if (_streamOut != null)
                try
                {
                    _streamOut.Close();
                }
                finally
                {
                    _streamOut = null;
                }
            if (_streamWave != null)
                try
                {
                    _streamWave.Close();
                }
                finally
                {
                    _streamWave = null;
                }
            if (_streamFile != null)
                try
                {
                    _streamFile.Close();
                }
                finally
                {
                    _streamFile = null;
                }
            if (_streamMemory != null)
                try
                {
                    _streamMemory.Close();
                }
                finally
                {
                    _streamMemory = null;
                }
            // Rebuild the pipeline with the new settings.
            if (_isPlayer == true)
                _streamOut = new FifoStream();
            _audioFrame = new AudioFrame(_isTest);
            _audioFrame.IsDetectingEvents = Properties.Settings.Default.SettingIsDetectingEvents;
            _audioFrame.AmplitudeThreshold = Properties.Settings.Default.SettingAmplitudeThreshold;
            _streamMemory = new MemoryStream();
            Start();
        }
    }

    /// <summary>
    /// (Re)creates the recorder (and optional player) from the saved settings and
    /// logs the active configuration to the console box.
    /// </summary>
    private void Start()
    {
        Stop();
        try
        {
            _waveFormat = new WaveFormat(Properties.Settings.Default.SettingSamplesPerSecond, Properties.Settings.Default.SettingBitsPerSample, Properties.Settings.Default.SettingChannels);
            _recorder = new WaveInRecorder(Properties.Settings.Default.SettingAudioInputDevice, _waveFormat, Properties.Settings.Default.SettingBytesPerFrame * Properties.Settings.Default.SettingChannels, 3, new BufferDoneEventHandler(DataArrived));
            if (_isPlayer == true)
                _player = new WaveOutPlayer(Properties.Settings.Default.SettingAudioOutputDevice, _waveFormat, Properties.Settings.Default.SettingBytesPerFrame * Properties.Settings.Default.SettingChannels, 3, new BufferFillEventHandler(Filler));
            textBoxConsole.AppendText(DateTime.Now.ToString() + " : Audio input device polling started\r\n");
            textBoxConsole.AppendText(DateTime.Now + " : Device = " + Properties.Settings.Default.SettingAudioInputDevice.ToString() + "\r\n");
            textBoxConsole.AppendText(DateTime.Now + " : Channels = " + Properties.Settings.Default.SettingChannels.ToString() + "\r\n");
            textBoxConsole.AppendText(DateTime.Now + " : Bits per sample = " + Properties.Settings.Default.SettingBitsPerSample.ToString() + "\r\n");
            textBoxConsole.AppendText(DateTime.Now + " : Samples per second = " + Properties.Settings.Default.SettingSamplesPerSecond.ToString() + "\r\n");
            textBoxConsole.AppendText(DateTime.Now + " : Frame size = " + Properties.Settings.Default.SettingBytesPerFrame.ToString() + "\r\n");
        }
        catch (Exception ex)
        {
            // FIX: ex.InnerException is null for exceptions thrown directly (e.g. by
            // WaveInHelper.Try), which made this error path itself throw a
            // NullReferenceException. Log the innermost available exception instead.
            textBoxConsole.AppendText(DateTime.Now + " : " + (ex.InnerException ?? ex).ToString() + "\r\n");
        }
    }

    /// <summary>Disposes the recorder/player and drops any queued playback data.</summary>
    private void Stop()
    {
        if (_recorder != null)
            try
            {
                _recorder.Dispose();
            }
            finally
            {
                _recorder = null;
            }
        if (_isPlayer == true)
        {
            if (_player != null)
                try
                {
                    _player.Dispose();
                }
                finally
                {
                    _player = null;
                }
            // FIX: guard against _streamOut being null (e.g. Load never created it).
            if (_streamOut != null)
                _streamOut.Flush(); // clear all pending data
        }
        textBoxConsole.AppendText(DateTime.Now.ToString() + " : Audio input device polling stopped\r\n");
    }

    /// <summary>
    /// Playback callback: fills the wave-out buffer from the capture queue,
    /// or with silence on underrun.
    /// </summary>
    private void Filler(IntPtr data, int size)
    {
        if (_isPlayer == true)
        {
            if (_playerBuffer == null || _playerBuffer.Length < size)
                _playerBuffer = new byte[size];
            if (_streamOut.Length >= size)
                _streamOut.Read(_playerBuffer, 0, size);
            else
                for (int i = 0; i < _playerBuffer.Length; i++)
                    _playerBuffer[i] = 0;
            System.Runtime.InteropServices.Marshal.Copy(_playerBuffer, 0, data, size);
        }
    }

    /// <summary>
    /// Capture callback (runs on the recorder's worker thread): buffers audio,
    /// runs event detection, saves finished clips, and repaints the views.
    /// </summary>
    private void DataArrived(IntPtr data, int size)
    {
        // While saving, append the raw captured bytes to the in-memory WAV body.
        if (_isSaving == true)
        {
            byte[] recBuffer = new byte[size];
            System.Runtime.InteropServices.Marshal.Copy(data, recBuffer, 0, size);
            _streamMemory.Write(recBuffer, 0, recBuffer.Length);
        }
        // Reuse the analysis buffer; reallocate only when the callback size changes.
        if (_recorderBuffer == null || _recorderBuffer.Length != size)
            _recorderBuffer = new byte[size];
        if (_recorderBuffer != null)
        {
            System.Runtime.InteropServices.Marshal.Copy(data, _recorderBuffer, 0, size);
            if (_isPlayer == true)
                _streamOut.Write(_recorderBuffer, 0, _recorderBuffer.Length);
            // Runs the amplitude analysis; sets _audioFrame.IsEventActive.
            _audioFrame.Process(ref _recorderBuffer);
            if (_audioFrame.IsEventActive == true)
            {
                // First detection starts a new clip; later ones just extend it.
                if (_isSaving == false && Properties.Settings.Default.SettingIsSaving == true)
                {
                    _sampleFilename = DateTime.Now.ToString("yyyyMMddHHmmss") + ".wav";
                    _timeLastDetection = DateTime.Now;
                    _isSaving = true;
                }
                else
                {
                    _timeLastDetection = DateTime.Now;
                }
                Invoke(new MethodInvoker(AmplitudeEvent));
            }
            // FIX: use TotalSeconds — TimeSpan.Seconds wraps at 60, so thresholds of
            // a minute or more would never (reliably) trigger the save.
            if (_isSaving == true && DateTime.Now.Subtract(_timeLastDetection).TotalSeconds > Properties.Settings.Default.SettingSecondsToSave)
            {
                // Wrap the buffered PCM in a WAV stream and flush it to the output file.
                byte[] waveBuffer = new byte[Properties.Settings.Default.SettingBitsPerSample];
                _streamWave = WaveStream.CreateStream(_streamMemory, _waveFormat);
                waveBuffer = new byte[_streamWave.Length - _streamWave.Position];
                _streamWave.Read(waveBuffer, 0, waveBuffer.Length);
                if (Properties.Settings.Default.SettingOutputPath != "")
                    _streamFile = new FileStream(Properties.Settings.Default.SettingOutputPath + "\\" + _sampleFilename, FileMode.Create);
                else
                    _streamFile = new FileStream(_sampleFilename, FileMode.Create);
                _streamFile.Write(waveBuffer, 0, waveBuffer.Length);
                if (_streamWave != null) { _streamWave.Close(); }
                if (_streamFile != null) { _streamFile.Close(); }
                _streamMemory = new MemoryStream();
                _isSaving = false;
                Invoke(new MethodInvoker(FileSavedEvent));
            }
            // Repaint all visualizations with the new frame.
            _audioFrame.RenderTimeDomainLeft(ref pictureBoxTimeDomainLeft);
            _audioFrame.RenderFrequencyDomainLeft(ref pictureBoxFrequencyDomainLeft, Properties.Settings.Default.SettingSamplesPerSecond);
            _audioFrame.RenderSpectrogramLeft(ref pictureBoxSpectrogramLeft);
            if (Properties.Settings.Default.SettingChannels == 2)
            {
                _audioFrame.RenderTimeDomainRight(ref pictureBoxTimeDomainRight);
                _audioFrame.RenderFrequencyDomainRight(ref pictureBoxFrequencyDomainRight, Properties.Settings.Default.SettingSamplesPerSecond);
                _audioFrame.RenderSpectrogramRight(ref pictureBoxSpectrogramRight);
            }
        }
    }

    /// <summary>Marshaled to the UI thread: shows the time of the last event.</summary>
    private void AmplitudeEvent()
    {
        toolStripStatusLabel1.Text = "Last event: " + _timeLastDetection.ToString();
    }

    /// <summary>Marshaled to the UI thread: logs the saved file name.</summary>
    private void FileSavedEvent()
    {
        textBoxConsole.AppendText(_timeLastDetection.ToString() + " : File " + _sampleFilename + " saved\r\n");
    }
}
}
|
|
|
|
|
// WaveIn.cs
// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY
// KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR
// PURPOSE.
//
// This material may not be duplicated in whole or in part, except for
// personal use, without the express written consent of the author.
//
// Email: ianier@hotmail.com
//
// Copyright (C) 1999-2003 Ianier Munoz. All Rights Reserved.
using System;
using System.Threading;
using System.Runtime.InteropServices;
namespace SoundCatcher
{
// Helper for checking waveIn* return codes.
internal class WaveInHelper
{
    // Throws when the given MMRESULT is not MMSYSERR_NOERROR;
    // the exception message is the raw numeric error code.
    public static void Try(int err)
    {
        if (err == WaveNative.MMSYSERR_NOERROR)
            return;
        throw new Exception(err.ToString());
    }
}
public delegate void BufferDoneEventHandler(IntPtr data, int size);
// One pinned capture buffer in WaveInRecorder's circular list.
internal class WaveInBuffer : IDisposable
{
public WaveInBuffer NextBuffer; // next buffer in the circular list
private AutoResetEvent m_RecordEvent = new AutoResetEvent(false); // signaled when the driver returns this buffer
private IntPtr m_WaveIn; // owning waveIn device handle
private WaveNative.WaveHdr m_Header;
private byte[] m_HeaderData; // sample storage, pinned for the driver
private GCHandle m_HeaderHandle;
private GCHandle m_HeaderDataHandle;
private bool m_Recording;
// Counters shared by all buffers; consulted by WaitFor() as a hang-on-exit workaround.
internal static int recNum = 0; // buffers currently queued with the driver
internal static int bufNum = 0; // total buffers allocated this session
// waveIn callback: MM_WIM_DATA means the driver finished filling a buffer.
internal static void WaveInProc(IntPtr hdrvr, int uMsg, int dwUser, ref WaveNative.WaveHdr wavhdr, int dwParam2)
{
if (uMsg == WaveNative.MM_WIM_DATA)
{
try
{
// dwUser carries a GCHandle back to the managed buffer object.
GCHandle h = (GCHandle)wavhdr.dwUser;
WaveInBuffer buf = (WaveInBuffer)h.Target;
buf.OnCompleted();
}
catch
{
}
}
}
// Allocates and pins the data buffer and prepares the header with the driver.
public WaveInBuffer(IntPtr waveInHandle, int size)
{
bufNum++;
m_WaveIn = waveInHandle;
// NOTE(review): GCHandle.Alloc pins a boxed copy of the header struct, not the
// field itself, and the GCHandle stored in dwUser is never freed in Dispose().
m_HeaderHandle = GCHandle.Alloc(m_Header, GCHandleType.Pinned);
m_Header.dwUser = (IntPtr)GCHandle.Alloc(this);
m_HeaderData = new byte[size];
m_HeaderDataHandle = GCHandle.Alloc(m_HeaderData, GCHandleType.Pinned);
m_Header.lpData = m_HeaderDataHandle.AddrOfPinnedObject();
m_Header.dwBufferLength = size;
WaveInHelper.Try(WaveNative.waveInPrepareHeader(m_WaveIn, ref m_Header, Marshal.SizeOf(m_Header)));
}
~WaveInBuffer()
{
Dispose();
}
// Unprepares the header and releases the pinned handles.
public void Dispose()
{
if (m_Header.lpData != IntPtr.Zero)
{
WaveNative.waveInUnprepareHeader(m_WaveIn, ref m_Header, Marshal.SizeOf(m_Header));
m_HeaderHandle.Free();
m_Header.lpData = IntPtr.Zero;
}
m_RecordEvent.Close();
if (m_HeaderDataHandle.IsAllocated)
m_HeaderDataHandle.Free();
GC.SuppressFinalize(this);
}
// Buffer size in bytes.
public int Size
{
get { return m_Header.dwBufferLength; }
}
// Pointer to the pinned sample data.
public IntPtr Data
{
get { return m_Header.lpData; }
}
// Queues this buffer with the driver; false when waveInAddBuffer failed.
public bool Record()
{
lock (this)
{
recNum++;
m_RecordEvent.Reset();
m_Recording = WaveNative.waveInAddBuffer(m_WaveIn, ref m_Header, Marshal.SizeOf(m_Header)) == WaveNative.MMSYSERR_NOERROR;
return m_Recording;
}
}
// Blocks until the driver returns this buffer.
public void WaitFor()
{
//here we have hang on exit
// Workaround: skip waiting when nearly no buffers are queued, to avoid the exit hang.
if (recNum < 2 && bufNum > 1)
return;
if (m_Recording)
m_Recording = m_RecordEvent.WaitOne();
else
Thread.Sleep(0);
}
// Called from WaveInProc when the driver completes this buffer.
private void OnCompleted()
{
recNum--;
m_RecordEvent.Set();
m_Recording = false;
}
}
// Records audio via the waveIn API using a circular list of buffers and a polling thread.
public class WaveInRecorder : IDisposable
{
private IntPtr m_WaveIn; // waveIn device handle
private WaveInBuffer m_Buffers; // linked list
private WaveInBuffer m_CurrentBuffer;
private Thread m_Thread; // polling thread running ThreadProc
private BufferDoneEventHandler m_DoneProc; // client callback invoked per filled buffer
private bool m_Finished; // set by Dispose to stop the polling thread
private WaveNative.WaveDelegate m_BufferProc = new WaveNative.WaveDelegate(WaveInBuffer.WaveInProc);
// Number of installed capture devices.
public static int DeviceCount
{
get { return WaveNative.waveInGetNumDevs(); }
}
// Opens the device, queues bufferCount buffers of bufferSize bytes, and starts capture.
public WaveInRecorder(int device, WaveFormat format, int bufferSize, int bufferCount, BufferDoneEventHandler doneProc)
{
// Reset the static workaround counters for a fresh session.
WaveInBuffer.bufNum = WaveInBuffer.recNum = 0;
m_DoneProc = doneProc;
WaveInHelper.Try(WaveNative.waveInOpen(out m_WaveIn, device, format, m_BufferProc, IntPtr.Zero, WaveNative.CALLBACK_FUNCTION));
AllocateBuffers(bufferSize, bufferCount);
for (int i = 0; i < bufferCount; i++)
{
SelectNextBuffer();
m_CurrentBuffer.Record();
}
WaveInHelper.Try(WaveNative.waveInStart(m_WaveIn));
m_Thread = new Thread(new ThreadStart(ThreadProc));
m_Thread.Start();
}
~WaveInRecorder()
{
Dispose();
}
// Stops capture, joins the polling thread, and releases the device.
public void Dispose()
{
if (m_Thread != null)
try
{
m_Finished = true;
// waveInReset returns all queued buffers, releasing pending waits.
if (m_WaveIn != IntPtr.Zero)
WaveNative.waveInReset(m_WaveIn);
WaitForAllBuffers();
m_Thread.Join();
m_DoneProc = null;
FreeBuffers();
if (m_WaveIn != IntPtr.Zero)
WaveNative.waveInClose(m_WaveIn);
}
finally
{
m_Thread = null;
m_WaveIn = IntPtr.Zero;
}
GC.SuppressFinalize(this);
}
// Polling loop: wait for each buffer, hand it to the client, then requeue it.
private void ThreadProc()
{
while (!m_Finished)
{
Advance();
if (m_DoneProc != null && !m_Finished && m_CurrentBuffer != null)
m_DoneProc(m_CurrentBuffer.Data, m_CurrentBuffer.Size);
if (m_CurrentBuffer != null) m_CurrentBuffer.Record();
}
}
// Builds the circular list of bufferCount pinned buffers.
private void AllocateBuffers(int bufferSize, int bufferCount)
{
FreeBuffers();
if (bufferCount > 0)
{
m_Buffers = new WaveInBuffer(m_WaveIn, bufferSize);
WaveInBuffer Prev = m_Buffers;
try
{
for (int i = 1; i < bufferCount; i++)
{
WaveInBuffer Buf = new WaveInBuffer(m_WaveIn, bufferSize);
Prev.NextBuffer = Buf;
Prev = Buf;
}
}
finally
{
// Close the ring even if a buffer allocation failed.
Prev.NextBuffer = m_Buffers;
}
}
}
// Disposes every buffer in the circular list exactly once.
private void FreeBuffers()
{
m_CurrentBuffer = null;
if (m_Buffers != null)
{
WaveInBuffer First = m_Buffers;
m_Buffers = null;
WaveInBuffer Current = First;
do
{
WaveInBuffer Next = Current.NextBuffer;
Current.Dispose();
Current = Next;
} while (Current != First);
}
}
// Moves to the next buffer and blocks until the driver has filled it.
private void Advance()
{
SelectNextBuffer();
m_CurrentBuffer.WaitFor();
}
private void SelectNextBuffer()
{
m_CurrentBuffer = m_CurrentBuffer == null ? m_Buffers : m_CurrentBuffer.NextBuffer;
}
// Waits for all but the first buffer; called during Dispose before Join().
private void WaitForAllBuffers()
{
WaveInBuffer Buf = m_Buffers;
while (Buf.NextBuffer != m_Buffers)
{
Buf.WaitFor();
Buf = Buf.NextBuffer;
}
}
}
}
|
|
|
|
|
// WaveOut.cs
// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY
// KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR
// PURPOSE.
//
// This material may not be duplicated in whole or in part, except for
// personal use, without the express written consent of the author.
//
// Email: ianier@hotmail.com
//
// Copyright (C) 1999-2003 Ianier Munoz. All Rights Reserved.
using System;
using System.Threading;
using System.Runtime.InteropServices;
namespace SoundCatcher
{
// Helper for checking waveOut* return codes.
internal class WaveOutHelper
{
    // Throws when the given MMRESULT is not MMSYSERR_NOERROR;
    // the exception message is the raw numeric error code.
    public static void Try(int err)
    {
        if (err == WaveNative.MMSYSERR_NOERROR)
            return;
        throw new Exception(err.ToString());
    }
}
public delegate void BufferFillEventHandler(IntPtr data, int size);
// One pinned playback buffer in WaveOutPlayer's circular list.
internal class WaveOutBuffer : IDisposable
{
public WaveOutBuffer NextBuffer; // next buffer in the circular list
private AutoResetEvent m_PlayEvent = new AutoResetEvent(false); // signaled when the driver finishes this buffer
private IntPtr m_WaveOut; // owning waveOut device handle
private WaveNative.WaveHdr m_Header;
private byte[] m_HeaderData; // sample storage, pinned for the driver
private GCHandle m_HeaderHandle;
private GCHandle m_HeaderDataHandle;
// Counters shared by all buffers; consulted by WaitFor() as a hang workaround.
internal static int bufNum = 0; // total buffers allocated this session
internal static int recNum = 0; // buffers currently queued with the driver
private bool m_Playing;
// waveOut callback: MM_WOM_DONE means the driver finished playing a buffer.
internal static void WaveOutProc(IntPtr hdrvr, int uMsg, int dwUser, ref WaveNative.WaveHdr wavhdr, int dwParam2)
{
if (uMsg == WaveNative.MM_WOM_DONE)
{
try
{
// dwUser carries a GCHandle back to the managed buffer object.
GCHandle h = (GCHandle)wavhdr.dwUser;
WaveOutBuffer buf = (WaveOutBuffer)h.Target;
buf.OnCompleted();
}
catch
{
}
}
}
// Allocates and pins the data buffer and prepares the header with the driver.
public WaveOutBuffer(IntPtr waveOutHandle, int size)
{
bufNum++;
m_WaveOut = waveOutHandle;
// NOTE(review): same caveats as WaveInBuffer — boxed-copy pin of the header
// struct, and the dwUser GCHandle is never freed in Dispose().
m_HeaderHandle = GCHandle.Alloc(m_Header, GCHandleType.Pinned);
m_Header.dwUser = (IntPtr)GCHandle.Alloc(this);
m_HeaderData = new byte[size];
m_HeaderDataHandle = GCHandle.Alloc(m_HeaderData, GCHandleType.Pinned);
m_Header.lpData = m_HeaderDataHandle.AddrOfPinnedObject();
m_Header.dwBufferLength = size;
WaveOutHelper.Try(WaveNative.waveOutPrepareHeader(m_WaveOut, ref m_Header, Marshal.SizeOf(m_Header)));
}
~WaveOutBuffer()
{
Dispose();
}
// Unprepares the header and releases the pinned handles.
public void Dispose()
{
if (m_Header.lpData != IntPtr.Zero)
{
WaveNative.waveOutUnprepareHeader(m_WaveOut, ref m_Header, Marshal.SizeOf(m_Header));
m_HeaderHandle.Free();
m_Header.lpData = IntPtr.Zero;
}
m_PlayEvent.Close();
if (m_HeaderDataHandle.IsAllocated)
m_HeaderDataHandle.Free();
GC.SuppressFinalize(this);
}
// Buffer size in bytes.
public int Size
{
get { return m_Header.dwBufferLength; }
}
// Pointer to the pinned sample data.
public IntPtr Data
{
get { return m_Header.lpData; }
}
// Queues this buffer for playback; false when waveOutWrite failed.
public bool Play()
{
recNum++;
lock (this)
{
m_PlayEvent.Reset();
m_Playing = WaveNative.waveOutWrite(m_WaveOut, ref m_Header, Marshal.SizeOf(m_Header)) == WaveNative.MMSYSERR_NOERROR;
return m_Playing;
}
}
// Blocks until the driver finishes playing this buffer.
public void WaitFor()
{
// Workaround: skip waiting when nearly no buffers are queued, to avoid a hang.
if (recNum < 2 && bufNum > 1)
return;
if (m_Playing)
{
m_Playing = m_PlayEvent.WaitOne();
}
else
{
Thread.Sleep(0);
}
}
// Called from WaveOutProc when the driver completes this buffer.
public void OnCompleted()
{
recNum--;
m_PlayEvent.Set();
m_Playing = false;
}
}
// Plays audio via the waveOut API using a circular list of buffers and a feeder thread.
public class WaveOutPlayer : IDisposable
{
private IntPtr m_WaveOut; // waveOut device handle
private WaveOutBuffer m_Buffers; // linked list
private WaveOutBuffer m_CurrentBuffer;
private Thread m_Thread; // feeder thread running ThreadProc
private BufferFillEventHandler m_FillProc; // client callback that supplies audio data
private bool m_Finished; // set by Dispose to stop the feeder thread
private byte m_zero; // silence value: 128 for 8-bit PCM, 0 otherwise
private WaveNative.WaveDelegate m_BufferProc = new WaveNative.WaveDelegate(WaveOutBuffer.WaveOutProc);
// Number of installed output devices.
public static int DeviceCount
{
get { return WaveNative.waveOutGetNumDevs(); }
}
// Opens the device, allocates the buffer ring, and starts the feeder thread.
public WaveOutPlayer(int device, WaveFormat format, int bufferSize, int bufferCount, BufferFillEventHandler fillProc)
{
// 8-bit PCM is unsigned, so silence is the midpoint (128) rather than 0.
m_zero = format.wBitsPerSample == 8 ? (byte)128 : (byte)0;
m_FillProc = fillProc;
WaveOutHelper.Try(WaveNative.waveOutOpen(out m_WaveOut, device, format, m_BufferProc, IntPtr.Zero, WaveNative.CALLBACK_FUNCTION));
AllocateBuffers(bufferSize, bufferCount);
m_Thread = new Thread(new ThreadStart(ThreadProc));
m_Thread.Start();
}
~WaveOutPlayer()
{
Dispose();
}
// Stops playback, joins the feeder thread, and releases the device.
public void Dispose()
{
if (m_Thread != null)
try
{
m_Finished = true;
// waveOutReset returns all queued buffers, releasing pending waits.
if (m_WaveOut != IntPtr.Zero)
WaveNative.waveOutReset(m_WaveOut);
m_Thread.Join();
m_FillProc = null;
FreeBuffers();
if (m_WaveOut != IntPtr.Zero)
WaveNative.waveOutClose(m_WaveOut);
}
finally
{
m_Thread = null;
m_WaveOut = IntPtr.Zero;
}
GC.SuppressFinalize(this);
}
// Feeder loop: wait for a free buffer, fill it (from the client or with silence), play it.
private void ThreadProc()
{
while (!m_Finished)
{
Advance();
if (m_FillProc != null && !m_Finished)
m_FillProc(m_CurrentBuffer.Data, m_CurrentBuffer.Size);
else
{
// zero out buffer
byte v = m_zero;
byte[] b = new byte[m_CurrentBuffer.Size];
for (int i = 0; i < b.Length; i++)
b[i] = v;
Marshal.Copy(b, 0, m_CurrentBuffer.Data, b.Length);
}
m_CurrentBuffer.Play();
}
// Drain whatever is still queued before the thread exits.
WaitForAllBuffers();
}
// Builds the circular list of bufferCount pinned buffers.
private void AllocateBuffers(int bufferSize, int bufferCount)
{
FreeBuffers();
if (bufferCount > 0)
{
m_Buffers = new WaveOutBuffer(m_WaveOut, bufferSize);
WaveOutBuffer Prev = m_Buffers;
try
{
for (int i = 1; i < bufferCount; i++)
{
WaveOutBuffer Buf = new WaveOutBuffer(m_WaveOut, bufferSize);
Prev.NextBuffer = Buf;
Prev = Buf;
}
}
finally
{
// Close the ring even if a buffer allocation failed.
Prev.NextBuffer = m_Buffers;
}
}
}
// Disposes every buffer in the circular list exactly once.
private void FreeBuffers()
{
m_CurrentBuffer = null;
if (m_Buffers != null)
{
WaveOutBuffer First = m_Buffers;
m_Buffers = null;
WaveOutBuffer Current = First;
do
{
WaveOutBuffer Next = Current.NextBuffer;
Current.Dispose();
Current = Next;
} while (Current != First);
}
}
// Moves to the next buffer and blocks until the driver has finished with it.
private void Advance()
{
m_CurrentBuffer = m_CurrentBuffer == null ? m_Buffers : m_CurrentBuffer.NextBuffer;
m_CurrentBuffer.WaitFor();
}
// Waits for all but the first buffer; called when the feeder thread exits.
private void WaitForAllBuffers()
{
WaveOutBuffer Buf = m_Buffers;
while (Buf.NextBuffer != m_Buffers)
{
Buf.WaitFor();
Buf = Buf.NextBuffer;
}
}
}
}
|
|
|
|
|
// AudioFrame.cs
/* Copyright (C) 2008 Jeff Morton (jeffrey.raymond.morton@gmail.com)
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */
using System;
using System.Collections;
using System.Drawing;
using System.Drawing.Imaging;
using System.Windows.Forms;
namespace SoundCatcher
{
class AudioFrame
{
private double[] _waveLeft; // left-channel samples of the current frame (filled in Process)
private double[] _fftLeft; // presumably FFT output for the left channel — computed outside this view
private ArrayList _fftLeftSpect = new ArrayList(); // presumably rolling spectrogram history (left)
private int _maxHeightLeftSpect = 0;
private double[] _waveRight; // right-channel samples (stereo mode only)
private double[] _fftRight; // presumably FFT output for the right channel
private ArrayList _fftRightSpect = new ArrayList(); // presumably rolling spectrogram history (right)
private int _maxHeightRightSpect = 0;
private SignalGenerator _signalGenerator; // used only in test mode (see Process)
private bool _isTest = false; // true = analyze a generated test signal instead of captured audio
public bool IsDetectingEvents = false; // enables amplitude-event detection in Process()
public bool IsEventActive = false; // set by Process() when a sample exceeds the threshold
public int AmplitudeThreshold = 16384; // absolute 16-bit sample value that triggers an event
//private int fileCount;
public AudioFrame()
{
}
public AudioFrame(bool isTest)
{
_isTest = isTest;
}
/// <summary>
/// Process a buffer of 16-bit little-endian PCM audio: split it into
/// per-channel sample arrays, run amplitude-threshold event detection,
/// and append each channel's FFT to the spectrogram history.
/// </summary>
/// <param name="wave">Raw 16-bit PCM data; interleaved L/R frames when SettingChannels == 2.</param>
public void Process(ref byte[] wave)
{
    IsEventActive = false;
    if (Properties.Settings.Default.SettingChannels == 1)
    {
        _waveLeft = new double[wave.Length / 2];    // 2 bytes per mono sample
    }
    else
    {
        _waveLeft = new double[wave.Length / 4];    // 4 bytes per stereo frame
        _waveRight = new double[wave.Length / 4];
    }
    if (_isTest == false)
    {
        // Split out channels from the interleaved sample buffer.
        int h = 0;
        if (Properties.Settings.Default.SettingChannels == 1)
        {
            for (int i = 0; i < wave.Length; i += 2)
            {
                _waveLeft[h] = (double)BitConverter.ToInt16(wave, i);
                if (IsDetectingEvents == true)
                    if (_waveLeft[h] > AmplitudeThreshold || _waveLeft[h] < -AmplitudeThreshold)
                        IsEventActive = true;
                h++;
            }
        }
        else
        {
            for (int i = 0; i < wave.Length; i += 4)
            {
                _waveLeft[h] = (double)BitConverter.ToInt16(wave, i);
                if (IsDetectingEvents == true)
                    if (_waveLeft[h] > AmplitudeThreshold || _waveLeft[h] < -AmplitudeThreshold)
                        IsEventActive = true;
                _waveRight[h] = (double)BitConverter.ToInt16(wave, i + 2);
                // BUG FIX: the original re-checked _waveLeft[h] here, so loud
                // events present only on the right channel never triggered.
                if (IsDetectingEvents == true)
                    if (_waveRight[h] > AmplitudeThreshold || _waveRight[h] < -AmplitudeThreshold)
                        IsEventActive = true;
                h++;
            }
        }
    }
    else
    {
        // Generate an artificial sine sample for testing.
        _signalGenerator = new SignalGenerator();
        _signalGenerator.SetWaveform("Sine");
        _signalGenerator.SetSamplingRate(44100);
        _signalGenerator.SetSamples(8192);
        _signalGenerator.SetFrequency(4096);
        _signalGenerator.SetAmplitude(32768);
        _waveLeft = _signalGenerator.GenerateSignal();
        if (Properties.Settings.Default.SettingChannels == 2) _waveRight = _signalGenerator.GenerateSignal();
    }
    // Generate frequency-domain data (decibels) and append it to the bounded
    // spectrogram history; the oldest frame is evicted once the history is
    // taller than the largest spectrogram view seen so far.
    _fftLeft = FourierTransform.FFT(ref _waveLeft);
    _fftLeftSpect.Add(_fftLeft);
    if (_fftLeftSpect.Count > _maxHeightLeftSpect)
        _fftLeftSpect.RemoveAt(0);
    if (Properties.Settings.Default.SettingChannels == 2)
    {
        _fftRight = FourierTransform.FFT(ref _waveRight);
        _fftRightSpect.Add(_fftRight);
        if (_fftRightSpect.Count > _maxHeightRightSpect)
            _fftRightSpect.RemoveAt(0);
    }
}
/// <summary>
/// Render the left channel's time-domain waveform into a PictureBox.
/// </summary>
/// <param name="pictureBox">Target control; its size defines the plot area.</param>
public void RenderTimeDomainLeft(ref PictureBox pictureBox)
{
    // Set up for drawing. BUG FIX: the original never disposed the Pen, which
    // leaks a GDI handle on every rendered frame; using-blocks also guarantee
    // the Graphics object is released on exceptions.
    Bitmap canvas = new Bitmap(pictureBox.Width, pictureBox.Height);
    using (Graphics offScreenDC = Graphics.FromImage(canvas))
    using (Pen pen = new Pen(Color.Green))
    {
        // Determine channel boundaries
        int width = canvas.Width;
        int height = canvas.Height;
        double center = height / 2;
        // Draw left channel
        double scale = 0.5 * height / 32768; // a 16 bit sample has values from -32768 to 32767
        int xPrev = 0, yPrev = 0;
        for (int x = 0; x < width; x++)
        {
            int y = (int)(center + (_waveLeft[_waveLeft.Length / width * x] * scale));
            if (x == 0)
            {
                xPrev = 0;
                yPrev = y;
            }
            else
            {
                offScreenDC.DrawLine(pen, xPrev, yPrev, x, y);
                xPrev = x;
                yPrev = y;
            }
        }
    }
    // Hand the finished bitmap to the control (the control owns it from here).
    pictureBox.Image = canvas;
}
/// <summary>
/// Render the right channel's time-domain waveform into a PictureBox.
/// </summary>
/// <param name="pictureBox">Target control; its size defines the plot area.</param>
public void RenderTimeDomainRight(ref PictureBox pictureBox)
{
    // Set up for drawing. BUG FIX: the original never disposed the Pen, which
    // leaks a GDI handle on every rendered frame; using-blocks also guarantee
    // the Graphics object is released on exceptions.
    Bitmap canvas = new Bitmap(pictureBox.Width, pictureBox.Height);
    using (Graphics offScreenDC = Graphics.FromImage(canvas))
    using (Pen pen = new Pen(Color.Green))
    {
        // Determine channel boundaries
        int width = canvas.Width;
        int height = canvas.Height;
        double center = height / 2;
        // Draw right channel (the original comment said "left" — copy/paste).
        double scale = 0.5 * height / 32768; // a 16 bit sample has values from -32768 to 32767
        int xPrev = 0, yPrev = 0;
        for (int x = 0; x < width; x++)
        {
            int y = (int)(center + (_waveRight[_waveRight.Length / width * x] * scale));
            if (x == 0)
            {
                xPrev = 0;
                yPrev = y;
            }
            else
            {
                offScreenDC.DrawLine(pen, xPrev, yPrev, x, y);
                xPrev = x;
                yPrev = y;
            }
        }
    }
    // Hand the finished bitmap to the control (the control owns it from here).
    pictureBox.Image = canvas;
}
/// <summary>
/// Render the left channel's frequency-domain data (dB) into a PictureBox.
/// </summary>
/// <param name="pictureBox">Target control; its size defines the plot area.</param>
/// <param name="samples">Time-domain sample count the FFT was taken over (used to label Hz).</param>
public void RenderFrequencyDomainLeft(ref PictureBox pictureBox, int samples)
{
    // Set up for drawing. The original leaked the Pen, SolidBrush, and Font
    // (GDI handles) on every frame; using-blocks dispose them deterministically.
    Bitmap canvas = new Bitmap(pictureBox.Width, pictureBox.Height);
    using (Graphics offScreenDC = Graphics.FromImage(canvas))
    using (SolidBrush brush = new SolidBrush(Color.FromArgb(128, 255, 255, 255)))
    using (Pen pen = new Pen(Color.WhiteSmoke))
    using (Font font = new Font("Arial", 10))
    {
        // Determine channel boundaries
        int width = canvas.Width;
        int height = canvas.Height;
        double min = double.MaxValue;
        double minHz = 0;
        double max = double.MinValue;
        double maxHz = 0;
        double range = 0;
        double scale = 0;
        // Hz spanned by each FFT bin.
        double scaleHz = (double)(samples / 2) / (double)_fftLeft.Length;
        // Get left min/max. CONSISTENCY FIX: skip ±Infinity bins (dB of a
        // zero-magnitude bin) exactly like RenderFrequencyDomainRight does;
        // without the guards a single silent bin forced min to -Infinity and
        // made range/scale infinite, flattening the whole plot.
        for (int x = 0; x < _fftLeft.Length; x++)
        {
            double amplitude = _fftLeft[x];
            if (min > amplitude && amplitude != double.NegativeInfinity)
            {
                min = amplitude;
                minHz = (double)x * scaleHz;
            }
            if (max < amplitude && amplitude != double.PositiveInfinity)
            {
                max = amplitude;
                maxHz = (double)x * scaleHz;
            }
        }
        // get left range
        if (min < 0 || max < 0)
            if (min < 0 && max < 0)
                range = max - min;
            else
                range = Math.Abs(min) + max;
        else
            range = max - min;
        scale = range / height;
        // draw left channel
        for (int xAxis = 0; xAxis < width; xAxis++)
        {
            double amplitude = (double)_fftLeft[(int)(((double)(_fftLeft.Length) / (double)(width)) * xAxis)];
            if (amplitude == double.NegativeInfinity || amplitude == double.PositiveInfinity || amplitude == double.MinValue || amplitude == double.MaxValue)
                amplitude = 0;
            int yAxis;
            if (amplitude < 0)
                yAxis = (int)(height - ((amplitude - min) / scale));
            else
                yAxis = (int)(0 + ((max - amplitude) / scale));
            if (yAxis < 0)
                yAxis = 0;
            if (yAxis > height)
                yAxis = height;
            // (Original had a redundant double assignment: pen.Color = pen.Color = ...)
            pen.Color = Color.FromArgb(0, GetColor(min, max, range, amplitude), 0);
            offScreenDC.DrawLine(pen, xAxis, height, xAxis, yAxis);
        }
        offScreenDC.DrawString("Min: " + minHz.ToString(".#") + " Hz (±" + scaleHz.ToString(".#") + ") = " + min.ToString(".###") + " dB", font, brush, 0 + 1, 0 + 1);
        offScreenDC.DrawString("Max: " + maxHz.ToString(".#") + " Hz (±" + scaleHz.ToString(".#") + ") = " + max.ToString(".###") + " dB", font, brush, 0 + 1, 0 + 18);
    }
    // Hand the finished bitmap to the control (the control owns it from here).
    pictureBox.Image = canvas;
}
/// <summary>
/// Render the right channel's frequency-domain data (dB) into a PictureBox.
/// </summary>
/// <param name="pictureBox">Target control; its size defines the plot area.</param>
/// <param name="samples">Time-domain sample count the FFT was taken over (used to label Hz).</param>
public void RenderFrequencyDomainRight(ref PictureBox pictureBox, int samples)
{
    // Set up for drawing. The original leaked the Pen, SolidBrush, and Font
    // (GDI handles) on every frame; using-blocks dispose them deterministically.
    Bitmap canvas = new Bitmap(pictureBox.Width, pictureBox.Height);
    using (Graphics offScreenDC = Graphics.FromImage(canvas))
    using (SolidBrush brush = new SolidBrush(Color.FromArgb(128, 255, 255, 255)))
    using (Pen pen = new Pen(Color.WhiteSmoke))
    using (Font font = new Font("Arial", 10))
    {
        // Determine channel boundaries
        int width = canvas.Width;
        int height = canvas.Height;
        double min = double.MaxValue;
        double minHz = 0;
        double max = double.MinValue;
        double maxHz = 0;
        double range = 0;
        double scale = 0;
        // Hz spanned by each FFT bin.
        double scaleHz = (double)(samples / 2) / (double)_fftRight.Length;
        // Get right min/max, skipping ±Infinity bins (dB of a silent bin).
        for (int x = 0; x < _fftRight.Length; x++)
        {
            double amplitude = _fftRight[x];
            if (min > amplitude && amplitude != double.NegativeInfinity)
            {
                min = amplitude;
                minHz = (double)x * scaleHz;
            }
            if (max < amplitude && amplitude != double.PositiveInfinity)
            {
                max = amplitude;
                maxHz = (double)x * scaleHz;
            }
        }
        // get right range
        if (min < 0 || max < 0)
            if (min < 0 && max < 0)
                range = max - min;
            else
                range = Math.Abs(min) + max;
        else
            range = max - min;
        scale = range / height;
        // draw right channel
        for (int xAxis = 0; xAxis < width; xAxis++)
        {
            double amplitude = (double)_fftRight[(int)(((double)(_fftRight.Length) / (double)(width)) * xAxis)];
            if (amplitude == double.NegativeInfinity || amplitude == double.PositiveInfinity || amplitude == double.MinValue || amplitude == double.MaxValue)
                amplitude = 0;
            int yAxis;
            if (amplitude < 0)
                yAxis = (int)(height - ((amplitude - min) / scale));
            else
                yAxis = (int)(0 + ((max - amplitude) / scale));
            if (yAxis < 0)
                yAxis = 0;
            if (yAxis > height)
                yAxis = height;
            // (Original had a redundant double assignment: pen.Color = pen.Color = ...)
            pen.Color = Color.FromArgb(0, GetColor(min, max, range, amplitude), 0);
            offScreenDC.DrawLine(pen, xAxis, height, xAxis, yAxis);
        }
        offScreenDC.DrawString("Min: " + minHz.ToString(".#") + " Hz (±" + scaleHz.ToString(".#") + ") = " + min.ToString(".###") + " dB", font, brush, 0 + 1, 0 + 1);
        offScreenDC.DrawString("Max: " + maxHz.ToString(".#") + " Hz (±" + scaleHz.ToString(".#") + ") = " + max.ToString(".###") + " dB", font, brush, 0 + 1, 0 + 18);
    }
    // Hand the finished bitmap to the control (the control owns it from here).
    pictureBox.Image = canvas;
}
/// <summary>
/// Render the left channel's waterfall spectrogram into a PictureBox.
/// The newest FFT frame is drawn at the top row; amplitude maps to the
/// green intensity of each pixel (32bpp ARGB).
/// </summary>
/// <param name="pictureBox">Target control; its size defines the plot area.</param>
public void RenderSpectrogramLeft(ref PictureBox pictureBox)
{
    Bitmap canvas = new Bitmap(pictureBox.Width, pictureBox.Height);
    // Determine channel boundaries
    int width = canvas.Width;
    int height = canvas.Height;
    double min = double.MaxValue;
    double max = double.MinValue;
    double range = 0;
    // Let the FFT history grow to the tallest view seen so far.
    if (height > _maxHeightLeftSpect)
        _maxHeightLeftSpect = height;
    // get min/max over the entire history
    for (int w = 0; w < _fftLeftSpect.Count; w++)
        for (int x = 0; x < ((double[])_fftLeftSpect[w]).Length; x++)
        {
            double amplitude = ((double[])_fftLeftSpect[w])[x];
            if (min > amplitude)
            {
                min = amplitude;
            }
            if (max < amplitude)
            {
                max = amplitude;
            }
        }
    // get range
    if (min < 0 || max < 0)
        if (min < 0 && max < 0)
            range = max - min;
        else
            range = Math.Abs(min) + max;
    else
        range = max - min;
    // Lock the image for raw pixel writes. FIX: the original passed
    // ImageLockMode.ReadOnly even though it writes pixels; WriteOnly states
    // the actual intent. (new Bitmap(w, h) is 32bpp ARGB, so 4 bytes/pixel.)
    PixelFormat format = canvas.PixelFormat;
    BitmapData data = canvas.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.WriteOnly, format);
    int stride = data.Stride;
    int offset = stride - width * 4;
    try
    {
        unsafe
        {
            byte* pixel = (byte*)data.Scan0.ToPointer();
            // For each row. BUG FIX: the original loop ran y <= height; since
            // _maxHeightLeftSpect never shrinks, the history can hold more
            // frames than the current bitmap height after the control shrank,
            // and the y == height pass then wrote past the end of the locked
            // pixel buffer.
            for (int y = 0; y < height; y++)
            {
                if (y < _fftLeftSpect.Count)
                {
                    // for each column; newest frame (last in history) on top
                    for (int x = 0; x < width; x++, pixel += 4)
                    {
                        double amplitude = ((double[])_fftLeftSpect[_fftLeftSpect.Count - y - 1])[(int)(((double)(_fftLeft.Length) / (double)(width)) * x)];
                        double color = GetColor(min, max, range, amplitude);
                        pixel[0] = (byte)0;     // B
                        pixel[1] = (byte)color; // G carries the intensity
                        pixel[2] = (byte)0;     // R
                        pixel[3] = (byte)255;   // A (opaque)
                    }
                    pixel += offset; // skip stride padding to start of next row
                }
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
    }
    // unlock image
    canvas.UnlockBits(data);
    // Hand the finished bitmap to the control (the control owns it from here).
    pictureBox.Image = canvas;
}
/// <summary>
/// Render the right channel's waterfall spectrogram into a PictureBox.
/// The newest FFT frame is drawn at the top row; amplitude maps to the
/// green intensity of each pixel (32bpp ARGB).
/// </summary>
/// <param name="pictureBox">Target control; its size defines the plot area.</param>
public void RenderSpectrogramRight(ref PictureBox pictureBox)
{
    Bitmap canvas = new Bitmap(pictureBox.Width, pictureBox.Height);
    // Determine channel boundaries
    int width = canvas.Width;
    int height = canvas.Height;
    double min = double.MaxValue;
    double max = double.MinValue;
    double range = 0;
    // Let the FFT history grow to the tallest view seen so far.
    if (height > _maxHeightRightSpect)
        _maxHeightRightSpect = height;
    // get min/max over the entire history
    for (int w = 0; w < _fftRightSpect.Count; w++)
        for (int x = 0; x < ((double[])_fftRightSpect[w]).Length; x++)
        {
            double amplitude = ((double[])_fftRightSpect[w])[x];
            if (min > amplitude)
            {
                min = amplitude;
            }
            if (max < amplitude)
            {
                max = amplitude;
            }
        }
    // get range
    if (min < 0 || max < 0)
        if (min < 0 && max < 0)
            range = max - min;
        else
            range = Math.Abs(min) + max;
    else
        range = max - min;
    // Lock the image for raw pixel writes. FIX: the original passed
    // ImageLockMode.ReadOnly even though it writes pixels; WriteOnly states
    // the actual intent. (new Bitmap(w, h) is 32bpp ARGB, so 4 bytes/pixel.)
    PixelFormat format = canvas.PixelFormat;
    BitmapData data = canvas.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.WriteOnly, format);
    int stride = data.Stride;
    int offset = stride - width * 4;
    try
    {
        unsafe
        {
            byte* pixel = (byte*)data.Scan0.ToPointer();
            // For each row. BUG FIX: the original loop ran y <= height; since
            // _maxHeightRightSpect never shrinks, the history can hold more
            // frames than the current bitmap height after the control shrank,
            // and the y == height pass then wrote past the end of the locked
            // pixel buffer.
            for (int y = 0; y < height; y++)
            {
                if (y < _fftRightSpect.Count)
                {
                    // for each column; newest frame (last in history) on top
                    for (int x = 0; x < width; x++, pixel += 4)
                    {
                        double amplitude = ((double[])_fftRightSpect[_fftRightSpect.Count - y - 1])[(int)(((double)(_fftRight.Length) / (double)(width)) * x)];
                        double color = GetColor(min, max, range, amplitude);
                        pixel[0] = (byte)0;     // B
                        pixel[1] = (byte)color; // G carries the intensity
                        pixel[2] = (byte)0;     // R
                        pixel[3] = (byte)255;   // A (opaque)
                    }
                    pixel += offset; // skip stride padding to start of next row
                }
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
    }
    // unlock image
    canvas.UnlockBits(data);
    // Hand the finished bitmap to the control (the control owns it from here).
    pictureBox.Image = canvas;
}
/// <summary>
/// Map an amplitude sample to a 0-255 intensity given the min/max/range of
/// the data set. Returns 0 when the bounds are degenerate: infinite, still
/// at their double.MaxValue/MinValue sentinels, or a zero range (which would
/// otherwise divide by zero).
/// </summary>
/// <param name="min">Smallest amplitude in the data set.</param>
/// <param name="max">Largest amplitude in the data set.</param>
/// <param name="range">Precomputed span between min and max.</param>
/// <param name="amplitude">Sample to map into the 0-255 scale.</param>
/// <returns>Intensity in the range 0-255 (truncated toward zero).</returns>
private static int GetColor(double min, double max, double range, double amplitude)
{
    double color;
    // IDIOM FIX: the original mixed a non-short-circuiting '&' into this
    // '&&' chain ("min != double.MaxValue & max != ..."); '&&' matches the
    // intent of the surrounding conditions.
    if (min != double.NegativeInfinity && min != double.MaxValue && max != double.PositiveInfinity && max != double.MinValue && range != 0)
    {
        if (min < 0 || max < 0)
            if (min < 0 && max < 0)
                // Entirely negative data: distance of |amplitude| below |min|.
                color = (255 / range) * (Math.Abs(min) - Math.Abs(amplitude));
            else
                // Mixed-sign data: shift everything up by |min|.
                if (amplitude < 0)
                    color = (255 / range) * (Math.Abs(min) - Math.Abs(amplitude));
                else
                    color = (255 / range) * (amplitude + Math.Abs(min));
        else
            // Entirely non-negative data: simple linear scale from min.
            color = (255 / range) * (amplitude - min);
    }
    else
        color = 0;
    return (int)color;
}
}
}
|
|
|
|
|
Hi, very nice project!
One problem though: The program completely locks up when closing the MainForm.
Traced the problem down to following part in WaveInBuffer class:
public void WaitFor()
{
if (m_Recording)
m_Recording = m_RecordEvent.WaitOne();
else
Thread.Sleep(0);
}
It hangs on the WaitOne method, but I have no clue on how to fix it. Does anyone have a clue? Solutions posted in similar threads don't work for me. Working on a Windows 8.1 machine.
|
|
|
|
|
Hi all
I am a speech therapist working on an index called nasalance, which is calculated as: left-channel amplitude / (left + right) amplitude. If it is visualized, it can be used as biofeedback for people with a cleft palate. To measure the nasalance ratio, two mics collect sound from the mouth and the nose at the same time (one corresponds to the left channel and the other to the right). Now I want to show this ratio graphically and in real time using a stereo preamp connected to line-in. Could anyone write the code for me? In fact I do not have enough knowledge of programming!
Thanks
modified 25-Jun-14 13:59pm.
|
|
|
|
|
|
Excellent Article and very helpful.
Would it be possible to listen to the speaker output instead of a microphone source?
If you know of any articles that can point me in the right direction, that would be greatly appreciated.
Again thank you for you did Jeff.
Have a great day
|
|
|
|
|
Hi,
Thank you one more time for this awesome app! Really helpful.
I implemented your method of creating the visualisations in my app.
Now I'm doing this with two recording devices simultaneously, and I would like to add some latency to one input device in order to bring the two streams into sync.
How would I go about doing that?
Thank you in advance.
|
|
|
|
|
|
Can someone help me with an operation: I'd like to extract a signal modulated onto a carrier signal of 1800 MHz.
Any suggestions?
Andrea Galbarini
|
|
|
|
|
При запуске программы. На этой строке кода "WaveNative.waveInOpen(out m_WaveIn, device, format, m_BufferProc, IntPtr.Zero, WaveNative.CALLBACK_FUNCTION)" вылетает и выдает ошибку "В экземпляре объекта не задана ссылка на объект."
Что делать??
modified 4-Apr-13 8:07am.
|
|
|
|
|
Thank you for your code very nice.
is there a way to select the input device (in case of multiple input devices) instead of the default one ?
thank you again for your code.
|
|
|
|
|
Its a brilliant code...... thank you
|
|
|
|
|
|