110,534
社区成员
发帖
与我相关
我的任务
分享
using System;
using System.Diagnostics;
using System.Net.Sockets;
using System.Text.RegularExpressions;
using System.Threading;
using System.Windows.Forms;
using Microsoft.DirectX.DirectSound;
/// <summary>
/// Interfaces with the microphone: captures audio via DirectSound and
/// immediately echoes it into a looping playback buffer ("side tones").
/// Call StartMicrophone() to begin and Stop() to shut down.
/// </summary>
static partial class Microphone
{
#region Operating Parameters
/// <summary>
/// The sample rate we should use; The system expects either 16000 or 44100
/// </summary>
internal static int SampleRate = 44100;
// Playback attenuation in hundredths of a decibel (the DirectSound volume
// convention), i.e. -300 == 3 dB below full volume. Applied when playback
// first starts in MicrophoneLoop.
static int _Volume = -300;
/// <summary>
/// Write-only side-tone playback volume, in DirectSound hundredths of a dB.
/// Takes effect immediately when the playback buffer already exists;
/// otherwise it is applied when playback starts.
/// </summary>
internal static int Volume
{
set
{
_Volume = value;
if (null != playbackBuffer)
// Best effort: the buffer may be torn down concurrently by
// MicrophoneLoop, so a failure to apply the volume is ignored.
try{playbackBuffer . Volume = value;}
catch{}
}
}
/// <summary>
/// The control we will have playback cooperate with.
/// Setting it immediately registers the control with the playback device
/// at Priority cooperative level.
/// </summary>
static Control _ReferenceControl;
internal static Control ReferenceControl
{
set
{
playbackDevice.SetCooperativeLevel(_ReferenceControl = value, CooperativeLevel.Priority);
}
}
#endregion
#region Internal Variables
/// <summary>
/// The number of bytes in one capture slot. Sized in StartMicrophone to
/// 15 ms of audio (AverageBytesPerSecond * 0.015), rounded up to an even
/// byte count so a 16-bit sample is never split across slots.
/// </summary>
static int bufferSize;
// Number of equal-sized slots the capture buffer is divided into; one
// notification event fires as each slot fills.
const int _bufferPositions = 8;
/// <summary>
/// A separate wait-event for each slot in the buffer
/// </summary>
/// <remarks>
/// We need a separate one for each slot since these aren't countable. That is,
/// if the second slot is ready before we process the first, we'll fall one slot
/// behind, and not be able to catch up. This is latency, which is really
/// noticeable to the ear
/// </remarks>
static AutoResetEvent[] notificationEvent = new AutoResetEvent[_bufferPositions];
// Shutdown flag shared between Stop() and MicrophoneLoop(); volatile so the
// background thread observes the store from Stop() promptly.
static volatile bool Runnable=false;
// Only assigned by the (currently #if false) full-duplex path in
// StartMicrophone; MicrophoneLoop disposes it if it was ever created.
static FullDuplex fullDuplex;
/// <summary>
/// The sound device
/// </summary>
/// <remarks>
/// Debugging in Visual Studio will throw up an MDA saying that a LoaderLock
/// violation occurred. Ignore it
/// </remarks>
static Capture captureDevice = new Capture();
/// <summary>
/// The buffer where the sample sounds are placed
/// </summary>
internal static CaptureBuffer captureBuffer;
/// <summary>
/// The playback device we'll be using for the side tones
/// </summary>
/// <remarks>
/// We create it early so that we can set its cooperation at the start of
/// execution. Otherwise we often get cross-thread exceptions
/// </remarks>
static Device playbackDevice = new Device();
/// <summary>
/// The buffer used to play sound back from
/// </summary>
internal static SecondaryBuffer playbackBuffer;
#endregion
/// <summary>
/// Main loop of the echo thread: waits on each capture-slot notification
/// in turn, copies the freshly captured bytes into the 3-slot looping
/// playback buffer, and starts playback once the first slot is queued.
/// When Runnable goes false (or a DirectSound call throws) it falls
/// through and releases all capture/playback resources itself.
/// </summary>
static void MicrophoneLoop()
{
// Write cursor into the playback ring (3 * bufferSize bytes long).
int Idx = 0;
// Start playing after we've queued the first sample
bool playing = false;
captureBuffer.Start(true);
try
{
while (Runnable)
for (int I = 0, offset=0; Runnable && I < _bufferPositions; I++, offset+= bufferSize)
{
// Wait for the sample areas to be ready
notificationEvent[I] . WaitOne(Timeout.Infinite, true);
// Get the sound samples
byte[] buffer = (byte[]) captureBuffer.Read(offset, typeof(byte), LockFlag.None, bufferSize);
// Write them to the playback buffer
playbackBuffer . Write(Idx, buffer, LockFlag.None);
// Advance and wrap the write cursor within the 3-slot ring.
Idx += buffer . Length;
if (Idx >= 3*bufferSize)
Idx -= 3*bufferSize;
if (!playing)
{
playbackBuffer . Volume = _Volume;
playbackBuffer . Play(0, BufferPlayFlags.Looping);
playing = true;
}
}
}
// NOTE(review): any exception silently ends the loop and drops into the
// cleanup below; consider logging here rather than swallowing outright.
catch{}
// Release resources we are using
if (null != playbackBuffer)
{
playbackBuffer . Stop();
playbackBuffer . Dispose();
}
playbackBuffer= null;
if (null != captureBuffer)
captureBuffer.Dispose();
captureBuffer = null;
for (int I = 0; I < notificationEvent . Length; I++)
if (null != notificationEvent[I])
{
notificationEvent[I] . Close();
notificationEvent[I] = null;
}
if (null != fullDuplex)
fullDuplex . Dispose();
fullDuplex = null;
}
#region Buffer Description
/// <summary>
/// Builds the wave format shared by capture and playback:
/// 16-bit mono PCM at <see cref="SampleRate"/> samples per second.
/// </summary>
static WaveFormat WFormat()
{
//Set up the wave format to be captured
WaveFormat waveFormat = new WaveFormat();
waveFormat.Channels = 1;
waveFormat.FormatTag = WaveFormatTag.Pcm;
waveFormat.SamplesPerSecond = SampleRate; // Because that is what Skype wants
waveFormat.BitsPerSample = 16;
// One mono 16-bit sample == 2 bytes per block.
waveFormat.BlockAlign = 2;
waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * waveFormat.SamplesPerSecond;
return waveFormat;
}
#endregion
/// <summary>
/// Creates the capture and playback buffers, wires one notification event
/// to the end of each capture slot, and starts the background echo thread
/// (MicrophoneLoop). Pair with <see cref="Stop"/> to shut down.
/// </summary>
public static void StartMicrophone()
{
WaveFormat waveFormat = WFormat();
// One slot is 15 ms of audio, rounded up to an even byte count so a
// 16-bit sample never straddles a slot boundary.
bufferSize = (int)(waveFormat.AverageBytesPerSecond*0.015);
if (bufferSize % 2 != 0)
bufferSize++;
CaptureBufferDescription captureBufferDescription = new CaptureBufferDescription();
captureBufferDescription.BufferBytes = _bufferPositions * bufferSize;
captureBufferDescription.Format = waveFormat;
// Setup playback buffer if we're using side tones
BufferDescription playbackBufferDescription = null;
playbackBufferDescription = new BufferDescription(waveFormat);
// The playback ring is 3 slots deep; MicrophoneLoop wraps its write
// cursor at this size.
playbackBufferDescription.BufferBytes = 3 * bufferSize;
playbackBufferDescription.ControlPositionNotify = true;//**
playbackBufferDescription.ControlPan = true;//**
playbackBufferDescription.ControlFrequency = true;//**
playbackBufferDescription.GlobalFocus = true;
playbackBufferDescription.StickyFocus = true;
playbackBufferDescription.ControlVolume = true;
playbackBufferDescription.DeferLocation = true;
playbackBufferDescription.CanGetCurrentPosition = true;
#if false
// for some reason this won't let me play sound (in the thread)
// I left it here in case someone can make it work
// Enable noise cancellation and filter if 16Khz
if (16000 == SampleRate && UseSideTones)
{
// DirectSound (according to docs) only supports this at 16Khz
CaptureEffectDescription[] Effects = new CaptureEffectDescription[2];
Effects[0] . LocateInSoftware = true;
Effects[0] . GuidEffectsClass = DSoundHelper.CaptureEffectsClassAcousticEchoCancellation;
Effects[0] . GuidEffectsInstance = DSoundHelper.CaptureEffectsSystemAcousticEchoCancellation;
Effects[1] . LocateInSoftware = true;
Effects[1] . GuidEffectsClass = DSoundHelper.CaptureEffectsClassNoiseSuppression;
Effects[1] . GuidEffectsInstance = DSoundHelper.CaptureEffectsSystemNoiseSuppression;
captureBufferDescription.CaptureEffectDescription = Effects;
captureBufferDescription.ControlEffects = true;
playbackBufferDescription.ControlEffects = true;
fullDuplex = new FullDuplex(captureBufferDescription, playbackBufferDescription,
_ReferenceControl, CooperativeLevel.Priority,
ref captureBuffer, ref playbackBuffer);
}
else
#endif
{
captureBuffer = new CaptureBuffer(captureBufferDescription, captureDevice);
playbackBuffer = new SecondaryBuffer(playbackBufferDescription, playbackDevice);
}
// Create the notifications that fire as each capture slot fills.
// (The array is allocated one longer than needed; only the first
// _bufferPositions entries are registered below.)
BufferPositionNotify[] positionNotify = new BufferPositionNotify[_bufferPositions + 1];
for (int i = 0; i < _bufferPositions; i++)
{
// Notify on the last byte of each slot, i.e. once the slot is full.
positionNotify[i].Offset = bufferSize * (i+1) - 1;
notificationEvent[i] = new AutoResetEvent(false);
// Raw OS handle handed to DirectSound; the AutoResetEvent objects stay
// alive in notificationEvent[] until MicrophoneLoop closes them.
positionNotify[i].EventNotifyHandle = notificationEvent[i].SafeWaitHandle.DangerousGetHandle();
}
Notify N = new Notify(captureBuffer);
N . SetNotificationPositions(positionNotify, _bufferPositions);
N . Dispose();
Runnable=true;
echoThread = new Thread(new ThreadStart(MicrophoneLoop));
// Above-normal priority keeps the side-tone latency low under load.
echoThread . Priority = ThreadPriority . AboveNormal;
echoThread . Start();
}
// The background thread running MicrophoneLoop; null when not running.
static Thread echoThread;
/// <summary>
/// This is used to stop the Microphone capture and processing resources
/// and threads.
/// </summary>
internal static void Stop()
{
if (!Runnable || null == echoThread)
return ;
// Shut down the background thread:
// * Clear Runnable so MicrophoneLoop exits its loop and releases
//   all capture/playback resources itself
// * Join to wait for that cleanup to finish
// * Then drop the thread reference
Runnable = false;
echoThread . Join();
echoThread = null;
}
}
// Stray snippet (not part of the Microphone class): extracts the name field
// from a Chinese national-ID style string. "姓名" is the name label and
// "性别" is the following field label that terminates the value.
string str = "姓名张某某性别男民族汉出生1990年3月18日住址北京市朝阳区世纪东方嘉园109楼公民身份号码110105199001182517";
// Named groups: <key> captures the literal label; <value> lazily captures
// everything between the label (lookbehind) and the next field marker
// (lookahead), so the value itself contains neither label.
Regex reg = new Regex("(?<key>姓名)(?<value>(?<=姓名).*?(?=性别))");
MatchCollection mc = reg.Matches(str);
// BUG FIX: key and value were used without being declared anywhere in
// scope; declare them here (null when no match is found).
string key = null, value = null;
foreach (Match m in mc)
{
key = m.Groups["key"].Value;
value = m.Groups["value"].Value;
}