Friday, June 10, 2016

WASAPI: Play a sine wave at minimum latency without glitches (exclusive, event-driven mode)


I'm trying to play a simple sinusoidal waveform using the Windows Audio Session API (WASAPI) in exclusive mode, but I'm encountering sound glitches no matter what I do. I've been using the MSDN Exclusive-Mode Streams example as a reference point, and here's what the slightly adapted code currently looks like.

Setup code:

-- <variable declarations, incl. "HRESULT hr; BYTE *pData;" > --

// also, hr is checked for errors every step of the way
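
For context, the elided declarations and the error-check macro used further down might look like this. This is only a sketch in the spirit of the MSDN sample; the exact originals aren't shown above, and the IF_ERROR_EXIT body here is my own guess at a macro matching how it's used below:

#include <windows.h>
#include <mmdeviceapi.h>
#include <audioclient.h>
#include <avrt.h>
#include <limits>
#include <cstdio>

// hypothetical error-check macro, consistent with how IF_ERROR_EXIT is used below
#define IF_ERROR_EXIT(hr) do { if (FAILED(hr)) { printf("error 0x%08x\n", (unsigned)(hr)); return -1; } } while (0)

const CLSID CLSID_MMDeviceEnumerator = __uuidof(MMDeviceEnumerator);
const IID IID_IMMDeviceEnumerator = __uuidof(IMMDeviceEnumerator);
const IID IID_IAudioClient = __uuidof(IAudioClient);
const IID IID_IAudioRenderClient = __uuidof(IAudioRenderClient);

IMMDeviceEnumerator *pEnumerator = NULL;
IMMDevice *pDevice = NULL;
IAudioClient *pAudioClient = NULL;
IAudioRenderClient *pRenderClient = NULL;
HANDLE hEvent = NULL, hTask = NULL;
UINT32 bufferFrameCount = 0;
BYTE *pData = NULL;

HRESULT hr = CoInitialize(NULL);   // COM must be initialized before CoCreateInstance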

hr = CoCreateInstance(
    CLSID_MMDeviceEnumerator, NULL,
    CLSCTX_ALL, IID_IMMDeviceEnumerator,
    (void**)&pEnumerator);

hr = pEnumerator->GetDefaultAudioEndpoint(
    eRender, eConsole, &pDevice);

hr = pDevice->Activate(
    IID_IAudioClient, CLSCTX_ALL,
    NULL, (void**)&pAudioClient);


REFERENCE_TIME DefaultDevicePeriod = 0, MinimumDevicePeriod = 0;
hr = pAudioClient->GetDevicePeriod(&DefaultDevicePeriod, &MinimumDevicePeriod);
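
REFERENCE_TIME is expressed in 100-nanosecond units, so the two periods can be sanity-checked in milliseconds with a debug line like this (my own addition, not part of the sample):

printf("default device period: %.1f ms, minimum device period: %.1f ms\n",
    DefaultDevicePeriod / 10000.0, MinimumDevicePeriod / 10000.0);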

WAVEFORMATEX wave_format = {};  // zero-init also leaves cbSize = 0, as WAVE_FORMAT_PCM requires
wave_format.wFormatTag = WAVE_FORMAT_PCM;
wave_format.nChannels = 2;
wave_format.nSamplesPerSec = 44100;
wave_format.nAvgBytesPerSec = 44100 * 2 * 16 / 8;  // nSamplesPerSec * nBlockAlign
wave_format.nBlockAlign = 2 * 16 / 8;              // nChannels * wBitsPerSample / 8
wave_format.wBitsPerSample = 16;

hr = pAudioClient->IsFormatSupported(
    AUDCLNT_SHAREMODE_EXCLUSIVE,
    &wave_format,
    NULL // can't suggest a "closest match" in exclusive mode
    );
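
Since exclusive mode can't return a closest match, the only useful outcomes here are S_OK or a failure code; a minimal explicit check could look like this (a sketch, not in the sample):

if (hr == AUDCLNT_E_UNSUPPORTED_FORMAT) {
    printf("16-bit 44.1 kHz stereo PCM not supported in exclusive mode\n");
    return -1;
}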

hr = pAudioClient->Initialize(
    AUDCLNT_SHAREMODE_EXCLUSIVE,
    AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
    MinimumDevicePeriod,
    MinimumDevicePeriod,
    &wave_format,
    NULL);
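
One documented wrinkle with exclusive event-driven streams: Initialize can fail with AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED, in which case the IAudioClient::Initialize docs say to read back the aligned buffer size, convert it to a period, and initialize again on a freshly activated client. A sketch of that fallback (error checks elided):

if (hr == AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED) {
    UINT32 alignedFrames = 0;
    hr = pAudioClient->GetBufferSize(&alignedFrames);
    // frames -> 100-ns units: (frames / rate) seconds * 10^7
    REFERENCE_TIME AlignedPeriod = (REFERENCE_TIME)
        (10000.0 * 1000 / wave_format.nSamplesPerSec * alignedFrames + 0.5);
    pAudioClient->Release();
    hr = pDevice->Activate(IID_IAudioClient, CLSCTX_ALL, NULL, (void**)&pAudioClient);
    hr = pAudioClient->Initialize(
        AUDCLNT_SHAREMODE_EXCLUSIVE,
        AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
        AlignedPeriod, AlignedPeriod,
        &wave_format, NULL);
}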


// Get the actual size of the allocated buffer.
hr = pAudioClient->GetBufferSize(&bufferFrameCount);

INT32 FrameSize_bytes = bufferFrameCount * wave_format.nChannels * wave_format.wBitsPerSample / 8;
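
(Despite the name, FrameSize_bytes is the size of the whole endpoint buffer, not of one frame. As a purely illustrative example: if bufferFrameCount came back as 132 frames, roughly a 3 ms minimum period at 44.1 kHz, this would be 132 * 2 * 16 / 8 = 528 bytes.)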

hr = pAudioClient->GetService(
    IID_IAudioRenderClient,
    (void**)&pRenderClient);

hEvent = CreateEvent(nullptr, false, false, nullptr);  // auto-reset event, initially non-signaled
if (hEvent == NULL) { printf("CreateEvent failed\n"); return -1; }  // CreateEvent returns NULL on failure, not INVALID_HANDLE_VALUE

hr = pAudioClient->SetEventHandle(hEvent);

Buffer setup:

const size_t num_samples = FrameSize_bytes / sizeof(unsigned short);

unsigned short *samples = new unsigned short[num_samples];

float min = (float)(std::numeric_limits<unsigned short>::min)();
float max = (float)(std::numeric_limits<unsigned short>::max)();
float halfmax = max / 2.0f;
float dt = 1.0f / (float)wave_format.nSamplesPerSec;

// exactly one sine cycle per endpoint buffer, so replaying the same buffer stays phase-continuous
float freq = (float)wave_format.nSamplesPerSec / (float)bufferFrameCount;

for (size_t i = 0; i < num_samples / 2; ++i) {  // num_samples/2 frames, two channels per frame
    float t = (float)i * dt;
    samples[2*i]     = (unsigned short)sin_minmax_Hz(min, max, freq, t);
    samples[2*i + 1] = (unsigned short)sin_minmax_Hz(min, max, freq, t);
}

// pre-fill the whole endpoint buffer once before starting the stream
hr = pRenderClient->GetBuffer(bufferFrameCount, &pData);

memcpy(pData, samples, FrameSize_bytes);

hr = pRenderClient->ReleaseBuffer(bufferFrameCount, 0);

DWORD taskIndex = 0;
hTask = AvSetMmThreadCharacteristics(TEXT("Pro Audio"), &taskIndex);

if (hTask == NULL) {
    hr = E_FAIL;
    IF_ERROR_EXIT(hr);
}
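
(AvSetMmThreadCharacteristics lives in avrt.dll; assuming MSVC, its import library can be pulled in like this:)

#pragma comment(lib, "Avrt.lib")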

The function sin_minmax_Hz is defined as follows:

#define TWO_PI (3.14159265359*2)

static inline float sin01(float alpha) {
    return 0.5f*sinf(alpha) + 0.5f;  // maps sin() output from [-1, 1] to [0, 1]
}

static inline float sin_minmax_Hz(float min, float max, float freq_Hz, float t) {
    // scales the unit sine into [0, (max - min)/2]
    return (max - min) / 2.0f * sin01(t * freq_Hz * TWO_PI);
}
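
(A quick range check, my own arithmetic rather than anything from the sample: with min = 0 and max = 65535, sin_minmax_Hz returns values in [0, 32767.5], so after truncation to unsigned short the samples never exceed 32767 and stay in the positive half of the signed 16-bit range the device actually interprets. The result is a half-amplitude sine riding a DC offset rather than a full-scale sine.)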

Playback:

hr = pAudioClient->Start();  // Start playing.
IF_ERROR_EXIT(hr);

// just play indefinitely

while (true) {
    WaitForSingleObject(hEvent, INFINITE);  // device signals when it is ready for the next buffer

    // refill the entire endpoint buffer with the same single-cycle waveform
    hr = pRenderClient->GetBuffer(bufferFrameCount, &pData);
    memcpy(pData, samples, FrameSize_bytes);
    hr = pRenderClient->ReleaseBuffer(bufferFrameCount, 0);
}
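
(If the loop ever exited, which it doesn't here, the usual teardown would be the standard COM/WASAPI cleanup; a sketch, not taken from the sample:)

hr = pAudioClient->Stop();
AvRevertMmThreadCharacteristics(hTask);
CloseHandle(hEvent);
pRenderClient->Release();
pAudioClient->Release();
pDevice->Release();
pEnumerator->Release();
CoUninitialize();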

The problem is that at the minimum device period, the sine wave usually plays smoothly for about two seconds and then starts glitching badly, with massive aliasing that makes it sound almost like a sawtooth wave. Am I missing something here?

(The whole working example can be found here.)

