Streaming playback of the audio stream inside an AVI file with DirectSound

Project download (VC++ 2005)

Note: DSQuickLib is used to initialize and shut down DirectSound.
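DSQuickLib itself is not reproduced here. For reference, the members of CDSoundEnv (and the CSoundObject base class) that this sample relies on, inferred only from how they are called in the listings below and not from the library's actual headers, look roughly like this:

// Hypothetical sketch of the DSQuickLib pieces used by this sample (not the real declaration).
class CSoundObject { /* base class for sound objects managed by CDSoundEnv; declares the
                        virtual Play/Stop/Rewind/FadeOut/DoThread interface overridden in Listing 2 */ };

class CDSoundEnv {
public:
    CDSoundEnv(int nChannels, int nSamplesPerSec, int nBitsPerSample);
    HRESULT         Initialize(HWND hWnd, DWORD dwCoopLevel);  // e.g. DSSCL_PRIORITY
    void            PollStatus();                              // periodic housekeeping, called from the message loop
    LPDIRECTSOUND8  GetDirectSound();                          // IDirectSound8 used to create buffers
    void            AddSoundObject(CSoundObject *pObj);
    void            RemoveSoundObject(CSoundObject *pObj);
};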

【Main program part 1 (AVIStreamToPCMStream_realtime.cpp)】


// AVIStreamToPCMStream_realtime.cpp : Defines the entry point for the application.
//

#include "stdafx.h"
#include "AVIStreamToPCMStream_realtime.h"
#include <mmsystem.h>
#include <dsound.h>
#include "DSQuickLib.h"
#include <stdio.h>

#include <sys/types.h>
#include <sys/stat.h>    // included for the _stat / _tstat functions
#include <crtdbg.h>
#include <TCHAR.h>
#include <vfw.h>
#include <mmreg.h>
#include <msacm.h>

#include "PCMStreamForAVIStream.h"

#pragma comment (lib, "msacm32.lib")
#pragma comment (lib, "winmm.lib")
#pragma comment (lib, "vfw32.lib")


LRESULT CALLBACK WndProc(HWND hWnd,UINT message,WPARAM wParam,LPARAM lParam);
LRESULT CALLBACK IOProc(LPMMIOINFO lpMMIOInfo, UINT uMessage, LPARAM lParam1, LPARAM lParam2);

//    Global variables
CDSoundEnv    *g_pDSoundEnv = NULL;
static char * g_lpData = NULL;
static long    g_cfileSize;
CRITICAL_SECTION    g_csSema;
CPCMStreamForAVIStream    *g_pAviPCMStream = NULL;

typedef    struct _aviplaybackinfo{
    HANDLE     hthread;
    HWND       hWnd;
    BOOL       bExit;
    BOOL    bActive;
    PAVIFILE   lpAviFile;
    PAVISTREAM lpVideoStream;
    PAVISTREAM lpAudioStream;
}    AVIPLAYBACKINFO;

int APIENTRY WinMain(HINSTANCE hInstance,
                     HINSTANCE hPrevInstance,
                     LPSTR     lpCmdLine,
                     int       nCmdShow )
{
    _CrtSetDbgFlag ( _CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF );

    WNDCLASSEX wcex;
    wcex.cbSize = sizeof(WNDCLASSEX); 
    wcex.style            = CS_HREDRAW | CS_VREDRAW;
    wcex.lpfnWndProc    = (WNDPROC)WndProc;
    wcex.cbClsExtra        = 0;
    wcex.cbWndExtra        = 0;
    wcex.hInstance        = hInstance;
    wcex.hIcon            = LoadIcon(NULL, IDI_APPLICATION);
    wcex.hCursor        = LoadCursor(NULL, IDC_ARROW);
    wcex.hbrBackground    = (HBRUSH)(COLOR_WINDOW+1);
    wcex.lpszMenuName    = NULL;
    wcex.lpszClassName    = _T("avi realtime audio stream");
    wcex.hIconSm        = LoadIcon(NULL, IDI_APPLICATION);
    RegisterClassEx(&wcex);

    InitializeCriticalSection(&g_csSema);
    HWND hWnd;
    hWnd = CreateWindowEx(WS_EX_OVERLAPPEDWINDOW,wcex.lpszClassName,_T("avi realtime audio stream"),
                WS_VISIBLE|WS_CAPTION|WS_SYSMENU|WS_MINIMIZEBOX|WS_MAXIMIZEBOX,
                CW_USEDEFAULT,0,640,480,NULL,NULL,hInstance,NULL);
    if( !hWnd )
        return FALSE;

    RECT    bounds,client;
    GetWindowRect(hWnd,&bounds);
    GetClientRect(hWnd,&client);
    MoveWindow(hWnd,bounds.left,bounds.top,
        640 * 2 - client.right,
        480 * 2 - client.bottom,
        false );

    ShowWindow( hWnd, nCmdShow );
    UpdateWindow( hWnd );

    AVIPLAYBACKINFO    aviInfo;
    ZeroMemory((VOID*)&aviInfo,sizeof(AVIPLAYBACKINFO));

    ::SetWindowLong(hWnd,GWL_USERDATA,(DWORD)&aviInfo);
    aviInfo.hWnd = hWnd;


    g_pDSoundEnv = new CDSoundEnv(2,44100,16);
    if (g_pDSoundEnv){
        if (SUCCEEDED(g_pDSoundEnv->Initialize(hWnd,DSSCL_PRIORITY))){
            struct    _stat    status;
            if (0 == _tstat(_T("test.avi"),&status)){
                g_cfileSize = status.st_size;
                g_lpData = new char[g_cfileSize];
                FILE    *fp = _tfopen(_T("test.avi"),_T("rb"));
                if (fp != NULL){
                    fread(g_lpData,g_cfileSize,1,fp);
                    fclose(fp);
                }
                //    mmio: install the custom I/O procedure for FOURCC 'MEV '
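                //    The ".MEV+" suffix of the file name passed to AVIFileOpen below makes
                //    mmioOpen (used internally by the AVIFile library) route all reads and
                //    seeks through this I/O procedure, so the AVI is parsed entirely from
                //    the in-memory copy held in g_lpData instead of from disk.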
                mmioInstallIOProc(mmioFOURCC('M', 'E', 'V', ' '), (LPMMIOPROC)IOProc,
                                    MMIO_INSTALLPROC | MMIO_GLOBALPROC);

                AVIFileInit();
                
                if (AVIFileOpen(&aviInfo.lpAviFile, TEXT("test.MEV+"), OF_READ, NULL) != 0)
                    return -1;
                if (AVIFileGetStream(aviInfo.lpAviFile, &aviInfo.lpVideoStream, streamtypeVIDEO, 0) != 0)
                    aviInfo.lpVideoStream = NULL;
                if (AVIFileGetStream(aviInfo.lpAviFile, &aviInfo.lpAudioStream, streamtypeAUDIO, 0) != 0)
                    aviInfo.lpAudioStream = NULL;

                AVIFileRelease(aviInfo.lpAviFile);
            }
            if (aviInfo.lpAudioStream != NULL){
                g_pAviPCMStream = new CPCMStreamForAVIStream(g_pDSoundEnv,aviInfo.lpAudioStream);
                g_pAviPCMStream->Prepare();
            }
        }
    }

    MSG        msg;
    while(true){
        if(PeekMessage(&msg, 0, 0, 0, PM_REMOVE)){
            if(msg.message == WM_QUIT)
                break;
            TranslateMessage(&msg);
            DispatchMessage(&msg);
        }else{
            if (g_pDSoundEnv != NULL)
                g_pDSoundEnv->PollStatus();
            if (g_pAviPCMStream != NULL && !g_pAviPCMStream->IsPlaying()){
                Sleep(100);
                g_pAviPCMStream->Rewind();
                g_pAviPCMStream->Play();
            }
        }
    }

    if (g_pAviPCMStream)
        delete    g_pAviPCMStream;
    if (g_pDSoundEnv)
        delete    g_pDSoundEnv;

    aviInfo.bActive = false;
    if (aviInfo.hthread != NULL) {
        aviInfo.bExit = TRUE;
        WaitForSingleObject(aviInfo.hthread, 1000);
        CloseHandle(aviInfo.hthread);
    }
    
    if (aviInfo.lpVideoStream != NULL)
        AVIStreamRelease(aviInfo.lpVideoStream);
    if (aviInfo.lpAudioStream != NULL)
        AVIStreamRelease(aviInfo.lpAudioStream);
    
    AVIFileExit();

    EnterCriticalSection(&g_csSema);
    if (g_lpData != NULL){
        mmioInstallIOProc(mmioFOURCC('M', 'E', 'V', ' '), NULL,MMIO_REMOVEPROC);
        delete[]    g_lpData;
        g_lpData = NULL;
    }
    LeaveCriticalSection(&g_csSema);
    return (int)msg.wParam;
}

//
//    Function: WndProc
//    Description: handles messages sent to the window
//
LRESULT CALLBACK WndProc(HWND hWnd,UINT message,WPARAM wParam,LPARAM lParam)
{

    switch (message){
        case WM_PAINT:
            {
                HDC    hDC;
                PAINTSTRUCT    paintStruct;
                hDC = BeginPaint(hWnd,&paintStruct);
                RECT    rc;
                GetClientRect(hWnd,&rc);
                DrawText(hDC,_T("このアプリケーションはオーディオ再生のみ行います。"),-1,&rc,DT_NOCLIP);
                rc.top += 30;
                DrawText(hDC,_T("映像は再生されません。"),-1,&rc,DT_NOCLIP);
                ::EndPaint(hWnd,&paintStruct);
            }
            return DefWindowProc(hWnd, message, wParam, lParam);
        case WM_DESTROY:
            PostQuitMessage(0);
            break;
        default:
            return DefWindowProc(hWnd, message, wParam, lParam);
    }
    return    0;
}


//
//    mmio: IOProc
//
LRESULT CALLBACK IOProc(LPMMIOINFO lpMMIOInfo, UINT uMessage, LPARAM lParam1, LPARAM lParam2)
{
  static BOOL alreadyOpened = FALSE;

  switch (uMessage) {
  case MMIOM_OPEN:
      if (alreadyOpened)
          return 0;
      alreadyOpened = TRUE;

      lpMMIOInfo->lDiskOffset = 0;
      return 0;

   case MMIOM_CLOSE:
       return 0;

   case MMIOM_READ:{
            LPARAM    dataRead = 0;
            EnterCriticalSection(&g_csSema);
            if (g_lpData != NULL){
                LPARAM    lToRead = lParam2;
                //    Clamp the read to the end of the in-memory image.
                if (lpMMIOInfo->lDiskOffset + lToRead > g_cfileSize)
                    lToRead = g_cfileSize - lpMMIOInfo->lDiskOffset;
                if (lToRead > 0){
                    memcpy((void *)lParam1, g_lpData + lpMMIOInfo->lDiskOffset, lToRead);
                    lpMMIOInfo->lDiskOffset += lToRead;
                    dataRead = lToRead;
                }
            }
            LeaveCriticalSection(&g_csSema);
            return (dataRead);
        }
   case MMIOM_SEEK:
       switch (lParam2) {
       case SEEK_SET:
           lpMMIOInfo->lDiskOffset = lParam1;
           break;

       case SEEK_CUR:
           lpMMIOInfo->lDiskOffset += lParam1;
           break;

       case SEEK_END:
           lpMMIOInfo->lDiskOffset = g_cfileSize - lParam1;
           break;
       }
       return lpMMIOInfo->lDiskOffset;

   default:
       return -1;
   }
}

 

 The audio is decompressed with the AVIFile library and streamed with DirectSound.
 The audio is decoded one second at a time, which keeps memory consumption down.

 Streaming playback uses the CPCMStreamForAVIStream class shown in Listings 2 and 3.
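 Condensed, the whole playback path looks like this (a sketch with error handling and the message loop omitted; Listing 1 is the authoritative version, and pAviFile / pAudioStream / pStream are just illustrative local names):

// Sketch: how the pieces fit together (error handling omitted).
mmioInstallIOProc(mmioFOURCC('M','E','V',' '), (LPMMIOPROC)IOProc,
                  MMIO_INSTALLPROC | MMIO_GLOBALPROC);        // serve the AVI from memory
AVIFileInit();
AVIFileOpen(&pAviFile, TEXT("test.MEV+"), OF_READ, NULL);     // goes through IOProc
AVIFileGetStream(pAviFile, &pAudioStream, streamtypeAUDIO, 0);

CPCMStreamForAVIStream *pStream =
    new CPCMStreamForAVIStream(g_pDSoundEnv, pAudioStream);   // Listings 2 and 3
pStream->Prepare();   // read the format, open the ACM converter, build the DirectSound buffer
pStream->Play();      // start looping playback; a worker thread keeps refilling the buffer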

 

【Main program part 2: declaration of the CPCMStreamForAVIStream class (PCMStreamForAVIStream.h)】


#pragma once
#include <vfw.h>
#include <mmreg.h>
#include <msacm.h>
#include "DSQuickLib.h"

#define    AVIPCMSOUNDSTREAM_NUM_BLOCKS    2
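//    The DirectSound buffer is split into this many notification blocks; with the
//    defaults set in the constructor (buffer length in seconds == number of blocks),
//    each block holds about one second of PCM, and while one block is playing the
//    worker thread refills the other.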

class CPCMStreamForAVIStream :
    public CSoundObject
{
public:
    CPCMStreamForAVIStream(CDSoundEnv *pEnv, PAVISTREAM lpAudioStream);
    virtual ~CPCMStreamForAVIStream(void);
    virtual void DoThread();
    void    PollStatus(FLOAT timeElapsed);
    HRESULT    Prepare();

    virtual HRESULT Play( DWORD dwPriority = 0, DWORD dwFlags = 0, LONG lVol = 0, LONG lFreq = -1, LONG lPan = 0 , LONG lFade = 0);
    virtual HRESULT Stop();
    virtual HRESULT Rewind();
    virtual void    FadeOut(LONG lFadeLen);
    HRESULT    Pause();
    virtual BOOL    IsPlaying(){    return    m_dwPhase == STREAMSOUND_RUN;    };
protected:
    HRESULT        Create();
    HRESULT        FillBufferWithSound( LPDIRECTSOUNDBUFFER pDSB);
    HRESULT        SetNotify();
    LONG        ConvertAviSampleToPcmSample(LONG aviSample, LONG *pSurplus = NULL);
    LONG        ConvertPcmSampleToAviSample(LONG pcmSample, LONG *pSurplus = NULL);
    HRESULT        AviStreamReadAndDecode(void *buffer, LONG lSize);
    CDSoundEnv    *m_pDSoundEnv;
    DWORD       m_dwDSBufferSize;
    PAVISTREAM    m_lpAudioStream;
    AVISTREAMINFO    m_asInfo;
    LPWAVEFORMATEX    m_lpwfAvi;
    LONG        m_lAviStart;
    LONG        m_lAviEnd;
    DWORD       m_lAviSamplePerSec;
    DWORD       m_dwAviCurrentPos;    //    In PcmSample
    DWORD       m_lAviRead;
    WAVEFORMATEX    m_wfPcm;
    DWORD       m_dwSizeConverted;
    HACMSTREAM  m_hAcmStream;

    BYTE        *m_pPcmBuffer;            //    PCM Buffer (target to convert)
    BYTE        *m_pPcmBufferCurrent;    //    CurrentPos in m_pPcmBuffer
    LONG        m_dwPcmBufferSizeRemain;    //    Size of PCM Buffer
    DWORD       m_dwPcmBufferSize;

    ACMSTREAMHEADER    m_acmStreamHeader;    //
    BYTE        *m_lpAviData;
    LONG        m_dwAviDataSize;


    HANDLE      m_hNotificationEvent[AVIPCMSOUNDSTREAM_NUM_BLOCKS];
    HANDLE        m_hTerminator;

    DWORD       m_dwThreadID;
    HANDLE      m_hThread;
    
    LPDIRECTSOUNDBUFFER    m_lpDSBuffer;
    LONG        m_lVol;
    LONG        m_lFreq;
    LONG        m_lPan;
    LONG        m_lCurrentPosition;
    
    BOOL        m_bThreadDone;
    BOOL        m_bThreadActive;

    DWORD        m_dwNumNotification;
    DWORD        m_dwNotifySize;
    DWORD        m_dwBufferLengthInSec;

    DWORD        m_dwLastPlayPos;
    DWORD        m_dwPlayProgress;
    DWORD        m_dwNextWriteOffset;

    DWORD        m_dwPhase;
    DWORD        m_dwPhaseAfterReplay;
    FLOAT        m_fFadeTime;
    FLOAT        m_fFadeLen;
    CRITICAL_SECTION    m_CriticalSection;

    HRESULT      RestoreBuffer( LPDIRECTSOUNDBUFFER pDSB, BOOL* pbWasRestored );

    HRESULT      Reset();
    HRESULT      HandleNotificationEvent();
};

 

【Main program part 3: implementation of the CPCMStreamForAVIStream class (PCMStreamForAVIStream.cpp)】


#include "StdAfx.h"
#include <mmsystem.h>
#include <dxerr9.h>
#include <dsound.h>
#include <crtdbg.h>

#include "DSQuickLib.h"
#include "PCMStreamForAVIStream.h"

DWORD WINAPI AVIStreamPlayback_DoThread(LPVOID pVoid);
#define    SAFE_RELEASE(o)    {if (o){    (o)->Release(); (o) = NULL;    }}
#define    SAFE_DELETE(o)    {if (o){    delete (o); (o) = NULL;    }}
#define SAFE_DELETE_ARRAY(p) { if(p) { delete[] (p);   (p)=NULL; } }
#define SAFE_CLOSEHANDLE(h) { if(h) { CloseHandle(h);   (h)=NULL; } }

CPCMStreamForAVIStream::CPCMStreamForAVIStream(CDSoundEnv *pEnv, PAVISTREAM lpAudioStream)
{
    m_pDSoundEnv = pEnv;
    m_lpAudioStream = lpAudioStream;
    m_lpwfAvi = NULL;

    m_dwDSBufferSize = 0L;
    m_hThread = NULL;
    m_dwThreadID = 0;
    for (int i = 0; i < AVIPCMSOUNDSTREAM_NUM_BLOCKS ; ++i)
        m_hNotificationEvent[i] = NULL;
    m_hTerminator = NULL;
    m_lpDSBuffer = NULL;

    m_dwNumNotification = AVIPCMSOUNDSTREAM_NUM_BLOCKS;    //    number of blocks the buffer is split into
    m_dwBufferLengthInSec = AVIPCMSOUNDSTREAM_NUM_BLOCKS;        //    default buffer length: one second per block

    InitializeCriticalSection(&m_CriticalSection);

    m_dwPhase = 0;
    m_bThreadActive = FALSE;
    m_bThreadDone = FALSE;

    m_lpAviData = NULL;
    m_dwAviDataSize = 0;

    m_pPcmBuffer = NULL;
    m_pPcmBufferCurrent = NULL;
    m_dwPcmBufferSizeRemain = 0;
    m_hAcmStream = NULL;

    pEnv->AddSoundObject(this);
}

CPCMStreamForAVIStream::~CPCMStreamForAVIStream(void)
{
    m_bThreadDone = TRUE;
    if (m_hTerminator != NULL){
        SetEvent(m_hTerminator);
        while(m_bThreadActive){
            Sleep(10);
        }
        SAFE_CLOSEHANDLE(m_hThread);
    }
    for (int i = 0; i < AVIPCMSOUNDSTREAM_NUM_BLOCKS ; ++i)
        SAFE_CLOSEHANDLE(m_hNotificationEvent[i]);
    if (m_hAcmStream != NULL){
        acmStreamUnprepareHeader(m_hAcmStream,&m_acmStreamHeader,0);
        acmStreamClose(m_hAcmStream,0);
    }

    DeleteCriticalSection(&m_CriticalSection);
    SAFE_CLOSEHANDLE(m_hTerminator);
    SAFE_RELEASE(m_lpDSBuffer);
    SAFE_DELETE(m_lpwfAvi);
    SAFE_DELETE_ARRAY(m_pPcmBuffer);
    SAFE_DELETE_ARRAY(m_lpAviData);
    m_pDSoundEnv->RemoveSoundObject(this);
}

//
//    Prepare for playback
//
HRESULT    CPCMStreamForAVIStream::Prepare(){
    LONG           lSize;

    //    Check the extent of the stream
    m_lAviStart = AVIStreamStart(m_lpAudioStream);
    m_lAviEnd   = m_lAviStart + AVIStreamLength(m_lpAudioStream);
    m_dwAviCurrentPos = 0;

    if (m_lAviEnd <= m_lAviStart){
        _RPT0(_CRT_WARN,"Stream にデータがありません。");
        goto e_Fail;
    }
    AVIStreamInfo(m_lpAudioStream,&m_asInfo, sizeof(AVISTREAMINFO));
    
    //    Get the stream format
    AVIStreamReadFormat(m_lpAudioStream, m_lAviStart, NULL, &lSize);
    SAFE_DELETE(m_lpwfAvi);
    m_lpwfAvi = (LPWAVEFORMATEX)new BYTE[lSize];
    AVIStreamReadFormat(m_lpAudioStream, m_lAviStart, m_lpwfAvi, &lSize);

    //    Compute the AVI sample rate (AVI samples per second)
    if (0 != m_asInfo.dwRate % m_asInfo.dwScale){
        _RPT0(_CRT_WARN,_T("Warning:AVI sample not aligned to 1 sec.\n"));
    }
    m_lAviSamplePerSec = m_asInfo.dwRate / m_asInfo.dwScale;

    //    Decide the playback (PCM) format
    m_wfPcm.cbSize = sizeof(WAVEFORMATEX);
    m_wfPcm.wFormatTag = WAVE_FORMAT_PCM;
    m_wfPcm.nSamplesPerSec = m_lpwfAvi->nSamplesPerSec;
    if (m_lpwfAvi->wFormatTag == m_wfPcm.wFormatTag){
        m_wfPcm = *m_lpwfAvi;
    }else{
        if (0 != acmFormatSuggest(NULL, m_lpwfAvi, &m_wfPcm, sizeof(WAVEFORMATEX), ACM_FORMATSUGGESTF_WFORMATTAG|ACM_FORMATSUGGESTF_NSAMPLESPERSEC)){
            goto    e_Fail;
        }
    }


    DWORD    nAlign;
    nAlign = m_wfPcm.nBlockAlign;
    m_dwNotifySize = nAlign * (DWORD)m_wfPcm.nSamplesPerSec;
    m_dwNotifySize *= m_dwBufferLengthInSec;
    m_dwNotifySize /= m_dwNumNotification;
    m_dwNotifySize -= m_dwNotifySize % nAlign;
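    //    With m_dwBufferLengthInSec == m_dwNumNotification (both 2 by default),
    //    each notification block is one second of PCM, rounded down to a whole
    //    multiple of nBlockAlign.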

    for (int i = 0; i < AVIPCMSOUNDSTREAM_NUM_BLOCKS; ++i)
        m_hNotificationEvent[i] =  CreateEvent( NULL, FALSE, FALSE, NULL );
    DWORD    dwSizeConverted;
    LONG    lSurplus;
    dwSizeConverted = ConvertAviSampleToPcmSample(m_lAviEnd, &lSurplus);
    if (lSurplus != 0){
        dwSizeConverted++;
    }
    dwSizeConverted *= m_wfPcm.wBitsPerSample * m_wfPcm.nChannels / 8;
    m_dwSizeConverted = dwSizeConverted;
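    //    m_dwSizeConverted is the total length of the decoded stream in bytes;
    //    HandleNotificationEvent() compares the accumulated play progress against
    //    it to decide when the whole stream has been played.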


    if (FAILED(Create())){
        goto    e_Fail;
    }

    m_hTerminator =  CreateEvent( NULL, FALSE, FALSE, NULL );
    m_hThread = CreateThread(NULL,0,::AVIStreamPlayback_DoThread,(LPVOID)this,0,&m_dwThreadID);

    m_dwPhase = STREAMSOUND_STANDBY;
    return    S_OK;
e_Fail:
    SAFE_DELETE(m_lpwfAvi);
    return    E_FAIL;
}

//
//    Convert a sample index in the AVI stream to a PCM sample index.
//    If the division is not exact, the remainder is returned in pSurplus (rarely needed).
//
LONG    CPCMStreamForAVIStream::ConvertAviSampleToPcmSample(LONG aviSample, LONG *pSurplus){
    __int64    i;
    i = (__int64)aviSample * m_lpwfAvi->nSamplesPerSec;
    i *= m_asInfo.dwScale;
    LONG    lSurplus;
    lSurplus = (LONG)(i % m_asInfo.dwRate);
    if (pSurplus != NULL)
        *pSurplus = lSurplus;
    i /= m_asInfo.dwRate;
    return    (LONG)i;


}


//
//    Convert a PCM sample index to a sample index in the AVI stream.
//    If the division is not exact, the remainder is returned in pSurplus.
LONG    CPCMStreamForAVIStream::ConvertPcmSampleToAviSample(LONG pcmSample, LONG *pSurplus){
    __int64    i;
    i = pcmSample;
    i *= m_asInfo.dwRate;
    i /= m_asInfo.dwScale;
    LONG    lSurplus;
    lSurplus = (LONG)(i % m_lpwfAvi->nSamplesPerSec);

    if (pSurplus != NULL)
        *pSurplus = lSurplus;
    i /= m_lpwfAvi->nSamplesPerSec;
    return    (LONG)i;
}


//
//    Build the streaming buffer
//
HRESULT    CPCMStreamForAVIStream::Create(){
    LPDIRECTSOUND8    pDS;
    DSBUFFERDESC dsbd;
    HRESULT    hr;


    if( m_pDSoundEnv == NULL )
        return CO_E_NOTINITIALIZED;

    m_dwDSBufferSize = m_dwNumNotification * m_dwNotifySize;
    ZeroMemory( &dsbd, sizeof(DSBUFFERDESC) );
    dsbd.dwSize          = sizeof(DSBUFFERDESC);
    dsbd.dwFlags         = DSBCAPS_CTRLVOLUME | DSBCAPS_CTRLFREQUENCY | DSBCAPS_CTRLPAN |
                           DSBCAPS_CTRLPOSITIONNOTIFY | 
                           DSBCAPS_GETCURRENTPOSITION2 | DSBCAPS_LOCSOFTWARE ;
    dsbd.dwBufferBytes   = m_dwDSBufferSize;
    dsbd.guid3DAlgorithm = GUID_NULL;
    dsbd.lpwfxFormat     = &m_wfPcm;
    pDS = m_pDSoundEnv->GetDirectSound();
    hr = pDS->CreateSoundBuffer( &dsbd, &m_lpDSBuffer, NULL );
    SAFE_RELEASE(pDS);
    if (FAILED(hr)){
        _RPT1(_CRT_WARN,_T("CreateSoundBuffer 失敗 0x%x"),hr);
        goto    e_Fail;
    }
    SetNotify();

    m_dwLastPlayPos     = 0;
    m_dwPlayProgress    = 0;
    m_dwNextWriteOffset = 0;
    //m_bFillNextNotificationWithSilence = FALSE;
    FillBufferWithSound(m_lpDSBuffer);

    return    S_OK;
e_Fail:
    return    hr;
}

//-----------------------------------------------------------------------------
// Name: CPCMStreamForAVIStream::AviStreamReadAndDecode()
// Desc: Reads from the AVI audio stream, decodes it to PCM and writes it to the buffer.
//-----------------------------------------------------------------------------
HRESULT    CPCMStreamForAVIStream::AviStreamReadAndDecode(void *buffer,LONG lSize){
    HRESULT hr = E_FAIL;
    LONG    lPos = m_lAviRead;
    INT        bits = (m_wfPcm.wBitsPerSample *m_wfPcm.nChannels) / 8;
    LONG    lPcmSampleNum = lSize / bits;
    LONG    lAviSampleNum = ConvertPcmSampleToAviSample(lPcmSampleNum);
    BYTE    silence = (BYTE)(m_wfPcm.wBitsPerSample == 8 ? 128 : 0 );
    BYTE    *pwrite = (BYTE*)buffer;
    m_dwAviCurrentPos += lPcmSampleNum;

    BOOL    bFillWithSilence = FALSE;
    LONG    lWritten = 0;
    if (lPos < m_lAviStart){
        INT    iSize = m_lAviStart - lPos;
        INT pcmSize;
        if (iSize > lAviSampleNum)
            iSize = lAviSampleNum;

        pcmSize = (iSize * m_wfPcm.nSamplesPerSec) / m_lAviSamplePerSec;
        LONG    lSizeWrite = pcmSize * bits;
        //    Because the size of an AVI sample is not strictly proportional to time,
        //    the error here could be large; in practice this path is probably never taken.
        if (lSizeWrite > lSize){
            lPos += (m_lAviSamplePerSec * lSize) / bits;
            lSizeWrite = lSize;
        }else{
            lPos += iSize;
        }

        FillMemory( (BYTE*) pwrite, (DWORD)lSizeWrite, silence );
        pwrite += lSizeWrite;
        lWritten += lSizeWrite;
    }

    while (lSize > lWritten){
        //    If decoded PCM is still left in the buffer, copy it out first
        if (m_dwPcmBufferSizeRemain > 0){
            LONG    lSizeWrite = lSize - lWritten;
            if (lSizeWrite > m_dwPcmBufferSizeRemain){
                lSizeWrite = m_dwPcmBufferSizeRemain;
            }
            ::CopyMemory((BYTE*)pwrite,m_pPcmBufferCurrent,lSizeWrite);
            pwrite += lSizeWrite;
            lWritten += lSizeWrite;
            m_pPcmBufferCurrent += lSizeWrite;
            m_dwPcmBufferSizeRemain -= lSizeWrite;
            if (m_dwPcmBufferSizeRemain <= 0){
                m_pPcmBufferCurrent = NULL;
                m_dwPcmBufferSizeRemain = 0;
            }else{
                continue;
            }
        }
        if (bFillWithSilence){
            //    The stream has ended: fill the remainder with zeros (silence)
            LONG lSizeWrite = lSize - lWritten;
            ZeroMemory(pwrite,lSizeWrite);
            lWritten += lSizeWrite;
            break;
        }
        LONG    lBufferSize;
        LONG    lEnd = lPos + m_lAviSamplePerSec;
        if (lEnd > m_lAviEnd){
            //    Last block of the stream
            lEnd = m_lAviEnd;
            
            bFillWithSilence = TRUE;
        }
        LONG    numRead = lEnd - lPos;
        if (numRead <= 0){
            //    Nothing left to read
            continue;
        }
        lBufferSize = m_dwAviDataSize;

        LONG    lNumBytesResult;
        LONG    lNumSamplesResult;
        hr = AVIStreamRead(m_lpAudioStream, lPos, numRead, m_lpAviData, lBufferSize, 
            &lNumBytesResult, &lNumSamplesResult);

        LONG    lPosSave = lPos;
        lPos += numRead;
        if (hr == AVIERR_BUFFERTOOSMALL){
            _RPT0(_CRT_WARN,_T("AVIERR_BUFFERTOOSMALL\n"));
        }
        if (hr == AVIERR_MEMORY){
            _RPT0(_CRT_WARN,_T("AVIERR_MEMORY\n"));
        }
        if (hr == AVIERR_FILEREAD){
            _RPT0(_CRT_WARN,_T("AVIERR_FILEREAD\n"));
        }

        m_pPcmBufferCurrent = m_pPcmBuffer;
        m_dwPcmBufferSizeRemain = 0;
        DWORD dwLengthSaved = m_acmStreamHeader.cbSrcLength;
        m_acmStreamHeader.cbSrcLength = lNumBytesResult;
        DWORD    dwFlag = 0;
        if (lPosSave <= m_lAviStart){
            dwFlag |= ACM_STREAMCONVERTF_START;
        }
        dwFlag |= ACM_STREAMCONVERTF_END;

        acmStreamConvert(m_hAcmStream, &m_acmStreamHeader, dwFlag);
        m_dwPcmBufferSizeRemain = m_acmStreamHeader.cbDstLengthUsed;

        m_acmStreamHeader.cbSrcLength = dwLengthSaved;
    }
    m_lAviRead = lPos;

    return    S_OK;
}


//-----------------------------------------------------------------------------
// Name: CPCMStreamForAVIStream::FillBufferWithSound()
// Desc: Fills the DirectSound buffer with decoded sound data.
//-----------------------------------------------------------------------------
HRESULT CPCMStreamForAVIStream::FillBufferWithSound( LPDIRECTSOUNDBUFFER pDSB )
{
    HRESULT hr; 
    VOID*   pDSLockedBuffer      = NULL; // pointer to the locked buffer memory
    DWORD   dwDSLockedBufferSize = 0;    // size of the locked buffer memory

    if( pDSB == NULL )
        return CO_E_NOTINITIALIZED;

    if( FAILED( hr = RestoreBuffer( pDSB, NULL ) ) ) {
        _RPT0(_CRT_WARN,  _T("RestoreBuffer"));
        return    hr;
    }
    if (m_dwPlayProgress != 0){
        m_dwAviCurrentPos = m_dwPlayProgress;
    }
    m_lAviRead = ConvertPcmSampleToAviSample(m_dwAviCurrentPos);
    


    //    Prepare the ACM conversion to PCM

    if (m_hAcmStream != NULL){
        acmStreamUnprepareHeader(m_hAcmStream,&m_acmStreamHeader,0);
        acmStreamClose(m_hAcmStream,0);
        m_hAcmStream = NULL;
    }
    MMRESULT mmr = acmStreamOpen(&m_hAcmStream, NULL, m_lpwfAvi, &m_wfPcm, NULL, 0, 0, ACM_STREAMOPENF_NONREALTIME);
    if (mmr != 0) {
        if (mmr == ACMERR_NOTPOSSIBLE)
            _RPT0(_CRT_WARN,_T("要求された操作は実行できません。"));
        m_hAcmStream = NULL;
        return    E_FAIL;
    }
    SAFE_DELETE_ARRAY(m_lpAviData);
    m_dwAviDataSize = m_asInfo.dwRate * m_asInfo.dwSampleSize;
    m_dwAviDataSize += m_asInfo.dwScale - 1;
    m_dwAviDataSize /= m_asInfo.dwScale;
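    //    m_dwAviDataSize is one second of compressed audio data
    //    (dwRate / dwScale samples of dwSampleSize bytes each), rounded up.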
    m_lpAviData = new BYTE[m_dwAviDataSize];

    SAFE_DELETE_ARRAY(m_pPcmBuffer);
    acmStreamSize(m_hAcmStream,m_dwAviDataSize,&m_dwPcmBufferSize,ACM_STREAMSIZEF_SOURCE);
    m_pPcmBuffer = new BYTE[m_dwPcmBufferSize];
    m_pPcmBufferCurrent = m_pPcmBuffer;
    m_dwPcmBufferSizeRemain = 0;

    ZeroMemory((BYTE*)&m_acmStreamHeader,sizeof(ACMSTREAMHEADER));
    m_acmStreamHeader.cbStruct = sizeof(ACMSTREAMHEADER);
    m_acmStreamHeader.pbSrc = m_lpAviData;
    m_acmStreamHeader.cbSrcLength = m_dwAviDataSize;
    m_acmStreamHeader.pbDst = m_pPcmBuffer;
    m_acmStreamHeader.cbDstLength = m_dwPcmBufferSize;

    mmr = acmStreamPrepareHeader(m_hAcmStream,&m_acmStreamHeader,0);
    if (mmr != 0){
        _RPT0(_CRT_WARN,_T("acmStreamHeader の準備に失敗\n"));
        if (mmr == MMSYSERR_INVALHANDLE){
        }
    }

    //    Lock the sound buffer
    if( FAILED( hr = pDSB->Lock( 0, m_dwDSBufferSize, 
                                 &pDSLockedBuffer, &dwDSLockedBufferSize, 
                                 NULL, NULL, 0L ) ) ){
        _RPT1(_CRT_WARN, _T("Lock 0x%x\n"), hr );
        return    hr;
    }

    AviStreamReadAndDecode((BYTE*) pDSLockedBuffer,dwDSLockedBufferSize);

    pDSB->Unlock( pDSLockedBuffer, dwDSLockedBufferSize, NULL, 0 );
    return S_OK;
}

//-----------------------------------------------------------------------------
// Name: CPCMStreamForAVIStream::RestoreBuffer()
// Desc: Restores a lost buffer
//-----------------------------------------------------------------------------
HRESULT CPCMStreamForAVIStream::RestoreBuffer( LPDIRECTSOUNDBUFFER pDSB, BOOL* pbWasRestored )
{
    HRESULT hr;

    if( pDSB == NULL )
        return CO_E_NOTINITIALIZED;
    if( pbWasRestored )
        *pbWasRestored = FALSE;

    DWORD dwStatus;
    if( FAILED( hr = pDSB->GetStatus( &dwStatus ) ) ){
        _RPT1(_CRT_WARN,_T("GetStatus %x\n"), hr );
        return    hr;
    }

    if( dwStatus & DSBSTATUS_BUFFERLOST ){
        while(true){
            hr = pDSB->Restore();
            if( hr != DSERR_BUFFERLOST )
                break;
            Sleep( 10 );
        }

        if( pbWasRestored != NULL )
            *pbWasRestored = TRUE;

        m_dwPlayProgress = 0L;

        return S_OK;
    }else{
        return S_FALSE;
    }
}

//
//    CPCMStreamForAVIStream::SetNotify()
//    Sets up the position-notification events
//
HRESULT    CPCMStreamForAVIStream::SetNotify()
{
    HRESULT    hr;
    DSBPOSITIONNOTIFY*  pPosNotify     = NULL; 
    LPDIRECTSOUNDNOTIFY pDSNotify      = NULL;
    DWORD    dwC;
    if( FAILED( hr = m_lpDSBuffer->QueryInterface( IID_IDirectSoundNotify, 
                                                (VOID**)&pDSNotify ) ) )
    {
        _RPT1(_CRT_WARN,_T("QueryInterface 失敗 0x%x"),hr);
        goto    e_Fail;
    }

    pPosNotify = new DSBPOSITIONNOTIFY[ m_dwNumNotification];
    for( dwC = 0; dwC < m_dwNumNotification; dwC++ )
    {
        pPosNotify[dwC].dwOffset     = (m_dwNotifySize * dwC) + m_dwNotifySize - 1;
        pPosNotify[dwC].hEventNotify = m_hNotificationEvent[dwC];             
    }

    if( FAILED( hr = pDSNotify->SetNotificationPositions( m_dwNumNotification,
                                                        pPosNotify ) ) )
    {
        _RPT0(_CRT_WARN,  _T("SetNotificationPositions") );
        goto    e_Fail;
    }
    SAFE_RELEASE( pDSNotify );
    SAFE_DELETE_ARRAY( pPosNotify );
    return    S_OK;

e_Fail:
    SAFE_RELEASE( pDSNotify );
    SAFE_DELETE_ARRAY( pPosNotify );
    return    hr;
}

//----------------------------------------------------
//    CPCMStreamForAVIStream::DoThread()
//    Runs the streaming worker thread
//----------------------------------------------------
void    CPCMStreamForAVIStream::DoThread(){
    DWORD   dwEvent;
    HRESULT    hr;
    int    i;
    m_bThreadActive = TRUE;
    HANDLE    hEvents[AVIPCMSOUNDSTREAM_NUM_BLOCKS + 1];
    for (i = 0; i < AVIPCMSOUNDSTREAM_NUM_BLOCKS ; ++i)
        hEvents[i] = m_hNotificationEvent[i];
    hEvents[i] = m_hTerminator;
    while(!m_bThreadDone){
        dwEvent = WaitForMultipleObjects( AVIPCMSOUNDSTREAM_NUM_BLOCKS + 1, hEvents, FALSE, INFINITE);
        if (dwEvent >= WAIT_OBJECT_0 && dwEvent < WAIT_OBJECT_0 + AVIPCMSOUNDSTREAM_NUM_BLOCKS){
            //_RPT1(_CRT_WARN, _T("Notify:%d\n"),dwEvent - WAIT_OBJECT_0);
            EnterCriticalSection(&m_CriticalSection);
            hr = HandleNotificationEvent();
            LeaveCriticalSection(&m_CriticalSection);
            if (FAILED(hr)){
                m_lpDSBuffer->Stop();
                m_dwPhase = STREAMSOUND_STOP;
            }
        }
        if (dwEvent == WAIT_OBJECT_0 + AVIPCMSOUNDSTREAM_NUM_BLOCKS){
        }
    }
    m_bThreadActive = FALSE;
}

//-----------------------------------------------------------------------------
// Name: CPCMStreamForAVIStream::HandleNotificationEvent()
// Desc: Handles a buffer position notification
//-----------------------------------------------------------------------------
HRESULT CPCMStreamForAVIStream::HandleNotificationEvent(  )
{
    HRESULT hr;
    DWORD   dwCurrentPlayPos;
    DWORD   dwPlayDelta;
    VOID*   pDSLockedBuffer = NULL;
    VOID*   pDSLockedBuffer2 = NULL;
    DWORD   dwDSLockedBufferSize;
    DWORD   dwDSLockedBufferSize2;

    if( m_lpDSBuffer == NULL || m_lpwfAvi == NULL )
        return CO_E_NOTINITIALIZED;

    // Restore the buffer if it was lost
    BOOL bRestored;
    if( FAILED( hr = RestoreBuffer( m_lpDSBuffer, &bRestored ) ) ){
        _RPT0(_CRT_WARN,_T("RestoreBuffer"));
        return    hr;
    }

    if( bRestored )
    {
        if( FAILED( hr = FillBufferWithSound( m_lpDSBuffer ) ) ){
            _RPT0(_CRT_WARN,_T("FillBufferWithSound"));
            return    hr;
        }
        return S_OK;
    }

    // Lock the DirectSound buffer
    if( FAILED( hr = m_lpDSBuffer->Lock( m_dwNextWriteOffset, m_dwNotifySize, 
                                         &pDSLockedBuffer, &dwDSLockedBufferSize, 
                                         &pDSLockedBuffer2, &dwDSLockedBufferSize2, 0L ) ) ){
        _RPT0(_CRT_WARN,_T("Lock"));
        return    hr;
    }

    //    Since m_dwDSBufferSize and m_dwNextWriteOffset are both multiples of
    //    m_dwNotifySize, pDSLockedBuffer2 should never become valid.
    if( pDSLockedBuffer2 != NULL ){
        m_lpDSBuffer->Unlock( pDSLockedBuffer, dwDSLockedBufferSize, pDSLockedBuffer2, dwDSLockedBufferSize2 );
        return E_UNEXPECTED; 
    }

    if( FAILED( hr = AviStreamReadAndDecode((void*)pDSLockedBuffer,dwDSLockedBufferSize))){
        m_lpDSBuffer->Unlock( pDSLockedBuffer, dwDSLockedBufferSize, NULL, 0 );
        _RPT0(_CRT_WARN,_T("Read"));
        return    hr;        
    }

    m_lpDSBuffer->Unlock( pDSLockedBuffer, dwDSLockedBufferSize, NULL, 0 );

    if( FAILED( hr = m_lpDSBuffer->GetCurrentPosition( &dwCurrentPlayPos, NULL ) ) ){
        _RPT0(_CRT_WARN,_T("GetCurrentPosition"));
        return    hr;        
    }
    //    Compute how far playback has advanced since the last check.
    if( dwCurrentPlayPos < m_dwLastPlayPos )
        dwPlayDelta = ( m_dwDSBufferSize - m_dwLastPlayPos ) + dwCurrentPlayPos;
    else
        dwPlayDelta = dwCurrentPlayPos - m_dwLastPlayPos;

    m_dwPlayProgress += dwPlayDelta;
    //_RPT2(_CRT_WARN,_T("Progress:%x total %x\n"),dwPlayDelta,m_dwPlayProgress);
    m_dwLastPlayPos = dwCurrentPlayPos;

    //    Check the play progress and stop the buffer once the whole stream has been played.

    if( m_dwPlayProgress >= m_dwSizeConverted/* + m_dwNotifySize*/)
    {
        m_dwPhase = STREAMSOUND_STOP;
        m_lpDSBuffer->Stop();
    }

    //    Advance the offset to lock on the next notification
    m_dwNextWriteOffset += dwDSLockedBufferSize; 
    m_dwNextWriteOffset %= m_dwDSBufferSize; // Circular buffer

    return S_OK;
}

//----------------------------------------------------
//    CPCMStreamForAVIStream::Play()
//    Start streaming playback
//----------------------------------------------------
HRESULT CPCMStreamForAVIStream::Play( DWORD dwPriority, DWORD dwFlags, LONG lVol, LONG lFreq, LONG lPan , LONG lFade){
    HRESULT hr = S_OK;
    BOOL    bRestored;
    DWORD    dwCurrent;
    LPDIRECTSOUNDBUFFER pDSB = this->m_lpDSBuffer;
    FLOAT    fFadeTime;
    FLOAT    fVol1, fVol2;

    if( pDSB == NULL ){
        _RPT0(_CRT_WARN,_T("No Sound Buffer"));
        return    E_FAIL;
    }
    pDSB->Stop();
    EnterCriticalSection(&m_CriticalSection);
    switch(m_dwPhase){
        case    STREAMSOUND_STANDBY:
        case    STREAMSOUND_STOP:
        case    STREAMSOUND_RUN:
        case    STREAMSOUND_FADEIN:
        case    STREAMSOUND_FADEOUT:
            m_lVol = lVol;
            m_lPan = lPan;
            m_lFreq = lFreq;
            m_dwPhaseAfterReplay = STREAMSOUND_RUN;
            m_fFadeTime = 0.0f;
            m_fFadeLen = (FLOAT)lFade;
            if (lFade != 0){
                m_dwPhaseAfterReplay = STREAMSOUND_FADEIN;
            }
            m_dwPlayProgress = 0;
            if (m_dwPhase != STREAMSOUND_STANDBY){
                Reset();
                FillBufferWithSound(pDSB);
            }
            break;
        case    STREAMSOUND_PAUSE:
            dwCurrent = ConvertPcmSampleToAviSample(m_dwPlayProgress);
            if (dwCurrent >= (DWORD)m_lAviEnd){
                goto    exitPlay;
            }
            //    Align to an AVI sample boundary
            m_dwPlayProgress = ConvertAviSampleToPcmSample(dwCurrent);
            Reset();
            FillBufferWithSound(pDSB);
            break;
        default:
            _RPT0(_CRT_WARN, _T("サウンドバッファが再生可能になっていません。") );
            hr = E_FAIL;

            goto    exitPlay;
            break;
    }
    pDSB->SetCurrentPosition(0);    

    // Restore the buffer if it was lost
    if( FAILED( hr = RestoreBuffer( pDSB, &bRestored ) ) ){
        _RPT0(_CRT_WARN,_T("RestoreBuffer"));
        goto    exitPlay;
    }

    if( bRestored )
    {
        // The buffer was restored, so we need to fill it with new data
        if( FAILED( hr = FillBufferWithSound( pDSB ) ) ){
            _RPT0(_CRT_WARN,_T("FillBufferWithSound"));
            goto    exitPlay;
        }
    }

    fFadeTime = m_fFadeTime;
    if (fFadeTime > m_fFadeLen)
        fFadeTime = m_fFadeLen;
    switch(m_dwPhaseAfterReplay){
        case    STREAMSOUND_FADEIN:
            fVol1 = DSBVOLUME_MIN * (m_fFadeLen - fFadeTime) / m_fFadeLen;
            fVol2 = (fFadeTime / m_fFadeLen) * m_lVol;
            lVol = (LONG)(fVol1 + fVol2);
            pDSB->SetVolume( lVol );
            break;
        case    STREAMSOUND_FADEOUT:
            fVol1 = DSBVOLUME_MIN * fFadeTime / m_fFadeLen;
            fVol2 = ((m_fFadeLen - fFadeTime) / m_fFadeLen) * m_lVol;
            lVol = (LONG)(fVol1 + fVol2);
            pDSB->SetVolume( lVol );
            break;
        default:
            pDSB->SetVolume( m_lVol );
            break;
    }
    pDSB->SetPan( m_lPan );
    if( m_lFreq != -1 ){
        pDSB->SetFrequency( m_lFreq );
    }
    
    if (SUCCEEDED(hr = pDSB->Play( 0, dwPriority, dwFlags | DSBPLAY_LOOPING ))){
        m_dwPhase = m_dwPhaseAfterReplay;
    }
exitPlay:
    LeaveCriticalSection(&m_CriticalSection);
    return    hr;
}

//----------------------------------------------------
//    CPCMStreamForAVIStream::Stop()
//    Stop streaming playback
//----------------------------------------------------
HRESULT CPCMStreamForAVIStream::Stop(){
    HRESULT    hr = S_OK;
    switch(m_dwPhase){
        case    STREAMSOUND_RUN:
        case    STREAMSOUND_FADEIN:
        case    STREAMSOUND_FADEOUT:
            EnterCriticalSection(&m_CriticalSection);
            hr = m_lpDSBuffer->Stop();
            if (FAILED(hr)){
                _RPT0(_CRT_WARN,_T("Stop"));
            }
            m_dwPhase = STREAMSOUND_STOP;
            hr = Reset();
            LeaveCriticalSection(&m_CriticalSection);
            break;
        case    STREAMSOUND_PAUSE:
            EnterCriticalSection(&m_CriticalSection);
            m_dwPhase = STREAMSOUND_STOP;
            hr = Reset();
            LeaveCriticalSection(&m_CriticalSection);
            break;
    }
    return    hr;
}

HRESULT    CPCMStreamForAVIStream::Rewind(){
    HRESULT    hr;
    if (m_dwPhase == STREAMSOUND_STANDBY)
        return    S_OK;
    hr = Stop();
    if (FAILED(hr))
        return    hr;
    Reset();
    hr = FillBufferWithSound(m_lpDSBuffer);
    return    hr;
}

//-----------------------------------------------------------------------------
// Name: CPCMStreamForAVIStream::Reset()
// Desc: Resets the playback position to the start of the buffer
//-----------------------------------------------------------------------------
HRESULT CPCMStreamForAVIStream::Reset()
{
    HRESULT hr;

    if( m_lpDSBuffer == NULL || m_lpAudioStream == NULL )
        return CO_E_NOTINITIALIZED;

    m_dwLastPlayPos     = 0;
    m_dwPlayProgress    = 0;
    m_dwNextWriteOffset = 0;
    //m_bFillNextNotificationWithSilence = FALSE;

    BOOL bRestored;
    if( FAILED( hr = RestoreBuffer( m_lpDSBuffer, &bRestored ) ) ){
        _RPT0( _CRT_WARN, _T("RestoreBuffer"));
        goto    exitReset;
    }

    if( bRestored )
    {
        if( FAILED( hr = FillBufferWithSound( m_lpDSBuffer ) ) ){
            _RPT0( _CRT_WARN, _T("FillBufferWithSound"));
            goto    exitReset;
        }
    }

    //    m_pWaveFile->ResetFile();
    //    Rewind the AVI read position as well
    m_dwAviCurrentPos = 0;

    hr = m_lpDSBuffer->SetCurrentPosition( 0L );  

    m_pPcmBufferCurrent = m_pPcmBuffer;
    m_dwPcmBufferSizeRemain = 0;
exitReset:
    return    hr;
}

//-----------------------------------------------------------------------------
// Name: CPCMStreamForAVIStream::FadeOut()
// Desc: Starts a fade-out
//-----------------------------------------------------------------------------
void    CPCMStreamForAVIStream::FadeOut(LONG lFadeLen){
    EnterCriticalSection(&m_CriticalSection);
    FLOAT    t;
    switch(m_dwPhase){
        case    STREAMSOUND_RUN:
            m_fFadeTime = 0;
            m_fFadeLen = (FLOAT)lFadeLen;
            m_dwPhase = STREAMSOUND_FADEOUT;
            break;
        case    STREAMSOUND_FADEIN:
            t = m_fFadeTime / m_fFadeLen;
            if (t > 1.0f)
                t = 1.0f;
            m_fFadeLen = (FLOAT)lFadeLen;
            m_fFadeTime = m_fFadeLen - (t * m_fFadeLen);
            m_dwPhase = STREAMSOUND_FADEOUT;
            break;
        case    STREAMSOUND_PAUSE:
            switch(m_dwPhaseAfterReplay){
                case    STREAMSOUND_RUN:
                    m_fFadeTime = 0;
                    m_fFadeLen = (FLOAT)lFadeLen;
                    m_dwPhaseAfterReplay = STREAMSOUND_FADEOUT;
                    break;
                case    STREAMSOUND_FADEIN:
                    t = m_fFadeTime / m_fFadeLen;
                    if (t > 1.0f)
                        t = 1.0f;
                    m_fFadeLen = (FLOAT)lFadeLen;
                    m_fFadeTime = m_fFadeLen - (t * m_fFadeLen);
                    m_dwPhaseAfterReplay = STREAMSOUND_FADEOUT;
                    break;
                case    STREAMSOUND_FADEOUT:
                    break;
            }
            break;
    }
    LeaveCriticalSection(&m_CriticalSection);
}

//-----------------------------------------------------------------------------
// Name: CPCMStreamForAVIStream::PollStatus()
// Desc: Handles FADE-IN / FADE-OUT volume ramping
//-----------------------------------------------------------------------------
void    CPCMStreamForAVIStream::PollStatus(FLOAT timeElapsed){
    FLOAT    fFadeTime;
    FLOAT    fVol1, fVol2;
    LONG    lVol;
    switch(m_dwPhase){
        case    STREAMSOUND_FADEIN:
            EnterCriticalSection(&m_CriticalSection);
            if (m_dwPhase == STREAMSOUND_FADEIN){
                fFadeTime = m_fFadeTime + timeElapsed;
                if (fFadeTime > m_fFadeLen){
                    m_dwPhase = STREAMSOUND_RUN;
                    fFadeTime = m_fFadeLen;
                }
                m_fFadeTime = fFadeTime;
                fVol1 = DSBVOLUME_MIN * (m_fFadeLen - fFadeTime) / m_fFadeLen;
                fVol2 = (fFadeTime / m_fFadeLen) * m_lVol;
                lVol = (LONG)(fVol1 + fVol2);
                m_lpDSBuffer->SetVolume( lVol );
            }
            LeaveCriticalSection(&m_CriticalSection);
            break;
        case    STREAMSOUND_FADEOUT:
            lVol = m_lVol;
            fFadeTime = m_fFadeTime + timeElapsed;
            if (fFadeTime >= m_fFadeLen){
                Stop();
                fFadeTime = m_fFadeLen;
                break;
            }
            m_fFadeTime = fFadeTime;
            EnterCriticalSection(&m_CriticalSection);
            fVol1 = DSBVOLUME_MIN * fFadeTime / m_fFadeLen;
            fVol2 = ((m_fFadeLen - fFadeTime) / m_fFadeLen) * m_lVol;
            lVol = (LONG)(fVol1 + fVol2);
            m_lpDSBuffer->SetVolume( lVol );
            LeaveCriticalSection(&m_CriticalSection);
            if (lVol == DSBVOLUME_MIN){
                Stop();
            }
            break;
    }
}

HRESULT    CPCMStreamForAVIStream::Pause(){
    if( m_lpDSBuffer == NULL || m_lpAudioStream == NULL )
        return CO_E_NOTINITIALIZED;

    EnterCriticalSection(&m_CriticalSection);
    if (m_dwPhase == STREAMSOUND_RUN || m_dwPhase == STREAMSOUND_FADEIN || m_dwPhase == STREAMSOUND_FADEOUT){
        m_lpDSBuffer->Stop();
        m_dwPhaseAfterReplay = m_dwPhase;
        m_dwPhase = STREAMSOUND_PAUSE;
    }
    LeaveCriticalSection(&m_CriticalSection);

    return    S_OK;
}

DWORD WINAPI AVIStreamPlayback_DoThread(LPVOID pVoid){
    ((CPCMStreamForAVIStream*)pVoid)->DoThread();
    return    0L;
}

 

 Some of the constants used in PCMStreamForAVIStream.cpp, such as STREAMSOUND_RUN, are declared in DSQuickLib.
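 For readers who do not have DSQuickLib at hand, the phases form a small state machine. A plausible declaration (the names come from DSQuickLib; the concrete values below are only an assumption) would be:

// Hypothetical sketch; the real definitions live in DSQuickLib.
enum {
    STREAMSOUND_STANDBY = 1,   // prepared, not yet played
    STREAMSOUND_RUN     = 2,   // playing
    STREAMSOUND_STOP    = 3,   // stopped / finished
    STREAMSOUND_PAUSE   = 4,   // paused, can be resumed
    STREAMSOUND_FADEIN  = 5,   // playing while the volume ramps up
    STREAMSOUND_FADEOUT = 6    // playing while the volume ramps down
};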

 Admittedly, this is a bit hard to follow.

 Next, I would like to display the video using Direct3D.
