/*==LICENSE==*

CyanWorlds.com Engine - MMOG client, server and tools
Copyright (C) 2011 Cyan Worlds, Inc.

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

Additional permissions under GNU GPL version 3 section 7

If you modify this Program, or any covered work, by linking or
combining it with any of RAD Game Tools Bink SDK, Autodesk 3ds Max SDK,
NVIDIA PhysX SDK, Microsoft DirectX SDK, OpenSSL library, Independent
JPEG Group JPEG library, Microsoft Windows Media SDK, or Apple QuickTime SDK
(or a modified version of those libraries),
containing parts covered by the terms of the Bink SDK EULA, 3ds Max EULA,
PhysX SDK EULA, DirectX SDK EULA, OpenSSL and SSLeay licenses, IJG
JPEG Library README, Windows Media SDK EULA, or QuickTime SDK EULA, the
licensors of this Program grant you additional
permission to convey the resulting work. Corresponding Source for a
non-source form of such a combination shall include the source code for
the parts of OpenSSL and IJG JPEG Library used as well as that of the covered
work.

You can contact Cyan Worlds, Inc. by email legal@cyan.com
or by snail mail at:
      Cyan Worlds, Inc.
      14617 N Newport Hwy
      Mead, WA 99021

*==LICENSE==*/

#include "plMoviePlayer.h" |
|
|
|
#include "hsConfig.h" |
|
#ifdef PLASMA_USE_WEBM |
|
# define VPX_CODEC_DISABLE_COMPAT 1 |
|
# include <vpx/vpx_decoder.h> |
|
# include <vpx/vp8dx.h> |
|
# define iface (vpx_codec_vp9_dx()) |
|
# include <opus.h> |
|
|
|
# define WEBM_CODECID_VP9 "V_VP9" |
|
# define WEBM_CODECID_OPUS "A_OPUS" |
|
#endif |
|
|
|
#include "hsResMgr.h" |
|
#include "hsTimer.h" |
|
#include "../plAudio/plWin32VideoSound.h" |
|
#include "../plGImage/plMipmap.h" |
|
#include "../pnKeyedObject/plUoid.h" |
|
#include "../plPipeline/hsGDeviceRef.h" |
|
#include "../plPipeline/plPlates.h" |
|
#include "../plResMgr/plLocalization.h" |
|
#include "../plStatusLog/plStatusLog.h" |
|
#include "../plFile/plFileUtils.h" |
|
|
|
#include "plPlanarImage.h" |
|
#include "webm/mkvreader.hpp" |
|
#include "webm/mkvparser.hpp" |
|
|
|
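// Runs a libwebm call and bails out of the enclosing function
// (asserting in debug builds) if it returns a negative error code.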
#define SAFE_OP(x, err) \
    { \
        Int64 ret = 0; \
        ret = x; \
        if (ret < 0) { \
            hsAssert(false, "failed to " err); \
            return false; \
        } \
    }

// =====================================================

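// Thin wrapper around a libvpx VP9 decoder context. Instances are created
// through Create() so that a failed vpx_codec_dec_init() never yields a
// half-initialized decoder.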
class VPX
{
    VPX() { }

#ifdef PLASMA_USE_WEBM
public:
    vpx_codec_ctx_t codec;

    ~VPX()
    {
        if (vpx_codec_destroy(&codec))
            hsAssert(false, vpx_codec_error_detail(&codec));
    }

    static VPX* Create()
    {
        VPX* instance = new VPX;
        if (vpx_codec_dec_init(&instance->codec, iface, nullptr, 0)) {
            hsAssert(false, vpx_codec_error_detail(&instance->codec));
            delete instance;
            return nullptr;
        }
        return instance;
    }

    vpx_image_t* Decode(UInt8* buf, UInt32 size)
    {
        if (vpx_codec_decode(&codec, buf, size, nullptr, 0) != VPX_CODEC_OK) {
            const char* detail = vpx_codec_error_detail(&codec);
            hsAssert(false, detail ? detail : "unspecified decode error");
            return nullptr;
        }

        vpx_codec_iter_t iter = nullptr;
        // ASSUMPTION: only one image per frame
        // if this proves false, move decoder function into IProcessVideoFrame
        return vpx_codec_get_frame(&codec, &iter);
    }
#endif
};

// =====================================================

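// Tracks our read position within a single matroska track and hands back
// every block whose timecode is at or before the requested movie time.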
class TrackMgr
{
protected:
    const mkvparser::Track* fTrack;
    const mkvparser::BlockEntry* fCurrentBlock;
    Int32 fStatus;

public:
    TrackMgr(const mkvparser::Track* track) : fTrack(track), fCurrentBlock(nullptr), fStatus(0) { }

    const mkvparser::Track* GetTrack() { return fTrack; }

    bool GetFrames(mkvparser::MkvReader* reader, Int64 movieTimeNs, std::vector<blkbuf_t>& frames)
    {
        // If we have no block yet, grab the first one
        if (!fCurrentBlock)
            fStatus = fTrack->GetFirst(fCurrentBlock);

        // Continue through the blocks until our current movie time
        while (fCurrentBlock && fStatus == 0) {
            const mkvparser::Block* block = fCurrentBlock->GetBlock();
            Int64 time = block->GetTime(fCurrentBlock->GetCluster()) - fTrack->GetCodecDelay();
            if (time <= movieTimeNs) {
                // We want to play this block, add it to the frames buffer
                frames.reserve(frames.size() + block->GetFrameCount());
                for (Int32 i = 0; i < block->GetFrameCount(); i++) {
                    const mkvparser::Block::Frame data = block->GetFrame(i);
                    UInt8* buf = new UInt8[data.len];
                    data.Read(reader, buf);
                    frames.push_back(std::make_pair(std::auto_ptr<UInt8>(buf), static_cast<Int32>(data.len)));
                }
                fStatus = fTrack->GetNext(fCurrentBlock, fCurrentBlock);
            } else {
                // We've got all frames that have to play... come back for more later!
                return true;
            }
        }

        return false; // No more blocks... We're done!
    }
};

// =====================================================

plMoviePlayer::plMoviePlayer()
    : fPlate(nullptr),
      fTexture(nullptr),
      fReader(nullptr),
      fMovieTime(0),
      fLastFrameTime(0),
      fPosition(hsPoint2()),
      fPlaying(false),
      fPaused(false),
      fMoviePath(nullptr)
{
    fScale.Set(1.0f, 1.0f);
}

plMoviePlayer::~plMoviePlayer()
{
    if (fPlate)
        // The plPlate owns the Mipmap Texture, so it destroys it for us
        plPlateManager::Instance().DestroyPlate(fPlate);
#ifdef PLASMA_USE_WEBM
    if (fReader) {
        fReader->Close();
        delete fReader;
    }
#endif
    delete[] fMoviePath;
}

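// Opens the movie with libwebm, parses the segment, and selects the audio
// and video tracks to play.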
bool plMoviePlayer::IOpenMovie()
{
#ifdef PLASMA_USE_WEBM
    if (!plFileUtils::FileExists(fMoviePath)) {
        plStatusLog::AddLineS("movie.log", "%s: Tried to play a movie that doesn't exist.", fMoviePath);
        return false;
    }

    // Open the movie with libwebm
    fReader = new mkvparser::MkvReader;
    SAFE_OP(fReader->Open(fMoviePath), "open movie");

    // Parse the segment; it contains everything we want to know about the movie
    long long pos = 0;
    mkvparser::EBMLHeader ebmlHeader;
    SAFE_OP(ebmlHeader.Parse(fReader, pos), "read mkv header");
    mkvparser::Segment* seg;
    SAFE_OP(mkvparser::Segment::CreateInstance(fReader, pos, seg), "get segment info");
    SAFE_OP(seg->Load(), "load segment from webm");
    fSegment.reset(seg);

    // Use the first track of each type unless another one matches the current game language
    const mkvparser::Tracks* tracks = fSegment->GetTracks();
    for (UInt32 i = 0; i < tracks->GetTracksCount(); ++i) {
        const mkvparser::Track* track = tracks->GetTrackByIndex(i);
        if (!track)
            continue;

        switch (track->GetType()) {
        case mkvparser::Track::kAudio:
            if (!fAudioTrack.get() || ICheckLanguage(track))
                fAudioTrack.reset(new TrackMgr(track));
            break;
        case mkvparser::Track::kVideo:
            if (!fVideoTrack.get() || ICheckLanguage(track))
                fVideoTrack.reset(new TrackMgr(track));
            break;
        }
    }
    return true;
#else
    return false;
#endif
}

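// Decodes the entire Opus audio track up front and hands it to the sound
// system as a single PCM buffer.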
bool plMoviePlayer::ILoadAudio()
{
#ifdef PLASMA_USE_WEBM
    // Fetch audio track information
    if (!fAudioTrack.get()) {
        hsStatusMessage("Movie ILoadAudio fAudioTrack NIL\n");
        return false;
    }
    const mkvparser::AudioTrack* audio = static_cast<const mkvparser::AudioTrack*>(fAudioTrack->GetTrack());
    plWAVHeader header;
    header.fFormatTag = plWAVHeader::kPCMFormatTag;
    header.fNumChannels = audio->GetChannels();
    header.fBitsPerSample = audio->GetBitDepth() == 8 ? 8 : 16;
    header.fNumSamplesPerSec = 48000; // the Opus spec says we shall always decode at 48kHz
    header.fBlockAlign = header.fNumChannels * header.fBitsPerSample / 8;
    header.fAvgBytesPerSec = header.fNumSamplesPerSec * header.fBlockAlign;
    fAudioSound.reset(new plWin32VideoSound(header));

    // Initialize Opus
    if (strncmp(audio->GetCodecId(), WEBM_CODECID_OPUS, arrsize(WEBM_CODECID_OPUS)) != 0) {
        plStatusLog::AddLineS("movie.log", "%s: Not an Opus audio track!", fMoviePath);
        return false;
    }
    int error;
    OpusDecoder* opus = opus_decoder_create(48000, audio->GetChannels(), &error);
    if (error != OPUS_OK)
        hsAssert(false, "Error occurred initializing Opus");

    // Decode audio track
    std::vector<blkbuf_t> frames;
    fAudioTrack->GetFrames(fReader, fSegment->GetDuration(), frames);
    static const int maxFrameSize = 5760; // maximum packet duration (120ms) at 48kHz
    std::vector<Int16> decoded;
    decoded.reserve(frames.size() * audio->GetChannels() * maxFrameSize);

    Int16* frameData = new Int16[maxFrameSize * audio->GetChannels()];
    for (std::vector<blkbuf_t>::iterator frame = frames.begin(); frame != frames.end(); frame++) {
        const std::auto_ptr<UInt8>& buf = frame->first;
        Int32 size = frame->second;

        int samples = opus_decode(opus, buf.get(), size, frameData, maxFrameSize, 0);
        if (samples < 0)
            hsAssert(false, "opus error");
        for (size_t i = 0; i < samples * audio->GetChannels(); i++)
            decoded.push_back(frameData[i]);
    }
    delete[] frameData;

    fAudioSound->FillSoundBuffer(reinterpret_cast<UInt8*>(decoded.data()), decoded.size() * sizeof(Int16));
    opus_decoder_destroy(opus);
    return true;
#else
    return false;
#endif
}

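// Returns true if the track's language code matches the current game language.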
bool plMoviePlayer::ICheckLanguage(const mkvparser::Track* track)
{
    std::set<std::string> codes = plLocalization::GetLanguageCodes(plLocalization::GetLanguage());
    return codes.find(track->GetLanguage()) != codes.end();
}

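// Decodes all pending video blocks and uploads the most recent image to the
// plate's texture.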
void plMoviePlayer::IProcessVideoFrame(const std::vector<blkbuf_t>& frames)
{
#ifdef PLASMA_USE_WEBM
    vpx_image_t* img = nullptr;

    // We have to decode all the frames, but we only want to display the most recent one to the user.
    for (std::vector<blkbuf_t>::const_iterator frame = frames.begin(); frame != frames.end(); frame++) {
        const std::auto_ptr<UInt8>& buf = frame->first;
        UInt32 size = static_cast<UInt32>(frame->second);
        img = fVpx->Decode(buf.get(), size);
    }

    if (img) {
        // According to VideoLAN[1], I420 is the most common image format in videos. I am inclined to believe this, as our
        // attempts to convert the common Uru videos produce I420 image data. So, as a shortcut, we will only implement that format.
        // If for some reason we need other formats, please, be my guest!
        // [1] = http://wiki.videolan.org/YUV#YUV_4:2:0_.28I420.2FJ420.2FYV12.29
        switch (img->fmt) {
        case VPX_IMG_FMT_I420:
            plPlanarImage::Yuv420ToRgba(img->d_w, img->d_h, reinterpret_cast<const Int32*>(img->stride), img->planes, reinterpret_cast<UInt8*>(fTexture->GetImage()));
            break;

        default:
            hsAssert(false, "unsupported image format");
        }

        // Flush new data to the device
        if (fTexture->GetDeviceRef())
            fTexture->GetDeviceRef()->SetDirty(true);
        fPlate->SetVisible(true);
    }
#endif
}

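// Begins playback: opens the movie, sets up the VP9 decoder, pre-decodes the
// audio track, and starts the sound playing.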
bool plMoviePlayer::Start()
{
    if (fPlaying)
        return false;

#ifdef PLASMA_USE_WEBM
    if (!IOpenMovie())
        return false;
    hsAssert(fVideoTrack.get(), "nil video track -- expect bad things to happen!");
    hsStatusMessageF("Opened movie %s\n", fMoviePath);

    // Initialize VPX
    const mkvparser::VideoTrack* video = static_cast<const mkvparser::VideoTrack*>(fVideoTrack->GetTrack());
    if (strncmp(video->GetCodecId(), WEBM_CODECID_VP9, arrsize(WEBM_CODECID_VP9)) != 0) {
        plStatusLog::AddLineS("movie.log", "%s: Not a VP9 video track!", fMoviePath);
        return false;
    }
    hsStatusMessageF("... movie track selected, codec: %s\n", video->GetCodecId());

    if (VPX* vpx = VPX::Create())
        fVpx.reset(vpx);
    else
        return false;

    // Decode the audio track and load it into a sound buffer
    if (!ILoadAudio()) {
        hsStatusMessage("... movie audio track load failed\n");
        return false;
    }
    hsStatusMessage("... movie audio track buffered\n");

    fLastFrameTime = static_cast<Int64>(hsTimer::GetMilliSeconds());
    fAudioSound->Play();
    fPlaying = true;

    hsStatusMessage("... plMoviePlayer::Start() returns\n");
    return true;
#else
    return false;
#endif // PLASMA_USE_WEBM
}

void plMoviePlayer::IInitPlate(UInt32 width, UInt32 height)
{
    // Need to figure out scaling based on pipe size.
    plPlateManager& plateMgr = plPlateManager::Instance();
    float plateWidth = width * fScale.fX;
    float plateHeight = height * fScale.fY;
    if (plateWidth > plateMgr.GetPipeWidth() || plateHeight > plateMgr.GetPipeHeight()) {
        float scale = std::min(plateMgr.GetPipeWidth() / plateWidth, plateMgr.GetPipeHeight() / plateHeight);
        plateWidth *= scale;
        plateHeight *= scale;
    }
    plateMgr.CreatePlate(&fPlate, fPosition.fX, fPosition.fY, 0, 0);
    plateMgr.SetPlatePixelSize(fPlate, plateWidth, plateHeight);
    fTexture = fPlate->CreateMaterial(width, height, false);
}

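// Advances the movie clock by the wall-clock time since the last call,
// fetches any video blocks that are now due, and displays the newest frame.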
bool plMoviePlayer::NextFrame()
{
    if (!fPlaying)
        return false;

    Int64 frameTime = static_cast<Int64>(hsTimer::GetMilliSeconds());
    Int64 frameTimeDelta = frameTime - fLastFrameTime;
    fLastFrameTime = frameTime;

    if (fPaused)
        return true;

#ifdef PLASMA_USE_WEBM
    // Get our current timecode (in ms; the parser wants ns)
    fMovieTime += frameTimeDelta;

    std::vector<blkbuf_t> video;
    if (fVideoTrack.get() == nullptr || !fVideoTrack->GetFrames(fReader, fMovieTime * 1000000, video)) {
        Stop();
        return false;
    }

    // If the pipeline's device was invalidated, the plate will be invalid. Recreate now.
    if (!fPlate) {
        const mkvparser::VideoTrack* vt = static_cast<const mkvparser::VideoTrack*>(fVideoTrack->GetTrack());
        IInitPlate(static_cast<UInt32>(vt->GetWidth()), static_cast<UInt32>(vt->GetHeight()));
        hsAssert(fPlate, "failed to init plMoviePlayer plate -- bad things will happen!");
    }

    // Show our mess
    IProcessVideoFrame(video);
    fAudioSound->RefreshVolume();

    return true;
#else
    return false;
#endif // PLASMA_USE_WEBM
}

bool plMoviePlayer::Pause(bool on)
{
    if (!fPlaying)
        return false;

    fAudioSound->Pause(on);
    fPaused = on;
    return true;
}

bool plMoviePlayer::Stop()
{
    fPlaying = false;
    if (fAudioSound.get() != nullptr)
        fAudioSound->Stop();
    if (fPlate)
        fPlate->SetVisible(false);

    for (std::vector<plMessage*>::iterator cb = fCallbacks.begin(); cb != fCallbacks.end(); cb++)
        (*cb)->Send();
    fCallbacks.clear();
    return true;
}