Mirror of https://github.com/TES3MP/openmw-tes3mp.git (synced 2025-01-19 21:23:52 +00:00)

commit 815e832dbb — Merge branch 'master' into mouse-picking-state

34 changed files with 560 additions and 217 deletions
@@ -120,6 +120,11 @@ void CSMSettings::UserSettings::buildSettingModelDefaults()
     reuse->setToolTip ("When a new subview is requested and a matching subview already "
         " exist, do not open a new subview and use the existing one instead.");
 
+    Setting *statusBar = createSetting (Type_CheckBox, "show-statusbar", "Show Status Bar");
+    statusBar->setDefaultValue ("true");
+    statusBar->setToolTip ("If a newly open top level window is showing status bars or not. "
+        " Note that this does not affect existing windows.");
+
     Setting *maxSubView = createSetting (Type_SpinBox, "max-subviews",
         "Maximum number of subviews per top-level window");
     maxSubView->setDefaultValue (256);
@@ -105,7 +105,7 @@ void CSVDoc::View::setupViewMenu()
     mShowStatusBar->setCheckable (true);
     connect (mShowStatusBar, SIGNAL (toggled (bool)), this, SLOT (toggleShowStatusBar (bool)));
     std::string showStatusBar =
-        CSMSettings::UserSettings::instance().settingValue("Display/show statusbar").toStdString();
+        CSMSettings::UserSettings::instance().settingValue("window/show-statusbar").toStdString();
     if(showStatusBar == "true")
         mShowStatusBar->setChecked(true);
     view->addAction (mShowStatusBar);
@@ -144,7 +144,7 @@ CSVDoc::View *CSVDoc::ViewManager::addView (CSMDoc::Document *document)
     mViews.push_back (view);
 
     std::string showStatusBar =
-        CSMSettings::UserSettings::instance().settingValue("Display/show statusbar").toStdString();
+        CSMSettings::UserSettings::instance().settingValue("window/show-statusbar").toStdString();
 
     view->toggleStatusBar (showStatusBar == "true");
     view->show();
@@ -199,6 +199,10 @@ namespace MWBase
             virtual MWWorld::Ptr searchPtrViaActorId (int actorId) = 0;
             ///< Search is limited to the active cells.
 
+            virtual MWWorld::Ptr findContainer (const MWWorld::Ptr& ptr) = 0;
+            ///< Return a pointer to a liveCellRef which contains \a ptr.
+            /// \note Search is limited to the active cells.
+
             /// \todo enable reference in the OGRE scene
             virtual void enable (const MWWorld::Ptr& ptr) = 0;
@@ -191,8 +191,11 @@ namespace MWClass
 
         std::string text;
 
-        text += "\n#{sWeight}: " + MWGui::ToolTips::toString(ref->mBase->mData.mWeight);
-        text += MWGui::ToolTips::getValueString(ref->mBase->mData.mValue, "#{sValue}");
+        if (ref->mBase->mData.mWeight != 0)
+        {
+            text += "\n#{sWeight}: " + MWGui::ToolTips::toString(ref->mBase->mData.mWeight);
+            text += MWGui::ToolTips::getValueString(ref->mBase->mData.mValue, "#{sValue}");
+        }
 
         if (MWBase::Environment::get().getWindowManager()->getFullHelp()) {
             text += MWGui::ToolTips::getCellRefString(ptr.getCellRef());
@@ -132,6 +132,7 @@ namespace MWGui
             virtual int pageSplit();
 
         protected:
+            virtual ~GraphicElement() {}
             MyGUI::Widget * mParent;
             Paginator & mPaginator;
             BlockStyle mBlockStyle;
@@ -30,8 +30,7 @@ int VideoWidget::getVideoHeight()
 
 bool VideoWidget::update()
 {
-    mPlayer.update();
-    return mPlayer.isPlaying();
+    return mPlayer.update();
 }
 
 void VideoWidget::stop()
@@ -377,6 +377,7 @@ namespace MWGui
         delete mHitFader;
         delete mWerewolfFader;
         delete mScreenFader;
+        delete mBlindnessFader;
         delete mDebugWindow;
 
         cleanupGarbage();
@@ -27,9 +27,9 @@ CreatureAnimation::CreatureAnimation(const MWWorld::Ptr &ptr)
         setObjectRoot(model, false);
         setRenderProperties(mObjectRoot, RV_Actors, RQG_Main, RQG_Alpha);
 
-        addAnimSource(model);
         if((ref->mBase->mFlags&ESM::Creature::Bipedal))
             addAnimSource("meshes\\base_anim.nif");
+        addAnimSource(model);
     }
 }
@@ -47,9 +47,9 @@ CreatureWeaponAnimation::CreatureWeaponAnimation(const MWWorld::Ptr &ptr)
         setObjectRoot(model, false);
         setRenderProperties(mObjectRoot, RV_Actors, RQG_Main, RQG_Alpha);
 
-        addAnimSource(model);
         if((ref->mBase->mFlags&ESM::Creature::Bipedal))
             addAnimSource("meshes\\base_anim.nif");
+        addAnimSource(model);
 
         mPtr.getClass().getInventoryStore(mPtr).setListener(this, mPtr);
@@ -21,6 +21,7 @@
 
 #include "../mwworld/class.hpp"
 #include "../mwworld/cellstore.hpp"
+#include "../mwworld/containerstore.hpp"
 
 #include "../mwmechanics/npcstats.hpp"
@@ -434,6 +435,16 @@ namespace MWScript
                 else
                     ref2 = MWBase::Environment::get().getWorld()->getPtr(id, false);
 
+                if (ref2.getContainerStore()) // is the object contained?
+                {
+                    MWWorld::Ptr container = MWBase::Environment::get().getWorld()->findContainer(ref2);
+
+                    if (!container.isEmpty())
+                        ref2 = container;
+                    else
+                        throw std::runtime_error("failed to find container ptr");
+                }
+
                 const MWWorld::Ptr ref = MWBase::Environment::get().getWorld()->getPtr(name, false);
 
                 // If the objects are in different worldspaces, return a large value (just like vanilla)
@@ -454,7 +454,8 @@ namespace MWScript
                     if (::Misc::StringUtils::ciEqual(iter->getCellRef().getRefId(), item))
                     {
                         int removed = store.remove(*iter, toRemove, ptr);
-                        MWBase::Environment::get().getWorld()->dropObjectOnGround(ptr, *iter, removed);
+                        MWWorld::Ptr dropped = MWBase::Environment::get().getWorld()->dropObjectOnGround(ptr, *iter, removed);
+                        dropped.getCellRef().setOwner("");
 
                         toRemove -= removed;
@@ -20,6 +20,7 @@
 #include "../mwbase/windowmanager.hpp"
 
 #include "../mwworld/class.hpp"
+#include "../mwworld/player.hpp"
 
 #include "../mwmechanics/creaturestats.hpp"
 #include "../mwmechanics/npcstats.hpp"
@@ -416,8 +417,12 @@ namespace MWScript
                 MWBase::World *world = MWBase::Environment::get().getWorld();
                 MWWorld::Ptr player = world->getPlayerPtr();
 
-                player.getClass().getNpcStats (player).setBounty(runtime[0].mFloat);
+                int bounty = runtime[0].mFloat;
                 runtime.pop();
+                player.getClass().getNpcStats (player).setBounty(bounty);
+
+                if (bounty == 0)
+                    MWBase::Environment::get().getWorld()->getPlayer().recordCrimeId();
             }
         };
@@ -112,7 +112,7 @@ bool FFmpeg_Decoder::getAVAudioData()
         if(!mDataBuf || mDataBufLen < mFrame->nb_samples)
         {
             av_freep(&mDataBuf);
-            if(av_samples_alloc(&mDataBuf, NULL, (*mStream)->codec->channels,
+            if(av_samples_alloc(&mDataBuf, NULL, av_get_channel_layout_nb_channels(mOutputChannelLayout),
                                 mFrame->nb_samples, mOutputSampleFormat, 0) < 0)
                 break;
             else
@@ -147,8 +147,8 @@ size_t FFmpeg_Decoder::readAVAudioData(void *data, size_t length)
             if(!getAVAudioData())
                 break;
             mFramePos = 0;
-            mFrameSize = mFrame->nb_samples * (*mStream)->codec->channels *
-                         av_get_bytes_per_sample((*mStream)->codec->sample_fmt);
+            mFrameSize = mFrame->nb_samples * av_get_channel_layout_nb_channels(mOutputChannelLayout) *
+                         av_get_bytes_per_sample(mOutputSampleFormat);
         }
 
         /* Get the amount of bytes remaining to be written, and clamp to
@@ -285,47 +285,31 @@ void FFmpeg_Decoder::getInfo(int *samplerate, ChannelConfig *chans, SampleType *
     if(!mStream)
         fail("No audio stream info");
 
-    if((*mStream)->codec->sample_fmt == AV_SAMPLE_FMT_U8)
-        *type = SampleType_UInt8;
-    else if((*mStream)->codec->sample_fmt == AV_SAMPLE_FMT_S16)
-        *type = SampleType_Int16;
-    else if((*mStream)->codec->sample_fmt == AV_SAMPLE_FMT_FLT)
-        *type = SampleType_Float32;
+    if((*mStream)->codec->sample_fmt == AV_SAMPLE_FMT_FLT || (*mStream)->codec->sample_fmt == AV_SAMPLE_FMT_FLTP)
+        mOutputSampleFormat = AV_SAMPLE_FMT_S16; // FIXME: Check for AL_EXT_FLOAT32 support
     else if((*mStream)->codec->sample_fmt == AV_SAMPLE_FMT_U8P)
-        *type = SampleType_UInt8;
+        mOutputSampleFormat = AV_SAMPLE_FMT_U8;
     else if((*mStream)->codec->sample_fmt == AV_SAMPLE_FMT_S16P)
-        *type = SampleType_Int16;
-    else if((*mStream)->codec->sample_fmt == AV_SAMPLE_FMT_FLTP)
-        *type = SampleType_Float32;
+        mOutputSampleFormat = AV_SAMPLE_FMT_S16;
     else
-        fail(std::string("Unsupported sample format: ")+
-             av_get_sample_fmt_name((*mStream)->codec->sample_fmt));
+        mOutputSampleFormat = AV_SAMPLE_FMT_S16;
+
+    if(mOutputSampleFormat == AV_SAMPLE_FMT_U8)
+        *type = SampleType_UInt8;
+    else if(mOutputSampleFormat == AV_SAMPLE_FMT_S16)
+        *type = SampleType_Int16;
+    else if(mOutputSampleFormat == AV_SAMPLE_FMT_FLT)
+        *type = SampleType_Float32;
 
     int64_t ch_layout = (*mStream)->codec->channel_layout;
 
-    if((*mStream)->codec->channel_layout == AV_CH_LAYOUT_MONO)
-        *chans = ChannelConfig_Mono;
-    else if((*mStream)->codec->channel_layout == AV_CH_LAYOUT_STEREO)
-        *chans = ChannelConfig_Stereo;
-    else if((*mStream)->codec->channel_layout == AV_CH_LAYOUT_QUAD)
-        *chans = ChannelConfig_Quad;
-    else if((*mStream)->codec->channel_layout == AV_CH_LAYOUT_5POINT1)
-        *chans = ChannelConfig_5point1;
-    else if((*mStream)->codec->channel_layout == AV_CH_LAYOUT_7POINT1)
-        *chans = ChannelConfig_7point1;
-    else if((*mStream)->codec->channel_layout == 0)
+    if(ch_layout == 0)
     {
         /* Unknown channel layout. Try to guess. */
         if((*mStream)->codec->channels == 1)
-        {
-            *chans = ChannelConfig_Mono;
             ch_layout = AV_CH_LAYOUT_MONO;
-        }
         else if((*mStream)->codec->channels == 2)
-        {
-            *chans = ChannelConfig_Stereo;
             ch_layout = AV_CH_LAYOUT_STEREO;
-        }
         else
         {
             std::stringstream sstr("Unsupported raw channel count: ");
@@ -333,6 +317,25 @@ void FFmpeg_Decoder::getInfo(int *samplerate, ChannelConfig *chans, SampleType *
             fail(sstr.str());
         }
     }
+
+    mOutputChannelLayout = ch_layout;
+    if (ch_layout == AV_CH_LAYOUT_5POINT1 || ch_layout == AV_CH_LAYOUT_7POINT1
+            || ch_layout == AV_CH_LAYOUT_QUAD) // FIXME: check for AL_EXT_MCFORMATS support
+        mOutputChannelLayout = AV_CH_LAYOUT_STEREO;
+    else if (ch_layout != AV_CH_LAYOUT_MONO
+             && ch_layout != AV_CH_LAYOUT_STEREO)
+        mOutputChannelLayout = AV_CH_LAYOUT_STEREO;
+
+    if(mOutputChannelLayout == AV_CH_LAYOUT_MONO)
+        *chans = ChannelConfig_Mono;
+    else if(mOutputChannelLayout == AV_CH_LAYOUT_STEREO)
+        *chans = ChannelConfig_Stereo;
+    else if(mOutputChannelLayout == AV_CH_LAYOUT_QUAD)
+        *chans = ChannelConfig_Quad;
+    else if(mOutputChannelLayout == AV_CH_LAYOUT_5POINT1)
+        *chans = ChannelConfig_5point1;
+    else if(mOutputChannelLayout == AV_CH_LAYOUT_7POINT1)
+        *chans = ChannelConfig_7point1;
     else
     {
         char str[1024];
@@ -343,17 +346,11 @@ void FFmpeg_Decoder::getInfo(int *samplerate, ChannelConfig *chans, SampleType *
 
     *samplerate = (*mStream)->codec->sample_rate;
 
-    if((*mStream)->codec->sample_fmt == AV_SAMPLE_FMT_U8P)
-        mOutputSampleFormat = AV_SAMPLE_FMT_U8;
-    else if((*mStream)->codec->sample_fmt == AV_SAMPLE_FMT_S16P)
-        mOutputSampleFormat = AV_SAMPLE_FMT_S16;
-    else if((*mStream)->codec->sample_fmt == AV_SAMPLE_FMT_FLTP)
-        mOutputSampleFormat = AV_SAMPLE_FMT_FLT;
-
-    if(mOutputSampleFormat != AV_SAMPLE_FMT_NONE)
+    if(mOutputSampleFormat != (*mStream)->codec->sample_fmt
+       || mOutputChannelLayout != ch_layout)
     {
         mSwr = swr_alloc_set_opts(mSwr,            // SwrContext
-                          ch_layout,               // output ch layout
+                          mOutputChannelLayout,    // output ch layout
                           mOutputSampleFormat,     // output sample format
                           (*mStream)->codec->sample_rate, // output sample rate
                           ch_layout,               // input ch layout
@@ -383,8 +380,8 @@ void FFmpeg_Decoder::readAll(std::vector<char> &output)
 
     while(getAVAudioData())
     {
-        size_t got = mFrame->nb_samples * (*mStream)->codec->channels *
-                     av_get_bytes_per_sample((*mStream)->codec->sample_fmt);
+        size_t got = mFrame->nb_samples * av_get_channel_layout_nb_channels(mOutputChannelLayout) *
+                     av_get_bytes_per_sample(mOutputSampleFormat);
         const char *inbuf = reinterpret_cast<char*>(mFrameData[0]);
         output.insert(output.end(), inbuf, inbuf+got);
     }
@@ -402,8 +399,8 @@ void FFmpeg_Decoder::rewind()
 
 size_t FFmpeg_Decoder::getSampleOffset()
 {
-    int delay = (mFrameSize-mFramePos) / (*mStream)->codec->channels /
-                av_get_bytes_per_sample((*mStream)->codec->sample_fmt);
+    int delay = (mFrameSize-mFramePos) / av_get_channel_layout_nb_channels(mOutputChannelLayout) /
+                av_get_bytes_per_sample(mOutputSampleFormat);
     return (int)(mNextPts*(*mStream)->codec->sample_rate) - delay;
 }
@@ -416,6 +413,7 @@ FFmpeg_Decoder::FFmpeg_Decoder()
   , mNextPts(0.0)
   , mSwr(0)
   , mOutputSampleFormat(AV_SAMPLE_FMT_NONE)
+  , mOutputChannelLayout(0)
   , mDataBuf(NULL)
   , mFrameData(NULL)
   , mDataBufLen(0)
@@ -59,6 +59,7 @@ namespace MWSound
 
         SwrContext *mSwr;
         enum AVSampleFormat mOutputSampleFormat;
+        int64_t mOutputChannelLayout;
         uint8_t *mDataBuf;
         uint8_t **mFrameData;
         int mDataBufLen;
@@ -44,8 +44,8 @@ namespace MWSound
 
         size_t getSampleOffset()
         {
-            ssize_t clock_delay = (mFrameSize-mFramePos) / mAVStream->codec->channels /
-                                  av_get_bytes_per_sample(mAVStream->codec->sample_fmt);
+            ssize_t clock_delay = (mFrameSize-mFramePos) / av_get_channel_layout_nb_channels(mOutputChannelLayout) /
+                                  av_get_bytes_per_sample(mOutputSampleFormat);
             return (size_t)(mAudioClock*mAVStream->codec->sample_rate) - clock_delay;
         }
@@ -64,20 +64,20 @@ namespace MWSound
 
         virtual void adjustAudioSettings(AVSampleFormat& sampleFormat, uint64_t& channelLayout, int& sampleRate)
         {
-            if (sampleFormat == AV_SAMPLE_FMT_U8P)
+            if (sampleFormat == AV_SAMPLE_FMT_U8P || sampleFormat == AV_SAMPLE_FMT_U8)
                 sampleFormat = AV_SAMPLE_FMT_U8;
-            else if (sampleFormat == AV_SAMPLE_FMT_S16P)
+            else if (sampleFormat == AV_SAMPLE_FMT_S16P || sampleFormat == AV_SAMPLE_FMT_S16)
                 sampleFormat = AV_SAMPLE_FMT_S16;
-            else if (sampleFormat == AV_SAMPLE_FMT_FLTP)
-                sampleFormat = AV_SAMPLE_FMT_FLT;
+            else if (sampleFormat == AV_SAMPLE_FMT_FLTP || sampleFormat == AV_SAMPLE_FMT_FLT)
+                sampleFormat = AV_SAMPLE_FMT_S16; // FIXME: check for AL_EXT_FLOAT32 support
             else
-                sampleFormat = AV_SAMPLE_FMT_FLT;
+                sampleFormat = AV_SAMPLE_FMT_S16;
 
-            if (channelLayout != AV_CH_LAYOUT_MONO
-                && channelLayout != AV_CH_LAYOUT_5POINT1
-                && channelLayout != AV_CH_LAYOUT_7POINT1
-                && channelLayout != AV_CH_LAYOUT_STEREO
-                && channelLayout != AV_CH_LAYOUT_QUAD)
+            if (channelLayout == AV_CH_LAYOUT_5POINT1 || channelLayout == AV_CH_LAYOUT_7POINT1
+                || channelLayout == AV_CH_LAYOUT_QUAD) // FIXME: check for AL_EXT_MCFORMATS support
+                channelLayout = AV_CH_LAYOUT_STEREO;
+            else if (channelLayout != AV_CH_LAYOUT_MONO
+                && channelLayout != AV_CH_LAYOUT_STEREO)
                 channelLayout = AV_CH_LAYOUT_STEREO;
         }
@@ -149,6 +149,17 @@ namespace MWWorld
                 forEachImp (functor, mCreatureLists);
             }
 
+            template<class Functor>
+            bool forEachContainer (Functor& functor)
+            {
+                mHasState = true;
+
+                return
+                    forEachImp (functor, mContainers) &&
+                    forEachImp (functor, mCreatures) &&
+                    forEachImp (functor, mNpcs);
+            }
+
             bool isExterior() const;
 
             Ptr searchInContainer (const std::string& id);
@@ -272,7 +272,7 @@ MWWorld::ContainerStoreIterator MWWorld::ContainerStore::add (const Ptr& itemPtr
         item.mCell = actorPtr.getCell();
     }
 
-    item.mContainerStore = 0;
+    item.mContainerStore = this;
 
     MWBase::Environment::get().getWorld()->getLocalScripts().add(script, item);
@@ -657,6 +657,47 @@ namespace MWWorld
         return mWorldScene->searchPtrViaActorId (actorId);
     }
 
+    struct FindContainerFunctor
+    {
+        Ptr mContainedPtr;
+        Ptr mResult;
+
+        FindContainerFunctor(const Ptr& containedPtr) : mContainedPtr(containedPtr) {}
+
+        bool operator() (Ptr ptr)
+        {
+            if (mContainedPtr.getContainerStore() == &ptr.getClass().getContainerStore(ptr))
+            {
+                mResult = ptr;
+                return false;
+            }
+
+            return true;
+        }
+    };
+
+    Ptr World::findContainer(const Ptr& ptr)
+    {
+        if (ptr.isInCell())
+            return Ptr();
+
+        Ptr player = getPlayerPtr();
+        if (ptr.getContainerStore() == &player.getClass().getContainerStore(player))
+            return player;
+
+        const Scene::CellStoreCollection& collection = mWorldScene->getActiveCells();
+        for (Scene::CellStoreCollection::const_iterator cellIt = collection.begin(); cellIt != collection.end(); ++cellIt)
+        {
+            FindContainerFunctor functor(ptr);
+            (*cellIt)->forEachContainer(functor);
+
+            if (!functor.mResult.isEmpty())
+                return functor.mResult;
+        }
+
+        return Ptr();
+    }
+
     void World::addContainerScripts(const Ptr& reference, CellStore * cell)
     {
         if( reference.getTypeName()==typeid (ESM::Container).name() ||
@@ -260,6 +260,10 @@ namespace MWWorld
             virtual Ptr searchPtrViaActorId (int actorId);
             ///< Search is limited to the active cells.
 
+            virtual MWWorld::Ptr findContainer (const MWWorld::Ptr& ptr);
+            ///< Return a pointer to a liveCellRef which contains \a ptr.
+            /// \note Search is limited to the active cells.
+
             virtual void adjustPosition (const Ptr& ptr, bool force);
             ///< Adjust position after load to be on ground. Must be called after model load.
             /// @param force do this even if the ptr is flying
@@ -382,7 +382,6 @@ endmacro()
 ogre_find_component(Paging OgrePaging.h)
 # look for Overlay component
 ogre_find_component(Overlay OgreOverlaySystem.h)
-ogre_find_component(Overlay OgreOverlay.h)
 # look for Terrain component
 ogre_find_component(Terrain OgreTerrain.h)
 # look for Property component
extern/ogre-ffmpeg-videoplayer/CMakeLists.txt (vendored, 15 lines changed)
@@ -3,15 +3,14 @@ set(OGRE_FFMPEG_VIDEOPLAYER_LIBRARY "ogre-ffmpeg-videoplayer")
 # Sources
 
 set(OGRE_FFMPEG_VIDEOPLAYER_SOURCE_FILES
     videoplayer.cpp
     videostate.cpp
     videodefs.hpp
-    libavwrapper.cpp
     audiodecoder.cpp
     audiofactory.hpp
 )
 
 
 # Find FFMPEG
 set(FFmpeg_FIND_COMPONENTS AVCODEC AVFORMAT AVUTIL SWSCALE SWRESAMPLE AVRESAMPLE)
 unset(FFMPEG_LIBRARIES CACHE)
@@ -30,10 +29,14 @@ else()
         message(FATAL_ERROR "Install either libswresample (FFmpeg) or libavresample (Libav).")
     endif()
 endif()
 
 include_directories(${FFMPEG_INCLUDE_DIRS})
 
+# Find Boost
+set(BOOST_COMPONENTS thread)
+find_package(Boost REQUIRED COMPONENTS ${BOOST_COMPONENTS})
+include_directories(${Boost_INCLUDE_DIRS})
+
 add_library(${OGRE_FFMPEG_VIDEOPLAYER_LIBRARY} STATIC ${OGRE_FFMPEG_VIDEOPLAYER_SOURCE_FILES})
-target_link_libraries(${OGRE_FFMPEG_VIDEOPLAYER_LIBRARY} ${VIDEO_FFMPEG_LIBRARIES})
+target_link_libraries(${OGRE_FFMPEG_VIDEOPLAYER_LIBRARY} ${VIDEO_FFMPEG_LIBRARIES} ${Boost_LIBRARIES})
 
 link_directories(${CMAKE_CURRENT_BINARY_DIR})
extern/ogre-ffmpeg-videoplayer/audiodecoder.cpp (vendored, 40 lines changed)
@@ -152,8 +152,8 @@ int MovieAudioDecoder::synchronize_audio()
             double avg_diff = mAudioDiffAccum * (1.0 - mAudioDiffAvgCoef);
             if(fabs(avg_diff) >= mAudioDiffThreshold)
             {
-                int n = av_get_bytes_per_sample(mAVStream->codec->sample_fmt) *
-                        mAVStream->codec->channels;
+                int n = av_get_bytes_per_sample(mOutputSampleFormat) *
+                        av_get_channel_layout_nb_channels(mOutputChannelLayout);
                 sample_skip = ((int)(diff * mAVStream->codec->sample_rate) * n);
             }
         }
@@ -161,7 +161,7 @@ int MovieAudioDecoder::synchronize_audio()
     return sample_skip;
 }
 
-int MovieAudioDecoder::audio_decode_frame(AVFrame *frame)
+int MovieAudioDecoder::audio_decode_frame(AVFrame *frame, int &sample_skip)
 {
     AVPacket *pkt = &mPacket;
@@ -191,7 +191,7 @@ int MovieAudioDecoder::audio_decode_frame(AVFrame *frame)
             if(!mDataBuf || mDataBufLen < frame->nb_samples)
             {
                 av_freep(&mDataBuf);
-                if(av_samples_alloc(&mDataBuf, NULL, mAVStream->codec->channels,
+                if(av_samples_alloc(&mDataBuf, NULL, av_get_channel_layout_nb_channels(mOutputChannelLayout),
                                     frame->nb_samples, mOutputSampleFormat, 0) < 0)
                     break;
                 else
@@ -212,8 +212,8 @@ int MovieAudioDecoder::audio_decode_frame(AVFrame *frame)
                                (double)mAVStream->codec->sample_rate;
 
             /* We have data, return it and come back for more later */
-            return frame->nb_samples * mAVStream->codec->channels *
-                   av_get_bytes_per_sample(mAVStream->codec->sample_fmt);
+            return frame->nb_samples * av_get_channel_layout_nb_channels(mOutputChannelLayout) *
+                   av_get_bytes_per_sample(mOutputSampleFormat);
         }
         av_free_packet(pkt);
@@ -221,6 +221,18 @@ int MovieAudioDecoder::audio_decode_frame(AVFrame *frame)
         if(mVideoState->audioq.get(pkt, mVideoState) < 0)
             return -1;
 
+        if(pkt->data == mVideoState->mFlushPktData)
+        {
+            avcodec_flush_buffers(mAVStream->codec);
+            mAudioDiffAccum = 0.0;
+            mAudioDiffAvgCount = 0;
+            mAudioClock = av_q2d(mAVStream->time_base)*pkt->pts;
+            sample_skip = 0;
+
+            if(mVideoState->audioq.get(pkt, mVideoState) < 0)
+                return -1;
+        }
+
         /* if update, update the audio clock w/pts */
         if((uint64_t)pkt->pts != AV_NOPTS_VALUE)
             mAudioClock = av_q2d(mAVStream->time_base)*pkt->pts;
@@ -229,6 +241,16 @@ int MovieAudioDecoder::audio_decode_frame(AVFrame *frame)
 
 size_t MovieAudioDecoder::read(char *stream, size_t len)
 {
+    if (mVideoState->mPaused)
+    {
+        // fill the buffer with silence
+        size_t sampleSize = av_get_bytes_per_sample(mOutputSampleFormat);
+        char* data[1];
+        data[0] = stream;
+        av_samples_set_silence((uint8_t**)data, 0, len/sampleSize, 1, mOutputSampleFormat);
+        return len;
+    }
+
     int sample_skip = synchronize_audio();
     size_t total = 0;
@@ -237,7 +259,7 @@ size_t MovieAudioDecoder::read(char *stream, size_t len)
         if(mFramePos >= mFrameSize)
         {
             /* We have already sent all our data; get more */
-            mFrameSize = audio_decode_frame(mFrame);
+            mFrameSize = audio_decode_frame(mFrame, sample_skip);
             if(mFrameSize < 0)
             {
                 /* If error, we're done */
@@ -260,8 +282,8 @@ size_t MovieAudioDecoder::read(char *stream, size_t len)
         {
             len1 = std::min<size_t>(len1, -mFramePos);
 
-            int n = av_get_bytes_per_sample(mAVStream->codec->sample_fmt) *
-                    mAVStream->codec->channels;
+            int n = av_get_bytes_per_sample(mOutputSampleFormat)
+                    * av_get_channel_layout_nb_channels(mOutputChannelLayout);
 
             /* add samples by copying the first sample*/
             if(n == 1)
@@ -77,7 +77,8 @@ private:
      * skip (negative means to duplicate). */
     int synchronize_audio();
 
-    int audio_decode_frame(AVFrame *frame);
+    /// @param sample_skip If seeking happened, the sample_skip variable will be reset to 0.
+    int audio_decode_frame(AVFrame *frame, int &sample_skip);
 
 public:
     MovieAudioDecoder(VideoState *is);
@@ -101,6 +102,8 @@ public:
     virtual double getAudioClock();
 
     /// This is the main interface to be used by the user's audio library.
+    /// @par Request filling the \a stream with \a len number of bytes.
+    /// @return The number of bytes read (may not be the requested number if we arrived at the end of the audio stream)
     size_t read(char *stream, size_t len);
 };
@@ -12,6 +12,7 @@ class MovieAudioFactory
 {
 public:
     virtual boost::shared_ptr<MovieAudioDecoder> createDecoder(VideoState* videoState) = 0;
+    virtual ~MovieAudioFactory() {}
 };
 
 }
extern/ogre-ffmpeg-videoplayer/videoplayer.cpp (vendored, 66 lines changed)
@@ -1,5 +1,6 @@
 #include "videoplayer.hpp"
 
+#include "audiofactory.hpp"
 #include "videostate.hpp"
 
 namespace Video
@@ -31,6 +32,13 @@ void VideoPlayer::playVideo(const std::string &resourceName)
         mState = new VideoState;
         mState->setAudioFactory(mAudioFactory.get());
         mState->init(resourceName);
+
+        // wait until we have the first picture
+        while (mState->video_st && mState->mTexture.isNull())
+        {
+            if (!mState->update())
+                break;
+        }
     }
     catch(std::exception& e) {
         std::cerr<< "Failed to play video: "<<e.what() <<std::endl;
@@ -38,19 +46,17 @@ void VideoPlayer::playVideo(const std::string &resourceName)
     }
 }
 
-void VideoPlayer::update ()
+bool VideoPlayer::update ()
 {
     if(mState)
-    {
-        if(!mState->update())
-            close();
-    }
+        return mState->update();
+    return false;
 }
 
 std::string VideoPlayer::getTextureName()
 {
     std::string name;
-    if (mState)
+    if (mState && !mState->mTexture.isNull())
         name = mState->mTexture->getName();
     return name;
 }
@@ -58,7 +64,7 @@ std::string VideoPlayer::getTextureName()
 int VideoPlayer::getVideoWidth()
 {
     int width=0;
-    if (mState)
+    if (mState && !mState->mTexture.isNull())
         width = mState->mTexture->getWidth();
     return width;
 }
@@ -66,7 +72,7 @@ int VideoPlayer::getVideoWidth()
 int VideoPlayer::getVideoHeight()
 {
     int height=0;
-    if (mState)
+    if (mState && !mState->mTexture.isNull())
         height = mState->mTexture->getHeight();
     return height;
 }
@@ -82,14 +88,48 @@ void VideoPlayer::close()
     }
 }
 
-bool VideoPlayer::isPlaying ()
-{
-    return mState != NULL;
-}
-
 bool VideoPlayer::hasAudioStream()
 {
     return mState && mState->audio_st != NULL;
 }
 
+void VideoPlayer::play()
+{
+    if (mState)
+        mState->setPaused(false);
+}
+
+void VideoPlayer::pause()
+{
+    if (mState)
+        mState->setPaused(true);
+}
+
+bool VideoPlayer::isPaused()
+{
+    if (mState)
+        return mState->mPaused;
+    return true;
+}
+
+double VideoPlayer::getCurrentTime()
+{
+    if (mState)
+        return mState->get_master_clock();
+    return 0.0;
+}
+
+void VideoPlayer::seek(double time)
+{
+    if (mState)
+        mState->seekTo(time);
+}
+
+double VideoPlayer::getDuration()
+{
+    if (mState)
+        return mState->getDuration();
+    return 0.0;
+}
+
 }
extern/ogre-ffmpeg-videoplayer/videoplayer.hpp (vendored, 19 lines changed)
@@ -29,16 +29,29 @@ namespace Video
         bool hasAudioStream();
 
         /// Play the given video. If a video is already playing, the old video is closed first.
+        /// @note The video will be unpaused by default. Use the pause() and play() methods to control pausing.
         void playVideo (const std::string& resourceName);
 
+        /// Get the current playback time position in the video, in seconds
+        double getCurrentTime();
+
+        /// Get the duration of the video in seconds
+        double getDuration();
+
+        /// Seek to the specified time position in the video
+        void seek(double time);
+
+        void play();
+        void pause();
+        bool isPaused();
+
         /// This should be called every frame by the user to update the video texture.
-        void update();
+        /// @return Returns true if the video is still playing, false if we have reached the end of the video stream.
+        bool update();
 
         /// Stop the currently playing video, if a video is playing.
         void close();
 
-        bool isPlaying();
-
         /// Return the texture name of the currently playing video, or "" if no video is playing.
         std::string getTextureName();
         /// Return the width of the currently playing video, or 0 if no video is playing.
extern/ogre-ffmpeg-videoplayer/videostate.cpp (vendored, 261 lines changed)
@@ -25,12 +25,26 @@ extern "C"
     #include <libavutil/time.h>
 #endif
 
+    #include <libavutil/mathematics.h>
+
 #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55,28,1)
 #define av_frame_alloc  avcodec_alloc_frame
 #endif
 
 }
 
+static const char* flushString = "FLUSH";
+struct FlushPacket : AVPacket
+{
+    FlushPacket()
+        : AVPacket()
+    {
+        data = ( (uint8_t*)flushString);
+    }
+};
+
+static FlushPacket flush_pkt;
+
 #include "videoplayer.hpp"
 #include "audiodecoder.hpp"
 #include "audiofactory.hpp"
@@ -46,14 +60,18 @@ namespace Video
 
 VideoState::VideoState()
     : format_ctx(NULL), av_sync_type(AV_SYNC_DEFAULT)
-    , external_clock_base(0.0)
     , audio_st(NULL)
     , video_st(NULL), frame_last_pts(0.0)
     , video_clock(0.0), sws_context(NULL), rgbaFrame(NULL), pictq_size(0)
     , pictq_rindex(0), pictq_windex(0)
-    , quit(false)
+    , mQuit(false), mPaused(false)
     , mAudioFactory(NULL)
+    , mSeekRequested(false)
+    , mSeekPos(0)
+    , mVideoEnded(false)
 {
+    mFlushPktData = flush_pkt.data;
+
     // Register all formats and codecs
     av_register_all();
 }
@@ -77,7 +95,7 @@ void PacketQueue::put(AVPacket *pkt)
     pkt1->pkt = *pkt;
     pkt1->next = NULL;
 
-    if(pkt1->pkt.destruct == NULL)
+    if(pkt->data != flush_pkt.data && pkt1->pkt.destruct == NULL)
     {
         if(av_dup_packet(&pkt1->pkt) < 0)
         {
@@ -104,7 +122,7 @@ void PacketQueue::put(AVPacket *pkt)
 int PacketQueue::get(AVPacket *pkt, VideoState *is)
 {
     boost::unique_lock<boost::mutex> lock(this->mutex);
-    while(!is->quit)
+    while(!is->mQuit)
     {
         AVPacketList *pkt1 = this->first_pkt;
         if(pkt1)
@@ -143,7 +161,8 @@ void PacketQueue::clear()
     for(pkt = this->first_pkt; pkt != NULL; pkt = pkt1)
     {
         pkt1 = pkt->next;
-        av_free_packet(&pkt->pkt);
+        if (pkt->pkt.data != flush_pkt.data)
+            av_free_packet(&pkt->pkt);
         av_freep(&pkt);
     }
     this->last_pkt = NULL;
@@ -188,14 +207,17 @@ void VideoState::video_display(VideoPicture *vp)
 {
     if((*this->video_st)->codec->width != 0 && (*this->video_st)->codec->height != 0)
     {
-        if(static_cast<int>(mTexture->getWidth()) != (*this->video_st)->codec->width ||
-           static_cast<int>(mTexture->getHeight()) != (*this->video_st)->codec->height)
+        if (mTexture.isNull())
         {
-            mTexture->unload();
-            mTexture->setWidth((*this->video_st)->codec->width);
-            mTexture->setHeight((*this->video_st)->codec->height);
-            mTexture->createInternalResources();
+            static int i = 0;
+            mTexture = Ogre::TextureManager::getSingleton().createManual(
+                "ffmpeg/VideoTexture" + Ogre::StringConverter::toString(++i),
+                Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
+                Ogre::TEX_TYPE_2D,
+                (*this->video_st)->codec->width, (*this->video_st)->codec->height,
+                0,
+                Ogre::PF_BYTE_RGBA,
+                Ogre::TU_DYNAMIC_WRITE_ONLY_DISCARDABLE);
         }
         Ogre::PixelBox pb((*this->video_st)->codec->width, (*this->video_st)->codec->height, 1, Ogre::PF_BYTE_RGBA, &vp->data[0]);
         Ogre::HardwarePixelBufferSharedPtr buffer = mTexture->getBuffer();
@@ -205,6 +227,7 @@ void VideoState::video_display(VideoPicture *vp)
 
 void VideoState::video_refresh()
 {
+    boost::mutex::scoped_lock lock(this->pictq_mutex);
     if(this->pictq_size == 0)
         return;
 
@@ -212,16 +235,15 @@ void VideoState::video_refresh()
     {
         VideoPicture* vp = &this->pictq[this->pictq_rindex];
         this->video_display(vp);
 
         this->pictq_rindex = (pictq_rindex+1) % VIDEO_PICTURE_QUEUE_SIZE;
         this->frame_last_pts = vp->pts;
-        this->pictq_mutex.lock();
         this->pictq_size--;
         this->pictq_cond.notify_one();
-        this->pictq_mutex.unlock();
     }
     else
     {
-        const float threshold = 0.03;
+        const float threshold = 0.03f;
         if (this->pictq[pictq_rindex].pts > this->get_master_clock() + threshold)
             return; // not ready yet to show this picture
@@ -236,19 +258,18 @@ void VideoState::video_refresh()
                 break;
         }
 
+        assert (this->pictq_rindex < VIDEO_PICTURE_QUEUE_SIZE);
         VideoPicture* vp = &this->pictq[this->pictq_rindex];
 
         this->video_display(vp);
 
         this->frame_last_pts = vp->pts;
 
-        this->pictq_mutex.lock();
         this->pictq_size -= i;
         // update queue for next picture
         this->pictq_size--;
-        this->pictq_rindex++;
+        this->pictq_rindex = (this->pictq_rindex+1) % VIDEO_PICTURE_QUEUE_SIZE;
         this->pictq_cond.notify_one();
-        this->pictq_mutex.unlock();
     }
 }
@@ -260,12 +281,14 @@ int VideoState::queue_picture(AVFrame *pFrame, double pts)
     /* wait until we have a new pic */
     {
         boost::unique_lock<boost::mutex> lock(this->pictq_mutex);
-        while(this->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE && !this->quit)
+        while(this->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE && !this->mQuit)
             this->pictq_cond.timed_wait(lock, boost::posix_time::milliseconds(1));
     }
-    if(this->quit)
+    if(this->mQuit)
         return -1;
 
+    this->pictq_mutex.lock();
+
     // windex is set to 0 initially
     vp = &this->pictq[this->pictq_windex];
 
@@ -292,7 +315,6 @@ int VideoState::queue_picture(AVFrame *pFrame, double pts)
 
     // now we inform our display thread that we have a pic ready
     this->pictq_windex = (this->pictq_windex+1) % VIDEO_PICTURE_QUEUE_SIZE;
-    this->pictq_mutex.lock();
     this->pictq_size++;
     this->pictq_mutex.unlock();
 
@@ -353,6 +375,21 @@ void VideoState::video_thread_loop(VideoState *self)
 
     while(self->videoq.get(packet, self) >= 0)
     {
+        if(packet->data == flush_pkt.data)
+        {
+            avcodec_flush_buffers((*self->video_st)->codec);
+
+            self->pictq_mutex.lock();
+            self->pictq_size = 0;
+            self->pictq_rindex = 0;
+            self->pictq_windex = 0;
+            self->pictq_mutex.unlock();
+
+            self->frame_last_pts = packet->pts * av_q2d((*self->video_st)->time_base);
+            global_video_pkt_pts = self->frame_last_pts;
+            continue;
+        }
+
         // Save global pts to be stored in pFrame
         global_video_pkt_pts = packet->pts;
         // Decode video frame
@@ -394,8 +431,67 @@ void VideoState::decode_thread_loop(VideoState *self)
             throw std::runtime_error("No streams to decode");
 
         // main decode loop
-        while(!self->quit)
+        while(!self->mQuit)
         {
+            if(self->mSeekRequested)
+            {
+                uint64_t seek_target = self->mSeekPos;
+                int streamIndex = -1;
+
+                int videoStreamIndex = -1;;
+                int audioStreamIndex = -1;
+                if (self->video_st)
+                    videoStreamIndex = self->video_st - self->format_ctx->streams;
+                if (self->audio_st)
+                    audioStreamIndex = self->audio_st - self->format_ctx->streams;
+
+                if(videoStreamIndex >= 0)
+                    streamIndex = videoStreamIndex;
+                else if(audioStreamIndex >= 0)
+                    streamIndex = audioStreamIndex;
+
+                uint64_t timestamp = seek_target;
+
+                // QtCreator's highlighter doesn't like AV_TIME_BASE_Q's {} initializer for some reason
+                AVRational avTimeBaseQ = AVRational(); // = AV_TIME_BASE_Q;
+                avTimeBaseQ.num = 1;
+                avTimeBaseQ.den = AV_TIME_BASE;
+
+                if(streamIndex >= 0)
+                    timestamp = av_rescale_q(seek_target, avTimeBaseQ, self->format_ctx->streams[streamIndex]->time_base);
+
+                // AVSEEK_FLAG_BACKWARD appears to be needed, otherwise ffmpeg may seek to a keyframe *after* the given time
+                // we want to seek to any keyframe *before* the given time, so we can continue decoding as normal from there on
+                if(av_seek_frame(self->format_ctx, streamIndex, timestamp, AVSEEK_FLAG_BACKWARD) < 0)
+                    std::cerr << "Error seeking " << self->format_ctx->filename << std::endl;
+                else
+                {
+                    // Clear the packet queues and put a special packet with the new clock time
+                    if(audioStreamIndex >= 0)
+                    {
+                        self->audioq.clear();
+                        flush_pkt.pts = av_rescale_q(seek_target, avTimeBaseQ,
+                                                     self->format_ctx->streams[audioStreamIndex]->time_base);
+                        self->audioq.put(&flush_pkt);
+                    }
+                    if(videoStreamIndex >= 0)
+                    {
+                        self->videoq.clear();
+                        flush_pkt.pts = av_rescale_q(seek_target, avTimeBaseQ,
+                                                     self->format_ctx->streams[videoStreamIndex]->time_base);
+                        self->videoq.put(&flush_pkt);
+                    }
+                    self->pictq_mutex.lock();
+                    self->pictq_size = 0;
+                    self->pictq_rindex = 0;
+                    self->pictq_windex = 0;
+                    self->pictq_mutex.unlock();
+                    self->mExternalClock.set(seek_target);
+                }
+                self->mSeekRequested = false;
+            }
+
+
             if((self->audio_st && self->audioq.size > MAX_AUDIOQ_SIZE) ||
                (self->video_st && self->videoq.size > MAX_VIDEOQ_SIZE))
             {
|
||||||
}
|
}
|
||||||
|
|
||||||
if(av_read_frame(pFormatCtx, packet) < 0)
|
if(av_read_frame(pFormatCtx, packet) < 0)
|
||||||
break;
|
{
|
||||||
|
if (self->audioq.nb_packets == 0 && self->videoq.nb_packets == 0 && self->pictq_size == 0)
|
||||||
|
self->mVideoEnded = true;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
self->mVideoEnded = false;
|
||||||
|
|
||||||
// Is this a packet from the video stream?
|
// Is this a packet from the video stream?
|
||||||
if(self->video_st && packet->stream_index == self->video_st-pFormatCtx->streams)
|
if(self->video_st && packet->stream_index == self->video_st-pFormatCtx->streams)
|
||||||
|
@@ -414,17 +516,6 @@ void VideoState::decode_thread_loop(VideoState *self)
             else
                 av_free_packet(packet);
         }
-
-        /* all done - wait for it */
-        self->videoq.flush();
-        self->audioq.flush();
-        while(!self->quit)
-        {
-            // EOF reached, all packets processed, we can exit now
-            if(self->audioq.nb_packets == 0 && self->videoq.nb_packets == 0 && self->pictq_size == 0)
-                break;
-            boost::this_thread::sleep(boost::posix_time::milliseconds(100));
-        }
     }
     catch(std::runtime_error& e) {
         std::cerr << "An error occured playing the video: " << e.what () << std::endl;
@@ -433,17 +524,14 @@ void VideoState::decode_thread_loop(VideoState *self)
         std::cerr << "An error occured playing the video: " << e.getFullDescription () << std::endl;
     }
 
-    self->quit = true;
+    self->mQuit = true;
 }
 
 
 bool VideoState::update()
 {
-    if(this->quit)
-        return false;
-
     this->video_refresh();
-    return true;
+    return !this->mVideoEnded;
 }
 
@@ -510,7 +598,7 @@ void VideoState::init(const std::string& resourceName)
     unsigned int i;
 
     this->av_sync_type = AV_SYNC_DEFAULT;
-    this->quit = false;
+    this->mQuit = false;
 
     this->stream = Ogre::ResourceGroupManager::getSingleton().openResource(resourceName);
     if(this->stream.isNull())
|
||||||
audio_index = i;
|
audio_index = i;
|
||||||
}
|
}
|
||||||
|
|
||||||
this->external_clock_base = av_gettime();
|
mExternalClock.set(0);
|
||||||
|
|
||||||
if(audio_index >= 0)
|
if(audio_index >= 0)
|
||||||
this->stream_open(audio_index, this->format_ctx);
|
this->stream_open(audio_index, this->format_ctx);
|
||||||
|
@ -572,24 +660,6 @@ void VideoState::init(const std::string& resourceName)
|
||||||
if(video_index >= 0)
|
if(video_index >= 0)
|
||||||
{
|
{
|
||||||
this->stream_open(video_index, this->format_ctx);
|
this->stream_open(video_index, this->format_ctx);
|
||||||
|
|
||||||
int width = (*this->video_st)->codec->width;
|
|
||||||
int height = (*this->video_st)->codec->height;
|
|
||||||
static int i = 0;
|
|
||||||
this->mTexture = Ogre::TextureManager::getSingleton().createManual(
|
|
||||||
"ffmpeg/VideoTexture" + Ogre::StringConverter::toString(++i),
|
|
||||||
Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
|
|
||||||
Ogre::TEX_TYPE_2D,
|
|
||||||
width, height,
|
|
||||||
0,
|
|
||||||
Ogre::PF_BYTE_RGBA,
|
|
||||||
Ogre::TU_DYNAMIC_WRITE_ONLY_DISCARDABLE);
|
|
||||||
|
|
||||||
// initialize to (0,0,0,0)
|
|
||||||
std::vector<Ogre::uint32> buffer;
|
|
||||||
buffer.resize(width * height, 0);
|
|
||||||
Ogre::PixelBox pb(width, height, 1, Ogre::PF_BYTE_RGBA, &buffer[0]);
|
|
||||||
this->mTexture->getBuffer()->blitFromMemory(pb);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -598,13 +668,13 @@ void VideoState::init(const std::string& resourceName)
|
||||||
|
|
||||||
void VideoState::deinit()
|
void VideoState::deinit()
|
||||||
{
|
{
|
||||||
this->quit = true;
|
this->mQuit = true;
|
||||||
|
|
||||||
|
this->audioq.flush();
|
||||||
|
this->videoq.flush();
|
||||||
|
|
||||||
mAudioDecoder.reset();
|
mAudioDecoder.reset();
|
||||||
|
|
||||||
this->audioq.cond.notify_one();
|
|
||||||
this->videoq.cond.notify_one();
|
|
||||||
|
|
||||||
if (this->parse_thread.joinable())
|
if (this->parse_thread.joinable())
|
||||||
this->parse_thread.join();
|
this->parse_thread.join();
|
||||||
if (this->video_thread.joinable())
|
if (this->video_thread.joinable())
|
||||||
|
@ -639,11 +709,17 @@ void VideoState::deinit()
|
||||||
}
|
}
|
||||||
avformat_close_input(&this->format_ctx);
|
avformat_close_input(&this->format_ctx);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!mTexture.isNull())
|
||||||
|
{
|
||||||
|
Ogre::TextureManager::getSingleton().remove(mTexture->getName());
|
||||||
|
mTexture.setNull();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
double VideoState::get_external_clock()
|
double VideoState::get_external_clock()
|
||||||
{
|
{
|
||||||
return ((uint64_t)av_gettime()-this->external_clock_base) / 1000000.0;
|
return mExternalClock.get() / 1000000.0;
|
||||||
}
|
}
|
||||||
|
|
||||||
double VideoState::get_master_clock()
|
double VideoState::get_master_clock()
|
||||||
|
@ -667,5 +743,62 @@ double VideoState::get_audio_clock()
|
||||||
return mAudioDecoder->getAudioClock();
|
return mAudioDecoder->getAudioClock();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void VideoState::setPaused(bool isPaused)
|
||||||
|
{
|
||||||
|
this->mPaused = isPaused;
|
||||||
|
mExternalClock.setPaused(isPaused);
|
||||||
|
}
|
||||||
|
|
||||||
|
void VideoState::seekTo(double time)
|
||||||
|
{
|
||||||
|
time = std::max(0.0, time);
|
||||||
|
time = std::min(getDuration(), time);
|
||||||
|
mSeekPos = (uint64_t) (time * AV_TIME_BASE);
|
||||||
|
mSeekRequested = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
double VideoState::getDuration()
|
||||||
|
{
|
||||||
|
return this->format_ctx->duration / 1000000.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
ExternalClock::ExternalClock()
|
||||||
|
: mTimeBase(av_gettime())
|
||||||
|
, mPausedAt(0)
|
||||||
|
, mPaused(false)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
void ExternalClock::setPaused(bool paused)
|
||||||
|
{
|
||||||
|
boost::mutex::scoped_lock lock(mMutex);
|
||||||
|
if (mPaused == paused)
|
||||||
|
return;
|
||||||
|
if (paused)
|
||||||
|
{
|
||||||
|
mPausedAt = av_gettime() - mTimeBase;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
mTimeBase = av_gettime() - mPausedAt;
|
||||||
|
mPaused = paused;
|
||||||
|
}
|
||||||
|
|
||||||
|
uint64_t ExternalClock::get()
|
||||||
|
{
|
||||||
|
boost::mutex::scoped_lock lock(mMutex);
|
||||||
|
if (mPaused)
|
||||||
|
return mPausedAt;
|
||||||
|
else
|
||||||
|
return av_gettime() - mTimeBase;
|
||||||
|
}
|
||||||
|
|
||||||
|
void ExternalClock::set(uint64_t time)
|
||||||
|
{
|
||||||
|
boost::mutex::scoped_lock lock(mMutex);
|
||||||
|
mTimeBase = av_gettime() - time;
|
||||||
|
mPausedAt = time;
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
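Note: the pause/resume bookkeeping introduced by ExternalClock above can be sketched in isolation. The following is a minimal standalone illustration that mirrors the same arithmetic with std::chrono standing in for av_gettime(); PausableClock, nowUs() and the main() driver are names invented for this note, not part of the patch.

// Illustrative re-implementation of the ExternalClock pause/resume idea.
#include <chrono>
#include <cstdint>
#include <iostream>
#include <mutex>
#include <thread>

static uint64_t nowUs()
{
    using namespace std::chrono;
    return duration_cast<microseconds>(steady_clock::now().time_since_epoch()).count();
}

class PausableClock
{
public:
    PausableClock() : mTimeBase(nowUs()), mPausedAt(0), mPaused(false) {}

    // While paused, get() keeps returning the frozen value; on resume the
    // time base is shifted so no time appears to have passed while paused.
    void setPaused(bool paused)
    {
        std::lock_guard<std::mutex> lock(mMutex);
        if (mPaused == paused)
            return;
        if (paused)
            mPausedAt = nowUs() - mTimeBase;
        else
            mTimeBase = nowUs() - mPausedAt;
        mPaused = paused;
    }

    // Elapsed microseconds, excluding time spent paused.
    uint64_t get()
    {
        std::lock_guard<std::mutex> lock(mMutex);
        return mPaused ? mPausedAt : nowUs() - mTimeBase;
    }

    // Jump the clock to an absolute position (what a seek does).
    void set(uint64_t timeUs)
    {
        std::lock_guard<std::mutex> lock(mMutex);
        mTimeBase = nowUs() - timeUs;
        mPausedAt = timeUs;
    }

private:
    uint64_t mTimeBase;
    uint64_t mPausedAt;
    bool mPaused;
    std::mutex mMutex;
};

int main()
{
    PausableClock clock;
    std::this_thread::sleep_for(std::chrono::milliseconds(50));
    clock.setPaused(true);
    uint64_t frozen = clock.get();
    std::this_thread::sleep_for(std::chrono::milliseconds(50));
    clock.setPaused(false); // resumes from roughly 50ms, not 100ms
    std::cout << frozen << " " << clock.get() << "\n";
    return 0;
}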
extern/ogre-ffmpeg-videoplayer/videostate.hpp (vendored, 33 changes)
@@ -27,6 +27,21 @@ struct VideoState;
 class MovieAudioFactory;
 class MovieAudioDecoder;
 
+struct ExternalClock
+{
+    ExternalClock();
+
+    uint64_t mTimeBase;
+    uint64_t mPausedAt;
+    bool mPaused;
+
+    boost::mutex mMutex;
+
+    void setPaused(bool paused);
+    uint64_t get();
+    void set(uint64_t time);
+};
+
 struct PacketQueue {
     PacketQueue()
       : first_pkt(NULL), last_pkt(NULL), flushing(false), nb_packets(0), size(0)
@@ -66,6 +81,11 @@ struct VideoState {
     void init(const std::string& resourceName);
     void deinit();
 
+    void setPaused(bool isPaused);
+    void seekTo(double time);
+
+    double getDuration();
+
     int stream_open(int stream_index, AVFormatContext *pFormatCtx);
 
     bool update();
@@ -93,15 +113,18 @@ struct VideoState {
     MovieAudioFactory* mAudioFactory;
     boost::shared_ptr<MovieAudioDecoder> mAudioDecoder;
 
+    ExternalClock mExternalClock;
+
     Ogre::DataStreamPtr stream;
     AVFormatContext* format_ctx;
 
     int av_sync_type;
-    uint64_t external_clock_base;
 
     AVStream** audio_st;
     PacketQueue audioq;
 
+    uint8_t* mFlushPktData;
+
     AVStream** video_st;
     double frame_last_pts;
     double video_clock; ///<pts of last decoded frame / predicted pts of next decoded frame
@@ -113,11 +136,15 @@ struct VideoState {
     boost::mutex pictq_mutex;
     boost::condition_variable pictq_cond;
 
 
     boost::thread parse_thread;
     boost::thread video_thread;
 
-    volatile bool quit;
+    volatile bool mSeekRequested;
+    uint64_t mSeekPos;
+
+    volatile bool mVideoEnded;
+    volatile bool mPaused;
+    volatile bool mQuit;
 };
 
 }
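Note: the new seekTo()/getDuration() pair declared above clamps the requested position to the stream's length before converting it to FFmpeg ticks. A standalone sketch of that clamping and scaling follows; kTimeBase mirrors FFmpeg's AV_TIME_BASE (1,000,000 ticks per second), and the function and variable names are illustrative only, not taken from the patch.

// Sketch of the seek-target clamping done by VideoState::seekTo().
#include <algorithm>
#include <cstdint>
#include <iostream>

static const int64_t kTimeBase = 1000000; // assumed stand-in for AV_TIME_BASE

// Convert a requested position in seconds into clamped stream ticks.
static uint64_t toSeekPos(double requestedSeconds, double durationSeconds)
{
    double t = std::max(0.0, requestedSeconds);
    t = std::min(durationSeconds, t);
    return static_cast<uint64_t>(t * kTimeBase);
}

int main()
{
    double duration = 125.4;                         // e.g. format_ctx->duration / 1e6
    std::cout << toSeekPos(-3.0, duration) << "\n";  // 0: clamped to the start
    std::cout << toSeekPos(60.0, duration) << "\n";  // 60000000
    std::cout << toSeekPos(999.0, duration) << "\n"; // clamped to the duration
    return 0;
}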
@@ -68,7 +68,7 @@
     @version 120
 #endif
 
-#if SH_GLSLES == 1 && SH_FRAGMENT_SHADER
+#if SH_GLSLES == 1
     precision mediump int;
     precision mediump float;
 #endif
@@ -499,7 +499,7 @@
 
 #if SHADOWS || SHADOWS_PSSM
     float fadeRange = shadowFar_fadeStart.x - shadowFar_fadeStart.y;
-    float fade = 1-((depthPassthrough - shadowFar_fadeStart.y) / fadeRange);
+    float fade = 1.0-((depthPassthrough - shadowFar_fadeStart.y) / fadeRange);
     shadow = (depthPassthrough > shadowFar_fadeStart.x) ? 1.0 : ((depthPassthrough > shadowFar_fadeStart.y) ? 1.0-((1.0-shadow)*fade) : shadow);
 #endif
 
@@ -514,11 +514,11 @@
 #endif
 
 #if UNDERWATER
-    float3 waterEyePos = intercept(worldPos, cameraPos.xyz - worldPos, float3(0,0,1), waterLevel);
+    float3 waterEyePos = intercept(worldPos, cameraPos.xyz - worldPos, float3(0.0,0.0,1.0), waterLevel);
 #endif
 
 #if SHADOWS || SHADOWS_PSSM
-    shOutputColour(0) *= (lightResult - float4(directionalResult * (1.0-shadow),0));
+    shOutputColour(0) *= (lightResult - float4(directionalResult * (1.0-shadow),0.0));
 #else
     shOutputColour(0) *= lightResult;
 #endif
@@ -574,7 +574,7 @@
 #endif
 
     // prevent negative colour output (for example with negative lights)
-    shOutputColour(0).xyz = max(shOutputColour(0).xyz, float3(0,0,0));
+    shOutputColour(0).xyz = max(shOutputColour(0).xyz, float3(0.0,0.0,0.0));
 }
 
 #endif
@@ -6,11 +6,11 @@ float depthShadowPCF (shTexture2D shadowMap, float4 shadowMapPos, float2 offset)
     shadowMapPos /= shadowMapPos.w;
     float3 o = float3(offset.xy, -offset.x) * 0.3;
     //float3 o = float3(0,0,0);
-    float c = (shadowMapPos.z <= FIXED_BIAS + shSample(shadowMap, shadowMapPos.xy - o.xy).r) ? 1 : 0; // top left
-    c += (shadowMapPos.z <= FIXED_BIAS + shSample(shadowMap, shadowMapPos.xy + o.xy).r) ? 1 : 0; // bottom right
-    c += (shadowMapPos.z <= FIXED_BIAS + shSample(shadowMap, shadowMapPos.xy + o.zy).r) ? 1 : 0; // bottom left
-    c += (shadowMapPos.z <= FIXED_BIAS + shSample(shadowMap, shadowMapPos.xy - o.zy).r) ? 1 : 0; // top right
-    return c / 4;
+    float c = (shadowMapPos.z <= FIXED_BIAS + shSample(shadowMap, shadowMapPos.xy - o.xy).r) ? 1.0 : 0.0; // top left
+    c += (shadowMapPos.z <= FIXED_BIAS + shSample(shadowMap, shadowMapPos.xy + o.xy).r) ? 1.0 : 0.0; // bottom right
+    c += (shadowMapPos.z <= FIXED_BIAS + shSample(shadowMap, shadowMapPos.xy + o.zy).r) ? 1.0 : 0.0; // bottom left
+    c += (shadowMapPos.z <= FIXED_BIAS + shSample(shadowMap, shadowMapPos.xy - o.zy).r) ? 1.0 : 0.0; // top right
+    return c / 4.0;
 }
 
 
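Note: the literal changes in the PCF helper above matter because each of the four comparisons yields 0.0 or 1.0 and the sum is averaged, and older GLSL ES versions do not implicitly convert int literals to float in that context. A quick CPU-side sketch of the same averaging follows; the sample depths, kFixedBias value and function names are made up for illustration and do not come from the shader.

// CPU-side illustration of the 4-tap percentage-closer filter average.
#include <array>
#include <iostream>

static const float kFixedBias = 0.0005f; // stand-in for FIXED_BIAS

// One shadow-map comparison: lit (1.0) if the fragment is not behind the
// stored occluder depth, otherwise shadowed (0.0).
static float depthTest(float fragmentDepth, float storedDepth)
{
    return (fragmentDepth <= kFixedBias + storedDepth) ? 1.0f : 0.0f;
}

// Average of four neighbouring comparisons, as in the shader's c / 4.0.
static float pcf4(float fragmentDepth, const std::array<float, 4>& neighbourDepths)
{
    float c = 0.0f;
    for (float stored : neighbourDepths)
        c += depthTest(fragmentDepth, stored);
    return c / 4.0f;
}

int main()
{
    std::array<float, 4> samples = { 0.40f, 0.55f, 0.38f, 0.60f };
    std::cout << pcf4(0.5f, samples) << "\n"; // 0.5: half the taps are lit
    return 0;
}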
@@ -2,47 +2,51 @@
 
 <MyGUI type="Layout">
 
-    <Widget type="Window" skin="" layer="Windows" align="Left|Top" position="0 0 565 390" name="_Main">
+    <Widget type="Window" skin="" layer="Windows" align="Left Top" position="0 0 584 398" name="_Main">
 
-        <Widget type="ImageBox" skin="ImageBox" position="-70 0 705 390" align="Top|Right" name="JImage">
+        <Widget type="ImageBox" skin="ImageBox" position="-71 0 728 398" align="Left Top" name="JImage">
             <Property key="ImageTexture" value="textures\tx_menubook.dds"/>
-            <Widget type="Widget" position="70 0 565 390" align="Top|Right">
+            <Widget type="Widget" position="71 0 584 398" align="Left Top">
 
-                <Widget type="Widget" position="0 0 282 390">
-                    <Widget type="ImageButton" skin="ImageBox" position="205 350 48 32" name="PrevPageBTN">
+                <Widget type="Widget" position="0 0 292 398">
+                    <Widget type="ImageButton" skin="ImageBox" position="205 358 48 32" name="PrevPageBTN">
                         <Property key="ImageHighlighted" value="textures\tx_menubook_prev_over.dds"/>
                         <Property key="ImageNormal" value="textures\tx_menubook_prev_idle.dds"/>
                         <Property key="ImagePushed" value="textures\tx_menubook_prev_pressed.dds"/>
                     </Widget>
                 </Widget>
-                <Widget type="Widget" position="282 0 282 390">
-                    <Widget type="ImageButton" skin="ImageBox" position="18 350 48 32" name="NextPageBTN">
+                <Widget type="Widget" position="292 0 292 398">
+                    <Widget type="ImageButton" skin="ImageBox" position="38 358 48 32" name="NextPageBTN">
                         <Property key="ImageHighlighted" value="textures\tx_menubook_next_over.dds"/>
                         <Property key="ImageNormal" value="textures\tx_menubook_next_idle.dds"/>
                         <Property key="ImagePushed" value="textures\tx_menubook_next_pressed.dds"/>
                     </Widget>
                 </Widget>
 
-                <Widget type="ImageButton" skin="ImageBox" position="40 350 64 32" name="TakeButton">
+                <Widget type="ImageButton" skin="ImageBox" position="40 358 64 32" name="TakeButton">
                     <Property key="ImageHighlighted" value="textures\tx_menubook_take_over.dds"/>
                     <Property key="ImageNormal" value="textures\tx_menubook_take_idle.dds"/>
                     <Property key="ImagePushed" value="textures\tx_menubook_take_pressed.dds"/>
                 </Widget>
-                <Widget type="ImageButton" skin="ImageBox" position="460 350 48 32" name="CloseButton">
+                <Widget type="ImageButton" skin="ImageBox" position="488 358 48 32" name="CloseButton">
                     <Property key="ImageHighlighted" value="textures\tx_menubook_close_over.dds"/>
                     <Property key="ImageNormal" value="textures\tx_menubook_close_idle.dds"/>
                     <Property key="ImagePushed" value="textures\tx_menubook_close_pressed.dds"/>
                 </Widget>
 
-                <Widget type="TextBox" skin="NormalText" position="150 350 32 16" name="LeftPageNumber">
+                <Widget type="TextBox" skin="NormalText" position="30 358 250 16" name="LeftPageNumber">
                     <Property key="TextColour" value="0 0 0"/>
+                    <Property key="TextAlign" value="Center"/>
+                    <Property key="NeedMouse" value="false"/>
                 </Widget>
-                <Widget type="TextBox" skin="NormalText" position="410 350 32 16" name="RightPageNumber">
+                <Widget type="TextBox" skin="NormalText" position="310 358 250 16" name="RightPageNumber">
                     <Property key="TextColour" value="0 0 0"/>
+                    <Property key="TextAlign" value="Center"/>
+                    <Property key="NeedMouse" value="false"/>
                 </Widget>
 
-                <Widget type="Widget" skin="" position="30 15 240 328" name="LeftPage"/>
-                <Widget type="Widget" skin="" position="300 15 240 328" name="RightPage"/>
+                <Widget type="Widget" skin="" position="30 15 250 328" name="LeftPage"/>
+                <Widget type="Widget" skin="" position="310 15 250 328" name="RightPage"/>
             </Widget>
         </Widget>
     </Widget>
@@ -3,7 +3,7 @@
     <Widget type="Window" skin="MW_Dialog" layer="Windows" position="0 0 498 198" name="_Main">
 
         <!-- Class name -->
-        <Widget type="TextBox" skin="NormalText" position="8 8 48 23" name="LabelT" align="Left Top">
+        <Widget type="TextBox" skin="NormalText" position="8 8 52 23" name="LabelT" align="Left Top">
             <Property key="Caption" value="#{sName}:"/>
             <Property key="TextAlign" value="Left VCenter"/>
         </Widget>
@@ -69,33 +69,46 @@
         </Widget>
 
         <!-- Player level, race and class -->
-        <Widget type="Widget" skin="MW_Box" position="8 78 212 62" align="Left Top HStretch">
-            <Widget type="TextBox" skin="NormalText" position="4 4 100 18" name="Level_str" align="Left Top HStretch">
-                <Property key="Caption" value="#{sLevel}"/>
-                <UserString key="ToolTipType" value="Layout"/>
-                <UserString key="ToolTipLayout" value="LevelToolTip"/>
+        <Widget type="Widget" skin="MW_Box" position="8 78 212 62" align="Top HStretch">
+            <Widget type="HBox" position="4 4 204 18" align="Top HStretch">
+                <Widget type="AutoSizedTextBox" skin="NormalText" position="0 0 200 18" name="Level_str" align="Left Top">
+                    <Property key="Caption" value="#{sLevel}"/>
+                    <UserString key="ToolTipType" value="Layout"/>
+                    <UserString key="ToolTipLayout" value="LevelToolTip"/>
+                </Widget>
+                <Widget type="TextBox" skin="SandTextRight" position="200 0 40 18" name="LevelText" align="Right Top">
+                    <Property key="TextAlign" value="Right Top"/>
+                    <UserString key="ToolTipType" value="Layout"/>
+                    <UserString key="ToolTipLayout" value="LevelToolTip"/>
+                    <UserString key="HStretch" value="true"/>
+                </Widget>
+
             </Widget>
-            <Widget type="TextBox" skin="NormalText" position="4 22 100 18" name="Race_str" align="Left Top HStretch">
-                <Property key="Caption" value="#{sRace}"/>
-                <UserString key="ToolTipType" value="Layout"/>
-                <UserString key="ToolTipLayout" value="TextWithCenteredCaptionToolTip"/>
+            <Widget type="HBox" position="4 24 204 18" align="Top HStretch">
+                <Widget type="AutoSizedTextBox" skin="NormalText" position="0 0 95 18" name="Race_str" align="Left Top">
+                    <Property key="Caption" value="#{sRace}"/>
+                    <UserString key="ToolTipType" value="Layout"/>
+                    <UserString key="ToolTipLayout" value="TextWithCenteredCaptionToolTip"/>
+                </Widget>
+                <Widget type="TextBox" skin="SandTextRight" position="104 0 200 18" name="RaceText" align="Left Top">
+                    <Property key="TextAlign" value="Right Top"/>
+                    <UserString key="ToolTipType" value="Layout"/>
+                    <UserString key="ToolTipLayout" value="TextWithCenteredCaptionToolTip"/>
+                    <UserString key="HStretch" value="true"/>
+                </Widget>
             </Widget>
-            <Widget type="TextBox" skin="NormalText" position="4 40 100 18" name="Class_str" align="Left Top HStretch">
-                <Property key="Caption" value="#{sClass}"/>
-                <UserString key="ToolTipType" value="Layout"/>
-                <UserString key="ToolTipLayout" value="ClassToolTip"/>
-            </Widget>
-            <Widget type="TextBox" skin="SandTextRight" position="104 4 104 18" name="LevelText" align="Right Top">
-                <UserString key="ToolTipType" value="Layout"/>
-                <UserString key="ToolTipLayout" value="LevelToolTip"/>
-            </Widget>
-            <Widget type="TextBox" skin="SandTextRight" position="104 22 104 18" name="RaceText" align="Right Top">
-                <UserString key="ToolTipType" value="Layout"/>
-                <UserString key="ToolTipLayout" value="TextWithCenteredCaptionToolTip"/>
-            </Widget>
-            <Widget type="TextBox" skin="SandTextRight" position="104 40 104 18" name="ClassText" align="Right Top">
-                <UserString key="ToolTipType" value="Layout"/>
-                <UserString key="ToolTipLayout" value="ClassToolTip"/>
-            </Widget>
+            <Widget type="HBox" position="4 42 204 18" align="Top HStretch">
+                <Widget type="AutoSizedTextBox" skin="NormalText" position="0 0 95 18" name="Class_str" align="Left Top">
+                    <Property key="Caption" value="#{sClass}"/>
+                    <UserString key="ToolTipType" value="Layout"/>
+                    <UserString key="ToolTipLayout" value="ClassToolTip"/>
+                </Widget>
+                <Widget type="TextBox" skin="SandTextRight" position="104 0 200 18" name="ClassText" align="Left Top">
+                    <Property key="TextAlign" value="Right Top"/>
+                    <UserString key="ToolTipType" value="Layout"/>
+                    <UserString key="ToolTipLayout" value="ClassToolTip"/>
+                    <UserString key="HStretch" value="true"/>
+                </Widget>
+            </Widget>
             </Widget>
         </Widget>
 