Rewrite of tracking logic.

pull/615/head
madsbuvi 4 years ago
parent 6e5d9d39dd
commit d1aaa74193

@ -260,9 +260,9 @@ if(BUILD_OPENMW_VR)
vrengine.cpp
)
add_openmw_dir (mwvr
openxraction openxractionset openxrdebug openxrinput openxrmanager openxrmanagerimpl openxrplatform openxrswapchain openxrswapchainimage openxrswapchainimpl
openxraction openxractionset openxrdebug openxrinput openxrmanager openxrmanagerimpl openxrplatform openxrswapchain openxrswapchainimage openxrswapchainimpl openxrtracker openxrtypeconversions
realisticcombat
vranimation vrcamera vrenvironment vrframebuffer vrgui vrinputmanager vrinput vrlistbox vrmetamenu vrsession vrshadow vrtypes vrview vrviewer vrvirtualkeyboard
vranimation vrcamera vrenvironment vrframebuffer vrgui vrinputmanager vrinput vrlistbox vrmetamenu vrsession vrtracking vrtypes vrviewer vrvirtualkeyboard
)
openmw_add_executable(openmw_vr

@ -630,6 +630,7 @@ namespace MWGui
if (MWBase::Environment::get().getVrMode())
{
MWVR::Environment::get().getSession()->processChangedSettings(changed);
MWVR::Environment::get().getTrackingManager()->processChangedSettings(changed);
MWVR::Environment::get().getViewer()->processChangedSettings(changed);
MWVR::Environment::get().getGUIManager()->processChangedSettings(changed);
}

@ -128,6 +128,7 @@
#include "../mwvr/vrvirtualkeyboard.hpp"
#include "../mwvr/vrviewer.hpp"
#include "../mwvr/vrsession.hpp"
#include "../mwvr/vrtracking.hpp"
#endif
namespace MWGui
@ -2251,21 +2252,11 @@ namespace MWGui
// Runs the OSG viewer traversals for one frame, optionally letting the world
// refresh the window manager between the update and rendering traversals.
void WindowManager::viewerTraversals(bool updateWindowManager)
{
#ifdef USE_OPENXR
    // In VR the frame is bracketed by the XR session: begin the XR frame
    // before any viewer traversals run.
    if (MWBase::Environment::get().getVrMode())
        MWVR::Environment::get().getSession()->beginFrame();
#endif
    mViewer->eventTraversal();
    mViewer->updateTraversal();
    if (updateWindowManager)
        MWBase::Environment::get().getWorld()->updateWindowManager();
    mViewer->renderingTraversals();
#ifdef USE_OPENXR
    // Close out the XR frame once rendering traversals have been issued.
    if (MWBase::Environment::get().getVrMode())
        MWVR::Environment::get().getSession()->endFrame();
#endif
}
void WindowManager::GuiModeState::update(bool visible)

@ -157,19 +157,15 @@ namespace MWPhysics
// I assume (1.) is easily solved, i just haven't taken the effort to study openmw's code enough.
// But 2. is not so obvious. I guess it's doable if i compute the direction between current position and the player's
// position in the VR stage, and just let it catch up at the character's own move speed, but it still needs to reach the position as exactly as possible.
auto* session = MWVR::Environment::get().getSession();
if (session)
{
if (isPlayer)
{
auto tm = MWVR::Environment::get().getTrackingManager();
float pitch = 0.f;
float yaw = 0.f;
session->movementAngles(yaw, pitch);
tm->movementAngles(yaw, pitch);
refpos.rot[0] += pitch;
refpos.rot[2] += yaw;
}
}
#endif
// Reset per-frame data
@ -236,94 +232,94 @@ namespace MWPhysics
osg::Vec3f origVelocity = velocity;
osg::Vec3f newPosition = actor.mPosition;
#ifdef USE_OPENXR
// Catch the player character up to the real world position of the player.
// But only if the player is not seated.
// TODO: Hack.
if (isPlayer)
{
bool shouldMove = true;
if (session && session->seatedPlay())
shouldMove = false;
if (world->getPlayer().isDisabled())
shouldMove = false;
if (shouldMove)
{
auto* inputManager = reinterpret_cast<MWVR::VRCamera*>(MWBase::Environment::get().getWorld()->getRenderingManager().getCamera());
osg::Vec3 headOffset = inputManager->headOffset();
osg::Vec3 trackingOffset = headOffset;
// Player's tracking height should not affect character position
trackingOffset.z() = 0;
float remainingTime = time;
bool seenGround = physicActor->getOnGround() && !physicActor->getOnSlope() && !actor.mFlying;
float remainder = 1.f;
for (int iterations = 0; iterations < sMaxIterations && remainingTime > 0.01f && remainder > 0.01; ++iterations)
{
osg::Vec3 toMove = trackingOffset * remainder;
osg::Vec3 nextpos = newPosition + toMove;
if ((newPosition - nextpos).length2() > 0.0001)
{
// trace to where character would go if there were no obstructions
tracer.doTrace(colobj, newPosition, nextpos, collisionWorld);
// check for obstructions
if (tracer.mFraction >= 1.0f)
{
newPosition = tracer.mEndPos; // ok to move, so set newPosition
remainder = 0.f;
break;
}
}
else
{
// The current position and next position are nearly the same, so just exit.
// Note: Bullet can trigger an assert in debug modes if the positions
// are the same, since that causes it to attempt to normalize a zero
// length vector (which can also happen with nearly identical vectors, since
// precision can be lost due to any math Bullet does internally). Since we
// aren't performing any collision detection, we want to reject the next
// position, so that we don't slowly move inside another object.
remainder = 0.f;
break;
}
if (isWalkableSlope(tracer.mPlaneNormal) && !actor.mFlying && newPosition.z() >= swimlevel)
seenGround = true;
// We are touching something.
if (tracer.mFraction < 1E-9f)
{
// Try to separate by backing off slightly to unstick the solver
osg::Vec3f backOff = (newPosition - tracer.mHitPoint) * 1E-2f;
newPosition += backOff;
}
// We hit something. Check if we can step up.
float hitHeight = tracer.mHitPoint.z() - tracer.mEndPos.z() + halfExtents.z();
osg::Vec3f oldPosition = newPosition;
bool result = false;
if (hitHeight < sStepSizeUp && !isActor(tracer.mHitObject))
{
// Try to step up onto it.
// NOTE: stepMove does not allow stepping over, modifies newPosition if successful
result = stepper.step(newPosition, toMove, remainingTime, seenGround, iterations == 0);
remainder = remainingTime / time;
}
}
// Try not to lose any tracking
osg::Vec3 moved = newPosition - actor.mPosition;
headOffset.x() -= moved.x();
headOffset.y() -= moved.y();
inputManager->setHeadOffset(headOffset);
}
}
#endif
//#ifdef USE_OPENXR
// // Catch the player character up to the real world position of the player.
// // But only if play is not seated.
// // TODO: This solution is a hack.
// if (isPlayer)
// {
// bool shouldMove = true;
// if (session && session->seatedPlay())
// shouldMove = false;
// if (world->getPlayer().isDisabled())
// shouldMove = false;
//
// if (shouldMove)
// {
// auto* inputManager = reinterpret_cast<MWVR::VRCamera*>(MWBase::Environment::get().getWorld()->getRenderingManager().getCamera());
//
// osg::Vec3 headOffset = inputManager->headOffset();
// osg::Vec3 trackingOffset = headOffset;
// // Player's tracking height should not affect character position
// trackingOffset.z() = 0;
//
// float remainingTime = time;
// bool seenGround = physicActor->getOnGround() && !physicActor->getOnSlope() && !actor.mFlying;
// float remainder = 1.f;
//
// for (int iterations = 0; iterations < sMaxIterations && remainingTime > 0.01f && remainder > 0.01; ++iterations)
// {
// osg::Vec3 toMove = trackingOffset * remainder;
// osg::Vec3 nextpos = newPosition + toMove;
//
// if ((newPosition - nextpos).length2() > 0.0001)
// {
// // trace to where character would go if there were no obstructions
// tracer.doTrace(colobj, newPosition, nextpos, collisionWorld);
//
// // check for obstructions
// if (tracer.mFraction >= 1.0f)
// {
// newPosition = tracer.mEndPos; // ok to move, so set newPosition
// remainder = 0.f;
// break;
// }
// }
// else
// {
// // The current position and next position are nearly the same, so just exit.
// // Note: Bullet can trigger an assert in debug modes if the positions
// // are the same, since that causes it to attempt to normalize a zero
// // length vector (which can also happen with nearly identical vectors, since
// // precision can be lost due to any math Bullet does internally). Since we
// // aren't performing any collision detection, we want to reject the next
// // position, so that we don't slowly move inside another object.
// remainder = 0.f;
// break;
// }
//
// if (isWalkableSlope(tracer.mPlaneNormal) && !actor.mFlying && newPosition.z() >= swimlevel)
// seenGround = true;
//
// // We are touching something.
// if (tracer.mFraction < 1E-9f)
// {
// // Try to separate by backing off slighly to unstuck the solver
// osg::Vec3f backOff = (newPosition - tracer.mHitPoint) * 1E-2f;
// newPosition += backOff;
// }
//
// // We hit something. Check if we can step up.
// float hitHeight = tracer.mHitPoint.z() - tracer.mEndPos.z() + halfExtents.z();
// osg::Vec3f oldPosition = newPosition;
// bool result = false;
// if (hitHeight < sStepSizeUp && !isActor(tracer.mHitObject))
// {
// // Try to step up onto it.
// // NOTE: stepMove does not allow stepping over, modifies newPosition if successful
// result = stepper.step(newPosition, toMove, remainingTime, seenGround, iterations == 0);
// remainder = remainingTime / time;
// }
// }
//
// // Try not to lose any tracking
// osg::Vec3 moved = newPosition - actor.mPosition;
// headOffset.x() -= moved.x();
// headOffset.y() -= moved.y();
// inputManager->setHeadOffset(headOffset);
// }
// }
//#endif
/*
* A loop to find newPosition using tracer, if successful different from the starting position.

@ -101,7 +101,7 @@ namespace MWRender
void setFocalPointTransitionSpeed(float v) { mFocalPointTransitionSpeedCoef = v; }
void setFocalPointTargetOffset(osg::Vec2d v);
void instantTransition();
virtual void instantTransition();
void enableDynamicCameraDistance(bool v) { mDynamicCameraDistanceEnabled = v; }
void enableCrosshairInThirdPersonMode(bool v) { mShowCrosshairInThirdPersonMode = v; }

@ -24,75 +24,10 @@ namespace MWVR
: mActionSet(nullptr)
, mLocalizedName(actionSetName)
, mInternalName(Misc::StringUtils::lowerCase(actionSetName))
, mDeadzone(deadzone)
{
mActionSet = createActionSet(actionSetName);
// When starting to account for more devices than oculus touch, this section may need some expansion/redesign.
// Currently the set of action paths was determined using the oculus touch (i know nothing about the vive and the index).
// The set of action paths may therefore need expansion. E.g. /click vs /value may vary with controllers.
/*
// Applicable actions not (yet) included
A_QuickKey1,
A_QuickKey2,
A_QuickKey3,
A_QuickKey4,
A_QuickKey5,
A_QuickKey6,
A_QuickKey7,
A_QuickKey8,
A_QuickKey9,
A_QuickKey10,
A_QuickKeysMenu,
A_QuickLoad,
A_CycleSpellLeft,
A_CycleSpellRight,
A_CycleWeaponLeft,
A_CycleWeaponRight,
A_Screenshot, // Generate a VR screenshot?
A_Console, // Currently awkward due to a lack of virtual keyboard, but should be included when that's in place
*/
// To fit more actions onto controllers i created a system of short and long press actions. Allowing one action to activate
// on a short press, and another on long. Here, what actions are short press and what actions are long press is simply
// hardcoded at init, rather than interpreted from bindings. That's bad, and should be fixed, but that's hard to do
// while staying true to openxr's binding system, so if the system i wrote for the oculus touch isn't a good fit for
// the vive/index, we might want to rewrite this to handle bindings ourselves.
createMWAction<ButtonPressAction>(MWInput::A_GameMenu, "game_menu", "Game Menu");
createMWAction<ButtonPressAction>(A_VrMetaMenu, "meta_menu", "Meta Menu");
createMWAction<ButtonLongPressAction>(A_Recenter, "reposition_menu", "Reposition Menu");
createMWAction<ButtonPressAction>(MWInput::A_Inventory, "inventory", "Inventory");
createMWAction<ButtonPressAction>(MWInput::A_Activate, "activate", "Activate");
createMWAction<ButtonHoldAction>(MWInput::A_Use, "use", "Use");
createMWAction<ButtonHoldAction>(MWInput::A_Jump, "jump", "Jump");
createMWAction<ButtonPressAction>(MWInput::A_ToggleWeapon, "weapon", "Weapon");
createMWAction<ButtonPressAction>(MWInput::A_ToggleSpell, "spell", "Spell");
createMWAction<ButtonPressAction>(MWInput::A_CycleSpellLeft, "cycle_spell_left", "Cycle Spell Left");
createMWAction<ButtonPressAction>(MWInput::A_CycleSpellRight, "cycle_spell_right", "Cycle Spell Right");
createMWAction<ButtonPressAction>(MWInput::A_CycleWeaponLeft, "cycle_weapon_left", "Cycle Weapon Left");
createMWAction<ButtonPressAction>(MWInput::A_CycleWeaponRight, "cycle_weapon_right", "Cycle Weapon Right");
createMWAction<ButtonHoldAction>(MWInput::A_Sneak, "sneak", "Sneak");
createMWAction<ButtonPressAction>(MWInput::A_QuickKeysMenu, "quick_menu", "Quick Menu");
createMWAction<AxisAction>(MWInput::A_LookLeftRight, "look_left_right", "Look Left Right", deadzone);
createMWAction<AxisAction>(MWInput::A_MoveForwardBackward, "move_forward_backward", "Move Forward Backward", deadzone);
createMWAction<AxisAction>(MWInput::A_MoveLeftRight, "move_left_right", "Move Left Right", deadzone);
createMWAction<ButtonPressAction>(MWInput::A_Journal, "journal_book", "Journal Book");
createMWAction<ButtonPressAction>(MWInput::A_QuickSave, "quick_save", "Quick Save");
createMWAction<ButtonPressAction>(MWInput::A_Rest, "rest", "Rest");
createMWAction<AxisAction>(A_ActivateTouch, "activate_touched", "Activate Touch", deadzone);
createMWAction<ButtonPressAction>(MWInput::A_AlwaysRun, "always_run", "Always Run");
createMWAction<ButtonPressAction>(MWInput::A_AutoMove, "auto_move", "Auto Move");
createMWAction<ButtonPressAction>(MWInput::A_ToggleHUD, "toggle_hud", "Toggle HUD");
createMWAction<ButtonPressAction>(MWInput::A_ToggleDebug, "toggle_debug", "Toggle the debug hud");
createMWAction<AxisAction>(A_MenuUpDown, "menu_up_down", "Menu Up Down", deadzone);
createMWAction<AxisAction>(A_MenuLeftRight, "menu_left_right", "Menu Left Right", deadzone);
createMWAction<ButtonPressAction>(A_MenuSelect, "menu_select", "Menu Select");
createMWAction<ButtonPressAction>(A_MenuBack, "menu_back", "Menu Back");
createPoseAction(TrackedLimb::LEFT_HAND, "left_hand_pose", "Left Hand Pose");
createPoseAction(TrackedLimb::RIGHT_HAND, "right_hand_pose", "Right Hand Pose");
createHapticsAction(TrackedLimb::RIGHT_HAND, "right_hand_haptics", "Right Hand Haptics");
createHapticsAction(TrackedLimb::LEFT_HAND, "left_hand_haptics", "Left Hand Haptics");
};
void
@ -113,6 +48,17 @@ namespace MWVR
mHapticsMap.emplace(limb, new HapticsAction(std::move(createXRAction(XR_ACTION_TYPE_VIBRATION_OUTPUT, actionName, localName))));
}
// Specialization for axis actions: unlike the button action variants, an
// AxisAction is constructed with the action set's shared deadzone attached.
template<>
void
OpenXRActionSet::createMWAction<AxisAction>(
    int openMWAction,
    const std::string& actionName,
    const std::string& localName)
{
    // Qualify both names with this action set's internal/localized name so
    // XR action names stay distinct across action sets.
    auto xrAction = createXRAction(AxisAction::ActionType, mInternalName + "_" + actionName, mLocalizedName + " " + localName);
    // NOTE(review): raw new handed to mActionMap — presumably the map owns
    // the action (smart-pointer mapped type); verify against the declaration.
    mActionMap.emplace(actionName, new AxisAction(openMWAction, std::move(xrAction), mDeadzone));
}
template<typename A>
void
OpenXRActionSet::createMWAction(
@ -124,18 +70,32 @@ namespace MWVR
mActionMap.emplace(actionName, new A(openMWAction, std::move(xrAction)));
}
template<typename A>
void
OpenXRActionSet::createMWAction(
VrControlType controlType,
int openMWAction,
const std::string& actionName,
const std::string& localName,
std::shared_ptr<AxisAction::Deadzone> deadzone)
const std::string& localName)
{
auto xrAction = createXRAction(AxisAction::ActionType, mInternalName + "_" + actionName, mLocalizedName + " " + localName);
mActionMap.emplace(actionName, new AxisAction(openMWAction, std::move(xrAction), deadzone));
switch (controlType)
{
case VrControlType::Press:
return createMWAction<ButtonPressAction>(openMWAction, actionName, localName);
case VrControlType::LongPress:
return createMWAction<ButtonLongPressAction>(openMWAction, actionName, localName);
case VrControlType::Hold:
return createMWAction<ButtonHoldAction>(openMWAction, actionName, localName);
case VrControlType::Axis:
return createMWAction<AxisAction>(openMWAction, actionName, localName);
//case VrControlType::Pose:
// return createMWAction<PoseAction>(openMWAction, actionName, localName);
//case VrControlType::Haptic:
// return createMWAction<HapticsAction>(openMWAction, actionName, localName);
default:
Log(Debug::Warning) << "createMWAction: pose/haptics Not implemented here";
}
}
XrActionSet
@ -156,12 +116,16 @@ namespace MWVR
void OpenXRActionSet::suggestBindings(std::vector<XrActionSuggestedBinding>& xrSuggestedBindings, const SuggestedBindings& mwSuggestedBindings)
{
std::vector<XrActionSuggestedBinding> suggestedBindings =
std::vector<XrActionSuggestedBinding> suggestedBindings;
if (!mTrackerMap.empty())
{
{*mTrackerMap[TrackedLimb::LEFT_HAND], getXrPath("/user/hand/left/input/aim/pose")},
{*mTrackerMap[TrackedLimb::RIGHT_HAND], getXrPath("/user/hand/right/input/aim/pose")},
{*mHapticsMap[TrackedLimb::LEFT_HAND], getXrPath("/user/hand/left/output/haptic")},
{*mHapticsMap[TrackedLimb::RIGHT_HAND], getXrPath("/user/hand/right/output/haptic")},
suggestedBindings.emplace_back(XrActionSuggestedBinding{ *mTrackerMap[TrackedLimb::LEFT_HAND], getXrPath("/user/hand/left/input/aim/pose") });
suggestedBindings.emplace_back(XrActionSuggestedBinding{ *mTrackerMap[TrackedLimb::RIGHT_HAND], getXrPath("/user/hand/right/input/aim/pose") });
}
if(!mHapticsMap.empty())
{
suggestedBindings.emplace_back(XrActionSuggestedBinding{ *mHapticsMap[TrackedLimb::LEFT_HAND], getXrPath("/user/hand/left/output/haptic") });
suggestedBindings.emplace_back(XrActionSuggestedBinding{ *mHapticsMap[TrackedLimb::RIGHT_HAND], getXrPath("/user/hand/right/output/haptic") });
};
for (auto& mwSuggestedBinding : mwSuggestedBindings)
@ -178,6 +142,11 @@ namespace MWVR
xrSuggestedBindings.insert(xrSuggestedBindings.end(), suggestedBindings.begin(), suggestedBindings.end());
}
/// Returns the XrSpace of the pose action tracking the given limb, or
/// XR_NULL_HANDLE if no pose action was created for that limb in this set.
XrSpace OpenXRActionSet::xrActionSpace(TrackedLimb limb)
{
    // Use find() rather than operator[]: the subscript operator would
    // default-construct a null entry for a missing limb and the subsequent
    // dereference would be undefined behaviour.
    auto it = mTrackerMap.find(limb);
    if (it == mTrackerMap.end() || !it->second)
        return XR_NULL_HANDLE;
    return it->second->xrSpace();
}
std::unique_ptr<OpenXRAction>
OpenXRActionSet::createXRAction(
@ -209,6 +178,7 @@ namespace MWVR
syncInfo.activeActionSets = &activeActionSet;
CHECK_XRCMD(xrSyncActions(xr->impl().xrSession(), &syncInfo));
mActionQueue.clear();
for (auto& action : mActionMap)
action.second->updateAndQueue(mActionQueue);
}

@ -31,13 +31,15 @@ namespace MWVR
XrActionSet xrActionSet() { return mActionSet; };
void suggestBindings(std::vector<XrActionSuggestedBinding>& xrSuggestedBindings, const SuggestedBindings& mwSuggestedBindings);
XrSpace xrActionSpace(TrackedLimb limb);
void createMWAction(VrControlType controlType, int openMWAction, const std::string& actionName, const std::string& localName);
void createPoseAction(TrackedLimb limb, const std::string& actionName, const std::string& localName);
void createHapticsAction(TrackedLimb limb, const std::string& actionName, const std::string& localName);
protected:
template<typename A>
void createMWAction(int openMWAction, const std::string& actionName, const std::string& localName);
template<typename A>
void createMWAction(int openMWAction, const std::string& actionName, const std::string& localName, std::shared_ptr<AxisAction::Deadzone> deadzone);
void createPoseAction(TrackedLimb limb, const std::string& actionName, const std::string& localName);
void createHapticsAction(TrackedLimb limb, const std::string& actionName, const std::string& localName);
std::unique_ptr<OpenXRAction> createXRAction(XrActionType actionType, const std::string& actionName, const std::string& localName);
XrPath getXrPath(const std::string& path);
XrActionSet createActionSet(const std::string& name);
@ -49,6 +51,7 @@ namespace MWVR
std::map<TrackedLimb, std::unique_ptr<PoseAction>> mTrackerMap;
std::map<TrackedLimb, std::unique_ptr<HapticsAction>> mHapticsMap;
std::deque<const Action*> mActionQueue{};
std::shared_ptr<AxisAction::Deadzone> mDeadzone;
};
}

@ -18,6 +18,82 @@ namespace MWVR
{
mActionSets.emplace(ActionSet::Gameplay, OpenXRActionSet("Gameplay", deadzone));
mActionSets.emplace(ActionSet::GUI, OpenXRActionSet("GUI", deadzone));
mActionSets.emplace(ActionSet::Tracking, OpenXRActionSet("Tracking", deadzone));
mActionSets.emplace(ActionSet::Haptics, OpenXRActionSet("Haptics", deadzone));
/*
// Applicable actions not (yet) included
A_QuickKey1,
A_QuickKey2,
A_QuickKey3,
A_QuickKey4,
A_QuickKey5,
A_QuickKey6,
A_QuickKey7,
A_QuickKey8,
A_QuickKey9,
A_QuickKey10,
A_QuickKeysMenu,
A_QuickLoad,
A_CycleSpellLeft,
A_CycleSpellRight,
A_CycleWeaponLeft,
A_CycleWeaponRight,
A_Screenshot, // Generate a VR screenshot?
A_Console, // Currently awkward due to a lack of virtual keyboard, but should be included when that's in place
*/
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_GameMenu, "game_menu", "Game Menu");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, A_VrMetaMenu, "meta_menu", "Meta Menu");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::LongPress, A_Recenter, "reposition_menu", "Reposition Menu");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_Inventory, "inventory", "Inventory");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_Activate, "activate", "Activate");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Hold, MWInput::A_Use, "use", "Use");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Hold, MWInput::A_Jump, "jump", "Jump");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_ToggleWeapon, "weapon", "Weapon");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_ToggleSpell, "spell", "Spell");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_CycleSpellLeft, "cycle_spell_left", "Cycle Spell Left");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_CycleSpellRight, "cycle_spell_right", "Cycle Spell Right");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_CycleWeaponLeft, "cycle_weapon_left", "Cycle Weapon Left");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_CycleWeaponRight, "cycle_weapon_right", "Cycle Weapon Right");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Hold, MWInput::A_Sneak, "sneak", "Sneak");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_QuickKeysMenu, "quick_menu", "Quick Menu");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Axis, MWInput::A_LookLeftRight, "look_left_right", "Look Left Right");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Axis, MWInput::A_MoveForwardBackward, "move_forward_backward", "Move Forward Backward");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Axis, MWInput::A_MoveLeftRight, "move_left_right", "Move Left Right");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_Journal, "journal_book", "Journal Book");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_QuickSave, "quick_save", "Quick Save");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_Rest, "rest", "Rest");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Axis, A_ActivateTouch, "activate_touched", "Activate Touch");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_AlwaysRun, "always_run", "Always Run");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_AutoMove, "auto_move", "Auto Move");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_ToggleHUD, "toggle_hud", "Toggle HUD");
getActionSet(ActionSet::Gameplay).createMWAction(VrControlType::Press, MWInput::A_ToggleDebug, "toggle_debug", "Toggle the debug hud");
getActionSet(ActionSet::GUI).createMWAction(VrControlType::Press, MWInput::A_GameMenu, "game_menu", "Game Menu");
getActionSet(ActionSet::GUI).createMWAction(VrControlType::LongPress, A_Recenter, "reposition_menu", "Reposition Menu");
getActionSet(ActionSet::GUI).createMWAction(VrControlType::Axis, A_MenuUpDown, "menu_up_down", "Menu Up Down");
getActionSet(ActionSet::GUI).createMWAction(VrControlType::Axis, A_MenuLeftRight, "menu_left_right", "Menu Left Right");
getActionSet(ActionSet::GUI).createMWAction(VrControlType::Press, A_MenuSelect, "menu_select", "Menu Select");
getActionSet(ActionSet::GUI).createMWAction(VrControlType::Press, A_MenuBack, "menu_back", "Menu Back");
getActionSet(ActionSet::GUI).createMWAction(VrControlType::Hold, MWInput::A_Use, "use", "Use");
getActionSet(ActionSet::Tracking).createPoseAction(TrackedLimb::LEFT_HAND, "left_hand_pose", "Left Hand Pose");
getActionSet(ActionSet::Tracking).createPoseAction(TrackedLimb::RIGHT_HAND, "right_hand_pose", "Right Hand Pose");
getActionSet(ActionSet::Haptics).createHapticsAction(TrackedLimb::RIGHT_HAND, "right_hand_haptics", "Right Hand Haptics");
getActionSet(ActionSet::Haptics).createHapticsAction(TrackedLimb::LEFT_HAND, "left_hand_haptics", "Left Hand Haptics");
auto* xr = Environment::get().getManager();
auto* trackingManager = Environment::get().getTrackingManager();
auto leftHandPath = trackingManager->stringToVRPath("/user/hand/left/input/aim/pose");
auto rightHandPath = trackingManager->stringToVRPath("/user/hand/right/input/aim/pose");
xr->impl().tracker().addTrackingSpace(leftHandPath, getActionSet(ActionSet::Tracking).xrActionSpace(TrackedLimb::LEFT_HAND));
xr->impl().tracker().addTrackingSpace(rightHandPath, getActionSet(ActionSet::Tracking).xrActionSpace(TrackedLimb::RIGHT_HAND));
};
OpenXRActionSet& OpenXRInput::getActionSet(ActionSet actionSet)

@ -3,6 +3,7 @@
#include "openxrplatform.hpp"
#include "openxrswapchain.hpp"
#include "openxrswapchainimpl.hpp"
#include "openxrtypeconversions.hpp"
#include "vrenvironment.hpp"
#include "vrinputmanager.hpp"
@ -64,13 +65,14 @@ namespace MWVR
// TODO: Blend mode
// setupBlendMode();
// Create session
mSession = mPlatform.createXrSession(mInstance, mSystemId);
LogReferenceSpaces();
createReferenceSpaces();
initTracker();
getSystemProperties();
}
@ -84,9 +86,6 @@ namespace MWVR
CHECK_XRCMD(xrCreateReferenceSpace(mSession, &createInfo, &mReferenceSpaceStage));
createInfo.referenceSpaceType = XR_REFERENCE_SPACE_TYPE_LOCAL;
CHECK_XRCMD(xrCreateReferenceSpace(mSession, &createInfo, &mReferenceSpaceLocal));
// Default to using the stage
mReferenceSpace = mReferenceSpaceStage;
}
void OpenXRManagerImpl::getSystem()
@ -290,33 +289,6 @@ namespace MWVR
CHECK_XRCMD(xrBeginFrame(mSession, &frameBeginInfo));
}
// Translates an MWVR projection-view layer into its OpenXR equivalent.
// Delegates sub-image, pose and fov conversion to the respective toXR overloads.
XrCompositionLayerProjectionView toXR(MWVR::CompositionLayerProjectionView layer)
{
    XrCompositionLayerProjectionView converted{};
    converted.type = XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW;
    converted.next = nullptr;
    converted.pose = toXR(layer.pose);
    converted.fov = toXR(layer.fov);
    // Projection views always reference the color swapchain (depthImage = false).
    converted.subImage = toXR(layer.subImage, false);
    return converted;
}
// Maps an MWVR sub-image rectangle onto the matching XR swapchain,
// selecting the depth or color swapchain as requested.
XrSwapchainSubImage toXR(MWVR::SubImage subImage, bool depthImage)
{
    XrSwapchainSubImage converted{};
    auto& swapchainImpl = subImage.swapchain->impl();
    converted.swapchain = depthImage ? swapchainImpl.xrSwapchainDepth() : swapchainImpl.xrSwapchain();
    converted.imageRect.offset.x = subImage.x;
    converted.imageRect.offset.y = subImage.y;
    converted.imageRect.extent.width = subImage.width;
    converted.imageRect.extent.height = subImage.height;
    // Only single-layer images are used here.
    converted.imageArrayIndex = 0;
    return converted;
}
void
OpenXRManagerImpl::endFrame(FrameInfo frameInfo, const std::array<CompositionLayerProjectionView, 2>* layerStack)
{
@ -331,7 +303,7 @@ namespace MWVR
compositionLayerProjectionViews[(int)Side::LEFT_SIDE] = toXR((*layerStack)[(int)Side::LEFT_SIDE]);
compositionLayerProjectionViews[(int)Side::RIGHT_SIDE] = toXR((*layerStack)[(int)Side::RIGHT_SIDE]);
layer.type = XR_TYPE_COMPOSITION_LAYER_PROJECTION;
layer.space = getReferenceSpace();
layer.space = mReferenceSpaceStage;
layer.viewCount = 2;
layer.views = compositionLayerProjectionViews.data();
auto* xrLayerStack = reinterpret_cast<XrCompositionLayerBaseHeader*>(&layer);
@ -368,11 +340,11 @@ namespace MWVR
int64_t predictedDisplayTime,
ReferenceSpace space)
{
if (!mPredictionsEnabled)
{
Log(Debug::Error) << "Prediction out of order";
throw std::logic_error("Prediction out of order");
}
//if (!mPredictionsEnabled)
//{
// Log(Debug::Error) << "Prediction out of order";
// throw std::logic_error("Prediction out of order");
//}
std::array<XrView, 2> xrViews{ {{XR_TYPE_VIEW}, {XR_TYPE_VIEW}} };
XrViewState viewState{ XR_TYPE_VIEW_STATE };
uint32_t viewCount = 2;
@ -605,31 +577,6 @@ namespace MWVR
}
}
// Converts an OpenXR pose to an MWVR pose, converting position and
// orientation with their respective fromXR overloads.
MWVR::Pose fromXR(XrPosef pose)
{
    auto position = fromXR(pose.position);
    auto orientation = fromXR(pose.orientation);
    return MWVR::Pose{ position, orientation };
}
// Converts an MWVR pose to an OpenXR pose.
// Note: XrPosef lists orientation before position.
XrPosef toXR(MWVR::Pose pose)
{
    XrPosef converted;
    converted.orientation = toXR(pose.orientation);
    converted.position = toXR(pose.position);
    return converted;
}
// Converts an OpenXR field of view to the MWVR type.
// The angle members correspond one-to-one; copy them member-wise.
MWVR::FieldOfView fromXR(XrFovf fov)
{
    MWVR::FieldOfView converted;
    converted.angleLeft = fov.angleLeft;
    converted.angleRight = fov.angleRight;
    converted.angleUp = fov.angleUp;
    converted.angleDown = fov.angleDown;
    return converted;
}
// Converts an MWVR field of view to the OpenXR type.
// The angle members correspond one-to-one; copy them member-wise.
XrFovf toXR(MWVR::FieldOfView fov)
{
    XrFovf converted;
    converted.angleLeft = fov.angleLeft;
    converted.angleRight = fov.angleRight;
    converted.angleUp = fov.angleUp;
    converted.angleDown = fov.angleDown;
    return converted;
}
// Returns the reference space currently in use (initialized to the stage
// space when the reference spaces are created).
XrSpace OpenXRManagerImpl::getReferenceSpace()
{
    return mReferenceSpace;
}
bool OpenXRManagerImpl::xrExtensionIsEnabled(const char* extensionName) const
{
return mPlatform.extensionEnabled(extensionName);
@ -681,6 +628,16 @@ namespace MWVR
mPlatform.eraseFormat(format);
}
void OpenXRManagerImpl::initTracker()
{
auto* trackingManager = Environment::get().getTrackingManager();
auto headPath = trackingManager->stringToVRPath("/user/head/input/pose");
mTracker.reset(new OpenXRTracker("pcstage", mReferenceSpaceStage));
mTracker->addTrackingSpace(headPath, mReferenceSpaceView);
mTrackerToWorldBinding.reset(new VRTrackingToWorldBinding("pcworld", mTracker.get(), headPath));
}
void OpenXRManagerImpl::enablePredictions()
{
mPredictionsEnabled = true;
@ -714,25 +671,16 @@ namespace MWVR
};
return config;
}
osg::Vec3 fromXR(XrVector3f v)
{
return osg::Vec3{ v.x, -v.z, v.y };
}
osg::Quat fromXR(XrQuaternionf quat)
XrSpace OpenXRManagerImpl::getReferenceSpace(ReferenceSpace space)
{
return osg::Quat{ quat.x, -quat.z, quat.y, quat.w };
}
XrVector3f toXR(osg::Vec3 v)
switch (space)
{
return XrVector3f{ v.x(), v.z(), -v.y() };
case ReferenceSpace::STAGE:
return mReferenceSpaceStage;
case ReferenceSpace::VIEW:
return mReferenceSpaceView;
}
XrQuaternionf toXR(osg::Quat quat)
{
return XrQuaternionf{ static_cast<float>(quat.x()), static_cast<float>(quat.z()), static_cast<float>(-quat.y()), static_cast<float>(quat.w()) };
return XR_NULL_HANDLE;
}
}

@ -3,6 +3,7 @@
#include "openxrmanager.hpp"
#include "openxrplatform.hpp"
#include "openxrtracker.hpp"
#include "../mwinput/inputmanagerimp.hpp"
#include <components/debug/debuglog.hpp>
@ -20,21 +21,6 @@
namespace MWVR
{
/// Conversion methods from openxr types to osg/mwvr types. Includes managing the differing conventions.
MWVR::Pose fromXR(XrPosef pose);
MWVR::FieldOfView fromXR(XrFovf fov);
osg::Vec3 fromXR(XrVector3f);
osg::Quat fromXR(XrQuaternionf quat);
/// Conversion methods from osg/mwvr types to openxr types. Includes managing the differing conventions.
XrPosef toXR(MWVR::Pose pose);
XrFovf toXR(MWVR::FieldOfView fov);
XrVector3f toXR(osg::Vec3 v);
XrQuaternionf toXR(osg::Quat quat);
XrCompositionLayerProjectionView toXR(MWVR::CompositionLayerProjectionView layer);
XrSwapchainSubImage toXR(MWVR::SubImage, bool depthImage);
/// \brief Implementation of OpenXRManager
class OpenXRManagerImpl
{
@ -56,7 +42,7 @@ namespace MWVR
long long getLastPredictedDisplayTime();
long long getLastPredictedDisplayPeriod();
std::array<SwapchainConfig, 2> getRecommendedSwapchainConfig() const;
XrSpace getReferenceSpace();
XrSpace getReferenceSpace(ReferenceSpace space);
XrSession xrSession() const { return mSession; };
XrInstance xrInstance() const { return mInstance; };
bool xrExtensionIsEnabled(const char* extensionName) const;
@ -67,8 +53,9 @@ namespace MWVR
int64_t selectColorFormat();
int64_t selectDepthFormat();
void eraseFormat(int64_t format);
OpenXRPlatform& platform() { return mPlatform; };
OpenXRPlatform& platform() { return mPlatform; }
OpenXRTracker& tracker() { return *mTracker; }
void initTracker();
protected:
void setupExtensionsAndLayers();
@ -104,13 +91,14 @@ namespace MWVR
XrSpace mReferenceSpaceView = XR_NULL_HANDLE;
XrSpace mReferenceSpaceStage = XR_NULL_HANDLE;
XrSpace mReferenceSpaceLocal = XR_NULL_HANDLE;
XrSpace mReferenceSpace = XR_NULL_HANDLE;
XrFrameState mFrameState{};
XrSessionState mSessionState = XR_SESSION_STATE_UNKNOWN;
XrDebugUtilsMessengerEXT mDebugMessenger{ nullptr };
OpenXRPlatform mPlatform;
std::unique_ptr<OpenXRTracker> mTracker{ nullptr };
std::unique_ptr<VRTrackingToWorldBinding> mTrackerToWorldBinding{ nullptr };
bool mXrSessionShouldStop = false;
bool mAppShouldSyncFrameLoop = false;
bool mAppShouldRender = false;

@ -0,0 +1,141 @@
#include "openxrinput.hpp"
#include "openxrmanagerimpl.hpp"
#include "openxrplatform.hpp"
#include "openxrtracker.hpp"
#include "openxrtypeconversions.hpp"
#include "vrenvironment.hpp"
#include "vrinputmanager.hpp"
#include "vrsession.hpp"
#include <components/misc/constants.hpp>
namespace MWVR
{
// Construct a tracker resolving poses against the given reference space.
// \param name Identifier of this tracking source (forwarded to VRTrackingSource).
// \param referenceSpace Default XrSpace used when a caller does not name an explicit reference.
OpenXRTracker::OpenXRTracker(const std::string& name, XrSpace referenceSpace)
    : VRTrackingSource(name)
    , mReferenceSpace(referenceSpace)
    , mTrackingSpaces()
{
}

OpenXRTracker::~OpenXRTracker()
{
}
//! Register (or replace) the XrSpace backing the given pose path,
//! then tell listeners that the set of available poses has changed.
void OpenXRTracker::addTrackingSpace(VRPath path, XrSpace space)
{
    auto& slot = mTrackingSpaces[path];
    slot = space;
    notifyAvailablePosesChanged();
}
//! Unregister a pose path (no-op when the path is unknown) and notify listeners.
void OpenXRTracker::deleteTrackingSpace(VRPath path)
{
    auto found = mTrackingSpaces.find(path);
    if (found != mTrackingSpaces.end())
        mTrackingSpaces.erase(found);
    // Listeners are notified unconditionally, matching addTrackingSpace().
    notifyAvailablePosesChanged();
}
// Replace the default reference space used by getTrackingPose() when the
// caller passes reference == 0.
void OpenXRTracker::setReferenceSpace(XrSpace referenceSpace)
{
    mReferenceSpace = referenceSpace;
}
std::vector<VRPath> OpenXRTracker::listSupportedTrackingPosePaths() const
{
std::vector<VRPath> path;
for (auto& e : mTrackingSpaces)
path.push_back(e.first);
return path;
}
// Per-frame tracking refresh: syncs the tracking action set, then caches the
// located eye views (both stage- and view-referenced) on the update-phase frame.
void OpenXRTracker::updateTracking(DisplayTime predictedDisplayTime)
{
    // Sync the OpenXR action set dedicated to tracking so pose actions carry fresh data.
    Environment::get().getInputManager()->xrInput().getActionSet(ActionSet::Tracking).updateControls();
    auto* xr = Environment::get().getManager();
    auto* session = Environment::get().getSession();
    // NOTE(review): assumes an update-phase frame exists when this is called — confirm against caller.
    auto& frame = session->getFrame(VRSession::FramePhase::Update);
    // mViews is indexed by the ReferenceSpace enum value.
    frame->mViews[(int)ReferenceSpace::STAGE] = locateViews(predictedDisplayTime, xr->impl().getReferenceSpace(ReferenceSpace::STAGE));
    frame->mViews[(int)ReferenceSpace::VIEW] = locateViews(predictedDisplayTime, xr->impl().getReferenceSpace(ReferenceSpace::VIEW));
}
//! Look up the XrSpace registered for a pose path; null handle when absent.
XrSpace OpenXRTracker::getSpace(VRPath path)
{
    const auto found = mTrackingSpaces.find(path);
    if (found == mTrackingSpaces.end())
        return XR_NULL_HANDLE;
    return found->second;
}
// Resolve the pose at 'path' relative to 'reference' at the given display time.
// reference == 0 selects this tracker's default reference space.
// Status is NotTracked when either path lacks a registered XrSpace.
VRTrackingPose OpenXRTracker::getTrackingPose(DisplayTime predictedDisplayTime, VRPath path, VRPath reference)
{
    VRTrackingPose pose;
    pose.status = TrackingStatus::Good;
    XrSpace space = getSpace(path);
    XrSpace ref = reference == 0 ? mReferenceSpace : getSpace(reference);
    if (space == 0 || ref == 0)
        pose.status = TrackingStatus::NotTracked;
    // NOTE(review): operator!! on TrackingStatus presumably means "usable";
    // locate() is only attempted when both spaces were found — confirm.
    if (!!pose.status)
        locate(pose, space, ref, predictedDisplayTime);
    return pose;
}
// Locate 'space' relative to 'reference' at the given display time, writing
// the result into 'pose'. On runtime failure pose.status becomes
// RuntimeFailure and pose.pose is left untouched; otherwise the located pose
// is always written, with status downgraded to Stale or Lost as appropriate.
void OpenXRTracker::locate(VRTrackingPose& pose, XrSpace space, XrSpace reference, DisplayTime predictedDisplayTime)
{
    XrSpaceLocation location{ XR_TYPE_SPACE_LOCATION };
    // Bug fix: locate against the 'reference' argument rather than
    // unconditionally against mReferenceSpace. getTrackingPose() resolves a
    // caller-provided reference path into this argument, which was previously
    // ignored (the parameter was unused).
    auto res = xrLocateSpace(space, reference, predictedDisplayTime, &location);
    if (XR_FAILED(res))
    {
        // Call failed, exit.
        CHECK_XRRESULT(res, "xrLocateSpace");
        pose.status = TrackingStatus::RuntimeFailure;
        return;
    }
    // Data is stale when neither orientation nor position is actively tracked.
    // NOTE(review): this flags Stale only when BOTH tracked bits are absent;
    // a stricter contract would require both bits to be present.
    if (!(location.locationFlags & (XR_SPACE_LOCATION_ORIENTATION_TRACKED_BIT | XR_SPACE_LOCATION_POSITION_TRACKED_BIT)))
    {
        // It's not, data is stale
        pose.status = TrackingStatus::Stale;
    }
    // Tracking is lost when neither component is even valid. Lost overrides Stale.
    if (!(location.locationFlags & (XR_SPACE_LOCATION_ORIENTATION_VALID_BIT | XR_SPACE_LOCATION_POSITION_VALID_BIT)))
    {
        // It's not, we've lost tracking
        pose.status = TrackingStatus::Lost;
    }
    // Convert into game-world conventions regardless of status; callers gate on status.
    pose.pose = MWVR::Pose{
        fromXR(location.pose.position),
        fromXR(location.pose.orientation)
    };
}
// Locate both stereo eye views in the given reference space at the given
// display time, converting poses and fields of view to engine conventions.
std::array<View, 2> OpenXRTracker::locateViews(DisplayTime predictedDisplayTime, XrSpace reference)
{
    std::array<XrView, 2> xrViews{ {{XR_TYPE_VIEW}, {XR_TYPE_VIEW}} };
    XrViewState viewState{ XR_TYPE_VIEW_STATE };
    uint32_t viewCount = 2;

    XrViewLocateInfo viewLocateInfo{ XR_TYPE_VIEW_LOCATE_INFO };
    viewLocateInfo.viewConfigurationType = XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO;
    viewLocateInfo.displayTime = predictedDisplayTime;
    viewLocateInfo.space = reference;
    auto* xr = Environment::get().getManager();
    // NOTE(review): viewState.viewStateFlags is not inspected; invalid view data
    // would go unnoticed here — consider checking it.
    CHECK_XRCMD(xrLocateViews(xr->impl().xrSession(), &viewLocateInfo, &viewState, viewCount, &viewCount, xrViews.data()));

    std::array<View, 2> vrViews{};
    vrViews[(int)Side::LEFT_SIDE].pose = fromXR(xrViews[(int)Side::LEFT_SIDE].pose);
    vrViews[(int)Side::RIGHT_SIDE].pose = fromXR(xrViews[(int)Side::RIGHT_SIDE].pose);
    vrViews[(int)Side::LEFT_SIDE].fov = fromXR(xrViews[(int)Side::LEFT_SIDE].fov);
    vrViews[(int)Side::RIGHT_SIDE].fov = fromXR(xrViews[(int)Side::RIGHT_SIDE].fov);
    return vrViews;
}
// Default state comes from the in-class member initializers declared in the header.
OpenXRTrackingToWorldBinding::OpenXRTrackingToWorldBinding()
{
}
}

@ -0,0 +1,85 @@
#ifndef OPENXR_TRACKER_HPP
#define OPENXR_TRACKER_HPP
#include <openxr/openxr.h>
#include "vrtracking.hpp"
#include <map>
namespace MWVR
{
//! Serves as a C++ wrapper of openxr spaces, but also bridges stage coordinates and game coordinates.
//! Supports the compulsory sets of paths.
class OpenXRTracker : public VRTrackingSource
{
public:
    //! \param name Identifier of this tracking source.
    //! \param referenceSpace Default XrSpace poses are resolved against.
    OpenXRTracker(const std::string& name, XrSpace referenceSpace);
    ~OpenXRTracker();

    //! Register (or replace) the XrSpace backing a pose path. Notifies listeners.
    void addTrackingSpace(VRPath path, XrSpace space);
    //! Unregister a pose path. Notifies listeners.
    void deleteTrackingSpace(VRPath path);
    //! The base space used to reference everything else.
    void setReferenceSpace(XrSpace referenceSpace);
    //! Resolve 'path' relative to 'reference'; reference == 0 selects the default reference space.
    VRTrackingPose getTrackingPose(DisplayTime predictedDisplayTime, VRPath path, VRPath reference = 0) override;
    //! All paths that currently have a registered XrSpace.
    std::vector<VRPath> listSupportedTrackingPosePaths() const override;
    //! Per-frame update: syncs tracking actions and caches located eye views.
    void updateTracking(DisplayTime predictedDisplayTime) override;

private:
    //! Locate both stereo views in 'reference'.
    std::array<View, 2> locateViews(DisplayTime predictedDisplayTime, XrSpace reference);
    //! Locate 'space' relative to 'reference', writing status and pose into 'pose'.
    void locate(VRTrackingPose& pose, XrSpace space, XrSpace reference, DisplayTime predictedDisplayTime);
    //! XrSpace registered for the path, or 0 if none.
    XrSpace getSpace(VRPath);
    XrSpace mReferenceSpace;
    std::map<VRPath, XrSpace> mTrackingSpaces;
};
//! Ties a tracked pose to the game world.
//! A movement tracking pose is selected by passing its path to the constructor.
//! All poses are transformed in the horizontal plane by moving the x,y origin to the position of the movement tracking pose, and then reoriented using the current orientation.
//! The movement tracking pose is effectively always at the x,y origin
//! The movement of the movement tracking pose is accumulated and can be read using the movement() call.
//! If this movement is ever consumed (such as by moving the character to follow the player) the consumed movement must be reported using consumeMovement().
class OpenXRTrackingToWorldBinding
{
public:
    OpenXRTrackingToWorldBinding();

    //! Re-orient the stage.
    //! \param yaw New yaw angle (radians). \param adjust When true, apply as a delta rather than an absolute value.
    void setOrientation(float yaw, bool adjust);
    osg::Quat getOrientation() const { return mOrientation; }

    //! Eye level (game units) used for seated-play height adjustment.
    void setEyeLevel(float eyeLevel) { mEyeLevel = eyeLevel; }
    float getEyeLevel() const { return mEyeLevel; }

    void setSeatedPlay(bool seatedPlay) { mSeatedPlay = seatedPlay; }
    bool getSeatedPlay() const { return mSeatedPlay; }

    //! The player's movement within the VR stage. This accumulates until the movement has been consumed by calling consumeMovement()
    osg::Vec3 movement() const;

    //! Consume movement
    void consumeMovement(const osg::Vec3& movement);

    //! Recenter tracking by consuming all movement.
    //! \param resetZ When true, vertical offset is also reset.
    void recenter(bool resetZ);

    //! Feed the latest movement-tracking pose; accumulates movement deltas.
    void update(Pose movementTrackingPose);

    //! Transforms a stage-referenced pose to be world-aligned.
    //! \note Unlike VRTrackingSource::getTrackingPose() this does not take a reference path, as re-alignment is only needed when fetching a stage-referenced pose.
    void alignPose(Pose& pose);

private:
    bool mSeatedPlay = false;
    bool mHasTrackingData = false;
    float mEyeLevel = 0;
    Pose mLastPose = Pose();
    osg::Vec3 mMovement = osg::Vec3(0,0,0);
    osg::Quat mOrientation = osg::Quat(0,0,0,1);
};
}
#endif

@ -0,0 +1,76 @@
#include "openxrtypeconversions.hpp"
#include "openxrswapchain.hpp"
#include "openxrswapchainimpl.hpp"
#include <iostream>
namespace MWVR
{
// OpenXR uses a right-handed, +Y-up, -Z-forward frame; the engine/osg frame
// here is +Z-up, +Y-forward. Mapping: (x, y, z) -> (x, -z, y).
osg::Vec3 fromXR(XrVector3f v)
{
    return osg::Vec3{ v.x, -v.z, v.y };
}
// Same axis permutation as the vector overload applied to the quaternion's
// vector part; w is unchanged (the permutation is a proper rotation, det = +1).
osg::Quat fromXR(XrQuaternionf quat)
{
    return osg::Quat{ quat.x, -quat.z, quat.y, quat.w };
}
// Inverse of fromXR(XrVector3f): engine (x, y, z) -> OpenXR (x, z, -y).
XrVector3f toXR(osg::Vec3 v)
{
    return XrVector3f{ v.x(), v.z(), -v.y() };
}
// Inverse of fromXR(XrQuaternionf). osg::Quat stores doubles; components are
// narrowed explicitly to the float fields of XrQuaternionf.
XrQuaternionf toXR(osg::Quat quat)
{
    return XrQuaternionf{ static_cast<float>(quat.x()), static_cast<float>(quat.z()), static_cast<float>(-quat.y()), static_cast<float>(quat.w()) };
}
// Convert an OpenXR pose to engine conventions, converting both components.
MWVR::Pose fromXR(XrPosef pose)
{
    MWVR::Pose result;
    result.position = fromXR(pose.position);
    result.orientation = fromXR(pose.orientation);
    return result;
}
// Convert an engine pose to OpenXR conventions, converting both components.
XrPosef toXR(MWVR::Pose pose)
{
    XrPosef result;
    result.orientation = toXR(pose.orientation);
    result.position = toXR(pose.position);
    return result;
}
// Field-of-view angles need no axis-convention change; this is a
// member-for-member copy (left, right, up, down, in radians).
MWVR::FieldOfView fromXR(XrFovf fov)
{
    return MWVR::FieldOfView{ fov.angleLeft, fov.angleRight, fov.angleUp, fov.angleDown };
}
// Inverse of fromXR(XrFovf); member-for-member copy, no convention change.
XrFovf toXR(MWVR::FieldOfView fov)
{
    return XrFovf{ fov.angleLeft, fov.angleRight, fov.angleUp, fov.angleDown };
}
// Build the OpenXR projection-view layer struct from the engine-side layer.
// Color sub-image only (depth is attached elsewhere), hence depthImage = false.
XrCompositionLayerProjectionView toXR(MWVR::CompositionLayerProjectionView layer)
{
    XrCompositionLayerProjectionView xrLayer{};
    xrLayer.type = XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW;
    xrLayer.next = nullptr;
    xrLayer.pose = toXR(layer.pose);
    xrLayer.fov = toXR(layer.fov);
    xrLayer.subImage = toXR(layer.subImage, false);
    return xrLayer;
}
// Build an XrSwapchainSubImage from the engine-side SubImage, selecting the
// depth or color swapchain handle as requested. Array index is always 0.
XrSwapchainSubImage toXR(MWVR::SubImage subImage, bool depthImage)
{
    XrSwapchainSubImage xrSubImage{};
    auto& swapchainImpl = subImage.swapchain->impl();
    xrSubImage.swapchain = depthImage ? swapchainImpl.xrSwapchainDepth() : swapchainImpl.xrSwapchain();
    xrSubImage.imageRect.offset.x = subImage.x;
    xrSubImage.imageRect.offset.y = subImage.y;
    xrSubImage.imageRect.extent.width = subImage.width;
    xrSubImage.imageRect.extent.height = subImage.height;
    xrSubImage.imageArrayIndex = 0;
    return xrSubImage;
}
}

@ -0,0 +1,25 @@
#ifndef MWVR_OPENXRTYPECONVERSIONS_H
#define MWVR_OPENXRTYPECONVERSIONS_H

#include <openxr/openxr.h>
#include "vrtypes.hpp"
#include <osg/Vec3>
#include <osg/Quat>

namespace MWVR
{
    /// Conversion methods between openxr types to osg/mwvr types. Includes managing the differing conventions.
    /// OpenXR is right-handed, +Y-up, -Z-forward; the engine frame is +Z-up, +Y-forward.
    Pose fromXR(XrPosef pose);
    FieldOfView fromXR(XrFovf fov);
    osg::Vec3 fromXR(XrVector3f);
    osg::Quat fromXR(XrQuaternionf quat);
    XrPosef toXR(Pose pose);
    XrFovf toXR(FieldOfView fov);
    XrVector3f toXR(osg::Vec3 v);
    XrQuaternionf toXR(osg::Quat quat);
    //! Color sub-image is converted with depthImage = false internally.
    XrCompositionLayerProjectionView toXR(CompositionLayerProjectionView layer);
    //! \param depthImage When true, references the depth swapchain instead of the color swapchain.
    XrSwapchainSubImage toXR(SubImage, bool depthImage);
}

#endif

@ -46,13 +46,20 @@ namespace MWVR {
}
}
StateMachine::StateMachine(MWWorld::Ptr ptr)
StateMachine::StateMachine(MWWorld::Ptr ptr, VRPath trackingPath)
: mPtr(ptr)
, mMinVelocity(Settings::Manager::getFloat("realistic combat minimum swing velocity", "VR"))
, mMaxVelocity(Settings::Manager::getFloat("realistic combat maximum swing velocity", "VR"))
, mTrackingPath(trackingPath)
{
Log(Debug::Verbose) << "realistic combat minimum swing velocity: " << mMinVelocity;
Log(Debug::Verbose) << "realistic combat maximum swing velocity: " << mMaxVelocity;
Environment::get().getTrackingManager()->bind(this, "pcstage");
}
// Cache the latest pose for the configured tracking path; consumed by update().
void StateMachine::onTrackingUpdated(VRTrackingSource& source, DisplayTime predictedDisplayTime)
{
    mTrackingInput = source.getTrackingPose(predictedDisplayTime, mTrackingPath);
}
bool StateMachine::canSwing()
@ -118,13 +125,12 @@ namespace MWVR {
void StateMachine::update(float dt, bool enabled)
{
auto* session = Environment::get().getSession();
auto* world = MWBase::Environment::get().getWorld();
auto& predictedPoses = session->predictedPoses(VRSession::FramePhase::Update);
auto& handPose = predictedPoses.hands[(int)MWVR::Side::RIGHT_SIDE];
auto& handPose = mTrackingInput.pose;
auto weaponType = world->getActiveWeaponType();
enabled = enabled && isMeleeWeapon(weaponType);
enabled = enabled && !!mTrackingInput.status;
if (mEnabled != enabled)
{

@ -9,6 +9,7 @@
#include "vrenvironment.hpp"
#include "vrsession.hpp"
#include "vrtracking.hpp"
namespace MWVR {
namespace RealisticCombat {
@ -47,14 +48,16 @@ namespace MWVR {
/// Cooldown -> Ready: When the minimum period has passed since entering Cooldown state
///
///
struct StateMachine
struct StateMachine : public VRTrackingListener
{
public:
StateMachine(MWWorld::Ptr ptr);
StateMachine(MWWorld::Ptr ptr, VRPath trackingPath);
void update(float dt, bool enabled);
MWWorld::Ptr ptr() { return mPtr; }
protected:
void onTrackingUpdated(VRTrackingSource& source, DisplayTime predictedDisplayTime) override;
bool canSwing();
void playSwish();
@ -62,7 +65,6 @@ namespace MWVR {
void transition(SwingState newState);
void update_cooldownState();
void transition_cooldownToReady();
@ -103,6 +105,8 @@ namespace MWVR {
bool mEnabled = false;
osg::Vec3 mPreviousPosition{ 0.f,0.f,0.f };
VRTrackingPose mTrackingInput = VRTrackingPose();
VRPath mTrackingPath = 0;
};
}

@ -18,6 +18,7 @@
#include <components/sceneutil/actorutil.hpp>
#include <components/sceneutil/positionattitudetransform.hpp>
#include <components/sceneutil/shadow.hpp>
#include <components/sceneutil/skeleton.hpp>
#include <components/resource/resourcesystem.hpp>
#include <components/resource/scenemanager.hpp>
@ -55,98 +56,6 @@ namespace MWVR
return false;
}
/// Implements VR control of the forearm, to control mesh/bone deformation of the hand.
class ForearmController : public osg::NodeCallback
{
public:
ForearmController() = default;
void setEnabled(bool enabled) { mEnabled = enabled; };
void operator()(osg::Node* node, osg::NodeVisitor* nv);
private:
bool mEnabled{ true };
};
void ForearmController::operator()(osg::Node* node, osg::NodeVisitor* nv)
{
if (!mEnabled)
{
traverse(node, nv);
return;
}
osg::MatrixTransform* transform = static_cast<osg::MatrixTransform*>(node);
auto* camera = MWBase::Environment::get().getWorld()->getRenderingManager().getCamera();
auto* session = Environment::get().getSession();
int side = (int)Side::RIGHT_SIDE;
if (node->getName().find_first_of("L") != std::string::npos)
{
side = (int)Side::LEFT_SIDE;
// We base ourselves on the world position of the camera
// Therefore we have to make sure the camera is updated for this frame first.
camera->updateCamera();
}
MWVR::Pose handStage = session->predictedPoses(VRSession::FramePhase::Update).hands[side];
MWVR::Pose headStage = session->predictedPoses(VRSession::FramePhase::Update).head;
auto orientation = handStage.orientation;
auto position = handStage.position - headStage.position;
position = position * Constants::UnitsPerMeter;
// Align orientation with the game world
auto stageRotation = reinterpret_cast<MWVR::VRCamera*>(MWBase::Environment::get().getWorld()->getRenderingManager().getCamera())->stageRotation();
position = stageRotation * position;
orientation = orientation * stageRotation;
// Add camera offset
osg::Vec3 viewPosition;
osg::Vec3 center; // dummy
osg::Vec3 up; // dummy
auto viewMatrix = camera->getOsgCamera()->getViewMatrix();
viewMatrix.getLookAt(viewPosition, center, up, 1.0);
position += viewPosition;
// Morrowind's meshes do not point forward by default.
// Declare the offsets static since they do not need to be recomputed.
static float VRbias = osg::DegreesToRadians(-90.f);
static osg::Quat yaw(-VRbias, osg::Vec3f(0, 0, 1));
static osg::Quat pitch(2.f * VRbias, osg::Vec3f(0, 1, 0));
static osg::Quat roll(2 * VRbias, osg::Vec3f(1, 0, 0));
orientation = yaw * orientation;
if (side == (int)Side::LEFT_SIDE)
orientation = roll * orientation;
// Undo the wrist translate
auto* hand = transform->getChild(0);
auto handMatrix = hand->asTransform()->asMatrixTransform()->getMatrix();
position -= orientation * handMatrix.getTrans();
// Center hand mesh on tracking
// This is just an estimate from trial and error, any suggestion for improving this is welcome
position -= orientation * osg::Vec3{ 15,0,0 };
// Get current world transform of limb
osg::Matrix worldToLimb = osg::computeLocalToWorld(node->getParentalNodePaths()[0]);
// Get current world of the reference node
osg::Matrix worldReference = osg::Matrix::identity();
// New transform based on tracking.
worldReference.preMultTranslate(position);
worldReference.preMultRotate(orientation);
// Finally, set transform
transform->setMatrix(worldReference * osg::Matrix::inverse(worldToLimb) * transform->getMatrix());
// Omit nested callbacks to override animations of this node
osg::ref_ptr<osg::Callback> ncb = getNestedCallback();
setNestedCallback(nullptr);
traverse(node, nv);
setNestedCallback(ncb);
}
/// Implements control of a finger by overriding rotation
class FingerController : public osg::NodeCallback
{
@ -182,13 +91,6 @@ namespace MWVR
setNestedCallback(nullptr);
traverse(node, nv);
setNestedCallback(ncb);
// Recompute pointer target
auto* anim = MWVR::Environment::get().getPlayerAnimation();
if (anim && node->getName() == "Bip01 R Finger1")
{
anim->updatePointerTarget();
}
}
/// Implements control of a finger by overriding rotation
@ -362,6 +264,63 @@ namespace MWVR
traverse(node, nv);
}
//! Drives a limb's MatrixTransform from a tracked pose path.
//! Does nothing until setTransform() has attached a node.
class TrackingController : public VRTrackingListener
{
public:
    //! \param trackingPath Pose path to follow.
    //! \param baseOffset Model-space offset applied to center the mesh on the tracked point.
    //! \param baseOrientation Correction for meshes that do not point forward by default.
    TrackingController(VRPath trackingPath, osg::Vec3 baseOffset, osg::Quat baseOrientation)
        : mTrackingPath(trackingPath)
        , mTransform(nullptr)
        , mBaseOffset(baseOffset)
        , mBaseOrientation(baseOrientation)
    {
    }

    void onTrackingUpdated(VRTrackingSource& source, DisplayTime predictedDisplayTime) override
    {
        if (!mTransform)
            return;

        auto tp = source.getTrackingPose(predictedDisplayTime, mTrackingPath, 0);
        if (!tp.status)
            return;

        auto orientation = mBaseOrientation * tp.pose.orientation;

        // Undo the wrist translate
        // TODO: I'm sure this could be a lot less hacky
        // But i'll defer that to whenever we get inverse kinematics so i can track the hand directly.
        auto* hand = mTransform->getChild(0);
        auto handMatrix = hand->asTransform()->asMatrixTransform()->getMatrix();
        auto position = tp.pose.position - (orientation * handMatrix.getTrans());

        // Center hand mesh on tracking
        // This is just an estimate from trial and error, any suggestion for improving this is welcome
        position -= orientation * mBaseOffset;

        // World-to-local matrix of the limb's parent path (inverse of the limb's world transform).
        osg::Matrix worldToLimb = osg::computeWorldToLocal(mTransform->getParentalNodePaths()[0]);
        // Get current world of the reference node
        osg::Matrix worldReference = osg::Matrix::identity();
        // New transform based on tracking.
        worldReference.preMultTranslate(position);
        worldReference.preMultRotate(orientation);
        // Finally, set transform
        mTransform->setMatrix(worldReference * worldToLimb * mTransform->getMatrix());
    }

    //! Attach (or replace) the transform node this controller drives.
    void setTransform(osg::MatrixTransform* transform)
    {
        mTransform = transform;
    }

    VRPath mTrackingPath;
    osg::ref_ptr<osg::MatrixTransform> mTransform;
    osg::Vec3 mBaseOffset;
    osg::Quat mBaseOrientation;
};
VRAnimation::VRAnimation(
const MWWorld::Ptr& ptr, osg::ref_ptr<osg::Group> parentNode, Resource::ResourceSystem* resourceSystem,
bool disableSounds, std::shared_ptr<VRSession> xrSession)
@ -377,7 +336,6 @@ namespace MWVR
for (int i = 0; i < 2; i++)
{
mIndexFingerControllers[i] = new FingerController;
mForearmControllers[i] = new ForearmController;
mHandControllers[i] = new HandController;
}
@ -403,6 +361,32 @@ namespace MWVR
mWeaponPointerTransform->setName("Weapon Pointer");
mWeaponPointerTransform->setUpdateCallback(new WeaponPointerController);
//mWeaponDirectionTransform->addChild(mWeaponPointerTransform);
auto vrTrackingManager = MWVR::Environment::get().getTrackingManager();
vrTrackingManager->bind(this, "pcworld");
auto* source = static_cast<VRTrackingToWorldBinding*>(vrTrackingManager->getSource("pcworld"));
source->setOriginNode(mObjectRoot->getParent(0));
// Morrowind's meshes do not point forward by default and need re-positioning and orientation.
float VRbias = osg::DegreesToRadians(-90.f);
osg::Quat yaw(-VRbias, osg::Vec3f(0, 0, 1));
osg::Quat roll(2 * VRbias, osg::Vec3f(1, 0, 0));
osg::Vec3 offset{ 15,0,0 };
auto* tm = Environment::get().getTrackingManager();
// Note that these controllers could be bound directly to source in the tracking manager.
// Instead we store them and update them manually to ensure order of operations.
{
auto path = tm->stringToVRPath("/user/hand/right/input/aim/pose");
auto orientation = yaw;
mVrControllers.emplace("bip01 r forearm", std::make_unique<TrackingController>(path, offset, orientation));
}
{
auto path = tm->stringToVRPath("/user/hand/left/input/aim/pose");
auto orientation = roll * yaw;
mVrControllers.emplace("bip01 l forearm", std::make_unique<TrackingController>(path, offset, orientation));
}
}
VRAnimation::~VRAnimation() {};
@ -580,6 +564,7 @@ namespace MWVR
stateset->setAttributeAndModes(material, osg::StateAttribute::ON);
mResourceSystem->getSceneManager()->recreateShaders(geometry);
mSkeleton->setIsTracked(true);
return geometry;
}
@ -589,6 +574,17 @@ namespace MWVR
return 0.0f;
}
// Forward the tracking update to each limb controller, invalidate skeleton
// bone matrices so the deformation is recomputed, and refresh the pointer ray.
void VRAnimation::onTrackingUpdated(VRTrackingSource& source, DisplayTime predictedDisplayTime)
{
    for (auto& controller : mVrControllers)
        controller.second->onTrackingUpdated(source, predictedDisplayTime);
    if (mSkeleton)
        mSkeleton->markBoneMatriceDirty();
    updatePointerTarget();
}
osg::Vec3f VRAnimation::runAnimation(float timepassed)
{
return NpcAnimation::runAnimation(timepassed);
@ -603,9 +599,11 @@ namespace MWVR
auto forearm = mNodeMap.find(i == 0 ? "bip01 l forearm" : "bip01 r forearm");
if (forearm != mNodeMap.end())
{
auto node = forearm->second;
node->removeUpdateCallback(mForearmControllers[i]);
node->addUpdateCallback(mForearmControllers[i]);
auto controller = mVrControllers.find(forearm->first);
if (controller != mVrControllers.end())
{
controller->second->setTransform(forearm->second);
}
}
auto hand = mNodeMap.find(i == 0 ? "bip01 l hand" : "bip01 r hand");
@ -629,15 +627,7 @@ namespace MWVR
finger->second->removeChild(mPointerTransform);
finger->second->addChild(mPointerTransform);
}
auto parent = mObjectRoot->getParent(0);
if (parent->getName() == "Player Root")
{
auto group = parent->asGroup();
group->removeChildren(0, parent->getNumChildren());
group->addChild(mModelOffset);
mModelOffset->addChild(mObjectRoot);
}
mSkeleton->setIsTracked(true);
}
void VRAnimation::enableHeadAnimation(bool)
{

@ -5,16 +5,17 @@
#include "../mwrender/renderingmanager.hpp"
#include "openxrmanager.hpp"
#include "vrsession.hpp"
#include "vrtracking.hpp"
namespace MWVR
{
class HandController;
class FingerController;
class ForearmController;
class TrackingController;
/// Subclassing NpcAnimation to implement VR related behaviour
class VRAnimation : public MWRender::NpcAnimation
class VRAnimation : public MWRender::NpcAnimation, public VRTrackingListener
{
protected:
virtual void addControllers();
@ -74,9 +75,11 @@ namespace MWVR
float getVelocity(const std::string& groupname) const override;
void onTrackingUpdated(VRTrackingSource& source, DisplayTime predictedDisplayTime) override;
protected:
std::shared_ptr<VRSession> mSession;
osg::ref_ptr<ForearmController> mForearmControllers[2];
std::map<std::string, std::unique_ptr<TrackingController> > mVrControllers;
osg::ref_ptr<HandController> mHandControllers[2];
osg::ref_ptr<FingerController> mIndexFingerControllers[2];
osg::ref_ptr<osg::MatrixTransform> mModelOffset;

@ -24,13 +24,12 @@ namespace MWVR
VRCamera::VRCamera(osg::Camera* camera)
: MWRender::Camera(camera)
, mRoll(0.f)
{
mVanityAllowed = false;
mFirstPersonView = true;
auto* vrGuiManager = MWVR::Environment::get().getGUIManager();
vrGuiManager->setCamera(camera);
auto vrTrackingManager = MWVR::Environment::get().getTrackingManager();
vrTrackingManager->bind(this, "pcworld");
}
VRCamera::~VRCamera()
@ -49,24 +48,19 @@ namespace MWVR
// Move position of head to center of character
// Z should not be affected
mHeadOffset.x() = 0;
mHeadOffset.y() = 0;
auto* session = Environment::get().getSession();
if (mShouldResetZ)
{
if (session->seatedPlay())
{
// Adjust offset to place the current pose roughly at eye level
mHeadOffset.z() = session->eyeLevel() * Constants::UnitsPerMeter;
}
else
{
mHeadOffset.z() = mHeadPose.position.z();
}
mShouldResetZ = false;
}
auto* tm = Environment::get().getTrackingManager();
auto* ws = static_cast<VRTrackingToWorldBinding*>(tm->getSource("pcworld"));
ws->setSeatedPlay(session->seatedPlay());
ws->setEyeLevel(session->eyeLevel() * Constants::UnitsPerMeter);
ws->recenter(mShouldResetZ);
mShouldRecenter = false;
Log(Debug::Verbose) << "Recentered";
}
@ -78,13 +72,10 @@ namespace MWVR
auto& player = world->getPlayer();
auto playerPtr = player.getPlayer();
osg::Quat orientation;
getOrientation(orientation);
float yaw = 0.f;
float pitch = 0.f;
float roll = 0.f;
getEulerAngles(orientation, yaw, pitch, roll);
getEulerAngles(mHeadPose.orientation, yaw, pitch, roll);
if (!player.isDisabled() && mTrackingNode)
{
@ -92,31 +83,21 @@ namespace MWVR
}
}
void VRCamera::updateTracking()
void VRCamera::onTrackingUpdated(VRTrackingSource& source, DisplayTime predictedDisplayTime)
{
auto* session = Environment::get().getSession();
auto& frameMeta = session->getFrame(VRSession::FramePhase::Update);
// Only update tracking if rendering.
// OpenXR does not provide tracking information while not rendering.
if (frameMeta && frameMeta->mShouldRender)
{
auto currentHeadPose = frameMeta->mPredictedPoses.head;
currentHeadPose.position *= Constants::UnitsPerMeter;
osg::Vec3 vrMovement = currentHeadPose.position - mHeadPose.position;
mHeadPose = currentHeadPose;
mHeadOffset += stageRotation() * vrMovement;
mHasTrackingData = true;
}
}
auto path = Environment::get().getTrackingManager()->stringToVRPath("/user/head/input/pose");
auto tp = source.getTrackingPose(predictedDisplayTime, path);
void VRCamera::updateCamera(osg::Camera* cam)
if (!!tp.status)
{
updateTracking();
mHeadPose = tp.pose;
mHasTrackingData = true;
}
if (mShouldRecenter)
{
recenter();
Camera::updateCamera(cam);
Camera::updateCamera(mCamera);
auto* vrGuiManager = MWVR::Environment::get().getGUIManager();
vrGuiManager->updateTracking();
}
@ -125,8 +106,13 @@ namespace MWVR
if (mShouldTrackPlayerCharacter && !MWBase::Environment::get().getWindowManager()->isGuiMode())
applyTracking();
Camera::updateCamera(cam);
Camera::updateCamera(mCamera);
}
}
void VRCamera::updateCamera(osg::Camera* cam)
{
// The regular update call should do nothing while tracking the player
}
void VRCamera::updateCamera()
@ -145,23 +131,11 @@ namespace MWVR
{
pitch += getPitch();
yaw += getYaw();
roll += getRoll();
}
setYaw(yaw);
setPitch(pitch);
setRoll(roll);
}
void VRCamera::setRoll(float angle)
{
if (angle > osg::PI) {
angle -= osg::PI * 2;
}
else if (angle < -osg::PI) {
angle += osg::PI * 2;
}
mRoll = angle;
}
void VRCamera::toggleViewMode(bool force)
{
mFirstPersonView = true;
@ -178,12 +152,11 @@ namespace MWVR
}
void VRCamera::getPosition(osg::Vec3d& focal, osg::Vec3d& camera) const
{
Camera::getPosition(focal, camera);
camera += mHeadOffset;
camera = focal = mHeadPose.position;
}
void VRCamera::getOrientation(osg::Quat& orientation) const
{
orientation = mHeadPose.orientation * osg::Quat(-mYawOffset, osg::Vec3d(0, 0, 1));
orientation = mHeadPose.orientation;
}
void VRCamera::processViewChange()
@ -197,10 +170,9 @@ namespace MWVR
mHeightScale = 1.f;
}
void VRCamera::rotateCameraToTrackingPtr()
void VRCamera::instantTransition()
{
Camera::rotateCameraToTrackingPtr();
setRoll(-mTrackingPtr.getRefData().getPosition().rot[1] - mDeferredRotation.y());
Camera::instantTransition();
// When the cell changes, openmw rotates the character.
// To make sure the player faces the same direction regardless of current orientation,
@ -209,12 +181,24 @@ namespace MWVR
float pitch = 0.f;
float roll = 0.f;
getEulerAngles(mHeadPose.orientation, yaw, pitch, roll);
mYawOffset = -mYaw - yaw;
yaw = - mYaw - yaw;
auto* tm = Environment::get().getTrackingManager();
auto* ws = static_cast<VRTrackingToWorldBinding*>(tm->getSource("pcworld"));
ws->setWorldOrientation(yaw, true);
}
void VRCamera::rotateStage(float yaw)
{
auto* tm = Environment::get().getTrackingManager();
auto* ws = static_cast<VRTrackingToWorldBinding*>(tm->getSource("pcworld"));
ws->setWorldOrientation(yaw, true);
}
osg::Quat VRCamera::stageRotation()
{
return osg::Quat(mYawOffset, osg::Vec3(0, 0, -1));
auto* tm = Environment::get().getTrackingManager();
auto* ws = static_cast<VRTrackingToWorldBinding*>(tm->getSource("pcworld"));
return ws->getWorldOrientation();
}
void VRCamera::requestRecenter(bool resetZ)

@ -9,13 +9,14 @@
#include <osg/Quat>
#include "../mwrender/camera.hpp"
#include "openxrtracker.hpp"
#include "vrtypes.hpp"
namespace MWVR
{
/// \brief VR camera control
class VRCamera : public MWRender::Camera
class VRCamera : public MWRender::Camera, public VRTrackingListener
{
public:
@ -35,9 +36,6 @@ namespace MWVR
/// \param rot Rotation angles in radians
void rotateCamera(float pitch, float roll, float yaw, bool adjust) override;
float getRoll() const { return mRoll; }
void setRoll(float angle);
void toggleViewMode(bool force = false) override;
bool toggleVanityMode(bool enable) override;
@ -51,33 +49,27 @@ namespace MWVR
void processViewChange() override;
void rotateCameraToTrackingPtr() override;
void instantTransition() override;
osg::Quat stageRotation();
void rotateStage(float yaw) { mYawOffset += yaw; }
void rotateStage(float yaw);
void requestRecenter(bool resetZ);
const osg::Vec3& headOffset() const { return mHeadOffset; }
void setHeadOffset(const osg::Vec3& headOffset) { mHeadOffset = headOffset; }
void setShouldTrackPlayerCharacter(bool track);
protected:
void recenter();
void applyTracking();
void updateTracking();
void onTrackingUpdated(VRTrackingSource& source, DisplayTime predictedDisplayTime) override;
private:
float mRoll = 0.f;
Pose mHeadPose{};
osg::Vec3 mHeadOffset{ 0,0,0 };
bool mShouldRecenter{ true };
bool mShouldResetZ{ true };
bool mHasTrackingData{ false };
float mYawOffset{ 0.f };
bool mShouldTrackPlayerCharacter{ false };
};
}

@ -105,3 +105,13 @@ void MWVR::Environment::setManager(MWVR::OpenXRManager* xrManager)
{
mOpenXRManager = xrManager;
}
// Accessor for the global tracking manager; may be null before setTrackingManager() is called.
MWVR::VRTrackingManager* MWVR::Environment::getTrackingManager() const
{
    return mTrackingManager;
}

// Install the global tracking manager. Ownership is not taken; the pointer is stored as-is.
void MWVR::Environment::setTrackingManager(MWVR::VRTrackingManager* trackingManager)
{
    mTrackingManager = trackingManager;
}

@ -4,9 +4,10 @@
namespace MWVR
{
class VRAnimation;
class VRGUIManager;
class VRInputManager;
class VRSession;
class VRGUIManager;
class VRTrackingManager;
class VRViewer;
class OpenXRManager;
@ -60,12 +61,16 @@ namespace MWVR
MWVR::OpenXRManager* getManager() const;
void setManager(MWVR::OpenXRManager* xrManager);
MWVR::VRTrackingManager* getTrackingManager() const;
void setTrackingManager(MWVR::VRTrackingManager* xrManager);
private:
MWVR::VRSession* mSession{ nullptr };
MWVR::VRGUIManager* mGUIManager{ nullptr };
MWVR::VRAnimation* mPlayerAnimation{ nullptr };
MWVR::VRViewer* mViewer{ nullptr };
MWVR::OpenXRManager* mOpenXRManager{ nullptr };
MWVR::VRTrackingManager* mTrackingManager{ nullptr };
};
}

@ -138,24 +138,24 @@ namespace MWVR
};
class LayerUpdateCallback : public osg::Callback
{
public:
LayerUpdateCallback(VRGUILayer* layer)
: mLayer(layer)
{
//class LayerUpdateCallback : public osg::Callback
//{
//public:
// LayerUpdateCallback(VRGUILayer* layer)
// : mLayer(layer)
// {
}
// }
bool run(osg::Object* object, osg::Object* data)
{
mLayer->update();
return traverse(object, data);
}
// bool run(osg::Object* object, osg::Object* data)
// {
// mLayer->update();
// return traverse(object, data);
// }
private:
VRGUILayer* mLayer;
};
//private:
// VRGUILayer* mLayer;
//};
VRGUILayer::VRGUILayer(
osg::ref_ptr<osg::Group> geometryRoot,
@ -235,7 +235,11 @@ namespace MWVR
mConfig.offset.y() -= 0.001f * static_cast<float>(mConfig.priority);
}
mTransform->addUpdateCallback(new LayerUpdateCallback(this));
//mTransform->addUpdateCallback(new LayerUpdateCallback(this));
auto* tm = Environment::get().getTrackingManager();
mTrackingPath = tm->stringToVRPath(mConfig.trackingPath);
tm->bind(this, "uisource");
}
VRGUILayer::~VRGUILayer()
@ -261,38 +265,16 @@ namespace MWVR
updatePose();
}
void VRGUILayer::updateTracking(const Pose& headPose)
{
if (mConfig.trackingMode == TrackingMode::Menu)
{
mTrackedPose = headPose;
}
else
void VRGUILayer::onTrackingUpdated(VRTrackingSource& source, DisplayTime predictedDisplayTime)
{
auto* anim = MWVR::Environment::get().getPlayerAnimation();
if (anim)
{
const osg::Node* hand = nullptr;
if (mConfig.trackingMode == TrackingMode::HudLeftHand)
hand = anim->getNode("bip01 l hand");
else
hand = anim->getNode("bip01 r hand");
if (hand)
auto tp = source.getTrackingPose(predictedDisplayTime, mTrackingPath);
if (!!tp.status)
{
auto world = osg::computeLocalToWorld(hand->getParentalNodePaths()[0]);
mTrackedPose.position = world.getTrans();
mTrackedPose.orientation = world.getRotate();
if (mConfig.trackingMode == TrackingMode::HudRightHand)
{
mTrackedPose.orientation = osg::Quat(osg::PI, osg::Vec3(1, 0, 0)) * mTrackedPose.orientation;
mTrackedPose.orientation = osg::Quat(osg::PI_2, osg::Vec3(0, 0, 1)) * mTrackedPose.orientation;
}
mTrackedPose.orientation = osg::Quat(osg::PI, osg::Vec3(1, 0, 0)) * mTrackedPose.orientation;
}
}
mTrackedPose = tp.pose;
updatePose();
}
updatePose();
update();
}
void VRGUILayer::updatePose()
@ -300,15 +282,11 @@ namespace MWVR
auto orientation = mRotation * mTrackedPose.orientation;
if (mConfig.trackingMode == TrackingMode::Menu)
if(mLayerName == "StatusHUD" || mLayerName == "VirtualKeyboard")
{
// Force menu layers to be vertical
auto axis = osg::Z_AXIS;
osg::Quat vertical;
auto local = orientation * axis;
vertical.makeRotate(local, axis);
orientation = orientation * vertical;
orientation = osg::Quat(osg::PI_2, osg::Vec3(0, 0, 1)) * orientation;
}
// Orient the offset and move the layer
auto position = mTrackedPose.position + orientation * mConfig.offset * Constants::UnitsPerMeter;
@ -353,13 +331,6 @@ namespace MWVR
void VRGUILayer::update()
{
auto xr = MWVR::Environment::get().getManager();
if (!xr)
return;
if (mConfig.trackingMode != TrackingMode::Menu || !xr->appShouldRender())
updateTracking();
if (mConfig.sideBySide)
{
// The side-by-side windows are also the resizable windows.
@ -469,7 +440,7 @@ namespace MWVR
osg::Vec2i(2048,2048), // Texture resolution
osg::Vec2(1,1),
sizingMode,
TrackingMode::Menu,
"/ui/input/stationary/pose",
extraLayers
};
}
@ -487,8 +458,8 @@ namespace MWVR
return config;
};
static osg::Vec3 gLeftHudOffsetTop = osg::Vec3(0.025f, -.05f, .066f);
static osg::Vec3 gLeftHudOffsetWrist = osg::Vec3(0.025f, -.090f, -.033f);
static osg::Vec3 gLeftHudOffsetTop = osg::Vec3(-0.200f, -.05f, .066f);
static osg::Vec3 gLeftHudOffsetWrist = osg::Vec3(-0.200f, -.090f, -.033f);
void VRGUIManager::setGeometryRoot(osg::Group* root)
{
@ -512,6 +483,7 @@ namespace MWVR
, mResourceSystem(resourceSystem)
, mGeometriesRootNode(rootNode)
, mGUICamerasRootNode(rootNode)
, mUiTracking(new VRGUITracking("pcworld"))
{
mGeometries->setName("VR GUI Geometry Root");
mGeometries->setUpdateCallback(new VRGUIManagerUpdateCallback(this));
@ -576,7 +548,7 @@ namespace MWVR
osg::Vec2i(2048,2048), // Texture resolution
osg::Vec2(1,1),
SizingMode::Auto,
TrackingMode::HudLeftHand,
"/user/hand/left/input/aim/pose",
""
};
LayerConfig statusHUDConfig = LayerConfig
@ -591,7 +563,7 @@ namespace MWVR
osg::Vec2i(1024,1024),
defaultConfig.myGUIViewSize,
SizingMode::Auto,
TrackingMode::HudLeftHand,
"/user/hand/left/input/aim/pose",
""
};
@ -600,14 +572,14 @@ namespace MWVR
0,
false, // side-by-side
osg::Vec4{0.f,0.f,0.f,0.f}, // background
osg::Vec3(-0.025f,.025f,.066f), // offset (meters)
osg::Vec3(-0.025f,-.200f,.066f), // offset (meters)
osg::Vec2(0.f,0.5f), // center (model space)
osg::Vec2(.1f, .1f), // extent (meters)
1024, // resolution (pixels per meter)
osg::Vec2i(2048,2048),
defaultConfig.myGUIViewSize,
SizingMode::Auto,
TrackingMode::HudRightHand,
"/user/hand/right/input/aim/pose",
""
};
@ -659,8 +631,10 @@ namespace MWVR
float low = -span / 2;
for (unsigned i = 0; i < mSideBySideLayers.size(); i++)
{
mSideBySideLayers[i]->setAngle(low + static_cast<float>(i) * sSideBySideAzimuthInterval);
}
}
void VRGUIManager::insertLayer(const std::string& name)
{
@ -693,9 +667,6 @@ namespace MWVR
updateSideBySideLayers();
}
if (config.trackingMode == TrackingMode::Menu)
layer->updateTracking(mHeadPose);
Resource::SceneManager* sceneManager = mResourceSystem->getSceneManager();
sceneManager->recreateShaders(layer->mGeometry);
}
@ -754,7 +725,6 @@ namespace MWVR
auto it = mLayers.find(name);
if (it == mLayers.end())
{
//Log(Debug::Warning) << "Tried to remove widget from nonexistent layer " << name;
return;
}
@ -783,45 +753,9 @@ namespace MWVR
removeWidget(widget);
}
void VRGUIManager::setCamera(osg::Camera* camera)
void VRGUIManager::updateTracking()
{
mCamera = camera;
mShouldUpdatePoses = true;
}
void VRGUIManager::updateTracking(void)
{
// Get head pose by reading the camera view matrix to place the GUI in the world.
osg::Vec3 eye{};
osg::Vec3 center{};
osg::Vec3 up{};
Pose headPose{};
osg::ref_ptr<osg::Camera> camera;
mCamera.lock(camera);
if (!camera)
{
// If a camera is not available, use VR stage poses directly.
auto pose = MWVR::Environment::get().getSession()->predictedPoses(MWVR::VRSession::FramePhase::Update).head;
osg::Vec3 position = pose.position * Constants::UnitsPerMeter;
osg::Quat orientation = pose.orientation;
headPose.position = position;
headPose.orientation = orientation;
}
else
{
auto viewMatrix = camera->getViewMatrix();
viewMatrix.getLookAt(eye, center, up);
headPose.position = eye;
headPose.orientation = viewMatrix.getRotate();
headPose.orientation = headPose.orientation.inverse();
}
mHeadPose = headPose;
for (auto& layer : mLayers)
layer.second->updateTracking(mHeadPose);
mUiTracking->resetStationaryPose();
}
bool VRGUIManager::updateFocus()
@ -851,19 +785,9 @@ namespace MWVR
{
auto xr = MWVR::Environment::get().getManager();
if (xr)
{
if (xr->appShouldRender())
{
if (mShouldUpdatePoses)
{
mShouldUpdatePoses = false;
if (!xr->appShouldRender())
updateTracking();
}
}
else
mShouldUpdatePoses = true;
}
}
void VRGUIManager::setFocusLayer(VRGUILayer* layer)
{
@ -1044,4 +968,55 @@ namespace MWVR
}
VRGUITracking::VRGUITracking(const std::string& source)
    : VRTrackingSource("uisource")
{
    // Resolve the source all non-stationary queries forward to, and
    // pre-intern the two paths this class reads (head) and serves (stationary).
    auto* manager = Environment::get().getTrackingManager();
    mSource = manager->getSource(source);
    mHeadPath = manager->stringToVRPath("/user/head/input/pose");
    mStationaryPath = manager->stringToVRPath("/ui/input/stationary/pose");
}
VRTrackingPose VRGUITracking::getTrackingPose(DisplayTime predictedDisplayTime, VRPath path, VRPath reference)
{
    // Serve the cached stationary pose ourselves; forward everything else.
    // Note: the reference space is ignored for the stationary path.
    if (path != mStationaryPath)
        return mSource->getTrackingPose(predictedDisplayTime, path, reference);
    return mStationaryPose;
}
std::vector<VRPath> VRGUITracking::listSupportedTrackingPosePaths() const
{
    // Everything the wrapped source supports, plus our stationary path.
    auto supported = mSource->listSupportedTrackingPosePaths();
    supported.push_back(mStationaryPath);
    return supported;
}
void VRGUITracking::updateTracking(DisplayTime predictedDisplayTime)
{
    // Propagate pose-availability changes from the wrapped source.
    if (mSource->availablePosesChanged())
        notifyAvailablePosesChanged();

    // Only re-capture the stationary pose when a reset has been requested.
    if (!mShouldUpdateStationaryPose)
        return;

    auto headPose = mSource->getTrackingPose(predictedDisplayTime, mHeadPath);
    if (!headPose.status)
        return; // head untracked this frame; retry next update

    mShouldUpdateStationaryPose = false;
    mStationaryPose = headPose;

    // Stationary UI elements should always be vertical: rotate the captured
    // orientation so its transformed up-axis maps back onto world Z.
    auto axis = osg::Z_AXIS;
    osg::Quat vertical;
    auto local = mStationaryPose.pose.orientation * axis;
    vertical.makeRotate(local, axis);
    mStationaryPose.pose.orientation = mStationaryPose.pose.orientation * vertical;
}
void VRGUITracking::resetStationaryPose()
{
// Defer the actual re-capture to the next updateTracking() call, when a
// fresh head pose for the current frame is available.
mShouldUpdateStationaryPose = true;
}
}

@ -12,8 +12,8 @@
#include <osg/Camera>
#include <osg/PositionAttitudeTransform>
#include "vrview.hpp"
#include "openxrmanager.hpp"
#include "vrtracking.hpp"
namespace MyGUI
{
@ -38,13 +38,6 @@ namespace MWVR
class GUICamera;
class VRGUIManager;
enum class TrackingMode
{
Menu, //!< Menu quads with fixed position based on head tracking.
HudLeftHand, //!< Hud quads tracking the left hand every frame
HudRightHand, //!< Hud quads tracking the right hand every frame
};
// Some UI elements should occupy predefined geometries
// Others should grow/shrink freely
enum class SizingMode
@ -66,18 +59,39 @@ namespace MWVR
osg::Vec2i pixelResolution; //!< Pixel resolution of the RTT texture
osg::Vec2 myGUIViewSize; //!< Resizable elements are resized to this (fraction of full view)
SizingMode sizingMode; //!< How to size the layer
TrackingMode trackingMode; //!< Tracking mode
std::string trackingPath; //!< The path that will be used to read tracking data
std::string extraLayers; //!< Additional layers to draw (list separated by any non-alphabetic)
bool operator<(const LayerConfig& rhs) const { return priority < rhs.priority; }
};
//! Extends the tracking source with /ui/input/stationary/pose
//! \note reference space will be ignored when reading /ui/input/stationary/pose
class VRGUITracking : public VRTrackingSource
{
public:
//! Wrap the tracking source named \a source, forwarding all pose queries
//! to it while additionally serving /ui/input/stationary/pose.
VRGUITracking(const std::string& source);

//! Returns the cached stationary pose for the stationary path (the
//! reference argument is ignored in that case); any other path is
//! forwarded to the wrapped source.
virtual VRTrackingPose getTrackingPose(DisplayTime predictedDisplayTime, VRPath path, VRPath reference = 0) override;

//! The wrapped source's paths plus the stationary pose path.
virtual std::vector<VRPath> listSupportedTrackingPosePaths() const override;

//! Re-captures the stationary pose from the head pose when a reset is pending.
virtual void updateTracking(DisplayTime predictedDisplayTime) override;

//! Request that the stationary pose be re-captured on the next update.
void resetStationaryPose();

private:
// Interned path id of /ui/input/stationary/pose (0 = unresolved).
VRPath mStationaryPath = 0;
// Interned path id of /user/head/input/pose (0 = unresolved).
VRPath mHeadPath = 0;
// Last captured, verticalized head pose served for the stationary path.
VRTrackingPose mStationaryPose = VRTrackingPose();
// Non-owning; the source all other queries are forwarded to.
VRTrackingSource* mSource = nullptr;
// True while a stationary-pose capture is pending.
bool mShouldUpdateStationaryPose = true;
};
/// \brief A single VR GUI Quad.
///
/// In VR menus are shown as quads within the game world.
/// The behaviour of that quad is defined by the MWVR::LayerConfig struct
/// Each instance of VRGUILayer is used to show one MYGUI layer.
class VRGUILayer
class VRGUILayer : public VRTrackingListener
{
public:
VRGUILayer(
@ -95,7 +109,6 @@ namespace MWVR
osg::Camera* camera();
osg::ref_ptr<osg::Texture2D> menuTexture();
void setAngle(float angle);
void updateTracking(const Pose& headPose = {});
void updatePose();
void updateRect();
void insertWidget(MWGui::Layout* widget);
@ -103,7 +116,13 @@ namespace MWVR
int widgetCount() { return mWidgets.size(); }
bool operator<(const VRGUILayer& rhs) const { return mConfig.priority < rhs.mConfig.priority; }
/// Update layer quads based on current tracking information
void onTrackingUpdated(VRTrackingSource& source, DisplayTime predictedDisplayTime) override;
public:
VRPath mTrackingPath = 0;
Pose mTrackingPose = Pose();
Pose mTrackedPose{};
LayerConfig mConfig;
std::string mLayerName;
@ -132,7 +151,7 @@ namespace MWVR
/// Constructs and destructs VRGUILayer objects in response to MWGui::Layout::setVisible calls.
/// Layers can also be made visible directly by calling insertLayer() directly, e.g. to show
/// the video player.
class VRGUIManager
class VRGUIManager : public VRTrackingListener
{
public:
VRGUIManager(
@ -151,18 +170,15 @@ namespace MWVR
/// Remove the given layer quad
void removeLayer(const std::string& name);
/// Update layer quads based on current camera
void updateTracking(void);
/// Set camera on which to base tracking
void setCamera(osg::Camera* camera);
/// Check current pointer target and update focus layer
bool updateFocus();
/// Update traversal
void update();
/// Update traversal
void updateTracking();
/// Gui cursor coordinates to use to simulate a mouse press/move if the player is currently pointing at a vr gui layer
osg::Vec2i guiCursor() { return mGuiCursor; };
@ -199,15 +215,14 @@ namespace MWVR
osg::ref_ptr<osg::Group> mGUICamerasRootNode{ nullptr };
osg::ref_ptr<osg::Group> mGUICameras{ new osg::Group };
std::unique_ptr<VRGUITracking> mUiTracking = nullptr;
std::map<std::string, std::shared_ptr<VRGUILayer>> mLayers;
std::vector<std::shared_ptr<VRGUILayer> > mSideBySideLayers;
bool mShouldUpdatePoses{ true };
Pose mHeadPose{};
osg::Vec2i mGuiCursor{};
VRGUILayer* mFocusLayer{ nullptr };
MyGUI::Widget* mFocusWidget{ nullptr };
osg::observer_ptr<osg::Camera> mCamera{ nullptr };
std::map<std::string, LayerConfig> mLayerConfigs{};
};
}

@ -2,6 +2,7 @@
#include "openxrdebug.hpp"
#include "vrenvironment.hpp"
#include "openxrmanagerimpl.hpp"
#include "openxrtypeconversions.hpp"
#include <vector>
#include <deque>
@ -42,7 +43,7 @@ namespace MWVR
mPrevious = mValue;
auto* xr = Environment::get().getManager();
XrSpace referenceSpace = xr->impl().getReferenceSpace();
XrSpace referenceSpace = xr->impl().getReferenceSpace(ReferenceSpace::STAGE);
XrSpaceLocation location{ XR_TYPE_SPACE_LOCATION };
XrSpaceVelocity velocity{ XR_TYPE_SPACE_VELOCITY };

@ -26,6 +26,16 @@ namespace MWVR
A_VrLast
};
// Kinds of control an input binding can refer to (press/hold gestures,
// tracked poses, haptic outputs, analog axes).
// NOTE(review): per-value semantics inferred from the names only — confirm
// against the binding code that consumes this enum.
enum class VrControlType
{
Press,
LongPress,
Hold,
Pose,
Haptic,
Axis
};
/// \brief Suggest a binding by binding an action to a path on a given hand (left or right).
struct SuggestedBinding
{
@ -40,6 +50,8 @@ namespace MWVR
{
GUI = 0,
Gameplay = 1,
Tracking = 2,
Haptics = 3,
};
/// \brief Action for applying haptics
@ -77,6 +89,9 @@ namespace MWVR
//! Update pose value
void update(long long time);
//! Action space
XrSpace xrSpace() { return mXRSpace; }
private:
std::unique_ptr<OpenXRAction> mXRAction;
XrSpace mXRSpace;

@ -45,7 +45,7 @@ namespace MWVR
Pose VRInputManager::getLimbPose(int64_t time, TrackedLimb limb)
{
return activeActionSet().getLimbPose(time, limb);
return mXRInput->getActionSet(ActionSet::Tracking).getLimbPose(time, limb);
}
OpenXRActionSet& VRInputManager::activeActionSet()
@ -151,7 +151,6 @@ namespace MWVR
}
else
{
auto* world = MWBase::Environment::get().getWorld();
MWWorld::Player& player = world->getPlayer();
player.activate(ptr);
}
@ -182,13 +181,13 @@ namespace MWVR
void VRInputManager::applyHapticsLeftHand(float intensity)
{
if (mHapticsEnabled)
activeActionSet().applyHaptics(TrackedLimb::LEFT_HAND, intensity);
mXRInput->getActionSet(ActionSet::Haptics).applyHaptics(TrackedLimb::LEFT_HAND, intensity);
}
void VRInputManager::applyHapticsRightHand(float intensity)
{
if (mHapticsEnabled)
activeActionSet().applyHaptics(TrackedLimb::RIGHT_HAND, intensity);
mXRInput->getActionSet(ActionSet::Haptics).applyHaptics(TrackedLimb::RIGHT_HAND, intensity);
}
void VRInputManager::processChangedSettings(const std::set<std::pair<std::string, std::string>>& changed)
@ -265,6 +264,8 @@ namespace MWVR
readInteractionProfileActionSet(actionSetGameplay, ActionSet::Gameplay, interactionProfilePath);
readInteractionProfileActionSet(actionSetGUI, ActionSet::GUI, interactionProfilePath);
mXRInput->suggestBindings(ActionSet::Tracking, interactionProfilePath, {});
mXRInput->suggestBindings(ActionSet::Haptics, interactionProfilePath, {});
}
void VRInputManager::readInteractionProfileActionSet(TiXmlElement* element, ActionSet actionSet, std::string interactionProfilePath)
@ -415,8 +416,6 @@ namespace MWVR
if (!session)
return;
session->beginFrame();
// The rest of this code assumes the game is running
if (MWBase::Environment::get().getStateManager()->getState() == MWBase::StateManager::State_NoGame)
return;
@ -434,7 +433,10 @@ namespace MWVR
auto& player = world->getPlayer();
auto playerPtr = world->getPlayerPtr();
if (!mRealisticCombat || mRealisticCombat->ptr() != playerPtr)
mRealisticCombat.reset(new RealisticCombat::StateMachine(playerPtr));
{
auto trackingPath = Environment::get().getTrackingManager()->stringToVRPath("/user/hand/right/input/aim/pose");
mRealisticCombat.reset(new RealisticCombat::StateMachine(playerPtr, trackingPath));
}
bool enabled = !guiMode && player.getDrawState() == MWMechanics::DrawState_Weapon && !player.isDisabled();
mRealisticCombat->update(dt, enabled);
}

@ -57,6 +57,9 @@ namespace MWVR
/// Notify input manager that the active interaction profile has changed
void notifyInteractionProfileChanged();
/// OpenXR input interface
OpenXRInput& xrInput() { return *mXRInput; }
protected:
void processAction(const class Action* action, float dt, bool disableControls);

@ -33,25 +33,8 @@
namespace MWVR
{
static void swapConvention(osg::Vec3& v3)
{
float y = v3.y();
float z = v3.z();
v3.y() = z;
v3.z() = -y;
}
static void swapConvention(osg::Quat& q)
{
float y = q.y();
float z = q.z();
q.y() = z;
q.z() = -y;
}
VRSession::VRSession()
{
mHandDirectedMovement = Settings::Manager::getBool("hand directed movement", "VR");
mSeatedPlay = Settings::Manager::getBool("seated play", "VR");
}
@ -59,23 +42,10 @@ namespace MWVR
{
}
osg::Matrix VRSession::projectionMatrix(FramePhase phase, Side side)
{
assert(((int)side) < 2);
auto fov = predictedPoses(VRSession::FramePhase::Update).view[(int)side].fov;
float near_ = Settings::Manager::getFloat("near clip", "Camera");
float far_ = Settings::Manager::getFloat("viewing distance", "Camera");
return fov.perspectiveMatrix(near_, far_);
}
void VRSession::processChangedSettings(const std::set<std::pair<std::string, std::string>>& changed)
{
for (Settings::CategorySettingVector::const_iterator it = changed.begin(); it != changed.end(); ++it)
{
mHandDirectedMovement = Settings::Manager::getBool("hand directed movement", "VR");
setSeatedPlay(Settings::Manager::getBool("seated play", "VR"));
}
}
void VRSession::beginFrame()
{
@ -89,7 +59,7 @@ namespace MWVR
void VRSession::endFrame()
{
// Make sure we don't continue until the render thread has moved the frame to its next phase.
// Make sure we don't continue until the render thread has moved the current update frame to its next phase.
std::unique_lock<std::mutex> lock(mMutex);
while (getFrame(FramePhase::Update))
{
@ -106,59 +76,21 @@ namespace MWVR
}
}
osg::Matrix VRSession::viewMatrix(osg::Vec3 position, osg::Quat orientation)
{
position = position * Constants::UnitsPerMeter;
swapConvention(position);
swapConvention(orientation);
osg::Matrix viewMatrix;
viewMatrix.setTrans(-position);
viewMatrix.postMultRotate(orientation.conj());
return viewMatrix;
}
osg::Matrix VRSession::viewMatrix(FramePhase phase, Side side, bool offset, bool glConvention)
{
if (offset)
{
MWVR::Pose pose = predictedPoses(phase).view[(int)side].pose;
auto position = pose.position * Constants::UnitsPerMeter;
auto orientation = pose.orientation;
if (glConvention)
static void swapConvention(osg::Vec3& v3)
{
swapConvention(position);
swapConvention(orientation);
}
osg::Matrix viewMatrix;
viewMatrix.setTrans(-position);
viewMatrix.postMultRotate(orientation.conj());
return viewMatrix;
float y = v3.y();
float z = v3.z();
v3.y() = z;
v3.z() = -y;
}
else
{
MWVR::Pose pose = predictedPoses(phase).eye[(int)side];
osg::Vec3 position = pose.position * Constants::UnitsPerMeter;
osg::Quat orientation = pose.orientation;
osg::Vec3 forward = orientation * osg::Vec3(0, 1, 0);
osg::Vec3 up = orientation * osg::Vec3(0, 0, 1);
if (glConvention)
static void swapConvention(osg::Quat& q)
{
swapConvention(position);
swapConvention(orientation);
swapConvention(forward);
swapConvention(up);
}
osg::Matrix viewMatrix;
viewMatrix.makeLookAt(position, position + forward, up);
return viewMatrix;
}
float y = q.y();
float z = q.z();
q.y() = z;
q.z() = -y;
}
void VRSession::swapBuffers(osg::GraphicsContext* gc, VRViewer& viewer)
@ -169,21 +101,20 @@ namespace MWVR
if (frameMeta->mShouldSyncFrameLoop)
{
gc->swapBuffersImplementation();
if (frameMeta->mShouldRender)
{
gc->swapBuffersImplementation();
std::array<CompositionLayerProjectionView, 2> layerStack{};
layerStack[(int)Side::LEFT_SIDE].subImage = viewer.subImage(Side::LEFT_SIDE);
layerStack[(int)Side::RIGHT_SIDE].subImage = viewer.subImage(Side::RIGHT_SIDE);
layerStack[(int)Side::LEFT_SIDE].pose = frameMeta->mPredictedPoses.eye[(int)Side::LEFT_SIDE] / mPlayerScale;
layerStack[(int)Side::RIGHT_SIDE].pose = frameMeta->mPredictedPoses.eye[(int)Side::RIGHT_SIDE] / mPlayerScale;
layerStack[(int)Side::LEFT_SIDE].fov = frameMeta->mPredictedPoses.view[(int)Side::LEFT_SIDE].fov;
layerStack[(int)Side::RIGHT_SIDE].fov = frameMeta->mPredictedPoses.view[(int)Side::RIGHT_SIDE].fov;
layerStack[(int)Side::LEFT_SIDE].pose = frameMeta->mViews[(int)ReferenceSpace::STAGE][(int)Side::LEFT_SIDE].pose;
layerStack[(int)Side::RIGHT_SIDE].pose = frameMeta->mViews[(int)ReferenceSpace::STAGE][(int)Side::RIGHT_SIDE].pose;
layerStack[(int)Side::LEFT_SIDE].fov = frameMeta->mViews[(int)ReferenceSpace::STAGE][(int)Side::LEFT_SIDE].fov;
layerStack[(int)Side::RIGHT_SIDE].fov = frameMeta->mViews[(int)ReferenceSpace::STAGE][(int)Side::RIGHT_SIDE].fov;
xr->endFrame(frameMeta->mFrameInfo, &layerStack);
}
else
{
gc->swapBuffersImplementation();
xr->endFrame(frameMeta->mFrameInfo, nullptr);
}
@ -256,50 +187,9 @@ namespace MWVR
frame->mFrameInfo = xr->waitFrame();
frame->mShouldRender = frame->mFrameInfo.runtimeRequestsRender;
xr->xrResourceAcquired();
if (frame->mShouldRender)
{
frame->mPredictedDisplayTime = frame->mFrameInfo.runtimePredictedDisplayTime;
PoseSet predictedPoses{};
xr->enablePredictions();
predictedPoses.head = xr->getPredictedHeadPose(frame->mPredictedDisplayTime, ReferenceSpace::STAGE) * mPlayerScale;
auto hmdViews = xr->getPredictedViews(frame->mPredictedDisplayTime, ReferenceSpace::VIEW);
predictedPoses.view[(int)Side::LEFT_SIDE].pose = hmdViews[(int)Side::LEFT_SIDE].pose * mPlayerScale * Constants::UnitsPerMeter;
predictedPoses.view[(int)Side::RIGHT_SIDE].pose = hmdViews[(int)Side::RIGHT_SIDE].pose * mPlayerScale * Constants::UnitsPerMeter;
predictedPoses.view[(int)Side::LEFT_SIDE].fov = hmdViews[(int)Side::LEFT_SIDE].fov;
predictedPoses.view[(int)Side::RIGHT_SIDE].fov = hmdViews[(int)Side::RIGHT_SIDE].fov;
auto stageViews = xr->getPredictedViews(frame->mPredictedDisplayTime, ReferenceSpace::STAGE);
predictedPoses.eye[(int)Side::LEFT_SIDE] = stageViews[(int)Side::LEFT_SIDE].pose * mPlayerScale;
predictedPoses.eye[(int)Side::RIGHT_SIDE] = stageViews[(int)Side::RIGHT_SIDE].pose * mPlayerScale;
auto* input = Environment::get().getInputManager();
if (input)
{
predictedPoses.hands[(int)Side::LEFT_SIDE] = input->getLimbPose(frame->mPredictedDisplayTime, TrackedLimb::LEFT_HAND) * mPlayerScale;
predictedPoses.hands[(int)Side::RIGHT_SIDE] = input->getLimbPose(frame->mPredictedDisplayTime, TrackedLimb::RIGHT_HAND) * mPlayerScale;
}
xr->disablePredictions();
frame->mPredictedPoses = predictedPoses;
}
}
}
const PoseSet& VRSession::predictedPoses(FramePhase phase)
{
auto& frame = getFrame(phase);
// TODO: Manage execution order properly instead of this hack
if (phase == FramePhase::Update && !frame)
beginPhase(FramePhase::Update);
if (!frame)
throw std::logic_error("Attempted to get poses from a phase with no current pose");
return frame->mPredictedPoses;
}
// OSG doesn't provide API to extract euler angles from a quat, but i need it.
// Credits goes to Dennis Bunfield, i just copied his formula https://narkive.com/v0re6547.4
void getEulerAngles(const osg::Quat& quat, float& yaw, float& pitch, float& roll)
@ -337,30 +227,5 @@ namespace MWVR
roll = angle_y;
}
void VRSession::movementAngles(float& yaw, float& pitch)
{
if (!getFrame(FramePhase::Update))
return;
if (mHandDirectedMovement)
{
auto frameMeta = getFrame(FramePhase::Update).get();
float headYaw = 0.f;
float headPitch = 0.f;
float headsWillRoll = 0.f;
float handYaw = 0.f;
float handPitch = 0.f;
float handRoll = 0.f;
getEulerAngles(frameMeta->mPredictedPoses.head.orientation, headYaw, headPitch, headsWillRoll);
getEulerAngles(frameMeta->mPredictedPoses.hands[(int)Side::LEFT_SIDE].orientation, handYaw, handPitch, handRoll);
yaw = handYaw - headYaw;
pitch = handPitch - headPitch;
}
}
}

@ -42,9 +42,9 @@ namespace MWVR
struct VRFrameMeta
{
long long mFrameNo{ 0 };
long long mPredictedDisplayTime{ 0 };
PoseSet mPredictedPoses{};
DisplayTime mFrameNo{ 0 };
std::array<View, 2> mViews[2]{};
bool mShouldRender{ false };
bool mShouldSyncFrameLoop{ false };
FrameInfo mFrameInfo{};
@ -56,14 +56,9 @@ namespace MWVR
void swapBuffers(osg::GraphicsContext* gc, VRViewer& viewer);
const PoseSet& predictedPoses(FramePhase phase);
//! Starts a new frame
void prepareFrame();
//! Angles to be used for overriding movement direction
void movementAngles(float& yaw, float& pitch);
void beginPhase(FramePhase phase);
std::unique_ptr<VRFrameMeta>& getFrame(FramePhase phase);
bool seatedPlay() const { return mSeatedPlay; }
@ -74,10 +69,6 @@ namespace MWVR
float eyeLevel() const { return mEyeLevel; }
void setEyeLevel(float eyeLevel) { mEyeLevel = eyeLevel; }
osg::Matrix viewMatrix(osg::Vec3 position, osg::Quat orientation);
osg::Matrix viewMatrix(FramePhase phase, Side side, bool offset, bool glConvention);
osg::Matrix projectionMatrix(FramePhase phase, Side side);
std::array<std::unique_ptr<VRFrameMeta>, (int)FramePhase::NumPhases> mFrame{ nullptr };
void processChangedSettings(const std::set< std::pair<std::string, std::string> >& changed);
@ -92,7 +83,6 @@ namespace MWVR
std::mutex mMutex{};
std::condition_variable mCondition{};
bool mHandDirectedMovement{ false };
bool mSeatedPlay{ false };
long long mFrames{ 0 };
long long mLastRenderedFrame{ 0 };

@ -1,76 +0,0 @@
#include "vrenvironment.hpp"
#include "vrsession.hpp"
#include "vrshadow.hpp"
#include "../mwrender/vismask.hpp"
#include <components/sceneutil/mwshadowtechnique.hpp>
#include <cassert>
namespace MWVR
{
// Set up a master/slave pair of shared shadow map configs under the common
// id "VR": the master camera computes the shadow map, slaves reuse it.
VrShadow::VrShadow()
: mMasterConfig(new SharedShadowMapConfig)
, mSlaveConfig(new SharedShadowMapConfig)
{
mMasterConfig->_id = "VR";
mMasterConfig->_master = true;
mSlaveConfig->_id = "VR";
mSlaveConfig->_master = false;
}
// Attach the shared shadow map config to the camera as user data.
// NOTE(review): presumably read back by MWShadowTechnique to pair master and
// slave cameras via the shared id — confirm in mwshadowtechnique.
void VrShadow::configureShadowsForCamera(osg::Camera* camera, bool master)
{
if(master)
camera->setUserData(mMasterConfig);
else
camera->setUserData(mSlaveConfig);
}
// Recompute the master shadow camera's view and projection so one shadow map
// can serve both eyes: a virtual viewpoint P behind the eyes with a frustum
// that encloses both per-eye frusta.
void VrShadow::updateShadowConfig(osg::View& view)
{
auto* session = Environment::get().getSession();
auto viewMatrix = view.getCamera()->getViewMatrix();
auto& poses = session->predictedPoses(VRSession::FramePhase::Update);
auto& leftView = poses.view[(int)Side::LEFT_SIDE];
auto& rightView = poses.view[(int)Side::RIGHT_SIDE];
osg::Vec3d leftEye = leftView.pose.position;
osg::Vec3d rightEye = rightView.pose.position;
// The shadow map will be computed from a position P slightly behind the eyes L and R
// where it creates the minimum frustum encompassing both eyes' frustums.
// Compute Frustum angles. A simple min/max.
// (angleLeft/angleDown are negative, hence min; angleRight/angleUp positive, hence max.)
FieldOfView fov;
fov.angleLeft = std::min(leftView.fov.angleLeft, rightView.fov.angleLeft);
fov.angleRight = std::max(leftView.fov.angleRight, rightView.fov.angleRight);
fov.angleDown = std::min(leftView.fov.angleDown, rightView.fov.angleDown);
fov.angleUp = std::max(leftView.fov.angleUp, rightView.fov.angleUp);
// Use the law of sines on the triangle spanning PLR to determine P
double angleLeft = std::abs(fov.angleLeft);
double angleRight = std::abs(fov.angleRight);
double lengthRL = (rightEye - leftEye).length();
double ratioRL = lengthRL / std::sin(osg::PI - angleLeft - angleRight);
double lengthLP = ratioRL * std::sin(angleRight);
osg::Vec3d directionLP = osg::Vec3(std::cos(-angleLeft), std::sin(-angleLeft), 0);
osg::Vec3d P = leftEye + directionLP * lengthLP;
// Generate the matrices
float near_ = Settings::Manager::getFloat("near clip", "Camera");
float far_ = Settings::Manager::getFloat("viewing distance", "Camera");
// Identity orientation: the combined frustum is axis-aligned with the view.
auto modifiedViewMatrix = viewMatrix * session->viewMatrix(P, osg::Quat(0, 0, 0, 1));
auto projectionMatrix = fov.perspectiveMatrix(near_, far_);
// Lazily allocate the shared matrices on first use.
if (mMasterConfig->_projection == nullptr)
mMasterConfig->_projection = new osg::RefMatrix;
if (mMasterConfig->_modelView == nullptr)
mMasterConfig->_modelView = new osg::RefMatrix;
mMasterConfig->_referenceFrame = view.getCamera()->getReferenceFrame();
mMasterConfig->_modelView->set(modifiedViewMatrix);
mMasterConfig->_projection->set(projectionMatrix);
}
}

@ -1,34 +0,0 @@
#ifndef MWVR_VRSHADOW_H
#define MWVR_VRSHADOW_H
#include <osg/Camera>
#include <osgViewer/Viewer>
#include <components/sceneutil/mwshadowtechnique.hpp>
namespace MWVR
{
// Slave-camera hook: invoked by OSG each frame for a registered slave view.
class UpdateShadowMapSlaveCallback : public osg::View::Slave::UpdateSlaveCallback
{
public:
void updateSlave(osg::View& view, osg::View::Slave& slave) override;
};
// Shares a single shadow map between both eyes' cameras via
// MWShadowTechnique's SharedShadowMapConfig (master computes, slave reuses).
class VrShadow
{
using SharedShadowMapConfig = SceneUtil::MWShadowTechnique::SharedShadowMapConfig;
public:
VrShadow();

// Tag \a camera as the master (computes the map) or a slave (reuses it).
void configureShadowsForCamera(osg::Camera* camera, bool master);

// Recompute the master's shadow view/projection from the current eye poses.
void updateShadowConfig(osg::View& view);
private:
osg::ref_ptr<SharedShadowMapConfig> mMasterConfig;
osg::ref_ptr<SharedShadowMapConfig> mSlaveConfig;
};
}
#endif

@ -0,0 +1,315 @@
#include "vrtracking.hpp"
#include "vrenvironment.hpp"
#include "vrsession.hpp"
#include "openxrmanagerimpl.hpp"
#include <components/misc/constants.hpp>
#include <mutex>
namespace MWVR
{
VRTrackingManager::VRTrackingManager()
{
    // Pre-intern the two paths consumed by updateMovementAngles() and cache
    // the hand-directed-movement setting.
    mHeadPath = stringToVRPath("/user/head/input/pose");
    mHandPath = stringToVRPath("/user/hand/left/input/aim/pose");
    mHandDirectedMovement = Settings::Manager::getBool("hand directed movement", "VR");
}
VRTrackingManager::~VRTrackingManager()
{
// Nothing to release: sources and listeners are non-owned and deregister
// themselves from their own destructors.
}
// Register \a source (non-owning) under \a name and notify listeners bound
// to that name that a source is now available.
// NOTE(review): emplace() is a no-op when \a name is already registered,
// silently keeping the old source — confirm duplicate names cannot occur.
void VRTrackingManager::registerTrackingSource(VRTrackingSource* source, const std::string& name)
{
mSources.emplace(name, source);
notifySourceChanged(name);
}
// Remove \a source from the registry and notify listeners bound to its name
// that the source is gone.
void VRTrackingManager::unregisterTrackingSource(VRTrackingSource* source)
{
    std::string name = "";
    {
        // Find the entry pointing at this source. Bound the walk: the
        // original unchecked `while (it->second != source) it++;` dereferenced
        // end() (undefined behavior) if the source was never registered or
        // was already removed.
        auto it = mSources.begin();
        while (it != mSources.end() && it->second != source)
            ++it;
        if (it == mSources.end())
            return;
        name = it->first;
        mSources.erase(it);
    }
    notifySourceChanged(name);
}
VRTrackingSource* VRTrackingManager::getSource(const std::string& name)
{
    // Look up a registered source by name; nullptr when none exists.
    auto found = mSources.find(name);
    return found == mSources.end() ? nullptr : found->second;
}
void VRTrackingManager::movementAngles(float& yaw, float& pitch)
{
    // Out-params: the offsets last computed by updateMovementAngles().
    pitch = mMovementPitch;
    yaw = mMovementYaw;
}
// Re-read the only setting this manager consumes. Cheap enough to do
// unconditionally rather than scanning 'changed' for the exact key.
void VRTrackingManager::processChangedSettings(const std::set<std::pair<std::string, std::string>>& changed)
{
mHandDirectedMovement = Settings::Manager::getBool("hand directed movement", "VR");
}
void VRTrackingManager::notifySourceChanged(const std::string& name)
{
auto* source = getSource(name);
for (auto& it : mBindings)
{
if (it.second == name)
{
if (source)
it.first->onTrackingAttached(*source);
else
it.first->onTrackingDetached();
}
}
}
// Recompute the yaw/pitch offsets applied to movement input. With
// hand-directed movement enabled, the offsets are the angular difference
// between the (left-hand) aim pose and the head pose; otherwise both are 0.
void VRTrackingManager::updateMovementAngles(DisplayTime predictedDisplayTime)
{
if (mHandDirectedMovement)
{
float headYaw = 0.f;
float headPitch = 0.f;
float headsWillRoll = 0.f;
float handYaw = 0.f;
float handPitch = 0.f;
float handRoll = 0.f;
auto pcsource = getSource("pcstage");
if (pcsource)
{
auto tpHead = pcsource->getTrackingPose(predictedDisplayTime, mHeadPath);
auto tpHand = pcsource->getTrackingPose(predictedDisplayTime, mHandPath);
if (!!tpHead.status && !!tpHand.status)
{
getEulerAngles(tpHead.pose.orientation, headYaw, headPitch, headsWillRoll);
getEulerAngles(tpHand.pose.orientation, handYaw, handPitch, handRoll);
// Offsets are relative angles: hand minus head.
mMovementYaw = handYaw - headYaw;
mMovementPitch = handPitch - headPitch;
}
// NOTE(review): when either pose is untracked, the previous offsets are
// kept rather than zeroed — confirm this stale-hold is intentional.
}
}
else
{
mMovementYaw = 0;
mMovementPitch = 0;
}
}
void VRTrackingManager::bind(VRTrackingListener* listener, std::string sourceName)
{
    // A listener is bound to at most one source name; drop any old binding.
    unbind(listener);
    mBindings.emplace(listener, sourceName);

    // Fire the attach/detach callback immediately so the listener learns
    // whether the named source currently exists.
    if (auto* source = getSource(sourceName))
        listener->onTrackingAttached(*source);
    else
        listener->onTrackingDetached();
}
void VRTrackingManager::unbind(VRTrackingListener* listener)
{
    // Remove the listener's binding, if any, notifying it of the detach.
    auto found = mBindings.find(listener);
    if (found == mBindings.end())
        return;
    listener->onTrackingDetached();
    mBindings.erase(found);
}
VRPath VRTrackingManager::stringToVRPath(const std::string& path)
{
    // The empty string is not a valid path; 0 is the reserved invalid id.
    if (path.empty())
    {
        Log(Debug::Error) << "Empty path";
        return 0;
    }

    // Intern the string: identical strings always map to the same id.
    auto it = mPathIdentifiers.find(path);
    if (it != mPathIdentifiers.end())
        return it->second;

    // First occurrence: assign the next sequential id (ids start at 1).
    return mPathIdentifiers.emplace(path, mPathIdentifiers.size() + 1).first->second;
}
std::string VRTrackingManager::trackingPathToString(VRPath path)
{
    // Reverse lookup: linear scan of the intern table.
    for (const auto& entry : mPathIdentifiers)
        if (entry.second == path)
            return entry.first;

    // Unknown identifier: warn and return the empty string.
    Log(Debug::Warning) << "No such path identifier (" << path << ")";
    return "";
}
// Per-frame tracking update. Cycles the VR session to a fresh update frame,
// lets each registered source update itself for the frame's predicted display
// time, recomputes the movement angles, then notifies all bound listeners.
// Change flags are cleared only after every listener has been notified.
void VRTrackingManager::updateTracking()
{
    // Finish the previous update frame and begin the next one before reading
    // its predicted display time.
    MWVR::Environment::get().getSession()->endFrame();
    MWVR::Environment::get().getSession()->beginFrame();

    auto& frame = Environment::get().getSession()->getFrame(VRSession::FramePhase::Update);

    // A zero predicted display time means there is no usable frame timing.
    if (frame->mFrameInfo.runtimePredictedDisplayTime == 0)
        return;

    // Hoist the loop-invariant display time.
    const auto displayTime = frame->mFrameInfo.runtimePredictedDisplayTime;

    // Iterate by reference: `for (auto source : mSources)` copied the
    // pair<const std::string, VRTrackingSource*> — including the name string —
    // on every iteration, every frame.
    for (auto& source : mSources)
        source.second->updateTracking(displayTime);

    updateMovementAngles(displayTime);

    for (auto& binding : mBindings)
    {
        auto* listener = binding.first;
        auto* source = getSource(binding.second);
        if (source)
        {
            if (source->availablePosesChanged())
                listener->onAvailablePosesChanged(*source);

            listener->onTrackingUpdated(*source, displayTime);
        }
    }

    for (auto& source : mSources)
        source.second->clearAvailablePosesChanged();
}
// Sources self-register with the tracking manager under 'name' on
// construction and unregister again on destruction.
VRTrackingSource::VRTrackingSource(const std::string& name)
{
Environment::get().getTrackingManager()->registerTrackingSource(this, name);
}

VRTrackingSource::~VRTrackingSource()
{
Environment::get().getTrackingManager()->unregisterTrackingSource(this);
}
// True if the set of available poses changed since the flag was last cleared.
bool VRTrackingSource::availablePosesChanged() const
{
    return mAvailablePosesChanged;
}
// Reset the change flag; the manager calls this at the end of each tracking
// update, after all listeners have been notified.
void VRTrackingSource::clearAvailablePosesChanged()
{
    mAvailablePosesChanged = false;
}
// Protected hook for subclasses: mark the set of available poses as changed
// so listeners receive onAvailablePosesChanged on the next update.
void VRTrackingSource::notifyAvailablePosesChanged()
{
    mAvailablePosesChanged = true;
}
// Automatically unbind on destruction so the manager never holds a dangling
// listener pointer.
VRTrackingListener::~VRTrackingListener()
{
    Environment::get().getTrackingManager()->unbind(this);
}
// Registers itself as a tracking source under \a name (via the base class)
// and wraps \a source, using \a movementReference as the pose whose movement
// is accumulated into the world.
VRTrackingToWorldBinding::VRTrackingToWorldBinding(const std::string& name, VRTrackingSource* source, VRPath movementReference)
    : VRTrackingSource(name)
    , mMovementReference(movementReference)
    , mSource(source)
{
}
void VRTrackingToWorldBinding::setWorldOrientation(float yaw, bool adjust)
{
auto yawQuat = osg::Quat(yaw, osg::Vec3(0, 0, -1));
if (adjust)
mOrientation = yawQuat * mOrientation;
else
mOrientation = yawQuat;
}
// Accumulated player movement within the VR stage (world units). Grows every
// update until cleared via consumeMovement() or recenter().
osg::Vec3 VRTrackingToWorldBinding::movement() const
{
    return mMovement;
}
// Subtract movement that has been consumed (e.g. applied to the character)
// from the accumulator. Only the horizontal components are consumed; the z
// component is managed by recenter() and the tracking update.
void VRTrackingToWorldBinding::consumeMovement(const osg::Vec3& movement)
{
    mMovement -= osg::Vec3(movement.x(), movement.y(), 0.f);
}
// Recenter tracking by discarding all accumulated horizontal movement.
// \param resetZ when true, also reset the height: seated play recenters to
//               the configured eye level, standing play to the last tracked
//               height of the movement reference.
void VRTrackingToWorldBinding::recenter(bool resetZ)
{
    mMovement.x() = 0;
    mMovement.y() = 0;

    if (!resetZ)
        return;

    mMovement.z() = mSeatedPlay ? mEyeLevel : mLastPose.position.z();
}
// Fetch a pose from the wrapped source and, for stage-referenced poses with
// usable tracking data, re-base it into the game world.
VRTrackingPose VRTrackingToWorldBinding::getTrackingPose(DisplayTime predictedDisplayTime, VRPath path, VRPath reference)
{
    auto result = mSource->getTrackingPose(predictedDisplayTime, path, reference);
    // Sources report meters; the game world uses units.
    result.pose.position *= Constants::UnitsPerMeter;

    // Only stage-referenced (reference == 0) poses with a usable status are
    // transformed; everything else is returned as-is.
    if (reference != 0 || !result.status)
        return result;

    // Re-base into world space: translate relative to the movement reference,
    // rotate by the world yaw, then add the accumulated stage movement.
    result.pose.position -= mLastPose.position;
    result.pose.position = mOrientation * result.pose.position;
    result.pose.position += mMovement;
    result.pose.orientation = result.pose.orientation * mOrientation;

    // Offset by the world origin node, when one is set.
    if (mOrigin)
        result.pose.position += mOriginWorldPose.position;

    return result;
}
// The binding adds no paths of its own; expose the wrapped source's paths.
std::vector<VRPath> VRTrackingToWorldBinding::listSupportedTrackingPosePaths() const
{
    return mSource->listSupportedTrackingPosePaths();
}
// Per-frame update: refresh the world pose of the origin node, accumulate the
// movement of the movement-reference pose since the previous frame, and
// forward the source's available-poses-changed flag.
void VRTrackingToWorldBinding::updateTracking(DisplayTime predictedDisplayTime)
{
    // Recompute the origin node's world pose each frame so stage poses can be
    // offset into world space.
    mOriginWorldPose = Pose();
    if (mOrigin)
    {
        auto worldMatrix = osg::computeLocalToWorld(mOrigin->getParentalNodePaths()[0]);
        mOriginWorldPose.position = worldMatrix.getTrans();
        mOriginWorldPose.orientation = worldMatrix.getRotate();
    }

    // Stage-referenced pose of the movement reference (e.g. the head).
    auto mtp = mSource->getTrackingPose(predictedDisplayTime, mMovementReference, 0);
    if (!!mtp.status)
    {
        mtp.pose.position *= Constants::UnitsPerMeter;
        // Delta must be computed before mLastPose is overwritten.
        osg::Vec3 vrMovement = mtp.pose.position - mLastPose.position;
        mLastPose = mtp.pose;
        if (mHasTrackingData)
            // Rotate the stage-space delta into world space before accumulating.
            mMovement += mOrientation * vrMovement;
        else
            // First valid sample: initialize the height rather than accumulate
            // a bogus delta from the zero-initialized mLastPose.
            mMovement.z() = mLastPose.position.z();
        mHasTrackingData = true;
    }

    mAvailablePosesChanged = mSource->availablePosesChanged();
}
}

@ -0,0 +1,216 @@
#ifndef MWVR_VRTRACKING_H
#define MWVR_VRTRACKING_H
#include <memory>
#include <map>
#include <set>
#include <vector>
#include <mutex>
#include "vrtypes.hpp"
namespace MWVR
{
class VRAnimation;
//! Describes the status of the tracking data. Note that there are multiple
//! success statuses: a predicted pose should be used whenever its status is a
//! positive value, i.e. whenever operator! returns false.
enum class TrackingStatus : signed
{
    Unknown = 0, //!< No data has been written (default value)
    Good = 1, //!< Accurate, up-to-date tracking data was used.
    Stale = 2, //!< Inaccurate, stale tracking data was used. This code is a status warning, not an error, and the tracking pose should be used.
    NotTracked = -1, //!< No tracking data was returned because the tracking source does not track that
    Lost = -2, //!< No tracking data was returned because the tracking source could not be read (occluded controller, network connectivity issues, etc.).
    RuntimeFailure = -3 //!< No tracking data was returned because of a runtime failure.
};

//! True when \a status carries no usable pose (Unknown or any error code).
inline bool operator!(TrackingStatus status)
{
    return static_cast<signed>(status) <= 0;
}
//! @brief An identifier representing an OpenXR path. 0 represents no path.
//! A VRPath can be obtained from the string representation of a path using VRTrackingManager::stringToVRPath()
//! \note Support is determined by each VRTrackingSource. ALL strings are convertible to VRPaths but won't be useful unless they match a supported string.
//! \sa VRTrackingManager::stringToVRPath()
using VRPath = uint64_t;
//! A single tracked pose together with the status of its prediction.
//! Consumers should check the status (e.g. via operator!) before using the pose.
struct VRTrackingPose
{
    TrackingStatus status = TrackingStatus::Unknown; //!< State of the prediction.
    Pose pose = {}; //!< The predicted pose.
};
//! Source for tracking data. Converts paths to poses at predicted times.
//! \par The following paths are compulsory and must be supported by the implementation.
//! - /user/head/input/pose
//! - /user/hand/left/input/aim/pose
//! - /user/hand/right/input/aim/pose
//! - /user/hand/left/input/grip/pose (Not actually implemented yet)
//! - /user/hand/right/input/grip/pose (Not actually implemented yet)
//! \note A path being *supported* does not guarantee tracking data will be available at any given time (or ever).
//! \note Implementations may expand this list.
//! \sa OpenXRTracker VRGUITracking VRTrackingToWorldBinding
class VRTrackingSource
{
public:
    //! Registers this source with the VRTrackingManager under \a name.
    VRTrackingSource(const std::string& name);
    //! Unregisters this source from the VRTrackingManager.
    virtual ~VRTrackingSource();

    //! @brief Predicted pose of the given path at the predicted time
    //!
    //! \arg predictedDisplayTime[in] Time to predict. This is normally the predicted display time.
    //! \arg path[in] path of the pose requested. Should match an available pose path.
    //! \arg reference[in] path of the pose to use as reference. If 0, pose is referenced to the VR stage.
    //!
    //! \return A structure describing a pose and the tracking status.
    virtual VRTrackingPose getTrackingPose(DisplayTime predictedDisplayTime, VRPath path, VRPath reference = 0) = 0;

    //! List currently supported tracking paths.
    virtual std::vector<VRPath> listSupportedTrackingPosePaths() const = 0;

    //! Returns true if the available poses changed since the last frame, false otherwise.
    bool availablePosesChanged() const;

    //! Reset the change flag; called by the manager after listeners are notified.
    void clearAvailablePosesChanged();

    //! Call once per frame, after (or at the end of) OSG update traversals and before cull traversals.
    //! Predict tracked poses for the given display time.
    //! \arg predictedDisplayTime [in] the predicted display time. The pose shall be predicted for this time based on current tracking data.
    virtual void updateTracking(DisplayTime predictedDisplayTime) = 0;

protected:
    //! Subclasses call this when their set of available poses changes.
    void notifyAvailablePosesChanged();

    // Starts true so listeners get an initial onAvailablePosesChanged notification.
    bool mAvailablePosesChanged = true;
};
//! Ties a tracking source to the game world.
//! A reference pose is selected by passing its path to the constructor.
//! All poses are transformed in the horizontal plane by moving the x,y origin to the position of the reference pose, and then reoriented using the current orientation of the player character.
//! The reference pose is effectively always at the x,y origin, and its movement is accumulated and can be read using the movement() call.
//! If this movement is ever consumed (such as by moving the character to follow the player) the consumed movement must be reported using consumeMovement().
class VRTrackingToWorldBinding : public VRTrackingSource
{
public:
    //! Wraps \a source and accumulates the movement of the pose at \a reference.
    VRTrackingToWorldBinding(const std::string& name, VRTrackingSource* source, VRPath reference);

    //! Set the yaw (radians) used to reorient stage poses into the world.
    //! When \a adjust is true the yaw is applied on top of the current orientation.
    void setWorldOrientation(float yaw, bool adjust);
    osg::Quat getWorldOrientation() const { return mOrientation; }

    //! Eye level (world units) used as the recenter height during seated play.
    void setEyeLevel(float eyeLevel) { mEyeLevel = eyeLevel; }
    float getEyeLevel() const { return mEyeLevel; }

    void setSeatedPlay(bool seatedPlay) { mSeatedPlay = seatedPlay; }
    bool getSeatedPlay() const { return mSeatedPlay; }

    //! The player's movement within the VR stage. This accumulates until the movement has been consumed by calling consumeMovement()
    osg::Vec3 movement() const;

    //! Consume movement
    void consumeMovement(const osg::Vec3& movement);

    //! Recenter tracking by consuming all movement.
    void recenter(bool resetZ);

    //! World origin is the point that ties the stage and the world. (0,0,0 in the world-aligned stage is this node).
    //! If no node is set, the world-aligned stage and the world correspond 1-1.
    void setOriginNode(osg::Node* origin) { mOrigin = origin; }

protected:
    //! Fetches a pose from the source, and then aligns it with the game world if the reference is 0 (stage).
    VRTrackingPose getTrackingPose(DisplayTime predictedDisplayTime, VRPath path, VRPath movementReference = 0) override;

    //! List currently supported tracking paths.
    std::vector<VRPath> listSupportedTrackingPosePaths() const override;

    //! Call once per frame, after (or at the end of) OSG update traversals and before cull traversals.
    //! Predict tracked poses for the given display time.
    //! \arg predictedDisplayTime [in] the predicted display time. The pose shall be predicted for this time based on current tracking data.
    void updateTracking(DisplayTime predictedDisplayTime) override;

private:
    VRPath mMovementReference;     // Path whose movement is accumulated into mMovement.
    VRTrackingSource* mSource;     // Wrapped source (non-owning).
    osg::Node* mOrigin = nullptr;  // Optional world-origin node (non-owning).
    bool mSeatedPlay = false;
    bool mHasTrackingData = false; // False until the first successful reference pose.
    float mEyeLevel = 0;
    Pose mOriginWorldPose = Pose(); // World pose of mOrigin, refreshed every update.
    Pose mLastPose = Pose();        // Last stage pose of the reference (world units).
    osg::Vec3 mMovement = osg::Vec3(0, 0, 0);
    osg::Quat mOrientation = osg::Quat(0, 0, 0, 1);
};
//! Interface for receiving per-frame tracking updates from a VRTrackingSource.
//! Bind/unbind via VRTrackingManager; the destructor unbinds automatically.
class VRTrackingListener
{
public:
    virtual ~VRTrackingListener();

    //! Notify that available tracking poses have changed.
    virtual void onAvailablePosesChanged(VRTrackingSource& source) {};

    //! Notify that a tracking source has been attached
    virtual void onTrackingAttached(VRTrackingSource& source) {};

    //! Notify that a tracking source has been detached.
    virtual void onTrackingDetached() {};

    //! Called every frame after tracking poses have been updated
    virtual void onTrackingUpdated(VRTrackingSource& source, DisplayTime predictedDisplayTime) {};

private:
};
//! Central registry tying tracking sources to listeners.
//! Sources register themselves by name on construction; listeners bind to a
//! source by name and receive per-frame tracking updates until unbound.
class VRTrackingManager
{
public:
    VRTrackingManager();
    ~VRTrackingManager();

    //! Update all tracking sources and notify bound listeners. Called once per frame.
    void updateTracking();

    //! Bind listener to source, listener will receive tracking updates from source until unbound.
    //! \note A single listener can only receive tracking updates from one source.
    void bind(VRTrackingListener* listener, std::string source);

    //! Unbind listener, listener will no longer receive tracking updates.
    void unbind(VRTrackingListener* listener);

    //! Converts a string representation of a path to a VRPath identifier
    VRPath stringToVRPath(const std::string& path);

    //! Converts a path identifier back to string. Returns an empty string if no such identifier exists.
    std::string VRPathToString(VRPath path);

    //! Get a tracking source by name
    VRTrackingSource* getSource(const std::string& name);

    //! Angles to be used for overriding movement direction
    void movementAngles(float& yaw, float& pitch);

    void processChangedSettings(const std::set< std::pair<std::string, std::string> >& changed);

private:
    friend class VRTrackingSource;
    void registerTrackingSource(VRTrackingSource* source, const std::string& name);
    void unregisterTrackingSource(VRTrackingSource* source);
    void notifySourceChanged(const std::string& name);
    void updateMovementAngles(DisplayTime predictedDisplayTime);

    std::map<std::string, VRTrackingSource*> mSources;
    std::map<VRTrackingListener*, std::string> mBindings;
    std::map<std::string, VRPath> mPathIdentifiers;

    // Whether movement direction follows the hand instead of the head.
    // Fixed: was initialized with the float literal 0.f despite being a bool.
    bool mHandDirectedMovement = false;
    VRPath mHeadPath = 0;
    VRPath mHandPath = 0;
    float mMovementYaw = 0.f;
    float mMovementPitch = 0.f;
};
}
#endif

@ -38,6 +38,8 @@ namespace MWVR
RIGHT_SIDE = 1
};
using DisplayTime = int64_t;
////! Represents the relative pose in space of some limb or eye.
//struct Pose
//{

@ -1,138 +0,0 @@
//#include "vrview.hpp"
//
//#include "openxrmanager.hpp"
//#include "openxrmanagerimpl.hpp"
//#include "vrsession.hpp"
//#include "vrenvironment.hpp"
//
//#include <components/debug/debuglog.hpp>
//
//#include <osgViewer/Renderer>
//
//namespace MWVR {
//
// VRView::VRView(
// std::string name,
// SwapchainConfig config,
// osg::ref_ptr<osg::State> state)
// : mSwapchainConfig{ config }
// , mSwapchain(new OpenXRSwapchain(state, mSwapchainConfig))
// , mName(name)
// {
// }
//
// VRView::~VRView()
// {
// }
//
// class CullCallback : public osg::NodeCallback
// {
// void operator()(osg::Node* node, osg::NodeVisitor* nv)
// {
// const auto& name = node->getName();
// if (name == "LeftEye")
// Environment::get().getSession()->beginPhase(VRSession::FramePhase::Cull);
// traverse(node, nv);
// }
// };
//
// osg::Camera* VRView::createCamera(int order, const osg::Vec4& clearColor, osg::GraphicsContext* gc)
// {
// osg::ref_ptr<osg::Camera> camera = new osg::Camera();
// camera->setClearColor(clearColor);
// camera->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
// camera->setRenderOrder(osg::Camera::PRE_RENDER, order);
// camera->setComputeNearFarMode(osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR);
// camera->setAllowEventFocus(false);
// camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF);
// camera->setViewport(0, 0, mSwapchain->width(), mSwapchain->height());
// camera->setGraphicsContext(gc);
//
// camera->setInitialDrawCallback(new VRView::InitialDrawCallback());
// camera->setCullCallback(new CullCallback);
//
// return camera.release();
// }
//
// void VRView::prerenderCallback(osg::RenderInfo& renderInfo)
// {
// if(Environment::get().getSession()->getFrame(VRSession::FramePhase::Draw)->mShouldRender)
// mSwapchain->beginFrame(renderInfo.getState()->getGraphicsContext());
// }
//
// void VRView::InitialDrawCallback::operator()(osg::RenderInfo& renderInfo) const
// {
// const auto& name = renderInfo.getCurrentCamera()->getName();
// if (name == "LeftEye")
// Environment::get().getSession()->beginPhase(VRSession::FramePhase::Draw);
//
// osg::GraphicsOperation* graphicsOperation = renderInfo.getCurrentCamera()->getRenderer();
// osgViewer::Renderer* renderer = dynamic_cast<osgViewer::Renderer*>(graphicsOperation);
// if (renderer != nullptr)
// {
// // Disable normal OSG FBO camera setup
// renderer->setCameraRequiresSetUp(false);
// }
// }
// void VRView::UpdateSlaveCallback::updateSlave(
// osg::View& view,
// osg::View::Slave& slave)
// {
// mView->updateSlave(view, slave);
// }
//
// void VRView::postrenderCallback(osg::RenderInfo& renderInfo)
// {
// auto name = renderInfo.getCurrentCamera()->getName();
// }
//
// void VRView::swapBuffers(osg::GraphicsContext* gc)
// {
// mSwapchain->endFrame(gc);
// }
// void VRView::updateSlave(osg::View& view, osg::View::Slave& slave)
// {
// auto* camera = slave._camera.get();
//
// // Update current cached cull mask of camera if it is active
// auto mask = camera->getCullMask();
// if (mask == 0)
// camera->setCullMask(mCullMask);
// else
// mCullMask = mask;
//
// // If the session is not active, we do not want to waste resources rendering frames.
// if (Environment::get().getSession()->getFrame(VRSession::FramePhase::Update)->mShouldRender)
// {
// Side side = Side::RIGHT_SIDE;
// if (mName == "LeftEye")
// {
//
// Environment::get().getViewer()->vrShadow().updateShadowConfig(view);
// side = Side::LEFT_SIDE;
// }
//
// auto* session = Environment::get().getSession();
// auto viewMatrix = view.getCamera()->getViewMatrix();
//
// // If the camera does not have a view, use the VR stage directly
// bool useStage = !(viewMatrix.getTrans().length() > 0.01);
//
// // If the view matrix is still the identity matrix, conventions have to be swapped around.
// bool swapConventions = viewMatrix.isIdentity();
//
// viewMatrix = viewMatrix * session->viewMatrix(VRSession::FramePhase::Update, side, !useStage, !swapConventions);
//
// camera->setViewMatrix(viewMatrix);
//
// auto projectionMatrix = session->projectionMatrix(VRSession::FramePhase::Update, side);
// camera->setProjectionMatrix(projectionMatrix);
// }
// else
// {
// camera->setCullMask(0);
// }
// slave.updateSlaveImplementation(view);
// }
//}

@ -1,65 +0,0 @@
//#ifndef MWVR_VRVIEW_H
//#define MWVR_VRVIEW_H
//
//#include <cassert>
//#include "openxrmanager.hpp"
//#include "openxrswapchain.hpp"
//
//struct XrSwapchainSubImage;
//
//namespace MWVR
//{
// class VRViewer;
//
// /// \brief Manipulates a slave camera by replacing its framebuffer with one destined for openxr
// class VRView : public osg::Referenced
// {
// public:
//
// class InitialDrawCallback : public osg::Camera::DrawCallback
// {
// public:
// virtual void operator()(osg::RenderInfo& renderInfo) const;
// };
//
// class UpdateSlaveCallback : public osg::View::Slave::UpdateSlaveCallback
// {
// public:
// UpdateSlaveCallback(osg::ref_ptr<VRView> view) : mView(view) {}
// void updateSlave(osg::View& view, osg::View::Slave& slave) override;
//
// private:
// osg::ref_ptr<VRView> mView;
// };
//
// public:
// VRView(std::string name, SwapchainConfig config, osg::ref_ptr<osg::State> state);
// virtual ~VRView();
//
// public:
// //! Prepare for render (set FBO)
// virtual void prerenderCallback(osg::RenderInfo& renderInfo);
//
// //! Finalize render
// virtual void postrenderCallback(osg::RenderInfo& renderInfo);
//
// //! Create camera for this view
// osg::Camera* createCamera(int order, const osg::Vec4& clearColor, osg::GraphicsContext* gc);
//
// //! Get the view surface
// OpenXRSwapchain& swapchain(void) { return *mSwapchain; }
//
// //! Present to the openxr swapchain
// void swapBuffers(osg::GraphicsContext* gc);
//
// void updateSlave(osg::View& view, osg::View::Slave& slave);
// public:
// SwapchainConfig mSwapchainConfig;
// std::unique_ptr<OpenXRSwapchain> mSwapchain;
// std::string mName{};
// osg::Node::NodeMask mCullMask;
// bool mRendering{ false };
// };
//}
//
//#endif

@ -5,7 +5,6 @@
#include "vrenvironment.hpp"
#include "vrsession.hpp"
#include "vrframebuffer.hpp"
#include "vrview.hpp"
#include "../mwrender/vismask.hpp"
@ -21,6 +20,7 @@
#include <components/misc/stringops.hpp>
#include <components/misc/stereo.hpp>
#include <components/misc/callbackmanager.hpp>
#include <components/misc/constants.hpp>
#include <components/sdlutil/sdlgraphicswindow.hpp>
@ -499,12 +499,17 @@ namespace MWVR
{
auto phase = VRSession::FramePhase::Update;
auto session = Environment::get().getSession();
auto& frame = session->getFrame(phase);
std::array<View, 2> views;
MWVR::Environment::get().getTrackingManager()->updateTracking();
auto& frame = session->getFrame(phase);
if (frame->mShouldRender)
{
left = frame->mPredictedPoses.view[static_cast<int>(Side::LEFT_SIDE)];
right = frame->mPredictedPoses.view[static_cast<int>(Side::RIGHT_SIDE)];
left = frame->mViews[(int)ReferenceSpace::VIEW][(int)Side::LEFT_SIDE];
left.pose.position *= Constants::UnitsPerMeter * session->playerScale();
right = frame->mViews[(int)ReferenceSpace::VIEW][(int)Side::RIGHT_SIDE];
right.pose.position *= Constants::UnitsPerMeter * session->playerScale();
}
}

@ -10,7 +10,6 @@
#include <osgViewer/Viewer>
#include "openxrmanager.hpp"
#include "vrshadow.hpp"
#include <components/sceneutil/positionattitudetransform.hpp>
#include <components/misc/stereo.hpp>

@ -4,6 +4,7 @@
#include "mwvr/vrsession.hpp"
#include "mwvr/vrviewer.hpp"
#include "mwvr/vrgui.hpp"
#include "mwvr/vrtracking.hpp"
#ifndef USE_OPENXR
#error "USE_OPENXR not defined"
@ -17,4 +18,5 @@ void OMW::Engine::initVr()
mXrEnvironment.setManager(new MWVR::OpenXRManager);
mXrEnvironment.setSession(new MWVR::VRSession());
mXrEnvironment.setViewer(new MWVR::VRViewer(mViewer));
mXrEnvironment.setTrackingManager(new MWVR::VRTrackingManager());
}

@ -465,9 +465,6 @@ namespace Misc
void StereoView::update()
{
auto viewMatrix = mViewer->getCamera()->getViewMatrix();
auto projectionMatrix = mViewer->getCamera()->getProjectionMatrix();
View left{};
View right{};
double near_ = 1.f;
@ -478,6 +475,8 @@ namespace Misc
return;
}
mUpdateViewCallback->updateView(left, right);
auto viewMatrix = mViewer->getCamera()->getViewMatrix();
auto projectionMatrix = mViewer->getCamera()->getProjectionMatrix();
near_ = Settings::Manager::getFloat("near clip", "Camera");
far_ = Settings::Manager::getFloat("viewing distance", "Camera");

@ -42,6 +42,7 @@ RigGeometry::RigGeometry()
{
setNumChildrenRequiringUpdateTraversal(1);
// update done in accept(NodeVisitor&)
setCullingActive(false);
}
RigGeometry::RigGeometry(const RigGeometry &copy, const osg::CopyOp &copyop)
@ -195,6 +196,10 @@ void RigGeometry::cull(osg::NodeVisitor* nv)
mSkeleton->updateBoneMatrices(traversalNumber);
// Tracking login in VR updates bone matrices out of order, and forces bounds to be recalculated during cull.
if (mSkeleton->isTracked())
updateBounds(nv);
// skinning
const osg::Vec3Array* positionSrc = static_cast<osg::Vec3Array*>(mSourceGeometry->getVertexArray());
const osg::Vec3Array* normalSrc = static_cast<osg::Vec3Array*>(mSourceGeometry->getNormalArray());

@ -38,6 +38,7 @@ Skeleton::Skeleton()
, mActive(Active)
, mLastFrameNumber(0)
, mLastCullFrameNumber(0)
, mTracked(false)
{
}
@ -49,6 +50,7 @@ Skeleton::Skeleton(const Skeleton &copy, const osg::CopyOp &copyop)
, mActive(copy.mActive)
, mLastFrameNumber(0)
, mLastCullFrameNumber(0)
, mTracked(false)
{
}
@ -105,7 +107,7 @@ Bone* Skeleton::getBone(const std::string &name)
return bone;
}
void Skeleton::updateBoneMatrices(unsigned int traversalNumber)
bool Skeleton::updateBoneMatrices(unsigned int traversalNumber)
{
if (traversalNumber != mLastFrameNumber)
mNeedToUpdateBoneMatrices = true;
@ -121,7 +123,9 @@ void Skeleton::updateBoneMatrices(unsigned int traversalNumber)
}
mNeedToUpdateBoneMatrices = false;
return true;
}
return false;
}
void Skeleton::setActive(ActiveType active)
@ -141,6 +145,11 @@ void Skeleton::markDirty()
mBoneCacheInit = false;
}
void Skeleton::markBoneMatriceDirty()
{
mNeedToUpdateBoneMatrices = true;
}
void Skeleton::traverse(osg::NodeVisitor& nv)
{
if (nv.getVisitorType() == osg::NodeVisitor::UPDATE_VISITOR)

@ -44,8 +44,8 @@ namespace SceneUtil
/// Retrieve a bone by name.
Bone* getBone(const std::string& name);
/// Request an update of bone matrices. May be a no-op if already updated in this frame.
void updateBoneMatrices(unsigned int traversalNumber);
/// Request an update of bone matrices. May be a no-op if already updated in this frame. Returns true if update was performed.
bool updateBoneMatrices(unsigned int traversalNumber);
enum ActiveType
{
@ -64,6 +64,11 @@ namespace SceneUtil
void markDirty();
void markBoneMatriceDirty();
void setIsTracked(bool tracked) { mTracked = tracked; }
bool isTracked() const { return mTracked; }
void childInserted(unsigned int) override;
void childRemoved(unsigned int, unsigned int) override;
@ -77,6 +82,7 @@ namespace SceneUtil
bool mBoneCacheInit;
bool mNeedToUpdateBoneMatrices;
bool mTracked;
ActiveType mActive;

Loading…
Cancel
Save