This article collects typical usage examples of the C++ method MyAvatar::isPlaying. If you have been wondering how MyAvatar::isPlaying works, how to call it, or what real uses of it look like, the hand-picked code examples here may help. You can also explore further usage examples of MyAvatar, the class this method belongs to.
Three code examples of MyAvatar::isPlaying are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
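All three examples follow the same guard pattern: the owning avatar is cast to MyAvatar, and the simulation step stops consuming live input while a recording is being played back, so recorded data is not overwritten by tracker or controller input. Below is a minimal, self-contained sketch of that pattern; the Avatar and LiveAvatar types and the applyLiveInput placeholder are hypothetical stand-ins for illustration only, not part of the real MyAvatar API.

#include <iostream>

// Hypothetical stand-ins mirroring the guard pattern in the examples below.
class Avatar {
public:
    virtual ~Avatar() = default;
    virtual bool isMyAvatar() const { return false; }
};

class LiveAvatar : public Avatar {
public:
    bool isMyAvatar() const override { return true; }
    bool isPlaying() const { return _playing; }   // true while a recording plays back
    void setPlaying(bool playing) { _playing = playing; }
private:
    bool _playing = false;
};

void simulate(Avatar* owningAvatar, float deltaTime) {
    if (!owningAvatar->isMyAvatar()) {
        return; // only apply live input to our own avatar
    }
    LiveAvatar* myAvatar = static_cast<LiveAvatar*>(owningAvatar);
    if (myAvatar->isPlaying()) {
        // Recorded data already drives the avatar; don't take live inputs.
        return;
    }
    // ... apply tracker/controller input here (placeholder) ...
    std::cout << "applying live input for dt=" << deltaTime << "\n";
}

int main() {
    LiveAvatar avatar;
    simulate(&avatar, 0.016f);  // live input applied
    avatar.setPlaying(true);
    simulate(&avatar, 0.016f);  // guarded: playback in progress
}

The real examples follow this shape exactly: an isMyAvatar() check, a static_cast to MyAvatar, then an early return (or a skipped branch) when isPlaying() reports an active playback.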
Example 1: simulate
void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
    setTranslation(_owningAvatar->getSkeletonPosition());
    static const glm::quat refOrientation = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
    setRotation(_owningAvatar->getOrientation() * refOrientation);
    setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningAvatar->getScale());
    setBlendshapeCoefficients(_owningAvatar->getHead()->getBlendshapeCoefficients());

    Model::simulate(deltaTime, fullUpdate);

    if (!isActive() || !_owningAvatar->isMyAvatar()) {
        return; // only simulate for own avatar
    }

    MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
    if (myAvatar->isPlaying()) {
        // Don't take inputs if playing back a recording.
        return;
    }

    const FBXGeometry& geometry = _geometry->getFBXGeometry();

    // find the left and rightmost active palms
    int leftPalmIndex, rightPalmIndex;
    Hand* hand = _owningAvatar->getHand();
    hand->getLeftRightPalmIndices(leftPalmIndex, rightPalmIndex);

    const float HAND_RESTORATION_RATE = 0.25f;
    if (leftPalmIndex == -1 || rightPalmIndex == -1) {
        // palms are not yet set, use mouse
        if (_owningAvatar->getHandState() == HAND_STATE_NULL) {
            restoreRightHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
        } else {
            // transform into model-frame
            glm::vec3 handPosition = glm::inverse(_rotation) * (_owningAvatar->getHandPosition() - _translation);
            applyHandPosition(geometry.rightHandJointIndex, handPosition);
        }
        restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
    } else if (leftPalmIndex == rightPalmIndex) {
        // right hand only
        applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[leftPalmIndex]);
        restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
    } else {
        applyPalmData(geometry.leftHandJointIndex, hand->getPalms()[leftPalmIndex]);
        applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[rightPalmIndex]);
    }

    if (_isFirstPerson) {
        cauterizeHead();
        updateClusterMatrices();
    }

    _boundingShape.setTranslation(_translation + _rotation * _boundingShapeLocalOffset);
    _boundingShape.setRotation(_rotation);
}
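Note that the isPlaying() guard sits after Model::simulate() and the isMyAvatar() check: during playback the skeleton still animates from the recorded data, while all of the palm- and mouse-driven hand handling below the guard is skipped entirely.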
Example 2: simulate
void Head::simulate(float deltaTime, bool isMine, bool billboard) {
    // Update audio trailing average for rendering facial animations
    const float AUDIO_AVERAGING_SECS = 0.05f;
    const float AUDIO_LONG_TERM_AVERAGING_SECS = 30.0f;
    _averageLoudness = glm::mix(_averageLoudness, _audioLoudness, glm::min(deltaTime / AUDIO_AVERAGING_SECS, 1.0f));
    if (_longTermAverageLoudness == -1.0f) {
        _longTermAverageLoudness = _averageLoudness;
    } else {
        _longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
    }

    if (isMine) {
        MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);

        // Only use face trackers when not playing back a recording.
        if (!myAvatar->isPlaying()) {
            FaceTracker* faceTracker = Application::getInstance()->getActiveFaceTracker();
            _isFaceTrackerConnected = faceTracker != NULL && !faceTracker->isMuted();
            if (_isFaceTrackerConnected) {
                _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();

                if (typeid(*faceTracker) == typeid(DdeFaceTracker)) {
                    if (Menu::getInstance()->isOptionChecked(MenuOption::UseAudioForMouth)) {
                        calculateMouthShapes();

                        const int JAW_OPEN_BLENDSHAPE = 21;
                        const int MMMM_BLENDSHAPE = 34;
                        const int FUNNEL_BLENDSHAPE = 40;
                        const int SMILE_LEFT_BLENDSHAPE = 28;
                        const int SMILE_RIGHT_BLENDSHAPE = 29;
                        _blendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen;
                        _blendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4;
                        _blendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4;
                        _blendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2;
                        _blendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3;
                    }
                    applyEyelidOffset(getFinalOrientationInWorldFrame());
                }
            }

            auto eyeTracker = DependencyManager::get<EyeTracker>();
            _isEyeTrackerConnected = eyeTracker->isTracking();
        }

        if (!myAvatar->getStandingHMDSensorMode()) {
            // Twist the upper body to follow the rotation of the head, but only do this with my avatar,
            // since everyone else will see the full joint rotations for other people.
            const float BODY_FOLLOW_HEAD_YAW_RATE = 0.1f;
            const float BODY_FOLLOW_HEAD_FACTOR = 0.66f;
            float currentTwist = getTorsoTwist();
            setTorsoTwist(currentTwist + (getFinalYaw() * BODY_FOLLOW_HEAD_FACTOR - currentTwist) * BODY_FOLLOW_HEAD_YAW_RATE);
        }
    }

    if (!(_isFaceTrackerConnected || billboard)) {
        if (!_isEyeTrackerConnected) {
            // Update eye saccades
            const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
            const float AVERAGE_SACCADE_INTERVAL = 6.0f;
            const float MICROSACCADE_MAGNITUDE = 0.002f;
            const float SACCADE_MAGNITUDE = 0.04f;
            const float NOMINAL_FRAME_RATE = 60.0f;

            if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
                _saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
            } else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
                _saccadeTarget = SACCADE_MAGNITUDE * randVector();
            }
            _saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
        } else {
            _saccade = glm::vec3();
        }

        // Detect transition from talking to not; force blink after that and a delay
        bool forceBlink = false;
        const float TALKING_LOUDNESS = 100.0f;
        const float BLINK_AFTER_TALKING = 0.25f;
        if ((_averageLoudness - _longTermAverageLoudness) > TALKING_LOUDNESS) {
            _timeWithoutTalking = 0.0f;
        } else if (_timeWithoutTalking < BLINK_AFTER_TALKING && (_timeWithoutTalking += deltaTime) >= BLINK_AFTER_TALKING) {
            forceBlink = true;
        }

        // Update audio attack data for facial animation (eyebrows and mouth)
        const float AUDIO_ATTACK_AVERAGING_RATE = 0.9f;
        _audioAttack = AUDIO_ATTACK_AVERAGING_RATE * _audioAttack + (1.0f - AUDIO_ATTACK_AVERAGING_RATE) * fabs((_audioLoudness - _longTermAverageLoudness) - _lastLoudness);
        _lastLoudness = (_audioLoudness - _longTermAverageLoudness);

        const float BROW_LIFT_THRESHOLD = 100.0f;
        if (_audioAttack > BROW_LIFT_THRESHOLD) {
            _browAudioLift += sqrtf(_audioAttack) * 0.01f;
        }
        _browAudioLift = glm::clamp(_browAudioLift *= 0.7f, 0.0f, 1.0f);

        const float BLINK_SPEED = 10.0f;
//......... the rest of this example is omitted .........
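Here the guard is applied in inverted form: face-tracker and eye-tracker state is only sampled while !myAvatar->isPlaying(), so live blendshape data cannot overwrite a recording's facial animation. The saccade, blink, and brow logic further down runs regardless of playback, since it sits outside the isMine block.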
Example 3: simulate
void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
    setTranslation(_owningAvatar->getSkeletonPosition());
    static const glm::quat refOrientation = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
    setRotation(_owningAvatar->getOrientation() * refOrientation);
    setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningAvatar->getScale());
    setBlendshapeCoefficients(_owningAvatar->getHead()->getBlendshapeCoefficients());

    Model::simulate(deltaTime, fullUpdate);

    if (!isActive() || !_owningAvatar->isMyAvatar()) {
        return; // only simulate for own avatar
    }

    MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
    if (myAvatar->isPlaying()) {
        // Don't take inputs if playing back a recording.
        return;
    }

    const FBXGeometry& geometry = _geometry->getFBXGeometry();

    PrioVR* prioVR = Application::getInstance()->getPrioVR();
    if (prioVR->isActive()) {
        // Drive the joints directly from the PrioVR motion-capture rotations
        // and skip the palm handling below.
        for (int i = 0; i < prioVR->getJointRotations().size(); i++) {
            int humanIKJointIndex = prioVR->getHumanIKJointIndices().at(i);
            if (humanIKJointIndex == -1) {
                continue;
            }
            int jointIndex = geometry.humanIKJointIndices.at(humanIKJointIndex);
            if (jointIndex != -1) {
                JointState& state = _jointStates[jointIndex];
                state.setRotationInBindFrame(prioVR->getJointRotations().at(i), PALM_PRIORITY);
            }
        }
        return;
    }

    // find the left and rightmost active palms
    int leftPalmIndex, rightPalmIndex;
    Hand* hand = _owningAvatar->getHand();
    hand->getLeftRightPalmIndices(leftPalmIndex, rightPalmIndex);

    const float HAND_RESTORATION_RATE = 0.25f;
    if (leftPalmIndex == -1 || rightPalmIndex == -1) {
        // palms are not yet set, use mouse
        if (_owningAvatar->getHandState() == HAND_STATE_NULL) {
            restoreRightHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
        } else {
            // transform into model-frame
            glm::vec3 handPosition = glm::inverse(_rotation) * (_owningAvatar->getHandPosition() - _translation);
            applyHandPosition(geometry.rightHandJointIndex, handPosition);
        }
        restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
    } else if (leftPalmIndex == rightPalmIndex) {
        // right hand only
        applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[leftPalmIndex]);
        restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
    } else {
        applyPalmData(geometry.leftHandJointIndex, hand->getPalms()[leftPalmIndex]);
        applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[rightPalmIndex]);
    }

    _boundingShape.setTranslation(_translation + _rotation * _boundingShapeLocalOffset);
    _boundingShape.setRotation(_rotation);
}
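This example differs from Example 1 mainly in the PrioVR branch: when a PrioVR motion-capture device is active, its joint rotations drive the skeleton directly and the palm handling is skipped. Because that branch sits after the isPlaying() early return, motion-capture input is likewise ignored during playback.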