This article collects typical usage examples of the C++ function AMotionEvent_getAction. If you have been wondering what AMotionEvent_getAction does, how it is called, or what real code using it looks like, the curated examples below should help.
The following shows 15 code examples of AMotionEvent_getAction, sorted by popularity by default.
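Before the individual examples, a note on the value they all decode: AMotionEvent_getAction() packs the action code and, for ACTION_POINTER_DOWN/ACTION_POINTER_UP, the index of the pointer that changed into a single int32_t. The minimal sketch below is not taken from any of the projects listed (the function name decodeMotionAction is ours); it only illustrates the usual way to split that value with the masks and shift defined in <android/input.h>.

#include <android/input.h>
#include <stdint.h>
#include <stddef.h>

// Minimal sketch: split the packed value returned by AMotionEvent_getAction()
// into the action code and the index of the pointer that went down or up.
static void decodeMotionAction(const AInputEvent* event) {
    int32_t packed = AMotionEvent_getAction(event);
    int32_t action = packed & AMOTION_EVENT_ACTION_MASK;
    size_t pointerIndex =
        (packed & AMOTION_EVENT_ACTION_POINTER_INDEX_MASK) >>
        AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT;

    if (action == AMOTION_EVENT_ACTION_POINTER_DOWN ||
        action == AMOTION_EVENT_ACTION_POINTER_UP) {
        // pointerIndex identifies which pointer changed; map it to its stable id.
        int32_t pointerId = AMotionEvent_getPointerId(event, pointerIndex);
        float x = AMotionEvent_getX(event, pointerIndex);
        float y = AMotionEvent_getY(event, pointerIndex);
        (void)pointerId; (void)x; (void)y; // a real handler would act on these
    }
}

Most of the examples that follow apply exactly this pattern before dispatching the event to their own input layer.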
Example 1: AMotionEvent_getPointerCount
int32_t Engine::onMotionEvent(android_app* app, AInputEvent* event) {
    if (!this->loaded)  return 0;
    if (!this->focused) return 0;

    size_t pointerCount = AMotionEvent_getPointerCount(event);
    this->updateUptime();

    for (size_t i = 0; i < pointerCount; i++) {
        size_t pointerId = AMotionEvent_getPointerId(event, i);
        size_t action = AMotionEvent_getAction(event) & AMOTION_EVENT_ACTION_MASK;
        size_t pointerIndex = i;

        // For POINTER_DOWN/POINTER_UP the index of the pointer that changed
        // is encoded in the upper bits of the action value.
        if (action == AMOTION_EVENT_ACTION_POINTER_DOWN || action == AMOTION_EVENT_ACTION_POINTER_UP) {
            pointerIndex = (AMotionEvent_getAction(event) & AMOTION_EVENT_ACTION_POINTER_INDEX_MASK)
                               >> AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT;
            pointerId = AMotionEvent_getPointerId(event, pointerIndex);
        }

        this->touchEventParamCache[0] = pointerId;
        this->touchEventParamCache[1] = action;
        this->touchEventParamCache[2] = AMotionEvent_getX(event, pointerIndex);
        this->touchEventParamCache[3] = AMotionEvent_getY(event, pointerIndex);
        this->touchEventParamCache[4] = this->uptime.time;
        this->touchEventParamCache[5] = this->uptime.millitm;
        this->touchEventParamCache[6] = AInputEvent_getDeviceId(event);
        this->touchEventParamCache[7] = AInputEvent_getSource(event);

        if (callSqFunction_Bool_Floats(this->sqvm, EMO_NAMESPACE, EMO_FUNC_MOTIONEVENT,
                                       this->touchEventParamCache, MOTION_EVENT_PARAMS_SIZE, false)) {
            return 1;
        }
    }
    return 0; // event not consumed
}
Example 2: engine_handle_input
/**
 * Process the next input event.
 */
static int32_t engine_handle_input(struct android_app* app, AInputEvent* event)
{
    struct engine* engine = (struct engine*)app->userData;
    if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION)
    {
        int x = AMotionEvent_getX(event, 0);
        int y = AMotionEvent_getY(event, 0);

        if ((AMOTION_EVENT_ACTION_MASK & AMotionEvent_getAction( event )) == AMOTION_EVENT_ACTION_DOWN)
        {
            nuiAndroidBridge::androidMouse(0, 0, x, y);
        }
        else if ((AMOTION_EVENT_ACTION_MASK & AMotionEvent_getAction( event )) == AMOTION_EVENT_ACTION_UP)
        {
            nuiAndroidBridge::androidMouse(0, 1, x, y);
        }
        else if ((AMOTION_EVENT_ACTION_MASK & AMotionEvent_getAction( event )) == AMOTION_EVENT_ACTION_MOVE)
        {
            nuiAndroidBridge::androidMotion(x, y);
        }

        engine->animating = 1;
        engine->state.x = AMotionEvent_getX(event, 0);
        engine->state.y = AMotionEvent_getY(event, 0);
        return 1;
    }
    return 0;
}
Example 3: engine_handle_input
/**
 * Process the next input event.
 */
static int32_t engine_handle_input(struct android_app* app, AInputEvent* event) {
    struct engine* engine = (struct engine*)app->userData;
    if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION) {
        if (AMotionEvent_getAction(event) == AMOTION_EVENT_ACTION_DOWN
                || AMotionEvent_getAction(event) == AMOTION_EVENT_ACTION_MOVE) {
            input_manager_touch_down(AMotionEvent_getRawX(event, 0),
                                     AMotionEvent_getRawY(event, 0));
            return 1;
        }
    }
    input_manager_touch_up();
    return 0;
}
Example 4: packt_Log_debug
bool InputService::onTouchEvent(AInputEvent* pEvent) {
#ifdef INPUTSERVICE_LOG_EVENTS
    packt_Log_debug("AMotionEvent_getAction=%d", AMotionEvent_getAction(pEvent));
    packt_Log_debug("AMotionEvent_getFlags=%d", AMotionEvent_getFlags(pEvent));
    packt_Log_debug("AMotionEvent_getMetaState=%d", AMotionEvent_getMetaState(pEvent));
    packt_Log_debug("AMotionEvent_getEdgeFlags=%d", AMotionEvent_getEdgeFlags(pEvent));
    packt_Log_debug("AMotionEvent_getDownTime=%lld", AMotionEvent_getDownTime(pEvent));
    packt_Log_debug("AMotionEvent_getEventTime=%lld", AMotionEvent_getEventTime(pEvent));
    packt_Log_debug("AMotionEvent_getXOffset=%f", AMotionEvent_getXOffset(pEvent));
    packt_Log_debug("AMotionEvent_getYOffset=%f", AMotionEvent_getYOffset(pEvent));
    packt_Log_debug("AMotionEvent_getXPrecision=%f", AMotionEvent_getXPrecision(pEvent));
    packt_Log_debug("AMotionEvent_getYPrecision=%f", AMotionEvent_getYPrecision(pEvent));
    packt_Log_debug("AMotionEvent_getPointerCount=%d", AMotionEvent_getPointerCount(pEvent));
    packt_Log_debug("AMotionEvent_getRawX=%f", AMotionEvent_getRawX(pEvent, 0));
    packt_Log_debug("AMotionEvent_getRawY=%f", AMotionEvent_getRawY(pEvent, 0));
    packt_Log_debug("AMotionEvent_getX=%f", AMotionEvent_getX(pEvent, 0));
    packt_Log_debug("AMotionEvent_getY=%f", AMotionEvent_getY(pEvent, 0));
    packt_Log_debug("AMotionEvent_getPressure=%f", AMotionEvent_getPressure(pEvent, 0));
    packt_Log_debug("AMotionEvent_getSize=%f", AMotionEvent_getSize(pEvent, 0));
    packt_Log_debug("AMotionEvent_getOrientation=%f", AMotionEvent_getOrientation(pEvent, 0));
    packt_Log_debug("AMotionEvent_getTouchMajor=%f", AMotionEvent_getTouchMajor(pEvent, 0));
    packt_Log_debug("AMotionEvent_getTouchMinor=%f", AMotionEvent_getTouchMinor(pEvent, 0));
#endif

    const float TOUCH_MAX_RANGE = 65.0f; // In pixels.
    if (mRefPoint != NULL) {
        if (AMotionEvent_getAction(pEvent) == AMOTION_EVENT_ACTION_MOVE) {
            // Needs a conversion to proper coordinates
            // (origin at bottom/left). Only lMoveY needs it.
            float lMoveX = AMotionEvent_getX(pEvent, 0) - mRefPoint->mPosX;
            float lMoveY = mHeight - AMotionEvent_getY(pEvent, 0) - mRefPoint->mPosY;
            float lMoveRange = sqrt((lMoveX * lMoveX) + (lMoveY * lMoveY));

            if (lMoveRange > TOUCH_MAX_RANGE) {
                float lCropFactor = TOUCH_MAX_RANGE / lMoveRange;
                lMoveX *= lCropFactor; lMoveY *= lCropFactor;
            }

            mHorizontal = lMoveX / TOUCH_MAX_RANGE;
            mVertical   = lMoveY / TOUCH_MAX_RANGE;
        } else {
            mHorizontal = 0.0f; mVertical = 0.0f;
        }
    }
    return true;
}
Example 5: handle_input_events
int32_t handle_input_events(struct android_app* app, AInputEvent* event) {
    int etype = AInputEvent_getType(event);
    switch (etype) {
    case AINPUT_EVENT_TYPE_KEY: {
        int32_t eaction, eflags, ekeycode, escancode;
        eaction  = AKeyEvent_getAction(event);
        eflags   = AKeyEvent_getFlags(event);
        ekeycode = AKeyEvent_getKeyCode(event);
        // LOGI(2, "%s", get_key_event_str(eaction, eflags, ekeycode));
        break;
    }
    case AINPUT_EVENT_TYPE_MOTION: {
        int32_t action, posX, pointer_index;
        action = AMotionEvent_getAction(event);
        pointer_index = (action & AMOTION_EVENT_ACTION_POINTER_INDEX_MASK) >> AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT;
        posX = AMotionEvent_getX(event, pointer_index);
        // write a command to the activity lifecycle event queue (pipe)
        if (action == AMOTION_EVENT_ACTION_MOVE) {
            int xMove = posX - mPreviousX;
            USERDATA* userData = (USERDATA*)app->userData;
            userData->xMove = xMove;
            app->redrawNeeded = 1;
        }
        mPreviousX = posX;
        // LOGI(2, "action: %d, posX: %d, mPreviousX: %d, posX: %d", action, posX, mPreviousX, posX);
        break;
    }
    default:
        LOGI(2, "other input event");
        break;
    }
    return 0; // event not consumed
}
Example 6: AMotionEvent_getAction
int32_t AndroidAppHelper::handleInput (struct android_app* app, AInputEvent* event)
{
    if (mInputInjector)
    {
        if (AInputEvent_getType (event) == AINPUT_EVENT_TYPE_MOTION)
        {
            int action = (int) (AMOTION_EVENT_ACTION_MASK & AMotionEvent_getAction (event));

            // 0 == AMOTION_EVENT_ACTION_DOWN: a move (2) is injected first so the
            // injector knows the pointer position before the press is delivered.
            if (action == 0)
            {
                mInputInjector->injectTouchEvent (2, AMotionEvent_getRawX (event, 0),
                                                  AMotionEvent_getRawY (event, 0));
            }
            mInputInjector->injectTouchEvent (action, AMotionEvent_getRawX (event, 0),
                                              AMotionEvent_getRawY (event, 0));
        }
        else
        {
            mInputInjector->injectKeyEvent (AKeyEvent_getAction (event), AKeyEvent_getKeyCode (event));
        }
        return 1;
    }
    return 0;
}
Example 7: engine_handle_input
/**
 * Process the next input event.
 */
static int32_t engine_handle_input(struct android_app* app,
                                   AInputEvent* event) {
    struct engine* engine = (struct engine*)app->userData;
    if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION) {
        unsigned int flags =
            AMotionEvent_getAction(event) & AMOTION_EVENT_ACTION_MASK;

        // gpg-cpp: Sign in or out on tap
        if (flags == AMOTION_EVENT_ACTION_UP ||
            flags == AMOTION_EVENT_ACTION_POINTER_UP) {
            LOGI("Motion");
            if (!StateManager::GetGameServices()->IsAuthorized()) {
                LOGI("Signing in");
                StateManager::BeginUserInitiatedSignIn();
            } else {
                LOGI("Signing out");
                StateManager::SignOut();
            }
        }

        // Make things pretty
        engine->animating = 1;
        engine->state.x = AMotionEvent_getX(event, 0);
        engine->state.y = AMotionEvent_getY(event, 0);
        return 1;
    }
    return 0;
}
Example 8: AMotionEvent_getAction
bool CAndroidMouse::onMouseEvent(AInputEvent* event)
{
    if (event == NULL)
        return false;

    int32_t eventAction = AMotionEvent_getAction(event);
    int8_t mouseAction = eventAction & AMOTION_EVENT_ACTION_MASK;
    size_t mousePointer = eventAction >> AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT;

    CXBMCApp::android_printf("%s pointer:%i", __PRETTY_FUNCTION__, mousePointer);

    float x = AMotionEvent_getX(event, mousePointer);
    float y = AMotionEvent_getY(event, mousePointer);

    switch (mouseAction)
    {
        case AMOTION_EVENT_ACTION_UP:
        case AMOTION_EVENT_ACTION_DOWN:
            MouseButton(x, y, mouseAction);
            return true;
        default:
            MouseMove(x, y);
            return true;
    }
    return false;
}
Example 9: handleInput
static int32_t handleInput(struct android_app* app, AInputEvent* event)
{
    if (okit.getInput())
    {
        if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION)
        {
            int action = (int)(AMOTION_EVENT_ACTION_MASK & AMotionEvent_getAction(event));
            if (action == 0)
                okit.injectTouch(2, AMotionEvent_getRawX(event, 0), AMotionEvent_getRawY(event, 0));
            okit.injectTouch(action, AMotionEvent_getRawX(event, 0), AMotionEvent_getRawY(event, 0));
        }
        else
        {
            int action = AKeyEvent_getAction(event);
            int unicodeChar = 0;
            okit.injectKey(action, unicodeChar, AKeyEvent_getKeyCode(event));
            //mInputInjector->injectKeyEvent(AKeyEvent_getAction(event), AKeyEvent_getKeyCode(event));
        }
        return 1;
    }
    return 0;
}
Example 10: engine_handle_input
/**
 * Process the next input event.
 */
static int32_t engine_handle_input(struct android_app* app, AInputEvent* event)
{
    struct engine* engine = (struct engine*)app->userData;
    if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION)
    {
        int32_t ret = 0;
        int32_t action = AMotionEvent_getAction(event);

        if (action == AMOTION_EVENT_ACTION_DOWN)
        {
            engine->touchIsDown = true;
            ret = 1;
        }
        else if (action == AMOTION_EVENT_ACTION_UP)
        {
            engine->touchIsDown = false;
            ret = 1;
        }

        if (ret)
        {
            engine->touchX = static_cast<float>(AMotionEvent_getRawX(event, 0)) / engine->width;
            engine->touchY = static_cast<float>(AMotionEvent_getRawY(event, 0)) / engine->height;
        }
        return ret;
    }
    return 0;
}
Example 11: AMotionEvent_getAction
bool CAndroidMouse::onMouseEvent(AInputEvent* event)
{
    if (event == NULL)
        return false;

    int32_t eventAction = AMotionEvent_getAction(event);
    int8_t mouseAction = eventAction & AMOTION_EVENT_ACTION_MASK;
    size_t mousePointerIdx = eventAction >> AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT;
    int32_t mousePointerId = AMotionEvent_getPointerId(event, mousePointerIdx);

#ifdef DEBUG_VERBOSE
    CXBMCApp::android_printf("%s idx:%i, id:%i", __PRETTY_FUNCTION__, mousePointerIdx, mousePointerId);
#endif

    CPoint in(AMotionEvent_getX(event, mousePointerIdx), AMotionEvent_getY(event, mousePointerIdx));
    CPoint out = in * m_droid2guiRatio;

    switch (mouseAction)
    {
        case AMOTION_EVENT_ACTION_UP:
        case AMOTION_EVENT_ACTION_DOWN:
            MouseButton(out.x, out.y, mouseAction, AMotionEvent_getButtonState(event));
            return true;
        case AMOTION_EVENT_ACTION_SCROLL:
            MouseWheel(out.x, out.y, AMotionEvent_getAxisValue(event, AMOTION_EVENT_AXIS_VSCROLL, mousePointerIdx));
            return true;
        default:
            MouseMove(out.x, out.y);
            return true;
    }
    return false;
}
Example 12: AInputEvent_getType
int32_t GLESApplication::handleInput(android_app *app, AInputEvent *event)
{
    int32_t eventType = AInputEvent_getType(event);
    if (eventType == AINPUT_EVENT_TYPE_MOTION) {
        int32_t action = AMotionEvent_getAction(event);
        switch (action) {
            case AMOTION_EVENT_ACTION_DOWN:
                onTouchDown(AMotionEvent_getX(event, 0), AMotionEvent_getY(event, 0));
                break;
            case AMOTION_EVENT_ACTION_MOVE:
                onTouchMove(AMotionEvent_getX(event, 0), AMotionEvent_getY(event, 0));
                break;
            case AMOTION_EVENT_ACTION_UP:
                onTouchUp(AMotionEvent_getX(event, 0), AMotionEvent_getY(event, 0));
                break;
            default:
                break;
        }
    }
    return 0;
}
Example 13: AMotionEvent_getAction
//--------------------------------------------------------------------------------
// DoubletapDetector
//--------------------------------------------------------------------------------
GESTURE_STATE DoubletapDetector::Detect(const AInputEvent* motion_event) {
    if (AMotionEvent_getPointerCount(motion_event) > 1) {
        // Only support single double tap
        return false;
    }

    bool tap_detected = tap_detector_.Detect(motion_event);

    int32_t action = AMotionEvent_getAction(motion_event);
    unsigned int flags = action & AMOTION_EVENT_ACTION_MASK;
    switch (flags) {
        case AMOTION_EVENT_ACTION_DOWN: {
            int64_t eventTime = AMotionEvent_getEventTime(motion_event);
            if (eventTime - last_tap_time_ <= DOUBLE_TAP_TIMEOUT) {
                float x = AMotionEvent_getX(motion_event, 0) - last_tap_x_;
                float y = AMotionEvent_getY(motion_event, 0) - last_tap_y_;
                if (x * x + y * y < DOUBLE_TAP_SLOP * DOUBLE_TAP_SLOP * dp_factor_) {
                    LOGI("DoubletapDetector: Doubletap detected");
                    return GESTURE_STATE_ACTION;
                }
            }
            break;
        }
        case AMOTION_EVENT_ACTION_UP:
            if (tap_detected) {
                last_tap_time_ = AMotionEvent_getEventTime(motion_event);
                last_tap_x_ = AMotionEvent_getX(motion_event, 0);
                last_tap_y_ = AMotionEvent_getY(motion_event, 0);
            }
            break;
    }
    return GESTURE_STATE_NONE;
}
Example 14: AInputEvent_getSource
void WindowImplAndroid::processPointerEvent(bool isDown, AInputEvent* _event, ActivityStates* states)
{
    int32_t device = AInputEvent_getSource(_event);
    int32_t action = AMotionEvent_getAction(_event);

    int index = (action & AMOTION_EVENT_ACTION_POINTER_INDEX_MASK) >> AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT;
    int id = AMotionEvent_getPointerId(_event, index);

    float x = AMotionEvent_getX(_event, index);
    float y = AMotionEvent_getY(_event, index);

    Event event;

    if (isDown)
    {
        if (device == AINPUT_SOURCE_MOUSE)
        {
            event.type = Event::MouseButtonPressed;
            event.mouseButton.button = static_cast<Mouse::Button>(id);
            event.mouseButton.x = x;
            event.mouseButton.y = y;

            if (id >= 0 && id < Mouse::ButtonCount)
                states->isButtonPressed[id] = true;
        }
        else if (device == AINPUT_SOURCE_TOUCHSCREEN)
        {
            event.type = Event::TouchBegan;
            event.touch.finger = id;
            event.touch.x = x;
            event.touch.y = y;

            states->touchEvents[id] = Vector2i(event.touch.x, event.touch.y);
        }
    }
    else
    {
        if (device == AINPUT_SOURCE_MOUSE)
        {
            event.type = Event::MouseButtonReleased;
            event.mouseButton.button = static_cast<Mouse::Button>(id);
            event.mouseButton.x = x;
            event.mouseButton.y = y;

            if (id >= 0 && id < Mouse::ButtonCount)
                states->isButtonPressed[id] = false;
        }
        else if (device == AINPUT_SOURCE_TOUCHSCREEN)
        {
            event.type = Event::TouchEnded;
            event.touch.finger = id;
            event.touch.x = x;
            event.touch.y = y;

            states->touchEvents.erase(id);
        }
    }

    forwardEvent(event);
}
Example 15: handle_motion_event
/**
 * Process the next input event.
 */
static int32_t handle_motion_event(struct android_app* app, AInputEvent* event)
{
    struct engine* engine = (struct engine*)app->userData;
    int i, c = AMotionEvent_getPointerCount(event);

    for (i = 0; i < c; i++) {
        int32_t action = AMotionEvent_getAction(event);
        /* Note: despite the names, 'id' below holds the pointer *index* packed
           into the action value, while 'index' holds the stable pointer *id*
           of slot i, as reported by AMotionEvent_getPointerId(). */
        uint32_t id = action >> AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT;
        uint32_t index = AMotionEvent_getPointerId(event, i);
        float x = AMotionEvent_getX(event, i);
        float y = AMotionEvent_getY(event, i);
        action = action & AMOTION_EVENT_ACTION_MASK;

        if (action == AMOTION_EVENT_ACTION_DOWN) {
            /* Workaround, these are needed in order to
               dispatch GK_ON_MOUSE_DOWN. The problem is that
               the library assumes that the 'mouse' is moved over
               a panel, before it's pressed.
               In case of touchscreen the 'press' event happens before
               the 'move' which results in wrong/invalid gkMouseTarget.
               The mouse down event is dispatched ON the gkMouseTarget.
            */
            onWindowMouseMove(x, y);
            gkUpdateMouseTarget(gkMainPanel);
            gkCheckFocusedPanel();
            onWindowMouseDown(x, y, index);
        } else if (action == AMOTION_EVENT_ACTION_MOVE) {
            onWindowMouseMove(x, y);
        } else if (action == AMOTION_EVENT_ACTION_UP) {
            onWindowMouseUp(x, y, index);
        }

        __android_log_print(ANDROID_LOG_INFO, "GK", "action: %d index: %d id: %d i: %d", action, index, id, i);
    }
    return 1;
}