summaryrefslogtreecommitdiffhomepage
diff options
context:
space:
mode:
authorDoyle <[email protected]>2020-03-24 23:23:34 +1100
committerGitHub <[email protected]>2020-03-24 13:23:34 +0100
commitda836a732c0ecbf1ec9254d0007dec854e79f1f3 (patch)
tree71c22f7548bbce05d47e5faafce0a2e91101b70c
parent6bab884d1de8b1aadd89e79c26d6294eb22eaece (diff)
downloadraylib-da836a732c0ecbf1ec9254d0007dec854e79f1f3.tar.gz
raylib-da836a732c0ecbf1ec9254d0007dec854e79f1f3.zip
Android: Better track touch input returned from IsMouse*() (#1144)
* Android: Better track touch input returned from IsMouse*() Switch to actually tracking touch input to use for "mouse" input rather than the gestures system. The gesture system as an abstraction on top of raw touch input loses some information needed to map to "mouse" input. Before, - IsMouseButtonReleased() triggers immediately after the initial touch (because GESTURE_TAP activates immediately on touch) instead of waiting for the touch to be released. - IsMouseButtonUp() returns false, when it should just be the opposite of IsMouseButtonDown(). - IsMouseButtonDown() returns true only after GESTURE_HOLD (which activates after some period of time after GESTURE_TAP), when instead it should just be true whenever there is touch input i.e. gesture != GESTURE_NONE or alternatively when any input is received on the screen. After this PR, touches map closer to mouse input. - IsMouseButtonReleased() triggers when touch is released (last frame was touched, this frame not touched). - IsMouseButtonUp() returns the opposite of IsMouseButtonDown() - IsMouseButtonDown() is true when (AMOTION_EVENT_ACTION_DOWN || AMOTION_EVENT_ACTION_MOVE) and false when (AMOTION_EVENT_ACTION_UP) * RPI: Include index check for RPI in GetTouchPosition()
-rw-r--r--src/core.c47
1 files changed, 16 insertions, 31 deletions
diff --git a/src/core.c b/src/core.c
index 969aa0fa..44b7125d 100644
--- a/src/core.c
+++ b/src/core.c
@@ -2504,14 +2504,10 @@ bool IsMouseButtonPressed(int button)
{
bool pressed = false;
-#if defined(PLATFORM_ANDROID)
- if (IsGestureDetected(GESTURE_TAP)) pressed = true;
-#else
if ((CORE.Input.Mouse.currentButtonState[button] == 1) && (CORE.Input.Mouse.previousButtonState[button] == 0)) pressed = true;
// Map touches to mouse buttons checking
if ((CORE.Input.Touch.currentTouchState[button] == 1) && (CORE.Input.Touch.previousTouchState[button] == 0)) pressed = true;
-#endif
return pressed;
}
@@ -2521,14 +2517,10 @@ bool IsMouseButtonDown(int button)
{
bool down = false;
-#if defined(PLATFORM_ANDROID)
- if (IsGestureDetected(GESTURE_HOLD)) down = true;
-#else
if (CORE.Input.Mouse.currentButtonState[button] == 1) down = true;
// Map touches to mouse buttons checking
if (CORE.Input.Touch.currentTouchState[button] == 1) down = true;
-#endif
return down;
}
@@ -2538,16 +2530,10 @@ bool IsMouseButtonReleased(int button)
{
bool released = false;
-#if defined(PLATFORM_ANDROID)
- #if defined(SUPPORT_GESTURES_SYSTEM)
- released = GetGestureDetected() == GESTURE_TAP;
- #endif
-#else
if ((CORE.Input.Mouse.currentButtonState[button] == 0) && (CORE.Input.Mouse.previousButtonState[button] == 1)) released = true;
// Map touches to mouse buttons checking
if ((CORE.Input.Touch.currentTouchState[button] == 0) && (CORE.Input.Touch.previousTouchState[button] == 1)) released = true;
-#endif
return released;
}
@@ -2555,16 +2541,7 @@ bool IsMouseButtonReleased(int button)
// Detect if a mouse button is NOT being pressed
bool IsMouseButtonUp(int button)
{
- bool up = false;
-
-#if !defined(PLATFORM_ANDROID)
- if (CORE.Input.Mouse.currentButtonState[button] == 0) up = true;
-
- // Map touches to mouse buttons checking
- if (CORE.Input.Touch.currentTouchState[button] == 0) up = true;
-#endif
-
- return up;
+ return !IsMouseButtonDown(button);
}
// Returns mouse position X
@@ -2671,7 +2648,7 @@ Vector2 GetTouchPosition(int index)
{
Vector2 position = { -1.0f, -1.0f };
-#if defined(PLATFORM_ANDROID) || defined(PLATFORM_WEB)
+#if defined(PLATFORM_ANDROID) || defined(PLATFORM_WEB) || defined(PLATFORM_RPI)
if (index < MAX_TOUCH_POINTS) position = CORE.Input.Touch.position[index];
else TRACELOG(LOG_WARNING, "Required touch point out of range (Max touch points: %i)", MAX_TOUCH_POINTS);
@@ -2687,11 +2664,7 @@ Vector2 GetTouchPosition(int index)
position.y = position.y*((float)CORE.Window.render.height/(float)CORE.Window.display.height) - CORE.Window.renderOffset.y/2;
}
#endif
-#endif
-#if defined(PLATFORM_RPI)
- position = CORE.Input.Touch.position[index];
-#endif
-#if defined(PLATFORM_DESKTOP)
+#elif defined(PLATFORM_DESKTOP)
// TODO: GLFW is not supporting multi-touch input just yet
// https://www.codeproject.com/Articles/668404/Programming-for-Multi-Touch
// https://docs.microsoft.com/en-us/windows/win32/wintouch/getting-started-with-multi-touch-messages
@@ -3763,7 +3736,9 @@ static void PollInputEvents(void)
// Register previous mouse wheel state
CORE.Input.Mouse.previousWheelMove = CORE.Input.Mouse.currentWheelMove;
CORE.Input.Mouse.currentWheelMove = 0;
+#endif
+#if defined(PLATFORM_DESKTOP) || defined(PLATFORM_WEB) || defined(PLATFORM_ANDROID) || defined(PLATFORM_RPI)
// Register previous touch states
for (int i = 0; i < MAX_TOUCH_POINTS; i++) CORE.Input.Touch.previousTouchState[i] = CORE.Input.Touch.currentTouchState[i];
#endif
@@ -4315,10 +4290,20 @@ static int32_t AndroidInputCallback(struct android_app *app, AInputEvent *event)
CORE.Input.Touch.position[0].x = AMotionEvent_getX(event, 0);
CORE.Input.Touch.position[0].y = AMotionEvent_getY(event, 0);
-#if defined(SUPPORT_GESTURES_SYSTEM)
int32_t action = AMotionEvent_getAction(event);
unsigned int flags = action & AMOTION_EVENT_ACTION_MASK;
+ if (flags == AMOTION_EVENT_ACTION_DOWN || flags == AMOTION_EVENT_ACTION_MOVE)
+ {
+ CORE.Input.Touch.currentTouchState[MOUSE_LEFT_BUTTON] = 1;
+ }
+ else if (flags == AMOTION_EVENT_ACTION_UP)
+ {
+ CORE.Input.Touch.currentTouchState[MOUSE_LEFT_BUTTON] = 0;
+ }
+
+#if defined(SUPPORT_GESTURES_SYSTEM)
+
GestureEvent gestureEvent;
// Register touch actions