From 57b4b2829e8033d6cf3f7bd48c1fe511e00b830c Mon Sep 17 00:00:00 2001
From: Mikko Pulkki
Date: Mon, 27 Apr 2020 13:26:49 +0300
Subject: Refactor TransformState to use internal 3d camera

---
 src/mbgl/map/transform_state.cpp | 126 ++++++++++++++++++++++++++++-----------
 src/mbgl/map/transform_state.hpp |  11 +++-
 2 files changed, 100 insertions(+), 37 deletions(-)

diff --git a/src/mbgl/map/transform_state.cpp b/src/mbgl/map/transform_state.cpp
index 0333f4860c..a61d02963a 100644
--- a/src/mbgl/map/transform_state.cpp
+++ b/src/mbgl/map/transform_state.cpp
@@ -9,6 +9,15 @@
 #include 
 
 namespace mbgl {
+
+namespace {
+LatLng latLngFromMercator(Point<double> mercatorCoordinate, LatLng::WrapMode wrapMode = LatLng::WrapMode::Unwrapped) {
+    return {util::RAD2DEG * (2 * std::atan(std::exp(M_PI - mercatorCoordinate.y * util::M2PI)) - M_PI_2),
+            mercatorCoordinate.x * 360.0 - 180.0,
+            wrapMode};
+}
+} // namespace
+
 TransformState::TransformState(ConstrainMode constrainMode_, ViewportMode viewportMode_)
     : bounds(LatLngBounds()), constrainMode(constrainMode_), viewportMode(viewportMode_) {}
 
@@ -101,62 +110,52 @@ void TransformState::getProjMatrix(mat4& projMatrix, uint16_t nearZ, bool aligne
     // Add a bit extra to avoid precision problems when a fragment's distance is exactly `furthestDistance`
     const double farZ = furthestDistance * 1.01;
 
-    matrix::perspective(projMatrix, getFieldOfView(), double(size.width) / size.height, nearZ, farZ);
+    // Make sure the camera state is up-to-date
+    updateCameraState();
+
+    mat4 worldToCamera = camera.getWorldToCamera(scale, viewportMode == ViewportMode::FlippedY);
+    mat4 cameraToClip =
+        camera.getCameraToClipPerspective(getFieldOfView(), double(size.width) / size.height, nearZ, farZ);
 
     // Move the center of perspective to center of specified edgeInsets.
     // Values are in range [-1, 1] where the upper and lower range values
     // position viewport center to the screen edges. This is overriden
     // if using axonometric perspective (not in public API yet, Issue #11882).
     // TODO(astojilj): Issue #11882 should take edge insets into account, too.
-    projMatrix[8] = -offset.x * 2.0 / size.width;
-    projMatrix[9] = offset.y * 2.0 / size.height;
-
-    const bool flippedY = viewportMode == ViewportMode::FlippedY;
-    matrix::scale(projMatrix, projMatrix, 1.0, flippedY ? 1 : -1, 1);
+    if (!axonometric) {
+        cameraToClip[8] = -offset.x * 2.0 / size.width;
+        cameraToClip[9] = offset.y * 2.0 / size.height;
+    }
 
-    matrix::translate(projMatrix, projMatrix, 0, 0, -cameraToCenterDistance);
+    // Apply north orientation angle
+    if (getNorthOrientation() != NorthOrientation::Upwards) {
+        matrix::rotate_z(cameraToClip, cameraToClip, -getNorthOrientationAngle());
+    }
 
-    using NO = NorthOrientation;
-    switch (getNorthOrientation()) {
-        case NO::Rightwards:
-            matrix::rotate_y(projMatrix, projMatrix, getPitch());
-            break;
-        case NO::Downwards:
-            matrix::rotate_x(projMatrix, projMatrix, -getPitch());
-            break;
-        case NO::Leftwards:
-            matrix::rotate_y(projMatrix, projMatrix, -getPitch());
-            break;
-        default:
-            matrix::rotate_x(projMatrix, projMatrix, getPitch());
-            break;
-    }
-
-    matrix::rotate_z(projMatrix, projMatrix, getBearing() + getNorthOrientationAngle());
-
-    const double dx = pixel_x() - size.width / 2.0f;
-    const double dy = pixel_y() - size.height / 2.0f;
-    matrix::translate(projMatrix, projMatrix, dx, dy, 0);
+    matrix::multiply(projMatrix, cameraToClip, worldToCamera);
 
     if (axonometric) {
         // mat[11] controls perspective
-        projMatrix[11] = 0;
+        projMatrix[11] = 0.0;
         // mat[8], mat[9] control x-skew, y-skew
-        projMatrix[8] = xSkew;
-        projMatrix[9] = ySkew;
+        double pixelsPerMeter = 1.0 / Projection::getMetersPerPixelAtLatitude(getLatLng().latitude(), getZoom());
+        projMatrix[8] = xSkew * pixelsPerMeter;
+        projMatrix[9] = ySkew * pixelsPerMeter;
     }
 
-    matrix::scale(projMatrix, projMatrix, 1, 1,
-                  1.0 / Projection::getMetersPerPixelAtLatitude(getLatLng(LatLng::Unwrapped).latitude(), getZoom()));
-
     // Make a second projection matrix that is aligned to a pixel grid for rendering raster tiles.
     // We're rounding the (floating point) x/y values to achieve to avoid rendering raster images to fractional
     // coordinates. Additionally, we adjust by half a pixel in either direction in case that viewport dimension
     // is an odd integer to preserve rendering to the pixel grid. We're rotating this shift based on the angle
     // of the transformation so that 0°, 90°, 180°, and 270° rasters are crisp, and adjust the shift so that
     // it is always <= 0.5 pixels.
+    if (aligned) {
+        const double worldSize = Projection::worldSize(scale);
+        const double dx = x - 0.5 * worldSize;
+        const double dy = y - 0.5 * worldSize;
+
     const float xShift = float(size.width % 2) / 2;
     const float yShift = float(size.height % 2) / 2;
     const double bearingCos = std::cos(bearing);
@@ -168,6 +167,65 @@ void TransformState::getProjMatrix(mat4& projMatrix, uint16_t nearZ, bool aligne
 }
 }
 
+void TransformState::updateCameraState() const {
+    if (!valid()) {
+        return;
+    }
+
+    const double worldSize = Projection::worldSize(scale);
+    const double cameraToCenterDistance = getCameraToCenterDistance();
+
+    // x & y tracks the center of the map in pixels. However as rendering is done in pixel coordinates the rendering
+    // origo is actually in the middle of the map (0.5 * worldSize). x&y positions have to be negated because it defines
+    // position of the map, not the camera. Moving map 10 units left has the same effect as moving camera 10 units to the
+    // right.
+    const double dx = 0.5 * worldSize - x;
+    const double dy = 0.5 * worldSize - y;
+
+    // Set camera orientation and move it to a proper distance from the map
+    camera.setOrientation(getPitch(), getBearing());
+
+    const vec3 forward = camera.forward();
+    const vec3 orbitPosition = {{-forward[0] * cameraToCenterDistance,
+                                 -forward[1] * cameraToCenterDistance,
+                                 -forward[2] * cameraToCenterDistance}};
+    vec3 cameraPosition = {{dx + orbitPosition[0], dy + orbitPosition[1], orbitPosition[2]}};
+
+    cameraPosition[0] /= worldSize;
+    cameraPosition[1] /= worldSize;
+    cameraPosition[2] /= worldSize;
+
+    camera.setPosition(cameraPosition);
+}
+
+void TransformState::updateStateFromCamera() {
+    const vec3 position = camera.getPosition();
+    const vec3 forward = camera.forward();
+
+    const double dx = forward[0];
+    const double dy = forward[1];
+    const double dz = forward[2];
+    assert(position[2] > 0.0 && dz < 0.0);
+
+    // Compute bearing and pitch
+    double newBearing;
+    double newPitch;
+    camera.getOrientation(newPitch, newBearing);
+    newPitch = util::clamp(newPitch, minPitch, maxPitch);
+
+    // Compute zoom level from the camera altitude
+    const double centerDistance = getCameraToCenterDistance();
+    const double zoom = util::log2(centerDistance / (position[2] / std::cos(newPitch) * util::tileSize));
+    const double newScale = util::clamp(std::pow(2.0, zoom), min_scale, max_scale);
+
+    // Compute center point of the map
+    const double travel = -position[2] / dz;
+    const Point<double> mercatorPoint = {position[0] + dx * travel, position[1] + dy * travel};
+
+    setLatLngZoom(latLngFromMercator(mercatorPoint), scaleZoom(newScale));
+    setBearing(newBearing);
+    setPitch(newPitch);
+}
 
 void TransformState::updateMatricesIfNeeded() const {
     if (!needsMatricesUpdate() || size.isEmpty()) return;
diff --git a/src/mbgl/map/transform_state.hpp b/src/mbgl/map/transform_state.hpp
index 32d5ef772f..aade9be098 100644
--- a/src/mbgl/map/transform_state.hpp
+++ b/src/mbgl/map/transform_state.hpp
@@ -1,13 +1,14 @@
 #pragma once
-#include 
 #include 
+#include 
+#include 
+#include 
 #include 
 #include 
-#include 
 #include 
 #include 
-#include 
 #include 
 #include 
 #include 
 #include 
@@ -248,6 +249,9 @@ private:
     void updateMatricesIfNeeded() const;
     bool needsMatricesUpdate() const { return requestMatricesUpdate; }
 
+    void updateCameraState() const;
+    void updateStateFromCamera();
+
     const mat4& getCoordMatrix() const;
     const mat4& getInvertedMatrix() const;
 
@@ -276,6 +280,7 @@ private:
     bool axonometric = false;
 
     EdgeInsets edgeInsets;
+    mutable util::Camera camera;
 
     // cache values for spherical mercator math
     double Bc = Projection::worldSize(scale) / util::DEGREES_MAX;
-- 
cgit v1.2.1
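
For reference, below is a small, self-contained sketch of the camera math this patch relies on: updateCameraState() places the camera behind the map center along its forward vector, while updateStateFromCamera() recovers the center by intersecting that forward ray with the z = 0 map plane, derives the zoom from the camera altitude, and converts the recovered mercator point back to a LatLng the same way latLngFromMercator() does. This is an illustration under assumed conventions, not mbgl code: the Vec3 struct, the helper names (cameraForward, placeCamera, recoverState), the z-up / pitch-0-looks-down / bearing-sign conventions, and the 512-pixel tile size are assumptions made for the example.

#include <cassert>
#include <cmath>
#include <cstdio>

namespace {

constexpr double kTileSize = 512.0; // pixels covered by the whole world at zoom 0 (assumption)

struct Vec3 { double x, y, z; };

// Forward vector of a camera with the given pitch and bearing (radians).
// Convention assumed here: z points up and pitch 0 looks straight down.
Vec3 cameraForward(double pitch, double bearing) {
    return {std::sin(bearing) * std::sin(pitch),
            std::cos(bearing) * std::sin(pitch),
            -std::cos(pitch)};
}

// Mirrors the idea of updateCameraState(): back the camera away from the map
// center (given in world pixels on the z = 0 plane) along -forward, then store
// the position normalized by worldSize, as the patch does.
Vec3 placeCamera(double centerX, double centerY, double worldSize,
                 double centerDistance, double pitch, double bearing) {
    const Vec3 f = cameraForward(pitch, bearing);
    return {(centerX - f.x * centerDistance) / worldSize,
            (centerY - f.y * centerDistance) / worldSize,
            (0.0 - f.z * centerDistance) / worldSize};
}

// Mirrors the core of updateStateFromCamera(): intersect the forward ray with
// the z = 0 plane to recover the center, derive the zoom from the camera
// altitude, and convert the center to latitude/longitude.
void recoverState(const Vec3& position, double pitch, double bearing, double centerDistance) {
    const Vec3 f = cameraForward(pitch, bearing);
    assert(position.z > 0.0 && f.z < 0.0);

    // Ray/plane intersection: position + travel * forward lands on z = 0.
    const double travel = -position.z / f.z;
    const double cx = position.x + f.x * travel; // normalized mercator x
    const double cy = position.y + f.y * travel; // normalized mercator y

    // Same shape as the patch: the slant distance to the center, measured in
    // zoom-0 world units, yields the scale and therefore the zoom.
    const double zoom = std::log2(centerDistance / (position.z / std::cos(pitch) * kTileSize));

    // Same formula as latLngFromMercator() in the patch.
    const double lat = (180.0 / M_PI) * (2.0 * std::atan(std::exp(M_PI - cy * 2.0 * M_PI)) - M_PI_2);
    const double lng = cx * 360.0 - 180.0;

    std::printf("center: %.5f, %.5f  zoom: %.3f\n", lat, lng, zoom);
}

} // namespace

int main() {
    const double zoom = 12.0;
    const double worldSize = kTileSize * std::pow(2.0, zoom);
    const double pitch = 30.0 * M_PI / 180.0;
    const double bearing = 45.0 * M_PI / 180.0;
    // cameraToCenterDistance for a 1080 px tall viewport with a 60 degree vertical field of view.
    const double centerDistance = 0.5 * 1080.0 / std::tan(0.5 * 60.0 * M_PI / 180.0);

    // Map center in the middle of the world, i.e. LatLng {0, 0}.
    const Vec3 cam = placeCamera(0.5 * worldSize, 0.5 * worldSize, worldSize,
                                 centerDistance, pitch, bearing);
    recoverState(cam, pitch, bearing, centerDistance);
    return 0;
}

Running this with the values in main() should print a center of roughly {0, 0} and a zoom of roughly 12: the slant distance from the camera to the center, position.z / cos(pitch), equals centerDistance / worldSize in normalized units, which is exactly the ratio the log2 expression inverts.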