List of usage examples for java.lang Math atan2
@HotSpotIntrinsicCandidate public static double atan2(double y, double x)
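The signature takes the y coordinate first and the x coordinate second. As a quick orientation before the project examples below, a minimal self-contained sketch (the values are arbitrary): atan2 returns the angle of the point (x, y) in the range [-π, π] and, unlike atan(y / x), uses the signs of both arguments to pick the correct quadrant and is defined for x == 0.

    public class Atan2Demo {
        public static void main(String[] args) {
            // atan2 takes y first, then x, and picks the quadrant from both signs
            System.out.println(Math.toDegrees(Math.atan2(1.0, 1.0)));   //   45.0
            System.out.println(Math.toDegrees(Math.atan2(1.0, -1.0)));  //  135.0
            System.out.println(Math.toDegrees(Math.atan2(-1.0, -1.0))); // -135.0
            System.out.println(Math.toDegrees(Math.atan2(0.0, 0.0)));   //    0.0 (defined, unlike atan(y/x))
        }
    }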
From source file:org.esa.snap.engine_utilities.eo.GeoUtils.java
/**
 * Convert Cartesian to Polar coordinates.
 * <p>
 * <b>Definitions:</b>
 * <ul>
 * <li>Latitude: angle from XY-plane towards +Z-axis.</li>
 * <li>Longitude: angle in XY-plane measured from +X-axis towards +Y-axis.</li>
 * </ul>
 * <p>
 * Implementation details: unlike the rest of the utility methods, the GeoPos container class is not used for
 * storing polar coordinates. GeoPos fields are declared as floats and can introduce numerical errors,
 * especially in radius/height.
 * <p>
 * Note: Apache's FastMath is used in the implementation.
 *
 * @param xyz          Array of x, y, and z coordinates.
 * @param phiLamHeight Array of latitude (in radians), longitude (in radians), and radius (in meters).
 * @author Petar Marikovic, PPO.labs
 */
public static void cartesian2polar(final double[] xyz, final double[] phiLamHeight) {

    phiLamHeight[2] = Math.sqrt(xyz[0] * xyz[0] + xyz[1] * xyz[1] + xyz[2] * xyz[2]);
    phiLamHeight[1] = Math.atan2(xyz[1], xyz[0]);
    phiLamHeight[0] = FastMath.asin(xyz[2] / phiLamHeight[2]);
}
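A quick way to sanity-check the conversion above, outside the SNAP code base (hypothetical coordinates, with java.lang.Math.asin standing in for FastMath.asin): a point on the +Y axis at roughly Earth radius should come back as longitude π/2 and latitude 0.

    public class Cartesian2PolarDemo {
        public static void main(String[] args) {
            double[] xyz = {0.0, 6_378_137.0, 0.0};   // roughly a point on the equator, on the +Y axis
            double[] phiLamHeight = new double[3];
            phiLamHeight[2] = Math.sqrt(xyz[0] * xyz[0] + xyz[1] * xyz[1] + xyz[2] * xyz[2]);
            phiLamHeight[1] = Math.atan2(xyz[1], xyz[0]);          // longitude = atan2(y, x) = pi/2
            phiLamHeight[0] = Math.asin(xyz[2] / phiLamHeight[2]); // latitude = 0
            System.out.println(java.util.Arrays.toString(phiLamHeight));
        }
    }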
From source file:org.openhab.binding.mqttitude.internal.MqttitudeConsumer.java
private double calculateDistance(Location location1, Location location2) {
    float lat1 = location1.getLatitude();
    float lng1 = location1.getLongitude();
    float lat2 = location2.getLatitude();
    float lng2 = location2.getLongitude();

    double dLat = Math.toRadians(lat2 - lat1);
    double dLng = Math.toRadians(lng2 - lng1);

    double a = Math.sin(dLat / 2) * Math.sin(dLat / 2) + Math.cos(Math.toRadians(lat1))
            * Math.cos(Math.toRadians(lat2)) * Math.sin(dLng / 2) * Math.sin(dLng / 2);
    double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));

    double earthRadiusKm = 6369;
    double distKm = earthRadiusKm * c;

    // return the distance in meters
    return distKm * 1000;
}
From source file:org.esa.nest.eo.GeoUtils.java
/**
 * Convert Cartesian to Polar coordinates.
 * <p>
 * <b>Definitions:</b>
 * <p>Latitude: angle from XY-plane towards +Z-axis.</p>
 * <p>Longitude: angle in XY-plane measured from +X-axis towards +Y-axis.</p>
 * <p>
 * Implementation details: unlike the rest of the utility methods, the GeoPos container class is not used for
 * storing polar coordinates. GeoPos fields are declared as floats and can introduce numerical errors,
 * especially in radius/height.
 * </p>
 * <p>
 * Note: Apache's FastMath used in implementation.
 * </p>
 * @param xyz          Array of x, y, and z coordinates.
 * @param phiLamHeight Array of latitude (in radians), longitude (in radians), and radius (in meters).
 *
 * @author Petar Marikovic, PPO.labs
 */
public static void cartesian2polar(final double[] xyz, final double[] phiLamHeight) {

    phiLamHeight[2] = Math.sqrt(xyz[0] * xyz[0] + xyz[1] * xyz[1] + xyz[2] * xyz[2]);
    phiLamHeight[1] = Math.atan2(xyz[1], xyz[0]);
    phiLamHeight[0] = Math.asin(xyz[2] / phiLamHeight[2]);
}
From source file:org.orekit.tle.DeepSDP4.java
/** Computes luni-solar terms from initial coordinates and epoch.
 * @exception OrekitException when UTC time steps can't be read
 */
protected void luniSolarTermsComputation() throws OrekitException {

    final double sing = Math.sin(tle.getPerigeeArgument());
    final double cosg = Math.cos(tle.getPerigeeArgument());

    final double sinq = Math.sin(tle.getRaan());
    final double cosq = Math.cos(tle.getRaan());
    final double aqnv = 1.0 / a0dp;

    // Compute julian days since 1900
    final double daysSince1900 = (tle.getDate().durationFrom(AbsoluteDate.JULIAN_EPOCH)
            + tle.getDate().timeScalesOffset(TimeScalesFactory.getUTC(), TimeScalesFactory.getTT()))
            / Constants.JULIAN_DAY - 2415020;

    double cc = C1SS;
    double ze = ZES;
    double zn = ZNS;
    double zsinh = sinq;
    double zcosh = cosq;

    thgr = thetaG(tle.getDate());
    xnq = xn0dp;
    omegaq = tle.getPerigeeArgument();

    final double xnodce = 4.5236020 - 9.2422029e-4 * daysSince1900;
    final double stem = Math.sin(xnodce);
    final double ctem = Math.cos(xnodce);
    final double c_minus_gam = 0.228027132 * daysSince1900 - 1.1151842;
    final double gam = 5.8351514 + 0.0019443680 * daysSince1900;

    zcosil = 0.91375164 - 0.03568096 * ctem;
    zsinil = Math.sqrt(1.0 - zcosil * zcosil);
    zsinhl = 0.089683511 * stem / zsinil;
    zcoshl = Math.sqrt(1.0 - zsinhl * zsinhl);
    zmol = MathUtils.normalizeAngle(c_minus_gam, Math.PI);

    double zx = 0.39785416 * stem / zsinil;
    final double zy = zcoshl * ctem + 0.91744867 * zsinhl * stem;
    zx = Math.atan2(zx, zy) + gam - xnodce;
    zcosgl = Math.cos(zx);
    zsingl = Math.sin(zx);
    zmos = MathUtils.normalizeAngle(6.2565837 + 0.017201977 * daysSince1900, Math.PI);

    // Do solar terms
    savtsn = 1e20;

    double zcosi = 0.91744867;
    double zsini = 0.39785416;
    double zsing = -0.98088458;
    double zcosg = 0.1945905;

    double se = 0;
    double sgh = 0;
    double sh = 0;
    double si = 0;
    double sl = 0;

    // There was previously some convoluted logic here, but it boils
    // down to this: we compute the solar terms, then the lunar terms.
    // On a second pass, we recompute the solar terms, taking advantage
    // of the improved data that resulted from computing lunar terms.
    for (int iteration = 0; iteration < 2; ++iteration) {
        final double a1 = zcosg * zcosh + zsing * zcosi * zsinh;
        final double a3 = -zsing * zcosh + zcosg * zcosi * zsinh;
        final double a7 = -zcosg * zsinh + zsing * zcosi * zcosh;
        final double a8 = zsing * zsini;
        final double a9 = zsing * zsinh + zcosg * zcosi * zcosh;
        final double a10 = zcosg * zsini;
        final double a2 = cosi0 * a7 + sini0 * a8;
        final double a4 = cosi0 * a9 + sini0 * a10;
        final double a5 = -sini0 * a7 + cosi0 * a8;
        final double a6 = -sini0 * a9 + cosi0 * a10;
        final double x1 = a1 * cosg + a2 * sing;
        final double x2 = a3 * cosg + a4 * sing;
        final double x3 = -a1 * sing + a2 * cosg;
        final double x4 = -a3 * sing + a4 * cosg;
        final double x5 = a5 * sing;
        final double x6 = a6 * sing;
        final double x7 = a5 * cosg;
        final double x8 = a6 * cosg;
        final double z31 = 12 * x1 * x1 - 3 * x3 * x3;
        final double z32 = 24 * x1 * x2 - 6 * x3 * x4;
        final double z33 = 12 * x2 * x2 - 3 * x4 * x4;
        final double z11 = -6 * a1 * a5 + e0sq * (-24 * x1 * x7 - 6 * x3 * x5);
        final double z12 = -6 * (a1 * a6 + a3 * a5) + e0sq * (-24 * (x2 * x7 + x1 * x8) - 6 * (x3 * x6 + x4 * x5));
        final double z13 = -6 * a3 * a6 + e0sq * (-24 * x2 * x8 - 6 * x4 * x6);
        final double z21 = 6 * a2 * a5 + e0sq * (24 * x1 * x5 - 6 * x3 * x7);
        final double z22 = 6 * (a4 * a5 + a2 * a6) + e0sq * (24 * (x2 * x5 + x1 * x6) - 6 * (x4 * x7 + x3 * x8));
        final double z23 = 6 * a4 * a6 + e0sq * (24 * x2 * x6 - 6 * x4 * x8);
        final double s3 = cc / xnq;
        final double s2 = -0.5 * s3 / beta0;
        final double s4 = s3 * beta0;
        final double s1 = -15 * tle.getE() * s4;
        final double s5 = x1 * x3 + x2 * x4;
        final double s6 = x2 * x3 + x1 * x4;
        final double s7 = x2 * x4 - x1 * x3;
        double z1 = 3 * (a1 * a1 + a2 * a2) + z31 * e0sq;
        double z2 = 6 * (a1 * a3 + a2 * a4) + z32 * e0sq;
        double z3 = 3 * (a3 * a3 + a4 * a4) + z33 * e0sq;
        z1 = z1 + z1 + beta02 * z31;
        z2 = z2 + z2 + beta02 * z32;
        z3 = z3 + z3 + beta02 * z33;
        se = s1 * zn * s5;
        si = s2 * zn * (z11 + z13);
        sl = -zn * s3 * (z1 + z3 - 14 - 6 * e0sq);
        sgh = s4 * zn * (z31 + z33 - 6);
        if (tle.getI() < (Math.PI / 60.0)) {
            // inclination smaller than 3 degrees
            sh = 0;
        } else {
            sh = -zn * s2 * (z21 + z23);
        }
        ee2 = 2 * s1 * s6;
        e3 = 2 * s1 * s7;
        xi2 = 2 * s2 * z12;
        xi3 = 2 * s2 * (z13 - z11);
        xl2 = -2 * s3 * z2;
        xl3 = -2 * s3 * (z3 - z1);
        xl4 = -2 * s3 * (-21 - 9 * e0sq) * ze;
        xgh2 = 2 * s4 * z32;
        xgh3 = 2 * s4 * (z33 - z31);
        xgh4 = -18 * s4 * ze;
        xh2 = -2 * s2 * z22;
        xh3 = -2 * s2 * (z23 - z21);

        if (iteration == 0) {
            // we compute lunar terms only on the first pass:
            sse = se;
            ssi = si;
            ssl = sl;
            ssh = sh / sini0;
            ssg = sgh - cosi0 * ssh;
            se2 = ee2;
            si2 = xi2;
            sl2 = xl2;
            sgh2 = xgh2;
            sh2 = xh2;
            se3 = e3;
            si3 = xi3;
            sl3 = xl3;
            sgh3 = xgh3;
            sh3 = xh3;
            sl4 = xl4;
            sgh4 = xgh4;
            zcosg = zcosgl;
            zsing = zsingl;
            zcosi = zcosil;
            zsini = zsinil;
            zcosh = zcoshl * cosq + zsinhl * sinq;
            zsinh = sinq * zcoshl - cosq * zsinhl;
            zn = ZNL;
            cc = C1L;
            ze = ZEL;
        }
    }
    // end of solar - lunar - solar terms computation

    sse += se;
    ssi += si;
    ssl += sl;
    ssg += sgh - cosi0 / sini0 * sh;
    ssh += sh / sini0;

    // Start the resonant-synchronous tests and initialization
    double bfact = 0;

    // if mean motion is 1.893053 to 2.117652 revs/day, and eccentricity >= 0.5,
    // start of the 12-hour orbit, e > 0.5 section
    if ((xnq >= 0.00826) && (xnq <= 0.00924) && (tle.getE() >= 0.5)) {

        final double g201 = -0.306 - (tle.getE() - 0.64) * 0.440;
        final double eoc = tle.getE() * e0sq;
        final double sini2 = sini0 * sini0;
        final double f220 = 0.75 * (1 + 2 * cosi0 + theta2);
        final double f221 = 1.5 * sini2;
        final double f321 = 1.875 * sini0 * (1 - 2 * cosi0 - 3 * theta2);
        final double f322 = -1.875 * sini0 * (1 + 2 * cosi0 - 3 * theta2);
        final double f441 = 35 * sini2 * f220;
        final double f442 = 39.3750 * sini2 * sini2;
        final double f522 = 9.84375 * sini0 * (sini2 * (1 - 2 * cosi0 - 5 * theta2)
                + 0.33333333 * (-2 + 4 * cosi0 + 6 * theta2));
        final double f523 = sini0 * (4.92187512 * sini2 * (-2 - 4 * cosi0 + 10 * theta2)
                + 6.56250012 * (1 + 2 * cosi0 - 3 * theta2));
        final double f542 = 29.53125 * sini0 * (2 - 8 * cosi0 + theta2 * (-12 + 8 * cosi0 + 10 * theta2));
        final double f543 = 29.53125 * sini0 * (-2 - 8 * cosi0 + theta2 * (12 + 8 * cosi0 - 10 * theta2));

        double g211;
        double g310;
        double g322;
        double g410;
        double g422;
        double g520;

        resonant = true; // it is resonant...
        synchronous = false; // but it's not synchronous

        // Geopotential resonance initialization for 12 hour orbits :
        if (tle.getE() <= 0.65) {
            g211 = 3.616 - 13.247 * tle.getE() + 16.290 * e0sq;
            g310 = -19.302 + 117.390 * tle.getE() - 228.419 * e0sq + 156.591 * eoc;
            g322 = -18.9068 + 109.7927 * tle.getE() - 214.6334 * e0sq + 146.5816 * eoc;
            g410 = -41.122 + 242.694 * tle.getE() - 471.094 * e0sq + 313.953 * eoc;
            g422 = -146.407 + 841.880 * tle.getE() - 1629.014 * e0sq + 1083.435 * eoc;
            g520 = -532.114 + 3017.977 * tle.getE() - 5740.032 * e0sq + 3708.276 * eoc;
        } else {
            g211 = -72.099 + 331.819 * tle.getE() - 508.738 * e0sq + 266.724 * eoc;
            g310 = -346.844 + 1582.851 * tle.getE() - 2415.925 * e0sq + 1246.113 * eoc;
            g322 = -342.585 + 1554.908 * tle.getE() - 2366.899 * e0sq + 1215.972 * eoc;
            g410 = -1052.797 + 4758.686 * tle.getE() - 7193.992 * e0sq + 3651.957 * eoc;
            g422 = -3581.69 + 16178.11 * tle.getE() - 24462.77 * e0sq + 12422.52 * eoc;
            if (tle.getE() <= 0.715) {
                g520 = 1464.74 - 4664.75 * tle.getE() + 3763.64 * e0sq;
            } else {
                g520 = -5149.66 + 29936.92 * tle.getE() - 54087.36 * e0sq + 31324.56 * eoc;
            }
        }

        double g533;
        double g521;
        double g532;
        if (tle.getE() < 0.7) {
            g533 = -919.2277 + 4988.61 * tle.getE() - 9064.77 * e0sq + 5542.21 * eoc;
            g521 = -822.71072 + 4568.6173 * tle.getE() - 8491.4146 * e0sq + 5337.524 * eoc;
            g532 = -853.666 + 4690.25 * tle.getE() - 8624.77 * e0sq + 5341.4 * eoc;
        } else {
            g533 = -37995.78 + 161616.52 * tle.getE() - 229838.2 * e0sq + 109377.94 * eoc;
            g521 = -51752.104 + 218913.95 * tle.getE() - 309468.16 * e0sq + 146349.42 * eoc;
            g532 = -40023.88 + 170470.89 * tle.getE() - 242699.48 * e0sq + 115605.82 * eoc;
        }

        double temp1 = 3 * xnq * xnq * aqnv * aqnv;
        double temp = temp1 * ROOT22;
        d2201 = temp * f220 * g201;
        d2211 = temp * f221 * g211;
        temp1 *= aqnv;
        temp = temp1 * ROOT32;
        d3210 = temp * f321 * g310;
        d3222 = temp * f322 * g322;
        temp1 *= aqnv;
        temp = 2 * temp1 * ROOT44;
        d4410 = temp * f441 * g410;
        d4422 = temp * f442 * g422;
        temp1 *= aqnv;
        temp = temp1 * ROOT52;
        d5220 = temp * f522 * g520;
        d5232 = temp * f523 * g532;
        temp = 2 * temp1 * ROOT54;
        d5421 = temp * f542 * g521;
        d5433 = temp * f543 * g533;
        xlamo = tle.getMeanAnomaly() + tle.getRaan() + tle.getRaan() - thgr - thgr;
        bfact = xmdot + xnodot + xnodot - THDT - THDT;
        bfact += ssl + ssh + ssh;
    } else if ((xnq < 0.0052359877) && (xnq > 0.0034906585)) {
        // if mean motion is .8 to 1.2 revs/day : (geosynch)

        final double cosio_plus_1 = 1.0 + cosi0;
        final double g200 = 1 + e0sq * (-2.5 + 0.8125 * e0sq);
        final double g300 = 1 + e0sq * (-6 + 6.60937 * e0sq);
        final double f311 = 0.9375 * sini0 * sini0 * (1 + 3 * cosi0) - 0.75 * cosio_plus_1;
        final double g310 = 1 + 2 * e0sq;
        final double f220 = 0.75 * cosio_plus_1 * cosio_plus_1;
        final double f330 = 2.5 * f220 * cosio_plus_1;

        resonant = true;
        synchronous = true;

        // Synchronous resonance terms initialization
        del1 = 3 * xnq * xnq * aqnv * aqnv;
        del2 = 2 * del1 * f220 * g200 * Q22;
        del3 = 3 * del1 * f330 * g300 * Q33 * aqnv;
        del1 = del1 * f311 * g310 * Q31 * aqnv;
        xlamo = tle.getMeanAnomaly() + tle.getRaan() + tle.getPerigeeArgument() - thgr;
        bfact = xmdot + omgdot + xnodot - THDT;
        bfact = bfact + ssl + ssg + ssh;
    } else {
        // it's neither a high-e 12-hours orbit nor a geosynchronous:
        resonant = false;
        synchronous = false;
    }

    if (resonant) {
        xfact = bfact - xnq;

        // Initialize integrator
        xli = xlamo;
        xni = xnq;
        atime = 0;
    }
    derivs = new double[SECULAR_INTEGRATION_ORDER];
}
From source file:com.nextgis.maplibui.fragment.CompassFragment.java
public boolean onTouch(View v, MotionEvent event) {
    switch (event.getAction()) {
        case MotionEvent.ACTION_DOWN:
            mDownX = event.getX();
            mDownY = event.getY();
            return true;

        case MotionEvent.ACTION_MOVE:
            float upX = event.getX();
            float upY = event.getY();

            // angle of each touch point relative to the view centre; the y term is
            // flipped because screen y grows downwards
            double downR = Math.atan2(v.getHeight() / 2 - mDownY, mDownX - v.getWidth() / 2);
            int angle1 = (int) Math.toDegrees(downR);

            double upR = Math.atan2(v.getHeight() / 2 - upY, upX - v.getWidth() / 2);
            int angle2 = (int) Math.toDegrees(upR);

            this.rotateCompass(angle1 - angle2);

            if (mIsVibrationOn) {
                mVibrator.vibrate(5);
            }

            // update starting point for next move event
            mDownX = upX;
            mDownY = upY;
            return true;
    }
    return false;
}
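For reference, the same centre-relative angle computation can be exercised outside Android (hypothetical view centre and touch points, no View or MotionEvent involved): the difference of the two atan2 angles is the rotation to apply.

    public class TouchRotationDemo {
        public static void main(String[] args) {
            // hypothetical view centre and two touch points of a drag gesture
            double cx = 200, cy = 200;
            double downX = 300, downY = 200;   // to the right of the centre
            double moveX = 200, moveY = 100;   // above the centre

            // same formula as the fragment above: flip y because screen y grows downwards
            double angleDown = Math.toDegrees(Math.atan2(cy - downY, downX - cx)); //  0 degrees
            double angleMove = Math.toDegrees(Math.atan2(cy - moveY, moveX - cx)); // 90 degrees

            System.out.println("rotate by " + (angleDown - angleMove) + " degrees"); // -90.0
        }
    }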
From source file:com.evgenyvyaz.cinaytaren.activity.FaceTrackerActivity.java
public static double distance(double lat1, double lon1, double lat2, double lon2) {
    double dLat = (double) Math.toRadians(lat2 - lat1);
    double dLon = (double) Math.toRadians(lon2 - lon1);
    double a = (double) (Math.sin(dLat / 2) * Math.sin(dLat / 2) + Math.cos(Math.toRadians(lat1))
            * Math.cos(Math.toRadians(lat2)) * Math.sin(dLon / 2) * Math.sin(dLon / 2));
    double c = (double) (2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a)));
    double d = 6371 * c;
    return Math.round(d * 1000);
}
From source file:fr.certu.chouette.validation.checkpoint.AbstractValidation.java
/**
 * @see http://mathforum.org/library/drmath/view/51879.html
 */
private double computeHaversineFormula(NeptuneLocalizedObject obj1, NeptuneLocalizedObject obj2) {

    double lon1 = Math.toRadians(obj1.getLongitude().doubleValue());
    double lat1 = Math.toRadians(obj1.getLatitude().doubleValue());
    double lon2 = Math.toRadians(obj2.getLongitude().doubleValue());
    double lat2 = Math.toRadians(obj2.getLatitude().doubleValue());

    final double R = 6371008.8;

    double dlon = lon2 - lon1;
    double dlat = lat2 - lat1;

    double a = Math.pow((Math.sin(dlat / 2)), 2)
            + Math.cos(lat1) * Math.cos(lat2) * Math.pow(Math.sin(dlon / 2), 2);
    double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
    double d = R * c;

    return d;
}
From source file:whitebox.stats.PolynomialLeastSquares2DFitting.java
public void calculateEquations() {
    try {
        int m, i, j, k;

        int n = xCoords2.length;

        // How many coefficients are there?
        numCoefficients = 0;
        for (j = 0; j <= polyOrder; j++) {
            for (k = 0; k <= (polyOrder - j); k++) {
                numCoefficients++;
            }
        }

        // for (i = 0; i < n; i++) {
        //     xCoords1[i] -= xMin1;
        //     yCoords1[i] -= yMin1;
        //     xCoords2[i] -= xMin2;
        //     yCoords2[i] -= yMin2;
        // }

        // Solve the forward transformation equations
        double[][] forwardCoefficientMatrix = new double[n][numCoefficients];
        for (i = 0; i < n; i++) {
            m = 0;
            for (j = 0; j <= polyOrder; j++) {
                for (k = 0; k <= (polyOrder - j); k++) {
                    forwardCoefficientMatrix[i][m] = Math.pow(xCoords1[i], j) * Math.pow(yCoords1[i], k);
                    m++;
                }
            }
        }

        RealMatrix coefficients = new Array2DRowRealMatrix(forwardCoefficientMatrix, false);
        DecompositionSolver solver = new SingularValueDecomposition(coefficients).getSolver();
        //DecompositionSolver solver = new QRDecomposition(coefficients).getSolver();

        // do the x-coordinate first
        RealVector constants = new ArrayRealVector(xCoords2, false);
        RealVector solution = solver.solve(constants);
        forwardRegressCoeffX = new double[n];
        for (int a = 0; a < numCoefficients; a++) {
            forwardRegressCoeffX[a] = solution.getEntry(a);
        }

        double[] residualsX = new double[n];
        double SSresidX = 0;
        for (i = 0; i < n; i++) {
            double yHat = 0.0;
            for (j = 0; j < numCoefficients; j++) {
                yHat += forwardCoefficientMatrix[i][j] * forwardRegressCoeffX[j];
            }
            residualsX[i] = xCoords2[i] - yHat;
            SSresidX += residualsX[i] * residualsX[i];
        }

        double sumX = 0;
        double SSx = 0;
        for (i = 0; i < n; i++) {
            SSx += xCoords2[i] * xCoords2[i];
            sumX += xCoords2[i];
        }
        double varianceX = (SSx - (sumX * sumX) / n) / n;
        double SStotalX = (n - 1) * varianceX;
        double rsqX = 1 - SSresidX / SStotalX;

        // now the y-coordinate
        constants = new ArrayRealVector(yCoords2, false);
        solution = solver.solve(constants);
        forwardRegressCoeffY = new double[numCoefficients];
        for (int a = 0; a < numCoefficients; a++) {
            forwardRegressCoeffY[a] = solution.getEntry(a);
        }

        double[] residualsY = new double[n];
        residualsXY = new double[n];
        residualsOrientation = new double[n];
        double SSresidY = 0;
        for (i = 0; i < n; i++) {
            double yHat = 0.0;
            for (j = 0; j < numCoefficients; j++) {
                yHat += forwardCoefficientMatrix[i][j] * forwardRegressCoeffY[j];
            }
            residualsY[i] = yCoords2[i] - yHat;
            SSresidY += residualsY[i] * residualsY[i];
            residualsXY[i] = Math.sqrt(residualsX[i] * residualsX[i] + residualsY[i] * residualsY[i]);
            residualsOrientation[i] = Math.atan2(residualsY[i], residualsX[i]);
        }

        double sumY = 0;
        double sumR = 0;
        double SSy = 0;
        double SSr = 0;
        for (i = 0; i < n; i++) {
            SSy += yCoords2[i] * yCoords2[i];
            SSr += residualsXY[i] * residualsXY[i];
            sumY += yCoords2[i];
            sumR += residualsXY[i];
        }
        double varianceY = (SSy - (sumY * sumY) / n) / n;
        double varianceResiduals = (SSr - (sumR * sumR) / n) / n;
        double SStotalY = (n - 1) * varianceY;
        double rsqY = 1 - SSresidY / SStotalY;
        overallRMSE = Math.sqrt(varianceResiduals);

        //System.out.println("y-coordinate r-square: " + rsqY);

        // // Print the residuals.
        // System.out.println("\nResiduals:");
        // for (i = 0; i < n; i++) {
        //     System.out.println("Point " + (i + 1) + "\t" + residualsX[i]
        //             + "\t" + residualsY[i] + "\t" + residualsXY[i]);
        // }

        // Solve the backward transformation equations
        double[][] backCoefficientMatrix = new double[n][numCoefficients];
        for (i = 0; i < n; i++) {
            m = 0;
            for (j = 0; j <= polyOrder; j++) {
                for (k = 0; k <= (polyOrder - j); k++) {
                    backCoefficientMatrix[i][m] = Math.pow(xCoords2[i], j) * Math.pow(yCoords2[i], k);
                    m++;
                }
            }
        }

        coefficients = new Array2DRowRealMatrix(backCoefficientMatrix, false);
        //DecompositionSolver solver = new SingularValueDecomposition(coefficients).getSolver();
        solver = new QRDecomposition(coefficients).getSolver();

        // do the x-coordinate first
        constants = new ArrayRealVector(xCoords1, false);
        solution = solver.solve(constants);
        backRegressCoeffX = new double[numCoefficients];
        for (int a = 0; a < numCoefficients; a++) {
            backRegressCoeffX[a] = solution.getEntry(a);
        }

        // now the y-coordinate
        constants = new ArrayRealVector(yCoords1, false);
        solution = solver.solve(constants);
        backRegressCoeffY = new double[n];
        for (int a = 0; a < numCoefficients; a++) {
            backRegressCoeffY[a] = solution.getEntry(a);
        }
    } catch (Exception e) {
        e.printStackTrace();
        // showFeedback("Error in ImageRectificationDialog.calculateEquations: "
        //         + e.getMessage());
    }
}
From source file:arlocros.ArMarkerPoseEstimator.java
private void start(final ConnectedNode connectedNode) {
    // load OpenCV shared library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // read configuration variables from the ROS Runtime (configured in the
    // launch file)
    log = connectedNode.getLog();

    // Read Marker Config
    markerConfig = MarkerConfig.createFromConfig(parameter.markerConfigFile(), parameter.patternDirectory());

    // setup rotation vector and translation vector storing output of the
    // localization
    rvec = new Mat(3, 1, CvType.CV_64F);
    tvec = new MatOfDouble(1.0, 1.0, 1.0);

    camp = getCameraInfo(connectedNode, parameter);

    // start to listen to transform messages in /tf in order to feed the
    // Transformer and lookup transforms
    final TransformationService transformationService = TransformationService.create(connectedNode);

    // Subscribe to Image
    Subscriber<sensor_msgs.Image> subscriberToImage = connectedNode.newSubscriber(parameter.cameraImageTopic(),
            sensor_msgs.Image._TYPE);

    ComputePose computePose = null;
    try {
        final Mat cameraMatrix = CameraParams.getCameraMatrix(camp);
        final MatOfDouble distCoeffs = CameraParams.getDistCoeffs(camp);
        computePose = ComputePose.create(markerConfig, new Size(camp.width(), camp.height()), cameraMatrix,
                distCoeffs, this.parameter.visualization());
    } catch (NyARException e) {
        logger.info("Cannot initialize ComputePose", e);
    } catch (FileNotFoundException e) {
        logger.info("Cannot find file when initialize ComputePose", e);
    }
    final ComputePose poseProcessor = computePose;

    subscriberToImage.addMessageListener(new MessageListener<sensor_msgs.Image>() {

        @Override
        public void onNewMessage(sensor_msgs.Image message) {
            //
            if (!message.getEncoding().toLowerCase().equals("rgb8")) {
                log.error("Sorry, " + message.getEncoding() + " Image encoding is not supported! EXITING");
                System.exit(-1);
            }
            if (camp != null) {
                try {
                    //
                    image = Utils.matFromImage(message);
                    // uncomment to add more contrast to the image
                    if (parameter.blackWhiteContrastLevel() > 0) {
                        log.trace("using BlackWhiteContrastLevel");
                        Utils.tresholdContrastBlackWhite(image, parameter.blackWhiteContrastLevel(),
                                parameter.invertBlackWhiteColor());
                    }
                    if (parameter.useThreshold()) {
                        Imgproc.threshold(image, image, 200, 255, Imgproc.THRESH_BINARY);
                    }
                    // Mat cannyimg = new Mat(image.height(), image.width(),
                    // CvType.CV_8UC3);
                    // Imgproc.Canny(image, cannyimg, 10, 100);
                    // Imshow.show(cannyimg);

                    // image.convertTo(image, -1, 1.5, 0);

                    // setup camera matrix and return vectors
                    // compute pose
                    if (poseProcessor.computePose(rvec, tvec, image)) {
                        // notify publisher threads (pose and tf, see below)
                        synchronized (tvec) {
                            tvec.notifyAll();
                        }
                    }
                } catch (Exception e) {
                    logger.info("An exception occurs.", e);
                }
            }
        }
    });

    // publish tf CAMERA_FRAME_NAME --> MARKER_FRAME_NAME
    final Publisher<tf2_msgs.TFMessage> tfPublisherCamToMarker = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {
            synchronized (tvec) {
                tvec.wait();
            }

            QuaternionHelper q = new QuaternionHelper();

            /*
             * http://euclideanspace.com/maths/geometry/rotations/
             * conversions/matrixToEuler/index.htm
             * http://stackoverflow.com/questions/12933284/rodrigues-into-
             * eulerangles-and-vice-versa
             *
             * heading = atan2(-m20,m00) attitude = asin(m10) bank =
             * atan2(-m12,m11)
             */
            // convert output rotation vector rvec to rotation matrix R
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // get rotations around X,Y,Z from rotation matrix R
            double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            double attitudeZ = Math.asin(R.get(1, 0)[0]);
            // convert Euler angles to quaternion
            q.setFromEuler(bankX, headingY, attitudeZ);

            // set information to message
            TFMessage tfmessage = tfPublisherCamToMarker.newMessage();
            TransformStamped posestamped = connectedNode.getTopicMessageFactory()
                    .newFromType(geometry_msgs.TransformStamped._TYPE);
            Transform transform = posestamped.getTransform();

            Quaternion orientation = transform.getRotation();
            Vector3 point = transform.getTranslation();
            point.setX(tvec.get(0, 0)[0]);
            point.setY(tvec.get(1, 0)[0]);
            point.setZ(tvec.get(2, 0)[0]);

            orientation.setW(q.getW());
            orientation.setX(q.getX());
            orientation.setY(q.getY());
            orientation.setZ(q.getZ());
            posestamped.getHeader().setFrameId(parameter.cameraFrameName());
            posestamped.setChildFrameId(parameter.markerFrameName());
            posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
            // frame_id too
            tfmessage.getTransforms().add(posestamped);
            tfPublisherCamToMarker.publish(tfmessage);
        }
    });

    // publish Markers
    final Publisher<visualization_msgs.Marker> markerPublisher = connectedNode.newPublisher("markers",
            visualization_msgs.Marker._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {
            // publish markers every 500ms
            Thread.sleep(500);
            // get marker points from markerConfig, each marker has 4
            // vertices
            List<Point3> points3dlist = markerConfig.getUnordered3DPointList();
            int i = 0;
            for (Point3 p : points3dlist) {
                Marker markermessage = markerPublisher.newMessage();
                // FIXME If the markers are published into an existing frame
                // (e.g. map or odom) the node will consume very high CPU
                // and will fail after a short time. The markers are
                // probably published in the wrong way.
                markermessage.getHeader().setFrameId(parameter.markerFrameName());
                markermessage.setId(i);
                i++;
                markermessage.setType(visualization_msgs.Marker.SPHERE);
                markermessage.setAction(visualization_msgs.Marker.ADD);
                // position
                double x = p.x;
                markermessage.getPose().getPosition().setX(x);
                double y = p.y;
                markermessage.getPose().getPosition().setY(y);
                double z = p.z;
                markermessage.getPose().getPosition().setZ(z);
                // orientation
                markermessage.getPose().getOrientation().setX(0);
                markermessage.getPose().getOrientation().setY(0);
                markermessage.getPose().getOrientation().setZ(0);
                markermessage.getPose().getOrientation().setW(1);
                // pattern size
                markermessage.getScale().setX(0.1);
                markermessage.getScale().setY(0.1);
                markermessage.getScale().setZ(0.1);
                // color
                markermessage.getColor().setA(1);
                markermessage.getColor().setR(1);
                markermessage.getColor().setG(0);
                markermessage.getColor().setB(0);

                markerPublisher.publish(markermessage);
            }
        }
    });

    // publish tf map --> odom
    final Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            // since this is an infinite loop, wait to be notified if new
            // image was processed
            synchronized (tvec) {
                tvec.wait();
            }

            // compute transform map to odom from map to
            // camera_rgb_optical_frame and odom to camera_rgb_optical_frame

            // map to camera_rgb_optical_frame
            Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
            QuaternionHelper q = new QuaternionHelper();
            // get rotation matrix R from solvepnp output rotation vector
            // rvec
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // transpose R, because we need the transformation from
            // world(map) to camera
            R = R.t();
            // get rotation around X,Y,Z from R in radians
            double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            double attitudeZ = Math.asin(R.get(1, 0)[0]);
            q.setFromEuler(bankX, headingY, attitudeZ);
            // compute translation vector from world (map) to cam
            // tvec_map_cam
            Core.multiply(R, new Scalar(-1), R); // R=-R
            Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0); // tvec_map_cam=R*tvec

            org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(q.getX(),
                    q.getY(), q.getZ(), q.getW());
            double x = tvec_map_cam.get(0, 0)[0];
            double y = tvec_map_cam.get(1, 0)[0];
            double z = tvec_map_cam.get(2, 0)[0];
            // create a Transform Object that hold the transform map to cam
            org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
            org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                    translation, rotation);

            // odom to camera_rgb_optical_frame
            GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
            GraphName targetFrame = GraphName.of("odom");
            org.ros.rosjava_geometry.Transform transform_cam_odom = null;
            if (transformationService.canTransform(targetFrame, sourceFrame)) {
                try {
                    transform_cam_odom = transformationService.lookupTransform(targetFrame, sourceFrame);
                } catch (Exception e) {
                    log.error(ExceptionUtils.getStackTrace(e));
                    log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                            + "odom! " + "However, " + "will continue..");
                    return;
                }
            } else {
                log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                        + "odom! " + "However, will " + "continue..");
                // cancel this loop..no result can be computed
                return;
            }

            // multiply results
            org.ros.rosjava_geometry.Transform result = org.ros.rosjava_geometry.Transform.identity();
            result = result.multiply(transform_map_cam);
            result = result.multiply(transform_cam_odom);

            // set information to ROS message
            TFMessage tfMessage = tfPublisherMapToOdom.newMessage();
            TransformStamped transformStamped = connectedNode.getTopicMessageFactory()
                    .newFromType(geometry_msgs.TransformStamped._TYPE);
            Transform transform = transformStamped.getTransform();

            Quaternion orientation = transform.getRotation();
            Vector3 vector = transform.getTranslation();
            vector.setX(result.getTranslation().getX());
            vector.setY(result.getTranslation().getY());
            vector.setZ(result.getTranslation().getZ());

            orientation.setW(result.getRotationAndScale().getW());
            orientation.setX(result.getRotationAndScale().getX());
            orientation.setY(result.getRotationAndScale().getY());
            orientation.setZ(result.getRotationAndScale().getZ());
            transformStamped.getHeader().setFrameId("map");
            transformStamped.setChildFrameId("odom");
            transformStamped.getHeader().setStamp(connectedNode.getCurrentTime());
            // frame_id too
            tfMessage.getTransforms().add(transformStamped);
            tfPublisherMapToOdom.publish(tfMessage);
            // System.exit(0);
        }
    });

    // Publish Pose
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {
            // since this is an infinite loop, wait here to be notified if
            // new image was processed
            synchronized (tvec) {
                tvec.wait();
            }
            final QuaternionHelper q = new QuaternionHelper();

            // convert rotation vector result of solvepnp to rotation matrix
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // see publishers before for documentation
            final Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
            R = R.t();
            final double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            final double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            final double attitudeZ = Math.asin(R.get(1, 0)[0]);
            q.setFromEuler(bankX, headingY, attitudeZ);
            Core.multiply(R, new Scalar(-1), R);
            Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0);
            final org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(
                    q.getX(), q.getY(), q.getZ(), q.getW());
            final double x = tvec_map_cam.get(0, 0)[0];
            final double y = tvec_map_cam.get(1, 0)[0];
            final double z = tvec_map_cam.get(2, 0)[0];

            final org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
            final org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                    translation, rotation);

            // odom to camera_rgb_optical_frame
            final GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
            final GraphName targetFrame = GraphName.of("base_link");
            org.ros.rosjava_geometry.Transform transform_cam_base = null;

            if (transformationService.canTransform(targetFrame, sourceFrame)) {
                try {
                    transform_cam_base = transformationService.lookupTransform(targetFrame, sourceFrame);
                } catch (Exception e) {
                    log.error(ExceptionUtils.getStackTrace(e));
                    log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                            + "base_link! " + "However, will continue..");
                    // cancel this loop..no result can be computed
                    return;
                }
            } else {
                log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                        + "base_link!" + " However, " + "will continue..");
                // cancel this loop..no result can be computed
                return;
            }

            // multiply results
            org.ros.rosjava_geometry.Transform current_pose = org.ros.rosjava_geometry.Transform.identity();
            current_pose = current_pose.multiply(transform_map_cam);
            current_pose = current_pose.multiply(transform_cam_base);

            // check for plausibility of the pose by checking if movement
            // exceeds max speed (defined) of the robot
            if (parameter.badPoseReject()) {
                Time current_timestamp = connectedNode.getCurrentTime();
                // TODO Unfortunately, we do not have the tf timestamp at
                // hand here. So we can only use the current timestamp.
                double maxspeed = 5;
                boolean goodpose = false;
                // if (current_pose != null && current_timestamp != null) {
                if (last_pose != null && last_timestamp != null) {
                    // check speed of movement between last and current pose
                    double distance = PoseCompare.distance(current_pose, last_pose);
                    double timedelta = PoseCompare.timedelta(current_timestamp, last_timestamp);
                    if ((distance / timedelta) < maxspeed) {
                        if (smoothing) {
                            double xold = last_pose.getTranslation().getX();
                            double yold = last_pose.getTranslation().getY();
                            double zold = last_pose.getTranslation().getZ();
                            double xnew = current_pose.getTranslation().getX();
                            double ynew = current_pose.getTranslation().getY();
                            double znew = current_pose.getTranslation().getZ();
                            final org.ros.rosjava_geometry.Vector3 smoothTranslation =
                                    new org.ros.rosjava_geometry.Vector3(
                                            (xold * 2 + xnew) / 3, (yold * 2 + ynew) / 3, (zold * 2 + znew) / 3);
                            current_pose = new org.ros.rosjava_geometry.Transform(smoothTranslation,
                                    current_pose.getRotationAndScale());
                            last_pose = current_pose;
                        }
                        last_pose = current_pose;
                        last_timestamp = current_timestamp;
                        goodpose = true;
                    } else {
                        log.info("distance " + distance + " time: " + timedelta + " --> Pose rejected");
                    }
                } else {
                    last_pose = current_pose;
                    last_timestamp = current_timestamp;
                }
                // }

                // bad pose rejection
                if (!goodpose) {
                    return;
                }
            }

            // set information to message
            final geometry_msgs.PoseStamped posestamped = posePublisher.newMessage();
            Pose pose = posestamped.getPose();
            Quaternion orientation = pose.getOrientation();
            Point point = pose.getPosition();

            point.setX(current_pose.getTranslation().getX());
            point.setY(current_pose.getTranslation().getY());
            point.setZ(current_pose.getTranslation().getZ());

            orientation.setW(current_pose.getRotationAndScale().getW());
            orientation.setX(current_pose.getRotationAndScale().getX());
            orientation.setY(current_pose.getRotationAndScale().getY());
            orientation.setZ(current_pose.getRotationAndScale().getZ());

            // frame_id too
            posestamped.getHeader().setFrameId("map");
            posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
            posePublisher.publish(posestamped);
            mostRecentPose.set(posestamped);
        }
    });
}
From source file:eu.hansolo.tilesfx.tools.Location.java
public double calcDistanceInMeter(final double LAT_1, final double LON_1, final double LAT_2,
        final double LON_2) {
    final double EARTH_RADIUS = 6_371_000; // m
    final double LAT_1_RADIANS = Math.toRadians(LAT_1);
    final double LAT_2_RADIANS = Math.toRadians(LAT_2);
    final double DELTA_LAT_RADIANS = Math.toRadians(LAT_2 - LAT_1);
    final double DELTA_LON_RADIANS = Math.toRadians(LON_2 - LON_1);

    final double A = Math.sin(DELTA_LAT_RADIANS * 0.5) * Math.sin(DELTA_LAT_RADIANS * 0.5)
            + Math.cos(LAT_1_RADIANS) * Math.cos(LAT_2_RADIANS)
            * Math.sin(DELTA_LON_RADIANS * 0.5) * Math.sin(DELTA_LON_RADIANS * 0.5);
    final double C = 2 * Math.atan2(Math.sqrt(A), Math.sqrt(1 - A));

    final double DISTANCE = EARTH_RADIUS * C;

    return DISTANCE;
}
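The haversine variants in this list all share the same shape: build a from squared half-angle sines, then take 2 * atan2(sqrt(a), sqrt(1 - a)) rather than asin(sqrt(a)), which stays numerically stable when a is close to 1 (near-antipodal points). A self-contained sketch, independent of the projects above, with approximate city coordinates:

    public class HaversineDemo {
        public static void main(String[] args) {
            // Berlin and Paris, approximate coordinates in degrees
            double distance = haversineMeters(52.52, 13.405, 48.8566, 2.3522);
            System.out.printf("%.1f km%n", distance / 1000); // roughly 880 km
        }

        static double haversineMeters(double lat1, double lon1, double lat2, double lon2) {
            final double earthRadius = 6_371_000; // mean Earth radius in meters
            double dLat = Math.toRadians(lat2 - lat1);
            double dLon = Math.toRadians(lon2 - lon1);
            double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
                    + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
                    * Math.sin(dLon / 2) * Math.sin(dLon / 2);
            double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
            return earthRadius * c;
        }
    }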