Rugged.java

  1. /* Copyright 2013-2022 CS GROUP
  2.  * Licensed to CS GROUP (CS) under one or more
  3.  * contributor license agreements.  See the NOTICE file distributed with
  4.  * this work for additional information regarding copyright ownership.
  5.  * CS licenses this file to You under the Apache License, Version 2.0
  6.  * (the "License"); you may not use this file except in compliance with
  7.  * the License.  You may obtain a copy of the License at
  8.  *
  9.  *   http://www.apache.org/licenses/LICENSE-2.0
  10.  *
  11.  * Unless required by applicable law or agreed to in writing, software
  12.  * distributed under the License is distributed on an "AS IS" BASIS,
  13.  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14.  * See the License for the specific language governing permissions and
  15.  * limitations under the License.
  16.  */
  17. package org.orekit.rugged.api;

  18. import java.util.Collection;
  19. import java.util.HashMap;
  20. import java.util.Map;

  21. import org.hipparchus.analysis.differentiation.Derivative;
  22. import org.hipparchus.analysis.differentiation.DerivativeStructure;
  23. import org.hipparchus.geometry.euclidean.threed.FieldVector3D;
  24. import org.hipparchus.geometry.euclidean.threed.Vector3D;
  25. import org.hipparchus.util.FastMath;
  26. import org.hipparchus.util.MathArrays;
  27. import org.orekit.bodies.GeodeticPoint;
  28. import org.orekit.frames.Transform;
  29. import org.orekit.rugged.errors.DumpManager;
  30. import org.orekit.rugged.errors.RuggedException;
  31. import org.orekit.rugged.errors.RuggedInternalError;
  32. import org.orekit.rugged.errors.RuggedMessages;
  33. import org.orekit.rugged.intersection.IntersectionAlgorithm;
  34. import org.orekit.rugged.linesensor.LineSensor;
  35. import org.orekit.rugged.linesensor.SensorMeanPlaneCrossing;
  36. import org.orekit.rugged.linesensor.SensorPixel;
  37. import org.orekit.rugged.linesensor.SensorPixelCrossing;
  38. import org.orekit.rugged.refraction.AtmosphericRefraction;
  39. import org.orekit.rugged.utils.DerivativeGenerator;
  40. import org.orekit.rugged.utils.ExtendedEllipsoid;
  41. import org.orekit.rugged.utils.NormalizedGeodeticPoint;
  42. import org.orekit.rugged.utils.SpacecraftToObservedBody;
  43. import org.orekit.time.AbsoluteDate;
  44. import org.orekit.utils.Constants;
  45. import org.orekit.utils.PVCoordinates;

  46. /** Main class of Rugged library API.
  47.  * @see RuggedBuilder
  48.  * @author Luc Maisonobe
  49.  * @author Guylaine Prat
  50.  * @author Jonathan Guinet
  51.  * @author Lucie LabatAllee
  52.  */
  53. public class Rugged {

    /** Accuracy to use in the first stage of inverse location.
     * <p>
     * This accuracy is only used to locate the point within one
     * pixel, hence there is no point in choosing a too small value here.
     * </p>
     */
    private static final double COARSE_INVERSE_LOCATION_ACCURACY = 0.01;

    /** Maximum number of evaluations for crossing algorithms. */
    private static final int MAX_EVAL = 50;

    /** Threshold for pixel convergence in fixed point method
     * (for inverse location with atmospheric refraction correction). */
    private static final double PIXEL_CV_THRESHOLD = 1.e-4;

    /** Threshold for line convergence in fixed point method
     * (for inverse location with atmospheric refraction correction). */
    private static final double LINE_CV_THRESHOLD = 1.e-4;

    /** Reference ellipsoid. */
    private final ExtendedEllipsoid ellipsoid;

    /** Converter between spacecraft and body. */
    private final SpacecraftToObservedBody scToBody;

    /** Sensors, indexed by sensor name. */
    private final Map<String, LineSensor> sensors;

    /** Mean plane crossing finders, indexed by sensor name (cached per min/max line settings). */
    private final Map<String, SensorMeanPlaneCrossing> finders;

    /** DEM intersection algorithm. */
    private final IntersectionAlgorithm algorithm;

    /** Flag for light time correction.
     * Mutable: may be changed after construction (see constructor javadoc). */
    private boolean lightTimeCorrection;

    /** Flag for aberration of light correction.
     * Mutable: may be changed after construction (see constructor javadoc). */
    private boolean aberrationOfLightCorrection;

    /** Rugged name. */
    private final String name;

    /** Atmospheric refraction for line of sight correction (may be null when no correction is applied). */
    private AtmosphericRefraction atmosphericRefraction;

  87.     /** Build a configured instance.
  88.      * <p>
  89.      * By default, the instance performs both light time correction (which refers
  90.      * to ground point motion with respect to inertial frame) and aberration of
  91.      * light correction (which refers to spacecraft proper velocity). Explicit calls
  92.      * to {@link #setLightTimeCorrection(boolean) setLightTimeCorrection}
  93.      * and {@link #setAberrationOfLightCorrection(boolean) setAberrationOfLightCorrection}
  94.      * can be made after construction if these phenomena should not be corrected.
  95.      * </p>
  96.      * @param algorithm algorithm to use for Digital Elevation Model intersection
  97.      * @param ellipsoid f reference ellipsoid
  98.      * @param lightTimeCorrection if true, the light travel time between ground
  99.      * @param aberrationOfLightCorrection if true, the aberration of light
  100.      * is corrected for more accurate location
  101.      * and spacecraft is compensated for more accurate location
  102.      * @param atmosphericRefraction the atmospheric refraction model to be used for more accurate location
  103.      * @param scToBody transforms interpolator
  104.      * @param sensors sensors
  105.      * @param name Rugged name
  106.      */
  107.     Rugged(final IntersectionAlgorithm algorithm, final ExtendedEllipsoid ellipsoid, final boolean lightTimeCorrection,
  108.            final boolean aberrationOfLightCorrection, final AtmosphericRefraction atmosphericRefraction,
  109.            final SpacecraftToObservedBody scToBody, final Collection<LineSensor> sensors, final String name) {


  110.         // space reference
  111.         this.ellipsoid = ellipsoid;

  112.         // orbit/attitude to body converter
  113.         this.scToBody = scToBody;

  114.         // intersection algorithm
  115.         this.algorithm = algorithm;

  116.         // Rugged name
  117.         // @since 2.0
  118.         this.name = name;

  119.         this.sensors = new HashMap<String, LineSensor>();
  120.         for (final LineSensor s : sensors) {
  121.             this.sensors.put(s.getName(), s);
  122.         }
  123.         this.finders = new HashMap<String, SensorMeanPlaneCrossing>();

  124.         this.lightTimeCorrection         = lightTimeCorrection;
  125.         this.aberrationOfLightCorrection = aberrationOfLightCorrection;
  126.         this.atmosphericRefraction       = atmosphericRefraction;
  127.     }

  128.     /** Get the Rugged name.
  129.      * @return Rugged name
  130.      * @since 2.0
  131.      */
  132.     public String getName() {
  133.         return name;
  134.     }

  135.     /** Get the DEM intersection algorithm.
  136.      * @return DEM intersection algorithm
  137.      */
  138.     public IntersectionAlgorithm getAlgorithm() {
  139.         return algorithm;
  140.     }

  141.     /** Get the DEM intersection algorithm identifier.
  142.      * @return DEM intersection algorithm Id
  143.      * @since 2.2
  144.      */
  145.     public AlgorithmId getAlgorithmId() {
  146.         return algorithm.getAlgorithmId();
  147.     }

  148.     /** Get flag for light time correction.
  149.      * @return true if the light time between ground and spacecraft is
  150.      * compensated for more accurate location
  151.      */
  152.     public boolean isLightTimeCorrected() {
  153.         return lightTimeCorrection;
  154.     }

  155.     /** Get flag for aberration of light correction.
  156.      * @return true if the aberration of light time is corrected
  157.      * for more accurate location
  158.      */
  159.     public boolean isAberrationOfLightCorrected() {
  160.         return aberrationOfLightCorrection;
  161.     }

  162.     /** Get the atmospheric refraction model.
  163.      * @return atmospheric refraction model
  164.      * @since 2.0
  165.      */
  166.     public AtmosphericRefraction getRefractionCorrection() {
  167.         return atmosphericRefraction;
  168.     }

  169.     /** Get the line sensors.
  170.      * @return line sensors
  171.      */
  172.     public Collection<LineSensor> getLineSensors() {
  173.         return sensors.values();
  174.     }

  175.     /** Get the start of search time span.
  176.      * @return start of search time span
  177.      */
  178.     public AbsoluteDate getMinDate() {
  179.         return scToBody.getMinDate();
  180.     }

  181.     /** Get the end of search time span.
  182.      * @return end of search time span
  183.      */
  184.     public AbsoluteDate getMaxDate() {
  185.         return scToBody.getMaxDate();
  186.     }

  187.     /** Check if a date is in the supported range.
  188.      * <p>
  189.      * The support range is given by the {@code minDate} and
  190.      * {@code maxDate} construction parameters, with an {@code
  191.      * overshootTolerance} margin accepted (i.e. a date slightly
  192.      * before {@code minDate} or slightly after {@code maxDate}
  193.      * will be considered in range if the overshoot does not exceed
  194.      * the tolerance set at construction).
  195.      * </p>
  196.      * @param date date to check
  197.      * @return true if date is in the supported range
  198.      */
  199.     public boolean isInRange(final AbsoluteDate date) {
  200.         return scToBody.isInRange(date);
  201.     }

  202.     /** Get the observed body ellipsoid.
  203.      * @return observed body ellipsoid
  204.      */
  205.     public ExtendedEllipsoid getEllipsoid() {
  206.         return ellipsoid;
  207.     }

  208.     /** Direct location of a sensor line.
  209.      * @param sensorName name of the line sensor
  210.      * @param lineNumber number of the line to localize on ground
  211.      * @return ground position of all pixels of the specified sensor line
  212.      */
  213.     public GeodeticPoint[] directLocation(final String sensorName, final double lineNumber) {

  214.         final LineSensor   sensor = getLineSensor(sensorName);
  215.         final Vector3D sensorPosition   = sensor.getPosition();
  216.         final AbsoluteDate date   = sensor.getDate(lineNumber);

  217.         // Compute the transform for the date
  218.         // from spacecraft to inertial
  219.         final Transform    scToInert   = scToBody.getScToInertial(date);
  220.         // from inertial to body
  221.         final Transform    inertToBody = scToBody.getInertialToBody(date);

  222.         // Compute spacecraft velocity in inertial frame
  223.         final Vector3D spacecraftVelocity = scToInert.transformPVCoordinates(PVCoordinates.ZERO).getVelocity();
  224.         // Compute sensor position in inertial frame
  225.         // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
  226.         final Vector3D pInert = scToInert.transformPosition(sensorPosition);

  227.         // Compute location of each pixel
  228.         final GeodeticPoint[] gp = new GeodeticPoint[sensor.getNbPixels()];
  229.         for (int i = 0; i < sensor.getNbPixels(); ++i) {

  230.             final Vector3D los = sensor.getLOS(date, i);
  231.             DumpManager.dumpDirectLocation(date, sensorPosition, los, lightTimeCorrection,
  232.                     aberrationOfLightCorrection, atmosphericRefraction != null);

  233.             // compute the line of sight in inertial frame (without correction)
  234.             final Vector3D obsLInert = scToInert.transformVector(los);
  235.             final Vector3D lInert;

  236.             if (aberrationOfLightCorrection) {
  237.                 // apply aberration of light correction on LOS
  238.                 lInert = applyAberrationOfLightCorrection(obsLInert, spacecraftVelocity);
  239.             } else {
  240.                 // don't apply aberration of light correction on LOS
  241.                 lInert = obsLInert;
  242.             }

  243.             if (lightTimeCorrection) {
  244.                 // compute DEM intersection with light time correction
  245.                 // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
  246.                 gp[i] = computeWithLightTimeCorrection(date, sensorPosition, los, scToInert, inertToBody, pInert, lInert);

  247.             } else {
  248.                 // compute DEM intersection without light time correction
  249.                 final Vector3D pBody = inertToBody.transformPosition(pInert);
  250.                 final Vector3D lBody = inertToBody.transformVector(lInert);
  251.                 gp[i] = algorithm.refineIntersection(ellipsoid, pBody, lBody,
  252.                                                      algorithm.intersection(ellipsoid, pBody, lBody));
  253.             }

  254.             // compute with atmospheric refraction correction if necessary
  255.             if (atmosphericRefraction != null && atmosphericRefraction.mustBeComputed()) {

  256.                 // apply atmospheric refraction correction
  257.                 final Vector3D pBody = inertToBody.transformPosition(pInert);
  258.                 final Vector3D lBody = inertToBody.transformVector(lInert);
  259.                 gp[i] = atmosphericRefraction.applyCorrection(pBody, lBody, (NormalizedGeodeticPoint) gp[i], algorithm);
  260.             }
  261.             DumpManager.dumpDirectLocationResult(gp[i]);
  262.         }
  263.         return gp;
  264.     }

  265.     /** Direct location of a single line-of-sight.
  266.      * @param date date of the location
  267.      * @param sensorPosition sensor position in spacecraft frame. For simplicity, due to the size of sensor,
  268.      * we consider each pixel to be at sensor position
  269.      * @param los normalized line-of-sight in spacecraft frame
  270.      * @return ground position of intersection point between specified los and ground
  271.      */
  272.     public GeodeticPoint directLocation(final AbsoluteDate date, final Vector3D sensorPosition, final Vector3D los) {

  273.         DumpManager.dumpDirectLocation(date, sensorPosition, los, lightTimeCorrection, aberrationOfLightCorrection,
  274.                                        atmosphericRefraction != null);

  275.         // Compute the transforms for the date
  276.         // from spacecraft to inertial
  277.         final Transform    scToInert   = scToBody.getScToInertial(date);
  278.         // from inertial to body
  279.         final Transform    inertToBody = scToBody.getInertialToBody(date);

  280.         // Compute spacecraft velocity in inertial frame
  281.         final Vector3D spacecraftVelocity = scToInert.transformPVCoordinates(PVCoordinates.ZERO).getVelocity();
  282.         // Compute sensor position in inertial frame
  283.         // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
  284.         final Vector3D pInert    = scToInert.transformPosition(sensorPosition);

  285.         // Compute the line of sight in inertial frame (without correction)
  286.         final Vector3D obsLInert = scToInert.transformVector(los);

  287.         final Vector3D lInert;
  288.         if (aberrationOfLightCorrection) {
  289.             // apply aberration of light correction on LOS
  290.             lInert = applyAberrationOfLightCorrection(obsLInert, spacecraftVelocity);
  291.         } else {
  292.             // don't apply aberration of light correction on LOS
  293.             lInert = obsLInert;
  294.         }

  295.         // Compute ground location of specified pixel
  296.         final NormalizedGeodeticPoint gp;

  297.         if (lightTimeCorrection) {
  298.             // compute DEM intersection with light time correction
  299.             // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
  300.             gp = computeWithLightTimeCorrection(date, sensorPosition, los, scToInert, inertToBody, pInert, lInert);

  301.         } else {
  302.             // compute DEM intersection without light time correction
  303.             final Vector3D pBody = inertToBody.transformPosition(pInert);
  304.             final Vector3D lBody = inertToBody.transformVector(lInert);
  305.             gp = algorithm.refineIntersection(ellipsoid, pBody, lBody,
  306.                                               algorithm.intersection(ellipsoid, pBody, lBody));
  307.         }

  308.         NormalizedGeodeticPoint result = gp;

  309.         // compute the ground location with atmospheric correction if asked for
  310.         if (atmosphericRefraction != null && atmosphericRefraction.mustBeComputed()) {

  311.             // apply atmospheric refraction correction
  312.             final Vector3D pBody = inertToBody.transformPosition(pInert);
  313.             final Vector3D lBody = inertToBody.transformVector(lInert);
  314.             result = atmosphericRefraction.applyCorrection(pBody, lBody, gp, algorithm);

  315.         } // end test on atmosphericRefraction != null

  316.         DumpManager.dumpDirectLocationResult(result);
  317.         return result;
  318.     }

  319.     /** Find the date at which sensor sees a ground point.
  320.      * <p>
  321.      * This method is a partial {@link #inverseLocation(String, GeodeticPoint, int, int) inverse location} focusing only on date.
  322.      * </p>
  323.      * <p>
  324.      * The point is given only by its latitude and longitude, the elevation is
  325.      * computed from the Digital Elevation Model.
  326.      * </p>
  327.      * <p>
  328.      * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
  329.      * are cached, because they induce costly frames computation. So these settings
  330.      * should not be tuned very finely and changed at each call, but should rather be
  331.      * a few thousand lines wide and refreshed only when needed. If for example an
  332.      * inverse location is roughly estimated to occur near line 53764 (for example
  333.      * using {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine}
  334.      * and {@code maxLine} could be set for example to 50000 and 60000, which would
  335.      * be OK also if next line inverse location is expected to occur near line 53780,
  336.      * and next one ... The setting could be changed for example to 55000 and 65000 when
  337.      * an inverse location is expected to occur after 55750. Of course, these values
  338.      * are only an example and should be adjusted depending on mission needs.
  339.      * </p>
  340.      * @param sensorName name of the line  sensor
  341.      * @param latitude ground point latitude (rad)
  342.      * @param longitude ground point longitude (rad)
  343.      * @param minLine minimum line number
  344.      * @param maxLine maximum line number
  345.      * @return date at which ground point is seen by line sensor
  346.      * @see #inverseLocation(String, double, double, int, int)
  347.      * @see org.orekit.rugged.utils.RoughVisibilityEstimator
  348.      */
  349.     public AbsoluteDate dateLocation(final String sensorName,
  350.                                      final double latitude, final double longitude,
  351.                                      final int minLine, final int maxLine) {

  352.         final GeodeticPoint groundPoint =
  353.                 new GeodeticPoint(latitude, longitude, algorithm.getElevation(latitude, longitude));
  354.         return dateLocation(sensorName, groundPoint, minLine, maxLine);
  355.     }

  356.     /** Find the date at which sensor sees a ground point.
  357.      * <p>
  358.      * This method is a partial {@link #inverseLocation(String,
  359.      * GeodeticPoint, int, int) inverse location} focusing only on date.
  360.      * </p>
  361.      * <p>
  362.      * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
  363.      * are cached, because they induce costly frames computation. So these settings
  364.      * should not be tuned very finely and changed at each call, but should rather be
  365.      * a few thousand lines wide and refreshed only when needed. If for example an
  366.      * inverse location is roughly estimated to occur near line 53764 (for example
  367.      * using {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine}
  368.      * and {@code maxLine} could be set for example to 50000 and 60000, which would
  369.      * be OK also if next line inverse location is expected to occur near line 53780,
  370.      * and next one ... The setting could be changed for example to 55000 and 65000 when
  371.      * an inverse location is expected to occur after 55750. Of course, these values
  372.      * are only an example and should be adjusted depending on mission needs.
  373.      * </p>
  374.      * @param sensorName name of the line sensor
  375.      * @param point point to localize
  376.      * @param minLine minimum line number
  377.      * @param maxLine maximum line number
  378.      * @return date at which ground point is seen by line sensor
  379.      * @see #inverseLocation(String, GeodeticPoint, int, int)
  380.      * @see org.orekit.rugged.utils.RoughVisibilityEstimator
  381.      */
  382.     public AbsoluteDate dateLocation(final String sensorName, final GeodeticPoint point,
  383.                                      final int minLine, final int maxLine) {

  384.         final LineSensor sensor = getLineSensor(sensorName);
  385.         final SensorMeanPlaneCrossing planeCrossing = getPlaneCrossing(sensorName, minLine, maxLine);

  386.         // find approximately the sensor line at which ground point crosses sensor mean plane
  387.         final Vector3D   target = ellipsoid.transform(point);
  388.         final SensorMeanPlaneCrossing.CrossingResult crossingResult = planeCrossing.find(target);
  389.         if (crossingResult == null) {
  390.             // target is out of search interval
  391.             return null;
  392.         } else {
  393.             return sensor.getDate(crossingResult.getLine());
  394.         }
  395.     }

  396.     /** Inverse location of a ground point.
  397.      * <p>
  398.      * The point is given only by its latitude and longitude, the elevation is
  399.      * computed from the Digital Elevation Model.
  400.      * </p>
  401.      * <p>
  402.      * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
  403.      * are cached, because they induce costly frames computation. So these settings
  404.      * should not be tuned very finely and changed at each call, but should rather be
  405.      * a few thousand lines wide and refreshed only when needed. If for example an
  406.      * inverse location is roughly estimated to occur near line 53764 (for example
  407.      * using {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine}
  408.      * and {@code maxLine} could be set for example to 50000 and 60000, which would
  409.      * be OK also if next line inverse location is expected to occur near line 53780,
  410.      * and next one ... The setting could be changed for example to 55000 and 65000 when
  411.      * an inverse location is expected to occur after 55750. Of course, these values
  412.      * are only an example and should be adjusted depending on mission needs.
  413.      * </p>
  414.      * @param sensorName name of the line  sensor
  415.      * @param latitude ground point latitude (rad)
  416.      * @param longitude ground point longitude (rad)
  417.      * @param minLine minimum line number
  418.      * @param maxLine maximum line number
  419.      * @return sensor pixel seeing ground point, or null if ground point cannot
  420.      * be seen between the prescribed line numbers
  421.      * @see org.orekit.rugged.utils.RoughVisibilityEstimator
  422.      */
  423.     public SensorPixel inverseLocation(final String sensorName,
  424.                                        final double latitude, final double longitude,
  425.                                        final int minLine,  final int maxLine) {

  426.         final GeodeticPoint groundPoint = new GeodeticPoint(latitude, longitude, algorithm.getElevation(latitude, longitude));
  427.         return inverseLocation(sensorName, groundPoint, minLine, maxLine);
  428.     }

  429.     /** Inverse location of a point.
  430.      * <p>
  431.      * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
  432.      * are cached, because they induce costly frames computation. So these settings
  433.      * should not be tuned very finely and changed at each call, but should rather be
  434.      * a few thousand lines wide and refreshed only when needed. If for example an
  435.      * inverse location is roughly estimated to occur near line 53764 (for example
  436.      * using {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine}
  437.      * and {@code maxLine} could be set for example to 50000 and 60000, which would
  438.      * be OK also if next line inverse location is expected to occur near line 53780,
  439.      * and next one ... The setting could be changed for example to 55000 and 65000 when
  440.      * an inverse location is expected to occur after 55750. Of course, these values
  441.      * are only an example and should be adjusted depending on mission needs.
  442.      * </p>
  443.      * @param sensorName name of the line sensor
  444.      * @param point geodetic point to localize
  445.      * @param minLine minimum line number where the search will be performed
  446.      * @param maxLine maximum line number where the search will be performed
  447.      * @return sensor pixel seeing point, or null if point cannot be seen between the
  448.      * prescribed line numbers
  449.      * @see #dateLocation(String, GeodeticPoint, int, int)
  450.      * @see org.orekit.rugged.utils.RoughVisibilityEstimator
  451.      */
  452.     public SensorPixel inverseLocation(final String sensorName, final GeodeticPoint point,
  453.                                        final int minLine, final int maxLine) {

  454.         final LineSensor sensor = getLineSensor(sensorName);
  455.         DumpManager.dumpInverseLocation(sensor, point, ellipsoid, minLine, maxLine, lightTimeCorrection,
  456.                                         aberrationOfLightCorrection, atmosphericRefraction != null);

  457.         final SensorMeanPlaneCrossing planeCrossing = getPlaneCrossing(sensorName, minLine, maxLine);
  458.         DumpManager.dumpSensorMeanPlane(planeCrossing);

  459.         if (atmosphericRefraction == null || !atmosphericRefraction.mustBeComputed()) {
  460.             // Compute inverse location WITHOUT atmospheric refraction
  461.             return findSensorPixelWithoutAtmosphere(point, sensor, planeCrossing);
  462.         } else {
  463.             // Compute inverse location WITH atmospheric refraction
  464.             return findSensorPixelWithAtmosphere(point, sensor, minLine, maxLine);
  465.         }
  466.     }

  467.     /** Apply aberration of light correction (for direct location).
  468.      * @param spacecraftVelocity spacecraft velocity in inertial frame
  469.      * @param obsLInert line of sight in inertial frame
  470.      * @return line of sight with aberration of light correction
  471.      */
  472.     private Vector3D applyAberrationOfLightCorrection(final Vector3D obsLInert, final Vector3D spacecraftVelocity) {

  473.         // As the spacecraft velocity is small with respect to speed of light,
  474.         // we use classical velocity addition and not relativistic velocity addition
  475.         // we look for a positive k such that: c * lInert + vsat = k * obsLInert
  476.         // with lInert normalized
  477.         final double a = obsLInert.getNormSq();
  478.         final double b = -Vector3D.dotProduct(obsLInert, spacecraftVelocity);
  479.         final double c = spacecraftVelocity.getNormSq() - Constants.SPEED_OF_LIGHT * Constants.SPEED_OF_LIGHT;

  480.         // a > 0 and c < 0
  481.         final double s = FastMath.sqrt(b * b - a * c);

  482.         // Only the k > 0 are kept as solutions (the solutions: -(s+b)/a and c/(s-b) are useless)
  483.         final double k = (b > 0) ? -c / (s + b) : (s - b) / a;

  484.         final Vector3D lInert = new Vector3D( k / Constants.SPEED_OF_LIGHT, obsLInert, -1.0 / Constants.SPEED_OF_LIGHT, spacecraftVelocity);
  485.         return lInert;
  486.     }

    /** Compute the DEM intersection with light time correction.
     * <p>
     * The light time correction is performed with two fixed-point iterations:
     * a first travel time estimate is derived from an ellipsoid-level ground
     * point, then refined once using the DEM intersection obtained with the
     * first estimate. Statement order matters: each step feeds the next.
     * </p>
     * @param date date of the los
     * @param sensorPosition sensor position in spacecraft frame
     * @param los los in spacecraft frame
     * @param scToInert transform for the date from spacecraft to inertial
     * @param inertToBody transform for the date from inertial to body
     * @param pInert sensor position in inertial frame
     * @param lInert line of sight in inertial frame
     * @return geodetic point with light time correction
     */
    private NormalizedGeodeticPoint computeWithLightTimeCorrection(final AbsoluteDate date,
                                                                   final Vector3D sensorPosition, final Vector3D los,
                                                                   final Transform scToInert, final Transform inertToBody,
                                                                   final Vector3D pInert, final Vector3D lInert) {

        // compute the approximate transform between spacecraft and observed body
        final Transform approximate = new Transform(date, scToInert, inertToBody);

        // sensor line of sight and position expressed in the (approximate) body frame
        final Vector3D  sL       = approximate.transformVector(los);
        final Vector3D  sP       = approximate.transformPosition(sensorPosition);

        // first pass: travel time from an ellipsoid-level (altitude 0) ground point,
        // then DEM intersection using the body transform shifted back by that travel time
        final Vector3D  eP1      = ellipsoid.transform(ellipsoid.pointOnGround(sP, sL, 0.0));
        final double    deltaT1  = eP1.distance(sP) / Constants.SPEED_OF_LIGHT;
        final Transform shifted1 = inertToBody.shiftedBy(-deltaT1);
        final NormalizedGeodeticPoint gp1  = algorithm.intersection(ellipsoid,
                                                                    shifted1.transformPosition(pInert),
                                                                    shifted1.transformVector(lInert));

        // second pass: travel time recomputed from the first DEM intersection,
        // refined intersection seeded with the first-pass result
        final Vector3D  eP2      = ellipsoid.transform(gp1);
        final double    deltaT2  = eP2.distance(sP) / Constants.SPEED_OF_LIGHT;
        final Transform shifted2 = inertToBody.shiftedBy(-deltaT2);
        return algorithm.refineIntersection(ellipsoid,
                                             shifted2.transformPosition(pInert),
                                             shifted2.transformVector(lInert),
                                             gp1);
    }

  519.     /**
  520.      * Find the sensor pixel WITHOUT atmospheric refraction correction.
  521.      * @param point geodetic point to localize
  522.      * @param sensor the line sensor
  523.      * @param planeCrossing the sensor mean plane crossing
  524.      * @return the sensor pixel crossing or null if cannot be found
  525.      * @since 2.1
  526.      */
  527.     private SensorPixel findSensorPixelWithoutAtmosphere(final GeodeticPoint point,
  528.                                                          final LineSensor sensor, final SensorMeanPlaneCrossing planeCrossing) {

  529.         // find approximately the sensor line at which ground point crosses sensor mean plane
  530.         final Vector3D target = ellipsoid.transform(point);
  531.         final SensorMeanPlaneCrossing.CrossingResult crossingResult = planeCrossing.find(target);
  532.         if (crossingResult == null) {
  533.             // target is out of search interval
  534.             return null;
  535.         }

  536.         // find approximately the pixel along this sensor line
  537.         final SensorPixelCrossing pixelCrossing =
  538.                 new SensorPixelCrossing(sensor, planeCrossing.getMeanPlaneNormal(),
  539.                                         crossingResult.getTargetDirection(),
  540.                                         MAX_EVAL, COARSE_INVERSE_LOCATION_ACCURACY);
  541.         final double coarsePixel = pixelCrossing.locatePixel(crossingResult.getDate());
  542.         if (Double.isNaN(coarsePixel)) {
  543.             // target is out of search interval
  544.             return null;
  545.         }

  546.         // fix line by considering the closest pixel exact position and line-of-sight
  547.         // (this pixel might point towards a direction slightly above or below the mean sensor plane)
  548.         final int      lowIndex       = FastMath.max(0, FastMath.min(sensor.getNbPixels() - 2, (int) FastMath.floor(coarsePixel)));
  549.         final Vector3D lowLOS         = sensor.getLOS(crossingResult.getDate(), lowIndex);
  550.         final Vector3D highLOS        = sensor.getLOS(crossingResult.getDate(), lowIndex + 1);
  551.         final Vector3D localZ         = Vector3D.crossProduct(lowLOS, highLOS).normalize();
  552.         final double   beta           = FastMath.acos(Vector3D.dotProduct(crossingResult.getTargetDirection(), localZ));
  553.         final double   s              = Vector3D.dotProduct(crossingResult.getTargetDirectionDerivative(), localZ);
  554.         final double   betaDer        = -s / FastMath.sqrt(1 - s * s);
  555.         final double   deltaL         = (0.5 * FastMath.PI - beta) / betaDer;
  556.         final double   fixedLine      = crossingResult.getLine() + deltaL;
  557.         final Vector3D fixedDirection = new Vector3D(1, crossingResult.getTargetDirection(),
  558.                                                      deltaL, crossingResult.getTargetDirectionDerivative()).normalize();

  559.         // fix neighbouring pixels
  560.         final AbsoluteDate fixedDate  = sensor.getDate(fixedLine);
  561.         final Vector3D fixedX         = sensor.getLOS(fixedDate, lowIndex);
  562.         final Vector3D fixedZ         = Vector3D.crossProduct(fixedX, sensor.getLOS(fixedDate, lowIndex + 1));
  563.         final Vector3D fixedY         = Vector3D.crossProduct(fixedZ, fixedX);

  564.         // fix pixel
  565.         final double pixelWidth = FastMath.atan2(Vector3D.dotProduct(highLOS,        fixedY),
  566.                                                  Vector3D.dotProduct(highLOS,        fixedX));
  567.         final double alpha      = FastMath.atan2(Vector3D.dotProduct(fixedDirection, fixedY),
  568.                                                  Vector3D.dotProduct(fixedDirection, fixedX));
  569.         final double fixedPixel = lowIndex + alpha / pixelWidth;

  570.         final SensorPixel result = new SensorPixel(fixedLine, fixedPixel);
  571.         DumpManager.dumpInverseLocationResult(result);

  572.         return result;
  573.     }

    /**
     * Find the sensor pixel WITH atmospheric refraction correction.
     * <p>
     * There is no direct way to invert the refraction, so the method builds (once per
     * sensor/line-range context) an interpolation grid of corrections, then iterates a
     * fixed point scheme on the refraction-free inverse location.
     * @param point geodetic point to localize
     * @param sensor the line sensor
     * @param minLine minimum line number where the search will be performed
     * @param maxLine maximum line number where the search will be performed
     * @return the sensor pixel crossing or null if cannot be found
     * @since 2.1
     */
    private SensorPixel findSensorPixelWithAtmosphere(final GeodeticPoint point,
                                                      final LineSensor sensor, final int minLine, final int maxLine) {

        // TBN: there is no direct way to compute the inverse location.
        // The method is based on an interpolation grid associated with the fixed point method

        final String sensorName = sensor.getName();

        // Compute a correction grid (at sensor level)
        // ===========================================
        // Need to be computed only once for a given sensor (with the same minLine and maxLine)
        if (atmosphericRefraction.getBifPixel() == null || atmosphericRefraction.getBifLine() == null || // lazy evaluation
            !atmosphericRefraction.isSameContext(sensorName, minLine, maxLine)) { // Must be recomputed if the context changed

            // Definition of a regular grid (at sensor level)
            atmosphericRefraction.configureCorrectionGrid(sensor, minLine, maxLine);

            // Get the grid nodes
            final int nbPixelGrid = atmosphericRefraction.getComputationParameters().getNbPixelGrid();
            final int nbLineGrid = atmosphericRefraction.getComputationParameters().getNbLineGrid();
            final double[] pixelGrid = atmosphericRefraction.getComputationParameters().getUgrid();
            final double[] lineGrid = atmosphericRefraction.getComputationParameters().getVgrid();

            // Computation, for the sensor grid, of the direct location WITH atmospheric refraction
            // (full computation)
            atmosphericRefraction.reactivateComputation();
            final GeodeticPoint[][] geodeticGridWithAtmosphere = computeDirectLocOnGridWithAtmosphere(pixelGrid, lineGrid, sensor);
            // pixelGrid and lineGrid are the nodes where the direct loc is computed WITH atmosphere

            // Computation of the inverse location WITHOUT atmospheric refraction for the grid nodes
            // (refraction is deactivated so the round trip isolates the correction to apply)
            atmosphericRefraction.deactivateComputation();
            final SensorPixel[][] sensorPixelGridInverseWithout = computeInverseLocOnGridWithoutAtmosphere(geodeticGridWithAtmosphere,
                                                            nbPixelGrid, nbLineGrid, sensor, minLine, maxLine);
            atmosphericRefraction.reactivateComputation();

            // Compute the grid correction functions (for pixel and line)
            atmosphericRefraction.computeGridCorrectionFunctions(sensorPixelGridInverseWithout);
        }

        // Fixed point method
        // ==================
        // Initialization
        // --------------
        // Deactivate the dump because no need to keep intermediate computations of inverse loc (can be regenerate)
        final Boolean wasSuspended = DumpManager.suspend();

        // compute the sensor pixel on the desired ground point WITHOUT atmosphere
        atmosphericRefraction.deactivateComputation();
        final SensorPixel sp0 = inverseLocation(sensorName, point, minLine, maxLine);
        atmosphericRefraction.reactivateComputation();
        // Reactivate the dump
        DumpManager.resume(wasSuspended);

        if (sp0 == null) {
            // In order for the dump to end nicely
            DumpManager.endNicely();

            // Impossible to find the sensor pixel in the given range lines (without atmosphere)
            throw new RuggedException(RuggedMessages.SENSOR_PIXEL_NOT_FOUND_IN_RANGE_LINES, minLine, maxLine);
        }

        // set up the starting point of the fixed point method
        final double pixel0 = sp0.getPixelNumber();
        final double line0 = sp0.getLineNumber();
        // Needed data for the dump
        sensor.dumpRate(line0);

        // Apply fixed point method until convergence in pixel and line
        // ------------------------------------------------------------
        // compute the first (pixel, line) value:
        // initial sensor pixel value + correction due to atmosphere at this same sensor pixel
        double corrPixelPrevious =  pixel0 + atmosphericRefraction.getBifPixel().value(pixel0, line0);
        double corrLinePrevious = line0 + atmosphericRefraction.getBifLine().value(pixel0, line0);

        double deltaCorrPixel = Double.POSITIVE_INFINITY;
        double deltaCorrLine = Double.POSITIVE_INFINITY;

        // NOTE(review): with '&&' the iteration stops as soon as EITHER delta drops below its
        // threshold; if convergence is required in BOTH pixel and line, '||' would be needed —
        // confirm the intended convergence criterion before changing it.
        while (deltaCorrPixel > PIXEL_CV_THRESHOLD && deltaCorrLine > LINE_CV_THRESHOLD) {
            // Compute the current (pixel, line) value =
            // initial sensor pixel value + correction due to atmosphere on the previous sensor pixel
            final double corrPixelCurrent = pixel0 + atmosphericRefraction.getBifPixel().value(corrPixelPrevious, corrLinePrevious);
            final double corrLineCurrent = line0 + atmosphericRefraction.getBifLine().value(corrPixelPrevious, corrLinePrevious);

            // Compute the delta in pixel and line to check the convergence
            deltaCorrPixel = FastMath.abs(corrPixelCurrent - corrPixelPrevious);
            deltaCorrLine = FastMath.abs(corrLineCurrent - corrLinePrevious);

            // Store the (pixel, line) for next loop
            corrPixelPrevious = corrPixelCurrent;
            corrLinePrevious = corrLineCurrent;
        }
        // The sensor pixel is found ! (SensorPixel constructor takes line first, then pixel)
        final SensorPixel sensorPixelWithAtmosphere = new SensorPixel(corrLinePrevious, corrPixelPrevious);

        // Dump the found sensorPixel
        DumpManager.dumpInverseLocationResult(sensorPixelWithAtmosphere);

        return sensorPixelWithAtmosphere;
    }

  662.     /** Compute the inverse location WITHOUT atmospheric refraction for the geodetic points
  663.      * associated to the sensor grid nodes.
  664.      * @param groundGridWithAtmosphere ground grid found for sensor grid nodes with atmosphere
  665.      * @param nbPixelGrid size of the pixel grid
  666.      * @param nbLineGrid size of the line grid
  667.      * @param sensor the line sensor
  668.      * @param minLine minimum line number where the search will be performed
  669.      * @param maxLine maximum line number where the search will be performed
  670.      * @return the sensor pixel grid computed without atmosphere
  671.      * @since 2.1
  672.      */
  673.     private SensorPixel[][] computeInverseLocOnGridWithoutAtmosphere(final GeodeticPoint[][] groundGridWithAtmosphere,
  674.                                                                      final int nbPixelGrid, final int nbLineGrid,
  675.                                                                      final LineSensor sensor, final int minLine, final int maxLine) {

  676.         // Deactivate the dump because no need to keep intermediate computations of inverse loc (can be regenerate)
  677.         final Boolean wasSuspended = DumpManager.suspend();

  678.         final SensorPixel[][] sensorPixelGrid = new SensorPixel[nbPixelGrid][nbLineGrid];
  679.         final String sensorName = sensor.getName();

  680.         for (int uIndex = 0; uIndex < nbPixelGrid; uIndex++) {
  681.             for (int vIndex = 0; vIndex < nbLineGrid; vIndex++) {

  682.                 // Check if the geodetic point exists
  683.                 if (groundGridWithAtmosphere[uIndex][vIndex] != null) {
  684.                     final GeodeticPoint groundPoint = groundGridWithAtmosphere[uIndex][vIndex];
  685.                     final double currentLat = groundPoint.getLatitude();
  686.                     final double currentLon = groundPoint.getLongitude();

  687.                     try {
  688.                         // Compute the inverse location for the current node
  689.                         sensorPixelGrid[uIndex][vIndex] = inverseLocation(sensorName, currentLat, currentLon, minLine, maxLine);

  690.                     } catch (RuggedException re) { // This should never happen
  691.                         // In order for the dump to end nicely
  692.                         DumpManager.endNicely();
  693.                         throw new RuggedInternalError(re);
  694.                     }

  695.                     // Check if the pixel is inside the sensor (with a margin) OR if the inverse location was impossible (null result)
  696.                     if (!pixelIsInside(sensorPixelGrid[uIndex][vIndex], sensor)) {

  697.                         // In order for the dump to end nicely
  698.                         DumpManager.endNicely();

  699.                         if (sensorPixelGrid[uIndex][vIndex] == null) {
  700.                             // Impossible to find the sensor pixel in the given range lines
  701.                             throw new RuggedException(RuggedMessages.SENSOR_PIXEL_NOT_FOUND_IN_RANGE_LINES, minLine, maxLine);
  702.                         } else {
  703.                             // Impossible to find the sensor pixel
  704.                             final double invLocationMargin = atmosphericRefraction.getComputationParameters().getInverseLocMargin();
  705.                             throw new RuggedException(RuggedMessages.SENSOR_PIXEL_NOT_FOUND_IN_PIXELS_LINE, sensorPixelGrid[uIndex][vIndex].getPixelNumber(),
  706.                                                       -invLocationMargin, invLocationMargin + sensor.getNbPixels() - 1, invLocationMargin);
  707.                         }
  708.                     }

  709.                 } else { // groundGrid[uIndex][vIndex] == null: impossible to compute inverse loc because ground point not defined

  710.                     sensorPixelGrid[uIndex][vIndex] = null;

  711.                 } // groundGrid[uIndex][vIndex] != null
  712.             } // end loop vIndex
  713.         } // end loop uIndex

  714.         // Reactivate the dump
  715.         DumpManager.resume(wasSuspended);

  716.         // The sensor grid computed WITHOUT atmospheric refraction correction
  717.         return sensorPixelGrid;
  718.     }

  719.     /** Check if pixel is inside the sensor with a margin.
  720.      * @param pixel pixel to check (may be null if not found)
  721.      * @param sensor the line sensor
  722.      * @return true if the pixel is inside the sensor
  723.      */
  724.     private boolean pixelIsInside(final SensorPixel pixel, final LineSensor sensor) {
  725.         // Get the inverse location margin
  726.         final double invLocationMargin = atmosphericRefraction.getComputationParameters().getInverseLocMargin();

  727.         return pixel != null && pixel.getPixelNumber() >= -invLocationMargin && pixel.getPixelNumber() < invLocationMargin + sensor.getNbPixels() - 1;
  728.     }

  729.     /** Computation, for the sensor pixels grid, of the direct location WITH atmospheric refraction.
  730.      * (full computation)
  731.      * @param pixelGrid the pixel grid
  732.      * @param lineGrid the line grid
  733.      * @param sensor the line sensor
  734.      * @return the ground grid computed with atmosphere
  735.      * @since 2.1
  736.      */
  737.     private GeodeticPoint[][] computeDirectLocOnGridWithAtmosphere(final double[] pixelGrid, final double[] lineGrid,
  738.                                                                    final LineSensor sensor) {

  739.         // Deactivate the dump because no need to keep intermediate computations of direct loc (can be regenerate)
  740.         final Boolean wasSuspended = DumpManager.suspend();

  741.         final int nbPixelGrid = pixelGrid.length;
  742.         final int nbLineGrid = lineGrid.length;
  743.         final GeodeticPoint[][] groundGridWithAtmosphere = new GeodeticPoint[nbPixelGrid][nbLineGrid];
  744.         final Vector3D sensorPosition = sensor.getPosition();

  745.         for (int uIndex = 0; uIndex < nbPixelGrid; uIndex++) {
  746.             final double pixelNumber = pixelGrid[uIndex];
  747.             for (int vIndex = 0; vIndex < nbLineGrid; vIndex++) {
  748.                 final double lineNumber = lineGrid[vIndex];
  749.                 final AbsoluteDate date = sensor.getDate(lineNumber);
  750.                 final Vector3D los = sensor.getLOS(date, pixelNumber);
  751.                 try {
  752.                     // Compute the direct location for the current node
  753.                     groundGridWithAtmosphere[uIndex][vIndex] = directLocation(date, sensorPosition, los);

  754.                 } catch (RuggedException re) { // This should never happen
  755.                     // In order for the dump to end nicely
  756.                     DumpManager.endNicely();
  757.                     throw new RuggedInternalError(re);
  758.                 }
  759.             } // end loop vIndex
  760.         } // end loop uIndex

  761.         // Reactivate the dump
  762.         DumpManager.resume(wasSuspended);

  763.         // The ground grid computed WITH atmospheric refraction correction
  764.         return groundGridWithAtmosphere;
  765.     }

  766.     /** Compute distances between two line sensors.
  767.      * @param sensorA line sensor A
  768.      * @param dateA current date for sensor A
  769.      * @param pixelA pixel index for sensor A
  770.      * @param scToBodyA spacecraft to body transform for sensor A
  771.      * @param sensorB line sensor B
  772.      * @param dateB current date for sensor B
  773.      * @param pixelB pixel index for sensor B
  774.      * @return distances computed between LOS and to the ground
  775.      * @since 2.0
  776.      */
  777.     public double[] distanceBetweenLOS(final LineSensor sensorA, final AbsoluteDate dateA, final double pixelA,
  778.                                        final SpacecraftToObservedBody scToBodyA,
  779.                                        final LineSensor sensorB, final AbsoluteDate dateB, final double pixelB) {

  780.         // Compute the approximate transform between spacecraft and observed body
  781.         // from Rugged instance A
  782.         final Transform scToInertA = scToBodyA.getScToInertial(dateA);
  783.         final Transform inertToBodyA = scToBodyA.getInertialToBody(dateA);
  784.         final Transform transformScToBodyA = new Transform(dateA, scToInertA, inertToBodyA);

  785.         // from (current) Rugged instance B
  786.         final Transform scToInertB = scToBody.getScToInertial(dateB);
  787.         final Transform inertToBodyB = scToBody.getInertialToBody(dateB);
  788.         final Transform transformScToBodyB = new Transform(dateB, scToInertB, inertToBodyB);

  789.         // Get sensors LOS into local frame
  790.         final Vector3D vALocal = sensorA.getLOS(dateA, pixelA);
  791.         final Vector3D vBLocal = sensorB.getLOS(dateB, pixelB);

  792.         // Position of sensors into local frame
  793.         final Vector3D sALocal = sensorA.getPosition(); // S_a : sensorA 's position
  794.         final Vector3D sBLocal = sensorB.getPosition(); // S_b : sensorB 's position

  795.         // Get sensors position and LOS into body frame
  796.         final Vector3D sA = transformScToBodyA.transformPosition(sALocal); // S_a : sensorA's position
  797.         final Vector3D vA = transformScToBodyA.transformVector(vALocal);   // V_a : line of sight's vectorA
  798.         final Vector3D sB = transformScToBodyB.transformPosition(sBLocal); // S_b : sensorB's position
  799.         final Vector3D vB = transformScToBodyB.transformVector(vBLocal);   // V_b : line of sight's vectorB

  800.         // Compute distance
  801.         final Vector3D vBase = sB.subtract(sA);            // S_b - S_a
  802.         final double svA = Vector3D.dotProduct(vBase, vA); // SV_a = (S_b - S_a).V_a
  803.         final double svB = Vector3D.dotProduct(vBase, vB); // SV_b = (S_b - S_a).V_b

  804.         final double vAvB = Vector3D.dotProduct(vA, vB); // V_a.V_b

  805.         // Compute lambda_b = (SV_a * V_a.V_b - SV_b) / (1 - (V_a.V_b)²)
  806.         final double lambdaB = (svA * vAvB - svB) / (1 - vAvB * vAvB);

  807.         // Compute lambda_a = SV_a + lambdaB * V_a.V_b
  808.         final double lambdaA = svA + lambdaB * vAvB;

  809.         // Compute vector M_a = S_a + lambda_a * V_a
  810.         final Vector3D mA = sA.add(vA.scalarMultiply(lambdaA));
  811.         // Compute vector M_b = S_b + lambda_b * V_b
  812.         final Vector3D mB = sB.add(vB.scalarMultiply(lambdaB));

  813.         // Compute vector M_a -> M_B for which distance between LOS is minimum
  814.         final Vector3D vDistanceMin = mB.subtract(mA); // M_b - M_a

  815.         // Compute vector from mid point of vector M_a -> M_B to the ground (corresponds to minimum elevation)
  816.         final Vector3D midPoint = (mB.add(mA)).scalarMultiply(0.5);

  817.         // Get the euclidean norms to compute the minimum distances: between LOS and to the ground
  818.         final double[] distances = {vDistanceMin.getNorm(), midPoint.getNorm()};

  819.         return distances;
  820.     }

    /** Compute distances between two line sensors with derivatives.
     * <p>
     * Same geometry as {@code distanceBetweenLOS}, but all quantities are carried as
     * {@link Derivative} instances so the distances' partial derivatives are propagated.
     * @param <T> derivative type
     * @param sensorA line sensor A
     * @param dateA current date for sensor A
     * @param pixelA pixel index for sensor A
     * @param scToBodyA spacecraftToBody transform for sensor A
     * @param sensorB line sensor B
     * @param dateB current date for sensor B
     * @param pixelB pixel index for sensor B
     * @param generator generator to use for building {@link DerivativeStructure} instances
     * @return distances computed, with derivatives, between LOS and to the ground
     * @see #distanceBetweenLOS(LineSensor, AbsoluteDate, double, SpacecraftToObservedBody, LineSensor, AbsoluteDate, double)
     */
    public <T extends Derivative<T>> T[] distanceBetweenLOSderivatives(
                                 final LineSensor sensorA, final AbsoluteDate dateA, final double pixelA,
                                 final SpacecraftToObservedBody scToBodyA,
                                 final LineSensor sensorB, final AbsoluteDate dateB, final double pixelB,
                                 final DerivativeGenerator<T> generator) {

        // Compute the approximate transforms between spacecraft and observed body
        // from Rugged instance A
        final Transform scToInertA = scToBodyA.getScToInertial(dateA);
        final Transform inertToBodyA = scToBodyA.getInertialToBody(dateA);
        final Transform transformScToBodyA = new Transform(dateA, scToInertA, inertToBodyA);

        // from (current) Rugged instance B
        final Transform scToInertB = scToBody.getScToInertial(dateB);
        final Transform inertToBodyB = scToBody.getInertialToBody(dateB);
        final Transform transformScToBodyB = new Transform(dateB, scToInertB, inertToBodyB);

        // Get sensors LOS into local frame (as field vectors carrying derivatives)
        final FieldVector3D<T> vALocal = sensorA.getLOSDerivatives(dateA, pixelA, generator);
        final FieldVector3D<T> vBLocal = sensorB.getLOSDerivatives(dateB, pixelB, generator);

        // Get sensors LOS into body frame
        final FieldVector3D<T> vA = transformScToBodyA.transformVector(vALocal); // V_a : line of sight's vectorA
        final FieldVector3D<T> vB = transformScToBodyB.transformVector(vBLocal); // V_b : line of sight's vectorB

        // Position of sensors into local frame (plain vectors, no derivatives yet)
        final Vector3D sAtmp = sensorA.getPosition();
        final Vector3D sBtmp = sensorB.getPosition();

        // Trick to obtain the field's "one": a normalized vector dotted with itself is 1,
        // but carried as a T so it can scale the plain position vectors below
        final T scaleFactor = FieldVector3D.dotProduct(vA.normalize(), vA.normalize()); // V_a.V_a=1

        // Build a vector from the position and a scale factor (equals to 1).
        // The vector built will be scaleFactor * sAtmp for example.
        final FieldVector3D<T> sALocal = new FieldVector3D<>(scaleFactor, sAtmp);
        final FieldVector3D<T> sBLocal = new FieldVector3D<>(scaleFactor, sBtmp);

        // Get sensors position into body frame
        final FieldVector3D<T> sA = transformScToBodyA.transformPosition(sALocal); // S_a : sensorA 's position
        final FieldVector3D<T> sB = transformScToBodyB.transformPosition(sBLocal); // S_b : sensorB 's position

        // Projections of the base vector S_b - S_a on each line of sight
        final FieldVector3D<T> vBase = sB.subtract(sA);    // S_b - S_a
        final T svA = FieldVector3D.dotProduct(vBase, vA); // SV_a = (S_b - S_a).V_a
        final T svB = FieldVector3D.dotProduct(vBase, vB); // SV_b = (S_b - S_a).V_b

        final T vAvB = FieldVector3D.dotProduct(vA, vB); // V_a.V_b

        // Compute lambda_b = (SV_a * V_a.V_b - SV_b) / (1 - (V_a.V_b)²)
        // (the denominator is written as -((V_a.V_b)² - 1), which is the same quantity)
        final T lambdaB = (svA.multiply(vAvB).subtract(svB)).divide(vAvB.multiply(vAvB).subtract(1).negate());

        // Compute lambda_a = SV_a + lambdaB * V_a.V_b
        final T lambdaA = vAvB.multiply(lambdaB).add(svA);

        // Compute vector M_a:
        final FieldVector3D<T> mA = sA.add(vA.scalarMultiply(lambdaA)); // M_a = S_a + lambda_a * V_a
        // Compute vector M_b
        final FieldVector3D<T> mB = sB.add(vB.scalarMultiply(lambdaB)); // M_b = S_b + lambda_b * V_b

        // Compute vector M_a -> M_B for which distance between LOS is minimum
        final FieldVector3D<T> vDistanceMin = mB.subtract(mA); // M_b - M_a

        // Compute vector from mid point of vector M_a -> M_B to the ground (corresponds to minimum elevation)
        final FieldVector3D<T> midPoint = (mB.add(mA)).scalarMultiply(0.5);

        // Get the euclidean norms to compute the minimum distances:
        // between LOS
        final T dMin = vDistanceMin.getNorm();
        // to the ground
        final T dCentralBody = midPoint.getNorm();

        // return [distance between LOS, distance to the ground], with derivatives
        final T[] ret = MathArrays.buildArray(dMin.getField(), 2);
        ret[0] = dMin;
        ret[1] = dCentralBody;
        return ret;

    }


  892.     /** Get the mean plane crossing finder for a sensor.
  893.      * @param sensorName name of the line sensor
  894.      * @param minLine minimum line number
  895.      * @param maxLine maximum line number
  896.      * @return mean plane crossing finder
  897.      */
  898.     private SensorMeanPlaneCrossing getPlaneCrossing(final String sensorName,
  899.                                                      final int minLine, final int maxLine) {

  900.         final LineSensor sensor = getLineSensor(sensorName);
  901.         SensorMeanPlaneCrossing planeCrossing = finders.get(sensorName);
  902.         if (planeCrossing == null ||
  903.             planeCrossing.getMinLine() != minLine ||
  904.             planeCrossing.getMaxLine() != maxLine) {

  905.             // create a new finder for the specified sensor and range
  906.             planeCrossing = new SensorMeanPlaneCrossing(sensor, scToBody, minLine, maxLine,
  907.                                                         lightTimeCorrection, aberrationOfLightCorrection,
  908.                                                         MAX_EVAL, COARSE_INVERSE_LOCATION_ACCURACY);

  909.             // store the finder, in order to reuse it
  910.             // (and save some computation done in its constructor)
  911.             setPlaneCrossing(planeCrossing);

  912.         }

  913.         return planeCrossing;
  914.     }

  915.     /** Set the mean plane crossing finder for a sensor.
  916.      * @param planeCrossing plane crossing finder
  917.      */
  918.     private void setPlaneCrossing(final SensorMeanPlaneCrossing planeCrossing) {
  919.         finders.put(planeCrossing.getSensor().getName(), planeCrossing);
  920.     }

  921.     /** Inverse location of a point with derivatives.
  922.      * @param <T> derivative type
  923.      * @param sensorName name of the line sensor
  924.      * @param point point to localize
  925.      * @param minLine minimum line number
  926.      * @param maxLine maximum line number
  927.      * @param generator generator to use for building {@link Derivative} instances
  928.      * @return sensor pixel seeing point with derivatives, or null if point cannot be seen between the
  929.      * prescribed line numbers
  930.      * @see #inverseLocation(String, GeodeticPoint, int, int)
  931.      * @since 2.0
  932.      */
  933.     public <T extends Derivative<T>> T[] inverseLocationDerivatives(final String sensorName,
  934.                                                                     final GeodeticPoint point,
  935.                                                                     final int minLine,
  936.                                                                     final int maxLine,
  937.                                                                     final DerivativeGenerator<T> generator) {

  938.         final LineSensor sensor = getLineSensor(sensorName);

  939.         final SensorMeanPlaneCrossing planeCrossing = getPlaneCrossing(sensorName, minLine, maxLine);

  940.         // find approximately the sensor line at which ground point crosses sensor mean plane
  941.         final Vector3D   target = ellipsoid.transform(point);
  942.         final SensorMeanPlaneCrossing.CrossingResult crossingResult = planeCrossing.find(target);
  943.         if (crossingResult == null) {
  944.             // target is out of search interval
  945.             return null;
  946.         }

  947.         // find approximately the pixel along this sensor line
  948.         final SensorPixelCrossing pixelCrossing =
  949.                 new SensorPixelCrossing(sensor, planeCrossing.getMeanPlaneNormal(),
  950.                                         crossingResult.getTargetDirection(),
  951.                                         MAX_EVAL, COARSE_INVERSE_LOCATION_ACCURACY);
  952.         final double coarsePixel = pixelCrossing.locatePixel(crossingResult.getDate());
  953.         if (Double.isNaN(coarsePixel)) {
  954.             // target is out of search interval
  955.             return null;
  956.         }

  957.         // fix line by considering the closest pixel exact position and line-of-sight
  958.         // (this pixel might point towards a direction slightly above or below the mean sensor plane)
  959.         final int lowIndex = FastMath.max(0, FastMath.min(sensor.getNbPixels() - 2, (int) FastMath.floor(coarsePixel)));
  960.         final FieldVector3D<T> lowLOS =
  961.                         sensor.getLOSDerivatives(crossingResult.getDate(), lowIndex, generator);
  962.         final FieldVector3D<T> highLOS = sensor.getLOSDerivatives(crossingResult.getDate(), lowIndex + 1, generator);
  963.         final FieldVector3D<T> localZ = FieldVector3D.crossProduct(lowLOS, highLOS).normalize();
  964.         final T beta         = FieldVector3D.dotProduct(crossingResult.getTargetDirection(), localZ).acos();
  965.         final T s            = FieldVector3D.dotProduct(crossingResult.getTargetDirectionDerivative(), localZ);
  966.         final T minusBetaDer = s.divide(s.multiply(s).subtract(1).negate().sqrt());
  967.         final T deltaL       = beta.subtract(0.5 * FastMath.PI) .divide(minusBetaDer);
  968.         final T fixedLine    = deltaL.add(crossingResult.getLine());
  969.         final FieldVector3D<T> fixedDirection =
  970.                         new FieldVector3D<>(deltaL.getField().getOne(), crossingResult.getTargetDirection(),
  971.                                             deltaL, crossingResult.getTargetDirectionDerivative()).normalize();

  972.         // fix neighbouring pixels
  973.         final AbsoluteDate fixedDate  = sensor.getDate(fixedLine.getValue());
  974.         final FieldVector3D<T> fixedX = sensor.getLOSDerivatives(fixedDate, lowIndex, generator);
  975.         final FieldVector3D<T> fixedZ = FieldVector3D.crossProduct(fixedX, sensor.getLOSDerivatives(fixedDate, lowIndex + 1, generator));
  976.         final FieldVector3D<T> fixedY = FieldVector3D.crossProduct(fixedZ, fixedX);

  977.         // fix pixel
  978.         final T hY         = FieldVector3D.dotProduct(highLOS, fixedY);
  979.         final T hX         = FieldVector3D.dotProduct(highLOS, fixedX);
  980.         final T pixelWidth = hY.atan2(hX);
  981.         final T fY         = FieldVector3D.dotProduct(fixedDirection, fixedY);
  982.         final T fX         = FieldVector3D.dotProduct(fixedDirection, fixedX);
  983.         final T alpha      = fY.atan2(fX);
  984.         final T fixedPixel = alpha.divide(pixelWidth).add(lowIndex);

  985.         final T[] ret = MathArrays.buildArray(fixedPixel.getField(), 2);
  986.         ret[0] = fixedLine;
  987.         ret[1] = fixedPixel;
  988.         return ret;

  989.     }

  990.     /** Get transform from spacecraft to inertial frame.
  991.      * @param date date of the transform
  992.      * @return transform from spacecraft to inertial frame
  993.      */
  994.     public Transform getScToInertial(final AbsoluteDate date) {
  995.         return scToBody.getScToInertial(date);
  996.     }

  997.     /** Get transform from inertial frame to observed body frame.
  998.      * @param date date of the transform
  999.      * @return transform from inertial frame to observed body frame
  1000.      */
  1001.     public Transform getInertialToBody(final AbsoluteDate date) {
  1002.         return scToBody.getInertialToBody(date);
  1003.     }

  1004.     /** Get transform from observed body frame to inertial frame.
  1005.      * @param date date of the transform
  1006.      * @return transform from observed body frame to inertial frame
  1007.      */
  1008.     public Transform getBodyToInertial(final AbsoluteDate date) {
  1009.         return scToBody.getBodyToInertial(date);
  1010.     }

  1011.     /** Get a sensor.
  1012.      * @param sensorName sensor name
  1013.      * @return selected sensor
  1014.      */
  1015.     public LineSensor getLineSensor(final String sensorName) {

  1016.         final LineSensor sensor = sensors.get(sensorName);
  1017.         if (sensor == null) {
  1018.             throw new RuggedException(RuggedMessages.UNKNOWN_SENSOR, sensorName);
  1019.         }
  1020.         return sensor;
  1021.     }

  1022.     /** Get converter between spacecraft and body.
  1023.      * @return the scToBody
  1024.      * @since 2.0
  1025.      */
  1026.     public SpacecraftToObservedBody getScToBody() {
  1027.         return scToBody;
  1028.     }
  1029. }