stringTimeSeries, Timestamp startTime, Timestamp endTime) {
-
- }
-
/**
* GPX is an XML file that follows the schema found here http://www.topografix.com/GPX/1/1/
*
@@ -2688,7 +2694,7 @@ public void runLOCICalculations(Connection connection) throws MalformedFlightFil
}
CalculatedDoubleTimeSeries vspdCalculated = new CalculatedDoubleTimeSeries(connection, VSPD_CALCULATED, "ft/min", true, this);
- vspdCalculated.create(new VSPDRegression(connection, this));
+ vspdCalculated.create(new VSPDRegression(getDoubleTimeSeries(ALT_B)));
CalculatedDoubleTimeSeries densityRatio = new CalculatedDoubleTimeSeries(connection, DENSITY_RATIO, "ratio", false, this);
densityRatio.create(index -> {
@@ -3327,6 +3333,119 @@ public void printValues(String[] requestedHeaders) {
System.out.println();
}
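+ /**
+ * Batch-inserts a collection of flights for a single upload, along with their
+ * double and string time series, itinerary entries, and warnings, using one
+ * prepared statement per table instead of one INSERT per row.
+ */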
+ public static void batchUpdateDatabase(Connection connection, Upload upload, Iterable<Flight> flights) {
+ int fleetId = upload.getFleetId();
+ int uploaderId = upload.getUploaderId();
+ int uploadId = upload.getId();
+
+ try {
+ PreparedStatement preparedStatement = createPreparedStatement(connection);
+ for (Flight flight : flights) {
+ // These per-flight lookups are cheap because the airframe name and type ids are mostly cached
+ flight.airframeNameId = Airframes.getNameId(connection, flight.airframeName);
+ flight.airframeTypeId = Airframes.getTypeId(connection, flight.airframeType);
+ Airframes.setAirframeFleet(connection, flight.airframeNameId, fleetId);
+
+ Tails.setSuggestedTail(connection, fleetId, flight.systemId, flight.suggestedTailNumber);
+ flight.tailNumber = Tails.getTail(connection, fleetId, flight.systemId);
+ flight.tailConfirmed = Tails.getConfirmed(connection, fleetId, flight.systemId);
+ flight.fleetId = fleetId;
+ flight.uploaderId = uploaderId;
+ flight.uploadId = uploadId;
+ flight.addBatch(preparedStatement);
+ }
+
+ int[] _results = preparedStatement.executeBatch();
+ ResultSet results = preparedStatement.getGeneratedKeys();
+
+ for (Flight flight : flights) {
+ if (!results.next()) {
+ LOG.severe("ERROR: insertion of flight to the database did not result in an id. This should never happen.");
+ System.exit(1);
+ }
+ flight.id = results.getInt(1);
+ }
+
+ preparedStatement.close();
+
+ PreparedStatement doubleTSPreparedStatement = DoubleTimeSeries.createPreparedStatement(connection);
+
+ for (Flight flight : flights)
+ for (var doubleTS : flight.doubleTimeSeries.values())
+ doubleTS.addBatch(connection, doubleTSPreparedStatement, flight.id);
+
+ doubleTSPreparedStatement.executeBatch();
+ doubleTSPreparedStatement.close();
+
+ PreparedStatement stringTSPreparedStatement = StringTimeSeries.createPreparedStatement(connection);
+
+ for (Flight flight : flights)
+ for (var stringTS : flight.stringTimeSeries.values())
+ stringTS.addBatch(connection, stringTSPreparedStatement, flight.id);
+
+ stringTSPreparedStatement.executeBatch();
+ stringTSPreparedStatement.close();
+
+ PreparedStatement itineraryPreparedStatement = Itinerary.createPreparedStatement(connection);
+ PreparedStatement airportPreparedStatement = Itinerary.createAirportPreparedStatement(connection);
+ PreparedStatement runwayPreparedStatement = Itinerary.createRunwayPreparedStatement(connection);
+
+ for (Flight flight : flights) {
+ for (int i = 0; i < flight.itinerary.size(); i++)
+ flight.itinerary.get(i).addBatch(itineraryPreparedStatement, airportPreparedStatement, runwayPreparedStatement, fleetId, flight.id, i);
+ }
+
+ itineraryPreparedStatement.executeBatch();
+ itineraryPreparedStatement.close();
+ airportPreparedStatement.executeBatch();
+ airportPreparedStatement.close();
+ runwayPreparedStatement.executeBatch();
+ runwayPreparedStatement.close();
+
+ PreparedStatement warningPreparedStatement = FlightWarning.createPreparedStatement(connection);
+
+ for (Flight flight : flights)
+ for (var e : flight.exceptions)
+ new FlightWarning(e.getMessage()).addBatch(connection, warningPreparedStatement, flight.id);
+
+ warningPreparedStatement.executeBatch();
+ warningPreparedStatement.close();
+
+ } catch (SQLException | IOException e) {
+ LOG.severe("Encountered the following exception while inserting batch of flights: \n" + e.getMessage());
+ e.printStackTrace();
+ System.exit(1);
+ }
+ }
+
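+ /**
+ * Builds the INSERT statement for the flights table; generated keys are requested
+ * so the new flight ids can be read back after the batch is executed.
+ */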
+ private static PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
+ return connection.prepareStatement("INSERT INTO flights (fleet_id, uploader_id, upload_id, airframe_id, airframe_type_id, system_id, start_time, end_time, filename, md5_hash, number_rows, status, has_coords, has_agl, insert_completed, processing_status, start_timestamp, end_timestamp) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, UNIX_TIMESTAMP(?), UNIX_TIMESTAMP(?))", Statement.RETURN_GENERATED_KEYS);
+ }
+
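+ /**
+ * Binds this flight's fields to the shared flights INSERT statement and adds the
+ * row to the current batch.
+ */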
+ private void addBatch(PreparedStatement preparedStatement) throws SQLException {
+ preparedStatement.setInt(1, fleetId);
+ preparedStatement.setInt(2, uploaderId);
+ preparedStatement.setInt(3, uploadId);
+ preparedStatement.setInt(4, airframeNameId);
+ preparedStatement.setInt(5, airframeTypeId);
+ preparedStatement.setString(6, systemId);
+ preparedStatement.setString(7, startDateTime);
+ preparedStatement.setString(8, endDateTime);
+ preparedStatement.setString(9, filename);
+ preparedStatement.setString(10, md5Hash);
+ preparedStatement.setInt(11, numberRows);
+ preparedStatement.setString(12, status);
+ preparedStatement.setBoolean(13, hasCoords);
+ preparedStatement.setBoolean(14, hasAGL);
+ preparedStatement.setBoolean(15, false); //insert not yet completed
+ preparedStatement.setLong(16, processingStatus);
+ preparedStatement.setString(17, startDateTime);
+ preparedStatement.setString(18, endDateTime);
+ preparedStatement.addBatch();
+ }
+
public void updateDatabase(Connection connection, int uploadId, int uploaderId, int fleetId) {
this.fleetId = fleetId;
this.uploaderId = uploaderId;
@@ -3343,55 +3462,29 @@ public void updateDatabase(Connection connection, int uploadId, int uploaderId,
tailNumber = Tails.getTail(connection, fleetId, systemId);
tailConfirmed = Tails.getConfirmed(connection, fleetId, systemId);
- PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO flights (fleet_id, uploader_id, upload_id, airframe_id, airframe_type_id, system_id, start_time, end_time, filename, md5_hash, number_rows, status, has_coords, has_agl, insert_completed, processing_status, start_timestamp, end_timestamp) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, UNIX_TIMESTAMP(?), UNIX_TIMESTAMP(?))", Statement.RETURN_GENERATED_KEYS);
- preparedStatement.setInt(1, fleetId);
- preparedStatement.setInt(2, uploaderId);
- preparedStatement.setInt(3, uploadId);
- preparedStatement.setInt(4, airframeNameId);
- preparedStatement.setInt(5, airframeTypeId);
- preparedStatement.setString(6, systemId);
- preparedStatement.setString(7, startDateTime);
- preparedStatement.setString(8, endDateTime);
- preparedStatement.setString(9, filename);
- preparedStatement.setString(10, md5Hash);
- preparedStatement.setInt(11, numberRows);
- preparedStatement.setString(12, status);
- preparedStatement.setBoolean(13, hasCoords);
- preparedStatement.setBoolean(14, hasAGL);
- preparedStatement.setBoolean(15, false); //insert not yet completed
- preparedStatement.setLong(16, processingStatus);
- preparedStatement.setString(17, startDateTime);
- preparedStatement.setString(18, endDateTime);
-
- System.out.println(preparedStatement);
- preparedStatement.executeUpdate();
+ PreparedStatement preparedStatement = createPreparedStatement(connection);
+
+ this.addBatch(preparedStatement);
+
+ LOG.info(preparedStatement.toString());
+ preparedStatement.executeBatch();
ResultSet resultSet = preparedStatement.getGeneratedKeys();
if (resultSet.next()) {
int flightId = resultSet.getInt(1);
this.id = flightId;
- // Comment this out unless debugging
- //for (String key : doubleTimeSeries.keySet()) {
- //System.out.println("double time series key: '" + key);
- //System.out.println("\tis " + doubleTimeSeries.get(key).toString());
- //}
-
- for (DoubleTimeSeries series : doubleTimeSeries.values()) {
+ for (DoubleTimeSeries series : doubleTimeSeries.values())
series.updateDatabase(connection, flightId);
- }
- for (StringTimeSeries series : stringTimeSeries.values()) {
+ for (StringTimeSeries series : stringTimeSeries.values())
series.updateDatabase(connection, flightId);
- }
- for (Exception exception : exceptions) {
+ for (Exception exception : exceptions)
FlightWarning.insertWarning(connection, flightId, exception.getMessage());
- }
- for (int i = 0; i < itinerary.size(); i++) {
+ for (int i = 0; i < itinerary.size(); i++)
itinerary.get(i).updateDatabase(connection, fleetId, flightId, i);
- }
PreparedStatement ps = connection.prepareStatement("UPDATE flights SET insert_completed = 1 WHERE id = ?");
ps.setInt(1, this.id);
@@ -3399,7 +3492,7 @@ public void updateDatabase(Connection connection, int uploadId, int uploaderId,
ps.close();
} else {
- System.err.println("ERROR: insertion of flight to the database did not result in an id. This should never happen.");
+ LOG.severe("ERROR: insertion of flight to the database did not result in an id. This should never happen.");
System.exit(1);
}
@@ -3427,10 +3520,9 @@ public void writeToFile(Connection connection, String filename) throws IOExcepti
for (int i = 0; i < series.size(); i++) {
String name = series.get(i).getName();
if (name.equals("AirportDistance") || name.equals("RunwayDistance") || series.get(i).getMin() == series.get(i).getMax()) {
- System.out.println("Skipping column: '" + name + "'");
+ LOG.warning("Skipping column: '" + name + "'");
continue;
}
- System.out.println("'" + name + "' min - max: " + (series.get(i).getMin() - series.get(i).getMax()));
if (afterFirst) printWriter.print(",");
printWriter.print(series.get(i).getName());
@@ -3480,4 +3572,12 @@ public void setAirframeType(String type) {
public void setAirframeTypeID(Integer typeID) {
this.airframeTypeId = typeID;
}
+
+ public void setHasCoords(boolean hasCoords) {
+ this.hasCoords = hasCoords;
+ }
+
+ public void setHasAGL(boolean hasAGL) {
+ this.hasAGL = hasAGL;
+ }
}
diff --git a/src/main/java/org/ngafid/flights/FlightError.java b/src/main/java/org/ngafid/flights/FlightError.java
index dc9eda052..027f851d1 100644
--- a/src/main/java/org/ngafid/flights/FlightError.java
+++ b/src/main/java/org/ngafid/flights/FlightError.java
@@ -25,8 +25,6 @@ public static void insertError(Connection connection, int uploadId, String filen
exceptionPreparedStatement.setString(2, filename);
exceptionPreparedStatement.setInt(3, ErrorMessage.getMessageId(connection, message));
- LOG.info(exceptionPreparedStatement.toString());
-
exceptionPreparedStatement.executeUpdate();
exceptionPreparedStatement.close();
}
diff --git a/src/main/java/org/ngafid/flights/FlightWarning.java b/src/main/java/org/ngafid/flights/FlightWarning.java
index 3c6536704..9908ac0e2 100644
--- a/src/main/java/org/ngafid/flights/FlightWarning.java
+++ b/src/main/java/org/ngafid/flights/FlightWarning.java
@@ -18,15 +18,22 @@ public class FlightWarning {
private String filename;
private String message;
private String stackTrace;
+
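+ // Batch-insert support: create one prepared statement per connection, bind each
+ // warning with addBatch(), then call executeBatch().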
+ public static PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
+ return connection.prepareStatement("INSERT INTO flight_warnings (flight_id, message_id) VALUES (?, ?)");
+ }
+ public void addBatch(Connection connection, PreparedStatement preparedStatement, int flightId) throws SQLException {
+ preparedStatement.setInt(1, flightId);
+ preparedStatement.setInt(2, ErrorMessage.getMessageId(connection, message));
+ preparedStatement.addBatch();
+ }
+
public static void insertWarning(Connection connection, int flightId, String message) throws SQLException {
- PreparedStatement exceptionPreparedStatement = connection.prepareStatement("INSERT INTO flight_warnings (flight_id, message_id) VALUES (?, ?)");
- exceptionPreparedStatement.setInt(1, flightId);
- exceptionPreparedStatement.setInt(2, ErrorMessage.getMessageId(connection, message));
-
- LOG.info(exceptionPreparedStatement.toString());
+ PreparedStatement exceptionPreparedStatement = createPreparedStatement(connection);
- exceptionPreparedStatement.executeUpdate();
+ new FlightWarning(message).addBatch(connection, exceptionPreparedStatement, flightId);
+
+ exceptionPreparedStatement.executeBatch();
exceptionPreparedStatement.close();
}
@@ -68,6 +75,10 @@ public static int getCount(Connection connection, int fleetId) throws SQLExcepti
return count ;
}
+ public FlightWarning(String message) {
+ this.message = message;
+ }
+
public FlightWarning(Connection connection, ResultSet resultSet) throws SQLException {
filename = resultSet.getString(1);
uploadId = resultSet.getInt(2);
diff --git a/src/main/java/org/ngafid/flights/Itinerary.java b/src/main/java/org/ngafid/flights/Itinerary.java
index 800e0c298..7f9357c40 100644
--- a/src/main/java/org/ngafid/flights/Itinerary.java
+++ b/src/main/java/org/ngafid/flights/Itinerary.java
@@ -320,22 +320,15 @@ public void update(String runway, int index, double altitudeAGL, double airportD
public void selectBestRunway() {
runway = null;
int maxCount = 0;
- System.err.println("Selecting runway:");
- System.err.println("min airport distance: " + minAirportDistance);
- System.err.println("min runway distance: " + minRunwayDistance);
- System.err.println("min altitude agl: " + minAltitude);
-
+
for (String key : runwayCounts.keySet()) {
int count = runwayCounts.get(key);
- System.err.println("\trunway: " + key + ", count: " + count);
if (count > maxCount) {
runway = key;
maxCount = count;
}
}
-
- System.err.println("selected runway '" + runway + "' with count: " + maxCount);
}
public boolean wasApproach() {
@@ -358,45 +351,59 @@ public boolean wasApproach() {
}
}
- public void updateDatabase(Connection connection, int fleetId, int flightId, int order) throws SQLException {
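+ // The factory methods below let callers batch itinerary rows together with their
+ // visited_airports and visited_runways inserts across an entire upload.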
+ public static PreparedStatement createAirportPreparedStatement(Connection connection) throws SQLException {
+ return connection.prepareStatement("INSERT IGNORE INTO visited_airports SET fleet_id = ?, airport = ?");
+ }
+
+ public static PreparedStatement createRunwayPreparedStatement(Connection connection) throws SQLException {
+ return connection.prepareStatement("INSERT IGNORE INTO visited_runways SET fleet_id = ?, runway = ?");
+ }
+
+ public static PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
+ return connection.prepareStatement("INSERT INTO itinerary (flight_id, `order`, min_altitude_index, min_altitude, min_airport_distance, min_runway_distance, airport, runway, start_of_approach, end_of_approach, start_of_takeoff, end_of_takeoff, type) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
+ }
+
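+ /**
+ * Adds this itinerary entry, along with its visited airport and runway rows, to
+ * the given batches; order is this entry's position within the flight.
+ */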
+ public void addBatch(PreparedStatement itineraryStatement, PreparedStatement airportStatement, PreparedStatement runwayStatement, int fleetId, int flightId, int order) throws SQLException {
this.order = order;
+
+ airportStatement.setInt(1, fleetId);
+ airportStatement.setString(2, airport);
+ airportStatement.addBatch();
+
+ runwayStatement.setInt(1, fleetId);
+ runwayStatement.setString(2, airport + " - " + runway);
+ runwayStatement.addBatch();
+
+ itineraryStatement.setInt(1, flightId);
+ itineraryStatement.setInt(2, order);
+ itineraryStatement.setInt(3, minAltitudeIndex);
+ itineraryStatement.setDouble(4, minAltitude);
+ itineraryStatement.setDouble(5, minAirportDistance);
+ itineraryStatement.setDouble(6, minRunwayDistance);
+ itineraryStatement.setString(7, airport);
+ itineraryStatement.setString(8, runway);
+ itineraryStatement.setInt(9, startOfApproach);
+ itineraryStatement.setInt(10, endOfApproach);
+ itineraryStatement.setInt(11, startOfTakeoff);
+ itineraryStatement.setInt(12, endOfTakeoff);
+ itineraryStatement.setString(13, type);
+ itineraryStatement.addBatch();
+ }
+ public void updateDatabase(Connection connection, int fleetId, int flightId, int order) throws SQLException {
//insert new visited airports and runways -- will ignore if it already exists
- PreparedStatement preparedStatement = connection.prepareStatement("INSERT IGNORE INTO visited_airports SET fleet_id = ?, airport = ?");
- preparedStatement.setInt(1, fleetId);
- preparedStatement.setString(2, airport);
-
- System.err.println(preparedStatement);
- preparedStatement.executeUpdate();
- preparedStatement.close();
-
- preparedStatement = connection.prepareStatement("INSERT IGNORE INTO visited_runways SET fleet_id = ?, runway = ?");
- preparedStatement.setInt(1, fleetId);
- preparedStatement.setString(2, airport + " - " + runway);
-
- System.err.println(preparedStatement);
- preparedStatement.executeUpdate();
- preparedStatement.close();
-
- //now insert the itinerary
- preparedStatement = connection.prepareStatement("INSERT INTO itinerary (flight_id, `order`, min_altitude_index, min_altitude, min_airport_distance, min_runway_distance, airport, runway, start_of_approach, end_of_approach, start_of_takeoff, end_of_takeoff, type) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
- preparedStatement.setInt(1, flightId);
- preparedStatement.setInt(2, order);
- preparedStatement.setInt(3, minAltitudeIndex);
- preparedStatement.setDouble(4, minAltitude);
- preparedStatement.setDouble(5, minAirportDistance);
- preparedStatement.setDouble(6, minRunwayDistance);
- preparedStatement.setString(7, airport);
- preparedStatement.setString(8, runway);
- preparedStatement.setInt(9, startOfApproach);
- preparedStatement.setInt(10, endOfApproach);
- preparedStatement.setInt(11, startOfTakeoff);
- preparedStatement.setInt(12, endOfTakeoff);
- preparedStatement.setString(13, type);
-
- System.err.println(preparedStatement);
- preparedStatement.executeUpdate();
- preparedStatement.close();
+ PreparedStatement statement = createPreparedStatement(connection);
+ PreparedStatement airportStatement = createAirportPreparedStatement(connection);
+ PreparedStatement runwayStatement = createRunwayPreparedStatement(connection);
+
+ this.addBatch(statement, airportStatement, runwayStatement, fleetId, flightId, order);
+
+ statement.executeBatch();
+ statement.close();
+ airportStatement.executeBatch();
+ airportStatement.close();
+ runwayStatement.executeBatch();
+ runwayStatement.close();
}
public String toString() { // TODO: add new columns to toString?
diff --git a/src/main/java/org/ngafid/flights/NIFA.java b/src/main/java/org/ngafid/flights/NIFA.java
index c37c9a57d..a7091090e 100644
--- a/src/main/java/org/ngafid/flights/NIFA.java
+++ b/src/main/java/org/ngafid/flights/NIFA.java
@@ -18,7 +18,7 @@
import java.util.logging.Logger;
import java.util.stream.Stream;
-import static org.ngafid.flights.calculations.Parameters.*;
+import static org.ngafid.flights.Parameters.*;
public class NIFA implements Serializable {
diff --git a/src/main/java/org/ngafid/flights/calculations/Parameters.java b/src/main/java/org/ngafid/flights/Parameters.java
similarity index 86%
rename from src/main/java/org/ngafid/flights/calculations/Parameters.java
rename to src/main/java/org/ngafid/flights/Parameters.java
index 1a6f3b417..b97e3819b 100644
--- a/src/main/java/org/ngafid/flights/calculations/Parameters.java
+++ b/src/main/java/org/ngafid/flights/Parameters.java
@@ -8,7 +8,9 @@
* @author Josh Karns
* @author Aidan LaBella
*/
-package org.ngafid.flights.calculations;
+package org.ngafid.flights;
+
+import java.util.Set;
public interface Parameters {
/**
@@ -24,7 +26,7 @@ public interface Parameters {
public static final String PARAM_JSON_LONGITUDE = "lon";
public static final double STD_PRESS_INHG = 29.92;
- public static final double COMP_CONV = (double) (Math.PI / 180);
+ public static final double COMP_CONV = Math.PI / 180.0;
/**
* Critical Values
@@ -65,6 +67,7 @@ public interface Parameters {
public static final String ROLL = "Roll";
public static final String ALT_AGL = "AltAGL";
public static final String ALT_MSL = "AltMSL";
+ public static final String ALT_MSL_LAG_DIFF = "AltMSL Lag Diff";
public static final String ALT_B = "AltB";
public static final String AOA_SIMPLE = "AOASimple";
public static final String E1_RPM = "E1 RPM";
@@ -82,11 +85,27 @@ public interface Parameters {
public static final String TOTAL_FUEL = "Total Fuel";
public static final String LCL_DATE = "Lcl Date";
public static final String LCL_TIME = "Lcl Time";
+ public static final String UTC_OFFSET = "UTCOfst";
public static final String LATITUDE = "Latitude";
public static final String LONGITUDE = "Longitude";
public static final String STALL_PROBABILITY = "PStall";
public static final String LOSS_OF_CONTROL_PROBABILITY = "PLOCI";
public static final String HDG_TRK_DIFF = "HDG TRK Diff";
+ public static final String FUEL_QTY_LEFT = "FQtyL";
+ public static final String FUEL_QTY_RIGHT = "FQtyR";
+
+ public static final String NEAREST_RUNWAY = "NearestRunway";
+ public static final String RUNWAY_DISTANCE = "RunwayDistance";
+ public static final String NEAREST_AIRPORT = "NearestAirport";
+ public static final String AIRPORT_DISTANCE = "AirportDistance";
+
+ /**
+ * Units
+ **/
+ public static final String UNIT_FT_AGL = "ft agl";
+ public static final String UNIT_FT_MSL = "ft msl";
+ public static final String UNIT_GALLONS = "gals";
+ public static final String UNIT_DEG_F = "deg F";
/**
* {@link Airframes} id's
@@ -104,7 +123,7 @@ public interface Parameters {
/**
* Strings that represent the parameters used in the Stall Index calculation
*/
- public static final String [] LOCI_DEPENDENCIES = {HDG, ROLL};
+ public static final String [] LOCI_DEPENDENCIES = {HDG, ROLL, TAS_FTMIN};
//
// use these for a real true airspeed (Shelbys method) /*GND_SPD, WIND_SPEED, WIND_DIRECTION};*/
public static final String [] SPIN_DEPENDENCIES = {IAS, VSPD_CALCULATED, NORM_AC, LAT_AC, ALT_AGL};
diff --git a/src/main/java/org/ngafid/flights/StringTimeSeries.java b/src/main/java/org/ngafid/flights/StringTimeSeries.java
index b5e8c301a..4ee2fe75d 100644
--- a/src/main/java/org/ngafid/flights/StringTimeSeries.java
+++ b/src/main/java/org/ngafid/flights/StringTimeSeries.java
@@ -33,42 +33,77 @@
public class StringTimeSeries {
private static final Logger LOG = Logger.getLogger(StringTimeSeries.class.getName());
private static final int COMPRESSION_LEVEL = Deflater.DEFAULT_COMPRESSION;
+ private static final int SIZE_HINT = 256;
- private int nameId;
+ private int nameId = -1;
private String name;
- private int typeId;
+ private int typeId = -1;
private String dataType;
private ArrayList<String> timeSeries;
private int validCount;
- public StringTimeSeries(Connection connection, String name, String dataType) throws SQLException {
+ public StringTimeSeries(String name, String dataType, int sizeHint) {
this.name = name;
- this.nameId = SeriesNames.getStringNameId(connection, name);
this.dataType = dataType;
- this.typeId = TypeNames.getId(connection, dataType);
- this.timeSeries = new ArrayList();
+ this.timeSeries = new ArrayList<>(sizeHint);
validCount = 0;
}
+ public StringTimeSeries(String name, String dataType) {
+ this(name, dataType, SIZE_HINT);
+ }
+
+ public StringTimeSeries(Connection connection, String name, String dataType) throws SQLException {
+ this(name, dataType, SIZE_HINT);
+ setNameId(connection);
+ setTypeId(connection);
+ }
+
public StringTimeSeries(Connection connection, String name, String dataType, ArrayList<String> timeSeries) throws SQLException {
+ this(name, dataType, timeSeries);
+ setNameId(connection);
+ setTypeId(connection);
+ }
+
+ public StringTimeSeries(String name, String dataType, ArrayList<String> timeSeries) {
this.name = name;
- this.nameId = SeriesNames.getStringNameId(connection, name);
this.dataType = dataType;
- this.typeId = TypeNames.getId(connection, dataType);
this.timeSeries = timeSeries;
-
validCount = 0;
for (int i = 0; i < timeSeries.size(); i++) {
if (!timeSeries.get(i).equals("")) {
validCount++;
}
}
- }
+ }
+
+ // Added to get results for StringTimeSeries
+ public StringTimeSeries(Connection connection, ResultSet resultSet) throws SQLException, IOException, ClassNotFoundException {
+
+ this.nameId = resultSet.getInt(1);
+ this.name = SeriesNames.getStringName(connection, this.nameId);
+ //System.out.println("name: " + name);
+
+ this.typeId = resultSet.getInt(2);
+ this.dataType = TypeNames.getName(connection, this.typeId);
+ //System.out.println("data type: " + dataType);
+ int length = resultSet.getInt(3);
+ //System.out.println("length: " + length);
+ validCount = resultSet.getInt(4);
+ //System.out.println("valid count: " + validCount);
- // Added to get StringTimeSeries
+ Blob values = resultSet.getBlob(5);
+ byte[] bytes = values.getBytes(1, (int)values.length());
+ //System.out.println("values.length: " + (int)values.length());
+ values.free();
+
+ // This unchecked cast warning can be fixed, but it shouldn't be necessary as long as we only put ArrayList<String> objects into the StringTimeSeries cache.
+ this.timeSeries = (ArrayList<String>) Compression.inflateObject(bytes);
+ }
+
public static StringTimeSeries getStringTimeSeries(Connection connection, int flightId, String name) throws SQLException {
PreparedStatement query = connection.prepareStatement("SELECT ss.name_id, ss.data_type_id, ss.length, ss.valid_length, ss.data FROM string_series AS ss INNER JOIN string_series_names AS ssn ON ssn.id = ss.name_id WHERE ssn.name = ? AND ss.flight_id = ?");
@@ -99,32 +134,14 @@ public static StringTimeSeries getStringTimeSeries(Connection connection, int fl
return null;
}
}
-
- // Added to get results for StringTimeSeries
- public StringTimeSeries(Connection connection, ResultSet resultSet) throws SQLException, IOException, ClassNotFoundException {
-
- this.nameId = resultSet.getInt(1);
- this.name = SeriesNames.getStringName(connection, this.nameId);
- //System.out.println("name: " + name);
-
- this.typeId = resultSet.getInt(2);
- this.dataType = TypeNames.getName(connection, this.typeId);
- //System.out.println("data type: " + dataType);
-
- int length = resultSet.getInt(3);
- //System.out.println("length: " + length);
- validCount = resultSet.getInt(4);
- //System.out.println("valid count: " + validCount);
-
- Blob values = resultSet.getBlob(5);
- byte[] bytes = values.getBytes(1, (int)values.length());
- //System.out.println("values.length: " + (int)values.length());
- values.free();
-
- // This unchecked caste warning can be fixed but it shouldnt be necessary if we only but ArrayList objects into the StringTimeSeries cache.
- this.timeSeries = (ArrayList) Compression.inflateObject(bytes);
+
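+ // Resolve the database ids for this series' name and data type on demand.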
+ private void setNameId(Connection connection) throws SQLException {
+ this.nameId = SeriesNames.getStringNameId(connection, name);
}
+ private void setTypeId(Connection connection) throws SQLException {
+ this.typeId = TypeNames.getId(connection, dataType);
+ }
public String toString() {
return "[StringTimeSeries '" + name + "' size: " + timeSeries.size() + ", validCount: " + validCount + "]";
}
@@ -193,26 +210,38 @@ public int validCount() {
return validCount;
}
- public void updateDatabase(Connection connection, int flightId) {
- //System.out.println("Updating database for " + this);
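+ /**
+ * Creates the INSERT statement for string_series rows; used with addBatch() so
+ * many series can be written in a single executeBatch() call.
+ */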
+ public static PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
+ return connection.prepareStatement("INSERT INTO string_series (flight_id, name_id, data_type_id, length, valid_length, data) VALUES (?, ?, ?, ?, ?, ?)");
+ }
- try {
- PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO string_series (flight_id, name_id, data_type_id, length, valid_length, data) VALUES (?, ?, ?, ?, ?, ?)");
+ public void addBatch(Connection connection, PreparedStatement preparedStatement, int flightId) throws SQLException, IOException {
+ if (nameId == -1)
+ setNameId(connection);
+ if (typeId == -1)
+ setTypeId(connection);
+
+ preparedStatement.setInt(1, flightId);
+ preparedStatement.setInt(2, nameId);
+ preparedStatement.setInt(3, typeId);
+ preparedStatement.setInt(4, timeSeries.size());
+ preparedStatement.setInt(5, validCount);
+
+ // To get rid of extra bytes at the end of the buffer
+ byte[] compressed = Compression.compressObject(this.timeSeries);
+ Blob seriesBlob = new SerialBlob(compressed);
+ preparedStatement.setBlob(6, seriesBlob);
+
+ preparedStatement.addBatch();
+ }
- preparedStatement.setInt(1, flightId);
- preparedStatement.setInt(2, nameId);
- preparedStatement.setInt(3, typeId);
- preparedStatement.setInt(4, timeSeries.size());
- preparedStatement.setInt(5, validCount);
+ public void updateDatabase(Connection connection, int flightId) {
+ try {
+ PreparedStatement preparedStatement = createPreparedStatement(connection);
- // To get rid of extra bytes at the end of the buffer
- byte[] compressed = Compression.compressObject(this.timeSeries);
- Blob seriesBlob = new SerialBlob(compressed);
+ this.addBatch(connection, preparedStatement, flightId);
- preparedStatement.setBlob(6, seriesBlob);
preparedStatement.executeUpdate();
preparedStatement.close();
- seriesBlob.free();
} catch (SQLException | IOException e) {
e.printStackTrace();
@@ -228,5 +257,14 @@ public StringTimeSeries subSeries(Connection connection, int from, int until) th
return newSeries;
}
+
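+ /**
+ * In-memory variant of subSeries that copies the [from, until) range without
+ * requiring a database connection.
+ */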
+ public StringTimeSeries subSeries(int from, int until) throws SQLException {
+ StringTimeSeries newSeries = new StringTimeSeries(name, dataType);
+
+ for (int i = from; i < until; i++)
+ newSeries.add(this.timeSeries.get(i));
+
+ return newSeries;
+ }
}
diff --git a/src/main/java/org/ngafid/flights/calculations/CalculatedDoubleTimeSeries.java b/src/main/java/org/ngafid/flights/calculations/CalculatedDoubleTimeSeries.java
index a6eccf41a..96ddd5699 100644
--- a/src/main/java/org/ngafid/flights/calculations/CalculatedDoubleTimeSeries.java
+++ b/src/main/java/org/ngafid/flights/calculations/CalculatedDoubleTimeSeries.java
@@ -13,6 +13,7 @@
public class CalculatedDoubleTimeSeries extends DoubleTimeSeries {
private final Flight flight;
+ private final boolean cache;
/**
* Default Constructor
@@ -23,7 +24,14 @@ public class CalculatedDoubleTimeSeries extends DoubleTimeSeries {
* @param flight the flight instance the timeseries is being calcualted for
*/
public CalculatedDoubleTimeSeries(Connection connection, String name, String dataType, boolean cache, Flight flight) throws SQLException {
- super(connection, name, dataType, cache);
+ super(connection, name, dataType);
+ this.flight = flight;
+ this.cache = cache;
+ }
+
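+ /**
+ * Connection-free constructor; the resulting series is only attached to the
+ * flight when cache is true.
+ */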
+ public CalculatedDoubleTimeSeries(String name, String dataType, boolean cache, Flight flight) throws SQLException {
+ super(name, dataType);
+ this.cache = cache;
this.flight = flight;
}
@@ -39,6 +47,7 @@ public void create(Calculation calculation) throws IOException, SQLException {
super.add(calculation.calculate(i));
}
- flight.addDoubleTimeSeries(super.getName(), this);
+ if (cache)
+ flight.addDoubleTimeSeries(super.getName(), this);
}
}
diff --git a/src/main/java/org/ngafid/flights/calculations/HeadingTrackDiff.java b/src/main/java/org/ngafid/flights/calculations/HeadingTrackDiff.java
index e403b9f0e..a8c25efeb 100644
--- a/src/main/java/org/ngafid/flights/calculations/HeadingTrackDiff.java
+++ b/src/main/java/org/ngafid/flights/calculations/HeadingTrackDiff.java
@@ -8,7 +8,7 @@
import java.util.List;
import java.util.logging.Logger;
-import static org.ngafid.flights.calculations.Parameters.*;
+import static org.ngafid.flights.Parameters.*;
import org.apache.commons.cli.*;
import org.ngafid.Database;
diff --git a/src/main/java/org/ngafid/flights/calculations/TurnToFinal.java b/src/main/java/org/ngafid/flights/calculations/TurnToFinal.java
index 4bce0456f..77140936d 100644
--- a/src/main/java/org/ngafid/flights/calculations/TurnToFinal.java
+++ b/src/main/java/org/ngafid/flights/calculations/TurnToFinal.java
@@ -18,7 +18,7 @@
import org.ngafid.flights.*;
-import static org.ngafid.flights.calculations.Parameters.*; //eliminates the need to use Parameters.
+import static org.ngafid.flights.Parameters.*; //eliminates the need to use Parameters.
public class TurnToFinal implements Serializable {
// NGAFIDTTF0000L
diff --git a/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java b/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java
index 11cd1672f..2d6ab1c7a 100644
--- a/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java
+++ b/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java
@@ -1,11 +1,13 @@
package org.ngafid.flights.calculations;
import org.ngafid.flights.*;
+import org.ngafid.flights.DoubleTimeSeries.TimeStepCalculation;
+
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
-import static org.ngafid.flights.calculations.Parameters.*;
+import static org.ngafid.flights.Parameters.*;
/**
* This class is an instance of a {@link Calculation} that gets a derived VSI using linear regression
@@ -13,7 +15,7 @@
* @author Aidan LaBella @ RIT CS
*/
-public class VSPDRegression implements Calculation {
+public class VSPDRegression implements TimeStepCalculation, Calculation {
private final DoubleTimeSeries altB;
private final DoubleTimeSeries altBLag;
private final DoubleTimeSeries altBLead;
@@ -25,10 +27,14 @@ public class VSPDRegression implements Calculation {
*
* @param flight the {@link Flight} to perform a regression on
*/
- public VSPDRegression(Connection connection, Flight flight) throws SQLException, IOException {
- this.altB = flight.getDoubleTimeSeries(ALT_B);
- this.altBLag = altB.lag(connection, VSI_LAG_DIFF);
- this.altBLead = altB.lead(connection, VSI_LAG_DIFF);
+ public VSPDRegression(DoubleTimeSeries altB) {
+ this.altB = altB;
+ this.altBLag = altB.lag(VSI_LAG_DIFF);
+ this.altBLead = altB.lead(VSI_LAG_DIFF);
+ }
+
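+ // TimeStepCalculation adapter: compute() delegates to calculate().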
+ public double compute(int index) {
+ return calculate(index);
}
/**
diff --git a/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java b/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java
new file mode 100644
index 000000000..b32e17266
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java
@@ -0,0 +1,247 @@
+package org.ngafid.flights.process;
+
+import com.opencsv.CSVReader;
+import com.opencsv.exceptions.CsvException;
+import org.ngafid.flights.*;
+
+import java.sql.Connection;
+import java.io.*;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+
+/**
+ * Handles parsing of CSV files
+ *
+ * @author Aaron Chan
+ */
+
+public class CSVFileProcessor extends FlightFileProcessor {
+ private static final Logger LOG = Logger.getLogger(CSVFileProcessor.class.getName());
+ private final List<String> headers;
+ private final List<String> dataTypes;
+ private final FlightMeta meta = new FlightMeta();
+
+ private final Upload upload;
+
+ public CSVFileProcessor(Connection connection, InputStream stream, String filename, Upload upload) {
+ super(connection, stream, filename);
+ this.upload = upload;
+
+
+ headers = new ArrayList<>();
+ dataTypes = new ArrayList<>();
+
+ meta.airframeType = "Fixed Wing"; // Fixed Wing By default
+ meta.filename = filename;
+ }
+
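+ /**
+ * Parses the CSV flight file into column-oriented time series: the data-type and
+ * header rows are read first, then each remaining row is split into per-column
+ * lists, which are converted in parallel into DoubleTimeSeries (numeric columns)
+ * or StringTimeSeries (everything else).
+ */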
+ @Override
+ public Stream<FlightBuilder> parse() throws FlightProcessingException {
+ Map<String, DoubleTimeSeries> doubleTimeSeries = new ConcurrentHashMap<>();
+ Map<String, StringTimeSeries> stringTimeSeries = new ConcurrentHashMap<>();
+
+ List csvValues = null;
+
+ try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(super.stream, StandardCharsets.UTF_8)); CSVReader csvReader = new CSVReader(bufferedReader)) {
+ String fileInformation = getFlightInfo(bufferedReader); // Will read a line
+
+ if (meta.airframeName != null && meta.airframeName.equals("ScanEagle")) {
+ scanEagleParsing(fileInformation); // TODO: Handle ScanEagle data
+ } else {
+ processFileInformation(fileInformation);
+ bufferedReader.read(); // Skip first char (#)
+ Arrays.stream(csvReader.readNext())
+ .map(String::strip)
+ .forEachOrdered(dataTypes::add);
+ Arrays.stream(csvReader.readNext())
+ .map(String::strip)
+ .forEachOrdered(headers::add);
+ }
+
+ updateAirframe();
+
+ ArrayList<ArrayList<String>> columns = new ArrayList<>();
+ String[] firstRow = csvReader.peek();
+ for (int i = 0; i < firstRow.length; i++)
+ columns.add(new ArrayList<>());
+
+ String[] row = null;
+ while ((row = csvReader.readNext()) != null && row.length == firstRow.length)
+ for (int i = 0; i < row.length; i++)
+ columns.get(i).add(row[i].trim());
+
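+ // Convert the columns to time series in parallel, processing them in chunks of 'granularity' columns per task.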
+ final int granularity = 8;
+ IntStream.range(0, (columns.size() + granularity - 1) / granularity)
+ .parallel()
+ .forEach(g -> {
+ var max = Math.min(g * granularity + granularity, columns.size());
+ for (int i = g * granularity; i < max; i++) {
+ var column = columns.get(i);
+ var name = headers.get(i);
+ var dataType = dataTypes.get(i);
+
+ try {
+ Double.parseDouble(column.get(0));
+ doubleTimeSeries.put(name, new DoubleTimeSeries(name, dataType, column));
+ } catch (NumberFormatException e) {
+ stringTimeSeries.put(name, new StringTimeSeries(name, dataType, column));
+ }
+ }
+ });
+ } catch (IOException | FatalFlightFileException | CsvException e) {
+ throw new FlightProcessingException(e);
+ }
+
+ FlightBuilder builder = new FlightBuilder(meta, doubleTimeSeries, stringTimeSeries);
+
+ return Stream.of(builder);
+ }
+
+
+ /**
+ * Updates the airframe type if airframe name does not belong to fixed wing
+ */
+ private void updateAirframe() {
+ if (meta.airframeName.equals("R44") || meta.airframeName.equals("Robinson R44")) {
+ meta.airframeName = "R44";
+ meta.airframeType = "Rotorcraft";
+ }
+ }
+
+ /**
+ * Gets the flight information from the first line of the file
+ * @param reader BufferedReader for reading the first line
+ * @return
+ * @throws FatalFlightFileException
+ * @throws IOException
+ */
+ private String getFlightInfo(BufferedReader reader) throws FatalFlightFileException, IOException {
+ String fileInformation = reader.readLine();
+
+ if (fileInformation == null || fileInformation.trim().length() == 0) {
+ throw new FatalFlightFileException("The flight file was empty.");
+ }
+
+ if (fileInformation.charAt(0) != '#' && fileInformation.charAt(0) != '{') {
+ if (fileInformation.startsWith("DID_")) {
+ LOG.info("CAME FROM A SCANEAGLE! CAN CALCULATE SUGGESTED TAIL/SYSTEM ID FROM FILENAME");
+
+ meta.airframeName = "ScanEagle";
+ meta.airframeType = "UAS Fixed Wing";
+ } else {
+ throw new FatalFlightFileException("First line of the flight file should begin with a '#' and contain flight recorder information.");
+ }
+ }
+
+ return fileInformation;
+ }
+
+ private void processFileInformation(String fileInformation) throws FatalFlightFileException {
+ String[] infoParts = fileInformation.split(",");
+ try {
+ for (int i = 1; i < infoParts.length; i++) {
+ //process everything else (G1000 data)
+ if (infoParts[i].trim().length() == 0) continue;
+
+ //System.err.println("splitting key/value: '" + infoParts[i] + "'");
+ String subParts[] = infoParts[i].trim().split("=");
+ String key = subParts[0];
+ String value = subParts[1];
+
+ //System.err.println("key: '" + key + "'");
+ //System.err.println("value: '" + value + "'");
+
+ // TODO: Create some sort of automatic mapping for synonymous airframe names.
+ if (key.equals("airframe_name")) {
+ meta.airframeName = value.substring(1, value.length() - 1);
+
+ //throw an error for 'Unknown Aircraft'
+ if (meta.airframeName.equals("Unknown Aircraft")) {
+ throw new FatalFlightFileException("Flight airframe name was 'Unknown Aircraft', please fix and re-upload so the flight can be properly identified and processed.");
+ }
+
+
+ if (meta.airframeName.equals("Diamond DA 40")) {
+ meta.airframeName = "Diamond DA40";
+ } else if ((meta.airframeName.equals("Garmin Flight Display") || meta.airframeName.equals("Robinson R44 Raven I")) && upload.getFleetId() == 1 /*This is a hack for UND who has their airframe names set up incorrectly for their helicopters*/) {
+ meta.airframeName = "R44";
+ } else if (meta.airframeName.equals("Garmin Flight Display")) {
+ throw new FatalFlightFileException("Flight airframe name was 'Garmin Flight Display' which does not specify what airframe type the flight was, please fix and re-upload so the flight can be properly identified and processed.");
+
+ }
+
+ if (meta.airframeName.equals("Cirrus SR22 (3600 GW)")) {
+ meta.airframeName = "Cirrus SR22";
+ }
+
+ if (Airframes.FIXED_WING_AIRFRAMES.contains(meta.airframeName) || meta.airframeName.contains("Garmin")) {
+ meta.airframeType = "Fixed Wing";
+ } else if (meta.airframeName.equals("R44") || meta.airframeName.equals("Robinson R44")) {
+ meta.airframeName = "R44";
+ meta.airframeType = "Rotorcraft";
+ } else {
+ System.err.println("Could not import flight because the aircraft type was unknown for the following airframe name: '" + meta.airframeName + "'");
+ System.err.println("Please add this to the the `airframe_type` table in the database and update this method.");
+ System.exit(1);
+ }
+
+ } else if (key.equals("system_id")) {
+ meta.systemId = value.substring(1, value.length() - 1);
+ }
+ }
+ } catch (Exception e) {
+ //LOG.info("parsting flight information threw exception: " + e);
+ //e.printStackTrace();
+ throw new FatalFlightFileException("Flight information line was not properly formed with key value pairs.", e);
+ }
+ }
+
+
+ /**
+ * Parses for ScanEagle flight data
+ * @param fileInformation First line of the file
+ */
+ private void scanEagleParsing(String fileInformation) {
+
+ //need a custom method to process ScanEagle data because the column
+ //names are different and there is no header info
+ scanEagleSetTailAndID();
+ scanEagleHeaders(fileInformation);
+ }
+
+ /**
+ * Handles setting the tail number and system id for ScanEagle data
+ */
+ private void scanEagleSetTailAndID() {
+ String[] filenameParts = filename.split("_");
+ meta.startDateTime = filenameParts[0];
+ meta.endDateTime = meta.startDateTime;
+ LOG.log(Level.INFO, "start date: '{0}'", meta.startDateTime);
+ LOG.log(Level.INFO, "end date: '{0}'", meta.startDateTime);
+
+ //UND doesn't have the systemId for UAS anywhere in the filename or file (sigh)
+ meta.suggestedTailNumber = "N" + filenameParts[1] + "ND";
+ meta.systemId = meta.suggestedTailNumber;
+
+ LOG.log(Level.INFO, "suggested tail number: '{0}'", meta.suggestedTailNumber);
+ LOG.log(Level.INFO, "system id: '{0}'", meta.systemId);
+ }
+
+
+ // TODO: Figure out ScanEagle data
+ private void scanEagleHeaders(String fileInformation) {
+ String headersLine = fileInformation;
+ headers.addAll(Arrays.asList(headersLine.split("\\,", -1)));
+ headers.replaceAll(String::trim);
+ //scan eagle files have no data types, set all to ""
+ for (int i = 0; i < headers.size(); i++) {
+ dataTypes.add("none");
+ }
+ }
+}
diff --git a/src/main/java/org/ngafid/flights/process/DATFileProcessor.java b/src/main/java/org/ngafid/flights/process/DATFileProcessor.java
new file mode 100644
index 000000000..29fea4dba
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/DATFileProcessor.java
@@ -0,0 +1,741 @@
+package org.ngafid.flights.process;
+
+import com.opencsv.CSVReader;
+import com.opencsv.exceptions.CsvValidationException;
+import org.ngafid.flights.*;
+
+import java.io.*;
+import java.net.URI;
+import java.nio.file.*;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.stream.Stream;
+import java.util.zip.ZipFile;
+
+import static org.ngafid.common.TimeUtils.addMilliseconds;
+
+import Files.*;
+
+
+import org.ngafid.flights.FatalFlightFileException;
+import org.ngafid.flights.Flight;
+import org.ngafid.flights.FlightAlreadyExistsException;
+import org.ngafid.flights.MalformedFlightFileException;
+
+/**
+ * Parses DAT files from DJI flights after converting them to CSV
+ *
+ * @author Aaron Chan
+ */
+
+public class DATFileProcessor extends FlightFileProcessor {
+ private static final Logger LOG = Logger.getLogger(DATFileProcessor.class.getName());
+
+ private static final Set<String> STRING_COLS = new HashSet<>(List.of(new String[]{"flyCState", "flycCommand", "flightAction",
+ "nonGPSCause", "connectedToRC", "Battery:lowVoltage", "RC:ModeSwitch", "gpsUsed", "visionUsed", "IMUEX(0):err"}));
+
+ private final ZipFile zipFile;
+
+ public DATFileProcessor(Connection connection, InputStream stream, String filename, ZipFile file) {
+ super(connection, stream, filename);
+ this.zipFile = file;
+ }
+
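+ /**
+ * Converts the DAT entry to CSV, duplicates the input stream so the attribute
+ * lines and the column data can be read separately, and builds the double and
+ * string time series plus the flight metadata.
+ */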
+ @Override
+ public Stream<FlightBuilder> parse() throws FlightProcessingException {
+ try {
+ convertAndInsert();
+ List<InputStream> inputStreams = duplicateInputStream(stream, 2);
+ Map<Integer, String> indexedCols = new HashMap<>();
+ Map<String, DoubleTimeSeries> doubleTimeSeriesMap = new HashMap<>();
+ Map<String, StringTimeSeries> stringTimeSeriesMap = new HashMap<>();
+ Map<String, String> attributeMap = getAttributeMap(inputStreams.remove(inputStreams.size() - 1));
+
+ if (!attributeMap.containsKey("mcID(SN)")) {
+ throw new FlightProcessingException(new FatalFlightFileException("No DJI serial number provided in binary."));
+ }
+
+ try (CSVReader reader = new CSVReader(new BufferedReader(new InputStreamReader(inputStreams.remove(inputStreams.size() - 1))))) {
+ processCols(reader.readNext(), indexedCols, doubleTimeSeriesMap, stringTimeSeriesMap);
+
+ readData(reader, doubleTimeSeriesMap, stringTimeSeriesMap, indexedCols);
+ calculateLatLonGPS(doubleTimeSeriesMap);
+
+ if (attributeMap.containsKey("dateTime")) {
+ calculateDateTime(doubleTimeSeriesMap, stringTimeSeriesMap, attributeMap.get("dateTime"));
+ String dateTimeStr = findStartDateTime(doubleTimeSeriesMap);
+
+ if (dateTimeStr != null) {
+ calculateDateTime(doubleTimeSeriesMap, stringTimeSeriesMap, dateTimeStr);
+ }
+ }
+ } catch (CsvValidationException | FatalFlightFileException | IOException e) {
+ throw new FlightProcessingException(e);
+ } catch (ParseException e) {
+ e.printStackTrace();
+ }
+
+ dropBlankCols(doubleTimeSeriesMap, stringTimeSeriesMap);
+ doubleTimeSeriesMap.put("AltAGL", new DoubleTimeSeries("AltAGL", "ft")); // TODO: Should this be done in proc?
+
+ FlightMeta meta = new FlightMeta();
+ meta.setFilename(filename);
+ meta.setAirframeType("UAS Rotorcraft");
+ meta.setAirframeName("DJI " + attributeMap.get("ACType"));
+ meta.setSystemId(attributeMap.get("mcID(SN)"));
+
+
+ return Stream.of(new FlightBuilder[]{new FlightBuilder(meta, doubleTimeSeriesMap, stringTimeSeriesMap)});
+ } catch (NotDatFile | FileEnd | IOException e) {
+ throw new FlightProcessingException(e);
+ }
+ }
+
+ // TODO: Validate the conversion works still. Also maybe figure out another way of doing this since var args forced into FFP
+
+ /**
+ * Converts the DAT file to CSV and inserts it into the zip file
+ * @throws NotDatFile
+ * @throws IOException
+ * @throws FileEnd
+ */
+ private void convertAndInsert() throws NotDatFile, IOException, FileEnd {
+ String zipName = filename.substring(filename.lastIndexOf("/"));
+ String parentFolder = zipFile.getName().substring(0, zipFile.getName().lastIndexOf("/"));
+ File tempExtractedFile = new File(parentFolder, zipName);
+
+ System.out.println("Extracting to " + tempExtractedFile.getAbsolutePath());
+ try (InputStream inputStream = zipFile.getInputStream(zipFile.getEntry(filename)); FileOutputStream fileOutputStream = new FileOutputStream(tempExtractedFile)) {
+ int len;
+ byte[] buffer = new byte[1024];
+
+ while ((len = inputStream.read(buffer)) > 0) {
+ fileOutputStream.write(buffer, 0, len);
+ }
+ }
+
+ convertDATFile(tempExtractedFile);
+ File processedCSVFile = new File(tempExtractedFile.getAbsolutePath() + ".csv");
+ placeInZip(processedCSVFile.getAbsolutePath(), zipFile.getName().substring(zipFile.getName().lastIndexOf("/") + 1));
+ }
+
+ /**
+ * Places a file into the given zip file
+ * @param file - File to place
+ * @param zipFileName - Name of the zip file
+ * @throws IOException
+ */
+ private static void placeInZip(String file, String zipFileName) throws IOException {
+ LOG.info("Placing " + file + " in zip");
+
+ Map<String, String> zipENV = new HashMap<>();
+ zipENV.put("create", "true");
+
+ Path csvFilePath = Paths.get(file);
+ Path zipFilePath = Paths.get(csvFilePath.getParent() + "/" + zipFileName);
+
+ URI zipURI = URI.create("jar:" + zipFilePath.toUri());
+ try (FileSystem fileSystem = FileSystems.newFileSystem(zipURI, zipENV)) {
+ Path zipFileSystemPath = fileSystem.getPath(file.substring(file.lastIndexOf("/") + 1));
+ Files.write(zipFileSystemPath, Files.readAllBytes(csvFilePath), StandardOpenOption.CREATE);
+ }
+ }
+
+ /**
+ * Converts the DAT file to CSV
+ * @param file - File to convert
+ * @return - CSV converted file
+ * @throws NotDatFile
+ * @throws IOException
+ * @throws FileEnd
+ */
+ private static File convertDATFile(File file) throws NotDatFile, IOException, FileEnd {
+ LOG.info("Converting to CSV: " + file.getAbsolutePath());
+ DatFile datFile = DatFile.createDatFile(file.getAbsolutePath());
+ datFile.reset();
+ datFile.preAnalyze();
+
+ ConvertDat convertDat = datFile.createConVertDat();
+
+ String csvFilename = file.getAbsolutePath() + ".csv";
+ convertDat.csvWriter = new CsvWriter(csvFilename);
+ convertDat.createRecordParsers();
+
+ datFile.reset();
+ AnalyzeDatResults results = convertDat.analyze(false);
+ LOG.info(datFile.getFile().getAbsolutePath());
+
+ return datFile.getFile();
+ }
+
+ /**
+ * Reads the data from the converted CSV file
+ * @param reader - CSV reader
+ * @param doubleTimeSeriesMap - Map of double time series data
+ * @param stringTimeSeriesMap - Map of string time series data
+ * @param indexedCols - Map of indexed columns
+ * @throws IOException
+ * @throws CsvValidationException
+ */
+ private static void readData(CSVReader reader, Map<String, DoubleTimeSeries> doubleTimeSeriesMap,
+ Map<String, StringTimeSeries> stringTimeSeriesMap, Map<Integer, String> indexedCols) throws IOException, CsvValidationException {
+ String[] line;
+
+ while ((line = reader.readNext()) != null) {
+ for (int i = 0; i < line.length; i++) {
+
+ String column = indexedCols.get(i);
+
+ try {
+ if (doubleTimeSeriesMap.containsKey(column)) {
+ DoubleTimeSeries colTimeSeries = doubleTimeSeriesMap.get(column);
+ double value = !line[i].equals("") ? Double.parseDouble(line[i]) : Double.NaN;
+ colTimeSeries.add(value);
+ } else {
+ StringTimeSeries colTimeSeries = stringTimeSeriesMap.get(column);
+ colTimeSeries.add(line[i]);
+ }
+ } catch (NullPointerException e) {
+ LOG.log(Level.WARNING, "Column {0} not found in time series map", column);
+ } catch (NumberFormatException e) {
+ LOG.log(Level.WARNING, "Could not parse value {0} as double", line[i]);
+ }
+ }
+ }
+ }
+
+ /**
+ * Calculates GPS data from the given time series map
+ * @param doubleTimeSeriesMap - Map of double time series data
+ * @throws FatalFlightFileException
+ */
+ private static void calculateLatLonGPS(Map<String, DoubleTimeSeries> doubleTimeSeriesMap) throws FatalFlightFileException {
+ DoubleTimeSeries lonRad = doubleTimeSeriesMap.get("GPS(0):Long");
+ DoubleTimeSeries latRad = doubleTimeSeriesMap.get("GPS(0):Lat");
+ DoubleTimeSeries altMSL = doubleTimeSeriesMap.get("GPS(0):heightMSL");
+
+ if (lonRad == null || latRad == null) {
+ LOG.log(Level.WARNING, "Could not find GPS(0):Long or GPS(0):Lat in time series map");
+ throw new FatalFlightFileException("No GPS data found in binary.");
+ }
+
+ DoubleTimeSeries longDeg = new DoubleTimeSeries("Longitude", "degrees");
+ DoubleTimeSeries latDeg = new DoubleTimeSeries("Latitude", "degrees");
+ DoubleTimeSeries msl = new DoubleTimeSeries("AltMSL", "ft");
+
+ for (int i = 0; i < lonRad.size(); i++) {
+ longDeg.add(lonRad.get(i));
+ }
+
+ for (int i = 0; i < latRad.size(); i++) {
+ latDeg.add(latRad.get(i));
+ }
+
+ for (int i = 0; i < altMSL.size(); i++) {
+ msl.add(altMSL.get(i));
+ }
+
+ doubleTimeSeriesMap.put("Longitude", longDeg);
+ doubleTimeSeriesMap.put("Latitude", latDeg);
+ doubleTimeSeriesMap.put("AltMSL", altMSL);
+ }
+
+ /**
+ * Calculates the local date and time from the given time series map
+ * @param doubleTimeSeriesMap - Map of double time series data
+ * @param stringTimeSeriesMap - Map of string time series data
+ * @param dateTimeStr - Format of the date and time
+ * @throws ParseException
+ */
+ private static void calculateDateTime(Map<String, DoubleTimeSeries> doubleTimeSeriesMap, Map<String, StringTimeSeries> stringTimeSeriesMap, String dateTimeStr) throws ParseException {
+ StringTimeSeries localDateSeries = new StringTimeSeries("Lcl Date", "yyyy-mm-dd");
+ StringTimeSeries localTimeSeries = new StringTimeSeries("Lcl Time", "hh:mm:ss");
+ StringTimeSeries utcOfstSeries = new StringTimeSeries("UTCOfst", "hh:mm"); // Always 0
+ DoubleTimeSeries seconds = doubleTimeSeriesMap.get("offsetTime");
+
+ SimpleDateFormat lclDateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ SimpleDateFormat lclTimeFormat = new SimpleDateFormat("HH:mm:ss");
+
+ String[] dateTime = dateTimeStr.split(" ");
+ String date = dateTime[0];
+
+ if (date.split("-")[1].length() == 1) {
+ date = date.substring(0, 5) + "0" + date.substring(5);
+ }
+
+ if (date.split("-")[2].length() == 1) {
+ date = date.substring(0, 8) + "0" + date.substring(8);
+ }
+
+ String time = dateTime[1];
+
+ Date parsedDate = (new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")).parse(date + " " + time);
+ for (int i = 0; i < seconds.size(); i++) {
+ int millis = (int) (seconds.get(i) * 1000);
+ Date newDate = addMilliseconds(parsedDate, millis);
+
+ localDateSeries.add(lclDateFormat.format(newDate));
+ localTimeSeries.add(lclTimeFormat.format(newDate));
+ utcOfstSeries.add("+00:00");
+ }
+
+ stringTimeSeriesMap.put("Lcl Date", localDateSeries);
+ stringTimeSeriesMap.put("Lcl Time", localTimeSeries);
+ stringTimeSeriesMap.put("UTCOfst", utcOfstSeries);
+ }
+
+
+ /**
+ * Determine the start date and time from the given time series map
+ * @param doubleTimeSeriesMap - Map of double time series data
+ * @return
+ */
+ private static String findStartDateTime(Map<String, DoubleTimeSeries> doubleTimeSeriesMap) {
+ DoubleTimeSeries dateSeries = doubleTimeSeriesMap.get("GPS(0):Date");
+ DoubleTimeSeries timeSeries = doubleTimeSeriesMap.get("GPS(0):Time");
+ DoubleTimeSeries offsetTime = doubleTimeSeriesMap.get("offsetTime");
+
+ if (dateSeries == null || timeSeries == null) {
+ LOG.log(Level.WARNING, "Could not find GPS(0):Date or GPS(0):Time in time series map");
+ return null;
+ }
+
+ int colCount = 0;
+ while (colCount < dateSeries.size() && colCount < timeSeries.size()) {
+ int date = (int) dateSeries.get(colCount); // Date is an integer in the format YYYYMMDD
+ int time = (int) timeSeries.get(colCount);
+
+
+ if (!Double.isNaN(date) && !Double.isNaN(time) && date != 0 && time != 0) {
+ SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
+
+ String year = String.valueOf(date).substring(0, 4);
+ String month = String.valueOf(date).substring(4, 6);
+ String day = String.valueOf(date).substring(6, 8);
+
+ String hour = String.valueOf(time).substring(0, 2);
+ String minute = String.valueOf(time).substring(2, 4);
+ String second = String.valueOf(time).substring(4, 6);
+
+ try {
+ Date parsedDate = dateFormat.parse(year + month + day + hour + minute + second);
+ int currentOffset = (int) (offsetTime.get(colCount) * 1000);
+ Date newDate = addMilliseconds(parsedDate, -currentOffset);
+
+ return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(newDate);
+ } catch (ParseException e) {
+ LOG.log(Level.WARNING, "Could not parse date {0} and time {1} as date", new Object[]{date, time});
+ return null;
+ }
+ }
+
+ colCount++;
+ }
+
+ return null;
+ }
+
+ /**
+ * Duplicate an input stream a given number of times
+ * @param inputStream - Input Stream to duplicate
+ * @param copies - Number of copies to make
+ * @return - List of input streams
+ * @throws IOException
+ */
+ private static List<InputStream> duplicateInputStream(InputStream inputStream, int copies) throws IOException {
+ List<InputStream> inputStreams = new ArrayList<>();
+ List<ByteArrayOutputStream> outputStreams = new ArrayList<>();
+
+ for (int i = 0; i < copies; i++) {
+ outputStreams.add(new ByteArrayOutputStream());
+ }
+
+ byte[] buffer = new byte[1024];
+ int len;
+ while ((len = inputStream.read(buffer)) > -1) {
+ for (OutputStream outputStream : outputStreams) {
+ outputStream.write(buffer, 0, len);
+ }
+ }
+
+ for (OutputStream outputStream : outputStreams) {
+ outputStream.flush();
+ inputStreams.add(new ByteArrayInputStream(((ByteArrayOutputStream) outputStream).toByteArray()));
+ }
+
+ return inputStreams;
+ }
+
+ /**
+ * Gets the attributes of the flight
+ * @param stream - Input stream of flight file
+ * @return
+ */
+ private static Map<String, String> getAttributeMap(InputStream stream) {
+ Map<String, String> attributeMap = new HashMap<>();
+ try (CSVReader reader = new CSVReader(new BufferedReader(new InputStreamReader(stream)))) {
+ String[] line;
+ while ((line = reader.readNext()) != null) {
+ if (line[line.length - 1].contains("|")) {
+ String[] split = line[line.length - 1].split("\\|");
+ attributeMap.put(split[0], split[1]);
+ }
+ }
+ } catch (IOException | CsvValidationException e) {
+ e.printStackTrace();
+ }
+
+ LOG.log(Level.INFO, "Attribute Map: {0}", attributeMap);
+
+ return attributeMap;
+ }
+
+ /**
+ * Drops all columns that have no data.
+ * @param doubleTimeSeriesMap - Map of double time series data
+ * @param stringTimeSeriesMap - Map of string time series data
+ */
+ private static void dropBlankCols(Map<String, DoubleTimeSeries> doubleTimeSeriesMap, Map<String, StringTimeSeries> stringTimeSeriesMap) {
+ // Use removeIf rather than removing from keySet() while iterating, which would throw a ConcurrentModificationException.
+ doubleTimeSeriesMap.values().removeIf(series -> series.size() == 0);
+ stringTimeSeriesMap.values().removeIf(series -> series.size() == 0);
+ }
+
+ /**
+ * Initializes the time series columns based on the flight data headers.
+ * @param cols - Column headers from the flight file
+ * @param indexedCols - Map populated with column index to column name
+ * @param doubleTimeSeriesMap - Map of double time series data
+ * @param stringTimeSeriesMap - Map of string time series data
+ */
+ private static void processCols(String[] cols, Map<Integer, String> indexedCols, Map<String, DoubleTimeSeries> doubleTimeSeriesMap, Map<String, StringTimeSeries> stringTimeSeriesMap) {
+ int i = 0;
+ for (String col : cols) {
+ indexedCols.put(i++, col);
+ String category = col.split(":")[0];
+
+ if (category.contains("(")) {
+ category = category.substring(0, category.indexOf("("));
+ }
+
+ switch (category) {
+ case "IMU_ATTI":
+ case "IMUEX":
+ handleIMUDataType(col, doubleTimeSeriesMap, stringTimeSeriesMap);
+ break;
+ case "GPS":
+ handleGPSDataType(col, doubleTimeSeriesMap, stringTimeSeriesMap);
+ break;
+
+ case "Battery":
+ case "SMART_BATT":
+ handleBatteryDataType(col, doubleTimeSeriesMap);
+ break;
+
+ case "Motor":
+ handleMotorDataType(col, doubleTimeSeriesMap, stringTimeSeriesMap);
+ break;
+
+ case "RC":
+ handleRCDataType(col, doubleTimeSeriesMap, stringTimeSeriesMap);
+ break;
+
+ case "AirComp":
+ handleAirCompDataType(col, doubleTimeSeriesMap);
+ break;
+
+ case "General":
+ doubleTimeSeriesMap.put(col, new DoubleTimeSeries(col, "ft"));
+ break;
+
+ case "Controller":
+ doubleTimeSeriesMap.put(col, new DoubleTimeSeries(col, "level"));
+ break;
+
+ default:
+ handleMiscDataType(col, doubleTimeSeriesMap, stringTimeSeriesMap);
+ }
+
+ }
+ }
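+
+ // For example (illustrative headers): "GPS(0):Lat" is categorized as "GPS" and becomes a
+ // DoubleTimeSeries in degrees, while "flyCState" falls through to handleMiscDataType and is
+ // stored as a StringTimeSeries.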
+
+ /**
+ * Helper for initializing IMU data
+ * @param colName - Name of column
+ * @param doubleTimeSeriesMap - Map of double time series data
+ * @param stringTimeSeriesMap - Map of string time series data
+ */
+ private static void handleIMUDataType(String colName, Map<String, DoubleTimeSeries> doubleTimeSeriesMap, Map<String, StringTimeSeries> stringTimeSeriesMap) {
+ String dataType;
+
+ if (colName.contains("accel")) {
+ dataType = "m/s^2";
+ } else if (colName.contains("gyro") || colName.contains("Gyro")) {
+ dataType = "deg/s";
+ } else if (colName.contains("vel") || colName.contains("Velocity")) {
+ dataType = "m/s";
+ } else if (colName.contains("mag")) {
+ dataType = "A/m";
+ } else if (colName.contains("Longitude") || colName.contains("Latitude")) {
+ dataType = "degrees";
+ } else if (colName.contains("roll") || colName.contains("pitch") || colName.contains("yaw") || colName.contains("directionOfTravel")) {
+ dataType = "degrees";
+ } else if (colName.contains("distance") || colName.contains("GPS-H") || colName.contains("Alti")) {
+ dataType = "ft";
+ } else if (colName.contains("temperature")) {
+ dataType = "Celsius";
+ } else if (colName.contains("barometer")) {
+ dataType = "atm";
+ } else {
+ if (colName.contains("err")) {
+ stringTimeSeriesMap.put("IMUEX(0):err", new StringTimeSeries("IMUEX Error", "error"));
+ return;
+ }
+
+ dataType = "number";
+ if (!colName.contains("num")) {
+ LOG.log(Level.WARNING, "IMU Unknown data type: {0}", colName);
+
+ }
+ }
+
+ doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType));
+ }
+
+ /**
+ * Helper for initializing GPS data
+ * @param colName - Name of column
+ * @param doubleTimeSeriesMap - Map of double time series data
+ * @param stringTimeSeriesMap - Map of string time series data
+ */
+ private static void handleGPSDataType(String colName, Map<String, DoubleTimeSeries> doubleTimeSeriesMap, Map<String, StringTimeSeries> stringTimeSeriesMap) {
+ String dataType;
+
+ if (colName.contains("dateTimeStamp")) {
+ stringTimeSeriesMap.put(colName, new StringTimeSeries(colName, "yyyy-mm-ddThh:mm:ssZ"));
+ return;
+ }
+
+ if (colName.contains("Long") || colName.contains("Lat")) {
+ dataType = "degrees";
+ } else if (colName.contains("vel")) {
+ dataType = "m/s";
+ } else if (colName.contains("height")) {
+ dataType = "ft";
+ } else if (colName.contains("DOP")) {
+ dataType = "DOP Value";
+ } else if (colName.contains("Date")) {
+ dataType = "Date";
+ } else if (colName.contains("Time")) {
+ dataType = "Time";
+ } else if (colName.contains("sAcc")) {
+ dataType = "cm/s";
+ } else {
+ dataType = "number";
+ if (!colName.contains("num")) {
+ LOG.log(Level.WARNING, "GPS Unknown data type: {0}", colName);
+ }
+ }
+
+ doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType));
+ }
+
+ /**
+ * Helper for initializing battery data
+ * @param colName - Name of column
+ * @param doubleTimeSeriesMap - Map of double time series data
+ */
+ private static void handleBatteryDataType(String colName, Map<String, DoubleTimeSeries> doubleTimeSeriesMap) {
+ String dataType = "number";
+ String lowerColName = colName.toLowerCase();
+
+ if (lowerColName.contains("volt")) {
+ dataType = "Voltage";
+ } else if (lowerColName.contains("watts")) {
+ dataType = "Watts";
+ } else if (lowerColName.contains("current")) {
+ dataType = "Amps";
+ } else if (lowerColName.contains("cap")) {
+ dataType = "Capacity";
+ } else if (lowerColName.contains("temp")) {
+ dataType = "Celsius";
+ } else if (lowerColName.contains("%")) {
+ dataType = "Percentage";
+ } else if (lowerColName.contains("time")) {
+ dataType = "seconds";
+ } else {
+ LOG.log(Level.WARNING, "Battery Unknown data type: {0}", colName);
+ }
+
+ doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType));
+ }
+
+ /**
+ * Helper for initializing motor data
+ * @param colName - Name of column
+ * @param doubleTimeSeriesMap - Map of double time series data
+ * @param stringTimeSeriesMap - Map of string time series data
+ */
+ private static void handleMotorDataType(String colName, Map<String, DoubleTimeSeries> doubleTimeSeriesMap, Map<String, StringTimeSeries> stringTimeSeriesMap) {
+ if (colName.contains("lowVoltage")) {
+ stringTimeSeriesMap.put(colName, new StringTimeSeries(colName, "Low Voltage"));
+ return;
+ } else if (colName.contains("status")) {
+ stringTimeSeriesMap.put(colName, new StringTimeSeries(colName, "Battery Status"));
+ return;
+ }
+
+ String dataType = "number";
+
+ if (colName.contains("V_out") || colName.contains("Volts")) {
+ dataType = "Voltage";
+ } else if (colName.contains("Speed")) {
+ dataType = "m/s";
+ } else if (colName.contains("Current")) {
+ dataType = "Amps";
+ } else if (colName.contains("PPMrecv")) {
+ dataType = "RC Stop Command";
+ } else if (colName.contains("Temp")) {
+ dataType = "Celsius";
+ } else if (colName.contains("Status")) {
+ dataType = "Status Number";
+ } else if (colName.contains("Hz")) {
+ dataType = "Status Number";
+ } else {
+ LOG.log(Level.WARNING, "Motor Unknown data type: {0}", colName);
+ }
+
+ doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType));
+ }
+
+ /**
+ * Helper for initializing RC data
+ * @param colName - Name of column
+ * @param doubleTimeSeriesMap - Map of double time series data
+ * @param stringTimeSeriesMap - Map of string time series data
+ */
+ private static void handleRCDataType(String colName, Map<String, DoubleTimeSeries> doubleTimeSeriesMap, Map<String, StringTimeSeries> stringTimeSeriesMap) {
+ String dataType = "number";
+
+ if (colName.contains("Aileron")) {
+ dataType = "Aileron";
+ } else if (colName.contains("Elevator")) {
+ dataType = "Elevator";
+ } else if (colName.contains("Rudder")) {
+ dataType = "Rudder";
+ } else if (colName.contains("Throttle")) {
+ dataType = "Throttle";
+ } else {
+ if (colName.equals("RC:ModeSwitch")) {
+ stringTimeSeriesMap.put(colName, new StringTimeSeries("RC Mode Switch", "Mode"));
+ return;
+ }
+
+ LOG.log(Level.WARNING, "RC Unknown data type: {0}", colName);
+ }
+
+ doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType));
+ }
+
+ /**
+ * Helper for initializing air comp data
+ * @param colName - Name of column
+ * @param doubleTimeSeriesMap - Map of double time series data
+ */
+ private static void handleAirCompDataType(String colName, Map<String, DoubleTimeSeries> doubleTimeSeriesMap) {
+ String dataType;
+
+ if (colName.contains("AirSpeed")) {
+ dataType = "knots";
+ } else if (colName.contains("Alti")) {
+ dataType = "ft";
+ } else if (colName.contains("Vel")) {
+ dataType = "km/h";
+ } else {
+ dataType = "number";
+ LOG.log(Level.WARNING, "AirComp Unknown data type: {0}", colName);
+ }
+
+ doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType));
+ }
+
+ /**
+ * Helper for initializing other types of data
+ * @param colName - Name of column
+ * @param doubleTimeSeriesMap - Map of double time series data
+ * @param stringTimeSeriesMap - Map of string time series data
+ */
+ private static void handleMiscDataType(String colName, Map<String, DoubleTimeSeries> doubleTimeSeriesMap, Map<String, StringTimeSeries> stringTimeSeriesMap) {
+ String dataType;
+ boolean isDouble = true;
+ switch (colName) {
+ case "Tick#":
+ dataType = "tick";
+ break;
+
+ case "offsetTime":
+ case "flightTime":
+ dataType = "seconds";
+ break;
+
+ case "gpsHealth":
+ dataType = "GPS Health";
+ break;
+
+ case "flyCState":
+ dataType = "C State";
+ isDouble = false;
+ break;
+
+ case "flycCommand":
+ dataType = "Command";
+ isDouble = false;
+ break;
+
+ case "flightAction":
+ dataType = "Action";
+ isDouble = false;
+ break;
+
+ case "nonGPSCause":
+ dataType = "GPS Cause";
+ isDouble = false;
+ break;
+
+ case "connectedToRC":
+ dataType = "Connection";
+ isDouble = false;
+ break;
+
+ case "gpsUsed":
+ case "visionUsed":
+ dataType = "boolean";
+ isDouble = false;
+ break;
+
+ case "Attribute|Value":
+ dataType = "Key-Value Pair";
+ isDouble = false;
+ break;
+
+ default:
+ dataType = "N/A";
+ isDouble = false;
+ LOG.log(Level.WARNING, "Misc Unknown data type: {0}", colName);
+ }
+
+ if (isDouble) {
+ doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType));
+ } else {
+ stringTimeSeriesMap.put(colName, new StringTimeSeries(colName, dataType));
+ }
+ }
+}
diff --git a/src/main/java/org/ngafid/flights/process/DependencyGraph.java b/src/main/java/org/ngafid/flights/process/DependencyGraph.java
new file mode 100644
index 000000000..a977fe2db
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/DependencyGraph.java
@@ -0,0 +1,286 @@
+package org.ngafid.flights.process;
+
+import java.sql.SQLException;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ForkJoinPool;
+import java.util.concurrent.ForkJoinTask;
+import java.util.concurrent.RecursiveTask;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.*;
+import java.util.logging.Logger;
+import java.util.stream.Collectors;
+
+import org.ngafid.flights.FatalFlightFileException;
+import org.ngafid.flights.MalformedFlightFileException;
+
+/**
+ * A dependency graph which represents the dependencies of ProcessSteps on one another.
+ **/
+public class DependencyGraph {
+ private static final Logger LOG = Logger.getLogger(DependencyGraph.class.getName());
+
+ class DependencyNode {
+ final ProcessStep step;
+
+ // Used for cycle detection.
+ boolean mark = false;
+ AtomicBoolean enabled = new AtomicBoolean(true);
+
+ final HashSet<DependencyNode> requiredBy = new HashSet<>(32);
+ final HashSet<DependencyNode> requires = new HashSet<>(32);
+
+ ArrayList<Exception> exceptions = new ArrayList<>();
+
+ public DependencyNode(ProcessStep step) {
+ this.step = step;
+ }
+
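+ // disableChildren() is called when this step turns out not to be applicable; disable() is called
+ // when the step fails or when a step it depends on has been disabled. Both mark this node as
+ // disabled and propagate the disable to every dependent step.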
+ void disableChildren() {
+ if (enabled.get()) {
+ enabled.set(false);
+ if (step.isRequired()) {
+ String reason = step.explainApplicability();
+ LOG.severe("Required step " + step.getClass().getName() + " has been disabled for the following reason:\n " + reason);
+ exceptions.add(new FatalFlightFileException(reason));
+ }
+ for (var child : requiredBy) child.disable();
+ }
+ }
+
+ void disable() {
+ if (enabled.get()) {
+ enabled.set(false);
+ if (step.isRequired()) {
+ LOG.severe("Required step " + step.toString() + " has been disabled.");
+ exceptions.add(
+ new FatalFlightFileException(
+ "Required step " + step.getClass().getName()
+ + " has been disabled because a required parent step has been disabled"));
+ }
+ for (var child : requiredBy) child.disable();
+ }
+ }
+
+ void compute() {
+ try {
+
+ if (step.applicable()) {
+ step.compute();
+ } else {
+ disableChildren();
+ }
+
+ } catch (SQLException | MalformedFlightFileException | FatalFlightFileException e) {
+ LOG.warning("Encountered exception when calculating process step " + step.toString() + ": " + e.toString());
+ exceptions.add(e);
+ disable();
+ }
+ }
+ }
+
+ class DependencyNodeTask extends RecursiveTask<Void> {
+ private static final long serialVersionUID = 0;
+
+ // This is used to avoid creating duplicate tasks.
+ // This isn't a problem w/ a tree-like problem, but ours is a DAG.
+ final ConcurrentHashMap<DependencyNode, ForkJoinTask<Void>> taskMap;
+ final DependencyNode node;
+
+ public DependencyNodeTask(DependencyNode node, ConcurrentHashMap<DependencyNode, ForkJoinTask<Void>> taskMap) {
+ this.taskMap = taskMap;
+ this.node = node;
+ }
+
+ ForkJoinTask<Void> getTask(DependencyNode node) {
+ return taskMap.computeIfAbsent(node, x -> new DependencyNodeTask(x, taskMap).fork());
+ }
+
+ public Void compute() {
+ for (var requiredNode : node.requires) {
+ getTask(requiredNode).join();
+ }
+
+ if (node.enabled.get())
+ node.compute();
+
+ return null;
+ }
+ }
+
+ /**
+ * Dummy step meant to act as a root node in DAG. This is done by adding all of the columns included in the file
+ * as output columns, so all other steps will depend on this.
+ **/
+ class DummyStep extends ProcessStep {
+ Set<String> outputColumns = new HashSet<>();
+
+ public DummyStep(FlightBuilder builder) {
+ // We can pass in null rather than a connection object
+ super(null, builder);
+ outputColumns.addAll(doubleTS.keySet());
+ outputColumns.addAll(stringTS.keySet());
+ }
+
+ public Set<String> getRequiredDoubleColumns() { return Collections.emptySet(); }
+ public Set<String> getRequiredStringColumns() { return Collections.emptySet(); }
+ public Set<String> getRequiredColumns() { return Collections.emptySet(); }
+ public Set<String> getOutputColumns() { return outputColumns; }
+
+ public boolean airframeIsValid(String airframe) { return true; }
+
+ // Left blank intentionally
+ public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException {
+ LOG.info("Computed dummy step!");
+ }
+ }
+
+ private void nodeConflictError(ProcessStep first, ProcessStep second) throws FatalFlightFileException {
+ throw new FatalFlightFileException(
+ "ERROR when building dependency graph! "
+ + "Two ProcessSteps are indicated as having the same output column. "
+ + "While it is possible for two ProcessSteps to have the same output column(s), "
+ + "their use should be mutually exclusive from one another. "
+ + "\nDEBUG INFO:\n node 0: " + first.toString() + "\n node 1: " + second.toString());
+
+ }
+
+ private DependencyNode registerStep(ProcessStep step) throws FatalFlightFileException {
+ DependencyNode node = new DependencyNode(step);
+ nodes.add(node);
+
+ for (String outputColumn : step.getOutputColumns()) {
+ DependencyNode other = null;
+ if ((other = columnToSource.put(outputColumn, node)) != null) nodeConflictError(step, other.step);
+ }
+
+ return node;
+ }
+
+ /**
+ * Create the edges. An edge exists from step X to step Y if step X has an output column
+ * that step Y relies upon.
+ **/
+ private void createEdges(DependencyNode node) throws FatalFlightFileException {
+ for (String column : node.step.getRequiredColumns()) {
+ DependencyNode sourceNode = columnToSource.get(column);
+ if (sourceNode != null) {
+ sourceNode.requiredBy.add(node);
+ node.requires.add(sourceNode);
+ }
+ }
+ }
+
+ // Maps column name to the node where that column is computed
+ HashMap<String, DependencyNode> columnToSource = new HashMap<>(64);
+ HashSet<DependencyNode> nodes = new HashSet<>(64);
+ FlightBuilder builder;
+
+ public DependencyGraph(FlightBuilder builder, List<ProcessStep> steps) throws FlightProcessingException {
+ /**
+ * Create nodes for each step and create a mapping from output column name
+ * to the node that outputs that column. This should be a unique mapping, as
+ * we don't want two steps generating the same output column.
+ **/
+
+ this.builder = builder;
+
+ try {
+ registerStep(new DummyStep(builder));
+ for (var step : steps) registerStep(step);
+ for (var node : nodes) createEdges(node);
+ } catch (FatalFlightFileException e) {
+ throw new FlightProcessingException(e);
+ }
+ }
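+
+ // Usage sketch (this mirrors FlightBuilder.build): construct the graph from a builder and its steps,
+ // then call compute() to run every applicable step once all of its required columns are available.
+ //
+ // DependencyGraph graph = new DependencyGraph(flightBuilder, steps);
+ // graph.compute();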
+
+ // Modifies the flight object in place.
+ public void compute() throws FlightProcessingException {
+ // Start with all of the leaf nodes.
+ ConcurrentHashMap<DependencyNode, ForkJoinTask<Void>> tasks = new ConcurrentHashMap<>();
+ ArrayList<ForkJoinTask<Void>> initialTasks = new ArrayList<>();
+ for (var node : nodes) {
+ if (node.requiredBy.size() == 0) {
+ var task = new DependencyNodeTask(node, tasks);
+ initialTasks.add(task);
+ tasks.put(node, task);
+ }
+ }
+
+ var handles = initialTasks
+ .stream()
+ .map(x -> x.fork())
+ .collect(Collectors.toList());
+ handles.forEach(ForkJoinTask::join);
+
+ ArrayList<Exception> fatalExceptions = new ArrayList<>();
+ for (var node : nodes) {
+ for (var e : node.exceptions) {
+ if (e instanceof MalformedFlightFileException me) {
+ builder.exceptions.add(me);
+ } else if (e instanceof FatalFlightFileException fe) {
+ fatalExceptions.add(fe);
+ } else if (e instanceof SQLException se) {
+ fatalExceptions.add(se);
+ } else {
+ LOG.severe(
+ "Encountered exception of unknown type when executing dependency graph. "
+ + "\"" + e.getMessage() + "\"" + "\n."
+ + "This should not be possible - if this seems plausible you should add a handler for this "
+ + "type of exception in DependencyGraph::compute.");
+ e.printStackTrace();
+ System.exit(1);
+ }
+ }
+ }
+
+ if (fatalExceptions.size() != 0)
+ throw new FlightProcessingException(fatalExceptions);
+ }
+
+ public void scrutinize() {
+ cycleCheck();
+ requiredCheck();
+ }
+
+ // Ensure that there are no required steps that are children to optional steps,
+ // since that wouldn't make sense.
+ private void requiredCheck() {
+ for (var node : nodes) {
+ if (!node.step.isRequired())
+ continue;
+
+ for (var parent : node.requiredBy) {
+ if (!parent.step.isRequired()) {
+ System.err.println("ERROR in your DependencyGraph! The optional step '" + parent + "' has a required dependent step '" + node + "'.");
+ System.exit(1);
+ }
+ }
+ }
+ }
+
+ // Ensure there are no cycles!
+ private void cycleCheck() {
+ for (var src : nodes) {
+ for (var node : nodes)
+ node.mark = false;
+
+ Queue<DependencyNode> q = new ArrayDeque<>();
+ var dst = src;
+ for (var child : src.requiredBy)
+ q.add(child);
+
+ while ((dst = q.poll()) != null) {
+ if (dst == src) {
+ System.err.println("ERROR in your DependencyGraph! Cycle was detected from step '" + src + "' to step '" + dst + "'.");
+ System.exit(1);
+ }
+
+ dst.mark = true;
+ for (var child : dst.requiredBy) {
+ if (!child.mark)
+ q.add(child);
+ }
+ }
+ }
+ }
+}
diff --git a/src/main/java/org/ngafid/flights/process/FlightBuilder.java b/src/main/java/org/ngafid/flights/process/FlightBuilder.java
new file mode 100644
index 000000000..69fcd45e9
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/FlightBuilder.java
@@ -0,0 +1,100 @@
+package org.ngafid.flights.process;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+
+import java.util.Map;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.stream.Collectors;
+
+import org.ngafid.flights.*;
+import static org.ngafid.flights.process.ProcessStep.required;
+import org.ngafid.flights.process.*;
+
+public class FlightBuilder {
+
+ public final ConcurrentHashMap<String, DoubleTimeSeries> doubleTimeSeries;
+ public final ConcurrentHashMap<String, StringTimeSeries> stringTimeSeries;
+
+ private ArrayList<Itinerary> itinerary = null;
+
+ public final FlightMeta meta;
+
+ public final ArrayList<MalformedFlightFileException> exceptions = new ArrayList<>();
+
+ public FlightBuilder(FlightMeta meta, Map<String, DoubleTimeSeries> doubleTimeSeries, Map<String, StringTimeSeries> stringTimeSeries) {
+ this.doubleTimeSeries = new ConcurrentHashMap<>(doubleTimeSeries);
+ this.stringTimeSeries = new ConcurrentHashMap<>(stringTimeSeries);
+ this.meta = meta;
+ }
+
+ public FlightBuilder addTimeSeries(String name, DoubleTimeSeries timeSeries) {
+ doubleTimeSeries.put(name, timeSeries);
+ return this;
+ }
+
+ public FlightBuilder addTimeSeries(String name, StringTimeSeries timeSeries) {
+ stringTimeSeries.put(name, timeSeries);
+ return this;
+ }
+
+ public synchronized FlightBuilder setStartDateTime(String startDateTime) {
+ this.meta.startDateTime = startDateTime;
+ return this;
+ }
+
+ public synchronized FlightBuilder setEndDateTime(String endDateTime) {
+ this.meta.endDateTime = endDateTime;
+ return this;
+ }
+
+ public synchronized FlightBuilder setItinerary(ArrayList<Itinerary> itinerary) {
+ this.itinerary = itinerary;
+ return this;
+ }
+
+ public synchronized FlightBuilder updateProcessingStatus(int processingStatus) {
+ this.meta.processingStatus |= processingStatus;
+ return this;
+ }
+
+ private static final List processSteps = List.of(
+ required(ProcessAltAGL::new),
+ required(ProcessAirportProximity::new),
+ required(ProcessStartEndTime::new),
+ ProcessLaggedAltMSL::new,
+ ProcessStallIndex::new,
+ ProcessTotalFuel::new,
+ ProcessDivergence::new,
+ ProcessLOCI::new,
+ ProcessItinerary::new
+ );
+
+ // This can be overridden.
+ protected List<ProcessStep> gatherSteps(Connection connection) {
+ // Add all of our processing steps here...
+ // The order doesn't matter; the DependencyGraph will resolve
+ // the order in the event that there are dependencies.
+ return processSteps.stream().map(factory -> factory.create(connection, this)).collect(Collectors.toList());
+ }
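+
+ // Sketch of an override (MyCustomStep is hypothetical): a subclass that needs an extra step can
+ // append to the defaults, e.g.
+ //
+ // @Override
+ // protected List<ProcessStep> gatherSteps(Connection connection) {
+ // List<ProcessStep> steps = new ArrayList<>(super.gatherSteps(connection));
+ // steps.add(new MyCustomStep(connection, this));
+ // return steps;
+ // }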
+
+ // throws a flight processing exception if an unrecoverable error occurred.
+ public Flight build(Connection connection) throws FlightProcessingException {
+ DependencyGraph dg = new DependencyGraph(this, gatherSteps(connection));
+
+ dg.compute();
+
+ // TODO: Make sure headers are calculated appropriately.
+ // TODO: Make sure hasAGL and hasCoords get set correctly
+ try {
+ return new Flight(connection, meta, doubleTimeSeries, stringTimeSeries, itinerary, exceptions);
+ } catch (SQLException e) {
+ throw new FlightProcessingException(e);
+ }
+ }
+
+ // TODO: implement this
+ public void validate() {}
+}
diff --git a/src/main/java/org/ngafid/flights/process/FlightFileFormatException.java b/src/main/java/org/ngafid/flights/process/FlightFileFormatException.java
new file mode 100644
index 000000000..24f620d85
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/FlightFileFormatException.java
@@ -0,0 +1,16 @@
+package org.ngafid.flights.process;
+
+public class FlightFileFormatException extends Exception {
+ private static final long serialVersionUID = 124311;
+
+ String filename;
+
+ public FlightFileFormatException(String filename) {
+ this.filename = filename;
+ }
+
+ public String getMessage() {
+ return "File '" + filename + "' is of an unrecognized or unsupported file format.";
+ }
+
+}
diff --git a/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java b/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java
new file mode 100644
index 000000000..764c88acc
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java
@@ -0,0 +1,193 @@
+package org.ngafid.flights.process;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.sql.Connection;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipFile;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.Objects;
+import java.util.Spliterator;
+import java.util.Spliterators;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.ngafid.filters.Pair;
+import org.ngafid.UploadException;
+import org.ngafid.flights.Flight;
+import org.ngafid.flights.Upload;
+
+public abstract class FlightFileProcessor {
+
+ interface Factory {
+ FlightFileProcessor create(Connection connection, InputStream is, String filename);
+ }
+
+ // Right now this is only for zip files but this could easily be extended to handle other types of archives.
+ // Most of the code is reusable.
+ public static class Pipeline {
+ final Connection connection;
+ final ZipFile zipFile;
+ final Map<String, Factory> factories;
+ final Upload upload;
+ private int validFlightsCount = 0;
+ private int warningFlightsCount = 0;
+
+ private ConcurrentHashMap<String, UploadException> flightErrors = new ConcurrentHashMap<>();
+
+ public Pipeline(Connection connection, Upload upload, ZipFile zipFile) {
+ this.connection = connection;
+ this.upload = upload;
+ this.zipFile = zipFile;
+
+ this.factories = Map.of(
+ "csv", this::createCSVFileProcessor,
+ "dat", this::createDATFileProcessor,
+ "json", JSONFileProcessor::new,
+ "gpx", GPXFileProcessor::new
+ );
+ }
+
+ public Map<String, UploadException> getFlightErrors() {
+ return Collections.unmodifiableMap(flightErrors);
+ }
+
+ private FlightFileProcessor createDATFileProcessor(Connection connection, InputStream is, String filename) {
+ return new DATFileProcessor(connection, is, filename, zipFile);
+ }
+
+ private FlightFileProcessor createCSVFileProcessor(Connection connection, InputStream is, String filename) {
+ return new CSVFileProcessor(connection, is, filename, upload);
+ }
+
+ public Stream<FlightFileProcessor> stream() {
+ Enumeration<? extends ZipEntry> entries = zipFile.entries();
+ Stream<? extends ZipEntry> validFiles =
+ StreamSupport.stream(
+ Spliterators.spliteratorUnknownSize(entries.asIterator(), Spliterator.ORDERED),
+ false
+ )
+ .filter(z -> !z.getName().contains("__MACOSX"))
+ .filter(z -> !z.isDirectory());
+
+ return validFiles.map(this::create).filter(Objects::nonNull).collect(Collectors.toList()).stream();
+ }
+
+ public Stream<FlightBuilder> parse(FlightFileProcessor processor) {
+ try {
+ return processor.parse();
+ } catch (FlightProcessingException e) {
+ flightErrors.put(processor.filename, new UploadException(e.getMessage(), e, processor.filename));
+ return Stream.of();
+ }
+ }
+
+ public Flight build(FlightBuilder fb) {
+ try {
+ return fb.build(connection);
+ } catch (FlightProcessingException e) {
+ flightErrors.put(fb.meta.filename, new UploadException(e.getMessage(), e, fb.meta.filename));
+ return null;
+ }
+ }
+
+ public List<Flight> build(Stream<FlightBuilder> fbs) {
+ return fbs.map(this::build).filter(Objects::nonNull).collect(Collectors.toList());
+ }
+
+ private FlightFileProcessor create(ZipEntry entry) {
+ String filename = entry.getName();
+
+ int index = filename.lastIndexOf('.');
+ String extension = index >= 0 ? filename.substring(index + 1).toLowerCase() : "";
+ Factory f = factories.get(extension);
+ if (f != null) {
+ try {
+ return f.create(connection, zipFile.getInputStream(entry), filename);
+ } catch (IOException e) {
+ flightErrors.put(filename, new UploadException(e.getMessage(), e, filename));
+ }
+ } else {
+ flightErrors.put(filename, new UploadException("Unknown file type '" + extension + "' contained in zip file.", filename));
+ }
+
+ return null;
+ }
+
+ public Flight tabulateFlightStatus(Flight flight) {
+ if (flight.getStatus().equals("WARNING"))
+ warningFlightsCount++;
+ else
+ validFlightsCount++;
+
+ return flight;
+ }
+
+ public int getWarningFlightsCount() {
+ return warningFlightsCount;
+ }
+
+ public int getValidFlightsCount() {
+ return validFlightsCount;
+ }
+ }
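+
+ // Usage sketch (assumes the caller already opened the upload's zip archive):
+ //
+ // Pipeline pipeline = new Pipeline(connection, upload, zipFile);
+ // List<Flight> flights = pipeline.build(pipeline.stream().flatMap(pipeline::parse));
+ // Map<String, UploadException> errors = pipeline.getFlightErrors();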
+
+ protected final Connection connection;
+ protected final InputStream stream;
+ protected final String filename;
+
+ public FlightFileProcessor(Connection connection, InputStream stream, String filename) {
+ this.connection = connection;
+ this.stream = stream;
+ this.filename = filename;
+ }
+
+ // If an exception occurs, it will be stored here.
+ FlightProcessingException parseException = null;
+
+ // FlightBuilders produced by pipelinedParse, if parsing succeeded.
+ private Stream<FlightBuilder> parsedFlightBuilders = null;
+
+ /**
+ * Parses the file for flight data to be processed
+ * @return A stream of FlightBuilders
+ * @throws FlightProcessingException if the file cannot be parsed
+ */
+ protected abstract Stream<FlightBuilder> parse() throws FlightProcessingException;
+
+ public FlightFileProcessor pipelinedParse() {
+ try {
+ parsedFlightBuilders = parse();
+ assert parsedFlightBuilders != null;
+ } catch (FlightProcessingException e) {
+ parseException = e;
+ }
+
+ return this;
+ }
+
+ protected Stream<Flight> flights = null;
+ protected final ArrayList<FlightProcessingException> buildExceptions = new ArrayList<>();
+
+ private Flight build(FlightBuilder fb) {
+ try {
+ return fb.build(connection);
+ } catch (FlightProcessingException e) {
+ buildExceptions.add(e);
+ }
+ return null;
+ }
+
+ public FlightFileProcessor pipelinedBuild() {
+ if (parseException == null) {
+ flights = parsedFlightBuilders.map(this::build).filter(Objects::nonNull);
+ }
+
+ return this;
+ }
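+
+ // Usage sketch: a processor can also be driven step by step, e.g.
+ // processor.pipelinedParse().pipelinedBuild();
+ // after which `flights` holds the successfully built flights and `buildExceptions` any build failures.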
+}
diff --git a/src/main/java/org/ngafid/flights/process/FlightMeta.java b/src/main/java/org/ngafid/flights/process/FlightMeta.java
new file mode 100644
index 000000000..5e8007ac1
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/FlightMeta.java
@@ -0,0 +1,125 @@
+package org.ngafid.flights.process;
+
+/**
+ * Utility class used by FlightBuilder to call the Flight constructor.
+ **/
+public final class FlightMeta {
+ // TODO: Should be made private
+ public int fleetId = -1,
+ uploaderId = -1,
+ uploadId = -1,
+ processingStatus = 0;
+
+ public String startDateTime,
+ endDateTime,
+ md5Hash,
+ airframeType,
+ systemId,
+ filename,
+ airframeName,
+ calculated,
+ suggestedTailNumber;
+ public int getFleetId() {
+ return fleetId;
+ }
+
+ public void setFleetId(int fleetId) {
+ this.fleetId = fleetId;
+ }
+
+ public int getUploaderId() {
+ return uploaderId;
+ }
+
+ public void setUploaderId(int uploaderId) {
+ this.uploaderId = uploaderId;
+ }
+
+ public int getUploadId() {
+ return uploadId;
+ }
+
+ public void setUploadId(int uploadId) {
+ this.uploadId = uploadId;
+ }
+
+ public int getProcessingStatus() {
+ return processingStatus;
+ }
+
+ public void setProcessingStatus(int processingStatus) {
+ this.processingStatus = processingStatus;
+ }
+
+ public String getStartDateTime() {
+ return startDateTime;
+ }
+
+ public void setStartDateTime(String startDateTime) {
+ this.startDateTime = startDateTime;
+ }
+
+ public String getEndDateTime() {
+ return endDateTime;
+ }
+
+ public void setEndDateTime(String endDateTime) {
+ this.endDateTime = endDateTime;
+ }
+
+ public String getMd5Hash() {
+ return md5Hash;
+ }
+
+ public void setMd5Hash(String md5Hash) {
+ this.md5Hash = md5Hash;
+ }
+
+ public String getAirframeType() {
+ return airframeType;
+ }
+
+ public void setAirframeType(String airframeType) {
+ this.airframeType = airframeType;
+ }
+
+ public String getSystemId() {
+ return systemId;
+ }
+
+ public void setSystemId(String systemId) {
+ this.systemId = systemId;
+ }
+
+ public String getFilename() {
+ return filename;
+ }
+
+ public void setFilename(String filename) {
+ this.filename = filename;
+ }
+
+ public String getAirframeName() {
+ return airframeName;
+ }
+
+ public void setAirframeName(String airframeName) {
+ this.airframeName = airframeName;
+ }
+
+ public String getCalculated() {
+ return calculated;
+ }
+
+ public void setCalculated(String calculated) {
+ this.calculated = calculated;
+ }
+
+ public String getSuggestedTailNumber() {
+ return suggestedTailNumber;
+ }
+
+ public void setSuggestedTailNumber(String suggestedTailNumber) {
+ this.suggestedTailNumber = suggestedTailNumber;
+ }
+}
diff --git a/src/main/java/org/ngafid/flights/process/FlightProcessingException.java b/src/main/java/org/ngafid/flights/process/FlightProcessingException.java
new file mode 100644
index 000000000..617f80280
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/FlightProcessingException.java
@@ -0,0 +1,49 @@
+package org.ngafid.flights.process;
+
+import java.util.List;
+import java.util.Collections;
+
+/**
+ * An exception that contains all of the FATAL exceptions that occurred during flight processing.
+ * Namely SQLException, FatalFlightFileException, IOException, and FlightAlreadyExistsException.
+ *
+ * If flight processing steps are done in parallel multiple exceptions could be thrown, which is
+ * where this class comes in: it will contain all of the exceptions that occurred.
+ *
+ **/
+public class FlightProcessingException extends Exception {
+ private static final long serialVersionUID = 1235003;
+ private static final String DEFAULT_MESSAGE = "(exception message was empty / null)";
+
+ private List<Exception> exceptions;
+
+ public FlightProcessingException(Exception e) {
+ exceptions = List.of(e);
+ }
+
+ public FlightProcessingException(List<Exception> exceptions) {
+ this.exceptions = Collections.unmodifiableList(exceptions);
+ }
+
+ public String getMessage() {
+ String message;
+
+ if (exceptions.size() == 1) {
+
+ message = exceptions.get(0).getMessage();
+ if (message == null)
+ return DEFAULT_MESSAGE;
+
+ } else {
+ message = "Encountered the following " + exceptions.size() + " errors when processing a flight:\n";
+ for (var e : exceptions) {
+ String eMessage = e.getMessage();
+ if (eMessage == null)
+ eMessage = DEFAULT_MESSAGE;
+ message += eMessage + "\n\n";
+ }
+ }
+
+ return message;
+ }
+}
diff --git a/src/main/java/org/ngafid/flights/process/GPXFileProcessor.java b/src/main/java/org/ngafid/flights/process/GPXFileProcessor.java
new file mode 100644
index 000000000..15e9ef75e
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/GPXFileProcessor.java
@@ -0,0 +1,201 @@
+package org.ngafid.flights.process;
+
+import org.ngafid.flights.*;
+import org.w3c.dom.Document;
+import org.w3c.dom.NamedNodeMap;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.SAXException;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.logging.Logger;
+import java.util.stream.Stream;
+
+/**
+ * This class is responsible for parsing GPX files.
+ *
+ * @author Josh Karns
+ */
+
+public class GPXFileProcessor extends FlightFileProcessor {
+ private static final Logger LOG = Logger.getLogger(GPXFileProcessor.class.getName());
+
+ public GPXFileProcessor(Connection connection, InputStream stream, String filename) {
+ super(connection, stream, filename);
+ }
+
+ @Override
+ public Stream<FlightBuilder> parse() throws FlightProcessingException {
+ try {
+ List<FlightBuilder> flights = parseFlights(filename, stream);
+
+ return flights.stream();
+ } catch (SQLException | MalformedFlightFileException | IOException | FatalFlightFileException |
+ FlightAlreadyExistsException e) {
+ // Wrap in FlightProcessingException so the pipeline's error handling can record it per file.
+ throw new FlightProcessingException(e);
+ }
+ }
+
+ public List<FlightBuilder> parseFlights(String entry, InputStream stream) throws SQLException, MalformedFlightFileException, IOException, FatalFlightFileException, FlightAlreadyExistsException {
+ List<FlightBuilder> flights = new ArrayList<>();
+ // BE-GPS-2200
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ try {
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ Document doc = db.parse(stream);
+
+ NodeList l = doc.getElementsByTagName("trkseg");
+ if (l.getLength() == 0)
+ throw new FatalFlightFileException("could not parse GPX data file: failed to find data node.");
+
+ if (l.getLength() != 1)
+ throw new FatalFlightFileException("could not parse GPX data file: found multiple data nodes.");
+
+ Node dataNode = l.item(0);
+ int len = dataNode.getChildNodes().getLength();
+
+ DoubleTimeSeries lat = new DoubleTimeSeries("Latitude", "degrees", len);
+ DoubleTimeSeries lon = new DoubleTimeSeries("Longitude", "degrees", len);
+ DoubleTimeSeries msl = new DoubleTimeSeries("AltMSL", "ft", len);
+ DoubleTimeSeries spd = new DoubleTimeSeries("GndSpd", "kt", len);
+ ArrayList<Timestamp> timestamps = new ArrayList<>(len);
+ StringTimeSeries localDateSeries = new StringTimeSeries("Lcl Date", "yyyy-mm-dd");
+ StringTimeSeries localTimeSeries = new StringTimeSeries("Lcl Time", "hh:mm:ss");
+ StringTimeSeries utcOfstSeries = new StringTimeSeries("UTCOfst", "hh:mm");
+ // ss.SSSSSSXXX
+ SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
+
+ SimpleDateFormat lclDateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ SimpleDateFormat lclTimeFormat = new SimpleDateFormat("HH:mm:ss");
+
+ // NodeList serialNumberNodes = doc.getElementsByTagName("badelf:modelSerialNumber");
+ // String serialNumber = serialNumberNodes.item(0).getTextContent();
+ NodeList nicknameNodes = doc.getElementsByTagName("badelf:modelNickname");
+ if (nicknameNodes.item(0) == null)
+ throw new FatalFlightFileException("GPX file is missing necessary metadata (modelNickname).");
+ String nickname = nicknameNodes.item(0).getTextContent();
+
+ NodeList fdrModel = doc.getElementsByTagName("badelf:modelName");
+ if (fdrModel.item(0) == null)
+ throw new FatalFlightFileException("GPX file is missing necessary metadata (modelName).");
+ String airframeName = fdrModel.item(0).getTextContent();
+ LOG.info("Airframe name: " + airframeName);
+
+ NodeList dates = doc.getElementsByTagName("time");
+ NodeList datanodes = doc.getElementsByTagName("trkpt");
+ NodeList elenodes = doc.getElementsByTagName("ele");
+ NodeList spdnodes = doc.getElementsByTagName("badelf:speed");
+
+
+ if (spdnodes.item(0) == null)
+ throw new FatalFlightFileException("GPX file is missing GndSpd.");
+
+ if (!(dates.getLength() == datanodes.getLength() &&
+ dates.getLength() == elenodes.getLength() &&
+ dates.getLength() == spdnodes.getLength())) {
+ throw new FatalFlightFileException("Mismatching number of data tags in GPX file");
+ }
+
+ for (int i = 0; i < dates.getLength(); i++) {
+ Date parsedDate = dateFormat.parse(dates.item(i).getTextContent());
+ timestamps.add(new Timestamp(parsedDate.getTime()));
+ Calendar cal = new Calendar.Builder().setInstant(parsedDate).build();
+
+ int offsetMS = cal.getTimeZone().getOffset(parsedDate.getTime());
+ String sign = offsetMS < 0 ? "-" : "+";
+ offsetMS = offsetMS < 0 ? -offsetMS : offsetMS;
+
+ int offsetSEC = offsetMS / 1000;
+ int offsetMIN = offsetSEC / 60;
+ int offsetHRS = offsetMIN / 60;
+ offsetMIN %= 60;
+
+ String offsetHrsStr = (offsetHRS < 10 ? "0" : "") + offsetHRS;
+ String offsetMinStr = (offsetMIN < 10 ? "0" : "") + offsetMIN;
+ // This should look like +HH:mm
+ utcOfstSeries.add(sign + offsetHrsStr + ":" + offsetMinStr);
+
+ localDateSeries.add(lclDateFormat.format(parsedDate));
+ localTimeSeries.add(lclTimeFormat.format(parsedDate));
+
+ Node spdNode = spdnodes.item(i);
+ // Convert m / s to knots
+ spd.add(Double.parseDouble(spdNode.getTextContent()) * 1.94384);
+
+ Node eleNode = elenodes.item(i);
+ // Convert meters to feet.
+ msl.add(Double.parseDouble(eleNode.getTextContent()) * 3.28084);
+
+ Node d = datanodes.item(i);
+ NamedNodeMap attrs = d.getAttributes();
+
+ Node latNode = attrs.getNamedItem("lat");
+ lat.add(Double.parseDouble(latNode.getTextContent()));
+
+ Node lonNode = attrs.getNamedItem("lon");
+ lon.add(Double.parseDouble(lonNode.getTextContent()));
+ }
+
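+ // Split the track into separate flights wherever consecutive points are more than a minute apart,
+ // and discard segments shorter than 60 points (roughly a minute of data, assuming points are
+ // recorded about once per second).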
+ int start = 0;
+ for (int end = 1; end < timestamps.size(); end++) {
+ // 1 minute delay -> new flight.
+ if (timestamps.get(end).getTime() - timestamps.get(end - 1).getTime() > 60000
+ || end == localTimeSeries.size() - 1) {
+ if (end == localTimeSeries.size() - 1) {
+ end += 1;
+ }
+
+ if (end - start < 60) {
+ start = end;
+ continue;
+ }
+
+ StringTimeSeries localTime = localTimeSeries.subSeries(start, end);
+ StringTimeSeries localDate = localDateSeries.subSeries(start, end);
+ StringTimeSeries offset = utcOfstSeries.subSeries(start, end);
+ DoubleTimeSeries nlat = lat.subSeries(start, end);
+ DoubleTimeSeries nlon = lon.subSeries(start, end);
+ DoubleTimeSeries nmsl = msl.subSeries(start, end);
+ DoubleTimeSeries nspd = spd.subSeries(start, end);
+
+
+ HashMap<String, DoubleTimeSeries> doubleSeries = new HashMap<>();
+ doubleSeries.put("GndSpd", nspd);
+ doubleSeries.put("Longitude", nlon);
+ doubleSeries.put("Latitude", nlat);
+ doubleSeries.put("AltMSL", nmsl);
+
+ HashMap<String, StringTimeSeries> stringSeries = new HashMap<>();
+ stringSeries.put("Lcl Date", localDate);
+ stringSeries.put("Lcl Time", localTime);
+ stringSeries.put("UTCOfst", offset);
+
+ FlightMeta meta = new FlightMeta();
+ meta.setFilename(this.filename + ":" + start + "-" + end);
+ meta.setAirframeName(airframeName);
+ meta.setSuggestedTailNumber(nickname);
+ meta.setSystemId(nickname);
+ meta.setAirframeType("Fixed Wing");
+
+ flights.add(new FlightBuilder(meta, doubleSeries, stringSeries));
+ start = end;
+ }
+ }
+
+ } catch (ParserConfigurationException | SAXException | ParseException e) {
+ throw new FatalFlightFileException("Could not parse GPX data file: " + e.getMessage());
+ }
+
+ return flights;
+ }
+}
diff --git a/src/main/java/org/ngafid/flights/process/JSONFileProcessor.java b/src/main/java/org/ngafid/flights/process/JSONFileProcessor.java
new file mode 100644
index 000000000..7d0a41601
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/JSONFileProcessor.java
@@ -0,0 +1,171 @@
+package org.ngafid.flights.process;
+
+import com.google.gson.Gson;
+import com.google.gson.stream.JsonReader;
+import org.ngafid.common.TimeUtils;
+import org.ngafid.flights.*;
+
+import javax.xml.bind.DatatypeConverter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.logging.Logger;
+import java.util.stream.Stream;
+
+/**
+ * This class is responsible for parsing JSON files.
+ *
+ * @author Aaron Chan
+ */
+
+public class JSONFileProcessor extends FlightFileProcessor {
+ private static final Logger LOG = Logger.getLogger(JSONFileProcessor.class.getName());
+
+ public JSONFileProcessor(Connection connection, InputStream stream, String filename) {
+ super(connection, stream, filename);
+ }
+
+ @Override
+ public Stream<FlightBuilder> parse() throws FlightProcessingException {
+ FlightMeta flightMeta = new FlightMeta();
+ final Map<String, DoubleTimeSeries> doubleTimeSeries = new HashMap<>();
+ final Map<String, StringTimeSeries> stringTimeSeries = new HashMap<>();
+
+
+ try {
+ processTimeSeries(flightMeta, doubleTimeSeries, stringTimeSeries);
+ } catch (SQLException | MalformedFlightFileException | IOException | FatalFlightFileException |
+ FlightAlreadyExistsException e) {
+ throw new FlightProcessingException(e);
+ }
+
+ return Stream.of(new FlightBuilder(flightMeta, doubleTimeSeries, stringTimeSeries));
+ }
+
+ private void processTimeSeries(FlightMeta flightMeta, Map<String, DoubleTimeSeries> doubleTimeSeries, Map<String, StringTimeSeries> stringTimeSeries) throws SQLException, MalformedFlightFileException, IOException, FatalFlightFileException, FlightAlreadyExistsException {
+ String status = "";
+ Gson gson = new Gson();
+ JsonReader reader = new JsonReader(new InputStreamReader(super.stream));
+ Map jsonMap = gson.fromJson(reader, Map.class);
+
+ SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HHmmssZ");
+
+ Date parsedDate;
+ try {
+ parsedDate = dateFormat.parse((String) jsonMap.get("date"));
+ } catch (Exception e) {
+ throw new MalformedFlightFileException("Could not parse date from JSON file: " + e.getMessage());
+ }
+
+ // Date.getTimezoneOffset() is minutes west of UTC, so negate it and format the absolute value to get +HH:00 / -HH:00.
+ int timezoneOffset = -parsedDate.getTimezoneOffset() / 60;
+ String timezoneOffsetString = (timezoneOffset >= 0 ? "+" : "-") + String.format("%02d:00", Math.abs(timezoneOffset));
+
+ ArrayList<String> headers = (ArrayList<String>) jsonMap.get("details_headers");
+ ArrayList<ArrayList<Double>> lines = (ArrayList<ArrayList<Double>>) jsonMap.get("details_data");
+ int len = headers.size();
+
+ DoubleTimeSeries lat = new DoubleTimeSeries("Latitude", "degrees", len);
+ DoubleTimeSeries lon = new DoubleTimeSeries("Longitude", "degrees", len);
+ DoubleTimeSeries agl = new DoubleTimeSeries("AltAGL", "ft", len);
+ DoubleTimeSeries spd = new DoubleTimeSeries("GndSpd", "kt", len);
+
+ ArrayList<Timestamp> timestamps = new ArrayList<>(len);
+ StringTimeSeries localDateSeries = new StringTimeSeries("Lcl Date", "yyyy-mm-dd");
+ StringTimeSeries localTimeSeries = new StringTimeSeries("Lcl Time", "hh:mm:ss");
+ StringTimeSeries utcOfstSeries = new StringTimeSeries("UTCOfst", "hh:mm");
+
+ SimpleDateFormat lclDateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ SimpleDateFormat lclTimeFormat = new SimpleDateFormat("HH:mm:ss");
+
+ int latIndex = headers.indexOf("product_gps_latitude");
+ int lonIndex = headers.indexOf("product_gps_longitude");
+ int altIndex = headers.indexOf("altitude");
+ int spdIndex = headers.indexOf("speed");
+ int timeIndex = headers.indexOf("time");
+
+ double timeDiff = ((double) lines.get(lines.size() - 1).get(timeIndex)) - ((double) lines.get(0).get(timeIndex));
+ if (timeDiff < 180) throw new FatalFlightFileException("Flight file was less than 3 minutes long, ignoring.");
+
+ double prevSeconds = 0;
+ double metersToFeet = 3.28084;
+
+ for (ArrayList<Double> line : lines) {
+ double milliseconds = (double) line.get(timeIndex) - prevSeconds;
+ prevSeconds = (double) line.get(timeIndex);
+ parsedDate = TimeUtils.addMilliseconds(parsedDate, (int) milliseconds);
+
+ if ((double) line.get(latIndex) > 90 || (double) line.get(latIndex) < -90) {
+ LOG.severe("Invalid latitude: " + line.get(latIndex));
+ status = "WARNING";
+ lat.add(Double.NaN);
+ } else {
+ lat.add((Double) line.get(latIndex));
+ }
+
+ if ((double) line.get(lonIndex) > 180 || (double) line.get(lonIndex) < -180) {
+ LOG.severe("Invalid longitude: " + line.get(lonIndex));
+ status = "WARNING";
+ lon.add(Double.NaN);
+ } else {
+ lon.add((Double) line.get(lonIndex));
+ }
+
+ agl.add((Double) line.get(altIndex) * metersToFeet);
+ spd.add((Double) line.get(spdIndex));
+
+ localDateSeries.add(lclDateFormat.format(parsedDate));
+ localTimeSeries.add(lclTimeFormat.format(parsedDate));
+ utcOfstSeries.add(timezoneOffsetString);
+ timestamps.add(new Timestamp(parsedDate.getTime()));
+ }
+
+ int start = 0;
+ int end = timestamps.size() - 1;
+
+ DoubleTimeSeries nspd = spd.subSeries(start, end);
+ DoubleTimeSeries nlon = lon.subSeries(start, end);
+ DoubleTimeSeries nlat = lat.subSeries(start, end);
+ DoubleTimeSeries nagl = agl.subSeries(start, end);
+
+ doubleTimeSeries.put("GndSpd", nspd);
+ doubleTimeSeries.put("Longitude", nlon);
+ doubleTimeSeries.put("Latitude", nlat);
+ doubleTimeSeries.put("AltAGL", nagl); // Parrot data is stored as AGL and most likely in meters
+
+ StringTimeSeries localDate = localDateSeries.subSeries(start, end);
+ StringTimeSeries localTime = localTimeSeries.subSeries(start, end);
+ StringTimeSeries offset = utcOfstSeries.subSeries(start, end);
+
+ stringTimeSeries.put("Lcl Date", localDate);
+ stringTimeSeries.put("Lcl Time", localTime);
+ stringTimeSeries.put("UTCOfst", offset);
+
+ MessageDigest md = null;
+ try {
+ md = MessageDigest.getInstance("MD5");
+ } catch (NoSuchAlgorithmException e) {
+ throw new FatalFlightFileException("Could not create MD5 hash: " + e.getMessage());
+ }
+ byte[] hash = md.digest(filename.getBytes());
+
+ flightMeta.setStartDateTime(localDateSeries.get(0) + " " + localTimeSeries.get(0) + " " + utcOfstSeries.get(0));
+ flightMeta.setEndDateTime(localDateSeries.get(localDateSeries.size() - 1) + " " + localTimeSeries.get(localTimeSeries.size() - 1) + " " + utcOfstSeries.get(utcOfstSeries.size() - 1));
+ flightMeta.setMd5Hash(DatatypeConverter.printHexBinary(hash).toLowerCase());
+ flightMeta.setAirframeType("UAS Rotorcraft");
+ flightMeta.setSystemId((String) jsonMap.get("serial_number"));
+ flightMeta.setFilename(super.filename);
+ flightMeta.setAirframeName((String) jsonMap.get("controller_model"));
+ flightMeta.setCalculated(""); // TODO: Figure this out
+ flightMeta.setSuggestedTailNumber((String) jsonMap.get("serial_number"));
+ }
+}
diff --git a/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java b/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java
new file mode 100644
index 000000000..291d6a96e
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java
@@ -0,0 +1,95 @@
+package org.ngafid.flights.process;
+
+import java.util.Set;
+import java.util.Collections;
+import java.sql.Connection;
+import java.sql.SQLException;
+
+import org.ngafid.airports.*;
+import org.ngafid.common.MutableDouble;
+import org.ngafid.flights.DoubleTimeSeries;
+import org.ngafid.flights.StringTimeSeries;
+import static org.ngafid.flights.Parameters.*;
+import org.ngafid.flights.process.ProcessStep;
+import org.ngafid.flights.process.FlightBuilder;
+import org.ngafid.flights.FatalFlightFileException;
+import org.ngafid.flights.MalformedFlightFileException;
+
+public class ProcessAirportProximity extends ProcessStep {
+ private static Set<String> REQUIRED_DOUBLE_COLUMNS = Set.of(LATITUDE, LONGITUDE, ALT_AGL);
+ private static Set<String> OUTPUT_COLUMNS = Set.of(NEAREST_RUNWAY, AIRPORT_DISTANCE, RUNWAY_DISTANCE, NEAREST_AIRPORT);
+ private final static double MAX_AIRPORT_DISTANCE_FT = 10000;
+ private final static double MAX_RUNWAY_DISTANCE_FT = 100;
+
+ public ProcessAirportProximity(Connection connection, FlightBuilder builder) {
+ super(connection, builder);
+ }
+
+ public Set<String> getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; }
+ public Set<String> getRequiredStringColumns() { return Collections.emptySet(); }
+ public Set<String> getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; }
+ public Set<String> getOutputColumns() { return OUTPUT_COLUMNS; }
+
+ public boolean airframeIsValid(String airframe) { return true; }
+
+ public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException {
+ DoubleTimeSeries latitudeTS = builder.doubleTimeSeries.get(LATITUDE);
+ DoubleTimeSeries longitudeTS = builder.doubleTimeSeries.get(LONGITUDE);
+ DoubleTimeSeries altitudeAGLTS = builder.doubleTimeSeries.get(ALT_AGL);
+
+ int sizeHint = latitudeTS.size();
+
+ StringTimeSeries nearestAirportTS = new StringTimeSeries("NearestAirport", "IATA Code", sizeHint);
+ stringTS.put("NearestAirport", nearestAirportTS);
+
+ DoubleTimeSeries airportDistanceTS = new DoubleTimeSeries("AirportDistance", "ft", sizeHint);
+ doubleTS.put("AirportDistance", airportDistanceTS);
+
+ StringTimeSeries nearestRunwayTS = new StringTimeSeries("NearestRunway", "IATA Code", sizeHint);
+ stringTS.put("NearestRunway", nearestRunwayTS);
+
+ DoubleTimeSeries runwayDistanceTS = new DoubleTimeSeries("RunwayDistance", "ft", sizeHint);
+ doubleTS.put("RunwayDistance", runwayDistanceTS);
+
+
+ for (int i = 0; i < latitudeTS.size(); i++) {
+ double latitude = latitudeTS.get(i);
+ double longitude = longitudeTS.get(i);
+ double altitudeAGL = altitudeAGLTS.get(i);
+
+ // TODO: Move this outside of the loop. To avoid re-allocation ?
+ MutableDouble airportDistance = new MutableDouble();
+
+ Airport airport = null;
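+ // Only look for a nearby airport when the aircraft is low (at or below 2000 ft AGL);
+ // higher samples simply get empty / NaN proximity values.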
+ if (altitudeAGL <= 2000) {
+ airport = Airports.getNearestAirportWithin(latitude, longitude, MAX_AIRPORT_DISTANCE_FT, airportDistance);
+ }
+
+ if (airport == null) {
+ nearestAirportTS.add("");
+ airportDistanceTS.add(Double.NaN);
+ nearestRunwayTS.add("");
+ runwayDistanceTS.add(Double.NaN);
+ } else {
+ nearestAirportTS.add(airport.iataCode);
+ airportDistanceTS.add(airportDistance.get());
+
+ MutableDouble runwayDistance = new MutableDouble();
+ Runway runway = airport.getNearestRunwayWithin(latitude, longitude, MAX_RUNWAY_DISTANCE_FT, runwayDistance);
+ if (runway == null) {
+ nearestRunwayTS.add("");
+ runwayDistanceTS.add(Double.NaN);
+ } else {
+ nearestRunwayTS.add(runway.name);
+ runwayDistanceTS.add(runwayDistance.get());
+ }
+ }
+
+ }
+
+ stringTS.put(NEAREST_RUNWAY, nearestRunwayTS);
+ stringTS.put(NEAREST_AIRPORT, nearestAirportTS);
+ doubleTS.put(RUNWAY_DISTANCE, runwayDistanceTS);
+ doubleTS.put(AIRPORT_DISTANCE, airportDistanceTS);
+ }
+}
diff --git a/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java b/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java
new file mode 100644
index 000000000..327613f74
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java
@@ -0,0 +1,60 @@
+package org.ngafid.flights.process;
+
+import java.util.Set;
+import java.util.Collections;
+import java.sql.Connection;
+import java.sql.SQLException;
+
+import java.nio.file.NoSuchFileException;
+
+import org.ngafid.flights.Flight;
+import org.ngafid.terrain.TerrainCache;
+import org.ngafid.flights.DoubleTimeSeries;
+import static org.ngafid.flights.Parameters.*;
+import org.ngafid.flights.FatalFlightFileException;
+import org.ngafid.flights.MalformedFlightFileException;
+
+public class ProcessAltAGL extends ProcessStep {
+ private static Set<String> REQUIRED_DOUBLE_COLUMNS = Set.of(ALT_MSL, LATITUDE, LONGITUDE);
+ private static Set<String> OUTPUT_COLUMNS = Set.of(ALT_AGL);
+
+ public ProcessAltAGL(Connection connection, FlightBuilder builder) {
+ super(connection, builder);
+ }
+
+ public Set<String> getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; }
+ public Set<String> getRequiredStringColumns() { return Collections.emptySet(); }
+ public Set<String> getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; }
+ public Set<String> getOutputColumns() { return OUTPUT_COLUMNS; }
+
+ public boolean airframeIsValid(String airframe) { return true; }
+
+ public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException {
+ DoubleTimeSeries altitudeMSLTS = doubleTS.get(ALT_MSL);
+ DoubleTimeSeries latitudeTS = doubleTS.get(LATITUDE);
+ DoubleTimeSeries longitudeTS = doubleTS.get(LONGITUDE);
+
+ DoubleTimeSeries altitudeAGLTS = withConnection(connection -> new DoubleTimeSeries(connection, ALT_AGL, UNIT_FT_AGL));
+
+ for (int i = 0; i < altitudeMSLTS.size(); i++) {
+ double altitudeMSL = altitudeMSLTS.get(i);
+ double latitude = latitudeTS.get(i);
+ double longitude = longitudeTS.get(i);
+
+ if (Double.isNaN(altitudeMSL) || Double.isNaN(latitude) || Double.isNaN(longitude)) {
+ altitudeAGLTS.add(Double.NaN);
+ continue;
+ }
+
+ try {
+ int altitudeAGL = TerrainCache.getAltitudeFt(altitudeMSL, latitude, longitude);
+ altitudeAGLTS.add(altitudeAGL);
+ } catch (NoSuchFileException e) {
+ throw new MalformedFlightFileException("Could not calculate AGL for this flight as it had latitudes/longitudes outside of the United States.");
+ }
+ }
+
+ doubleTS.put(ALT_AGL, altitudeAGLTS);
+ }
+
+}
diff --git a/src/main/java/org/ngafid/flights/process/ProcessDivergence.java b/src/main/java/org/ngafid/flights/process/ProcessDivergence.java
new file mode 100644
index 000000000..7a2fb9715
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/ProcessDivergence.java
@@ -0,0 +1,171 @@
+package org.ngafid.flights.process;
+
+
+import java.util.Set;
+import java.util.Map;
+import static java.util.Map.entry;
+import java.util.List;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Collections;
+import java.sql.Connection;
+import java.sql.SQLException;
+
+import java.nio.file.NoSuchFileException;
+
+import org.ngafid.flights.Flight;
+import org.ngafid.terrain.TerrainCache;
+import org.ngafid.flights.DoubleTimeSeries;
+import static org.ngafid.flights.Parameters.*;
+import static org.ngafid.flights.Airframes.*;
+import org.ngafid.flights.FatalFlightFileException;
+import org.ngafid.flights.MalformedFlightFileException;
+
+public class ProcessDivergence extends ProcessStep {
+
+ private record DivergenceConfig (List<String> parameters, String output) {}
+
+ private static final Set<String> OUTPUT_COLUMNS = Set.of(TOTAL_FUEL);
+ private static final Set<String> AIRFRAME_BLACKLIST = Set.of(AIRFRAME_SCAN_EAGLE, AIRFRAME_DJI);
+
+ private static final List<DivergenceConfig> CESSNA_CONFIG =
+ List.of(
+ new DivergenceConfig(List.of("E1 CHT1", "E1 CHT2", "E1 CHT3", "E1 CHT4"), "E1 CHT Divergence"),
+ new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 EGT3", "E1 EGT4"), "E1 EGT Divergence")
+ );
+
+ private static final List<DivergenceConfig> PA_28_CONFIG =
+ List.of(
+ new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 EGT3", "E1 EGT4"), "E1 EGT Divergence")
+ );
+
+ private static final List<DivergenceConfig> PA_44_CONFIG =
+ List.of(
+ new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 EGT3", "E1 EGT4"), "E1 EGT Divergence"),
+ new DivergenceConfig(List.of("E2 EGT1", "E2 EGT2", "E2 EGT3", "E2 EGT4"), "E2 EGT Divergence")
+ );
+
+ private static final List<DivergenceConfig> SIX_CYLINDER_CIRRUS =
+ List.of(
+ new DivergenceConfig(List.of("E1 CHT1", "E1 CHT2", "E1 CHT3", "E1 CHT4", "E1 CHT5", "E1 CHT6"), "E1 CHT Divergence"),
+ new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 EGT3", "E1 EGT4", "E1 EGT5", "E1 EGT6"), "E1 EGT Divergence")
+ );
+
+ private static final List<DivergenceConfig> DIAMOND_CONFIG =
+ List.of(
+ new DivergenceConfig(List.of("E1 CHT1", "E1 CHT2", "E1 CHT3", "E1 CHT4"), "E1 CHT Divergence"),
+ new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 EGT3", "E1 EGT4"), "E1 EGT Divergence")
+ );
+
+ private static final Map<String, List<DivergenceConfig>> CONFIG_MAP =
+ Map.ofEntries(
+ entry(AIRFRAME_CESSNA_172R, CESSNA_CONFIG),
+ entry(AIRFRAME_CESSNA_172S, CESSNA_CONFIG),
+ entry(AIRFRAME_PA_28_181, PA_28_CONFIG),
+ entry(AIRFRAME_PA_44_180, PA_44_CONFIG),
+ entry(AIRFRAME_CIRRUS_SR20, SIX_CYLINDER_CIRRUS),
+ entry(AIRFRAME_CESSNA_T182T, SIX_CYLINDER_CIRRUS),
+ entry(AIRFRAME_CESSNA_182T, SIX_CYLINDER_CIRRUS),
+ entry(AIRFRAME_BEECHCRAFT_A36_G36, SIX_CYLINDER_CIRRUS),
+ entry(AIRFRAME_CIRRUS_SR22, SIX_CYLINDER_CIRRUS),
+ entry(AIRFRAME_CESSNA_400, SIX_CYLINDER_CIRRUS),
+ entry(AIRFRAME_DIAMOND_DA_40_F, DIAMOND_CONFIG),
+ entry(AIRFRAME_DIAMOND_DA_40, DIAMOND_CONFIG),
+ entry(AIRFRAME_DIAMOND_DA40, DIAMOND_CONFIG)
+ );
+
+ public ProcessDivergence(Connection connection, FlightBuilder builder) {
+ super(connection, builder);
+ }
+
+ private Set<String> requiredDoubleColumns = null;
+ public Set<String> getRequiredDoubleColumns() {
+ if (requiredDoubleColumns == null) {
+
+ var configs = CONFIG_MAP.get(builder.meta.airframeName);
+ if (configs != null) {
+
+ requiredDoubleColumns = new HashSet<>(32);
+ for (var config : configs)
+ requiredDoubleColumns.addAll(config.parameters);
+
+ } else {
+ requiredDoubleColumns = Collections.emptySet();
+ }
+ }
+
+ return requiredDoubleColumns;
+ }
+
+ public Set<String> getRequiredStringColumns() { return Collections.emptySet(); }
+ public Set<String> getRequiredColumns() { return getRequiredDoubleColumns(); }
+
+ private Set<String> outputColumns = null;
+ public Set<String> getOutputColumns() {
+ if (outputColumns == null) {
+
+ var configs = CONFIG_MAP.get(builder.meta.airframeName);
+ if (configs != null) {
+
+ outputColumns = new HashSet<>();
+ for (var config : configs)
+ outputColumns.add(config.output);
+
+ } else {
+ outputColumns = Collections.emptySet();
+ }
+ }
+
+ return outputColumns;
+ }
+
+ public boolean airframeIsValid(String airframe) {
+ for (String blacklisted : AIRFRAME_BLACKLIST)
+ if (airframe.contains(blacklisted))
+ return false;
+
+ return true;
+ }
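+
+ /**
+ * Computes the per-sample spread (max - min) across the given sensor columns and stores it
+ * under the given output column name. NaN samples are ignored; rows where every input is NaN
+ * are recorded as 0.
+ */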
+ private void calculateDivergence(List<String> columnNames, String varianceColumnName) throws MalformedFlightFileException, SQLException {
+ DoubleTimeSeries[] columns = new DoubleTimeSeries[columnNames.size()];
+ for (int i = 0; i < columns.length; i++) {
+ columns[i] = doubleTS.get(columnNames.get(i));
+
+ if (columns[i] == null) {
+ throw new MalformedFlightFileException("Cannot calculate '" + varianceColumnName + "' as parameter '" + columnNames.get(i) + "' was missing.");
+ }
+ }
+
+ DoubleTimeSeries variance = new DoubleTimeSeries(varianceColumnName, UNIT_DEG_F, columns[0].size());
+
+ for (int i = 0; i < columns[0].size(); i++) {
+ double max = -Double.MAX_VALUE;
+ double min = Double.MAX_VALUE;
+
+ for (int j = 0; j < columns.length; j++) {
+ double current = columns[j].get(i);
+ if (!Double.isNaN(current) && current > max) max = current;
+ if (!Double.isNaN(current) && current < min) min = current;
+ }
+
+ double v = 0;
+ if (max != -Double.MAX_VALUE && min != Double.MAX_VALUE) {
+ v = max - min;
+ }
+
+ variance.add(v);
+ }
+
+ doubleTS.put(varianceColumnName, variance);
+ }
+
+ public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException {
+ List<DivergenceConfig> configs = CONFIG_MAP.get(builder.meta.airframeName);
+
+ if (configs == null)
+ return;
+
+ for (var config : configs)
+ calculateDivergence(config.parameters, config.output);
+ }
+}
diff --git a/src/main/java/org/ngafid/flights/process/ProcessItinerary.java b/src/main/java/org/ngafid/flights/process/ProcessItinerary.java
new file mode 100644
index 000000000..46f39e673
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/ProcessItinerary.java
@@ -0,0 +1,107 @@
+package org.ngafid.flights.process;
+
+import java.util.Set;
+import java.util.ArrayList;
+import java.util.logging.Logger;
+import java.util.Collections;
+import java.sql.Connection;
+import java.sql.SQLException;
+
+import java.nio.file.NoSuchFileException;
+
+import org.ngafid.flights.Flight;
+import org.ngafid.flights.DoubleTimeSeries;
+import org.ngafid.flights.StringTimeSeries;
+import org.ngafid.flights.Itinerary;
+import static org.ngafid.flights.Parameters.*;
+import org.ngafid.flights.FatalFlightFileException;
+import org.ngafid.flights.MalformedFlightFileException;
+
+public class ProcessItinerary extends ProcessStep {
+ private static final Logger LOG = Logger.getLogger(ProcessItinerary.class.getName());
+
+ private static final Set<String> REQUIRED_DOUBLE_COLUMNS = Set.of(ALT_AGL, LATITUDE, LONGITUDE, AIRPORT_DISTANCE, RUNWAY_DISTANCE, GND_SPD, E1_RPM);
+ private static final Set<String> REQUIRED_STRING_COLUMNS = Set.of(NEAREST_AIRPORT, NEAREST_RUNWAY);
+ private static final Set<String> OUTPUT_COLUMNS = Set.of("_itinerary"); // This is a fake column; never actually created.
+
+ public ProcessItinerary(Connection connection, FlightBuilder builder) {
+ super(connection, builder);
+ }
+
+ public Set<String> getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; }
+ public Set<String> getRequiredStringColumns() { return REQUIRED_STRING_COLUMNS; }
+ public Set<String> getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; }
+ public Set<String> getOutputColumns() { return OUTPUT_COLUMNS; }
+
+ public boolean airframeIsValid(String airframe) { return true; }
+
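+ /**
+ * Scans the flight for approaches: each contiguous run of samples near an airport becomes a
+ * candidate itinerary stop, and the best runway for that stop is selected once the aircraft
+ * leaves the airport's vicinity. Only stops that qualify as approaches are kept.
+ */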
+ public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException {
+ DoubleTimeSeries groundSpeed = doubleTS.get(GND_SPD);
+ DoubleTimeSeries rpm = doubleTS.get(E1_RPM);
+
+ StringTimeSeries nearestAirportTS = stringTS.get(NEAREST_AIRPORT);
+ DoubleTimeSeries airportDistanceTS = doubleTS.get(AIRPORT_DISTANCE);
+ DoubleTimeSeries altitudeAGL = doubleTS.get(ALT_AGL);
+
+ StringTimeSeries nearestRunwayTS = stringTS.get(NEAREST_RUNWAY);
+ DoubleTimeSeries runwayDistanceTS = doubleTS.get(RUNWAY_DISTANCE);
+
+ ArrayList<Itinerary> itinerary = new ArrayList<>();
+
+ Itinerary currentItinerary = null;
+ for (int i = 1; i < nearestAirportTS.size(); i++) {
+ String airport = nearestAirportTS.get(i);
+ String runway = nearestRunwayTS.get(i);
+
+ if (airport != null && !airport.equals("")) {
+ //We've gotten close to an airport, so create a stop if there
+ //isn't one. If there is one, update the runway being visited.
+ //If the airport is a new airport (this shouldn't happen really),
+ //then create a new stop.
+ if (currentItinerary == null) {
+ currentItinerary = new Itinerary(airport, runway, i, altitudeAGL.get(i), airportDistanceTS.get(i), runwayDistanceTS.get(i), groundSpeed.get(i), rpm.get(i));
+ } else if (airport.equals(currentItinerary.getAirport())) {
+ currentItinerary.update(runway, i, altitudeAGL.get(i), airportDistanceTS.get(i), runwayDistanceTS.get(i), groundSpeed.get(i), rpm.get(i));
+ } else {
+ currentItinerary.selectBestRunway();
+ if (currentItinerary.wasApproach()) itinerary.add(currentItinerary);
+ currentItinerary = new Itinerary(airport, runway, i, altitudeAGL.get(i), airportDistanceTS.get(i), runwayDistanceTS.get(i), groundSpeed.get(i), rpm.get(i));
+ }
+
+ } else {
+ //airport is null, so if there was an airport being visited
+ //then we can determine its runway and add it to the itinerary
+ if (currentItinerary != null) {
+ currentItinerary.selectBestRunway();
+ if (currentItinerary.wasApproach()) itinerary.add(currentItinerary);
+ }
+
+ //set the currentItinerary to null until we approach another
+ //airport
+ currentItinerary = null;
+ }
+ }
+
+ //don't forget to add the last stop in the itinerary if it wasn't set to null
+ if (currentItinerary != null) {
+ currentItinerary.selectBestRunway();
+ if (currentItinerary.wasApproach()) itinerary.add(currentItinerary);
+ }
+
+ // Determine the type of each itinerary stop.
+ for (Itinerary stop : itinerary) {
+ stop.determineType();
+ }
+
+ // LOG.info("Itinerary:");
+ // for (int i = 0; i < itinerary.size(); i++) {
+ // LOG.info(itinerary.get(i).toString());
+ // }
+
+ builder.setItinerary(itinerary);
+ }
+
+}
diff --git a/src/main/java/org/ngafid/flights/process/ProcessLOCI.java b/src/main/java/org/ngafid/flights/process/ProcessLOCI.java
new file mode 100644
index 000000000..29e23496e
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/ProcessLOCI.java
@@ -0,0 +1,69 @@
+package org.ngafid.flights.process;
+
+import java.time.*;
+import java.util.Set;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.Collections;
+import java.util.logging.Logger;
+import java.time.format.DateTimeFormatter;
+
+import static org.ngafid.flights.Parameters.*;
+import static org.ngafid.flights.Airframes.*;
+import org.ngafid.common.*;
+import org.ngafid.flights.StringTimeSeries;
+import org.ngafid.flights.DoubleTimeSeries;
+import org.ngafid.flights.MalformedFlightFileException;
+import org.ngafid.flights.FatalFlightFileException;
+
+public class ProcessLOCI extends ProcessStep {
+ private static final Logger LOG = Logger.getLogger(ProcessLOCI.class.getName());
+
+ public static final Set<String> REQUIRED_DOUBLE_COLUMNS = Set.of(LOCI_DEPENDENCIES);
+
+ public ProcessLOCI(Connection connection, FlightBuilder builder) {
+ super(connection, builder);
+ }
+
+ public Set<String> getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; }
+ public Set<String> getRequiredStringColumns() { return Collections.emptySet(); }
+ public Set<String> getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; }
+ public Set<String> getOutputColumns() { return Collections.emptySet(); }
+
+ public boolean airframeIsValid(String airframe) { return airframe.equals(AIRFRAME_CESSNA_172S); }
+
+ public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException {
+ DoubleTimeSeries hdg = doubleTS.get(HDG);
+ DoubleTimeSeries hdgLagged = hdg.lag(YAW_RATE_LAG);
+ DoubleTimeSeries roll = doubleTS.get(ROLL);
+ DoubleTimeSeries tas = doubleTS.get(TAS_FTMIN);
+ DoubleTimeSeries stallIndex = doubleTS.get(STALL_PROB);
+
+ int length = roll.size();
+
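+ // Coordination (pro-spin force) index: the yaw rate is the wrapped heading change over the lag
+ // window; the absolute difference between the bank-angle acceleration term (sin(roll) * 32.2)
+ // and the rate-derived term is scaled by PROSPIN_LIM and capped at 100.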
+ DoubleTimeSeries coordIndex = DoubleTimeSeries.computed(PRO_SPIN_FORCE, "index", length,
+ (int index) -> {
+ double laggedHdg = hdgLagged.get(index);
+ double yawRate = Double.isNaN(laggedHdg) ? 0 :
+ 180 - Math.abs(180 - Math.abs(hdg.get(index) - laggedHdg) % 360);
+
+ double yawComp = yawRate * COMP_CONV;
+ double vrComp = ((tas.get(index) / 60) * yawComp);
+ double rollComp = roll.get(index) * COMP_CONV;
+ double ctComp = Math.sin(rollComp) * 32.2;
+ double value = Math.min(((Math.abs(ctComp - vrComp) * 100) / PROSPIN_LIM), 100);
+
+ return value;
+ }
+ );
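+ // LOC-I index: product of the stall index and the coordination index, rescaled to [0, 1].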
+ DoubleTimeSeries loci = DoubleTimeSeries.computed(LOCI, "index", length,
+ index -> {
+ double prob = stallIndex.get(index) * coordIndex.get(index);
+ return prob / 100;
+ }
+ );
+
+ doubleTS.put(PRO_SPIN_FORCE, coordIndex);
+ doubleTS.put(LOCI, loci);
+ }
+}
diff --git a/src/main/java/org/ngafid/flights/process/ProcessLaggedAltMSL.java b/src/main/java/org/ngafid/flights/process/ProcessLaggedAltMSL.java
new file mode 100644
index 000000000..d26d711cb
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/ProcessLaggedAltMSL.java
@@ -0,0 +1,52 @@
+package org.ngafid.flights.process;
+
+import java.util.Set;
+import java.util.Collections;
+import java.sql.Connection;
+import java.sql.SQLException;
+
+import java.nio.file.NoSuchFileException;
+
+import org.ngafid.flights.Flight;
+import org.ngafid.flights.DoubleTimeSeries;
+import static org.ngafid.flights.Parameters.*;
+import static org.ngafid.flights.Airframes.*;
+import org.ngafid.flights.FatalFlightFileException;
+import org.ngafid.flights.MalformedFlightFileException;
+
+public class ProcessLaggedAltMSL extends ProcessStep {
+ private static final Set<String> REQUIRED_DOUBLE_COLUMNS = Set.of(ALT_MSL);
+ private static final Set<String> OUTPUT_COLUMNS = Set.of(ALT_MSL_LAG_DIFF);
+ private static final Set<String> AIRFRAME_BLACKLIST = Set.of(AIRFRAME_SCAN_EAGLE, AIRFRAME_DJI);
+ private static final int LAG = 10;
+
+ public ProcessLaggedAltMSL(Connection connection, FlightBuilder builder) {
+ super(connection, builder);
+ }
+
+ public Set<String> getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; }
+ public Set<String> getRequiredStringColumns() { return Collections.emptySet(); }
+ public Set<String> getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; }
+ public Set<String> getOutputColumns() { return OUTPUT_COLUMNS; }
+
+ public boolean airframeIsValid(String airframe) {
+ for (String blacklisted : AIRFRAME_BLACKLIST)
+ if (airframe.contains(blacklisted))
+ return false;
+
+ return true;
+ }
+
+ public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException {
+ DoubleTimeSeries altMSL = doubleTS.get(ALT_MSL);
+ DoubleTimeSeries laggedAltMSL = new DoubleTimeSeries(ALT_MSL_LAG_DIFF, UNIT_FT_MSL, altMSL.size());
+
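+ // Pad the first LAG samples with 0, then record the change in MSL altitude over the
+ // preceding LAG samples for every subsequent index.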
+ for (int i = 0; i < LAG; i++)
+ laggedAltMSL.add(0.0);
+ for (int i = LAG; i < altMSL.size(); i++)
+ laggedAltMSL.add(altMSL.get(i) - altMSL.get(i - LAG));
+
+ doubleTS.put(ALT_MSL_LAG_DIFF, laggedAltMSL);
+ }
+
+}
diff --git a/src/main/java/org/ngafid/flights/process/ProcessStallIndex.java b/src/main/java/org/ngafid/flights/process/ProcessStallIndex.java
new file mode 100644
index 000000000..dc62d42f8
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/ProcessStallIndex.java
@@ -0,0 +1,101 @@
+package org.ngafid.flights.process;
+
+import java.time.*;
+import java.util.Set;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.Collections;
+import java.util.logging.Logger;
+import java.time.format.DateTimeFormatter;
+
+import static org.ngafid.flights.Parameters.*;
+import static org.ngafid.flights.Airframes.*;
+import org.ngafid.common.*;
+import org.ngafid.flights.StringTimeSeries;
+import org.ngafid.flights.DoubleTimeSeries;
+import org.ngafid.flights.MalformedFlightFileException;
+import org.ngafid.flights.FatalFlightFileException;
+import org.ngafid.flights.calculations.VSPDRegression;
+
+public class ProcessStallIndex extends ProcessStep {
+ private static final Logger LOG = Logger.getLogger(ProcessStallIndex.class.getName());
+
+ public static final Set<String> REQUIRED_DOUBLE_COLUMNS = Set.of(STALL_DEPENDENCIES);
+ public static final Set<String> OUTPUT_COLUMNS = Set.of(STALL_PROB, TAS_FTMIN, VSPD_CALCULATED, CAS);
+
+ public ProcessStallIndex(Connection connection, FlightBuilder builder) {
+ super(connection, builder);
+ }
+
+ public Set<String> getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; }
+ public Set<String> getRequiredStringColumns() { return Collections.emptySet(); }
+ public Set<String> getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; }
+ public Set<String> getOutputColumns() { return OUTPUT_COLUMNS; }
+
+ public boolean airframeIsValid(String airframe) { return true; }
+
+ public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException {
+ DoubleTimeSeries ias = doubleTS.get(IAS);
+ int length = ias.size();
+
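+ // For the Cessna 172S, approximate calibrated airspeed from indicated airspeed using a
+ // linear correction below 70 knots; at or above 70 knots IAS is used directly.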
+ if (builder.meta.airframeName.equals(AIRFRAME_CESSNA_172S)) {
+ DoubleTimeSeries cas = DoubleTimeSeries.computed(CAS, "knots", length,
+ index -> {
+ double iasValue = ias.get(index);
+
+ if (iasValue < 70.d)
+ iasValue = (0.7d * iasValue) + 20.667;
+
+ return iasValue;
+ }
+ );
+ cas.setTemporary(true);
+ doubleTS.put(CAS, cas);
+ }
+
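+ // Derive vertical speed from the ALT_B altitude series via VSPDRegression.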
+ DoubleTimeSeries vspdCalculated =
+ DoubleTimeSeries.computed(VSPD_CALCULATED, "ft/min", length, new VSPDRegression(doubleTS.get(ALT_B)));
+ vspdCalculated.setTemporary(true);
+ doubleTS.put(VSPD_CALCULATED, vspdCalculated);
+
+ DoubleTimeSeries baroA = doubleTS.get(BARO_A);
+ DoubleTimeSeries oat = doubleTS.get(OAT);
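+ // Density ratio: (station pressure / standard sea-level pressure) divided by
+ // (OAT in Kelvin / 288 K).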
+ DoubleTimeSeries densityRatio = DoubleTimeSeries.computed(DENSITY_RATIO, "ratio", length,
+ index -> {
+ double pressRatio = baroA.get(index) / STD_PRESS_INHG;
+ double tempRatio = (273 + oat.get(index)) / 288;
+
+ return pressRatio / tempRatio;
+ }
+ );
+
+ DoubleTimeSeries airspeed =
+ builder.meta.airframeName.equals(AIRFRAME_CESSNA_172S) ? doubleTS.get(CAS) : doubleTS.get(IAS);
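+ // True airspeed in ft/min: airspeed corrected by 1/sqrt(densityRatio), then converted from
+ // knots (6076 ft per nautical mile, 60 minutes per hour).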
+ DoubleTimeSeries tasFtMin = DoubleTimeSeries.computed(TAS_FTMIN, "ft/min", length,
+ index -> {
+ return (airspeed.get(index) * Math.pow(densityRatio.get(index), -0.5)) * ((double) 6076 / 60);
+ });
+ tasFtMin.setTemporary(true);
+
+ DoubleTimeSeries pitch = doubleTS.get(PITCH);
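+ // Simple angle-of-attack estimate: pitch attitude minus the flight path angle computed from
+ // the density-corrected vertical speed and true airspeed.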
+ DoubleTimeSeries aoaSimple = DoubleTimeSeries.computed(AOA_SIMPLE, "degrees", length,
+ index -> {
+
+ double vspdGeo = vspdCalculated.get(index) * Math.pow(densityRatio.get(index), -0.5);
+ double fltPthAngle = Math.asin(vspdGeo / tasFtMin.get(index));
+ fltPthAngle = fltPthAngle * (180 / Math.PI);
+ double value = pitch.get(index) - fltPthAngle;
+
+ return value;
+ }
+ );
+
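+ // Stall index: |AOA| relative to the critical AOA, capped at 100% and rescaled to [0, 1].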
+ DoubleTimeSeries stallIndex = DoubleTimeSeries.computed(STALL_PROB, "index", length,
+ index -> {
+ return (Math.min(((Math.abs(aoaSimple.get(index) / AOA_CRIT)) * 100), 100)) / 100;
+ }
+ );
+ doubleTS.put(STALL_PROB, stallIndex);
+ doubleTS.put(TAS_FTMIN, tasFtMin);
+ }
+}
diff --git a/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java b/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java
new file mode 100644
index 000000000..3792ddd5b
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java
@@ -0,0 +1,112 @@
+package org.ngafid.flights.process;
+
+import java.time.*;
+import java.util.Set;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.logging.Logger;
+import java.time.format.DateTimeFormatter;
+
+import static org.ngafid.flights.Parameters.*;
+import org.ngafid.common.*;
+import org.ngafid.flights.StringTimeSeries;
+import org.ngafid.flights.MalformedFlightFileException;
+import org.ngafid.flights.FatalFlightFileException;
+
+public class ProcessStartEndTime extends ProcessStep {
+ private static final Logger LOG = Logger.getLogger(ProcessStartEndTime.class.getName());
+
+ public static final Set<String> REQUIRED_STRING_COLUMNS = Set.of(LCL_DATE, LCL_TIME, UTC_OFFSET);
+
+ public ProcessStartEndTime(Connection connection, FlightBuilder builder) {
+ super(connection, builder);
+ }
+
+ public Set<String> getRequiredDoubleColumns() { return Collections.emptySet(); }
+ public Set<String> getRequiredStringColumns() { return REQUIRED_STRING_COLUMNS; }
+ public Set<String> getRequiredColumns() { return REQUIRED_STRING_COLUMNS; }
+ public Set