diff --git a/pom.xml b/pom.xml index f1c046161..54aed3e23 100644 --- a/pom.xml +++ b/pom.xml @@ -129,8 +129,8 @@ maven-compiler-plugin 3.8.1 - 11 - 11 + 16 + 16 -Xlint:all -Xmaxwarns @@ -145,8 +145,8 @@ org.apache.maven.plugins maven-compiler-plugin - 11 - 11 + 16 + 16 diff --git a/src/main/java/org/ngafid/CalculateExceedences.java b/src/main/java/org/ngafid/CalculateExceedences.java index 2a46aa045..517c7af29 100644 --- a/src/main/java/org/ngafid/CalculateExceedences.java +++ b/src/main/java/org/ngafid/CalculateExceedences.java @@ -29,9 +29,11 @@ import org.ngafid.filters.Conditional; import org.ngafid.filters.Filter; import org.ngafid.filters.Pair; +import java.util.logging.*; public class CalculateExceedences { - + private static final Logger LOG = Logger.getLogger(CalculateExceedences.class.getName()); + static String timeSeriesName = "Lcl Time"; static String dateSeriesName = "Lcl Date"; @@ -55,24 +57,24 @@ public void processFlight(Connection connection, Flight flight, EventDefinition int airframeNameId = flight.getAirframeNameId(); String flightFilename = flight.getFilename(); - System.out.println("Processing flight: " + flightId + ", " + flightFilename); + LOG.info("Processing flight: " + flightId + ", " + flightFilename); try { - System.out.println("Event is: '" + eventDefinition.getName() + "'"); + LOG.info("Event is: '" + eventDefinition.getName() + "'"); //first check and see if this was actually a flight (RPM > 800) Pair minMaxRPM1 = DoubleTimeSeries.getMinMax(connection, flightId, "E1 RPM"); Pair minMaxRPM2 = DoubleTimeSeries.getMinMax(connection, flightId, "E2 RPM"); - System.out.println("minMaxRPM1: " + minMaxRPM1); - System.out.println("minMaxRPM2: " + minMaxRPM2); + // LOG.info("minMaxRPM1: " + minMaxRPM1); + // LOG.info("minMaxRPM2: " + minMaxRPM2); if ((minMaxRPM1 == null && minMaxRPM2 == null) //both RPM values are null, can't calculate exceedence || (minMaxRPM2 == null && minMaxRPM1 != null && minMaxRPM1.second() < 800) //RPM2 is null, RPM1 is 
< 800 || (minMaxRPM1 == null && minMaxRPM2 != null && minMaxRPM2.second() < 800) //RPM1 is null, RPM2 is < 800 || (minMaxRPM1 != null && minMaxRPM1.second() < 800) && (minMaxRPM2 != null && minMaxRPM2.second() < 800)) { //RPM1 and RPM2 < 800 //couldn't calculate exceedences for this flight because the engines never kicked on (it didn't fly) - System.out.println("engines never turned on, setting flight_processed.had_error = 1"); + LOG.info("engines never turned on, setting flight_processed.had_error = 1"); if (uploadProcessedEmail != null) uploadProcessedEmail.addExceedenceError(flightFilename, "could not calculate exceedences for flight " + flightId + ", '" + flightFilename + "' - engines never turned on"); @@ -80,14 +82,14 @@ public void processFlight(Connection connection, Flight flight, EventDefinition stmt.setInt(1, fleetId); stmt.setInt(2, flightId); stmt.setInt(3, eventDefinition.getId()); - System.out.println(stmt.toString()); + // LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); return; } TreeSet columnNames = eventDefinition.getColumnNames(); - System.out.println("Number of Column Name(s): [ " + columnNames.size() + " ]"); + // LOG.info("Number of Column Name(s): [ " + columnNames.size() + " ]"); //first test and see if min/max values can violate exceedence, otherwise we can skip conditional.reset(); @@ -95,7 +97,7 @@ public void processFlight(Connection connection, Flight flight, EventDefinition Pair minMax = DoubleTimeSeries.getMinMax(connection, flightId, columnName); if (minMax == null) { - System.out.println("minMax was null, setting flight_processed.had_error = 1"); + LOG.info("minMax was null, setting flight_processed.had_error = 1"); //couldn't calculate this exceedence because at least one of the columns was missing if (uploadProcessedEmail != null) uploadProcessedEmail.addExceedenceError(flightFilename, "could not calculate '" + eventDefinition.getName() + "' for flight " + flightId + ", '" + flightFilename + "' - " + columnName + " 
was missing"); @@ -103,19 +105,19 @@ public void processFlight(Connection connection, Flight flight, EventDefinition stmt.setInt(1, fleetId); stmt.setInt(2, flightId); stmt.setInt(3, eventDefinition.getId()); - System.out.println(stmt.toString()); + // LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); return; } - System.out.println(columnName + ", min: " + minMax.first() + ", max: " + minMax.second()); + LOG.info(columnName + ", min: " + minMax.first() + ", max: " + minMax.second()); conditional.set(columnName, minMax); } - System.out.println("Post-set conditional: " + conditional.toString()); + LOG.info("Post-set conditional: " + conditional.toString()); boolean result = conditional.evaluate(); - System.out.println("overall result: " + result); + LOG.info("overall result: " + result); if (!result) { //this flight could not have caused one of these events @@ -123,7 +125,7 @@ public void processFlight(Connection connection, Flight flight, EventDefinition stmt.setInt(1, fleetId); stmt.setInt(2, flightId); stmt.setInt(3, eventDefinition.getId()); - System.out.println(stmt.toString()); + // LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); @@ -136,14 +138,14 @@ public void processFlight(Connection connection, Flight flight, EventDefinition if (timeSeries == null || dateSeries == null) { //couldn't calculate this exceedence because the date or time column was missing - System.out.println("time series or date series was missing, setting flight_processed.had_error = 1"); + LOG.info("time series or date series was missing, setting flight_processed.had_error = 1"); if (uploadProcessedEmail != null) uploadProcessedEmail.addExceedenceError(flightFilename, "could not calculate exceedences for flight " + flightId + ", '" + flightFilename + "' - date or time was missing"); PreparedStatement stmt = connection.prepareStatement("INSERT INTO flight_processed SET fleet_id = ?, flight_id = ?, event_definition_id = ?, count = 0, had_error = 1"); stmt.setInt(1, 
fleetId); stmt.setInt(2, flightId); stmt.setInt(3, eventDefinition.getId()); - System.out.println(stmt.toString()); + LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); return; @@ -174,23 +176,23 @@ public void processFlight(Connection connection, Flight flight, EventDefinition lineNumber = i; double currentValue = doubleSeries[0].get(i); - //System.out.println("Pre-set conditional: " + conditional.toString()); + //LOG.info("Pre-set conditional: " + conditional.toString()); conditional.reset(); for (DoubleTimeSeries series : doubleSeries) { conditional.set(series.getName(), series.get(i)); } - //System.out.println("Post-set conditional: " + conditional.toString()); + //LOG.info("Post-set conditional: " + conditional.toString()); result = conditional.evaluate(); - //System.out.println(conditional + ", result: " + result); + //LOG.info(conditional + ", result: " + result); if (!result) { if (startTime != null) { //we're tracking an event, so increment the stopCount stopCount++; - System.out.println("stopCount: " + stopCount + " with on line: " + lineNumber ); + LOG.info("stopCount: " + stopCount + " with on line: " + lineNumber ); if (stopCount == stopBuffer) { System.err.println("Stop count (" + stopCount + ") reached the stop buffer (" + stopBuffer + "), new event created!"); @@ -224,7 +226,7 @@ public void processFlight(Connection connection, Flight flight, EventDefinition startLine = lineNumber; severity = eventDefinition.getSeverity(doubleSeries, i); - System.out.println("start date time: " + startTime + ", start line number: " + startLine); + LOG.info("start date time: " + startTime + ", start line number: " + startLine); } endLine = lineNumber; endTime = dateSeries.get(i) + " " + timeSeries.get(i); @@ -240,11 +242,11 @@ public void processFlight(Connection connection, Flight flight, EventDefinition Event event = new Event(startTime, endTime, startLine, endLine, severity); eventList.add( event ); } - System.out.println(""); + LOG.info(""); for (i = 
0; i < eventList.size(); i++) { Event event = eventList.get(i); - System.out.println( "Event : [line: " + event.getStartLine() + " to " + event.getEndLine() + ", time: " + event.getStartTime() + " to " + event.getEndTime() + "]" ); + LOG.info( "Event : [line: " + event.getStartLine() + " to " + event.getEndLine() + ", time: " + event.getStartTime() + " to " + event.getEndTime() + "]" ); if (uploadProcessedEmail != null) uploadProcessedEmail.addExceedence(flightFilename, "flight " + flightId + ", '" + flightFilename + "' - '" + eventDefinition.getName() + "' from " + event.getStartTime() + " to " + event.getEndTime()); } @@ -284,7 +286,7 @@ public void processFlight(Connection connection, Flight flight, EventDefinition stmt.setDouble(8, sumSeverity); stmt.setDouble(9, minSeverity); stmt.setDouble(10, maxSeverity); - System.out.println(stmt.toString()); + LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); @@ -295,7 +297,7 @@ public void processFlight(Connection connection, Flight flight, EventDefinition stmt.setInt(1, fleetId); stmt.setInt(2, flightId); stmt.setInt(3, eventDefinition.getId()); - System.out.println(stmt.toString()); + LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); @@ -316,14 +318,14 @@ public static void calculateExceedences(Connection connection, int uploadId, Upl if (allEvents == null) { allEvents = EventDefinition.getAll(connection, "id > ?", new Object[]{0}); } - System.out.println("n events = " + allEvents.size()); + LOG.info("n events = " + allEvents.size()); int airframeTypeId = Airframes.getTypeId(connection, "Fixed Wing"); for (int i = 0; i < allEvents.size(); i++) { //process events for this event type EventDefinition currentDefinition = allEvents.get(i); - System.out.println("\t" + currentDefinition.toString()); + LOG.info("\t" + currentDefinition.toString()); CalculateExceedences currentCalculator = new CalculateExceedences(currentDefinition); @@ -349,7 +351,7 @@ public static void 
calculateExceedences(Connection connection, int uploadId, Upl Instant end = Instant.now(); long elapsed_millis = Duration.between(start, end).toMillis(); double elapsed_seconds = ((double) elapsed_millis) / 1000; - System.out.println("finished in " + elapsed_seconds); + LOG.info("finished in " + elapsed_seconds); if (uploadProcessedEmail != null) uploadProcessedEmail.setExceedencesElapsedTime(elapsed_seconds); } @@ -365,11 +367,11 @@ public static void main(String[] arguments) { connection = Database.resetConnection(); Instant start = Instant.now(); ArrayList allEvents = EventDefinition.getAll(connection, "id > ?", new Object[]{0}); - System.out.println("n events = " + allEvents.size()); + LOG.info("n events = " + allEvents.size()); for (int i = 0; i < allEvents.size(); i++) { //process events for this event type EventDefinition currentDefinition = allEvents.get(i); - System.out.println("\t" + currentDefinition.toString()); + LOG.info("\t" + currentDefinition.toString()); CalculateExceedences currentCalculator = new CalculateExceedences(currentDefinition); @@ -395,7 +397,7 @@ public static void main(String[] arguments) { Instant end = Instant.now(); long elapsed_millis = Duration.between(start, end).toMillis(); double elapsed_seconds = ((double) elapsed_millis) / 1000; - System.out.println("finished in " + elapsed_seconds); + LOG.info("finished in " + elapsed_seconds); try { Thread.sleep(3000); diff --git a/src/main/java/org/ngafid/CalculateProximity.java b/src/main/java/org/ngafid/CalculateProximity.java index dac3bdd5e..3d3a26b95 100644 --- a/src/main/java/org/ngafid/CalculateProximity.java +++ b/src/main/java/org/ngafid/CalculateProximity.java @@ -42,7 +42,10 @@ import org.ngafid.airports.Airports; +import java.util.logging.*; + public class CalculateProximity { + private static final Logger LOG = Logger.getLogger(CalculateProximity.class.getName()); //Proximity events (and potentially other complicated event calculations) will have negative IDs so they //can be 
excluded from the regular event calculation process @@ -96,7 +99,7 @@ public FlightTimeLocation(Connection connection, int fleetId, int flightId, int this.endDateTime = endDateTime; //first check and see if the flight had a start and end time, if not we cannot process it - //System.out.println("Getting info for flight with start date time: " + startDateTime + " and end date time: " + endDateTime); + //LOG.info("Getting info for flight with start date time: " + startDateTime + " and end date time: " + endDateTime); if (startDateTime == null || endDateTime == null) { //flight didnt have a start or end time @@ -108,11 +111,11 @@ public FlightTimeLocation(Connection connection, int fleetId, int flightId, int Pair minMaxRPM1 = DoubleTimeSeries.getMinMax(connection, flightId, "E1 RPM"); Pair minMaxRPM2 = DoubleTimeSeries.getMinMax(connection, flightId, "E2 RPM"); - System.out.println("minMaxRPM1: " + minMaxRPM1); - System.out.println("minMaxRPM2: " + minMaxRPM2); + LOG.info("minMaxRPM1: " + minMaxRPM1); + LOG.info("minMaxRPM2: " + minMaxRPM2); - if (minMaxRPM1 != null) System.out.println("min max E1 RPM: " + minMaxRPM1.first() + ", " + minMaxRPM1.second()); - if (minMaxRPM2 != null) System.out.println("min max E2 RPM: " + minMaxRPM2.first() + ", " + minMaxRPM2.second()); + if (minMaxRPM1 != null) LOG.info("min max E1 RPM: " + minMaxRPM1.first() + ", " + minMaxRPM1.second()); + if (minMaxRPM2 != null) LOG.info("min max E2 RPM: " + minMaxRPM2.first() + ", " + minMaxRPM2.second()); if ((minMaxRPM1 == null && minMaxRPM2 == null) //both RPM values are null, can't calculate exceedence || (minMaxRPM2 == null && minMaxRPM1.second() < 800) //RPM2 is null, RPM1 is < 800 (RPM1 would not be null as well) @@ -127,8 +130,8 @@ public FlightTimeLocation(Connection connection, int fleetId, int flightId, int Pair minMaxLatitude = DoubleTimeSeries.getMinMax(connection, flightId, "Latitude"); Pair minMaxLongitude = DoubleTimeSeries.getMinMax(connection, flightId, "Longitude"); - //if 
(minMaxLatitude != null) System.out.println("min max latitude: " + minMaxLatitude.first() + ", " + minMaxLatitude.second()); - //if (minMaxLongitude != null) System.out.println("min max longitude: " + minMaxLongitude.first() + ", " + minMaxLongitude.second()); + //if (minMaxLatitude != null) LOG.info("min max latitude: " + minMaxLatitude.first() + ", " + minMaxLatitude.second()); + //if (minMaxLongitude != null) LOG.info("min max longitude: " + minMaxLongitude.first() + ", " + minMaxLongitude.second()); if (minMaxLatitude == null || minMaxLongitude == null) { //flight didn't have latitude or longitude @@ -144,7 +147,7 @@ public FlightTimeLocation(Connection connection, int fleetId, int flightId, int //then check and see if this flight had alt MSL, if not we cannot calculate adjacency Pair minMaxAltMSL = DoubleTimeSeries.getMinMax(connection, flightId, "AltMSL"); - //if (minMaxAltMSL != null) System.out.println("min max alt MSL: " + minMaxAltMSL.first() + ", " + minMaxAltMSL.second()); + //if (minMaxAltMSL != null) LOG.info("min max alt MSL: " + minMaxAltMSL.first() + ", " + minMaxAltMSL.second()); if (minMaxAltMSL == null) { //flight didn't have alt MSL @@ -185,7 +188,7 @@ public boolean getSeriesData(Connection connection) throws SQLException { //check to see if we could get these columns if (dateSeries == null || timeSeries == null || utcOffsetSeries == null) return false; - //System.out.println("date length: " + dateSeries.size() + ", time length: " + timeSeries.size() + ", utc length: " + utcOffsetSeries.size()); + //LOG.info("date length: " + dateSeries.size() + ", time length: " + timeSeries.size() + ", utc length: " + utcOffsetSeries.size()); int length = dateSeries.size(); epochTime = new long[length]; @@ -224,17 +227,17 @@ public boolean alreadyProcessed(Connection connection) throws SQLException { stmt.setInt(2, flightId); stmt.setInt(3, adjacencyEventDefinitionId); - System.out.println(stmt.toString()); + LOG.info(stmt.toString()); //if there was a 
flight processed entry for this flight it was already processed ResultSet resultSet = stmt.executeQuery(); if (resultSet.next()) { - System.out.println("already processed!"); + LOG.info("already processed!"); resultSet.close(); stmt.close(); return true; } else { - System.out.println("not already processed!"); + LOG.info("not already processed!"); resultSet.close(); stmt.close(); return false; @@ -246,17 +249,17 @@ public static boolean proximityAlreadyCalculated(Connection connection, FlightTi stmt.setInt(1, first.flightId); stmt.setInt(2, second.flightId); - System.out.println(stmt.toString()); + LOG.info(stmt.toString()); //if there was a flight processed entry for this flight it was already processed ResultSet resultSet = stmt.executeQuery(); if (resultSet.next()) { - System.out.println("proximity event already exists!"); + LOG.info("proximity event already exists!"); resultSet.close(); stmt.close(); return true; } else { - System.out.println("proximity does not already exist!"); + LOG.info("proximity does not already exist!"); resultSet.close(); stmt.close(); return false; @@ -281,7 +284,7 @@ public void updateWithEvent(Connection connection, Event event, String startDate stmt.setDouble(7, severity); stmt.setDouble(8, severity); stmt.setDouble(9, severity); - System.out.println(stmt.toString()); + LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); @@ -298,13 +301,13 @@ public static void processFlightWithError(Connection connection, int fleetId, in stmt.setInt(1, fleetId); stmt.setInt(2, flightId); stmt.setInt(3, adjacencyEventDefinitionId); - //System.out.println(stmt.toString()); + //LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); } public static void processFlight(Connection connection, Flight flight, UploadProcessedEmail uploadProcessedEmail) { - System.out.println("Processing flight: " + flight.getId() + ", " + flight.getFilename()); + LOG.info("Processing flight: " + flight.getId() + ", " + flight.getFilename()); int fleetId = 
flight.getFleetId(); int flightId = flight.getId(); @@ -323,10 +326,10 @@ public static void processFlight(Connection connection, Flight flight, UploadPro ArrayList potentialFlights = Flight.getFlights(connection, "(id != " + flightId + " AND start_timestamp <= UNIX_TIMESTAMP('" + flightInfo.endDateTime + "') AND end_timestamp >= UNIX_TIMESTAMP('" + flightInfo.startDateTime + "'))"); - System.out.println("Found " + potentialFlights.size() + " potential time matched flights."); - //System.out.println("Flight start time: " + flightInfo.startDateTime + ", end time: " + flightInfo.endDateTime); - //System.out.println("Flight latitude min: " + flightInfo.minLatitude + ", max: " + flightInfo.maxLatitude); - //System.out.println("Flight longitude min: " + flightInfo.minLongitude + ", max: " + flightInfo.maxLongitude); + LOG.info("Found " + potentialFlights.size() + " potential time matched flights."); + //LOG.info("Flight start time: " + flightInfo.startDateTime + ", end time: " + flightInfo.endDateTime); + //LOG.info("Flight latitude min: " + flightInfo.minLatitude + ", max: " + flightInfo.maxLatitude); + //LOG.info("Flight longitude min: " + flightInfo.minLongitude + ", max: " + flightInfo.maxLongitude); ArrayList eventList = new ArrayList<>(); String startTime = null; @@ -349,7 +352,7 @@ public static void processFlight(Connection connection, Flight flight, UploadPro int stopBuffer = 30; for (Flight otherFlight : potentialFlights) { - //System.out.println("\tmatched to flight with start time: " + otherFlight.getStartDateTime() + ", end time: " + otherFlight.getEndDateTime()); + //LOG.info("\tmatched to flight with start time: " + otherFlight.getStartDateTime() + ", end time: " + otherFlight.getEndDateTime()); timeMatchFlights++; FlightTimeLocation otherInfo = new FlightTimeLocation(connection, otherFlight.getFleetId(), otherFlight.getId(), otherFlight.getAirframeNameId(), otherFlight.getStartDateTime(), otherFlight.getEndDateTime()); @@ -360,15 +363,15 @@ public static 
void processFlight(Connection connection, Flight flight, UploadPro //see if proximity between these two flights was already calculated, if so we can skip if (FlightTimeLocation.proximityAlreadyCalculated(connection, otherInfo, flightInfo)) { - System.out.println("Not re-performing proximity calculation"); + LOG.info("Not re-performing proximity calculation"); continue; } - //System.out.println("\t\tother latitude min: " + otherInfo.minLatitude + ", max: " + otherInfo.maxLatitude); - //System.out.println("\t\tother longitude min: " + otherInfo.minLongitude + ", max: " + otherInfo.maxLongitude); + //LOG.info("\t\tother latitude min: " + otherInfo.minLatitude + ", max: " + otherInfo.maxLatitude); + //LOG.info("\t\tother longitude min: " + otherInfo.minLongitude + ", max: " + otherInfo.maxLongitude); if (flightInfo.hasRegionOverlap(otherInfo)) { - //System.out.println("\t\tLatitude/Longitude overlap!"); + //LOG.info("\t\tLatitude/Longitude overlap!"); locMatchFlights++; if (!flightInfo.hasSeriesData()) { @@ -389,7 +392,7 @@ public static void processFlight(Connection connection, Flight flight, UploadPro int i = 30, j = 30; int totalMatches = 0; - //System.out.println("\t\tgot series data for both flights, iterate over times"); + //LOG.info("\t\tgot series data for both flights, iterate over times"); while (i < flightInfo.epochTime.length && j < otherInfo.epochTime.length) { //skip entries where the epoch time was 0 (the date/time was null) if (flightInfo.epochTime[i] == 0) { @@ -420,13 +423,13 @@ public static void processFlight(Connection connection, Flight flight, UploadPro if (distanceFt < 1000.0 && flightInfo.altitudeAGL[i] >= 50 && otherInfo.altitudeAGL[j] >= 50 && flightInfo.indicatedAirspeed[i] > 20 && otherInfo.indicatedAirspeed[j] > 20) { /* - System.out.println("\t\t\tother time[" + j + "]: " + otherInfo.epochTime[j] + " == flight time[" + i + "]: " + flightInfo.epochTime[i] + LOG.info("\t\t\tother time[" + j + "]: " + otherInfo.epochTime[j] + " == flight 
time[" + i + "]: " + flightInfo.epochTime[i] + ", flight lat/lon: " + flightInfo.latitude[i] + " " + flightInfo.longitude[i] + ", other lat/lon: " + otherInfo.latitude[j] + " " + otherInfo.longitude[j] + " -- distance: " + distanceFt ); */ - //System.out.println("\t\t\t\t\tflight alt AGL: " + flightInfo.altitudeAGL[i] + ", other alt AGL: " + otherInfo.altitudeAGL[j] + ", final distance: " + distanceFt); + //LOG.info("\t\t\t\t\tflight alt AGL: " + flightInfo.altitudeAGL[i] + ", other alt AGL: " + otherInfo.altitudeAGL[j] + ", final distance: " + distanceFt); //startTime is null if an exceedence is not being tracked if (startTime == null) { @@ -438,7 +441,7 @@ public static void processFlight(Connection connection, Flight flight, UploadPro otherStartLine = j; severity = distanceFt; - //System.out.println("start date time: " + startTime + ", start line number: " + startLine); + //LOG.info("start date time: " + startTime + ", start line number: " + startLine); } endLine = i; otherEndLine = j; @@ -500,7 +503,7 @@ public static void processFlight(Connection connection, Flight flight, UploadPro j++; totalMatches++; } - //System.out.println("\t\tseries matched time on " + totalMatches + " rows"); + //LOG.info("\t\tseries matched time on " + totalMatches + " rows"); //if there was an event still going when one flight ended, create it and add it to the list @@ -517,12 +520,12 @@ public static void processFlight(Connection connection, Flight flight, UploadPro //end the loop processing all flights for (Event event : eventList) { - System.out.println("\t" + event.toString()); + LOG.info("\t" + event.toString()); eventsFound++; uploadProcessedEmail.addProximity(flightFilename, "flight " + flightId + ", '" + flightFilename + "' - had a proximity event with flight " + event.getOtherFlightId() + " from " + event.getStartTime() + " to " + event.getEndTime()); } - System.out.println("\n"); + LOG.info("\n"); //Step 2: export the events and their statistics in the database @@ -540,8 
+543,8 @@ public static void processFlight(Connection connection, Flight flight, UploadPro } else if (event.getEndTime() != null) { EventStatistics.updateEventStatistics(connection, fleetId, airframeNameId, adjacencyEventDefinitionId, event.getEndTime(), event.getSeverity(), event.getDuration()); } else { - System.out.println("WARNING: could not update event statistics for event: " + event); - System.out.println("WARNING: event start and end time were both null."); + LOG.info("WARNING: could not update event statistics for event: " + event); + LOG.info("WARNING: event start and end time were both null."); } double currentSeverity = eventList.get(i).getSeverity(); @@ -567,7 +570,7 @@ public static void processFlight(Connection connection, Flight flight, UploadPro stmt.setDouble(8, sumSeverity); stmt.setDouble(9, minSeverity); stmt.setDouble(10, maxSeverity); - //System.out.println(stmt.toString()); + //LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); @@ -578,7 +581,7 @@ public static void processFlight(Connection connection, Flight flight, UploadPro stmt.setInt(1, fleetId); stmt.setInt(2, flightId); stmt.setInt(3, adjacencyEventDefinitionId); - //System.out.println(stmt.toString()); + //LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); @@ -615,9 +618,9 @@ public static void calculateProximity(Connection connection, int uploadId, Uploa double avgTimeMatchedFlights = ((double)timeMatchFlights / (double) count); double avgLocationMatchedFlights = ((double)locMatchFlights / (double)count); - System.out.println("calculated " + count + " proximity evaluations in " + elapsed_seconds + " seconds, averaged: " + average_seconds + " seconds per flight"); - System.out.println("avg time matched flights: " + avgTimeMatchedFlights + ", avg loc matched flights: " + avgLocationMatchedFlights); - System.out.println("proximity events found:" + eventsFound); + LOG.info("calculated " + count + " proximity evaluations in " + elapsed_seconds + " seconds, 
averaged: " + average_seconds + " seconds per flight"); +         LOG.info("avg time matched flights: " + avgTimeMatchedFlights + ", avg loc matched flights: " + avgLocationMatchedFlights); +         LOG.info("proximity events found: " + eventsFound); uploadProcessedEmail.setProximityElapsedTime(elapsed_seconds, average_seconds, avgTimeMatchedFlights, avgLocationMatchedFlights); } @@ -648,9 +651,9 @@ public static void main(String[] arguments) { Instant end = Instant.now(); double elapsed_seconds = (double)Duration.between(start, end).toMillis() / 1000.0; double average_seconds = ((double) elapsed_seconds) / (double)count; - System.out.println("calculated " + count + " adjacency evaluations in " + elapsed_seconds + " seconds, averaged: " + average_seconds + " seconds per flight"); - System.out.println("avg time matched flights: " + ((double)timeMatchFlights / (double) count) + ", avg loc matched flights: " + ((double)locMatchFlights / (double)count)); - System.out.println("evnets found:" + eventsFound); + LOG.info("calculated " + count + " adjacency evaluations in " + elapsed_seconds + " seconds, averaged: " + average_seconds + " seconds per flight"); + LOG.info("avg time matched flights: " + ((double)timeMatchFlights / (double) count) + ", avg loc matched flights: " + ((double)locMatchFlights / (double)count)); + LOG.info("events found: " + eventsFound); //System.exit(1); try { diff --git a/src/main/java/org/ngafid/FindLowEndingFuelEvents.java b/src/main/java/org/ngafid/FindLowEndingFuelEvents.java index 660119c9e..7700c250c 100644 --- a/src/main/java/org/ngafid/FindLowEndingFuelEvents.java +++ b/src/main/java/org/ngafid/FindLowEndingFuelEvents.java @@ -16,7 +16,7 @@ import java.util.logging.Logger; import static org.ngafid.events.CustomEvent.*; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; public class FindLowEndingFuelEvents { public static final Logger LOG = Logger.getLogger(FindLowEndingFuelEvents.class.getName()); 
@@ -37,7 +37,7 @@ public static void findLowEndFuelEventsInUpload(Connection connection, Upload up try { findLowEndFuel(connection, flight); } catch (MalformedFlightFileException e) { - System.out.println("Could not process flight " + flight.getId()); + LOG.info("Could not process flight " + flight.getId()); } catch (ParseException e) { LOG.info("Error parsing date"); e.printStackTrace(); @@ -81,7 +81,7 @@ public static void findLowEndFuel(Connection connection, Flight flight) throws S String[] lastValidDateAndIndex = date.getLastValidAndIndex(); int i = Integer.parseInt(lastValidDateAndIndex[1]); - System.out.println("last valid date and index: " + i); + LOG.info("last valid date and index: " + i); String endTime = lastValidDateAndIndex[0] + " " + time.getLastValid(); diff --git a/src/main/java/org/ngafid/FindSpinEvents.java b/src/main/java/org/ngafid/FindSpinEvents.java index 1cd3f663c..7ef91819e 100644 --- a/src/main/java/org/ngafid/FindSpinEvents.java +++ b/src/main/java/org/ngafid/FindSpinEvents.java @@ -15,7 +15,7 @@ import java.util.logging.Logger; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; import static org.ngafid.events.CustomEvent.*; /** @@ -216,7 +216,7 @@ static void calculateVSPDDerived(Connection connection, Flight flight) throws IO if (dts == null) { flight.checkCalculationParameters(VSPD_CALCULATED, ALT_B); CalculatedDoubleTimeSeries dVSI = new CalculatedDoubleTimeSeries(connection, VSPD_CALCULATED, "ft/min", true, flight); - dVSI.create(new VSPDRegression(connection, flight)); + dVSI.create(new VSPDRegression(flight.getDoubleTimeSeries(ALT_B))); dVSI.updateDatabase(connection, flightId); } } diff --git a/src/main/java/org/ngafid/ProcessFlights.java b/src/main/java/org/ngafid/ProcessFlights.java index 8b8bfc63f..8b8582a9e 100644 --- a/src/main/java/org/ngafid/ProcessFlights.java +++ b/src/main/java/org/ngafid/ProcessFlights.java @@ -27,200 +27,200 @@ public class ProcessFlights { 
private static Connection connection = Database.getConnection(); public static void main(String[] arguments) { - while (true) { - connection = Database.resetConnection(); + // while (true) { + // connection = Database.resetConnection(); - Instant start = Instant.now(); + // Instant start = Instant.now(); - try { - PreparedStatement fleetPreparedStatement = connection.prepareStatement("SELECT id FROM fleet WHERE EXISTS (SELECT id FROM uploads WHERE fleet.id = uploads.fleet_id AND uploads.status = 'UPLOADED')"); - ResultSet fleetSet = fleetPreparedStatement.executeQuery(); + // try { + // PreparedStatement fleetPreparedStatement = connection.prepareStatement("SELECT id FROM fleet WHERE EXISTS (SELECT id FROM uploads WHERE fleet.id = uploads.fleet_id AND uploads.status = 'UPLOADED')"); + // ResultSet fleetSet = fleetPreparedStatement.executeQuery(); - while (fleetSet.next()) { - int targetFleetId = fleetSet.getInt(1); - System.err.println("Importing an upload from fleet: " + targetFleetId); + // while (fleetSet.next()) { + // int targetFleetId = fleetSet.getInt(1); + // System.err.println("Importing an upload from fleet: " + targetFleetId); - PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? AND fleet_id = ? LIMIT 1"); + // PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? AND fleet_id = ? LIMIT 1"); - //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? AND fleet_id = ? AND id = 2281"); - //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? 
AND fleet_id != 1"); - //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ?"); - - //targetFleetId = 1; - //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? AND fleet_id = ? AND id = 2863 LIMIT 1"); - uploadsPreparedStatement.setString(1, "UPLOADED"); - uploadsPreparedStatement.setInt(2, targetFleetId); - - ResultSet resultSet = uploadsPreparedStatement.executeQuery(); - - while (resultSet.next()) { - ArrayList flightErrors = new ArrayList(); - - int uploadId = resultSet.getInt(1); - int uploaderId = resultSet.getInt(2); - int fleetId = resultSet.getInt(3); - String filename = resultSet.getString(4); - - filename = WebServer.NGAFID_ARCHIVE_DIR + "/" + fleetId + "/" + uploaderId + "/" + uploadId + "__" + filename; - System.err.println("processing: '" + filename + "'"); - - String extension = filename.substring(filename.length() - 4); - System.err.println("extension: '" + extension + "'"); - - String status = "IMPORTED"; - - Exception uploadException = null; - - int validFlights = 0; - int warningFlights = 0; - int errorFlights = 0; - if (extension.equals(".zip")) { - try { - System.err.println("processing zip file: '" + filename + "'"); - ZipFile zipFile = new ZipFile(filename); - - Enumeration entries = zipFile.entries(); - - while (entries.hasMoreElements()) { - ZipEntry entry = entries.nextElement(); - String name = entry.getName(); - - if (entry.isDirectory()) { - //System.err.println("SKIPPING: " + entry.getName()); - continue; - } - - if (name.contains("__MACOSX")) { - //System.err.println("SKIPPING: " + entry.getName()); - continue; - } - - System.err.println("PROCESSING: " + name); - - if (entry.getName().endsWith(".csv")) { - try { - InputStream stream = zipFile.getInputStream(entry); - Flight flight = new Flight(fleetId, entry.getName(), stream, connection); - - if 
(connection != null) { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - } - - if (flight.getStatus().equals("WARNING")) warningFlights++; - - validFlights++; - } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException e) { - System.err.println(e.getMessage()); - flightErrors.add(new UploadException(e.getMessage(), e, entry.getName())); - errorFlights++; - } - - } else if (entry.getName().endsWith(".json")) { - try { - Flight flight = Flight.processJSON(fleetId, connection, zipFile.getInputStream(entry), entry.getName()); - - if (connection != null) { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - } - - if (flight.getStatus().equals("WARNING")) warningFlights++; - - validFlights++; - } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | ParseException e) { - System.err.println("ERROR: " + e.getMessage()); - flightErrors.add(new UploadException(e.getMessage(), e, entry.getName())); - errorFlights++; - } - } else if (entry.getName().endsWith(".gpx")) { - try { - InputStream stream = zipFile.getInputStream(entry); - ArrayList flights = Flight.processGPXFile(fleetId, connection, stream, entry.getName()); - - if (connection != null) { - for (Flight flight : flights) { - System.out.println(flight.getFilename()); - } - for (Flight flight : flights) { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - if (flight.getStatus().equals("WARNING")) warningFlights++; - validFlights++; - } - } - } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | ParserConfigurationException | SAXException | SQLException | ParseException e) { - System.err.println(e.getMessage()); - flightErrors.add(new UploadException(e.getMessage(), e, entry.getName())); - errorFlights++; - } - } else { - flightErrors.add(new UploadException("Unknown file type contained in zip file (flight logs should be .csv files).", entry.getName())); - errorFlights++; - } - } - - } catch 
(java.nio.file.NoSuchFileException e) { - System.err.println("IOException: " + e ); - e.printStackTrace(); - - UploadError.insertError(connection, uploadId, "Broken upload: please delete this upload and re-upload."); - status = "ERROR"; - - } catch (IOException e) { - System.err.println("IOException: " + e ); - e.printStackTrace(); - - UploadError.insertError(connection, uploadId, "Could not read from zip file: please delete this upload and re-upload."); - status = "ERROR"; - } - - } else { - //insert an upload error for this upload - status = "ERROR"; - UploadError.insertError(connection, uploadId, "Uploaded file was not a zip file."); - } - - //update upload in database, add upload exceptions if there are any - PreparedStatement updateStatement = connection.prepareStatement("UPDATE uploads SET status = ?, n_valid_flights = ?, n_warning_flights = ?, n_error_flights = ? WHERE id = ?"); - updateStatement.setString(1, status); - updateStatement.setInt(2, validFlights); - updateStatement.setInt(3, warningFlights); - updateStatement.setInt(4, errorFlights); - updateStatement.setInt(5, uploadId); - updateStatement.executeUpdate(); - updateStatement.close(); - - for (UploadException exception : flightErrors) { - FlightError.insertError(connection, uploadId, exception.getFilename(), exception.getMessage()); - } - } - - resultSet.close(); - uploadsPreparedStatement.close(); - - //TURN OFF FOR REGULAR USE - //System.exit(1); - } - - fleetSet.close(); - fleetPreparedStatement.close(); - - } catch (SQLException e) { - e.printStackTrace(); - System.exit(1); - } - - Instant end = Instant.now(); - double elapsed_millis = (double) Duration.between(start, end).toMillis(); - double elapsed_seconds = elapsed_millis / 1000; - System.err.println("finished in " + elapsed_seconds); - - try { - Thread.sleep(10000); - } catch (Exception e) { - System.err.println(e); - e.printStackTrace(); - } - - } + // //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, 
uploader_id, fleet_id, filename FROM uploads WHERE status = ? AND fleet_id = ? AND id = 2281"); + // //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? AND fleet_id != 1"); + // //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ?"); + + // //targetFleetId = 1; + // //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? AND fleet_id = ? AND id = 2863 LIMIT 1"); + // uploadsPreparedStatement.setString(1, "UPLOADED"); + // uploadsPreparedStatement.setInt(2, targetFleetId); + + // ResultSet resultSet = uploadsPreparedStatement.executeQuery(); + + // while (resultSet.next()) { + // ArrayList flightErrors = new ArrayList(); + + // int uploadId = resultSet.getInt(1); + // int uploaderId = resultSet.getInt(2); + // int fleetId = resultSet.getInt(3); + // String filename = resultSet.getString(4); + + // filename = WebServer.NGAFID_ARCHIVE_DIR + "/" + fleetId + "/" + uploaderId + "/" + uploadId + "__" + filename; + // System.err.println("processing: '" + filename + "'"); + + // String extension = filename.substring(filename.length() - 4); + // System.err.println("extension: '" + extension + "'"); + + // String status = "IMPORTED"; + + // Exception uploadException = null; + + // int validFlights = 0; + // int warningFlights = 0; + // int errorFlights = 0; + // if (extension.equals(".zip")) { + // try { + // System.err.println("processing zip file: '" + filename + "'"); + // ZipFile zipFile = new ZipFile(filename); + + // Enumeration entries = zipFile.entries(); + + // while (entries.hasMoreElements()) { + // ZipEntry entry = entries.nextElement(); + // String name = entry.getName(); + + // if (entry.isDirectory()) { + // //System.err.println("SKIPPING: " + entry.getName()); + // continue; + 
// } + + // if (name.contains("__MACOSX")) { + // //System.err.println("SKIPPING: " + entry.getName()); + // continue; + // } + + // System.err.println("PROCESSING: " + name); + + // if (entry.getName().endsWith(".csv")) { + // try { + // InputStream stream = zipFile.getInputStream(entry); + // Flight flight = new Flight(fleetId, entry.getName(), stream, connection); + + // if (connection != null) { + // flight.updateDatabase(connection, uploadId, uploaderId, fleetId); + // } + + // if (flight.getStatus().equals("WARNING")) warningFlights++; + + // validFlights++; + // } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException e) { + // System.err.println(e.getMessage()); + // flightErrors.add(new UploadException(e.getMessage(), e, entry.getName())); + // errorFlights++; + // } + + // } else if (entry.getName().endsWith(".json")) { + // try { + // Flight flight = Flight.processJSON(fleetId, connection, zipFile.getInputStream(entry), entry.getName()); + + // if (connection != null) { + // flight.updateDatabase(connection, uploadId, uploaderId, fleetId); + // } + + // if (flight.getStatus().equals("WARNING")) warningFlights++; + + // validFlights++; + // } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | ParseException e) { + // System.err.println("ERROR: " + e.getMessage()); + // flightErrors.add(new UploadException(e.getMessage(), e, entry.getName())); + // errorFlights++; + // } + // } else if (entry.getName().endsWith(".gpx")) { + // try { + // InputStream stream = zipFile.getInputStream(entry); + // ArrayList flights = Flight.processGPXFile(fleetId, connection, stream, entry.getName()); + + // if (connection != null) { + // for (Flight flight : flights) { + // System.out.println(flight.getFilename()); + // } + // for (Flight flight : flights) { + // flight.updateDatabase(connection, uploadId, uploaderId, fleetId); + // if (flight.getStatus().equals("WARNING")) warningFlights++; + // validFlights++; + // } + // } 
+ // } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | ParserConfigurationException | SAXException | SQLException | ParseException e) { + // System.err.println(e.getMessage()); + // flightErrors.add(new UploadException(e.getMessage(), e, entry.getName())); + // errorFlights++; + // } + // } else { + // flightErrors.add(new UploadException("Unknown file type contained in zip file (flight logs should be .csv files).", entry.getName())); + // errorFlights++; + // } + // } + + // } catch (java.nio.file.NoSuchFileException e) { + // System.err.println("IOException: " + e ); + // e.printStackTrace(); + + // UploadError.insertError(connection, uploadId, "Broken upload: please delete this upload and re-upload."); + // status = "ERROR"; + + // } catch (IOException e) { + // System.err.println("IOException: " + e ); + // e.printStackTrace(); + + // UploadError.insertError(connection, uploadId, "Could not read from zip file: please delete this upload and re-upload."); + // status = "ERROR"; + // } + + // } else { + // //insert an upload error for this upload + // status = "ERROR"; + // UploadError.insertError(connection, uploadId, "Uploaded file was not a zip file."); + // } + + // //update upload in database, add upload exceptions if there are any + // PreparedStatement updateStatement = connection.prepareStatement("UPDATE uploads SET status = ?, n_valid_flights = ?, n_warning_flights = ?, n_error_flights = ? 
WHERE id = ?"); + // updateStatement.setString(1, status); + // updateStatement.setInt(2, validFlights); + // updateStatement.setInt(3, warningFlights); + // updateStatement.setInt(4, errorFlights); + // updateStatement.setInt(5, uploadId); + // updateStatement.executeUpdate(); + // updateStatement.close(); + + // for (UploadException exception : flightErrors) { + // FlightError.insertError(connection, uploadId, exception.getFilename(), exception.getMessage()); + // } + // } + + // resultSet.close(); + // uploadsPreparedStatement.close(); + + // //TURN OFF FOR REGULAR USE + // //System.exit(1); + // } + + // fleetSet.close(); + // fleetPreparedStatement.close(); + + // } catch (SQLException e) { + // e.printStackTrace(); + // System.exit(1); + // } + + // Instant end = Instant.now(); + // double elapsed_millis = (double) Duration.between(start, end).toMillis(); + // double elapsed_seconds = elapsed_millis / 1000; + // System.err.println("finished in " + elapsed_seconds); + + // try { + // Thread.sleep(10000); + // } catch (Exception e) { + // System.err.println(e); + // e.printStackTrace(); + // } + + // } } } diff --git a/src/main/java/org/ngafid/ProcessUpload.java b/src/main/java/org/ngafid/ProcessUpload.java index 57f3dc3f2..2ff681208 100644 --- a/src/main/java/org/ngafid/ProcessUpload.java +++ b/src/main/java/org/ngafid/ProcessUpload.java @@ -15,41 +15,38 @@ import java.time.format.DateTimeFormatter; import Files.*; -import org.xml.sax.SAXException; - -import javax.xml.parsers.ParserConfigurationException; -import java.text.ParseException; import java.util.Arrays; +import java.util.Collections; +import java.util.List; import java.util.ArrayList; -import java.util.Enumeration; import java.util.HashMap; import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ForkJoinPool; import java.util.logging.Level; import java.util.logging.Logger; -import java.util.zip.ZipEntry; import 
java.util.zip.ZipFile; -import org.ngafid.flights.FatalFlightFileException; import org.ngafid.flights.Flight; -import org.ngafid.flights.FlightAlreadyExistsException; import org.ngafid.flights.FlightError; import org.ngafid.flights.MalformedFlightFileException; import org.ngafid.flights.Upload; import org.ngafid.flights.UploadError; +import org.ngafid.flights.process.*; import org.ngafid.accounts.Fleet; import org.ngafid.accounts.User; -import static org.ngafid.flights.DJIFlightProcessor.processDATFile; public class ProcessUpload { private static Connection connection = null; private static Logger LOG = Logger.getLogger(ProcessUpload.class.getName()); private static final String ERROR_STATUS_STR = "ERROR"; - + public static void main(String[] arguments) { - System.out.println("arguments are:"); - System.out.println(Arrays.toString(arguments)); + LOG.info("arguments are:"); + LOG.info(Arrays.toString(arguments)); connection = Database.getConnection(); @@ -121,7 +118,7 @@ public static void operateAsDaemon() { } public static void processFleetUploads(int fleetId) { - System.out.println("processing uploads from fleet with id: " + fleetId); + LOG.info("processing uploads from fleet with id: " + fleetId); try { Fleet fleet = Fleet.get(connection, fleetId); String f = fleet.getName() == null ? 
" NULL NAME " : fleet.getName(); @@ -145,7 +142,7 @@ public static void processFleetUploads(int fleetId) { } public static void processUpload(int uploadId) { - System.out.println("processing upload with id: " + uploadId); + LOG.info("processing upload with id: " + uploadId); try { Upload upload = Upload.getUploadById(connection, uploadId); @@ -182,12 +179,17 @@ public static void processUpload(Upload upload) { SendEmail.sendEmail(recipients, bccRecipients, subject, body); upload.reset(connection); - System.out.println("upload was reset!\n\n"); - + LOG.info("upload was reset!\n\n"); UploadProcessedEmail uploadProcessedEmail = new UploadProcessedEmail(recipients, bccRecipients); - boolean success = ingestFlights(connection, uploadId, fleetId, uploaderId, filename, uploadProcessedEmail); + long start = System.nanoTime(); + boolean success = ingestFlights(connection, upload, uploadProcessedEmail); + long end = System.nanoTime(); + + long diff = end - start; + double asSeconds = ((double) diff) * 1.0e-9; + System.out.println("Took " + asSeconds + "s to ingest upload " + upload.getFilename()); //only progress if the upload ingestion was successful if (success) { @@ -225,9 +227,9 @@ private static class FlightInfo { int id; int length; String filename; - ArrayList exceptions = new ArrayList(); + List exceptions; - public FlightInfo(int id, int length, String filename, ArrayList exceptions) { + public FlightInfo(int id, int length, String filename, List exceptions) { this.id = id; this.length = length; this.filename = filename; @@ -235,10 +237,16 @@ public FlightInfo(int id, int length, String filename, ArrayList flightInfo = new ArrayList(); - HashMap flightErrors = new HashMap(); + Map flightErrors = Collections.emptyMap(); int validFlights = 0; int warningFlights = 0; int errorFlights = 0; + if (extension.equals(".zip")) { try { System.err.println("processing zip file: '" + filename + "'"); ZipFile zipFile = new ZipFile(filename); - - Enumeration entries = 
zipFile.entries(); - - while (entries.hasMoreElements()) { - ZipEntry entry = entries.nextElement(); - String name = entry.getName(); - - if (entry.isDirectory()) { - //System.err.println("SKIPPING: " + entry.getName()); - continue; - } - - if (name.contains("__MACOSX")) { - //System.err.println("SKIPPING: " + entry.getName()); - continue; - } - - System.err.println("PROCESSING: " + name); - - String entryName = entry.getName(); - - if (entryName.contains(".csv")) { - try { - InputStream stream = zipFile.getInputStream(entry); - Flight flight = new Flight(fleetId, entry.getName(), stream, connection); - - if (connection != null) { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - } - - if (flight.getStatus().equals("WARNING")) warningFlights++; - - flightInfo.add(new FlightInfo(flight.getId(), flight.getNumberRows(), flight.getFilename(), flight.getExceptions())); - - validFlights++; - } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException e) { - System.err.println(e.getMessage()); - flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); - errorFlights++; - } - - } else if (entryName.contains(".gpx")) { - try { - InputStream stream = zipFile.getInputStream(entry); - ArrayList flights = Flight.processGPXFile(fleetId, connection, stream, entry.getName()); - - if (connection != null) { - for (Flight flight : flights) { - flightInfo.add(new FlightInfo(flight.getId(), flight.getNumberRows(), flight.getFilename(), flight.getExceptions())); - } - for (Flight flight : flights) { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - if (flight.getStatus().equals("WARNING")) warningFlights++; - validFlights++; - } - } - } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | - ParserConfigurationException | SAXException | SQLException | ParseException e) { - System.err.println(e.getMessage()); - flightErrors.put(entry.getName(), new 
UploadException(e.getMessage(), e, entry.getName())); - errorFlights++; - } - } else if (entry.getName().endsWith(".json")) { - try { - Flight flight = Flight.processJSON(fleetId, connection, zipFile.getInputStream(entry), entry.getName()); - - if (connection != null) { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - } - - if (flight.getStatus().equals("WARNING")) warningFlights++; - - validFlights++; - } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | - ParseException e) { - System.err.println("ERROR: " + e.getMessage()); - flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); - errorFlights++; - } - } else if (entry.getName().endsWith(".DAT")) { - String zipName = entry.getName().substring(entry.getName().lastIndexOf("/")); - String parentFolder = zipFile.getName().substring(0, zipFile.getName().lastIndexOf("/")); - File tempExtractedFile = new File(parentFolder, zipName); - - System.out.println("Extracting to " + tempExtractedFile.getAbsolutePath()); - try (InputStream inputStream = zipFile.getInputStream(entry); FileOutputStream fileOutputStream = new FileOutputStream(tempExtractedFile)) { - int len; - byte[] buffer = new byte[1024]; - - while ((len = inputStream.read(buffer)) > 0) { - fileOutputStream.write(buffer, 0, len); - } - } - - convertDATFile(tempExtractedFile); - File processedCSVFile = new File(tempExtractedFile.getAbsolutePath() + ".csv"); - placeInZip(processedCSVFile.getAbsolutePath(), zipFile.getName().substring(zipFile.getName().lastIndexOf("/") + 1)); - - try (InputStream stream = new FileInputStream(processedCSVFile)) { - Flight flight = processDATFile(fleetId, entry.getName(), stream, connection); - - if (connection != null) { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - } - - if (flight.getStatus().equals("WARNING")) warningFlights++; - - flightInfo.add(new FlightInfo(flight.getId(), flight.getNumberRows(), 
flight.getFilename(), flight.getExceptions())); - - validFlights++; - } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | MalformedFlightFileException | - SQLException e) { - System.err.println(e.getMessage()); - flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); - errorFlights++; - } finally { - Files.delete(Paths.get(processedCSVFile.getAbsolutePath())); - Files.delete(Paths.get(tempExtractedFile.getAbsolutePath())); - } - } else { - flightErrors.put(entry.getName(), new UploadException("Unknown file type contained in zip file (flight logs should be .csv files).", entry.getName())); - errorFlights++; - } - } + FlightFileProcessor.Pipeline pipeline = new FlightFileProcessor.Pipeline(connection, upload, zipFile); + + var flights = new ConcurrentLinkedQueue(); + long startNanos = System.nanoTime(); + pool.submit(() -> + pipeline + .stream() + .parallel() + .flatMap(pipeline::parse) + .map(pipeline::build) + .filter(Objects::nonNull) + .map(pipeline::tabulateFlightStatus) + .forEach(flights::add) + ).join(); + long endNanos = System.nanoTime(); + double s = 1e-9 * (double) (endNanos - startNanos); + System.out.println("Took " + s + "s to process flights"); + + startNanos = System.nanoTime(); + + Flight.batchUpdateDatabase(connection, upload, flights); + // flights.forEach(f -> f.updateDatabase(connection, uploadId, uploaderId, fleetId)); + + endNanos = System.nanoTime(); + s = 1e-9 * (double) (endNanos - startNanos); + System.out.println("Took " + s + "s to upload flights to database"); + flightErrors = pipeline.getFlightErrors(); + errorFlights = flightErrors.size(); + warningFlights = pipeline.getWarningFlightsCount(); + validFlights = pipeline.getValidFlightsCount(); } catch (java.nio.file.NoSuchFileException e) { System.err.println("NoSuchFileException: " + e); @@ -401,22 +316,7 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle 
UploadError.insertError(connection, uploadId, "Could not read from zip file: please delete this upload and re-upload."); status = ERROR_STATUS_STR; uploadException = new Exception(e.toString() + ", could not read from zip file: please delete this upload and re-upload."); - } catch (NotDatFile e) { - LOG.log(Level.SEVERE, "NotDatFile: {0}", e.toString()); - e.printStackTrace(); - - UploadError.insertError(connection, uploadId, "Tried to process a non-DAT file as a DAT file."); - status = ERROR_STATUS_STR; - uploadException = new Exception(e + ", tried to process a non-DAT file as a DAT file."); - } catch (FileEnd e) { - LOG.log(Level.SEVERE, "FileEnd: {0}", e.toString()); - e.printStackTrace(); - - UploadError.insertError(connection, uploadId, "Reached the end of a file while doing DAT processing"); - status = ERROR_STATUS_STR; - uploadException = new Exception(e + ", reached the end of a file while doing DAT processing"); } - } else { //insert an upload error for this upload status = ERROR_STATUS_STR; @@ -438,7 +338,6 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle //insert all the flight errors to the database for (Map.Entry entry : flightErrors.entrySet()) { UploadException exception = entry.getValue(); - FlightError.insertError(connection, uploadId, exception.getFilename(), exception.getMessage()); } @@ -457,16 +356,16 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle System.err.println("email in " + elapsed_seconds); uploadProcessedEmail.setImportElapsedTime(elapsed_seconds); - System.out.println("valid flights: " + validFlights); - System.out.println("warning flights: " + warningFlights); - System.out.println("error flights: " + errorFlights); + LOG.info("valid flights: " + validFlights); + LOG.info("warning flights: " + warningFlights); + LOG.info("error flights: " + errorFlights); uploadProcessedEmail.setValidFlights(validFlights); //iterate over all the flights without warnings for 
(FlightInfo info : flightInfo) { uploadProcessedEmail.addFlight(info.filename, info.id, info.length); - ArrayList exceptions = info.exceptions; + List exceptions = info.exceptions; if (exceptions.size() == 0) { uploadProcessedEmail.flightImportOK(info.filename); } @@ -483,7 +382,7 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle uploadProcessedEmail.setWarningFlights(warningFlights); for (FlightInfo info : flightInfo) { - ArrayList exceptions = info.exceptions; + List exceptions = info.exceptions; if (exceptions.size() > 0) { for (MalformedFlightFileException exception : exceptions) { @@ -514,7 +413,7 @@ private static void placeInZip(String file, String zipFileName) throws IOExcepti } private static File convertDATFile(File file) throws NotDatFile, IOException, FileEnd { - System.out.println("Converting to CSV: " + file.getAbsolutePath()); + LOG.info("Converting to CSV: " + file.getAbsolutePath()); DatFile datFile = DatFile.createDatFile(file.getAbsolutePath()); datFile.reset(); datFile.preAnalyze(); @@ -527,7 +426,7 @@ private static File convertDATFile(File file) throws NotDatFile, IOException, Fi datFile.reset(); AnalyzeDatResults results = convertDat.analyze(false); - System.out.println(datFile.getFile().getAbsolutePath()); + LOG.info(datFile.getFile().getAbsolutePath()); return datFile.getFile(); } diff --git a/src/main/java/org/ngafid/accounts/UserPreferences.java b/src/main/java/org/ngafid/accounts/UserPreferences.java index f3083405a..f9330d4eb 100644 --- a/src/main/java/org/ngafid/accounts/UserPreferences.java +++ b/src/main/java/org/ngafid/accounts/UserPreferences.java @@ -2,7 +2,7 @@ import java.util.List; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; public class UserPreferences { private int userId, decimalPrecision; diff --git a/src/main/java/org/ngafid/airports/Airports.java b/src/main/java/org/ngafid/airports/Airports.java index 6c0608c75..86c83aa19 
100644 --- a/src/main/java/org/ngafid/airports/Airports.java +++ b/src/main/java/org/ngafid/airports/Airports.java @@ -9,11 +9,13 @@ import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; - +import java.util.logging.*; import org.ngafid.common.MutableDouble; public class Airports { + private static final Logger LOG = Logger.getLogger(Airports.class.getName()); + public final static double AVERAGE_RADIUS_OF_EARTH_KM = 6371; private static HashMap> geoHashToAirport; @@ -33,7 +35,7 @@ public class Airports { System.err.println("export AIRPORTS_FILE="); System.exit(1); } - System.out.println("AIRPORTS_FILE: '" + System.getenv("AIRPORTS_FILE") + "'"); + LOG.info("AIRPORTS_FILE: '" + System.getenv("AIRPORTS_FILE") + "'"); AIRPORTS_FILE = System.getenv("AIRPORTS_FILE"); @@ -96,13 +98,13 @@ public class Airports { numberAirports++; } - System.out.println("Creating buffered reader for '" + RUNWAYS_FILE + "'"); + LOG.info("Creating buffered reader for '" + RUNWAYS_FILE + "'"); //now read the runways file and add runways to airports br = new BufferedReader(new FileReader(RUNWAYS_FILE)); - System.out.println("buffered reader is ready? " + br.ready()); + LOG.info("buffered reader is ready? 
" + br.ready()); while ((line = br.readLine()) != null) { - //System.out.println("read runways line: " + line); + //LOG.info("read runways line: " + line); String[] values = line.split(","); @@ -135,7 +137,7 @@ public class Airports { } airport.addRunway(runway); - //System.out.println("Adding " + runway + " to " + airport); + //LOG.info("Adding " + runway + " to " + airport); } } catch (Exception e) { @@ -143,9 +145,9 @@ public class Airports { System.exit(1); } - System.out.println("Read " + numberAirports + " airports."); - System.out.println("airports HashMap size: " + geoHashToAirport.size()); - System.out.println("max airport ArrayList: " + maxHashSize); + LOG.info("Read " + numberAirports + " airports."); + LOG.info("airports HashMap size: " + geoHashToAirport.size()); + LOG.info("max airport ArrayList: " + maxHashSize); } /** @@ -231,11 +233,11 @@ public static Airport getNearestAirportWithin(double latitude, double longitude, ArrayList hashedAirports = geoHashToAirport.get(geoHashes[i]); if (hashedAirports != null) { - // System.out.println("\t" + geoHashes[i] + " resulted in " + hashedAirports.size() + " airports."); + // LOG.info("\t" + geoHashes[i] + " resulted in " + hashedAirports.size() + " airports."); for (int j = 0; j < hashedAirports.size(); j++) { Airport airport = hashedAirports.get(j); double distanceFt = calculateDistanceInFeet(latitude, longitude, airport.latitude, airport.longitude); - // System.out.println("\t\t" + airport + ", distanceFt: " + distanceFt); + // LOG.info("\t\t" + airport + ", distanceFt: " + distanceFt); if (distanceFt < minDistance) { nearestAirport = airport; @@ -248,9 +250,9 @@ public static Airport getNearestAirportWithin(double latitude, double longitude, /* if (nearestAirport != null) { - System.out.println("nearest airport: " + nearestAirport + ", " + minDistance); + LOG.info("nearest airport: " + nearestAirport + ", " + minDistance); } else { - System.out.println("nearest airport: NULL"); + LOG.info("nearest 
airport: NULL"); } */ @@ -258,7 +260,6 @@ public static Airport getNearestAirportWithin(double latitude, double longitude, } public static boolean hasRunwayInfo(String iataCode) { - System.out.println("checking to see if airport '" + iataCode + "' has runway info"); return iataToAirport.get(iataCode).hasRunways(); } diff --git a/src/main/java/org/ngafid/flights/Airframes.java b/src/main/java/org/ngafid/flights/Airframes.java index bf0b33fda..e5e3f8606 100644 --- a/src/main/java/org/ngafid/flights/Airframes.java +++ b/src/main/java/org/ngafid/flights/Airframes.java @@ -7,12 +7,50 @@ import java.sql.SQLException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.Set; import java.util.logging.Logger; public class Airframes { private static final Logger LOG = Logger.getLogger(Airframes.class.getName()); + + /** + * {@link Airframes} names + * + * TODO: In the future, we may want to consider using Set rather than hardcoded strings.
+ * This would make our code more robust to varying airframe names + **/ + public static final String AIRFRAME_SCAN_EAGLE = "ScanEagle"; + public static final String AIRFRAME_DJI = "DJI"; + + public static final String AIRFRAME_CESSNA_172S = "Cessna 172S"; + public static final String AIRFRAME_CESSNA_172R = "Cessna 172R"; + public static final String AIRFRAME_CESSNA_172T = "Cessna 172T"; + public static final String AIRFRAME_CESSNA_400 = "Cessna 400"; + public static final String AIRFRAME_CESSNA_525 = "Cessna 525"; + public static final String AIRFRAME_CESSNA_MODEL_525 = "Cessna Model 525"; + public static final String AIRFRAME_CESSNA_T182T = "Cessna T182T"; + public static final String AIRFRAME_CESSNA_182T = "Cessna 182T"; + + public static final String AIRFRAME_PA_28_181 = "PA-28-181"; + public static final String AIRFRAME_PA_44_180 = "PA-44-180"; + public static final String AIRFRAME_PIPER_PA_46_500TP_MERIDIAN = "Piper PA-46-500TP Meridian"; + + public static final String AIRFRAME_CIRRUS_SR20 = "Cirrus SR20"; + public static final String AIRFRAME_CIRRUS_SR22 = "Cirrus SR22"; + + public static final String AIRFRAME_BEECHCRAFT_A36_G36 = "Beechcraft A36/G36"; + public static final String AIRFRAME_BEECHCRAFT_G58 = "Beechcraft G58"; + + public static final String AIRFRAME_DIAMOND_DA_40 = "Diamond DA 40"; + public static final String AIRFRAME_DIAMOND_DA40 = "Diamond DA40"; + public static final String AIRFRAME_DIAMOND_DA40NG = "Diamond DA40NG"; + public static final String AIRFRAME_DIAMOND_DA42NG = "Diamond DA42NG"; + public static final String AIRFRAME_DIAMOND_DA_40_F = "Diamond DA 40 F"; + + public static final String AIRFRAME_QUEST_KODIAK_100 = "Quest Kodiak 100"; private static HashMap nameIdMap = new HashMap<>(); private static HashMap airframeNameMap = new HashMap<>(); @@ -21,6 +59,28 @@ public class Airframes { private static HashSet fleetAirframes = new HashSet<>(); + public static final Set FIXED_WING_AIRFRAMES = Collections.unmodifiableSet(Set.of( + 
AIRFRAME_CESSNA_172R, + AIRFRAME_CESSNA_172S, + AIRFRAME_CESSNA_172T, + AIRFRAME_CESSNA_182T, + AIRFRAME_CESSNA_T182T, + AIRFRAME_CESSNA_MODEL_525, + AIRFRAME_CIRRUS_SR20, + AIRFRAME_CIRRUS_SR22, + AIRFRAME_DIAMOND_DA40, + AIRFRAME_DIAMOND_DA_40_F, + AIRFRAME_DIAMOND_DA40NG, + AIRFRAME_DIAMOND_DA42NG, + AIRFRAME_PA_28_181, + AIRFRAME_PA_44_180, + AIRFRAME_PIPER_PA_46_500TP_MERIDIAN, + AIRFRAME_QUEST_KODIAK_100, + AIRFRAME_CESSNA_400, + AIRFRAME_BEECHCRAFT_A36_G36, + AIRFRAME_BEECHCRAFT_G58 + )); + public static void setAirframeFleet(Connection connection, int airframeId, int fleetId) throws SQLException { String key = airframeId + "-" + fleetId; diff --git a/src/main/java/org/ngafid/flights/DJIFlightProcessor.java b/src/main/java/org/ngafid/flights/DJIFlightProcessor.java index 344ff5cbd..8e9039565 100644 --- a/src/main/java/org/ngafid/flights/DJIFlightProcessor.java +++ b/src/main/java/org/ngafid/flights/DJIFlightProcessor.java @@ -43,7 +43,6 @@ public static Flight processDATFile(int fleetId, String entry, InputStream strea if (attributeMap.containsKey("dateTime")) { calculateDateTime(connection, doubleTimeSeriesMap, stringTimeSeriesMap, attributeMap.get("dateTime")); - } else { String dateTimeStr = findStartDateTime(doubleTimeSeriesMap); if (dateTimeStr != null) { diff --git a/src/main/java/org/ngafid/flights/DoubleTimeSeries.java b/src/main/java/org/ngafid/flights/DoubleTimeSeries.java index 21349dcc6..408686a72 100644 --- a/src/main/java/org/ngafid/flights/DoubleTimeSeries.java +++ b/src/main/java/org/ngafid/flights/DoubleTimeSeries.java @@ -13,14 +13,14 @@ import java.util.ArrayList; import java.util.Optional; import java.util.logging.Logger; +import java.util.stream.DoubleStream; import java.util.zip.Deflater; -import java.util.zip.Inflater; import org.ngafid.Database; import org.ngafid.common.Compression; import org.ngafid.filters.Pair; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; import 
javax.sql.rowset.serial.SerialBlob; @@ -29,7 +29,6 @@ public class DoubleTimeSeries { private static final int COMPRESSION_LEVEL = Deflater.DEFAULT_COMPRESSION; private static final String DS_COLUMNS = "ds.id, ds.flight_id, ds.name_id, ds.data_type_id, ds.length, ds.valid_length, ds.min, ds.avg, ds.max, ds.data"; - private boolean cache = true; private int id = -1; private int flightId = -1; private int nameId; @@ -40,6 +39,9 @@ public class DoubleTimeSeries { private double[] data; private int size = 0; + // Set this to true if this double time series is temporary and should not be written to the database. + private boolean temporary = false; + // Now called size since data.length is the buffer length and size is the number of elements in the buffer // private int length = -1; private double min = Double.MAX_VALUE; @@ -47,38 +49,48 @@ public class DoubleTimeSeries { private double avg; private double max = -Double.MAX_VALUE; - public DoubleTimeSeries(Connection connection, String name, String dataType) throws SQLException { - this(connection, name, dataType, 16); - } - - public DoubleTimeSeries(Connection connection, String name, String dataType, int sizeHint) throws SQLException { + // Construct from an array + public DoubleTimeSeries(String name, String dataType, double[] data, int size) { this.name = name; - this.nameId = SeriesNames.getDoubleNameId(connection, name); this.dataType = dataType; - this.typeId = TypeNames.getId(connection, dataType); - this.data = new double[sizeHint]; + this.data = data; + this.size = size; - min = Double.NaN; - avg = Double.NaN; - max = Double.NaN; + calculateValidCountMinMaxAvg(); + } - validCount = 0; + public DoubleTimeSeries(String name, String dataType, double[] data) { + this(name, dataType, data, data.length); } + public DoubleTimeSeries(String name, String dataType, int sizeHint) { + this(name, dataType, new double[sizeHint], 0); + } - public DoubleTimeSeries(Connection connection, String name, String dataType, boolean 
cache) throws SQLException { - this(connection, name, dataType); + public DoubleTimeSeries(String name, String dataType) { + this(name, dataType, 16); + } - this.cache = cache; + public DoubleTimeSeries(Connection connection, String name, String dataType, int sizeHint) throws SQLException { + this(name, dataType, sizeHint); + setNameId(connection); + setTypeId(connection); + } + + public DoubleTimeSeries(Connection connection, String name, String dataType) throws SQLException { + this(connection, name, dataType, 16); } public DoubleTimeSeries(Connection connection, String name, String dataType, ArrayList stringTimeSeries) throws SQLException { + this(name, dataType, stringTimeSeries); + setNameId(connection); + setTypeId(connection); + } + + public DoubleTimeSeries(String name, String dataType, ArrayList stringTimeSeries) { this.name = name; - this.nameId = SeriesNames.getDoubleNameId(connection, name); this.dataType = dataType; - this.typeId = TypeNames.getId(connection, dataType); - // timeSeries = new ArrayList(); this.data = new double[stringTimeSeries.size()]; int emptyValues = 0; @@ -88,7 +100,6 @@ public DoubleTimeSeries(Connection connection, String name, String dataType, Arr for (int i = 0; i < stringTimeSeries.size(); i++) { String currentValue = stringTimeSeries.get(i); if (currentValue.length() == 0) { - //System.err.println("WARNING: double column '" + name + "' value[" + i + "] is empty."); this.add(Double.NaN); emptyValues++; continue; @@ -118,45 +129,40 @@ public DoubleTimeSeries(Connection connection, String name, String dataType, Arr avg /= validCount; } - /** - * Checks to see whether this series will be cached in the database - * - * @return a boolean representaion of wheteher or not it should be cached - */ - public final boolean isCached() { - return this.cache; - } + public DoubleTimeSeries(Connection connection, ResultSet resultSet) throws SQLException, IOException { + id = resultSet.getInt(1); + flightId = resultSet.getInt(2); + nameId = 
resultSet.getInt(3); + name = SeriesNames.getDoubleName(connection, nameId); + typeId = resultSet.getInt(4); + dataType = TypeNames.getName(connection, typeId); + size = resultSet.getInt(5); + validCount = resultSet.getInt(6); + min = resultSet.getDouble(7); + avg = resultSet.getDouble(8); + max = resultSet.getDouble(9); - /** - * Gets the name of the DoubleTimeSeries. - * @return the column name of the DoubleTimeSeries - */ - public String getName() { - return name; + Blob values = resultSet.getBlob(10); + byte[] bytes = values.getBytes(1, (int)values.length()); + values.free(); + + this.data = Compression.inflateDoubleArray(bytes, size); } - - /** - * Gets the minimum value of the DoubleTimeSeries. - * @return the minimum value of the DoubleTimeSeries - */ - public double getMin() { - return min; + + public interface TimeStepCalculation { + double compute(int i); } - /** - * Gets the maximum value of the DoubleTimeSeries. - * @return the maximum value of the DoubleTimeSeries - */ - public double getMax() { - return max; + public void setTemporary(boolean temp) { + this.temporary = temp; } - /** - * Gets the average value of the DoubleTimeSeries. 
- * @return the average value of the DoubleTimeSeries - */ - public double getAvg() { - return avg; + public static DoubleTimeSeries computed(String name, String dataType, int length, TimeStepCalculation calculation) { + double[] data = new double[length]; + for (int i = 0; i < length; i++) + data[i] = calculation.compute(i); + + return new DoubleTimeSeries(name, dataType, data, length); } public static Pair getMinMax(Connection connection, int flightId, String name) throws SQLException { @@ -263,44 +269,65 @@ public static DoubleTimeSeries getDoubleTimeSeries(Connection connection, int fl } } - public DoubleTimeSeries(Connection connection, ResultSet resultSet) throws SQLException, IOException { - id = resultSet.getInt(1); - flightId = resultSet.getInt(2); - nameId = resultSet.getInt(3); - name = SeriesNames.getDoubleName(connection, nameId); - typeId = resultSet.getInt(4); - dataType = TypeNames.getName(connection, typeId); - size = resultSet.getInt(5); - validCount = resultSet.getInt(6); - min = resultSet.getDouble(7); - avg = resultSet.getDouble(8); - max = resultSet.getDouble(9); + private void setNameId(Connection connection) throws SQLException { + this.nameId = SeriesNames.getDoubleNameId(connection, name); + } - Blob values = resultSet.getBlob(10); - byte[] bytes = values.getBytes(1, (int)values.length()); - values.free(); + private void setTypeId(Connection connection) throws SQLException { + this.typeId = TypeNames.getId(connection, dataType); + } + + private void calculateValidCountMinMaxAvg() { + if (size <= 0) + return; - this.data = Compression.inflateDoubleArray(bytes, size); - - // OLD COMPRESSION CODE - // byte[] bytes = values.getBytes(1, (int)values.length()); - // values.free(); - - // LOG.info("id: " + id + ", flightId: " + flightId + ", name: " + name + ", length: " + size + ", validLength: " + validCount + ", min: " + min + ", avg: " + avg + ", max: " + max); - - // try { - // Inflater inflater = new Inflater(); - // inflater.setInput(bytes, 
0, bytes.length); - // ByteBuffer timeSeriesBytes = ByteBuffer.allocate(size * Double.BYTES); - // int _inflatedSize = inflater.inflate(timeSeriesBytes.array()); - // double[] timeSeriesArray = new double[size]; - // timeSeriesBytes.asDoubleBuffer().get(timeSeriesArray); - // this.data = timeSeriesArray; - // } catch (Exception e) { - // e.printStackTrace(); - // } - // - // LOG.info("id: " + id + ", flightId: " + flightId + ", name: " + name + ", length: " + size + ", validLength: " + validCount + ", min: " + min + ", avg: " + avg + ", max: " + max); + min = data[0]; + max = data[0]; + + double sum = 0.0; + for (int i = 1; i < size; i++) { + if (Double.isNaN(data[i])) + continue; + + sum += data[i]; + + min = min > data[i] ? data[i] : min; + max = max < data[i] ? data[i] : max; + } + + avg = sum / validCount; + } + + /** + * Gets the name of the DoubleTimeSeries. + * @return the column name of the DoubleTimeSeries + */ + public String getName() { + return name; + } + + /** + * Gets the minimum value of the DoubleTimeSeries. + * @return the minimum value of the DoubleTimeSeries + */ + public double getMin() { + return min; + } + + /** + * Gets the maximum value of the DoubleTimeSeries. + * @return the maximum value of the DoubleTimeSeries + */ + public double getMax() { + return max; + } + + /** + * Gets the average value of the DoubleTimeSeries. 
+ * @return the average value of the DoubleTimeSeries + */ + public double getAvg() { + return avg; } public String toString() { @@ -371,87 +398,65 @@ public double[] sliceCopy(int from, int to) { return slice; } - public void updateDatabase(Connection connection, int flightId) { - //System.out.println("Updating database for " + this); - if (!this.cache) return; + public static PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + return connection.prepareStatement("INSERT INTO double_series (flight_id, name_id, data_type_id, length, valid_length, min, avg, max, data) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"); + } - try { - PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO double_series (flight_id, name_id, data_type_id, length, valid_length, min, avg, max, data) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"); + public void addBatch(Connection connection, PreparedStatement preparedStatement, int flightId) throws SQLException, IOException { + if (typeId == -1) + setTypeId(connection); + if (nameId == -1) + setNameId(connection); - preparedStatement.setInt(1, flightId); - preparedStatement.setInt(2, nameId); - preparedStatement.setInt(3, typeId); + preparedStatement.setInt(1, flightId); + preparedStatement.setInt(2, nameId); + preparedStatement.setInt(3, typeId); - preparedStatement.setInt(4, this.size); - preparedStatement.setInt(5, validCount); + preparedStatement.setInt(4, this.size); + preparedStatement.setInt(5, validCount); - if (Double.isNaN(min)) { - preparedStatement.setNull(6, java.sql.Types.DOUBLE); - } else { - preparedStatement.setDouble(6, min); - } + if (Double.isNaN(min)) { + preparedStatement.setNull(6, java.sql.Types.DOUBLE); + } else { + preparedStatement.setDouble(6, min); + } - if (Double.isNaN(avg)) { - preparedStatement.setNull(7, java.sql.Types.DOUBLE); - } else { - preparedStatement.setDouble(7, avg); - } + if (Double.isNaN(avg)) { + preparedStatement.setNull(7, java.sql.Types.DOUBLE); + } else 
{ + preparedStatement.setDouble(7, avg); + } - if (Double.isNaN(max)) { - preparedStatement.setNull(8, java.sql.Types.DOUBLE); - } else { - preparedStatement.setDouble(8, max); - } + if (Double.isNaN(max)) { + preparedStatement.setNull(8, java.sql.Types.DOUBLE); + } else { + preparedStatement.setDouble(8, max); + } - // UPDATED COMPRESSION CODE - // byte[] compressed = Compression.compressDoubleArray(this.data); - // Blob seriesBlob = new SerialBlob(compressed); - - // Possible optimization: using an array instead of an array list for timeSeries, since ArrayList - // is a list of objects rather than a list of primitives - it consumes much more memory. - // It may also be possible to use some memory tricks to do this with no copying by wrapping the double[]. - ByteBuffer timeSeriesBytes = ByteBuffer.allocate(size * Double.BYTES); - for (int i = 0; i < size; i++) - timeSeriesBytes.putDouble(data[i]); - - // Hopefully this is enough memory. It should be enough. - int bufferSize = timeSeriesBytes.capacity() + 256; - ByteBuffer compressedTimeSeries; - - // This is probably super overkill but it won't hurt? - // If there is not enough memory in the buffer it will through BufferOverflowException. If that happens, - // allocate more memory. - // I don't think it should happen unless the time series unless the compressed data is larger than the - // raw data, which should never happen. 
- int compressedDataLength; - - for (;;) { - compressedTimeSeries = ByteBuffer.allocate(bufferSize); - try { - Deflater deflater = new Deflater(DoubleTimeSeries.COMPRESSION_LEVEL); - deflater.setInput(timeSeriesBytes.array()); - deflater.finish(); - compressedDataLength = deflater.deflate(compressedTimeSeries.array()); - deflater.end(); - break; - } catch (BufferOverflowException _boe) { - bufferSize *= 2; - } - } + // UPDATED COMPRESSION CODE + byte[] compressed = Compression.compressDoubleArray(this.data); + Blob seriesBlob = new SerialBlob(compressed); - // Have to do this to make sure there are no extra zeroes at the end of the buffer, which may happen because - // we don't know what the compressed data size until after it is done being compressed - byte[] blobBytes = new byte[compressedDataLength]; - compressedTimeSeries.get(blobBytes); - Blob seriesBlob = new SerialBlob(blobBytes); + preparedStatement.setBlob(9, seriesBlob); - preparedStatement.setBlob(9, seriesBlob); - preparedStatement.executeUpdate(); - preparedStatement.close(); + preparedStatement.addBatch(); + } - seriesBlob.free(); + public void updateDatabase(Connection connection, int flightId) { + //System.out.println("Updating database for " + this); + if (this.temporary) + return; + try { + if (typeId == -1) + setTypeId(connection); + if (nameId == -1) + setNameId(connection); - } catch (SQLException e) { // | IOException e) { // Re-enable this for the new compression code. 
+ PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO double_series (flight_id, name_id, data_type_id, length, valid_length, min, avg, max, data) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"); + this.addBatch(connection, preparedStatement, flightId); + preparedStatement.executeBatch(); + preparedStatement.close(); + } catch (SQLException | IOException e) { e.printStackTrace(); System.exit(1); } @@ -491,15 +496,19 @@ public DoubleTimeSeries lag(Connection connection, int n) throws SQLException { if (existingSeries.isPresent()) { return existingSeries.get(); + } else { + return lag(n); } + } - DoubleTimeSeries laggedSeries = new DoubleTimeSeries(connection, this.name + LAG_SUFFIX + n, "double"); + public DoubleTimeSeries lag(int n) { + DoubleTimeSeries laggedSeries = new DoubleTimeSeries(this.name + LAG_SUFFIX + n, "double"); for (int i = 0; i < data.length; i++) { laggedSeries.add((i >= n) ? data[i - n] : Double.NaN); } - return laggedSeries; + return laggedSeries; } public DoubleTimeSeries lead(Connection connection, int n) throws SQLException { @@ -507,9 +516,13 @@ public DoubleTimeSeries lead(Connection connection, int n) throws SQLException { if (existingSeries.isPresent()) { return existingSeries.get(); + } else { + return lead(n); } + } - DoubleTimeSeries leadingSeries = new DoubleTimeSeries(connection, this.name + LEAD_SUFFIX + n, "double"); + public DoubleTimeSeries lead(int n) { + DoubleTimeSeries leadingSeries = new DoubleTimeSeries(this.name + LEAD_SUFFIX + n, "double"); int len = data.length; for (int i = 0; i < len; i++) { @@ -525,5 +538,12 @@ public DoubleTimeSeries subSeries(Connection connection, int from, int until) th System.arraycopy(data, from, newSeries.data, 0, until - from); return newSeries; } + + public DoubleTimeSeries subSeries(int from, int until) throws SQLException { + DoubleTimeSeries newSeries = new DoubleTimeSeries(name, dataType, until - from); + newSeries.size = until - from; + System.arraycopy(data, from, 
newSeries.data, 0, until - from); + return newSeries; + } } diff --git a/src/main/java/org/ngafid/flights/ErrorMessage.java b/src/main/java/org/ngafid/flights/ErrorMessage.java index 4195f46de..361fb66cc 100644 --- a/src/main/java/org/ngafid/flights/ErrorMessage.java +++ b/src/main/java/org/ngafid/flights/ErrorMessage.java @@ -21,15 +21,13 @@ public static int getMessageId(Connection connection, String message) throws SQL if (id != null) { return id; - } else { //id wasn't in the hashmap, look it up String queryString = "SELECT id FROM flight_messages WHERE message = ?"; PreparedStatement query = connection.prepareStatement(queryString); query.setString(1, message); - LOG.info(query.toString()); - System.out.println(query); + // LOG.info(query.toString()); ResultSet resultSet = query.executeQuery(); if (resultSet.next()) { diff --git a/src/main/java/org/ngafid/flights/Flight.java b/src/main/java/org/ngafid/flights/Flight.java index 5c9e7ca53..4630b1f18 100644 --- a/src/main/java/org/ngafid/flights/Flight.java +++ b/src/main/java/org/ngafid/flights/Flight.java @@ -4,12 +4,15 @@ import java.sql.*; import java.text.DateFormat; import java.time.*; +import java.lang.Class; +import java.lang.reflect.*; import static java.time.temporal.ChronoUnit.SECONDS; import java.util.Iterator; import java.text.SimpleDateFormat; import java.text.ParseException; import java.util.Date; import java.util.Calendar; +import java.util.Collections; // XML stuff. 
import javax.xml.parsers.DocumentBuilderFactory; @@ -46,14 +49,15 @@ import java.util.Optional; import java.util.logging.Logger; import java.util.stream.Collectors; +import java.util.stream.Stream; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; import javax.xml.bind.DatatypeConverter; import org.ngafid.common.*; +import org.apache.commons.lang.NotImplementedException; import org.ngafid.Database; -import org.ngafid.common.*; import org.ngafid.airports.Airport; import org.ngafid.airports.Airports; import org.ngafid.airports.Runway; @@ -61,8 +65,9 @@ import org.ngafid.filters.Filter; import org.ngafid.flights.calculations.*; +import org.ngafid.flights.process.*; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; /** * This class represents a Flight in the NGAFID. It also contains static methods for database interaction @@ -82,6 +87,7 @@ public class Flight { private final static String FLIGHT_COLUMNS = "id, fleet_id, uploader_id, upload_id, system_id, airframe_id, airframe_type_id, start_time, end_time, filename, md5_hash, number_rows, status, has_coords, has_agl, insert_completed, processing_status"; private final static String FLIGHT_COLUMNS_TAILS = "id, fleet_id, uploader_id, upload_id, f.system_id, airframe_id, airframe_type_id, start_time, end_time, filename, md5_hash, number_rows, status, has_coords, has_agl, insert_completed, processing_status"; + // TODO: Roll a lot of this stuff up into some sort of meta-data object? 
private int id = -1; private int fleetId = -1; private int uploaderId = -1; @@ -96,7 +102,6 @@ public class Flight { private String tailNumber; private String suggestedTailNumber; - private String calculationEndpoint; private boolean tailConfirmed; private String md5Hash; @@ -126,7 +131,7 @@ public class Flight { private long processingStatus = 0; private String status; - private ArrayList exceptions = new ArrayList(); + private List exceptions = new ArrayList(); private int numberRows; private String fileInformation; @@ -162,7 +167,7 @@ public static ArrayList getFlightsFromUpload(Connection connection, int return flights; } - public ArrayList getExceptions() { + public List getExceptions() { return exceptions; } @@ -555,7 +560,6 @@ public static long getTotalFlightHours(Connection connection, String queryString resultSet.next(); long diffSeconds = resultSet.getLong(1); - System.out.println("total time is: " + diffSeconds); resultSet.close(); query.close(); @@ -782,7 +786,6 @@ public static List getFlightsByRange(Connection connection, Filter filte while (resultSet.next()) { flights.add(new Flight(connection, resultSet)); } - System.out.println(flights); resultSet.close(); query.close(); @@ -1119,8 +1122,6 @@ public static List getUnassociatedTags(Connection connection, int fli return getAllTags(connection, fleetId); } - System.out.println("TAG NUMS: " + tagIds.toString()); - String queryString = "SELECT id, fleet_id, name, description, color FROM flight_tags " + idLimStr(tagIds, true); PreparedStatement query = connection.prepareStatement(queryString); ResultSet resultSet = query.executeQuery(); @@ -1264,7 +1265,7 @@ public static FlightTag editTag(Connection connection, FlightTag flightTag) thro } queryString.append("WHERE id = " + flightTag.hashCode()); - System.out.println("Query String Update: " + queryString.toString()); + LOG.info("Query String Update: " + queryString.toString()); PreparedStatement query = 
connection.prepareStatement(queryString.toString()); query.executeUpdate(); @@ -1306,7 +1307,6 @@ public static FlightTag createTag(int fleetId, int flightId, String name, String index = resultSet.getInt(1); } - System.out.println(index); associateTag(flightId, index, connection); return new FlightTag(index, fleetId, name, description, color); @@ -1350,6 +1350,34 @@ public static List getSimAircraft(Connection connection, int fleetId) th return paths; } + public Flight(Connection connection, FlightMeta meta, Map doubletimeSeries, Map stringTimeSeries, List itinerary, List exceptions) throws SQLException { + fleetId = meta.fleetId; + uploaderId = meta.uploaderId; + uploadId = meta.uploadId; + + filename = meta.filename; + + airframeName = meta.airframeName; + airframeNameId = Airframes.getNameId(connection, airframeName); + + airframeType = meta.airframeType; + airframeTypeId = Airframes.getTypeId(connection, airframeType); + + systemId = meta.systemId; + suggestedTailNumber = meta.suggestedTailNumber; + md5Hash = meta.md5Hash; + startDateTime = meta.startDateTime; + endDateTime = meta.endDateTime; + + hasCoords = doubleTimeSeries.containsKey(LATITUDE) && doubleTimeSeries.containsKey(LONGITUDE); + hasAGL = doubleTimeSeries.containsKey(ALT_AGL); + + this.exceptions = exceptions; + checkExceptions(); + + this.stringTimeSeries = Collections.unmodifiableMap(new HashMap<>(stringTimeSeries)); + } + public Flight(Connection connection, ResultSet resultSet) throws SQLException { id = resultSet.getInt(1); fleetId = resultSet.getInt(2); @@ -1480,10 +1508,27 @@ public String getEndDateTime() { return endDateTime; } + public void addException(MalformedFlightFileException me) { + exceptions.add(me); + } + + public void addHeader(String column, String dataType) { + headers.add(column); + dataTypes.add(dataType); + } + public void addDoubleTimeSeries(String name, DoubleTimeSeries doubleTimeSeries) { this.doubleTimeSeries.put(name, doubleTimeSeries); } + public Map 
getDoubleTimeSeriesMap() { + return doubleTimeSeries; + } + + public Map getStringTimeSeriesMap() { + return stringTimeSeries; + } + public DoubleTimeSeries getDoubleTimeSeries(String name) throws SQLException { if (this.doubleTimeSeries.containsKey(name)) { return this.doubleTimeSeries.get(name); @@ -1580,8 +1625,6 @@ public void calculateScanEagleStartEndTime(String timeColumnName, String latColu DoubleTimeSeries latitudes = doubleTimeSeries.get(latColumnName); DoubleTimeSeries longitudes = doubleTimeSeries.get(lonColumnName); - System.out.println("times: " + times + ", latitudes: " + latitudes + ", longitudes: " + longitudes); - if (times == null) { throw new MalformedFlightFileException("Time column '" + timeColumnName + "' did not exist! Cannot set start/end times."); } @@ -1601,9 +1644,6 @@ public void calculateScanEagleStartEndTime(String timeColumnName, String latColu int latSize = latitudes.size(); int lonSize = longitudes.size(); - System.out.println("\ttime size: " + timeSize + ", lat size: " + latSize + ", lon size: " + lonSize); - System.out.println("\tstart time: " + startDateTime); - System.out.println("\tend time: " + endDateTime); String firstTime = null; for (int i = 0; i < times.size(); i++) { @@ -1612,7 +1652,6 @@ public void calculateScanEagleStartEndTime(String timeColumnName, String latColu break; } } - System.out.println("\tfirst time: '" + firstTime + "'"); String lastTime = null; for (int i = times.size() - 1; i >= 0; i--) { @@ -1621,18 +1660,15 @@ public void calculateScanEagleStartEndTime(String timeColumnName, String latColu break; } } - System.out.println("\tlast time: '" + lastTime + "'"); double firstLat = 0.0; for (int i = 0; i < latitudes.size(); i++) { - //System.out.println("\t\tlat[" + i + "]: " + latitudes.get(i)); double lat = latitudes.get(i); if (lat != 0.0 && !Double.isNaN(lat)) { firstLat = latitudes.get(i); break; } } - System.out.println("\tfirst lat: '" + firstLat + "'"); double firstLon = 0.0; for (int i = 0; i < 
longitudes.size(); i++) { @@ -1643,15 +1679,12 @@ public void calculateScanEagleStartEndTime(String timeColumnName, String latColu break; } } - System.out.println("\tfirst long: '" + firstLon + "'"); //TODO: can't get time offset from lat/long because they aren't being set correctly startDateTime += " " + firstTime; endDateTime += " " + lastTime; - System.out.println("start date time: " + startDateTime); - System.out.println("end date time: " + endDateTime); } public void calculateStartEndTime(String dateColumnName, String timeColumnName, String offsetColumnName) throws MalformedFlightFileException { @@ -1676,8 +1709,6 @@ public void calculateStartEndTime(String dateColumnName, String timeColumnName, int timeSize = times.size(); int offsetSize = offsets.size(); - System.out.println("\tdate size: " + dateSize + ", time size: " + timeSize + ", offset size: " + offsetSize); - //get the minimum sized length of each of these series, they should all be the same but //if the last column was cut off it might not be the case int minSize = dateSize; @@ -1694,8 +1725,6 @@ public void calculateStartEndTime(String dateColumnName, String timeColumnName, start++; } - System.out.println("\tfirst date time and offset not null at index: " + start); - if (start >= minSize) { throw new MalformedFlightFileException("Date, Time or Offset columns were all null! 
Cannot set start/end times."); } @@ -1718,9 +1747,6 @@ public void calculateStartEndTime(String dateColumnName, String timeColumnName, String endTime = times.get(end); String endOffset = offsets.get(end); - System.out.println("\t\t\tfirst not null " + start + " -- " + startDate + " " + startTime + " " + startOffset); - System.out.println("\t\t\tlast not null " + endDate + " " + endTime + " " + endOffset); - OffsetDateTime startODT = null; try { startODT = TimeUtils.convertToOffset(startDate, startTime, startOffset, "+00:00"); @@ -1769,8 +1795,6 @@ private void initialize(Connection connection, InputStream inputStream) throws F throw new FatalFlightFileException("The flight file was empty."); if (fileInformation.charAt(0) != '#' && fileInformation.charAt(0) != '{') { if (fileInformation.substring(0, 4).equals("DID_")) { - System.out.println("CAME FROM A SCANEAGLE! CAN CALCULATE SUGGESTED TAIL/SYSTEM ID FROM FILENAME"); - airframeName = "ScanEagle"; airframeType = "UAS Fixed Wing"; } else { @@ -1785,19 +1809,12 @@ private void initialize(Connection connection, InputStream inputStream) throws F String[] filenameParts = filename.split("_"); startDateTime = filenameParts[0]; endDateTime = startDateTime; - System.out.println("start date: '" + startDateTime + "'"); - System.out.println("end date: '" + startDateTime + "'"); //UND doesn't have the systemId for UAS anywhere in the filename or file (sigh) suggestedTailNumber = "N" + filenameParts[1] + "ND"; systemId = suggestedTailNumber; - System.out.println("suggested tail number: '" + suggestedTailNumber + "'"); - System.out.println("system id: '" + systemId + "'"); - } else if (headers.size() > 0) { - System.out.println("JSON detected"); - Gson gson = new Gson(); JsonReader reader = new JsonReader(new InputStreamReader(inputStream)); Map jsonMap = gson.fromJson(reader, Map.class); @@ -1900,7 +1917,6 @@ private void initialize(Connection connection, InputStream inputStream) throws F //System.out.println("Headers line is: " 
+ headersLine); headers.addAll(Arrays.asList(headersLine.split("\\,", -1))); headers.replaceAll(String::trim); - System.out.println("headers are:\n" + headers.toString()); //scan eagle files have no data types, set all to "" for (int i = 0; i < headers.size(); i++) { @@ -1923,7 +1939,6 @@ private void initialize(Connection connection, InputStream inputStream) throws F String headersLine = bufferedReader.readLine(); if (headersLine.length() == 0) headersLine = bufferedReader.readLine(); //handle windows files with carriage returns - System.out.println("Headers line is: " + headersLine); headers.addAll(Arrays.asList(headersLine.split("\\,", -1))); headers.replaceAll(String::trim); @@ -2071,7 +2086,9 @@ private void process(Connection connection, InputStream inputStream) throws IOEx process(connection); } - private void process(Connection connection) throws IOException, FatalFlightFileException, SQLException { + List defaultPasses = List.of(); + + final private void process(Connection connection) throws IOException, FatalFlightFileException, SQLException { //TODO: these may be different for different airframes/flight //data recorders. depending on the airframe/flight data recorder //we should specify these. 
@@ -2111,8 +2128,6 @@ private void process(Connection connection) throws IOException, FatalFlightFileE //this is all we can do with the scan eagle data until we //get better lat/lon info hasCoords = true; - } else if (airframeName.equals("")) { - } else { calculateStartEndTime("Lcl Date", "Lcl Time", "UTCOfst"); } @@ -2120,18 +2135,23 @@ private void process(Connection connection) throws IOException, FatalFlightFileE exceptions.add(e); } + // DONE try { calculateAGL(connection, "AltAGL", "AltMSL", "Latitude", "Longitude"); } catch (MalformedFlightFileException e) { exceptions.add(e); } + // END + // DONE try { calculateAirportProximity(connection, "Latitude", "Longitude", "AltAGL"); } catch (MalformedFlightFileException e) { exceptions.add(e); } + // END + // DONE if (!airframeName.equals("ScanEagle") && !airframeName.contains("DJI")) { try { calculateTotalFuel(connection, new String[]{"FQtyL", "FQtyR"}, "Total Fuel"); @@ -2145,7 +2165,9 @@ private void process(Connection connection) throws IOException, FatalFlightFileE exceptions.add(e); } } + // END + // DONE try { if (airframeName.equals("Cessna 172S") || airframeName.equals("Cessna 172R")) { String chtNames[] = {"E1 CHT1", "E1 CHT2", "E1 CHT3", "E1 CHT4"}; @@ -2165,8 +2187,6 @@ private void process(Connection connection) throws IOException, FatalFlightFileE String egt2Names[] = {"E2 EGT1", "E2 EGT2", "E2 EGT3", "E2 EGT4"}; calculateDivergence(connection, egt2Names, "E2 EGT Divergence", "deg F"); - - } else if (airframeName.equals("Cirrus SR20") || airframeName.equals("Cessna 182T") || airframeName.equals("Cessna T182T") || airframeName.equals("Beechcraft A36/G36") || airframeName.equals("Cirrus SR22") || airframeName.equals("Cessna 400")) { String chtNames[] = {"E1 CHT1", "E1 CHT2", "E1 CHT3", "E1 CHT4", "E1 CHT5", "E1 CHT6"}; calculateDivergence(connection, chtNames, "E1 CHT Divergence", "deg F"); @@ -2203,6 +2223,7 @@ private void process(Connection connection) throws IOException, FatalFlightFileE 
LOG.severe("Skipping..."); // System.exit(1); } + // END if (!airframeName.equals("ScanEagle") && this.doubleTimeSeries.containsKey(ALT_B)) { //LOCI doesn't apply to UAS @@ -2252,7 +2273,6 @@ private void process(Connection connection) throws IOException, FatalFlightFileE } catch (MalformedFlightFileException e) { exceptions.add(e); } - } private void checkExceptions() { @@ -2341,17 +2361,11 @@ public Flight(int fleetId, String filename, String suggestedTailNumber, String a this.status = "SUCCESS"; } - public Flight(int fleetId, String zipEntryName, InputStream inputStream, Connection connection) throws IOException, FatalFlightFileException, FlightAlreadyExistsException, SQLException { + public Flight(int fleetId, String zipEntryName, InputStream inputStream, Connection connection) throws FlightProcessingException { this.fleetId = fleetId; this.filename = zipEntryName; this.tailConfirmed = false; - /* - if (!filename.contains("/")) { - throw new FatalFlightFileException("The flight file was not in a directory in the zip file. 
Flight files should be in a directory with the name of their tail number (or other aircraft identifier)."); - } - */ - String[] parts = zipEntryName.split("/"); if (parts.length <= 1) { suggestedTailNumber = null; @@ -2369,28 +2383,20 @@ public Flight(int fleetId, String zipEntryName, InputStream inputStream, Connect setMD5Hash(inputStream); //check to see if a flight with this MD5 hash already exists in the database - if (connection != null) checkIfExists(connection); + if (connection != null) + checkIfExists(connection); inputStream.reset(); process(connection, inputStream); - } catch (FatalFlightFileException | IOException e) { + } catch (FatalFlightFileException | IOException | FlightAlreadyExistsException | SQLException e) { status = "WARNING"; - throw e; - } catch (SQLException e) { - System.out.println(e); - e.printStackTrace(); - System.exit(1); + throw new FlightProcessingException(e); } checkExceptions(); } - // Constructor for a flight that takes lists of UNINSERTED time series (that is, they should not be in the database yet!) - private Flight(Connection connection, ArrayList doubleTimeSeries, ArrayList stringTimeSeries, Timestamp startTime, Timestamp endTime) { - - } - /** * GPX is an XML file that follows the schema found here http://www.topografix.com/GPX/1/1/ *

@@ -2688,7 +2694,7 @@ public void runLOCICalculations(Connection connection) throws MalformedFlightFil } CalculatedDoubleTimeSeries vspdCalculated = new CalculatedDoubleTimeSeries(connection, VSPD_CALCULATED, "ft/min", true, this); - vspdCalculated.create(new VSPDRegression(connection, this)); + vspdCalculated.create(new VSPDRegression(getDoubleTimeSeries(ALT_B))); CalculatedDoubleTimeSeries densityRatio = new CalculatedDoubleTimeSeries(connection, DENSITY_RATIO, "ratio", false, this); densityRatio.create(index -> { @@ -3327,6 +3333,119 @@ public void printValues(String[] requestedHeaders) { System.out.println(); } + public static void batchUpdateDatabase(Connection connection, Upload upload, Iterable flights) { + int fleetId = upload.getFleetId(); + int uploaderId = upload.getUploaderId(); + int uploadId = upload.getId(); + + try { + PreparedStatement preparedStatement = createPreparedStatement(connection); + for (Flight flight : flights) { + // This is fine because this stuff is mostly cached + flight.airframeNameId = Airframes.getNameId(connection, flight.airframeName); + flight.airframeTypeId = Airframes.getTypeId(connection, flight.airframeType); + Airframes.setAirframeFleet(connection, flight.airframeNameId, fleetId); + + Tails.setSuggestedTail(connection, fleetId, flight.systemId, flight.suggestedTailNumber); + flight.tailNumber = Tails.getTail(connection, fleetId, flight.systemId); + flight.tailConfirmed = Tails.getConfirmed(connection, fleetId, flight.systemId); + flight.fleetId = fleetId; + flight.uploaderId = uploaderId; + flight.uploadId = uploadId; + flight.addBatch(preparedStatement); + } + + int[] _results = preparedStatement.executeBatch(); + ResultSet results = preparedStatement.getGeneratedKeys(); + int count = 0; + + + for (Flight flight : flights) { + if (!results.next()) { + LOG.severe("ERROR: insertion of flight to the database did not result in an id. 
This should never happen."); + System.exit(1); + } + flight.id = results.getInt(1); + } + + preparedStatement.close(); + + PreparedStatement doubleTSPreparedStatement = DoubleTimeSeries.createPreparedStatement(connection); + + for (Flight flight : flights) + for (var doubleTS : flight.doubleTimeSeries.values()) + doubleTS.addBatch(connection, doubleTSPreparedStatement, flight.id); + + doubleTSPreparedStatement.executeBatch(); + doubleTSPreparedStatement.close(); + + PreparedStatement stringTSPreparedStatement = StringTimeSeries.createPreparedStatement(connection); + + for (Flight flight : flights) + for (var stringTS : flight.stringTimeSeries.values()) + stringTS.addBatch(connection, stringTSPreparedStatement, flight.id); + + stringTSPreparedStatement.executeBatch(); + stringTSPreparedStatement.close(); + + PreparedStatement itineraryPreparedStatement = Itinerary.createPreparedStatement(connection); + PreparedStatement airportPreparedStatement = Itinerary.createAirportPreparedStatement(connection); + PreparedStatement runwayPreparedStatement = Itinerary.createRunwayPreparedStatement(connection); + + for (Flight flight : flights) { + for (int i = 0; i < flight.itinerary.size(); i++) + flight.itinerary.get(i).addBatch(itineraryPreparedStatement, airportPreparedStatement, runwayPreparedStatement, fleetId, flight.id, i); + } + + itineraryPreparedStatement.executeBatch(); + itineraryPreparedStatement.close(); + airportPreparedStatement.executeBatch(); + airportPreparedStatement.close(); + runwayPreparedStatement.executeBatch(); + runwayPreparedStatement.close(); + + PreparedStatement warningPreparedStatement = FlightWarning.createPreparedStatement(connection); + + for (Flight flight : flights) + for (var e : flight.exceptions) + new FlightWarning(e.getMessage()).addBatch(connection, preparedStatement, flight.id); + + warningPreparedStatement.executeBatch(); + warningPreparedStatement.close(); + + } catch (SQLException | IOException e) { + LOG.severe("Encountered the 
following exception while inserting batch of flights: \n" + e.getMessage()); + e.printStackTrace(); + System.exit(1); + } + } + + private static PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + return connection.prepareStatement("INSERT INTO flights (fleet_id, uploader_id, upload_id, airframe_id, airframe_type_id, system_id, start_time, end_time, filename, md5_hash, number_rows, status, has_coords, has_agl, insert_completed, processing_status, start_timestamp, end_timestamp) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, UNIX_TIMESTAMP(?), UNIX_TIMESTAMP(?))", Statement.RETURN_GENERATED_KEYS); + } + + private void addBatch(PreparedStatement preparedStatement) throws SQLException { + preparedStatement.setInt(1, fleetId); + preparedStatement.setInt(2, uploaderId); + preparedStatement.setInt(3, uploadId); + preparedStatement.setInt(4, airframeNameId); + preparedStatement.setInt(5, airframeTypeId); + preparedStatement.setString(6, systemId); + preparedStatement.setString(7, startDateTime); + preparedStatement.setString(8, endDateTime); + preparedStatement.setString(9, filename); + preparedStatement.setString(10, md5Hash); + preparedStatement.setInt(11, numberRows); + preparedStatement.setString(12, status); + preparedStatement.setBoolean(13, hasCoords); + preparedStatement.setBoolean(14, hasAGL); + preparedStatement.setBoolean(15, false); //insert not yet completed + preparedStatement.setLong(16, processingStatus); + preparedStatement.setString(17, startDateTime); + preparedStatement.setString(18, endDateTime); + preparedStatement.addBatch(); + } + public void updateDatabase(Connection connection, int uploadId, int uploaderId, int fleetId) { this.fleetId = fleetId; this.uploaderId = uploaderId; @@ -3343,55 +3462,29 @@ public void updateDatabase(Connection connection, int uploadId, int uploaderId, tailNumber = Tails.getTail(connection, fleetId, systemId); tailConfirmed = Tails.getConfirmed(connection, fleetId, systemId); 
- PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO flights (fleet_id, uploader_id, upload_id, airframe_id, airframe_type_id, system_id, start_time, end_time, filename, md5_hash, number_rows, status, has_coords, has_agl, insert_completed, processing_status, start_timestamp, end_timestamp) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, UNIX_TIMESTAMP(?), UNIX_TIMESTAMP(?))", Statement.RETURN_GENERATED_KEYS); - preparedStatement.setInt(1, fleetId); - preparedStatement.setInt(2, uploaderId); - preparedStatement.setInt(3, uploadId); - preparedStatement.setInt(4, airframeNameId); - preparedStatement.setInt(5, airframeTypeId); - preparedStatement.setString(6, systemId); - preparedStatement.setString(7, startDateTime); - preparedStatement.setString(8, endDateTime); - preparedStatement.setString(9, filename); - preparedStatement.setString(10, md5Hash); - preparedStatement.setInt(11, numberRows); - preparedStatement.setString(12, status); - preparedStatement.setBoolean(13, hasCoords); - preparedStatement.setBoolean(14, hasAGL); - preparedStatement.setBoolean(15, false); //insert not yet completed - preparedStatement.setLong(16, processingStatus); - preparedStatement.setString(17, startDateTime); - preparedStatement.setString(18, endDateTime); - - System.out.println(preparedStatement); - preparedStatement.executeUpdate(); + PreparedStatement preparedStatement = createPreparedStatement(connection); + + this.addBatch(preparedStatement); + + LOG.info(preparedStatement.toString()); + preparedStatement.executeBatch(); ResultSet resultSet = preparedStatement.getGeneratedKeys(); if (resultSet.next()) { int flightId = resultSet.getInt(1); this.id = flightId; - // Comment this out unless debugging - //for (String key : doubleTimeSeries.keySet()) { - //System.out.println("double time series key: '" + key); - //System.out.println("\tis " + doubleTimeSeries.get(key).toString()); - //} - - for (DoubleTimeSeries series : doubleTimeSeries.values()) { + 
for (DoubleTimeSeries series : doubleTimeSeries.values()) series.updateDatabase(connection, flightId); - } - for (StringTimeSeries series : stringTimeSeries.values()) { + for (StringTimeSeries series : stringTimeSeries.values()) series.updateDatabase(connection, flightId); - } - for (Exception exception : exceptions) { + for (Exception exception : exceptions) FlightWarning.insertWarning(connection, flightId, exception.getMessage()); - } - for (int i = 0; i < itinerary.size(); i++) { + for (int i = 0; i < itinerary.size(); i++) itinerary.get(i).updateDatabase(connection, fleetId, flightId, i); - } PreparedStatement ps = connection.prepareStatement("UPDATE flights SET insert_completed = 1 WHERE id = ?"); ps.setInt(1, this.id); @@ -3399,7 +3492,7 @@ public void updateDatabase(Connection connection, int uploadId, int uploaderId, ps.close(); } else { - System.err.println("ERROR: insertion of flight to the database did not result in an id. This should never happen."); + LOG.severe("ERROR: insertion of flight to the database did not result in an id. 
This should never happen."); System.exit(1); } @@ -3427,10 +3520,9 @@ public void writeToFile(Connection connection, String filename) throws IOExcepti for (int i = 0; i < series.size(); i++) { String name = series.get(i).getName(); if (name.equals("AirportDistance") || name.equals("RunwayDistance") || series.get(i).getMin() == series.get(i).getMax()) { - System.out.println("Skipping column: '" + name + "'"); + LOG.warning("Skipping column: '" + name + "'"); continue; } - System.out.println("'" + name + "' min - max: " + (series.get(i).getMin() - series.get(i).getMax())); if (afterFirst) printWriter.print(","); printWriter.print(series.get(i).getName()); @@ -3480,4 +3572,12 @@ public void setAirframeType(String type) { public void setAirframeTypeID(Integer typeID) { this.airframeTypeId = typeID; } + + public void setHasCoords(boolean hasCoords) { + this.hasCoords = hasCoords; + } + + public void setHasAGL(boolean hasAGL) { + this.hasAGL = hasAGL; + } } diff --git a/src/main/java/org/ngafid/flights/FlightError.java b/src/main/java/org/ngafid/flights/FlightError.java index dc9eda052..027f851d1 100644 --- a/src/main/java/org/ngafid/flights/FlightError.java +++ b/src/main/java/org/ngafid/flights/FlightError.java @@ -25,8 +25,6 @@ public static void insertError(Connection connection, int uploadId, String filen exceptionPreparedStatement.setString(2, filename); exceptionPreparedStatement.setInt(3, ErrorMessage.getMessageId(connection, message)); - LOG.info(exceptionPreparedStatement.toString()); - exceptionPreparedStatement.executeUpdate(); exceptionPreparedStatement.close(); } diff --git a/src/main/java/org/ngafid/flights/FlightWarning.java b/src/main/java/org/ngafid/flights/FlightWarning.java index 3c6536704..9908ac0e2 100644 --- a/src/main/java/org/ngafid/flights/FlightWarning.java +++ b/src/main/java/org/ngafid/flights/FlightWarning.java @@ -18,15 +18,22 @@ public class FlightWarning { private String filename; private String message; private String stackTrace; + + 
public static PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + return connection.prepareStatement("INSERT INTO flight_warnings (flight_id, message_id) VALUES (?, ?)"); + } + public void addBatch(Connection connection, PreparedStatement preparedStatement, int flightId) throws SQLException { + preparedStatement.setInt(1, flightId); + preparedStatement.setInt(2, ErrorMessage.getMessageId(connection, message)); + } + public static void insertWarning(Connection connection, int flightId, String message) throws SQLException { - PreparedStatement exceptionPreparedStatement = connection.prepareStatement("INSERT INTO flight_warnings (flight_id, message_id) VALUES (?, ?)"); - exceptionPreparedStatement.setInt(1, flightId); - exceptionPreparedStatement.setInt(2, ErrorMessage.getMessageId(connection, message)); - - LOG.info(exceptionPreparedStatement.toString()); + PreparedStatement exceptionPreparedStatement = createPreparedStatement(connection); - exceptionPreparedStatement.executeUpdate(); + new FlightWarning(message).addBatch(connection, exceptionPreparedStatement, flightId); + + exceptionPreparedStatement.executeBatch(); exceptionPreparedStatement.close(); } @@ -68,6 +75,10 @@ public static int getCount(Connection connection, int fleetId) throws SQLExcepti return count ; } + public FlightWarning(String message) { + this.message = message; + } + public FlightWarning(Connection connection, ResultSet resultSet) throws SQLException { filename = resultSet.getString(1); uploadId = resultSet.getInt(2); diff --git a/src/main/java/org/ngafid/flights/Itinerary.java b/src/main/java/org/ngafid/flights/Itinerary.java index 800e0c298..7f9357c40 100644 --- a/src/main/java/org/ngafid/flights/Itinerary.java +++ b/src/main/java/org/ngafid/flights/Itinerary.java @@ -320,22 +320,15 @@ public void update(String runway, int index, double altitudeAGL, double airportD public void selectBestRunway() { runway = null; int maxCount = 0; - 
System.err.println("Selecting runway:"); - System.err.println("min airport distance: " + minAirportDistance); - System.err.println("min runway distance: " + minRunwayDistance); - System.err.println("min altitude agl: " + minAltitude); - + for (String key : runwayCounts.keySet()) { int count = runwayCounts.get(key); - System.err.println("\trunway: " + key + ", count: " + count); if (count > maxCount) { runway = key; maxCount = count; } } - - System.err.println("selected runway '" + runway + "' with count: " + maxCount); } public boolean wasApproach() { @@ -358,45 +351,59 @@ public boolean wasApproach() { } } - public void updateDatabase(Connection connection, int fleetId, int flightId, int order) throws SQLException { + public static PreparedStatement createAirportPreparedStatement(Connection connection) throws SQLException { + return connection.prepareStatement("INSERT IGNORE INTO visited_airports SET fleet_id = ?, airport = ?"); + } + + public static PreparedStatement createRunwayPreparedStatement(Connection connection) throws SQLException { + return connection.prepareStatement("INSERT IGNORE INTO visited_runways SET fleet_id = ?, runway = ?"); + } + + public static PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + return connection.prepareStatement("INSERT INTO itinerary (flight_id, `order`, min_altitude_index, min_altitude, min_airport_distance, min_runway_distance, airport, runway, start_of_approach, end_of_approach, start_of_takeoff, end_of_takeoff, type) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"); + } + + public void addBatch(PreparedStatement itineraryStatement, PreparedStatement airportStatement, PreparedStatement runwayStatement, int fleetId, int flightId, int order) throws SQLException { this.order = order; + + airportStatement.setInt(1, fleetId); + airportStatement.setString(2, airport); + airportStatement.addBatch(); + + runwayStatement.setInt(1, fleetId); + runwayStatement.setString(2, airport + " - " + 
runway); + runwayStatement.addBatch(); + + itineraryStatement.setInt(1, flightId); + itineraryStatement.setInt(2, order); + itineraryStatement.setInt(3, minAltitudeIndex); + itineraryStatement.setDouble(4, minAltitude); + itineraryStatement.setDouble(5, minAirportDistance); + itineraryStatement.setDouble(6, minRunwayDistance); + itineraryStatement.setString(7, airport); + itineraryStatement.setString(8, runway); + itineraryStatement.setInt(9, startOfApproach); + itineraryStatement.setInt(10, endOfApproach); + itineraryStatement.setInt(11, startOfTakeoff); + itineraryStatement.setInt(12, endOfTakeoff); + itineraryStatement.setString(13, type); + itineraryStatement.addBatch(); + } + public void updateDatabase(Connection connection, int fleetId, int flightId, int order) throws SQLException { //insert new visited airports and runways -- will ignore if it already exists - PreparedStatement preparedStatement = connection.prepareStatement("INSERT IGNORE INTO visited_airports SET fleet_id = ?, airport = ?"); - preparedStatement.setInt(1, fleetId); - preparedStatement.setString(2, airport); - - System.err.println(preparedStatement); - preparedStatement.executeUpdate(); - preparedStatement.close(); - - preparedStatement = connection.prepareStatement("INSERT IGNORE INTO visited_runways SET fleet_id = ?, runway = ?"); - preparedStatement.setInt(1, fleetId); - preparedStatement.setString(2, airport + " - " + runway); - - System.err.println(preparedStatement); - preparedStatement.executeUpdate(); - preparedStatement.close(); - - //now insert the itinerary - preparedStatement = connection.prepareStatement("INSERT INTO itinerary (flight_id, `order`, min_altitude_index, min_altitude, min_airport_distance, min_runway_distance, airport, runway, start_of_approach, end_of_approach, start_of_takeoff, end_of_takeoff, type) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"); - preparedStatement.setInt(1, flightId); - preparedStatement.setInt(2, order); - preparedStatement.setInt(3, 
minAltitudeIndex); - preparedStatement.setDouble(4, minAltitude); - preparedStatement.setDouble(5, minAirportDistance); - preparedStatement.setDouble(6, minRunwayDistance); - preparedStatement.setString(7, airport); - preparedStatement.setString(8, runway); - preparedStatement.setInt(9, startOfApproach); - preparedStatement.setInt(10, endOfApproach); - preparedStatement.setInt(11, startOfTakeoff); - preparedStatement.setInt(12, endOfTakeoff); - preparedStatement.setString(13, type); - - System.err.println(preparedStatement); - preparedStatement.executeUpdate(); - preparedStatement.close(); + PreparedStatement statement = createPreparedStatement(connection); + PreparedStatement airportStatement = createAirportPreparedStatement(connection); + PreparedStatement runwayStatement = createRunwayPreparedStatement(connection); + + this.addBatch(statement, airportStatement, runwayStatement, fleetId, flightId, order); + + statement.executeBatch(); + statement.close(); + airportStatement.executeBatch(); + airportStatement.close(); + runwayStatement.executeBatch(); + runwayStatement.close(); } public String toString() { // TODO: add new columns to toString? 
diff --git a/src/main/java/org/ngafid/flights/NIFA.java b/src/main/java/org/ngafid/flights/NIFA.java index c37c9a57d..a7091090e 100644 --- a/src/main/java/org/ngafid/flights/NIFA.java +++ b/src/main/java/org/ngafid/flights/NIFA.java @@ -18,7 +18,7 @@ import java.util.logging.Logger; import java.util.stream.Stream; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; public class NIFA implements Serializable { diff --git a/src/main/java/org/ngafid/flights/calculations/Parameters.java b/src/main/java/org/ngafid/flights/Parameters.java similarity index 86% rename from src/main/java/org/ngafid/flights/calculations/Parameters.java rename to src/main/java/org/ngafid/flights/Parameters.java index 1a6f3b417..b97e3819b 100644 --- a/src/main/java/org/ngafid/flights/calculations/Parameters.java +++ b/src/main/java/org/ngafid/flights/Parameters.java @@ -8,7 +8,9 @@ * @author Josh Karns * @author Aidan LaBella */ -package org.ngafid.flights.calculations; +package org.ngafid.flights; + +import java.util.Set; public interface Parameters { /** @@ -24,7 +26,7 @@ public interface Parameters { public static final String PARAM_JSON_LONGITUDE = "lon"; public static final double STD_PRESS_INHG = 29.92; - public static final double COMP_CONV = (double) (Math.PI / 180); + public static final double COMP_CONV = Math.PI / 180.0; /** * Critical Values @@ -65,6 +67,7 @@ public interface Parameters { public static final String ROLL = "Roll"; public static final String ALT_AGL = "AltAGL"; public static final String ALT_MSL = "AltMSL"; + public static final String ALT_MSL_LAG_DIFF = "AltMSL Lag Diff"; public static final String ALT_B = "AltB"; public static final String AOA_SIMPLE = "AOASimple"; public static final String E1_RPM = "E1 RPM"; @@ -82,11 +85,27 @@ public interface Parameters { public static final String TOTAL_FUEL = "Total Fuel"; public static final String LCL_DATE = "Lcl Date"; public static final String LCL_TIME = "Lcl Time"; 
+ public static final String UTC_OFFSET = "UTCOfst"; public static final String LATITUDE = "Latitude"; public static final String LONGITUDE = "Longitude"; public static final String STALL_PROBABILITY = "PStall"; public static final String LOSS_OF_CONTROL_PROBABILITY = "PLOCI"; public static final String HDG_TRK_DIFF = "HDG TRK Diff"; + public static final String FUEL_QTY_LEFT = "FQtyL"; + public static final String FUEL_QTY_RIGHT = "FQtyR"; + + public static final String NEAREST_RUNWAY = "NearestRunway"; + public static final String RUNWAY_DISTANCE = "RunwayDistance"; + public static final String NEAREST_AIRPORT = "NearestAirport"; + public static final String AIRPORT_DISTANCE = "AirportDistance"; + + /** + * Units + **/ + public static final String UNIT_FT_AGL = "ft agl"; + public static final String UNIT_FT_MSL = "ft msl"; + public static final String UNIT_GALLONS = "gals"; + public static final String UNIT_DEG_F = "deg F"; /** * {@link Airframes} id's @@ -104,7 +123,7 @@ public interface Parameters { /** * Strings that represent the parameters used in the Stall Index calculation */ - public static final String [] LOCI_DEPENDENCIES = {HDG, ROLL}; + public static final String [] LOCI_DEPENDENCIES = {HDG, ROLL, TAS_FTMIN}; // // use these for a real true airspeed (Shelbys method) /*GND_SPD, WIND_SPEED, WIND_DIRECTION};*/ public static final String [] SPIN_DEPENDENCIES = {IAS, VSPD_CALCULATED, NORM_AC, LAT_AC, ALT_AGL}; diff --git a/src/main/java/org/ngafid/flights/StringTimeSeries.java b/src/main/java/org/ngafid/flights/StringTimeSeries.java index b5e8c301a..4ee2fe75d 100644 --- a/src/main/java/org/ngafid/flights/StringTimeSeries.java +++ b/src/main/java/org/ngafid/flights/StringTimeSeries.java @@ -33,42 +33,77 @@ public class StringTimeSeries { private static final Logger LOG = Logger.getLogger(StringTimeSeries.class.getName()); private static final int COMPRESSION_LEVEL = Deflater.DEFAULT_COMPRESSION; + private static final int SIZE_HINT = 256; - private int 
nameId; + private int nameId = -1; private String name; - private int typeId; + private int typeId = -1; private String dataType; private ArrayList timeSeries; private int validCount; - public StringTimeSeries(Connection connection, String name, String dataType) throws SQLException { + public StringTimeSeries(String name, String dataType, int sizeHint) { this.name = name; - this.nameId = SeriesNames.getStringNameId(connection, name); this.dataType = dataType; - this.typeId = TypeNames.getId(connection, dataType); - this.timeSeries = new ArrayList(); + this.timeSeries = new ArrayList(sizeHint); validCount = 0; } + public StringTimeSeries(String name, String dataType) { + this(name, dataType, SIZE_HINT); + } + + public StringTimeSeries(Connection connection, String name, String dataType) throws SQLException { + this(name, dataType, SIZE_HINT); + setNameId(connection); + setTypeId(connection); + } + public StringTimeSeries(Connection connection, String name, String dataType, ArrayList timeSeries) throws SQLException { + this(name, dataType, timeSeries); + setNameId(connection); + setTypeId(connection); + } + + public StringTimeSeries(String name, String dataType, ArrayList timeSeries) { this.name = name; - this.nameId = SeriesNames.getStringNameId(connection, name); this.dataType = dataType; - this.typeId = TypeNames.getId(connection, dataType); this.timeSeries = timeSeries; - validCount = 0; for (int i = 0; i < timeSeries.size(); i++) { if (!timeSeries.get(i).equals("")) { validCount++; } } - } + } + + // Added to get results for StringTimeSeries + public StringTimeSeries(Connection connection, ResultSet resultSet) throws SQLException, IOException, ClassNotFoundException { + + this.nameId = resultSet.getInt(1); + this.name = SeriesNames.getStringName(connection, this.nameId); + //System.out.println("name: " + name); + + this.typeId = resultSet.getInt(2); + this.dataType = TypeNames.getName(connection, this.typeId); + //System.out.println("data type: " + dataType); + 
int length = resultSet.getInt(3); + //System.out.println("length: " + length); + validCount = resultSet.getInt(4); + //System.out.println("valid count: " + validCount); - // Added to get StringTimeSeries + Blob values = resultSet.getBlob(5); + byte[] bytes = values.getBytes(1, (int)values.length()); + //System.out.println("values.length: " + (int)values.length()); + values.free(); + + // This unchecked caste warning can be fixed but it shouldnt be necessary if we only but ArrayList objects into the StringTimeSeries cache. + this.timeSeries = (ArrayList) Compression.inflateObject(bytes); + } + public static StringTimeSeries getStringTimeSeries(Connection connection, int flightId, String name) throws SQLException { PreparedStatement query = connection.prepareStatement("SELECT ss.name_id, ss.data_type_id, ss.length, ss.valid_length, ss.data FROM string_series AS ss INNER JOIN string_series_names AS ssn ON ssn.id = ss.name_id WHERE ssn.name = ? AND ss.flight_id = ?"); @@ -99,32 +134,14 @@ public static StringTimeSeries getStringTimeSeries(Connection connection, int fl return null; } } - - // Added to get results for StringTimeSeries - public StringTimeSeries(Connection connection, ResultSet resultSet) throws SQLException, IOException, ClassNotFoundException { - - this.nameId = resultSet.getInt(1); - this.name = SeriesNames.getStringName(connection, this.nameId); - //System.out.println("name: " + name); - - this.typeId = resultSet.getInt(2); - this.dataType = TypeNames.getName(connection, this.typeId); - //System.out.println("data type: " + dataType); - - int length = resultSet.getInt(3); - //System.out.println("length: " + length); - validCount = resultSet.getInt(4); - //System.out.println("valid count: " + validCount); - - Blob values = resultSet.getBlob(5); - byte[] bytes = values.getBytes(1, (int)values.length()); - //System.out.println("values.length: " + (int)values.length()); - values.free(); - - // This unchecked caste warning can be fixed but it shouldnt be 
necessary if we only but ArrayList objects into the StringTimeSeries cache. - this.timeSeries = (ArrayList) Compression.inflateObject(bytes); + + private void setNameId(Connection connection) throws SQLException { + this.nameId = SeriesNames.getStringNameId(connection, name); } + private void setTypeId(Connection connection) throws SQLException { + this.typeId = TypeNames.getId(connection, dataType); + } public String toString() { return "[StringTimeSeries '" + name + "' size: " + timeSeries.size() + ", validCount: " + validCount + "]"; } @@ -193,26 +210,38 @@ public int validCount() { return validCount; } - public void updateDatabase(Connection connection, int flightId) { - //System.out.println("Updating database for " + this); + public static PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + return connection.prepareStatement("INSERT INTO string_series (flight_id, name_id, data_type_id, length, valid_length, data) VALUES (?, ?, ?, ?, ?, ?)"); + } - try { - PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO string_series (flight_id, name_id, data_type_id, length, valid_length, data) VALUES (?, ?, ?, ?, ?, ?)"); + public void addBatch(Connection connection, PreparedStatement preparedStatement, int flightId) throws SQLException, IOException { + if (nameId == -1) + setNameId(connection); + if (typeId == -1) + setTypeId(connection); + + preparedStatement.setInt(1, flightId); + preparedStatement.setInt(2, nameId); + preparedStatement.setInt(3, typeId); + preparedStatement.setInt(4, timeSeries.size()); + preparedStatement.setInt(5, validCount); + + // To get rid of extra bytes at the end of the buffer + byte[] compressed = Compression.compressObject(this.timeSeries); + Blob seriesBlob = new SerialBlob(compressed); + preparedStatement.setBlob(6, seriesBlob); + + preparedStatement.addBatch(); + } - preparedStatement.setInt(1, flightId); - preparedStatement.setInt(2, nameId); - preparedStatement.setInt(3, 
typeId); - preparedStatement.setInt(4, timeSeries.size()); - preparedStatement.setInt(5, validCount); + public void updateDatabase(Connection connection, int flightId) { + try { + PreparedStatement preparedStatement = createPreparedStatement(connection); - // To get rid of extra bytes at the end of the buffer - byte[] compressed = Compression.compressObject(this.timeSeries); - Blob seriesBlob = new SerialBlob(compressed); + this.addBatch(connection, preparedStatement, flightId); - preparedStatement.setBlob(6, seriesBlob); preparedStatement.executeUpdate(); preparedStatement.close(); - seriesBlob.free(); } catch (SQLException | IOException e) { e.printStackTrace(); @@ -228,5 +257,14 @@ public StringTimeSeries subSeries(Connection connection, int from, int until) th return newSeries; } + + public StringTimeSeries subSeries(int from, int until) throws SQLException { + StringTimeSeries newSeries = new StringTimeSeries(name, dataType); + + for (int i = from; i < until; i++) + newSeries.add(this.timeSeries.get(i)); + + return newSeries; + } } diff --git a/src/main/java/org/ngafid/flights/calculations/CalculatedDoubleTimeSeries.java b/src/main/java/org/ngafid/flights/calculations/CalculatedDoubleTimeSeries.java index a6eccf41a..96ddd5699 100644 --- a/src/main/java/org/ngafid/flights/calculations/CalculatedDoubleTimeSeries.java +++ b/src/main/java/org/ngafid/flights/calculations/CalculatedDoubleTimeSeries.java @@ -13,6 +13,7 @@ public class CalculatedDoubleTimeSeries extends DoubleTimeSeries { private final Flight flight; + private final boolean cache; /** * Default Constructor @@ -23,7 +24,14 @@ public class CalculatedDoubleTimeSeries extends DoubleTimeSeries { * @param flight the flight instance the timeseries is being calcualted for */ public CalculatedDoubleTimeSeries(Connection connection, String name, String dataType, boolean cache, Flight flight) throws SQLException { - super(connection, name, dataType, cache); + super(connection, name, dataType); + this.flight = 
flight; + this.cache = cache; + } + + public CalculatedDoubleTimeSeries(String name, String dataType, boolean cache, Flight flight) throws SQLException { + super(name, dataType); + this.cache = cache; this.flight = flight; } @@ -39,6 +47,7 @@ public void create(Calculation calculation) throws IOException, SQLException { super.add(calculation.calculate(i)); } - flight.addDoubleTimeSeries(super.getName(), this); + if (cache) + flight.addDoubleTimeSeries(super.getName(), this); } } diff --git a/src/main/java/org/ngafid/flights/calculations/HeadingTrackDiff.java b/src/main/java/org/ngafid/flights/calculations/HeadingTrackDiff.java index e403b9f0e..a8c25efeb 100644 --- a/src/main/java/org/ngafid/flights/calculations/HeadingTrackDiff.java +++ b/src/main/java/org/ngafid/flights/calculations/HeadingTrackDiff.java @@ -8,7 +8,7 @@ import java.util.List; import java.util.logging.Logger; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; import org.apache.commons.cli.*; import org.ngafid.Database; diff --git a/src/main/java/org/ngafid/flights/calculations/TurnToFinal.java b/src/main/java/org/ngafid/flights/calculations/TurnToFinal.java index 4bce0456f..77140936d 100644 --- a/src/main/java/org/ngafid/flights/calculations/TurnToFinal.java +++ b/src/main/java/org/ngafid/flights/calculations/TurnToFinal.java @@ -18,7 +18,7 @@ import org.ngafid.flights.*; -import static org.ngafid.flights.calculations.Parameters.*; //eliminates the need to use Parameters. +import static org.ngafid.flights.Parameters.*; //eliminates the need to use Parameters. 
public class TurnToFinal implements Serializable { // NGAFIDTTF0000L diff --git a/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java b/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java index 11cd1672f..2d6ab1c7a 100644 --- a/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java +++ b/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java @@ -1,11 +1,13 @@ package org.ngafid.flights.calculations; import org.ngafid.flights.*; +import org.ngafid.flights.DoubleTimeSeries.TimeStepCalculation; + import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; /** * This class is an instance of a {@link Calculation} that gets a derived VSI using linear regression @@ -13,7 +15,7 @@ * @author Aidan LaBella @ RIT CS */ -public class VSPDRegression implements Calculation { +public class VSPDRegression implements TimeStepCalculation, Calculation { private final DoubleTimeSeries altB; private final DoubleTimeSeries altBLag; private final DoubleTimeSeries altBLead; @@ -25,10 +27,14 @@ public class VSPDRegression implements Calculation { * * @param flight the {@link Flight} to perform a regression on */ - public VSPDRegression(Connection connection, Flight flight) throws SQLException, IOException { - this.altB = flight.getDoubleTimeSeries(ALT_B); - this.altBLag = altB.lag(connection, VSI_LAG_DIFF); - this.altBLead = altB.lead(connection, VSI_LAG_DIFF); + public VSPDRegression(DoubleTimeSeries altB) { + this.altB = altB; + this.altBLag = altB.lag(VSI_LAG_DIFF); + this.altBLead = altB.lead(VSI_LAG_DIFF); + } + + public double compute(int index) { + return calculate(index); } /** diff --git a/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java b/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java new file mode 100644 index 000000000..b32e17266 --- /dev/null +++ 
b/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java @@ -0,0 +1,247 @@ +package org.ngafid.flights.process; + +import com.opencsv.CSVReader; +import com.opencsv.exceptions.CsvException; +import org.ngafid.flights.*; + +import java.sql.Connection; +import java.io.*; +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.Stream; + +/** + * Handles parsing of CSV files + * + * @author Aaron Chan + */ + +public class CSVFileProcessor extends FlightFileProcessor { + private static final Logger LOG = Logger.getLogger(CSVFileProcessor.class.getName()); + private final List headers; + private final List dataTypes; + private final FlightMeta meta = new FlightMeta(); + + private final Upload upload; + + public CSVFileProcessor(Connection connection, InputStream stream, String filename, Upload upload) { + super(connection, stream, filename); + this.upload = upload; + + + headers = new ArrayList<>(); + dataTypes = new ArrayList<>(); + + meta.airframeType = "Fixed Wing"; // Fixed Wing By default + meta.filename = filename; + } + + @Override + public Stream parse() throws FlightProcessingException { + Map doubleTimeSeries = new ConcurrentHashMap<>(); + Map stringTimeSeries = new ConcurrentHashMap<>(); + + List csvValues = null; + + try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(super.stream, StandardCharsets.UTF_8)); CSVReader csvReader = new CSVReader(bufferedReader)) { + String fileInformation = getFlightInfo(bufferedReader); // Will read a line + + if (meta.airframeName != null && meta.airframeName.equals("ScanEagle")) { + scanEagleParsing(fileInformation); // TODO: Handle ScanEagle data + } else { + processFileInormation(fileInformation); + bufferedReader.read(); // Skip first char (#) + 
Arrays.stream(csvReader.readNext()) + .map(String::strip) + .forEachOrdered(dataTypes::add);; + Arrays.stream(csvReader.readNext()) + .map(String::strip) + .forEachOrdered(headers::add);; + } + + updateAirframe(); + + ArrayList> columns = new ArrayList<>(); + String[] firstRow = csvReader.peek(); + for (int i = 0; i < firstRow.length; i++) + columns.add(new ArrayList<>()); + + String[] row = null; + while ((row = csvReader.readNext()) != null && row.length == firstRow.length) + for (int i = 0; i < row.length; i++) + columns.get(i).add(row[i].trim()); + + final int granulatiry = 8; + IntStream.range(0, columns.size() / granulatiry) + .parallel() + .forEach(g -> { + var max = Math.max(g * granulatiry + granulatiry, columns.size()); + for (int i = g * granulatiry; i < max; i++) { + var column = columns.get(i); + var name = headers.get(i); + var dataType = dataTypes.get(i); + + try { + Double.parseDouble(column.get(0)); + doubleTimeSeries.put(name, new DoubleTimeSeries(name, dataType, column)); + } catch (NumberFormatException e) { + stringTimeSeries.put(name, new StringTimeSeries(name, dataType, column)); + } + } + }); + } catch (IOException | FatalFlightFileException | CsvException e) { + throw new FlightProcessingException(e); + } + + FlightBuilder builder = new FlightBuilder(meta, doubleTimeSeries, stringTimeSeries); + + return Stream.of(builder); + } + + + /** + * Updates the airframe type if airframe name does not belong to fixed wing + */ + private void updateAirframe() { + if (meta.airframeName.equals("R44") || meta.airframeName.equals("Robinson R44")) { + meta.airframeName = "R44"; + meta.airframeType = "Rotorcraft"; + } + } + + /** + * Gets the flight information from the first line of the file + * @param reader BufferedReader for reading the first line + * @return + * @throws FatalFlightFileException + * @throws IOException + */ + private String getFlightInfo(BufferedReader reader) throws FatalFlightFileException, IOException { + String fileInformation = 
reader.readLine(); + + if (fileInformation == null || fileInformation.trim().length() == 0) { + throw new FatalFlightFileException("The flight file was empty."); + } + + if (fileInformation.charAt(0) != '#' && fileInformation.charAt(0) != '{') { + if (fileInformation.startsWith("DID_")) { + LOG.info("CAME FROM A SCANEAGLE! CAN CALCULATE SUGGESTED TAIL/SYSTEM ID FROM FILENAME"); + + meta.airframeName = "ScanEagle"; + meta.airframeType = "UAS Fixed Wing"; + } else { + throw new FatalFlightFileException("First line of the flight file should begin with a '#' and contain flight recorder information."); + } + } + + return fileInformation; + } + + private void processFileInormation(String fileInformation) throws FatalFlightFileException { + String[] infoParts = fileInformation.split(","); + try { + for (int i = 1; i < infoParts.length; i++) { + //process everything else (G1000 data) + if (infoParts[i].trim().length() == 0) continue; + + //System.err.println("splitting key/value: '" + infoParts[i] + "'"); + String subParts[] = infoParts[i].trim().split("="); + String key = subParts[0]; + String value = subParts[1]; + + //System.err.println("key: '" + key + "'"); + //System.err.println("value: '" + value + "'"); + + // TODO: Create some sort of automatic mapping for synonomous airframe names. 
+ if (key.equals("airframe_name")) { + meta.airframeName = value.substring(1, value.length() - 1); + + //throw an error for 'Unknown Aircraft' + if (meta.airframeName.equals("Unknown Aircraft")) { + throw new FatalFlightFileException("Flight airframe name was 'Unknown Aircraft', please fix and re-upload so the flight can be properly identified and processed."); + } + + + if (meta.airframeName.equals("Diamond DA 40")) { + meta.airframeName = "Diamond DA40"; + } else if ((meta.airframeName.equals("Garmin Flight Display") || meta.airframeName.equals("Robinson R44 Raven I")) && upload.getFleetId() == 1 /*This is a hack for UND who has their airframe names set up incorrectly for their helicopters*/) { + meta.airframeName = "R44"; + } else if (meta.airframeName.equals("Garmin Flight Display")) { + throw new FatalFlightFileException("Flight airframe name was 'Garmin Flight Display' which does not specify what airframe type the flight was, please fix and re-upload so the flight can be properly identified and processed."); + + } + + if (meta.airframeName.equals("Cirrus SR22 (3600 GW)")) { + meta.airframeName = "Cirrus SR22"; + } + + if (Airframes.FIXED_WING_AIRFRAMES.contains(meta.airframeName) || meta.airframeName.contains("Garmin")) { + meta.airframeType = "Fixed Wing"; + } else if (meta.airframeName.equals("R44") || meta.airframeName.equals("Robinson R44")) { + meta.airframeName = "R44"; + meta.airframeType = "Rotorcraft"; + } else { + System.err.println("Could not import flight because the aircraft type was unknown for the following airframe name: '" + meta.airframeName + "'"); + System.err.println("Please add this to the the `airframe_type` table in the database and update this method."); + System.exit(1); + } + + } else if (key.equals("system_id")) { + meta.systemId = value.substring(1, value.length() - 1); + } + } + } catch (Exception e) { + //LOG.info("parsting flight information threw exception: " + e); + //e.printStackTrace(); + throw new 
FatalFlightFileException("Flight information line was not properly formed with key value pairs.", e); + } + } + + + /** + * Parses for ScanEagle flight data + * @param fileInformation First line of the file + */ + private void scanEagleParsing(String fileInformation) { + + //need a custom method to process ScanEagle data because the column + //names are different and there is no header info + scanEagleSetTailAndID(); + scanEagleHeaders(fileInformation); + } + + /** + * Handles setting the tail number and system id for ScanEagle data + */ + private void scanEagleSetTailAndID() { + String[] filenameParts = filename.split("_"); + meta.startDateTime = filenameParts[0]; + meta.endDateTime = meta.startDateTime; + LOG.log(Level.INFO, "start date: '{0}'", meta.startDateTime); + LOG.log(Level.INFO, "end date: '{0}'", meta.startDateTime); + + //UND doesn't have the systemId for UAS anywhere in the filename or file (sigh) + meta.suggestedTailNumber = "N" + filenameParts[1] + "ND"; + meta.systemId = meta.suggestedTailNumber; + + LOG.log(Level.INFO, "suggested tail number: '{0}'", meta.suggestedTailNumber); + LOG.log(Level.INFO, "system id: '{0}'", meta.systemId); + } + + + // TODO: Figure out ScanEagle data + private void scanEagleHeaders(String fileInformation) { + String headersLine = fileInformation; + headers.addAll(Arrays.asList(headersLine.split("\\,", -1))); + headers.replaceAll(String::trim); + //scan eagle files have no data types, set all to "" + for (int i = 0; i < headers.size(); i++) { + dataTypes.add("none"); + } + } +} diff --git a/src/main/java/org/ngafid/flights/process/DATFileProcessor.java b/src/main/java/org/ngafid/flights/process/DATFileProcessor.java new file mode 100644 index 000000000..29fea4dba --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/DATFileProcessor.java @@ -0,0 +1,741 @@ +package org.ngafid.flights.process; + +import com.opencsv.CSVReader; +import com.opencsv.exceptions.CsvValidationException; +import org.ngafid.flights.*; + 
+import java.io.*; +import java.net.URI; +import java.nio.file.*; +import java.sql.Connection; +import java.sql.SQLException; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Stream; +import java.util.zip.ZipFile; + +import static org.ngafid.common.TimeUtils.addMilliseconds; + +import Files.*; + + +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.Flight; +import org.ngafid.flights.FlightAlreadyExistsException; +import org.ngafid.flights.MalformedFlightFileException; + +/** + * Parses DAT files from DJI flights after converting them to CSV + * + * @author Aaron Chan + */ + +public class DATFileProcessor extends FlightFileProcessor { + private static final Logger LOG = Logger.getLogger(DATFileProcessor.class.getName()); + + private static final Set STRING_COLS = new HashSet<>(List.of(new String[]{"flyCState", "flycCommand", "flightAction", + "nonGPSCause", "connectedToRC", "Battery:lowVoltage", "RC:ModeSwitch", "gpsUsed", "visionUsed", "IMUEX(0):err"})); + + private final ZipFile zipFile; + + public DATFileProcessor(Connection connection, InputStream stream, String filename, ZipFile file) { + super(connection, stream, filename); + this.zipFile = file; + } + + @Override + public Stream parse() throws FlightProcessingException { + try { + convertAndInsert(); + List inputStreams = duplicateInputStream(stream, 2); + Map indexedCols = new HashMap<>(); + Map doubleTimeSeriesMap = new HashMap<>(); + Map stringTimeSeriesMap = new HashMap<>(); + Map attributeMap = getAttributeMap(inputStreams.remove(inputStreams.size() - 1)); + + if (!attributeMap.containsKey("mcID(SN)")) { + throw new FlightProcessingException(new FatalFlightFileException("No DJI serial number provided in binary.")); + } + + try (CSVReader reader = new CSVReader(new BufferedReader(new InputStreamReader(inputStreams.remove(inputStreams.size() - 
1))))) { + processCols(reader.readNext(), indexedCols, doubleTimeSeriesMap, stringTimeSeriesMap); + + readData(reader, doubleTimeSeriesMap, stringTimeSeriesMap, indexedCols); + calculateLatLonGPS(doubleTimeSeriesMap); + + if (attributeMap.containsKey("dateTime")) { + calculateDateTime(doubleTimeSeriesMap, stringTimeSeriesMap, attributeMap.get("dateTime")); + String dateTimeStr = findStartDateTime(doubleTimeSeriesMap); + + if (dateTimeStr != null) { + calculateDateTime(doubleTimeSeriesMap, stringTimeSeriesMap, dateTimeStr); + } + } + } catch (CsvValidationException | FatalFlightFileException | IOException e) { + throw new FlightProcessingException(e); + } catch (ParseException e) { + e.printStackTrace(); + } + + dropBlankCols(doubleTimeSeriesMap, stringTimeSeriesMap); + doubleTimeSeriesMap.put("AltAGL", new DoubleTimeSeries("AltAGL", "ft")); // TODO: Should this be done in proc? + + FlightMeta meta = new FlightMeta(); + meta.setFilename(filename); + meta.setAirframeType("UAS Rotorcraft"); + meta.setAirframeName("DJI " + attributeMap.get("ACType")); + meta.setSystemId(attributeMap.get("mcID(SN)")); + + + return Stream.of(new FlightBuilder[]{new FlightBuilder(meta, doubleTimeSeriesMap, stringTimeSeriesMap)}); + } catch (NotDatFile | FileEnd | IOException e) { + throw new FlightProcessingException(e); + } + } + + // TODO: Validate the conversion works still. 
Also maybe figure out another way of doing this since var args forced into FFP + + /** + * Converts the DAT file to CSV and inserts it into the zip file + * @throws NotDatFile + * @throws IOException + * @throws FileEnd + */ + private void convertAndInsert() throws NotDatFile, IOException, FileEnd { + String zipName = filename.substring(filename.lastIndexOf("/")); + String parentFolder = zipFile.getName().substring(0, zipFile.getName().lastIndexOf("/")); + File tempExtractedFile = new File(parentFolder, zipName); + + System.out.println("Extracting to " + tempExtractedFile.getAbsolutePath()); + try (InputStream inputStream = zipFile.getInputStream(zipFile.getEntry(filename)); FileOutputStream fileOutputStream = new FileOutputStream(tempExtractedFile)) { + int len; + byte[] buffer = new byte[1024]; + + while ((len = inputStream.read(buffer)) > 0) { + fileOutputStream.write(buffer, 0, len); + } + } + + convertDATFile(tempExtractedFile); + File processedCSVFile = new File(tempExtractedFile.getAbsolutePath() + ".csv"); + placeInZip(processedCSVFile.getAbsolutePath(), zipFile.getName().substring(zipFile.getName().lastIndexOf("/") + 1)); + } + + /** + * Places a file into the given zip file + * @param file - File to place + * @param zipFileName - Name of the zip file + * @throws IOException + */ + private static void placeInZip(String file, String zipFileName) throws IOException { + LOG.info("Placing " + file + " in zip"); + + Map zipENV = new HashMap<>(); + zipENV.put("create", "true"); + + Path csvFilePath = Paths.get(file); + Path zipFilePath = Paths.get(csvFilePath.getParent() + "/" + zipFileName); + + URI zipURI = URI.create("jar:" + zipFilePath.toUri()); + try (FileSystem fileSystem = FileSystems.newFileSystem(zipURI, zipENV)) { + Path zipFileSystemPath = fileSystem.getPath(file.substring(file.lastIndexOf("/") + 1)); + Files.write(zipFileSystemPath, Files.readAllBytes(csvFilePath), StandardOpenOption.CREATE); + } + } + + /** + * Converts the DAT file to CSV + * 
@param file - File to convert + * @return - CSV converted file + * @throws NotDatFile + * @throws IOException + * @throws FileEnd + */ + private static File convertDATFile(File file) throws NotDatFile, IOException, FileEnd { + LOG.info("Converting to CSV: " + file.getAbsolutePath()); + DatFile datFile = DatFile.createDatFile(file.getAbsolutePath()); + datFile.reset(); + datFile.preAnalyze(); + + ConvertDat convertDat = datFile.createConVertDat(); + + String csvFilename = file.getAbsolutePath() + ".csv"; + convertDat.csvWriter = new CsvWriter(csvFilename); + convertDat.createRecordParsers(); + + datFile.reset(); + AnalyzeDatResults results = convertDat.analyze(false); + LOG.info(datFile.getFile().getAbsolutePath()); + + return datFile.getFile(); + } + + /** + * Reads the data from the converted CSV file + * @param reader - CSV reader + * @param doubleTimeSeriesMap - Map of double time series data + * @param stringTimeSeriesMap - Map of string time series data + * @param indexedCols - Map of indexed columns + * @throws IOException + * @throws CsvValidationException + */ + private static void readData(CSVReader reader, Map doubleTimeSeriesMap, + Map stringTimeSeriesMap, Map indexedCols) throws IOException, CsvValidationException { + String[] line; + + while ((line = reader.readNext()) != null) { + for (int i = 0; i < line.length; i++) { + + String column = indexedCols.get(i); + + try { + if (doubleTimeSeriesMap.containsKey(column)) { + DoubleTimeSeries colTimeSeries = doubleTimeSeriesMap.get(column); + double value = !line[i].equals("") ? 
Double.parseDouble(line[i]) : Double.NaN; + colTimeSeries.add(value); + } else { + StringTimeSeries colTimeSeries = stringTimeSeriesMap.get(column); + colTimeSeries.add(line[i]); + } + } catch (NullPointerException e) { + LOG.log(Level.WARNING, "Column {0} not found in time series map", column); + } catch (NumberFormatException e) { + LOG.log(Level.WARNING, "Could not parse value {0} as double", line[i]); + } + } + } + } + + /** + * Calculates GPS data from the given time series map + * @param doubleTimeSeriesMap - Map of double time series data + * @throws FatalFlightFileException + */ + private static void calculateLatLonGPS(Map doubleTimeSeriesMap) throws FatalFlightFileException { + DoubleTimeSeries lonRad = doubleTimeSeriesMap.get("GPS(0):Long"); + DoubleTimeSeries latRad = doubleTimeSeriesMap.get("GPS(0):Lat"); + DoubleTimeSeries altMSL = doubleTimeSeriesMap.get("GPS(0):heightMSL"); + + if (lonRad == null || latRad == null) { + LOG.log(Level.WARNING, "Could not find GPS(0):Long or GPS(0):Lat in time series map"); + throw new FatalFlightFileException("No GPS data found in binary."); + } + + DoubleTimeSeries longDeg = new DoubleTimeSeries("Longitude", "degrees"); + DoubleTimeSeries latDeg = new DoubleTimeSeries("Latitude", "degrees"); + DoubleTimeSeries msl = new DoubleTimeSeries("AltMSL", "ft"); + + for (int i = 0; i < lonRad.size(); i++) { + longDeg.add(lonRad.get(i)); + } + + for (int i = 0; i < lonRad.size(); i++) { + latDeg.add(latRad.get(i)); + } + + for (int i = 0; i < altMSL.size(); i++) { + msl.add(altMSL.get(i)); + } + + doubleTimeSeriesMap.put("Longitude", longDeg); + doubleTimeSeriesMap.put("Latitude", latDeg); + doubleTimeSeriesMap.put("AltMSL", altMSL); + } + + /** + * Calculates the local date and time from the given time series map + * @param doubleTimeSeriesMap - Map of double time series data + * @param stringTimeSeriesMap - Map of string time series data + * @param dateTimeStr - Format of the date and time + * @throws ParseException + */ + 
private static void calculateDateTime(Map doubleTimeSeriesMap, Map stringTimeSeriesMap, String dateTimeStr) throws ParseException { + StringTimeSeries localDateSeries = new StringTimeSeries("Lcl Date", "yyyy-mm-dd"); + StringTimeSeries localTimeSeries = new StringTimeSeries("Lcl Time", "hh:mm:ss"); + StringTimeSeries utcOfstSeries = new StringTimeSeries("UTCOfst", "hh:mm"); // Always 0 + DoubleTimeSeries seconds = doubleTimeSeriesMap.get("offsetTime"); + + SimpleDateFormat lclDateFormat = new SimpleDateFormat("yyyy-MM-dd"); + SimpleDateFormat lclTimeFormat = new SimpleDateFormat("HH:mm:ss"); + + String[] dateTime = dateTimeStr.split(" "); + String date = dateTime[0]; + + if (date.split("-")[1].length() == 1) { + date = date.substring(0, 5) + "0" + date.substring(5); + } + + if (date.split("-")[2].length() == 1) { + date = date.substring(0, 8) + "0" + date.substring(8); + } + + String time = dateTime[1]; + + Date parsedDate = (new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")).parse(date + " " + time); + for (int i = 0; i < seconds.size(); i++) { + int millis = (int) (seconds.get(i) * 1000); + Date newDate = addMilliseconds(parsedDate, millis); + + localDateSeries.add(lclDateFormat.format(newDate)); + localTimeSeries.add(lclTimeFormat.format(newDate)); + utcOfstSeries.add("+00:00"); + } + + stringTimeSeriesMap.put("Lcl Date", localDateSeries); + stringTimeSeriesMap.put("Lcl Time", localTimeSeries); + stringTimeSeriesMap.put("UTCOfst", utcOfstSeries); + } + + + /** + * Determine the start date and time from the given time series map + * @param doubleTimeSeriesMap - Map of double time series data + * @return + */ + private static String findStartDateTime(Map doubleTimeSeriesMap) { + DoubleTimeSeries dateSeries = doubleTimeSeriesMap.get("GPS(0):Date"); + DoubleTimeSeries timeSeries = doubleTimeSeriesMap.get("GPS(0):Time"); + DoubleTimeSeries offsetTime = doubleTimeSeriesMap.get("offsetTime"); + + if (dateSeries == null || timeSeries == null) { + LOG.log(Level.WARNING, "Could 
not find GPS(0):Date or GPS(0):Time in time series map"); + return null; + } + + int colCount = 0; + while (colCount < dateSeries.size() && colCount < timeSeries.size()) { + int date = (int) dateSeries.get(colCount); // Date is an integer in the format YYYYMMDD + int time = (int) timeSeries.get(colCount); + + + if (!Double.isNaN(date) && !Double.isNaN(time) && date != 0 && time != 0) { + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss"); + + String year = String.valueOf(date).substring(0, 4); + String month = String.valueOf(date).substring(4, 6); + String day = String.valueOf(date).substring(6, 8); + + String hour = String.valueOf(time).substring(0, 2); + String minute = String.valueOf(time).substring(2, 4); + String second = String.valueOf(time).substring(4, 6); + + try { + Date parsedDate = dateFormat.parse(year + month + day + hour + minute + second); + int currentOffset = (int) (offsetTime.get(colCount) * 1000); + Date newDate = addMilliseconds(parsedDate, -currentOffset); + + return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(newDate); + } catch (ParseException e) { + LOG.log(Level.WARNING, "Could not parse date {0} and time {1} as date", new Object[]{date, time}); + return null; + } + } + + colCount++; + } + + return null; + } + + /** + * Duplicate an input stream a given number of times + * @param inputStream - Input Stream to duplicate + * @param copies - Number of copies to make + * @return - List of input streams + * @throws IOException + */ + private static List duplicateInputStream(InputStream inputStream, int copies) throws IOException { + List inputStreams = new ArrayList<>(); + List outputStreams = new ArrayList<>(); + + for (int i = 0; i < copies; i++) { + outputStreams.add(new ByteArrayOutputStream()); + } + + byte[] buffer = new byte[1024]; + while (inputStream.read(buffer) > -1) { + for (OutputStream outputStream : outputStreams) { + outputStream.write(buffer); + } + } + + for (OutputStream outputStream : outputStreams) 
{ + outputStream.flush(); + inputStreams.add(new ByteArrayInputStream(((ByteArrayOutputStream) outputStream).toByteArray())); + } + + return inputStreams; + } + + /** + * Gets the attributes of the flight + * @param stream - Input stream of flight file + * @return + */ + private static Map getAttributeMap(InputStream stream) { + Map attributeMap = new HashMap<>(); + try (CSVReader reader = new CSVReader(new BufferedReader(new InputStreamReader(stream)))) { + String[] line; + while ((line = reader.readNext()) != null) { + if (line[line.length - 1].contains("|")) { + String[] split = line[line.length - 1].split("\\|"); + attributeMap.put(split[0], split[1]); + } + } + } catch (IOException | CsvValidationException e) { + e.printStackTrace(); + } + + LOG.log(Level.INFO, "Attribute Map: {0}", attributeMap); + + return attributeMap; + } + + /** + * Drop all columns that have no data + * @param doubleTimeSeriesMap - Map of double time series data + * @param stringTimeSeriesMap - Map of string time series data + */ + private static void dropBlankCols(Map doubleTimeSeriesMap, Map stringTimeSeriesMap) { + for (String key : doubleTimeSeriesMap.keySet()) { + if (doubleTimeSeriesMap.get(key).size() == 0) { + doubleTimeSeriesMap.remove(key); + } + } + + for (String key : stringTimeSeriesMap.keySet()) { + if (stringTimeSeriesMap.get(key).size() == 0) { + stringTimeSeriesMap.remove(key); + } + } + } + + /** + * Initialize columns based on flight data + * @param cols + * @param indexedCols + * @param doubleTimeSeriesMap + * @param stringTimeSeriesMap + */ + private static void processCols(String[] cols, Map indexedCols, Map doubleTimeSeriesMap, Map stringTimeSeriesMap) { + int i = 0; + for (String col : cols) { + indexedCols.put(i++, col); + String category = col.split(":")[0]; + + if (category.contains("(")) { + category = category.substring(0, category.indexOf("(")); + } + + switch (category) { + case "IMU_ATTI": + case "IMUEX": + handleIMUDataType(col, doubleTimeSeriesMap, 
stringTimeSeriesMap); + break; + case "GPS": + handleGPSDataType(col, doubleTimeSeriesMap, stringTimeSeriesMap); + break; + + case "Battery": + case "SMART_BATT": + handleBatteryDataType(col, doubleTimeSeriesMap); + break; + + case "Motor": + handleMotorDataType(col, doubleTimeSeriesMap, stringTimeSeriesMap); + break; + + case "RC": + handleRCDataType(col, doubleTimeSeriesMap, stringTimeSeriesMap); + break; + + case "AirComp": + handleAirCompDataType(col, doubleTimeSeriesMap); + break; + + case "General": + doubleTimeSeriesMap.put(col, new DoubleTimeSeries(col, "ft")); + break; + + case "Controller": + doubleTimeSeriesMap.put(col, new DoubleTimeSeries(col, "level")); + break; + + default: + handleMiscDataType(col, doubleTimeSeriesMap, stringTimeSeriesMap); + } + + } + } + + /** + * Helper for initializing IMU data + * @param colName - Name of column + * @param doubleTimeSeriesMap - Map of double time series data + * @param stringTimeSeriesMap - Map of string time series data + */ + private static void handleIMUDataType(String colName, Map doubleTimeSeriesMap, Map stringTimeSeriesMap) { + String dataType; + + if (colName.contains("accel")) { + dataType = "m/s^2"; + } else if (colName.contains("gyro") || colName.contains("Gyro")) { + dataType = "deg/s"; + } else if (colName.contains("vel") || colName.contains("Velocity")) { + dataType = "m/s"; + } else if (colName.contains("mag")) { + dataType = "A/m"; + } else if (colName.contains("Longitude") || colName.contains("Latitude")) { + dataType = "degrees"; + } else if (colName.contains("roll") || colName.contains("pitch") || colName.contains("yaw") || colName.contains("directionOfTravel")) { + dataType = "degrees"; + } else if (colName.contains("distance") || colName.contains("GPS-H") || colName.contains("Alti")) { + dataType = "ft"; + } else if (colName.contains("temperature")) { + dataType = "Celsius"; + } else if (colName.contains("barometer")) { + dataType = "atm"; + } else { + if (colName.contains("err")) { + 
stringTimeSeriesMap.put("IMUEX(0):err", new StringTimeSeries("IMUEX Error", "error")); + return; + } + + dataType = "number"; + if (!colName.contains("num")) { + LOG.log(Level.WARNING, "IMU Unknown data type: {0}", colName); + + } + } + + doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType)); + } + + /** + * Helper for initializing battery data + * @param colName - Name of column + * @param doubleTimeSeriesMap - Map of double time series data + */ + private static void handleGPSDataType(String colName, Map doubleTimeSeriesMap, Map stringTimeSeriesMap) { + String dataType; + + if (colName.contains("dateTimeStamp")) { + stringTimeSeriesMap.put(colName, new StringTimeSeries(colName, "yyyy-mm-ddThh:mm:ssZ")); + return; + } + + if (colName.contains("Long") || colName.contains("Lat")) { + dataType = "degrees"; + } else if (colName.contains("vel")) { + dataType = "m/s"; + } else if (colName.contains("height")) { + dataType = "ft"; + } else if (colName.contains("DOP")) { + dataType = "DOP Value"; + } else if (colName.contains("Date")) { + dataType = "Date"; + } else if (colName.contains("Time")) { + dataType = "Time"; + } else if (colName.contains("sAcc")) { + dataType = "cm/s"; + } else { + dataType = "number"; + if (!colName.contains("num")) { + LOG.log(Level.WARNING, "GPS Unknown data type: {0}", colName); + } + } + + doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType)); + } + + /** + * Helper for initializing battery data + * @param colName - Name of column + * @param doubleTimeSeriesMap - Map of double time series data + */ + private static void handleBatteryDataType(String colName, Map doubleTimeSeriesMap) { + String dataType = "number"; + String lowerColName = colName.toLowerCase(); + + if (lowerColName.contains("volt")) { + dataType = "Voltage"; + } else if (lowerColName.contains("watts")) { + dataType = "Watts"; + } else if (lowerColName.contains("current")) { + dataType = "Amps"; + } else if (lowerColName.contains("cap")) 
{ + dataType = "Capacity"; + } else if (lowerColName.contains("temp")) { + dataType = "Celsius"; + } else if (lowerColName.contains("%")) { + dataType = "Percentage"; + } else if (lowerColName.contains("time")) { + dataType = "seconds"; + } else { + LOG.log(Level.WARNING, "Battery Unknown data type: {0}", colName); + } + + doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType)); + } + + /** + * Helper for initializing motor data + * @param colName - Name of column + * @param doubleTimeSeriesMap - Map of double time series data + */ + private static void handleMotorDataType(String colName, Map doubleTimeSeriesMap, Map stringTimeSeriesMap) { + if (colName.contains("lowVoltage")) { + stringTimeSeriesMap.put(colName, new StringTimeSeries(colName, "Low Voltage")); + return; + } else if (colName.contains("status")) { + stringTimeSeriesMap.put(colName, new StringTimeSeries(colName, "Battery Status")); + return; + } + + String dataType = "number"; + + if (colName.contains("V_out") || colName.contains("Volts")) { + dataType = "Voltage"; + } else if (colName.contains("Speed")) { + dataType = "m/s"; + } else if (colName.contains("Current")) { + dataType = "Amps"; + } else if (colName.contains("PPMrecv")) { + dataType = "RC Stop Command"; + } else if (colName.contains("Temp")) { + dataType = "Celsius"; + } else if (colName.contains("Status")) { + dataType = "Status Number"; + } else if (colName.contains("Hz")) { + dataType = "Status Number"; + } else { + LOG.log(Level.WARNING, "Battery Unknown data type: {0}", colName); + } + + doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType)); + } + + /** + * Helper for initializing RC data + * @param colName - Name of column + * @param doubleTimeSeriesMap - Map of double time series data + */ + private static void handleRCDataType(String colName, Map doubleTimeSeriesMap, Map stringTimeSeriesMap) { + String dataType = "number"; + + if (colName.contains("Aileron")) { + dataType = "Aileron"; + } else if 
(colName.contains("Elevator")) { + dataType = "Elevator"; + } else if (colName.contains("Rudder")) { + dataType = "Rudder"; + } else if (colName.contains("Throttle")) { + dataType = "Throttle"; + } else { + if (colName.equals("RC:ModeSwitch")) { + stringTimeSeriesMap.put(colName, new StringTimeSeries("RC Mode Switch", "Mode")); + return; + } + + LOG.log(Level.WARNING, "RC Unknown data type: {0}", colName); + } + + doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType)); + } + + /** + * Helper for initializing air comp data + * @param colName - Name of column + * @param doubleTimeSeriesMap - Map of double time series data + */ + private static void handleAirCompDataType(String colName, Map doubleTimeSeriesMap) { + String dataType; + + if (colName.contains("AirSpeed")) { + dataType = "knots"; + } else if (colName.contains("Alti")) { + dataType = "ft"; + } else if (colName.contains("Vel")) { + dataType = "k/h"; + } else { + dataType = "number"; + LOG.log(Level.WARNING, "AirComp Unknown data type: {0}", colName); + } + + doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType)); + } + + /** + * Helper for initializing other types of data + * @param colName - Name of column + * @param doubleTimeSeriesMap - Map of double time series data + */ + private static void handleMiscDataType(String colName, Map doubleTimeSeriesMap, Map stringTimeSeriesMap) { + String dataType; + boolean isDouble = true; + switch (colName) { + case "Tick#": + dataType = "tick"; + break; + + case "offsetTime": + case "flightTime": + dataType = "seconds"; + break; + + case "gpsHealth": + dataType = "GPS Health"; + break; + + case "flyCState": + dataType = "C State"; + isDouble = false; + break; + + case "flycCommand": + dataType = "Command"; + isDouble = false; + break; + + case "flightAction": + dataType = "Action"; + isDouble = false; + break; + + case "nonGPSCause": + dataType = "GPS Cause"; + isDouble = false; + break; + + case "connectedToRC": + dataType = 
"Connection"; + isDouble = false; + break; + + case "gpsUsed": + case "visionUsed": + dataType = "boolean"; + isDouble = false; + break; + + case "Attribute|Value": + dataType = "Key-Value Pair"; + isDouble = false; + break; + + default: + dataType = "N/A"; + isDouble = false; + LOG.log(Level.WARNING, "Misc Unknown data type: {0}", colName); + } + + if (isDouble) { + doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType)); + } else { + stringTimeSeriesMap.put(colName, new StringTimeSeries(colName, dataType)); + } + } +} diff --git a/src/main/java/org/ngafid/flights/process/DependencyGraph.java b/src/main/java/org/ngafid/flights/process/DependencyGraph.java new file mode 100644 index 000000000..a977fe2db --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/DependencyGraph.java @@ -0,0 +1,286 @@ +package org.ngafid.flights.process; + +import java.sql.SQLException; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ForkJoinPool; +import java.util.concurrent.ForkJoinTask; +import java.util.concurrent.RecursiveTask; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.*; +import java.util.logging.Logger; +import java.util.stream.Collectors; + +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + +/** + * A dependency graph which represents the dependencies of ProcessSteps on one another. + **/ +public class DependencyGraph { + private static final Logger LOG = Logger.getLogger(DependencyGraph.class.getName()); + + class DependencyNode { + final ProcessStep step; + + // Used for cycle detection. 
+ boolean mark = false; + AtomicBoolean enabled = new AtomicBoolean(true); + + final HashSet requiredBy = new HashSet<>(32); + final HashSet requires = new HashSet<>(32); + + ArrayList exceptions = new ArrayList<>(); + + public DependencyNode(ProcessStep step) { + this.step = step; + } + + void disableChildren() { + if (enabled.get()) { + enabled.set(false); + if (step.isRequired()) { + String reason = step.explainApplicability(); + LOG.severe("Required step " + step.getClass().getName() + " has been disabled for the following reason:\n " + reason); + exceptions.add(new FatalFlightFileException(reason)); + } + for (var child : requiredBy) child.disable(); + } + } + + void disable() { + if (enabled.get()) { + enabled.set(false); + if (step.isRequired()) { + LOG.severe("Required step " + step.toString() + " has been disabled."); + exceptions.add( + new FatalFlightFileException( + "Required step " + step.getClass().getName() + + " has been disabled because a required parent step has been disabled")); + } + for (var child : requiredBy) child.disable(); + } + } + + void compute() { + try { + + if (step.applicable()) { + step.compute(); + } else { + disableChildren(); + } + + } catch (SQLException | MalformedFlightFileException | FatalFlightFileException e) { + LOG.warning("Encountered exception when calculating process step " + step.toString() + ": " + e.toString()); + exceptions.add(e); + disable(); + } + } + } + + class DependencyNodeTask extends RecursiveTask { + private static final long serialVersionUID = 0; + + // This is used to avoid creating duplicate tasks. + // This isn't a problem w/ a tree-like problem, but ours is a DAG. 
+ final ConcurrentHashMap> taskMap; + final DependencyNode node; + + public DependencyNodeTask(DependencyNode node, ConcurrentHashMap> taskMap) { + this.taskMap = taskMap; + this.node = node; + } + + ForkJoinTask getTask(DependencyNode node) { + return taskMap.computeIfAbsent(node, x -> new DependencyNodeTask(x, taskMap).fork()); + } + + public Void compute() { + for (var requiredNode : node.requires) { + getTask(requiredNode).join(); + } + + if (node.enabled.get()) + node.compute(); + + return null; + } + } + + /** + * Dummy step meant to act as a root node in DAG. This is done by adding all of the columns included in the file + * as output columns, so all other steps will depend on this. + **/ + class DummyStep extends ProcessStep { + Set outputColumns = new HashSet<>(); + + public DummyStep(FlightBuilder builder) { + // We can pass in null rather than a connection object + super(null, builder); + outputColumns.addAll(doubleTS.keySet()); + outputColumns.addAll(stringTS.keySet()); + } + + public Set getRequiredDoubleColumns() { return Collections.emptySet(); } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return Collections.emptySet(); } + public Set getOutputColumns() { return outputColumns; } + + public boolean airframeIsValid(String airframe) { return true; } + + // Left blank intentionally + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + LOG.info("Computed dummy step!"); + } + } + + private void nodeConflictError(ProcessStep first, ProcessStep second) throws FatalFlightFileException { + throw new FatalFlightFileException( + "ERROR when building dependency graph! " + + "Two ProcessSteps are indicated as having the same output column. " + + "While it is possible for two ProcessSteps to have the same output column(s), " + + "their use should be mutually exclusive from one another. 
" + + "\nDEBUG INFO:\n node 0: " + first.toString() + "\n node 1: " + second.toString()); + + } + + private DependencyNode registerStep(ProcessStep step) throws FatalFlightFileException { + DependencyNode node = new DependencyNode(step); + nodes.add(node); + + for (String outputColumn : step.getOutputColumns()) { + DependencyNode other = null; + if ((other = columnToSource.put(outputColumn, node)) != null) nodeConflictError(step, other.step); + } + + return node; + } + + /** + * Create the edges. An edge exists from step X to step Y if step X has an output column + * that step Y relies upon. + **/ + private void createEdges(DependencyNode node) throws FatalFlightFileException { + for (String column : node.step.getRequiredColumns()) { + DependencyNode sourceNode = columnToSource.get(column); + if (sourceNode != null) { + sourceNode.requiredBy.add(node); + node.requires.add(sourceNode); + } + } + } + + // Maps column name to the node where that column is computed + HashMap columnToSource = new HashMap<>(64); + HashSet nodes = new HashSet<>(64); + FlightBuilder builder; + + public DependencyGraph(FlightBuilder builder, List steps) throws FlightProcessingException { + /** + * Create nodes for each step and create a mapping from output column name + * to the node that outputs that column. This should be a unique mapping, as + * we don't want two steps generating the same output column. + **/ + + this.builder = builder; + + try { + registerStep(new DummyStep(builder)); + for (var step : steps) registerStep(step); + for (var node : nodes) createEdges(node); + } catch (FatalFlightFileException e) { + throw new FlightProcessingException(e); + } + } + + // Modifies the flight object in place. + public void compute() throws FlightProcessingException { + // Start with all of the leaf nodes. 
+ ConcurrentHashMap> tasks = new ConcurrentHashMap<>(); + ArrayList> initialTasks = new ArrayList<>(); + for (var node : nodes) { + if (node.requiredBy.size() == 0) { + var task = new DependencyNodeTask(node, tasks); + initialTasks.add(task); + tasks.put(node, task); + } + } + + var handles = initialTasks + .stream() + .map(x -> x.fork()) + .collect(Collectors.toList()); + handles.forEach(ForkJoinTask::join); + + ArrayList fatalExceptions = new ArrayList<>(); + for (var node : nodes) { + for (var e : node.exceptions) { + if (e instanceof MalformedFlightFileException me) { + builder.exceptions.add(me); + } else if (e instanceof FatalFlightFileException fe) { + fatalExceptions.add(fe); + } else if (e instanceof SQLException se) { + fatalExceptions.add(se); + } else { + LOG.severe( + "Encountered exception of unknown type when executing dependency graph. " + + "\"" + e.getMessage() + "\"" + "\n." + + "This should not be possible - if this seems plausible you should add a handler for this " + + "type of exception in DependencyGraph::compute."); + e.printStackTrace(); + System.exit(1); + } + } + } + + if (fatalExceptions.size() != 0) + throw new FlightProcessingException(fatalExceptions); + } + + public void scrutinize() { + cycleCheck(); + requiredCheck(); + } + + // Ensure that there are no required steps that are children to optional steps, + // since that wouldn't make sense. + private void requiredCheck() { + for (var node : nodes) { + if (!node.step.isRequired()) + continue; + + for (var parent : node.requiredBy) { + if (!parent.step.isRequired()) { + System.err.println("ERROR in your DependencyGraph! The optional step '" + parent + "' has a required dependent step '" + node + "'."); + System.exit(1); + } + } + } + } + + // Ensure there are no cycles! 
+ private void cycleCheck() { + for (var src : nodes) { + for (var node : nodes) + node.mark = false; + + Queue q = new ArrayDeque<>(); + var dst = src; + for (var child : src.requiredBy) + q.add(child); + + while ((dst = q.poll()) != null) { + if (dst == src) { + System.err.println("ERROR in your DependencyGraph! Cycle was detected from step '" + src + "' to step '" + dst + "'."); + System.exit(1); + } + + dst.mark = true; + for (var child : dst.requiredBy) { + if (!child.mark) + q.add(child); + } + } + } + } +} diff --git a/src/main/java/org/ngafid/flights/process/FlightBuilder.java b/src/main/java/org/ngafid/flights/process/FlightBuilder.java new file mode 100644 index 000000000..69fcd45e9 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/FlightBuilder.java @@ -0,0 +1,100 @@ +package org.ngafid.flights.process; + +import java.sql.Connection; +import java.sql.SQLException; + +import java.util.Map; +import java.util.List; +import java.util.ArrayList; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; + +import org.ngafid.flights.*; +import static org.ngafid.flights.process.ProcessStep.required; +import org.ngafid.flights.process.*; + +public class FlightBuilder { + + public final ConcurrentHashMap doubleTimeSeries; + public final ConcurrentHashMap stringTimeSeries; + + private ArrayList itinerary = null; + + public final FlightMeta meta; + + public final ArrayList exceptions = new ArrayList<>(); + + public FlightBuilder(FlightMeta meta, Map doubleTimeSeries, Map stringTimeSeries) { + this.doubleTimeSeries = new ConcurrentHashMap<>(doubleTimeSeries); + this.stringTimeSeries = new ConcurrentHashMap<>(stringTimeSeries); + this.meta = meta; + } + + public FlightBuilder addTimeSeries(String name, DoubleTimeSeries timeSeries) { + doubleTimeSeries.put(name, timeSeries); + return this; + } + + public FlightBuilder addTimeSeries(String name, StringTimeSeries timeSeries) { + stringTimeSeries.put(name, timeSeries); + return this; 
+ } + + public synchronized FlightBuilder setStartDateTime(String startDateTime) { + this.meta.startDateTime = startDateTime; + return this; + } + + public synchronized FlightBuilder setEndDateTime(String endDateTime) { + this.meta.endDateTime = endDateTime; + return this; + } + + public synchronized FlightBuilder setItinerary(ArrayList itinerary) { + this.itinerary = itinerary; + return this; + } + + public synchronized FlightBuilder updateProcessingStatus(int processingStatus) { + this.meta.processingStatus |= processingStatus; + return this; + } + + private static final List processSteps = List.of( + required(ProcessAltAGL::new), + required(ProcessAirportProximity::new), + required(ProcessStartEndTime::new), + ProcessLaggedAltMSL::new, + ProcessStallIndex::new, + ProcessTotalFuel::new, + ProcessDivergence::new, + ProcessLOCI::new, + ProcessItinerary::new + ); + + // This can be overridden. + protected List gatherSteps(Connection connection) { + // Add all of our processing steps here... + // The order doesn't matter; the DependencyGraph will resolve + // the order in the event that there are dependencies. + return processSteps.stream().map(factory -> factory.create(connection, this)).collect(Collectors.toList()); + } + + // throws a flight processing exception if an unrecoverable error occurred. + public Flight build(Connection connection) throws FlightProcessingException { + DependencyGraph dg = new DependencyGraph(this, gatherSteps(connection)); + + dg.compute(); + + // TODO: Make sure headers are calculated appropriately. 
/**
 * Thrown when an uploaded flight file has an extension or structure that no
 * registered processor can handle.
 */
public class FlightFileFormatException extends Exception {
    private static final long serialVersionUID = 124311;

    /** Name of the file whose format was not recognized. */
    private final String filename;

    public FlightFileFormatException(String filename) {
        this.filename = filename;
    }

    /** @return the offending filename */
    public String getFilename() {
        return filename;
    }

    @Override
    public String getMessage() {
        return "File '" + filename + "' is of an unrecognized or unsupported file format.";
    }
}
org.ngafid.flights.Flight; +import org.ngafid.flights.Upload; + +public abstract class FlightFileProcessor { + + interface Factory { + FlightFileProcessor create(Connection connection, InputStream is, String filename); + } + + // Right now this is only for zip files but this could easily be extended to handle other types of archives. + // Most of the code is reusable. + public static class Pipeline { + final Connection connection; + final ZipFile zipFile; + final Map factories; + final Upload upload; + private int validFlightsCount = 0; + private int warningFlightsCount = 0; + + private ConcurrentHashMap flightErrors = new ConcurrentHashMap<>(); + + public Pipeline(Connection connection, Upload upload, ZipFile zipFile) { + this.connection = connection; + this.upload = upload; + this.zipFile = zipFile; + + this.factories = Map.of( + "csv", this::createCSVFileProcessor, + "dat", this::createDATFileProcessor, + "json", JSONFileProcessor::new, + "gpx", GPXFileProcessor::new + ); + } + + public Map getFlightErrors() { + return Collections.unmodifiableMap(flightErrors); + } + + private FlightFileProcessor createDATFileProcessor(Connection connection, InputStream is, String filename) { + return new DATFileProcessor(connection, is, filename, zipFile); + } + + private FlightFileProcessor createCSVFileProcessor(Connection connection, InputStream is, String filename) { + return new CSVFileProcessor(connection, is, filename, upload); + } + + public Stream stream() { + Enumeration entries = zipFile.entries(); + Stream validFiles = + StreamSupport.stream( + Spliterators.spliteratorUnknownSize(entries.asIterator(), Spliterator.ORDERED), + false + ) + .filter(z -> !z.getName().contains("__MACOSX")) + .filter(z -> !z.isDirectory()); + + return validFiles.map(this::create).filter(Objects::nonNull).collect(Collectors.toList()).stream(); + } + + public Stream parse(FlightFileProcessor processor) { + try { + return processor.parse(); + } catch (FlightProcessingException e) { + 
flightErrors.put(processor.filename, new UploadException(e.getMessage(), e, processor.filename)); + return Stream.of(); + } + } + + public Flight build(FlightBuilder fb) { + try { + return fb.build(connection); + } catch (FlightProcessingException e) { + flightErrors.put(fb.meta.filename, new UploadException(e.getMessage(), e, fb.meta.filename)); + return null; + } + } + + public List build(Stream fbs) { + return fbs.map(this::build).filter(Objects::nonNull).collect(Collectors.toList()); + } + + private FlightFileProcessor create(ZipEntry entry) { + String filename = entry.getName(); + + int index = filename.lastIndexOf('.'); + String extension = index >= 0 ? filename.substring(index + 1).toLowerCase() : ""; + Factory f = factories.get(extension); + if (f != null) { + try { + return f.create(connection, zipFile.getInputStream(entry), filename); + } catch (IOException e) { + flightErrors.put(filename, new UploadException(e.getMessage(), e, filename)); + } + } else { + flightErrors.put(filename, new UploadException("Unknown file type '" + extension + "' contained in zip file.", filename)); + } + + return null; + } + + public Flight tabulateFlightStatus(Flight flight) { + if (flight.getStatus().equals("WARNING")) + warningFlightsCount++; + else + validFlightsCount++; + + return flight; + } + + public int getWarningFlightsCount() { + return warningFlightsCount; + } + + public int getValidFlightsCount() { + return validFlightsCount; + } + } + + protected final Connection connection; + protected final InputStream stream; + protected final String filename; + + public FlightFileProcessor(Connection connection, InputStream stream, String filename) { + this.connection = connection; + this.stream = stream; + this.filename = filename; + } + + // If an exception occurs, it will be stored here. 
+ FlightProcessingException parseException = null; + + /** + * Parses the file for flight data to be processed + * @return A stream of FlightBuilders + * @throws FlightProcessingException + */ + private Stream parsedFlightBuilders = null; + protected abstract Stream parse() throws FlightProcessingException; + + public FlightFileProcessor pipelinedParse() { + try { + parsedFlightBuilders = parse(); + assert parsedFlightBuilders != null; + } catch (FlightProcessingException e) { + parseException = e; + } + + return this; + } + + protected Stream flights = null; + protected final ArrayList buildExceptions = new ArrayList<>(); + + private Flight build(FlightBuilder fb) { + try { + return fb.build(connection); + } catch (FlightProcessingException e) { + buildExceptions.add(e); + } + return null; + } + + public FlightFileProcessor pipelinedBuild() { + if (parseException == null) { + flights = parsedFlightBuilders.map(this::build).filter(Objects::nonNull); + } + + return this; + } +} diff --git a/src/main/java/org/ngafid/flights/process/FlightMeta.java b/src/main/java/org/ngafid/flights/process/FlightMeta.java new file mode 100644 index 000000000..5e8007ac1 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/FlightMeta.java @@ -0,0 +1,125 @@ +package org.ngafid.flights.process; + +/** + * Utility class used by FlightBuilder to call the Flight constructor. 
/**
 * Utility class used by FlightBuilder to call the Flight constructor: a plain
 * mutable holder for per-flight metadata accumulated during processing.
 *
 * Not thread-safe; FlightBuilder serializes access via its synchronized setters.
 **/
public final class FlightMeta {
    // TODO: Should be made private -- kept public because FlightBuilder (and the
    // processing pipeline) currently write these fields directly.
    public int fleetId = -1,          // -1 means "not yet assigned"
               uploaderId = -1,
               uploadId = -1,
               processingStatus = 0;  // bitmask, OR-ed together by updateProcessingStatus

    public String startDateTime,
                  endDateTime,
                  md5Hash,
                  airframeType,
                  systemId,
                  filename,
                  airframeName,
                  calculated,
                  suggestedTailNumber;

    public int getFleetId() {
        return fleetId;
    }

    public void setFleetId(int fleetId) {
        this.fleetId = fleetId;
    }

    public int getUploaderId() {
        return uploaderId;
    }

    public void setUploaderId(int uploaderId) {
        this.uploaderId = uploaderId;
    }

    public int getUploadId() {
        return uploadId;
    }

    public void setUploadId(int uploadId) {
        this.uploadId = uploadId;
    }

    public int getProcessingStatus() {
        return processingStatus;
    }

    public void setProcessingStatus(int processingStatus) {
        this.processingStatus = processingStatus;
    }

    public String getStartDateTime() {
        return startDateTime;
    }

    public void setStartDateTime(String startDateTime) {
        this.startDateTime = startDateTime;
    }

    public String getEndDateTime() {
        return endDateTime;
    }

    public void setEndDateTime(String endDateTime) {
        this.endDateTime = endDateTime;
    }

    public String getMd5Hash() {
        return md5Hash;
    }

    public void setMd5Hash(String md5Hash) {
        this.md5Hash = md5Hash;
    }

    public String getAirframeType() {
        return airframeType;
    }

    public void setAirframeType(String airframeType) {
        this.airframeType = airframeType;
    }

    public String getSystemId() {
        return systemId;
    }

    public void setSystemId(String systemId) {
        this.systemId = systemId;
    }

    public String getFilename() {
        return filename;
    }

    public void setFilename(String filename) {
        this.filename = filename;
    }

    public String getAirframeName() {
        return airframeName;
    }

    public void setAirframeName(String airframeName) {
        this.airframeName = airframeName;
    }

    public String getCalculated() {
        return calculated;
    }

    public void setCalculated(String calculated) {
        this.calculated = calculated;
    }

    public String getSuggestedTailNumber() {
        return suggestedTailNumber;
    }

    public void setSuggestedTailNumber(String suggestedTailNumber) {
        this.suggestedTailNumber = suggestedTailNumber;
    }
}
+ * + **/ +public class FlightProcessingException extends Exception { + private static final long serialVersionUID = 1235003; + private static final String DEFAULT_MESSAGE = "(exception message was empty / null)"; + + private List exceptions; + + public FlightProcessingException(Exception e) { + exceptions = List.of(e); + } + + public FlightProcessingException(List exceptions) { + this.exceptions = Collections.unmodifiableList(exceptions); + } + + public String getMessage() { + String message; + + if (exceptions.size() == 1) { + + message = exceptions.get(0).getMessage(); + if (message == null) + return DEFAULT_MESSAGE; + + } else { + message = "Encountered the following " + exceptions.size() + " errors when processing a flight:\n"; + for (var e : exceptions) { + String eMessage = e.getMessage(); + if (eMessage == null) + eMessage = DEFAULT_MESSAGE; + message += eMessage + "\n\n"; + } + } + + return message; + } +} diff --git a/src/main/java/org/ngafid/flights/process/GPXFileProcessor.java b/src/main/java/org/ngafid/flights/process/GPXFileProcessor.java new file mode 100644 index 000000000..15e9ef75e --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/GPXFileProcessor.java @@ -0,0 +1,201 @@ +package org.ngafid.flights.process; + +import org.ngafid.flights.*; +import org.w3c.dom.Document; +import org.w3c.dom.NamedNodeMap; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.xml.sax.SAXException; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import java.io.IOException; +import java.io.InputStream; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.logging.Logger; +import java.util.stream.Stream; + +/** + * This class is responsible for parsing GPX files. 
+ * + * @author Josh Karns + */ + +public class GPXFileProcessor extends FlightFileProcessor { + private static final Logger LOG = Logger.getLogger(GPXFileProcessor.class.getName()); + + public GPXFileProcessor(Connection connection, InputStream stream, String filename) { + super(connection, stream, filename); + } + + @Override + public Stream parse() throws FlightProcessingException { + try { + List flights = parseFlights(filename, stream); + + return flights.stream(); + } catch (SQLException | MalformedFlightFileException | IOException | FatalFlightFileException | + FlightAlreadyExistsException e) { + throw new RuntimeException(e); + } + } + + public List parseFlights(String entry, InputStream stream) throws SQLException, MalformedFlightFileException, IOException, FatalFlightFileException, FlightAlreadyExistsException { + List flights = new ArrayList<>(); + // BE-GPS-2200 + DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); + try { + DocumentBuilder db = dbf.newDocumentBuilder(); + Document doc = db.parse(stream); + + NodeList l = doc.getElementsByTagName("trkseg"); + if (l.getLength() == 0) + throw new FatalFlightFileException("could not parse GPX data file: failed to find data node."); + + if (l.getLength() != 1) + throw new FatalFlightFileException("could not parse GPX data file: found multiple data nodes."); + + Node dataNode = l.item(0); + int len = dataNode.getChildNodes().getLength(); + + DoubleTimeSeries lat = new DoubleTimeSeries("Latitude", "degrees", len); + DoubleTimeSeries lon = new DoubleTimeSeries("Longitude", "degrees", len); + DoubleTimeSeries msl = new DoubleTimeSeries("AltMSL", "ft", len); + DoubleTimeSeries spd = new DoubleTimeSeries("GndSpd", "kt", len); + ArrayList timestamps = new ArrayList(len); + StringTimeSeries localDateSeries = new StringTimeSeries("Lcl Date", "yyyy-mm-dd"); + StringTimeSeries localTimeSeries = new StringTimeSeries("Lcl Time", "hh:mm:ss"); + StringTimeSeries utcOfstSeries = new 
StringTimeSeries("UTCOfst", "hh:mm"); + // ss.SSSSSSXXX + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX"); + + SimpleDateFormat lclDateFormat = new SimpleDateFormat("yyyy-MM-dd"); + SimpleDateFormat lclTimeFormat = new SimpleDateFormat("HH:mm:ss"); + + // NodeList serialNumberNodes = doc.getElementsByTagName("badelf:modelSerialNumber"); + // String serialNumber = serialNumberNodes.item(0).getTextContent(); + NodeList nicknameNodes = doc.getElementsByTagName("badelf:modelNickname"); + if (nicknameNodes.item(0) == null) + throw new FatalFlightFileException("GPX file is missing necessary metadata (modelNickname)."); + String nickname = nicknameNodes.item(0).getTextContent(); + + NodeList fdrModel = doc.getElementsByTagName("badelf:modelName"); + if (fdrModel.item(0) == null) + throw new FatalFlightFileException("GPX file is missing necessary metadata (modelName)."); + String airframeName = fdrModel.item(0).getTextContent(); + LOG.info("Airframe name: " + airframeName); + + NodeList dates = doc.getElementsByTagName("time"); + NodeList datanodes = doc.getElementsByTagName("trkpt"); + NodeList elenodes = doc.getElementsByTagName("ele"); + NodeList spdnodes = doc.getElementsByTagName("badelf:speed"); + + + if (spdnodes.item(0) == null) + throw new FatalFlightFileException("GPX file is missing GndSpd."); + + if (!(dates.getLength() == datanodes.getLength() && + dates.getLength() == elenodes.getLength() && + dates.getLength() == spdnodes.getLength())) { + throw new FatalFlightFileException("Mismatching number of data tags in GPX file"); + } + + for (int i = 0; i < dates.getLength(); i++) { + Date parsedDate = dateFormat.parse(dates.item(i).getTextContent()); + timestamps.add(new Timestamp(parsedDate.getTime())); + Calendar cal = new Calendar.Builder().setInstant(parsedDate).build(); + + int offsetMS = cal.getTimeZone().getOffset(parsedDate.getTime()); + String sign = offsetMS < 0 ? "-" : "+"; + offsetMS = offsetMS < 0 ? 
-offsetMS : offsetMS; + + int offsetSEC = offsetMS / 1000; + int offsetMIN = offsetSEC / 60; + int offsetHRS = offsetMIN / 60; + offsetMIN %= 60; + + String offsetHrsStr = (offsetHRS < 10 ? "0" : "") + offsetHRS; + String offsetMinStr = (offsetMIN < 10 ? "0" : "") + offsetMIN; + // This should look like +HH:mm + utcOfstSeries.add(sign + offsetHrsStr + ":" + offsetMinStr); + + localDateSeries.add(lclDateFormat.format(parsedDate)); + localTimeSeries.add(lclTimeFormat.format(parsedDate)); + + Node spdNode = spdnodes.item(i); + // Convert m / s to knots + spd.add(Double.parseDouble(spdNode.getTextContent()) * 1.94384); + + Node eleNode = elenodes.item(i); + // Convert meters to feet. + msl.add(Double.parseDouble(eleNode.getTextContent()) * 3.28084); + + Node d = datanodes.item(i); + NamedNodeMap attrs = d.getAttributes(); + + Node latNode = attrs.getNamedItem("lat"); + lat.add(Double.parseDouble(latNode.getTextContent())); + + Node lonNode = attrs.getNamedItem("lon"); + lon.add(Double.parseDouble(lonNode.getTextContent())); + } + + int start = 0; + for (int end = 1; end < timestamps.size(); end++) { + // 1 minute delay -> new flight. 
+ if (timestamps.get(end).getTime() - timestamps.get(end - 1).getTime() > 60000 + || end == localTimeSeries.size() - 1) { + if (end == localTimeSeries.size() - 1) { + end += 1; + } + + if (end - start < 60) { + start = end; + continue; + } + + StringTimeSeries localTime = localTimeSeries.subSeries(start, end); + StringTimeSeries localDate = localDateSeries.subSeries(start, end); + StringTimeSeries offset = utcOfstSeries.subSeries(start, end); + DoubleTimeSeries nlat = lat.subSeries(start, end); + DoubleTimeSeries nlon = lon.subSeries(start, end); + DoubleTimeSeries nmsl = msl.subSeries(start, end); + DoubleTimeSeries nspd = spd.subSeries(start, end); + + + HashMap doubleSeries = new HashMap<>(); + doubleSeries.put("GndSpd", nspd); + doubleSeries.put("Longitude", nlon); + doubleSeries.put("Latitude", nlat); + doubleSeries.put("AltMSL", nmsl); + + HashMap stringSeries = new HashMap<>(); + stringSeries.put("Lcl Date", localDate); + stringSeries.put("Lcl Time", localTime); + stringSeries.put("UTCOfst", offset); + + FlightMeta meta = new FlightMeta(); + meta.setFilename(this.filename + ":" + start + "-" + end); + meta.setAirframeName(airframeName); + meta.setSuggestedTailNumber(nickname); + meta.setSystemId(nickname); + meta.setAirframeType("Fixed Wing"); + + flights.add(new FlightBuilder(meta, doubleSeries, stringSeries)); + start = end; + } + } + + } catch (ParserConfigurationException | SAXException | ParseException e) { + throw new FatalFlightFileException("Could not parse GPX data file: " + e.getMessage()); + } + + return flights; + } +} diff --git a/src/main/java/org/ngafid/flights/process/JSONFileProcessor.java b/src/main/java/org/ngafid/flights/process/JSONFileProcessor.java new file mode 100644 index 000000000..7d0a41601 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/JSONFileProcessor.java @@ -0,0 +1,171 @@ +package org.ngafid.flights.process; + +import com.google.gson.Gson; +import com.google.gson.stream.JsonReader; +import 
org.ngafid.common.TimeUtils; +import org.ngafid.flights.*; + +import javax.xml.bind.DatatypeConverter; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.logging.Logger; +import java.util.stream.Stream; + +/** + * This class is responsible for parsing JSON files. + * + * @author Aaron Chan + */ + +public class JSONFileProcessor extends FlightFileProcessor { + private static final Logger LOG = Logger.getLogger(JSONFileProcessor.class.getName()); + + public JSONFileProcessor(Connection connection, InputStream stream, String filename) { + super(connection, stream, filename); + } + + @Override + public Stream parse() throws FlightProcessingException { + FlightMeta flightMeta = new FlightMeta(); + final Map doubleTimeSeries = new HashMap<>(); + final Map stringTimeSeries = new HashMap<>(); + + + try { + processTimeSeries(flightMeta, doubleTimeSeries, stringTimeSeries); + } catch (SQLException | MalformedFlightFileException | IOException | FatalFlightFileException | + FlightAlreadyExistsException e) { + throw new FlightProcessingException(e); + } + + return Stream.of(new FlightBuilder(flightMeta, doubleTimeSeries, stringTimeSeries)); + } + + private void processTimeSeries(FlightMeta flightMeta, Map doubleTimeSeries, Map stringTimeSeries) throws SQLException, MalformedFlightFileException, IOException, FatalFlightFileException, FlightAlreadyExistsException { + String status = ""; + Gson gson = new Gson(); + JsonReader reader = new JsonReader(new InputStreamReader(super.stream)); + Map jsonMap = gson.fromJson(reader, Map.class); + + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HHmmssZ"); + 
+
+        Date parsedDate;
+        try {
+            parsedDate = dateFormat.parse((String) jsonMap.get("date"));
+        } catch (Exception e) {
+            throw new MalformedFlightFileException("Could not parse date from JSON file: " + e.getMessage());
+        }
+
+        // BUG FIX: Date.getTimezoneOffset() returns minutes *west* of UTC (UTC - local), so the
+        // ISO-style sign is its negation; the old code used the raw sign AND formatted the signed
+        // value with %02d, so negative offsets printed a doubled minus (e.g. "--5:00").
+        int timezoneOffset = -parsedDate.getTimezoneOffset() / 60;
+        String timezoneOffsetString = (timezoneOffset >= 0 ? "+" : "-") + String.format("%02d:00", Math.abs(timezoneOffset));
+
+        // NOTE(review): generic arguments were stripped from the pasted diff; reconstructed from
+        // the indexOf()/(double) usages below -- confirm against the original file.
+        ArrayList<String> headers = (ArrayList<String>) jsonMap.get("details_headers");
+        ArrayList<ArrayList<Double>> lines = (ArrayList<ArrayList<Double>>) jsonMap.get("details_data");
+        int len = headers.size();
+
+        DoubleTimeSeries lat = new DoubleTimeSeries("Latitude", "degrees", len);
+        DoubleTimeSeries lon = new DoubleTimeSeries("Longitude", "degrees", len);
+        DoubleTimeSeries agl = new DoubleTimeSeries("AltAGL", "ft", len);
+        DoubleTimeSeries spd = new DoubleTimeSeries("GndSpd", "kt", len);
+
+        ArrayList<Timestamp> timestamps = new ArrayList<>(len);
+        StringTimeSeries localDateSeries = new StringTimeSeries("Lcl Date", "yyyy-mm-dd");
+        StringTimeSeries localTimeSeries = new StringTimeSeries("Lcl Time", "hh:mm:ss");
+        StringTimeSeries utcOfstSeries = new StringTimeSeries("UTCOfst", "hh:mm");
+
+        SimpleDateFormat lclDateFormat = new SimpleDateFormat("yyyy-MM-dd");
+        SimpleDateFormat lclTimeFormat = new SimpleDateFormat("HH:mm:ss");
+
+        // NOTE(review): indexOf returns -1 when a header is missing, which would only surface
+        // later as an IndexOutOfBoundsException -- consider validating these up front.
+        int latIndex = headers.indexOf("product_gps_latitude");
+        int lonIndex = headers.indexOf("product_gps_longitude");
+        int altIndex = headers.indexOf("altitude");
+        int spdIndex = headers.indexOf("speed");
+        int timeIndex = headers.indexOf("time");
+
+        // Reject recordings shorter than 3 minutes of elapsed "time" samples.
+        double timeDiff = ((double) lines.get(lines.size() - 1).get(timeIndex)) - ((double) lines.get(0).get(timeIndex));
+        if (timeDiff < 180) throw new FatalFlightFileException("Flight file was less than 3 minutes long, ignoring.");
+
+        double prevSeconds = 0;
+        double metersToFeet = 3.28084;
+
+        for (ArrayList<Double> line : lines) {
+            // Advance the parsed date by the delta between consecutive "time" samples.
+            double milliseconds = (double) line.get(timeIndex) - prevSeconds;
+            prevSeconds = (double) line.get(timeIndex);
+            parsedDate =
TimeUtils.addMilliseconds(parsedDate, (int) milliseconds); + + if ((double) line.get(latIndex) > 90 || (double) line.get(latIndex) < -90) { + LOG.severe("Invalid latitude: " + line.get(latIndex)); + status = "WARNING"; + lat.add(Double.NaN); + } else { + lat.add((Double) line.get(latIndex)); + } + + if ((double) line.get(lonIndex) > 180 || (double) line.get(lonIndex) < -180) { + LOG.severe("Invalid longitude: " + line.get(lonIndex)); + status = "WARNING"; + lon.add(Double.NaN); + } else { + lon.add((Double) line.get(lonIndex)); + } + + agl.add((Double) line.get(altIndex) * metersToFeet); + spd.add((Double) line.get(spdIndex)); + + localDateSeries.add(lclDateFormat.format(parsedDate)); + localTimeSeries.add(lclTimeFormat.format(parsedDate)); + utcOfstSeries.add(timezoneOffsetString); + timestamps.add(new Timestamp(parsedDate.getTime())); + } + + int start = 0; + int end = timestamps.size() - 1; + + DoubleTimeSeries nspd = spd.subSeries(start, end); + DoubleTimeSeries nlon = lon.subSeries(start, end); + DoubleTimeSeries nlat = lat.subSeries(start, end); + DoubleTimeSeries nagl = agl.subSeries(start, end); + + doubleTimeSeries.put("GndSpd", nspd); + doubleTimeSeries.put("Longitude", nlon); + doubleTimeSeries.put("Latitude", nlat); + doubleTimeSeries.put("AltAGL", nagl); // Parrot data is stored as AGL and most likely in meters + + StringTimeSeries localDate = localDateSeries.subSeries(start, end); + StringTimeSeries localTime = localTimeSeries.subSeries(start, end); + StringTimeSeries offset = utcOfstSeries.subSeries(start, end); + + stringTimeSeries.put("Lcl Date", localDate); + stringTimeSeries.put("Lcl Time", localTime); + stringTimeSeries.put("UTCOfst", offset); + + MessageDigest md = null; + try { + md = MessageDigest.getInstance("MD5"); + } catch (NoSuchAlgorithmException e) { + throw new FatalFlightFileException("Could not create MD5 hash: " + e.getMessage()); + } + byte[] hash = md.digest(filename.getBytes()); + + 
flightMeta.setStartDateTime(localDateSeries.get(0) + " " + localTimeSeries.get(0) + " " + utcOfstSeries.get(0)); + flightMeta.setEndDateTime(localDateSeries.get(localDateSeries.size() - 1) + " " + localTimeSeries.get(localTimeSeries.size() - 1) + " " + utcOfstSeries.get(utcOfstSeries.size() - 1)); + flightMeta.setMd5Hash(DatatypeConverter.printHexBinary(hash).toLowerCase()); + flightMeta.setAirframeType("UAS Rotorcraft"); + flightMeta.setSystemId((String) jsonMap.get("serial_number")); + flightMeta.setFilename(super.filename); + flightMeta.setAirframeName((String) jsonMap.get("controller_model")); + flightMeta.setCalculated(""); // TODO: Figure this out + flightMeta.setSuggestedTailNumber((String) jsonMap.get("serial_number")); + } +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java b/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java new file mode 100644 index 000000000..291d6a96e --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java @@ -0,0 +1,95 @@ +package org.ngafid.flights.process; + +import java.util.Set; +import java.util.Collections; +import java.sql.Connection; +import java.sql.SQLException; + +import org.ngafid.airports.*; +import org.ngafid.common.MutableDouble; +import org.ngafid.flights.DoubleTimeSeries; +import org.ngafid.flights.StringTimeSeries; +import static org.ngafid.flights.Parameters.*; +import org.ngafid.flights.process.ProcessStep; +import org.ngafid.flights.process.FlightBuilder; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + +public class ProcessAirportProximity extends ProcessStep { + private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(LATITUDE, LONGITUDE, ALT_AGL); + private static Set OUTPUT_COLUMNS = Set.of(NEAREST_RUNWAY, AIRPORT_DISTANCE, RUNWAY_DISTANCE, NEAREST_AIRPORT); + private final static double MAX_AIRPORT_DISTANCE_FT = 10000; + private final static double 
MAX_RUNWAY_DISTANCE_FT = 100; + + public ProcessAirportProximity(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getOutputColumns() { return OUTPUT_COLUMNS; } + + public boolean airframeIsValid(String airframe) { return true; } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + DoubleTimeSeries latitudeTS = builder.doubleTimeSeries.get(LATITUDE); + DoubleTimeSeries longitudeTS = builder.doubleTimeSeries.get(LONGITUDE); + DoubleTimeSeries altitudeAGLTS = builder.doubleTimeSeries.get(ALT_AGL); + + int sizeHint = latitudeTS.size(); + + StringTimeSeries nearestAirportTS = new StringTimeSeries("NearestAirport", "IATA Code", sizeHint); + stringTS.put("NearestAirport", nearestAirportTS); + + DoubleTimeSeries airportDistanceTS = new DoubleTimeSeries("AirportDistance", "ft", sizeHint); + doubleTS.put("AirportDistance", airportDistanceTS); + + StringTimeSeries nearestRunwayTS = new StringTimeSeries("NearestRunway", "IATA Code", sizeHint); + stringTS.put("NearestRunway", nearestRunwayTS); + + DoubleTimeSeries runwayDistanceTS = new DoubleTimeSeries("RunwayDistance", "ft", sizeHint); + doubleTS.put("RunwayDistance", runwayDistanceTS); + + + for (int i = 0; i < latitudeTS.size(); i++) { + double latitude = latitudeTS.get(i); + double longitude = longitudeTS.get(i); + double altitudeAGL = altitudeAGLTS.get(i); + + // TODO: Move this outside of the loop. To avoid re-allocation ? 
+                MutableDouble airportDistance = new MutableDouble();
+
+                // Only search for airports when low enough for proximity to be meaningful
+                // (<= 2000 ft AGL); otherwise leave placeholder samples below.
+                Airport airport = null;
+                if (altitudeAGL <= 2000) {
+                    airport = Airports.getNearestAirportWithin(latitude, longitude, MAX_AIRPORT_DISTANCE_FT, airportDistance);
+                }
+
+                if (airport == null) {
+                    // No airport in range: add empty / NaN placeholders so all four
+                    // output series stay index-aligned with the input series.
+                    nearestAirportTS.add("");
+                    airportDistanceTS.add(Double.NaN);
+                    nearestRunwayTS.add("");
+                    runwayDistanceTS.add(Double.NaN);
+                } else {
+                    nearestAirportTS.add(airport.iataCode);
+                    airportDistanceTS.add(airportDistance.get());
+
+                    MutableDouble runwayDistance = new MutableDouble();
+                    Runway runway = airport.getNearestRunwayWithin(latitude, longitude, MAX_RUNWAY_DISTANCE_FT, runwayDistance);
+                    if (runway == null) {
+                        nearestRunwayTS.add("");
+                        runwayDistanceTS.add(Double.NaN);
+                    } else {
+                        nearestRunwayTS.add(runway.name);
+                        runwayDistanceTS.add(runwayDistance.get());
+                    }
+                }
+
+            }
+
+        // NOTE(review): these four series were already inserted earlier in compute() under the
+        // string literals "NearestAirport", "AirportDistance", "NearestRunway", "RunwayDistance".
+        // If the NEAREST_*/_DISTANCE constants equal those literals, these puts are redundant;
+        // if they differ, each series ends up stored under two keys. Confirm which is intended.
+        stringTS.put(NEAREST_RUNWAY, nearestRunwayTS);
+        stringTS.put(NEAREST_AIRPORT, nearestAirportTS);
+        doubleTS.put(RUNWAY_DISTANCE, runwayDistanceTS);
+        doubleTS.put(AIRPORT_DISTANCE, airportDistanceTS);
+    }
+}
diff --git a/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java b/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java
new file mode 100644
index 000000000..327613f74
--- /dev/null
+++ b/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java
@@ -0,0 +1,60 @@
+package org.ngafid.flights.process;
+
+import java.util.Set;
+import java.util.Collections;
+import java.sql.Connection;
+import java.sql.SQLException;
+
+import java.nio.file.NoSuchFileException;
+
+import org.ngafid.flights.Flight;
+import org.ngafid.terrain.TerrainCache;
+import org.ngafid.flights.DoubleTimeSeries;
+import static org.ngafid.flights.Parameters.*;
+import org.ngafid.flights.FatalFlightFileException;
+import org.ngafid.flights.MalformedFlightFileException;
+
+/** Computes the above-ground-level altitude series (AltAGL) from AltMSL and terrain data. */
+public class ProcessAltAGL extends ProcessStep {
+    private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(ALT_MSL, LATITUDE, LONGITUDE);
+    private static Set OUTPUT_COLUMNS =
Set.of(ALT_AGL); + + public ProcessAltAGL(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getOutputColumns() { return OUTPUT_COLUMNS; } + + public boolean airframeIsValid(String airframe) { return true; } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + DoubleTimeSeries altitudeMSLTS = doubleTS.get(ALT_MSL); + DoubleTimeSeries latitudeTS = doubleTS.get(LATITUDE); + DoubleTimeSeries longitudeTS = doubleTS.get(LONGITUDE); + + DoubleTimeSeries altitudeAGLTS = withConnection(connection -> new DoubleTimeSeries(connection, ALT_AGL, UNIT_FT_AGL)); + + for (int i = 0; i < altitudeMSLTS.size(); i++) { + double altitudeMSL = altitudeMSLTS.get(i); + double latitude = latitudeTS.get(i); + double longitude = longitudeTS.get(i); + + if (Double.isNaN(altitudeMSL) || Double.isNaN(latitude) || Double.isNaN(longitude)) { + altitudeAGLTS.add(Double.NaN); + continue; + } + + try { + int altitudeAGL = TerrainCache.getAltitudeFt(altitudeMSL, latitude, longitude); + altitudeAGLTS.add(altitudeAGL); + } catch (NoSuchFileException e) { + throw new MalformedFlightFileException("Could not calculate AGL for this flight as it had latitudes/longitudes outside of the United States."); + } + } + + doubleTS.put(ALT_AGL, altitudeAGLTS); + } + +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessDivergence.java b/src/main/java/org/ngafid/flights/process/ProcessDivergence.java new file mode 100644 index 000000000..7a2fb9715 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessDivergence.java @@ -0,0 +1,171 @@ +package org.ngafid.flights.process; + + +import java.util.Set; +import java.util.Map; +import static java.util.Map.entry; +import java.util.List; 
+import java.util.HashMap; +import java.util.HashSet; +import java.util.Collections; +import java.sql.Connection; +import java.sql.SQLException; + +import java.nio.file.NoSuchFileException; + +import org.ngafid.flights.Flight; +import org.ngafid.terrain.TerrainCache; +import org.ngafid.flights.DoubleTimeSeries; +import static org.ngafid.flights.Parameters.*; +import static org.ngafid.flights.Airframes.*; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + +public class ProcessDivergence extends ProcessStep { + + private record DivergenceConfig (List parameters, String output) {} + + private static final Set OUTPUT_COLUMNS = Set.of(TOTAL_FUEL); + private static final Set AIRFRAME_BLACKLIST = Set.of(AIRFRAME_SCAN_EAGLE, AIRFRAME_DJI); + + private static final List CESSNA_CONFIG = + List.of( + new DivergenceConfig(List.of("E1 CHT1", "E1 CHT2", "E1 CHT3", "E1 CHT4"), "E1 CHT Divergence"), + new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 EGT3", "E1 EGT4"), "E1 EGT Divergence") + ); + + private static final List PA_28_CONFIG = + List.of( + new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 EGT3", "E1 EGT4"), "E1 EGT Divergence") + ); + + private static final List PA_44_CONFIG = + List.of( + new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 EGT3", "E1 EGT4"), "E1 EGT Divergence"), + new DivergenceConfig(List.of("E2 EGT1", "E2 EGT2", "E2 EGT3", "E2 EGT4"), "E2 EGT Divergence") + ); + + private static final List SIX_CYLINDER_CIRRUS = + List.of( + new DivergenceConfig(List.of("E1 CHT1", "E1 CHT2", "E1 CHT3", "E1 CHT4", "E1 CHT5", "E1 CHT6"), "E1 CHT Divergence"), + new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 EGT3", "E1 EGT4", "E1 EGT5", "E1 EGT6"), "E1 EGT Divergence") + ); + + private static final List DIAMOND_CONFIG = + List.of( + new DivergenceConfig(List.of("E1 CHT1", "E1 CHT2", "E1 CHT3", "E1 CHT4"), "E1 CHT Divergence"), + new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 
EGT3", "E1 EGT4"), "E1 EGT Divergence") + ); + + private static final Map> CONFIG_MAP = + Map.ofEntries( + entry(AIRFRAME_CESSNA_172R, CESSNA_CONFIG), + entry(AIRFRAME_CESSNA_172S, CESSNA_CONFIG), + entry(AIRFRAME_PA_28_181, PA_28_CONFIG), + entry(AIRFRAME_PA_44_180, PA_44_CONFIG), + entry(AIRFRAME_CIRRUS_SR20, SIX_CYLINDER_CIRRUS), + entry(AIRFRAME_CESSNA_T182T, SIX_CYLINDER_CIRRUS), + entry(AIRFRAME_CESSNA_182T, SIX_CYLINDER_CIRRUS), + entry(AIRFRAME_BEECHCRAFT_A36_G36, SIX_CYLINDER_CIRRUS), + entry(AIRFRAME_CIRRUS_SR22, SIX_CYLINDER_CIRRUS), + entry(AIRFRAME_CESSNA_400, SIX_CYLINDER_CIRRUS), + entry(AIRFRAME_DIAMOND_DA_40_F, DIAMOND_CONFIG), + entry(AIRFRAME_DIAMOND_DA_40, DIAMOND_CONFIG), + entry(AIRFRAME_DIAMOND_DA40, DIAMOND_CONFIG) + ); + + public ProcessDivergence(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + private Set requiredDoubleColumns = null; + public Set getRequiredDoubleColumns() { + if (requiredDoubleColumns == null) { + + var configs = CONFIG_MAP.get(builder.meta.airframeName); + if (configs != null) { + + requiredDoubleColumns = new HashSet<>(32); + for (var config : configs) + requiredDoubleColumns.addAll(config.parameters); + + } else { + requiredDoubleColumns = Collections.emptySet(); + } + } + + return requiredDoubleColumns; + } + + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return getRequiredDoubleColumns(); } + + private Set outputColumns = null; + public Set getOutputColumns() { + if (outputColumns == null) { + + var configs = CONFIG_MAP.get(builder.meta.airframeName); + if (configs != null) { + + outputColumns = new HashSet<>(); + for (var config : configs) + outputColumns.add(config.output); + + } else { + outputColumns = Collections.emptySet(); + } + } + + return outputColumns; + } + + public boolean airframeIsValid(String airframe) { + for (String blacklisted : AIRFRAME_BLACKLIST) + if (airframe.contains(blacklisted)) + return 
false; + + return true; + } + private void calculateDivergence(List columnNames, String varianceColumnName) throws MalformedFlightFileException, SQLException { + DoubleTimeSeries columns[] = new DoubleTimeSeries[columnNames.size()]; + for (int i = 0; i < columns.length; i++) { + columns[i] = doubleTS.get(columnNames.get(i)); + + if (columns[i] == null) { + throw new MalformedFlightFileException("Cannot calculate '" + varianceColumnName + "' as parameter '" + columnNames.get(i) + "' was missing."); + } + } + + DoubleTimeSeries variance = new DoubleTimeSeries(varianceColumnName, UNIT_DEG_F, columns[0].size()); + + for (int i = 0; i < columns[0].size(); i++) { + double max = -Double.MAX_VALUE; + double min = Double.MAX_VALUE; + + for (int j = 0; j < columns.length; j++) { + double current = columns[j].get(i); + if (!Double.isNaN(current) && current > max) max = columns[j].get(i); + if (!Double.isNaN(current) && current < min) min = columns[j].get(i); + } + + double v = 0; + if (max != -Double.MAX_VALUE && min != Double.MAX_VALUE) { + v = max - min; + } + + variance.add(v); + } + + doubleTS.put(varianceColumnName, variance); + } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + List configs = CONFIG_MAP.get(builder.meta.airframeName); + + if (configs == null) + return; + + for (var config : configs) + calculateDivergence(config.parameters, config.output); + } +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessItinerary.java b/src/main/java/org/ngafid/flights/process/ProcessItinerary.java new file mode 100644 index 000000000..46f39e673 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessItinerary.java @@ -0,0 +1,107 @@ +package org.ngafid.flights.process; + +import java.util.Set; +import java.util.ArrayList; +import java.util.logging.Logger; +import java.util.Collections; +import java.sql.Connection; +import java.sql.SQLException; + +import java.nio.file.NoSuchFileException; + +import 
org.ngafid.flights.Flight; +import org.ngafid.flights.DoubleTimeSeries; +import org.ngafid.flights.StringTimeSeries; +import org.ngafid.flights.Itinerary; +import static org.ngafid.flights.Parameters.*; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + +public class ProcessItinerary extends ProcessStep { + private static final Logger LOG = Logger.getLogger(ProcessItinerary.class.getName()); + + private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(ALT_AGL, LATITUDE, LONGITUDE, AIRPORT_DISTANCE, RUNWAY_DISTANCE, GND_SPD, E1_RPM); + private static Set REQUIRED_STRING_COLUMNS = Set.of(NEAREST_AIRPORT, NEAREST_RUNWAY); + private static Set OUTPUT_COLUMNS = Set.of("_itinerary"); // This is a fake column; never actually created. + + public ProcessItinerary(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getRequiredStringColumns() { return REQUIRED_STRING_COLUMNS; } + public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getOutputColumns() { return OUTPUT_COLUMNS; } + + public boolean airframeIsValid(String airframe) { return true; } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + DoubleTimeSeries groundSpeed = doubleTS.get(GND_SPD); + DoubleTimeSeries rpm = doubleTS.get(E1_RPM); + + StringTimeSeries nearestAirportTS = stringTS.get(NEAREST_AIRPORT); + DoubleTimeSeries airportDistanceTS = doubleTS.get(AIRPORT_DISTANCE); + DoubleTimeSeries altitudeAGL = doubleTS.get(ALT_AGL); + + StringTimeSeries nearestRunwayTS = stringTS.get(NEAREST_RUNWAY); + DoubleTimeSeries runwayDistanceTS = doubleTS.get(RUNWAY_DISTANCE); + + ArrayList itinerary = new ArrayList<>(); + + Itinerary currentItinerary = null; + for (int i = 1; i < nearestAirportTS.size(); i++) { + String airport = nearestAirportTS.get(i); + String runway = 
nearestRunwayTS.get(i); + + if (airport != null && !airport.equals("")) { + //We've gotten close to an airport, so create a stop if there + //isn't one. If there is one, update the runway being visited. + //If the airport is a new airport (this shouldn't happen really), + //then create a new stop. + if (currentItinerary == null) { + currentItinerary = new Itinerary(airport, runway, i, altitudeAGL.get(i), airportDistanceTS.get(i), runwayDistanceTS.get(i), groundSpeed.get(i), rpm.get(i)); + } else if (airport.equals(currentItinerary.getAirport())) { + currentItinerary.update(runway, i, altitudeAGL.get(i), airportDistanceTS.get(i), runwayDistanceTS.get(i), groundSpeed.get(i), rpm.get(i)); + } else { + currentItinerary.selectBestRunway(); + if (currentItinerary.wasApproach()) itinerary.add(currentItinerary); + currentItinerary = new Itinerary(airport, runway, i, altitudeAGL.get(i), airportDistanceTS.get(i), runwayDistanceTS.get(i), groundSpeed.get(i), rpm.get(i)); + } + + } else { + //aiport is null, so if there was an airport being visited + //then we can determine it's runway and add it to the itinerary + if (currentItinerary != null) { + currentItinerary.selectBestRunway(); + if (currentItinerary.wasApproach()) itinerary.add(currentItinerary); + } + + //set the currentItinerary to null until we approach another + //airport + currentItinerary = null; + } + } + + //dont forget to add the last stop in the itinerary if it wasn't set to null + if (currentItinerary != null) { + currentItinerary.selectBestRunway(); + if (currentItinerary.wasApproach()) itinerary.add(currentItinerary); + } + + /////////////////////////////////////////////////////////////////////////////////////////////////////////////// + // setting and determining itinerary type + int itinerary_size = itinerary.size(); + for (int i = 0; i < itinerary_size; i++) { + itinerary.get(i).determineType(); + } + 
/////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + // LOG.info("Itinerary:"); + // for (int i = 0; i < itinerary.size(); i++) { + // LOG.info(itinerary.get(i).toString()); + // } + + builder.setItinerary(itinerary); + } + +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessLOCI.java b/src/main/java/org/ngafid/flights/process/ProcessLOCI.java new file mode 100644 index 000000000..29e23496e --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessLOCI.java @@ -0,0 +1,69 @@ +package org.ngafid.flights.process; + +import java.time.*; +import java.util.Set; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Collections; +import java.util.logging.Logger; +import java.time.format.DateTimeFormatter; + +import static org.ngafid.flights.Parameters.*; +import static org.ngafid.flights.Airframes.*; +import org.ngafid.common.*; +import org.ngafid.flights.StringTimeSeries; +import org.ngafid.flights.DoubleTimeSeries; +import org.ngafid.flights.MalformedFlightFileException; +import org.ngafid.flights.FatalFlightFileException; + +public class ProcessLOCI extends ProcessStep { + private static final Logger LOG = Logger.getLogger(ProcessLOCI.class.getName()); + + public static Set REQUIRED_DOUBLE_COLUMNS = Set.of(LOCI_DEPENDENCIES); + + public ProcessLOCI(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getOutputColumns() { return Collections.emptySet(); } + + public boolean airframeIsValid(String airframe) { return airframe.equals(AIRFRAME_CESSNA_172S); } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + DoubleTimeSeries hdg = doubleTS.get(HDG); + 
DoubleTimeSeries hdgLagged = hdg.lag(YAW_RATE_LAG); + DoubleTimeSeries roll = doubleTS.get(ROLL); + DoubleTimeSeries tas = doubleTS.get(TAS_FTMIN); + DoubleTimeSeries stallIndex = doubleTS.get(STALL_PROB); + + int length = roll.size(); + + DoubleTimeSeries coordIndex = DoubleTimeSeries.computed(PRO_SPIN_FORCE, "index", length, + (int index) -> { + double laggedHdg = hdgLagged.get(index); + double yawRate = Double.isNaN(laggedHdg) ? 0 : + 180 - Math.abs(180 - Math.abs(hdg.get(index) - laggedHdg) % 360); + + double yawComp = yawRate * COMP_CONV; + double vrComp = ((tas.get(index) / 60) * yawComp); + double rollComp = roll.get(index) * COMP_CONV; + double ctComp = Math.sin(rollComp) * 32.2; + double value = Math.min(((Math.abs(ctComp - vrComp) * 100) / PROSPIN_LIM), 100); + + return value; + } + ); + DoubleTimeSeries loci = DoubleTimeSeries.computed(LOCI, "index", length, + index -> { + double prob = stallIndex.get(index) * coordIndex.get(index); + return prob / 100; + } + ); + + doubleTS.put(PRO_SPIN_FORCE, coordIndex); + doubleTS.put(LOCI, loci); + } +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessLaggedAltMSL.java b/src/main/java/org/ngafid/flights/process/ProcessLaggedAltMSL.java new file mode 100644 index 000000000..d26d711cb --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessLaggedAltMSL.java @@ -0,0 +1,52 @@ +package org.ngafid.flights.process; + +import java.util.Set; +import java.util.Collections; +import java.sql.Connection; +import java.sql.SQLException; + +import java.nio.file.NoSuchFileException; + +import org.ngafid.flights.Flight; +import org.ngafid.flights.DoubleTimeSeries; +import static org.ngafid.flights.Parameters.*; +import static org.ngafid.flights.Airframes.*; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + +public class ProcessLaggedAltMSL extends ProcessStep { + private static final Set REQUIRED_DOUBLE_COLUMNS = Set.of(ALT_MSL); + private static 
final Set OUTPUT_COLUMNS = Set.of(ALT_MSL_LAG_DIFF); + private static final Set AIRFRAME_BLACKLIST = Set.of(AIRFRAME_SCAN_EAGLE, AIRFRAME_DJI); + private static final int LAG = 10; + + public ProcessLaggedAltMSL(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getOutputColumns() { return OUTPUT_COLUMNS; } + + public boolean airframeIsValid(String airframe) { + for (String blacklisted : AIRFRAME_BLACKLIST) + if (airframe.contains(blacklisted)) + return false; + + return true; + } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + DoubleTimeSeries altMSL = doubleTS.get(ALT_MSL); + DoubleTimeSeries laggedAltMSL = new DoubleTimeSeries(ALT_MSL_LAG_DIFF, UNIT_FT_MSL, altMSL.size()); + + for (int i = 0; i < LAG; i++) + laggedAltMSL.add(0.0); + for (int i = LAG; i < altMSL.size(); i++) + laggedAltMSL.add(altMSL.get(i) - altMSL.get(i - LAG)); + + doubleTS.put(ALT_MSL_LAG_DIFF, laggedAltMSL); + } + +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessStallIndex.java b/src/main/java/org/ngafid/flights/process/ProcessStallIndex.java new file mode 100644 index 000000000..dc62d42f8 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessStallIndex.java @@ -0,0 +1,101 @@ +package org.ngafid.flights.process; + +import java.time.*; +import java.util.Set; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Collections; +import java.util.logging.Logger; +import java.time.format.DateTimeFormatter; + +import static org.ngafid.flights.Parameters.*; +import static org.ngafid.flights.Airframes.*; +import org.ngafid.common.*; +import org.ngafid.flights.StringTimeSeries; +import org.ngafid.flights.DoubleTimeSeries; +import 
org.ngafid.flights.MalformedFlightFileException; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.calculations.VSPDRegression; + +public class ProcessStallIndex extends ProcessStep { + private static final Logger LOG = Logger.getLogger(ProcessStallIndex.class.getName()); + + public static Set REQUIRED_DOUBLE_COLUMNS = Set.of(STALL_DEPENDENCIES); + public static Set OUTPUT_COLUMNS = Set.of(STALL_PROB, TAS_FTMIN, VSPD_CALCULATED, CAS); + + public ProcessStallIndex(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getOutputColumns() { return OUTPUT_COLUMNS; } + + public boolean airframeIsValid(String airframe) { return true; } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + DoubleTimeSeries ias = doubleTS.get(IAS); + int length = ias.size(); + + if (builder.meta.airframeName.equals(AIRFRAME_CESSNA_172S)) { + DoubleTimeSeries cas = DoubleTimeSeries.computed(CAS, "knots", length, + index -> { + double iasValue = ias.get(index); + + if (iasValue < 70.d) + iasValue = (0.7d * iasValue) + 20.667; + + return iasValue; + } + ); + cas.setTemporary(true); + doubleTS.put(CAS, cas); + } + + DoubleTimeSeries vspdCalculated = + DoubleTimeSeries.computed(VSPD_CALCULATED, "ft/min", length, new VSPDRegression(doubleTS.get(ALT_B))); + vspdCalculated.setTemporary(true); + doubleTS.put(VSPD_CALCULATED, vspdCalculated); + + DoubleTimeSeries baroA = doubleTS.get(BARO_A); + DoubleTimeSeries oat = doubleTS.get(OAT); + DoubleTimeSeries densityRatio = DoubleTimeSeries.computed(DENSITY_RATIO, "ratio", length, + index -> { + double pressRatio = baroA.get(index) / STD_PRESS_INHG; + double tempRatio = (273 + oat.get(index)) / 288; + + return 
pressRatio / tempRatio; + } + ); + + DoubleTimeSeries airspeed = + builder.meta.airframeName.equals(AIRFRAME_CESSNA_172S) ? doubleTS.get(CAS) : doubleTS.get(IAS); + DoubleTimeSeries tasFtMin = DoubleTimeSeries.computed(TAS_FTMIN, "ft/min", length, + index -> { + return (airspeed.get(index) * Math.pow(densityRatio.get(index), -0.5)) * ((double) 6076 / 60); + }); + tasFtMin.setTemporary(true); + + DoubleTimeSeries pitch = doubleTS.get(PITCH); + DoubleTimeSeries aoaSimple = DoubleTimeSeries.computed(AOA_SIMPLE, "degrees", length, + index -> { + + double vspdGeo = vspdCalculated.get(index) * Math.pow(densityRatio.get(index), -0.5); + double fltPthAngle = Math.asin(vspdGeo / tasFtMin.get(index)); + fltPthAngle = fltPthAngle * (180 / Math.PI); + double value = pitch.get(index) - fltPthAngle; + + return value; + } + ); + + DoubleTimeSeries stallIndex = DoubleTimeSeries.computed(STALL_PROB, "index", length, + index -> { + return (Math.min(((Math.abs(aoaSimple.get(index) / AOA_CRIT)) * 100), 100)) / 100; + } + ); + doubleTS.put(STALL_PROB, stallIndex); + doubleTS.put(TAS_FTMIN, tasFtMin); + } +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java b/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java new file mode 100644 index 000000000..3792ddd5b --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java @@ -0,0 +1,112 @@ +package org.ngafid.flights.process; + +import java.time.*; +import java.util.Set; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Collections; +import java.util.logging.Logger; +import java.time.format.DateTimeFormatter; + +import static org.ngafid.flights.Parameters.*; +import org.ngafid.common.*; +import org.ngafid.flights.StringTimeSeries; +import org.ngafid.flights.MalformedFlightFileException; +import org.ngafid.flights.FatalFlightFileException; + +public class ProcessStartEndTime extends ProcessStep { + private static final 
Logger LOG = Logger.getLogger(ProcessStartEndTime.class.getName()); + + public static Set REQUIRED_STRING_COLUMNS = Set.of(LCL_DATE, LCL_TIME, UTC_OFFSET); + + public ProcessStartEndTime(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + public Set getRequiredDoubleColumns() { return Collections.emptySet(); } + public Set getRequiredStringColumns() { return REQUIRED_STRING_COLUMNS; } + public Set getRequiredColumns() { return REQUIRED_STRING_COLUMNS; } + public Set getOutputColumns() { return Collections.emptySet(); } + + public boolean airframeIsValid(String airframe) { return true; } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + StringTimeSeries dates = builder.stringTimeSeries.get(LCL_DATE); + StringTimeSeries times = builder.stringTimeSeries.get(LCL_TIME); + StringTimeSeries offsets = builder.stringTimeSeries.get(UTC_OFFSET); + + int dateSize = dates.size(); + int timeSize = times.size(); + int offsetSize = offsets.size(); + + LOG.info("\tdate size: " + dateSize + ", time size: " + timeSize + ", offset size: " + offsetSize); + + //get the minimum sized length of each of these series, they should all be the same but + //if the last column was cut off it might not be the case + int minSize = dateSize; + if (minSize < timeSize) minSize = timeSize; + if (minSize < offsetSize) minSize = offsetSize; + + //find the first non-null time entry + int start = 0; + while (start < minSize && + (dates.get(start) == null || dates.get(start).equals("") || + times.get(start) == null || times.get(start).equals("") || + offsets.get(start) == null || offsets.get(start).equals("") || offsets.get(start).equals("+19:00"))) { + + start++; + } + + LOG.info("\tfirst date time and offset not null at index: " + start); + + if (start >= minSize) + throw new MalformedFlightFileException("Date, Time or Offset columns were all null! 
Cannot set start/end times."); + + //find the last full date time offset entry row + int end = minSize - 1; + while (end >= 0 && + (dates.get(end) == null || dates.get(end).equals("") || + times.get(end) == null || times.get(end).equals("") || + offsets.get(end) == null || offsets.get(end).equals(""))) { + + end--; + } + + String startDate = dates.get(start); + String startTime = times.get(start); + String startOffset = offsets.get(start); + + String endDate = dates.get(end); + String endTime = times.get(end); + String endOffset = offsets.get(end); + + LOG.info("\t\t\tfirst not null " + start + " -- " + startDate + " " + startTime + " " + startOffset); + LOG.info("\t\t\tlast not null " + endDate + " " + endTime + " " + endOffset); + + OffsetDateTime startODT = null; + try { + startODT = TimeUtils.convertToOffset(startDate, startTime, startOffset, "+00:00"); + } catch (DateTimeException dte) { + LOG.severe("Corrupt start time data in flight file: " + dte.getMessage()); + throw new MalformedFlightFileException("Corrupt start time data in flight file: '" + dte.getMessage() + "'"); + } + + OffsetDateTime endODT = null; + try { + endODT = TimeUtils.convertToOffset(endDate, endTime, endOffset, "+00:00"); + } catch (DateTimeException dte) { + LOG.severe("Corrupt end time data in flight file: " + dte.getMessage()); + throw new MalformedFlightFileException("Corrupt end time data in flight file: '" + dte.getMessage() + "'"); + } + + if (startODT.isAfter(endODT)) { + builder.setStartDateTime(null); + builder.setEndDateTime(null); + throw new MalformedFlightFileException("Corrupt time data in flight file, start time was after the end time"); + } + + builder.setStartDateTime(startODT.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); + builder.setEndDateTime(endODT.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); + } +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessStep.java b/src/main/java/org/ngafid/flights/process/ProcessStep.java new 
file mode 100644 index 000000000..ce67e19e6 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessStep.java @@ -0,0 +1,116 @@ +package org.ngafid.flights.process; + +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.sql.Connection; +import java.sql.SQLException; + +import org.ngafid.flights.DoubleTimeSeries; +import org.ngafid.flights.StringTimeSeries; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + + +public abstract class ProcessStep { + + public interface Factory { + ProcessStep create(Connection connection, FlightBuilder builder); + } + + public static Factory required(Factory factory) { + return (c, b) -> { + var step = factory.create(c, b); + step.required = true; + return step; + }; + } + + protected final FlightBuilder builder; + // References to the TS maps in builder + protected final ConcurrentHashMap doubleTS; + protected final ConcurrentHashMap stringTS; + + + // Connection is not accessible by subclasses directly by design, instead use the `withConnection` function. + // This grabs the lock on the object so only one thread is using the connection at any given point in time. + private Connection connection; + + public ProcessStep(Connection connection, FlightBuilder builder) { + this.connection = connection; + this.builder = builder; + doubleTS = builder.doubleTimeSeries; + stringTS = builder.stringTimeSeries; + } + + // These should probably return references to static immutable Sets. 
+ public abstract Set getRequiredDoubleColumns(); + public abstract Set getRequiredStringColumns(); + public abstract Set getRequiredColumns(); + public abstract Set getOutputColumns(); + + private boolean required = false; + + // Whether or not this ProcessStep is required / mandatory + // If a required step cannot be computed, a MalformedFlightFileException will be raised + public final boolean isRequired() { return required; } + + // Whether or not this ProcessStep can be performed for a given airframe + public abstract boolean airframeIsValid(String airframe); + + public final boolean applicable() { + return + airframeIsValid(builder.meta.airframeName) + && builder + .stringTimeSeries + .keySet() + .containsAll(getRequiredStringColumns()) + && builder + .doubleTimeSeries + .keySet() + .containsAll(getRequiredDoubleColumns()); + } + + public final String explainApplicability() { + if (applicable()) { + return "is applicable - all required columns are present and the airframeName is valid)"; + } + + String className = this.getClass().getSimpleName(); + StringBuilder sb = new StringBuilder("Step '" + className + "' cannot be applied for the following reason(s):\n"); + + if (!airframeIsValid(builder.meta.airframeName)) { + sb.append(" - airframeName '" + builder.meta.airframeName + "' is invalid (" + + className + "::airframeIsValid returned false for airframeName '" + className + "')\n"); + } + + for (String key : getRequiredStringColumns()) { + if (!builder.stringTimeSeries.containsKey(key)) + sb.append(" - The required string column '" + key + "' is not available.\n"); + } + + for (String key : getRequiredDoubleColumns()) { + if (!builder.doubleTimeSeries.containsKey(key)) + sb.append(" - The required double column '" + key + "' is not available.\n"); + } + + return sb.toString(); + } + + protected interface ConnectionFunctor { + public T compute(Connection connection) throws SQLException; + } + + // This interface must be used to access the connection so that 
we can guarantee that only one + // thread is using it at any given time. + final public T withConnection(ConnectionFunctor functor) throws SQLException { + T value = null; + + synchronized (connection) { + value = functor.compute(connection); + } + + return value; + } + + public abstract void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException; +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessTotalFuel.java b/src/main/java/org/ngafid/flights/process/ProcessTotalFuel.java new file mode 100644 index 000000000..24ff74872 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessTotalFuel.java @@ -0,0 +1,55 @@ +package org.ngafid.flights.process; + +import java.util.Set; +import java.util.logging.Logger; +import java.util.Collections; +import java.sql.Connection; +import java.sql.SQLException; + +import java.nio.file.NoSuchFileException; + +import org.ngafid.flights.Flight; +import org.ngafid.flights.DoubleTimeSeries; +import static org.ngafid.flights.Parameters.*; +import static org.ngafid.flights.Airframes.*; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + +public class ProcessTotalFuel extends ProcessStep { + private static final Logger LOG = Logger.getLogger(ProcessTotalFuel.class.getName()); + + private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(FUEL_QTY_LEFT, FUEL_QTY_RIGHT); + private static Set OUTPUT_COLUMNS = Set.of(TOTAL_FUEL); + private static Set AIRFRAME_BLACKLIST = Set.of(AIRFRAME_SCAN_EAGLE, AIRFRAME_DJI); + + public ProcessTotalFuel(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getOutputColumns() { return OUTPUT_COLUMNS; } + + public boolean 
airframeIsValid(String airframe) { + return !AIRFRAME_BLACKLIST.contains(airframe); + } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + LOG.info("Computing total fuel..."); + double[] totalFuel = null; + + for (var columnName : REQUIRED_DOUBLE_COLUMNS) { + DoubleTimeSeries fuelTS = doubleTS.get(columnName); + if (totalFuel == null) + totalFuel = new double[fuelTS.size()]; + + for (int i = 0; i < fuelTS.size(); i++) + totalFuel[i] += fuelTS.get(i); + } + + DoubleTimeSeries totalFuelTS = new DoubleTimeSeries(TOTAL_FUEL, UNIT_GALLONS, totalFuel); + doubleTS.put(TOTAL_FUEL, totalFuelTS); + } + +} diff --git a/src/main/java/org/ngafid/routes/PostUserPreferences.java b/src/main/java/org/ngafid/routes/PostUserPreferences.java index 053f912c6..8ae84ae3d 100644 --- a/src/main/java/org/ngafid/routes/PostUserPreferences.java +++ b/src/main/java/org/ngafid/routes/PostUserPreferences.java @@ -24,7 +24,7 @@ import org.ngafid.accounts.UserPreferences; import org.ngafid.flights.DoubleTimeSeries; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; public class PostUserPreferences implements Route { private static final Logger LOG = Logger.getLogger(PostUserPreferences.class.getName()); diff --git a/src/main/java/org/ngafid/routes/PostUserPreferencesMetric.java b/src/main/java/org/ngafid/routes/PostUserPreferencesMetric.java index 063c64a10..237bf2afe 100644 --- a/src/main/java/org/ngafid/routes/PostUserPreferencesMetric.java +++ b/src/main/java/org/ngafid/routes/PostUserPreferencesMetric.java @@ -25,7 +25,7 @@ import org.ngafid.accounts.UserPreferences; import org.ngafid.flights.DoubleTimeSeries; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; public class PostUserPreferencesMetric implements Route { private static final Logger LOG = Logger.getLogger(PostUserPreferencesMetric.class.getName()); diff --git 
a/src/main/java/org/ngafid/terrain/SRTMTile.java b/src/main/java/org/ngafid/terrain/SRTMTile.java index 48d1e55da..c330de94c 100644 --- a/src/main/java/org/ngafid/terrain/SRTMTile.java +++ b/src/main/java/org/ngafid/terrain/SRTMTile.java @@ -7,8 +7,11 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.NoSuchFileException; +import java.util.logging.*; public class SRTMTile { + private static final Logger LOG = Logger.getLogger(SRTMTile.class.getName()); + public static final int srtmTileSize = 1201; public static final double srtmGridSize = 1.0/(srtmTileSize - 1.0); @@ -29,8 +32,8 @@ public SRTMTile(int latitudeS, int longitudeW) throws NoSuchFileException { directory = TerrainCache.getDirectoryFromLatLon(latitudeS, longitudeW); filename = TerrainCache.getFilenameFromLatLon(latitudeS, longitudeW); - System.out.println("loading terrain from: '" + directory + "/" + filename + "'"); - System.out.println("lat and lon for SW corner -- latitude_s: " + latitudeS + ", longitude_w: " + longitudeW); + LOG.info("loading terrain from: '" + directory + "/" + filename + "'"); + LOG.info("lat and lon for SW corner -- latitude_s: " + latitudeS + ", longitude_w: " + longitudeW); Path path = Paths.get(TerrainCache.TERRAIN_DIRECTORY + "/" + directory + "/" + filename); //Path path = Paths.get(TerrainCache.getTerrainDirectory() + "/" + filename); @@ -79,11 +82,11 @@ public SRTMTile(int latitudeS, int longitudeW) throws NoSuchFileException { } } - System.out.println("read " + bytes.length + " bytes."); - System.out.println("final offset: " + offset); + LOG.info("read " + bytes.length + " bytes."); + LOG.info("final offset: " + offset); - System.out.println("max: " + max); - System.out.println("min: " + min); + LOG.info("max: " + max); + LOG.info("min: " + min); } public double getAltitudeFt(double latitude, double longitude) { diff --git a/src/main/java/org/ngafid/terrain/TerrainCache.java b/src/main/java/org/ngafid/terrain/TerrainCache.java index 
10ad74721..2a648b2bf 100644 --- a/src/main/java/org/ngafid/terrain/TerrainCache.java +++ b/src/main/java/org/ngafid/terrain/TerrainCache.java @@ -49,7 +49,7 @@ public static String getDirectoryFromLatLon(int latitude, int longitude) { ilongitude /= 6; ilongitude += 1; - System.out.println("iLatitude: " + ilatitude + ", iLongitude: " + ilongitude); + // System.out.println("iLatitude: " + ilatitude + ", iLongitude: " + ilongitude); //note that ascii 65 == 'A' directory += Character.toString((char)(65 + ilatitude)) + ilongitude; @@ -92,7 +92,7 @@ public static int getAltitudeFt(double msl, double latitude, double longitude) t SRTMTile tile = tiles[latIndex][lonIndex]; if (tile == null) { - System.out.println("tiles[" + latIndex + "][" + lonIndex + "] not initialized, loading!"); + // System.out.println("tiles[" + latIndex + "][" + lonIndex + "] not initialized, loading!"); tile = new SRTMTile(90 - latIndex, lonIndex - 180); tiles[latIndex][lonIndex] = tile; }