From 82496f51c6cdad8cd198314e663d23c216b0ee21 Mon Sep 17 00:00:00 2001 From: Joshua Karns Date: Mon, 3 Apr 2023 01:30:49 -0400 Subject: [PATCH 01/12] Initial implementation of new flight processing system --- .../org/ngafid/FindLowEndingFuelEvents.java | 2 +- src/main/java/org/ngafid/FindSpinEvents.java | 2 +- .../org/ngafid/accounts/UserPreferences.java | 2 +- .../org/ngafid/flights/DoubleTimeSeries.java | 2 +- src/main/java/org/ngafid/flights/Flight.java | 63 ++++- src/main/java/org/ngafid/flights/NIFA.java | 2 +- .../{calculations => }/Parameters.java | 14 +- .../calculations/HeadingTrackDiff.java | 2 +- .../flights/calculations/TurnToFinal.java | 2 +- .../flights/calculations/VSPDRegression.java | 2 +- .../flights/process/DependencyGraph.java | 239 ++++++++++++++++++ .../flights/process/FlightProcessor.java | 62 +++++ .../process/ProcessAirportProximity.java | 34 +++ .../ngafid/flights/process/ProcessAltAGL.java | 105 ++++++++ .../ngafid/flights/process/ProcessStep.java | 76 ++++++ .../ngafid/routes/PostUserPreferences.java | 2 +- .../routes/PostUserPreferencesMetric.java | 2 +- 17 files changed, 597 insertions(+), 16 deletions(-) rename src/main/java/org/ngafid/flights/{calculations => }/Parameters.java (92%) create mode 100644 src/main/java/org/ngafid/flights/process/DependencyGraph.java create mode 100644 src/main/java/org/ngafid/flights/process/FlightProcessor.java create mode 100644 src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java create mode 100644 src/main/java/org/ngafid/flights/process/ProcessAltAGL.java create mode 100644 src/main/java/org/ngafid/flights/process/ProcessStep.java diff --git a/src/main/java/org/ngafid/FindLowEndingFuelEvents.java b/src/main/java/org/ngafid/FindLowEndingFuelEvents.java index 660119c9e..039898454 100644 --- a/src/main/java/org/ngafid/FindLowEndingFuelEvents.java +++ b/src/main/java/org/ngafid/FindLowEndingFuelEvents.java @@ -16,7 +16,7 @@ import java.util.logging.Logger; import static org.ngafid.events.CustomEvent.*; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; public class FindLowEndingFuelEvents { public static final Logger LOG = Logger.getLogger(FindLowEndingFuelEvents.class.getName()); diff --git a/src/main/java/org/ngafid/FindSpinEvents.java b/src/main/java/org/ngafid/FindSpinEvents.java index 1cd3f663c..174e8dfb8 100644 --- a/src/main/java/org/ngafid/FindSpinEvents.java +++ b/src/main/java/org/ngafid/FindSpinEvents.java @@ -15,7 +15,7 @@ import java.util.logging.Logger; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; import static org.ngafid.events.CustomEvent.*; /** diff --git a/src/main/java/org/ngafid/accounts/UserPreferences.java b/src/main/java/org/ngafid/accounts/UserPreferences.java index f3083405a..f9330d4eb 100644 --- a/src/main/java/org/ngafid/accounts/UserPreferences.java +++ b/src/main/java/org/ngafid/accounts/UserPreferences.java @@ -2,7 +2,7 @@ import java.util.List; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; public class UserPreferences { private int userId, decimalPrecision; diff --git a/src/main/java/org/ngafid/flights/DoubleTimeSeries.java b/src/main/java/org/ngafid/flights/DoubleTimeSeries.java index 21349dcc6..43dbaf7da 100644 --- a/src/main/java/org/ngafid/flights/DoubleTimeSeries.java +++ b/src/main/java/org/ngafid/flights/DoubleTimeSeries.java @@ -20,7 +20,7 @@ import org.ngafid.common.Compression; import 
org.ngafid.filters.Pair; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; import javax.sql.rowset.serial.SerialBlob; diff --git a/src/main/java/org/ngafid/flights/Flight.java b/src/main/java/org/ngafid/flights/Flight.java index 5c9e7ca53..84c67a047 100644 --- a/src/main/java/org/ngafid/flights/Flight.java +++ b/src/main/java/org/ngafid/flights/Flight.java @@ -4,6 +4,8 @@ import java.sql.*; import java.text.DateFormat; import java.time.*; +import java.lang.Class; +import java.lang.reflect.*; import static java.time.temporal.ChronoUnit.SECONDS; import java.util.Iterator; import java.text.SimpleDateFormat; @@ -41,6 +43,7 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.HashSet; import java.util.Optional; @@ -61,8 +64,9 @@ import org.ngafid.filters.Filter; import org.ngafid.flights.calculations.*; +import org.ngafid.flights.process.*; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; /** * This class represents a Flight in the NGAFID. It also contains static methods for database interaction @@ -1480,10 +1484,27 @@ public String getEndDateTime() { return endDateTime; } + public void addException(MalformedFlightFileException me) { + exceptions.add(me); + } + + public void addHeader(String column, String dataType) { + headers.add(column); + dataTypes.add(dataType); + } + public void addDoubleTimeSeries(String name, DoubleTimeSeries doubleTimeSeries) { this.doubleTimeSeries.put(name, doubleTimeSeries); } + public Map getDoubleTimeSeriesMap() { + return doubleTimeSeries; + } + + public Map getStringTimeSeriesMap() { + return stringTimeSeries; + } + public DoubleTimeSeries getDoubleTimeSeries(String name) throws SQLException { if (this.doubleTimeSeries.containsKey(name)) { return this.doubleTimeSeries.get(name); @@ -2071,7 +2092,32 @@ private void process(Connection connection, InputStream inputStream) throws IOEx process(connection); } - private void process(Connection connection) throws IOException, FatalFlightFileException, SQLException { + List defaultPasses = List.of(); + + private ArrayList gatherProcessSteps(Connection connection) { + ArrayList steps = new ArrayList<>(); + + steps.add(new ProcessAltAGL(connection, this)); + steps.add(new ProcessAirportProximity(connection, this)); + + return steps; + } + + final private void newProcess(Connection connection) throws FatalFlightFileException { + ArrayList steps = gatherProcessSteps(connection); // gatherProcessSteps will be an abstract method + + // These fields will be written to directly by the ProcessSteps. + doubleTimeSeries = new ConcurrentHashMap<>(doubleTimeSeries); + stringTimeSeries = new ConcurrentHashMap<>(stringTimeSeries); + + ArrayList fatalExceptions = new DependencyGraph(this, steps).compute(); + + // Probably not worth the time to convert back to serial versions + // doubleTimeSeries = new HashMap<>(doubleTimeSeries); + // stringTimeSeries = new HashMap<>(stringTimeSeries); + } + + final private void process(Connection connection) throws IOException, FatalFlightFileException, SQLException { //TODO: these may be different for different airframes/flight //data recorders. depending on the airframe/flight data recorder //we should specify these. 
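The hunk above replaces the old serial process() path: newProcess() swaps the flight's time-series maps for ConcurrentHashMaps so the steps can write to them from worker threads, then hands the gathered ProcessSteps to a DependencyGraph (added later in this patch), which only runs a step once the steps producing its required columns have finished. As a stand-alone illustration of that ordering rule only, here is a minimal sketch; the Step record and order() method are hypothetical stand-ins rather than NGAFID classes, and the real DependencyGraph runs steps in parallel with ForkJoinTasks instead of sorting them up front.

import java.util.*;

class DependencyOrderSketch {
    // Stand-in for ProcessStep: only the declared column sets matter for ordering.
    record Step(String name, Set<String> requiredColumns, Set<String> outputColumns) {}

    // Produce an order in which every required column is available before its consumer runs.
    static List<Step> order(List<Step> steps) {
        Map<String, Step> producer = new HashMap<>();
        for (Step s : steps)
            for (String column : s.outputColumns())
                producer.put(column, s);

        List<Step> ordered = new ArrayList<>();
        Set<Step> done = new HashSet<>();
        while (done.size() < steps.size()) {
            boolean progressed = false;
            for (Step s : steps) {
                if (done.contains(s)) continue;
                // A step is ready when each required column either comes straight from the
                // file (no producing step) or its producing step has already run.
                boolean ready = s.requiredColumns().stream()
                        .allMatch(c -> !producer.containsKey(c) || done.contains(producer.get(c)));
                if (ready) {
                    ordered.add(s);
                    done.add(s);
                    progressed = true;
                }
            }
            if (!progressed)
                throw new IllegalStateException("cyclic or unsatisfiable step dependencies");
        }
        return ordered;
    }

    public static void main(String[] args) {
        Step altAGL = new Step("ProcessAltAGL",
                Set.of("AltMSL", "Latitude", "Longitude"), Set.of("AltAGL"));
        Step proximity = new Step("ProcessAirportProximity",
                Set.of("Latitude", "Longitude", "AltAGL"),
                Set.of("NearestAirport", "AirportDistance", "NearestRunway", "RunwayDistance"));

        // AirportProximity requires AltAGL, so it always runs after AltAGL.
        for (Step s : order(List.of(proximity, altAGL)))
            System.out.println(s.name());
    }
}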
@@ -2111,8 +2157,6 @@ private void process(Connection connection) throws IOException, FatalFlightFileE //this is all we can do with the scan eagle data until we //get better lat/lon info hasCoords = true; - } else if (airframeName.equals("")) { - } else { calculateStartEndTime("Lcl Date", "Lcl Time", "UTCOfst"); } @@ -2120,12 +2164,15 @@ private void process(Connection connection) throws IOException, FatalFlightFileE exceptions.add(e); } + // DONE try { calculateAGL(connection, "AltAGL", "AltMSL", "Latitude", "Longitude"); } catch (MalformedFlightFileException e) { exceptions.add(e); } + // END + try { calculateAirportProximity(connection, "Latitude", "Longitude", "AltAGL"); } catch (MalformedFlightFileException e) { @@ -3480,4 +3527,12 @@ public void setAirframeType(String type) { public void setAirframeTypeID(Integer typeID) { this.airframeTypeId = typeID; } + + public void setHasCoords(boolean hasCoords) { + this.hasCoords = hasCoords; + } + + public void setHasAGL(boolean hasAGL) { + this.hasAGL = hasAGL; + } } diff --git a/src/main/java/org/ngafid/flights/NIFA.java b/src/main/java/org/ngafid/flights/NIFA.java index c37c9a57d..a7091090e 100644 --- a/src/main/java/org/ngafid/flights/NIFA.java +++ b/src/main/java/org/ngafid/flights/NIFA.java @@ -18,7 +18,7 @@ import java.util.logging.Logger; import java.util.stream.Stream; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; public class NIFA implements Serializable { diff --git a/src/main/java/org/ngafid/flights/calculations/Parameters.java b/src/main/java/org/ngafid/flights/Parameters.java similarity index 92% rename from src/main/java/org/ngafid/flights/calculations/Parameters.java rename to src/main/java/org/ngafid/flights/Parameters.java index 1a6f3b417..c65571131 100644 --- a/src/main/java/org/ngafid/flights/calculations/Parameters.java +++ b/src/main/java/org/ngafid/flights/Parameters.java @@ -8,7 +8,7 @@ * @author Josh Karns * @author Aidan LaBella */ -package org.ngafid.flights.calculations; +package org.ngafid.flights; public interface Parameters { /** @@ -24,7 +24,7 @@ public interface Parameters { public static final String PARAM_JSON_LONGITUDE = "lon"; public static final double STD_PRESS_INHG = 29.92; - public static final double COMP_CONV = (double) (Math.PI / 180); + public static final double COMP_CONV = Math.PI / 180.0; /** * Critical Values @@ -88,6 +88,16 @@ public interface Parameters { public static final String LOSS_OF_CONTROL_PROBABILITY = "PLOCI"; public static final String HDG_TRK_DIFF = "HDG TRK Diff"; + public static final String NEAREST_RUNWAY = "NearestRunway"; + public static final String RUNWAY_DISTANCE = "RunwayDistance"; + public static final String NEAREST_AIRPORT = "NearestAirport"; + public static final String AIRPORT_DISTANCE = "AirportDistance"; + + /** + * Units + **/ + public static final String UNIT_FT_AGL = "ft agl"; + /** * {@link Airframes} id's */ diff --git a/src/main/java/org/ngafid/flights/calculations/HeadingTrackDiff.java b/src/main/java/org/ngafid/flights/calculations/HeadingTrackDiff.java index e403b9f0e..a8c25efeb 100644 --- a/src/main/java/org/ngafid/flights/calculations/HeadingTrackDiff.java +++ b/src/main/java/org/ngafid/flights/calculations/HeadingTrackDiff.java @@ -8,7 +8,7 @@ import java.util.List; import java.util.logging.Logger; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; import org.apache.commons.cli.*; import org.ngafid.Database; diff --git 
a/src/main/java/org/ngafid/flights/calculations/TurnToFinal.java b/src/main/java/org/ngafid/flights/calculations/TurnToFinal.java index 4bce0456f..77140936d 100644 --- a/src/main/java/org/ngafid/flights/calculations/TurnToFinal.java +++ b/src/main/java/org/ngafid/flights/calculations/TurnToFinal.java @@ -18,7 +18,7 @@ import org.ngafid.flights.*; -import static org.ngafid.flights.calculations.Parameters.*; //eliminates the need to use Parameters. +import static org.ngafid.flights.Parameters.*; //eliminates the need to use Parameters. public class TurnToFinal implements Serializable { // NGAFIDTTF0000L diff --git a/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java b/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java index 11cd1672f..0677ae4a0 100644 --- a/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java +++ b/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java @@ -5,7 +5,7 @@ import java.sql.Connection; import java.sql.SQLException; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; /** * This class is an instance of a {@link Calculation} that gets a derived VSI using linear regression diff --git a/src/main/java/org/ngafid/flights/process/DependencyGraph.java b/src/main/java/org/ngafid/flights/process/DependencyGraph.java new file mode 100644 index 000000000..0926c6b8c --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/DependencyGraph.java @@ -0,0 +1,239 @@ +package org.ngafid.flights.process; + +import java.util.Set; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.Executor; +import java.util.concurrent.Executors; +import java.util.concurrent.ForkJoinPool; +import java.util.concurrent.ForkJoinTask; +import java.util.concurrent.RecursiveTask; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.locks.Condition; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; +import java.util.HashSet; +import java.util.Queue; +import java.util.HashMap; +import java.util.ArrayList; +import java.util.Collections; +import java.util.logging.Logger; + +import org.ngafid.flights.Flight; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + +import org.ngafid.flights.process.ProcessStep; + +/** + * A dependency graph which represents the dependencies of ProcessSteps on one another. + **/ +public class DependencyGraph { + private static final Logger LOG = Logger.getLogger(DependencyGraph.class.getName()); + + private static final int PARALLELISM = Runtime.getRuntime().availableProcessors(); + + class DependencyNode { + final ProcessStep step; + + // Used for cycle detection. 
+ boolean mark = false; + AtomicBoolean enabled = new AtomicBoolean(true); + + final HashSet requiredBy = new HashSet<>(32); + final HashSet requires = new HashSet<>(32); + + ArrayList exceptions = new ArrayList<>(); + + public DependencyNode(ProcessStep step) { + this.step = step; + } + + void disable() { + enabled.set(false); + if (step.isRequired()) { + LOG.severe("Required step " + step.toString() + " has been disabled."); + exceptions.add(new FatalFlightFileException("Required step " + step.toString() + " has been disabled.")); + } + for (var child : requiredBy) + child.disable(); + } + + void compute() { + try { + + if (step.applicable()) + step.compute(); + else + disable(); + + } catch (SQLException | MalformedFlightFileException | FatalFlightFileException e) { + LOG.warning("Encountered exception when calculating process step " + step.toString() + ": " + e.toString()); + exceptions.add(e); + disable(); + } + } + } + + class DependencyNodeTask extends RecursiveTask { + private static final long serialVersionUID = 0; + + // This is used to avoid creating duplicate tasks. + // This isn't a problem w/ a tree-like problem, but ours is a DAG. + final ConcurrentHashMap> taskMap; + final DependencyNode node; + + public DependencyNodeTask(DependencyNode node, ConcurrentHashMap> taskMap) { + this.taskMap = taskMap; + this.node = node; + } + + ForkJoinTask getTask(DependencyNode node) { + return taskMap.computeIfAbsent(node, x -> new DependencyNodeTask(x, taskMap).fork()); + } + + public Void compute() { + for (var requiredNode : node.requires) { + getTask(requiredNode).join(); + } + + if (node.enabled.get()) + node.compute(); + else {} // TODO: Add some sort of exception here. We don't want to just silently + // let the processing pipeline fail somewhere + + return null; + } + } + + /** + * Dummy step meant to act as a root node in DAG. This is done by adding all of the columns included in the file + * as output columns, so all other steps will depend on this. + **/ + class DummyProcessStep extends ProcessStep { + Set outputColumns = new HashSet<>(); + + public DummyProcessStep(Flight flight) { + // We can pass in null rather than a connection object + super(null, flight); + outputColumns.addAll(doubleTimeSeries.keySet()); + outputColumns.addAll(stringTimeSeries.keySet()); + } + + public Set getRequiredDoubleColumns() { return Collections.emptySet(); } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return Collections.emptySet(); } + public Set getOutputColumns() { return outputColumns; } + + public boolean airframeIsValid(String airframe) { return true; } + public boolean isRequired() { return true; } + + // Left blank intentionally + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException {} + } + + private void nodeConflictError(ProcessStep first, ProcessStep second) throws FatalFlightFileException { + throw new FatalFlightFileException( + "ERROR when building dependency graph! " + + "Two ProcessSteps are indicated as having the same output column. " + + "While it is possible for two ProcessSteps to have the same output column(s), " + + "their use should be mutually exclusive from one another. 
" + + "\nDEBUG INFO:\n node 0: " + first.toString() + "\n node 1: " + second.toString()); + + } + + private DependencyNode registerStep(ProcessStep step) throws FatalFlightFileException { + DependencyNode node = new DependencyNode(step); + nodes.add(node); + + for (String outputColumn : step.getOutputColumns()) { + DependencyNode other = null; + if ((other = columnToSource.put(outputColumn, node)) != null) nodeConflictError(step, other.step); + } + + return node; + } + + /** + * Create the edges. An edge exists from step X to step Y if step X has an output column + * that step Y relies upon. + **/ + private void createEdges(DependencyNode node) throws FatalFlightFileException { + for (String column : node.step.getRequiredColumns()) { + DependencyNode sourceNode = columnToSource.get(column); + if (sourceNode != null) { + sourceNode.requiredBy.add(node); + node.requires.add(sourceNode); + } + } + } + + // Maps column name to the node where that column is computed + HashMap columnToSource = new HashMap<>(64); + HashSet nodes = new HashSet<>(64); + DependencyNode rootNode; + Flight flight; + + public DependencyGraph(Flight flight, ArrayList steps) throws FatalFlightFileException { + /** + * Create nodes for each step and create a mapping from output column name + * to the node that outputs that column. This should be a unique mapping, as + * we don't want two steps generating the same output column. + **/ + + this.flight = flight; + + rootNode = registerStep(new DummyProcessStep(flight)); + for (var step : steps) registerStep(step); + for (var node : nodes) createEdges(node); + } + + public ArrayList compute() { + // Start with all of the leaf nodes. + ConcurrentHashMap> tasks = new ConcurrentHashMap<>(); + ArrayList> initialTasks = new ArrayList<>(); + for (var node : nodes) { + if (node.requiredBy.size() == 0) { + var task = new DependencyNodeTask(rootNode, tasks); + initialTasks.add(task); + tasks.put(node, task); + } + } + + ForkJoinPool ex = new ForkJoinPool(); + try { + ex.invoke(new RecursiveTask() { + public Void compute() { + initialTasks + .stream() + .map(x -> x.fork()) + .map(x -> x.join()) + .count(); + return null; + } + }); + } finally { + ex.shutdown(); + } + + ArrayList fatalExceptions = new ArrayList<>(); + for (var node : nodes) { + for (var e : node.exceptions) { + // TODO: Consider whether or not we should throw the first unrecoverable exception + // we encounter, or if we shoud batch them all together. + if (e instanceof MalformedFlightFileException me) { + flight.addException(me); + } else if (e instanceof FatalFlightFileException fe) { + fatalExceptions.add(fe); + } else if (e instanceof SQLException se) { + fatalExceptions.add(se); + } + } + } + + return fatalExceptions; + } +} diff --git a/src/main/java/org/ngafid/flights/process/FlightProcessor.java b/src/main/java/org/ngafid/flights/process/FlightProcessor.java new file mode 100644 index 000000000..f24004f9d --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/FlightProcessor.java @@ -0,0 +1,62 @@ +package org.ngafid.flights.process; + +import java.util.List; +import java.util.ArrayList; +import java.util.logging.Logger; + +import org.ngafid.flights.Flight; +import org.ngafid.flights.MalformedFlightFileException; +import org.ngafid.flights.FatalFlightFileException; + +/** + * This class applies a sequence of processing steps to a flight that has already been parse (that is, a flight which + * has already had its columns parsed). 
The processing done here falls largely into a few categories: + * + * - Creation of new synthetic columns that aren't in the original files + * - Conversion of existing columns to different units + * + * Currently, the basic idea is that a list of process steps is created and then sequentially applied. + * Some of these steps are going to be mandatory, and some will only be applied to specific aircraft and/or + * aircraft that have the appropriate columns. + * + **/ +public class FlightProcessor { + + private static final Logger LOG = Logger.getLogger(FlightProcessor.class.getName()); + + Flight flight; + + public FlightProcessor(Flight flight) { + this.flight = flight; + } + + private static List requiredSteps = List.of(); + private static List optionalSteps = List.of(); + + protected ArrayList gatherProcessSteps() throws FatalFlightFileException { + ArrayList steps = new ArrayList<>(); + + for (ProcessStep.Factory factory : requiredSteps) { + ProcessStep step = factory.create(flight); + + if (!step.applicable()) + throw new FatalFlightFileException("Cannot apply required step " + step.toString() + " to flight " + flight.getId()); + + steps.add(step); + } + + for (ProcessStep.Factory factory : optionalSteps) { + ProcessStep step = factory.create(flight); + + if (!step.applicable()) + LOG.info("Cannot apply optional step " + step.toString() + " to flight " + flight.getId()); + + steps.add(step); + } + + return steps; + } + + final private void process() { + } +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java b/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java new file mode 100644 index 000000000..25fe6b954 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java @@ -0,0 +1,34 @@ +package org.ngafid.flights.process; + +import java.util.Set; +import java.util.Collections; +import java.sql.Connection; +import java.sql.SQLException; + +import org.ngafid.flights.Flight; +import org.ngafid.flights.DoubleTimeSeries; +import static org.ngafid.flights.Parameters.*; +import org.ngafid.flights.process.ProcessStep; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + +public class ProcessAirportProximity extends ProcessStep { + private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(LATITUDE, LONGITUDE, ALT_AGL); + private static Set OUTPUT_COLUMNS = Set.of(NEAREST_RUNWAY, AIRPORT_DISTANCE, NEAREST_RUNWAY, RUNWAY_DISTANCE); + + public ProcessAirportProximity(Connection connection, Flight flight) { + super(connection, flight); + } + + public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return Collections.emptySet(); } + public Set getOutputColumns() { return OUTPUT_COLUMNS; } + + public boolean airframeIsValid(String airframe) { return true; } + public boolean isRequired() { return true; } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + + } +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java b/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java new file mode 100644 index 000000000..0f0199334 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java @@ -0,0 +1,105 @@ +package org.ngafid.flights.process; + +import java.util.Set; +import java.util.Collections; +import java.sql.Connection; +import java.sql.SQLException; + 
+import java.nio.file.NoSuchFileException; + +import org.ngafid.flights.Flight; +import org.ngafid.terrain.TerrainCache; +import org.ngafid.flights.DoubleTimeSeries; +import static org.ngafid.flights.Parameters.*; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + +public class ProcessAltAGL extends ProcessStep { + private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(ALT_MSL, LATITUDE, LONGITUDE); + private static Set OUTPUT_COLUMNS = Set.of(ALT_AGL); + + public ProcessAltAGL(Connection connection, Flight flight) { + super(connection, flight); + } + + public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return Collections.emptySet(); } + public Set getOutputColumns() { return OUTPUT_COLUMNS; } + + public boolean airframeIsValid(String airframe) { return true; } + public boolean isRequired() { return true; } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + + flight.addHeader(ALT_AGL, UNIT_FT_AGL); + + DoubleTimeSeries altitudeMSLTS = doubleTimeSeries.get(ALT_MSL); + DoubleTimeSeries latitudeTS = doubleTimeSeries.get(LATITUDE); + DoubleTimeSeries longitudeTS = doubleTimeSeries.get(LONGITUDE); + + // TODO: Probably remove this stuff since ths method will only be called if the columns are available + if (altitudeMSLTS == null || latitudeTS == null || longitudeTS == null) { + String message = "Cannot calculate AGL, flight file had empty or missing "; + + int count = 0; + if (altitudeMSLTS == null) { + message += "'" + ALT_MSL + "'"; + count++; + } + + if (latitudeTS == null) { + if (count > 0) message += ", "; + message += "'" + LATITUDE + "'"; + count++; + } + + if (longitudeTS == null) { + if (count > 0) message += " and "; + message += "'" + LONGITUDE + "'"; + count++; + } + + message += " column"; + if (count >= 2) message += "s"; + message += "."; + + //should be initialized to false, but lets make sure + flight.setHasCoords(false); + flight.setHasAGL(false); + throw new MalformedFlightFileException(message); + } + flight.setHasCoords(true); + flight.setHasAGL(true); + + DoubleTimeSeries altitudeAGLTS = withConnection(connection -> new DoubleTimeSeries(connection, ALT_AGL, UNIT_FT_AGL)); + + for (int i = 0; i < altitudeMSLTS.size(); i++) { + double altitudeMSL = altitudeMSLTS.get(i); + double latitude = latitudeTS.get(i); + double longitude = longitudeTS.get(i); + + //System.err.println("getting AGL for latitude: " + latitude + ", " + longitude); + + if (Double.isNaN(altitudeMSL) || Double.isNaN(latitude) || Double.isNaN(longitude)) { + altitudeAGLTS.add(Double.NaN); + //System.err.println("result is: " + Double.NaN); + continue; + } + + try { + int altitudeAGL = TerrainCache.getAltitudeFt(altitudeMSL, latitude, longitude); + altitudeAGLTS.add(altitudeAGL); + } catch (NoSuchFileException e) { + System.err.println("ERROR: could not read terrain file: " + e); + + flight.setHasAGL(false); + throw new MalformedFlightFileException("Could not calculate AGL for this flight as it had latitudes/longitudes outside of the United States."); + } + + } + + doubleTimeSeries.put(ALT_AGL, altitudeAGLTS); + } + +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessStep.java b/src/main/java/org/ngafid/flights/process/ProcessStep.java new file mode 100644 index 000000000..b02de7cf4 --- /dev/null +++ 
b/src/main/java/org/ngafid/flights/process/ProcessStep.java @@ -0,0 +1,76 @@ +package org.ngafid.flights.process; + +import org.ngafid.flights.Flight; +import org.ngafid.flights.StringTimeSeries; +import org.ngafid.flights.DoubleTimeSeries; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + +import java.util.Map; +import java.util.Set; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.HashSet; + + +public abstract class ProcessStep { + + public interface Factory { + ProcessStep create(Flight flight); + } + + protected Flight flight; + + // Connection is not accessible by subclasses directly by design, instead use the `withConnection` function. + // This grabs the lock on the object so only one thread is using the connection at any given point in time. + private Connection connection; + + // References to the corresponding fields in `flight` + protected Map doubleTimeSeries; + protected Map stringTimeSeries; + + public ProcessStep(Connection connection, Flight flight) { + this.connection = connection; + this.flight = flight; + + this.doubleTimeSeries = flight.getDoubleTimeSeriesMap(); + this.stringTimeSeries = flight.getStringTimeSeriesMap(); + } + + // These should probably return references to static immutable Sets. + public abstract Set getRequiredDoubleColumns(); + public abstract Set getRequiredStringColumns(); + public abstract Set getRequiredColumns(); + public abstract Set getOutputColumns(); + + // Whether or not this ProcessStep is required / mandatory + public abstract boolean isRequired(); + + // Whether or not this ProcessStep can be performed for a given airframe + public abstract boolean airframeIsValid(String airframe); + + final public boolean applicable() { + return + airframeIsValid(flight.getAirframeName()) + && stringTimeSeries + .keySet() + .containsAll(getRequiredStringColumns()) + && doubleTimeSeries + .keySet() + .containsAll(getRequiredDoubleColumns()); + } + + protected interface ConnectionFunctor { + public T compute(Connection connection) throws SQLException; + } + + final public T withConnection(ConnectionFunctor functor) throws SQLException { + T value = null; + synchronized (connection) { + value = functor.compute(connection); + } + return value; + } + + public abstract void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException; +} diff --git a/src/main/java/org/ngafid/routes/PostUserPreferences.java b/src/main/java/org/ngafid/routes/PostUserPreferences.java index 053f912c6..8ae84ae3d 100644 --- a/src/main/java/org/ngafid/routes/PostUserPreferences.java +++ b/src/main/java/org/ngafid/routes/PostUserPreferences.java @@ -24,7 +24,7 @@ import org.ngafid.accounts.UserPreferences; import org.ngafid.flights.DoubleTimeSeries; -import static org.ngafid.flights.calculations.Parameters.*; +import static org.ngafid.flights.Parameters.*; public class PostUserPreferences implements Route { private static final Logger LOG = Logger.getLogger(PostUserPreferences.class.getName()); diff --git a/src/main/java/org/ngafid/routes/PostUserPreferencesMetric.java b/src/main/java/org/ngafid/routes/PostUserPreferencesMetric.java index 063c64a10..237bf2afe 100644 --- a/src/main/java/org/ngafid/routes/PostUserPreferencesMetric.java +++ b/src/main/java/org/ngafid/routes/PostUserPreferencesMetric.java @@ -25,7 +25,7 @@ import org.ngafid.accounts.UserPreferences; import org.ngafid.flights.DoubleTimeSeries; -import static org.ngafid.flights.calculations.Parameters.*; 
+import static org.ngafid.flights.Parameters.*; public class PostUserPreferencesMetric implements Route { private static final Logger LOG = Logger.getLogger(PostUserPreferencesMetric.class.getName()); From 04ecd3447b23231c056b20cb86ce1a9f8da5b20b Mon Sep 17 00:00:00 2001 From: Joshua Karns Date: Tue, 4 Apr 2023 13:34:03 -0400 Subject: [PATCH 02/12] Part way though moving towards a flight builder --- src/main/java/org/ngafid/ProcessFlights.java | 378 +++++++++--------- src/main/java/org/ngafid/ProcessUpload.java | 6 +- src/main/java/org/ngafid/flights/Flight.java | 60 +-- .../java/org/ngafid/flights/Parameters.java | 1 + .../flights/process/DependencyGraph.java | 32 +- .../ngafid/flights/process/FlightBuilder.java | 46 +++ .../process/FlightProcessingException.java | 49 +++ .../ngafid/flights/process/ProcessAltAGL.java | 2 +- .../flights/process/ProcessStartEndTime.java | 117 ++++++ 9 files changed, 466 insertions(+), 225 deletions(-) create mode 100644 src/main/java/org/ngafid/flights/process/FlightBuilder.java create mode 100644 src/main/java/org/ngafid/flights/process/FlightProcessingException.java create mode 100644 src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java diff --git a/src/main/java/org/ngafid/ProcessFlights.java b/src/main/java/org/ngafid/ProcessFlights.java index 8b8bfc63f..8b8582a9e 100644 --- a/src/main/java/org/ngafid/ProcessFlights.java +++ b/src/main/java/org/ngafid/ProcessFlights.java @@ -27,200 +27,200 @@ public class ProcessFlights { private static Connection connection = Database.getConnection(); public static void main(String[] arguments) { - while (true) { - connection = Database.resetConnection(); + // while (true) { + // connection = Database.resetConnection(); - Instant start = Instant.now(); + // Instant start = Instant.now(); - try { - PreparedStatement fleetPreparedStatement = connection.prepareStatement("SELECT id FROM fleet WHERE EXISTS (SELECT id FROM uploads WHERE fleet.id = uploads.fleet_id AND uploads.status = 'UPLOADED')"); - ResultSet fleetSet = fleetPreparedStatement.executeQuery(); + // try { + // PreparedStatement fleetPreparedStatement = connection.prepareStatement("SELECT id FROM fleet WHERE EXISTS (SELECT id FROM uploads WHERE fleet.id = uploads.fleet_id AND uploads.status = 'UPLOADED')"); + // ResultSet fleetSet = fleetPreparedStatement.executeQuery(); - while (fleetSet.next()) { - int targetFleetId = fleetSet.getInt(1); - System.err.println("Importing an upload from fleet: " + targetFleetId); + // while (fleetSet.next()) { + // int targetFleetId = fleetSet.getInt(1); + // System.err.println("Importing an upload from fleet: " + targetFleetId); - PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? AND fleet_id = ? LIMIT 1"); + // PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? AND fleet_id = ? LIMIT 1"); - //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? AND fleet_id = ? AND id = 2281"); - //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? 
AND fleet_id != 1"); - //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ?"); - - //targetFleetId = 1; - //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? AND fleet_id = ? AND id = 2863 LIMIT 1"); - uploadsPreparedStatement.setString(1, "UPLOADED"); - uploadsPreparedStatement.setInt(2, targetFleetId); - - ResultSet resultSet = uploadsPreparedStatement.executeQuery(); - - while (resultSet.next()) { - ArrayList flightErrors = new ArrayList(); - - int uploadId = resultSet.getInt(1); - int uploaderId = resultSet.getInt(2); - int fleetId = resultSet.getInt(3); - String filename = resultSet.getString(4); - - filename = WebServer.NGAFID_ARCHIVE_DIR + "/" + fleetId + "/" + uploaderId + "/" + uploadId + "__" + filename; - System.err.println("processing: '" + filename + "'"); - - String extension = filename.substring(filename.length() - 4); - System.err.println("extension: '" + extension + "'"); - - String status = "IMPORTED"; - - Exception uploadException = null; - - int validFlights = 0; - int warningFlights = 0; - int errorFlights = 0; - if (extension.equals(".zip")) { - try { - System.err.println("processing zip file: '" + filename + "'"); - ZipFile zipFile = new ZipFile(filename); - - Enumeration entries = zipFile.entries(); - - while (entries.hasMoreElements()) { - ZipEntry entry = entries.nextElement(); - String name = entry.getName(); - - if (entry.isDirectory()) { - //System.err.println("SKIPPING: " + entry.getName()); - continue; - } - - if (name.contains("__MACOSX")) { - //System.err.println("SKIPPING: " + entry.getName()); - continue; - } - - System.err.println("PROCESSING: " + name); - - if (entry.getName().endsWith(".csv")) { - try { - InputStream stream = zipFile.getInputStream(entry); - Flight flight = new Flight(fleetId, entry.getName(), stream, connection); - - if (connection != null) { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - } - - if (flight.getStatus().equals("WARNING")) warningFlights++; - - validFlights++; - } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException e) { - System.err.println(e.getMessage()); - flightErrors.add(new UploadException(e.getMessage(), e, entry.getName())); - errorFlights++; - } - - } else if (entry.getName().endsWith(".json")) { - try { - Flight flight = Flight.processJSON(fleetId, connection, zipFile.getInputStream(entry), entry.getName()); - - if (connection != null) { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - } - - if (flight.getStatus().equals("WARNING")) warningFlights++; - - validFlights++; - } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | ParseException e) { - System.err.println("ERROR: " + e.getMessage()); - flightErrors.add(new UploadException(e.getMessage(), e, entry.getName())); - errorFlights++; - } - } else if (entry.getName().endsWith(".gpx")) { - try { - InputStream stream = zipFile.getInputStream(entry); - ArrayList flights = Flight.processGPXFile(fleetId, connection, stream, entry.getName()); - - if (connection != null) { - for (Flight flight : flights) { - System.out.println(flight.getFilename()); - } - for (Flight flight : flights) { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - if (flight.getStatus().equals("WARNING")) warningFlights++; - validFlights++; - } - } - } catch (IOException | 
FatalFlightFileException | FlightAlreadyExistsException | ParserConfigurationException | SAXException | SQLException | ParseException e) { - System.err.println(e.getMessage()); - flightErrors.add(new UploadException(e.getMessage(), e, entry.getName())); - errorFlights++; - } - } else { - flightErrors.add(new UploadException("Unknown file type contained in zip file (flight logs should be .csv files).", entry.getName())); - errorFlights++; - } - } - - } catch (java.nio.file.NoSuchFileException e) { - System.err.println("IOException: " + e ); - e.printStackTrace(); - - UploadError.insertError(connection, uploadId, "Broken upload: please delete this upload and re-upload."); - status = "ERROR"; - - } catch (IOException e) { - System.err.println("IOException: " + e ); - e.printStackTrace(); - - UploadError.insertError(connection, uploadId, "Could not read from zip file: please delete this upload and re-upload."); - status = "ERROR"; - } - - } else { - //insert an upload error for this upload - status = "ERROR"; - UploadError.insertError(connection, uploadId, "Uploaded file was not a zip file."); - } - - //update upload in database, add upload exceptions if there are any - PreparedStatement updateStatement = connection.prepareStatement("UPDATE uploads SET status = ?, n_valid_flights = ?, n_warning_flights = ?, n_error_flights = ? WHERE id = ?"); - updateStatement.setString(1, status); - updateStatement.setInt(2, validFlights); - updateStatement.setInt(3, warningFlights); - updateStatement.setInt(4, errorFlights); - updateStatement.setInt(5, uploadId); - updateStatement.executeUpdate(); - updateStatement.close(); - - for (UploadException exception : flightErrors) { - FlightError.insertError(connection, uploadId, exception.getFilename(), exception.getMessage()); - } - } - - resultSet.close(); - uploadsPreparedStatement.close(); - - //TURN OFF FOR REGULAR USE - //System.exit(1); - } - - fleetSet.close(); - fleetPreparedStatement.close(); - - } catch (SQLException e) { - e.printStackTrace(); - System.exit(1); - } - - Instant end = Instant.now(); - double elapsed_millis = (double) Duration.between(start, end).toMillis(); - double elapsed_seconds = elapsed_millis / 1000; - System.err.println("finished in " + elapsed_seconds); - - try { - Thread.sleep(10000); - } catch (Exception e) { - System.err.println(e); - e.printStackTrace(); - } - - } + // //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? AND fleet_id = ? AND id = 2281"); + // //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? AND fleet_id != 1"); + // //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ?"); + + // //targetFleetId = 1; + // //PreparedStatement uploadsPreparedStatement = connection.prepareStatement("SELECT id, uploader_id, fleet_id, filename FROM uploads WHERE status = ? AND fleet_id = ? 
AND id = 2863 LIMIT 1"); + // uploadsPreparedStatement.setString(1, "UPLOADED"); + // uploadsPreparedStatement.setInt(2, targetFleetId); + + // ResultSet resultSet = uploadsPreparedStatement.executeQuery(); + + // while (resultSet.next()) { + // ArrayList flightErrors = new ArrayList(); + + // int uploadId = resultSet.getInt(1); + // int uploaderId = resultSet.getInt(2); + // int fleetId = resultSet.getInt(3); + // String filename = resultSet.getString(4); + + // filename = WebServer.NGAFID_ARCHIVE_DIR + "/" + fleetId + "/" + uploaderId + "/" + uploadId + "__" + filename; + // System.err.println("processing: '" + filename + "'"); + + // String extension = filename.substring(filename.length() - 4); + // System.err.println("extension: '" + extension + "'"); + + // String status = "IMPORTED"; + + // Exception uploadException = null; + + // int validFlights = 0; + // int warningFlights = 0; + // int errorFlights = 0; + // if (extension.equals(".zip")) { + // try { + // System.err.println("processing zip file: '" + filename + "'"); + // ZipFile zipFile = new ZipFile(filename); + + // Enumeration entries = zipFile.entries(); + + // while (entries.hasMoreElements()) { + // ZipEntry entry = entries.nextElement(); + // String name = entry.getName(); + + // if (entry.isDirectory()) { + // //System.err.println("SKIPPING: " + entry.getName()); + // continue; + // } + + // if (name.contains("__MACOSX")) { + // //System.err.println("SKIPPING: " + entry.getName()); + // continue; + // } + + // System.err.println("PROCESSING: " + name); + + // if (entry.getName().endsWith(".csv")) { + // try { + // InputStream stream = zipFile.getInputStream(entry); + // Flight flight = new Flight(fleetId, entry.getName(), stream, connection); + + // if (connection != null) { + // flight.updateDatabase(connection, uploadId, uploaderId, fleetId); + // } + + // if (flight.getStatus().equals("WARNING")) warningFlights++; + + // validFlights++; + // } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException e) { + // System.err.println(e.getMessage()); + // flightErrors.add(new UploadException(e.getMessage(), e, entry.getName())); + // errorFlights++; + // } + + // } else if (entry.getName().endsWith(".json")) { + // try { + // Flight flight = Flight.processJSON(fleetId, connection, zipFile.getInputStream(entry), entry.getName()); + + // if (connection != null) { + // flight.updateDatabase(connection, uploadId, uploaderId, fleetId); + // } + + // if (flight.getStatus().equals("WARNING")) warningFlights++; + + // validFlights++; + // } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | ParseException e) { + // System.err.println("ERROR: " + e.getMessage()); + // flightErrors.add(new UploadException(e.getMessage(), e, entry.getName())); + // errorFlights++; + // } + // } else if (entry.getName().endsWith(".gpx")) { + // try { + // InputStream stream = zipFile.getInputStream(entry); + // ArrayList flights = Flight.processGPXFile(fleetId, connection, stream, entry.getName()); + + // if (connection != null) { + // for (Flight flight : flights) { + // System.out.println(flight.getFilename()); + // } + // for (Flight flight : flights) { + // flight.updateDatabase(connection, uploadId, uploaderId, fleetId); + // if (flight.getStatus().equals("WARNING")) warningFlights++; + // validFlights++; + // } + // } + // } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | ParserConfigurationException | SAXException | SQLException | ParseException e) { + // 
System.err.println(e.getMessage()); + // flightErrors.add(new UploadException(e.getMessage(), e, entry.getName())); + // errorFlights++; + // } + // } else { + // flightErrors.add(new UploadException("Unknown file type contained in zip file (flight logs should be .csv files).", entry.getName())); + // errorFlights++; + // } + // } + + // } catch (java.nio.file.NoSuchFileException e) { + // System.err.println("IOException: " + e ); + // e.printStackTrace(); + + // UploadError.insertError(connection, uploadId, "Broken upload: please delete this upload and re-upload."); + // status = "ERROR"; + + // } catch (IOException e) { + // System.err.println("IOException: " + e ); + // e.printStackTrace(); + + // UploadError.insertError(connection, uploadId, "Could not read from zip file: please delete this upload and re-upload."); + // status = "ERROR"; + // } + + // } else { + // //insert an upload error for this upload + // status = "ERROR"; + // UploadError.insertError(connection, uploadId, "Uploaded file was not a zip file."); + // } + + // //update upload in database, add upload exceptions if there are any + // PreparedStatement updateStatement = connection.prepareStatement("UPDATE uploads SET status = ?, n_valid_flights = ?, n_warning_flights = ?, n_error_flights = ? WHERE id = ?"); + // updateStatement.setString(1, status); + // updateStatement.setInt(2, validFlights); + // updateStatement.setInt(3, warningFlights); + // updateStatement.setInt(4, errorFlights); + // updateStatement.setInt(5, uploadId); + // updateStatement.executeUpdate(); + // updateStatement.close(); + + // for (UploadException exception : flightErrors) { + // FlightError.insertError(connection, uploadId, exception.getFilename(), exception.getMessage()); + // } + // } + + // resultSet.close(); + // uploadsPreparedStatement.close(); + + // //TURN OFF FOR REGULAR USE + // //System.exit(1); + // } + + // fleetSet.close(); + // fleetPreparedStatement.close(); + + // } catch (SQLException e) { + // e.printStackTrace(); + // System.exit(1); + // } + + // Instant end = Instant.now(); + // double elapsed_millis = (double) Duration.between(start, end).toMillis(); + // double elapsed_seconds = elapsed_millis / 1000; + // System.err.println("finished in " + elapsed_seconds); + + // try { + // Thread.sleep(10000); + // } catch (Exception e) { + // System.err.println(e); + // e.printStackTrace(); + // } + + // } } } diff --git a/src/main/java/org/ngafid/ProcessUpload.java b/src/main/java/org/ngafid/ProcessUpload.java index 57f3dc3f2..6a35c2f91 100644 --- a/src/main/java/org/ngafid/ProcessUpload.java +++ b/src/main/java/org/ngafid/ProcessUpload.java @@ -37,6 +37,7 @@ import org.ngafid.flights.MalformedFlightFileException; import org.ngafid.flights.Upload; import org.ngafid.flights.UploadError; +import org.ngafid.flights.process.*; import org.ngafid.accounts.Fleet; import org.ngafid.accounts.User; @@ -295,7 +296,7 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle flightInfo.add(new FlightInfo(flight.getId(), flight.getNumberRows(), flight.getFilename(), flight.getExceptions())); validFlights++; - } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException e) { + } catch (FlightProcessingException e) { System.err.println(e.getMessage()); flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); errorFlights++; @@ -317,7 +318,7 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle } } } catch (IOException | 
FatalFlightFileException | FlightAlreadyExistsException | - ParserConfigurationException | SAXException | SQLException | ParseException e) { + ParserConfigurationException | SAXException | ParseException e) { System.err.println(e.getMessage()); flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); errorFlights++; @@ -438,7 +439,6 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle //insert all the flight errors to the database for (Map.Entry entry : flightErrors.entrySet()) { UploadException exception = entry.getValue(); - FlightError.insertError(connection, uploadId, exception.getFilename(), exception.getMessage()); } diff --git a/src/main/java/org/ngafid/flights/Flight.java b/src/main/java/org/ngafid/flights/Flight.java index 84c67a047..44f41d07b 100644 --- a/src/main/java/org/ngafid/flights/Flight.java +++ b/src/main/java/org/ngafid/flights/Flight.java @@ -43,7 +43,6 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.HashSet; import java.util.Optional; @@ -2097,24 +2096,47 @@ private void process(Connection connection, InputStream inputStream) throws IOEx private ArrayList gatherProcessSteps(Connection connection) { ArrayList steps = new ArrayList<>(); + // TODO: add ScanEagle steps + steps.add(new ProcessAltAGL(connection, this)); + // TODO: Finish implementing this steps.add(new ProcessAirportProximity(connection, this)); + // TODO: Lcl Date; calculateStartEndTime + // startDateTime; endDateTime + // TODO: Total Fuel; calculateTotalFuel + // doubleTimeSeries + // TODO: AltMSL Lag Diff; calculateLaggedAltMSL + // doubleTimeSeries + // TODO: engine divergences; calculateDivergence + // doubleTiemSeries + // TODO: Frequency check + // (nil) + // TODO: calculateItinerary + // itinerary + // TODO: runLOCICalculations + // doubleTimeSeries + return steps; } - final private void newProcess(Connection connection) throws FatalFlightFileException { + private void newProcess(Connection connection, InputStream inputStream) throws IOException, FatalFlightFileException, SQLException, FlightProcessingException { + initialize(connection, inputStream); + newProcess(connection); + } + + final private void newProcess(Connection connection) throws FlightProcessingException { ArrayList steps = gatherProcessSteps(connection); // gatherProcessSteps will be an abstract method // These fields will be written to directly by the ProcessSteps. 
doubleTimeSeries = new ConcurrentHashMap<>(doubleTimeSeries); stringTimeSeries = new ConcurrentHashMap<>(stringTimeSeries); - ArrayList fatalExceptions = new DependencyGraph(this, steps).compute(); + DependencyGraph dg = new DependencyGraph(this, steps); + dg.cycleCheck(); + dg.compute(); - // Probably not worth the time to convert back to serial versions - // doubleTimeSeries = new HashMap<>(doubleTimeSeries); - // stringTimeSeries = new HashMap<>(stringTimeSeries); + checkExceptions(); } final private void process(Connection connection) throws IOException, FatalFlightFileException, SQLException { @@ -2388,17 +2410,11 @@ public Flight(int fleetId, String filename, String suggestedTailNumber, String a this.status = "SUCCESS"; } - public Flight(int fleetId, String zipEntryName, InputStream inputStream, Connection connection) throws IOException, FatalFlightFileException, FlightAlreadyExistsException, SQLException { + public Flight(int fleetId, String zipEntryName, InputStream inputStream, Connection connection) throws FlightProcessingException { this.fleetId = fleetId; this.filename = zipEntryName; this.tailConfirmed = false; - /* - if (!filename.contains("/")) { - throw new FatalFlightFileException("The flight file was not in a directory in the zip file. Flight files should be in a directory with the name of their tail number (or other aircraft identifier)."); - } - */ - String[] parts = zipEntryName.split("/"); if (parts.length <= 1) { suggestedTailNumber = null; @@ -2416,15 +2432,16 @@ public Flight(int fleetId, String zipEntryName, InputStream inputStream, Connect setMD5Hash(inputStream); //check to see if a flight with this MD5 hash already exists in the database - if (connection != null) checkIfExists(connection); + if (connection != null) + checkIfExists(connection); inputStream.reset(); - process(connection, inputStream); + newProcess(connection, inputStream); - } catch (FatalFlightFileException | IOException e) { + } catch (FatalFlightFileException | IOException | FlightAlreadyExistsException | SQLException e) { status = "WARNING"; - throw e; - } catch (SQLException e) { + throw new FlightProcessingException(e); + } catch (FlightProcessingException e) { System.out.println(e); e.printStackTrace(); System.exit(1); @@ -2433,11 +2450,6 @@ public Flight(int fleetId, String zipEntryName, InputStream inputStream, Connect checkExceptions(); } - // Constructor for a flight that takes lists of UNINSERTED time series (that is, they should not be in the database yet!) - private Flight(Connection connection, ArrayList doubleTimeSeries, ArrayList stringTimeSeries, Timestamp startTime, Timestamp endTime) { - - } - /** * GPX is an XML file that follows the schema found here http://www.topografix.com/GPX/1/1/ *

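The constructor change above means the steps created by newProcess() run on ForkJoinPool worker threads while sharing the single JDBC connection passed into Flight. ProcessStep, added in the first patch of this series, therefore routes all database access through withConnection(), which synchronizes on the connection object. The reduced, stand-alone sketch below compiles on its own but needs a real Connection to exercise; the SharedConnection class name is hypothetical, and the usage comment mirrors what ProcessAltAGL does when it builds its output DoubleTimeSeries.

import java.sql.Connection;
import java.sql.SQLException;

class SharedConnection {
    // Mirrors ProcessStep.ConnectionFunctor: a unit of work that needs the connection.
    interface ConnectionFunctor<T> {
        T compute(Connection connection) throws SQLException;
    }

    private final Connection connection;

    SharedConnection(Connection connection) {
        this.connection = connection;
    }

    // Only one thread may use the shared connection at a time.
    <T> T withConnection(ConnectionFunctor<T> functor) throws SQLException {
        synchronized (connection) {
            return functor.compute(connection);
        }
    }

    // Example use from a step's compute(), analogous to ProcessAltAGL:
    //   DoubleTimeSeries altitudeAGL =
    //       withConnection(c -> new DoubleTimeSeries(c, "AltAGL", "ft agl"));
}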
@@ -3114,7 +3126,7 @@ private static int indexOfMin(double[] a, int i, int n) { for (int j = i; j < i + n; j++) { if (v > a[j]) { mindex = j; - v = a[j]; + v t } } diff --git a/src/main/java/org/ngafid/flights/Parameters.java b/src/main/java/org/ngafid/flights/Parameters.java index c65571131..4245de67f 100644 --- a/src/main/java/org/ngafid/flights/Parameters.java +++ b/src/main/java/org/ngafid/flights/Parameters.java @@ -82,6 +82,7 @@ public interface Parameters { public static final String TOTAL_FUEL = "Total Fuel"; public static final String LCL_DATE = "Lcl Date"; public static final String LCL_TIME = "Lcl Time"; + public static final String UTC_OFFSET = "UTCOfst"; public static final String LATITUDE = "Latitude"; public static final String LONGITUDE = "Longitude"; public static final String STALL_PROBABILITY = "PStall"; diff --git a/src/main/java/org/ngafid/flights/process/DependencyGraph.java b/src/main/java/org/ngafid/flights/process/DependencyGraph.java index 0926c6b8c..217dd8c65 100644 --- a/src/main/java/org/ngafid/flights/process/DependencyGraph.java +++ b/src/main/java/org/ngafid/flights/process/DependencyGraph.java @@ -177,7 +177,7 @@ private void createEdges(DependencyNode node) throws FatalFlightFileException { DependencyNode rootNode; Flight flight; - public DependencyGraph(Flight flight, ArrayList steps) throws FatalFlightFileException { + public DependencyGraph(Flight flight, ArrayList steps) throws FlightProcessingException { /** * Create nodes for each step and create a mapping from output column name * to the node that outputs that column. This should be a unique mapping, as @@ -186,12 +186,17 @@ public DependencyGraph(Flight flight, ArrayList steps) throws Fatal this.flight = flight; - rootNode = registerStep(new DummyProcessStep(flight)); - for (var step : steps) registerStep(step); - for (var node : nodes) createEdges(node); + try { + rootNode = registerStep(new DummyProcessStep(flight)); + for (var step : steps) registerStep(step); + for (var node : nodes) createEdges(node); + } catch (FatalFlightFileException e) { + throw new FlightProcessingException(e); + } } - public ArrayList compute() { + // Modifies the flight object in place. + public void compute() throws FlightProcessingException { // Start with all of the leaf nodes. ConcurrentHashMap> tasks = new ConcurrentHashMap<>(); ArrayList> initialTasks = new ArrayList<>(); @@ -222,18 +227,29 @@ public Void compute() { ArrayList fatalExceptions = new ArrayList<>(); for (var node : nodes) { for (var e : node.exceptions) { - // TODO: Consider whether or not we should throw the first unrecoverable exception - // we encounter, or if we shoud batch them all together. if (e instanceof MalformedFlightFileException me) { flight.addException(me); } else if (e instanceof FatalFlightFileException fe) { fatalExceptions.add(fe); } else if (e instanceof SQLException se) { fatalExceptions.add(se); + } else { + LOG.severe( + "Encountered exception of unknown type when executing dependency graph. " + + "\"" + e.getMessage() + "\"" + "\n." 
+ + "This should not be possible - if this seems plausible you should add a handler for this " + + "type of exception in DependencyGraph::compute."); + e.printStackTrace(); + System.exit(1); } } } - return fatalExceptions; + if (fatalExceptions.size() != 0) + throw new FlightProcessingException(fatalExceptions); + } + + public void cycleCheck() throws FlightProcessingException { + // TODO: Cycle check } } diff --git a/src/main/java/org/ngafid/flights/process/FlightBuilder.java b/src/main/java/org/ngafid/flights/process/FlightBuilder.java new file mode 100644 index 000000000..122b52ea2 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/FlightBuilder.java @@ -0,0 +1,46 @@ +package org.ngafid.flights.process; + +import java.util.Map; +import java.util.ArrayList; +import java.util.concurrent.ConcurrentHashMap; + +import org.ngafid.flights.*; + +public class FlightBuilder { + + private final ConcurrentHashMap doubleTimeSeries; + private final ConcurrentHashMap stringTimeSeries; + + private ArrayList itinerary = null; + + private String startDateTime = null, endDateTime = null; + + // Cosntrutor for each file type... + public FlightBuilder(Map doubleTimeSeries, Map stringTimeSeries) { + this.doubleTimeSeries = new ConcurrentHashMap<>(doubleTimeSeries); + this.stringTimeSeries = new ConcurrentHashMap<>(stringTimeSeries); + } + + public void addTimeSeries(String name, DoubleTimeSeries timeSeries) { + doubleTimeSeries.put(name, timeSeries); + } + + public void addTimeSeries(String name, StringTimeSeries timeSeries) { + stringTimeSeries.put(name, timeSeries); + } + + public synchronized void setStartDateTime(String startDateTime) { + this.startDateTime = startDateTime; + } + + public synchronized void setEndDateTime(String endDateTime) { + this.endDateTime = endDateTime; + } + + public synchronized void setItinerary(ArrayList itinerary) { + this.itinerary = itinerary; + } + + // TODO: implement this + public void validate() {} +} diff --git a/src/main/java/org/ngafid/flights/process/FlightProcessingException.java b/src/main/java/org/ngafid/flights/process/FlightProcessingException.java new file mode 100644 index 000000000..617f80280 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/FlightProcessingException.java @@ -0,0 +1,49 @@ +package org.ngafid.flights.process; + +import java.util.List; +import java.util.Collections; + +/** + * An exception that contains all of the FATAL exceptions that occurred during flight processing. + * Namely SQLException, FatalFlightFileException, IOException, and FlightAlreadyExistsException. + * + * If flight processing steps are done in parallel multiple exceptions could be thrown, which is + * where this class comes in: it will contain all of the exceptions that occurred. 
+ * + **/ +public class FlightProcessingException extends Exception { + private static final long serialVersionUID = 1235003; + private static final String DEFAULT_MESSAGE = "(exception message was empty / null)"; + + private List exceptions; + + public FlightProcessingException(Exception e) { + exceptions = List.of(e); + } + + public FlightProcessingException(List exceptions) { + this.exceptions = Collections.unmodifiableList(exceptions); + } + + public String getMessage() { + String message; + + if (exceptions.size() == 1) { + + message = exceptions.get(0).getMessage(); + if (message == null) + return DEFAULT_MESSAGE; + + } else { + message = "Encountered the following " + exceptions.size() + " errors when processing a flight:\n"; + for (var e : exceptions) { + String eMessage = e.getMessage(); + if (eMessage == null) + eMessage = DEFAULT_MESSAGE; + message += eMessage + "\n\n"; + } + } + + return message; + } +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java b/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java index 0f0199334..47181ed8d 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java +++ b/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java @@ -24,7 +24,7 @@ public ProcessAltAGL(Connection connection, Flight flight) { public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } public Set getRequiredStringColumns() { return Collections.emptySet(); } - public Set getRequiredColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } public Set getOutputColumns() { return OUTPUT_COLUMNS; } public boolean airframeIsValid(String airframe) { return true; } diff --git a/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java b/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java new file mode 100644 index 000000000..63a698296 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java @@ -0,0 +1,117 @@ +package org.ngafid.flights.process; + +import java.time.*; +import java.util.Set; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Collections; +import java.util.logging.Logger; + +import static org.ngafid.flights.Parameters.*; +import org.ngafid.common.*; +import org.ngafid.flights.Flight; +import org.ngafid.flights.StringTimeSeries; +import org.ngafid.flights.MalformedFlightFileException; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.process.ProcessStep; + +public class ProcessStartEndTime extends ProcessStep { + private static final Logger LOG = Logger.getLogger(ProcessStartEndTime.class.getName()); + + public static Set REQUIRED_STRING_COLUMNS = Set.of(LCL_DATE, LCL_TIME, UTC_OFFSET); + + public ProcessStartEndTime(Connection connection, Flight flight) { + super(connection, flight); + } + + public Set getRequiredDoubleColumns() { return Collections.emptySet(); } + public Set getRequiredStringColumns() { return REQUIRED_STRING_COLUMNS; } + public Set getRequiredColumns() { return REQUIRED_STRING_COLUMNS; } + public Set getOutputColumns() { return Collections.emptySet(); } + + public boolean airframeIsValid(String airframe) { return true; } + public boolean isRequired() { return true; } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + StringTimeSeries dates = stringTimeSeries.get(LCL_DATE); + StringTimeSeries times = stringTimeSeries.get(LCL_TIME); + StringTimeSeries offsets = 
stringTimeSeries.get(UTC_OFFSET); + + int dateSize = dates.size(); + int timeSize = times.size(); + int offsetSize = offsets.size(); + + LOG.info("\tdate size: " + dateSize + ", time size: " + timeSize + ", offset size: " + offsetSize); + + //get the minimum sized length of each of these series, they should all be the same but + //if the last column was cut off it might not be the case + int minSize = dateSize; + if (minSize < timeSize) minSize = timeSize; + if (minSize < offsetSize) minSize = offsetSize; + + //find the first non-null time entry + int start = 0; + while (start < minSize && + (dates.get(start) == null || dates.get(start).equals("") || + times.get(start) == null || times.get(start).equals("") || + offsets.get(start) == null || offsets.get(start).equals("") || offsets.get(start).equals("+19:00"))) { + + start++; + } + + LOG.info("\tfirst date time and offset not null at index: " + start); + + if (start >= minSize) { + throw new MalformedFlightFileException("Date, Time or Offset columns were all null! Cannot set start/end times."); + } + + //find the last full date time offset entry row + int end = minSize - 1; + while (end >= 0 && + (dates.get(end) == null || dates.get(end).equals("") || + times.get(end) == null || times.get(end).equals("") || + offsets.get(end) == null || offsets.get(end).equals(""))) { + + end--; + } + + String startDate = dates.get(start); + String startTime = times.get(start); + String startOffset = offsets.get(start); + + String endDate = dates.get(end); + String endTime = times.get(end); + String endOffset = offsets.get(end); + + LOG.info("\t\t\tfirst not null " + start + " -- " + startDate + " " + startTime + " " + startOffset); + LOG.info("\t\t\tlast not null " + endDate + " " + endTime + " " + endOffset); + + OffsetDateTime startODT = null; + try { + startODT = TimeUtils.convertToOffset(startDate, startTime, startOffset, "+00:00"); + } catch (DateTimeException dte) { + System.err.println("Corrupt start time data in flight file: " + dte.getMessage()); + //System.exit(1); + throw new MalformedFlightFileException("Corrupt start time data in flight file: '" + dte.getMessage() + "'"); + } + + OffsetDateTime endODT = null; + try { + endODT = TimeUtils.convertToOffset(endDate, endTime, endOffset, "+00:00"); + } catch (DateTimeException dte) { + System.err.println("Corrupt end time data in flight file: " + dte.getMessage()); + //System.exit(1); + throw new MalformedFlightFileException("Corrupt end time data in flight file: '" + dte.getMessage() + "'"); + } + + if (startODT.isAfter(endODT)) { + startDateTime = null; + endDateTime = null; + + throw new MalformedFlightFileException("Corrupt time data in flight file, start time was after the end time"); + } + + startDateTime = startODT.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")); + endDateTime = endODT.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")); + } +} From 0ff0d4ad0f4c2b1147f6a64c5fe0056a75c61e2e Mon Sep 17 00:00:00 2001 From: Joshua Karns Date: Tue, 4 Apr 2023 14:04:41 -0400 Subject: [PATCH 03/12] In progress commit for review --- .../flights/process/DependencyGraph.java | 37 ++--------- .../ngafid/flights/process/FlightBuilder.java | 42 +++++++++++-- .../flights/process/FlightProcessor.java | 62 ------------------- .../ngafid/flights/process/ProcessAltAGL.java | 56 ++--------------- .../ngafid/flights/process/ProcessStep.java | 32 +++++----- 5 files changed, 63 insertions(+), 166 deletions(-) delete mode 100644 src/main/java/org/ngafid/flights/process/FlightProcessor.java diff 
--git a/src/main/java/org/ngafid/flights/process/DependencyGraph.java b/src/main/java/org/ngafid/flights/process/DependencyGraph.java index 217dd8c65..8c700b1df 100644 --- a/src/main/java/org/ngafid/flights/process/DependencyGraph.java +++ b/src/main/java/org/ngafid/flights/process/DependencyGraph.java @@ -109,32 +109,6 @@ public Void compute() { } } - /** - * Dummy step meant to act as a root node in DAG. This is done by adding all of the columns included in the file - * as output columns, so all other steps will depend on this. - **/ - class DummyProcessStep extends ProcessStep { - Set outputColumns = new HashSet<>(); - - public DummyProcessStep(Flight flight) { - // We can pass in null rather than a connection object - super(null, flight); - outputColumns.addAll(doubleTimeSeries.keySet()); - outputColumns.addAll(stringTimeSeries.keySet()); - } - - public Set getRequiredDoubleColumns() { return Collections.emptySet(); } - public Set getRequiredStringColumns() { return Collections.emptySet(); } - public Set getRequiredColumns() { return Collections.emptySet(); } - public Set getOutputColumns() { return outputColumns; } - - public boolean airframeIsValid(String airframe) { return true; } - public boolean isRequired() { return true; } - - // Left blank intentionally - public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException {} - } - private void nodeConflictError(ProcessStep first, ProcessStep second) throws FatalFlightFileException { throw new FatalFlightFileException( "ERROR when building dependency graph! " @@ -175,16 +149,16 @@ private void createEdges(DependencyNode node) throws FatalFlightFileException { HashMap columnToSource = new HashMap<>(64); HashSet nodes = new HashSet<>(64); DependencyNode rootNode; - Flight flight; + FlightBuilder builder; - public DependencyGraph(Flight flight, ArrayList steps) throws FlightProcessingException { + public DependencyGraph(FlightBuilder builder, ArrayList steps) throws FlightProcessingException { /** * Create nodes for each step and create a mapping from output column name * to the node that outputs that column. This should be a unique mapping, as * we don't want two steps generating the same output column. 
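*
* For example, ProcessAltAGL lists ALT_AGL as its only output column and ALT_MSL, LATITUDE and
* LONGITUDE as required columns, so it is registered as the sole source of ALT_AGL and
* createEdges links it to the nodes (if any) that output those three columns.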
**/ - this.flight = flight; + this.builder = builder; try { rootNode = registerStep(new DummyProcessStep(flight)); @@ -208,7 +182,8 @@ public void compute() throws FlightProcessingException { } } - ForkJoinPool ex = new ForkJoinPool(); + ForkJoinPool ex = new ForkJoinPool(); + try { ex.invoke(new RecursiveTask() { public Void compute() { @@ -228,7 +203,7 @@ public Void compute() { for (var node : nodes) { for (var e : node.exceptions) { if (e instanceof MalformedFlightFileException me) { - flight.addException(me); + builder.exceptions.add(me); } else if (e instanceof FatalFlightFileException fe) { fatalExceptions.add(fe); } else if (e instanceof SQLException se) { diff --git a/src/main/java/org/ngafid/flights/process/FlightBuilder.java b/src/main/java/org/ngafid/flights/process/FlightBuilder.java index 122b52ea2..380fd186b 100644 --- a/src/main/java/org/ngafid/flights/process/FlightBuilder.java +++ b/src/main/java/org/ngafid/flights/process/FlightBuilder.java @@ -1,24 +1,37 @@ package org.ngafid.flights.process; +import java.sql.Connection; +import java.sql.SQLException; + import java.util.Map; +import java.util.List; import java.util.ArrayList; import java.util.concurrent.ConcurrentHashMap; import org.ngafid.flights.*; +import org.ngafid.flights.process.*; public class FlightBuilder { - private final ConcurrentHashMap doubleTimeSeries; - private final ConcurrentHashMap stringTimeSeries; + public final ConcurrentHashMap doubleTimeSeries; + public final ConcurrentHashMap stringTimeSeries; private ArrayList itinerary = null; + private String startDateTime = null, + endDateTime = null; + + public final int fleetId; + public final String airframeName, + tailNumber; - private String startDateTime = null, endDateTime = null; + public final ArrayList exceptions = new ArrayList<>(); - // Cosntrutor for each file type... - public FlightBuilder(Map doubleTimeSeries, Map stringTimeSeries) { + public FlightBuilder(int fleetId, String tailNumber, String airframeName, Map doubleTimeSeries, Map stringTimeSeries) { this.doubleTimeSeries = new ConcurrentHashMap<>(doubleTimeSeries); this.stringTimeSeries = new ConcurrentHashMap<>(stringTimeSeries); + this.fleetId = fleetId; + this.airframeName = airframeName; + this.tailNumber = tailNumber; } public void addTimeSeries(String name, DoubleTimeSeries timeSeries) { @@ -41,6 +54,25 @@ public synchronized void setItinerary(ArrayList itinerary) { this.itinerary = itinerary; } + // This can be overridden. + public List gatherSteps(Connection connection) { + // Add all of our processing steps here... + // The order doesn't matter; the DependencyGraph will resolve + // the order in the event that there are dependencies. + return List.of(new ProcessAltAGL(connection, this)); + } + + // throws a flight processing exception if an unrecoverable error occurred. + public Flight build(Connection connection) throws FlightProcessingException { + DependencyGraph dg = new DependencyGraph(this, gatherSteps()); + + dg.compute(); + + // TODO: Make sure headers are calculated appropriately. 
+ // TODO: Make sure hasAGL and hasCoords get set correctly + return new Flight(fleetId, tailNumber, airframeName, doubleTimeSeries, stringTimeSeries, exceptions); + } + // TODO: implement this public void validate() {} } diff --git a/src/main/java/org/ngafid/flights/process/FlightProcessor.java b/src/main/java/org/ngafid/flights/process/FlightProcessor.java deleted file mode 100644 index f24004f9d..000000000 --- a/src/main/java/org/ngafid/flights/process/FlightProcessor.java +++ /dev/null @@ -1,62 +0,0 @@ -package org.ngafid.flights.process; - -import java.util.List; -import java.util.ArrayList; -import java.util.logging.Logger; - -import org.ngafid.flights.Flight; -import org.ngafid.flights.MalformedFlightFileException; -import org.ngafid.flights.FatalFlightFileException; - -/** - * This class applies a sequence of processing steps to a flight that has already been parse (that is, a flight which - * has already had its columns parsed). The processing done here falls largely into a few categories: - * - * - Creation of new synthetic columns that aren't in the original files - * - Conversion of existing columns to different units - * - * Currently, the basic idea is that a list of process steps is created and then sequentially applied. - * Some of these steps are going to be mandatory, and some will only be applied to specific aircraft and/or - * aircraft that have the appropriate columns. - * - **/ -public class FlightProcessor { - - private static final Logger LOG = Logger.getLogger(FlightProcessor.class.getName()); - - Flight flight; - - public FlightProcessor(Flight flight) { - this.flight = flight; - } - - private static List requiredSteps = List.of(); - private static List optionalSteps = List.of(); - - protected ArrayList gatherProcessSteps() throws FatalFlightFileException { - ArrayList steps = new ArrayList<>(); - - for (ProcessStep.Factory factory : requiredSteps) { - ProcessStep step = factory.create(flight); - - if (!step.applicable()) - throw new FatalFlightFileException("Cannot apply required step " + step.toString() + " to flight " + flight.getId()); - - steps.add(step); - } - - for (ProcessStep.Factory factory : optionalSteps) { - ProcessStep step = factory.create(flight); - - if (!step.applicable()) - LOG.info("Cannot apply optional step " + step.toString() + " to flight " + flight.getId()); - - steps.add(step); - } - - return steps; - } - - final private void process() { - } -} diff --git a/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java b/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java index 47181ed8d..5a099cd2c 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java +++ b/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java @@ -18,8 +18,8 @@ public class ProcessAltAGL extends ProcessStep { private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(ALT_MSL, LATITUDE, LONGITUDE); private static Set OUTPUT_COLUMNS = Set.of(ALT_AGL); - public ProcessAltAGL(Connection connection, Flight flight) { - super(connection, flight); + public ProcessAltAGL(Connection connection, FlightBuilder builder) { + super(connection, builder); } public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } @@ -31,46 +31,9 @@ public ProcessAltAGL(Connection connection, Flight flight) { public boolean isRequired() { return true; } public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { - - flight.addHeader(ALT_AGL, UNIT_FT_AGL); - - DoubleTimeSeries altitudeMSLTS = doubleTimeSeries.get(ALT_MSL); - 
DoubleTimeSeries latitudeTS = doubleTimeSeries.get(LATITUDE); - DoubleTimeSeries longitudeTS = doubleTimeSeries.get(LONGITUDE); - - // TODO: Probably remove this stuff since ths method will only be called if the columns are available - if (altitudeMSLTS == null || latitudeTS == null || longitudeTS == null) { - String message = "Cannot calculate AGL, flight file had empty or missing "; - - int count = 0; - if (altitudeMSLTS == null) { - message += "'" + ALT_MSL + "'"; - count++; - } - - if (latitudeTS == null) { - if (count > 0) message += ", "; - message += "'" + LATITUDE + "'"; - count++; - } - - if (longitudeTS == null) { - if (count > 0) message += " and "; - message += "'" + LONGITUDE + "'"; - count++; - } - - message += " column"; - if (count >= 2) message += "s"; - message += "."; - - //should be initialized to false, but lets make sure - flight.setHasCoords(false); - flight.setHasAGL(false); - throw new MalformedFlightFileException(message); - } - flight.setHasCoords(true); - flight.setHasAGL(true); + DoubleTimeSeries altitudeMSLTS = builder.doubleTimeSeries.get(ALT_MSL); + DoubleTimeSeries latitudeTS = builder.doubleTimeSeries.get(LATITUDE); + DoubleTimeSeries longitudeTS = builder.doubleTimeSeries.get(LONGITUDE); DoubleTimeSeries altitudeAGLTS = withConnection(connection -> new DoubleTimeSeries(connection, ALT_AGL, UNIT_FT_AGL)); @@ -79,11 +42,8 @@ public void compute() throws SQLException, MalformedFlightFileException, FatalFl double latitude = latitudeTS.get(i); double longitude = longitudeTS.get(i); - //System.err.println("getting AGL for latitude: " + latitude + ", " + longitude); - if (Double.isNaN(altitudeMSL) || Double.isNaN(latitude) || Double.isNaN(longitude)) { altitudeAGLTS.add(Double.NaN); - //System.err.println("result is: " + Double.NaN); continue; } @@ -91,15 +51,11 @@ public void compute() throws SQLException, MalformedFlightFileException, FatalFl int altitudeAGL = TerrainCache.getAltitudeFt(altitudeMSL, latitude, longitude); altitudeAGLTS.add(altitudeAGL); } catch (NoSuchFileException e) { - System.err.println("ERROR: could not read terrain file: " + e); - - flight.setHasAGL(false); throw new MalformedFlightFileException("Could not calculate AGL for this flight as it had latitudes/longitudes outside of the United States."); } - } - doubleTimeSeries.put(ALT_AGL, altitudeAGLTS); + builder.doubleTimeSeries.put(ALT_AGL, altitudeAGLTS); } } diff --git a/src/main/java/org/ngafid/flights/process/ProcessStep.java b/src/main/java/org/ngafid/flights/process/ProcessStep.java index b02de7cf4..44aa20f7b 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessStep.java +++ b/src/main/java/org/ngafid/flights/process/ProcessStep.java @@ -6,6 +6,8 @@ import org.ngafid.flights.FatalFlightFileException; import org.ngafid.flights.MalformedFlightFileException; +import org.ngafid.flights.process.FlightBuilder; + import java.util.Map; import java.util.Set; import java.sql.Connection; @@ -14,27 +16,15 @@ public abstract class ProcessStep { - - public interface Factory { - ProcessStep create(Flight flight); - } - - protected Flight flight; + protected FlightBuilder builder; // Connection is not accessible by subclasses directly by design, instead use the `withConnection` function. // This grabs the lock on the object so only one thread is using the connection at any given point in time. 
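// For example, ProcessAltAGL allocates its output series with:
// DoubleTimeSeries altitudeAGLTS = withConnection(connection -> new DoubleTimeSeries(connection, ALT_AGL, UNIT_FT_AGL));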
private Connection connection; - // References to the corresponding fields in `flight` - protected Map doubleTimeSeries; - protected Map stringTimeSeries; - - public ProcessStep(Connection connection, Flight flight) { + public ProcessStep(Connection connection, FlightBuilder builder) { this.connection = connection; - this.flight = flight; - - this.doubleTimeSeries = flight.getDoubleTimeSeriesMap(); - this.stringTimeSeries = flight.getStringTimeSeriesMap(); + this.builder = builder; } // These should probably return references to static immutable Sets. @@ -51,11 +41,13 @@ public ProcessStep(Connection connection, Flight flight) { final public boolean applicable() { return - airframeIsValid(flight.getAirframeName()) - && stringTimeSeries + airframeIsValid(builder.airframeName) + && builder + .stringTimeSeries .keySet() .containsAll(getRequiredStringColumns()) - && doubleTimeSeries + && builder + .doubleTimeSeries .keySet() .containsAll(getRequiredDoubleColumns()); } @@ -64,11 +56,15 @@ protected interface ConnectionFunctor { public T compute(Connection connection) throws SQLException; } + // This interface must be used to access the connection so that we can guarantee that only one + // thread is using it at any given time. final public T withConnection(ConnectionFunctor functor) throws SQLException { T value = null; + synchronized (connection) { value = functor.compute(connection); } + return value; } From 915df4a81c3153eb94c2eef3a381cfd0ba8cb72e Mon Sep 17 00:00:00 2001 From: Joshua Karns Date: Tue, 4 Apr 2023 16:02:29 -0400 Subject: [PATCH 04/12] Now compiles --- src/main/java/org/ngafid/ProcessUpload.java | 5 +- src/main/java/org/ngafid/flights/Flight.java | 91 +++++++------------ .../flights/process/DependencyGraph.java | 26 +----- .../ngafid/flights/process/FlightBuilder.java | 55 +++++++---- .../ngafid/flights/process/FlightMeta.java | 22 +++++ .../process/ProcessAirportProximity.java | 8 +- .../flights/process/ProcessStartEndTime.java | 31 +++---- .../ngafid/flights/process/ProcessStep.java | 21 ++--- 8 files changed, 125 insertions(+), 134 deletions(-) create mode 100644 src/main/java/org/ngafid/flights/process/FlightMeta.java diff --git a/src/main/java/org/ngafid/ProcessUpload.java b/src/main/java/org/ngafid/ProcessUpload.java index 6a35c2f91..7ac3f12a5 100644 --- a/src/main/java/org/ngafid/ProcessUpload.java +++ b/src/main/java/org/ngafid/ProcessUpload.java @@ -21,6 +21,7 @@ import java.text.ParseException; import java.util.Arrays; +import java.util.List; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; @@ -226,9 +227,9 @@ private static class FlightInfo { int id; int length; String filename; - ArrayList exceptions = new ArrayList(); + List exceptions; - public FlightInfo(int id, int length, String filename, ArrayList exceptions) { + public FlightInfo(int id, int length, String filename, List exceptions) { this.id = id; this.length = length; this.filename = filename; diff --git a/src/main/java/org/ngafid/flights/Flight.java b/src/main/java/org/ngafid/flights/Flight.java index 44f41d07b..21b66c19f 100644 --- a/src/main/java/org/ngafid/flights/Flight.java +++ b/src/main/java/org/ngafid/flights/Flight.java @@ -12,6 +12,7 @@ import java.text.ParseException; import java.util.Date; import java.util.Calendar; +import java.util.Collections; // XML stuff. 
import javax.xml.parsers.DocumentBuilderFactory; @@ -54,8 +55,8 @@ import javax.xml.bind.DatatypeConverter; import org.ngafid.common.*; +import org.apache.commons.lang.NotImplementedException; import org.ngafid.Database; -import org.ngafid.common.*; import org.ngafid.airports.Airport; import org.ngafid.airports.Airports; import org.ngafid.airports.Runway; @@ -85,6 +86,7 @@ public class Flight { private final static String FLIGHT_COLUMNS = "id, fleet_id, uploader_id, upload_id, system_id, airframe_id, airframe_type_id, start_time, end_time, filename, md5_hash, number_rows, status, has_coords, has_agl, insert_completed, processing_status"; private final static String FLIGHT_COLUMNS_TAILS = "id, fleet_id, uploader_id, upload_id, f.system_id, airframe_id, airframe_type_id, start_time, end_time, filename, md5_hash, number_rows, status, has_coords, has_agl, insert_completed, processing_status"; + // TODO: Roll a lot of this stuff up into some sort of meta-data object? private int id = -1; private int fleetId = -1; private int uploaderId = -1; @@ -99,7 +101,6 @@ public class Flight { private String tailNumber; private String suggestedTailNumber; - private String calculationEndpoint; private boolean tailConfirmed; private String md5Hash; @@ -129,7 +130,7 @@ public class Flight { private long processingStatus = 0; private String status; - private ArrayList exceptions = new ArrayList(); + private List exceptions = new ArrayList(); private int numberRows; private String fileInformation; @@ -165,7 +166,7 @@ public static ArrayList getFlightsFromUpload(Connection connection, int return flights; } - public ArrayList getExceptions() { + public List getExceptions() { return exceptions; } @@ -1353,6 +1354,34 @@ public static List getSimAircraft(Connection connection, int fleetId) th return paths; } + public Flight(Connection connection, FlightMeta meta, Map doubletimeSeries, Map stringTimeSeries, List itinerary, List exceptions) throws SQLException { + fleetId = meta.fleetId; + uploaderId = meta.uploaderId; + uploadId = meta.uploadId; + + filename = meta.filename; + + airframeName = meta.airframeName; + airframeNameId = Airframes.getNameId(connection, airframeName); + + airframeType = meta.airframeType; + airframeTypeId = Airframes.getTypeId(connection, airframeType); + + systemId = meta.systemId; + suggestedTailNumber = meta.suggestedTailNumber; + md5Hash = meta.md5Hash; + startDateTime = meta.startDateTime; + endDateTime = meta.endDateTime; + + hasCoords = doubleTimeSeries.containsKey(LATITUDE) && doubleTimeSeries.containsKey(LONGITUDE); + hasAGL = doubleTimeSeries.containsKey(ALT_AGL); + + this.exceptions = exceptions; + checkExceptions(); + + this.stringTimeSeries = Collections.unmodifiableMap(new HashMap<>(stringTimeSeries)); + } + public Flight(Connection connection, ResultSet resultSet) throws SQLException { id = resultSet.getInt(1); fleetId = resultSet.getInt(2); @@ -2093,52 +2122,6 @@ private void process(Connection connection, InputStream inputStream) throws IOEx List defaultPasses = List.of(); - private ArrayList gatherProcessSteps(Connection connection) { - ArrayList steps = new ArrayList<>(); - - // TODO: add ScanEagle steps - - steps.add(new ProcessAltAGL(connection, this)); - // TODO: Finish implementing this - steps.add(new ProcessAirportProximity(connection, this)); - - // TODO: Lcl Date; calculateStartEndTime - // startDateTime; endDateTime - // TODO: Total Fuel; calculateTotalFuel - // doubleTimeSeries - // TODO: AltMSL Lag Diff; calculateLaggedAltMSL - // doubleTimeSeries - // TODO: 
engine divergences; calculateDivergence - // doubleTiemSeries - // TODO: Frequency check - // (nil) - // TODO: calculateItinerary - // itinerary - // TODO: runLOCICalculations - // doubleTimeSeries - - return steps; - } - - private void newProcess(Connection connection, InputStream inputStream) throws IOException, FatalFlightFileException, SQLException, FlightProcessingException { - initialize(connection, inputStream); - newProcess(connection); - } - - final private void newProcess(Connection connection) throws FlightProcessingException { - ArrayList steps = gatherProcessSteps(connection); // gatherProcessSteps will be an abstract method - - // These fields will be written to directly by the ProcessSteps. - doubleTimeSeries = new ConcurrentHashMap<>(doubleTimeSeries); - stringTimeSeries = new ConcurrentHashMap<>(stringTimeSeries); - - DependencyGraph dg = new DependencyGraph(this, steps); - dg.cycleCheck(); - dg.compute(); - - checkExceptions(); - } - final private void process(Connection connection) throws IOException, FatalFlightFileException, SQLException { //TODO: these may be different for different airframes/flight //data recorders. depending on the airframe/flight data recorder @@ -2436,15 +2419,11 @@ public Flight(int fleetId, String zipEntryName, InputStream inputStream, Connect checkIfExists(connection); inputStream.reset(); - newProcess(connection, inputStream); + process(connection, inputStream); } catch (FatalFlightFileException | IOException | FlightAlreadyExistsException | SQLException e) { status = "WARNING"; throw new FlightProcessingException(e); - } catch (FlightProcessingException e) { - System.out.println(e); - e.printStackTrace(); - System.exit(1); } checkExceptions(); @@ -3126,7 +3105,7 @@ private static int indexOfMin(double[] a, int i, int n) { for (int j = i; j < i + n; j++) { if (v > a[j]) { mindex = j; - v t + v = a[j]; } } diff --git a/src/main/java/org/ngafid/flights/process/DependencyGraph.java b/src/main/java/org/ngafid/flights/process/DependencyGraph.java index 8c700b1df..fdcca9941 100644 --- a/src/main/java/org/ngafid/flights/process/DependencyGraph.java +++ b/src/main/java/org/ngafid/flights/process/DependencyGraph.java @@ -1,41 +1,23 @@ package org.ngafid.flights.process; -import java.util.Set; -import java.sql.Connection; import java.sql.SQLException; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.concurrent.Executor; -import java.util.concurrent.Executors; import java.util.concurrent.ForkJoinPool; import java.util.concurrent.ForkJoinTask; import java.util.concurrent.RecursiveTask; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.locks.Condition; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; -import java.util.HashSet; -import java.util.Queue; -import java.util.HashMap; -import java.util.ArrayList; -import java.util.Collections; +import java.util.*; import java.util.logging.Logger; -import org.ngafid.flights.Flight; import org.ngafid.flights.FatalFlightFileException; import org.ngafid.flights.MalformedFlightFileException; -import org.ngafid.flights.process.ProcessStep; - /** * A dependency graph which represents the dependencies of ProcessSteps on one another. 
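*
* Nodes that no other node requires seed the ForkJoinPool; each node's task joins the tasks of
* the nodes it requires before calling compute(). Exceptions raised by a step are collected on
* its node and are later either attached to the FlightBuilder (MalformedFlightFileException) or
* rethrown together as a single FlightProcessingException.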
**/ public class DependencyGraph { private static final Logger LOG = Logger.getLogger(DependencyGraph.class.getName()); - private static final int PARALLELISM = Runtime.getRuntime().availableProcessors(); - class DependencyNode { final ProcessStep step; @@ -148,10 +130,9 @@ private void createEdges(DependencyNode node) throws FatalFlightFileException { // Maps column name to the node where that column is computed HashMap columnToSource = new HashMap<>(64); HashSet nodes = new HashSet<>(64); - DependencyNode rootNode; FlightBuilder builder; - public DependencyGraph(FlightBuilder builder, ArrayList steps) throws FlightProcessingException { + public DependencyGraph(FlightBuilder builder, List steps) throws FlightProcessingException { /** * Create nodes for each step and create a mapping from output column name * to the node that outputs that column. This should be a unique mapping, as @@ -161,7 +142,6 @@ public DependencyGraph(FlightBuilder builder, ArrayList steps) thro this.builder = builder; try { - rootNode = registerStep(new DummyProcessStep(flight)); for (var step : steps) registerStep(step); for (var node : nodes) createEdges(node); } catch (FatalFlightFileException e) { @@ -176,7 +156,7 @@ public void compute() throws FlightProcessingException { ArrayList> initialTasks = new ArrayList<>(); for (var node : nodes) { if (node.requiredBy.size() == 0) { - var task = new DependencyNodeTask(rootNode, tasks); + var task = new DependencyNodeTask(node, tasks); initialTasks.add(task); tasks.put(node, task); } diff --git a/src/main/java/org/ngafid/flights/process/FlightBuilder.java b/src/main/java/org/ngafid/flights/process/FlightBuilder.java index 380fd186b..5f8bd46ee 100644 --- a/src/main/java/org/ngafid/flights/process/FlightBuilder.java +++ b/src/main/java/org/ngafid/flights/process/FlightBuilder.java @@ -10,6 +10,7 @@ import org.ngafid.flights.*; import org.ngafid.flights.process.*; +import org.ngafid.flights.process.FlightMeta; public class FlightBuilder { @@ -17,60 +18,74 @@ public class FlightBuilder { public final ConcurrentHashMap stringTimeSeries; private ArrayList itinerary = null; - private String startDateTime = null, - endDateTime = null; - public final int fleetId; - public final String airframeName, - tailNumber; + public final FlightMeta meta; public final ArrayList exceptions = new ArrayList<>(); - public FlightBuilder(int fleetId, String tailNumber, String airframeName, Map doubleTimeSeries, Map stringTimeSeries) { + public FlightBuilder(FlightMeta meta, Map doubleTimeSeries, Map stringTimeSeries) { this.doubleTimeSeries = new ConcurrentHashMap<>(doubleTimeSeries); this.stringTimeSeries = new ConcurrentHashMap<>(stringTimeSeries); - this.fleetId = fleetId; - this.airframeName = airframeName; - this.tailNumber = tailNumber; + this.meta = meta; } - public void addTimeSeries(String name, DoubleTimeSeries timeSeries) { + public FlightBuilder addTimeSeries(String name, DoubleTimeSeries timeSeries) { doubleTimeSeries.put(name, timeSeries); + return this; } - public void addTimeSeries(String name, StringTimeSeries timeSeries) { + public FlightBuilder addTimeSeries(String name, StringTimeSeries timeSeries) { stringTimeSeries.put(name, timeSeries); + return this; } - public synchronized void setStartDateTime(String startDateTime) { - this.startDateTime = startDateTime; + public synchronized FlightBuilder setStartDateTime(String startDateTime) { + this.meta.startDateTime = startDateTime; + return this; } - public synchronized void setEndDateTime(String endDateTime) { - this.endDateTime 
= endDateTime; + public synchronized FlightBuilder setEndDateTime(String endDateTime) { + this.meta.endDateTime = endDateTime; + return this; } - public synchronized void setItinerary(ArrayList itinerary) { + public synchronized FlightBuilder setItinerary(ArrayList itinerary) { this.itinerary = itinerary; + return this; } + public synchronized FlightBuilder updateProcessingStatus(int processingStatus) { + this.meta.processingStatus |= processingStatus; + return this; + } + + private static final List processSteps = List.of( + ProcessAltAGL::new, + ProcessAirportProximity::new, + ProcessStartEndTime::new + ); + // This can be overridden. - public List gatherSteps(Connection connection) { + protected List gatherSteps(Connection connection) { // Add all of our processing steps here... // The order doesn't matter; the DependencyGraph will resolve // the order in the event that there are dependencies. - return List.of(new ProcessAltAGL(connection, this)); + return processSteps.stream().map(factory -> factory.create(connection, this)).toList(); } // throws a flight processing exception if an unrecoverable error occurred. public Flight build(Connection connection) throws FlightProcessingException { - DependencyGraph dg = new DependencyGraph(this, gatherSteps()); + DependencyGraph dg = new DependencyGraph(this, gatherSteps(connection)); dg.compute(); // TODO: Make sure headers are calculated appropriately. // TODO: Make sure hasAGL and hasCoords get set correctly - return new Flight(fleetId, tailNumber, airframeName, doubleTimeSeries, stringTimeSeries, exceptions); + try { + return new Flight(connection, meta, doubleTimeSeries, stringTimeSeries, itinerary, exceptions); + } catch (SQLException e) { + throw new FlightProcessingException(e); + } } // TODO: implement this diff --git a/src/main/java/org/ngafid/flights/process/FlightMeta.java b/src/main/java/org/ngafid/flights/process/FlightMeta.java new file mode 100644 index 000000000..4ec170dd7 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/FlightMeta.java @@ -0,0 +1,22 @@ +package org.ngafid.flights.process; + +/** + * Utility class used by FlightBuilder to call the Flight constructor. 
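+ *
+ * Fields are public and mutable: FlightBuilder's synchronized setters (setStartDateTime,
+ * setEndDateTime, updateProcessingStatus) write to them while process steps run, and the
+ * Flight(Connection, FlightMeta, ...) constructor copies them into the new Flight.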
+ **/ +public final class FlightMeta { + + public int fleetId = -1, + uploaderId = -1, + uploadId = -1, + processingStatus = 0; + + public String startDateTime, + endDateTime, + md5Hash, + airframeType, + systemId, + filename, + airframeName, + calculated, + suggestedTailNumber; +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java b/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java index 25fe6b954..7c5def32a 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java +++ b/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java @@ -5,10 +5,10 @@ import java.sql.Connection; import java.sql.SQLException; -import org.ngafid.flights.Flight; import org.ngafid.flights.DoubleTimeSeries; import static org.ngafid.flights.Parameters.*; import org.ngafid.flights.process.ProcessStep; +import org.ngafid.flights.process.FlightBuilder; import org.ngafid.flights.FatalFlightFileException; import org.ngafid.flights.MalformedFlightFileException; @@ -16,8 +16,8 @@ public class ProcessAirportProximity extends ProcessStep { private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(LATITUDE, LONGITUDE, ALT_AGL); private static Set OUTPUT_COLUMNS = Set.of(NEAREST_RUNWAY, AIRPORT_DISTANCE, NEAREST_RUNWAY, RUNWAY_DISTANCE); - public ProcessAirportProximity(Connection connection, Flight flight) { - super(connection, flight); + public ProcessAirportProximity(Connection connection, FlightBuilder builder) { + super(connection, builder); } public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } @@ -29,6 +29,6 @@ public ProcessAirportProximity(Connection connection, Flight flight) { public boolean isRequired() { return true; } public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { - + float x = 1 / 0; } } diff --git a/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java b/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java index 63a698296..b108190cb 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java +++ b/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java @@ -6,22 +6,21 @@ import java.sql.SQLException; import java.util.Collections; import java.util.logging.Logger; +import java.time.format.DateTimeFormatter; import static org.ngafid.flights.Parameters.*; import org.ngafid.common.*; -import org.ngafid.flights.Flight; import org.ngafid.flights.StringTimeSeries; import org.ngafid.flights.MalformedFlightFileException; import org.ngafid.flights.FatalFlightFileException; -import org.ngafid.flights.process.ProcessStep; public class ProcessStartEndTime extends ProcessStep { private static final Logger LOG = Logger.getLogger(ProcessStartEndTime.class.getName()); public static Set REQUIRED_STRING_COLUMNS = Set.of(LCL_DATE, LCL_TIME, UTC_OFFSET); - public ProcessStartEndTime(Connection connection, Flight flight) { - super(connection, flight); + public ProcessStartEndTime(Connection connection, FlightBuilder builder) { + super(connection, builder); } public Set getRequiredDoubleColumns() { return Collections.emptySet(); } @@ -33,9 +32,9 @@ public ProcessStartEndTime(Connection connection, Flight flight) { public boolean isRequired() { return true; } public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { - StringTimeSeries dates = stringTimeSeries.get(LCL_DATE); - StringTimeSeries times = stringTimeSeries.get(LCL_TIME); - StringTimeSeries offsets = 
stringTimeSeries.get(UTC_OFFSET); + StringTimeSeries dates = builder.stringTimeSeries.get(LCL_DATE); + StringTimeSeries times = builder.stringTimeSeries.get(LCL_TIME); + StringTimeSeries offsets = builder.stringTimeSeries.get(UTC_OFFSET); int dateSize = dates.size(); int timeSize = times.size(); @@ -61,9 +60,8 @@ public void compute() throws SQLException, MalformedFlightFileException, FatalFl LOG.info("\tfirst date time and offset not null at index: " + start); - if (start >= minSize) { + if (start >= minSize) throw new MalformedFlightFileException("Date, Time or Offset columns were all null! Cannot set start/end times."); - } //find the last full date time offset entry row int end = minSize - 1; @@ -90,8 +88,7 @@ public void compute() throws SQLException, MalformedFlightFileException, FatalFl try { startODT = TimeUtils.convertToOffset(startDate, startTime, startOffset, "+00:00"); } catch (DateTimeException dte) { - System.err.println("Corrupt start time data in flight file: " + dte.getMessage()); - //System.exit(1); + LOG.severe("Corrupt start time data in flight file: " + dte.getMessage()); throw new MalformedFlightFileException("Corrupt start time data in flight file: '" + dte.getMessage() + "'"); } @@ -99,19 +96,17 @@ public void compute() throws SQLException, MalformedFlightFileException, FatalFl try { endODT = TimeUtils.convertToOffset(endDate, endTime, endOffset, "+00:00"); } catch (DateTimeException dte) { - System.err.println("Corrupt end time data in flight file: " + dte.getMessage()); - //System.exit(1); + LOG.severe("Corrupt end time data in flight file: " + dte.getMessage()); throw new MalformedFlightFileException("Corrupt end time data in flight file: '" + dte.getMessage() + "'"); } if (startODT.isAfter(endODT)) { - startDateTime = null; - endDateTime = null; - + builder.setStartDateTime(null); + builder.setEndDateTime(null); throw new MalformedFlightFileException("Corrupt time data in flight file, start time was after the end time"); } - startDateTime = startODT.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")); - endDateTime = endODT.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")); + builder.setStartDateTime(startODT.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); + builder.setEndDateTime(endODT.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); } } diff --git a/src/main/java/org/ngafid/flights/process/ProcessStep.java b/src/main/java/org/ngafid/flights/process/ProcessStep.java index 44aa20f7b..9fe25403b 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessStep.java +++ b/src/main/java/org/ngafid/flights/process/ProcessStep.java @@ -1,21 +1,19 @@ package org.ngafid.flights.process; -import org.ngafid.flights.Flight; -import org.ngafid.flights.StringTimeSeries; -import org.ngafid.flights.DoubleTimeSeries; -import org.ngafid.flights.FatalFlightFileException; -import org.ngafid.flights.MalformedFlightFileException; - -import org.ngafid.flights.process.FlightBuilder; - -import java.util.Map; import java.util.Set; import java.sql.Connection; import java.sql.SQLException; -import java.util.HashSet; + +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; public abstract class ProcessStep { + + public interface Factory { + ProcessStep create(Connection connection, FlightBuilder builder); + } + protected FlightBuilder builder; // Connection is not accessible by subclasses directly by design, instead use the `withConnection` function. 
@@ -34,6 +32,7 @@ public ProcessStep(Connection connection, FlightBuilder builder) { public abstract Set getOutputColumns(); // Whether or not this ProcessStep is required / mandatory + // If a required step cannot be computed, a MalformedFlightFileException will be raised public abstract boolean isRequired(); // Whether or not this ProcessStep can be performed for a given airframe @@ -41,7 +40,7 @@ public ProcessStep(Connection connection, FlightBuilder builder) { final public boolean applicable() { return - airframeIsValid(builder.airframeName) + airframeIsValid(builder.meta.airframeName) && builder .stringTimeSeries .keySet() From 1e4a5c5c5ec79fb811ff1d48442d037133ce944b Mon Sep 17 00:00:00 2001 From: Joshua Karns Date: Thu, 6 Apr 2023 17:53:22 -0400 Subject: [PATCH 05/12] In progress --- .../org/ngafid/flights/DoubleTimeSeries.java | 47 ++++- src/main/java/org/ngafid/flights/Flight.java | 10 +- .../java/org/ngafid/flights/Parameters.java | 30 ++++ .../org/ngafid/flights/StringTimeSeries.java | 88 +++++---- .../flights/process/DependencyGraph.java | 86 +++++++-- .../ngafid/flights/process/FlightBuilder.java | 3 +- .../process/ProcessAirportProximity.java | 61 ++++++- .../ngafid/flights/process/ProcessAltAGL.java | 9 +- .../flights/process/ProcessDivergence.java | 170 ++++++++++++++++++ .../ngafid/flights/process/ProcessLOCI.java | 111 ++++++++++++ .../flights/process/ProcessLaggedAltMSL.java | 52 ++++++ .../flights/process/ProcessStartEndTime.java | 1 - .../ngafid/flights/process/ProcessStep.java | 53 +++++- .../flights/process/ProcessTotalFuel.java | 55 ++++++ 14 files changed, 702 insertions(+), 74 deletions(-) create mode 100644 src/main/java/org/ngafid/flights/process/ProcessDivergence.java create mode 100644 src/main/java/org/ngafid/flights/process/ProcessLOCI.java create mode 100644 src/main/java/org/ngafid/flights/process/ProcessLaggedAltMSL.java create mode 100644 src/main/java/org/ngafid/flights/process/ProcessTotalFuel.java diff --git a/src/main/java/org/ngafid/flights/DoubleTimeSeries.java b/src/main/java/org/ngafid/flights/DoubleTimeSeries.java index 43dbaf7da..0cb9bebff 100644 --- a/src/main/java/org/ngafid/flights/DoubleTimeSeries.java +++ b/src/main/java/org/ngafid/flights/DoubleTimeSeries.java @@ -47,16 +47,12 @@ public class DoubleTimeSeries { private double avg; private double max = -Double.MAX_VALUE; - public DoubleTimeSeries(Connection connection, String name, String dataType) throws SQLException { - this(connection, name, dataType, 16); - } - - public DoubleTimeSeries(Connection connection, String name, String dataType, int sizeHint) throws SQLException { + // Construct from an array + public DoubleTimeSeries(String name, String dataType, double[] data, int size) { this.name = name; - this.nameId = SeriesNames.getDoubleNameId(connection, name); this.dataType = dataType; - this.typeId = TypeNames.getId(connection, dataType); - this.data = new double[sizeHint]; + this.data = data; + this.size = size; min = Double.NaN; avg = Double.NaN; @@ -65,10 +61,30 @@ public DoubleTimeSeries(Connection connection, String name, String dataType, int validCount = 0; } + public DoubleTimeSeries(String name, String dataType, double[] data) { + this(name, dataType, data, data.length); + } + + public DoubleTimeSeries(String name, String dataType, int sizeHint) { + this(name, dataType, new double[sizeHint], 0); + } + + public DoubleTimeSeries(String name, String dataType) { + this(name, dataType, 16); + } + + public DoubleTimeSeries(Connection connection, String name, String dataType, int 
sizeHint) throws SQLException { + this(name, dataType, sizeHint); + setNameId(connection); + setTypeId(connection); + } + + public DoubleTimeSeries(Connection connection, String name, String dataType) throws SQLException { + this(connection, name, dataType, 16); + } public DoubleTimeSeries(Connection connection, String name, String dataType, boolean cache) throws SQLException { this(connection, name, dataType); - this.cache = cache; } @@ -118,6 +134,14 @@ public DoubleTimeSeries(Connection connection, String name, String dataType, Arr avg /= validCount; } + private void setNameId(Connection connection) throws SQLException { + this.nameId = SeriesNames.getDoubleNameId(connection, name); + } + + private void setTypeId(Connection connection) throws SQLException { + this.typeId = TypeNames.getId(connection, dataType); + } + /** * Checks to see whether this series will be cached in the database * @@ -376,6 +400,11 @@ public void updateDatabase(Connection connection, int flightId) { if (!this.cache) return; try { + if (typeId == -1) + setTypeId(connection); + if (nameId == -1) + setNameId(connection); + PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO double_series (flight_id, name_id, data_type_id, length, valid_length, min, avg, max, data) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"); preparedStatement.setInt(1, flightId); diff --git a/src/main/java/org/ngafid/flights/Flight.java b/src/main/java/org/ngafid/flights/Flight.java index 21b66c19f..0faaa412b 100644 --- a/src/main/java/org/ngafid/flights/Flight.java +++ b/src/main/java/org/ngafid/flights/Flight.java @@ -2177,13 +2177,15 @@ final private void process(Connection connection) throws IOException, FatalFligh } // END - + // DONE try { calculateAirportProximity(connection, "Latitude", "Longitude", "AltAGL"); } catch (MalformedFlightFileException e) { exceptions.add(e); } + // END + // DONE if (!airframeName.equals("ScanEagle") && !airframeName.contains("DJI")) { try { calculateTotalFuel(connection, new String[]{"FQtyL", "FQtyR"}, "Total Fuel"); @@ -2197,7 +2199,9 @@ final private void process(Connection connection) throws IOException, FatalFligh exceptions.add(e); } } + // END + // DONE try { if (airframeName.equals("Cessna 172S") || airframeName.equals("Cessna 172R")) { String chtNames[] = {"E1 CHT1", "E1 CHT2", "E1 CHT3", "E1 CHT4"}; @@ -2217,8 +2221,6 @@ final private void process(Connection connection) throws IOException, FatalFligh String egt2Names[] = {"E2 EGT1", "E2 EGT2", "E2 EGT3", "E2 EGT4"}; calculateDivergence(connection, egt2Names, "E2 EGT Divergence", "deg F"); - - } else if (airframeName.equals("Cirrus SR20") || airframeName.equals("Cessna 182T") || airframeName.equals("Cessna T182T") || airframeName.equals("Beechcraft A36/G36") || airframeName.equals("Cirrus SR22") || airframeName.equals("Cessna 400")) { String chtNames[] = {"E1 CHT1", "E1 CHT2", "E1 CHT3", "E1 CHT4", "E1 CHT5", "E1 CHT6"}; calculateDivergence(connection, chtNames, "E1 CHT Divergence", "deg F"); @@ -2255,6 +2257,7 @@ final private void process(Connection connection) throws IOException, FatalFligh LOG.severe("Skipping..."); // System.exit(1); } + // END if (!airframeName.equals("ScanEagle") && this.doubleTimeSeries.containsKey(ALT_B)) { //LOCI doesn't apply to UAS @@ -2304,7 +2307,6 @@ final private void process(Connection connection) throws IOException, FatalFligh } catch (MalformedFlightFileException e) { exceptions.add(e); } - } private void checkExceptions() { diff --git a/src/main/java/org/ngafid/flights/Parameters.java 
b/src/main/java/org/ngafid/flights/Parameters.java index 4245de67f..99086ae02 100644 --- a/src/main/java/org/ngafid/flights/Parameters.java +++ b/src/main/java/org/ngafid/flights/Parameters.java @@ -10,6 +10,8 @@ */ package org.ngafid.flights; +import java.util.Set; + public interface Parameters { /** * JSON-specific parameters @@ -65,6 +67,7 @@ public interface Parameters { public static final String ROLL = "Roll"; public static final String ALT_AGL = "AltAGL"; public static final String ALT_MSL = "AltMSL"; + public static final String ALT_MSL_LAG_DIFF = "AltMSL Lag Diff"; public static final String ALT_B = "AltB"; public static final String AOA_SIMPLE = "AOASimple"; public static final String E1_RPM = "E1 RPM"; @@ -88,6 +91,8 @@ public interface Parameters { public static final String STALL_PROBABILITY = "PStall"; public static final String LOSS_OF_CONTROL_PROBABILITY = "PLOCI"; public static final String HDG_TRK_DIFF = "HDG TRK Diff"; + public static final String FUEL_QTY_LEFT = "FQtyL"; + public static final String FUEL_QTY_RIGHT = "FQtyR"; public static final String NEAREST_RUNWAY = "NearestRunway"; public static final String RUNWAY_DISTANCE = "RunwayDistance"; @@ -98,6 +103,31 @@ public interface Parameters { * Units **/ public static final String UNIT_FT_AGL = "ft agl"; + public static final String UNIT_FT_MSL = "ft msl"; + public static final String UNIT_GALLONS = "gals"; + public static final String UNIT_DEG_F = "deg F"; + + /** + * {@link Airframes} names + * + * TODO: In the future, we may want to consider using Set reather than hardcoded strings. + * This would make our code more robust to varying airframe names + **/ + public static final String AIRFRAME_SCAN_EAGLE = "ScanEagle"; + public static final String AIRFRAME_DJI = "DJI"; + public static final String AIRFRAME_CESSNA_172S = "Cessna 172S"; + public static final String AIRFRAME_CESSNA_172R = "Cessna 172R"; + public static final String AIRFRAME_PA_28 = "PA-28-181"; + public static final String AIRFRAME_PA_44 = "PA-44-180"; + public static final String AIRFRAME_CIRRUS_SR20 = "Cirrus SR20"; + public static final String AIRFRAME_CIRRUS_SR22 = "Cirrus SR22"; + public static final String AIRFRAME_CESSNA_182T = "Cessna 182T"; + public static final String AIRFRAME_CESSNA_T182T = "Cessna T182T"; + public static final String AIRFRAME_BEECHCRAFT_A36_G36 = "Beechcraft A36/G36"; + public static final String AIRFRAME_CESSNA_400 = "Cessna 400"; + public static final String AIRFRAME_DIAMOND_DA__40 = "Diamond DA 40"; + public static final String AIRFRAME_DIAMOND_DA_40 = "Diamond DA40";; + public static final String AIRFRAME_DIAMOND_DA_40F = "Diamond DA 40 F"; /** * {@link Airframes} id's diff --git a/src/main/java/org/ngafid/flights/StringTimeSeries.java b/src/main/java/org/ngafid/flights/StringTimeSeries.java index b5e8c301a..c000c83e9 100644 --- a/src/main/java/org/ngafid/flights/StringTimeSeries.java +++ b/src/main/java/org/ngafid/flights/StringTimeSeries.java @@ -33,31 +33,41 @@ public class StringTimeSeries { private static final Logger LOG = Logger.getLogger(StringTimeSeries.class.getName()); private static final int COMPRESSION_LEVEL = Deflater.DEFAULT_COMPRESSION; + private static final int SIZE_HINT = 256; - private int nameId; + private int nameId = -1; private String name; - private int typeId; + private int typeId = -1; private String dataType; private ArrayList timeSeries; private int validCount; - public StringTimeSeries(Connection connection, String name, String dataType) throws SQLException { + public 
StringTimeSeries(String name, String dataType, int sizeHint) { this.name = name; - this.nameId = SeriesNames.getStringNameId(connection, name); this.dataType = dataType; - this.typeId = TypeNames.getId(connection, dataType); - this.timeSeries = new ArrayList(); + this.timeSeries = new ArrayList(sizeHint); validCount = 0; + + } + + public StringTimeSeries(String name, String dataType) { + this(name, dataType, SIZE_HINT); + } + + public StringTimeSeries(Connection connection, String name, String dataType) throws SQLException { + this(name, dataType, SIZE_HINT); + setNameId(connection); + setTypeId(connection); } public StringTimeSeries(Connection connection, String name, String dataType, ArrayList timeSeries) throws SQLException { this.name = name; - this.nameId = SeriesNames.getStringNameId(connection, name); this.dataType = dataType; - this.typeId = TypeNames.getId(connection, dataType); this.timeSeries = timeSeries; + setNameId(connection); + setTypeId(connection); validCount = 0; for (int i = 0; i < timeSeries.size(); i++) { @@ -65,10 +75,33 @@ public StringTimeSeries(Connection connection, String name, String dataType, Arr validCount++; } } - } + } + + // Added to get results for StringTimeSeries + public StringTimeSeries(Connection connection, ResultSet resultSet) throws SQLException, IOException, ClassNotFoundException { + + this.nameId = resultSet.getInt(1); + this.name = SeriesNames.getStringName(connection, this.nameId); + //System.out.println("name: " + name); + this.typeId = resultSet.getInt(2); + this.dataType = TypeNames.getName(connection, this.typeId); + //System.out.println("data type: " + dataType); - // Added to get StringTimeSeries + int length = resultSet.getInt(3); + //System.out.println("length: " + length); + validCount = resultSet.getInt(4); + //System.out.println("valid count: " + validCount); + + Blob values = resultSet.getBlob(5); + byte[] bytes = values.getBytes(1, (int)values.length()); + //System.out.println("values.length: " + (int)values.length()); + values.free(); + + // This unchecked caste warning can be fixed but it shouldnt be necessary if we only but ArrayList objects into the StringTimeSeries cache. + this.timeSeries = (ArrayList) Compression.inflateObject(bytes); + } + public static StringTimeSeries getStringTimeSeries(Connection connection, int flightId, String name) throws SQLException { PreparedStatement query = connection.prepareStatement("SELECT ss.name_id, ss.data_type_id, ss.length, ss.valid_length, ss.data FROM string_series AS ss INNER JOIN string_series_names AS ssn ON ssn.id = ss.name_id WHERE ssn.name = ? 
AND ss.flight_id = ?"); @@ -99,32 +132,14 @@ public static StringTimeSeries getStringTimeSeries(Connection connection, int fl return null; } } - - // Added to get results for StringTimeSeries - public StringTimeSeries(Connection connection, ResultSet resultSet) throws SQLException, IOException, ClassNotFoundException { - - this.nameId = resultSet.getInt(1); - this.name = SeriesNames.getStringName(connection, this.nameId); - //System.out.println("name: " + name); - - this.typeId = resultSet.getInt(2); - this.dataType = TypeNames.getName(connection, this.typeId); - //System.out.println("data type: " + dataType); - - int length = resultSet.getInt(3); - //System.out.println("length: " + length); - validCount = resultSet.getInt(4); - //System.out.println("valid count: " + validCount); - - Blob values = resultSet.getBlob(5); - byte[] bytes = values.getBytes(1, (int)values.length()); - //System.out.println("values.length: " + (int)values.length()); - values.free(); - - // This unchecked caste warning can be fixed but it shouldnt be necessary if we only but ArrayList objects into the StringTimeSeries cache. - this.timeSeries = (ArrayList) Compression.inflateObject(bytes); + + private void setNameId(Connection connection) throws SQLException { + this.nameId = SeriesNames.getDoubleNameId(connection, name); } + private void setTypeId(Connection connection) throws SQLException { + this.typeId = TypeNames.getId(connection, dataType); + } public String toString() { return "[StringTimeSeries '" + name + "' size: " + timeSeries.size() + ", validCount: " + validCount + "]"; } @@ -197,6 +212,11 @@ public void updateDatabase(Connection connection, int flightId) { //System.out.println("Updating database for " + this); try { + if (nameId == -1) + setNameId(connection); + if (typeId == -1) + setTypeId(connection); + PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO string_series (flight_id, name_id, data_type_id, length, valid_length, data) VALUES (?, ?, ?, ?, ?, ?)"); preparedStatement.setInt(1, flightId); diff --git a/src/main/java/org/ngafid/flights/process/DependencyGraph.java b/src/main/java/org/ngafid/flights/process/DependencyGraph.java index fdcca9941..f73ee182d 100644 --- a/src/main/java/org/ngafid/flights/process/DependencyGraph.java +++ b/src/main/java/org/ngafid/flights/process/DependencyGraph.java @@ -34,23 +34,40 @@ public DependencyNode(ProcessStep step) { this.step = step; } + void disableChildren() { + if (enabled.get()) { + enabled.set(false); + if (step.isRequired()) { + String reason = step.explainApplicability(); + LOG.severe("Required step " + step.toString() + " has been disabled for :\n " + reason); + exceptions.add(new FatalFlightFileException(reason)); + } + for (var child : requiredBy) child.disable(); + } + } + void disable() { - enabled.set(false); - if (step.isRequired()) { - LOG.severe("Required step " + step.toString() + " has been disabled."); - exceptions.add(new FatalFlightFileException("Required step " + step.toString() + " has been disabled.")); + if (enabled.get()) { + enabled.set(false); + if (step.isRequired()) { + LOG.severe("Required step " + step.toString() + " has been disabled."); + exceptions.add( + new FatalFlightFileException( + "Required step " + step.toString() + + " has been disabled because a required parent step has been disabled")); + } + for (var child : requiredBy) child.disable(); } - for (var child : requiredBy) - child.disable(); } void compute() { try { - if (step.applicable()) + if (step.applicable()) { step.compute(); - 
else - disable(); + } else { + disableChildren(); + } } catch (SQLException | MalformedFlightFileException | FatalFlightFileException e) { LOG.warning("Encountered exception when calculating process step " + step.toString() + ": " + e.toString()); @@ -81,11 +98,9 @@ public Void compute() { for (var requiredNode : node.requires) { getTask(requiredNode).join(); } - + if (node.enabled.get()) node.compute(); - else {} // TODO: Add some sort of exception here. We don't want to just silently - // let the processing pipeline fail somewhere return null; } @@ -204,7 +219,50 @@ public Void compute() { throw new FlightProcessingException(fatalExceptions); } - public void cycleCheck() throws FlightProcessingException { - // TODO: Cycle check + public void scrutinize() { + cycleCheck(); + requiredCheck(); + } + + // Ensure that there are no required steps that are children to optional steps, + // since that wouldn't make sense. + private void requiredCheck() { + for (var node : nodes) { + if (!node.step.isRequired()) + src continue; + + for (var parent : node.requiredBy) { + if (!parent.step.isRequired()) { + System.err.println("ERROR in your DependencyGraph! The optional step '" + parent + "' has a required dependent step '" + node + "'."); + System.exit(1); + } + } + } + } + + // Ensure there are no cycles! + private void cycleCheck() { + for (var src : nodes) { + for (var node : nodes) + node.mark = false; + + Queue q = new ArrayDeque<>(); + var dst = src; + for (var child : src.requiredBy) + q.add(child); + + while ((dst = q.poll()) != null) { + if (dst == src) { + System.err.println("ERROR in your DependencyGraph! Cycle was detected from step '" + src + "' to step '" + dst + "'."); + System.exit(1); + } + + dst.mark = true; + for (var child : dst.requiredBy) { + if (!child.mark) + q.add(child); + } + } + } } } diff --git a/src/main/java/org/ngafid/flights/process/FlightBuilder.java b/src/main/java/org/ngafid/flights/process/FlightBuilder.java index 5f8bd46ee..ed1eff484 100644 --- a/src/main/java/org/ngafid/flights/process/FlightBuilder.java +++ b/src/main/java/org/ngafid/flights/process/FlightBuilder.java @@ -11,6 +11,7 @@ import org.ngafid.flights.*; import org.ngafid.flights.process.*; import org.ngafid.flights.process.FlightMeta; +import static org.ngafid.flights.process.ProcessStep.required; public class FlightBuilder { @@ -62,7 +63,7 @@ public synchronized FlightBuilder updateProcessingStatus(int processingStatus) { private static final List processSteps = List.of( ProcessAltAGL::new, ProcessAirportProximity::new, - ProcessStartEndTime::new + required(ProcessStartEndTime::new) ); // This can be overridden. 
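The list above is the single registration point for processing steps: each entry is a ProcessStep.Factory, and wrapping an entry with required(...) marks the resulting step as mandatory. A minimal sketch of what an additional step could look like follows; the class name ProcessExample and its output column "ExampleColumn" are hypothetical, while the ProcessStep API, the shared doubleTS map, and the Parameters constants (ALT_AGL, UNIT_FT_AGL) are the ones introduced in this patch series.

    package org.ngafid.flights.process;

    import java.sql.Connection;
    import java.util.Collections;
    import java.util.Set;

    import org.ngafid.flights.DoubleTimeSeries;
    import static org.ngafid.flights.Parameters.*;

    // Hypothetical example step -- not part of this patch, shown only to illustrate the registration pattern.
    public class ProcessExample extends ProcessStep {
        private static final Set<String> REQUIRED_DOUBLE_COLUMNS = Set.of(ALT_AGL);
        private static final Set<String> OUTPUT_COLUMNS = Set.of("ExampleColumn"); // hypothetical output name

        public ProcessExample(Connection connection, FlightBuilder builder) {
            super(connection, builder);
        }

        public Set<String> getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; }
        public Set<String> getRequiredStringColumns() { return Collections.emptySet(); }
        public Set<String> getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; }
        public Set<String> getOutputColumns() { return OUTPUT_COLUMNS; }
        public boolean airframeIsValid(String airframe) { return true; }

        public void compute() {
            // Read an input column from the shared map and publish a derived column,
            // the same pattern the other ProcessSteps in this patch follow.
            DoubleTimeSeries altAGL = doubleTS.get(ALT_AGL);
            DoubleTimeSeries example = new DoubleTimeSeries("ExampleColumn", UNIT_FT_AGL, altAGL.size());
            for (int i = 0; i < altAGL.size(); i++)
                example.add(altAGL.get(i));
            doubleTS.put("ExampleColumn", example);
        }
    }

Such a step would be appended to processSteps as ProcessExample::new, or as required(ProcessExample::new) if the whole flight should be rejected whenever the step cannot run.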
diff --git a/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java b/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java index 7c5def32a..de83951f0 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java +++ b/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java @@ -5,7 +5,10 @@ import java.sql.Connection; import java.sql.SQLException; +import org.ngafid.airports.*; +import org.ngafid.common.MutableDouble; import org.ngafid.flights.DoubleTimeSeries; +import org.ngafid.flights.StringTimeSeries; import static org.ngafid.flights.Parameters.*; import org.ngafid.flights.process.ProcessStep; import org.ngafid.flights.process.FlightBuilder; @@ -15,6 +18,8 @@ public class ProcessAirportProximity extends ProcessStep { private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(LATITUDE, LONGITUDE, ALT_AGL); private static Set OUTPUT_COLUMNS = Set.of(NEAREST_RUNWAY, AIRPORT_DISTANCE, NEAREST_RUNWAY, RUNWAY_DISTANCE); + private final static double MAX_AIRPORT_DISTANCE_FT = 10000; + private final static double MAX_RUNWAY_DISTANCE_FT = 100; public ProcessAirportProximity(Connection connection, FlightBuilder builder) { super(connection, builder); @@ -26,9 +31,61 @@ public ProcessAirportProximity(Connection connection, FlightBuilder builder) { public Set getOutputColumns() { return OUTPUT_COLUMNS; } public boolean airframeIsValid(String airframe) { return true; } - public boolean isRequired() { return true; } public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { - float x = 1 / 0; + DoubleTimeSeries latitudeTS = builder.doubleTimeSeries.get(LATITUDE); + DoubleTimeSeries longitudeTS = builder.doubleTimeSeries.get(LONGITUDE); + DoubleTimeSeries altitudeAGLTS = builder.doubleTimeSeries.get(ALT_AGL); + + int sizeHint = latitudeTS.size(); + + StringTimeSeries nearestAirportTS = new StringTimeSeries("NearestAirport", "IATA Code", sizeHint); + stringTS.put("NearestAirport", nearestAirportTS); + + DoubleTimeSeries airportDistanceTS = new DoubleTimeSeries("AirportDistance", "ft", sizeHint); + doubleTS.put("AirportDistance", airportDistanceTS); + + StringTimeSeries nearestRunwayTS = new StringTimeSeries("NearestRunway", "IATA Code", sizeHint); + stringTS.put("NearestRunway", nearestRunwayTS); + + DoubleTimeSeries runwayDistanceTS = new DoubleTimeSeries("RunwayDistance", "ft", sizeHint); + doubleTS.put("RunwayDistance", runwayDistanceTS); + + + for (int i = 0; i < latitudeTS.size(); i++) { + double latitude = latitudeTS.get(i); + double longitude = longitudeTS.get(i); + double altitudeAGL = altitudeAGLTS.get(i); + + // TODO: Move this outside of the loop. To avoid re-allocation ? 
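+                // MutableDouble acts as an out-parameter here: the nearest-airport lookup below returns the
+                // Airport (or null) and fills airportDistance with the distance that is stored in airportDistanceTS.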
+ MutableDouble airportDistance = new MutableDouble(); + + Airport airport = null; + if (altitudeAGL <= 2000) { + airport = Airports.getNearestAirportWithin(latitude, longitude, MAX_AIRPORT_DISTANCE_FT, airportDistance); + } + + if (airport == null) { + nearestAirportTS.add(""); + airportDistanceTS.add(Double.NaN); + nearestRunwayTS.add(""); + runwayDistanceTS.add(Double.NaN); + } else { + nearestAirportTS.add(airport.iataCode); + airportDistanceTS.add(airportDistance.get()); + + MutableDouble runwayDistance = new MutableDouble(); + Runway runway = airport.getNearestRunwayWithin(latitude, longitude, MAX_RUNWAY_DISTANCE_FT, runwayDistance); + if (runway == null) { + nearestRunwayTS.add(""); + runwayDistanceTS.add(Double.NaN); + } else { + nearestRunwayTS.add(runway.name); + runwayDistanceTS.add(runwayDistance.get()); + } + } + + } + } } diff --git a/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java b/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java index 5a099cd2c..327613f74 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java +++ b/src/main/java/org/ngafid/flights/process/ProcessAltAGL.java @@ -28,12 +28,11 @@ public ProcessAltAGL(Connection connection, FlightBuilder builder) { public Set getOutputColumns() { return OUTPUT_COLUMNS; } public boolean airframeIsValid(String airframe) { return true; } - public boolean isRequired() { return true; } public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { - DoubleTimeSeries altitudeMSLTS = builder.doubleTimeSeries.get(ALT_MSL); - DoubleTimeSeries latitudeTS = builder.doubleTimeSeries.get(LATITUDE); - DoubleTimeSeries longitudeTS = builder.doubleTimeSeries.get(LONGITUDE); + DoubleTimeSeries altitudeMSLTS = doubleTS.get(ALT_MSL); + DoubleTimeSeries latitudeTS = doubleTS.get(LATITUDE); + DoubleTimeSeries longitudeTS = doubleTS.get(LONGITUDE); DoubleTimeSeries altitudeAGLTS = withConnection(connection -> new DoubleTimeSeries(connection, ALT_AGL, UNIT_FT_AGL)); @@ -55,7 +54,7 @@ public void compute() throws SQLException, MalformedFlightFileException, FatalFl } } - builder.doubleTimeSeries.put(ALT_AGL, altitudeAGLTS); + doubleTS.put(ALT_AGL, altitudeAGLTS); } } diff --git a/src/main/java/org/ngafid/flights/process/ProcessDivergence.java b/src/main/java/org/ngafid/flights/process/ProcessDivergence.java new file mode 100644 index 000000000..18cce59da --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessDivergence.java @@ -0,0 +1,170 @@ +package org.ngafid.flights.process; + + +import java.util.Set; +import java.util.Map; +import static java.util.Map.entry; +import java.util.List; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Collections; +import java.sql.Connection; +import java.sql.SQLException; + +import java.nio.file.NoSuchFileException; + +import org.ngafid.flights.Flight; +import org.ngafid.terrain.TerrainCache; +import org.ngafid.flights.DoubleTimeSeries; +import static org.ngafid.flights.Parameters.*; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + +public class ProcessDivergence extends ProcessStep { + + private record DivergenceConfig (List parameters, String output) {} + + private static final Set OUTPUT_COLUMNS = Set.of(TOTAL_FUEL); + private static final Set AIRFRAME_BLACKLIST = Set.of(AIRFRAME_SCAN_EAGLE, AIRFRAME_DJI); + + private static final List CESSNA_CONFIG = + List.of( + new DivergenceConfig(List.of("E1 CHT1", "E1 CHT2", "E1 CHT3", "E1 CHT4"), "E1 
CHT Divergence"), + new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 EGT3", "E1 EGT4"), "E1 EGT Divergence") + ); + + private static final List PA_28_CONFIG = + List.of( + new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 EGT3", "E1 EGT4"), "E1 EGT Divergence") + ); + + private static final List PA_44_CONFIG = + List.of( + new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 EGT3", "E1 EGT4"), "E1 EGT Divergence"), + new DivergenceConfig(List.of("E2 EGT1", "E2 EGT2", "E2 EGT3", "E2 EGT4"), "E2 EGT Divergence") + ); + + private static final List SIX_CYLINDER_CIRRUS = + List.of( + new DivergenceConfig(List.of("E1 CHT1", "E1 CHT2", "E1 CHT3", "E1 CHT4", "E1 CHT5", "E1 CHT6"), "E1 CHT Divergence"), + new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 EGT3", "E1 EGT4", "E1 EGT5", "E1 EGT6"), "E1 EGT Divergence") + ); + + private static final List DIAMOND_CONFIG = + List.of( + new DivergenceConfig(List.of("E1 CHT1", "E1 CHT2", "E1 CHT3", "E1 CHT4"), "E1 CHT Divergence"), + new DivergenceConfig(List.of("E1 EGT1", "E1 EGT2", "E1 EGT3", "E1 EGT4"), "E1 EGT Divergence") + ); + + private static final Map> CONFIG_MAP = + Map.ofEntries( + entry(AIRFRAME_CESSNA_172R, CESSNA_CONFIG), + entry(AIRFRAME_CESSNA_172S, CESSNA_CONFIG), + entry(AIRFRAME_PA_28, PA_28_CONFIG), + entry(AIRFRAME_PA_44, PA_44_CONFIG), + entry(AIRFRAME_CIRRUS_SR20, SIX_CYLINDER_CIRRUS), + entry(AIRFRAME_CESSNA_T182T, SIX_CYLINDER_CIRRUS), + entry(AIRFRAME_CESSNA_182T, SIX_CYLINDER_CIRRUS), + entry(AIRFRAME_BEECHCRAFT_A36_G36, SIX_CYLINDER_CIRRUS), + entry(AIRFRAME_CIRRUS_SR22, SIX_CYLINDER_CIRRUS), + entry(AIRFRAME_CESSNA_400, SIX_CYLINDER_CIRRUS), + entry(AIRFRAME_DIAMOND_DA_40F, DIAMOND_CONFIG), + entry(AIRFRAME_DIAMOND_DA_40, DIAMOND_CONFIG), + entry(AIRFRAME_DIAMOND_DA__40, DIAMOND_CONFIG) + ); + + public ProcessDivergence(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + private Set requiredDoubleColumns = null; + public Set getRequiredDoubleColumns() { + if (requiredDoubleColumns == null) { + + var configs = CONFIG_MAP.get(builder.meta.airframeName); + if (configs != null) { + + requiredDoubleColumns = new HashSet<>(32); + for (var config : configs) + requiredDoubleColumns.addAll(config.parameters); + + } else { + requiredDoubleColumns = Collections.emptySet(); + } + } + + return requiredDoubleColumns; + } + + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return getRequiredDoubleColumns(); } + + private Set outputColumns = null; + public Set getOutputColumns() { + if (outputColumns == null) { + + var configs = CONFIG_MAP.get(builder.meta.airframeName); + if (configs != null) { + + outputColumns = new HashSet<>(); + for (var config : configs) + outputColumns.add(config.output); + + } else { + outputColumns = Collections.emptySet(); + } + } + + return outputColumns; + } + + public boolean airframeIsValid(String airframe) { + for (String blacklisted : AIRFRAME_BLACKLIST) + if (airframe.contains(blacklisted)) + return false; + + return true; + } + private void calculateDivergence(List columnNames, String varianceColumnName) throws MalformedFlightFileException, SQLException { + DoubleTimeSeries columns[] = new DoubleTimeSeries[columnNames.size()]; + for (int i = 0; i < columns.length; i++) { + columns[i] = doubleTS.get(columnNames.get(i)); + + if (columns[i] == null) { + throw new MalformedFlightFileException("Cannot calculate '" + varianceColumnName + "' as parameter '" + columnNames.get(i) + "' was missing."); + } + } 
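+        // The "divergence" written below is the per-time-step spread: max minus min across the given series,
+        // ignoring NaN samples; if every sample at a step is NaN, 0 is recorded for that step.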
+ + DoubleTimeSeries variance = new DoubleTimeSeries(varianceColumnName, UNIT_DEG_F, columns[0].size()); + + for (int i = 0; i < columns[0].size(); i++) { + double max = -Double.MAX_VALUE; + double min = Double.MAX_VALUE; + + for (int j = 0; j < columns.length; j++) { + double current = columns[j].get(i); + if (!Double.isNaN(current) && current > max) max = columns[j].get(i); + if (!Double.isNaN(current) && current < min) min = columns[j].get(i); + } + + double v = 0; + if (max != -Double.MAX_VALUE && min != Double.MAX_VALUE) { + v = max - min; + } + + variance.add(v); + } + + doubleTS.put(varianceColumnName, variance); + } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + List configs = CONFIG_MAP.get(builder.meta.airframeName); + + if (configs == null) + return; + + for (var config : configs) + calculateDivergence(config.parameters, config.output); + } +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessLOCI.java b/src/main/java/org/ngafid/flights/process/ProcessLOCI.java new file mode 100644 index 000000000..72e7cfc12 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessLOCI.java @@ -0,0 +1,111 @@ +package org.ngafid.flights.process; + +import java.time.*; +import java.util.Set; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Collections; +import java.util.logging.Logger; +import java.time.format.DateTimeFormatter; + +import static org.ngafid.flights.Parameters.*; +import org.ngafid.common.*; +import org.ngafid.flights.StringTimeSeries; +import org.ngafid.flights.MalformedFlightFileException; +import org.ngafid.flights.FatalFlightFileException; + +public class ProcessLOCI extends ProcessStep { + private static final Logger LOG = Logger.getLogger(ProcessStartEndTime.class.getName()); + + public static Set REQUIRED_DOUBLE_COLUMNS = Set.of(STALL_DEPENDENCIES); + + public ProcessLOCI(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + public Set getRequiredDoubleColumns() { return Collections.emptySet(); } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return REQUIRED_STRING_COLUMNS; } + public Set getOutputColumns() { return Collections.emptySet(); } + + public boolean airframeIsValid(String airframe) { return true; } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + StringTimeSeries dates = builder.stringTimeSeries.get(LCL_DATE); + StringTimeSeries times = builder.stringTimeSeries.get(LCL_TIME); + StringTimeSeries offsets = builder.stringTimeSeries.get(UTC_OFFSET); + + int dateSize = dates.size(); + int timeSize = times.size(); + int offsetSize = offsets.size(); + + LOG.info("\tdate size: " + dateSize + ", time size: " + timeSize + ", offset size: " + offsetSize); + + //get the minimum sized length of each of these series, they should all be the same but + //if the last column was cut off it might not be the case + int minSize = dateSize; + if (minSize < timeSize) minSize = timeSize; + if (minSize < offsetSize) minSize = offsetSize; + + //find the first non-null time entry + int start = 0; + while (start < minSize && + (dates.get(start) == null || dates.get(start).equals("") || + times.get(start) == null || times.get(start).equals("") || + offsets.get(start) == null || offsets.get(start).equals("") || offsets.get(start).equals("+19:00"))) { + + start++; + } + + LOG.info("\tfirst date time and offset not null at index: " + 
start); + + if (start >= minSize) + throw new MalformedFlightFileException("Date, Time or Offset columns were all null! Cannot set start/end times."); + + //find the last full date time offset entry row + int end = minSize - 1; + while (end >= 0 && + (dates.get(end) == null || dates.get(end).equals("") || + times.get(end) == null || times.get(end).equals("") || + offsets.get(end) == null || offsets.get(end).equals(""))) { + + end--; + } + + String startDate = dates.get(start); + String startTime = times.get(start); + String startOffset = offsets.get(start); + + String endDate = dates.get(end); + String endTime = times.get(end); + String endOffset = offsets.get(end); + + LOG.info("\t\t\tfirst not null " + start + " -- " + startDate + " " + startTime + " " + startOffset); + LOG.info("\t\t\tlast not null " + endDate + " " + endTime + " " + endOffset); + + OffsetDateTime startODT = null; + try { + startODT = TimeUtils.convertToOffset(startDate, startTime, startOffset, "+00:00"); + } catch (DateTimeException dte) { + LOG.severe("Corrupt start time data in flight file: " + dte.getMessage()); + throw new MalformedFlightFileException("Corrupt start time data in flight file: '" + dte.getMessage() + "'"); + } + + OffsetDateTime endODT = null; + try { + endODT = TimeUtils.convertToOffset(endDate, endTime, endOffset, "+00:00"); + } catch (DateTimeException dte) { + LOG.severe("Corrupt end time data in flight file: " + dte.getMessage()); + throw new MalformedFlightFileException("Corrupt end time data in flight file: '" + dte.getMessage() + "'"); + } + + if (startODT.isAfter(endODT)) { + builder.setStartDateTime(null); + builder.setEndDateTime(null); + throw new MalformedFlightFileException("Corrupt time data in flight file, start time was after the end time"); + } + + builder.setStartDateTime(startODT.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); + builder.setEndDateTime(endODT.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); + } +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessLaggedAltMSL.java b/src/main/java/org/ngafid/flights/process/ProcessLaggedAltMSL.java new file mode 100644 index 000000000..5224b270e --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessLaggedAltMSL.java @@ -0,0 +1,52 @@ +package org.ngafid.flights.process; + +import java.util.Set; +import java.util.Collections; +import java.sql.Connection; +import java.sql.SQLException; + +import java.nio.file.NoSuchFileException; + +import org.ngafid.flights.Flight; +import org.ngafid.terrain.TerrainCache; +import org.ngafid.flights.DoubleTimeSeries; +import static org.ngafid.flights.Parameters.*; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + +public class ProcessLaggedAltMSL extends ProcessStep { + private static final Set REQUIRED_DOUBLE_COLUMNS = Set.of(ALT_MSL); + private static final Set OUTPUT_COLUMNS = Set.of(ALT_MSL_LAG_DIFF); + private static final Set AIRFRAME_BLACKLIST = Set.of(AIRFRAME_SCAN_EAGLE, AIRFRAME_DJI); + private static final int LAG = 10; + + public ProcessLaggedAltMSL(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getOutputColumns() { return OUTPUT_COLUMNS; } + + public boolean airframeIsValid(String airframe) { + for (String 
blacklisted : AIRFRAME_BLACKLIST) + if (airframe.contains(blacklisted)) + return false; + + return true; + } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + DoubleTimeSeries altMSL = doubleTS.get(ALT_MSL); + DoubleTimeSeries laggedAltMSL = new DoubleTimeSeries(ALT_MSL_LAG_DIFF, UNIT_FT_MSL, altMSL.size()); + + for (int i = 0; i < LAG; i++) + laggedAltMSL.add(0.0); + for (int i = LAG; i < altMSL.size(); i++) + laggedAltMSL.add(altMSL.get(i) - altMSL.get(i - LAG)); + + doubleTS.put(ALT_MSL_LAG_DIFF, laggedAltMSL); + } + +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java b/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java index b108190cb..e20912ac5 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java +++ b/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java @@ -29,7 +29,6 @@ public ProcessStartEndTime(Connection connection, FlightBuilder builder) { public Set getOutputColumns() { return Collections.emptySet(); } public boolean airframeIsValid(String airframe) { return true; } - public boolean isRequired() { return true; } public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { StringTimeSeries dates = builder.stringTimeSeries.get(LCL_DATE); diff --git a/src/main/java/org/ngafid/flights/process/ProcessStep.java b/src/main/java/org/ngafid/flights/process/ProcessStep.java index 9fe25403b..758f4a28e 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessStep.java +++ b/src/main/java/org/ngafid/flights/process/ProcessStep.java @@ -1,9 +1,12 @@ package org.ngafid.flights.process; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; import java.sql.Connection; import java.sql.SQLException; +import org.ngafid.flights.DoubleTimeSeries; +import org.ngafid.flights.StringTimeSeries; import org.ngafid.flights.FatalFlightFileException; import org.ngafid.flights.MalformedFlightFileException; @@ -14,7 +17,19 @@ public interface Factory { ProcessStep create(Connection connection, FlightBuilder builder); } - protected FlightBuilder builder; + public static Factory required(Factory factory) { + return (c, b) -> { + var step = factory.create(c, b); + step.required = true; + return step; + }; + } + + protected final FlightBuilder builder; + // References to the TS maps in builder + protected final ConcurrentHashMap doubleTS; + protected final ConcurrentHashMap stringTS; + // Connection is not accessible by subclasses directly by design, instead use the `withConnection` function. // This grabs the lock on the object so only one thread is using the connection at any given point in time. @@ -23,6 +38,8 @@ public interface Factory { public ProcessStep(Connection connection, FlightBuilder builder) { this.connection = connection; this.builder = builder; + doubleTS = builder.doubleTimeSeries; + stringTS = builder.stringTimeSeries; } // These should probably return references to static immutable Sets. 
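The required(...) wrapper above is a small decorator over Factory: the step it creates is identical except that it reports itself as mandatory, which is how FlightBuilder marks ProcessStartEndTime. A short usage sketch (the connection and builder variables are assumed to be in scope; this is illustrative rather than code from the patch):

    ProcessStep.Factory plain = ProcessStartEndTime::new;
    ProcessStep.Factory mandatory = ProcessStep.required(ProcessStartEndTime::new);

    ProcessStep a = plain.create(connection, builder);     // a.isRequired() == false (the default)
    ProcessStep b = mandatory.create(connection, builder);  // b.isRequired() == true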
@@ -30,15 +47,17 @@ public ProcessStep(Connection connection, FlightBuilder builder) { public abstract Set getRequiredStringColumns(); public abstract Set getRequiredColumns(); public abstract Set getOutputColumns(); - + + private boolean required = false; + // Whether or not this ProcessStep is required / mandatory // If a required step cannot be computed, a MalformedFlightFileException will be raised - public abstract boolean isRequired(); + public final boolean isRequired() { return required; } // Whether or not this ProcessStep can be performed for a given airframe public abstract boolean airframeIsValid(String airframe); - final public boolean applicable() { + public final boolean applicable() { return airframeIsValid(builder.meta.airframeName) && builder @@ -51,6 +70,32 @@ final public boolean applicable() { .containsAll(getRequiredDoubleColumns()); } + public final String explainApplicability() { + if (applicable()) { + return "is applicable (all required columns are present and the airframeName is valid)"; + } + + String className = this.getClass().getSimpleName(); + StringBuilder sb = new StringBuilder("Step '" + className + "' cannot be applied for the following reason(s):"); + + if (!airframeIsValid(builder.meta.airframeName)) { + sb.append(" - airframeName '" + builder.meta.airframeName + "' is invalid (" + + className + "::airframeIsValid returned false for airframeName '" + builder.meta.airframeName + "')\n"); + } + + for (String key : getRequiredStringColumns()) { + if (!builder.stringTimeSeries.containsKey(key)) + sb.append(" - The required string column '" + key + "' is not available.\n"); + } + + for (String key : getRequiredDoubleColumns()) { + if (!builder.doubleTimeSeries.containsKey(key)) + sb.append(" - The required double column '" + key + "' is not available.\n"); + } + + return sb.toString(); + } + protected interface ConnectionFunctor { public T compute(Connection connection) throws SQLException; } diff --git a/src/main/java/org/ngafid/flights/process/ProcessTotalFuel.java b/src/main/java/org/ngafid/flights/process/ProcessTotalFuel.java new file mode 100644 index 000000000..325eb631a --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessTotalFuel.java @@ -0,0 +1,55 @@ +package org.ngafid.flights.process; + +import java.util.Set; +import java.util.Collections; +import java.sql.Connection; +import java.sql.SQLException; + +import java.nio.file.NoSuchFileException; + +import org.ngafid.flights.Flight; +import org.ngafid.terrain.TerrainCache; +import org.ngafid.flights.DoubleTimeSeries; +import static org.ngafid.flights.Parameters.*; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + +public class ProcessTotalFuel extends ProcessStep { + private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(FUEL_QTY_LEFT, FUEL_QTY_RIGHT); + private static Set OUTPUT_COLUMNS = Set.of(TOTAL_FUEL); + private static Set AIRFRAME_BLACKLIST = Set.of(AIRFRAME_SCAN_EAGLE, AIRFRAME_DJI); + + public ProcessTotalFuel(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getOutputColumns() { return OUTPUT_COLUMNS; } + + public boolean airframeIsValid(String airframe) { + for (String blacklisted : AIRFRAME_BLACKLIST) + if (airframe.contains(blacklisted)) + return false; + + return 
true; + } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + double[] totalFuel = null; + + for (var columnName : REQUIRED_DOUBLE_COLUMNS) { + DoubleTimeSeries fuelTS = doubleTS.get(columnName); + if (totalFuel == null) + totalFuel = new double[fuelTS.size()]; + + for (int i = 0; i < fuelTS.size(); i++) + totalFuel[i] += fuelTS.get(i); + } + + DoubleTimeSeries totalFuelTS = new DoubleTimeSeries(TOTAL_FUEL, UNIT_GALLONS, totalFuel); + doubleTS.put(TOTAL_FUEL, totalFuelTS); + } + +} From 195a6d4b988a9d7ea0d5a917ba6ab7b389aec79c Mon Sep 17 00:00:00 2001 From: Joshua Karns Date: Fri, 7 Apr 2023 15:56:22 -0400 Subject: [PATCH 06/12] Intermediate commit w/ interface for Aaron --- src/main/java/org/ngafid/FindSpinEvents.java | 2 +- src/main/java/org/ngafid/ProcessUpload.java | 292 +++++++++++------- .../org/ngafid/flights/DoubleTimeSeries.java | 192 ++++++------ src/main/java/org/ngafid/flights/Flight.java | 2 +- .../CalculatedDoubleTimeSeries.java | 13 +- .../flights/calculations/VSPDRegression.java | 16 +- .../flights/process/DependencyGraph.java | 28 +- .../process/FlightFileFormatException.java | 16 + .../flights/process/FlightFileProcessor.java | 33 ++ .../ngafid/flights/process/ProcessLOCI.java | 122 +++----- .../flights/process/ProcessStallIndex.java | 96 ++++++ 11 files changed, 519 insertions(+), 293 deletions(-) create mode 100644 src/main/java/org/ngafid/flights/process/FlightFileFormatException.java create mode 100644 src/main/java/org/ngafid/flights/process/FlightFileProcessor.java create mode 100644 src/main/java/org/ngafid/flights/process/ProcessStallIndex.java diff --git a/src/main/java/org/ngafid/FindSpinEvents.java b/src/main/java/org/ngafid/FindSpinEvents.java index 174e8dfb8..7ef91819e 100644 --- a/src/main/java/org/ngafid/FindSpinEvents.java +++ b/src/main/java/org/ngafid/FindSpinEvents.java @@ -216,7 +216,7 @@ static void calculateVSPDDerived(Connection connection, Flight flight) throws IO if (dts == null) { flight.checkCalculationParameters(VSPD_CALCULATED, ALT_B); CalculatedDoubleTimeSeries dVSI = new CalculatedDoubleTimeSeries(connection, VSPD_CALCULATED, "ft/min", true, flight); - dVSI.create(new VSPDRegression(connection, flight)); + dVSI.create(new VSPDRegression(flight.getDoubleTimeSeries(ALT_B))); dVSI.updateDatabase(connection, flightId); } } diff --git a/src/main/java/org/ngafid/ProcessUpload.java b/src/main/java/org/ngafid/ProcessUpload.java index 7ac3f12a5..f2f67f1bc 100644 --- a/src/main/java/org/ngafid/ProcessUpload.java +++ b/src/main/java/org/ngafid/ProcessUpload.java @@ -26,8 +26,17 @@ import java.util.Enumeration; import java.util.HashMap; import java.util.Map; +import java.util.Objects; +import java.util.Spliterator; +import java.util.Spliterators; +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Predicate; import java.util.logging.Level; import java.util.logging.Logger; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; @@ -237,6 +246,39 @@ public FlightInfo(int id, int length, String filename, List { + public R apply(T t) throws E; + } + + static class CheckedMap implements Function { + final BiConsumer exceptionHandler; + final CheckedFunction f; + + public CheckedMap(CheckedFunction f, BiConsumer exceptionHandler) { + this.exceptionHandler = exceptionHandler; + this.f = f; + } + + public R apply(T t) { + try { + return 
f.apply(t); + } catch (Error | RuntimeException e) { + throw e; + } catch (Exception ex) { + @SuppressWarnings("unchecked") E e = (E) ex; + exceptionHandler.accept(t, e); + return null; + } + } + } + + + // Generates a function which when called will call the supplied function f which may raise an exception. + // In the event of an exception the exception the exceptionHandler is called and supplied the value T as + // well as the exception object, and finally null is returned. + private static CheckedMap mapOrNull(CheckedFunction f, BiConsumer exceptionHandler) { + return new CheckedMap(f, exceptionHandler); + } public static boolean ingestFlights(Connection connection, int uploadId, int fleetId, int uploaderId, String filename, UploadProcessedEmail uploadProcessedEmail) throws SQLException { Instant start = Instant.now(); @@ -258,13 +300,53 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle int validFlights = 0; int warningFlights = 0; int errorFlights = 0; + if (extension.equals(".zip")) { + BiConsumer handleFlightFileFormatException = + (z, e) -> { + flightErrors.put(z.getName(), new UploadException("Unknown file type contained in zip file (flight logs should be .csv files).", z.getName())); + errorFlights++; + }; + + BiConsumer handleExceptionInProcessor = + (p, e) -> { + flightErrors.put(p.filename, new UploadException(e.getMessage(), e, p.filename)); + errorFlights++; + }; + BiConsumer handleExceptionInBuilder = + (b, e) -> { + flightErrors.put(b.meta.filename, new UploadException(e.getMessage(), e, b.meta.filename)); + errorFlights++; + }; + try { System.err.println("processing zip file: '" + filename + "'"); ZipFile zipFile = new ZipFile(filename); Enumeration entries = zipFile.entries(); - + Stream validFiles = + StreamSupport.stream( + Spliterators.spliteratorUnknownSize(entries.asIterator(), Spliterator.ORDERED), + false + ) + .filter(z -> !z.getName().contains("__MACOSX")) + .filter(z -> !z.isDirectory()); + + Stream pipeline = + validFiles + .map(mapOrNull(FlightFileProcessor::create, handleFlightFileFormatException)) // Create a FlightFileProcessor for each file + .filter(Objects::nonNull) // Filter out any null values (nulls indicate files we cant process) + .map(mapOrNull(p -> p.parse(), handleExceptionInProcessor)) // Parse the files (this is the initial parsing step + .filter(Objects::nonNull) // Filter out any null values (nulls indicate something went awry in the parsing step) + .flatMap(builder -> builder) // Merge streams together + .map(mapOrNull(builder -> builder.build(connection), handleExceptionInBuilder)) // + .filter(Objects::nonNull); + + pipeline.forEach((Flight flight) -> { + flight.updateDatabase(connection, uploadId, uploaderId, fleetId); + if (flight.getStatus().equals("WARNING")) warningFlights++; + }); + while (entries.hasMoreElements()) { ZipEntry entry = entries.nextElement(); String name = entry.getName(); @@ -283,108 +365,108 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle String entryName = entry.getName(); - if (entryName.contains(".csv")) { - try { - InputStream stream = zipFile.getInputStream(entry); - Flight flight = new Flight(fleetId, entry.getName(), stream, connection); - - if (connection != null) { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - } - - if (flight.getStatus().equals("WARNING")) warningFlights++; - - flightInfo.add(new FlightInfo(flight.getId(), flight.getNumberRows(), flight.getFilename(), flight.getExceptions())); - - validFlights++; 
- } catch (FlightProcessingException e) { - System.err.println(e.getMessage()); - flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); - errorFlights++; - } - - } else if (entryName.contains(".gpx")) { - try { - InputStream stream = zipFile.getInputStream(entry); - ArrayList flights = Flight.processGPXFile(fleetId, connection, stream, entry.getName()); - - if (connection != null) { - for (Flight flight : flights) { - flightInfo.add(new FlightInfo(flight.getId(), flight.getNumberRows(), flight.getFilename(), flight.getExceptions())); - } - for (Flight flight : flights) { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - if (flight.getStatus().equals("WARNING")) warningFlights++; - validFlights++; - } - } - } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | - ParserConfigurationException | SAXException | ParseException e) { - System.err.println(e.getMessage()); - flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); - errorFlights++; - } - } else if (entry.getName().endsWith(".json")) { - try { - Flight flight = Flight.processJSON(fleetId, connection, zipFile.getInputStream(entry), entry.getName()); - - if (connection != null) { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - } - - if (flight.getStatus().equals("WARNING")) warningFlights++; - - validFlights++; - } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | - ParseException e) { - System.err.println("ERROR: " + e.getMessage()); - flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); - errorFlights++; - } - } else if (entry.getName().endsWith(".DAT")) { - String zipName = entry.getName().substring(entry.getName().lastIndexOf("/")); - String parentFolder = zipFile.getName().substring(0, zipFile.getName().lastIndexOf("/")); - File tempExtractedFile = new File(parentFolder, zipName); - - System.out.println("Extracting to " + tempExtractedFile.getAbsolutePath()); - try (InputStream inputStream = zipFile.getInputStream(entry); FileOutputStream fileOutputStream = new FileOutputStream(tempExtractedFile)) { - int len; - byte[] buffer = new byte[1024]; - - while ((len = inputStream.read(buffer)) > 0) { - fileOutputStream.write(buffer, 0, len); - } - } - - convertDATFile(tempExtractedFile); - File processedCSVFile = new File(tempExtractedFile.getAbsolutePath() + ".csv"); - placeInZip(processedCSVFile.getAbsolutePath(), zipFile.getName().substring(zipFile.getName().lastIndexOf("/") + 1)); - - try (InputStream stream = new FileInputStream(processedCSVFile)) { - Flight flight = processDATFile(fleetId, entry.getName(), stream, connection); - - if (connection != null) { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - } - - if (flight.getStatus().equals("WARNING")) warningFlights++; - - flightInfo.add(new FlightInfo(flight.getId(), flight.getNumberRows(), flight.getFilename(), flight.getExceptions())); - - validFlights++; - } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | MalformedFlightFileException | - SQLException e) { - System.err.println(e.getMessage()); - flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); - errorFlights++; - } finally { - Files.delete(Paths.get(processedCSVFile.getAbsolutePath())); - Files.delete(Paths.get(tempExtractedFile.getAbsolutePath())); - } - } else { - flightErrors.put(entry.getName(), new UploadException("Unknown 
file type contained in zip file (flight logs should be .csv files).", entry.getName())); - errorFlights++; - } + // if (entryName.contains(".csv")) { + // try { + // InputStream stream = zipFile.getInputStream(entry); + // Flight flight = new Flight(fleetId, entry.getName(), stream, connection); + + // if (connection != null) { + // flight.updateDatabase(connection, uploadId, uploaderId, fleetId); + // } + + // if (flight.getStatus().equals("WARNING")) warningFlights++; + + // flightInfo.add(new FlightInfo(flight.getId(), flight.getNumberRows(), flight.getFilename(), flight.getExceptions())); + + // validFlights++; + // } catch (FlightProcessingException e) { + // System.err.println(e.getMessage()); + // flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); + // errorFlights++; + // } + + // } else if (entryName.contains(".gpx")) { + // try { + // InputStream stream = zipFile.getInputStream(entry); + // ArrayList flights = Flight.processGPXFile(fleetId, connection, stream, entry.getName()); + + // if (connection != null) { + // for (Flight flight : flights) { + // flightInfo.add(new FlightInfo(flight.getId(), flight.getNumberRows(), flight.getFilename(), flight.getExceptions())); + // } + // for (Flight flight : flights) { + // flight.updateDatabase(connection, uploadId, uploaderId, fleetId); + // if (flight.getStatus().equals("WARNING")) warningFlights++; + // validFlights++; + // } + // } + // } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | + // ParserConfigurationException | SAXException | ParseException e) { + // System.err.println(e.getMessage()); + // flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); + // errorFlights++; + // } + // } else if (entry.getName().endsWith(".json")) { + // try { + // Flight flight = Flight.processJSON(fleetId, connection, zipFile.getInputStream(entry), entry.getName()); + + // if (connection != null) { + // flight.updateDatabase(connection, uploadId, uploaderId, fleetId); + // } + + // if (flight.getStatus().equals("WARNING")) warningFlights++; + + // validFlights++; + // } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | + // ParseException e) { + // System.err.println("ERROR: " + e.getMessage()); + // flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); + // errorFlights++; + // } + // } else if (entry.getName().endsWith(".DAT")) { + // String zipName = entry.getName().substring(entry.getName().lastIndexOf("/")); + // String parentFolder = zipFile.getName().substring(0, zipFile.getName().lastIndexOf("/")); + // File tempExtractedFile = new File(parentFolder, zipName); + + // System.out.println("Extracting to " + tempExtractedFile.getAbsolutePath()); + // try (InputStream inputStream = zipFile.getInputStream(entry); FileOutputStream fileOutputStream = new FileOutputStream(tempExtractedFile)) { + // int len; + // byte[] buffer = new byte[1024]; + + // while ((len = inputStream.read(buffer)) > 0) { + // fileOutputStream.write(buffer, 0, len); + // } + // } + + // convertDATFile(tempExtractedFile); + // File processedCSVFile = new File(tempExtractedFile.getAbsolutePath() + ".csv"); + // placeInZip(processedCSVFile.getAbsolutePath(), zipFile.getName().substring(zipFile.getName().lastIndexOf("/") + 1)); + + // try (InputStream stream = new FileInputStream(processedCSVFile)) { + // Flight flight = processDATFile(fleetId, entry.getName(), stream, connection); + + // if (connection 
!= null) { + // flight.updateDatabase(connection, uploadId, uploaderId, fleetId); + // } + + // if (flight.getStatus().equals("WARNING")) warningFlights++; + + // flightInfo.add(new FlightInfo(flight.getId(), flight.getNumberRows(), flight.getFilename(), flight.getExceptions())); + + // validFlights++; + // } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | MalformedFlightFileException | + // SQLException e) { + // System.err.println(e.getMessage()); + // flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); + // errorFlights++; + // } finally { + // Files.delete(Paths.get(processedCSVFile.getAbsolutePath())); + // Files.delete(Paths.get(tempExtractedFile.getAbsolutePath())); + // } + // } else { + // flightErrors.put(entry.getName(), new UploadException("Unknown file type contained in zip file (flight logs should be .csv files).", entry.getName())); + // errorFlights++; + // } } } catch (java.nio.file.NoSuchFileException e) { @@ -467,7 +549,7 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle for (FlightInfo info : flightInfo) { uploadProcessedEmail.addFlight(info.filename, info.id, info.length); - ArrayList exceptions = info.exceptions; + List exceptions = info.exceptions; if (exceptions.size() == 0) { uploadProcessedEmail.flightImportOK(info.filename); } @@ -484,7 +566,7 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle uploadProcessedEmail.setWarningFlights(warningFlights); for (FlightInfo info : flightInfo) { - ArrayList exceptions = info.exceptions; + List exceptions = info.exceptions; if (exceptions.size() > 0) { for (MalformedFlightFileException exception : exceptions) { diff --git a/src/main/java/org/ngafid/flights/DoubleTimeSeries.java b/src/main/java/org/ngafid/flights/DoubleTimeSeries.java index 0cb9bebff..dc6c2063a 100644 --- a/src/main/java/org/ngafid/flights/DoubleTimeSeries.java +++ b/src/main/java/org/ngafid/flights/DoubleTimeSeries.java @@ -13,8 +13,8 @@ import java.util.ArrayList; import java.util.Optional; import java.util.logging.Logger; +import java.util.stream.DoubleStream; import java.util.zip.Deflater; -import java.util.zip.Inflater; import org.ngafid.Database; import org.ngafid.common.Compression; @@ -29,7 +29,6 @@ public class DoubleTimeSeries { private static final int COMPRESSION_LEVEL = Deflater.DEFAULT_COMPRESSION; private static final String DS_COLUMNS = "ds.id, ds.flight_id, ds.name_id, ds.data_type_id, ds.length, ds.valid_length, ds.min, ds.avg, ds.max, ds.data"; - private boolean cache = true; private int id = -1; private int flightId = -1; private int nameId; @@ -54,11 +53,7 @@ public DoubleTimeSeries(String name, String dataType, double[] data, int size) { this.data = data; this.size = size; - min = Double.NaN; - avg = Double.NaN; - max = Double.NaN; - - validCount = 0; + calculateValidCountMinMaxAvg(); } public DoubleTimeSeries(String name, String dataType, double[] data) { @@ -83,11 +78,6 @@ public DoubleTimeSeries(Connection connection, String name, String dataType) thr this(connection, name, dataType, 16); } - public DoubleTimeSeries(Connection connection, String name, String dataType, boolean cache) throws SQLException { - this(connection, name, dataType); - this.cache = cache; - } - public DoubleTimeSeries(Connection connection, String name, String dataType, ArrayList stringTimeSeries) throws SQLException { this.name = name; this.nameId = SeriesNames.getDoubleNameId(connection, name); @@ -134,53 +124,36 @@ public 
DoubleTimeSeries(Connection connection, String name, String dataType, Arr avg /= validCount; } - private void setNameId(Connection connection) throws SQLException { - this.nameId = SeriesNames.getDoubleNameId(connection, name); - } - - private void setTypeId(Connection connection) throws SQLException { - this.typeId = TypeNames.getId(connection, dataType); - } - - /** - * Checks to see whether this series will be cached in the database - * - * @return a boolean representaion of wheteher or not it should be cached - */ - public final boolean isCached() { - return this.cache; - } - - /** - * Gets the name of the DoubleTimeSeries. - * @return the column name of the DoubleTimeSeries - */ - public String getName() { - return name; - } + public DoubleTimeSeries(Connection connection, ResultSet resultSet) throws SQLException, IOException { + id = resultSet.getInt(1); + flightId = resultSet.getInt(2); + nameId = resultSet.getInt(3); + name = SeriesNames.getDoubleName(connection, nameId); + typeId = resultSet.getInt(4); + dataType = TypeNames.getName(connection, typeId); + size = resultSet.getInt(5); + validCount = resultSet.getInt(6); + min = resultSet.getDouble(7); + avg = resultSet.getDouble(8); + max = resultSet.getDouble(9); - /** - * Gets the minimum value of the DoubleTimeSeries. - * @return the minimum value of the DoubleTimeSeries - */ - public double getMin() { - return min; + Blob values = resultSet.getBlob(10); + byte[] bytes = values.getBytes(1, (int)values.length()); + values.free(); + + this.data = Compression.inflateDoubleArray(bytes, size); } - - /** - * Gets the maximum value of the DoubleTimeSeries. - * @return the maximum value of the DoubleTimeSeries - */ - public double getMax() { - return max; + + public interface TimeStepCalculation { + double compute(int i); } - /** - * Gets the average value of the DoubleTimeSeries. 
- * @return the average value of the DoubleTimeSeries - */ - public double getAvg() { - return avg; + public static DoubleTimeSeries computed(String name, String dataType, int length, TimeStepCalculation calculation) { + double[] data = new double[length]; + for (int i = 0; i < length; i++) + data[i] = calculation.compute(i); + + return new DoubleTimeSeries(name, dataType, data, length); } public static Pair getMinMax(Connection connection, int flightId, String name) throws SQLException { @@ -287,44 +260,65 @@ public static DoubleTimeSeries getDoubleTimeSeries(Connection connection, int fl } } - public DoubleTimeSeries(Connection connection, ResultSet resultSet) throws SQLException, IOException { - id = resultSet.getInt(1); - flightId = resultSet.getInt(2); - nameId = resultSet.getInt(3); - name = SeriesNames.getDoubleName(connection, nameId); - typeId = resultSet.getInt(4); - dataType = TypeNames.getName(connection, typeId); - size = resultSet.getInt(5); - validCount = resultSet.getInt(6); - min = resultSet.getDouble(7); - avg = resultSet.getDouble(8); - max = resultSet.getDouble(9); + private void setNameId(Connection connection) throws SQLException { + this.nameId = SeriesNames.getDoubleNameId(connection, name); + } - Blob values = resultSet.getBlob(10); - byte[] bytes = values.getBytes(1, (int)values.length()); - values.free(); + private void setTypeId(Connection connection) throws SQLException { + this.typeId = TypeNames.getId(connection, dataType); + } + + private void calculateValidCountMinMaxAvg() { + if (size <= 0) + return; - this.data = Compression.inflateDoubleArray(bytes, size); - - // OLD COMPRESSION CODE - // byte[] bytes = values.getBytes(1, (int)values.length()); - // values.free(); - - // LOG.info("id: " + id + ", flightId: " + flightId + ", name: " + name + ", length: " + size + ", validLength: " + validCount + ", min: " + min + ", avg: " + avg + ", max: " + max); - - // try { - // Inflater inflater = new Inflater(); - // inflater.setInput(bytes, 0, bytes.length); - // ByteBuffer timeSeriesBytes = ByteBuffer.allocate(size * Double.BYTES); - // int _inflatedSize = inflater.inflate(timeSeriesBytes.array()); - // double[] timeSeriesArray = new double[size]; - // timeSeriesBytes.asDoubleBuffer().get(timeSeriesArray); - // this.data = timeSeriesArray; - // } catch (Exception e) { - // e.printStackTrace(); - // } - // - // LOG.info("id: " + id + ", flightId: " + flightId + ", name: " + name + ", length: " + size + ", validLength: " + validCount + ", min: " + min + ", avg: " + avg + ", max: " + max); + min = data[0]; + max = data[0]; + + double sum = 0.0; + for (int i = 1; i < size; i++) { + if (Double.isNaN(data[i])) + continue; + + sum += data[i]; + + min = min > data[i] ? data[i] : min; + max = max < data[i] ? data[i] : max; + } + + avg = sum / validCount; + } + + /** + * Gets the name of the DoubleTimeSeries. + * @return the column name of the DoubleTimeSeries + */ + public String getName() { + return name; + } + + /** + * Gets the minimum value of the DoubleTimeSeries. + * @return the minimum value of the DoubleTimeSeries + */ + public double getMin() { + return min; + } + + /** + * Gets the maximum value of the DoubleTimeSeries. + * @return the maximum value of the DoubleTimeSeries + */ + public double getMax() { + return max; + } + + /** + * Gets the average value of the DoubleTimeSeries. 
+ * @return the average value of the DoubleTimeSeries + */ + public double getAvg() { + return avg; } public String toString() { @@ -397,8 +391,6 @@ public double[] sliceCopy(int from, int to) { public void updateDatabase(Connection connection, int flightId) { //System.out.println("Updating database for " + this); - if (!this.cache) return; - try { if (typeId == -1) setTypeId(connection); @@ -520,15 +512,19 @@ public DoubleTimeSeries lag(Connection connection, int n) throws SQLException { if (existingSeries.isPresent()) { return existingSeries.get(); + } else { + return lag(n); } + } - DoubleTimeSeries laggedSeries = new DoubleTimeSeries(connection, this.name + LAG_SUFFIX + n, "double"); + public DoubleTimeSeries lag(int n) { + DoubleTimeSeries laggedSeries = new DoubleTimeSeries(this.name + LAG_SUFFIX + n, "double"); for (int i = 0; i < data.length; i++) { laggedSeries.add((i >= n) ? data[i - n] : Double.NaN); } - return laggedSeries; + return laggedSeries; } public DoubleTimeSeries lead(Connection connection, int n) throws SQLException { @@ -536,9 +532,13 @@ public DoubleTimeSeries lead(Connection connection, int n) throws SQLException { if (existingSeries.isPresent()) { return existingSeries.get(); + } else { + return lead(n); } + } - DoubleTimeSeries leadingSeries = new DoubleTimeSeries(connection, this.name + LEAD_SUFFIX + n, "double"); + public DoubleTimeSeries lead(int n) { + DoubleTimeSeries leadingSeries = new DoubleTimeSeries(this.name + LEAD_SUFFIX + n, "double"); int len = data.length; for (int i = 0; i < len; i++) { diff --git a/src/main/java/org/ngafid/flights/Flight.java b/src/main/java/org/ngafid/flights/Flight.java index 0faaa412b..0f6c38d1e 100644 --- a/src/main/java/org/ngafid/flights/Flight.java +++ b/src/main/java/org/ngafid/flights/Flight.java @@ -2728,7 +2728,7 @@ public void runLOCICalculations(Connection connection) throws MalformedFlightFil } CalculatedDoubleTimeSeries vspdCalculated = new CalculatedDoubleTimeSeries(connection, VSPD_CALCULATED, "ft/min", true, this); - vspdCalculated.create(new VSPDRegression(connection, this)); + vspdCalculated.create(new VSPDRegression(getDoubleTimeSeries(ALT_B))); CalculatedDoubleTimeSeries densityRatio = new CalculatedDoubleTimeSeries(connection, DENSITY_RATIO, "ratio", false, this); densityRatio.create(index -> { diff --git a/src/main/java/org/ngafid/flights/calculations/CalculatedDoubleTimeSeries.java b/src/main/java/org/ngafid/flights/calculations/CalculatedDoubleTimeSeries.java index a6eccf41a..96ddd5699 100644 --- a/src/main/java/org/ngafid/flights/calculations/CalculatedDoubleTimeSeries.java +++ b/src/main/java/org/ngafid/flights/calculations/CalculatedDoubleTimeSeries.java @@ -13,6 +13,7 @@ public class CalculatedDoubleTimeSeries extends DoubleTimeSeries { private final Flight flight; + private final boolean cache; /** * Default Constructor @@ -23,7 +24,14 @@ public class CalculatedDoubleTimeSeries extends DoubleTimeSeries { * @param flight the flight instance the timeseries is being calcualted for */ public CalculatedDoubleTimeSeries(Connection connection, String name, String dataType, boolean cache, Flight flight) throws SQLException { - super(connection, name, dataType, cache); + super(connection, name, dataType); + this.flight = flight; + this.cache = cache; + } + + public CalculatedDoubleTimeSeries(String name, String dataType, boolean cache, Flight flight) throws SQLException { + super(name, dataType); + this.cache = cache; this.flight = flight; } @@ -39,6 +47,7 @@ public void create(Calculation calculation) 
throws IOException, SQLException { super.add(calculation.calculate(i)); } - flight.addDoubleTimeSeries(super.getName(), this); + if (cache) + flight.addDoubleTimeSeries(super.getName(), this); } } diff --git a/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java b/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java index 0677ae4a0..2d6ab1c7a 100644 --- a/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java +++ b/src/main/java/org/ngafid/flights/calculations/VSPDRegression.java @@ -1,6 +1,8 @@ package org.ngafid.flights.calculations; import org.ngafid.flights.*; +import org.ngafid.flights.DoubleTimeSeries.TimeStepCalculation; + import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; @@ -13,7 +15,7 @@ * @author Aidan LaBella @ RIT CS */ -public class VSPDRegression implements Calculation { +public class VSPDRegression implements TimeStepCalculation, Calculation { private final DoubleTimeSeries altB; private final DoubleTimeSeries altBLag; private final DoubleTimeSeries altBLead; @@ -25,10 +27,14 @@ public class VSPDRegression implements Calculation { * * @param flight the {@link Flight} to perform a regression on */ - public VSPDRegression(Connection connection, Flight flight) throws SQLException, IOException { - this.altB = flight.getDoubleTimeSeries(ALT_B); - this.altBLag = altB.lag(connection, VSI_LAG_DIFF); - this.altBLead = altB.lead(connection, VSI_LAG_DIFF); + public VSPDRegression(DoubleTimeSeries altB) { + this.altB = altB; + this.altBLag = altB.lag(VSI_LAG_DIFF); + this.altBLead = altB.lead(VSI_LAG_DIFF); + } + + public double compute(int index) { + return calculate(index); } /** diff --git a/src/main/java/org/ngafid/flights/process/DependencyGraph.java b/src/main/java/org/ngafid/flights/process/DependencyGraph.java index f73ee182d..2b48dc308 100644 --- a/src/main/java/org/ngafid/flights/process/DependencyGraph.java +++ b/src/main/java/org/ngafid/flights/process/DependencyGraph.java @@ -106,6 +106,31 @@ public Void compute() { } } + /** + * Dummy step meant to act as a root node in DAG. This is done by adding all of the columns included in the file + * as output columns, so all other steps will depend on this. + **/ + class DummyStep extends ProcessStep { + Set outputColumns = new HashSet<>(); + + public DummyStep(FlightBuilder builder) { + // We can pass in null rather than a connection object + super(null, builder); + outputColumns.addAll(doubleTS.keySet()); + outputColumns.addAll(stringTS.keySet()); + } + + public Set getRequiredDoubleColumns() { return Collections.emptySet(); } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return Collections.emptySet(); } + public Set getOutputColumns() { return outputColumns; } + + public boolean airframeIsValid(String airframe) { return true; } + + // Left blank intentionally + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException {} + } + private void nodeConflictError(ProcessStep first, ProcessStep second) throws FatalFlightFileException { throw new FatalFlightFileException( "ERROR when building dependency graph! 
" @@ -157,6 +182,7 @@ public DependencyGraph(FlightBuilder builder, List steps) throws Fl this.builder = builder; try { + registerStep(new DummyStep(builder)); for (var step : steps) registerStep(step); for (var node : nodes) createEdges(node); } catch (FatalFlightFileException e) { @@ -229,7 +255,7 @@ public void scrutinize() { private void requiredCheck() { for (var node : nodes) { if (!node.step.isRequired()) - src continue; + continue; for (var parent : node.requiredBy) { if (!parent.step.isRequired()) { diff --git a/src/main/java/org/ngafid/flights/process/FlightFileFormatException.java b/src/main/java/org/ngafid/flights/process/FlightFileFormatException.java new file mode 100644 index 000000000..24f620d85 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/FlightFileFormatException.java @@ -0,0 +1,16 @@ +package org.ngafid.flights.process; + +public class FlightFileFormatException extends Exception { + private static final long serialVersionUID = 124311; + + String filename; + + public FlightFileFormatException(String filename) { + this.filename = filename; + } + + public String getMessage() { + return "File '" + filename + "' is of an unrecognized or unsupported file format."; + } + +} diff --git a/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java b/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java new file mode 100644 index 000000000..f45ba0cf0 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java @@ -0,0 +1,33 @@ +package org.ngafid.flights.process; + +import java.io.InputStream; +import java.util.stream.Stream; +import java.util.zip.ZipEntry; + +public abstract class FlightFileProcessor { + + public static FlightFileProcessor create(ZipEntry entry) throws FlightFileFormatException { + String filename = entry.getName(); + + int index = filename.lastIndexOf('.'); + String extension = index >= 0 ? filename.substring(index) : ""; + + switch (extension) { + // TODO: Add supported extensions here! 
+ case "": // No extension + default: + throw new FlightFileFormatException(filename); + } + } + + public final String filename; + public final InputStream stream; + + public FlightFileProcessor(InputStream stream, String filename) { + this.filename = filename; + this.stream = stream; + } + + + public abstract Stream parse() throws FlightProcessingException; +} diff --git a/src/main/java/org/ngafid/flights/process/ProcessLOCI.java b/src/main/java/org/ngafid/flights/process/ProcessLOCI.java index 72e7cfc12..7ac391966 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessLOCI.java +++ b/src/main/java/org/ngafid/flights/process/ProcessLOCI.java @@ -10,102 +10,60 @@ import static org.ngafid.flights.Parameters.*; import org.ngafid.common.*; +import org.ngafid.flights.calculations.CalculatedDoubleTimeSeries; import org.ngafid.flights.StringTimeSeries; +import org.ngafid.flights.DoubleTimeSeries; import org.ngafid.flights.MalformedFlightFileException; import org.ngafid.flights.FatalFlightFileException; public class ProcessLOCI extends ProcessStep { - private static final Logger LOG = Logger.getLogger(ProcessStartEndTime.class.getName()); + private static final Logger LOG = Logger.getLogger(ProcessLOCI.class.getName()); - public static Set REQUIRED_DOUBLE_COLUMNS = Set.of(STALL_DEPENDENCIES); + public static Set REQUIRED_DOUBLE_COLUMNS = Set.of(LOCI_DEPENDENCIES); public ProcessLOCI(Connection connection, FlightBuilder builder) { super(connection, builder); } - public Set getRequiredDoubleColumns() { return Collections.emptySet(); } + public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } public Set getRequiredStringColumns() { return Collections.emptySet(); } - public Set getRequiredColumns() { return REQUIRED_STRING_COLUMNS; } - public Set getOutputColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getOutputColumns() { return Collections.emptySet(); } - public boolean airframeIsValid(String airframe) { return true; } + public boolean airframeIsValid(String airframe) { return airframe.equals(AIRFRAME_CESSNA_172S); } public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { - StringTimeSeries dates = builder.stringTimeSeries.get(LCL_DATE); - StringTimeSeries times = builder.stringTimeSeries.get(LCL_TIME); - StringTimeSeries offsets = builder.stringTimeSeries.get(UTC_OFFSET); - - int dateSize = dates.size(); - int timeSize = times.size(); - int offsetSize = offsets.size(); - - LOG.info("\tdate size: " + dateSize + ", time size: " + timeSize + ", offset size: " + offsetSize); - - //get the minimum sized length of each of these series, they should all be the same but - //if the last column was cut off it might not be the case - int minSize = dateSize; - if (minSize < timeSize) minSize = timeSize; - if (minSize < offsetSize) minSize = offsetSize; - - //find the first non-null time entry - int start = 0; - while (start < minSize && - (dates.get(start) == null || dates.get(start).equals("") || - times.get(start) == null || times.get(start).equals("") || - offsets.get(start) == null || offsets.get(start).equals("") || offsets.get(start).equals("+19:00"))) { - - start++; - } - - LOG.info("\tfirst date time and offset not null at index: " + start); - - if (start >= minSize) - throw new MalformedFlightFileException("Date, Time or Offset columns were all null! 
Cannot set start/end times."); - - //find the last full date time offset entry row - int end = minSize - 1; - while (end >= 0 && - (dates.get(end) == null || dates.get(end).equals("") || - times.get(end) == null || times.get(end).equals("") || - offsets.get(end) == null || offsets.get(end).equals(""))) { - - end--; - } - - String startDate = dates.get(start); - String startTime = times.get(start); - String startOffset = offsets.get(start); - - String endDate = dates.get(end); - String endTime = times.get(end); - String endOffset = offsets.get(end); - - LOG.info("\t\t\tfirst not null " + start + " -- " + startDate + " " + startTime + " " + startOffset); - LOG.info("\t\t\tlast not null " + endDate + " " + endTime + " " + endOffset); - - OffsetDateTime startODT = null; - try { - startODT = TimeUtils.convertToOffset(startDate, startTime, startOffset, "+00:00"); - } catch (DateTimeException dte) { - LOG.severe("Corrupt start time data in flight file: " + dte.getMessage()); - throw new MalformedFlightFileException("Corrupt start time data in flight file: '" + dte.getMessage() + "'"); - } - - OffsetDateTime endODT = null; - try { - endODT = TimeUtils.convertToOffset(endDate, endTime, endOffset, "+00:00"); - } catch (DateTimeException dte) { - LOG.severe("Corrupt end time data in flight file: " + dte.getMessage()); - throw new MalformedFlightFileException("Corrupt end time data in flight file: '" + dte.getMessage() + "'"); - } - - if (startODT.isAfter(endODT)) { - builder.setStartDateTime(null); - builder.setEndDateTime(null); - throw new MalformedFlightFileException("Corrupt time data in flight file, start time was after the end time"); - } - - builder.setStartDateTime(startODT.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); - builder.setEndDateTime(endODT.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); + DoubleTimeSeries hdg = doubleTS.get(HDG); + DoubleTimeSeries hdgLagged = withConnection((connection) -> hdg.lag(connection, YAW_RATE_LAG)); + DoubleTimeSeries roll = doubleTS.get(ROLL); + DoubleTimeSeries tas = doubleTS.get(TAS_FTMIN); + DoubleTimeSeries stallIndex = doubleTS.get(STALL_PROB); + + int length = roll.size(); + + DoubleTimeSeries coordIndex = DoubleTimeSeries.computed(PRO_SPIN_FORCE, "index", length, + (int index) -> { + double laggedHdg = hdgLagged.get(index); + double yawRate = Double.isNaN(laggedHdg) ? 
0 : + 180 - Math.abs(180 - Math.abs(hdg.get(index) - laggedHdg) % 360); + + double yawComp = yawRate * COMP_CONV; + double vrComp = ((tas.get(index) / 60) * yawComp); + double rollComp = roll.get(index) * COMP_CONV; + double ctComp = Math.sin(rollComp) * 32.2; + double value = Math.min(((Math.abs(ctComp - vrComp) * 100) / PROSPIN_LIM), 100); + + return value; + } + ); + DoubleTimeSeries loci = DoubleTimeSeries.computed(LOCI, "index", length, + index -> { + double prob = stallIndex.get(index) * coordIndex.get(index); + return prob / 100; + } + ); + + doubleTS.put(PRO_SPIN_FORCE, coordIndex); + doubleTS.put(LOCI, loci); } } diff --git a/src/main/java/org/ngafid/flights/process/ProcessStallIndex.java b/src/main/java/org/ngafid/flights/process/ProcessStallIndex.java new file mode 100644 index 000000000..da4a5f0c1 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessStallIndex.java @@ -0,0 +1,96 @@ +package org.ngafid.flights.process; + +import java.time.*; +import java.util.Set; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Collections; +import java.util.logging.Logger; +import java.time.format.DateTimeFormatter; + +import static org.ngafid.flights.Parameters.*; +import org.ngafid.common.*; +import org.ngafid.flights.calculations.CalculatedDoubleTimeSeries; +import org.ngafid.flights.StringTimeSeries; +import org.ngafid.flights.DoubleTimeSeries; +import org.ngafid.flights.MalformedFlightFileException; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.calculations.VSPDRegression; + +public class ProcessStallIndex extends ProcessStep { + private static final Logger LOG = Logger.getLogger(ProcessStallIndex.class.getName()); + + public static Set REQUIRED_DOUBLE_COLUMNS = Set.of(STALL_DEPENDENCIES); + + public ProcessStallIndex(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getRequiredStringColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getOutputColumns() { return Collections.emptySet(); } + + public boolean airframeIsValid(String airframe) { return true; } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + DoubleTimeSeries ias = doubleTS.get(IAS); + int length = ias.size(); + + if (builder.meta.airframeName.equals(AIRFRAME_CESSNA_172S)) { + DoubleTimeSeries cas = DoubleTimeSeries.computed(CAS, "knots", length, + index -> { + double iasValue = ias.get(index); + + if (iasValue < 70.d) + iasValue = (0.7d * iasValue) + 20.667; + + return iasValue; + } + ); + doubleTS.put(CAS, cas); + } + + DoubleTimeSeries vspdCalculated = + DoubleTimeSeries.computed(VSPD_CALCULATED, "ft/min", length, new VSPDRegression(doubleTS.get(ALT_B))); + doubleTS.put(VSPD_CALCULATED, vspdCalculated); + + DoubleTimeSeries baroA = doubleTS.get(BARO_A); + DoubleTimeSeries oat = doubleTS.get(OAT); + DoubleTimeSeries densityRatio = DoubleTimeSeries.computed(DENSITY_RATIO, "ratio", length, + index -> { + double pressRatio = baroA.get(index) / STD_PRESS_INHG; + double tempRatio = (273 + oat.get(index)) / 288; + + return pressRatio / tempRatio; + } + ); + + DoubleTimeSeries airspeed = + builder.meta.airframeName.equals(AIRFRAME_CESSNA_172S) ? 
doubleTS.get(CAS) : doubleTS.get(IAS); + DoubleTimeSeries tasFtMin = DoubleTimeSeries.computed(TAS_FTMIN, "ft/min", length, + index -> { + return (airspeed.get(index) * Math.pow(densityRatio.get(index), -0.5)) * ((double) 6076 / 60); + }); + + DoubleTimeSeries pitch = doubleTS.get(PITCH); + DoubleTimeSeries aoaSimple = DoubleTimeSeries.computed(AOA_SIMPLE, "degrees", length, + index -> { + + double vspdGeo = vspdCalculated.get(index) * Math.pow(densityRatio.get(index), -0.5); + double fltPthAngle = Math.asin(vspdGeo / tasFtMin.get(index)); + fltPthAngle = fltPthAngle * (180 / Math.PI); + double value = pitch.get(index) - fltPthAngle; + + return value; + } + ); + + DoubleTimeSeries stallIndex = DoubleTimeSeries.computed(STALL_PROB, "index", length, + index -> { + return (Math.min(((Math.abs(aoaSimple.get(index) / AOA_CRIT)) * 100), 100)) / 100; + } + ); + doubleTS.put(STALL_PROB, stallIndex); + } +} From ea1fe636ec1c005368d723b9d0d2ab4fc3a7350a Mon Sep 17 00:00:00 2001 From: Aaron <30483634+AarC10@users.noreply.github.com> Date: Tue, 25 Apr 2023 16:58:44 -0400 Subject: [PATCH 07/12] New Flight File Processing (#87) * DAT File Processor * csv file processing * JSON Flight processing * GPX File Processing code * Attempt at porting over more DAT processing code * JSON File processing with new code * Part of CSVFileProcessing * Remove connection params from DATFileProcessor * Probably working general csv parsing * CSV File Processor * Remove storing Maps and FlightMeta in JSONFileProcessor instance * GPX processor * Integrate processors into the process upload class * More maintainable way of adding new flight file processors in the future * Fixed some compile errs in procupload. Still need to fix issue with convertAndInsert in dat file * DAT File changes. 
Modified FlightFileProcessor to do var args to support this * Add todo comment * Procesor docstrings * JavaDocs for FFP * Modify how the processors map is inited * Kotlin moment --- src/main/java/org/ngafid/ProcessUpload.java | 137 +--- .../ngafid/flights/DJIFlightProcessor.java | 1 - .../org/ngafid/flights/DoubleTimeSeries.java | 7 + .../org/ngafid/flights/StringTimeSeries.java | 9 + .../flights/process/CSVFileProcessor.java | 180 +++++ .../flights/process/DATFileProcessor.java | 741 ++++++++++++++++++ .../ngafid/flights/process/FlightBuilder.java | 3 +- .../flights/process/FlightFileProcessor.java | 9 +- .../ngafid/flights/process/FlightMeta.java | 105 ++- .../flights/process/GPXFileProcessor.java | 202 +++++ .../flights/process/JSONFileProcessor.java | 172 ++++ 11 files changed, 1460 insertions(+), 106 deletions(-) create mode 100644 src/main/java/org/ngafid/flights/process/CSVFileProcessor.java create mode 100644 src/main/java/org/ngafid/flights/process/DATFileProcessor.java create mode 100644 src/main/java/org/ngafid/flights/process/GPXFileProcessor.java create mode 100644 src/main/java/org/ngafid/flights/process/JSONFileProcessor.java diff --git a/src/main/java/org/ngafid/ProcessUpload.java b/src/main/java/org/ngafid/ProcessUpload.java index f2f67f1bc..046b7b80b 100644 --- a/src/main/java/org/ngafid/ProcessUpload.java +++ b/src/main/java/org/ngafid/ProcessUpload.java @@ -15,10 +15,6 @@ import java.time.format.DateTimeFormatter; import Files.*; -import org.xml.sax.SAXException; - -import javax.xml.parsers.ParserConfigurationException; -import java.text.ParseException; import java.util.Arrays; import java.util.List; @@ -29,10 +25,9 @@ import java.util.Objects; import java.util.Spliterator; import java.util.Spliterators; +import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; -import java.util.function.Consumer; import java.util.function.Function; -import java.util.function.Predicate; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Stream; @@ -40,9 +35,7 @@ import java.util.zip.ZipEntry; import java.util.zip.ZipFile; -import org.ngafid.flights.FatalFlightFileException; import org.ngafid.flights.Flight; -import org.ngafid.flights.FlightAlreadyExistsException; import org.ngafid.flights.FlightError; import org.ngafid.flights.MalformedFlightFileException; import org.ngafid.flights.Upload; @@ -51,13 +44,27 @@ import org.ngafid.accounts.Fleet; import org.ngafid.accounts.User; -import static org.ngafid.flights.DJIFlightProcessor.processDATFile; + +@FunctionalInterface +interface FlightFileProcessors { + FlightFileProcessor create(InputStream stream, String filename, Object... 
args); +} public class ProcessUpload { private static Connection connection = null; private static Logger LOG = Logger.getLogger(ProcessUpload.class.getName()); private static final String ERROR_STATUS_STR = "ERROR"; - + private static final Map PROCESSORS; + + static { + PROCESSORS = Map.of( + "csv", CSVFileProcessor::new, + "gpx", GPXFileProcessor::new, + "json", JSONFileProcessor::new, + "dat", DATFileProcessor::new + ); + } + public static void main(String[] arguments) { System.out.println("arguments are:"); System.out.println(Arrays.toString(arguments)); @@ -301,6 +308,7 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle int warningFlights = 0; int errorFlights = 0; + if (extension.equals(".zip")) { BiConsumer handleFlightFileFormatException = (z, e) -> { @@ -351,78 +359,25 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle ZipEntry entry = entries.nextElement(); String name = entry.getName(); - if (entry.isDirectory()) { - //System.err.println("SKIPPING: " + entry.getName()); - continue; - } - - if (name.contains("__MACOSX")) { + if (entry.isDirectory() || name.contains("__MACOSX")) { //System.err.println("SKIPPING: " + entry.getName()); continue; } - System.err.println("PROCESSING: " + name); + System.err.println("PROCESSING: " + name); // TODO: Use a logger String entryName = entry.getName(); + String entryExtension = entryName.substring(entryName.lastIndexOf("."), entryName.length()).toLowerCase(); - // if (entryName.contains(".csv")) { - // try { - // InputStream stream = zipFile.getInputStream(entry); - // Flight flight = new Flight(fleetId, entry.getName(), stream, connection); - - // if (connection != null) { - // flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - // } - - // if (flight.getStatus().equals("WARNING")) warningFlights++; - - // flightInfo.add(new FlightInfo(flight.getId(), flight.getNumberRows(), flight.getFilename(), flight.getExceptions())); - - // validFlights++; - // } catch (FlightProcessingException e) { - // System.err.println(e.getMessage()); - // flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); - // errorFlights++; - // } - - // } else if (entryName.contains(".gpx")) { - // try { - // InputStream stream = zipFile.getInputStream(entry); - // ArrayList flights = Flight.processGPXFile(fleetId, connection, stream, entry.getName()); - - // if (connection != null) { - // for (Flight flight : flights) { - // flightInfo.add(new FlightInfo(flight.getId(), flight.getNumberRows(), flight.getFilename(), flight.getExceptions())); - // } - // for (Flight flight : flights) { - // flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - // if (flight.getStatus().equals("WARNING")) warningFlights++; - // validFlights++; - // } - // } - // } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | - // ParserConfigurationException | SAXException | ParseException e) { - // System.err.println(e.getMessage()); - // flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); - // errorFlights++; - // } - // } else if (entry.getName().endsWith(".json")) { - // try { - // Flight flight = Flight.processJSON(fleetId, connection, zipFile.getInputStream(entry), entry.getName()); - - // if (connection != null) { - // flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - // } + if (!PROCESSORS.containsKey(entryExtension)) { + flightErrors.put(entry.getName(), new 
UploadException("Unknown file type contained in zip file", entry.getName())); + errorFlights.getAndIncrement(); + continue; + } - // if (flight.getStatus().equals("WARNING")) warningFlights++; + FlightFileProcessor processor = PROCESSORS.get(entryExtension).create(zipFile.getInputStream(entry), entry.getName(), new Object[]{zipFile}); + Stream flights = processor.parse(); - // validFlights++; - // } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | - // ParseException e) { - // System.err.println("ERROR: " + e.getMessage()); - // flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); - // errorFlights++; - // } // } else if (entry.getName().endsWith(".DAT")) { // String zipName = entry.getName().substring(entry.getName().lastIndexOf("/")); // String parentFolder = zipFile.getName().substring(0, zipFile.getName().lastIndexOf("/")); @@ -454,19 +409,6 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle // flightInfo.add(new FlightInfo(flight.getId(), flight.getNumberRows(), flight.getFilename(), flight.getExceptions())); // validFlights++; - // } catch (IOException | FatalFlightFileException | FlightAlreadyExistsException | MalformedFlightFileException | - // SQLException e) { - // System.err.println(e.getMessage()); - // flightErrors.put(entry.getName(), new UploadException(e.getMessage(), e, entry.getName())); - // errorFlights++; - // } finally { - // Files.delete(Paths.get(processedCSVFile.getAbsolutePath())); - // Files.delete(Paths.get(tempExtractedFile.getAbsolutePath())); - // } - // } else { - // flightErrors.put(entry.getName(), new UploadException("Unknown file type contained in zip file (flight logs should be .csv files).", entry.getName())); - // errorFlights++; - // } } } catch (java.nio.file.NoSuchFileException e) { @@ -485,20 +427,13 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle UploadError.insertError(connection, uploadId, "Could not read from zip file: please delete this upload and re-upload."); status = ERROR_STATUS_STR; uploadException = new Exception(e.toString() + ", could not read from zip file: please delete this upload and re-upload."); - } catch (NotDatFile e) { - LOG.log(Level.SEVERE, "NotDatFile: {0}", e.toString()); - e.printStackTrace(); - - UploadError.insertError(connection, uploadId, "Tried to process a non-DAT file as a DAT file."); - status = ERROR_STATUS_STR; - uploadException = new Exception(e + ", tried to process a non-DAT file as a DAT file."); - } catch (FileEnd e) { - LOG.log(Level.SEVERE, "FileEnd: {0}", e.toString()); + } catch (FlightProcessingException e) { + LOG.log(Level.SEVERE, "FlightProcessingException: {0}", e.toString()); e.printStackTrace(); - UploadError.insertError(connection, uploadId, "Reached the end of a file while doing DAT processing"); + UploadError.insertError(connection, uploadId, "Got an exception while parsing data"); status = ERROR_STATUS_STR; - uploadException = new Exception(e + ", reached the end of a file while doing DAT processing"); + uploadException = new Exception(e + "exception while parsing data"); } } else { @@ -513,8 +448,8 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle PreparedStatement updateStatement = connection.prepareStatement("UPDATE uploads SET status = ?, n_valid_flights = ?, n_warning_flights = ?, n_error_flights = ? 
WHERE id = ?"); updateStatement.setString(1, status); updateStatement.setInt(2, validFlights); - updateStatement.setInt(3, warningFlights); - updateStatement.setInt(4, errorFlights); + updateStatement.setInt(3, warningFlights.get()); + updateStatement.setInt(4, errorFlights.get()); updateStatement.setInt(5, uploadId); updateStatement.executeUpdate(); updateStatement.close(); @@ -555,7 +490,7 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle } } - uploadProcessedEmail.setErrorFlights(errorFlights); + uploadProcessedEmail.setErrorFlights(errorFlights.get()); for (Map.Entry entry : flightErrors.entrySet()) { UploadException exception = entry.getValue(); @@ -563,7 +498,7 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle uploadProcessedEmail.flightImportError(exception.getFilename(), exception.getMessage()); } - uploadProcessedEmail.setWarningFlights(warningFlights); + uploadProcessedEmail.setWarningFlights(warningFlights.get()); for (FlightInfo info : flightInfo) { List exceptions = info.exceptions; diff --git a/src/main/java/org/ngafid/flights/DJIFlightProcessor.java b/src/main/java/org/ngafid/flights/DJIFlightProcessor.java index 344ff5cbd..8e9039565 100644 --- a/src/main/java/org/ngafid/flights/DJIFlightProcessor.java +++ b/src/main/java/org/ngafid/flights/DJIFlightProcessor.java @@ -43,7 +43,6 @@ public static Flight processDATFile(int fleetId, String entry, InputStream strea if (attributeMap.containsKey("dateTime")) { calculateDateTime(connection, doubleTimeSeriesMap, stringTimeSeriesMap, attributeMap.get("dateTime")); - } else { String dateTimeStr = findStartDateTime(doubleTimeSeriesMap); if (dateTimeStr != null) { diff --git a/src/main/java/org/ngafid/flights/DoubleTimeSeries.java b/src/main/java/org/ngafid/flights/DoubleTimeSeries.java index dc6c2063a..a1dc719ae 100644 --- a/src/main/java/org/ngafid/flights/DoubleTimeSeries.java +++ b/src/main/java/org/ngafid/flights/DoubleTimeSeries.java @@ -554,5 +554,12 @@ public DoubleTimeSeries subSeries(Connection connection, int from, int until) th System.arraycopy(data, from, newSeries.data, 0, until - from); return newSeries; } + + public DoubleTimeSeries subSeries(int from, int until) throws SQLException { + DoubleTimeSeries newSeries = new DoubleTimeSeries(name, dataType, until - from); + newSeries.size = until - from; + System.arraycopy(data, from, newSeries.data, 0, until - from); + return newSeries; + } } diff --git a/src/main/java/org/ngafid/flights/StringTimeSeries.java b/src/main/java/org/ngafid/flights/StringTimeSeries.java index c000c83e9..4545c1170 100644 --- a/src/main/java/org/ngafid/flights/StringTimeSeries.java +++ b/src/main/java/org/ngafid/flights/StringTimeSeries.java @@ -248,5 +248,14 @@ public StringTimeSeries subSeries(Connection connection, int from, int until) th return newSeries; } + + public StringTimeSeries subSeries(int from, int until) throws SQLException { + StringTimeSeries newSeries = new StringTimeSeries(name, dataType); + + for (int i = from; i < until; i++) + newSeries.add(this.timeSeries.get(i)); + + return newSeries; + } } diff --git a/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java b/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java new file mode 100644 index 000000000..df0d97868 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java @@ -0,0 +1,180 @@ +package org.ngafid.flights.process; + +import com.opencsv.CSVReader; +import com.opencsv.exceptions.CsvException; +import 
org.ngafid.flights.*; + +import java.io.*; +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Stream; + +/** + * Handles parsing of CSV files + * + * @author Aaron Chan + */ + +public class CSVFileProcessor extends FlightFileProcessor { + private static final Logger LOG = Logger.getLogger(CSVFileProcessor.class.getName()); + private String airframeName; + private String startDateTime; + private String endDateTime; + private String airframeType; + private String suggestedTailNumber; + private String systemId; + private final List headers; + private final List dataTypes; + + public CSVFileProcessor(InputStream stream, String filename, Object... args) { + super(stream, filename); + headers = new ArrayList<>(); + dataTypes = new ArrayList<>(); + + this.airframeType = "Fixed Wing"; // Fixed Wing By default + } + + @Override + public Stream parse() throws FlightProcessingException { + Map doubleTimeSeries = new HashMap<>(); + Map stringTimeSeries = new HashMap<>(); + + List csvValues = null; + List dataTypes = new ArrayList<>(); + List headers = new ArrayList<>(); + + try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(super.stream, StandardCharsets.UTF_8)); CSVReader csvReader = new CSVReader(bufferedReader)) { + String fileInformation = getFlightInfo(bufferedReader); // Will read a line + updateAirframe(); + + if (airframeName != null && airframeName.equals("ScanEagle")) { + scanEagleParsing(fileInformation); // TODO: Handle ScanEagle data + } else { + bufferedReader.read(); // Skip first char (#) + dataTypes = List.of(csvReader.readNext()); + headers = List.of(csvReader.readNext()); + csvValues = csvReader.readAll(); + } + + int colIndex = 0; + String[] firstRow = csvValues.get(0); + for (String data : firstRow) { + try { + Double.parseDouble(data); + doubleTimeSeries.put(headers.get(colIndex), new DoubleTimeSeries(headers.get(colIndex), dataTypes.get(colIndex))); + } catch (NumberFormatException e) { + stringTimeSeries.put(headers.get(colIndex), new StringTimeSeries(headers.get(colIndex), dataTypes.get(colIndex))); + } + + colIndex++; + } + + List finalHeaders = headers; + csvValues.forEach(row -> { + for (int i = 0; i < row.length; i++) { + String header = finalHeaders.get(i); + String value = row[i]; + + try { + doubleTimeSeries.get(header).add(Double.parseDouble(value)); + } catch (NumberFormatException e) { + stringTimeSeries.get(header).add(value); + } + } + }); + + + } catch (IOException | FatalFlightFileException | CsvException e) { + throw new FlightProcessingException(e); + } + + FlightBuilder builder = new FlightBuilder(new FlightMeta(), doubleTimeSeries, stringTimeSeries); + + return Stream.of(new FlightBuilder[]{builder}); + } + + + /** + * Updates the airframe type if airframe name does not belong to fixed wing + */ + private void updateAirframe() { + if (airframeName.equals("R44") || airframeName.equals("Robinson R44")) { + airframeName = "R44"; + airframeType = "Rotorcraft"; + } + } + + /** + * Gets the flight information from the first line of the file + * @param reader BufferedReader for reading the first line + * @return + * @throws FatalFlightFileException + * @throws IOException + */ + private String getFlightInfo(BufferedReader reader) throws FatalFlightFileException, IOException { + String fileInformation = reader.readLine(); + + if (fileInformation == null || fileInformation.trim().length() == 0) { + throw new FatalFlightFileException("The 
flight file was empty."); + } + + if (fileInformation.charAt(0) != '#' && fileInformation.charAt(0) != '{') { + if (fileInformation.startsWith("DID_")) { + LOG.info("CAME FROM A SCANEAGLE! CAN CALCULATE SUGGESTED TAIL/SYSTEM ID FROM FILENAME"); + + this.airframeName = "ScanEagle"; + this.airframeType = "UAS Fixed Wing"; + } else { + throw new FatalFlightFileException("First line of the flight file should begin with a '#' and contain flight recorder information."); + } + } + + return fileInformation; + } + + + /** + * Parses for ScanEagle flight data + * @param fileInformation First line of the file + */ + private void scanEagleParsing(String fileInformation) { + + //need a custom method to process ScanEagle data because the column + //names are different and there is no header info + scanEagleSetTailAndID(); + scanEagleHeaders(fileInformation); + } + + /** + * Handles setting the tail number and system id for ScanEagle data + */ + private void scanEagleSetTailAndID() { + String[] filenameParts = filename.split("_"); + startDateTime = filenameParts[0]; + endDateTime = startDateTime; + LOG.log(Level.INFO, "start date: '{0}'", startDateTime); + LOG.log(Level.INFO, "end date: '{0}'", startDateTime); + + //UND doesn't have the systemId for UAS anywhere in the filename or file (sigh) + suggestedTailNumber = "N" + filenameParts[1] + "ND"; + systemId = suggestedTailNumber; + + LOG.log(Level.INFO, "suggested tail number: '{0}'", suggestedTailNumber); + LOG.log(Level.INFO, "system id: '{0}'", systemId); + } + + + // TODO: Figure out ScanEagle data + private void scanEagleHeaders(String fileInformation) { + String headersLine = fileInformation; + headers.addAll(Arrays.asList(headersLine.split("\\,", -1))); + headers.replaceAll(String::trim); + System.out.println("headers are:\n" + headers.toString()); + //scan eagle files have no data types, set all to "" + for (int i = 0; i < headers.size(); i++) { + dataTypes.add("none"); + } + } +} diff --git a/src/main/java/org/ngafid/flights/process/DATFileProcessor.java b/src/main/java/org/ngafid/flights/process/DATFileProcessor.java new file mode 100644 index 000000000..b4fe791e8 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/DATFileProcessor.java @@ -0,0 +1,741 @@ +package org.ngafid.flights.process; + +import com.opencsv.CSVReader; +import com.opencsv.exceptions.CsvValidationException; +import org.ngafid.flights.*; + +import java.io.*; +import java.net.URI; +import java.nio.file.*; +import java.sql.Connection; +import java.sql.SQLException; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Stream; +import java.util.zip.ZipFile; + +import static org.ngafid.common.TimeUtils.addMilliseconds; + +import Files.*; + + +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.Flight; +import org.ngafid.flights.FlightAlreadyExistsException; +import org.ngafid.flights.MalformedFlightFileException; + +/** + * Parses DAT files from DJI flights after converting them to CSV + * + * @author Aaron Chan + */ + +public class DATFileProcessor extends FlightFileProcessor { + private static final Logger LOG = Logger.getLogger(DATFileProcessor.class.getName()); + + private static final Set STRING_COLS = new HashSet<>(List.of(new String[]{"flyCState", "flycCommand", "flightAction", + "nonGPSCause", "connectedToRC", "Battery:lowVoltage", "RC:ModeSwitch", "gpsUsed", "visionUsed", "IMUEX(0):err"})); + + private final 
ZipFile zipFile; + + public DATFileProcessor(InputStream stream, String filename, Object... args) { + super(stream, filename); + this.zipFile = (ZipFile) args[0]; + } + + @Override + public Stream parse() throws FlightProcessingException { + try { + convertAndInsert(); + List inputStreams = duplicateInputStream(stream, 2); + Map indexedCols = new HashMap<>(); + Map doubleTimeSeriesMap = new HashMap<>(); + Map stringTimeSeriesMap = new HashMap<>(); + Map attributeMap = getAttributeMap(inputStreams.remove(inputStreams.size() - 1)); + + if (!attributeMap.containsKey("mcID(SN)")) { + throw new FlightProcessingException(new FatalFlightFileException("No DJI serial number provided in binary.")); + } + + try (CSVReader reader = new CSVReader(new BufferedReader(new InputStreamReader(inputStreams.remove(inputStreams.size() - 1))))) { + processCols(reader.readNext(), indexedCols, doubleTimeSeriesMap, stringTimeSeriesMap); + + readData(reader, doubleTimeSeriesMap, stringTimeSeriesMap, indexedCols); + calculateLatLonGPS(doubleTimeSeriesMap); + + if (attributeMap.containsKey("dateTime")) { + calculateDateTime(doubleTimeSeriesMap, stringTimeSeriesMap, attributeMap.get("dateTime")); + String dateTimeStr = findStartDateTime(doubleTimeSeriesMap); + + if (dateTimeStr != null) { + calculateDateTime(doubleTimeSeriesMap, stringTimeSeriesMap, dateTimeStr); + } + } + } catch (CsvValidationException | FatalFlightFileException | IOException e) { + throw new FlightProcessingException(e); + } catch (ParseException e) { + e.printStackTrace(); + } + + dropBlankCols(doubleTimeSeriesMap, stringTimeSeriesMap); + doubleTimeSeriesMap.put("AltAGL", new DoubleTimeSeries("AltAGL", "ft")); // TODO: Should this be done in proc? + + FlightMeta meta = new FlightMeta(); + meta.setFilename(filename); + meta.setAirframeType("UAS Rotorcraft"); + meta.setAirframeName("DJI " + attributeMap.get("ACType")); + meta.setSystemId(attributeMap.get("mcID(SN)")); + + + return Stream.of(new FlightBuilder[]{new FlightBuilder(meta, doubleTimeSeriesMap, stringTimeSeriesMap)}); + } catch (NotDatFile | FileEnd | IOException e) { + throw new FlightProcessingException(e); + } + } + + // TODO: Validate the conversion works still. 
Also maybe figure out another way of doing this since var args forced into FFP + + /** + * Converts the DAT file to CSV and inserts it into the zip file + * @throws NotDatFile + * @throws IOException + * @throws FileEnd + */ + private void convertAndInsert() throws NotDatFile, IOException, FileEnd { + String zipName = filename.substring(filename.lastIndexOf("/")); + String parentFolder = zipFile.getName().substring(0, zipFile.getName().lastIndexOf("/")); + File tempExtractedFile = new File(parentFolder, zipName); + + System.out.println("Extracting to " + tempExtractedFile.getAbsolutePath()); + try (InputStream inputStream = zipFile.getInputStream(zipFile.getEntry(filename)); FileOutputStream fileOutputStream = new FileOutputStream(tempExtractedFile)) { + int len; + byte[] buffer = new byte[1024]; + + while ((len = inputStream.read(buffer)) > 0) { + fileOutputStream.write(buffer, 0, len); + } + } + + convertDATFile(tempExtractedFile); + File processedCSVFile = new File(tempExtractedFile.getAbsolutePath() + ".csv"); + placeInZip(processedCSVFile.getAbsolutePath(), zipFile.getName().substring(zipFile.getName().lastIndexOf("/") + 1)); + } + + /** + * Places a file into the given zip file + * @param file - File to place + * @param zipFileName - Name of the zip file + * @throws IOException + */ + private static void placeInZip(String file, String zipFileName) throws IOException { + LOG.info("Placing " + file + " in zip"); + + Map zipENV = new HashMap<>(); + zipENV.put("create", "true"); + + Path csvFilePath = Paths.get(file); + Path zipFilePath = Paths.get(csvFilePath.getParent() + "/" + zipFileName); + + URI zipURI = URI.create("jar:" + zipFilePath.toUri()); + try (FileSystem fileSystem = FileSystems.newFileSystem(zipURI, zipENV)) { + Path zipFileSystemPath = fileSystem.getPath(file.substring(file.lastIndexOf("/") + 1)); + Files.write(zipFileSystemPath, Files.readAllBytes(csvFilePath), StandardOpenOption.CREATE); + } + } + + /** + * Converts the DAT file to CSV + * @param file - File to convert + * @return - CSV converted file + * @throws NotDatFile + * @throws IOException + * @throws FileEnd + */ + private static File convertDATFile(File file) throws NotDatFile, IOException, FileEnd { + LOG.info("Converting to CSV: " + file.getAbsolutePath()); + DatFile datFile = DatFile.createDatFile(file.getAbsolutePath()); + datFile.reset(); + datFile.preAnalyze(); + + ConvertDat convertDat = datFile.createConVertDat(); + + String csvFilename = file.getAbsolutePath() + ".csv"; + convertDat.csvWriter = new CsvWriter(csvFilename); + convertDat.createRecordParsers(); + + datFile.reset(); + AnalyzeDatResults results = convertDat.analyze(false); + LOG.info(datFile.getFile().getAbsolutePath()); + + return datFile.getFile(); + } + + /** + * Reads the data from the converted CSV file + * @param reader - CSV reader + * @param doubleTimeSeriesMap - Map of double time series data + * @param stringTimeSeriesMap - Map of string time series data + * @param indexedCols - Map of indexed columns + * @throws IOException + * @throws CsvValidationException + */ + private static void readData(CSVReader reader, Map doubleTimeSeriesMap, + Map stringTimeSeriesMap, Map indexedCols) throws IOException, CsvValidationException { + String[] line; + + while ((line = reader.readNext()) != null) { + for (int i = 0; i < line.length; i++) { + + String column = indexedCols.get(i); + + try { + if (doubleTimeSeriesMap.containsKey(column)) { + DoubleTimeSeries colTimeSeries = doubleTimeSeriesMap.get(column); + double value = !line[i].equals("") 
? Double.parseDouble(line[i]) : Double.NaN; + colTimeSeries.add(value); + } else { + StringTimeSeries colTimeSeries = stringTimeSeriesMap.get(column); + colTimeSeries.add(line[i]); + } + } catch (NullPointerException e) { + LOG.log(Level.WARNING, "Column {0} not found in time series map", column); + } catch (NumberFormatException e) { + LOG.log(Level.WARNING, "Could not parse value {0} as double", line[i]); + } + } + } + } + + /** + * Extracts latitude, longitude, and MSL altitude from the GPS series and stores them under the standard column names + * @param doubleTimeSeriesMap - Map of double time series data + * @throws FatalFlightFileException + */ + private static void calculateLatLonGPS(Map<String, DoubleTimeSeries> doubleTimeSeriesMap) throws FatalFlightFileException { + DoubleTimeSeries lonRad = doubleTimeSeriesMap.get("GPS(0):Long"); + DoubleTimeSeries latRad = doubleTimeSeriesMap.get("GPS(0):Lat"); + DoubleTimeSeries altMSL = doubleTimeSeriesMap.get("GPS(0):heightMSL"); + + if (lonRad == null || latRad == null) { + LOG.log(Level.WARNING, "Could not find GPS(0):Long or GPS(0):Lat in time series map"); + throw new FatalFlightFileException("No GPS data found in binary."); + } + + DoubleTimeSeries longDeg = new DoubleTimeSeries("Longitude", "degrees"); + DoubleTimeSeries latDeg = new DoubleTimeSeries("Latitude", "degrees"); + DoubleTimeSeries msl = new DoubleTimeSeries("AltMSL", "ft"); + + for (int i = 0; i < lonRad.size(); i++) { + longDeg.add(lonRad.get(i)); + } + + for (int i = 0; i < latRad.size(); i++) { + latDeg.add(latRad.get(i)); + } + + for (int i = 0; i < altMSL.size(); i++) { + msl.add(altMSL.get(i)); + } + + doubleTimeSeriesMap.put("Longitude", longDeg); + doubleTimeSeriesMap.put("Latitude", latDeg); + doubleTimeSeriesMap.put("AltMSL", msl); + } + + /** + * Builds the local date, local time, and UTC offset series from the flight's start date and time + * @param doubleTimeSeriesMap - Map of double time series data + * @param stringTimeSeriesMap - Map of string time series data + * @param dateTimeStr - Start date and time of the flight + * @throws ParseException + */ + private static void calculateDateTime(Map<String, DoubleTimeSeries> doubleTimeSeriesMap, Map<String, StringTimeSeries> stringTimeSeriesMap, String dateTimeStr) throws ParseException { + StringTimeSeries localDateSeries = new StringTimeSeries("Lcl Date", "yyyy-mm-dd"); + StringTimeSeries localTimeSeries = new StringTimeSeries("Lcl Time", "hh:mm:ss"); + StringTimeSeries utcOfstSeries = new StringTimeSeries("UTCOfst", "hh:mm"); // Always 0 + DoubleTimeSeries seconds = doubleTimeSeriesMap.get("offsetTime"); + + SimpleDateFormat lclDateFormat = new SimpleDateFormat("yyyy-MM-dd"); + SimpleDateFormat lclTimeFormat = new SimpleDateFormat("HH:mm:ss"); + + String[] dateTime = dateTimeStr.split(" "); + String date = dateTime[0]; + + if (date.split("-")[1].length() == 1) { + date = date.substring(0, 5) + "0" + date.substring(5); + } + + if (date.split("-")[2].length() == 1) { + date = date.substring(0, 8) + "0" + date.substring(8); + } + + String time = dateTime[1]; + + Date parsedDate = (new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")).parse(date + " " + time); + for (int i = 0; i < seconds.size(); i++) { + int millis = (int) (seconds.get(i) * 1000); + Date newDate = addMilliseconds(parsedDate, millis); + + localDateSeries.add(lclDateFormat.format(newDate)); + localTimeSeries.add(lclTimeFormat.format(newDate)); + utcOfstSeries.add("+00:00"); + } + + stringTimeSeriesMap.put("Lcl Date", localDateSeries); + stringTimeSeriesMap.put("Lcl Time", localTimeSeries); + stringTimeSeriesMap.put("UTCOfst", utcOfstSeries); + } + + + /** + * Determine the start date and time from the given time series map + * 
@param doubleTimeSeriesMap - Map of double time series data + * @return + */ + private static String findStartDateTime(Map doubleTimeSeriesMap) { + DoubleTimeSeries dateSeries = doubleTimeSeriesMap.get("GPS(0):Date"); + DoubleTimeSeries timeSeries = doubleTimeSeriesMap.get("GPS(0):Time"); + DoubleTimeSeries offsetTime = doubleTimeSeriesMap.get("offsetTime"); + + if (dateSeries == null || timeSeries == null) { + LOG.log(Level.WARNING, "Could not find GPS(0):Date or GPS(0):Time in time series map"); + return null; + } + + int colCount = 0; + while (colCount < dateSeries.size() && colCount < timeSeries.size()) { + int date = (int) dateSeries.get(colCount); // Date is an integer in the format YYYYMMDD + int time = (int) timeSeries.get(colCount); + + + if (!Double.isNaN(date) && !Double.isNaN(time) && date != 0 && time != 0) { + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss"); + + String year = String.valueOf(date).substring(0, 4); + String month = String.valueOf(date).substring(4, 6); + String day = String.valueOf(date).substring(6, 8); + + String hour = String.valueOf(time).substring(0, 2); + String minute = String.valueOf(time).substring(2, 4); + String second = String.valueOf(time).substring(4, 6); + + try { + Date parsedDate = dateFormat.parse(year + month + day + hour + minute + second); + int currentOffset = (int) (offsetTime.get(colCount) * 1000); + Date newDate = addMilliseconds(parsedDate, -currentOffset); + + return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(newDate); + } catch (ParseException e) { + LOG.log(Level.WARNING, "Could not parse date {0} and time {1} as date", new Object[]{date, time}); + return null; + } + } + + colCount++; + } + + return null; + } + + /** + * Duplicate an input stream a given number of times + * @param inputStream - Input Stream to duplicate + * @param copies - Number of copies to make + * @return - List of input streams + * @throws IOException + */ + private static List duplicateInputStream(InputStream inputStream, int copies) throws IOException { + List inputStreams = new ArrayList<>(); + List outputStreams = new ArrayList<>(); + + for (int i = 0; i < copies; i++) { + outputStreams.add(new ByteArrayOutputStream()); + } + + byte[] buffer = new byte[1024]; + while (inputStream.read(buffer) > -1) { + for (OutputStream outputStream : outputStreams) { + outputStream.write(buffer); + } + } + + for (OutputStream outputStream : outputStreams) { + outputStream.flush(); + inputStreams.add(new ByteArrayInputStream(((ByteArrayOutputStream) outputStream).toByteArray())); + } + + return inputStreams; + } + + /** + * Gets the attributes of the flight + * @param stream - Input stream of flight file + * @return + */ + private static Map getAttributeMap(InputStream stream) { + Map attributeMap = new HashMap<>(); + try (CSVReader reader = new CSVReader(new BufferedReader(new InputStreamReader(stream)))) { + String[] line; + while ((line = reader.readNext()) != null) { + if (line[line.length - 1].contains("|")) { + String[] split = line[line.length - 1].split("\\|"); + attributeMap.put(split[0], split[1]); + } + } + } catch (IOException | CsvValidationException e) { + e.printStackTrace(); + } + + LOG.log(Level.INFO, "Attribute Map: {0}", attributeMap); + + return attributeMap; + } + + /** + * Drop all columns that have no data + * @param doubleTimeSeriesMap - Map of double time series data + * @param stringTimeSeriesMap - Map of string time series data + */ + private static void dropBlankCols(Map doubleTimeSeriesMap, Map stringTimeSeriesMap) { 
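+ // Empty series are dropped with values().removeIf(); removing keys from a HashMap while iterating its keySet() in a for-each loop would throw a ConcurrentModificationException.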
+ doubleTimeSeriesMap.values().removeIf(series -> series.size() == 0); + stringTimeSeriesMap.values().removeIf(series -> series.size() == 0); + } + + /** + * Initialize columns based on flight data + * @param cols - Column names from the header of the converted CSV + * @param indexedCols - Map of column index to column name + * @param doubleTimeSeriesMap - Map of double time series data + * @param stringTimeSeriesMap - Map of string time series data + */ + private static void processCols(String[] cols, Map<Integer, String> indexedCols, Map<String, DoubleTimeSeries> doubleTimeSeriesMap, Map<String, StringTimeSeries> stringTimeSeriesMap) { + int i = 0; + for (String col : cols) { + indexedCols.put(i++, col); + String category = col.split(":")[0]; + + if (category.contains("(")) { + category = category.substring(0, category.indexOf("(")); + } + + switch (category) { + case "IMU_ATTI": + case "IMUEX": + handleIMUDataType(col, doubleTimeSeriesMap, stringTimeSeriesMap); + break; + case "GPS": + handleGPSDataType(col, doubleTimeSeriesMap, stringTimeSeriesMap); + break; + + case "Battery": + case "SMART_BATT": + handleBatteryDataType(col, doubleTimeSeriesMap); + break; + + case "Motor": + handleMotorDataType(col, doubleTimeSeriesMap, stringTimeSeriesMap); + break; + + case "RC": + handleRCDataType(col, doubleTimeSeriesMap, stringTimeSeriesMap); + break; + + case "AirComp": + handleAirCompDataType(col, doubleTimeSeriesMap); + break; + + case "General": + doubleTimeSeriesMap.put(col, new DoubleTimeSeries(col, "ft")); + break; + + case "Controller": + doubleTimeSeriesMap.put(col, new DoubleTimeSeries(col, "level")); + break; + + default: + handleMiscDataType(col, doubleTimeSeriesMap, stringTimeSeriesMap); + } + + } + } + + /** + * Helper for initializing IMU data + * @param colName - Name of column + * @param doubleTimeSeriesMap - Map of double time series data + * @param stringTimeSeriesMap - Map of string time series data + */ + private static void handleIMUDataType(String colName, Map<String, DoubleTimeSeries> doubleTimeSeriesMap, Map<String, StringTimeSeries> stringTimeSeriesMap) { + String dataType; + + if (colName.contains("accel")) { + dataType = "m/s^2"; + } else if (colName.contains("gyro") || colName.contains("Gyro")) { + dataType = "deg/s"; + } else if (colName.contains("vel") || colName.contains("Velocity")) { + dataType = "m/s"; + } else if (colName.contains("mag")) { + dataType = "A/m"; + } else if (colName.contains("Longitude") || colName.contains("Latitude")) { + dataType = "degrees"; + } else if (colName.contains("roll") || colName.contains("pitch") || colName.contains("yaw") || colName.contains("directionOfTravel")) { + dataType = "degrees"; + } else if (colName.contains("distance") || colName.contains("GPS-H") || colName.contains("Alti")) { + dataType = "ft"; + } else if (colName.contains("temperature")) { + dataType = "Celsius"; + } else if (colName.contains("barometer")) { + dataType = "atm"; + } else { + if (colName.contains("err")) { + stringTimeSeriesMap.put("IMUEX(0):err", new StringTimeSeries("IMUEX Error", "error")); + return; + } + + dataType = "number"; + if (!colName.contains("num")) { + LOG.log(Level.WARNING, "IMU Unknown data type: {0}", colName); + + } + } + + doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType)); + } + + /** + * Helper for initializing GPS data + * @param colName - Name of column + * @param doubleTimeSeriesMap - Map of double time series data + * @param stringTimeSeriesMap - Map of string time series data + */ + private static void handleGPSDataType(String colName, Map<String, DoubleTimeSeries> doubleTimeSeriesMap, Map<String, StringTimeSeries> stringTimeSeriesMap) { + String dataType; + + if (colName.contains("dateTimeStamp")) { + stringTimeSeriesMap.put(colName, 
new StringTimeSeries(colName, "yyyy-mm-ddThh:mm:ssZ")); + return; + } + + if (colName.contains("Long") || colName.contains("Lat")) { + dataType = "degrees"; + } else if (colName.contains("vel")) { + dataType = "m/s"; + } else if (colName.contains("height")) { + dataType = "ft"; + } else if (colName.contains("DOP")) { + dataType = "DOP Value"; + } else if (colName.contains("Date")) { + dataType = "Date"; + } else if (colName.contains("Time")) { + dataType = "Time"; + } else if (colName.contains("sAcc")) { + dataType = "cm/s"; + } else { + dataType = "number"; + if (!colName.contains("num")) { + LOG.log(Level.WARNING, "GPS Unknown data type: {0}", colName); + } + } + + doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType)); + } + + /** + * Helper for initializing battery data + * @param colName - Name of column + * @param doubleTimeSeriesMap - Map of double time series data + */ + private static void handleBatteryDataType(String colName, Map doubleTimeSeriesMap) { + String dataType = "number"; + String lowerColName = colName.toLowerCase(); + + if (lowerColName.contains("volt")) { + dataType = "Voltage"; + } else if (lowerColName.contains("watts")) { + dataType = "Watts"; + } else if (lowerColName.contains("current")) { + dataType = "Amps"; + } else if (lowerColName.contains("cap")) { + dataType = "Capacity"; + } else if (lowerColName.contains("temp")) { + dataType = "Celsius"; + } else if (lowerColName.contains("%")) { + dataType = "Percentage"; + } else if (lowerColName.contains("time")) { + dataType = "seconds"; + } else { + LOG.log(Level.WARNING, "Battery Unknown data type: {0}", colName); + } + + doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType)); + } + + /** + * Helper for initializing motor data + * @param colName - Name of column + * @param doubleTimeSeriesMap - Map of double time series data + */ + private static void handleMotorDataType(String colName, Map doubleTimeSeriesMap, Map stringTimeSeriesMap) { + if (colName.contains("lowVoltage")) { + stringTimeSeriesMap.put(colName, new StringTimeSeries(colName, "Low Voltage")); + return; + } else if (colName.contains("status")) { + stringTimeSeriesMap.put(colName, new StringTimeSeries(colName, "Battery Status")); + return; + } + + String dataType = "number"; + + if (colName.contains("V_out") || colName.contains("Volts")) { + dataType = "Voltage"; + } else if (colName.contains("Speed")) { + dataType = "m/s"; + } else if (colName.contains("Current")) { + dataType = "Amps"; + } else if (colName.contains("PPMrecv")) { + dataType = "RC Stop Command"; + } else if (colName.contains("Temp")) { + dataType = "Celsius"; + } else if (colName.contains("Status")) { + dataType = "Status Number"; + } else if (colName.contains("Hz")) { + dataType = "Status Number"; + } else { + LOG.log(Level.WARNING, "Battery Unknown data type: {0}", colName); + } + + doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType)); + } + + /** + * Helper for initializing RC data + * @param colName - Name of column + * @param doubleTimeSeriesMap - Map of double time series data + */ + private static void handleRCDataType(String colName, Map doubleTimeSeriesMap, Map stringTimeSeriesMap) { + String dataType = "number"; + + if (colName.contains("Aileron")) { + dataType = "Aileron"; + } else if (colName.contains("Elevator")) { + dataType = "Elevator"; + } else if (colName.contains("Rudder")) { + dataType = "Rudder"; + } else if (colName.contains("Throttle")) { + dataType = "Throttle"; + } else { + if 
(colName.equals("RC:ModeSwitch")) { + stringTimeSeriesMap.put(colName, new StringTimeSeries("RC Mode Switch", "Mode")); + return; + } + + LOG.log(Level.WARNING, "RC Unknown data type: {0}", colName); + } + + doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType)); + } + + /** + * Helper for initializing air comp data + * @param colName - Name of column + * @param doubleTimeSeriesMap - Map of double time series data + */ + private static void handleAirCompDataType(String colName, Map doubleTimeSeriesMap) { + String dataType; + + if (colName.contains("AirSpeed")) { + dataType = "knots"; + } else if (colName.contains("Alti")) { + dataType = "ft"; + } else if (colName.contains("Vel")) { + dataType = "k/h"; + } else { + dataType = "number"; + LOG.log(Level.WARNING, "AirComp Unknown data type: {0}", colName); + } + + doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType)); + } + + /** + * Helper for initializing other types of data + * @param colName - Name of column + * @param doubleTimeSeriesMap - Map of double time series data + */ + private static void handleMiscDataType(String colName, Map doubleTimeSeriesMap, Map stringTimeSeriesMap) { + String dataType; + boolean isDouble = true; + switch (colName) { + case "Tick#": + dataType = "tick"; + break; + + case "offsetTime": + case "flightTime": + dataType = "seconds"; + break; + + case "gpsHealth": + dataType = "GPS Health"; + break; + + case "flyCState": + dataType = "C State"; + isDouble = false; + break; + + case "flycCommand": + dataType = "Command"; + isDouble = false; + break; + + case "flightAction": + dataType = "Action"; + isDouble = false; + break; + + case "nonGPSCause": + dataType = "GPS Cause"; + isDouble = false; + break; + + case "connectedToRC": + dataType = "Connection"; + isDouble = false; + break; + + case "gpsUsed": + case "visionUsed": + dataType = "boolean"; + isDouble = false; + break; + + case "Attribute|Value": + dataType = "Key-Value Pair"; + isDouble = false; + break; + + default: + dataType = "N/A"; + isDouble = false; + LOG.log(Level.WARNING, "Misc Unknown data type: {0}", colName); + } + + if (isDouble) { + doubleTimeSeriesMap.put(colName, new DoubleTimeSeries(colName, dataType)); + } else { + stringTimeSeriesMap.put(colName, new StringTimeSeries(colName, dataType)); + } + } +} diff --git a/src/main/java/org/ngafid/flights/process/FlightBuilder.java b/src/main/java/org/ngafid/flights/process/FlightBuilder.java index ed1eff484..1834690fe 100644 --- a/src/main/java/org/ngafid/flights/process/FlightBuilder.java +++ b/src/main/java/org/ngafid/flights/process/FlightBuilder.java @@ -7,6 +7,7 @@ import java.util.List; import java.util.ArrayList; import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; import org.ngafid.flights.*; import org.ngafid.flights.process.*; @@ -71,7 +72,7 @@ protected List gatherSteps(Connection connection) { // Add all of our processing steps here... // The order doesn't matter; the DependencyGraph will resolve // the order in the event that there are dependencies. - return processSteps.stream().map(factory -> factory.create(connection, this)).toList(); + return processSteps.stream().map(factory -> factory.create(connection, this)).collect(Collectors.toList()); } // throws a flight processing exception if an unrecoverable error occurred. 
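Together, FlightFileProcessor and FlightBuilder define the extension contract introduced by this patch series: a processor is constructed with the raw InputStream and filename, and parse() returns a Stream of FlightBuilder objects, one per flight found in the file, each carrying a FlightMeta along with the double and string time series maps. The sketch below is a minimal, hypothetical illustration of that contract (the class name ExampleFileProcessor and its single-flight behavior are assumptions, not code from this patch); under this design, wiring it in should only require an entry along the lines of "example", ExampleFileProcessor::new in the PROCESSORS map in ProcessUpload.

    package org.ngafid.flights.process;

    import java.io.InputStream;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.stream.Stream;

    import org.ngafid.flights.DoubleTimeSeries;
    import org.ngafid.flights.StringTimeSeries;

    // Hypothetical example, not part of this patch: a minimal processor that
    // follows the FlightFileProcessor contract shown above.
    public class ExampleFileProcessor extends FlightFileProcessor {

        public ExampleFileProcessor(InputStream stream, String filename, Object... args) {
            super(stream, filename);
        }

        @Override
        public Stream<FlightBuilder> parse() throws FlightProcessingException {
            Map<String, DoubleTimeSeries> doubleSeries = new HashMap<>();
            Map<String, StringTimeSeries> stringSeries = new HashMap<>();

            // ... read this.stream here and populate the two time series maps ...

            FlightMeta meta = new FlightMeta();
            meta.setFilename(filename);

            // One FlightBuilder per flight contained in the file.
            return Stream.of(new FlightBuilder(meta, doubleSeries, stringSeries));
        }
    }
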
diff --git a/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java b/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java index f45ba0cf0..6179a0be0 100644 --- a/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java +++ b/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java @@ -23,11 +23,16 @@ public static FlightFileProcessor create(ZipEntry entry) throws FlightFileFormat public final String filename; public final InputStream stream; - public FlightFileProcessor(InputStream stream, String filename) { - this.filename = filename; + public FlightFileProcessor(InputStream stream, String filename, Object... args) { this.stream = stream; + this.filename = filename; } + /** + * Parses the file for flight data to be processed + * @return A stream of FlightBuilders + * @throws FlightProcessingException + */ public abstract Stream parse() throws FlightProcessingException; } diff --git a/src/main/java/org/ngafid/flights/process/FlightMeta.java b/src/main/java/org/ngafid/flights/process/FlightMeta.java index 4ec170dd7..5e8007ac1 100644 --- a/src/main/java/org/ngafid/flights/process/FlightMeta.java +++ b/src/main/java/org/ngafid/flights/process/FlightMeta.java @@ -4,7 +4,7 @@ * Utility class used by FlightBuilder to call the Flight constructor. **/ public final class FlightMeta { - + // TODO: Should be made private public int fleetId = -1, uploaderId = -1, uploadId = -1, @@ -19,4 +19,107 @@ public final class FlightMeta { airframeName, calculated, suggestedTailNumber; + public int getFleetId() { + return fleetId; + } + + public void setFleetId(int fleetId) { + this.fleetId = fleetId; + } + + public int getUploaderId() { + return uploaderId; + } + + public void setUploaderId(int uploaderId) { + this.uploaderId = uploaderId; + } + + public int getUploadId() { + return uploadId; + } + + public void setUploadId(int uploadId) { + this.uploadId = uploadId; + } + + public int getProcessingStatus() { + return processingStatus; + } + + public void setProcessingStatus(int processingStatus) { + this.processingStatus = processingStatus; + } + + public String getStartDateTime() { + return startDateTime; + } + + public void setStartDateTime(String startDateTime) { + this.startDateTime = startDateTime; + } + + public String getEndDateTime() { + return endDateTime; + } + + public void setEndDateTime(String endDateTime) { + this.endDateTime = endDateTime; + } + + public String getMd5Hash() { + return md5Hash; + } + + public void setMd5Hash(String md5Hash) { + this.md5Hash = md5Hash; + } + + public String getAirframeType() { + return airframeType; + } + + public void setAirframeType(String airframeType) { + this.airframeType = airframeType; + } + + public String getSystemId() { + return systemId; + } + + public void setSystemId(String systemId) { + this.systemId = systemId; + } + + public String getFilename() { + return filename; + } + + public void setFilename(String filename) { + this.filename = filename; + } + + public String getAirframeName() { + return airframeName; + } + + public void setAirframeName(String airframeName) { + this.airframeName = airframeName; + } + + public String getCalculated() { + return calculated; + } + + public void setCalculated(String calculated) { + this.calculated = calculated; + } + + public String getSuggestedTailNumber() { + return suggestedTailNumber; + } + + public void setSuggestedTailNumber(String suggestedTailNumber) { + this.suggestedTailNumber = suggestedTailNumber; + } } diff --git 
a/src/main/java/org/ngafid/flights/process/GPXFileProcessor.java b/src/main/java/org/ngafid/flights/process/GPXFileProcessor.java new file mode 100644 index 000000000..02e06fc5c --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/GPXFileProcessor.java @@ -0,0 +1,202 @@ +package org.ngafid.flights.process; + +import org.ngafid.flights.*; +import org.w3c.dom.Document; +import org.w3c.dom.NamedNodeMap; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.xml.sax.SAXException; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import java.io.IOException; +import java.io.InputStream; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.logging.Logger; +import java.util.stream.Stream; + +/** + * This class is responsible for parsing GPX files. + * + * @author Josh Karns + */ + +public class GPXFileProcessor extends FlightFileProcessor { + private static final Logger LOG = Logger.getLogger(GPXFileProcessor.class.getName()); + + public GPXFileProcessor(InputStream stream, String filename, Object... args) { + super(stream, filename); + } + + @Override + public Stream parse() throws FlightProcessingException { + try { + List flights = new ArrayList<>(); + parseFlights(filename, stream); + + return flights.stream(); + } catch (SQLException | MalformedFlightFileException | IOException | FatalFlightFileException | + FlightAlreadyExistsException e) { + throw new RuntimeException(e); + } + } + + public List parseFlights(String entry, InputStream stream) throws SQLException, MalformedFlightFileException, IOException, FatalFlightFileException, FlightAlreadyExistsException { + List flights = new ArrayList<>(); + // BE-GPS-2200 + DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); + try { + DocumentBuilder db = dbf.newDocumentBuilder(); + Document doc = db.parse(stream); + + NodeList l = doc.getElementsByTagName("trkseg"); + if (l.getLength() == 0) + throw new FatalFlightFileException("could not parse GPX data file: failed to find data node."); + + if (l.getLength() != 1) + throw new FatalFlightFileException("could not parse GPX data file: found multiple data nodes."); + + Node dataNode = l.item(0); + int len = dataNode.getChildNodes().getLength(); + + DoubleTimeSeries lat = new DoubleTimeSeries("Latitude", "degrees", len); + DoubleTimeSeries lon = new DoubleTimeSeries("Longitude", "degrees", len); + DoubleTimeSeries msl = new DoubleTimeSeries("AltMSL", "ft", len); + DoubleTimeSeries spd = new DoubleTimeSeries("GndSpd", "kt", len); + ArrayList timestamps = new ArrayList(len); + StringTimeSeries localDateSeries = new StringTimeSeries("Lcl Date", "yyyy-mm-dd"); + StringTimeSeries localTimeSeries = new StringTimeSeries("Lcl Time", "hh:mm:ss"); + StringTimeSeries utcOfstSeries = new StringTimeSeries("UTCOfst", "hh:mm"); + // ss.SSSSSSXXX + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX"); + + SimpleDateFormat lclDateFormat = new SimpleDateFormat("yyyy-MM-dd"); + SimpleDateFormat lclTimeFormat = new SimpleDateFormat("HH:mm:ss"); + + // NodeList serialNumberNodes = doc.getElementsByTagName("badelf:modelSerialNumber"); + // String serialNumber = serialNumberNodes.item(0).getTextContent(); + NodeList nicknameNodes = doc.getElementsByTagName("badelf:modelNickname"); + if (nicknameNodes.item(0) == null) + 
throw new FatalFlightFileException("GPX file is missing necessary metadata (modelNickname)."); + String nickname = nicknameNodes.item(0).getTextContent(); + + NodeList fdrModel = doc.getElementsByTagName("badelf:modelName"); + if (fdrModel.item(0) == null) + throw new FatalFlightFileException("GPX file is missing necessary metadata (modelName)."); + String airframeName = fdrModel.item(0).getTextContent(); + LOG.info("Airframe name: " + airframeName); + + NodeList dates = doc.getElementsByTagName("time"); + NodeList datanodes = doc.getElementsByTagName("trkpt"); + NodeList elenodes = doc.getElementsByTagName("ele"); + NodeList spdnodes = doc.getElementsByTagName("badelf:speed"); + + + if (spdnodes.item(0) == null) + throw new FatalFlightFileException("GPX file is missing GndSpd."); + + if (!(dates.getLength() == datanodes.getLength() && + dates.getLength() == elenodes.getLength() && + dates.getLength() == spdnodes.getLength())) { + throw new FatalFlightFileException("Mismatching number of data tags in GPX file"); + } + + for (int i = 0; i < dates.getLength(); i++) { + Date parsedDate = dateFormat.parse(dates.item(i).getTextContent()); + timestamps.add(new Timestamp(parsedDate.getTime())); + Calendar cal = new Calendar.Builder().setInstant(parsedDate).build(); + + int offsetMS = cal.getTimeZone().getOffset(parsedDate.getTime()); + String sign = offsetMS < 0 ? "-" : "+"; + offsetMS = offsetMS < 0 ? -offsetMS : offsetMS; + + int offsetSEC = offsetMS / 1000; + int offsetMIN = offsetSEC / 60; + int offsetHRS = offsetMIN / 60; + offsetMIN %= 60; + + String offsetHrsStr = (offsetHRS < 10 ? "0" : "") + offsetHRS; + String offsetMinStr = (offsetMIN < 10 ? "0" : "") + offsetMIN; + // This should look like +HH:mm + utcOfstSeries.add(sign + offsetHrsStr + ":" + offsetMinStr); + + localDateSeries.add(lclDateFormat.format(parsedDate)); + localTimeSeries.add(lclTimeFormat.format(parsedDate)); + + Node spdNode = spdnodes.item(i); + // Convert m / s to knots + spd.add(Double.parseDouble(spdNode.getTextContent()) * 1.94384); + + Node eleNode = elenodes.item(i); + // Convert meters to feet. + msl.add(Double.parseDouble(eleNode.getTextContent()) * 3.28084); + + Node d = datanodes.item(i); + NamedNodeMap attrs = d.getAttributes(); + + Node latNode = attrs.getNamedItem("lat"); + lat.add(Double.parseDouble(latNode.getTextContent())); + + Node lonNode = attrs.getNamedItem("lon"); + lon.add(Double.parseDouble(lonNode.getTextContent())); + } + + int start = 0; + for (int end = 1; end < timestamps.size(); end++) { + // 1 minute delay -> new flight. 
+ if (timestamps.get(end).getTime() - timestamps.get(end - 1).getTime() > 60000 + || end == localTimeSeries.size() - 1) { + if (end == localTimeSeries.size() - 1) { + end += 1; + } + + if (end - start < 60) { + start = end; + continue; + } + + StringTimeSeries localTime = localTimeSeries.subSeries(start, end); + StringTimeSeries localDate = localDateSeries.subSeries(start, end); + StringTimeSeries offset = utcOfstSeries.subSeries(start, end); + DoubleTimeSeries nlat = lat.subSeries(start, end); + DoubleTimeSeries nlon = lon.subSeries(start, end); + DoubleTimeSeries nmsl = msl.subSeries(start, end); + DoubleTimeSeries nspd = spd.subSeries(start, end); + + + HashMap doubleSeries = new HashMap<>(); + doubleSeries.put("GndSpd", nspd); + doubleSeries.put("Longitude", nlon); + doubleSeries.put("Latitude", nlat); + doubleSeries.put("AltMSL", nmsl); + + HashMap stringSeries = new HashMap<>(); + stringSeries.put("Lcl Date", localDate); + stringSeries.put("Lcl Time", localTime); + stringSeries.put("UTCOfst", offset); + + FlightMeta meta = new FlightMeta(); + meta.setFilename(this.filename); + meta.setAirframeName(airframeName); + meta.setSuggestedTailNumber(nickname); + meta.setSystemId(nickname); + meta.setAirframeType("Fixed Wing"); + + flights.add(new FlightBuilder(meta, doubleSeries, stringSeries)); + start = end; + } + } + + } catch (ParserConfigurationException | SAXException | ParseException e) { + throw new FatalFlightFileException("Could not parse GPX data file: " + e.getMessage()); + } + + return flights; + } +} diff --git a/src/main/java/org/ngafid/flights/process/JSONFileProcessor.java b/src/main/java/org/ngafid/flights/process/JSONFileProcessor.java new file mode 100644 index 000000000..4397f1897 --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/JSONFileProcessor.java @@ -0,0 +1,172 @@ +package org.ngafid.flights.process; + +import com.google.gson.Gson; +import com.google.gson.stream.JsonReader; +import org.ngafid.common.TimeUtils; +import org.ngafid.flights.*; + +import javax.xml.bind.DatatypeConverter; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.logging.Logger; +import java.util.stream.Stream; + +/** + * This class is responsible for parsing JSON files. + * + * @author Aaron Chan + */ + +public class JSONFileProcessor extends FlightFileProcessor { + private static final Logger LOG = Logger.getLogger(JSONFileProcessor.class.getName()); + + public JSONFileProcessor(InputStream stream, String filename, Object... 
args) { + super(stream, filename); + + } + + @Override + public Stream parse() throws FlightProcessingException { + FlightMeta flightMeta = new FlightMeta(); + final Map doubleTimeSeries = new HashMap<>(); + final Map stringTimeSeries = new HashMap<>(); + + + try { + processTimeSeries(flightMeta, doubleTimeSeries, stringTimeSeries); + } catch (SQLException | MalformedFlightFileException | IOException | FatalFlightFileException | + FlightAlreadyExistsException e) { + throw new FlightProcessingException(e); + } + + return Stream.of(new FlightBuilder(flightMeta, doubleTimeSeries, stringTimeSeries)); + } + + private void processTimeSeries(FlightMeta flightMeta, Map doubleTimeSeries, Map stringTimeSeries) throws SQLException, MalformedFlightFileException, IOException, FatalFlightFileException, FlightAlreadyExistsException { + String status = ""; + Gson gson = new Gson(); + JsonReader reader = new JsonReader(new InputStreamReader(super.stream)); + Map jsonMap = gson.fromJson(reader, Map.class); + + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HHmmssZ"); + + Date parsedDate; + try { + parsedDate = dateFormat.parse((String) jsonMap.get("date")); + } catch (Exception e) { + throw new MalformedFlightFileException("Could not parse date from JSON file: " + e.getMessage()); + } + + int timezoneOffset = parsedDate.getTimezoneOffset() / 60; + String timezoneOffsetString = (timezoneOffset >= 0 ? "+" : "-") + String.format("%02d:00", timezoneOffset); + + ArrayList headers = (ArrayList) jsonMap.get("details_headers"); + ArrayList> lines = (ArrayList>) jsonMap.get("details_data"); + int len = headers.size(); + + DoubleTimeSeries lat = new DoubleTimeSeries("Latitude", "degrees", len); + DoubleTimeSeries lon = new DoubleTimeSeries("Longitude", "degrees", len); + DoubleTimeSeries agl = new DoubleTimeSeries("AltAGL", "ft", len); + DoubleTimeSeries spd = new DoubleTimeSeries("GndSpd", "kt", len); + + ArrayList timestamps = new ArrayList<>(len); + StringTimeSeries localDateSeries = new StringTimeSeries("Lcl Date", "yyyy-mm-dd"); + StringTimeSeries localTimeSeries = new StringTimeSeries("Lcl Time", "hh:mm:ss"); + StringTimeSeries utcOfstSeries = new StringTimeSeries("UTCOfst", "hh:mm"); + + SimpleDateFormat lclDateFormat = new SimpleDateFormat("yyyy-MM-dd"); + SimpleDateFormat lclTimeFormat = new SimpleDateFormat("HH:mm:ss"); + + int latIndex = headers.indexOf("product_gps_latitude"); + int lonIndex = headers.indexOf("product_gps_longitude"); + int altIndex = headers.indexOf("altitude"); + int spdIndex = headers.indexOf("speed"); + int timeIndex = headers.indexOf("time"); + + double timeDiff = ((double) lines.get(lines.size() - 1).get(timeIndex)) - ((double) lines.get(0).get(timeIndex)); + if (timeDiff < 180) throw new FatalFlightFileException("Flight file was less than 3 minutes long, ignoring."); + + double prevSeconds = 0; + double metersToFeet = 3.28084; + + for (ArrayList line : lines) { + double milliseconds = (double) line.get(timeIndex) - prevSeconds; + prevSeconds = (double) line.get(timeIndex); + parsedDate = TimeUtils.addMilliseconds(parsedDate, (int) milliseconds); + + if ((double) line.get(latIndex) > 90 || (double) line.get(latIndex) < -90) { + LOG.severe("Invalid latitude: " + line.get(latIndex)); + status = "WARNING"; + lat.add(Double.NaN); + } else { + lat.add((Double) line.get(latIndex)); + } + + if ((double) line.get(lonIndex) > 180 || (double) line.get(lonIndex) < -180) { + LOG.severe("Invalid longitude: " + line.get(lonIndex)); + status = "WARNING"; + lon.add(Double.NaN); + 
} else { + lon.add((Double) line.get(lonIndex)); + } + + agl.add((Double) line.get(altIndex) * metersToFeet); + spd.add((Double) line.get(spdIndex)); + + localDateSeries.add(lclDateFormat.format(parsedDate)); + localTimeSeries.add(lclTimeFormat.format(parsedDate)); + utcOfstSeries.add(timezoneOffsetString); + timestamps.add(new Timestamp(parsedDate.getTime())); + } + + int start = 0; + int end = timestamps.size() - 1; + + DoubleTimeSeries nspd = spd.subSeries(start, end); + DoubleTimeSeries nlon = lon.subSeries(start, end); + DoubleTimeSeries nlat = lat.subSeries(start, end); + DoubleTimeSeries nagl = agl.subSeries(start, end); + + doubleTimeSeries.put("GndSpd", nspd); + doubleTimeSeries.put("Longitude", nlon); + doubleTimeSeries.put("Latitude", nlat); + doubleTimeSeries.put("AltAGL", nagl); // Parrot data is stored as AGL and most likely in meters + + StringTimeSeries localDate = localDateSeries.subSeries(start, end); + StringTimeSeries localTime = localTimeSeries.subSeries(start, end); + StringTimeSeries offset = utcOfstSeries.subSeries(start, end); + + stringTimeSeries.put("Lcl Date", localDate); + stringTimeSeries.put("Lcl Time", localTime); + stringTimeSeries.put("UTCOfst", offset); + + MessageDigest md = null; + try { + md = MessageDigest.getInstance("MD5"); + } catch (NoSuchAlgorithmException e) { + throw new FatalFlightFileException("Could not create MD5 hash: " + e.getMessage()); + } + byte[] hash = md.digest(filename.getBytes()); + + flightMeta.setStartDateTime(localDateSeries.get(0) + " " + localTimeSeries.get(0) + " " + utcOfstSeries.get(0)); + flightMeta.setEndDateTime(localDateSeries.get(localDateSeries.size() - 1) + " " + localTimeSeries.get(localTimeSeries.size() - 1) + " " + utcOfstSeries.get(utcOfstSeries.size() - 1)); + flightMeta.setMd5Hash(DatatypeConverter.printHexBinary(hash).toLowerCase()); + flightMeta.setAirframeType("UAS Rotorcraft"); + flightMeta.setSystemId((String) jsonMap.get("serial_number")); + flightMeta.setFilename(super.filename); + flightMeta.setAirframeName((String) jsonMap.get("controller_model")); + flightMeta.setCalculated(""); // TODO: Figure this out + flightMeta.setSuggestedTailNumber((String) jsonMap.get("serial_number")); + } +} From 4b8ff219f37d5b7b3b22fb25b4fc639cee6b02b7 Mon Sep 17 00:00:00 2001 From: Joshua Karns Date: Thu, 27 Apr 2023 10:55:11 -0400 Subject: [PATCH 08/12] Introduced flight file processing pipeline --- pom.xml | 8 +- src/main/java/org/ngafid/ProcessUpload.java | 147 +++------------ .../flights/process/CSVFileProcessor.java | 5 +- .../flights/process/DATFileProcessor.java | 6 +- .../ngafid/flights/process/FlightBuilder.java | 11 +- .../flights/process/FlightFileProcessor.java | 176 ++++++++++++++++-- .../flights/process/GPXFileProcessor.java | 9 +- .../flights/process/JSONFileProcessor.java | 5 +- .../flights/process/ProcessItinerary.java | 104 +++++++++++ 9 files changed, 319 insertions(+), 152 deletions(-) create mode 100644 src/main/java/org/ngafid/flights/process/ProcessItinerary.java diff --git a/pom.xml b/pom.xml index f1c046161..54aed3e23 100644 --- a/pom.xml +++ b/pom.xml @@ -129,8 +129,8 @@ maven-compiler-plugin 3.8.1 - 11 - 11 + 16 + 16 -Xlint:all -Xmaxwarns @@ -145,8 +145,8 @@ org.apache.maven.plugins maven-compiler-plugin - 11 - 11 + 16 + 16 diff --git a/src/main/java/org/ngafid/ProcessUpload.java b/src/main/java/org/ngafid/ProcessUpload.java index 046b7b80b..83e529a01 100644 --- a/src/main/java/org/ngafid/ProcessUpload.java +++ b/src/main/java/org/ngafid/ProcessUpload.java @@ -17,6 +17,7 @@ import Files.*; 
import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.ArrayList; import java.util.Enumeration; @@ -54,16 +55,6 @@ public class ProcessUpload { private static Connection connection = null; private static Logger LOG = Logger.getLogger(ProcessUpload.class.getName()); private static final String ERROR_STATUS_STR = "ERROR"; - private static final Map PROCESSORS; - - static { - PROCESSORS = Map.of( - "csv", CSVFileProcessor::new, - "gpx", GPXFileProcessor::new, - "json", JSONFileProcessor::new, - "dat", DATFileProcessor::new - ); - } public static void main(String[] arguments) { System.out.println("arguments are:"); @@ -205,7 +196,7 @@ public static void processUpload(Upload upload) { UploadProcessedEmail uploadProcessedEmail = new UploadProcessedEmail(recipients, bccRecipients); - boolean success = ingestFlights(connection, uploadId, fleetId, uploaderId, filename, uploadProcessedEmail); + boolean success = ingestFlights(connection, upload, uploadProcessedEmail); //only progress if the upload ingestion was successful if (success) { @@ -287,9 +278,15 @@ private static CheckedMap mapOrNull(Checked return new CheckedMap(f, exceptionHandler); } - public static boolean ingestFlights(Connection connection, int uploadId, int fleetId, int uploaderId, String filename, UploadProcessedEmail uploadProcessedEmail) throws SQLException { + + public static boolean ingestFlights(Connection connection, Upload upload, UploadProcessedEmail uploadProcessedEmail) throws SQLException { Instant start = Instant.now(); + int uploadId = upload.getId(); + int uploaderId = upload.getUploaderId(); + int fleetId = upload.getFleetId(); + + String filename = upload.getFilename(); filename = WebServer.NGAFID_ARCHIVE_DIR + "/" + fleetId + "/" + uploaderId + "/" + uploadId + "__" + filename; System.err.println("processing: '" + filename + "'"); @@ -302,114 +299,29 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle ArrayList flightInfo = new ArrayList(); - HashMap flightErrors = new HashMap(); + Map flightErrors = Collections.emptyMap(); int validFlights = 0; int warningFlights = 0; int errorFlights = 0; - if (extension.equals(".zip")) { - BiConsumer handleFlightFileFormatException = - (z, e) -> { - flightErrors.put(z.getName(), new UploadException("Unknown file type contained in zip file (flight logs should be .csv files).", z.getName())); - errorFlights++; - }; - - BiConsumer handleExceptionInProcessor = - (p, e) -> { - flightErrors.put(p.filename, new UploadException(e.getMessage(), e, p.filename)); - errorFlights++; - }; - BiConsumer handleExceptionInBuilder = - (b, e) -> { - flightErrors.put(b.meta.filename, new UploadException(e.getMessage(), e, b.meta.filename)); - errorFlights++; - }; - try { System.err.println("processing zip file: '" + filename + "'"); ZipFile zipFile = new ZipFile(filename); - Enumeration entries = zipFile.entries(); - Stream validFiles = - StreamSupport.stream( - Spliterators.spliteratorUnknownSize(entries.asIterator(), Spliterator.ORDERED), - false - ) - .filter(z -> !z.getName().contains("__MACOSX")) - .filter(z -> !z.isDirectory()); - - Stream pipeline = - validFiles - .map(mapOrNull(FlightFileProcessor::create, handleFlightFileFormatException)) // Create a FlightFileProcessor for each file - .filter(Objects::nonNull) // Filter out any null values (nulls indicate files we cant process) - .map(mapOrNull(p -> p.parse(), handleExceptionInProcessor)) // Parse the files (this is the initial parsing step - .filter(Objects::nonNull) 
// Filter out any null values (nulls indicate something went awry in the parsing step) - .flatMap(builder -> builder) // Merge streams together - .map(mapOrNull(builder -> builder.build(connection), handleExceptionInBuilder)) // - .filter(Objects::nonNull); - - pipeline.forEach((Flight flight) -> { - flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - if (flight.getStatus().equals("WARNING")) warningFlights++; - }); - - while (entries.hasMoreElements()) { - ZipEntry entry = entries.nextElement(); - String name = entry.getName(); - - if (entry.isDirectory() || name.contains("__MACOSX")) { - //System.err.println("SKIPPING: " + entry.getName()); - continue; - } - - System.err.println("PROCESSING: " + name); // TODO: Use a logger - - String entryName = entry.getName(); - String entryExtension = entryName.substring(entryName.lastIndexOf("."), entryName.length()).toLowerCase(); - - if (!PROCESSORS.containsKey(entryExtension)) { - flightErrors.put(entry.getName(), new UploadException("Unknown file type contained in zip file", entry.getName())); - errorFlights.getAndIncrement(); - continue; - } - - FlightFileProcessor processor = PROCESSORS.get(entryExtension).create(zipFile.getInputStream(entry), entry.getName(), new Object[]{zipFile}); - Stream flights = processor.parse(); + FlightFileProcessor.Pipeline pipeline = new FlightFileProcessor.Pipeline(connection, upload, zipFile); + pipeline + .stream() + .map(pipeline::parse) + .flatMap(pipeline::build) + .map(pipeline::insert) + .forEach(pipeline::tabulateFlightStatus); - // } else if (entry.getName().endsWith(".DAT")) { - // String zipName = entry.getName().substring(entry.getName().lastIndexOf("/")); - // String parentFolder = zipFile.getName().substring(0, zipFile.getName().lastIndexOf("/")); - // File tempExtractedFile = new File(parentFolder, zipName); - - // System.out.println("Extracting to " + tempExtractedFile.getAbsolutePath()); - // try (InputStream inputStream = zipFile.getInputStream(entry); FileOutputStream fileOutputStream = new FileOutputStream(tempExtractedFile)) { - // int len; - // byte[] buffer = new byte[1024]; - - // while ((len = inputStream.read(buffer)) > 0) { - // fileOutputStream.write(buffer, 0, len); - // } - // } - - // convertDATFile(tempExtractedFile); - // File processedCSVFile = new File(tempExtractedFile.getAbsolutePath() + ".csv"); - // placeInZip(processedCSVFile.getAbsolutePath(), zipFile.getName().substring(zipFile.getName().lastIndexOf("/") + 1)); - - // try (InputStream stream = new FileInputStream(processedCSVFile)) { - // Flight flight = processDATFile(fleetId, entry.getName(), stream, connection); - - // if (connection != null) { - // flight.updateDatabase(connection, uploadId, uploaderId, fleetId); - // } - - // if (flight.getStatus().equals("WARNING")) warningFlights++; - - // flightInfo.add(new FlightInfo(flight.getId(), flight.getNumberRows(), flight.getFilename(), flight.getExceptions())); - - // validFlights++; - } + flightErrors = pipeline.getFlightErrors(); + errorFlights = flightErrors.size(); + warningFlights = pipeline.getWarningFlightsCount(); + validFlights = pipeline.getValidFlightsCount(); } catch (java.nio.file.NoSuchFileException e) { System.err.println("NoSuchFileException: " + e); @@ -427,15 +339,7 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle UploadError.insertError(connection, uploadId, "Could not read from zip file: please delete this upload and re-upload."); status = ERROR_STATUS_STR; uploadException = new Exception(e.toString() 
+ ", could not read from zip file: please delete this upload and re-upload."); - } catch (FlightProcessingException e) { - LOG.log(Level.SEVERE, "FlightProcessingException: {0}", e.toString()); - e.printStackTrace(); - - UploadError.insertError(connection, uploadId, "Got an exception while parsing data"); - status = ERROR_STATUS_STR; - uploadException = new Exception(e + "exception while parsing data"); } - } else { //insert an upload error for this upload status = ERROR_STATUS_STR; @@ -448,8 +352,8 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle PreparedStatement updateStatement = connection.prepareStatement("UPDATE uploads SET status = ?, n_valid_flights = ?, n_warning_flights = ?, n_error_flights = ? WHERE id = ?"); updateStatement.setString(1, status); updateStatement.setInt(2, validFlights); - updateStatement.setInt(3, warningFlights.get()); - updateStatement.setInt(4, errorFlights.get()); + updateStatement.setInt(3, warningFlights); + updateStatement.setInt(4, errorFlights); updateStatement.setInt(5, uploadId); updateStatement.executeUpdate(); updateStatement.close(); @@ -457,6 +361,7 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle //insert all the flight errors to the database for (Map.Entry entry : flightErrors.entrySet()) { UploadException exception = entry.getValue(); + exception.printStackTrace(); FlightError.insertError(connection, uploadId, exception.getFilename(), exception.getMessage()); } @@ -490,7 +395,7 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle } } - uploadProcessedEmail.setErrorFlights(errorFlights.get()); + uploadProcessedEmail.setErrorFlights(errorFlights); for (Map.Entry entry : flightErrors.entrySet()) { UploadException exception = entry.getValue(); @@ -498,7 +403,7 @@ public static boolean ingestFlights(Connection connection, int uploadId, int fle uploadProcessedEmail.flightImportError(exception.getFilename(), exception.getMessage()); } - uploadProcessedEmail.setWarningFlights(warningFlights.get()); + uploadProcessedEmail.setWarningFlights(warningFlights); for (FlightInfo info : flightInfo) { List exceptions = info.exceptions; diff --git a/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java b/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java index df0d97868..039ba1bdc 100644 --- a/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java +++ b/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java @@ -4,6 +4,7 @@ import com.opencsv.exceptions.CsvException; import org.ngafid.flights.*; +import java.sql.Connection; import java.io.*; import java.nio.charset.StandardCharsets; import java.util.*; @@ -28,8 +29,8 @@ public class CSVFileProcessor extends FlightFileProcessor { private final List headers; private final List dataTypes; - public CSVFileProcessor(InputStream stream, String filename, Object... 
args) { - super(stream, filename); + public CSVFileProcessor(Connection connection, InputStream stream, String filename) { + super(connection, stream, filename); headers = new ArrayList<>(); dataTypes = new ArrayList<>(); diff --git a/src/main/java/org/ngafid/flights/process/DATFileProcessor.java b/src/main/java/org/ngafid/flights/process/DATFileProcessor.java index b4fe791e8..29fea4dba 100644 --- a/src/main/java/org/ngafid/flights/process/DATFileProcessor.java +++ b/src/main/java/org/ngafid/flights/process/DATFileProcessor.java @@ -41,9 +41,9 @@ public class DATFileProcessor extends FlightFileProcessor { private final ZipFile zipFile; - public DATFileProcessor(InputStream stream, String filename, Object... args) { - super(stream, filename); - this.zipFile = (ZipFile) args[0]; + public DATFileProcessor(Connection connection, InputStream stream, String filename, ZipFile file) { + super(connection, stream, filename); + this.zipFile = file; } @Override diff --git a/src/main/java/org/ngafid/flights/process/FlightBuilder.java b/src/main/java/org/ngafid/flights/process/FlightBuilder.java index 1834690fe..e64eca828 100644 --- a/src/main/java/org/ngafid/flights/process/FlightBuilder.java +++ b/src/main/java/org/ngafid/flights/process/FlightBuilder.java @@ -62,9 +62,16 @@ public synchronized FlightBuilder updateProcessingStatus(int processingStatus) { } private static final List processSteps = List.of( - ProcessAltAGL::new, + required(ProcessAltAGL::new), + required(ProcessAirportProximity::new), + required(ProcessStartEndTime::new), + ProcessLaggedAltMSL::new, + ProcessStallIndex::new, + ProcessTotalFuel::new, ProcessAirportProximity::new, - required(ProcessStartEndTime::new) + ProcessDivergence::new, + ProcessLOCI::new, + ProcessItinerary::new ); // This can be overridden. diff --git a/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java b/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java index 6179a0be0..534ab8cec 100644 --- a/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java +++ b/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java @@ -1,38 +1,190 @@ package org.ngafid.flights.process; +import java.io.IOException; import java.io.InputStream; +import java.sql.Connection; +import java.util.Map; +import java.util.HashMap; import java.util.stream.Stream; +import java.util.stream.StreamSupport; import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Enumeration; +import java.util.Objects; +import java.util.Spliterator; +import java.util.Spliterators; + +import org.ngafid.filters.Pair; +import org.ngafid.UploadException; +import org.ngafid.flights.Flight; +import org.ngafid.flights.Upload; public abstract class FlightFileProcessor { - public static FlightFileProcessor create(ZipEntry entry) throws FlightFileFormatException { - String filename = entry.getName(); + interface Factory { + FlightFileProcessor create(Connection connection, InputStream is, String filename); + } + + // Right now this is only for zip files but this could easily be extended to handle other types of archives. + // Most of the code is reusable. 
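// Typical use, as wired up in ProcessUpload.ingestFlights:
//
//     FlightFileProcessor.Pipeline pipeline = new FlightFileProcessor.Pipeline(connection, upload, zipFile);
//     pipeline.stream()
//             .map(pipeline::parse)
//             .flatMap(pipeline::build)
//             .map(pipeline::insert)
//             .forEach(pipeline::tabulateFlightStatus);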
+ public static class Pipeline { + final Connection connection; + final ZipFile zipFile; + final Map factories; + final Upload upload; + private int validFlightsCount = 0; + private int warningFlightsCount = 0; + + private HashMap flightErrors = new HashMap<>(); + + public Pipeline(Connection connection, Upload upload, ZipFile zipFile) { + this.connection = connection; + this.upload = upload; + this.zipFile = zipFile; + + this.factories = Map.of( + "csv", CSVFileProcessor::new, + "dat", this::createDATFileProcessor, + "json", JSONFileProcessor::new, + "gpx", GPXFileProcessor::new + ); + } + + public Map getFlightErrors() { + return Collections.unmodifiableMap(flightErrors); + } + + private FlightFileProcessor createDATFileProcessor(Connection connection, InputStream is, String filename) { + return new DATFileProcessor(connection, is, filename, zipFile); + } + + public Stream stream() { + Enumeration entries = zipFile.entries(); + Stream validFiles = + StreamSupport.stream( + Spliterators.spliteratorUnknownSize(entries.asIterator(), Spliterator.ORDERED), + false + ) + .filter(z -> !z.getName().contains("__MACOSX")) + .filter(z -> !z.isDirectory()); + + return validFiles.map(this::create).filter(Objects::nonNull); + } + + public Stream parse(FlightFileProcessor processor) { + try { + return processor.parse(); + } catch (FlightProcessingException e) { + flightErrors.put(processor.filename, new UploadException(e.getMessage(), e, processor.filename)); + return Stream.of(); + } + } + + public Flight build(FlightBuilder fb) { + try { + return fb.build(connection); + } catch (FlightProcessingException e) { + flightErrors.put(fb.meta.filename, new UploadException(e.getMessage(), e, fb.meta.filename)); + return null; + } + } + + public Stream build(Stream fbs) { + return fbs.map(this::build).filter(Objects::nonNull); + } + + private FlightFileProcessor create(ZipEntry entry) { + String filename = entry.getName(); + + int index = filename.lastIndexOf('.'); + String extension = index >= 0 ? filename.substring(index + 1).toLowerCase() : ""; + System.out.println("Extension: " + extension); + Factory f = factories.get(extension); + if (f != null) { + try { + return f.create(connection, zipFile.getInputStream(entry), zipFile.getName()); + } catch (IOException e) { + flightErrors.put(filename, new UploadException(e.getMessage(), e, filename)); + } + } else { + flightErrors.put(filename, new UploadException("Unknown file type '" + extension + "' contained in zip file.", filename)); + } - int index = filename.lastIndexOf('.'); - String extension = index >= 0 ? filename.substring(index) : ""; + return null; + } + + public Flight insert(Flight flight) { + flight.updateDatabase(connection, upload.getId(), upload.getUploaderId(), upload.getFleetId()); + return flight; + } - switch (extension) { - // TODO: Add supported extensions here! - case "": // No extension - default: - throw new FlightFileFormatException(filename); + public void tabulateFlightStatus(Flight flight) { + if (flight.getStatus().equals("WARNING")) + warningFlightsCount++; + else + validFlightsCount++; + } + + public int getWarningFlightsCount() { + return warningFlightsCount; + } + + public int getValidFlightsCount() { + return validFlightsCount; } } + public final Connection connection; public final String filename; public final InputStream stream; - public FlightFileProcessor(InputStream stream, String filename, Object... 
args) { - this.stream = stream; + public FlightFileProcessor(Connection connection, InputStream stream, String filename) { + this.connection = connection; this.filename = filename; + this.stream = stream; } + // If an exception occurs, it will be stored here. + FlightProcessingException parseException = null; /** * Parses the file for flight data to be processed * @return A stream of FlightBuilders * @throws FlightProcessingException */ - public abstract Stream parse() throws FlightProcessingException; + private Stream parsedFlightBuilders = null; + protected abstract Stream parse() throws FlightProcessingException; + + public FlightFileProcessor pipelinedParse() { + try { + parsedFlightBuilders = parse(); + assert parsedFlightBuilders != null; + } catch (FlightProcessingException e) { + parseException = e; + } + + return this; + } + + protected Stream flights = null; + protected final ArrayList buildExceptions = new ArrayList<>(); + + private Flight build(FlightBuilder fb) { + try { + return fb.build(connection); + } catch (FlightProcessingException e) { + buildExceptions.add(e); + } + return null; + } + + public FlightFileProcessor pipelinedBuild() { + if (parseException == null) { + flights = parsedFlightBuilders.map(this::build).filter(Objects::nonNull); + } + + return this; + } } diff --git a/src/main/java/org/ngafid/flights/process/GPXFileProcessor.java b/src/main/java/org/ngafid/flights/process/GPXFileProcessor.java index 02e06fc5c..15e9ef75e 100644 --- a/src/main/java/org/ngafid/flights/process/GPXFileProcessor.java +++ b/src/main/java/org/ngafid/flights/process/GPXFileProcessor.java @@ -30,15 +30,14 @@ public class GPXFileProcessor extends FlightFileProcessor { private static final Logger LOG = Logger.getLogger(GPXFileProcessor.class.getName()); - public GPXFileProcessor(InputStream stream, String filename, Object... args) { - super(stream, filename); + public GPXFileProcessor(Connection connection, InputStream stream, String filename) { + super(connection, stream, filename); } @Override public Stream parse() throws FlightProcessingException { try { - List flights = new ArrayList<>(); - parseFlights(filename, stream); + List flights = parseFlights(filename, stream); return flights.stream(); } catch (SQLException | MalformedFlightFileException | IOException | FatalFlightFileException | @@ -182,7 +181,7 @@ public List parseFlights(String entry, InputStream stream) throws stringSeries.put("UTCOfst", offset); FlightMeta meta = new FlightMeta(); - meta.setFilename(this.filename); + meta.setFilename(this.filename + ":" + start + "-" + end); meta.setAirframeName(airframeName); meta.setSuggestedTailNumber(nickname); meta.setSystemId(nickname); diff --git a/src/main/java/org/ngafid/flights/process/JSONFileProcessor.java b/src/main/java/org/ngafid/flights/process/JSONFileProcessor.java index 4397f1897..7d0a41601 100644 --- a/src/main/java/org/ngafid/flights/process/JSONFileProcessor.java +++ b/src/main/java/org/ngafid/flights/process/JSONFileProcessor.java @@ -31,9 +31,8 @@ public class JSONFileProcessor extends FlightFileProcessor { private static final Logger LOG = Logger.getLogger(JSONFileProcessor.class.getName()); - public JSONFileProcessor(InputStream stream, String filename, Object... 
args) { - super(stream, filename); - + public JSONFileProcessor(Connection connection, InputStream stream, String filename) { + super(connection, stream, filename); } @Override diff --git a/src/main/java/org/ngafid/flights/process/ProcessItinerary.java b/src/main/java/org/ngafid/flights/process/ProcessItinerary.java new file mode 100644 index 000000000..735c518bc --- /dev/null +++ b/src/main/java/org/ngafid/flights/process/ProcessItinerary.java @@ -0,0 +1,104 @@ +package org.ngafid.flights.process; + +import java.util.Set; +import java.util.ArrayList; +import java.util.Collections; +import java.sql.Connection; +import java.sql.SQLException; + +import java.nio.file.NoSuchFileException; + +import org.ngafid.flights.Flight; +import org.ngafid.flights.DoubleTimeSeries; +import org.ngafid.flights.StringTimeSeries; +import org.ngafid.flights.Itinerary; +import static org.ngafid.flights.Parameters.*; +import org.ngafid.flights.FatalFlightFileException; +import org.ngafid.flights.MalformedFlightFileException; + +public class ProcessItinerary extends ProcessStep { + private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(ALT_AGL, LATITUDE, LONGITUDE, AIRPORT_DISTANCE, RUNWAY_DISTANCE, GND_SPD, E1_RPM); + private static Set REQUIRED_STRING_COLUMNS = Set.of(AIRPORT_DISTANCE, NEAREST_AIRPORT, NEAREST_RUNWAY); + private static Set OUTPUT_COLUMNS = Set.of("_itinerary"); // This is a fake column; never actually created. + + public ProcessItinerary(Connection connection, FlightBuilder builder) { + super(connection, builder); + } + + public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getRequiredStringColumns() { return REQUIRED_STRING_COLUMNS; } + public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } + public Set getOutputColumns() { return OUTPUT_COLUMNS; } + + public boolean airframeIsValid(String airframe) { return true; } + + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + DoubleTimeSeries groundSpeed = doubleTS.get(GND_SPD); + DoubleTimeSeries rpm = doubleTS.get(E1_RPM); + + StringTimeSeries nearestAirportTS = stringTS.get(NEAREST_AIRPORT); + DoubleTimeSeries airportDistanceTS = doubleTS.get(AIRPORT_DISTANCE); + DoubleTimeSeries altitudeAGL = doubleTS.get(ALT_AGL); + + StringTimeSeries nearestRunwayTS = stringTS.get(NEAREST_RUNWAY); + DoubleTimeSeries runwayDistanceTS = doubleTS.get(RUNWAY_DISTANCE); + + ArrayList itinerary = new ArrayList<>(); + + Itinerary currentItinerary = null; + for (int i = 1; i < nearestAirportTS.size(); i++) { + String airport = nearestAirportTS.get(i); + String runway = nearestRunwayTS.get(i); + + if (airport != null && !airport.equals("")) { + //We've gotten close to an airport, so create a stop if there + //isn't one. If there is one, update the runway being visited. + //If the airport is a new airport (this shouldn't happen really), + //then create a new stop. 
+ if (currentItinerary == null) { + currentItinerary = new Itinerary(airport, runway, i, altitudeAGL.get(i), airportDistanceTS.get(i), runwayDistanceTS.get(i), groundSpeed.get(i), rpm.get(i)); + } else if (airport.equals(currentItinerary.getAirport())) { + currentItinerary.update(runway, i, altitudeAGL.get(i), airportDistanceTS.get(i), runwayDistanceTS.get(i), groundSpeed.get(i), rpm.get(i)); + } else { + currentItinerary.selectBestRunway(); + if (currentItinerary.wasApproach()) itinerary.add(currentItinerary); + currentItinerary = new Itinerary(airport, runway, i, altitudeAGL.get(i), airportDistanceTS.get(i), runwayDistanceTS.get(i), groundSpeed.get(i), rpm.get(i)); + } + + } else { + //aiport is null, so if there was an airport being visited + //then we can determine it's runway and add it to the itinerary + if (currentItinerary != null) { + currentItinerary.selectBestRunway(); + if (currentItinerary.wasApproach()) itinerary.add(currentItinerary); + } + + //set the currentItinerary to null until we approach another + //airport + currentItinerary = null; + } + } + + //dont forget to add the last stop in the itinerary if it wasn't set to null + if (currentItinerary != null) { + currentItinerary.selectBestRunway(); + if (currentItinerary.wasApproach()) itinerary.add(currentItinerary); + } + + /////////////////////////////////////////////////////////////////////////////////////////////////////////////// + // setting and determining itinerary type + int itinerary_size = itinerary.size(); + for (int i = 0; i < itinerary_size; i++) { + itinerary.get(i).determineType(); + } + /////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + System.err.println("Itinerary:"); + for (int i = 0; i < itinerary.size(); i++) { + System.err.println(itinerary.get(i)); + } + + builder.setItinerary(itinerary); + } + +} From 917f4018a82703b4064ecc8d88ff0350b9ddcf01 Mon Sep 17 00:00:00 2001 From: Joshua Karns Date: Thu, 27 Apr 2023 11:56:17 -0400 Subject: [PATCH 09/12] Refactored airframe definitions; adding file info processing to the CSVFileProcessor --- .../java/org/ngafid/flights/Airframes.java | 60 ++++++++++++++++ .../java/org/ngafid/flights/Parameters.java | 22 ------ .../flights/process/CSVFileProcessor.java | 70 ++++++++++++++++++- .../flights/process/FlightFileProcessor.java | 16 +++-- .../flights/process/ProcessDivergence.java | 9 +-- 5 files changed, 143 insertions(+), 34 deletions(-) diff --git a/src/main/java/org/ngafid/flights/Airframes.java b/src/main/java/org/ngafid/flights/Airframes.java index bf0b33fda..e5e3f8606 100644 --- a/src/main/java/org/ngafid/flights/Airframes.java +++ b/src/main/java/org/ngafid/flights/Airframes.java @@ -7,12 +7,50 @@ import java.sql.SQLException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.Set; import java.util.logging.Logger; public class Airframes { private static final Logger LOG = Logger.getLogger(Airframes.class.getName()); + + /** + * {@link Airframes} names + * + * TODO: In the future, we may want to consider using Set reather than hardcoded strings. 
+ * This would make our code more robust to varying airframe names + **/ + public static final String AIRFRAME_SCAN_EAGLE = "ScanEagle"; + public static final String AIRFRAME_DJI = "DJI"; + + public static final String AIRFRAME_CESSNA_172S = "Cessna 172S"; + public static final String AIRFRAME_CESSNA_172R = "Cessna 172R"; + public static final String AIRFRAME_CESSNA_172T = "Cessna 172T"; + public static final String AIRFRAME_CESSNA_400 = "Cessna 400"; + public static final String AIRFRAME_CESSNA_525 = "Cessna 525"; + public static final String AIRFRAME_CESSNA_MODEL_525 = "Cessna Model 525"; + public static final String AIRFRAME_CESSNA_T182T = "Cessna T182T"; + public static final String AIRFRAME_CESSNA_182T = "Cessna 182T"; + + public static final String AIRFRAME_PA_28_181 = "PA-28-181"; + public static final String AIRFRAME_PA_44_180 = "PA-44-180"; + public static final String AIRFRAME_PIPER_PA_46_500TP_MERIDIAN = "Piper PA-46-500TP Meridian"; + + public static final String AIRFRAME_CIRRUS_SR20 = "Cirrus SR20"; + public static final String AIRFRAME_CIRRUS_SR22 = "Cirrus SR22"; + + public static final String AIRFRAME_BEECHCRAFT_A36_G36 = "Beechcraft A36/G36"; + public static final String AIRFRAME_BEECHCRAFT_G58 = "Beechcraft G58"; + + public static final String AIRFRAME_DIAMOND_DA_40 = "Diamond DA 40"; + public static final String AIRFRAME_DIAMOND_DA40 = "Diamond DA40"; + public static final String AIRFRAME_DIAMOND_DA40NG = "Diamond DA40NG"; + public static final String AIRFRAME_DIAMOND_DA42NG = "Diamond DA42NG"; + public static final String AIRFRAME_DIAMOND_DA_40_F = "Diamond DA 40 F"; + + public static final String AIRFRAME_QUEST_KODIAK_100 = "Quest Kodiak 100"; private static HashMap nameIdMap = new HashMap<>(); private static HashMap airframeNameMap = new HashMap<>(); @@ -21,6 +59,28 @@ public class Airframes { private static HashSet fleetAirframes = new HashSet<>(); + public static final Set FIXED_WING_AIRFRAMES = Collections.unmodifiableSet(Set.of( + AIRFRAME_CESSNA_172R, + AIRFRAME_CESSNA_172S, + AIRFRAME_CESSNA_172T, + AIRFRAME_CESSNA_182T, + AIRFRAME_CESSNA_T182T, + AIRFRAME_CESSNA_MODEL_525, + AIRFRAME_CIRRUS_SR20, + AIRFRAME_CIRRUS_SR22, + AIRFRAME_DIAMOND_DA40, + AIRFRAME_DIAMOND_DA_40_F, + AIRFRAME_DIAMOND_DA40NG, + AIRFRAME_DIAMOND_DA42NG, + AIRFRAME_PA_28_181, + AIRFRAME_PA_44_180, + AIRFRAME_PIPER_PA_46_500TP_MERIDIAN, + AIRFRAME_QUEST_KODIAK_100, + AIRFRAME_CESSNA_400, + AIRFRAME_BEECHCRAFT_A36_G36, + AIRFRAME_BEECHCRAFT_G58 + )); + public static void setAirframeFleet(Connection connection, int airframeId, int fleetId) throws SQLException { String key = airframeId + "-" + fleetId; diff --git a/src/main/java/org/ngafid/flights/Parameters.java b/src/main/java/org/ngafid/flights/Parameters.java index 99086ae02..e91db55ac 100644 --- a/src/main/java/org/ngafid/flights/Parameters.java +++ b/src/main/java/org/ngafid/flights/Parameters.java @@ -107,28 +107,6 @@ public interface Parameters { public static final String UNIT_GALLONS = "gals"; public static final String UNIT_DEG_F = "deg F"; - /** - * {@link Airframes} names - * - * TODO: In the future, we may want to consider using Set reather than hardcoded strings. 
- * This would make our code more robust to varying airframe names - **/ - public static final String AIRFRAME_SCAN_EAGLE = "ScanEagle"; - public static final String AIRFRAME_DJI = "DJI"; - public static final String AIRFRAME_CESSNA_172S = "Cessna 172S"; - public static final String AIRFRAME_CESSNA_172R = "Cessna 172R"; - public static final String AIRFRAME_PA_28 = "PA-28-181"; - public static final String AIRFRAME_PA_44 = "PA-44-180"; - public static final String AIRFRAME_CIRRUS_SR20 = "Cirrus SR20"; - public static final String AIRFRAME_CIRRUS_SR22 = "Cirrus SR22"; - public static final String AIRFRAME_CESSNA_182T = "Cessna 182T"; - public static final String AIRFRAME_CESSNA_T182T = "Cessna T182T"; - public static final String AIRFRAME_BEECHCRAFT_A36_G36 = "Beechcraft A36/G36"; - public static final String AIRFRAME_CESSNA_400 = "Cessna 400"; - public static final String AIRFRAME_DIAMOND_DA__40 = "Diamond DA 40"; - public static final String AIRFRAME_DIAMOND_DA_40 = "Diamond DA40";; - public static final String AIRFRAME_DIAMOND_DA_40F = "Diamond DA 40 F"; - /** * {@link Airframes} id's */ diff --git a/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java b/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java index 039ba1bdc..14651c96f 100644 --- a/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java +++ b/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java @@ -29,8 +29,12 @@ public class CSVFileProcessor extends FlightFileProcessor { private final List headers; private final List dataTypes; - public CSVFileProcessor(Connection connection, InputStream stream, String filename) { + private final Upload upload; + + public CSVFileProcessor(Connection connection, InputStream stream, String filename, Upload upload) { super(connection, stream, filename); + this.upload = upload; + headers = new ArrayList<>(); dataTypes = new ArrayList<>(); @@ -53,6 +57,7 @@ public Stream parse() throws FlightProcessingException { if (airframeName != null && airframeName.equals("ScanEagle")) { scanEagleParsing(fileInformation); // TODO: Handle ScanEagle data } else { + processFileInormation(fileInformation); bufferedReader.read(); // Skip first char (#) dataTypes = List.of(csvReader.readNext()); headers = List.of(csvReader.readNext()); @@ -93,7 +98,7 @@ public Stream parse() throws FlightProcessingException { FlightBuilder builder = new FlightBuilder(new FlightMeta(), doubleTimeSeries, stringTimeSeries); - return Stream.of(new FlightBuilder[]{builder}); + return Stream.of(builder); } @@ -116,6 +121,7 @@ private void updateAirframe() { */ private String getFlightInfo(BufferedReader reader) throws FatalFlightFileException, IOException { String fileInformation = reader.readLine(); + System.out.println("File information = " + fileInformation); if (fileInformation == null || fileInformation.trim().length() == 0) { throw new FatalFlightFileException("The flight file was empty."); @@ -135,6 +141,66 @@ private String getFlightInfo(BufferedReader reader) throws FatalFlightFileExcept return fileInformation; } + private void processFileInormation(String fileInformation) throws FatalFlightFileException { + String[] infoParts = fileInformation.split(","); + try { + for (int i = 1; i < infoParts.length; i++) { + //process everything else (G1000 data) + if (infoParts[i].trim().length() == 0) continue; + + //System.err.println("splitting key/value: '" + infoParts[i] + "'"); + String subParts[] = infoParts[i].trim().split("="); + String key = subParts[0]; + String value = subParts[1]; + + 
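// Illustrative only (hypothetical values): an information line of the form
//   <header>, system_id="123456789", airframe_name="Cessna 172S", ...
// was split on commas above, and each key="value" pair on '='; the surrounding
// quotes are stripped below via value.substring(1, value.length() - 1).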
//System.err.println("key: '" + key + "'"); + //System.err.println("value: '" + value + "'"); + + // TODO: Create some sort of automatic mapping for synonomous airframe names. + if (key.equals("airframe_name")) { + airframeName = value.substring(1, value.length() - 1); + + //throw an error for 'Unknown Aircraft' + if (airframeName.equals("Unknown Aircraft")) { + throw new FatalFlightFileException("Flight airframe name was 'Unknown Aircraft', please fix and re-upload so the flight can be properly identified and processed."); + } + + + if (airframeName.equals("Diamond DA 40")) { + airframeName = "Diamond DA40"; + } else if ((airframeName.equals("Garmin Flight Display") || airframeName.equals("Robinson R44 Raven I")) && upload.getFleetId() == 1 /*This is a hack for UND who has their airframe names set up incorrectly for their helicopters*/) { + airframeName = "R44"; + } else if (airframeName.equals("Garmin Flight Display")) { + throw new FatalFlightFileException("Flight airframe name was 'Garmin Flight Display' which does not specify what airframe type the flight was, please fix and re-upload so the flight can be properly identified and processed."); + + } + + if (airframeName.equals("Cirrus SR22 (3600 GW)")) { + airframeName = "Cirrus SR22"; + } + + if (Airframes.FIXED_WING_AIRFRAMES.contains(airframeName) || airframeName.contains("Garmin")) { + airframeType = "Fixed Wing"; + } else if (airframeName.equals("R44") || airframeName.equals("Robinson R44")) { + airframeName = "R44"; + airframeType = "Rotorcraft"; + } else { + System.err.println("Could not import flight because the aircraft type was unknown for the following airframe name: '" + airframeName + "'"); + System.err.println("Please add this to the the `airframe_type` table in the database and update this method."); + System.exit(1); + } + + } else if (key.equals("system_id")) { + systemId = value.substring(1, value.length() - 1); + } + } + } catch (Exception e) { + //LOG.info("parsting flight information threw exception: " + e); + //e.printStackTrace(); + throw new FatalFlightFileException("Flight information line was not properly formed with key value pairs.", e); + } + } + /** * Parses for ScanEagle flight data diff --git a/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java b/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java index 534ab8cec..11a6564ca 100644 --- a/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java +++ b/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java @@ -22,7 +22,7 @@ import org.ngafid.flights.Upload; public abstract class FlightFileProcessor { - + interface Factory { FlightFileProcessor create(Connection connection, InputStream is, String filename); } @@ -45,7 +45,7 @@ public Pipeline(Connection connection, Upload upload, ZipFile zipFile) { this.zipFile = zipFile; this.factories = Map.of( - "csv", CSVFileProcessor::new, + "csv", this::createCSVFileProcessor, "dat", this::createDATFileProcessor, "json", JSONFileProcessor::new, "gpx", GPXFileProcessor::new @@ -60,6 +60,10 @@ private FlightFileProcessor createDATFileProcessor(Connection connection, InputS return new DATFileProcessor(connection, is, filename, zipFile); } + private FlightFileProcessor createCSVFileProcessor(Connection connection, InputStream is, String filename) { + return new CSVFileProcessor(connection, is, filename, upload); + } + public Stream stream() { Enumeration entries = zipFile.entries(); Stream validFiles = @@ -136,14 +140,14 @@ public int getValidFlightsCount() { } } - public 
final Connection connection; - public final String filename; - public final InputStream stream; + protected final Connection connection; + protected final InputStream stream; + protected final String filename; public FlightFileProcessor(Connection connection, InputStream stream, String filename) { this.connection = connection; - this.filename = filename; this.stream = stream; + this.filename = filename; } // If an exception occurs, it will be stored here. diff --git a/src/main/java/org/ngafid/flights/process/ProcessDivergence.java b/src/main/java/org/ngafid/flights/process/ProcessDivergence.java index 18cce59da..7a2fb9715 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessDivergence.java +++ b/src/main/java/org/ngafid/flights/process/ProcessDivergence.java @@ -17,6 +17,7 @@ import org.ngafid.terrain.TerrainCache; import org.ngafid.flights.DoubleTimeSeries; import static org.ngafid.flights.Parameters.*; +import static org.ngafid.flights.Airframes.*; import org.ngafid.flights.FatalFlightFileException; import org.ngafid.flights.MalformedFlightFileException; @@ -60,17 +61,17 @@ private record DivergenceConfig (List parameters, String output) {} Map.ofEntries( entry(AIRFRAME_CESSNA_172R, CESSNA_CONFIG), entry(AIRFRAME_CESSNA_172S, CESSNA_CONFIG), - entry(AIRFRAME_PA_28, PA_28_CONFIG), - entry(AIRFRAME_PA_44, PA_44_CONFIG), + entry(AIRFRAME_PA_28_181, PA_28_CONFIG), + entry(AIRFRAME_PA_44_180, PA_44_CONFIG), entry(AIRFRAME_CIRRUS_SR20, SIX_CYLINDER_CIRRUS), entry(AIRFRAME_CESSNA_T182T, SIX_CYLINDER_CIRRUS), entry(AIRFRAME_CESSNA_182T, SIX_CYLINDER_CIRRUS), entry(AIRFRAME_BEECHCRAFT_A36_G36, SIX_CYLINDER_CIRRUS), entry(AIRFRAME_CIRRUS_SR22, SIX_CYLINDER_CIRRUS), entry(AIRFRAME_CESSNA_400, SIX_CYLINDER_CIRRUS), - entry(AIRFRAME_DIAMOND_DA_40F, DIAMOND_CONFIG), + entry(AIRFRAME_DIAMOND_DA_40_F, DIAMOND_CONFIG), entry(AIRFRAME_DIAMOND_DA_40, DIAMOND_CONFIG), - entry(AIRFRAME_DIAMOND_DA__40, DIAMOND_CONFIG) + entry(AIRFRAME_DIAMOND_DA40, DIAMOND_CONFIG) ); public ProcessDivergence(Connection connection, FlightBuilder builder) { From 3d44e18b2202b5a19639d17d5d8586e011bed36d Mon Sep 17 00:00:00 2001 From: Joshua Karns Date: Fri, 28 Apr 2023 11:19:27 -0400 Subject: [PATCH 10/12] Importing appears to work. 
Next step is validation --- .../java/org/ngafid/CalculateExceedences.java | 64 ++++----- .../org/ngafid/FindLowEndingFuelEvents.java | 4 +- src/main/java/org/ngafid/ProcessUpload.java | 47 ++----- .../java/org/ngafid/airports/Airports.java | 29 +++-- .../org/ngafid/flights/DoubleTimeSeries.java | 19 ++- src/main/java/org/ngafid/flights/Flight.java | 9 +- .../java/org/ngafid/flights/Parameters.java | 2 +- .../org/ngafid/flights/StringTimeSeries.java | 13 +- .../flights/process/CSVFileProcessor.java | 121 ++++++++---------- .../flights/process/DependencyGraph.java | 14 +- .../ngafid/flights/process/FlightBuilder.java | 4 +- .../flights/process/FlightFileProcessor.java | 5 +- .../process/ProcessAirportProximity.java | 8 +- .../flights/process/ProcessItinerary.java | 2 +- .../ngafid/flights/process/ProcessLOCI.java | 2 +- .../flights/process/ProcessLaggedAltMSL.java | 2 +- .../flights/process/ProcessStallIndex.java | 9 +- .../flights/process/ProcessStartEndTime.java | 1 + .../flights/process/ProcessTotalFuel.java | 12 +- .../java/org/ngafid/terrain/SRTMTile.java | 15 ++- .../java/org/ngafid/terrain/TerrainCache.java | 4 +- 21 files changed, 187 insertions(+), 199 deletions(-) diff --git a/src/main/java/org/ngafid/CalculateExceedences.java b/src/main/java/org/ngafid/CalculateExceedences.java index 2a46aa045..517c7af29 100644 --- a/src/main/java/org/ngafid/CalculateExceedences.java +++ b/src/main/java/org/ngafid/CalculateExceedences.java @@ -29,9 +29,11 @@ import org.ngafid.filters.Conditional; import org.ngafid.filters.Filter; import org.ngafid.filters.Pair; +import java.util.logging.*; public class CalculateExceedences { - + private static final Logger LOG = Logger.getLogger(CalculateExceedences.class.getName()); + static String timeSeriesName = "Lcl Time"; static String dateSeriesName = "Lcl Date"; @@ -55,24 +57,24 @@ public void processFlight(Connection connection, Flight flight, EventDefinition int airframeNameId = flight.getAirframeNameId(); String flightFilename = flight.getFilename(); - System.out.println("Processing flight: " + flightId + ", " + flightFilename); + LOG.info("Processing flight: " + flightId + ", " + flightFilename); try { - System.out.println("Event is: '" + eventDefinition.getName() + "'"); + LOG.info("Event is: '" + eventDefinition.getName() + "'"); //first check and see if this was actually a flight (RPM > 800) Pair minMaxRPM1 = DoubleTimeSeries.getMinMax(connection, flightId, "E1 RPM"); Pair minMaxRPM2 = DoubleTimeSeries.getMinMax(connection, flightId, "E2 RPM"); - System.out.println("minMaxRPM1: " + minMaxRPM1); - System.out.println("minMaxRPM2: " + minMaxRPM2); + // LOG.info("minMaxRPM1: " + minMaxRPM1); + // LOG.info("minMaxRPM2: " + minMaxRPM2); if ((minMaxRPM1 == null && minMaxRPM2 == null) //both RPM values are null, can't calculate exceedence || (minMaxRPM2 == null && minMaxRPM1 != null && minMaxRPM1.second() < 800) //RPM2 is null, RPM1 is < 800 || (minMaxRPM1 == null && minMaxRPM2 != null && minMaxRPM2.second() < 800) //RPM1 is null, RPM2 is < 800 || (minMaxRPM1 != null && minMaxRPM1.second() < 800) && (minMaxRPM2 != null && minMaxRPM2.second() < 800)) { //RPM1 and RPM2 < 800 //couldn't calculate exceedences for this flight because the engines never kicked on (it didn't fly) - System.out.println("engines never turned on, setting flight_processed.had_error = 1"); + LOG.info("engines never turned on, setting flight_processed.had_error = 1"); if (uploadProcessedEmail != null) uploadProcessedEmail.addExceedenceError(flightFilename, "could not calculate exceedences 
for flight " + flightId + ", '" + flightFilename + "' - engines never turned on"); @@ -80,14 +82,14 @@ public void processFlight(Connection connection, Flight flight, EventDefinition stmt.setInt(1, fleetId); stmt.setInt(2, flightId); stmt.setInt(3, eventDefinition.getId()); - System.out.println(stmt.toString()); + // LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); return; } TreeSet columnNames = eventDefinition.getColumnNames(); - System.out.println("Number of Column Name(s): [ " + columnNames.size() + " ]"); + // LOG.info("Number of Column Name(s): [ " + columnNames.size() + " ]"); //first test and see if min/max values can violate exceedence, otherwise we can skip conditional.reset(); @@ -95,7 +97,7 @@ public void processFlight(Connection connection, Flight flight, EventDefinition Pair minMax = DoubleTimeSeries.getMinMax(connection, flightId, columnName); if (minMax == null) { - System.out.println("minMax was null, setting flight_processed.had_error = 1"); + LOG.info("minMax was null, setting flight_processed.had_error = 1"); //couldn't calculate this exceedence because at least one of the columns was missing if (uploadProcessedEmail != null) uploadProcessedEmail.addExceedenceError(flightFilename, "could not calculate '" + eventDefinition.getName() + "' for flight " + flightId + ", '" + flightFilename + "' - " + columnName + " was missing"); @@ -103,19 +105,19 @@ public void processFlight(Connection connection, Flight flight, EventDefinition stmt.setInt(1, fleetId); stmt.setInt(2, flightId); stmt.setInt(3, eventDefinition.getId()); - System.out.println(stmt.toString()); + // LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); return; } - System.out.println(columnName + ", min: " + minMax.first() + ", max: " + minMax.second()); + LOG.info(columnName + ", min: " + minMax.first() + ", max: " + minMax.second()); conditional.set(columnName, minMax); } - System.out.println("Post-set conditional: " + conditional.toString()); + LOG.info("Post-set conditional: " + conditional.toString()); boolean result = conditional.evaluate(); - System.out.println("overall result: " + result); + LOG.info("overall result: " + result); if (!result) { //this flight could not have caused one of these events @@ -123,7 +125,7 @@ public void processFlight(Connection connection, Flight flight, EventDefinition stmt.setInt(1, fleetId); stmt.setInt(2, flightId); stmt.setInt(3, eventDefinition.getId()); - System.out.println(stmt.toString()); + // LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); @@ -136,14 +138,14 @@ public void processFlight(Connection connection, Flight flight, EventDefinition if (timeSeries == null || dateSeries == null) { //couldn't calculate this exceedence because the date or time column was missing - System.out.println("time series or date series was missing, setting flight_processed.had_error = 1"); + LOG.info("time series or date series was missing, setting flight_processed.had_error = 1"); if (uploadProcessedEmail != null) uploadProcessedEmail.addExceedenceError(flightFilename, "could not calculate exceedences for flight " + flightId + ", '" + flightFilename + "' - date or time was missing"); PreparedStatement stmt = connection.prepareStatement("INSERT INTO flight_processed SET fleet_id = ?, flight_id = ?, event_definition_id = ?, count = 0, had_error = 1"); stmt.setInt(1, fleetId); stmt.setInt(2, flightId); stmt.setInt(3, eventDefinition.getId()); - System.out.println(stmt.toString()); + LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); return; 
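A note on the per-time-step exceedence scan in the hunk below: conceptually it is a run-length segmentation with a stop buffer. An event opens on the first row where the conditional evaluates true, and it only closes after stopBuffer consecutive rows where it evaluates false, so a brief dip back under the threshold does not split one event into two. The following is a minimal standalone sketch of that idea only; the names here (StopBufferSegmenter, Segment, exceeds) are illustrative and are not part of the NGAFID code, which evaluates a Conditional over several DoubleTimeSeries and additionally tracks severity, start/end timestamps, and the upload email summary.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.IntPredicate;

    // Sketch of stop-buffer event segmentation, assuming a boolean "exceeds"
    // test per time step stands in for evaluating the event conditional.
    public class StopBufferSegmenter {
        // An event is a [startLine, endLine] range of rows that exceeded the condition.
        public record Segment(int startLine, int endLine) {}

        public static List<Segment> segment(int length, IntPredicate exceeds, int stopBuffer) {
            List<Segment> events = new ArrayList<>();
            int startLine = -1;   // -1 means no event is currently being tracked
            int endLine = -1;
            int stopCount = 0;    // consecutive non-exceeding rows since the last exceedance

            for (int i = 0; i < length; i++) {
                if (exceeds.test(i)) {
                    if (startLine < 0) startLine = i;  // open a new event
                    endLine = i;
                    stopCount = 0;
                } else if (startLine >= 0) {
                    // Condition no longer met; only close the event once the stop
                    // buffer is exhausted, so short dips are tolerated.
                    if (++stopCount == stopBuffer) {
                        events.add(new Segment(startLine, endLine));
                        startLine = -1;
                        stopCount = 0;
                    }
                }
            }
            if (startLine >= 0) events.add(new Segment(startLine, endLine)); // event ran to the end of the flight
            return events;
        }

        public static void main(String[] args) {
            boolean[] exceeding = {false, true, true, false, true, false, false, false, true};
            List<Segment> events = segment(exceeding.length, i -> exceeding[i], /*stopBuffer=*/2);
            System.out.println(events); // [Segment[startLine=1, endLine=4], Segment[startLine=8, endLine=8]]
        }
    }

In the patch itself, stopCount plays the role of the consecutive-miss counter: the event is finalized once it reaches stopBuffer, which is when the "Stop count (...) reached the stop buffer (...), new event created!" message is logged.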
@@ -174,23 +176,23 @@ public void processFlight(Connection connection, Flight flight, EventDefinition lineNumber = i; double currentValue = doubleSeries[0].get(i); - //System.out.println("Pre-set conditional: " + conditional.toString()); + //LOG.info("Pre-set conditional: " + conditional.toString()); conditional.reset(); for (DoubleTimeSeries series : doubleSeries) { conditional.set(series.getName(), series.get(i)); } - //System.out.println("Post-set conditional: " + conditional.toString()); + //LOG.info("Post-set conditional: " + conditional.toString()); result = conditional.evaluate(); - //System.out.println(conditional + ", result: " + result); + //LOG.info(conditional + ", result: " + result); if (!result) { if (startTime != null) { //we're tracking an event, so increment the stopCount stopCount++; - System.out.println("stopCount: " + stopCount + " with on line: " + lineNumber ); + LOG.info("stopCount: " + stopCount + " with on line: " + lineNumber ); if (stopCount == stopBuffer) { System.err.println("Stop count (" + stopCount + ") reached the stop buffer (" + stopBuffer + "), new event created!"); @@ -224,7 +226,7 @@ public void processFlight(Connection connection, Flight flight, EventDefinition startLine = lineNumber; severity = eventDefinition.getSeverity(doubleSeries, i); - System.out.println("start date time: " + startTime + ", start line number: " + startLine); + LOG.info("start date time: " + startTime + ", start line number: " + startLine); } endLine = lineNumber; endTime = dateSeries.get(i) + " " + timeSeries.get(i); @@ -240,11 +242,11 @@ public void processFlight(Connection connection, Flight flight, EventDefinition Event event = new Event(startTime, endTime, startLine, endLine, severity); eventList.add( event ); } - System.out.println(""); + LOG.info(""); for (i = 0; i < eventList.size(); i++) { Event event = eventList.get(i); - System.out.println( "Event : [line: " + event.getStartLine() + " to " + event.getEndLine() + ", time: " + event.getStartTime() + " to " + event.getEndTime() + "]" ); + LOG.info( "Event : [line: " + event.getStartLine() + " to " + event.getEndLine() + ", time: " + event.getStartTime() + " to " + event.getEndTime() + "]" ); if (uploadProcessedEmail != null) uploadProcessedEmail.addExceedence(flightFilename, "flight " + flightId + ", '" + flightFilename + "' - '" + eventDefinition.getName() + "' from " + event.getStartTime() + " to " + event.getEndTime()); } @@ -284,7 +286,7 @@ public void processFlight(Connection connection, Flight flight, EventDefinition stmt.setDouble(8, sumSeverity); stmt.setDouble(9, minSeverity); stmt.setDouble(10, maxSeverity); - System.out.println(stmt.toString()); + LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); @@ -295,7 +297,7 @@ public void processFlight(Connection connection, Flight flight, EventDefinition stmt.setInt(1, fleetId); stmt.setInt(2, flightId); stmt.setInt(3, eventDefinition.getId()); - System.out.println(stmt.toString()); + LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); @@ -316,14 +318,14 @@ public static void calculateExceedences(Connection connection, int uploadId, Upl if (allEvents == null) { allEvents = EventDefinition.getAll(connection, "id > ?", new Object[]{0}); } - System.out.println("n events = " + allEvents.size()); + LOG.info("n events = " + allEvents.size()); int airframeTypeId = Airframes.getTypeId(connection, "Fixed Wing"); for (int i = 0; i < allEvents.size(); i++) { //process events for this event type EventDefinition currentDefinition = allEvents.get(i); - 
System.out.println("\t" + currentDefinition.toString()); + LOG.info("\t" + currentDefinition.toString()); CalculateExceedences currentCalculator = new CalculateExceedences(currentDefinition); @@ -349,7 +351,7 @@ public static void calculateExceedences(Connection connection, int uploadId, Upl Instant end = Instant.now(); long elapsed_millis = Duration.between(start, end).toMillis(); double elapsed_seconds = ((double) elapsed_millis) / 1000; - System.out.println("finished in " + elapsed_seconds); + LOG.info("finished in " + elapsed_seconds); if (uploadProcessedEmail != null) uploadProcessedEmail.setExceedencesElapsedTime(elapsed_seconds); } @@ -365,11 +367,11 @@ public static void main(String[] arguments) { connection = Database.resetConnection(); Instant start = Instant.now(); ArrayList allEvents = EventDefinition.getAll(connection, "id > ?", new Object[]{0}); - System.out.println("n events = " + allEvents.size()); + LOG.info("n events = " + allEvents.size()); for (int i = 0; i < allEvents.size(); i++) { //process events for this event type EventDefinition currentDefinition = allEvents.get(i); - System.out.println("\t" + currentDefinition.toString()); + LOG.info("\t" + currentDefinition.toString()); CalculateExceedences currentCalculator = new CalculateExceedences(currentDefinition); @@ -395,7 +397,7 @@ public static void main(String[] arguments) { Instant end = Instant.now(); long elapsed_millis = Duration.between(start, end).toMillis(); double elapsed_seconds = ((double) elapsed_millis) / 1000; - System.out.println("finished in " + elapsed_seconds); + LOG.info("finished in " + elapsed_seconds); try { Thread.sleep(3000); diff --git a/src/main/java/org/ngafid/FindLowEndingFuelEvents.java b/src/main/java/org/ngafid/FindLowEndingFuelEvents.java index 039898454..7700c250c 100644 --- a/src/main/java/org/ngafid/FindLowEndingFuelEvents.java +++ b/src/main/java/org/ngafid/FindLowEndingFuelEvents.java @@ -37,7 +37,7 @@ public static void findLowEndFuelEventsInUpload(Connection connection, Upload up try { findLowEndFuel(connection, flight); } catch (MalformedFlightFileException e) { - System.out.println("Could not process flight " + flight.getId()); + LOG.info("Could not process flight " + flight.getId()); } catch (ParseException e) { LOG.info("Error parsing date"); e.printStackTrace(); @@ -81,7 +81,7 @@ public static void findLowEndFuel(Connection connection, Flight flight) throws S String[] lastValidDateAndIndex = date.getLastValidAndIndex(); int i = Integer.parseInt(lastValidDateAndIndex[1]); - System.out.println("last valid date and index: " + i); + LOG.info("last valid date and index: " + i); String endTime = lastValidDateAndIndex[0] + " " + time.getLastValid(); diff --git a/src/main/java/org/ngafid/ProcessUpload.java b/src/main/java/org/ngafid/ProcessUpload.java index 83e529a01..9df2b6f1a 100644 --- a/src/main/java/org/ngafid/ProcessUpload.java +++ b/src/main/java/org/ngafid/ProcessUpload.java @@ -193,10 +193,15 @@ public static void processUpload(Upload upload) { upload.reset(connection); System.out.println("upload was reset!\n\n"); - UploadProcessedEmail uploadProcessedEmail = new UploadProcessedEmail(recipients, bccRecipients); + long start = System.nanoTime(); boolean success = ingestFlights(connection, upload, uploadProcessedEmail); + long end = System.nanoTime(); + + long diff = end - start; + double asSeconds = ((double) diff) / 1.0e-9; + System.out.println("Took " + asSeconds + "s to ingest upload " + upload.getFilename()); //only progress if the upload ingestion was successful if 
(success) { @@ -244,41 +249,6 @@ public FlightInfo(int id, int length, String filename, List { - public R apply(T t) throws E; - } - - static class CheckedMap implements Function { - final BiConsumer exceptionHandler; - final CheckedFunction f; - - public CheckedMap(CheckedFunction f, BiConsumer exceptionHandler) { - this.exceptionHandler = exceptionHandler; - this.f = f; - } - - public R apply(T t) { - try { - return f.apply(t); - } catch (Error | RuntimeException e) { - throw e; - } catch (Exception ex) { - @SuppressWarnings("unchecked") E e = (E) ex; - exceptionHandler.accept(t, e); - return null; - } - } - } - - - // Generates a function which when called will call the supplied function f which may raise an exception. - // In the event of an exception the exception the exceptionHandler is called and supplied the value T as - // well as the exception object, and finally null is returned. - private static CheckedMap mapOrNull(CheckedFunction f, BiConsumer exceptionHandler) { - return new CheckedMap(f, exceptionHandler); - } - - public static boolean ingestFlights(Connection connection, Upload upload, UploadProcessedEmail uploadProcessedEmail) throws SQLException { Instant start = Instant.now(); @@ -313,8 +283,9 @@ public static boolean ingestFlights(Connection connection, Upload upload, Upload FlightFileProcessor.Pipeline pipeline = new FlightFileProcessor.Pipeline(connection, upload, zipFile); pipeline .stream() - .map(pipeline::parse) - .flatMap(pipeline::build) + .flatMap(pipeline::parse) + .map(pipeline::build) + .filter(Objects::nonNull) .map(pipeline::insert) .forEach(pipeline::tabulateFlightStatus); diff --git a/src/main/java/org/ngafid/airports/Airports.java b/src/main/java/org/ngafid/airports/Airports.java index 6c0608c75..86c83aa19 100644 --- a/src/main/java/org/ngafid/airports/Airports.java +++ b/src/main/java/org/ngafid/airports/Airports.java @@ -9,11 +9,13 @@ import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; - +import java.util.logging.*; import org.ngafid.common.MutableDouble; public class Airports { + private static final Logger LOG = Logger.getLogger(Airports.class.getName()); + public final static double AVERAGE_RADIUS_OF_EARTH_KM = 6371; private static HashMap> geoHashToAirport; @@ -33,7 +35,7 @@ public class Airports { System.err.println("export AIRPORTS_FILE="); System.exit(1); } - System.out.println("AIRPORTS_FILE: '" + System.getenv("AIRPORTS_FILE") + "'"); + LOG.info("AIRPORTS_FILE: '" + System.getenv("AIRPORTS_FILE") + "'"); AIRPORTS_FILE = System.getenv("AIRPORTS_FILE"); @@ -96,13 +98,13 @@ public class Airports { numberAirports++; } - System.out.println("Creating buffered reader for '" + RUNWAYS_FILE + "'"); + LOG.info("Creating buffered reader for '" + RUNWAYS_FILE + "'"); //now read the runways file and add runways to airports br = new BufferedReader(new FileReader(RUNWAYS_FILE)); - System.out.println("buffered reader is ready? " + br.ready()); + LOG.info("buffered reader is ready? 
" + br.ready()); while ((line = br.readLine()) != null) { - //System.out.println("read runways line: " + line); + //LOG.info("read runways line: " + line); String[] values = line.split(","); @@ -135,7 +137,7 @@ public class Airports { } airport.addRunway(runway); - //System.out.println("Adding " + runway + " to " + airport); + //LOG.info("Adding " + runway + " to " + airport); } } catch (Exception e) { @@ -143,9 +145,9 @@ public class Airports { System.exit(1); } - System.out.println("Read " + numberAirports + " airports."); - System.out.println("airports HashMap size: " + geoHashToAirport.size()); - System.out.println("max airport ArrayList: " + maxHashSize); + LOG.info("Read " + numberAirports + " airports."); + LOG.info("airports HashMap size: " + geoHashToAirport.size()); + LOG.info("max airport ArrayList: " + maxHashSize); } /** @@ -231,11 +233,11 @@ public static Airport getNearestAirportWithin(double latitude, double longitude, ArrayList hashedAirports = geoHashToAirport.get(geoHashes[i]); if (hashedAirports != null) { - // System.out.println("\t" + geoHashes[i] + " resulted in " + hashedAirports.size() + " airports."); + // LOG.info("\t" + geoHashes[i] + " resulted in " + hashedAirports.size() + " airports."); for (int j = 0; j < hashedAirports.size(); j++) { Airport airport = hashedAirports.get(j); double distanceFt = calculateDistanceInFeet(latitude, longitude, airport.latitude, airport.longitude); - // System.out.println("\t\t" + airport + ", distanceFt: " + distanceFt); + // LOG.info("\t\t" + airport + ", distanceFt: " + distanceFt); if (distanceFt < minDistance) { nearestAirport = airport; @@ -248,9 +250,9 @@ public static Airport getNearestAirportWithin(double latitude, double longitude, /* if (nearestAirport != null) { - System.out.println("nearest airport: " + nearestAirport + ", " + minDistance); + LOG.info("nearest airport: " + nearestAirport + ", " + minDistance); } else { - System.out.println("nearest airport: NULL"); + LOG.info("nearest airport: NULL"); } */ @@ -258,7 +260,6 @@ public static Airport getNearestAirportWithin(double latitude, double longitude, } public static boolean hasRunwayInfo(String iataCode) { - System.out.println("checking to see if airport '" + iataCode + "' has runway info"); return iataToAirport.get(iataCode).hasRunways(); } diff --git a/src/main/java/org/ngafid/flights/DoubleTimeSeries.java b/src/main/java/org/ngafid/flights/DoubleTimeSeries.java index a1dc719ae..90760d962 100644 --- a/src/main/java/org/ngafid/flights/DoubleTimeSeries.java +++ b/src/main/java/org/ngafid/flights/DoubleTimeSeries.java @@ -39,6 +39,9 @@ public class DoubleTimeSeries { private double[] data; private int size = 0; + // Set this to true if this double time series is temporary and should not be written to the database. 
+ private boolean temporary = false; + // Now called size since data.length is the buffer length and size is the number of elements in the buffer // private int length = -1; private double min = Double.MAX_VALUE; @@ -79,12 +82,15 @@ public DoubleTimeSeries(Connection connection, String name, String dataType) thr } public DoubleTimeSeries(Connection connection, String name, String dataType, ArrayList stringTimeSeries) throws SQLException { + this(name, dataType, stringTimeSeries); + setNameId(connection); + setTypeId(connection); + } + + public DoubleTimeSeries(String name, String dataType, ArrayList stringTimeSeries) { this.name = name; - this.nameId = SeriesNames.getDoubleNameId(connection, name); this.dataType = dataType; - this.typeId = TypeNames.getId(connection, dataType); - // timeSeries = new ArrayList(); this.data = new double[stringTimeSeries.size()]; int emptyValues = 0; @@ -94,7 +100,6 @@ public DoubleTimeSeries(Connection connection, String name, String dataType, Arr for (int i = 0; i < stringTimeSeries.size(); i++) { String currentValue = stringTimeSeries.get(i); if (currentValue.length() == 0) { - //System.err.println("WARNING: double column '" + name + "' value[" + i + "] is empty."); this.add(Double.NaN); emptyValues++; continue; @@ -148,6 +153,10 @@ public interface TimeStepCalculation { double compute(int i); } + public void setTemporary(boolean temp) { + this.temporary = temp; + } + public static DoubleTimeSeries computed(String name, String dataType, int length, TimeStepCalculation calculation) { double[] data = new double[length]; for (int i = 0; i < length; i++) @@ -391,6 +400,8 @@ public double[] sliceCopy(int from, int to) { public void updateDatabase(Connection connection, int flightId) { //System.out.println("Updating database for " + this); + if (this.temporary) + return; try { if (typeId == -1) setTypeId(connection); diff --git a/src/main/java/org/ngafid/flights/Flight.java b/src/main/java/org/ngafid/flights/Flight.java index 0f6c38d1e..f907838c1 100644 --- a/src/main/java/org/ngafid/flights/Flight.java +++ b/src/main/java/org/ngafid/flights/Flight.java @@ -1268,7 +1268,7 @@ public static FlightTag editTag(Connection connection, FlightTag flightTag) thro } queryString.append("WHERE id = " + flightTag.hashCode()); - System.out.println("Query String Update: " + queryString.toString()); + LOG.info("Query String Update: " + queryString.toString()); PreparedStatement query = connection.prepareStatement(queryString.toString()); query.executeUpdate(); @@ -3403,7 +3403,7 @@ public void updateDatabase(Connection connection, int uploadId, int uploaderId, preparedStatement.setString(17, startDateTime); preparedStatement.setString(18, endDateTime); - System.out.println(preparedStatement); + LOG.info(preparedStatement.toString()); preparedStatement.executeUpdate(); ResultSet resultSet = preparedStatement.getGeneratedKeys(); @@ -3439,7 +3439,7 @@ public void updateDatabase(Connection connection, int uploadId, int uploaderId, ps.close(); } else { - System.err.println("ERROR: insertion of flight to the database did not result in an id. This should never happen."); + LOG.severe("ERROR: insertion of flight to the database did not result in an id. 
This should never happen."); System.exit(1); } @@ -3467,10 +3467,9 @@ public void writeToFile(Connection connection, String filename) throws IOExcepti for (int i = 0; i < series.size(); i++) { String name = series.get(i).getName(); if (name.equals("AirportDistance") || name.equals("RunwayDistance") || series.get(i).getMin() == series.get(i).getMax()) { - System.out.println("Skipping column: '" + name + "'"); + LOG.warning("Skipping column: '" + name + "'"); continue; } - System.out.println("'" + name + "' min - max: " + (series.get(i).getMin() - series.get(i).getMax())); if (afterFirst) printWriter.print(","); printWriter.print(series.get(i).getName()); diff --git a/src/main/java/org/ngafid/flights/Parameters.java b/src/main/java/org/ngafid/flights/Parameters.java index e91db55ac..b97e3819b 100644 --- a/src/main/java/org/ngafid/flights/Parameters.java +++ b/src/main/java/org/ngafid/flights/Parameters.java @@ -123,7 +123,7 @@ public interface Parameters { /** * Strings that represent the parameters used in the Stall Index calculation */ - public static final String [] LOCI_DEPENDENCIES = {HDG, ROLL}; + public static final String [] LOCI_DEPENDENCIES = {HDG, ROLL, TAS_FTMIN}; // // use these for a real true airspeed (Shelbys method) /*GND_SPD, WIND_SPEED, WIND_DIRECTION};*/ public static final String [] SPIN_DEPENDENCIES = {IAS, VSPD_CALCULATED, NORM_AC, LAT_AC, ALT_AGL}; diff --git a/src/main/java/org/ngafid/flights/StringTimeSeries.java b/src/main/java/org/ngafid/flights/StringTimeSeries.java index 4545c1170..8966de829 100644 --- a/src/main/java/org/ngafid/flights/StringTimeSeries.java +++ b/src/main/java/org/ngafid/flights/StringTimeSeries.java @@ -49,7 +49,6 @@ public StringTimeSeries(String name, String dataType, int sizeHint) { this.timeSeries = new ArrayList(sizeHint); validCount = 0; - } public StringTimeSeries(String name, String dataType) { @@ -63,12 +62,15 @@ public StringTimeSeries(Connection connection, String name, String dataType) thr } public StringTimeSeries(Connection connection, String name, String dataType, ArrayList timeSeries) throws SQLException { - this.name = name; - this.dataType = dataType; - this.timeSeries = timeSeries; + this(name, dataType, timeSeries); setNameId(connection); setTypeId(connection); + } + public StringTimeSeries(String name, String dataType, ArrayList timeSeries) { + this.name = name; + this.dataType = dataType; + this.timeSeries = timeSeries; validCount = 0; for (int i = 0; i < timeSeries.size(); i++) { if (!timeSeries.get(i).equals("")) { @@ -134,7 +136,7 @@ public static StringTimeSeries getStringTimeSeries(Connection connection, int fl } private void setNameId(Connection connection) throws SQLException { - this.nameId = SeriesNames.getDoubleNameId(connection, name); + this.nameId = SeriesNames.getStringNameId(connection, name); } private void setTypeId(Connection connection) throws SQLException { @@ -209,7 +211,6 @@ public int validCount() { } public void updateDatabase(Connection connection, int flightId) { - //System.out.println("Updating database for " + this); try { if (nameId == -1) diff --git a/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java b/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java index 14651c96f..6786b8d61 100644 --- a/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java +++ b/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java @@ -10,6 +10,7 @@ import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; +import java.util.stream.Collectors; import 
java.util.stream.Stream; /** @@ -20,14 +21,9 @@ public class CSVFileProcessor extends FlightFileProcessor { private static final Logger LOG = Logger.getLogger(CSVFileProcessor.class.getName()); - private String airframeName; - private String startDateTime; - private String endDateTime; - private String airframeType; - private String suggestedTailNumber; - private String systemId; private final List headers; private final List dataTypes; + private final FlightMeta meta = new FlightMeta(); private final Upload upload; @@ -38,7 +34,8 @@ public CSVFileProcessor(Connection connection, InputStream stream, String filena headers = new ArrayList<>(); dataTypes = new ArrayList<>(); - this.airframeType = "Fixed Wing"; // Fixed Wing By default + meta.airframeType = "Fixed Wing"; // Fixed Wing By default + meta.filename = filename; } @Override @@ -52,51 +49,45 @@ public Stream parse() throws FlightProcessingException { try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(super.stream, StandardCharsets.UTF_8)); CSVReader csvReader = new CSVReader(bufferedReader)) { String fileInformation = getFlightInfo(bufferedReader); // Will read a line - updateAirframe(); - if (airframeName != null && airframeName.equals("ScanEagle")) { + if (meta.airframeName != null && meta.airframeName.equals("ScanEagle")) { scanEagleParsing(fileInformation); // TODO: Handle ScanEagle data } else { processFileInormation(fileInformation); bufferedReader.read(); // Skip first char (#) dataTypes = List.of(csvReader.readNext()); - headers = List.of(csvReader.readNext()); - csvValues = csvReader.readAll(); + headers = Arrays.stream(csvReader.readNext()).map(String::strip).collect(Collectors.toList()); } + + updateAirframe(); + + ArrayList> columns = new ArrayList<>(); + String[] firstRow = csvReader.peek(); + for (int i = 0; i < firstRow.length; i++) + columns.add(new ArrayList<>()); - int colIndex = 0; - String[] firstRow = csvValues.get(0); - for (String data : firstRow) { + String[] row = null; + while ((row = csvReader.readNext()) != null && row.length == firstRow.length) + for (int i = 0; i < row.length; i++) + columns.get(i).add(row[i].trim()); + + for (int i = 0; i < columns.size(); i++) { + ArrayList column = columns.get(i); + String name = headers.get(i); + String dataType = dataTypes.get(i); try { - Double.parseDouble(data); - doubleTimeSeries.put(headers.get(colIndex), new DoubleTimeSeries(headers.get(colIndex), dataTypes.get(colIndex))); + Double.parseDouble(column.get(0)); + doubleTimeSeries.put(name, new DoubleTimeSeries(name, dataType, column)); } catch (NumberFormatException e) { - stringTimeSeries.put(headers.get(colIndex), new StringTimeSeries(headers.get(colIndex), dataTypes.get(colIndex))); + stringTimeSeries.put(name, new StringTimeSeries(name, dataType, column)); } - - colIndex++; } - List finalHeaders = headers; - csvValues.forEach(row -> { - for (int i = 0; i < row.length; i++) { - String header = finalHeaders.get(i); - String value = row[i]; - - try { - doubleTimeSeries.get(header).add(Double.parseDouble(value)); - } catch (NumberFormatException e) { - stringTimeSeries.get(header).add(value); - } - } - }); - - } catch (IOException | FatalFlightFileException | CsvException e) { throw new FlightProcessingException(e); } - FlightBuilder builder = new FlightBuilder(new FlightMeta(), doubleTimeSeries, stringTimeSeries); + FlightBuilder builder = new FlightBuilder(meta, doubleTimeSeries, stringTimeSeries); return Stream.of(builder); } @@ -106,9 +97,9 @@ public Stream parse() throws 
FlightProcessingException { * Updates the airframe type if airframe name does not belong to fixed wing */ private void updateAirframe() { - if (airframeName.equals("R44") || airframeName.equals("Robinson R44")) { - airframeName = "R44"; - airframeType = "Rotorcraft"; + if (meta.airframeName.equals("R44") || meta.airframeName.equals("Robinson R44")) { + meta.airframeName = "R44"; + meta.airframeType = "Rotorcraft"; } } @@ -121,7 +112,6 @@ private void updateAirframe() { */ private String getFlightInfo(BufferedReader reader) throws FatalFlightFileException, IOException { String fileInformation = reader.readLine(); - System.out.println("File information = " + fileInformation); if (fileInformation == null || fileInformation.trim().length() == 0) { throw new FatalFlightFileException("The flight file was empty."); @@ -131,8 +121,8 @@ private String getFlightInfo(BufferedReader reader) throws FatalFlightFileExcept if (fileInformation.startsWith("DID_")) { LOG.info("CAME FROM A SCANEAGLE! CAN CALCULATE SUGGESTED TAIL/SYSTEM ID FROM FILENAME"); - this.airframeName = "ScanEagle"; - this.airframeType = "UAS Fixed Wing"; + meta.airframeName = "ScanEagle"; + meta.airframeType = "UAS Fixed Wing"; } else { throw new FatalFlightFileException("First line of the flight file should begin with a '#' and contain flight recorder information."); } @@ -158,40 +148,40 @@ private void processFileInormation(String fileInformation) throws FatalFlightFil // TODO: Create some sort of automatic mapping for synonomous airframe names. if (key.equals("airframe_name")) { - airframeName = value.substring(1, value.length() - 1); + meta.airframeName = value.substring(1, value.length() - 1); //throw an error for 'Unknown Aircraft' - if (airframeName.equals("Unknown Aircraft")) { + if (meta.airframeName.equals("Unknown Aircraft")) { throw new FatalFlightFileException("Flight airframe name was 'Unknown Aircraft', please fix and re-upload so the flight can be properly identified and processed."); } - if (airframeName.equals("Diamond DA 40")) { - airframeName = "Diamond DA40"; - } else if ((airframeName.equals("Garmin Flight Display") || airframeName.equals("Robinson R44 Raven I")) && upload.getFleetId() == 1 /*This is a hack for UND who has their airframe names set up incorrectly for their helicopters*/) { - airframeName = "R44"; - } else if (airframeName.equals("Garmin Flight Display")) { + if (meta.airframeName.equals("Diamond DA 40")) { + meta.airframeName = "Diamond DA40"; + } else if ((meta.airframeName.equals("Garmin Flight Display") || meta.airframeName.equals("Robinson R44 Raven I")) && upload.getFleetId() == 1 /*This is a hack for UND who has their airframe names set up incorrectly for their helicopters*/) { + meta.airframeName = "R44"; + } else if (meta.airframeName.equals("Garmin Flight Display")) { throw new FatalFlightFileException("Flight airframe name was 'Garmin Flight Display' which does not specify what airframe type the flight was, please fix and re-upload so the flight can be properly identified and processed."); } - if (airframeName.equals("Cirrus SR22 (3600 GW)")) { - airframeName = "Cirrus SR22"; + if (meta.airframeName.equals("Cirrus SR22 (3600 GW)")) { + meta.airframeName = "Cirrus SR22"; } - if (Airframes.FIXED_WING_AIRFRAMES.contains(airframeName) || airframeName.contains("Garmin")) { - airframeType = "Fixed Wing"; - } else if (airframeName.equals("R44") || airframeName.equals("Robinson R44")) { - airframeName = "R44"; - airframeType = "Rotorcraft"; + if 
(Airframes.FIXED_WING_AIRFRAMES.contains(meta.airframeName) || meta.airframeName.contains("Garmin")) { + meta.airframeType = "Fixed Wing"; + } else if (meta.airframeName.equals("R44") || meta.airframeName.equals("Robinson R44")) { + meta.airframeName = "R44"; + meta.airframeType = "Rotorcraft"; } else { - System.err.println("Could not import flight because the aircraft type was unknown for the following airframe name: '" + airframeName + "'"); + System.err.println("Could not import flight because the aircraft type was unknown for the following airframe name: '" + meta.airframeName + "'"); System.err.println("Please add this to the the `airframe_type` table in the database and update this method."); System.exit(1); } } else if (key.equals("system_id")) { - systemId = value.substring(1, value.length() - 1); + meta.systemId = value.substring(1, value.length() - 1); } } } catch (Exception e) { @@ -219,17 +209,17 @@ private void scanEagleParsing(String fileInformation) { */ private void scanEagleSetTailAndID() { String[] filenameParts = filename.split("_"); - startDateTime = filenameParts[0]; - endDateTime = startDateTime; - LOG.log(Level.INFO, "start date: '{0}'", startDateTime); - LOG.log(Level.INFO, "end date: '{0}'", startDateTime); + meta.startDateTime = filenameParts[0]; + meta.endDateTime = meta.startDateTime; + LOG.log(Level.INFO, "start date: '{0}'", meta.startDateTime); + LOG.log(Level.INFO, "end date: '{0}'", meta.startDateTime); //UND doesn't have the systemId for UAS anywhere in the filename or file (sigh) - suggestedTailNumber = "N" + filenameParts[1] + "ND"; - systemId = suggestedTailNumber; + meta.suggestedTailNumber = "N" + filenameParts[1] + "ND"; + meta.systemId = meta.suggestedTailNumber; - LOG.log(Level.INFO, "suggested tail number: '{0}'", suggestedTailNumber); - LOG.log(Level.INFO, "system id: '{0}'", systemId); + LOG.log(Level.INFO, "suggested tail number: '{0}'", meta.suggestedTailNumber); + LOG.log(Level.INFO, "system id: '{0}'", meta.systemId); } @@ -238,7 +228,6 @@ private void scanEagleHeaders(String fileInformation) { String headersLine = fileInformation; headers.addAll(Arrays.asList(headersLine.split("\\,", -1))); headers.replaceAll(String::trim); - System.out.println("headers are:\n" + headers.toString()); //scan eagle files have no data types, set all to "" for (int i = 0; i < headers.size(); i++) { dataTypes.add("none"); diff --git a/src/main/java/org/ngafid/flights/process/DependencyGraph.java b/src/main/java/org/ngafid/flights/process/DependencyGraph.java index 2b48dc308..9a698c0d3 100644 --- a/src/main/java/org/ngafid/flights/process/DependencyGraph.java +++ b/src/main/java/org/ngafid/flights/process/DependencyGraph.java @@ -8,6 +8,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.*; import java.util.logging.Logger; +import java.util.stream.Collectors; import org.ngafid.flights.FatalFlightFileException; import org.ngafid.flights.MalformedFlightFileException; @@ -39,6 +40,7 @@ void disableChildren() { enabled.set(false); if (step.isRequired()) { String reason = step.explainApplicability(); + LOG.severe(builder.doubleTimeSeries.keySet().toString()); LOG.severe("Required step " + step.toString() + " has been disabled for :\n " + reason); exceptions.add(new FatalFlightFileException(reason)); } @@ -98,7 +100,7 @@ public Void compute() { for (var requiredNode : node.requires) { getTask(requiredNode).join(); } - + if (node.enabled.get()) node.compute(); @@ -128,7 +130,9 @@ public DummyStep(FlightBuilder builder) { public boolean 
airframeIsValid(String airframe) { return true; } // Left blank intentionally - public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException {} + public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + LOG.info("Computed dummy step!"); + } } private void nodeConflictError(ProcessStep first, ProcessStep second) throws FatalFlightFileException { @@ -202,8 +206,7 @@ public void compute() throws FlightProcessingException { tasks.put(node, task); } } - - ForkJoinPool ex = new ForkJoinPool(); + ForkJoinPool ex = new ForkJoinPool(8); try { ex.invoke(new RecursiveTask() { @@ -211,8 +214,7 @@ public Void compute() { initialTasks .stream() .map(x -> x.fork()) - .map(x -> x.join()) - .count(); + .forEach(ForkJoinTask::join); return null; } }); diff --git a/src/main/java/org/ngafid/flights/process/FlightBuilder.java b/src/main/java/org/ngafid/flights/process/FlightBuilder.java index e64eca828..69fcd45e9 100644 --- a/src/main/java/org/ngafid/flights/process/FlightBuilder.java +++ b/src/main/java/org/ngafid/flights/process/FlightBuilder.java @@ -10,9 +10,8 @@ import java.util.stream.Collectors; import org.ngafid.flights.*; -import org.ngafid.flights.process.*; -import org.ngafid.flights.process.FlightMeta; import static org.ngafid.flights.process.ProcessStep.required; +import org.ngafid.flights.process.*; public class FlightBuilder { @@ -68,7 +67,6 @@ public synchronized FlightBuilder updateProcessingStatus(int processingStatus) { ProcessLaggedAltMSL::new, ProcessStallIndex::new, ProcessTotalFuel::new, - ProcessAirportProximity::new, ProcessDivergence::new, ProcessLOCI::new, ProcessItinerary::new diff --git a/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java b/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java index 11a6564ca..b183bff38 100644 --- a/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java +++ b/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java @@ -104,7 +104,6 @@ private FlightFileProcessor create(ZipEntry entry) { int index = filename.lastIndexOf('.'); String extension = index >= 0 ? 
filename.substring(index + 1).toLowerCase() : ""; - System.out.println("Extension: " + extension); Factory f = factories.get(extension); if (f != null) { try { @@ -120,7 +119,9 @@ private FlightFileProcessor create(ZipEntry entry) { } public Flight insert(Flight flight) { - flight.updateDatabase(connection, upload.getId(), upload.getUploaderId(), upload.getFleetId()); + synchronized (connection) { + flight.updateDatabase(connection, upload.getId(), upload.getUploaderId(), upload.getFleetId()); + } return flight; } diff --git a/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java b/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java index de83951f0..291d6a96e 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java +++ b/src/main/java/org/ngafid/flights/process/ProcessAirportProximity.java @@ -17,7 +17,7 @@ public class ProcessAirportProximity extends ProcessStep { private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(LATITUDE, LONGITUDE, ALT_AGL); - private static Set OUTPUT_COLUMNS = Set.of(NEAREST_RUNWAY, AIRPORT_DISTANCE, NEAREST_RUNWAY, RUNWAY_DISTANCE); + private static Set OUTPUT_COLUMNS = Set.of(NEAREST_RUNWAY, AIRPORT_DISTANCE, RUNWAY_DISTANCE, NEAREST_AIRPORT); private final static double MAX_AIRPORT_DISTANCE_FT = 10000; private final static double MAX_RUNWAY_DISTANCE_FT = 100; @@ -27,7 +27,7 @@ public ProcessAirportProximity(Connection connection, FlightBuilder builder) { public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } public Set getRequiredStringColumns() { return Collections.emptySet(); } - public Set getRequiredColumns() { return Collections.emptySet(); } + public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } public Set getOutputColumns() { return OUTPUT_COLUMNS; } public boolean airframeIsValid(String airframe) { return true; } @@ -87,5 +87,9 @@ public void compute() throws SQLException, MalformedFlightFileException, FatalFl } + stringTS.put(NEAREST_RUNWAY, nearestRunwayTS); + stringTS.put(NEAREST_AIRPORT, nearestAirportTS); + doubleTS.put(RUNWAY_DISTANCE, runwayDistanceTS); + doubleTS.put(AIRPORT_DISTANCE, airportDistanceTS); } } diff --git a/src/main/java/org/ngafid/flights/process/ProcessItinerary.java b/src/main/java/org/ngafid/flights/process/ProcessItinerary.java index 735c518bc..898fa2338 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessItinerary.java +++ b/src/main/java/org/ngafid/flights/process/ProcessItinerary.java @@ -18,7 +18,7 @@ public class ProcessItinerary extends ProcessStep { private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(ALT_AGL, LATITUDE, LONGITUDE, AIRPORT_DISTANCE, RUNWAY_DISTANCE, GND_SPD, E1_RPM); - private static Set REQUIRED_STRING_COLUMNS = Set.of(AIRPORT_DISTANCE, NEAREST_AIRPORT, NEAREST_RUNWAY); + private static Set REQUIRED_STRING_COLUMNS = Set.of(NEAREST_AIRPORT, NEAREST_RUNWAY); private static Set OUTPUT_COLUMNS = Set.of("_itinerary"); // This is a fake column; never actually created. 
public ProcessItinerary(Connection connection, FlightBuilder builder) { diff --git a/src/main/java/org/ngafid/flights/process/ProcessLOCI.java b/src/main/java/org/ngafid/flights/process/ProcessLOCI.java index 7ac391966..7515afb97 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessLOCI.java +++ b/src/main/java/org/ngafid/flights/process/ProcessLOCI.java @@ -9,8 +9,8 @@ import java.time.format.DateTimeFormatter; import static org.ngafid.flights.Parameters.*; +import static org.ngafid.flights.Airframes.*; import org.ngafid.common.*; -import org.ngafid.flights.calculations.CalculatedDoubleTimeSeries; import org.ngafid.flights.StringTimeSeries; import org.ngafid.flights.DoubleTimeSeries; import org.ngafid.flights.MalformedFlightFileException; diff --git a/src/main/java/org/ngafid/flights/process/ProcessLaggedAltMSL.java b/src/main/java/org/ngafid/flights/process/ProcessLaggedAltMSL.java index 5224b270e..d26d711cb 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessLaggedAltMSL.java +++ b/src/main/java/org/ngafid/flights/process/ProcessLaggedAltMSL.java @@ -8,9 +8,9 @@ import java.nio.file.NoSuchFileException; import org.ngafid.flights.Flight; -import org.ngafid.terrain.TerrainCache; import org.ngafid.flights.DoubleTimeSeries; import static org.ngafid.flights.Parameters.*; +import static org.ngafid.flights.Airframes.*; import org.ngafid.flights.FatalFlightFileException; import org.ngafid.flights.MalformedFlightFileException; diff --git a/src/main/java/org/ngafid/flights/process/ProcessStallIndex.java b/src/main/java/org/ngafid/flights/process/ProcessStallIndex.java index da4a5f0c1..dc62d42f8 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessStallIndex.java +++ b/src/main/java/org/ngafid/flights/process/ProcessStallIndex.java @@ -9,8 +9,8 @@ import java.time.format.DateTimeFormatter; import static org.ngafid.flights.Parameters.*; +import static org.ngafid.flights.Airframes.*; import org.ngafid.common.*; -import org.ngafid.flights.calculations.CalculatedDoubleTimeSeries; import org.ngafid.flights.StringTimeSeries; import org.ngafid.flights.DoubleTimeSeries; import org.ngafid.flights.MalformedFlightFileException; @@ -21,6 +21,7 @@ public class ProcessStallIndex extends ProcessStep { private static final Logger LOG = Logger.getLogger(ProcessStallIndex.class.getName()); public static Set REQUIRED_DOUBLE_COLUMNS = Set.of(STALL_DEPENDENCIES); + public static Set OUTPUT_COLUMNS = Set.of(STALL_PROB, TAS_FTMIN, VSPD_CALCULATED, CAS); public ProcessStallIndex(Connection connection, FlightBuilder builder) { super(connection, builder); @@ -29,7 +30,7 @@ public ProcessStallIndex(Connection connection, FlightBuilder builder) { public Set getRequiredDoubleColumns() { return REQUIRED_DOUBLE_COLUMNS; } public Set getRequiredStringColumns() { return Collections.emptySet(); } public Set getRequiredColumns() { return REQUIRED_DOUBLE_COLUMNS; } - public Set getOutputColumns() { return Collections.emptySet(); } + public Set getOutputColumns() { return OUTPUT_COLUMNS; } public boolean airframeIsValid(String airframe) { return true; } @@ -48,11 +49,13 @@ public void compute() throws SQLException, MalformedFlightFileException, FatalFl return iasValue; } ); + cas.setTemporary(true); doubleTS.put(CAS, cas); } DoubleTimeSeries vspdCalculated = DoubleTimeSeries.computed(VSPD_CALCULATED, "ft/min", length, new VSPDRegression(doubleTS.get(ALT_B))); + vspdCalculated.setTemporary(true); doubleTS.put(VSPD_CALCULATED, vspdCalculated); DoubleTimeSeries baroA = doubleTS.get(BARO_A); @@ -72,6 +75,7 @@ 
public void compute() throws SQLException, MalformedFlightFileException, FatalFl index -> { return (airspeed.get(index) * Math.pow(densityRatio.get(index), -0.5)) * ((double) 6076 / 60); }); + tasFtMin.setTemporary(true); DoubleTimeSeries pitch = doubleTS.get(PITCH); DoubleTimeSeries aoaSimple = DoubleTimeSeries.computed(AOA_SIMPLE, "degrees", length, @@ -92,5 +96,6 @@ public void compute() throws SQLException, MalformedFlightFileException, FatalFl } ); doubleTS.put(STALL_PROB, stallIndex); + doubleTS.put(TAS_FTMIN, tasFtMin); } } diff --git a/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java b/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java index e20912ac5..3792ddd5b 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java +++ b/src/main/java/org/ngafid/flights/process/ProcessStartEndTime.java @@ -4,6 +4,7 @@ import java.util.Set; import java.sql.Connection; import java.sql.SQLException; +import java.util.Arrays; import java.util.Collections; import java.util.logging.Logger; import java.time.format.DateTimeFormatter; diff --git a/src/main/java/org/ngafid/flights/process/ProcessTotalFuel.java b/src/main/java/org/ngafid/flights/process/ProcessTotalFuel.java index 325eb631a..24ff74872 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessTotalFuel.java +++ b/src/main/java/org/ngafid/flights/process/ProcessTotalFuel.java @@ -1,6 +1,7 @@ package org.ngafid.flights.process; import java.util.Set; +import java.util.logging.Logger; import java.util.Collections; import java.sql.Connection; import java.sql.SQLException; @@ -8,13 +9,15 @@ import java.nio.file.NoSuchFileException; import org.ngafid.flights.Flight; -import org.ngafid.terrain.TerrainCache; import org.ngafid.flights.DoubleTimeSeries; import static org.ngafid.flights.Parameters.*; +import static org.ngafid.flights.Airframes.*; import org.ngafid.flights.FatalFlightFileException; import org.ngafid.flights.MalformedFlightFileException; public class ProcessTotalFuel extends ProcessStep { + private static final Logger LOG = Logger.getLogger(ProcessTotalFuel.class.getName()); + private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(FUEL_QTY_LEFT, FUEL_QTY_RIGHT); private static Set OUTPUT_COLUMNS = Set.of(TOTAL_FUEL); private static Set AIRFRAME_BLACKLIST = Set.of(AIRFRAME_SCAN_EAGLE, AIRFRAME_DJI); @@ -29,14 +32,11 @@ public ProcessTotalFuel(Connection connection, FlightBuilder builder) { public Set getOutputColumns() { return OUTPUT_COLUMNS; } public boolean airframeIsValid(String airframe) { - for (String blacklisted : AIRFRAME_BLACKLIST) - if (airframe.contains(blacklisted)) - return false; - - return true; + return !AIRFRAME_BLACKLIST.contains(airframe); } public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { + LOG.info("Computing total fuel..."); double[] totalFuel = null; for (var columnName : REQUIRED_DOUBLE_COLUMNS) { diff --git a/src/main/java/org/ngafid/terrain/SRTMTile.java b/src/main/java/org/ngafid/terrain/SRTMTile.java index 48d1e55da..c330de94c 100644 --- a/src/main/java/org/ngafid/terrain/SRTMTile.java +++ b/src/main/java/org/ngafid/terrain/SRTMTile.java @@ -7,8 +7,11 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.NoSuchFileException; +import java.util.logging.*; public class SRTMTile { + private static final Logger LOG = Logger.getLogger(SRTMTile.class.getName()); + public static final int srtmTileSize = 1201; public static final double srtmGridSize = 1.0/(srtmTileSize - 1.0); @@ -29,8 +32,8 
@@ public SRTMTile(int latitudeS, int longitudeW) throws NoSuchFileException { directory = TerrainCache.getDirectoryFromLatLon(latitudeS, longitudeW); filename = TerrainCache.getFilenameFromLatLon(latitudeS, longitudeW); - System.out.println("loading terrain from: '" + directory + "/" + filename + "'"); - System.out.println("lat and lon for SW corner -- latitude_s: " + latitudeS + ", longitude_w: " + longitudeW); + LOG.info("loading terrain from: '" + directory + "/" + filename + "'"); + LOG.info("lat and lon for SW corner -- latitude_s: " + latitudeS + ", longitude_w: " + longitudeW); Path path = Paths.get(TerrainCache.TERRAIN_DIRECTORY + "/" + directory + "/" + filename); //Path path = Paths.get(TerrainCache.getTerrainDirectory() + "/" + filename); @@ -79,11 +82,11 @@ public SRTMTile(int latitudeS, int longitudeW) throws NoSuchFileException { } } - System.out.println("read " + bytes.length + " bytes."); - System.out.println("final offset: " + offset); + LOG.info("read " + bytes.length + " bytes."); + LOG.info("final offset: " + offset); - System.out.println("max: " + max); - System.out.println("min: " + min); + LOG.info("max: " + max); + LOG.info("min: " + min); } public double getAltitudeFt(double latitude, double longitude) { diff --git a/src/main/java/org/ngafid/terrain/TerrainCache.java b/src/main/java/org/ngafid/terrain/TerrainCache.java index 10ad74721..2a648b2bf 100644 --- a/src/main/java/org/ngafid/terrain/TerrainCache.java +++ b/src/main/java/org/ngafid/terrain/TerrainCache.java @@ -49,7 +49,7 @@ public static String getDirectoryFromLatLon(int latitude, int longitude) { ilongitude /= 6; ilongitude += 1; - System.out.println("iLatitude: " + ilatitude + ", iLongitude: " + ilongitude); + // System.out.println("iLatitude: " + ilatitude + ", iLongitude: " + ilongitude); //note that ascii 65 == 'A' directory += Character.toString((char)(65 + ilatitude)) + ilongitude; @@ -92,7 +92,7 @@ public static int getAltitudeFt(double msl, double latitude, double longitude) t SRTMTile tile = tiles[latIndex][lonIndex]; if (tile == null) { - System.out.println("tiles[" + latIndex + "][" + lonIndex + "] not initialized, loading!"); + // System.out.println("tiles[" + latIndex + "][" + lonIndex + "] not initialized, loading!"); tile = new SRTMTile(90 - latIndex, lonIndex - 180); tiles[latIndex][lonIndex] = tile; } From 5d540ba4b16342f81fa3012885f495a9b93f2766 Mon Sep 17 00:00:00 2001 From: Joshua Karns Date: Tue, 2 May 2023 11:16:06 -0400 Subject: [PATCH 11/12] Batching database access --- .../java/org/ngafid/CalculateProximity.java | 91 +++++---- src/main/java/org/ngafid/ProcessUpload.java | 73 +++---- .../org/ngafid/flights/DoubleTimeSeries.java | 121 +++++------ .../java/org/ngafid/flights/ErrorMessage.java | 4 +- src/main/java/org/ngafid/flights/Flight.java | 191 +++++++++++------- .../java/org/ngafid/flights/FlightError.java | 2 - .../org/ngafid/flights/FlightWarning.java | 23 ++- .../java/org/ngafid/flights/Itinerary.java | 95 +++++---- .../org/ngafid/flights/StringTimeSeries.java | 42 ++-- .../flights/process/CSVFileProcessor.java | 47 +++-- .../flights/process/DependencyGraph.java | 24 +-- .../flights/process/FlightFileProcessor.java | 26 ++- .../flights/process/ProcessItinerary.java | 11 +- .../ngafid/flights/process/ProcessLOCI.java | 2 +- .../ngafid/flights/process/ProcessStep.java | 2 +- 15 files changed, 406 insertions(+), 348 deletions(-) diff --git a/src/main/java/org/ngafid/CalculateProximity.java b/src/main/java/org/ngafid/CalculateProximity.java index dac3bdd5e..3d3a26b95 100644 --- 
a/src/main/java/org/ngafid/CalculateProximity.java +++ b/src/main/java/org/ngafid/CalculateProximity.java @@ -42,7 +42,10 @@ import org.ngafid.airports.Airports; +import java.util.logging.*; + public class CalculateProximity { + private static final Logger LOG = Logger.getLogger(CalculateProximity.class.getName()); //Proximity events (and potentially other complicated event calculations) will have negative IDs so they //can be excluded from the regular event calculation process @@ -96,7 +99,7 @@ public FlightTimeLocation(Connection connection, int fleetId, int flightId, int this.endDateTime = endDateTime; //first check and see if the flight had a start and end time, if not we cannot process it - //System.out.println("Getting info for flight with start date time: " + startDateTime + " and end date time: " + endDateTime); + //LOG.info("Getting info for flight with start date time: " + startDateTime + " and end date time: " + endDateTime); if (startDateTime == null || endDateTime == null) { //flight didnt have a start or end time @@ -108,11 +111,11 @@ public FlightTimeLocation(Connection connection, int fleetId, int flightId, int Pair minMaxRPM1 = DoubleTimeSeries.getMinMax(connection, flightId, "E1 RPM"); Pair minMaxRPM2 = DoubleTimeSeries.getMinMax(connection, flightId, "E2 RPM"); - System.out.println("minMaxRPM1: " + minMaxRPM1); - System.out.println("minMaxRPM2: " + minMaxRPM2); + LOG.info("minMaxRPM1: " + minMaxRPM1); + LOG.info("minMaxRPM2: " + minMaxRPM2); - if (minMaxRPM1 != null) System.out.println("min max E1 RPM: " + minMaxRPM1.first() + ", " + minMaxRPM1.second()); - if (minMaxRPM2 != null) System.out.println("min max E2 RPM: " + minMaxRPM2.first() + ", " + minMaxRPM2.second()); + if (minMaxRPM1 != null) LOG.info("min max E1 RPM: " + minMaxRPM1.first() + ", " + minMaxRPM1.second()); + if (minMaxRPM2 != null) LOG.info("min max E2 RPM: " + minMaxRPM2.first() + ", " + minMaxRPM2.second()); if ((minMaxRPM1 == null && minMaxRPM2 == null) //both RPM values are null, can't calculate exceedence || (minMaxRPM2 == null && minMaxRPM1.second() < 800) //RPM2 is null, RPM1 is < 800 (RPM1 would not be null as well) @@ -127,8 +130,8 @@ public FlightTimeLocation(Connection connection, int fleetId, int flightId, int Pair minMaxLatitude = DoubleTimeSeries.getMinMax(connection, flightId, "Latitude"); Pair minMaxLongitude = DoubleTimeSeries.getMinMax(connection, flightId, "Longitude"); - //if (minMaxLatitude != null) System.out.println("min max latitude: " + minMaxLatitude.first() + ", " + minMaxLatitude.second()); - //if (minMaxLongitude != null) System.out.println("min max longitude: " + minMaxLongitude.first() + ", " + minMaxLongitude.second()); + //if (minMaxLatitude != null) LOG.info("min max latitude: " + minMaxLatitude.first() + ", " + minMaxLatitude.second()); + //if (minMaxLongitude != null) LOG.info("min max longitude: " + minMaxLongitude.first() + ", " + minMaxLongitude.second()); if (minMaxLatitude == null || minMaxLongitude == null) { //flight didn't have latitude or longitude @@ -144,7 +147,7 @@ public FlightTimeLocation(Connection connection, int fleetId, int flightId, int //then check and see if this flight had alt MSL, if not we cannot calculate adjacency Pair minMaxAltMSL = DoubleTimeSeries.getMinMax(connection, flightId, "AltMSL"); - //if (minMaxAltMSL != null) System.out.println("min max alt MSL: " + minMaxAltMSL.first() + ", " + minMaxAltMSL.second()); + //if (minMaxAltMSL != null) LOG.info("min max alt MSL: " + minMaxAltMSL.first() + ", " + minMaxAltMSL.second()); if 
(minMaxAltMSL == null) { //flight didn't have alt MSL @@ -185,7 +188,7 @@ public boolean getSeriesData(Connection connection) throws SQLException { //check to see if we could get these columns if (dateSeries == null || timeSeries == null || utcOffsetSeries == null) return false; - //System.out.println("date length: " + dateSeries.size() + ", time length: " + timeSeries.size() + ", utc length: " + utcOffsetSeries.size()); + //LOG.info("date length: " + dateSeries.size() + ", time length: " + timeSeries.size() + ", utc length: " + utcOffsetSeries.size()); int length = dateSeries.size(); epochTime = new long[length]; @@ -224,17 +227,17 @@ public boolean alreadyProcessed(Connection connection) throws SQLException { stmt.setInt(2, flightId); stmt.setInt(3, adjacencyEventDefinitionId); - System.out.println(stmt.toString()); + LOG.info(stmt.toString()); //if there was a flight processed entry for this flight it was already processed ResultSet resultSet = stmt.executeQuery(); if (resultSet.next()) { - System.out.println("already processed!"); + LOG.info("already processed!"); resultSet.close(); stmt.close(); return true; } else { - System.out.println("not already processed!"); + LOG.info("not already processed!"); resultSet.close(); stmt.close(); return false; @@ -246,17 +249,17 @@ public static boolean proximityAlreadyCalculated(Connection connection, FlightTi stmt.setInt(1, first.flightId); stmt.setInt(2, second.flightId); - System.out.println(stmt.toString()); + LOG.info(stmt.toString()); //if there was a flight processed entry for this flight it was already processed ResultSet resultSet = stmt.executeQuery(); if (resultSet.next()) { - System.out.println("proximity event already exists!"); + LOG.info("proximity event already exists!"); resultSet.close(); stmt.close(); return true; } else { - System.out.println("proximity does not already exist!"); + LOG.info("proximity does not already exist!"); resultSet.close(); stmt.close(); return false; @@ -281,7 +284,7 @@ public void updateWithEvent(Connection connection, Event event, String startDate stmt.setDouble(7, severity); stmt.setDouble(8, severity); stmt.setDouble(9, severity); - System.out.println(stmt.toString()); + LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); @@ -298,13 +301,13 @@ public static void processFlightWithError(Connection connection, int fleetId, in stmt.setInt(1, fleetId); stmt.setInt(2, flightId); stmt.setInt(3, adjacencyEventDefinitionId); - //System.out.println(stmt.toString()); + //LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); } public static void processFlight(Connection connection, Flight flight, UploadProcessedEmail uploadProcessedEmail) { - System.out.println("Processing flight: " + flight.getId() + ", " + flight.getFilename()); + LOG.info("Processing flight: " + flight.getId() + ", " + flight.getFilename()); int fleetId = flight.getFleetId(); int flightId = flight.getId(); @@ -323,10 +326,10 @@ public static void processFlight(Connection connection, Flight flight, UploadPro ArrayList potentialFlights = Flight.getFlights(connection, "(id != " + flightId + " AND start_timestamp <= UNIX_TIMESTAMP('" + flightInfo.endDateTime + "') AND end_timestamp >= UNIX_TIMESTAMP('" + flightInfo.startDateTime + "'))"); - System.out.println("Found " + potentialFlights.size() + " potential time matched flights."); - //System.out.println("Flight start time: " + flightInfo.startDateTime + ", end time: " + flightInfo.endDateTime); - //System.out.println("Flight latitude min: " + flightInfo.minLatitude + ", max: 
" + flightInfo.maxLatitude); - //System.out.println("Flight longitude min: " + flightInfo.minLongitude + ", max: " + flightInfo.maxLongitude); + LOG.info("Found " + potentialFlights.size() + " potential time matched flights."); + //LOG.info("Flight start time: " + flightInfo.startDateTime + ", end time: " + flightInfo.endDateTime); + //LOG.info("Flight latitude min: " + flightInfo.minLatitude + ", max: " + flightInfo.maxLatitude); + //LOG.info("Flight longitude min: " + flightInfo.minLongitude + ", max: " + flightInfo.maxLongitude); ArrayList eventList = new ArrayList<>(); String startTime = null; @@ -349,7 +352,7 @@ public static void processFlight(Connection connection, Flight flight, UploadPro int stopBuffer = 30; for (Flight otherFlight : potentialFlights) { - //System.out.println("\tmatched to flight with start time: " + otherFlight.getStartDateTime() + ", end time: " + otherFlight.getEndDateTime()); + //LOG.info("\tmatched to flight with start time: " + otherFlight.getStartDateTime() + ", end time: " + otherFlight.getEndDateTime()); timeMatchFlights++; FlightTimeLocation otherInfo = new FlightTimeLocation(connection, otherFlight.getFleetId(), otherFlight.getId(), otherFlight.getAirframeNameId(), otherFlight.getStartDateTime(), otherFlight.getEndDateTime()); @@ -360,15 +363,15 @@ public static void processFlight(Connection connection, Flight flight, UploadPro //see if proximity between these two flights was already calculated, if so we can skip if (FlightTimeLocation.proximityAlreadyCalculated(connection, otherInfo, flightInfo)) { - System.out.println("Not re-performing proximity calculation"); + LOG.info("Not re-performing proximity calculation"); continue; } - //System.out.println("\t\tother latitude min: " + otherInfo.minLatitude + ", max: " + otherInfo.maxLatitude); - //System.out.println("\t\tother longitude min: " + otherInfo.minLongitude + ", max: " + otherInfo.maxLongitude); + //LOG.info("\t\tother latitude min: " + otherInfo.minLatitude + ", max: " + otherInfo.maxLatitude); + //LOG.info("\t\tother longitude min: " + otherInfo.minLongitude + ", max: " + otherInfo.maxLongitude); if (flightInfo.hasRegionOverlap(otherInfo)) { - //System.out.println("\t\tLatitude/Longitude overlap!"); + //LOG.info("\t\tLatitude/Longitude overlap!"); locMatchFlights++; if (!flightInfo.hasSeriesData()) { @@ -389,7 +392,7 @@ public static void processFlight(Connection connection, Flight flight, UploadPro int i = 30, j = 30; int totalMatches = 0; - //System.out.println("\t\tgot series data for both flights, iterate over times"); + //LOG.info("\t\tgot series data for both flights, iterate over times"); while (i < flightInfo.epochTime.length && j < otherInfo.epochTime.length) { //skip entries where the epoch time was 0 (the date/time was null) if (flightInfo.epochTime[i] == 0) { @@ -420,13 +423,13 @@ public static void processFlight(Connection connection, Flight flight, UploadPro if (distanceFt < 1000.0 && flightInfo.altitudeAGL[i] >= 50 && otherInfo.altitudeAGL[j] >= 50 && flightInfo.indicatedAirspeed[i] > 20 && otherInfo.indicatedAirspeed[j] > 20) { /* - System.out.println("\t\t\tother time[" + j + "]: " + otherInfo.epochTime[j] + " == flight time[" + i + "]: " + flightInfo.epochTime[i] + LOG.info("\t\t\tother time[" + j + "]: " + otherInfo.epochTime[j] + " == flight time[" + i + "]: " + flightInfo.epochTime[i] + ", flight lat/lon: " + flightInfo.latitude[i] + " " + flightInfo.longitude[i] + ", other lat/lon: " + otherInfo.latitude[j] + " " + otherInfo.longitude[j] + " -- distance: " + distanceFt ); 
*/ - //System.out.println("\t\t\t\t\tflight alt AGL: " + flightInfo.altitudeAGL[i] + ", other alt AGL: " + otherInfo.altitudeAGL[j] + ", final distance: " + distanceFt); + //LOG.info("\t\t\t\t\tflight alt AGL: " + flightInfo.altitudeAGL[i] + ", other alt AGL: " + otherInfo.altitudeAGL[j] + ", final distance: " + distanceFt); //startTime is null if an exceedence is not being tracked if (startTime == null) { @@ -438,7 +441,7 @@ public static void processFlight(Connection connection, Flight flight, UploadPro otherStartLine = j; severity = distanceFt; - //System.out.println("start date time: " + startTime + ", start line number: " + startLine); + //LOG.info("start date time: " + startTime + ", start line number: " + startLine); } endLine = i; otherEndLine = j; @@ -500,7 +503,7 @@ public static void processFlight(Connection connection, Flight flight, UploadPro j++; totalMatches++; } - //System.out.println("\t\tseries matched time on " + totalMatches + " rows"); + //LOG.info("\t\tseries matched time on " + totalMatches + " rows"); //if there was an event still going when one flight ended, create it and add it to the list @@ -517,12 +520,12 @@ public static void processFlight(Connection connection, Flight flight, UploadPro //end the loop processing all flights for (Event event : eventList) { - System.out.println("\t" + event.toString()); + LOG.info("\t" + event.toString()); eventsFound++; uploadProcessedEmail.addProximity(flightFilename, "flight " + flightId + ", '" + flightFilename + "' - had a proximity event with flight " + event.getOtherFlightId() + " from " + event.getStartTime() + " to " + event.getEndTime()); } - System.out.println("\n"); + LOG.info("\n"); //Step 2: export the events and their statistics in the database @@ -540,8 +543,8 @@ public static void processFlight(Connection connection, Flight flight, UploadPro } else if (event.getEndTime() != null) { EventStatistics.updateEventStatistics(connection, fleetId, airframeNameId, adjacencyEventDefinitionId, event.getEndTime(), event.getSeverity(), event.getDuration()); } else { - System.out.println("WARNING: could not update event statistics for event: " + event); - System.out.println("WARNING: event start and end time were both null."); + LOG.info("WARNING: could not update event statistics for event: " + event); + LOG.info("WARNING: event start and end time were both null."); } double currentSeverity = eventList.get(i).getSeverity(); @@ -567,7 +570,7 @@ public static void processFlight(Connection connection, Flight flight, UploadPro stmt.setDouble(8, sumSeverity); stmt.setDouble(9, minSeverity); stmt.setDouble(10, maxSeverity); - //System.out.println(stmt.toString()); + //LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); @@ -578,7 +581,7 @@ public static void processFlight(Connection connection, Flight flight, UploadPro stmt.setInt(1, fleetId); stmt.setInt(2, flightId); stmt.setInt(3, adjacencyEventDefinitionId); - //System.out.println(stmt.toString()); + //LOG.info(stmt.toString()); stmt.executeUpdate(); stmt.close(); @@ -615,9 +618,9 @@ public static void calculateProximity(Connection connection, int uploadId, Uploa double avgTimeMatchedFlights = ((double)timeMatchFlights / (double) count); double avgLocationMatchedFlights = ((double)locMatchFlights / (double)count); - System.out.println("calculated " + count + " proximity evaluations in " + elapsed_seconds + " seconds, averaged: " + average_seconds + " seconds per flight"); - System.out.println("avg time matched flights: " + avgTimeMatchedFlights + ", avg loc matched 
flights: " + avgLocationMatchedFlights); - System.out.println("proximity events found:" + eventsFound); + LOG.info("calculated " + count + " proximity evaluations in " + elapsed_seconds + " seconds, averaged: " + average_seconds + " seconds per flight"); + LOG.info("avg time matched flights: " + avgTimeMatchedFlights + ", avg loc matched flights: " + avgLocationMatchedFlights); + LOG.info("proximity events found:" + eventsFound); uploadProcessedEmail.setProximityElapsedTime(elapsed_seconds, average_seconds, avgTimeMatchedFlights, avgLocationMatchedFlights); } @@ -648,9 +651,9 @@ public static void main(String[] arguments) { Instant end = Instant.now(); double elapsed_seconds = (double)Duration.between(start, end).toMillis() / 1000.0; double average_seconds = ((double) elapsed_seconds) / (double)count; - System.out.println("calculated " + count + " adjacency evaluations in " + elapsed_seconds + " seconds, averaged: " + average_seconds + " seconds per flight"); - System.out.println("avg time matched flights: " + ((double)timeMatchFlights / (double) count) + ", avg loc matched flights: " + ((double)locMatchFlights / (double)count)); - System.out.println("evnets found:" + eventsFound); + LOG.info("calculated " + count + " adjacency evaluations in " + elapsed_seconds + " seconds, averaged: " + average_seconds + " seconds per flight"); + LOG.info("avg time matched flights: " + ((double)timeMatchFlights / (double) count) + ", avg loc matched flights: " + ((double)locMatchFlights / (double)count)); + LOG.info("evnets found:" + eventsFound); //System.exit(1); try { diff --git a/src/main/java/org/ngafid/ProcessUpload.java b/src/main/java/org/ngafid/ProcessUpload.java index 9df2b6f1a..6cb490db8 100644 --- a/src/main/java/org/ngafid/ProcessUpload.java +++ b/src/main/java/org/ngafid/ProcessUpload.java @@ -20,20 +20,13 @@ import java.util.Collections; import java.util.List; import java.util.ArrayList; -import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import java.util.Objects; -import java.util.Spliterator; -import java.util.Spliterators; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.BiConsumer; -import java.util.function.Function; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ForkJoinPool; import java.util.logging.Level; import java.util.logging.Logger; -import java.util.stream.Stream; -import java.util.stream.StreamSupport; -import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import org.ngafid.flights.Flight; @@ -46,19 +39,14 @@ import org.ngafid.accounts.User; -@FunctionalInterface -interface FlightFileProcessors { - FlightFileProcessor create(InputStream stream, String filename, Object... args); -} - public class ProcessUpload { private static Connection connection = null; private static Logger LOG = Logger.getLogger(ProcessUpload.class.getName()); private static final String ERROR_STATUS_STR = "ERROR"; public static void main(String[] arguments) { - System.out.println("arguments are:"); - System.out.println(Arrays.toString(arguments)); + LOG.info("arguments are:"); + LOG.info(Arrays.toString(arguments)); connection = Database.getConnection(); @@ -130,7 +118,7 @@ public static void operateAsDaemon() { } public static void processFleetUploads(int fleetId) { - System.out.println("processing uploads from fleet with id: " + fleetId); + LOG.info("processing uploads from fleet with id: " + fleetId); try { Fleet fleet = Fleet.get(connection, fleetId); String f = fleet.getName() == null ? 
" NULL NAME " : fleet.getName(); @@ -154,7 +142,7 @@ public static void processFleetUploads(int fleetId) { } public static void processUpload(int uploadId) { - System.out.println("processing upload with id: " + uploadId); + LOG.info("processing upload with id: " + uploadId); try { Upload upload = Upload.getUploadById(connection, uploadId); @@ -191,7 +179,7 @@ public static void processUpload(Upload upload) { SendEmail.sendEmail(recipients, bccRecipients, subject, body); upload.reset(connection); - System.out.println("upload was reset!\n\n"); + LOG.info("upload was reset!\n\n"); UploadProcessedEmail uploadProcessedEmail = new UploadProcessedEmail(recipients, bccRecipients); @@ -200,7 +188,7 @@ public static void processUpload(Upload upload) { long end = System.nanoTime(); long diff = end - start; - double asSeconds = ((double) diff) / 1.0e-9; + double asSeconds = ((double) diff) * 1.0e-9; System.out.println("Took " + asSeconds + "s to ingest upload " + upload.getFilename()); //only progress if the upload ingestion was successful @@ -249,6 +237,8 @@ public FlightInfo(int id, int length, String filename, List(); + long startNanos = System.nanoTime(); + pool.submit(() -> + pipeline + .stream() + .parallel() + .flatMap(pipeline::parse) + .map(pipeline::build) + .filter(Objects::nonNull) + .map(pipeline::tabulateFlightStatus) + .forEach(flights::add) + ).join(); + long endNanos = System.nanoTime(); + double s = 1e-9 * (double) (endNanos - startNanos); + System.out.println("Took " + s + "s to process flights"); + + startNanos = System.nanoTime(); + + Flight.batchUpdateDatabase(connection, upload, flights); + flights.forEach(f -> f.updateDatabase(connection, uploadId, uploaderId, fleetId)); + + endNanos = System.nanoTime(); + s = 1e-9 * (double) (endNanos - startNanos); + System.out.println("Took " + s + "s to upload flights to database"); flightErrors = pipeline.getFlightErrors(); errorFlights = flightErrors.size(); warningFlights = pipeline.getWarningFlightsCount(); @@ -332,7 +338,6 @@ public static boolean ingestFlights(Connection connection, Upload upload, Upload //insert all the flight errors to the database for (Map.Entry entry : flightErrors.entrySet()) { UploadException exception = entry.getValue(); - exception.printStackTrace(); FlightError.insertError(connection, uploadId, exception.getFilename(), exception.getMessage()); } @@ -351,9 +356,9 @@ public static boolean ingestFlights(Connection connection, Upload upload, Upload System.err.println("email in " + elapsed_seconds); uploadProcessedEmail.setImportElapsedTime(elapsed_seconds); - System.out.println("valid flights: " + validFlights); - System.out.println("warning flights: " + warningFlights); - System.out.println("error flights: " + errorFlights); + LOG.info("valid flights: " + validFlights); + LOG.info("warning flights: " + warningFlights); + LOG.info("error flights: " + errorFlights); uploadProcessedEmail.setValidFlights(validFlights); //iterate over all the flights without warnings @@ -408,7 +413,7 @@ private static void placeInZip(String file, String zipFileName) throws IOExcepti } private static File convertDATFile(File file) throws NotDatFile, IOException, FileEnd { - System.out.println("Converting to CSV: " + file.getAbsolutePath()); + LOG.info("Converting to CSV: " + file.getAbsolutePath()); DatFile datFile = DatFile.createDatFile(file.getAbsolutePath()); datFile.reset(); datFile.preAnalyze(); @@ -421,7 +426,7 @@ private static File convertDATFile(File file) throws NotDatFile, IOException, Fi datFile.reset(); AnalyzeDatResults 
results = convertDat.analyze(false); - System.out.println(datFile.getFile().getAbsolutePath()); + LOG.info(datFile.getFile().getAbsolutePath()); return datFile.getFile(); } diff --git a/src/main/java/org/ngafid/flights/DoubleTimeSeries.java b/src/main/java/org/ngafid/flights/DoubleTimeSeries.java index 90760d962..408686a72 100644 --- a/src/main/java/org/ngafid/flights/DoubleTimeSeries.java +++ b/src/main/java/org/ngafid/flights/DoubleTimeSeries.java @@ -398,6 +398,50 @@ public double[] sliceCopy(int from, int to) { return slice; } + public static PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + return connection.prepareStatement("INSERT INTO double_series (flight_id, name_id, data_type_id, length, valid_length, min, avg, max, data) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"); + } + + public void addBatch(Connection connection, PreparedStatement preparedStatement, int flightId) throws SQLException, IOException { + if (typeId == -1) + setTypeId(connection); + if (nameId == -1) + setNameId(connection); + + preparedStatement.setInt(1, flightId); + preparedStatement.setInt(2, nameId); + preparedStatement.setInt(3, typeId); + + preparedStatement.setInt(4, this.size); + preparedStatement.setInt(5, validCount); + + if (Double.isNaN(min)) { + preparedStatement.setNull(6, java.sql.Types.DOUBLE); + } else { + preparedStatement.setDouble(6, min); + } + + if (Double.isNaN(avg)) { + preparedStatement.setNull(7, java.sql.Types.DOUBLE); + } else { + preparedStatement.setDouble(7, avg); + } + + if (Double.isNaN(max)) { + preparedStatement.setNull(8, java.sql.Types.DOUBLE); + } else { + preparedStatement.setDouble(8, max); + } + + // UPDATED COMPRESSION CODE + byte[] compressed = Compression.compressDoubleArray(this.data); + Blob seriesBlob = new SerialBlob(compressed); + + preparedStatement.setBlob(9, seriesBlob); + + preparedStatement.addBatch(); + } + public void updateDatabase(Connection connection, int flightId) { //System.out.println("Updating database for " + this); if (this.temporary) @@ -409,81 +453,10 @@ public void updateDatabase(Connection connection, int flightId) { setNameId(connection); PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO double_series (flight_id, name_id, data_type_id, length, valid_length, min, avg, max, data) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"); - - preparedStatement.setInt(1, flightId); - preparedStatement.setInt(2, nameId); - preparedStatement.setInt(3, typeId); - - preparedStatement.setInt(4, this.size); - preparedStatement.setInt(5, validCount); - - if (Double.isNaN(min)) { - preparedStatement.setNull(6, java.sql.Types.DOUBLE); - } else { - preparedStatement.setDouble(6, min); - } - - if (Double.isNaN(avg)) { - preparedStatement.setNull(7, java.sql.Types.DOUBLE); - } else { - preparedStatement.setDouble(7, avg); - } - - if (Double.isNaN(max)) { - preparedStatement.setNull(8, java.sql.Types.DOUBLE); - } else { - preparedStatement.setDouble(8, max); - } - - // UPDATED COMPRESSION CODE - // byte[] compressed = Compression.compressDoubleArray(this.data); - // Blob seriesBlob = new SerialBlob(compressed); - - // Possible optimization: using an array instead of an array list for timeSeries, since ArrayList - // is a list of objects rather than a list of primitives - it consumes much more memory. - // It may also be possible to use some memory tricks to do this with no copying by wrapping the double[]. 
- ByteBuffer timeSeriesBytes = ByteBuffer.allocate(size * Double.BYTES); - for (int i = 0; i < size; i++) - timeSeriesBytes.putDouble(data[i]); - - // Hopefully this is enough memory. It should be enough. - int bufferSize = timeSeriesBytes.capacity() + 256; - ByteBuffer compressedTimeSeries; - - // This is probably super overkill but it won't hurt? - // If there is not enough memory in the buffer it will through BufferOverflowException. If that happens, - // allocate more memory. - // I don't think it should happen unless the time series unless the compressed data is larger than the - // raw data, which should never happen. - int compressedDataLength; - - for (;;) { - compressedTimeSeries = ByteBuffer.allocate(bufferSize); - try { - Deflater deflater = new Deflater(DoubleTimeSeries.COMPRESSION_LEVEL); - deflater.setInput(timeSeriesBytes.array()); - deflater.finish(); - compressedDataLength = deflater.deflate(compressedTimeSeries.array()); - deflater.end(); - break; - } catch (BufferOverflowException _boe) { - bufferSize *= 2; - } - } - - // Have to do this to make sure there are no extra zeroes at the end of the buffer, which may happen because - // we don't know what the compressed data size until after it is done being compressed - byte[] blobBytes = new byte[compressedDataLength]; - compressedTimeSeries.get(blobBytes); - Blob seriesBlob = new SerialBlob(blobBytes); - - preparedStatement.setBlob(9, seriesBlob); - preparedStatement.executeUpdate(); + this.addBatch(connection, preparedStatement, flightId); + preparedStatement.executeBatch(); preparedStatement.close(); - - seriesBlob.free(); - - } catch (SQLException e) { // | IOException e) { // Re-enable this for the new compression code. + } catch (SQLException | IOException e) { e.printStackTrace(); System.exit(1); } diff --git a/src/main/java/org/ngafid/flights/ErrorMessage.java b/src/main/java/org/ngafid/flights/ErrorMessage.java index 4195f46de..361fb66cc 100644 --- a/src/main/java/org/ngafid/flights/ErrorMessage.java +++ b/src/main/java/org/ngafid/flights/ErrorMessage.java @@ -21,15 +21,13 @@ public static int getMessageId(Connection connection, String message) throws SQL if (id != null) { return id; - } else { //id wasn't in the hashmap, look it up String queryString = "SELECT id FROM flight_messages WHERE message = ?"; PreparedStatement query = connection.prepareStatement(queryString); query.setString(1, message); - LOG.info(query.toString()); - System.out.println(query); + // LOG.info(query.toString()); ResultSet resultSet = query.executeQuery(); if (resultSet.next()) { diff --git a/src/main/java/org/ngafid/flights/Flight.java b/src/main/java/org/ngafid/flights/Flight.java index f907838c1..4630b1f18 100644 --- a/src/main/java/org/ngafid/flights/Flight.java +++ b/src/main/java/org/ngafid/flights/Flight.java @@ -49,6 +49,7 @@ import java.util.Optional; import java.util.logging.Logger; import java.util.stream.Collectors; +import java.util.stream.Stream; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; @@ -559,7 +560,6 @@ public static long getTotalFlightHours(Connection connection, String queryString resultSet.next(); long diffSeconds = resultSet.getLong(1); - System.out.println("total time is: " + diffSeconds); resultSet.close(); query.close(); @@ -786,7 +786,6 @@ public static List getFlightsByRange(Connection connection, Filter filte while (resultSet.next()) { flights.add(new Flight(connection, resultSet)); } - System.out.println(flights); resultSet.close(); query.close(); @@ -1123,8 +1122,6 @@ public static 
List getUnassociatedTags(Connection connection, int fli return getAllTags(connection, fleetId); } - System.out.println("TAG NUMS: " + tagIds.toString()); - String queryString = "SELECT id, fleet_id, name, description, color FROM flight_tags " + idLimStr(tagIds, true); PreparedStatement query = connection.prepareStatement(queryString); ResultSet resultSet = query.executeQuery(); @@ -1310,7 +1307,6 @@ public static FlightTag createTag(int fleetId, int flightId, String name, String index = resultSet.getInt(1); } - System.out.println(index); associateTag(flightId, index, connection); return new FlightTag(index, fleetId, name, description, color); @@ -1629,8 +1625,6 @@ public void calculateScanEagleStartEndTime(String timeColumnName, String latColu DoubleTimeSeries latitudes = doubleTimeSeries.get(latColumnName); DoubleTimeSeries longitudes = doubleTimeSeries.get(lonColumnName); - System.out.println("times: " + times + ", latitudes: " + latitudes + ", longitudes: " + longitudes); - if (times == null) { throw new MalformedFlightFileException("Time column '" + timeColumnName + "' did not exist! Cannot set start/end times."); } @@ -1650,9 +1644,6 @@ public void calculateScanEagleStartEndTime(String timeColumnName, String latColu int latSize = latitudes.size(); int lonSize = longitudes.size(); - System.out.println("\ttime size: " + timeSize + ", lat size: " + latSize + ", lon size: " + lonSize); - System.out.println("\tstart time: " + startDateTime); - System.out.println("\tend time: " + endDateTime); String firstTime = null; for (int i = 0; i < times.size(); i++) { @@ -1661,7 +1652,6 @@ public void calculateScanEagleStartEndTime(String timeColumnName, String latColu break; } } - System.out.println("\tfirst time: '" + firstTime + "'"); String lastTime = null; for (int i = times.size() - 1; i >= 0; i--) { @@ -1670,18 +1660,15 @@ public void calculateScanEagleStartEndTime(String timeColumnName, String latColu break; } } - System.out.println("\tlast time: '" + lastTime + "'"); double firstLat = 0.0; for (int i = 0; i < latitudes.size(); i++) { - //System.out.println("\t\tlat[" + i + "]: " + latitudes.get(i)); double lat = latitudes.get(i); if (lat != 0.0 && !Double.isNaN(lat)) { firstLat = latitudes.get(i); break; } } - System.out.println("\tfirst lat: '" + firstLat + "'"); double firstLon = 0.0; for (int i = 0; i < longitudes.size(); i++) { @@ -1692,15 +1679,12 @@ public void calculateScanEagleStartEndTime(String timeColumnName, String latColu break; } } - System.out.println("\tfirst long: '" + firstLon + "'"); //TODO: can't get time offset from lat/long because they aren't being set correctly startDateTime += " " + firstTime; endDateTime += " " + lastTime; - System.out.println("start date time: " + startDateTime); - System.out.println("end date time: " + endDateTime); } public void calculateStartEndTime(String dateColumnName, String timeColumnName, String offsetColumnName) throws MalformedFlightFileException { @@ -1725,8 +1709,6 @@ public void calculateStartEndTime(String dateColumnName, String timeColumnName, int timeSize = times.size(); int offsetSize = offsets.size(); - System.out.println("\tdate size: " + dateSize + ", time size: " + timeSize + ", offset size: " + offsetSize); - //get the minimum sized length of each of these series, they should all be the same but //if the last column was cut off it might not be the case int minSize = dateSize; @@ -1743,8 +1725,6 @@ public void calculateStartEndTime(String dateColumnName, String timeColumnName, start++; } - System.out.println("\tfirst date time 
and offset not null at index: " + start); - if (start >= minSize) { throw new MalformedFlightFileException("Date, Time or Offset columns were all null! Cannot set start/end times."); } @@ -1767,9 +1747,6 @@ public void calculateStartEndTime(String dateColumnName, String timeColumnName, String endTime = times.get(end); String endOffset = offsets.get(end); - System.out.println("\t\t\tfirst not null " + start + " -- " + startDate + " " + startTime + " " + startOffset); - System.out.println("\t\t\tlast not null " + endDate + " " + endTime + " " + endOffset); - OffsetDateTime startODT = null; try { startODT = TimeUtils.convertToOffset(startDate, startTime, startOffset, "+00:00"); @@ -1818,8 +1795,6 @@ private void initialize(Connection connection, InputStream inputStream) throws F throw new FatalFlightFileException("The flight file was empty."); if (fileInformation.charAt(0) != '#' && fileInformation.charAt(0) != '{') { if (fileInformation.substring(0, 4).equals("DID_")) { - System.out.println("CAME FROM A SCANEAGLE! CAN CALCULATE SUGGESTED TAIL/SYSTEM ID FROM FILENAME"); - airframeName = "ScanEagle"; airframeType = "UAS Fixed Wing"; } else { @@ -1834,19 +1809,12 @@ private void initialize(Connection connection, InputStream inputStream) throws F String[] filenameParts = filename.split("_"); startDateTime = filenameParts[0]; endDateTime = startDateTime; - System.out.println("start date: '" + startDateTime + "'"); - System.out.println("end date: '" + startDateTime + "'"); //UND doesn't have the systemId for UAS anywhere in the filename or file (sigh) suggestedTailNumber = "N" + filenameParts[1] + "ND"; systemId = suggestedTailNumber; - System.out.println("suggested tail number: '" + suggestedTailNumber + "'"); - System.out.println("system id: '" + systemId + "'"); - } else if (headers.size() > 0) { - System.out.println("JSON detected"); - Gson gson = new Gson(); JsonReader reader = new JsonReader(new InputStreamReader(inputStream)); Map jsonMap = gson.fromJson(reader, Map.class); @@ -1949,7 +1917,6 @@ private void initialize(Connection connection, InputStream inputStream) throws F //System.out.println("Headers line is: " + headersLine); headers.addAll(Arrays.asList(headersLine.split("\\,", -1))); headers.replaceAll(String::trim); - System.out.println("headers are:\n" + headers.toString()); //scan eagle files have no data types, set all to "" for (int i = 0; i < headers.size(); i++) { @@ -1972,7 +1939,6 @@ private void initialize(Connection connection, InputStream inputStream) throws F String headersLine = bufferedReader.readLine(); if (headersLine.length() == 0) headersLine = bufferedReader.readLine(); //handle windows files with carriage returns - System.out.println("Headers line is: " + headersLine); headers.addAll(Arrays.asList(headersLine.split("\\,", -1))); headers.replaceAll(String::trim); @@ -3367,6 +3333,119 @@ public void printValues(String[] requestedHeaders) { System.out.println(); } + public static void batchUpdateDatabase(Connection connection, Upload upload, Iterable flights) { + int fleetId = upload.getFleetId(); + int uploaderId = upload.getUploaderId(); + int uploadId = upload.getId(); + + try { + PreparedStatement preparedStatement = createPreparedStatement(connection); + for (Flight flight : flights) { + // This is fine because this stuff is mostly cached + flight.airframeNameId = Airframes.getNameId(connection, flight.airframeName); + flight.airframeTypeId = Airframes.getTypeId(connection, flight.airframeType); + Airframes.setAirframeFleet(connection, flight.airframeNameId, 
fleetId); + + Tails.setSuggestedTail(connection, fleetId, flight.systemId, flight.suggestedTailNumber); + flight.tailNumber = Tails.getTail(connection, fleetId, flight.systemId); + flight.tailConfirmed = Tails.getConfirmed(connection, fleetId, flight.systemId); + flight.fleetId = fleetId; + flight.uploaderId = uploaderId; + flight.uploadId = uploadId; + flight.addBatch(preparedStatement); + } + + int[] _results = preparedStatement.executeBatch(); + ResultSet results = preparedStatement.getGeneratedKeys(); + int count = 0; + + + for (Flight flight : flights) { + if (!results.next()) { + LOG.severe("ERROR: insertion of flight to the database did not result in an id. This should never happen."); + System.exit(1); + } + flight.id = results.getInt(1); + } + + preparedStatement.close(); + + PreparedStatement doubleTSPreparedStatement = DoubleTimeSeries.createPreparedStatement(connection); + + for (Flight flight : flights) + for (var doubleTS : flight.doubleTimeSeries.values()) + doubleTS.addBatch(connection, doubleTSPreparedStatement, flight.id); + + doubleTSPreparedStatement.executeBatch(); + doubleTSPreparedStatement.close(); + + PreparedStatement stringTSPreparedStatement = StringTimeSeries.createPreparedStatement(connection); + + for (Flight flight : flights) + for (var stringTS : flight.stringTimeSeries.values()) + stringTS.addBatch(connection, stringTSPreparedStatement, flight.id); + + stringTSPreparedStatement.executeBatch(); + stringTSPreparedStatement.close(); + + PreparedStatement itineraryPreparedStatement = Itinerary.createPreparedStatement(connection); + PreparedStatement airportPreparedStatement = Itinerary.createAirportPreparedStatement(connection); + PreparedStatement runwayPreparedStatement = Itinerary.createRunwayPreparedStatement(connection); + + for (Flight flight : flights) { + for (int i = 0; i < flight.itinerary.size(); i++) + flight.itinerary.get(i).addBatch(itineraryPreparedStatement, airportPreparedStatement, runwayPreparedStatement, fleetId, flight.id, i); + } + + itineraryPreparedStatement.executeBatch(); + itineraryPreparedStatement.close(); + airportPreparedStatement.executeBatch(); + airportPreparedStatement.close(); + runwayPreparedStatement.executeBatch(); + runwayPreparedStatement.close(); + + PreparedStatement warningPreparedStatement = FlightWarning.createPreparedStatement(connection); + + for (Flight flight : flights) + for (var e : flight.exceptions) + new FlightWarning(e.getMessage()).addBatch(connection, preparedStatement, flight.id); + + warningPreparedStatement.executeBatch(); + warningPreparedStatement.close(); + + } catch (SQLException | IOException e) { + LOG.severe("Encountered the following exception while inserting batch of flights: \n" + e.getMessage()); + e.printStackTrace(); + System.exit(1); + } + } + + private static PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + return connection.prepareStatement("INSERT INTO flights (fleet_id, uploader_id, upload_id, airframe_id, airframe_type_id, system_id, start_time, end_time, filename, md5_hash, number_rows, status, has_coords, has_agl, insert_completed, processing_status, start_timestamp, end_timestamp) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, UNIX_TIMESTAMP(?), UNIX_TIMESTAMP(?))", Statement.RETURN_GENERATED_KEYS); + } + + private void addBatch(PreparedStatement preparedStatement) throws SQLException { + preparedStatement.setInt(1, fleetId); + preparedStatement.setInt(2, uploaderId); + preparedStatement.setInt(3, uploadId); + 
preparedStatement.setInt(4, airframeNameId); + preparedStatement.setInt(5, airframeTypeId); + preparedStatement.setString(6, systemId); + preparedStatement.setString(7, startDateTime); + preparedStatement.setString(8, endDateTime); + preparedStatement.setString(9, filename); + preparedStatement.setString(10, md5Hash); + preparedStatement.setInt(11, numberRows); + preparedStatement.setString(12, status); + preparedStatement.setBoolean(13, hasCoords); + preparedStatement.setBoolean(14, hasAGL); + preparedStatement.setBoolean(15, false); //insert not yet completed + preparedStatement.setLong(16, processingStatus); + preparedStatement.setString(17, startDateTime); + preparedStatement.setString(18, endDateTime); + preparedStatement.addBatch(); + } + public void updateDatabase(Connection connection, int uploadId, int uploaderId, int fleetId) { this.fleetId = fleetId; this.uploaderId = uploaderId; @@ -3383,55 +3462,29 @@ public void updateDatabase(Connection connection, int uploadId, int uploaderId, tailNumber = Tails.getTail(connection, fleetId, systemId); tailConfirmed = Tails.getConfirmed(connection, fleetId, systemId); - PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO flights (fleet_id, uploader_id, upload_id, airframe_id, airframe_type_id, system_id, start_time, end_time, filename, md5_hash, number_rows, status, has_coords, has_agl, insert_completed, processing_status, start_timestamp, end_timestamp) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, UNIX_TIMESTAMP(?), UNIX_TIMESTAMP(?))", Statement.RETURN_GENERATED_KEYS); - preparedStatement.setInt(1, fleetId); - preparedStatement.setInt(2, uploaderId); - preparedStatement.setInt(3, uploadId); - preparedStatement.setInt(4, airframeNameId); - preparedStatement.setInt(5, airframeTypeId); - preparedStatement.setString(6, systemId); - preparedStatement.setString(7, startDateTime); - preparedStatement.setString(8, endDateTime); - preparedStatement.setString(9, filename); - preparedStatement.setString(10, md5Hash); - preparedStatement.setInt(11, numberRows); - preparedStatement.setString(12, status); - preparedStatement.setBoolean(13, hasCoords); - preparedStatement.setBoolean(14, hasAGL); - preparedStatement.setBoolean(15, false); //insert not yet completed - preparedStatement.setLong(16, processingStatus); - preparedStatement.setString(17, startDateTime); - preparedStatement.setString(18, endDateTime); + PreparedStatement preparedStatement = createPreparedStatement(connection); + + this.addBatch(preparedStatement); LOG.info(preparedStatement.toString()); - preparedStatement.executeUpdate(); + preparedStatement.executeBatch(); ResultSet resultSet = preparedStatement.getGeneratedKeys(); if (resultSet.next()) { int flightId = resultSet.getInt(1); this.id = flightId; - // Comment this out unless debugging - //for (String key : doubleTimeSeries.keySet()) { - //System.out.println("double time series key: '" + key); - //System.out.println("\tis " + doubleTimeSeries.get(key).toString()); - //} - - for (DoubleTimeSeries series : doubleTimeSeries.values()) { + for (DoubleTimeSeries series : doubleTimeSeries.values()) series.updateDatabase(connection, flightId); - } - for (StringTimeSeries series : stringTimeSeries.values()) { + for (StringTimeSeries series : stringTimeSeries.values()) series.updateDatabase(connection, flightId); - } - for (Exception exception : exceptions) { + for (Exception exception : exceptions) FlightWarning.insertWarning(connection, flightId, exception.getMessage()); - } - for (int i = 0; i < 
itinerary.size(); i++) { + for (int i = 0; i < itinerary.size(); i++) itinerary.get(i).updateDatabase(connection, fleetId, flightId, i); - } PreparedStatement ps = connection.prepareStatement("UPDATE flights SET insert_completed = 1 WHERE id = ?"); ps.setInt(1, this.id); diff --git a/src/main/java/org/ngafid/flights/FlightError.java b/src/main/java/org/ngafid/flights/FlightError.java index dc9eda052..027f851d1 100644 --- a/src/main/java/org/ngafid/flights/FlightError.java +++ b/src/main/java/org/ngafid/flights/FlightError.java @@ -25,8 +25,6 @@ public static void insertError(Connection connection, int uploadId, String filen exceptionPreparedStatement.setString(2, filename); exceptionPreparedStatement.setInt(3, ErrorMessage.getMessageId(connection, message)); - LOG.info(exceptionPreparedStatement.toString()); - exceptionPreparedStatement.executeUpdate(); exceptionPreparedStatement.close(); } diff --git a/src/main/java/org/ngafid/flights/FlightWarning.java b/src/main/java/org/ngafid/flights/FlightWarning.java index 3c6536704..9908ac0e2 100644 --- a/src/main/java/org/ngafid/flights/FlightWarning.java +++ b/src/main/java/org/ngafid/flights/FlightWarning.java @@ -18,15 +18,22 @@ public class FlightWarning { private String filename; private String message; private String stackTrace; + + public static PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + return connection.prepareStatement("INSERT INTO flight_warnings (flight_id, message_id) VALUES (?, ?)"); + } + public void addBatch(Connection connection, PreparedStatement preparedStatement, int flightId) throws SQLException { + preparedStatement.setInt(1, flightId); + preparedStatement.setInt(2, ErrorMessage.getMessageId(connection, message)); + } + public static void insertWarning(Connection connection, int flightId, String message) throws SQLException { - PreparedStatement exceptionPreparedStatement = connection.prepareStatement("INSERT INTO flight_warnings (flight_id, message_id) VALUES (?, ?)"); - exceptionPreparedStatement.setInt(1, flightId); - exceptionPreparedStatement.setInt(2, ErrorMessage.getMessageId(connection, message)); - - LOG.info(exceptionPreparedStatement.toString()); + PreparedStatement exceptionPreparedStatement = createPreparedStatement(connection); - exceptionPreparedStatement.executeUpdate(); + new FlightWarning(message).addBatch(connection, exceptionPreparedStatement, flightId); + + exceptionPreparedStatement.executeBatch(); exceptionPreparedStatement.close(); } @@ -68,6 +75,10 @@ public static int getCount(Connection connection, int fleetId) throws SQLExcepti return count ; } + public FlightWarning(String message) { + this.message = message; + } + public FlightWarning(Connection connection, ResultSet resultSet) throws SQLException { filename = resultSet.getString(1); uploadId = resultSet.getInt(2); diff --git a/src/main/java/org/ngafid/flights/Itinerary.java b/src/main/java/org/ngafid/flights/Itinerary.java index 800e0c298..7f9357c40 100644 --- a/src/main/java/org/ngafid/flights/Itinerary.java +++ b/src/main/java/org/ngafid/flights/Itinerary.java @@ -320,22 +320,15 @@ public void update(String runway, int index, double altitudeAGL, double airportD public void selectBestRunway() { runway = null; int maxCount = 0; - System.err.println("Selecting runway:"); - System.err.println("min airport distance: " + minAirportDistance); - System.err.println("min runway distance: " + minRunwayDistance); - System.err.println("min altitude agl: " + minAltitude); - + for (String key : 
runwayCounts.keySet()) { int count = runwayCounts.get(key); - System.err.println("\trunway: " + key + ", count: " + count); if (count > maxCount) { runway = key; maxCount = count; } } - - System.err.println("selected runway '" + runway + "' with count: " + maxCount); } public boolean wasApproach() { @@ -358,45 +351,59 @@ public boolean wasApproach() { } } - public void updateDatabase(Connection connection, int fleetId, int flightId, int order) throws SQLException { + public static PreparedStatement createAirportPreparedStatement(Connection connection) throws SQLException { + return connection.prepareStatement("INSERT IGNORE INTO visited_airports SET fleet_id = ?, airport = ?"); + } + + public static PreparedStatement createRunwayPreparedStatement(Connection connection) throws SQLException { + return connection.prepareStatement("INSERT IGNORE INTO visited_runways SET fleet_id = ?, runway = ?"); + } + + public static PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + return connection.prepareStatement("INSERT INTO itinerary (flight_id, `order`, min_altitude_index, min_altitude, min_airport_distance, min_runway_distance, airport, runway, start_of_approach, end_of_approach, start_of_takeoff, end_of_takeoff, type) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"); + } + + public void addBatch(PreparedStatement itineraryStatement, PreparedStatement airportStatement, PreparedStatement runwayStatement, int fleetId, int flightId, int order) throws SQLException { this.order = order; + + airportStatement.setInt(1, fleetId); + airportStatement.setString(2, airport); + airportStatement.addBatch(); + + runwayStatement.setInt(1, fleetId); + runwayStatement.setString(2, airport + " - " + runway); + runwayStatement.addBatch(); + + itineraryStatement.setInt(1, flightId); + itineraryStatement.setInt(2, order); + itineraryStatement.setInt(3, minAltitudeIndex); + itineraryStatement.setDouble(4, minAltitude); + itineraryStatement.setDouble(5, minAirportDistance); + itineraryStatement.setDouble(6, minRunwayDistance); + itineraryStatement.setString(7, airport); + itineraryStatement.setString(8, runway); + itineraryStatement.setInt(9, startOfApproach); + itineraryStatement.setInt(10, endOfApproach); + itineraryStatement.setInt(11, startOfTakeoff); + itineraryStatement.setInt(12, endOfTakeoff); + itineraryStatement.setString(13, type); + itineraryStatement.addBatch(); + } + public void updateDatabase(Connection connection, int fleetId, int flightId, int order) throws SQLException { //insert new visited airports and runways -- will ignore if it already exists - PreparedStatement preparedStatement = connection.prepareStatement("INSERT IGNORE INTO visited_airports SET fleet_id = ?, airport = ?"); - preparedStatement.setInt(1, fleetId); - preparedStatement.setString(2, airport); - - System.err.println(preparedStatement); - preparedStatement.executeUpdate(); - preparedStatement.close(); - - preparedStatement = connection.prepareStatement("INSERT IGNORE INTO visited_runways SET fleet_id = ?, runway = ?"); - preparedStatement.setInt(1, fleetId); - preparedStatement.setString(2, airport + " - " + runway); - - System.err.println(preparedStatement); - preparedStatement.executeUpdate(); - preparedStatement.close(); - - //now insert the itinerary - preparedStatement = connection.prepareStatement("INSERT INTO itinerary (flight_id, `order`, min_altitude_index, min_altitude, min_airport_distance, min_runway_distance, airport, runway, start_of_approach, end_of_approach, start_of_takeoff, 
end_of_takeoff, type) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"); - preparedStatement.setInt(1, flightId); - preparedStatement.setInt(2, order); - preparedStatement.setInt(3, minAltitudeIndex); - preparedStatement.setDouble(4, minAltitude); - preparedStatement.setDouble(5, minAirportDistance); - preparedStatement.setDouble(6, minRunwayDistance); - preparedStatement.setString(7, airport); - preparedStatement.setString(8, runway); - preparedStatement.setInt(9, startOfApproach); - preparedStatement.setInt(10, endOfApproach); - preparedStatement.setInt(11, startOfTakeoff); - preparedStatement.setInt(12, endOfTakeoff); - preparedStatement.setString(13, type); - - System.err.println(preparedStatement); - preparedStatement.executeUpdate(); - preparedStatement.close(); + PreparedStatement statement = createPreparedStatement(connection); + PreparedStatement airportStatement = createAirportPreparedStatement(connection); + PreparedStatement runwayStatement = createRunwayPreparedStatement(connection); + + this.addBatch(statement, airportStatement, runwayStatement, fleetId, flightId, order); + + statement.executeBatch(); + statement.close(); + airportStatement.executeBatch(); + airportStatement.close(); + runwayStatement.executeBatch(); + runwayStatement.close(); } public String toString() { // TODO: add new columns to toString? diff --git a/src/main/java/org/ngafid/flights/StringTimeSeries.java b/src/main/java/org/ngafid/flights/StringTimeSeries.java index 8966de829..4ee2fe75d 100644 --- a/src/main/java/org/ngafid/flights/StringTimeSeries.java +++ b/src/main/java/org/ngafid/flights/StringTimeSeries.java @@ -210,30 +210,38 @@ public int validCount() { return validCount; } - public void updateDatabase(Connection connection, int flightId) { + public static PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + return connection.prepareStatement("INSERT INTO string_series (flight_id, name_id, data_type_id, length, valid_length, data) VALUES (?, ?, ?, ?, ?, ?)"); + } - try { - if (nameId == -1) - setNameId(connection); - if (typeId == -1) - setTypeId(connection); + public void addBatch(Connection connection, PreparedStatement preparedStatement, int flightId) throws SQLException, IOException { + if (nameId == -1) + setNameId(connection); + if (typeId == -1) + setTypeId(connection); + + preparedStatement.setInt(1, flightId); + preparedStatement.setInt(2, nameId); + preparedStatement.setInt(3, typeId); + preparedStatement.setInt(4, timeSeries.size()); + preparedStatement.setInt(5, validCount); - PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO string_series (flight_id, name_id, data_type_id, length, valid_length, data) VALUES (?, ?, ?, ?, ?, ?)"); + // To get rid of extra bytes at the end of the buffer + byte[] compressed = Compression.compressObject(this.timeSeries); + Blob seriesBlob = new SerialBlob(compressed); + preparedStatement.setBlob(6, seriesBlob); - preparedStatement.setInt(1, flightId); - preparedStatement.setInt(2, nameId); - preparedStatement.setInt(3, typeId); - preparedStatement.setInt(4, timeSeries.size()); - preparedStatement.setInt(5, validCount); + preparedStatement.addBatch(); + } + + public void updateDatabase(Connection connection, int flightId) { + try { + PreparedStatement preparedStatement = createPreparedStatement(connection); - // To get rid of extra bytes at the end of the buffer - byte[] compressed = Compression.compressObject(this.timeSeries); - Blob seriesBlob = new SerialBlob(compressed); + 
this.addBatch(connection, preparedStatement, flightId); - preparedStatement.setBlob(6, seriesBlob); preparedStatement.executeUpdate(); preparedStatement.close(); - seriesBlob.free(); } catch (SQLException | IOException e) { e.printStackTrace(); diff --git a/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java b/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java index 6786b8d61..b32e17266 100644 --- a/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java +++ b/src/main/java/org/ngafid/flights/process/CSVFileProcessor.java @@ -8,9 +8,11 @@ import java.io.*; import java.nio.charset.StandardCharsets; import java.util.*; +import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; +import java.util.stream.IntStream; import java.util.stream.Stream; /** @@ -31,6 +33,7 @@ public CSVFileProcessor(Connection connection, InputStream stream, String filena super(connection, stream, filename); this.upload = upload; + headers = new ArrayList<>(); dataTypes = new ArrayList<>(); @@ -40,12 +43,10 @@ public CSVFileProcessor(Connection connection, InputStream stream, String filena @Override public Stream parse() throws FlightProcessingException { - Map doubleTimeSeries = new HashMap<>(); - Map stringTimeSeries = new HashMap<>(); + Map doubleTimeSeries = new ConcurrentHashMap<>(); + Map stringTimeSeries = new ConcurrentHashMap<>(); List csvValues = null; - List dataTypes = new ArrayList<>(); - List headers = new ArrayList<>(); try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(super.stream, StandardCharsets.UTF_8)); CSVReader csvReader = new CSVReader(bufferedReader)) { String fileInformation = getFlightInfo(bufferedReader); // Will read a line @@ -55,8 +56,12 @@ public Stream parse() throws FlightProcessingException { } else { processFileInormation(fileInformation); bufferedReader.read(); // Skip first char (#) - dataTypes = List.of(csvReader.readNext()); - headers = Arrays.stream(csvReader.readNext()).map(String::strip).collect(Collectors.toList()); + Arrays.stream(csvReader.readNext()) + .map(String::strip) + .forEachOrdered(dataTypes::add); + Arrays.stream(csvReader.readNext()) + .map(String::strip) + .forEachOrdered(headers::add); } updateAirframe(); @@ -71,18 +76,24 @@ for (int i = 0; i < row.length; i++) columns.get(i).add(row[i].trim()); - for (int i = 0; i < columns.size(); i++) { - ArrayList column = columns.get(i); - String name = headers.get(i); - String dataType = dataTypes.get(i); - try { - Double.parseDouble(column.get(0)); - doubleTimeSeries.put(name, new DoubleTimeSeries(name, dataType, column)); - } catch (NumberFormatException e) { - stringTimeSeries.put(name, new StringTimeSeries(name, dataType, column)); - } - } - + final int granularity = 8; + IntStream.range(0, (columns.size() + granularity - 1) / granularity) + .parallel() + .forEach(g -> { + var max = Math.min(g * granularity + granularity, columns.size()); + for (int i = g * granularity; i < max; i++) { + var column = columns.get(i); + var name = headers.get(i); + var dataType = dataTypes.get(i); + + try { + Double.parseDouble(column.get(0)); + doubleTimeSeries.put(name, new DoubleTimeSeries(name, dataType, column)); + } catch (NumberFormatException e) { + stringTimeSeries.put(name, new StringTimeSeries(name, dataType, column)); + } + } + }); } catch (IOException | FatalFlightFileException | CsvException e) { throw new FlightProcessingException(e); }
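The DoubleTimeSeries, StringTimeSeries, Itinerary, and FlightWarning changes above all follow the same shape: a static createPreparedStatement(Connection) builds one INSERT statement, addBatch(...) only queues a row's parameters, and a single executeBatch() sends every queued row at once. A minimal, self-contained sketch of that JDBC batching pattern is below; the example_series table and its columns are invented for illustration and are not part of the NGAFID schema.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class BatchInsertSketch {
    // Hypothetical table: example_series(flight_id INT, name VARCHAR(64), length INT)
    public static PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
        return connection.prepareStatement(
                "INSERT INTO example_series (flight_id, name, length) VALUES (?, ?, ?)");
    }

    // Queue one row; nothing is sent to the database until executeBatch().
    public static void addBatch(PreparedStatement stmt, int flightId, String name, int length) throws SQLException {
        stmt.setInt(1, flightId);
        stmt.setString(2, name);
        stmt.setInt(3, length);
        stmt.addBatch();
    }

    // One reusable statement, many queued rows, a single executeBatch() round trip.
    public static void insertAll(Connection connection, int flightId, String[] names) throws SQLException {
        try (PreparedStatement stmt = createPreparedStatement(connection)) {
            for (String name : names) {
                addBatch(stmt, flightId, name, name.length());
            }
            stmt.executeBatch();
        }
    }
}

Reusing one statement per table and executing a single batch keeps the insert path to a handful of database round trips instead of one per row.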
diff --git a/src/main/java/org/ngafid/flights/process/DependencyGraph.java b/src/main/java/org/ngafid/flights/process/DependencyGraph.java index 9a698c0d3..a977fe2db 100644 --- a/src/main/java/org/ngafid/flights/process/DependencyGraph.java +++ b/src/main/java/org/ngafid/flights/process/DependencyGraph.java @@ -40,8 +40,7 @@ void disableChildren() { enabled.set(false); if (step.isRequired()) { String reason = step.explainApplicability(); - LOG.severe(builder.doubleTimeSeries.keySet().toString()); - LOG.severe("Required step " + step.toString() + " has been disabled for :\n " + reason); + LOG.severe("Required step " + step.getClass().getName() + " has been disabled for the following reason:\n " + reason); exceptions.add(new FatalFlightFileException(reason)); } for (var child : requiredBy) child.disable(); @@ -55,7 +54,7 @@ void disable() { LOG.severe("Required step " + step.toString() + " has been disabled."); exceptions.add( new FatalFlightFileException( - "Required step " + step.toString() + "Required step " + step.getClass().getName() + " has been disabled because a required parent step has been disabled")); } for (var child : requiredBy) child.disable(); @@ -206,21 +205,12 @@ public void compute() throws FlightProcessingException { tasks.put(node, task); } } - ForkJoinPool ex = new ForkJoinPool(8); - try { - ex.invoke(new RecursiveTask() { - public Void compute() { - initialTasks - .stream() - .map(x -> x.fork()) - .forEach(ForkJoinTask::join); - return null; - } - }); - } finally { - ex.shutdown(); - } + var handles = initialTasks + .stream() + .map(x -> x.fork()) + .collect(Collectors.toList()); + handles.forEach(ForkJoinTask::join); ArrayList fatalExceptions = new ArrayList<>(); for (var node : nodes) { diff --git a/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java b/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java index b183bff38..764c88acc 100644 --- a/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java +++ b/src/main/java/org/ngafid/flights/process/FlightFileProcessor.java @@ -5,6 +5,8 @@ import java.sql.Connection; import java.util.Map; import java.util.HashMap; +import java.util.List; +import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.stream.StreamSupport; import java.util.zip.ZipEntry; @@ -15,6 +17,7 @@ import java.util.Objects; import java.util.Spliterator; import java.util.Spliterators; +import java.util.concurrent.ConcurrentHashMap; import org.ngafid.filters.Pair; import org.ngafid.UploadException; @@ -37,8 +40,8 @@ public static class Pipeline { private int validFlightsCount = 0; private int warningFlightsCount = 0; - private HashMap flightErrors = new HashMap<>(); - + private ConcurrentHashMap flightErrors = new ConcurrentHashMap<>(); + public Pipeline(Connection connection, Upload upload, ZipFile zipFile) { this.connection = connection; this.upload = upload; @@ -74,7 +77,7 @@ public Stream stream() { .filter(z -> !z.getName().contains("__MACOSX")) .filter(z -> !z.isDirectory()); - return validFiles.map(this::create).filter(Objects::nonNull); + return validFiles.map(this::create).filter(Objects::nonNull).collect(Collectors.toList()).stream(); } public Stream parse(FlightFileProcessor processor) { @@ -95,8 +98,8 @@ public Flight build(FlightBuilder fb) { } } - public Stream build(Stream fbs) { - return fbs.map(this::build).filter(Objects::nonNull); + public List build(Stream fbs) { + return fbs.map(this::build).filter(Objects::nonNull).collect(Collectors.toList()); } private 
FlightFileProcessor create(ZipEntry entry) { @@ -107,7 +110,7 @@ private FlightFileProcessor create(ZipEntry entry) { Factory f = factories.get(extension); if (f != null) { try { - return f.create(connection, zipFile.getInputStream(entry), zipFile.getName()); + return f.create(connection, zipFile.getInputStream(entry), filename); } catch (IOException e) { flightErrors.put(filename, new UploadException(e.getMessage(), e, filename)); } @@ -118,18 +121,13 @@ private FlightFileProcessor create(ZipEntry entry) { return null; } - public Flight insert(Flight flight) { - synchronized (connection) { - flight.updateDatabase(connection, upload.getId(), upload.getUploaderId(), upload.getFleetId()); - } - return flight; - } - - public void tabulateFlightStatus(Flight flight) { + public Flight tabulateFlightStatus(Flight flight) { if (flight.getStatus().equals("WARNING")) warningFlightsCount++; else validFlightsCount++; + + return flight; } public int getWarningFlightsCount() { diff --git a/src/main/java/org/ngafid/flights/process/ProcessItinerary.java b/src/main/java/org/ngafid/flights/process/ProcessItinerary.java index 898fa2338..46f39e673 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessItinerary.java +++ b/src/main/java/org/ngafid/flights/process/ProcessItinerary.java @@ -2,6 +2,7 @@ import java.util.Set; import java.util.ArrayList; +import java.util.logging.Logger; import java.util.Collections; import java.sql.Connection; import java.sql.SQLException; @@ -17,6 +18,8 @@ import org.ngafid.flights.MalformedFlightFileException; public class ProcessItinerary extends ProcessStep { + private static final Logger LOG = Logger.getLogger(ProcessItinerary.class.getName()); + private static Set REQUIRED_DOUBLE_COLUMNS = Set.of(ALT_AGL, LATITUDE, LONGITUDE, AIRPORT_DISTANCE, RUNWAY_DISTANCE, GND_SPD, E1_RPM); private static Set REQUIRED_STRING_COLUMNS = Set.of(NEAREST_AIRPORT, NEAREST_RUNWAY); private static Set OUTPUT_COLUMNS = Set.of("_itinerary"); // This is a fake column; never actually created. 
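The DependencyGraph hunk above swaps the wrapper RecursiveTask for an eager fork of every initial task, collecting the handles before joining any of them. The intermediate collect is the important part: joining inside the same lazy stream would fork and join one task at a time and serialize the steps. A small stand-alone sketch of that fork-then-join pattern follows; the numbered steps are placeholders rather than real NGAFID process steps.

import java.util.List;
import java.util.concurrent.ForkJoinTask;
import java.util.concurrent.RecursiveAction;
import java.util.stream.Collectors;

public class ForkThenJoinSketch {
    // A trivial stand-in for a process step; it just reports which worker ran it.
    static RecursiveAction step(int id) {
        return new RecursiveAction() {
            @Override
            protected void compute() {
                System.out.println("step " + id + " ran on " + Thread.currentThread().getName());
            }
        };
    }

    public static void main(String[] args) {
        List<RecursiveAction> initialTasks = List.of(step(1), step(2), step(3), step(4));

        // Fork everything first; collecting the handles forces every fork to
        // happen before the first join, so the tasks can run concurrently.
        List<ForkJoinTask<Void>> handles = initialTasks.stream()
                .map(ForkJoinTask::fork)
                .collect(Collectors.toList());
        handles.forEach(ForkJoinTask::join);
    }
}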
@@ -93,10 +96,10 @@ public void compute() throws SQLException, MalformedFlightFileException, FatalFl } /////////////////////////////////////////////////////////////////////////////////////////////////////////////// - System.err.println("Itinerary:"); - for (int i = 0; i < itinerary.size(); i++) { - System.err.println(itinerary.get(i)); - } + // LOG.info("Itinerary:"); + // for (int i = 0; i < itinerary.size(); i++) { + // LOG.info(itinerary.get(i).toString()); + // } builder.setItinerary(itinerary); } diff --git a/src/main/java/org/ngafid/flights/process/ProcessLOCI.java b/src/main/java/org/ngafid/flights/process/ProcessLOCI.java index 7515afb97..29e23496e 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessLOCI.java +++ b/src/main/java/org/ngafid/flights/process/ProcessLOCI.java @@ -34,7 +34,7 @@ public ProcessLOCI(Connection connection, FlightBuilder builder) { public void compute() throws SQLException, MalformedFlightFileException, FatalFlightFileException { DoubleTimeSeries hdg = doubleTS.get(HDG); - DoubleTimeSeries hdgLagged = withConnection((connection) -> hdg.lag(connection, YAW_RATE_LAG)); + DoubleTimeSeries hdgLagged = hdg.lag(YAW_RATE_LAG); DoubleTimeSeries roll = doubleTS.get(ROLL); DoubleTimeSeries tas = doubleTS.get(TAS_FTMIN); DoubleTimeSeries stallIndex = doubleTS.get(STALL_PROB); diff --git a/src/main/java/org/ngafid/flights/process/ProcessStep.java b/src/main/java/org/ngafid/flights/process/ProcessStep.java index 758f4a28e..ce67e19e6 100644 --- a/src/main/java/org/ngafid/flights/process/ProcessStep.java +++ b/src/main/java/org/ngafid/flights/process/ProcessStep.java @@ -76,7 +76,7 @@ public final String explainApplicability() { } String className = this.getClass().getSimpleName(); - StringBuilder sb = new StringBuilder("Step '" + className + "' cannot be applied for the following reason(s):"); + StringBuilder sb = new StringBuilder("Step '" + className + "' cannot be applied for the following reason(s):\n"); if (!airframeIsValid(builder.meta.airframeName)) { sb.append(" - airframeName '" + builder.meta.airframeName + "' is invalid (" From dff3a5b47d6d5522591c6d18e3b78164f7b28f64 Mon Sep 17 00:00:00 2001 From: Joshua Karns Date: Tue, 2 May 2023 11:19:37 -0400 Subject: [PATCH 12/12] Committed flights to db twice by mistake --- src/main/java/org/ngafid/ProcessUpload.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/ngafid/ProcessUpload.java b/src/main/java/org/ngafid/ProcessUpload.java index 6cb490db8..2ff681208 100644 --- a/src/main/java/org/ngafid/ProcessUpload.java +++ b/src/main/java/org/ngafid/ProcessUpload.java @@ -290,7 +290,7 @@ public static boolean ingestFlights(Connection connection, Upload upload, Upload startNanos = System.nanoTime(); Flight.batchUpdateDatabase(connection, upload, flights); - flights.forEach(f -> f.updateDatabase(connection, uploadId, uploaderId, fleetId)); + // flights.forEach(f -> f.updateDatabase(connection, uploadId, uploaderId, fleetId)); endNanos = System.nanoTime(); s = 1e-9 * (double) (endNanos - startNanos);
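Taken together, ingestion now parses and builds flights inside a parallel stream submitted to a dedicated ForkJoinPool, gathers the results in a concurrent queue, and persists everything with one batched database pass, which is why this final patch comments out the leftover per-flight updateDatabase call. A rough sketch of that overall shape is below; the file names, the trivial parse step, and the pool size of 4 are invented for illustration.

import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ForkJoinPool;

public class ParallelIngestSketch {
    // Pretend parse step: accept CSV names, reject everything else.
    static String parse(String filename) {
        return filename.toLowerCase(Locale.ROOT).endsWith(".csv") ? filename : null;
    }

    public static void main(String[] args) {
        List<String> files = List.of("flight1.csv", "flight2.csv", "notes.txt");
        ConcurrentLinkedQueue<String> flights = new ConcurrentLinkedQueue<>();

        // Submitting the parallel stream from inside the pool keeps its work in
        // this dedicated pool rather than the shared common pool.
        ForkJoinPool pool = new ForkJoinPool(4);
        try {
            pool.submit(() -> files.parallelStream()
                    .map(ParallelIngestSketch::parse)
                    .filter(Objects::nonNull)
                    .forEach(flights::add)).join();
        } finally {
            pool.shutdown();
        }

        // After the parallel phase, a single batched write persists everything
        // in one pass instead of once per flight.
        System.out.println("parsed " + flights.size() + " flights, ready for one batch insert");
    }
}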