diff --git a/pom.xml b/pom.xml
index e65e61aac1..4aa7884618 100644
--- a/pom.xml
+++ b/pom.xml
@@ -106,7 +106,7 @@
2.9.0
1.10.0
0.5
- 2.4
+ 2.7
2.6
3.13.0
3.6.1
@@ -133,7 +133,8 @@
1.1.3
9.4.29.v20200521
1.5.0
- 0.7.0-alpha
+ 1.3.0
+ 0.8.4
2.9
1.0
2.12.2
@@ -158,6 +159,7 @@
4.3.22
2.1.1
23.3.0.23.09
+ 6.0.22
5.7.1
4.6.3
42.6.0
@@ -169,6 +171,7 @@
2.9.0
2.0.9
0.35.0
+ 0.18.0
2.8.1
1.4.20
2.7.3
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/ExecuteImpl.java b/vcell-cli/src/main/java/org/vcell/cli/run/ExecuteImpl.java
index f1556bee3e..81110f5bc2 100644
--- a/vcell-cli/src/main/java/org/vcell/cli/run/ExecuteImpl.java
+++ b/vcell-cli/src/main/java/org/vcell/cli/run/ExecuteImpl.java
@@ -21,7 +21,7 @@
import java.util.List;
import java.util.Map;
-import static org.vcell.cli.run.hdf5.BiosimulationsHdf5Writer.BiosimulationsHdfWriterException;
+import org.vcell.cli.run.hdf5.BiosimulationsHdfWriterException;
public class ExecuteImpl {
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/ExecutionJob.java b/vcell-cli/src/main/java/org/vcell/cli/run/ExecutionJob.java
index 24d739be3a..223e06ac9a 100644
--- a/vcell-cli/src/main/java/org/vcell/cli/run/ExecutionJob.java
+++ b/vcell-cli/src/main/java/org/vcell/cli/run/ExecutionJob.java
@@ -15,7 +15,7 @@
import java.nio.file.Paths;
import java.util.List;
-import static org.vcell.cli.run.hdf5.BiosimulationsHdf5Writer.BiosimulationsHdfWriterException;
+import org.vcell.cli.run.hdf5.BiosimulationsHdfWriterException;
/**
* Contains the code necessary to execute an Omex archive in VCell
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/OmexHandler.java b/vcell-cli/src/main/java/org/vcell/cli/run/OmexHandler.java
index 7b0857cc4d..82eddbbf77 100644
--- a/vcell-cli/src/main/java/org/vcell/cli/run/OmexHandler.java
+++ b/vcell-cli/src/main/java/org/vcell/cli/run/OmexHandler.java
@@ -128,11 +128,12 @@ public List<String> getSedmlLocationsRelative(){
// Test corner cases
if (sedmlMap.get(MASTER).isEmpty()){
- if (masterCount > 0)
- throw new RuntimeException("No SED-MLs are intended to be executed (non SED-ML file is set to be master)");
- if (sedmlMap.get(REGULAR).isEmpty())
+ if (sedmlMap.get(REGULAR).isEmpty()) {
throw new RuntimeException("There are no SED-MLs in the archive to execute");
-
+ }
+ if (masterCount > 0) {
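+ // A non-SED-ML entry was flagged as master (see the removed exception above); fall back to executing every SED-ML in the archive.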
+ logger.warn("No SED-MLs are marked as master, so will run them all");
+ }
return sedmlMap.get(REGULAR).stream().map(ArchiveEntry::getFilePath).toList();
}
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5File.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5File.java
deleted file mode 100644
index dd7b6ba472..0000000000
--- a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5File.java
+++ /dev/null
@@ -1,447 +0,0 @@
-package org.vcell.cli.run.hdf5;
-
-import ncsa.hdf.hdf5lib.H5;
-import ncsa.hdf.hdf5lib.HDF5Constants;
-import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
-import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.vcell.cli.run.hdf5.Hdf5DataPreparer.Hdf5PreparedData;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.vcell.cli.run.hdf5.BiosimulationsHdf5Writer.BiosimulationsHdfWriterException;
-
-/**
- * Class to handle Hdf5 creation, data, and assist with I/O.
- */
-public class BiosimulationsHdf5File {
- // NB: Hdf5 group management is ***important***.
- private final static Logger logger = LogManager.getLogger(BiosimulationsHdf5File.class);
- private static boolean needToCreateFile = true;
-
- final private int H5F_ACC_TRUNC = HDF5Constants.H5F_ACC_TRUNC;
- final private int H5P_DEFAULT = HDF5Constants.H5P_DEFAULT;
- final private int H5F_ACC_RDWR = HDF5Constants.H5F_ACC_RDWR;
- final private int H5T_C_S1 = HDF5Constants.H5T_C_S1;
- final private int H5I_INVALID_HID = HDF5Constants.H5I_INVALID_HID;
- final private int H5T_VARIABLE = HDF5Constants.H5T_VARIABLE;
- final private int H5T_STR_NULLTERM = HDF5Constants.H5T_STR_NULLTERM;
- final private int H5T_CSET_UTF8 = HDF5Constants.H5T_CSET_UTF8;
- final private int H5E_DEFAULT_ERROR_STACK = HDF5Constants.H5E_DEFAULT;
-
- private File javaFileTarget;
- private int fileId;
- private boolean isOpen, allowExceptions;
-
- private Map<Integer, String> idToPathMap;
- private Map<String, Integer> pathToIdMap;
- private Map<Integer, Integer> datasetToDataspaceMap;
-
- private BiosimulationsHdf5File(){
- this.fileId = HDF5Constants.H5I_INVALID_HID;
- this.isOpen = false;
-
- // Explicit generic typing to highlight relationship; this is a "bi-map":
- this.idToPathMap = new HashMap<Integer, String>();
- this.pathToIdMap = new HashMap<String, Integer>();
- this.datasetToDataspaceMap = new HashMap<>();
- BiosimulationsHdf5File.needToCreateFile = false;
- }
-
- /**
- * Creates an BiosimulationsHdf5File named "reports.h5" in the provided directory, and will throw exceptions where c-style error codes would be returned.
- *
- * @param parentDir the directory to put the Hdf5 file inside.
- */
- public BiosimulationsHdf5File(File parentDir) { //"/home/ldrescher/VCell/hdf5Rebuild/testingDir"
- this(parentDir, true);
- }
-
- /**
- * The main constructor for BiosimulationsHdf5File. Note the special interpretation of allowExceptions.
- *
- * @param parentDir the directory to put the Hdf5 file inside of.
- * @param allowExceptions Whether to interperate C-style error codes as exceptions or let the user handle them.
- * Hdf5 Library-produced exceptions will still be generated regardless.
- */
- public BiosimulationsHdf5File(File parentDir, boolean allowExceptions){
- this(parentDir, "reports.h5", allowExceptions);
- }
-
-
- public void printErrorStack() {
- try {
- H5.H5Eprint2(H5E_DEFAULT_ERROR_STACK, null);
- } catch (HDF5LibraryException e){
- String message = "Catastrophic HDF5 error reporting failure detected; Something big just happened...";
- logger.error(message, e);
- throw new RuntimeException(message, e);
- }
- }
-
- /**
- * Complete constructor of `BiosimulationsHdf5File`
- *
- * @param parentDir the directory to put the Hdf5 file inside.
- * @param filename name of the h5 file to write.
- * @param allowExceptions Whether to interpret C-style error codes as exceptions or let the user handle them.
- * Hdf5 Library-produced exceptions will still be generated regardless.
- */
- public BiosimulationsHdf5File(File parentDir, String filename, boolean allowExceptions){
- this();
- this.javaFileTarget = new File(parentDir, filename);
- this.allowExceptions = allowExceptions;
- }
-
- /**
- * Opens the Hdf5 file
- *
- * @throws HDF5LibraryException
- * @throws IOException
- */
- public void open() throws BiosimulationsHdf5Writer.BiosimulationsHdfWriterException, IOException {
- this.open(BiosimulationsHdf5File.needToCreateFile);
- }
-
- /**
- * Opens the Hdf5 file if and only the file does not already exist
- *
- * @param overwrite allow an overwrite of an existing file
- * @return the HDF5 id number of the file
- * @throws HDF5LibraryException
- * @throws IOException
- */
- public int open(boolean overwrite) throws BiosimulationsHdfWriterException, IOException {
- try {
- String path = this.javaFileTarget.getCanonicalPath();
- if (overwrite) this.fileId = H5.H5Fopen(path, H5F_ACC_RDWR, H5P_DEFAULT);
- else this.fileId = H5.H5Fcreate(path, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
- } catch (HDF5LibraryException e) {
- throw new BiosimulationsHdf5Writer.BiosimulationsHdfWriterException("HDF5 Library Exception", e);
- }
- if (this.fileId < 0) {
- String typeOfOperation = overwrite ? "opened [H5Fopen]" : "created [H5Fopened]";
- String message = "HDF5 File could not be " + typeOfOperation + "; Something is preventing this.";
- IOException e = new IOException(message); // investigate if Hdf5Exception would be more appropriate
- logger.warn("Hdf5 error occured", e);
- if (this.allowExceptions) throw e;
- }
-
- this.isOpen = true;
- return this.fileId;
- }
-
- /**
- * Add a group to the Hdf5 file based on a given path. If the group exists, the group_id will be returned.
- *
- * @param groupPath the unix-style path *relative from the Hdf5 root (known as "/")* to place the group at
- * while hdf5 does allow with relative pathing from other groups, VCell does not support that at this time.
- * @return the group ID
- */
- public int addGroup(String groupPath) throws HDF5Exception {
- if (!this.isOpen){
- if (this.allowExceptions) throw new HDF5Exception("Hdf5 file is not open.");
- return -1;
- }
-
- if (groupPath == null || groupPath.charAt(0) != '/'){
- if (this.allowExceptions) throw new HDF5Exception("groupPath is not formatted correctly, or null");
- return -1;
- }
-
- int groupId = H5.H5Gcreate(this.fileId, groupPath, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
-
- if (groupId < 0){
- String message = "HDF5 File could not be created [H5Fcreate]; HDF5 files can not be generated.";
- HDF5Exception e = new HDF5Exception(message); // investigate if Hdf5Exception would be more appropriate
- logger.warn("Hdf5 error occured", e);
- if (this.allowExceptions) throw e;
- }
-
- this.idToPathMap.put(groupId, groupPath);
- this.pathToIdMap.put(groupPath, groupId);
-
- return groupId;
- }
-
- /**
- * Get the path to the group referenced by the provided group ID
- *
- * @param id the identification number of the group
- * @return the hdf5 path connected to the id, or null if the group is not registered / does not exist.
- */
- public String getGroupPath(int id){
- if (this.idToPathMap.containsKey(id)) return this.idToPathMap.get(id);
- return null;
- }
-
- /**
- * Get the group ID of a group specfified by the provided hdf5 path
- *
- * @param path path where the the group is located in the HDF5 file
- * @return the group ID, or -1 if the group is not registered / does not exist.
- */
- public int getGroup(String path){
- if (this.pathToIdMap.containsKey(path)) return this.pathToIdMap.get(path);
- return -1;
- }
-
- /**
- * Checks if a group exists based on a provided group ID
- *
- * @param id the woulb be identification number of the group
- * @return whether or not the group could be found
- */
- public boolean containsGroup(int id){
- return this.idToPathMap.containsKey(id);
- }
-
- /**
- * Checks if a group exists based on a provided hdf5 path
- *
- * @param path path where the the group would be located in the HDF5 file
- * @return whether or not the group could be found
- */
- public boolean containsGroup(String path){
- return this.pathToIdMap.containsKey(path);
- }
-
- /**
- * [BROKEN, made private until var strings inplemented] Inserts a HDF5 attribute into a HDF5 group (including datasets) with a variable length string datum
- *
- * @param hdf5GroupID the id of the group to place the attribute in
- * @param attributeName the name of the attribute to insert
- * @param datum the attribute data / value to apply
- * @throws HDF5LibraryException if HDF5 encountered a problem
- */
- private void insertVarStringAttribute(int hdf5GroupID, String attributeName, String datum) throws HDF5LibraryException {
- String attr = datum;
-
- int datatypeId = this.createVLStringDatatype();
- int dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
- int attribute_id = H5.H5Acreate(hdf5GroupID, attributeName, datatypeId, dataspace_id, HDF5Constants.H5P_DEFAULT,HDF5Constants.H5P_DEFAULT);
- H5.H5Awrite(attribute_id, datatypeId, attr.getBytes());
- H5.H5Sclose(dataspace_id);
- H5.H5Aclose(attribute_id);
- H5.H5Tclose(datatypeId);
- }
-
- /**
- * Inserts a HDF5 attribute into a HDF5 group (including datasets) with a fixed length string datum
- *
- * @param hdf5GroupID the id of the group to place the attribute in
- * @param attributeName the name of the attribute to insert
- * @param datum the attribute data / value to apply
- * @throws HDF5LibraryException if HDF5 encountered a problem
- */
- public void insertFixedStringAttribute (int hdf5GroupID, String attributeName, String datum) throws HDF5LibraryException {
- String attr = datum + '\u0000';
-
- //https://support.hdfgroup.org/ftp/HDF5/examples/misc-examples/vlstra.c
- int h5attrcs1 = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
- H5.H5Tset_size (h5attrcs1, attr.length() /*HDF5Constants.H5T_VARIABLE*/);
- int dataspace_id = -1;
- //dataspace_id = H5.H5Screate_simple(dims.length, dims,null);
- dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
- int attribute_id = H5.H5Acreate(hdf5GroupID, attributeName, h5attrcs1, dataspace_id, HDF5Constants.H5P_DEFAULT,HDF5Constants.H5P_DEFAULT);
- H5.H5Awrite(attribute_id, h5attrcs1, attr.getBytes());
- H5.H5Sclose(dataspace_id);
- H5.H5Aclose(attribute_id);
- H5.H5Tclose(h5attrcs1);
- }
-
- /**
- * [BROKEN, made private until var strings inplemented] Inserts a HDF5 attribute into a HDF5 group (including datasets) with a list of fixed length strings of data
- *
- * @param hdf5GroupID the id of the group to place the attribute in
- * @param attributeName the name of the attribute to insert
- * @param data the attribute data / value list to apply
- * @throws HDF5Exception if HDF5 encountered a problem.
- */
- private void insertVarStringAttributes(int hdf5GroupID, String attributeName, List<String> data) throws HDF5Exception {
- String flatData = "";
- for (String datum : data){
- flatData += (datum + '\u0000');
- }
-
- int typeId = this.createVLStringDatatype();
- long dims[] = new long[]{data.size()};
- int dataspaceId = H5.H5Screate_simple(1, dims, null);
- //dataspaceId = H5.H5Screate(typeId);
- int attributeId = H5.H5Acreate(hdf5GroupID, attributeName, typeId, dataspaceId, HDF5Constants.H5P_DEFAULT,HDF5Constants.H5P_DEFAULT);
- H5.H5Awrite(attributeId, typeId, flatData.getBytes());
- H5.H5Sclose(dataspaceId);
- H5.H5Aclose(attributeId);
- H5.H5Tclose(typeId);
- }
-
- /**
- * Inserts a HDF5 attribute into a HDF5 group (including datasets) with a fixed length string of data
- *
- * @param hdf5GroupID the id of the group to place the attribute in
- * @param attributeName the name of the attribute to insert
- * @param data the attribute data / value list to apply
- * @throws HDF5Exception if HDF5 encountered a problem
- */
- public void insertFixedStringAttributes(int hdf5GroupID, String attributeName, List<String> data) throws HDF5Exception {
- String[] attr = data.toArray(new String[0]);
- long[] dims = new long[] {attr.length}; // Always an array of length == 1
- StringBuffer sb = new StringBuffer();
- int MAXSTRSIZE= -1;
-
- // Get the max length of all the data strings
- for(int i = 0; i < attr.length; i++) {
- int len = attr[i] == null ? -1 : attr[i].length();
-
- if (len == 0) len = 1; // Need to pad with null char for empty str; passing a 0 causes null exception
- if (attr[i] == null) attr[i] = ""; // Padding comes later, don't worry.
-
- MAXSTRSIZE = Math.max(MAXSTRSIZE, len);
- }
-
- // Append data to single string buffer, padding with null characters to create uniformity.
- for(int i = 0; i < attr.length; i++) {
- sb.append(attr[i]);
- for(int j = 0; j < (MAXSTRSIZE - attr[i].length()); j++) {
- sb.append('\u0000'); //null terminated string for hdf5 native code
- }
- }
-
- //https://support.hdfgroup.org/ftp/HDF5/examples/misc-examples/vlstra.c
- int h5attrcs1 = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
- H5.H5Tset_size (h5attrcs1, MAXSTRSIZE/*HDF5Constants.H5T_VARIABLE*/);
- int dataspace_id = -1;
- dataspace_id = H5.H5Screate_simple(dims.length, dims,null);
- int attribute_id = H5.H5Acreate(hdf5GroupID, attributeName, h5attrcs1, dataspace_id, HDF5Constants.H5P_DEFAULT,HDF5Constants.H5P_DEFAULT);
- H5.H5Awrite(attribute_id, h5attrcs1, sb.toString().getBytes());
- H5.H5Sclose(dataspace_id);
- H5.H5Aclose(attribute_id);
- H5.H5Tclose(h5attrcs1);
- }
-
- /**
- * Inserts a HDF5 attribute into a HDF5 group (including datasets) with a fixed length string of data
- *
- * @param hdf5GroupID the id of the group to place the attribute in
- * @param dataspaceName the name of the attribute to insert
- * @param data the attribute data / value list to apply
- * @throws HDF5Exception if HDF5 encountered a problem
- */
- public void insertNumericAttributes(int hdf5GroupID,String dataspaceName,double[] data) throws HDF5Exception {
- long[] dims = new long[] {data.length};
- //https://support.hdfgroup.org/ftp/HDF5/examples/misc-examples/vlstra.c
- int dataspace_id = H5.H5Screate_simple(dims.length, dims,null);
- int attribute_id = H5.H5Acreate(hdf5GroupID, dataspaceName, HDF5Constants.H5T_NATIVE_DOUBLE, dataspace_id, HDF5Constants.H5P_DEFAULT,HDF5Constants.H5P_DEFAULT);
- H5.H5Awrite (attribute_id, HDF5Constants.H5T_NATIVE_DOUBLE, this.byteArray(data));
- H5.H5Sclose(dataspace_id);
- H5.H5Aclose(attribute_id);
- }
-
- public Integer insertSedmlData(String canonicalGroupPath, Hdf5PreparedData preparedData) throws HDF5Exception {
- if (this.pathToIdMap.containsKey(canonicalGroupPath)){
- return this.insertSedmlData(this.pathToIdMap.get(canonicalGroupPath), preparedData);
- }
- if (this.allowExceptions) throw new HDF5Exception("Group path provided has not been created.");
- return H5I_INVALID_HID;
- }
-
- public int insertSedmlData(int groupId, Hdf5PreparedData preparedData) throws HDF5Exception {
- //String datasetPath = Paths.get(sedmlUri, datasetWrapper.datasetMetadata.sedmlId).toString();
- int hdf5DataspaceID = H5.H5Screate_simple(preparedData.dataDimensions.length, preparedData.dataDimensions, null);
- int hdf5DatasetID = H5.H5Dcreate(groupId, preparedData.sedmlId, HDF5Constants.H5T_NATIVE_DOUBLE, hdf5DataspaceID, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
- H5.H5Dwrite_double(hdf5DatasetID, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, (double[])preparedData.flattenedDataBuffer);
-
- this.datasetToDataspaceMap.put(hdf5DatasetID, hdf5DataspaceID); // Put the ids in here to ensure we close everything right.
- return hdf5DatasetID;
- }
-
- /**
- *
- *
- * @param datasetId
- * @return
- * @throws HDF5Exception
- */
- public int closeDataset(int datasetId) throws HDF5Exception {
- if (!this.datasetToDataspaceMap.containsKey(datasetId)){
- if (this.allowExceptions) throw new HDF5Exception("Dataset provided has not been created.");
- return H5I_INVALID_HID;
- }
-
- H5.H5Sclose(this.datasetToDataspaceMap.get(datasetId));
- this.datasetToDataspaceMap.remove(datasetId);
- return H5.H5Dclose(datasetId);
- }
-
- public int close() throws BiosimulationsHdfWriterException {
- if (!this.isOpen) return 0;
- //this.fileId = HDF5Constants.H5I_INVALID_HID;
- this.isOpen = false;
-
- try {
- // Don't forget to close datasets (and their dataspaces)
- for (int datasetId : this.datasetToDataspaceMap.keySet()){
- this.closeDataset(datasetId);
- }
-
- // Don't forget to close all groups
- for (int groupId : this.idToPathMap.keySet()){
- H5.H5Gclose(groupId);
- }
- this.idToPathMap.clear();
- this.pathToIdMap.clear();
-
- return this.fileId < 0 ? this.fileId : (this.fileId = H5.H5Fclose(this.fileId));
- } catch (HDF5Exception e) {
- throw new BiosimulationsHdfWriterException(e.getMessage(),e);
- }
- }
-
-
-
- //----------------------------------------------------------------------------------------------------------------
-
-
-
- private int createVLStringDatatype() {
- int datatypeId = H5I_INVALID_HID;
- try {
- datatypeId = H5.H5Tcopy(H5T_C_S1);
- int status = H5.H5Tset_size(datatypeId, H5T_VARIABLE);
- H5.H5Tset_strpad(datatypeId, H5T_STR_NULLTERM);
- H5.H5Tset_cset(datatypeId, H5T_CSET_UTF8);
-
- if (status < 0){
- throw new HDF5LibraryException("Size unable to be set");
- }
- } catch (HDF5LibraryException e){
- String message = "Unable to generate important datatype: Var string.";
- logger.error(message, e);
- throw new RuntimeException(message, e);
- }
-
- return datatypeId;
- // Remember to close the dataspace when you're done
- }
-
- private byte[] byteArray(double[] doubleArray) {
- int times = Double.SIZE / Byte.SIZE;
- byte[] bytes = new byte[doubleArray.length * times];
- for (int i = 0; i < doubleArray.length; i++) {
- getByteBuffer(bytes, i, times).putDouble(doubleArray[i]);
- }
- return bytes;
- }
-
- private static ByteBuffer getByteBuffer(byte[] bytes, int index, int times) {
- return ByteBuffer.wrap(bytes, index * times, times).order(ByteOrder.LITTLE_ENDIAN);
- }
-
-}
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5Writer.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5Writer.java
index 5f5eacee4c..a10c9cc8e7 100644
--- a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5Writer.java
+++ b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5Writer.java
@@ -1,17 +1,16 @@
package org.vcell.cli.run.hdf5;
-import cbit.vcell.resource.NativeLib;
-import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
-import org.apache.logging.log4j.Level;
+import cbit.vcell.export.server.JhdfUtils;
+import io.jhdf.HdfFile;
+import io.jhdf.WritableHdfFile;
+import io.jhdf.api.*;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jlibsedml.Report;
import org.jlibsedml.SedML;
-import org.vcell.cli.run.hdf5.Hdf5DataPreparer.Hdf5PreparedData;
import java.io.File;
import java.io.IOException;
-import java.io.Serial;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
@@ -21,14 +20,6 @@
*/
public class BiosimulationsHdf5Writer {
- public static class BiosimulationsHdfWriterException extends Exception {
- @Serial
- private static final long serialVersionUID = 1L;
- public BiosimulationsHdfWriterException(String message, Exception e) {
- super(message, e);
- }
- }
-
private final static Logger logger = LogManager.getLogger(BiosimulationsHdf5Writer.class);
private BiosimulationsHdf5Writer(){} // Static class = no instances allowed
@@ -38,20 +29,16 @@ private BiosimulationsHdf5Writer(){} // Static class = no instances allowed
*
* @param hdf5ExecutionResults the wrapper of hdf5 relevant data
* @param outDirForCurrentSedml the directory to place the report file into, NOT the report file itself.
- * @throws BiosimulationsHdfWriterException if there is an expection thrown from hdf5 while using the library.
- * @throws IOException if the computer encounteres an unexepcted system IO problem
+ * @throws BiosimulationsHdfWriterException if there is an exception thrown from hdf5 while using the library.
+ * @throws IOException if the computer encounters an unexpected system IO problem
*/
public static void writeHdf5(HDF5ExecutionResults hdf5ExecutionResults, File outDirForCurrentSedml) throws BiosimulationsHdfWriterException, IOException {
boolean didFail = false;
- BiosimulationsHdf5File masterHdf5;
-
- // Boot Hdf5 Library
- NativeLib.HDF5.load();
// Create and open the Hdf5 file
logger.info("Creating hdf5 file `reports.h5` in" + outDirForCurrentSedml.getAbsolutePath());
- masterHdf5 = new BiosimulationsHdf5File(outDirForCurrentSedml);
- masterHdf5.open();
+ File tempFile = new File(outDirForCurrentSedml, "reports.h5");
+ WritableHdfFile hdf5File = HdfFile.write(tempFile.toPath());
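+ // jhdf accumulates the writable file tree in memory and flushes it to disk when close() is called (see the finally block below)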
try {
// Sanity Check
@@ -62,80 +49,76 @@ public static void writeHdf5(HDF5ExecutionResults hdf5ExecutionResults, File out
if (uriSet.size() != resultsSet.size()) throw new RuntimeException("Sets are mismatched");
for (Report report : resultsSet){
// Process Parent Groups
- String path = "";
- for (String group : hdf5DataWrapper.reportToUriMap.get(report).split("/")){
- path += ("/" + group); // Start from first group, then travel down the path.
- if (masterHdf5.containsGroup(path)) continue;
- int groupId = masterHdf5.addGroup(path);
- String relativePath = path.substring(1);
- masterHdf5.insertFixedStringAttribute(groupId, "combineArchiveLocation", relativePath);
- masterHdf5.insertFixedStringAttribute(groupId, "uri", relativePath);
- // We leave them open because there may be other datasets that share (some of) the same parent groups
+ String groupPath = hdf5DataWrapper.reportToUriMap.get(report);
+ Node child = hdf5File.getChild(groupPath);
+ WritableGroup group = null;
+ if (child instanceof WritableGroup) {
+ group = (WritableGroup) child;
+ } else {
+ group = hdf5File.putGroup(groupPath);
+ JhdfUtils.putAttribute(group,"combineArchiveLocation", groupPath);
+ JhdfUtils.putAttribute(group,"uri", groupPath);
}
// Process the Dataset
for (Hdf5SedmlResults data : hdf5DataWrapper.reportToResultsMap.get(report)){
- Hdf5PreparedData preparedData;
+ final Hdf5PreparedData preparedData;
if (data.dataSource instanceof Hdf5SedmlResultsNonspatial)
preparedData = Hdf5DataPreparer.prepareNonspatialData(data, report, hdf5DataWrapper.trackSubSetsInReports);
else if (data.dataSource instanceof Hdf5SedmlResultsSpatial)
preparedData = Hdf5DataPreparer.prepareSpatialData(data, report, hdf5DataWrapper.trackSubSetsInReports);
else continue;
- int currentDatasetId = masterHdf5.insertSedmlData(path, preparedData);
+ // multiDimDataArray is a double[], double[][], double[][][], ... depending on the data dimensions
+ final String datasetName = preparedData.sedmlId;
+ final Object multiDimDataArray = JhdfUtils.createMultidimensionalArray(preparedData.dataDimensions, preparedData.flattenedDataBuffer);
+ WritiableDataset dataset = group.putDataset(datasetName, multiDimDataArray);
if (data.dataSource instanceof Hdf5SedmlResultsSpatial){
- masterHdf5.insertNumericAttributes(currentDatasetId, "times", Hdf5DataPreparer.getSpatialHdf5Attribute_Times(report, data));
- }
- masterHdf5.insertFixedStringAttribute(currentDatasetId, "_type", data.datasetMetadata._type);
- masterHdf5.insertFixedStringAttributes(currentDatasetId, "sedmlDataSetDataTypes", data.datasetMetadata.sedmlDataSetDataTypes);
- masterHdf5.insertFixedStringAttributes(currentDatasetId, "sedmlDataSetIds", data.datasetMetadata.sedmlDataSetIds);
-
- if (data.datasetMetadata.sedmlDataSetNames.contains(null)) {
- for (int i = 0; i < data.datasetMetadata.sedmlDataSetNames.size(); i++){
- String oldValue = data.datasetMetadata.sedmlDataSetNames.get(i);
- String newValue = oldValue == null ? "" : oldValue;
- data.datasetMetadata.sedmlDataSetNames.set(i, newValue);
- }
+ JhdfUtils.putAttribute(dataset,"times", Hdf5DataPreparer.getSpatialHdf5Attribute_Times(report, data));
}
- masterHdf5.insertFixedStringAttributes(currentDatasetId, "sedmlDataSetNames", data.datasetMetadata.sedmlDataSetNames);
- masterHdf5.insertFixedStringAttributes(currentDatasetId, "sedmlDataSetLabels", data.datasetMetadata.sedmlDataSetLabels);
- masterHdf5.insertFixedStringAttributes(currentDatasetId, "sedmlDataSetShapes", data.datasetMetadata.sedmlDataSetShapes);
- if (null != data.dataSource.scanParameterValues) {
+ JhdfUtils.putAttribute(dataset, "_type", data.datasetMetadata._type);
+ JhdfUtils.putAttribute(dataset, "sedmlDataSetDataTypes", data.datasetMetadata.sedmlDataSetDataTypes);
+ JhdfUtils.putAttribute(dataset, "sedmlDataSetIds", data.datasetMetadata.sedmlDataSetIds);
+ JhdfUtils.putAttribute(dataset, "sedmlDataSetNames", data.datasetMetadata.sedmlDataSetNames);
+ JhdfUtils.putAttribute(dataset, "sedmlDataSetLabels", data.datasetMetadata.sedmlDataSetLabels);
+ JhdfUtils.putAttribute(dataset, "sedmlDataSetShapes", data.datasetMetadata.sedmlDataSetShapes);
+ if (data.dataSource.scanParameterValues != null && data.dataSource.scanParameterValues.length > 0) {
List<String> scanValues = Arrays.stream(data.dataSource.scanParameterValues).map(Arrays::toString).toList();
- masterHdf5.insertFixedStringAttributes(currentDatasetId, "sedmlRepeatedTaskValues", scanValues);
+ JhdfUtils.putAttribute(dataset, "sedmlRepeatedTaskValues", scanValues);
}
- if (null != data.dataSource.scanParameterNames) masterHdf5.insertFixedStringAttributes(currentDatasetId, "sedmlRepeatedTaskParameterNames", Arrays.asList(data.dataSource.scanParameterNames));
- masterHdf5.insertFixedStringAttribute(currentDatasetId, "sedmlId", data.datasetMetadata.sedmlId);
- masterHdf5.insertFixedStringAttribute(currentDatasetId, "sedmlName", data.datasetMetadata.sedmlName);
- masterHdf5.insertFixedStringAttribute(currentDatasetId, "uri", path.substring(1) + "/" + data.datasetMetadata.sedmlId);
-
- masterHdf5.closeDataset(currentDatasetId);
+ if (data.dataSource.scanParameterNames != null && data.dataSource.scanParameterNames.length > 0) {
+ JhdfUtils.putAttribute(dataset, "sedmlRepeatedTaskParameterNames", Arrays.asList(data.dataSource.scanParameterNames));
+ }
+ JhdfUtils.putAttribute(dataset, "sedmlId", data.datasetMetadata.sedmlId);
+ if (data.datasetMetadata.sedmlName != null) {
+ JhdfUtils.putAttribute(dataset, "sedmlName", data.datasetMetadata.sedmlName);
+ } else {
+ JhdfUtils.putAttribute(dataset, "sedmlName", data.datasetMetadata.sedmlId);
+ }
+ JhdfUtils.putAttribute(dataset, "uri", groupPath + "/" + data.datasetMetadata.sedmlId);
}
}
}
- } catch (HDF5Exception e) { // Catch runtime exceptions
+ } catch (RuntimeException e) { // Catch runtime exceptions
didFail = true;
String message = "Error encountered while writing to BioSim-style HDF5.";
logger.error(message, e);
throw new BiosimulationsHdfWriterException(message, e);
} finally {
try {
- final Level errorLevel = didFail ? Level.ERROR : Level.INFO;
- final String message = didFail ?
- "HDF5 successfully closed, but there were errors preventing proper execution." :
- "HDF5 file successfully written to.";
- // Close up the file; lets deliver what we can write and flush out.
- masterHdf5.close();
- logger.log(errorLevel, message);
- } catch (BiosimulationsHdfWriterException e){
- masterHdf5.printErrorStack();
+ hdf5File.close();
+ if (didFail) {
+ logger.error("HDF5 successfully closed, but there were errors preventing proper execution.\"");
+ } else {
+ logger.info("HDF5 file successfully written to.");
+ }
+ } catch (Exception e){
String message = "HDF5 Library Exception encountered while writing out to HDF5 file; Check std::err for stack";
logger.error(message);
if (!didFail) throw new BiosimulationsHdfWriterException(message, e);
- } catch (Exception e) {
- e.printStackTrace();
}
}
}
+
}
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdfWriterException.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdfWriterException.java
new file mode 100644
index 0000000000..4f74377baa
--- /dev/null
+++ b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdfWriterException.java
@@ -0,0 +1,12 @@
+package org.vcell.cli.run.hdf5;
+
+import java.io.Serial;
+
+public class BiosimulationsHdfWriterException extends Exception {
+ @Serial
+ private static final long serialVersionUID = 1L;
+
+ public BiosimulationsHdfWriterException(String message, Exception e) {
+ super(message, e);
+ }
+}
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5DataPreparer.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5DataPreparer.java
index 8c4d363a97..ceff772072 100644
--- a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5DataPreparer.java
+++ b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5DataPreparer.java
@@ -13,12 +13,6 @@
public class Hdf5DataPreparer {
private final static Logger logger = LogManager.getLogger(Hdf5DataPreparer.class);
- public static class Hdf5PreparedData{
- public String sedmlId;
- public long[] dataDimensions;
- public double[] flattenedDataBuffer;
- }
-
/**
* Spatial Data has a special attribute called "times". This function extracts that value
*
@@ -142,4 +136,5 @@ public static Hdf5PreparedData prepareNonspatialData(Hdf5SedmlResults datasetWra
preparedData.flattenedDataBuffer = bigDataBuffer;
return preparedData;
}
+
}
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5PreparedData.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5PreparedData.java
new file mode 100644
index 0000000000..32f2603d4e
--- /dev/null
+++ b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5PreparedData.java
@@ -0,0 +1,7 @@
+package org.vcell.cli.run.hdf5;
+
+public class Hdf5PreparedData {
+ public String sedmlId;
+ public long[] dataDimensions;
+ public double[] flattenedDataBuffer;
+}
diff --git a/vcell-cli/src/test/java/org/vcell/cli/run/BSTSBasedOmexExecTest.java b/vcell-cli/src/test/java/org/vcell/cli/run/BSTSBasedOmexExecTest.java
index afbd756079..a02819c839 100644
--- a/vcell-cli/src/test/java/org/vcell/cli/run/BSTSBasedOmexExecTest.java
+++ b/vcell-cli/src/test/java/org/vcell/cli/run/BSTSBasedOmexExecTest.java
@@ -32,7 +32,6 @@ public class BSTSBasedOmexExecTest {
@BeforeAll
public static void setup() throws PythonStreamException, IOException {
PropertyLoader.setProperty(PropertyLoader.installationRoot, new File("..").getAbsolutePath());
- NativeLib.HDF5.load();
VCellUtilityHub.startup(VCellUtilityHub.MODE.CLI);
PropertyLoader.setProperty(PropertyLoader.cliWorkingDir, new File("../vcell-cli-utils").getAbsolutePath());
diff --git a/vcell-cli/src/test/java/org/vcell/cli/run/BiosimulationsExecTest.java b/vcell-cli/src/test/java/org/vcell/cli/run/BiosimulationsExecTest.java
index 1013352efb..6c4d694615 100644
--- a/vcell-cli/src/test/java/org/vcell/cli/run/BiosimulationsExecTest.java
+++ b/vcell-cli/src/test/java/org/vcell/cli/run/BiosimulationsExecTest.java
@@ -35,7 +35,6 @@ public class BiosimulationsExecTest {
@BeforeAll
public static void setup() throws PythonStreamException, IOException {
PropertyLoader.setProperty(PropertyLoader.installationRoot, new File("..").getAbsolutePath());
- NativeLib.HDF5.load();
VCellUtilityHub.startup(VCellUtilityHub.MODE.CLI);
PropertyLoader.setProperty(PropertyLoader.cliWorkingDir, new File("../vcell-cli-utils").getAbsolutePath());
diff --git a/vcell-cli/src/test/java/org/vcell/cli/run/BiosimulationsFiles.java b/vcell-cli/src/test/java/org/vcell/cli/run/BiosimulationsFiles.java
index 9594469851..cfddd80d85 100644
--- a/vcell-cli/src/test/java/org/vcell/cli/run/BiosimulationsFiles.java
+++ b/vcell-cli/src/test/java/org/vcell/cli/run/BiosimulationsFiles.java
@@ -15,7 +15,8 @@ public class BiosimulationsFiles {
private final static String[] allProjectIDs = new String[]{
"BIOMD0000000003_tellurium_A_minimal_cascade_model_for_th",
"BIOMD0000000006_tellurium_Modeling_the_cell_division_cyc",
- "BIOMD0000000036_tellurium_A_simple_model_of_circadian_rh"
+ "BIOMD0000000036_tellurium_A_simple_model_of_circadian_rh",
+ "BIOMD0000000300"
};
public static String[] getProjectIDs() {
diff --git a/vcell-cli/src/test/java/org/vcell/cli/run/ExecuteImplTest.java b/vcell-cli/src/test/java/org/vcell/cli/run/ExecuteImplTest.java
index a414dc2da6..c7bb4b4a65 100644
--- a/vcell-cli/src/test/java/org/vcell/cli/run/ExecuteImplTest.java
+++ b/vcell-cli/src/test/java/org/vcell/cli/run/ExecuteImplTest.java
@@ -21,7 +21,6 @@ public class ExecuteImplTest {
@Test
public void test_singleExecOmex() throws Exception {
PropertyLoader.setProperty(PropertyLoader.installationRoot, new File("..").getAbsolutePath());
- NativeLib.HDF5.load();
VCellUtilityHub.startup(VCellUtilityHub.MODE.CLI);
PropertyLoader.setProperty(PropertyLoader.cliWorkingDir, new File("../vcell-cli-utils").getAbsolutePath());
diff --git a/vcell-cli/src/test/java/org/vcell/cli/run/SpatialExecTest.java b/vcell-cli/src/test/java/org/vcell/cli/run/SpatialExecTest.java
index aaa316b0de..b6f913c95c 100644
--- a/vcell-cli/src/test/java/org/vcell/cli/run/SpatialExecTest.java
+++ b/vcell-cli/src/test/java/org/vcell/cli/run/SpatialExecTest.java
@@ -5,6 +5,7 @@
import cbit.vcell.resource.PropertyLoader;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.params.ParameterizedTest;
@@ -31,7 +32,6 @@ public class SpatialExecTest {
@BeforeAll
public static void setup() throws PythonStreamException, IOException {
PropertyLoader.setProperty(PropertyLoader.installationRoot, new File("..").getAbsolutePath());
- NativeLib.HDF5.load();
VCellUtilityHub.startup(VCellUtilityHub.MODE.CLI);
PropertyLoader.setProperty(PropertyLoader.cliWorkingDir, new File("../vcell-cli-utils").getAbsolutePath());
@@ -103,6 +103,13 @@ public static Collection<String> testCases() {
@ParameterizedTest
@MethodSource("testCases")
public void testSpatialOmex(String testCaseFilename) throws Exception {
+ String osName = System.getProperty("os.name").toLowerCase();
+ String osArch = System.getProperty("os.arch").toLowerCase();
+
+ // Skip test if running on macOS ARM64
+ Assumptions.assumeFalse(osName.contains("mac") && osArch.equals("aarch64"),
+ "Test skipped on macOS ARM64");
+
SpatialExecTest.FAULT knownFault = knownFaults().get(testCaseFilename);
try {
System.out.println("running test " + testCaseFilename);
diff --git a/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5WriterTest.java b/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5WriterTest.java
index 942f76aa1a..72cee36ada 100644
--- a/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5WriterTest.java
+++ b/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5WriterTest.java
@@ -1,6 +1,5 @@
package org.vcell.cli.run.hdf5;
-import cbit.vcell.resource.NativeLib;
import cbit.vcell.resource.PropertyLoader;
import com.google.common.io.Files;
import org.jlibsedml.DataSet;
@@ -15,8 +14,6 @@
import java.util.Arrays;
import java.util.List;
-import static org.vcell.cli.run.hdf5.BiosimulationsHdf5Writer.BiosimulationsHdfWriterException;
-
@Tag("Fast")
public class BiosimulationsHdf5WriterTest {
@@ -100,7 +97,6 @@ public static HDF5ExecutionResults createExampleData() {
public void test() throws BiosimulationsHdfWriterException, IOException {
PropertyLoader.setProperty(PropertyLoader.installationRoot, new File("..").getAbsolutePath());
VCellUtilityHub.startup(VCellUtilityHub.MODE.CLI);
- NativeLib.HDF5.load();
HDF5ExecutionResults exampleHdf5FileWrapper = BiosimulationsHdf5WriterTest.createExampleData();
File dir = Files.createTempDir();
BiosimulationsHdf5Writer.writeHdf5(exampleHdf5FileWrapper, dir);
diff --git a/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/Jhdf5UtilsTest.java b/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/Jhdf5UtilsTest.java
new file mode 100644
index 0000000000..357ef04e89
--- /dev/null
+++ b/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/Jhdf5UtilsTest.java
@@ -0,0 +1,114 @@
+package org.vcell.cli.run.hdf5;
+
+import cbit.vcell.export.server.JhdfUtils;
+import io.jhdf.HdfFile;
+import io.jhdf.WritableHdfFile;
+import io.jhdf.api.Dataset;
+import io.jhdf.api.WritiableDataset;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.List;
+
+@Tag("Fast")
+public class Jhdf5UtilsTest {
+
+ @Test
+ public void testCreateMultidimensionalArray_1D() {
+ long[] dims = new long[]{3};
+ double[] flattenedData = new double[]{1, 2, 3};
+ Object multidimensionalArray = JhdfUtils.createMultidimensionalArray(dims, flattenedData);
+ assert multidimensionalArray instanceof double[];
+ double[] data = (double[]) multidimensionalArray;
+ assert data.length == 3;
+ assert data[0] == 1;
+ assert data[1] == 2;
+ assert data[2] == 3;
+ }
+
+ @Test
+ public void testCreateMultidimensionalArray_2D() {
+ long[] dims = new long[]{2, 3};
+ double[] flattenedData = new double[]{1, 2, 3, 4, 5, 6};
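+ // row-major order: the first three values fill row 0, the last three fill row 1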
+ Object multidimensionalArray = JhdfUtils.createMultidimensionalArray(dims, flattenedData);
+ assert multidimensionalArray instanceof double[][];
+ double[][] data = (double[][]) multidimensionalArray;
+ assert data.length == 2;
+ assert data[0].length == 3;
+ assert data[0][0] == 1;
+ assert data[0][1] == 2;
+ assert data[0][2] == 3;
+ assert data[1][0] == 4;
+ assert data[1][1] == 5;
+ assert data[1][2] == 6;
+ }
+
+ @Test
+ public void testCreateMultidimensionalArray_3D() {
+ long[] dims = new long[]{2, 3, 4};
+ double[] flattenedData = new double[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24};
+ Object multidimensionalArray = JhdfUtils.createMultidimensionalArray(dims, flattenedData);
+ assert multidimensionalArray instanceof double[][][];
+ double[][][] data = (double[][][]) multidimensionalArray;
+ assert data.length == 2;
+ assert data[0].length == 3;
+ assert data[0][0].length == 4;
+ assert data[0][0][0] == 1;
+ assert data[0][0][1] == 2;
+ assert data[0][0][2] == 3;
+ assert data[0][0][3] == 4;
+ assert data[0][1][0] == 5;
+ assert data[0][1][1] == 6;
+ assert data[0][1][2] == 7;
+ assert data[0][1][3] == 8;
+ assert data[0][2][0] == 9;
+ assert data[0][2][1] == 10;
+ assert data[0][2][2] == 11;
+ assert data[0][2][3] == 12;
+ assert data[1][0][0] == 13;
+ assert data[1][0][1] == 14;
+ assert data[1][0][2] == 15;
+ assert data[1][0][3] == 16;
+ assert data[1][1][0] == 17;
+ assert data[1][1][1] == 18;
+ assert data[1][1][2] == 19;
+ assert data[1][1][3] == 20;
+ assert data[1][2][0] == 21;
+ assert data[1][2][1] == 22;
+ assert data[1][2][2] == 23;
+ assert data[1][2][3] == 24;
+ }
+
+ @Test
+ public void testStringAttributes() throws IOException {
+ Path tempFile = Files.createTempFile(this.getClass().getSimpleName(), ".hdf5");
+ WritableHdfFile writableHdfFile = HdfFile.write(tempFile);
+
+ // Write a dataset with string attributes
+ WritiableDataset writiableDataset = writableHdfFile.putDataset("dataset", new double[] {0.1, 0.2, 0.3});
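+ // jhdf stores the double[] as a 1D float64 dataset at path /dataset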
+ JhdfUtils.putAttribute(writiableDataset, "labels", Arrays.asList("vv", "xx", "abcdef"));
+ JhdfUtils.putAttribute(writiableDataset, "units", Arrays.asList("", "1", "mm2"));
+ writableHdfFile.close();
+
+ // Now read it back
+ try (HdfFile hdfFile = new HdfFile(tempFile)) {
+ Dataset dataset = hdfFile.getDatasetByPath("dataset");
+
+ // Expected :["vv", "xx", "abcdef"]
+ // Actual :["vv", "cdedf", ""]
+ Assertions.assertEquals(Arrays.asList("vv", "xx", "abcdef"), List.of((String[])dataset.getAttribute("labels").getData()));
+
+ // Expected :["", "1", "mm2"]
+ // Actual :["", "m2", ""]
+ Assertions.assertEquals(Arrays.asList("", "1", "mm2"), List.of((String[])dataset.getAttribute("units").getData()));
+ } finally {
+ tempFile.toFile().delete();
+ }
+
+ }
+}
diff --git a/vcell-cli/src/test/resources/BiosimulationsOmexWithResults/BIOMD0000000300.h5 b/vcell-cli/src/test/resources/BiosimulationsOmexWithResults/BIOMD0000000300.h5
new file mode 100644
index 0000000000..62d2162c22
Binary files /dev/null and b/vcell-cli/src/test/resources/BiosimulationsOmexWithResults/BIOMD0000000300.h5 differ
diff --git a/vcell-cli/src/test/resources/BiosimulationsOmexWithResults/BIOMD0000000300.spec.omex b/vcell-cli/src/test/resources/BiosimulationsOmexWithResults/BIOMD0000000300.spec.omex
new file mode 100644
index 0000000000..d2be7a3f11
Binary files /dev/null and b/vcell-cli/src/test/resources/BiosimulationsOmexWithResults/BIOMD0000000300.spec.omex differ
diff --git a/vcell-core/pom.xml b/vcell-core/pom.xml
index 3f9c1847ff..206d26d404 100644
--- a/vcell-core/pom.xml
+++ b/vcell-core/pom.xml
@@ -152,21 +152,34 @@
<groupId>org.openrdf.sesame</groupId>
<artifactId>sesame-rio-rdfxml</artifactId>
<version>${sesame-rio-rdfxml.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
<groupId>org.openrdf.sesame</groupId>
<artifactId>sesame-rio-n3</artifactId>
<version>${sesame-rio-n3.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ </exclusion>
+ </exclusions>
+
</dependency>
<dependency>
<groupId>org.jgrapht</groupId>
<artifactId>jgrapht-core</artifactId>
- <version>1.3.0</version>
+ <version>${jgrapht.version}</version>
</dependency>
<dependency>
<groupId>org.apache.thrift</groupId>
<artifactId>libthrift</artifactId>
- <version>0.18.0</version>
+ <version>${thrift.version}</version>
</dependency>
<dependency>
<groupId>gov.nist.math</groupId>
@@ -263,13 +276,13 @@
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
- <version>3.13.0</version>
+ <version>${commons-lang3.version}</version>
</dependency>
<dependency>
<groupId>org.openmicroscopy</groupId>
<artifactId>ome-common</artifactId>
- <version>6.0.22</version>
+ <version>${ome-common.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
@@ -379,7 +392,7 @@
<dependency>
<groupId>io.jhdf</groupId>
<artifactId>jhdf</artifactId>
- <version>${jhdf5.version}</version>
+ <version>${jhdf.version}</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
@@ -396,6 +409,11 @@
<artifactId>jhdf5_2.10</artifactId>
<version>${jhdf5-2-10.version}</version>
</dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>${commons-io.version}</version>
+ </dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-math3</artifactId>
diff --git a/vcell-core/src/main/java/cbit/vcell/export/server/JhdfUtils.java b/vcell-core/src/main/java/cbit/vcell/export/server/JhdfUtils.java
new file mode 100644
index 0000000000..8d4e80d0e6
--- /dev/null
+++ b/vcell-core/src/main/java/cbit/vcell/export/server/JhdfUtils.java
@@ -0,0 +1,172 @@
+package cbit.vcell.export.server;
+
+import io.jhdf.api.Attribute;
+import io.jhdf.api.WritableNode;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class JhdfUtils {
+ public static Object createMultidimensionalArray(long[] dataDimensions, double[] flattenedDataBuffer) {
+
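+ // flattenedDataBuffer is assumed to hold the values in row-major (C) order, with length equal to the product of dataDimensions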
+ int dim0 = (int) dataDimensions[0];
+ Integer dim1 = (dataDimensions.length > 1) ? (int) dataDimensions[1] : null;
+ Integer dim2 = (dataDimensions.length > 2) ? (int) dataDimensions[2] : null;
+ Integer dim3 = (dataDimensions.length > 3) ? (int) dataDimensions[3] : null;
+ Integer dim4 = (dataDimensions.length > 4) ? (int) dataDimensions[4] : null;
+ Integer dim5 = (dataDimensions.length > 5) ? (int) dataDimensions[5] : null;
+ Integer dim6 = (dataDimensions.length > 6) ? (int) dataDimensions[6] : null;
+ Integer dim7 = (dataDimensions.length > 7) ? (int) dataDimensions[7] : null;
+
+ // load data from flattenedDataBuffer into correctly dimensioned double array
+ switch (dataDimensions.length) {
+ case 1: {
+ double[] data = new double[dim0];
+ System.arraycopy(flattenedDataBuffer, 0, data, 0, flattenedDataBuffer.length);
+ return data;
+ }
+ case 2: {
+ double[][] data = new double[dim0][dim1];
+ int index2 = 0;
+ for (int i = 0; i < dim0; i++) {
+ for (int j = 0; j < dim1; j++) {
+ data[i][j] = flattenedDataBuffer[index2];
+ index2++;
+ }
+ }
+ return data;
+ }
+ case 3: {
+ double[][][] data = new double[dim0][dim1][dim2];
+ int index3 = 0;
+ for (int i = 0; i < dim0; i++) {
+ for (int j = 0; j < dim1; j++) {
+ for (int k = 0; k < dim2; k++) {
+ data[i][j][k] = flattenedDataBuffer[index3];
+ index3++;
+ }
+ }
+ }
+ return data;
+ }
+ case 4: {
+ double[][][][] data = new double[dim0][dim1][dim2][dim3];
+ int index4 = 0;
+ for (int i = 0; i < dim0; i++) {
+ for (int j = 0; j < dim1; j++) {
+ for (int k = 0; k < dim2; k++) {
+ for (int l = 0; l < dim3; l++) {
+ data[i][j][k][l] = flattenedDataBuffer[index4];
+ index4++;
+ }
+ }
+ }
+ }
+ return data;
+ }
+ case 5: {
+ double[][][][][] data = new double[dim0][dim1][dim2][dim3][dim4];
+ int index5 = 0;
+ for (int i = 0; i < dim0; i++) {
+ for (int j = 0; j < dim1; j++) {
+ for (int k = 0; k < dim2; k++) {
+ for (int l = 0; l < dim3; l++) {
+ for (int m = 0; m < dim4; m++) {
+ data[i][j][k][l][m] = flattenedDataBuffer[index5];
+ index5++;
+ }
+ }
+ }
+ }
+ }
+ return data;
+ }
+ case 6: {
+ double[][][][][][] data = new double[dim0][dim1][dim2][dim3][dim4][dim5];
+ int index6 = 0;
+ for (int i = 0; i < dim0; i++) {
+ for (int j = 0; j < dim1; j++) {
+ for (int k = 0; k < dim2; k++) {
+ for (int l = 0; l < dim3; l++) {
+ for (int m = 0; m < dim4; m++) {
+ for (int n = 0; n < dim5; n++) {
+ data[i][j][k][l][m][n] = flattenedDataBuffer[index6];
+ index6++;
+ }
+ }
+ }
+ }
+ }
+ }
+ return data;
+ }
+ case 7: {
+ double[][][][][][][] data = new double[dim0][dim1][dim2][dim3][dim4][dim5][dim6];
+ int index7 = 0;
+ for (int i = 0; i < dim0; i++) {
+ for (int j = 0; j < dim1; j++) {
+ for (int k = 0; k < dim2; k++) {
+ for (int l = 0; l < dim3; l++) {
+ for (int m = 0; m < dim4; m++) {
+ for (int n = 0; n < dim5; n++) {
+ for (int o = 0; o < dim6; o++) {
+ data[i][j][k][l][m][n][o] = flattenedDataBuffer[index7];
+ index7++;
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ return data;
+ }
+ case 8: {
+ double[][][][][][][][] data = new double[dim0][dim1][dim2][dim3][dim4][dim5][dim6][dim7];
+ int index8 = 0;
+ for (int i = 0; i < dim0; i++) {
+ for (int j = 0; j < dim1; j++) {
+ for (int k = 0; k < dim2; k++) {
+ for (int l = 0; l < dim3; l++) {
+ for (int m = 0; m < dim4; m++) {
+ for (int n = 0; n < dim5; n++) {
+ for (int o = 0; o < dim6; o++) {
+ for (int p = 0; p < dim7; p++) {
+ data[i][j][k][l][m][n][o][p] = flattenedDataBuffer[index8];
+ index8++;
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ return data;
+ }
+ default:
+ throw new IllegalArgumentException("Cannot create a dataset with more than 8 dimensions; got " + dataDimensions.length);
+ }
+ }
+
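+ // Writes a list of strings as an HDF5 string attribute; null entries become "" because a null Java String cannot be written.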
+ public static void putAttribute(WritableNode node, String name, List<String> values) {
+ if (values.contains(null)) {
+ //
+ // replace all null entries with empty strings
+ // e.g. for a list of ["a", null, "abc"], replace null with ["a", "" ,"abc"]
+ //
+ String[] paddedValues = values.stream().map(s -> s == null ? "" : s).toArray(String[]::new);
+ node.putAttribute(name, paddedValues);
+ } else {
+ node.putAttribute(name, values.toArray(new String[0]));
+ }
+ }
+
+ public static void putAttribute(WritableNode node, String name, String value) {
+ node.putAttribute(name, value);
+ }
+
+ public static void putAttribute(WritableNode node, String name, double[] value) {
+ node.putAttribute(name, value);
+ }
+}
diff --git a/vcell-core/src/main/java/cbit/vcell/model/Kinetics.java b/vcell-core/src/main/java/cbit/vcell/model/Kinetics.java
index b46a586edd..9198958da6 100644
--- a/vcell-core/src/main/java/cbit/vcell/model/Kinetics.java
+++ b/vcell-core/src/main/java/cbit/vcell/model/Kinetics.java
@@ -905,8 +905,7 @@ private final void cleanupParameters() throws ModelException, ExpressionExceptio
try {
exp.bindExpression(reactionStep);
}catch (ExpressionBindingException e){
- if (logger.isDebugEnabled()) logger.error("error binding expression '"+exp.infix()+"': "+e.getMessage(), e);
- else logger.error("error binding expression '"+exp.infix()+"': "+e.getMessage(), e);
+ logger.debug("Kinetics.cleanupParameters(): binding expression '"+exp.infix()+"': "+e.getMessage(), e);
}
}
}
@@ -1827,7 +1826,9 @@ public void propertyChange(PropertyChangeEvent event) {
refreshUnits();
cleanupParameters();
}
- }catch (Throwable e){
+ }catch (ExpressionBindingException e){
+ logger.debug(e.getMessage(), e);
+ }catch (Exception e){
logger.error(e.getMessage(), e);
}
}