From ab31a4e2caa003286503531aee4485f28f67d699 Mon Sep 17 00:00:00 2001
From: zhangjunfeng
Date: Sat, 11 May 2024 19:28:15 +0800
Subject: [PATCH] merge-from-upstream-2024

---
 .../modules/cassandra-ext/README.txt | 28 +
 .../modules/cassandra-ext/pom.xml | 72 +
 .../cassandra-ext/serializers/README.txt | 34 +
 .../serializers/licenses/apache-2.0.txt | 202 +++
 .../modules/cassandra-ext/serializers/pom.xml | 141 ++
 .../cassandra/serializer/KryoSerializer.java | 93 ++
 .../cassandra/serializer/package-info.java | 22 +
 .../ignite/tests/KryoSerializerTest.java | 68 +
 .../java/org/apache/ignite/tests/MyPojo.java | 97 ++
 .../apache/ignite/tests/SerializerSuite.java | 29 +
 .../modules/cassandra-ext/store/.toDelete | 0
 .../modules/cassandra-ext/store/README.txt | 32 +
 .../mc-10-big-Digest.crc32 | 1 +
 .../mc-10-big-TOC.txt | 8 +
 .../mc-9-big-Digest.crc32 | 1 +
 .../mc-9-big-TOC.txt | 8 +
 .../mc-13-big-Digest.crc32 | 1 +
 .../mc-13-big-TOC.txt | 8 +
 .../mc-14-big-Digest.crc32 | 1 +
 .../mc-14-big-TOC.txt | 8 +
 .../mc-37-big-Digest.crc32 | 1 +
 .../mc-37-big-TOC.txt | 8 +
 .../mc-38-big-Digest.crc32 | 1 +
 .../mc-38-big-TOC.txt | 8 +
 .../mc-39-big-Digest.crc32 | 1 +
 .../mc-39-big-TOC.txt | 8 +
 .../mc-17-big-Digest.crc32 | 1 +
 .../mc-17-big-TOC.txt | 8 +
 .../mc-18-big-Digest.crc32 | 1 +
 .../mc-18-big-TOC.txt | 8 +
 .../mc-19-big-Digest.crc32 | 1 +
 .../mc-19-big-TOC.txt | 8 +
 .../mc-17-big-Digest.crc32 | 1 +
 .../mc-17-big-TOC.txt | 8 +
 .../mc-18-big-Digest.crc32 | 1 +
 .../mc-18-big-TOC.txt | 8 +
 .../mc-19-big-Digest.crc32 | 1 +
 .../mc-19-big-TOC.txt | 8 +
 .../mc-20-big-Digest.crc32 | 1 +
 .../mc-20-big-TOC.txt | 8 +
 .../mc-17-big-Digest.crc32 | 1 +
 .../mc-17-big-TOC.txt | 8 +
 .../mc-18-big-Digest.crc32 | 1 +
 .../mc-18-big-TOC.txt | 8 +
 .../mc-19-big-Digest.crc32 | 1 +
 .../mc-19-big-TOC.txt | 8 +
 .../mc-1-big-Digest.crc32 | 1 +
 .../mc-1-big-TOC.txt | 8 +
 .../mc-5-big-Digest.crc32 | 1 +
 .../mc-5-big-TOC.txt | 8 +
 .../mc-25-big-Digest.crc32 | 1 +
 .../mc-25-big-TOC.txt | 8 +
 .../mc-26-big-Digest.crc32 | 1 +
 .../mc-26-big-TOC.txt | 8 +
 .../mc-27-big-Digest.crc32 | 1 +
 .../mc-27-big-TOC.txt | 8 +
 .../mc-5-big-Digest.crc32 | 1 +
 .../mc-5-big-TOC.txt | 8 +
 .../mc-5-big-Digest.crc32 | 1 +
 .../mc-5-big-TOC.txt | 8 +
 .../mc-10-big-Digest.crc32 | 1 +
 .../mc-10-big-TOC.txt | 8 +
 .../mc-9-big-Digest.crc32 | 1 +
 .../mc-9-big-TOC.txt | 8 +
 .../mc-37-big-Digest.crc32 | 1 +
 .../mc-37-big-TOC.txt | 8 +
 .../mc-33-big-Digest.crc32 | 1 +
 .../mc-33-big-TOC.txt | 8 +
 .../mc-5-big-Digest.crc32 | 1 +
 .../mc-5-big-TOC.txt | 8 +
 .../mc-5-big-Digest.crc32 | 1 +
 .../mc-5-big-TOC.txt | 8 +
 .../mc-5-big-Digest.crc32 | 1 +
 .../mc-5-big-TOC.txt | 8 +
 .../manifest.json | 1 +
 .../mc-1-big-Digest.crc32 | 1 +
 .../mc-1-big-TOC.txt | 8 +
 .../schema.cql | 20 +
 .../manifest.json | 1 +
 .../mc-1-big-Digest.crc32 | 1 +
 .../mc-1-big-TOC.txt | 8 +
 .../schema.cql | 20 +
 .../manifest.json | 1 +
 .../mc-1-big-Digest.crc32 | 1 +
 .../mc-1-big-TOC.txt | 8 +
 .../schema.cql | 20 +
 .../dropped-1595321483076-order/manifest.json | 1 +
 .../mc-1-big-Digest.crc32 | 1 +
 .../mc-1-big-TOC.txt | 8 +
 .../dropped-1595321483076-order/schema.cql | 23 +
 .../dropped-1595321498825-order/manifest.json | 1 +
 .../mc-1-big-Digest.crc32 | 1 +
 .../mc-1-big-TOC.txt | 8 +
 .../dropped-1595321498825-order/schema.cql | 23 +
 .../mc-1-big-Digest.crc32 | 1 +
 .../.persons_married_idx/mc-1-big-TOC.txt | 8 +
 .../manifest.json | 1 +
 .../mc-1-big-Digest.crc32 | 1 +
 .../mc-1-big-TOC.txt | 8 +
 .../dropped-1595321499562-persons/schema.cql | 33 +
 .../manifest.json | 1 +
 .../mc-1-big-Digest.crc32 | 1 +
.../mc-1-big-TOC.txt | 8 + .../schema.cql | 28 + .../manifest.json | 1 + .../mc-1-big-Digest.crc32 | 1 + .../mc-1-big-TOC.txt | 8 + .../schema.cql | 30 + .../mc-1-big-Digest.crc32 | 1 + .../.pojo_test3_married_idx/mc-1-big-TOC.txt | 8 + .../manifest.json | 1 + .../mc-1-big-Digest.crc32 | 1 + .../mc-1-big-TOC.txt | 8 + .../schema.cql | 33 + .../mc-1-big-Digest.crc32 | 1 + .../.pojo_test5_married_idx/mc-1-big-TOC.txt | 8 + .../manifest.json | 1 + .../mc-1-big-Digest.crc32 | 1 + .../mc-1-big-TOC.txt | 8 + .../schema.cql | 32 + .../mc-1-big-Digest.crc32 | 1 + .../.pojo_test6_married_idx/mc-1-big-TOC.txt | 8 + .../manifest.json | 1 + .../mc-1-big-Digest.crc32 | 1 + .../mc-1-big-TOC.txt | 8 + .../schema.cql | 32 + .../manifest.json | 1 + .../mc-1-big-Digest.crc32 | 1 + .../mc-1-big-TOC.txt | 8 + .../schema.cql | 20 + .../manifest.json | 1 + .../mc-1-big-Digest.crc32 | 1 + .../mc-1-big-TOC.txt | 8 + .../schema.cql | 20 + .../manifest.json | 1 + .../mc-1-big-Digest.crc32 | 1 + .../mc-1-big-TOC.txt | 8 + .../dropped-1595321482866-product/schema.cql | 23 + .../manifest.json | 1 + .../mc-1-big-Digest.crc32 | 1 + .../mc-1-big-TOC.txt | 8 + .../dropped-1595321498402-product/schema.cql | 23 + .../store/licenses/apache-2.0.txt | 202 +++ .../modules/cassandra-ext/store/pom.xml | 327 ++++ .../store/cassandra/CassandraCacheStore.java | 546 ++++++ .../cassandra/CassandraCacheStoreFactory.java | 201 +++ .../cassandra/common/CassandraHelper.java | 182 ++ .../common/PropertyMappingHelper.java | 239 +++ .../store/cassandra/common/RandomSleeper.java | 105 ++ .../store/cassandra/common/SystemHelper.java | 46 + .../store/cassandra/common/package-info.java | 22 + .../cassandra/datasource/Credentials.java | 39 + .../cassandra/datasource/DataSource.java | 658 ++++++++ .../datasource/PlainCredentials.java | 53 + .../cassandra/datasource/package-info.java | 22 + .../cache/store/cassandra/package-info.java | 22 + .../persistence/KeyPersistenceSettings.java | 249 +++ .../KeyValuePersistenceSettings.java | 531 ++++++ .../persistence/PersistenceController.java | 459 +++++ .../persistence/PersistenceSettings.java | 557 +++++++ .../persistence/PersistenceStrategy.java | 62 + .../cassandra/persistence/PojoField.java | 238 +++ .../persistence/PojoFieldAccessor.java | 161 ++ .../cassandra/persistence/PojoKeyField.java | 97 ++ .../cassandra/persistence/PojoValueField.java | 157 ++ .../persistence/ValuePersistenceSettings.java | 100 ++ .../cassandra/persistence/package-info.java | 22 + .../cassandra/serializer/JavaSerializer.java | 80 + .../cassandra/serializer/Serializer.java | 43 + .../cassandra/serializer/package-info.java | 22 + .../session/BatchExecutionAssistant.java | 102 ++ .../session/BatchLoaderAssistant.java | 47 + .../cassandra/session/CassandraSession.java | 69 + .../session/CassandraSessionImpl.java | 1030 ++++++++++++ .../cassandra/session/ExecutionAssistant.java | 84 + .../GenericBatchExecutionAssistant.java | 71 + .../session/LoadCacheCustomQueryWorker.java | 121 ++ .../session/WrappedPreparedStatement.java | 180 ++ .../cassandra/session/WrappedSession.java | 91 + .../store/cassandra/session/package-info.java | 22 + .../cassandra/session/pool/IdleSession.java | 73 + .../cassandra/session/pool/SessionPool.java | 174 ++ .../cassandra/session/pool/package-info.java | 22 + .../session/transaction/BaseMutation.java | 68 + .../session/transaction/DeleteMutation.java | 57 + .../session/transaction/Mutation.java | 63 + .../session/transaction/WriteMutation.java | 58 + .../session/transaction/package-info.java | 22 + 
.../store/cassandra/utils/DDLGenerator.java | 83 + .../store/cassandra/utils/package-info.java | 22 + .../store/src/test/bootstrap/aws/README.txt | 13 + .../aws/cassandra/cassandra-bootstrap.sh | 336 ++++ .../bootstrap/aws/cassandra/cassandra-env.sh | 287 ++++ .../aws/cassandra/cassandra-start.sh | 217 +++ .../aws/cassandra/cassandra-template.yaml | 888 ++++++++++ .../store/src/test/bootstrap/aws/common.sh | 1481 +++++++++++++++++ .../store/src/test/bootstrap/aws/env.sh | 113 ++ .../test/bootstrap/aws/ganglia/agent-start.sh | 75 + .../aws/ganglia/ganglia-bootstrap.sh | 417 +++++ .../bootstrap/aws/ignite/ignite-bootstrap.sh | 336 ++++ .../ignite-cassandra-server-template.xml | 181 ++ .../test/bootstrap/aws/ignite/ignite-env.sh | 29 + .../test/bootstrap/aws/ignite/ignite-start.sh | 266 +++ .../src/test/bootstrap/aws/logs-collector.sh | 173 ++ .../ignite-cassandra-client-template.xml | 183 ++ .../bootstrap/aws/tests/tests-bootstrap.sh | 317 ++++ .../test/bootstrap/aws/tests/tests-manager.sh | 458 +++++ .../test/bootstrap/aws/tests/tests-report.sh | 499 ++++++ .../ignite/tests/CassandraConfigTest.java | 141 ++ .../CassandraDirectPersistenceLoadTest.java | 108 ++ .../tests/CassandraDirectPersistenceTest.java | 767 +++++++++ .../ignite/tests/CassandraLocalServer.java | 60 + .../tests/CassandraSessionImplTest.java | 211 +++ .../apache/ignite/tests/DDLGeneratorTest.java | 60 + .../tests/DatasourceSerializationTest.java | 155 ++ .../tests/IgnitePersistentStoreLoadTest.java | 112 ++ .../IgnitePersistentStorePrimitiveTest.java | 143 ++ .../tests/IgnitePersistentStoreTest.java | 920 ++++++++++ .../LoadTestsCassandraArtifactsCreator.java | 107 ++ .../apache/ignite/tests/load/Generator.java | 27 + .../ignite/tests/load/IntGenerator.java | 33 + .../ignite/tests/load/LoadTestDriver.java | 238 +++ .../ignite/tests/load/LongGenerator.java | 28 + .../ignite/tests/load/PersonGenerator.java | 45 + .../ignite/tests/load/PersonIdGenerator.java | 31 + .../ignite/tests/load/StringGenerator.java | 28 + .../org/apache/ignite/tests/load/Worker.java | 432 +++++ .../tests/load/cassandra/BulkReadWorker.java | 63 + .../tests/load/cassandra/BulkWriteWorker.java | 52 + .../tests/load/cassandra/ReadWorker.java | 51 + .../tests/load/cassandra/WriteWorker.java | 51 + .../tests/load/cassandra/package-info.java | 22 + .../tests/load/ignite/BulkReadWorker.java | 52 + .../tests/load/ignite/BulkWriteWorker.java | 52 + .../ignite/tests/load/ignite/ReadWorker.java | 51 + .../ignite/tests/load/ignite/WriteWorker.java | 51 + .../tests/load/ignite/package-info.java | 22 + .../ignite/tests/load/package-info.java | 22 + .../org/apache/ignite/tests/package-info.java | 22 + .../org/apache/ignite/tests/pojos/Person.java | 261 +++ .../apache/ignite/tests/pojos/PersonId.java | 103 ++ .../apache/ignite/tests/pojos/Product.java | 116 ++ .../ignite/tests/pojos/ProductOrder.java | 142 ++ .../ignite/tests/pojos/SimplePerson.java | 184 ++ .../ignite/tests/pojos/SimplePersonId.java | 87 + .../ignite/tests/pojos/package-info.java | 22 + .../ignite/tests/utils/CacheStoreHelper.java | 78 + .../utils/CassandraAdminCredentials.java | 38 + .../ignite/tests/utils/CassandraHelper.java | 366 ++++ .../tests/utils/CassandraLifeCycleBean.java | 150 ++ .../utils/CassandraRegularCredentials.java | 38 + .../ignite/tests/utils/TestCacheSession.java | 94 ++ .../ignite/tests/utils/TestTransaction.java | 157 ++ .../ignite/tests/utils/TestsHelper.java | 752 +++++++++ .../ignite/tests/utils/package-info.java | 22 + .../store/IgniteCassandraStoreTestSuite.java | 82 + 
.../cassandra/store/package-info.java | 21 + .../src/test/resources/log4j2.properties | 178 ++ .../tests/cassandra/connection-settings.xml | 52 + .../tests/cassandra/connection.properties | 17 + .../tests/cassandra/credentials.properties | 22 + .../tests/cassandra/embedded-cassandra.yaml | 120 ++ .../tests/cassandra/keyspaces.properties | 17 + .../tests/persistence/blob/ignite-config.xml | 96 ++ .../blob/persistence-settings-1.xml | 21 + .../blob/persistence-settings-2.xml | 21 + .../blob/persistence-settings-3.xml | 29 + .../loadall_blob/ignite-config.xml | 90 + .../loadall_blob/persistence-settings.xml | 29 + .../tests/persistence/pojo/ignite-config.xml | 212 +++ .../ignite/tests/persistence/pojo/order.xml | 21 + .../pojo/persistence-settings-1.xml | 21 + .../pojo/persistence-settings-2.xml | 21 + .../pojo/persistence-settings-3.xml | 175 ++ .../pojo/persistence-settings-4.xml | 175 ++ .../pojo/persistence-settings-5.xml | 21 + .../pojo/persistence-settings-6.xml | 174 ++ .../ignite/tests/persistence/pojo/product.xml | 21 + .../persistence/primitive/ignite-config.xml | 96 ++ .../primitive/ignite-remote-client-config.xml | 99 ++ .../primitive/ignite-remote-server-config.xml | 110 ++ .../primitive/persistence-settings-1.xml | 21 + .../primitive/persistence-settings-2.xml | 21 + .../store/src/test/resources/tests.properties | 65 + .../src/test/scripts/cassandra-load-tests.bat | 41 + .../src/test/scripts/cassandra-load-tests.sh | 39 + .../src/test/scripts/ignite-load-tests.bat | 41 + .../src/test/scripts/ignite-load-tests.sh | 39 + .../store/src/test/scripts/jvm-opt.sh | 21 + .../store/src/test/scripts/jvm-opts.bat | 24 + .../scripts/recreate-cassandra-artifacts.bat | 41 + .../scripts/recreate-cassandra-artifacts.sh | 39 + .../modules/elasticsearch-relay/pom.xml | 4 +- .../modules/ml-ext/examples/pom.xml | 14 +- .../ml/structures/LabeledVectorSet.java | 1 - .../ignite/ml/tree/DecisionTreeTrainer.java | 4 +- .../util/SimpleStepFunctionCompressor.java | 8 +- .../RandomForestClassifierTrainer.java | 4 +- .../org.apache.ignite.plugin.PluginProvider | 1 + .../ignite/ml/math/d3-dataset-template.html | 112 ++ .../ignite/ml/math/d3-matrix-template.html | 121 ++ .../ignite/ml/math/d3-vector-template.html | 106 ++ .../apache/ignite/ml/IgniteMLTestSuite.java | 75 + .../java/org/apache/ignite/ml/TestUtils.java | 481 ++++++ .../ml/clustering/ClusteringTestSuite.java | 49 + .../ignite/ml/clustering/KMeansModelTest.java | 60 + .../ml/clustering/KMeansTrainerTest.java | 115 ++ .../gmm/CovarianceMatricesAggregatorTest.java | 161 ++ .../ml/clustering/gmm/GmmModelTest.java | 88 + .../clustering/gmm/GmmPartitionDataTest.java | 90 + .../gmm/GmmTrainerIntegrationTest.java | 94 ++ .../ml/clustering/gmm/GmmTrainerTest.java | 106 ++ .../MeanWithClusterProbAggregatorTest.java | 131 ++ .../NewComponentStatisticsAggregatorTest.java | 147 ++ .../ignite/ml/common/CollectionsTest.java | 127 ++ .../ignite/ml/common/CommonTestSuite.java | 35 + .../ignite/ml/common/ExternalizeTest.java | 101 ++ .../ignite/ml/common/KeepBinaryTest.java | 109 ++ .../ignite/ml/common/LocalModelsTest.java | 188 +++ .../apache/ignite/ml/common/ModelTest.java | 45 + .../apache/ignite/ml/common/TrainerTest.java | 1189 +++++++++++++ .../ml/composition/CompositionTestSuite.java | 45 + .../ignite/ml/composition/StackingTest.java | 168 ++ .../ml/composition/bagging/BaggingTest.java | 239 +++ .../composition/boosting/GDBTrainerTest.java | 210 +++ .../convergence/ConvergenceCheckerTest.java | 80 + .../MeanAbsValueConvergenceCheckerTest.java | 84 + 
.../MedianOfMedianConvergenceCheckerTest.java | 66 + .../MeanValuePredictionsAggregatorTest.java | 34 + .../OnMajorityPredictionsAggregatorTest.java | 36 + .../WeightedPredictionsAggregatorTest.java | 65 + .../ignite/ml/dataset/DatasetTestSuite.java | 55 + .../dataset/feature/ObjectHistogramTest.java | 215 +++ .../feature/extractor/VectorizerTest.java | 92 + .../cache/CacheBasedDatasetBuilderTest.java | 150 ++ .../impl/cache/CacheBasedDatasetTest.java | 363 ++++ .../impl/cache/util/ComputeUtilsTest.java | 323 ++++ .../DatasetAffinityFunctionWrapperTest.java | 110 ++ ...WithConcurrentModificationCheckerTest.java | 91 + .../cache/util/PartitionDataStorageTest.java | 49 + .../impl/local/LocalDatasetBuilderTest.java | 137 ++ .../dataset/primitive/DatasetWrapperTest.java | 138 ++ .../dataset/primitive/SimpleDatasetTest.java | 81 + .../primitive/SimpleLabeledDatasetTest.java | 97 ++ .../ml/environment/EnvironmentTestSuite.java | 37 + .../LearningEnvironmentBuilderTest.java | 91 + .../environment/LearningEnvironmentTest.java | 179 ++ .../ignite/ml/environment/PromiseTest.java | 79 + .../deploy/DeployingContextImplTest.java | 150 ++ .../environment/deploy/MLDeployingTest.java | 245 +++ .../inference/IgniteModelStorageUtilTest.java | 81 + .../ml/inference/InferenceTestSuite.java | 41 + .../IgniteDistributedModelBuilderTest.java | 67 + .../builder/ModelBuilderTestUtil.java | 53 + .../builder/SingleModelBuilderTest.java | 42 + .../builder/ThreadedModelBuilderTest.java | 44 + .../model/AbstractModelStorageTest.java | 142 ++ .../model/DefaultModelStorageTest.java | 145 ++ .../util/DirectorySerializerTest.java | 126 ++ .../ignite/ml/knn/KNNRegressionTest.java | 150 ++ .../apache/ignite/ml/knn/KNNTestSuite.java | 39 + .../ignite/ml/knn/LabeledDatasetHelper.java | 56 + .../ml/knn/utils/ArraySpatialIndexTest.java | 32 + .../knn/utils/BallTreeSpatialIndexTest.java | 32 + .../ml/knn/utils/KDTreeSpatialIndexTest.java | 32 + .../ignite/ml/math/ExternalizableTest.java | 67 + .../ignite/ml/math/ExternalizeTest.java | 68 + .../ignite/ml/math/MathImplMainTestSuite.java | 36 + .../org/apache/ignite/ml/math/TracerTest.java | 248 +++ .../ignite/ml/math/VectorUtilsTest.java | 69 + .../distances/BrayCurtisDistanceTest.java | 110 ++ .../math/distances/CosineSimilarityTest.java | 41 + .../ml/math/distances/DistanceTest.java | 230 +++ .../ml/math/distances/DistancesTestSuite.java | 35 + .../ml/math/distances/JaccardIndexTest.java | 43 + .../distances/JensenShannonDistanceTest.java | 113 ++ .../WeightedMinkowskiDistanceTest.java | 121 ++ .../ml/math/isolve/lsqr/LSQROnHeapTest.java | 154 ++ .../ml/math/primitives/MathTestConstants.java | 88 + .../matrix/DenseMatrixConstructorTest.java | 71 + .../matrix/LUDecompositionTest.java | 254 +++ .../matrix/MatrixArrayStorageTest.java | 57 + .../matrix/MatrixAttributeTest.java | 135 ++ .../matrix/MatrixBaseStorageTest.java | 88 + .../MatrixStorageImplementationTest.java | 77 + .../matrix/MatrixViewConstructorTest.java | 112 ++ .../matrix/SparseMatrixConstructorTest.java | 53 + .../DelegatingVectorConstructorTest.java | 65 + .../vector/MatrixVectorViewTest.java | 218 +++ .../vector/SparseVectorConstructorTest.java | 57 + .../vector/VectorBaseStorageTest.java | 68 + .../vector/VectorImplementationsTest.java | 850 ++++++++++ .../vector/VectorNormCasesTest.java | 114 ++ .../primitives/vector/VectorNormTest.java | 239 +++ .../primitives/vector/VectorToMatrixTest.java | 261 +++ .../primitives/vector/VectorViewTest.java | 163 ++ .../storage/SparseVectorStorageTest.java | 47 + 
.../ml/math/stat/DistributionMixtureTest.java | 84 + .../MultivariateGaussianDistributionTest.java | 40 + .../ignite/ml/math/stat/StatsTestSuite.java | 32 + .../ml/multiclass/MultiClassTestSuite.java | 32 + .../ml/multiclass/OneVsRestTrainerTest.java | 124 ++ .../ml/naivebayes/NaiveBayesTestSuite.java | 48 + .../compound/CompoundNaiveBayesModelTest.java | 94 ++ .../compound/CompoundNaiveBayesTest.java | 64 + .../CompoundNaiveBayesTrainerTest.java | 107 ++ .../ignite/ml/naivebayes/compound/Data.java | 87 + .../discrete/DiscreteNaiveBayesModelTest.java | 46 + .../discrete/DiscreteNaiveBayesTest.java | 70 + .../DiscreteNaiveBayesTrainerTest.java | 183 ++ .../gaussian/GaussianNaiveBayesModelTest.java | 49 + .../gaussian/GaussianNaiveBayesTest.java | 85 + .../GaussianNaiveBayesTrainerTest.java | 183 ++ .../ignite/ml/nn/LossFunctionsTest.java | 92 + .../ignite/ml/nn/MLPConstInitializer.java | 67 + .../java/org/apache/ignite/ml/nn/MLPTest.java | 265 +++ .../org/apache/ignite/ml/nn/MLPTestSuite.java | 34 + .../ml/nn/MLPTrainerIntegrationTest.java | 158 ++ .../apache/ignite/ml/nn/MLPTrainerTest.java | 260 +++ .../MLPTrainerMnistIntegrationTest.java | 114 ++ .../nn/performance/MLPTrainerMnistTest.java | 86 + .../ml/nn/performance/MnistMLPTestUtil.java | 96 ++ .../ignite/ml/pipeline/PipelineMdlTest.java | 79 + .../ignite/ml/pipeline/PipelineTest.java | 85 + .../ignite/ml/pipeline/PipelineTestSuite.java | 33 + .../preprocessing/PreprocessingTestSuite.java | 69 + .../BinarizationPreprocessorTest.java | 55 + .../binarization/BinarizationTrainerTest.java | 93 ++ .../encoding/EncoderTrainerTest.java | 285 ++++ .../FrequencyEncoderPreprocessorTest.java | 74 + .../LabelEncoderPreprocessorTest.java | 62 + .../OneHotEncoderPreprocessorTest.java | 207 +++ .../StringEncoderPreprocessorTest.java | 74 + .../TargetEncoderPreprocessorTest.java | 104 ++ .../imputing/ImputerPreprocessorTest.java | 56 + .../imputing/ImputerTrainerTest.java | 183 ++ .../MaxAbsScalerPreprocessorTest.java | 58 + .../MaxAbsScalerTrainerTest.java | 61 + .../MinMaxScalerPreprocessorTest.java | 75 + .../MinMaxScalerTrainerTest.java | 62 + .../NormalizationPreprocessorTest.java | 56 + .../NormalizationTrainerTest.java | 68 + .../StandardScalerPreprocessorTest.java | 62 + .../StandardScalerTrainerTest.java | 93 ++ .../RecommendationTestSuite.java | 34 + .../RecommendationTrainerSQLTest.java | 126 ++ .../RecommendationTrainerTest.java | 211 +++ .../ml/regressions/RegressionsTestSuite.java | 41 + .../LinearRegressionLSQRTrainerTest.java | 152 ++ .../linear/LinearRegressionModelTest.java | 72 + .../LinearRegressionSGDTrainerTest.java | 130 ++ .../logistic/LogisticRegressionModelTest.java | 95 ++ .../LogisticRegressionSGDTrainerTest.java | 106 ++ .../ml/selection/SelectionTestSuite.java | 56 + .../ml/selection/cv/CrossValidationTest.java | 344 ++++ .../paramgrid/ParameterSetGeneratorTest.java | 57 + .../scoring/TestLabelPairCursor.java | 91 + .../cursor/CacheBasedLabelPairCursorTest.java | 79 + .../cursor/LocalLabelPairCursorTest.java | 58 + ...ionPointwiseMetricStatsAggregatorTest.java | 104 ++ .../RegressionMetricStatsAggregatorTest.java | 96 ++ ...ryClassificationEvaluationContextTest.java | 149 ++ .../BinaryClassificationMetricsTest.java | 87 + .../regression/RegressionMetricsTest.java | 65 + .../split/TrainTestDatasetSplitterTest.java | 46 + .../split/mapper/SHA256UniformMapperTest.java | 70 + .../ml/structures/DatasetStructureTest.java | 50 + .../ml/structures/LabeledVectorSetTest.java | 291 ++++ .../ml/structures/StructuresTestSuite.java | 
32 + .../ignite/ml/svm/SVMBinaryTrainerTest.java | 93 ++ .../apache/ignite/ml/svm/SVMModelTest.java | 118 ++ .../apache/ignite/ml/svm/SVMTestSuite.java | 33 + ...eClassificationTrainerIntegrationTest.java | 93 ++ ...DecisionTreeClassificationTrainerTest.java | 103 ++ ...nTreeRegressionTrainerIntegrationTest.java | 102 ++ .../DecisionTreeRegressionTrainerTest.java | 94 ++ .../ignite/ml/tree/DecisionTreeTestSuite.java | 50 + .../ml/tree/data/DecisionTreeDataTest.java | 76 + .../ml/tree/data/TreeDataIndexTest.java | 159 ++ .../GiniImpurityMeasureCalculatorTest.java | 120 ++ .../gini/GiniImpurityMeasureTest.java | 129 ++ .../mse/MSEImpurityMeasureCalculatorTest.java | 76 + .../impurity/mse/MSEImpurityMeasureTest.java | 109 ++ .../SimpleStepFunctionCompressorTest.java | 108 ++ .../tree/impurity/util/StepFunctionTest.java | 71 + .../impurity/util/TestImpurityMeasure.java | 88 + .../DecisionTreeMNISTIntegrationTest.java | 95 ++ .../performance/DecisionTreeMNISTTest.java | 64 + .../RandomForestClassifierTrainerTest.java | 100 ++ .../RandomForestIntegrationTest.java | 92 + .../RandomForestRegressionTrainerTest.java | 95 ++ .../tree/randomforest/RandomForestTest.java | 78 + .../RandomForestTreeTestSuite.java | 42 + .../tree/randomforest/data/TreeNodeTest.java | 78 + .../impurity/GiniFeatureHistogramTest.java | 257 +++ .../data/impurity/ImpurityHistogramTest.java | 90 + .../data/impurity/MSEHistogramTest.java | 139 ++ ...malDistributionStatisticsComputerTest.java | 131 ++ .../apache/ignite/ml/util/LRUCacheTest.java | 68 + .../apache/ignite/ml/util/UtilTestSuite.java | 35 + .../DataStreamGeneratorFillCacheTest.java | 123 ++ .../generators/DataStreamGeneratorTest.java | 208 +++ .../DataStreamGeneratorTestSuite.java | 50 + .../scalar/DiscreteRandomProducerTest.java | 102 ++ .../scalar/GaussRandomProducerTest.java | 66 + .../primitives/scalar/RandomProducerTest.java | 79 + .../scalar/UniformRandomProducerTest.java | 67 + .../vector/ParametricVectorGeneratorTest.java | 50 + .../vector/VectorGeneratorPrimitivesTest.java | 110 ++ .../vector/VectorGeneratorTest.java | 194 +++ .../vector/VectorGeneratorsFamilyTest.java | 116 ++ .../ml/util/genetic/GeneticAlgorithmTest.java | 100 ++ .../ml/util/genetic/PopulationTest.java | 126 ++ .../ml/src/test/resources/datasets/README.md | 5 + .../datasets/knn/cleared_machines.txt | 209 +++ .../src/test/resources/datasets/knn/empty.txt | 0 .../src/test/resources/datasets/knn/iris.txt | 150 ++ .../resources/datasets/knn/iris_incorrect.txt | 150 ++ .../resources/datasets/knn/machine.data.txt | 209 +++ .../resources/datasets/knn/missed_data.txt | 3 + .../test/resources/datasets/knn/no_data.txt | 6 + .../resources/datasets/regression/README.md | 98 ++ .../resources/datasets/regression/boston.csv | 506 ++++++ .../datasets/regression/diabetes.csv | 442 +++++ .../trees/columntrees.manualrun.properties | 21 + .../modules/mongodb-relay/pom.xml | 13 +- 526 files changed, 51137 insertions(+), 30 deletions(-) create mode 100644 ignite-extensions/modules/cassandra-ext/README.txt create mode 100644 ignite-extensions/modules/cassandra-ext/pom.xml create mode 100644 ignite-extensions/modules/cassandra-ext/serializers/README.txt create mode 100644 ignite-extensions/modules/cassandra-ext/serializers/licenses/apache-2.0.txt create mode 100644 ignite-extensions/modules/cassandra-ext/serializers/pom.xml create mode 100644 ignite-extensions/modules/cassandra-ext/serializers/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/KryoSerializer.java create mode 100644 
ignite-extensions/modules/cassandra-ext/serializers/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/serializers/src/test/java/org/apache/ignite/tests/KryoSerializerTest.java create mode 100644 ignite-extensions/modules/cassandra-ext/serializers/src/test/java/org/apache/ignite/tests/MyPojo.java create mode 100644 ignite-extensions/modules/cassandra-ext/serializers/src/test/java/org/apache/ignite/tests/SerializerSuite.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/.toDelete create mode 100644 ignite-extensions/modules/cassandra-ext/store/README.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-10-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-10-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-9-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-9-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-13-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-13-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-14-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-14-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-37-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-37-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-38-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-38-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-39-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-39-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-17-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-17-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-18-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-18-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-19-big-Digest.crc32 create mode 100644 
ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-19-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-17-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-17-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-18-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-18-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-19-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-19-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-20-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-20-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-17-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-17-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-18-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-18-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-19-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-19-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_auth/roles-5bc52802de2535edaeab188eecebb090/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_auth/roles-5bc52802de2535edaeab188eecebb090/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/aggregates-924c55872e3a345bb10c12f37c1ba895/mc-5-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/aggregates-924c55872e3a345bb10c12f37c1ba895/mc-5-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-25-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-25-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-26-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-26-big-TOC.txt create mode 100644 
ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-27-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-27-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/dropped_columns-5e7583b5f3f43af19a39b7e1d6f5f11f/mc-5-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/dropped_columns-5e7583b5f3f43af19a39b7e1d6f5f11f/mc-5-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/functions-96489b7980be3e14a70166a0b9159450/mc-5-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/functions-96489b7980be3e14a70166a0b9159450/mc-5-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-10-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-10-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-9-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-9-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/keyspaces-abac5682dea631c5b535b3d6cffd0fb6/mc-37-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/keyspaces-abac5682dea631c5b535b3d6cffd0fb6/mc-37-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/tables-afddfb9dbc1e30688056eed6c302ba09/mc-33-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/tables-afddfb9dbc1e30688056eed6c302ba09/mc-33-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/triggers-4df70b666b05325195a132b54005fd48/mc-5-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/triggers-4df70b666b05325195a132b54005fd48/mc-5-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/types-5a8b1ca866023f77a0459273d308917a/mc-5-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/types-5a8b1ca866023f77a0459273d308917a/mc-5-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/views-9786ac1cdd583201a7cdad556410c985/mc-5-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/views-9786ac1cdd583201a7cdad556410c985/mc-5-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/mc-1-big-TOC.txt 
create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/.persons_married_idx/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/.persons_married_idx/mc-1-big-TOC.txt create mode 100644 
ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/.pojo_test3_married_idx/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/.pojo_test3_married_idx/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/.pojo_test5_married_idx/mc-1-big-Digest.crc32 create mode 
100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/.pojo_test5_married_idx/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/.pojo_test6_married_idx/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/.pojo_test6_married_idx/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/mc-1-big-TOC.txt create mode 100644 
ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/manifest.json create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/mc-1-big-Digest.crc32 create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/mc-1-big-TOC.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/schema.cql create mode 100644 ignite-extensions/modules/cassandra-ext/store/licenses/apache-2.0.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/pom.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/CassandraCacheStore.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/CassandraCacheStoreFactory.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/CassandraHelper.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/PropertyMappingHelper.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/RandomSleeper.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/SystemHelper.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/Credentials.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/DataSource.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/PlainCredentials.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/package-info.java create mode 100644 
ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/KeyPersistenceSettings.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/KeyValuePersistenceSettings.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PersistenceController.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PersistenceSettings.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PersistenceStrategy.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoField.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoFieldAccessor.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoKeyField.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoValueField.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/ValuePersistenceSettings.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/JavaSerializer.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/Serializer.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/BatchExecutionAssistant.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/BatchLoaderAssistant.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/CassandraSession.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/CassandraSessionImpl.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/ExecutionAssistant.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/GenericBatchExecutionAssistant.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/LoadCacheCustomQueryWorker.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/WrappedPreparedStatement.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/WrappedSession.java create mode 100644 
ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/pool/IdleSession.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/pool/SessionPool.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/pool/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/BaseMutation.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/DeleteMutation.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/Mutation.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/WriteMutation.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/utils/DDLGenerator.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/utils/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/README.txt create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-bootstrap.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-env.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-start.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-template.yaml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/common.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/env.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ganglia/agent-start.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ganglia/ganglia-bootstrap.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-bootstrap.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-cassandra-server-template.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-env.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-start.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/logs-collector.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/ignite-cassandra-client-template.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/tests-bootstrap.sh create mode 100644 
ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/tests-manager.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/tests-report.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraConfigTest.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraDirectPersistenceLoadTest.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraDirectPersistenceTest.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraLocalServer.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraSessionImplTest.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/DDLGeneratorTest.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/DatasourceSerializationTest.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/IgnitePersistentStoreLoadTest.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/IgnitePersistentStorePrimitiveTest.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/IgnitePersistentStoreTest.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/LoadTestsCassandraArtifactsCreator.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/Generator.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/IntGenerator.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/LoadTestDriver.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/LongGenerator.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/PersonGenerator.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/PersonIdGenerator.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/StringGenerator.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/Worker.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/BulkReadWorker.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/BulkWriteWorker.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/ReadWorker.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/WriteWorker.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/BulkReadWorker.java create mode 100644 
ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/BulkWriteWorker.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/ReadWorker.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/WriteWorker.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/Person.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/PersonId.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/Product.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/ProductOrder.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/SimplePerson.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/SimplePersonId.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CacheStoreHelper.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraAdminCredentials.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraHelper.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraLifeCycleBean.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraRegularCredentials.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/TestCacheSession.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/TestTransaction.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/TestsHelper.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/testsuites/cassandra/store/IgniteCassandraStoreTestSuite.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/testsuites/cassandra/store/package-info.java create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/log4j2.properties create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/connection-settings.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/connection.properties create mode 100644 
ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/credentials.properties create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/embedded-cassandra.yaml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/keyspaces.properties create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/ignite-config.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/persistence-settings-1.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/persistence-settings-2.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/persistence-settings-3.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/loadall_blob/ignite-config.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/loadall_blob/persistence-settings.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/ignite-config.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/order.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-1.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-2.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-3.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-4.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-5.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-6.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/product.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/ignite-config.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/ignite-remote-client-config.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/ignite-remote-server-config.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/persistence-settings-1.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/persistence-settings-2.xml create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/resources/tests.properties create mode 100644 
ignite-extensions/modules/cassandra-ext/store/src/test/scripts/cassandra-load-tests.bat create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/scripts/cassandra-load-tests.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/scripts/ignite-load-tests.bat create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/scripts/ignite-load-tests.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/scripts/jvm-opt.sh create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/scripts/jvm-opts.bat create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/scripts/recreate-cassandra-artifacts.bat create mode 100644 ignite-extensions/modules/cassandra-ext/store/src/test/scripts/recreate-cassandra-artifacts.sh create mode 100644 ignite-extensions/modules/ml-ext/ml/src/main/resources/META-INF/services/org.apache.ignite.plugin.PluginProvider create mode 100644 ignite-extensions/modules/ml-ext/ml/src/main/resources/org/apache/ignite/ml/math/d3-dataset-template.html create mode 100644 ignite-extensions/modules/ml-ext/ml/src/main/resources/org/apache/ignite/ml/math/d3-matrix-template.html create mode 100644 ignite-extensions/modules/ml-ext/ml/src/main/resources/org/apache/ignite/ml/math/d3-vector-template.html create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/IgniteMLTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/TestUtils.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/ClusteringTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansModelTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/CovarianceMatricesAggregatorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmModelTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmPartitionDataTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmTrainerIntegrationTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/MeanWithClusterProbAggregatorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/NewComponentStatisticsAggregatorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/CollectionsTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/CommonTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/ExternalizeTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/KeepBinaryTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/LocalModelsTest.java create mode 100644 
ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/ModelTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/TrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/CompositionTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/StackingTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/bagging/BaggingTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/GDBTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/convergence/ConvergenceCheckerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/convergence/mean/MeanAbsValueConvergenceCheckerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/convergence/median/MedianOfMedianConvergenceCheckerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/predictionsaggregator/MeanValuePredictionsAggregatorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/predictionsaggregator/OnMajorityPredictionsAggregatorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/predictionsaggregator/WeightedPredictionsAggregatorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/DatasetTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/feature/ObjectHistogramTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/feature/extractor/VectorizerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/CacheBasedDatasetBuilderTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/CacheBasedDatasetTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/ComputeUtilsTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/DatasetAffinityFunctionWrapperTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/IteratorWithConcurrentModificationCheckerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/PartitionDataStorageTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/local/LocalDatasetBuilderTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/primitive/DatasetWrapperTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/primitive/SimpleDatasetTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/primitive/SimpleLabeledDatasetTest.java create mode 100644 
ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/EnvironmentTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/LearningEnvironmentBuilderTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/LearningEnvironmentTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/PromiseTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/deploy/DeployingContextImplTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/deploy/MLDeployingTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/IgniteModelStorageUtilTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/InferenceTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/IgniteDistributedModelBuilderTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/ModelBuilderTestUtil.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/SingleModelBuilderTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/ThreadedModelBuilderTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/storage/model/AbstractModelStorageTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/storage/model/DefaultModelStorageTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/util/DirectorySerializerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/KNNRegressionTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/KNNTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/LabeledDatasetHelper.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/utils/ArraySpatialIndexTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/utils/BallTreeSpatialIndexTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/utils/KDTreeSpatialIndexTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/ExternalizableTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/ExternalizeTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/MathImplMainTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/TracerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/VectorUtilsTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/BrayCurtisDistanceTest.java create mode 100644 
ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/CosineSimilarityTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/DistanceTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/DistancesTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/JaccardIndexTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/JensenShannonDistanceTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/WeightedMinkowskiDistanceTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/isolve/lsqr/LSQROnHeapTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/MathTestConstants.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/DenseMatrixConstructorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/LUDecompositionTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixArrayStorageTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixAttributeTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixBaseStorageTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixStorageImplementationTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixViewConstructorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/SparseMatrixConstructorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/DelegatingVectorConstructorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/MatrixVectorViewTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/SparseVectorConstructorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorBaseStorageTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorImplementationsTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorNormCasesTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorNormTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorToMatrixTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorViewTest.java create mode 100644 
ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/storage/SparseVectorStorageTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/stat/DistributionMixtureTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/stat/MultivariateGaussianDistributionTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/stat/StatsTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/multiclass/MultiClassTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/multiclass/OneVsRestTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/NaiveBayesTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/CompoundNaiveBayesModelTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/CompoundNaiveBayesTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/CompoundNaiveBayesTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/Data.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/discrete/DiscreteNaiveBayesModelTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/discrete/DiscreteNaiveBayesTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/discrete/DiscreteNaiveBayesTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/gaussian/GaussianNaiveBayesModelTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/gaussian/GaussianNaiveBayesTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/gaussian/GaussianNaiveBayesTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/LossFunctionsTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPConstInitializer.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerIntegrationTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistIntegrationTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/performance/MnistMLPTestUtil.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/pipeline/PipelineMdlTest.java create mode 100644 
ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/pipeline/PipelineTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/pipeline/PipelineTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/PreprocessingTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationPreprocessorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/EncoderTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/FrequencyEncoderPreprocessorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/LabelEncoderPreprocessorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/OneHotEncoderPreprocessorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/StringEncoderPreprocessorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/TargetEncoderPreprocessorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerPreprocessorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/maxabsscaling/MaxAbsScalerPreprocessorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/maxabsscaling/MaxAbsScalerTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerPreprocessorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationPreprocessorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/standardscaling/StandardScalerPreprocessorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/standardscaling/StandardScalerTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/recommendation/RecommendationTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/recommendation/RecommendationTrainerSQLTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/recommendation/RecommendationTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/RegressionsTestSuite.java 
create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionModelTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionSGDTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionModelTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionSGDTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/SelectionTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/cv/CrossValidationTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/paramgrid/ParameterSetGeneratorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/TestLabelPairCursor.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/cursor/CacheBasedLabelPairCursorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/cursor/LocalLabelPairCursorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/evaluator/aggregator/BinaryClassificationPointwiseMetricStatsAggregatorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/evaluator/aggregator/RegressionMetricStatsAggregatorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/evaluator/context/BinaryClassificationEvaluationContextTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/metric/classification/BinaryClassificationMetricsTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/metric/regression/RegressionMetricsTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/split/TrainTestDatasetSplitterTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/split/mapper/SHA256UniformMapperTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/structures/DatasetStructureTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/structures/LabeledVectorSetTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/structures/StructuresTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/svm/SVMModelTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/svm/SVMTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeClassificationTrainerIntegrationTest.java 
create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeClassificationTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeRegressionTrainerIntegrationTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeRegressionTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/data/DecisionTreeDataTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/data/TreeDataIndexTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/gini/GiniImpurityMeasureCalculatorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/gini/GiniImpurityMeasureTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/mse/MSEImpurityMeasureCalculatorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/mse/MSEImpurityMeasureTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/util/SimpleStepFunctionCompressorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/util/StepFunctionTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/util/TestImpurityMeasure.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/performance/DecisionTreeMNISTIntegrationTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/performance/DecisionTreeMNISTTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestClassifierTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestIntegrationTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestRegressionTrainerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestTreeTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/TreeNodeTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/impurity/GiniFeatureHistogramTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/impurity/ImpurityHistogramTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/impurity/MSEHistogramTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/statistics/NormalDistributionStatisticsComputerTest.java create mode 100644 
ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/LRUCacheTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/UtilTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/DataStreamGeneratorFillCacheTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/DataStreamGeneratorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/DataStreamGeneratorTestSuite.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/DiscreteRandomProducerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/GaussRandomProducerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/RandomProducerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/UniformRandomProducerTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/ParametricVectorGeneratorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/VectorGeneratorPrimitivesTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/VectorGeneratorTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/VectorGeneratorsFamilyTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/genetic/GeneticAlgorithmTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/genetic/PopulationTest.java create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/README.md create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/cleared_machines.txt create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/empty.txt create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/iris.txt create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/iris_incorrect.txt create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/machine.data.txt create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/missed_data.txt create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/no_data.txt create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/regression/README.md create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/regression/boston.csv create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/regression/diabetes.csv create mode 100644 ignite-extensions/modules/ml-ext/ml/src/test/resources/manualrun/trees/columntrees.manualrun.properties diff --git a/ignite-extensions/modules/cassandra-ext/README.txt b/ignite-extensions/modules/cassandra-ext/README.txt new file mode 100644 index 0000000000000..9c3bb3f08c345 --- /dev/null +++ 
b/ignite-extensions/modules/cassandra-ext/README.txt @@ -0,0 +1,28 @@ +Apache Ignite Cassandra Module +------------------------ + +Apache Ignite Cassandra module, used just as a parent container for other Cassandra related modules. + +Importing Cassandra Module In Maven Project +------------------------------------- + +If you are using Maven to manage dependencies of your project, you can add Cassandra Store module +dependency like this (replace '${ignite.version}' with actual Ignite version you are +interested in): + + + ... + + ... + + org.apache.ignite + ignite-cassandra + ${ignite.version} + + ... + + ... + diff --git a/ignite-extensions/modules/cassandra-ext/pom.xml b/ignite-extensions/modules/cassandra-ext/pom.xml new file mode 100644 index 0000000000000..2f78e75233e1d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/pom.xml @@ -0,0 +1,72 @@ + + + + + + + 4.0.0 + + + org.apache.ignite + ignite-parent-internal + 2.16.999-SNAPSHOT + ../../parent-internal/pom.xml + + + ignite-cassandra + pom + + http://ignite.apache.org + + + + + ${project.groupId} + ignite-cassandra-store + ${project.version} + + + org.mockito + mockito-core + ${mockito.version} + test + + + + + + store + serializers + + + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + false + + true + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/serializers/README.txt b/ignite-extensions/modules/cassandra-ext/serializers/README.txt new file mode 100644 index 0000000000000..fefe84117a65b --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/serializers/README.txt @@ -0,0 +1,34 @@ +Apache Ignite Cassandra Serializers Module +------------------------ + +Apache Ignite Cassandra Serializers module provides additional serializers to store objects as BLOBs in Cassandra. The +module could be used as an addition to Ignite Cassandra Store module. + +To enable Cassandra Serializers module when starting a standalone node, move 'optional/ignite-cassandra-serializers' +folder to 'libs' folder before running 'ignite.{sh|bat}' script. The content of the module folder will be added to +classpath in this case. Note, copying folder 'optional/ignite-cassandra-serializers' requires copying +'optional/ignite-cassandra-store' folder. + +Importing Cassandra Serializers Module In Maven Project +------------------------------------- + +If you are using Maven to manage dependencies of your project, you can add Cassandra Store module +dependency like this (replace '${ignite.version}' with actual Ignite version you are +interested in): + + + ... + + ... + + org.apache.ignite + ignite-cassandra-serializers + ${ignite.version} + + ... + + ... + diff --git a/ignite-extensions/modules/cassandra-ext/serializers/licenses/apache-2.0.txt b/ignite-extensions/modules/cassandra-ext/serializers/licenses/apache-2.0.txt new file mode 100644 index 0000000000000..75b52484ea471 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/serializers/licenses/apache-2.0.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
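The two README.txt files above describe pulling the Cassandra modules into a Maven project. For reference, a typical dependency declaration using only the coordinates those READMEs name (org.apache.ignite, ignite-cassandra, ignite-cassandra-serializers, ${ignite.version}) might look like the sketch below; the enclosing <project>/<dependencies> layout is assumed rather than taken from the READMEs:

    <dependencies>
        ...
        <!-- Cassandra module, with the coordinates given in the module README above. -->
        <dependency>
            <groupId>org.apache.ignite</groupId>
            <artifactId>ignite-cassandra</artifactId>
            <version>${ignite.version}</version>
        </dependency>
        <!-- Optional Kryo-based BLOB serializers, used as an addition to the store module. -->
        <dependency>
            <groupId>org.apache.ignite</groupId>
            <artifactId>ignite-cassandra-serializers</artifactId>
            <version>${ignite.version}</version>
        </dependency>
        ...
    </dependencies>

As the serializers README notes, ignite-cassandra-serializers is an addition to the Cassandra Store module, so the two artifacts are normally deployed together.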
diff --git a/ignite-extensions/modules/cassandra-ext/serializers/pom.xml b/ignite-extensions/modules/cassandra-ext/serializers/pom.xml new file mode 100644 index 0000000000000..78dd0bda1eb21 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/serializers/pom.xml @@ -0,0 +1,141 @@ + + + + + + + 4.0.0 + + + org.apache.ignite + ignite-cassandra + 2.16.999-SNAPSHOT + ../pom.xml + + + ignite-cassandra-serializers + + http://ignite.apache.org + + + 3.0.3 + 1.10.1 + 1.3.0 + 5.0.3 + 2.1 + + + + + ${project.groupId} + ignite-cassandra-store + + + + + com.esotericsoftware + kryo + ${kryo.version} + + + + com.esotericsoftware + reflectasm + ${reflectasm.version} + + + + com.esotericsoftware + minlog + ${minlog.version} + + + + org.ow2.asm + asm + ${asm.version} + + + + org.objenesis + objenesis + ${objenesis.version} + + + + ${project.groupId} + ignite-tools + test + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.7.0 + + UTF-8 + true + false + lines,vars,source + 256 + 512 + + + + + org.apache.maven.plugins + maven-dependency-plugin + 2.10 + + + copy-main-dependencies + package + + copy-dependencies + + + ${project.build.directory}/libs + false + false + true + true + + ignite-cassandra-store + + runtime + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + false + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/serializers/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/KryoSerializer.java b/ignite-extensions/modules/cassandra-ext/serializers/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/KryoSerializer.java new file mode 100644 index 0000000000000..9de841b5533ba --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/serializers/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/KryoSerializer.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.serializer; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.nio.ByteBuffer; +import com.esotericsoftware.kryo.Kryo; +import com.esotericsoftware.kryo.io.Input; +import com.esotericsoftware.kryo.io.Output; +import org.apache.ignite.internal.util.typedef.internal.U; + +/** + * Serializer based on Kryo serialization. 
+ */ +public class KryoSerializer implements Serializer { + /** */ + private static final long serialVersionUID = 0L; + + /** */ + private static final int DFLT_BUFFER_SIZE = 4096; + + /** Thread local instance of {@link Kryo} */ + private transient ThreadLocal kryos = new ThreadLocal() { + /** {@inheritDoc} */ + @Override protected Kryo initialValue() { + return new Kryo(); + } + }; + + /** {@inheritDoc} */ + @Override public ByteBuffer serialize(Object obj) { + if (obj == null) + return null; + + ByteArrayOutputStream stream = null; + + Output out = null; + + try { + stream = new ByteArrayOutputStream(DFLT_BUFFER_SIZE); + + out = new Output(stream); + + kryos.get().writeClassAndObject(out, obj); + out.flush(); + + return ByteBuffer.wrap(stream.toByteArray()); + } + catch (Throwable e) { + throw new IllegalStateException("Failed to serialize object of the class '" + obj.getClass().getName() + "'", e); + } + finally { + U.closeQuiet(out); + U.closeQuiet(stream); + } + } + + /** {@inheritDoc} */ + @Override public Object deserialize(ByteBuffer buf) { + ByteArrayInputStream stream = null; + Input in = null; + + try { + stream = new ByteArrayInputStream(buf.array()); + in = new Input(stream); + + return kryos.get().readClassAndObject(in); + } + catch (Throwable e) { + throw new IllegalStateException("Failed to deserialize object from byte stream", e); + } + finally { + U.closeQuiet(in); + U.closeQuiet(stream); + } + } +} diff --git a/ignite-extensions/modules/cassandra-ext/serializers/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/package-info.java b/ignite-extensions/modules/cassandra-ext/serializers/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/package-info.java new file mode 100644 index 0000000000000..bc77f7a529580 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/serializers/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains Cassandra serializers. + */ + +package org.apache.ignite.cache.store.cassandra.serializer; diff --git a/ignite-extensions/modules/cassandra-ext/serializers/src/test/java/org/apache/ignite/tests/KryoSerializerTest.java b/ignite-extensions/modules/cassandra-ext/serializers/src/test/java/org/apache/ignite/tests/KryoSerializerTest.java new file mode 100644 index 0000000000000..b840363953877 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/serializers/src/test/java/org/apache/ignite/tests/KryoSerializerTest.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests; + +import java.nio.ByteBuffer; +import java.util.Date; +import org.apache.ignite.cache.store.cassandra.serializer.KryoSerializer; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Simple test for KryoSerializer. + */ +public class KryoSerializerTest { + /** + * Serialize simple object test. + */ + @Test + public void simpleTest() { + MyPojo pojo1 = new MyPojo("123", 1, 123423453467L, new Date(), null); + + KryoSerializer ser = new KryoSerializer(); + + ByteBuffer buff = ser.serialize(pojo1); + MyPojo pojo2 = (MyPojo)ser.deserialize(buff); + + assertEquals("Kryo simple serialization test failed", pojo1, pojo2); + } + + /** + * Serialize object with cyclic references test. + */ + @Test + public void cyclicStructureTest() { + MyPojo pojo1 = new MyPojo("123", 1, 123423453467L, new Date(), null); + MyPojo pojo2 = new MyPojo("321", 2, 123456L, new Date(), pojo1); + pojo1.setRef(pojo2); + + KryoSerializer ser = new KryoSerializer(); + + ByteBuffer buff1 = ser.serialize(pojo1); + ByteBuffer buff2 = ser.serialize(pojo2); + + MyPojo pojo3 = (MyPojo)ser.deserialize(buff1); + MyPojo pojo4 = (MyPojo)ser.deserialize(buff2); + + assertEquals("Kryo cyclic structure serialization test failed", pojo1, pojo3); + assertEquals("Kryo cyclic structure serialization test failed", pojo1.getRef(), pojo3.getRef()); + assertEquals("Kryo cyclic structure serialization test failed", pojo2, pojo4); + assertEquals("Kryo cyclic structure serialization test failed", pojo2.getRef(), pojo4.getRef()); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/serializers/src/test/java/org/apache/ignite/tests/MyPojo.java b/ignite-extensions/modules/cassandra-ext/serializers/src/test/java/org/apache/ignite/tests/MyPojo.java new file mode 100644 index 0000000000000..ea6a93232faf4 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/serializers/src/test/java/org/apache/ignite/tests/MyPojo.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests; + +import java.io.Serializable; +import java.util.Date; + +/** + * Sample POJO for tests. 
+ */ +public class MyPojo implements Serializable { + /** */ + private String field1; + + /** */ + private int field2; + + /** */ + private long field3; + + /** */ + private Date field4; + + /** */ + private MyPojo ref; + + /** + * Empty constructor. + */ + public MyPojo() { + // No-op. + } + + /** + * Full constructor. + * + * @param field1 Some value. + * @param field2 Some value. + * @param field3 Some value. + * @param field4 Some value. + * @param ref Reference to other pojo. + */ + public MyPojo(String field1, int field2, long field3, Date field4, MyPojo ref) { + this.field1 = field1; + this.field2 = field2; + this.field3 = field3; + this.field4 = field4; + this.ref = ref; + } + + /** {@inheritDoc} */ + @Override public boolean equals(Object obj) { + if (!(obj instanceof MyPojo)) + return false; + + MyPojo myObj = (MyPojo)obj; + + if ((field1 == null && myObj.field1 != null) || + (field1 != null && !field1.equals(myObj.field1))) + return false; + + if ((field4 == null && myObj.field4 != null) || + (field4 != null && !field4.equals(myObj.field4))) + return false; + + return field2 == myObj.field2 && field3 == myObj.field3; + } + + /** + * @param ref New reference. + */ + public void setRef(MyPojo ref) { + this.ref = ref; + } + + /** + * @return Reference to some POJO. + */ + public MyPojo getRef() { + return ref; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/serializers/src/test/java/org/apache/ignite/tests/SerializerSuite.java b/ignite-extensions/modules/cassandra-ext/serializers/src/test/java/org/apache/ignite/tests/SerializerSuite.java new file mode 100644 index 0000000000000..b6f5e0782d33f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/serializers/src/test/java/org/apache/ignite/tests/SerializerSuite.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Suite contains serializers tests. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses(KryoSerializerTest.class) +public class SerializerSuite { +} diff --git a/ignite-extensions/modules/cassandra-ext/store/.toDelete b/ignite-extensions/modules/cassandra-ext/store/.toDelete new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/ignite-extensions/modules/cassandra-ext/store/README.txt b/ignite-extensions/modules/cassandra-ext/store/README.txt new file mode 100644 index 0000000000000..c90dc7cd50962 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/README.txt @@ -0,0 +1,32 @@ +Apache Ignite Cassandra Store Module +------------------------ + +Apache Ignite Cassandra Store module provides CacheStore implementation backed by Cassandra database. 
+ +To enable Cassandra Store module when starting a standalone node, move 'optional/ignite-cassandra-store' folder to +'libs' folder before running 'ignite.{sh|bat}' script. The content of the module folder will +be added to classpath in this case. + +Importing Cassandra Store Module In Maven Project +------------------------------------- + +If you are using Maven to manage dependencies of your project, you can add Cassandra Store module +dependency like this (replace '${ignite.version}' with actual Ignite version you are +interested in): + + <project xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 + http://maven.apache.org/xsd/maven-4.0.0.xsd"> + ... + <dependencies> + ... + <dependency> + <groupId>org.apache.ignite</groupId> + <artifactId>ignite-cassandra-store</artifactId> + <version>${ignite.version}</version> + </dependency> + ... + </dependencies> + ... + </project>
diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-10-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-10-big-Digest.crc32 new file mode 100644 index 0000000000000..db3988d0b9948 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-10-big-Digest.crc32 @@ -0,0 +1 @@ +378099679 \ No newline at end of file
diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-10-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-10-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-10-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt
diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-9-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-9-big-Digest.crc32 new file mode 100644 index 0000000000000..670f17bef54b8 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-9-big-Digest.crc32 @@ -0,0 +1 @@ +351496339 \ No newline at end of file
diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-9-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-9-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/IndexInfo-9f5c6374d48532299a0a5094af9ad1e3/mc-9-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt
diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-13-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-13-big-Digest.crc32 new file mode 100644 index 0000000000000..56d1de63f47ab --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-13-big-Digest.crc32 @@ -0,0 +1 @@ +552938811 \ No newline at end of file diff --git
a/ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-13-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-13-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-13-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-14-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-14-big-Digest.crc32 new file mode 100644 index 0000000000000..8d329bbeb91fc --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-14-big-Digest.crc32 @@ -0,0 +1 @@ +3040650569 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-14-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-14-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/compaction_history-b4dbb7b4dc493fb5b3bfce6e434832ca/mc-14-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-37-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-37-big-Digest.crc32 new file mode 100644 index 0000000000000..a3676e9f6fb77 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-37-big-Digest.crc32 @@ -0,0 +1 @@ +2831464264 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-37-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-37-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-37-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-38-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-38-big-Digest.crc32 new file mode 100644 index 0000000000000..5a66db1f9aa0a --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-38-big-Digest.crc32 @@ -0,0 +1 @@ +4090916003 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-38-big-TOC.txt 
b/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-38-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-38-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-39-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-39-big-Digest.crc32 new file mode 100644 index 0000000000000..10a8b2e03bd60 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-39-big-Digest.crc32 @@ -0,0 +1 @@ +3386012046 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-39-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-39-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/local-7ad54392bcdd35a684174e047860b377/mc-39-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-17-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-17-big-Digest.crc32 new file mode 100644 index 0000000000000..0fed0b07379c1 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-17-big-Digest.crc32 @@ -0,0 +1 @@ +3646937270 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-17-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-17-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-17-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-18-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-18-big-Digest.crc32 new file mode 100644 index 0000000000000..b64e04ce836b2 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-18-big-Digest.crc32 @@ -0,0 +1 @@ +2930833415 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-18-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-18-big-TOC.txt new file mode 
100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-18-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-19-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-19-big-Digest.crc32 new file mode 100644 index 0000000000000..c2bb5b6c3ff05 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-19-big-Digest.crc32 @@ -0,0 +1 @@ +1754602695 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-19-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-19-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/prepared_statements-18a9c2576a0c3841ba718cd529849fef/mc-19-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-17-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-17-big-Digest.crc32 new file mode 100644 index 0000000000000..2c8905253b32e --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-17-big-Digest.crc32 @@ -0,0 +1 @@ +2560650336 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-17-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-17-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-17-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-18-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-18-big-Digest.crc32 new file mode 100644 index 0000000000000..bf9844753b9ba --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-18-big-Digest.crc32 @@ -0,0 +1 @@ +2425553357 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-18-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-18-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ 
b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-18-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-19-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-19-big-Digest.crc32 new file mode 100644 index 0000000000000..169e6a456390f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-19-big-Digest.crc32 @@ -0,0 +1 @@ +830604840 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-19-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-19-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-19-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-20-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-20-big-Digest.crc32 new file mode 100644 index 0000000000000..28119ca02af15 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-20-big-Digest.crc32 @@ -0,0 +1 @@ +247567017 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-20-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-20-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/size_estimates-618f817b005f3678b8a453f3930b8e86/mc-20-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-17-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-17-big-Digest.crc32 new file mode 100644 index 0000000000000..04b3aee188c96 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-17-big-Digest.crc32 @@ -0,0 +1 @@ +922928279 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-17-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-17-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ 
b/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-17-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-18-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-18-big-Digest.crc32 new file mode 100644 index 0000000000000..2549f32d311ad --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-18-big-Digest.crc32 @@ -0,0 +1 @@ +2660208835 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-18-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-18-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-18-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-19-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-19-big-Digest.crc32 new file mode 100644 index 0000000000000..2879ece5c32a3 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-19-big-Digest.crc32 @@ -0,0 +1 @@ +2734395315 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-19-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-19-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system/sstable_activity-5a1ff267ace03f128563cfae6103c65e/mc-19-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_auth/roles-5bc52802de2535edaeab188eecebb090/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system_auth/roles-5bc52802de2535edaeab188eecebb090/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..3cbe2b85f30a0 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_auth/roles-5bc52802de2535edaeab188eecebb090/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +812905827 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_auth/roles-5bc52802de2535edaeab188eecebb090/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system_auth/roles-5bc52802de2535edaeab188eecebb090/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_auth/roles-5bc52802de2535edaeab188eecebb090/mc-1-big-TOC.txt @@ 
-0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/aggregates-924c55872e3a345bb10c12f37c1ba895/mc-5-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/aggregates-924c55872e3a345bb10c12f37c1ba895/mc-5-big-Digest.crc32 new file mode 100644 index 0000000000000..05f13572f5932 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/aggregates-924c55872e3a345bb10c12f37c1ba895/mc-5-big-Digest.crc32 @@ -0,0 +1 @@ +1267382698 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/aggregates-924c55872e3a345bb10c12f37c1ba895/mc-5-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/aggregates-924c55872e3a345bb10c12f37c1ba895/mc-5-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/aggregates-924c55872e3a345bb10c12f37c1ba895/mc-5-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-25-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-25-big-Digest.crc32 new file mode 100644 index 0000000000000..cecc37872af47 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-25-big-Digest.crc32 @@ -0,0 +1 @@ +3929871361 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-25-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-25-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-25-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-26-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-26-big-Digest.crc32 new file mode 100644 index 0000000000000..720acb070e6ab --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-26-big-Digest.crc32 @@ -0,0 +1 @@ +312373358 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-26-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-26-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-26-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff 
--git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-27-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-27-big-Digest.crc32 new file mode 100644 index 0000000000000..e9048b1dd94d9 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-27-big-Digest.crc32 @@ -0,0 +1 @@ +2663063175 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-27-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-27-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/columns-24101c25a2ae3af787c1b40ee1aca33f/mc-27-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/dropped_columns-5e7583b5f3f43af19a39b7e1d6f5f11f/mc-5-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/dropped_columns-5e7583b5f3f43af19a39b7e1d6f5f11f/mc-5-big-Digest.crc32 new file mode 100644 index 0000000000000..05f13572f5932 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/dropped_columns-5e7583b5f3f43af19a39b7e1d6f5f11f/mc-5-big-Digest.crc32 @@ -0,0 +1 @@ +1267382698 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/dropped_columns-5e7583b5f3f43af19a39b7e1d6f5f11f/mc-5-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/dropped_columns-5e7583b5f3f43af19a39b7e1d6f5f11f/mc-5-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/dropped_columns-5e7583b5f3f43af19a39b7e1d6f5f11f/mc-5-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/functions-96489b7980be3e14a70166a0b9159450/mc-5-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/functions-96489b7980be3e14a70166a0b9159450/mc-5-big-Digest.crc32 new file mode 100644 index 0000000000000..05f13572f5932 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/functions-96489b7980be3e14a70166a0b9159450/mc-5-big-Digest.crc32 @@ -0,0 +1 @@ +1267382698 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/functions-96489b7980be3e14a70166a0b9159450/mc-5-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/functions-96489b7980be3e14a70166a0b9159450/mc-5-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/functions-96489b7980be3e14a70166a0b9159450/mc-5-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git 
a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-10-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-10-big-Digest.crc32 new file mode 100644 index 0000000000000..e9048b1dd94d9 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-10-big-Digest.crc32 @@ -0,0 +1 @@ +2663063175 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-10-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-10-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-10-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-9-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-9-big-Digest.crc32 new file mode 100644 index 0000000000000..8736e50ef4e1e --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-9-big-Digest.crc32 @@ -0,0 +1 @@ +1700832550 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-9-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-9-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/indexes-0feb57ac311f382fba6d9024d305702f/mc-9-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/keyspaces-abac5682dea631c5b535b3d6cffd0fb6/mc-37-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/keyspaces-abac5682dea631c5b535b3d6cffd0fb6/mc-37-big-Digest.crc32 new file mode 100644 index 0000000000000..3228d87bb6398 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/keyspaces-abac5682dea631c5b535b3d6cffd0fb6/mc-37-big-Digest.crc32 @@ -0,0 +1 @@ +1011368173 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/keyspaces-abac5682dea631c5b535b3d6cffd0fb6/mc-37-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/keyspaces-abac5682dea631c5b535b3d6cffd0fb6/mc-37-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/keyspaces-abac5682dea631c5b535b3d6cffd0fb6/mc-37-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git 
a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/tables-afddfb9dbc1e30688056eed6c302ba09/mc-33-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/tables-afddfb9dbc1e30688056eed6c302ba09/mc-33-big-Digest.crc32 new file mode 100644 index 0000000000000..95e862b0fbfe2 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/tables-afddfb9dbc1e30688056eed6c302ba09/mc-33-big-Digest.crc32 @@ -0,0 +1 @@ +2584933075 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/tables-afddfb9dbc1e30688056eed6c302ba09/mc-33-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/tables-afddfb9dbc1e30688056eed6c302ba09/mc-33-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/tables-afddfb9dbc1e30688056eed6c302ba09/mc-33-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/triggers-4df70b666b05325195a132b54005fd48/mc-5-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/triggers-4df70b666b05325195a132b54005fd48/mc-5-big-Digest.crc32 new file mode 100644 index 0000000000000..05f13572f5932 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/triggers-4df70b666b05325195a132b54005fd48/mc-5-big-Digest.crc32 @@ -0,0 +1 @@ +1267382698 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/triggers-4df70b666b05325195a132b54005fd48/mc-5-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/triggers-4df70b666b05325195a132b54005fd48/mc-5-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/triggers-4df70b666b05325195a132b54005fd48/mc-5-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/types-5a8b1ca866023f77a0459273d308917a/mc-5-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/types-5a8b1ca866023f77a0459273d308917a/mc-5-big-Digest.crc32 new file mode 100644 index 0000000000000..05f13572f5932 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/types-5a8b1ca866023f77a0459273d308917a/mc-5-big-Digest.crc32 @@ -0,0 +1 @@ +1267382698 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/types-5a8b1ca866023f77a0459273d308917a/mc-5-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/types-5a8b1ca866023f77a0459273d308917a/mc-5-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/types-5a8b1ca866023f77a0459273d308917a/mc-5-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/views-9786ac1cdd583201a7cdad556410c985/mc-5-big-Digest.crc32 
b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/views-9786ac1cdd583201a7cdad556410c985/mc-5-big-Digest.crc32 new file mode 100644 index 0000000000000..05f13572f5932 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/views-9786ac1cdd583201a7cdad556410c985/mc-5-big-Digest.crc32 @@ -0,0 +1 @@ +1267382698 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/views-9786ac1cdd583201a7cdad556410c985/mc-5-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/views-9786ac1cdd583201a7cdad556410c985/mc-5-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/system_schema/views-9786ac1cdd583201a7cdad556410c985/mc-5-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..1f326d142624d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +501219100 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/schema.cql new file mode 100644 index 0000000000000..99ffbfb9721a7 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test1-58666b30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481370-blob_test1/schema.cql @@ -0,0 +1,20 @@ 
+CREATE TABLE IF NOT EXISTS test1.blob_test1 ( + key bigint PRIMARY KEY, + value blob) + WITH ID = 58666b30-cb2f-11ea-89fa-b3608d80e62a + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE' + AND comment = '' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + AND compaction = { 'max_threshold': '32', 'min_threshold': '4', 'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..433d26070e353 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +1124038907 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/schema.cql new file mode 100644 index 0000000000000..c76a1e967b1ef --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test2-58890e60cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481527-blob_test2/schema.cql @@ -0,0 +1,20 @@ +CREATE TABLE IF NOT EXISTS test1.blob_test2 ( + key bigint PRIMARY KEY, + value blob) + WITH ID = 
58890e60-cb2f-11ea-89fa-b3608d80e62a + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE' + AND comment = '' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + AND compaction = { 'max_threshold': '32', 'min_threshold': '4', 'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..6fef1f73fb742 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +1273088272 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/schema.cql new file mode 100644 index 0000000000000..abbbad983dbbd --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/blob_test3-58b108c0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481694-blob_test3/schema.cql @@ -0,0 +1,20 @@ +CREATE TABLE IF NOT EXISTS test1.blob_test3 ( + key blob PRIMARY KEY, + value blob) + WITH ID = 58b108c0-cb2f-11ea-89fa-b3608d80e62a + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND 
crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE' + AND comment = '' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + AND compaction = { 'max_threshold': '32', 'min_threshold': '4', 'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..6cc95c1bb0ab7 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +2501458868 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/schema.cql new file mode 100644 index 0000000000000..410ca2727d353 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-59ff33a0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321483076-order/schema.cql @@ -0,0 +1,23 @@ +CREATE TABLE IF NOT EXISTS test1."order" ( + id bigint PRIMARY KEY, + amount int, + date timestamp, + price float, + productid bigint) + WITH ID = 59ff33a0-cb2f-11ea-89fa-b3608d80e62a + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND 
read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE' + AND comment = '' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + AND compaction = { 'max_threshold': '32', 'min_threshold': '4', 'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..c8e0927c336ca --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +3986204851 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/schema.cql new file mode 100644 index 0000000000000..57c0dcf7fdcbd --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/order-60dfe9d0cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498825-order/schema.cql @@ -0,0 +1,23 @@ +CREATE TABLE IF NOT EXISTS test1."order" ( + id bigint PRIMARY KEY, + amount int, + date timestamp, + price float, + productid bigint) + WITH ID = 60dfe9d0-cb2f-11ea-89fa-b3608d80e62a + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE' + AND comment = '' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + AND compaction = { 'max_threshold': '32', 
'min_threshold': '4', 'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/.persons_married_idx/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/.persons_married_idx/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..ee8717082a4e5 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/.persons_married_idx/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +3873247419 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/.persons_married_idx/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/.persons_married_idx/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/.persons_married_idx/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..0f4357850e348 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +2824655931 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db 
+Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/schema.cql new file mode 100644 index 0000000000000..b4043a4c581e2 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/persons-6271d470cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499562-persons/schema.cql @@ -0,0 +1,33 @@ +CREATE TABLE IF NOT EXISTS test1.persons ( + company text, + department text, + number bigint, + age smallint, + birth_date timestamp, + first_name text, + fullname text, + height bigint, + last_name text, + married boolean, + phones blob, + weight float, + PRIMARY KEY ((company, department), number)) + WITH ID = 6271d470-cb2f-11ea-89fa-b3608d80e62a + AND CLUSTERING ORDER BY (number DESC) + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND read_repair_chance = 0.2 + AND speculative_retry = '99PERCENTILE' + AND comment = 'A most excellent and useful table' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + AND compaction = { 'max_threshold': '32', 'min_threshold': '4', 'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; +CREATE INDEX persons_married_idx ON test1.persons (married); diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..9fd549c9cff91 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +3496131264 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- 
/dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/schema.cql new file mode 100644 index 0000000000000..522adcb5ce188 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test1-61ee4c90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498969-pojo_test1/schema.cql @@ -0,0 +1,28 @@ +CREATE TABLE IF NOT EXISTS test1.pojo_test1 ( + key bigint PRIMARY KEY, + age smallint, + birthdate timestamp, + firstname text, + height bigint, + lastname text, + married boolean, + personnumber bigint, + phones blob, + weight float) + WITH ID = 61ee4c90-cb2f-11ea-89fa-b3608d80e62a + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE' + AND comment = '' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + AND compaction = { 'max_threshold': '32', 'min_threshold': '4', 'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..031b03a97c07f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +2479793598 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/mc-1-big-TOC.txt new file mode 100644 index 
0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/schema.cql new file mode 100644 index 0000000000000..22a84acca06b2 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test2-620d1f30cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499109-pojo_test2/schema.cql @@ -0,0 +1,30 @@ +CREATE TABLE IF NOT EXISTS test1.pojo_test2 ( + companycode text, + departmentcode text, + personnumber bigint, + age smallint, + birthdate timestamp, + firstname text, + height bigint, + lastname text, + married boolean, + phones blob, + weight float, + PRIMARY KEY ((companycode, departmentcode, personnumber))) + WITH ID = 620d1f30-cb2f-11ea-89fa-b3608d80e62a + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE' + AND comment = '' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + AND compaction = { 'max_threshold': '32', 'min_threshold': '4', 'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/.pojo_test3_married_idx/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/.pojo_test3_married_idx/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..774193356fb18 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/.pojo_test3_married_idx/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +3866868469 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/.pojo_test3_married_idx/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/.pojo_test3_married_idx/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/.pojo_test3_married_idx/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git 
a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..ded36dc1530c5 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +1268074040 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/schema.cql new file mode 100644 index 0000000000000..e55f2945b2c7d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test3-622d5160cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499339-pojo_test3/schema.cql @@ -0,0 +1,33 @@ +CREATE TABLE IF NOT EXISTS test1.pojo_test3 ( + company text, + department text, + number bigint, + age smallint, + birth_date timestamp, + first_name text, + fullname text, + height bigint, + last_name text, + married boolean, + phones blob, + weight float, + PRIMARY KEY ((company, department), number)) + WITH ID = 622d5160-cb2f-11ea-89fa-b3608d80e62a + AND CLUSTERING ORDER BY (number DESC) + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND read_repair_chance = 0.2 + AND speculative_retry = '99PERCENTILE' + AND comment = 'A most excellent and useful table' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + AND compaction = { 'max_threshold': '32', 'min_threshold': '4', 'class': 
'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; +CREATE INDEX pojo_test3_married_idx ON test1.pojo_test3 (married); diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/.pojo_test5_married_idx/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/.pojo_test5_married_idx/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..2ead54b50444c --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/.pojo_test5_married_idx/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +2283306245 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/.pojo_test5_married_idx/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/.pojo_test5_married_idx/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/.pojo_test5_married_idx/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..9dc1e5a3ab837 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +400696021 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ 
b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/schema.cql new file mode 100644 index 0000000000000..b6ee538c3464f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test5-62dfff90cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321499803-pojo_test5/schema.cql @@ -0,0 +1,32 @@ +CREATE TABLE IF NOT EXISTS test1.pojo_test5 ( + company_code text, + department_code text, + person_num bigint, + age smallint, + birth_date timestamp, + first_name text, + height bigint, + last_name text, + married boolean, + phones blob, + weight float, + PRIMARY KEY ((company_code, department_code), person_num)) + WITH ID = 62dfff90-cb2f-11ea-89fa-b3608d80e62a + AND CLUSTERING ORDER BY (person_num ASC) + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE' + AND comment = '' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + AND compaction = { 'max_threshold': '32', 'min_threshold': '4', 'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; +CREATE INDEX pojo_test5_married_idx ON test1.pojo_test5 (married); diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/.pojo_test6_married_idx/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/.pojo_test6_married_idx/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..f7c44684035aa --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/.pojo_test6_married_idx/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +1201254105 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/.pojo_test6_married_idx/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/.pojo_test6_married_idx/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/.pojo_test6_married_idx/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db 
+Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..50d80c353fe9a --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +2723034422 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/schema.cql new file mode 100644 index 0000000000000..20e2992603fb7 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/pojo_test6-63208b00cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321500040-pojo_test6/schema.cql @@ -0,0 +1,32 @@ +CREATE TABLE IF NOT EXISTS test1.pojo_test6 ( + company text, + department text, + number bigint, + age smallint, + birth_date timestamp, + first_name text, + height bigint, + last_name text, + married boolean, + phones blob, + weight float, + PRIMARY KEY ((company, department), number)) + WITH ID = 63208b00-cb2f-11ea-89fa-b3608d80e62a + AND CLUSTERING ORDER BY (number DESC) + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND read_repair_chance = 0.2 + AND speculative_retry = '99PERCENTILE' + AND comment = 'A most excellent and useful table' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + AND compaction = { 'max_threshold': '32', 'min_threshold': '4', 'class': 
'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; +CREATE INDEX pojo_test6_married_idx ON test1.pojo_test6 (married); diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..45cb8a7f61b82 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +2957190361 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/schema.cql new file mode 100644 index 0000000000000..4a6cb2f264465 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test1-57dd6510cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321480902-primitive_test1/schema.cql @@ -0,0 +1,20 @@ +CREATE TABLE IF NOT EXISTS test1.primitive_test1 ( + key bigint PRIMARY KEY, + value bigint) + WITH ID = 57dd6510-cb2f-11ea-89fa-b3608d80e62a + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE' + AND comment = '' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + 
AND compaction = { 'max_threshold': '32', 'min_threshold': '4', 'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..ab2023668141e --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +1654318954 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/schema.cql new file mode 100644 index 0000000000000..02993abf0cdad --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/primitive_test2-58075b40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321481207-primitive_test2/schema.cql @@ -0,0 +1,20 @@ +CREATE TABLE IF NOT EXISTS test1.primitive_test2 ( + key text PRIMARY KEY, + value text) + WITH ID = 58075b40-cb2f-11ea-89fa-b3608d80e62a + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE' + AND comment = '' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + 
AND compaction = { 'max_threshold': '32', 'min_threshold': '4', 'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..e5b41645431b5 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +2322509072 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/schema.cql new file mode 100644 index 0000000000000..dd31697025d01 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-59de8c40cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321482866-product/schema.cql @@ -0,0 +1,23 @@ +CREATE TABLE IF NOT EXISTS test1.product ( + id bigint PRIMARY KEY, + description text, + price float, + title text, + type text) + WITH ID = 59de8c40-cb2f-11ea-89fa-b3608d80e62a + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE' + AND comment = '' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + AND compaction = { 'max_threshold': '32', 'min_threshold': '4', 'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND 
compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/manifest.json b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/manifest.json new file mode 100644 index 0000000000000..2f112eb744484 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/manifest.json @@ -0,0 +1 @@ +{"files":["mc-1-big-Data.db"]} diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/mc-1-big-Digest.crc32 b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/mc-1-big-Digest.crc32 new file mode 100644 index 0000000000000..19756819ee67a --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/mc-1-big-Digest.crc32 @@ -0,0 +1 @@ +1074173508 \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/mc-1-big-TOC.txt b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/mc-1-big-TOC.txt new file mode 100644 index 0000000000000..6471ccaaeb64d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/mc-1-big-TOC.txt @@ -0,0 +1,8 @@ +Statistics.db +Summary.db +Index.db +Filter.db +CompressionInfo.db +Digest.crc32 +Data.db +TOC.txt diff --git a/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/schema.cql b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/schema.cql new file mode 100644 index 0000000000000..2c5bc0f68c873 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/data/data/test1/product-60be5810cb2f11ea89fab3608d80e62a/snapshots/dropped-1595321498402-product/schema.cql @@ -0,0 +1,23 @@ +CREATE TABLE IF NOT EXISTS test1.product ( + id bigint PRIMARY KEY, + description text, + price float, + title text, + type text) + WITH ID = 60be5810-cb2f-11ea-89fa-b3608d80e62a + AND bloom_filter_fp_chance = 0.01 + AND dclocal_read_repair_chance = 0.1 + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND min_index_interval = 128 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE' + AND comment = '' + AND caching = { 'keys': 'ALL', 'rows_per_partition': 'NONE' } + AND compaction = { 'max_threshold': '32', 'min_threshold': '4', 'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy' } + AND compression = { 'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor' } + AND cdc = false + AND extensions = { }; diff 
--git a/ignite-extensions/modules/cassandra-ext/store/licenses/apache-2.0.txt b/ignite-extensions/modules/cassandra-ext/store/licenses/apache-2.0.txt new file mode 100644 index 0000000000000..75b52484ea471 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/licenses/apache-2.0.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ignite-extensions/modules/cassandra-ext/store/pom.xml b/ignite-extensions/modules/cassandra-ext/store/pom.xml new file mode 100644 index 0000000000000..9350250c15902 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/pom.xml @@ -0,0 +1,327 @@ + + + + + + + 4.0.0 + + + org.apache.ignite + ignite-cassandra + 2.16.999-SNAPSHOT + ../pom.xml + + + ignite-cassandra-store + + http://ignite.apache.org + + + 1.9.4 + 3.2.0 + 3.11.3 + 3.0.2 + + + + + + commons-beanutils + commons-beanutils + ${commons-beanutils.version} + + + + + ${project.groupId} + ignite-core + + + + ${project.groupId} + ignite-spring + + + + ${project.groupId} + ignite-log4j2 + test + + + + ${project.groupId} + ignite-tools + test + + + + + com.datastax.cassandra + cassandra-driver-core + ${cassandra-driver.version} + + + + io.netty + netty-handler + ${netty.version} + + + + io.netty + netty-buffer + ${netty.version} + + + + io.netty + netty-common + ${netty.version} + + + + io.netty + netty-transport + ${netty.version} + + + + io.netty + netty-codec + ${netty.version} + + + + io.netty + netty-resolver + ${netty.version} + + + + com.google.guava + guava + ${guava.version} + + + + com.codahale.metrics + metrics-core + ${metrics-core.version} + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + + org.apache.cassandra + cassandra-all + ${cassandra-all.version} + test + + + log4j-over-slf4j + org.slf4j + + + commons-codec + commons-codec + + + + + + org.mockito + mockito-core + test + + + + ${project.groupId} + ignite-core + test-jar + test + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + UTF-8 + false + false + lines,vars,source + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + copy-all-dependencies + package + + copy-dependencies + + + ${project.build.directory}/tests-package/lib + false + false + true + + netty-all,cassandra-all,snappy-java,lz4,compress-lzf,commons-codec,commons-lang3,commons-math3, + concurrentlinkedhashmap-lru,antlr,ST4,antlr-runtime,jcl-over-slf4j,jackson-core-asl, + jackson-mapper-asl,json-simple,high-scale-lib,snakeyaml,jbcrypt,reporter-config3, + reporter-config-base,hibernate-validator,validation-api,jboss-logging,thrift-server, + disruptor,stream,fastutil,logback-core,logback-classic,libthrift,httpclient,httpcore, + cassandra-thrift,jna,jamm,joda-time,sigar,ecj,tools + + + + + + copy-main-dependencies + package + + copy-dependencies + + + ${project.build.directory}/libs + false + false + true + true + + ${project.groupId},org.springframework,org.gridgain + + + commons-logging,slf4j-api,cache-api,slf4j-api,aopalliance + + runtime + + + + + + + + org.apache.maven.plugins + maven-antrun-plugin + + + + ant-contrib + ant-contrib + 1.0b3 + + + ant + ant + + + + + + + 
package-tests + package + + run + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + true + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/CassandraCacheStore.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/CassandraCacheStore.java new file mode 100644 index 0000000000000..03abc4e300ce1 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/CassandraCacheStore.java @@ -0,0 +1,546 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import javax.cache.Cache; +import javax.cache.integration.CacheLoaderException; +import javax.cache.integration.CacheWriterException; +import com.datastax.driver.core.BoundStatement; +import com.datastax.driver.core.PreparedStatement; +import com.datastax.driver.core.Row; +import com.datastax.driver.core.Statement; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.IgniteLogger; +import org.apache.ignite.cache.store.CacheStore; +import org.apache.ignite.cache.store.CacheStoreSession; +import org.apache.ignite.cache.store.cassandra.datasource.DataSource; +import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings; +import org.apache.ignite.cache.store.cassandra.persistence.PersistenceController; +import org.apache.ignite.cache.store.cassandra.session.CassandraSession; +import org.apache.ignite.cache.store.cassandra.session.ExecutionAssistant; +import org.apache.ignite.cache.store.cassandra.session.GenericBatchExecutionAssistant; +import org.apache.ignite.cache.store.cassandra.session.LoadCacheCustomQueryWorker; +import org.apache.ignite.cache.store.cassandra.session.transaction.DeleteMutation; +import org.apache.ignite.cache.store.cassandra.session.transaction.Mutation; +import org.apache.ignite.cache.store.cassandra.session.transaction.WriteMutation; +import org.apache.ignite.internal.util.typedef.internal.S; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.apache.ignite.lang.IgniteBiInClosure; +import org.apache.ignite.logger.NullLogger; +import org.apache.ignite.resources.CacheStoreSessionResource; +import 
org.apache.ignite.resources.IgniteInstanceResource; +import org.apache.ignite.resources.LoggerResource; +import org.apache.ignite.thread.IgniteThreadFactory; + +/** + * Implementation of {@link CacheStore} backed by Cassandra database. + * + * @param Ignite cache key type. + * @param Ignite cache value type. + */ +public class CassandraCacheStore implements CacheStore { + /** Buffer to store mutations performed withing transaction. */ + private static final String TRANSACTION_BUFFER = "CASSANDRA_TRANSACTION_BUFFER"; + + /** Thread name. */ + private static final String CACHE_LOADER_THREAD_NAME = "cassandra-cache-loader"; + + /** Auto-injected ignite instance. */ + @SuppressWarnings("unused") + @IgniteInstanceResource + private Ignite ignite; + + /** Auto-injected store session. */ + @SuppressWarnings("unused") + @CacheStoreSessionResource + private CacheStoreSession storeSes; + + /** Auto-injected logger instance. */ + @SuppressWarnings("unused") + @LoggerResource + private IgniteLogger log; + + /** Cassandra data source. */ + private DataSource dataSrc; + + /** Max workers thread count. These threads are responsible for load cache. */ + private int maxPoolSize = Runtime.getRuntime().availableProcessors(); + + /** Controller component responsible for serialization logic. */ + private final PersistenceController controller; + + /** + * Store constructor. + * + * @param dataSrc Data source. + * @param settings Persistence settings for Ignite key and value objects. + * @param maxPoolSize Max workers thread count. + */ + public CassandraCacheStore(DataSource dataSrc, KeyValuePersistenceSettings settings, int maxPoolSize) { + this.dataSrc = dataSrc; + this.controller = new PersistenceController(settings); + this.maxPoolSize = maxPoolSize; + } + + /** {@inheritDoc} */ + @Override public void loadCache(IgniteBiInClosure clo, Object... args) throws CacheLoaderException { + if (clo == null) + return; + + if (args == null || args.length == 0) + args = new String[] {"select * from " + controller.getPersistenceSettings().getKeyspace() + "." 
+ cassandraTable() + ";"}; + + ExecutorService pool = null; + + Collection> futs = new ArrayList<>(args.length); + + try { + pool = Executors.newFixedThreadPool(maxPoolSize, new IgniteThreadFactory(ignite.name(), CACHE_LOADER_THREAD_NAME)); + + CassandraSession ses = getCassandraSession(); + + for (Object obj : args) { + LoadCacheCustomQueryWorker task = null; + + if (obj instanceof Statement) + task = new LoadCacheCustomQueryWorker<>(ses, (Statement)obj, controller, log, clo); + else if (obj instanceof String) { + String qry = ((String)obj).trim(); + + if (qry.toLowerCase().startsWith("select")) + task = new LoadCacheCustomQueryWorker<>(ses, (String)obj, controller, log, clo); + } + + if (task != null) + futs.add(pool.submit(task)); + } + + for (Future fut : futs) + U.get(fut); + + if (log != null && log.isDebugEnabled() && storeSes != null) + log.debug("Cache loaded from db: " + storeSes.cacheName()); + } + catch (IgniteCheckedException e) { + if (storeSes != null) + throw new CacheLoaderException("Failed to load Ignite cache: " + storeSes.cacheName(), e.getCause()); + else + throw new CacheLoaderException("Failed to load cache", e.getCause()); + } + finally { + U.shutdownNow(getClass(), pool, log); + } + } + + /** {@inheritDoc} */ + @Override public void sessionEnd(boolean commit) throws CacheWriterException { + if (!storeSes.isWithinTransaction()) + return; + + List mutations = mutations(); + if (mutations == null || mutations.isEmpty()) + return; + + CassandraSession ses = getCassandraSession(); + + try { + ses.execute(mutations); + } + finally { + mutations.clear(); + U.closeQuiet(ses); + } + } + + /** {@inheritDoc} */ + @SuppressWarnings({"unchecked"}) + @Override public V load(final K key) throws CacheLoaderException { + if (key == null) + return null; + + CassandraSession ses = getCassandraSession(); + + try { + return ses.execute(new ExecutionAssistant() { + /** {@inheritDoc} */ + @Override public boolean tableExistenceRequired() { + return false; + } + + /** {@inheritDoc} */ + @Override public String getTable() { + return cassandraTable(); + } + + /** {@inheritDoc} */ + @Override public String getStatement() { + return controller.getLoadStatement(cassandraTable(), false); + } + + /** {@inheritDoc} */ + @Override public BoundStatement bindStatement(PreparedStatement statement) { + return controller.bindKey(statement, key); + } + + /** {@inheritDoc} */ + @Override public KeyValuePersistenceSettings getPersistenceSettings() { + return controller.getPersistenceSettings(); + } + + /** {@inheritDoc} */ + @Override public String operationName() { + return "READ"; + } + + /** {@inheritDoc} */ + @Override public V process(Row row) { + return row == null ? 
null : (V)controller.buildValueObject(row); + } + }); + } + finally { + U.closeQuiet(ses); + } + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override public Map loadAll(Iterable keys) throws CacheLoaderException { + if (keys == null || !keys.iterator().hasNext()) + return new HashMap<>(); + + CassandraSession ses = getCassandraSession(); + + try { + return ses.execute(new GenericBatchExecutionAssistant, K>() { + private Map data = new HashMap<>(); + + /** {@inheritDoc} */ + @Override public String getTable() { + return cassandraTable(); + } + + /** {@inheritDoc} */ + @Override public String getStatement() { + return controller.getLoadStatement(cassandraTable(), true); + } + + /** {@inheritDoc} */ + @Override public BoundStatement bindStatement(PreparedStatement statement, K key) { + return controller.bindKey(statement, key); + } + + /** {@inheritDoc} */ + @Override public KeyValuePersistenceSettings getPersistenceSettings() { + return controller.getPersistenceSettings(); + } + + /** {@inheritDoc} */ + @Override public String operationName() { + return "BULK_READ"; + } + + /** {@inheritDoc} */ + @Override public Map processedData() { + return data; + } + + /** {@inheritDoc} */ + @Override protected void process(Row row) { + if (row != null) + data.put((K)controller.buildKeyObject(row), (V)controller.buildValueObject(row)); + } + }, keys); + } + finally { + U.closeQuiet(ses); + } + } + + /** {@inheritDoc} */ + @Override public void write(final Cache.Entry entry) throws CacheWriterException { + if (entry == null || entry.getKey() == null) + return; + + if (storeSes.isWithinTransaction()) { + accumulate(new WriteMutation(entry, cassandraTable(), controller)); + return; + } + + CassandraSession ses = getCassandraSession(); + + try { + ses.execute(new ExecutionAssistant() { + /** {@inheritDoc} */ + @Override public boolean tableExistenceRequired() { + return true; + } + + /** {@inheritDoc} */ + @Override public String getTable() { + return cassandraTable(); + } + + /** {@inheritDoc} */ + @Override public String getStatement() { + return controller.getWriteStatement(cassandraTable()); + } + + /** {@inheritDoc} */ + @Override public BoundStatement bindStatement(PreparedStatement statement) { + return controller.bindKeyValue(statement, entry.getKey(), entry.getValue()); + } + + /** {@inheritDoc} */ + @Override public KeyValuePersistenceSettings getPersistenceSettings() { + return controller.getPersistenceSettings(); + } + + /** {@inheritDoc} */ + @Override public String operationName() { + return "WRITE"; + } + + /** {@inheritDoc} */ + @Override public Void process(Row row) { + return null; + } + }); + } + finally { + U.closeQuiet(ses); + } + } + + /** {@inheritDoc} */ + @Override public void writeAll(Collection> entries) throws CacheWriterException { + if (entries == null || entries.isEmpty()) + return; + + if (storeSes.isWithinTransaction()) { + for (Cache.Entry entry : entries) + accumulate(new WriteMutation(entry, cassandraTable(), controller)); + + return; + } + + CassandraSession ses = getCassandraSession(); + + try { + ses.execute(new GenericBatchExecutionAssistant>() { + /** {@inheritDoc} */ + @Override public String getTable() { + return cassandraTable(); + } + + /** {@inheritDoc} */ + @Override public String getStatement() { + return controller.getWriteStatement(cassandraTable()); + } + + /** {@inheritDoc} */ + @Override public BoundStatement bindStatement(PreparedStatement statement, + Cache.Entry entry) { + return controller.bindKeyValue(statement, entry.getKey(), 
entry.getValue()); + } + + /** {@inheritDoc} */ + @Override public KeyValuePersistenceSettings getPersistenceSettings() { + return controller.getPersistenceSettings(); + } + + /** {@inheritDoc} */ + @Override public String operationName() { + return "BULK_WRITE"; + } + + /** {@inheritDoc} */ + @Override public boolean tableExistenceRequired() { + return true; + } + }, entries); + } + finally { + U.closeQuiet(ses); + } + } + + /** {@inheritDoc} */ + @Override public void delete(final Object key) throws CacheWriterException { + if (key == null) + return; + + if (storeSes.isWithinTransaction()) { + accumulate(new DeleteMutation(key, cassandraTable(), controller)); + return; + } + + CassandraSession ses = getCassandraSession(); + + try { + ses.execute(new ExecutionAssistant() { + /** {@inheritDoc} */ + @Override public boolean tableExistenceRequired() { + return false; + } + + /** {@inheritDoc} */ + @Override public String getTable() { + return cassandraTable(); + } + + /** {@inheritDoc} */ + @Override public String getStatement() { + return controller.getDeleteStatement(cassandraTable()); + } + + /** {@inheritDoc} */ + @Override public BoundStatement bindStatement(PreparedStatement statement) { + return controller.bindKey(statement, key); + } + + /** {@inheritDoc} */ + @Override public KeyValuePersistenceSettings getPersistenceSettings() { + return controller.getPersistenceSettings(); + } + + /** {@inheritDoc} */ + @Override public String operationName() { + return "DELETE"; + } + + /** {@inheritDoc} */ + @Override public Void process(Row row) { + return null; + } + }); + } + finally { + U.closeQuiet(ses); + } + } + + /** {@inheritDoc} */ + @Override public void deleteAll(Collection keys) throws CacheWriterException { + if (keys == null || keys.isEmpty()) + return; + + if (storeSes.isWithinTransaction()) { + for (Object key : keys) + accumulate(new DeleteMutation(key, cassandraTable(), controller)); + + return; + } + + CassandraSession ses = getCassandraSession(); + + try { + ses.execute(new GenericBatchExecutionAssistant() { + /** {@inheritDoc} */ + @Override public String getTable() { + return cassandraTable(); + } + + /** {@inheritDoc} */ + @Override public String getStatement() { + return controller.getDeleteStatement(cassandraTable()); + } + + /** {@inheritDoc} */ + @Override public BoundStatement bindStatement(PreparedStatement statement, Object key) { + return controller.bindKey(statement, key); + } + + /** {@inheritDoc} */ + @Override public KeyValuePersistenceSettings getPersistenceSettings() { + return controller.getPersistenceSettings(); + } + + /** {@inheritDoc} */ + @Override public String operationName() { + return "BULK_DELETE"; + } + }, keys); + } + finally { + U.closeQuiet(ses); + } + } + + /** + * Gets Cassandra session wrapper or creates new if it doesn't exist. + * This wrapper hides all the low-level Cassandra interaction details by providing only high-level methods. + * + * @return Cassandra session wrapper. + */ + private CassandraSession getCassandraSession() { + return dataSrc.session(log != null ? log : new NullLogger()); + } + + /** + * Returns table name to use for all Cassandra based operations (READ/WRITE/DELETE). + * + * @return Table name. + */ + private String cassandraTable() { + return controller.getPersistenceSettings().getTable() != null ? + controller.getPersistenceSettings().getTable() : storeSes.cacheName().trim().toLowerCase(); + } + + /** + * Accumulates mutation in the transaction buffer. + * + * @param mutation Mutation operation. 
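+ * <p>
+ * Illustrative flow (cache and key names are hypothetical): within an Ignite transaction the
+ * {@code write()}/{@code delete()} methods above call this method instead of hitting Cassandra,
+ * and {@code sessionEnd(true)} later flushes the buffered mutations via {@code ses.execute(mutations)}:
+ * <pre>{@code
+ * try (Transaction tx = ignite.transactions().txStart()) {
+ *     personCache.put(1L, person);   // buffered as WriteMutation
+ *     personCache.remove(2L);        // buffered as DeleteMutation
+ *     tx.commit();                   // sessionEnd(true) executes the buffer
+ * }
+ * }</pre>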
+ */ + private void accumulate(Mutation mutation) { + //noinspection unchecked + List mutations = (List)storeSes.properties().get(TRANSACTION_BUFFER); + + if (mutations == null) { + mutations = new LinkedList<>(); + storeSes.properties().put(TRANSACTION_BUFFER, mutations); + } + + mutations.add(mutation); + } + + /** + * Returns all the mutations performed withing transaction. + * + * @return Mutations + */ + private List mutations() { + //noinspection unchecked + return (List)storeSes.properties().get(TRANSACTION_BUFFER); + } + + /** {@inheritDoc} */ + @Override public String toString() { + return S.toString(CassandraCacheStore.class, this); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/CassandraCacheStoreFactory.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/CassandraCacheStoreFactory.java new file mode 100644 index 0000000000000..d170949e63807 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/CassandraCacheStoreFactory.java @@ -0,0 +1,201 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra; + +import javax.cache.configuration.Factory; +import org.apache.ignite.IgniteException; +import org.apache.ignite.cache.store.cassandra.datasource.DataSource; +import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings; +import org.apache.ignite.internal.IgniteComponentType; +import org.apache.ignite.internal.util.spring.IgniteSpringHelper; +import org.apache.ignite.resources.SpringApplicationContextResource; + +/** + * Factory class to instantiate {@link CassandraCacheStore}. + * + * @param Ignite cache key type + * @param Ignite cache value type + */ +public class CassandraCacheStoreFactory implements Factory> { + /** */ + private static final long serialVersionUID = 0L; + + /** Auto-injected Spring ApplicationContext resource. */ + @SpringApplicationContextResource + private Object appCtx; + + /** Name of data source bean. */ + private String dataSrcBean; + + /** Name of persistence settings bean. */ + private String persistenceSettingsBean; + + /** Data source. */ + private DataSource dataSrc; + + /** Persistence settings. */ + private KeyValuePersistenceSettings persistenceSettings; + + /** Max workers thread count. These threads are responsible for load cache. */ + private int maxPoolSize = Runtime.getRuntime().availableProcessors(); + + /** {@inheritDoc} */ + @Override public CassandraCacheStore create() { + return new CassandraCacheStore<>(getDataSource(), getPersistenceSettings(), getMaxPoolSize()); + } + + /** + * Sets data source. 
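+ * <p>
+ * Minimal programmatic wiring sketch (cache, key and value names are illustrative; the
+ * {@code settings} instance is assumed to be a {@code KeyValuePersistenceSettings} built
+ * elsewhere from a persistence descriptor XML):
+ * <pre>{@code
+ * DataSource ds = new DataSource();
+ * ds.setContactPoints("127.0.0.1");
+ *
+ * CassandraCacheStoreFactory<Long, Person> factory = new CassandraCacheStoreFactory<>();
+ * factory.setDataSource(ds);
+ * factory.setPersistenceSettings(settings);
+ *
+ * CacheConfiguration<Long, Person> ccfg = new CacheConfiguration<>("persons");
+ * ccfg.setCacheStoreFactory(factory);
+ * ccfg.setReadThrough(true);
+ * ccfg.setWriteThrough(true);
+ *
+ * // loadCache() also accepts custom SELECT queries (see CassandraCacheStore.loadCache):
+ * ignite.getOrCreateCache(ccfg).loadCache(null, "select * from test_keyspace.persons;");
+ * }</pre>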
+ * + * @param dataSrc Data source. + * + * @return {@code This} for chaining. + */ + public CassandraCacheStoreFactory setDataSource(DataSource dataSrc) { + this.dataSrc = dataSrc; + + return this; + } + + /** + * Sets data source bean name. + * + * @param beanName Data source bean name. + * @return {@code This} for chaining. + */ + public CassandraCacheStoreFactory setDataSourceBean(String beanName) { + this.dataSrcBean = beanName; + + return this; + } + + /** + * Sets persistence settings. + * + * @param settings Persistence settings. + * @return {@code This} for chaining. + */ + public CassandraCacheStoreFactory setPersistenceSettings(KeyValuePersistenceSettings settings) { + this.persistenceSettings = settings; + + return this; + } + + /** + * Sets persistence settings bean name. + * + * @param beanName Persistence settings bean name. + * @return {@code This} for chaining. + */ + public CassandraCacheStoreFactory setPersistenceSettingsBean(String beanName) { + this.persistenceSettingsBean = beanName; + + return this; + } + + /** + * @return Data source. + */ + private DataSource getDataSource() { + if (dataSrc != null) + return dataSrc; + + if (dataSrcBean == null) + throw new IllegalStateException("Either DataSource bean or DataSource itself should be specified"); + + if (appCtx == null) { + throw new IllegalStateException("Failed to get Cassandra DataSource cause Spring application " + + "context wasn't injected into CassandraCacheStoreFactory"); + } + + Object obj = loadSpringContextBean(appCtx, dataSrcBean); + + if (!(obj instanceof DataSource)) + throw new IllegalStateException("Incorrect connection bean '" + dataSrcBean + "' specified"); + + return dataSrc = (DataSource)obj; + } + + /** + * @return Persistence settings. + */ + private KeyValuePersistenceSettings getPersistenceSettings() { + if (persistenceSettings != null) + return persistenceSettings; + + if (persistenceSettingsBean == null) { + throw new IllegalStateException("Either persistence settings bean or persistence settings itself " + + "should be specified"); + } + + if (appCtx == null) { + throw new IllegalStateException("Failed to get Cassandra persistence settings cause Spring application " + + "context wasn't injected into CassandraCacheStoreFactory"); + } + + Object obj = loadSpringContextBean(appCtx, persistenceSettingsBean); + + if (!(obj instanceof KeyValuePersistenceSettings)) { + throw new IllegalStateException("Incorrect persistence settings bean '" + + persistenceSettingsBean + "' specified"); + } + + return persistenceSettings = (KeyValuePersistenceSettings)obj; + } + + /** + * Get maximum workers thread count. These threads are responsible for queries execution. + * + * @return Maximum workers thread count. + */ + public int getMaxPoolSize() { + return maxPoolSize; + } + + /** + * Set Maximum workers thread count. These threads are responsible for queries execution. + * + * @param maxPoolSize Max workers thread count. + * @return {@code This} for chaining. + */ + public CassandraCacheStoreFactory setMaxPoolSize(int maxPoolSize) { + this.maxPoolSize = maxPoolSize; + + return this; + } + + /** + * Loads bean from Spring ApplicationContext. + * + * @param appCtx Application context. + * @param beanName Bean name to load. + * @return Loaded bean. 
+ */ + private Object loadSpringContextBean(Object appCtx, String beanName) { + try { + IgniteSpringHelper spring = IgniteComponentType.SPRING.create(false); + return spring.loadBeanFromAppContext(appCtx, beanName); + } + catch (Exception e) { + throw new IgniteException( + "Failed to load bean in application context [beanName=" + beanName + ", igniteConfig=" + appCtx + ']', + e + ); + } + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/CassandraHelper.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/CassandraHelper.java new file mode 100644 index 0000000000000..139a97d030c9a --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/CassandraHelper.java @@ -0,0 +1,182 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.common; + +import java.net.InetSocketAddress; +import java.util.Map; +import java.util.regex.Pattern; +import com.datastax.driver.core.Cluster; +import com.datastax.driver.core.DataType; +import com.datastax.driver.core.Session; +import com.datastax.driver.core.exceptions.DriverException; +import com.datastax.driver.core.exceptions.InvalidQueryException; +import com.datastax.driver.core.exceptions.NoHostAvailableException; +import com.datastax.driver.core.exceptions.ReadTimeoutException; +import org.apache.ignite.internal.util.typedef.internal.U; + +/** + * Helper class providing methods to work with Cassandra session and exceptions + */ +public class CassandraHelper { + /** Cassandra error message if specified keyspace doesn't exist. */ + private static final Pattern KEYSPACE_EXIST_ERROR1 = Pattern.compile("Keyspace [0-9a-zA-Z_]+ does not exist"); + + /** Cassandra error message if trying to create table inside nonexistent keyspace. */ + private static final Pattern KEYSPACE_EXIST_ERROR2 = Pattern.compile("Cannot add table '[0-9a-zA-Z_]+' to non existing keyspace.*"); + + /** Cassandra error message if trying to create table inside nonexistent keyspace. */ + private static final Pattern KEYSPACE_EXIST_ERROR3 = Pattern.compile("Error preparing query, got ERROR INVALID: " + + "Keyspace [0-9a-zA-Z_]+ does not exist"); + + /** Cassandra error message if specified table doesn't exist. */ + private static final Pattern TABLE_EXIST_ERROR1 = Pattern.compile("unconfigured table [0-9a-zA-Z_]+"); + + /** Cassandra error message if specified table doesn't exist. */ + private static final String TABLE_EXIST_ERROR2 = "Error preparing query, got ERROR INVALID: unconfigured table"; + + /** Cassandra error message if specified table doesn't exist. 
*/ + private static final Pattern TABLE_EXIST_ERROR3 = Pattern.compile("unconfigured columnfamily [0-9a-zA-Z_]+"); + + /** Cassandra error message if trying to use prepared statement created from another session. */ + private static final String PREP_STATEMENT_CLUSTER_INSTANCE_ERROR = "You may have used a PreparedStatement that " + + "was created with another Cluster instance"; + + /** + * Closes Cassandra driver session. + * + * @param driverSes Session to close. + */ + public static void closeSession(Session driverSes) { + if (driverSes == null) + return; + + Cluster cluster = driverSes.getCluster(); + + if (!driverSes.isClosed()) + U.closeQuiet(driverSes); + + if (!cluster.isClosed()) + U.closeQuiet(cluster); + } + + /** + * Checks if Cassandra keyspace absence error occur. + * + * @param e Exception to check. + * @return {@code true} in case of keyspace absence error. + */ + public static boolean isKeyspaceAbsenceError(Throwable e) { + while (e != null) { + if (e instanceof InvalidQueryException && + (KEYSPACE_EXIST_ERROR1.matcher(e.getMessage()).matches() || + KEYSPACE_EXIST_ERROR2.matcher(e.getMessage()).matches())) + return true; + + e = e.getCause(); + } + + return false; + } + + /** + * Checks if Cassandra table absence error occur. + * + * @param e Exception to check. + * @return {@code true} in case of table absence error. + */ + public static boolean isTableAbsenceError(Throwable e) { + while (e != null) { + if (e instanceof InvalidQueryException && + (TABLE_EXIST_ERROR1.matcher(e.getMessage()).matches() || + TABLE_EXIST_ERROR3.matcher(e.getMessage()).matches() || + KEYSPACE_EXIST_ERROR1.matcher(e.getMessage()).matches() || + KEYSPACE_EXIST_ERROR2.matcher(e.getMessage()).matches())) + return true; + + if (e instanceof NoHostAvailableException && ((NoHostAvailableException)e).getErrors() != null) { + NoHostAvailableException ex = (NoHostAvailableException)e; + + for (Map.Entry entry : ex.getErrors().entrySet()) { + Throwable error = entry.getValue(); + + if (error instanceof DriverException && + (error.getMessage().contains(TABLE_EXIST_ERROR2) || + KEYSPACE_EXIST_ERROR3.matcher(error.getMessage()).matches())) + return true; + } + } + + e = e.getCause(); + } + + return false; + } + + /** + * Checks if Cassandra host availability error occur, thus host became unavailable. + * + * @param e Exception to check. + * @return {@code true} in case of host not available error. + */ + public static boolean isHostsAvailabilityError(Throwable e) { + while (e != null) { + if (e instanceof NoHostAvailableException || + e instanceof ReadTimeoutException) + return true; + + e = e.getCause(); + } + + return false; + } + + /** + * Checks if Cassandra error occur because of prepared statement created in one session was used in another session. + * + * @param e Exception to check. + * @return {@code true} in case of invalid usage of prepared statement. + */ + public static boolean isPreparedStatementClusterError(Throwable e) { + while (e != null) { + if (e instanceof InvalidQueryException && e.getMessage().contains(PREP_STATEMENT_CLUSTER_INSTANCE_ERROR)) + return true; + + e = e.getCause(); + } + + return false; + } + + /** + * Checks if two Java classes are Cassandra compatible - mapped to the same Cassandra type. + * + * @param type1 First type. + * @param type2 Second type. + * @return {@code true} if classes are compatible and {@code false} if not. 
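+ * <p>
+ * For example, based on the mapping maintained by {@code PropertyMappingHelper}:
+ * <pre>{@code
+ * CassandraHelper.isCassandraCompatibleTypes(Integer.class, int.class);  // true  - both map to INT
+ * CassandraHelper.isCassandraCompatibleTypes(String.class, UUID.class);  // false - TEXT vs UUID
+ * }</pre>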
+ */ + public static boolean isCassandraCompatibleTypes(Class type1, Class type2) { + if (type1 == null || type2 == null) + return false; + + DataType.Name t1 = PropertyMappingHelper.getCassandraType(type1); + DataType.Name t2 = PropertyMappingHelper.getCassandraType(type2); + + return t1 != null && t2 != null && t1.equals(t2); + } +} + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/PropertyMappingHelper.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/PropertyMappingHelper.java new file mode 100644 index 0000000000000..9f6d457a8e210 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/PropertyMappingHelper.java @@ -0,0 +1,239 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.common; + +import java.beans.PropertyDescriptor; +import java.lang.reflect.Field; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.net.InetAddress; +import java.nio.ByteBuffer; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; +import com.datastax.driver.core.DataType; +import com.datastax.driver.core.Row; +import org.apache.commons.beanutils.PropertyUtils; +import org.apache.ignite.cache.store.cassandra.persistence.PojoFieldAccessor; +import org.apache.ignite.cache.store.cassandra.serializer.Serializer; + +/** + * Helper class providing bunch of methods to discover fields of POJO objects and + * map builtin Java types to appropriate Cassandra types. + */ +public class PropertyMappingHelper { + /** Bytes array Class type. */ + private static final Class BYTES_ARRAY_CLASS = (new byte[] {}).getClass(); + + /** Mapping from Java to Cassandra types. 
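+ * For example {@code String -> TEXT}, {@code java.util.Date -> TIMESTAMP} and {@code byte[] -> BLOB};
+ * see the static initializer below for the full list.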
*/ + private static final Map JAVA_TO_CASSANDRA_MAPPING = new HashMap(); + + static { + put(String.class, DataType.Name.TEXT); + put(Integer.class, DataType.Name.INT); + put(int.class, DataType.Name.INT); + put(Short.class, DataType.Name.SMALLINT); + put(short.class, DataType.Name.SMALLINT); + put(Long.class, DataType.Name.BIGINT); + put(long.class, DataType.Name.BIGINT); + put(Double.class, DataType.Name.DOUBLE); + put(double.class, DataType.Name.DOUBLE); + put(Boolean.class, DataType.Name.BOOLEAN); + put(boolean.class, DataType.Name.BOOLEAN); + put(Float.class, DataType.Name.FLOAT); + put(float.class, DataType.Name.FLOAT); + put(ByteBuffer.class, DataType.Name.BLOB); + put(BYTES_ARRAY_CLASS, DataType.Name.BLOB); + put(BigDecimal.class, DataType.Name.DECIMAL); + put(InetAddress.class, DataType.Name.INET); + put(Date.class, DataType.Name.TIMESTAMP); + put(UUID.class, DataType.Name.UUID); + put(BigInteger.class, DataType.Name.VARINT); + } + + /** */ + private static void put(Class cls, DataType.Name name) { + JAVA_TO_CASSANDRA_MAPPING.put(cls, name); + } + + /** + * Maps Cassandra type to specified Java type. + * + * @param clazz java class. + * + * @return Cassandra type. + */ + public static DataType.Name getCassandraType(Class clazz) { + return JAVA_TO_CASSANDRA_MAPPING.get(clazz); + } + + /** + * Returns property accessor by class property name. + * + * @param clazz class from which to get property accessor. + * @param prop name of the property. + * + * @return property accessor. + */ + public static PojoFieldAccessor getPojoFieldAccessor(Class clazz, String prop) { + PropertyDescriptor[] descriptors = PropertyUtils.getPropertyDescriptors(clazz); + + if (descriptors != null) { + for (PropertyDescriptor descriptor : descriptors) { + if (descriptor.getName().equals(prop)) { + Field field = null; + + try { + field = clazz.getDeclaredField(prop); + } + catch (Throwable ignore) { + } + + return new PojoFieldAccessor(descriptor, field); + } + } + } + + try { + return new PojoFieldAccessor(clazz.getDeclaredField(prop)); + } + catch (Throwable e) { + throw new IllegalArgumentException("POJO class " + clazz.getName() + " doesn't have '" + prop + "' property"); + } + } + + /** + * Returns value of specific column in the row returned by CQL statement. + * + * @param row row returned by CQL statement. + * @param col column name. + * @param clazz java class to which column value should be casted. + * @param serializer serializer to use if column stores BLOB otherwise could be null. + * + * @return row column value. + */ + public static Object getCassandraColumnValue(Row row, String col, Class clazz, Serializer serializer) { + if (String.class.equals(clazz)) + return row.getString(col); + + if (Integer.class.equals(clazz)) + return row.isNull(col) ? null : row.getInt(col); + + if (int.class.equals(clazz)) { + if (row.isNull(col)) { + throw new IllegalArgumentException("Can't cast null value from Cassandra table column '" + col + + "' to " + "int value used in domain object model"); + } + + return row.getInt(col); + } + + if (Short.class.equals(clazz)) + return row.isNull(col) ? null : row.getShort(col); + + if (short.class.equals(clazz)) { + if (row.isNull(col)) { + throw new IllegalArgumentException("Can't cast null value from Cassandra table column '" + col + + "' to " + "short value used in domain object model"); + } + + return row.getShort(col); + } + + if (Long.class.equals(clazz)) + return row.isNull(col) ? 
null : row.getLong(col); + + if (long.class.equals(clazz)) { + if (row.isNull(col)) { + throw new IllegalArgumentException("Can't cast null value from Cassandra table column '" + col + + "' to " + "long value used in domain object model"); + } + + return row.getLong(col); + } + + if (Double.class.equals(clazz)) + return row.isNull(col) ? null : row.getDouble(col); + + if (double.class.equals(clazz)) { + if (row.isNull(col)) { + throw new IllegalArgumentException("Can't cast null value from Cassandra table column '" + col + + "' to " + "double value used in domain object model"); + } + + return row.getDouble(col); + } + + if (Boolean.class.equals(clazz)) + return row.isNull(col) ? null : row.getBool(col); + + if (boolean.class.equals(clazz)) { + if (row.isNull(col)) { + throw new IllegalArgumentException("Can't cast null value from Cassandra table column '" + col + + "' to " + "boolean value used in domain object model"); + } + + return row.getBool(col); + } + + if (Float.class.equals(clazz)) + return row.isNull(col) ? null : row.getFloat(col); + + if (float.class.equals(clazz)) { + if (row.isNull(col)) { + throw new IllegalArgumentException("Can't cast null value from Cassandra table column '" + col + + "' to " + "float value used in domain object model"); + } + + return row.getFloat(col); + } + + if (ByteBuffer.class.equals(clazz)) + return row.getBytes(col); + + if (PropertyMappingHelper.BYTES_ARRAY_CLASS.equals(clazz)) { + ByteBuffer buf = row.getBytes(col); + return buf == null ? null : buf.array(); + } + + if (BigDecimal.class.equals(clazz)) + return row.getDecimal(col); + + if (InetAddress.class.equals(clazz)) + return row.getInet(col); + + if (Date.class.equals(clazz)) + return row.getTimestamp(col); + + if (UUID.class.equals(clazz)) + return row.getUUID(col); + + if (BigInteger.class.equals(clazz)) + return row.getVarint(col); + + if (serializer == null) { + throw new IllegalStateException("Can't deserialize value from '" + col + "' Cassandra column, " + + "cause there is no BLOB serializer specified"); + } + + ByteBuffer buf = row.getBytes(col); + + return buf == null ? null : serializer.deserialize(buf); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/RandomSleeper.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/RandomSleeper.java new file mode 100644 index 0000000000000..dcf5334e1a4de --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/RandomSleeper.java @@ -0,0 +1,105 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.cache.store.cassandra.common; + +import java.util.Random; +import org.apache.ignite.IgniteException; +import org.apache.ignite.IgniteLogger; + +/** + * Provides sleep method with randomly selected sleep time from specified range and + * incrementally shifts sleep time range for each next sleep attempt + * + */ +public class RandomSleeper { + /** */ + private int min; + + /** */ + private int max; + + /** */ + private int incr; + + /** */ + private IgniteLogger log; + + /** */ + private Random random = new Random(System.currentTimeMillis()); + + /** */ + private int summary; + + /** + * Creates sleeper instance. + * + * @param min minimum sleep time (in milliseconds) + * @param max maximum sleep time (in milliseconds) + * @param incr time range shift increment (in milliseconds) + * @param log Instance of the Ignite logger. + */ + public RandomSleeper(int min, int max, int incr, IgniteLogger log) { + if (min <= 0) + throw new IllegalArgumentException("Incorrect min time specified: " + min); + + if (max <= min) + throw new IllegalArgumentException("Incorrect max time specified: " + max); + + if (incr < 10) + throw new IllegalArgumentException("Incorrect increment specified: " + incr); + + this.min = min; + this.max = max; + this.incr = incr; + this.log = log; + } + + /** + * Sleeps + */ + public void sleep() { + try { + int timeout = random.nextInt(max - min + 1) + min; + + if (log != null) + log.info("Sleeping for " + timeout + "ms"); + + Thread.sleep(timeout); + + summary += timeout; + + if (log != null) + log.info("Sleep completed"); + } + catch (InterruptedException e) { + throw new IgniteException("Random sleep interrupted", e); + } + + min += incr; + max += incr; + } + + /** + * Returns summary sleep time. + * + * @return Summary sleep time in milliseconds. + */ + public int getSleepSummary() { + return summary; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/SystemHelper.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/SystemHelper.java new file mode 100644 index 0000000000000..5d5148831eae1 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/SystemHelper.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.common; + +import java.net.InetAddress; +import java.net.UnknownHostException; + +/** + * Helper class providing system information about the host (ip, hostname, os and etc.) + */ +public class SystemHelper { + /** System line separator. 
*/ + public static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** Host name. */ + public static final String HOST_NAME; + + /** Host IP address */ + public static final String HOST_IP; + + static { + try { + InetAddress addr = InetAddress.getLocalHost(); + HOST_NAME = addr.getHostName(); + HOST_IP = addr.getHostAddress(); + } + catch (UnknownHostException e) { + throw new IllegalStateException("Failed to get host/ip of current computer", e); + } + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/package-info.java new file mode 100644 index 0000000000000..2505a24e0c78f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains commonly used helper classes + */ + +package org.apache.ignite.cache.store.cassandra.common; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/Credentials.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/Credentials.java new file mode 100644 index 0000000000000..a2358a63d136f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/Credentials.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.datasource; + +import java.io.Serializable; + +/** + * Provides credentials for Cassandra (instead of specifying user/password directly in Spring context XML). 
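+ * <p>
+ * A custom implementation may resolve the values at runtime, e.g. from environment variables
+ * (variable names below are illustrative); {@code PlainCredentials} simply wraps a fixed
+ * user/password pair:
+ * <pre>{@code
+ * public class EnvCredentials implements Credentials {
+ *     public String getUser()     { return System.getenv("CASSANDRA_USER"); }
+ *     public String getPassword() { return System.getenv("CASSANDRA_PASSWORD"); }
+ * }
+ * }</pre>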
+ */ +public interface Credentials extends Serializable { + /** + * Returns user name + * + * @return user name + */ + public String getUser(); + + /** + * Returns password + * + * @return password + */ + public String getPassword(); +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/DataSource.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/DataSource.java new file mode 100644 index 0000000000000..19ebbe311e557 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/DataSource.java @@ -0,0 +1,658 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.datasource; + +import java.io.Externalizable; +import java.io.IOException; +import java.io.ObjectInput; +import java.io.ObjectOutput; +import java.io.Serializable; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.util.LinkedList; +import java.util.List; +import java.util.UUID; +import com.datastax.driver.core.AuthProvider; +import com.datastax.driver.core.Cluster; +import com.datastax.driver.core.ConsistencyLevel; +import com.datastax.driver.core.NettyOptions; +import com.datastax.driver.core.PoolingOptions; +import com.datastax.driver.core.ProtocolOptions; +import com.datastax.driver.core.ProtocolVersion; +import com.datastax.driver.core.SSLOptions; +import com.datastax.driver.core.SocketOptions; +import com.datastax.driver.core.policies.AddressTranslator; +import com.datastax.driver.core.policies.LoadBalancingPolicy; +import com.datastax.driver.core.policies.ReconnectionPolicy; +import com.datastax.driver.core.policies.RetryPolicy; +import com.datastax.driver.core.policies.SpeculativeExecutionPolicy; +import org.apache.ignite.IgniteException; +import org.apache.ignite.IgniteLogger; +import org.apache.ignite.cache.store.cassandra.session.CassandraSession; +import org.apache.ignite.cache.store.cassandra.session.CassandraSessionImpl; +import org.apache.ignite.internal.util.tostring.GridToStringExclude; +import org.apache.ignite.internal.util.typedef.internal.S; +import org.apache.ignite.internal.util.typedef.internal.U; + +/** + * Data source abstraction to specify configuration of the Cassandra session to be used. + */ +public class DataSource implements Externalizable { + /** */ + private static final long serialVersionUID = 0L; + + /** + * Null object, used as a replacement for those Cassandra connection options which + * don't support serialization (RetryPolicy, LoadBalancingPolicy and etc). 
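+ * The {@code writeObject(...)}/{@code readObject(...)} helpers below substitute this marker for such
+ * options in {@code writeExternal(...)} and restore them as {@code null} in {@code readExternal(...)}.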
+ */ + private static final UUID NULL_OBJECT = UUID.fromString("45ffae47-3193-5910-84a2-048fe65735d9"); + + /** Default expiration timeout for Cassandra driver session. */ + public static final long DFLT_SESSION_EXPIRATION_TIMEOUT = 300000; // 5 minutes. + + /** Number of rows to immediately fetch in CQL statement execution. */ + private Integer fetchSize; + + /** Consistency level for READ operations. */ + private ConsistencyLevel readConsistency; + + /** Consistency level for WRITE operations. */ + private ConsistencyLevel writeConsistency; + + /** Username to use for authentication. */ + @GridToStringExclude + private String user; + + /** Password to use for authentication. */ + @GridToStringExclude + private String pwd; + + /** Port to use for Cassandra connection. */ + private Integer port; + + /** List of contact points to connect to Cassandra cluster. */ + private List contactPoints; + + /** List of contact points with ports to connect to Cassandra cluster. */ + private List contactPointsWithPorts; + + /** Maximum time to wait for schema agreement before returning from a DDL query. */ + private Integer maxSchemaAgreementWaitSeconds; + + /** The native protocol version to use. */ + private Integer protoVer; + + /** Compression to use for the transport. */ + private String compression; + + /** Use SSL for communications with Cassandra. */ + private Boolean useSSL; + + /** Enables metrics collection. */ + private Boolean collectMetrix; + + /** Enables JMX reporting of the metrics. */ + private Boolean jmxReporting; + + /** Credentials to use for authentication. */ + private Credentials creds; + + /** Load balancing policy to use. */ + private LoadBalancingPolicy loadBalancingPlc; + + /** Reconnection policy to use. */ + private ReconnectionPolicy reconnectionPlc; + + /** Retry policy to use. */ + private RetryPolicy retryPlc; + + /** Address translator to use. */ + private AddressTranslator addrTranslator; + + /** Speculative execution policy to use. */ + private SpeculativeExecutionPolicy speculativeExecutionPlc; + + /** Authentication provider to use. */ + private AuthProvider authProvider; + + /** SSL options to use. */ + private SSLOptions sslOptions; + + /** Connection pooling options to use. */ + private PoolingOptions poolingOptions; + + /** Socket options to use. */ + private SocketOptions sockOptions; + + /** Netty options to use for connection. */ + private NettyOptions nettyOptions; + + /** Expiration timeout for Cassandra driver session. */ + private long sessionExpirationTimeout = DFLT_SESSION_EXPIRATION_TIMEOUT; + + /** Cassandra session wrapper instance. */ + private volatile CassandraSession ses; + + /** + * Sets user name to use for authentication. + * + * @param user user name + */ + public void setUser(String user) { + this.user = user; + + invalidate(); + } + + /** + * Sets password to use for authentication. + * + * @param pwd password + */ + public void setPassword(String pwd) { + this.pwd = pwd; + + invalidate(); + } + + /** + * Sets port to use for Cassandra connection. + * + * @param port port + */ + public void setPort(int port) { + this.port = port; + + invalidate(); + } + + /** + * Sets list of contact points to connect to Cassandra cluster. + * + * @param points contact points + */ + public void setContactPoints(String... 
points) { + if (points == null || points.length == 0) + return; + + for (String point : points) { + if (point.contains(":")) { + if (contactPointsWithPorts == null) + contactPointsWithPorts = new LinkedList<>(); + + String[] chunks = point.split(":"); + + try { + contactPointsWithPorts.add(InetSocketAddress.createUnresolved(chunks[0].trim(), Integer.parseInt(chunks[1].trim()))); + } + catch (Throwable e) { + throw new IllegalArgumentException("Incorrect contact point '" + point + "' specified for Cassandra cache storage", e); + } + } + else { + if (contactPoints == null) + contactPoints = new LinkedList<>(); + + try { + contactPoints.add(InetAddress.getByName(point)); + } + catch (Throwable e) { + throw new IllegalArgumentException("Incorrect contact point '" + point + "' specified for Cassandra cache storage", e); + } + } + } + + invalidate(); + } + + /** @param seconds Maximum time to wait for schema agreement before returning from a DDL query. */ + public void setMaxSchemaAgreementWaitSeconds(int seconds) { + maxSchemaAgreementWaitSeconds = seconds; + + invalidate(); + } + + /** + * Sets the native protocol version to use. + * + * @param ver version number + */ + public void setProtocolVersion(int ver) { + protoVer = ver; + + invalidate(); + } + + /** + * Sets compression algorithm to use for the transport. + * + * @param compression Compression algorithm. + */ + public void setCompression(String compression) { + this.compression = compression == null || compression.trim().isEmpty() ? null : compression.trim(); + + try { + if (this.compression != null) + ProtocolOptions.Compression.valueOf(this.compression); + } + catch (Throwable e) { + throw new IgniteException("Incorrect compression '" + compression + "' specified for Cassandra connection", e); + } + + invalidate(); + } + + /** + * Enables SSL for communications with Cassandra. + * + * @param use Flag to enable/disable SSL. + */ + public void setUseSSL(boolean use) { + useSSL = use; + + invalidate(); + } + + /** + * Enables metrics collection. + * + * @param collect Flag to enable/disable metrics collection. + */ + public void setCollectMetrix(boolean collect) { + collectMetrix = collect; + + invalidate(); + } + + /** + * Enables JMX reporting of the metrics. + * + * @param enableReporting Flag to enable/disable JMX reporting. + */ + public void setJmxReporting(boolean enableReporting) { + jmxReporting = enableReporting; + + invalidate(); + } + + /** + * Sets number of rows to immediately fetch in CQL statement execution. + * + * @param size Number of rows to fetch. + */ + public void setFetchSize(int size) { + fetchSize = size; + + invalidate(); + } + + /** + * Set consistency level for READ operations. + * + * @param level Consistency level. + */ + public void setReadConsistency(String level) { + readConsistency = parseConsistencyLevel(level); + + invalidate(); + } + + /** + * Set consistency level for WRITE operations. + * + * @param level Consistency level. + */ + public void setWriteConsistency(String level) { + writeConsistency = parseConsistencyLevel(level); + + invalidate(); + } + + /** + * Sets credentials to use for authentication. + * + * @param creds Credentials. + */ + public void setCredentials(Credentials creds) { + this.creds = creds; + + invalidate(); + } + + /** + * Sets load balancing policy. + * + * @param plc Load balancing policy. + */ + public void setLoadBalancingPolicy(LoadBalancingPolicy plc) { + loadBalancingPlc = plc; + + invalidate(); + } + + /** + * Sets reconnection policy. 
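+ * <p>
+ * Example with a DataStax 3.x driver policy (contact points and delay values are illustrative):
+ * <pre>{@code
+ * DataSource ds = new DataSource();
+ * ds.setContactPoints("10.0.0.1", "10.0.0.2");
+ * ds.setReadConsistency("QUORUM");
+ * ds.setWriteConsistency("QUORUM");
+ * ds.setReconnectionPolicy(new ExponentialReconnectionPolicy(100L, 60000L));
+ * }</pre>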
+ * + * @param plc Reconnection policy. + */ + public void setReconnectionPolicy(ReconnectionPolicy plc) { + reconnectionPlc = plc; + + invalidate(); + } + + /** + * Sets retry policy. + * + * @param plc Retry policy. + */ + public void setRetryPolicy(RetryPolicy plc) { + retryPlc = plc; + + invalidate(); + } + + /** + * Sets address translator. + * + * @param translator Address translator. + */ + public void setAddressTranslator(AddressTranslator translator) { + addrTranslator = translator; + + invalidate(); + } + + /** + * Sets speculative execution policy. + * + * @param plc Speculative execution policy. + */ + public void setSpeculativeExecutionPolicy(SpeculativeExecutionPolicy plc) { + speculativeExecutionPlc = plc; + + invalidate(); + } + + /** + * Sets authentication provider. + * + * @param provider Authentication provider. + */ + public void setAuthProvider(AuthProvider provider) { + authProvider = provider; + + invalidate(); + } + + /** + * Sets SSL options. + * + * @param options SSL options. + */ + public void setSslOptions(SSLOptions options) { + sslOptions = options; + + invalidate(); + } + + /** + * Sets pooling options. + * + * @param options pooling options to use. + */ + public void setPoolingOptions(PoolingOptions options) { + poolingOptions = options; + + invalidate(); + } + + /** + * Sets socket options to use. + * + * @param options Socket options. + */ + public void setSocketOptions(SocketOptions options) { + sockOptions = options; + + invalidate(); + } + + /** + * Sets netty options to use. + * + * @param options netty options. + */ + public void setNettyOptions(NettyOptions options) { + nettyOptions = options; + + invalidate(); + } + + /** + * Sets expiration timeout for Cassandra driver session. Idle sessions that are not + * used during this timeout value will be automatically closed and recreated later + * on demand. + *
* <p> + * If set to {@code 0}, timeout is disabled. + * <p>
+ * Default value is {@link #DFLT_SESSION_EXPIRATION_TIMEOUT}. + * + * @param sessionExpirationTimeout Expiration timeout for Cassandra driver session. + */ + public void setSessionExpirationTimeout(long sessionExpirationTimeout) { + this.sessionExpirationTimeout = sessionExpirationTimeout; + + invalidate(); + } + + /** + * Creates Cassandra session wrapper if it wasn't created yet and returns it + * + * @param log logger + * @return Cassandra session wrapper + */ + public synchronized CassandraSession session(IgniteLogger log) { + if (ses != null) + return ses; + + Cluster.Builder builder = Cluster.builder(); + + if (user != null) + builder = builder.withCredentials(user, pwd); + + if (port != null) + builder = builder.withPort(port); + + if (contactPoints != null) + builder = builder.addContactPoints(contactPoints); + + if (contactPointsWithPorts != null) + builder = builder.addContactPointsWithPorts(contactPointsWithPorts); + + if (maxSchemaAgreementWaitSeconds != null) + builder = builder.withMaxSchemaAgreementWaitSeconds(maxSchemaAgreementWaitSeconds); + + if (protoVer != null) + builder = builder.withProtocolVersion(ProtocolVersion.fromInt(protoVer)); + + if (compression != null) { + try { + builder = builder.withCompression(ProtocolOptions.Compression.valueOf(compression.trim().toLowerCase())); + } + catch (IllegalArgumentException e) { + throw new IgniteException("Incorrect compression option '" + compression + "' specified for Cassandra connection", e); + } + } + + if (useSSL != null && useSSL) + builder = builder.withSSL(); + + if (sslOptions != null) + builder = builder.withSSL(sslOptions); + + if (collectMetrix != null && !collectMetrix) + builder = builder.withoutMetrics(); + + if (jmxReporting != null && !jmxReporting) + builder = builder.withoutJMXReporting(); + + if (creds != null) + builder = builder.withCredentials(creds.getUser(), creds.getPassword()); + + if (loadBalancingPlc != null) + builder = builder.withLoadBalancingPolicy(loadBalancingPlc); + + if (reconnectionPlc != null) + builder = builder.withReconnectionPolicy(reconnectionPlc); + + if (retryPlc != null) + builder = builder.withRetryPolicy(retryPlc); + + if (addrTranslator != null) + builder = builder.withAddressTranslator(addrTranslator); + + if (speculativeExecutionPlc != null) + builder = builder.withSpeculativeExecutionPolicy(speculativeExecutionPlc); + + if (authProvider != null) + builder = builder.withAuthProvider(authProvider); + + if (poolingOptions != null) + builder = builder.withPoolingOptions(poolingOptions); + + if (sockOptions != null) + builder = builder.withSocketOptions(sockOptions); + + if (nettyOptions != null) + builder = builder.withNettyOptions(nettyOptions); + + return ses = new CassandraSessionImpl( + builder, fetchSize, readConsistency, writeConsistency, sessionExpirationTimeout, log); + } + + /** {@inheritDoc} */ + @Override public void writeExternal(ObjectOutput out) throws IOException { + out.writeObject(fetchSize); + out.writeObject(readConsistency); + out.writeObject(writeConsistency); + U.writeString(out, user); + U.writeString(out, pwd); + out.writeObject(port); + out.writeObject(contactPoints); + out.writeObject(contactPointsWithPorts); + out.writeObject(maxSchemaAgreementWaitSeconds); + out.writeObject(protoVer); + U.writeString(out, compression); + out.writeObject(useSSL); + out.writeObject(collectMetrix); + out.writeObject(jmxReporting); + out.writeObject(creds); + writeObject(out, loadBalancingPlc); + writeObject(out, reconnectionPlc); + writeObject(out, addrTranslator); + 
writeObject(out, speculativeExecutionPlc); + writeObject(out, authProvider); + writeObject(out, sslOptions); + writeObject(out, poolingOptions); + writeObject(out, sockOptions); + writeObject(out, nettyOptions); + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { + fetchSize = (Integer)in.readObject(); + readConsistency = (ConsistencyLevel)in.readObject(); + writeConsistency = (ConsistencyLevel)in.readObject(); + user = U.readString(in); + pwd = U.readString(in); + port = (Integer)in.readObject(); + contactPoints = (List)in.readObject(); + contactPointsWithPorts = (List)in.readObject(); + maxSchemaAgreementWaitSeconds = (Integer)in.readObject(); + protoVer = (Integer)in.readObject(); + compression = U.readString(in); + useSSL = (Boolean)in.readObject(); + collectMetrix = (Boolean)in.readObject(); + jmxReporting = (Boolean)in.readObject(); + creds = (Credentials)in.readObject(); + loadBalancingPlc = (LoadBalancingPolicy)readObject(in); + reconnectionPlc = (ReconnectionPolicy)readObject(in); + addrTranslator = (AddressTranslator)readObject(in); + speculativeExecutionPlc = (SpeculativeExecutionPolicy)readObject(in); + authProvider = (AuthProvider)readObject(in); + sslOptions = (SSLOptions)readObject(in); + poolingOptions = (PoolingOptions)readObject(in); + sockOptions = (SocketOptions)readObject(in); + nettyOptions = (NettyOptions)readObject(in); + } + + /** + * Helper method used to serialize class members + * @param out the stream to write the object to + * @param obj the object to be written + * @throws IOException Includes any I/O exceptions that may occur + */ + private void writeObject(ObjectOutput out, Object obj) throws IOException { + out.writeObject(obj == null || !(obj instanceof Serializable) ? NULL_OBJECT : obj); + } + + /** + * Helper method used to deserialize class members + * @param in the stream to read data from in order to restore the object + * @throws IOException Includes any I/O exceptions that may occur + * @throws ClassNotFoundException If the class for an object being restored cannot be found + * @return deserialized object + */ + private Object readObject(ObjectInput in) throws IOException, ClassNotFoundException { + Object obj = in.readObject(); + return NULL_OBJECT.equals(obj) ? null : obj; + } + + /** + * Parses consistency level provided as string. + * + * @param level consistency level string. + * + * @return consistency level. + */ + private ConsistencyLevel parseConsistencyLevel(String level) { + if (level == null) + return null; + + try { + return ConsistencyLevel.valueOf(level.trim().toUpperCase()); + } + catch (Throwable e) { + throw new IgniteException("Incorrect consistency level '" + level + "' specified for Cassandra connection", e); + } + } + + /** + * Invalidates session. 
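+ * <p>
+ * Invoked when a configuration property changes (see {@link #setSessionExpirationTimeout(long)} above), so the
+ * next call to {@link #session(IgniteLogger)} rebuilds the driver session with the updated settings.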
+ */ + private synchronized void invalidate() { + ses = null; + } + + /** {@inheritDoc} */ + @Override public String toString() { + return S.toString(DataSource.class, this); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/PlainCredentials.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/PlainCredentials.java new file mode 100644 index 0000000000000..46ebdc543b7f8 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/PlainCredentials.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.datasource; + +/** + * Simple implementation of {@link Credentials} which just uses its constructor to hold user/password values. + */ +public class PlainCredentials implements Credentials { + /** */ + private static final long serialVersionUID = 0L; + + /** User name. */ + private String user; + + /** User password. */ + private String pwd; + + /** + * Creates credentials object. + * + * @param user User name. + * @param pwd User password. + */ + public PlainCredentials(String user, String pwd) { + this.user = user; + this.pwd = pwd; + } + + /** {@inheritDoc} */ + @Override public String getUser() { + return user; + } + + /** {@inheritDoc} */ + @Override public String getPassword() { + return pwd; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/package-info.java new file mode 100644 index 0000000000000..6e1d22aed9aa2 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/datasource/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains data source implementation + */ + +package org.apache.ignite.cache.store.cassandra.datasource; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/package-info.java new file mode 100644 index 0000000000000..00aee904054d0 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains {@link org.apache.ignite.cache.store.CacheStore} implementation backed by Cassandra database + */ + +package org.apache.ignite.cache.store.cassandra; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/KeyPersistenceSettings.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/KeyPersistenceSettings.java new file mode 100644 index 0000000000000..dd4505811d1cb --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/KeyPersistenceSettings.java @@ -0,0 +1,249 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.persistence; + +import java.io.IOException; +import java.util.LinkedList; +import java.util.List; + +import org.apache.ignite.cache.affinity.AffinityKeyMapped; +import org.w3c.dom.Element; +import org.w3c.dom.NodeList; + +/** + * Stores persistence settings for Ignite cache key + */ +public class KeyPersistenceSettings extends PersistenceSettings { + /** Partition key XML tag. */ + private static final String PARTITION_KEY_ELEMENT = "partitionKey"; + + /** Cluster key XML tag. 
*/ + private static final String CLUSTER_KEY_ELEMENT = "clusterKey"; + + /** POJO field XML tag. */ + private static final String FIELD_ELEMENT = "field"; + + /** POJO fields. */ + private List fields = new LinkedList<>(); + + /** Partition key fields. */ + private List partKeyFields = new LinkedList<>(); + + /** Cluster key fields. */ + private List clusterKeyFields = new LinkedList<>(); + + /** + * Creates key persistence settings object based on it's XML configuration. + * + * @param el XML element storing key persistence settings + */ + public KeyPersistenceSettings(Element el) { + super(el); + + if (PersistenceStrategy.POJO != getStrategy()) { + init(); + + return; + } + + Element node = el.getElementsByTagName(PARTITION_KEY_ELEMENT) != null ? + (Element)el.getElementsByTagName(PARTITION_KEY_ELEMENT).item(0) : null; + + NodeList partKeysNodes = node == null ? null : node.getElementsByTagName(FIELD_ELEMENT); + + node = el.getElementsByTagName(CLUSTER_KEY_ELEMENT) != null ? + (Element)el.getElementsByTagName(CLUSTER_KEY_ELEMENT).item(0) : null; + + NodeList clusterKeysNodes = node == null ? null : node.getElementsByTagName(FIELD_ELEMENT); + + if ((partKeysNodes == null || partKeysNodes.getLength() == 0) && + clusterKeysNodes != null && clusterKeysNodes.getLength() > 0) { + throw new IllegalArgumentException("It's not allowed to specify cluster key fields mapping, but " + + "doesn't specify partition key mappings"); + } + + // Detecting partition key fields + partKeyFields = detectPojoFields(partKeysNodes); + + if (partKeyFields == null || partKeyFields.isEmpty()) { + throw new IllegalStateException("Failed to initialize partition key fields for class '" + + getJavaClass().getName() + "'"); + } + + List filteredFields = new LinkedList<>(); + + // Find all fields annotated by @AffinityKeyMapped + for (PojoKeyField field : partKeyFields) { + if (field.getAnnotation(AffinityKeyMapped.class) != null) + filteredFields.add(field); + } + + // If there are any fields annotated by @AffinityKeyMapped then all other fields are part of cluster key + partKeyFields = !filteredFields.isEmpty() ? filteredFields : partKeyFields; + + // Detecting cluster key fields + clusterKeyFields = detectPojoFields(clusterKeysNodes); + + filteredFields = new LinkedList<>(); + + // Removing out all fields which are already in partition key fields list + for (PojoKeyField field : clusterKeyFields) { + if (!PojoField.containsField(partKeyFields, field.getName())) + filteredFields.add(field); + } + + clusterKeyFields = filteredFields; + + fields = new LinkedList<>(); + fields.addAll(partKeyFields); + fields.addAll(clusterKeyFields); + + checkDuplicates(fields); + + init(); + } + + /** {@inheritDoc} */ + @Override public List getFields() { + return fields; + } + + /** {@inheritDoc} */ + @Override protected PojoKeyField createPojoField(Element el, Class clazz) { + return new PojoKeyField(el, clazz); + } + + /** {@inheritDoc} */ + @Override protected PojoKeyField createPojoField(PojoFieldAccessor accessor) { + return new PojoKeyField(accessor); + } + + /** {@inheritDoc} */ + @Override protected PojoKeyField createPojoField(PojoKeyField field, Class clazz) { + return new PojoKeyField(field, clazz); + } + + /** + * Returns Cassandra DDL for primary key. + * + * @return DDL statement. 
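+ * <p>
+ * A hedged illustration with hypothetical column names: for partition columns {@code id}, {@code region} and a
+ * single clustering column {@code ts} the returned fragment would be
+ * <pre> primary key (("id", "region"), "ts")</pre>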
+ */ + public String getPrimaryKeyDDL() { + StringBuilder partKey = new StringBuilder(); + + List cols = getPartitionKeyColumns(); + for (String column : cols) { + if (partKey.length() != 0) + partKey.append(", "); + + partKey.append("\"").append(column).append("\""); + } + + StringBuilder clusterKey = new StringBuilder(); + + cols = getClusterKeyColumns(); + if (cols != null) { + for (String column : cols) { + if (clusterKey.length() != 0) + clusterKey.append(", "); + + clusterKey.append("\"").append(column).append("\""); + } + } + + return clusterKey.length() == 0 ? + " primary key ((" + partKey + "))" : + " primary key ((" + partKey + "), " + clusterKey + ")"; + } + + /** + * Returns Cassandra DDL for cluster key. + * + * @return Cluster key DDL. + */ + public String getClusteringDDL() { + StringBuilder builder = new StringBuilder(); + + for (PojoField field : clusterKeyFields) { + PojoKeyField.SortOrder sortOrder = ((PojoKeyField)field).getSortOrder(); + + if (sortOrder == null) + continue; + + if (builder.length() != 0) + builder.append(", "); + + boolean asc = PojoKeyField.SortOrder.ASC == sortOrder; + + builder.append("\"").append(field.getColumn()).append("\" ").append(asc ? "asc" : "desc"); + } + + return builder.length() == 0 ? null : "clustering order by (" + builder + ")"; + } + + /** {@inheritDoc} */ + @Override protected String defaultColumnName() { + return "key"; + } + + /** + * Returns partition key columns of Cassandra table. + * + * @return List of column names. + */ + private List getPartitionKeyColumns() { + List cols = new LinkedList<>(); + + if (PersistenceStrategy.BLOB == getStrategy() || PersistenceStrategy.PRIMITIVE == getStrategy()) { + cols.add(getColumn()); + return cols; + } + + if (partKeyFields != null) { + for (PojoField field : partKeyFields) + cols.add(field.getColumn()); + } + + return cols; + } + + /** + * Returns cluster key columns of Cassandra table. + * + * @return List of column names. + */ + private List getClusterKeyColumns() { + List cols = new LinkedList<>(); + + if (clusterKeyFields != null) { + for (PojoField field : clusterKeyFields) + cols.add(field.getColumn()); + } + + return cols; + } + + /** + * @see java.io.Serializable + */ + private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + + fields = enrichFields(fields); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/KeyValuePersistenceSettings.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/KeyValuePersistenceSettings.java new file mode 100644 index 0000000000000..f865674e76442 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/KeyValuePersistenceSettings.java @@ -0,0 +1,531 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.persistence; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Serializable; +import java.io.StringReader; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import org.apache.ignite.IgniteException; +import org.apache.ignite.cache.store.cassandra.common.CassandraHelper; +import org.apache.ignite.cache.store.cassandra.common.SystemHelper; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.springframework.core.io.Resource; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.xml.sax.InputSource; + +/** + * Stores persistence settings for Ignite cache key and value + */ +public class KeyValuePersistenceSettings implements Serializable { + /** + * Default Cassandra keyspace options which should be used to create new keyspace. + *

+ * <ul>
+ * <li><b>SimpleStrategy</b> replication works well for a single data center Cassandra cluster.
+ * If your Cassandra cluster is deployed across multiple data centers it's better to use <b>NetworkTopologyStrategy</b>.</li>
+ * <li>Three replicas will be created for each data block.</li>
+ * <li>Setting DURABLE_WRITES to true specifies that all data should be written to the commit log.</li>
+ * </ul>
+ */ + private static final String DFLT_KEYSPACE_OPTIONS = "replication = {'class' : 'SimpleStrategy', " + + "'replication_factor' : 3} and durable_writes = true"; + + /** Xml attribute specifying Cassandra keyspace to use. */ + private static final String KEYSPACE_ATTR = "keyspace"; + + /** Xml attribute specifying Cassandra table to use. */ + private static final String TABLE_ATTR = "table"; + + /** Xml attribute specifying ttl (time to leave) for rows inserted in Cassandra. */ + private static final String TTL_ATTR = "ttl"; + + /** Root xml element containing persistence settings specification. */ + private static final String PERSISTENCE_NODE = "persistence"; + + /** Xml element specifying Cassandra keyspace options. */ + private static final String KEYSPACE_OPTIONS_NODE = "keyspaceOptions"; + + /** Xml element specifying Cassandra table options. */ + private static final String TABLE_OPTIONS_NODE = "tableOptions"; + + /** Xml element specifying Ignite cache key persistence settings. */ + private static final String KEY_PERSISTENCE_NODE = "keyPersistence"; + + /** Xml element specifying Ignite cache value persistence settings. */ + private static final String VALUE_PERSISTENCE_NODE = "valuePersistence"; + + /** + * TTL (time to leave) for rows inserted into Cassandra table + * {@link Expiring data}. + */ + private Integer ttl; + + /** Cassandra keyspace (analog of tablespace in relational databases). */ + private String keyspace; + + /** Cassandra table. */ + private String tbl; + + /** + * Cassandra table creation options + * {@link CREATE TABLE}. + */ + private String tblOptions; + + /** + * Cassandra keyspace creation options + * {@link CREATE KEYSPACE}. + */ + private String keyspaceOptions = DFLT_KEYSPACE_OPTIONS; + + /** Persistence settings for Ignite cache keys. */ + private KeyPersistenceSettings keyPersistenceSettings; + + /** Persistence settings for Ignite cache values. */ + private ValuePersistenceSettings valPersistenceSettings; + + /** List of Cassandra table columns */ + private List tableColumns; + + /** + * Constructs Ignite cache key/value persistence settings. + * + * @param settings string containing xml with persistence settings for Ignite cache key/value + */ + public KeyValuePersistenceSettings(String settings) { + init(settings); + } + + /** + * Constructs Ignite cache key/value persistence settings. + * + * @param settingsFile xml file with persistence settings for Ignite cache key/value + */ + public KeyValuePersistenceSettings(File settingsFile) { + InputStream in; + + try { + in = new FileInputStream(settingsFile); + } + catch (IOException e) { + throw new IgniteException("Failed to get input stream for Cassandra persistence settings file: " + + settingsFile.getAbsolutePath(), e); + } + + init(loadSettings(in)); + } + + /** + * Constructs Ignite cache key/value persistence settings. + * + * @param settingsRsrc resource containing xml with persistence settings for Ignite cache key/value + */ + public KeyValuePersistenceSettings(Resource settingsRsrc) { + InputStream in; + + try { + in = settingsRsrc.getInputStream(); + } + catch (IOException e) { + throw new IgniteException("Failed to get input stream for Cassandra persistence settings resource: " + settingsRsrc, e); + } + + init(loadSettings(in)); + } + + /** + * Returns ttl to use for while inserting new rows into Cassandra table. + * + * @return ttl + */ + public Integer getTTL() { + return ttl; + } + + /** + * Returns Cassandra keyspace to use. + * + * @return keyspace. 
+ */ + public String getKeyspace() { + return keyspace; + } + + /** + * Returns Cassandra table to use. + * + * @return table. + */ + public String getTable() { + return tbl; + } + + /** + * Returns persistence settings for Ignite cache keys. + * + * @return keys persistence settings. + */ + public KeyPersistenceSettings getKeyPersistenceSettings() { + return keyPersistenceSettings; + } + + /** + * Returns persistence settings for Ignite cache values. + * + * @return values persistence settings. + */ + public ValuePersistenceSettings getValuePersistenceSettings() { + return valPersistenceSettings; + } + + /** + * Returns list of POJO fields to be mapped to Cassandra table columns. + * + * @return POJO fields list. + */ + public List getFields() { + List fields = new LinkedList<>(); + + for (PojoField field : keyPersistenceSettings.getFields()) + fields.add(field); + + for (PojoField field : valPersistenceSettings.getFields()) + fields.add(field); + + return fields; + } + + /** + * Returns list of Ignite cache key POJO fields to be mapped to Cassandra table columns. + * + * @return POJO fields list. + */ + public List getKeyFields() { + return keyPersistenceSettings.getFields(); + } + + /** + * Returns list of Ignite cache value POJO fields to be mapped to Cassandra table columns. + * + * @return POJO fields list. + */ + public List getValueFields() { + return valPersistenceSettings.getFields(); + } + + /** + * Returns DDL statement to create Cassandra keyspace. + * + * @return Keyspace DDL statement. + */ + public String getKeyspaceDDLStatement() { + StringBuilder builder = new StringBuilder(); + builder.append("create keyspace if not exists \"").append(keyspace).append("\""); + + if (keyspaceOptions != null) { + if (!keyspaceOptions.trim().toLowerCase().startsWith("with")) + builder.append("\nwith"); + + builder.append(" ").append(keyspaceOptions); + } + + String statement = builder.toString().trim().replaceAll(" +", " "); + + return statement.endsWith(";") ? statement : statement + ";"; + } + + /** + * Returns column names for Cassandra table. + * + * @return Column names. + */ + public List getTableColumns() { + return tableColumns; + } + + /** + * Returns DDL statement to create Cassandra table. + * + * @param table Table name. + * @return Table DDL statement. + */ + public String getTableDDLStatement(String table) { + if (table == null || table.trim().isEmpty()) + throw new IllegalArgumentException("Table name should be specified"); + + String keyColumnsDDL = keyPersistenceSettings.getTableColumnsDDL(); + String valColumnsDDL = valPersistenceSettings.getTableColumnsDDL(new HashSet<>(keyPersistenceSettings.getTableColumns())); + + String colsDDL = keyColumnsDDL; + + if (valColumnsDDL != null && !valColumnsDDL.trim().isEmpty()) + colsDDL += ",\n" + valColumnsDDL; + + String primaryKeyDDL = keyPersistenceSettings.getPrimaryKeyDDL(); + + String clusteringDDL = keyPersistenceSettings.getClusteringDDL(); + + String optionsDDL = tblOptions != null && !tblOptions.trim().isEmpty() ? tblOptions.trim() : ""; + + if (clusteringDDL != null && !clusteringDDL.isEmpty()) + optionsDDL = optionsDDL.isEmpty() ? clusteringDDL : optionsDDL + " and " + clusteringDDL; + + if (!optionsDDL.trim().isEmpty()) + optionsDDL = optionsDDL.trim().toLowerCase().startsWith("with") ? 
optionsDDL.trim() : "with " + optionsDDL.trim(); + + StringBuilder builder = new StringBuilder(); + + builder.append("create table if not exists \"").append(keyspace).append("\".\"").append(table).append("\""); + builder.append("\n(\n").append(colsDDL).append(",\n").append(primaryKeyDDL).append("\n)"); + + if (!optionsDDL.isEmpty()) + builder.append(" \n").append(optionsDDL); + + String tblDDL = builder.toString().trim().replaceAll(" +", " "); + + return tblDDL.endsWith(";") ? tblDDL : tblDDL + ";"; + } + + /** + * Returns DDL statements to create Cassandra table secondary indexes. + * + * @param table Table name. + * @return DDL statements to create secondary indexes. + */ + public List getIndexDDLStatements(String table) { + List idxDDLs = new LinkedList<>(); + + Set keyCols = new HashSet<>(keyPersistenceSettings.getTableColumns()); + + List fields = valPersistenceSettings.getFields(); + + for (PojoField field : fields) { + if (!keyCols.contains(field.getColumn()) && ((PojoValueField)field).isIndexed()) + idxDDLs.add(((PojoValueField)field).getIndexDDL(keyspace, table)); + } + + return idxDDLs; + } + + /** + * Loads Ignite cache persistence settings from resource. + * + * @param in Input stream. + * @return String containing xml with Ignite cache persistence settings. + */ + private String loadSettings(InputStream in) { + StringBuilder settings = new StringBuilder(); + BufferedReader reader = null; + + try { + reader = new BufferedReader(new InputStreamReader(in)); + + String line = reader.readLine(); + + while (line != null) { + if (settings.length() != 0) + settings.append(SystemHelper.LINE_SEPARATOR); + + settings.append(line); + + line = reader.readLine(); + } + } + catch (Throwable e) { + throw new IgniteException("Failed to read input stream for Cassandra persistence settings", e); + } + finally { + U.closeQuiet(reader); + U.closeQuiet(in); + } + + return settings.toString(); + } + + /** + * @param elem Element with data. + * @param attr Attribute name. + * @return Numeric value for specified attribute. + */ + private int extractIntAttribute(Element elem, String attr) { + String val = elem.getAttribute(attr).trim(); + + try { + return Integer.parseInt(val); + } + catch (NumberFormatException ignored) { + throw new IllegalArgumentException("Incorrect value '" + val + "' specified for '" + attr + "' attribute"); + } + } + + /** + * Initializes persistence settings from XML string. + * + * @param settings XML string containing Ignite cache persistence settings configuration. + */ + @SuppressWarnings("IfCanBeSwitch") + private void init(String settings) { + Document doc; + + try { + DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); + DocumentBuilder builder = factory.newDocumentBuilder(); + doc = builder.parse(new InputSource(new StringReader(settings))); + } + catch (Throwable e) { + throw new IllegalArgumentException("Failed to parse persistence settings:" + + SystemHelper.LINE_SEPARATOR + settings, e); + } + + Element root = doc.getDocumentElement(); + + if (!PERSISTENCE_NODE.equals(root.getNodeName())) { + throw new IllegalArgumentException("Incorrect persistence settings specified. " + + "Root XML element should be 'persistence'"); + } + + if (!root.hasAttribute(KEYSPACE_ATTR)) { + throw new IllegalArgumentException("Incorrect persistence settings '" + KEYSPACE_ATTR + + "' attribute should be specified"); + } + + keyspace = root.getAttribute(KEYSPACE_ATTR).trim(); + tbl = root.hasAttribute(TABLE_ATTR) ? 
root.getAttribute(TABLE_ATTR).trim() : null; + + if (root.hasAttribute(TTL_ATTR)) + ttl = extractIntAttribute(root, TTL_ATTR); + + if (!root.hasChildNodes()) { + throw new IllegalArgumentException("Incorrect Cassandra persistence settings specification, " + + "there are no key and value persistence settings specified"); + } + + NodeList children = root.getChildNodes(); + int cnt = children.getLength(); + + for (int i = 0; i < cnt; i++) { + Node node = children.item(i); + + if (node.getNodeType() != Node.ELEMENT_NODE) + continue; + + Element el = (Element)node; + String nodeName = el.getNodeName(); + + if (nodeName.equals(TABLE_OPTIONS_NODE)) { + tblOptions = el.getTextContent(); + tblOptions = tblOptions.replace("\n", " ").replace("\r", "").replace("\t", " "); + } + else if (nodeName.equals(KEYSPACE_OPTIONS_NODE)) { + keyspaceOptions = el.getTextContent(); + keyspaceOptions = keyspaceOptions.replace("\n", " ").replace("\r", "").replace("\t", " "); + } + else if (nodeName.equals(KEY_PERSISTENCE_NODE)) + keyPersistenceSettings = new KeyPersistenceSettings(el); + else if (nodeName.equals(VALUE_PERSISTENCE_NODE)) + valPersistenceSettings = new ValuePersistenceSettings(el); + } + + if (keyPersistenceSettings == null) { + throw new IllegalArgumentException("Incorrect Cassandra persistence settings specification, " + + "there are no key persistence settings specified"); + } + + if (valPersistenceSettings == null) { + throw new IllegalArgumentException("Incorrect Cassandra persistence settings specification, " + + "there are no value persistence settings specified"); + } + + List keyFields = keyPersistenceSettings.getFields(); + List valFields = valPersistenceSettings.getFields(); + + if (PersistenceStrategy.POJO == keyPersistenceSettings.getStrategy() && + (keyFields == null || keyFields.isEmpty())) { + throw new IllegalArgumentException("Incorrect Cassandra persistence settings specification, " + + "there are no key fields found"); + } + + if (PersistenceStrategy.POJO == valPersistenceSettings.getStrategy() && + (valFields == null || valFields.isEmpty())) { + throw new IllegalArgumentException("Incorrect Cassandra persistence settings specification, " + + "there are no value fields found"); + } + + // Validating aliases compatibility - fields having different names, but mapped to the same Cassandra table column. + if (valFields != null && !valFields.isEmpty()) { + String keyColumn = keyPersistenceSettings.getColumn(); + Class keyClass = keyPersistenceSettings.getJavaClass(); + + if (keyColumn != null && !keyColumn.isEmpty()) { + for (PojoField valField : valFields) { + if (keyColumn.equals(valField.getColumn()) && + !CassandraHelper.isCassandraCompatibleTypes(keyClass, valField.getJavaClass())) { + throw new IllegalArgumentException("Value field '" + valField.getName() + "' shares the same " + + "Cassandra table column '" + keyColumn + "' with key, but their Java classes are " + + "different. 
Fields sharing the same column should have the same Java class as their " + + "type or should be mapped to the same Cassandra primitive type."); + } + } + } + + if (keyFields != null && !keyFields.isEmpty()) { + for (PojoField keyField : keyFields) { + for (PojoField valField : valFields) { + if (keyField.getColumn().equals(valField.getColumn()) && + !CassandraHelper.isCassandraCompatibleTypes(keyField.getJavaClass(), valField.getJavaClass())) { + throw new IllegalArgumentException("Value field '" + valField.getName() + "' shares the same " + + "Cassandra table column '" + keyColumn + "' with key field '" + keyField.getName() + "', " + + "but their Java classes are different. Fields sharing the same column should have " + + "the same Java class as their type or should be mapped to the same Cassandra " + + "primitive type."); + } + } + } + } + } + + tableColumns = new LinkedList<>(); + + for (String column : keyPersistenceSettings.getTableColumns()) { + if (!tableColumns.contains(column)) + tableColumns.add(column); + } + + for (String column : valPersistenceSettings.getTableColumns()) { + if (!tableColumns.contains(column)) + tableColumns.add(column); + } + + tableColumns = Collections.unmodifiableList(tableColumns); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PersistenceController.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PersistenceController.java new file mode 100644 index 0000000000000..59e066784ee94 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PersistenceController.java @@ -0,0 +1,459 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.persistence; + +import java.nio.ByteBuffer; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import com.datastax.driver.core.BoundStatement; +import com.datastax.driver.core.PreparedStatement; +import com.datastax.driver.core.Row; +import org.apache.ignite.IgniteException; +import org.apache.ignite.cache.store.cassandra.common.PropertyMappingHelper; +import org.apache.ignite.cache.store.cassandra.serializer.Serializer; + +/** + * Intermediate layer between persistent store (Cassandra) and Ignite cache key/value classes. + * Handles all the mappings to/from Java classes into Cassandra and responsible for all the details + * of how Java objects should be written/loaded to/from Cassandra. + */ +public class PersistenceController { + /** Ignite cache key/value persistence settings. 
*/ + private final KeyValuePersistenceSettings persistenceSettings; + + /** List of key unique POJO fields (skipping aliases pointing to the same Cassandra table column). */ + private final List keyUniquePojoFields; + + /** List of value unique POJO fields (skipping aliases pointing to the same Cassandra table column). */ + private final List valUniquePojoFields; + + /** CQL statement template to insert row into Cassandra table. */ + private final String writeStatementTempl; + + /** CQL statement template to delete row from Cassandra table. */ + private final String delStatementTempl; + + /** CQL statement template to select value fields from Cassandra table. */ + private final String loadStatementTempl; + + /** CQL statement template to select key/value fields from Cassandra table. */ + private final String loadWithKeyFieldsStatementTempl; + + /** CQL statements to insert row into Cassandra table. */ + private volatile Map writeStatements = new HashMap<>(); + + /** CQL statements to delete row from Cassandra table. */ + private volatile Map delStatements = new HashMap<>(); + + /** CQL statements to select value fields from Cassandra table. */ + private volatile Map loadStatements = new HashMap<>(); + + /** CQL statements to select key/value fields from Cassandra table. */ + private volatile Map loadWithKeyFieldsStatements = new HashMap<>(); + + /** + * Constructs persistence controller from Ignite cache persistence settings. + * + * @param settings persistence settings. + */ + public PersistenceController(KeyValuePersistenceSettings settings) { + if (settings == null) + throw new IllegalArgumentException("Persistent settings can't be null"); + + persistenceSettings = settings; + + String[] loadStatements = prepareLoadStatements(); + + loadWithKeyFieldsStatementTempl = loadStatements[0]; + loadStatementTempl = loadStatements[1]; + writeStatementTempl = prepareWriteStatement(); + delStatementTempl = prepareDeleteStatement(); + + keyUniquePojoFields = settings.getKeyPersistenceSettings().cassandraUniqueFields(); + + List _valUniquePojoFields = settings.getValuePersistenceSettings().cassandraUniqueFields(); + + if (_valUniquePojoFields == null || _valUniquePojoFields.isEmpty()) { + valUniquePojoFields = _valUniquePojoFields; + + return; + } + + List keyColumns = new LinkedList<>(); + + if (keyUniquePojoFields == null) + keyColumns.add(settings.getKeyPersistenceSettings().getColumn()); + else { + for (PojoField field : keyUniquePojoFields) + keyColumns.add(field.getColumn()); + } + + List fields = new LinkedList<>(_valUniquePojoFields); + + for (String column : keyColumns) { + for (int i = 0; i < fields.size(); i++) { + if (column.equals(fields.get(i).getColumn())) { + fields.remove(i); + break; + } + } + } + + valUniquePojoFields = fields.isEmpty() ? null : Collections.unmodifiableList(fields); + } + + /** + * Returns Ignite cache persistence settings. + * + * @return persistence settings. + */ + public KeyValuePersistenceSettings getPersistenceSettings() { + return persistenceSettings; + } + + /** + * Returns CQL statement to insert row into Cassandra table. + * + * @param table Table name. + * @return CQL statement. + */ + public String getWriteStatement(String table) { + return getStatement(table, writeStatementTempl, writeStatements); + } + + /** + * Returns CQL statement to delete row from Cassandra table. + * + * @param table Table name. + * @return CQL statement. 
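+ * <p>
+ * A hedged example of the returned statement for a single-column key (keyspace, table and column names are
+ * hypothetical):
+ * <pre> delete from "test_keyspace"."person" where "id"=?;</pre>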
+ */ + public String getDeleteStatement(String table) { + return getStatement(table, delStatementTempl, delStatements); + } + + /** + * Returns CQL statement to select key/value fields from Cassandra table. + * + * @param table Table name. + * @param includeKeyFields whether to include/exclude key fields from the returned row. + * + * @return CQL statement. + */ + public String getLoadStatement(String table, boolean includeKeyFields) { + return includeKeyFields ? + getStatement(table, loadWithKeyFieldsStatementTempl, loadWithKeyFieldsStatements) : + getStatement(table, loadStatementTempl, loadStatements); + } + + /** + * Binds Ignite cache key object to {@link PreparedStatement}. + * + * @param statement statement to which key object should be bind. + * @param key key object. + * + * @return statement with bounded key. + */ + public BoundStatement bindKey(PreparedStatement statement, Object key) { + PersistenceSettings settings = persistenceSettings.getKeyPersistenceSettings(); + + Object[] values = PersistenceStrategy.POJO != settings.getStrategy() ? + new Object[1] : new Object[keyUniquePojoFields.size()]; + + bindValues(settings.getStrategy(), settings.getSerializer(), keyUniquePojoFields, key, values, 0); + + return statement.bind(values); + } + + /** + * Binds Ignite cache key and value object to {@link com.datastax.driver.core.PreparedStatement}. + * + * @param statement statement to which key and value object should be bind. + * @param key key object. + * @param val value object. + * + * @return statement with bounded key and value. + */ + public BoundStatement bindKeyValue(PreparedStatement statement, Object key, Object val) { + Object[] values = new Object[persistenceSettings.getTableColumns().size()]; + + PersistenceSettings keySettings = persistenceSettings.getKeyPersistenceSettings(); + PersistenceSettings valSettings = persistenceSettings.getValuePersistenceSettings(); + + int offset = bindValues(keySettings.getStrategy(), keySettings.getSerializer(), keyUniquePojoFields, key, values, 0); + bindValues(valSettings.getStrategy(), valSettings.getSerializer(), valUniquePojoFields, val, values, offset); + + return statement.bind(values); + } + + /** + * Builds Ignite cache key object from returned Cassandra table row. + * + * @param row Cassandra table row. + * + * @return key object. + */ + public Object buildKeyObject(Row row) { + return buildObject(row, persistenceSettings.getKeyPersistenceSettings()); + } + + /** + * Builds Ignite cache value object from Cassandra table row . + * + * @param row Cassandra table row. + * + * @return value object. + */ + public Object buildValueObject(Row row) { + return buildObject(row, persistenceSettings.getValuePersistenceSettings()); + } + + /** + * Service method to prepare CQL write statement. + * + * @return CQL write statement. 
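+ * <p>
+ * A hedged sketch of the produced template for two columns and a configured TTL (names and the TTL value are
+ * hypothetical); {@code %1$s} is substituted with the actual table name later:
+ * <pre> insert into "test_keyspace"."%1$s" ("id", "name") values (?,?) using ttl 3600;</pre>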
+ */ + private String prepareWriteStatement() { + Collection cols = persistenceSettings.getTableColumns(); + + StringBuilder colsList = new StringBuilder(); + StringBuilder questionsList = new StringBuilder(); + + for (String column : cols) { + if (colsList.length() != 0) { + colsList.append(", "); + questionsList.append(","); + } + + colsList.append("\"").append(column).append("\""); + questionsList.append("?"); + } + + String statement = "insert into \"" + persistenceSettings.getKeyspace() + "\".\"%1$s" + + "\" (" + colsList + ") values (" + questionsList + ")"; + + if (persistenceSettings.getTTL() != null) + statement += " using ttl " + persistenceSettings.getTTL(); + + return statement + ";"; + } + + /** + * Service method to prepare CQL delete statement. + * + * @return CQL write statement. + */ + private String prepareDeleteStatement() { + Collection cols = persistenceSettings.getKeyPersistenceSettings().getTableColumns(); + + StringBuilder statement = new StringBuilder(); + + for (String column : cols) { + if (statement.length() != 0) + statement.append(" and "); + + statement.append("\"").append(column).append("\"=?"); + } + + statement.append(";"); + + return "delete from \"" + persistenceSettings.getKeyspace() + "\".\"%1$s\" where " + statement; + } + + /** + * Service method to prepare CQL load statements including and excluding key columns. + * + * @return array having two CQL statements (including and excluding key columns). + */ + private String[] prepareLoadStatements() { + PersistenceSettings settings = persistenceSettings.getKeyPersistenceSettings(); + boolean pojoStrategy = PersistenceStrategy.POJO == settings.getStrategy(); + Collection keyCols = settings.getTableColumns(); + StringBuilder hdrWithKeyFields = new StringBuilder(); + + for (String column : keyCols) { + // omit calculated fields in load statement + if (pojoStrategy && settings.getFieldByColumn(column).calculatedField()) + continue; + + if (hdrWithKeyFields.length() > 0) + hdrWithKeyFields.append(", "); + + hdrWithKeyFields.append("\"").append(column).append("\""); + } + + settings = persistenceSettings.getValuePersistenceSettings(); + pojoStrategy = PersistenceStrategy.POJO == settings.getStrategy(); + Collection valCols = settings.getTableColumns(); + StringBuilder hdr = new StringBuilder(); + + for (String column : valCols) { + // omit calculated fields in load statement + if (pojoStrategy && settings.getFieldByColumn(column).calculatedField()) + continue; + + if (hdr.length() > 0) + hdr.append(", "); + + hdr.append("\"").append(column).append("\""); + + if (!keyCols.contains(column)) + hdrWithKeyFields.append(", \"").append(column).append("\""); + } + + hdrWithKeyFields.insert(0, "select "); + hdr.insert(0, "select "); + + StringBuilder statement = new StringBuilder(); + + statement.append(" from \""); + statement.append(persistenceSettings.getKeyspace()); + statement.append("\".\"%1$s"); + statement.append("\" where "); + + int i = 0; + + for (String column : keyCols) { + if (i > 0) + statement.append(" and "); + + statement.append("\"").append(column).append("\"=?"); + i++; + } + + statement.append(";"); + + return new String[] {hdrWithKeyFields + statement.toString(), hdr + statement.toString()}; + } + + /** + * @param table Table. + * @param template Template. + * @param statements Statements. + * @return Statement. 
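+ * <p>
+ * Formats the template with the table name on first use and caches the result in the provided map, synchronizing
+ * on that map.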
+ */ + private String getStatement(final String table, final String template, final Map statements) { + //noinspection SynchronizationOnLocalVariableOrMethodParameter + synchronized (statements) { + String st = statements.get(table); + + if (st == null) { + st = String.format(template, table); + statements.put(table, st); + } + + return st; + } + } + + /** + * Builds object from Cassandra table row. + * + * @param row Cassandra table row. + * @param settings persistence settings to use. + * + * @return object. + */ + private Object buildObject(Row row, PersistenceSettings settings) { + if (row == null) + return null; + + PersistenceStrategy stg = settings.getStrategy(); + + Class clazz = settings.getJavaClass(); + String col = settings.getColumn(); + + if (PersistenceStrategy.PRIMITIVE == stg) + return PropertyMappingHelper.getCassandraColumnValue(row, col, clazz, null); + + if (PersistenceStrategy.BLOB == stg) + return settings.getSerializer().deserialize(row.getBytes(col)); + + List fields = settings.getFields(); + + Object obj; + + try { + obj = clazz.newInstance(); + } + catch (Throwable e) { + throw new IgniteException("Failed to instantiate object of type '" + clazz.getName() + "' using reflection", e); + } + + for (PojoField field : fields) { + if (!field.calculatedField()) + field.setValueFromRow(row, obj, settings.getSerializer()); + } + + return obj; + } + + /** + * Extracts field values from POJO object, converts into Java types + * which could be mapped to Cassandra types and stores them inside provided values + * array starting from specified offset. + * + * @param stgy Persistence strategy to use. + * @param serializer Serializer to use for BLOBs. + * @param fields Fields who's values should be extracted. + * @param obj Object instance who's field values should be extracted. + * @param values Array to store values. + * @param offset Offset starting from which to store fields values in the provided values array. + * + * @return next offset + */ + private int bindValues(PersistenceStrategy stgy, Serializer serializer, List fields, Object obj, + Object[] values, int offset) { + if (PersistenceStrategy.PRIMITIVE == stgy) { + if (PropertyMappingHelper.getCassandraType(obj.getClass()) == null || + obj.getClass().equals(ByteBuffer.class) || obj instanceof byte[]) { + throw new IllegalArgumentException("Couldn't deserialize instance of class '" + + obj.getClass().getName() + "' using PRIMITIVE strategy. 
Please use BLOB strategy for this case."); + } + + values[offset] = obj; + + return ++offset; + } + + if (PersistenceStrategy.BLOB == stgy) { + values[offset] = serializer.serialize(obj); + + return ++offset; + } + + if (fields == null || fields.isEmpty()) + return offset; + + for (PojoField field : fields) { + Object val = field.getValueFromObject(obj, serializer); + + if (val instanceof byte[]) + val = ByteBuffer.wrap((byte[])val); + + values[offset] = val; + + offset++; + } + + return offset; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PersistenceSettings.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PersistenceSettings.java new file mode 100644 index 0000000000000..6a0d703a30a39 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PersistenceSettings.java @@ -0,0 +1,557 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.persistence; + +import java.beans.PropertyDescriptor; +import java.io.IOException; +import java.io.Serializable; +import java.lang.reflect.Field; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import com.datastax.driver.core.DataType; +import org.apache.commons.beanutils.PropertyUtils; +import org.apache.ignite.IgniteException; +import org.apache.ignite.cache.query.annotations.QuerySqlField; +import org.apache.ignite.cache.store.cassandra.common.CassandraHelper; +import org.apache.ignite.cache.store.cassandra.common.PropertyMappingHelper; +import org.apache.ignite.cache.store.cassandra.serializer.JavaSerializer; +import org.apache.ignite.cache.store.cassandra.serializer.Serializer; +import org.w3c.dom.Element; +import org.w3c.dom.NodeList; + +/** + * Stores persistence settings, which describes how particular key/value + * from Ignite cache should be stored in Cassandra. + */ +public abstract class PersistenceSettings implements Serializable { + /** Xml attribute specifying persistence strategy. */ + private static final String STRATEGY_ATTR = "strategy"; + + /** Xml attribute specifying Cassandra column name. */ + private static final String COLUMN_ATTR = "column"; + + /** Xml attribute specifying BLOB serializer to use. */ + private static final String SERIALIZER_ATTR = "serializer"; + + /** Xml attribute specifying java class of the object to be persisted. */ + private static final String CLASS_ATTR = "class"; + + /** Persistence strategy to use. 
*/ + private PersistenceStrategy stgy; + + /** Java class of the object to be persisted. */ + private Class javaCls; + + /** Cassandra table column name where object should be persisted in + * case of using BLOB or PRIMITIVE persistence strategy. */ + private String col; + + /** Serializer for BLOBs. */ + private Serializer serializer = new JavaSerializer(); + + /** List of Cassandra table columns */ + private List tableColumns; + + /** + * List of POJO fields having unique mapping to Cassandra columns - skipping aliases pointing + * to the same Cassandra table column. + */ + private List casUniqueFields; + + /** + * Extracts property descriptor from the descriptors list by its name. + * + * @param descriptors descriptors list. + * @param propName property name. + * + * @return property descriptor. + */ + public static PropertyDescriptor findPropertyDescriptor(List descriptors, String propName) { + if (descriptors == null || descriptors.isEmpty() || propName == null || propName.trim().isEmpty()) + return null; + + for (PropertyDescriptor descriptor : descriptors) { + if (descriptor.getName().equals(propName)) + return descriptor; + } + + return null; + } + + /** + * Constructs persistence settings from corresponding XML element. + * + * @param el xml element containing persistence settings configuration. + */ + @SuppressWarnings("unchecked") + public PersistenceSettings(Element el) { + if (el == null) + throw new IllegalArgumentException("DOM element representing key/value persistence object can't be null"); + + if (!el.hasAttribute(STRATEGY_ATTR)) { + throw new IllegalArgumentException("DOM element representing key/value persistence object should have '" + + STRATEGY_ATTR + "' attribute"); + } + + try { + stgy = PersistenceStrategy.valueOf(el.getAttribute(STRATEGY_ATTR).trim().toUpperCase()); + } + catch (IllegalArgumentException ignored) { + throw new IllegalArgumentException("Incorrect persistence strategy specified: " + el.getAttribute(STRATEGY_ATTR)); + } + + if (!el.hasAttribute(CLASS_ATTR) && PersistenceStrategy.BLOB != stgy) { + throw new IllegalArgumentException("DOM element representing key/value persistence object should have '" + + CLASS_ATTR + "' attribute or have BLOB persistence strategy"); + } + + try { + javaCls = el.hasAttribute(CLASS_ATTR) ? 
getClassInstance(el.getAttribute(CLASS_ATTR).trim()) : null; + } + catch (Throwable e) { + throw new IllegalArgumentException("Incorrect java class specified '" + el.getAttribute(CLASS_ATTR) + "' " + + "for Cassandra persistence", e); + } + + if (PersistenceStrategy.BLOB != stgy && + (ByteBuffer.class.equals(javaCls) || byte[].class.equals(javaCls))) { + throw new IllegalArgumentException("Java class '" + el.getAttribute(CLASS_ATTR) + "' " + + "specified could only be persisted using BLOB persistence strategy"); + } + + if (PersistenceStrategy.PRIMITIVE == stgy && + PropertyMappingHelper.getCassandraType(javaCls) == null) { + throw new IllegalArgumentException("Current implementation doesn't support persisting '" + + javaCls.getName() + "' object using PRIMITIVE strategy"); + } + + if (PersistenceStrategy.POJO == stgy) { + if (javaCls == null) + throw new IllegalStateException("Object java class should be specified for POJO persistence strategy"); + + try { + javaCls.getConstructor(); + } + catch (Throwable e) { + throw new IllegalArgumentException("Java class '" + javaCls.getName() + "' couldn't be used as POJO " + + "cause it doesn't have no arguments constructor", e); + } + } + + if (el.hasAttribute(COLUMN_ATTR)) { + if (PersistenceStrategy.BLOB != stgy && PersistenceStrategy.PRIMITIVE != stgy) { + throw new IllegalArgumentException("Incorrect configuration of Cassandra key/value persistence settings, " + + "'" + COLUMN_ATTR + "' attribute is only applicable for PRIMITIVE or BLOB strategy"); + } + + col = el.getAttribute(COLUMN_ATTR).trim(); + } + + if (el.hasAttribute(SERIALIZER_ATTR)) { + if (PersistenceStrategy.BLOB != stgy && PersistenceStrategy.POJO != stgy) { + throw new IllegalArgumentException("Incorrect configuration of Cassandra key/value persistence settings, " + + "'" + SERIALIZER_ATTR + "' attribute is only applicable for BLOB and POJO strategies"); + } + + Object obj = newObjectInstance(el.getAttribute(SERIALIZER_ATTR).trim()); + + if (!(obj instanceof Serializer)) { + throw new IllegalArgumentException("Incorrect configuration of Cassandra key/value persistence settings, " + + "serializer class '" + el.getAttribute(SERIALIZER_ATTR) + "' doesn't implement '" + + Serializer.class.getName() + "' interface"); + } + + serializer = (Serializer)obj; + } + + if ((PersistenceStrategy.BLOB == stgy || PersistenceStrategy.PRIMITIVE == stgy) && col == null) + col = defaultColumnName(); + } + + /** + * Returns java class of the object to be persisted. + * + * @return java class. + */ + public Class getJavaClass() { + return javaCls; + } + + /** + * Returns persistence strategy to use. + * + * @return persistence strategy. + */ + public PersistenceStrategy getStrategy() { + return stgy; + } + + /** + * Returns Cassandra table column name where object should be persisted in + * case of using BLOB or PRIMITIVE persistence strategy. + * + * @return column name. + */ + public String getColumn() { + return col; + } + + /** + * Returns serializer to be used for BLOBs. + * + * @return serializer. + */ + public Serializer getSerializer() { + return serializer; + } + + /** + * Returns a list of POJO fields to be persisted. + * + * @return list of fields. + */ + public abstract List getFields(); + + /** + * Returns POJO field by Cassandra table column name. + * + * @param column column name. + * + * @return POJO field or null if not exists. 
+ */ + public PojoField getFieldByColumn(String column) { + List fields = getFields(); + + if (fields == null || fields.isEmpty()) + return null; + + for (PojoField field : fields) { + if (field.getColumn().equals(column)) + return field; + } + + return null; + } + + /** + * List of POJO fields having unique mapping to Cassandra columns - skipping aliases pointing + * to the same Cassandra table column. + * + * @return List of fields. + */ + public List cassandraUniqueFields() { + return casUniqueFields; + } + + /** + * Returns set of database column names, used to persist field values + * + * @return set of database column names + */ + public List getTableColumns() { + return tableColumns; + } + + /** + * Returns Cassandra table columns DDL, corresponding to POJO fields which should be persisted. + * + * @return DDL statement for Cassandra table fields. + */ + public String getTableColumnsDDL() { + return getTableColumnsDDL(null); + } + + /** + * Returns Cassandra table columns DDL, corresponding to POJO fields which should be persisted. + * + * @param ignoreColumns Table columns to ignore (exclude) from DDL. + * @return DDL statement for Cassandra table fields. + */ + public String getTableColumnsDDL(Set ignoreColumns) { + if (PersistenceStrategy.BLOB == stgy) + return " \"" + col + "\" " + DataType.Name.BLOB.toString(); + + if (PersistenceStrategy.PRIMITIVE == stgy) + return " \"" + col + "\" " + PropertyMappingHelper.getCassandraType(javaCls); + + List fields = getFields(); + + if (fields == null || fields.isEmpty()) { + throw new IllegalStateException("There are no POJO fields found for '" + javaCls.toString() + + "' class to be presented as a Cassandra primary key"); + } + + // Accumulating already processed columns in the set, to prevent duplicating columns + // shared by two different POJO fields. + Set processedColumns = new HashSet<>(); + + StringBuilder builder = new StringBuilder(); + + for (F field : fields) { + if ((ignoreColumns != null && ignoreColumns.contains(field.getColumn())) || + processedColumns.contains(field.getColumn())) { + continue; + } + + if (builder.length() > 0) + builder.append(",\n"); + + builder.append(" ").append(field.getColumnDDL()); + + processedColumns.add(field.getColumn()); + } + + return builder.toString(); + } + + /** + * Returns default name for Cassandra column (if it's not specified explicitly). + * + * @return column name + */ + protected abstract String defaultColumnName(); + + /** + * Creates instance of {@link PojoField} based on it's description in XML element. + * + * @param el XML element describing POJO field + * @param clazz POJO java class. + */ + protected abstract F createPojoField(Element el, Class clazz); + + /** + * Creates instance of {@link PojoField} from its field accessor. + * + * @param accessor field accessor. + */ + protected abstract F createPojoField(PojoFieldAccessor accessor); + + /** + * Creates instance of {@link PojoField} based on the other instance and java class + * to initialize accessor. + * + * @param field PojoField instance + * @param clazz java class + */ + protected abstract F createPojoField(F field, Class clazz); + + /** + * Class instance initialization. 
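+ * <p>
+ * Populates the unmodifiable {@code tableColumns} list and, when individual fields are mapped, the
+ * {@code casUniqueFields} list, skipping columns shared by several fields.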
+ */ + protected void init() { + if (getColumn() != null && !getColumn().trim().isEmpty()) { + tableColumns = new LinkedList<>(); + tableColumns.add(getColumn()); + tableColumns = Collections.unmodifiableList(tableColumns); + + return; + } + + List fields = getFields(); + + if (fields == null || fields.isEmpty()) + return; + + tableColumns = new LinkedList<>(); + casUniqueFields = new LinkedList<>(); + + for (F field : fields) { + if (!tableColumns.contains(field.getColumn())) { + tableColumns.add(field.getColumn()); + casUniqueFields.add(field); + } + } + + tableColumns = Collections.unmodifiableList(tableColumns); + casUniqueFields = Collections.unmodifiableList(casUniqueFields); + } + + /** + * Checks if there are POJO filed with the same name or same Cassandra column specified in persistence settings. + * + * @param fields List of fields to be persisted into Cassandra. + */ + protected void checkDuplicates(List fields) { + if (fields == null || fields.isEmpty()) + return; + + for (PojoField field1 : fields) { + boolean sameNames = false; + boolean sameCols = false; + + for (PojoField field2 : fields) { + if (field1.getName().equals(field2.getName())) { + if (sameNames) { + throw new IllegalArgumentException("Incorrect Cassandra persistence settings, " + + "two POJO fields with the same name '" + field1.getName() + "' specified"); + } + + sameNames = true; + } + + if (field1.getColumn().equals(field2.getColumn())) { + if (sameCols && !CassandraHelper.isCassandraCompatibleTypes(field1.getJavaClass(), field2.getJavaClass())) { + throw new IllegalArgumentException("Field '" + field1.getName() + "' shares the same Cassandra table " + + "column '" + field1.getColumn() + "' with field '" + field2.getName() + "', but their Java " + + "classes are different. Fields sharing the same column should have the same " + + "Java class as their type or should be mapped to the same Cassandra primitive type."); + } + + sameCols = true; + } + } + } + } + + /** + * Extracts POJO fields from a list of corresponding XML field nodes. + * + * @param fieldNodes Field nodes to process. + * @return POJO fields list. 
+ */ + protected List detectPojoFields(NodeList fieldNodes) { + List detectedFields = new LinkedList<>(); + + if (fieldNodes != null && fieldNodes.getLength() != 0) { + int cnt = fieldNodes.getLength(); + + for (int i = 0; i < cnt; i++) { + F field = createPojoField((Element)fieldNodes.item(i), getJavaClass()); + + // Just checking that such field exists in the class + PropertyMappingHelper.getPojoFieldAccessor(getJavaClass(), field.getName()); + + detectedFields.add(field); + } + + return detectedFields; + } + + PropertyDescriptor[] descriptors = PropertyUtils.getPropertyDescriptors(getJavaClass()); + + // Collecting Java Beans property descriptors + if (descriptors != null) { + for (PropertyDescriptor desc : descriptors) { + // Skip POJO field if it's read-only + if (desc.getWriteMethod() != null) { + Field field = null; + + try { + field = getJavaClass().getDeclaredField(desc.getName()); + } + catch (Throwable ignore) { + } + + detectedFields.add(createPojoField(new PojoFieldAccessor(desc, field))); + } + } + } + + Field[] fields = getJavaClass().getDeclaredFields(); + + // Collecting all fields annotated with @QuerySqlField + if (fields != null) { + for (Field field : fields) { + if (field.getAnnotation(QuerySqlField.class) != null && !PojoField.containsField(detectedFields, field.getName())) + detectedFields.add(createPojoField(new PojoFieldAccessor(field))); + } + } + + return detectedFields; + } + + /** + * Instantiates Class object for particular class + * + * @param clazz class name + * @return Class object + */ + private Class getClassInstance(String clazz) { + try { + return Class.forName(clazz); + } + catch (ClassNotFoundException ignored) { + } + + try { + return Class.forName(clazz, true, Thread.currentThread().getContextClassLoader()); + } + catch (ClassNotFoundException ignored) { + } + + try { + return Class.forName(clazz, true, PersistenceSettings.class.getClassLoader()); + } + catch (ClassNotFoundException ignored) { + } + + try { + return Class.forName(clazz, true, ClassLoader.getSystemClassLoader()); + } + catch (ClassNotFoundException ignored) { + } + + throw new IgniteException("Failed to load class '" + clazz + "' using reflection"); + } + + /** + * Creates new object instance of particular class + * + * @param clazz class name + * @return object + */ + private Object newObjectInstance(String clazz) { + try { + return getClassInstance(clazz).newInstance(); + } + catch (Throwable e) { + throw new IgniteException("Failed to instantiate class '" + clazz + "' using default constructor", e); + } + } + + /** + * @see java.io.Serializable + */ + private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + casUniqueFields = Collections.unmodifiableList(enrichFields(casUniqueFields)); + } + + /** + * Sets accessor for the given {@code src} fields. + * Required as accessor is transient and is not present + * after deserialization. 
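As an illustration of the field-discovery rules above, consider a hypothetical value class (not part of this patch): JavaBeans properties with both getter and setter are picked up through property descriptors, read-only properties are skipped, and raw fields annotated with @QuerySqlField are added if they were not already detected.

import org.apache.ignite.cache.query.annotations.QuerySqlField;

public class SamplePojo {
    /** Picked up through its getter/setter pair (property-descriptor path). */
    private String firstName;

    /** Picked up by the @QuerySqlField scan even though it has no setter. */
    @QuerySqlField(index = true)
    private int age;

    /** Skipped: read-only property (no write method). */
    public String getDisplayName() {
        return firstName + " (" + age + ")";
    }

    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }
}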
+ */ + protected List enrichFields(List src) { + if (src != null) { + List enriched = new ArrayList<>(); + + for (F sourceField : src) + enriched.add(createPojoField(sourceField, getJavaClass())); + + return enriched; + } + else + return new ArrayList<>(); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PersistenceStrategy.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PersistenceStrategy.java new file mode 100644 index 0000000000000..4b1e2d8274424 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PersistenceStrategy.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.persistence; + +/** + * Describes persistence strategy to be used to persist object data into Cassandra. + */ +public enum PersistenceStrategy { + /** + * Stores object value as is, by mapping its value to Cassandra table column with corresponding type. + *
<p>
+ * Could be used for primitive java types (like Integer, String, Long, etc.) which could be directly mapped
+ * to appropriate Cassandra types.
+ */
+ PRIMITIVE,
+
+ /**
+ * Stores object value as BLOB, by mapping its value to Cassandra table column with blob type.
+ * Could be used for any java type. Conversion of a java object to BLOB is handled by the specified serializer.
+ * <p>
+ * Available serializer implementations:
+ * <ul>
+ * <li>org.apache.ignite.cache.store.cassandra.serializer.JavaSerializer - uses the standard Java serialization framework.</li>
+ * <li>org.apache.ignite.cache.store.cassandra.serializer.KryoSerializer - uses the Kryo serialization framework.</li>
+ * </ul>
+ */
+ BLOB,
+
+ /**
+ * Stores each field of an object as a column having the corresponding type in the Cassandra table.
+ * Provides the ability to utilize Cassandra secondary indexes for object fields.
+ * <p>
+ * Could be used for objects which follow the JavaBeans convention and have an empty public constructor.
+ * Object fields should be:
+ * <ul>
+ * <li>Primitive java types like int, long, String, etc.</li>
+ * <li>Collections of primitive java types like List, Map, Set</li>
+ * </ul>
+ */ + POJO +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoField.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoField.java new file mode 100644 index 0000000000000..facd48c215aa5 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoField.java @@ -0,0 +1,238 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.persistence; + +import java.io.Serializable; +import java.lang.annotation.Annotation; +import java.util.List; +import com.datastax.driver.core.DataType; +import com.datastax.driver.core.Row; +import org.apache.ignite.cache.query.annotations.QuerySqlField; +import org.apache.ignite.cache.store.cassandra.common.PropertyMappingHelper; +import org.apache.ignite.cache.store.cassandra.serializer.Serializer; +import org.w3c.dom.Element; + +/** + * Descriptor for particular field in a POJO object, specifying how this field + * should be written to or loaded from Cassandra. + */ +public abstract class PojoField implements Serializable { + /** Name attribute of XML element describing Pojo field. */ + private static final String NAME_ATTR = "name"; + + /** Column attribute of XML element describing Pojo field. */ + private static final String COLUMN_ATTR = "column"; + + /** Field name. */ + private String name; + + /** Field column name in Cassandra table. */ + private String col; + + /** Field column DDL. */ + private String colDDL; + + /** Indicator for calculated field. */ + private Boolean calculated; + + /** Field property accessor. */ + private transient PojoFieldAccessor accessor; + + /** + * Checks if list contains POJO field with the specified name. + * + * @param fields list of POJO fields. + * @param fieldName field name. + * @return true if list contains field or false otherwise. + */ + public static boolean containsField(List fields, String fieldName) { + if (fields == null || fields.isEmpty()) + return false; + + for (PojoField field : fields) { + if (field.getName().equals(fieldName)) + return true; + } + + return false; + } + + /** + * Creates instance of {@link PojoField} based on it's description in XML element. + * + * @param el XML element describing Pojo field + * @param pojoCls Pojo java class. 
+ */ + public PojoField(Element el, Class pojoCls) { + if (el == null) + throw new IllegalArgumentException("DOM element representing POJO field object can't be null"); + + if (!el.hasAttribute(NAME_ATTR)) { + throw new IllegalArgumentException("DOM element representing POJO field object should have '" + + NAME_ATTR + "' attribute"); + } + + this.name = el.getAttribute(NAME_ATTR).trim(); + this.col = el.hasAttribute(COLUMN_ATTR) ? el.getAttribute(COLUMN_ATTR).trim() : name.toLowerCase(); + + init(PropertyMappingHelper.getPojoFieldAccessor(pojoCls, name)); + } + + /** + * Creates instance of {@link PojoField} from its field accessor. + * + * @param accessor field accessor. + */ + public PojoField(PojoFieldAccessor accessor) { + this.name = accessor.getName(); + + QuerySqlField sqlField = (QuerySqlField)accessor.getAnnotation(QuerySqlField.class); + + col = sqlField != null && sqlField.name() != null && !sqlField.name().isEmpty() ? + sqlField.name() : name.toLowerCase(); + + init(accessor); + } + + /** + * Creates instance of {@link PojoField} from the other instance + * and java class. + * + * @param field {@link PojoField} instance to copy from. + * @param pojoCls Class of the {@link PojoField} instance. + */ + public PojoField(PojoField field, Class pojoCls) { + this.name = field.name; + this.col = field.col; + this.colDDL = field.colDDL; + + init(PropertyMappingHelper.getPojoFieldAccessor(pojoCls, name)); + } + + /** + * @return field name. + */ + public String getName() { + return name; + } + + /** + * Returns java class of the field. + * + * @return Java class. + */ + public Class getJavaClass() { + return accessor.getFieldType(); + } + + /** + * @return Cassandra table column name. + */ + public String getColumn() { + return col; + } + + /** + * @return Cassandra table column DDL statement. + */ + public String getColumnDDL() { + return colDDL; + } + + /** + * Indicates if it's a calculated field - field which value just generated based on other field values. + * Such field will be stored in Cassandra as all other POJO fields, but it's value shouldn't be read from + * Cassandra - cause it's again just generated based on other field values. One of the good applications of such + * kind of fields - Cassandra materialized views build on top of other tables. + * + * @return {@code true} if it's auto generated field, {@code false} if not. + */ + public boolean calculatedField() { + if (calculated != null) + return calculated; + + return calculated = accessor.isReadOnly(); + } + + /** + * Gets field value as an object having Cassandra compatible type. + * This it could be stored directly into Cassandra without any conversions. + * + * @param obj Object instance. + * @param serializer {@link org.apache.ignite.cache.store.cassandra.serializer.Serializer} to use. + * @return Object to store in Cassandra table column. + */ + public Object getValueFromObject(Object obj, Serializer serializer) { + Object val = accessor.getValue(obj); + + if (val == null) + return null; + + DataType.Name cassandraType = PropertyMappingHelper.getCassandraType(val.getClass()); + + if (cassandraType != null) + return val; + + if (serializer == null) { + throw new IllegalStateException("Can't serialize value from object '" + + val.getClass().getName() + "' field '" + name + "', cause there is no BLOB serializer specified"); + } + + return serializer.serialize(val); + } + + /** + * Returns POJO field annotation. + * + * @param clazz Class of the annotation to get. + * @return annotation. 
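The column-name resolution performed by the constructors above can be summarized by a hypothetical helper (illustration only): the XML path prefers an explicit column attribute, the annotation path prefers a non-empty @QuerySqlField name, and both fall back to the lower-cased field name.

/**
 * Hypothetical helper (not part of this patch) summarizing how PojoField resolves
 * the Cassandra column name in its two construction paths.
 */
public final class ColumnNameRules {
    private ColumnNameRules() {
        // No instances.
    }

    /** XML path: an explicit column="..." attribute wins, otherwise the lower-cased field name. */
    public static String fromXml(String fieldName, String columnAttr) {
        return columnAttr != null && !columnAttr.trim().isEmpty() ? columnAttr.trim() : fieldName.toLowerCase();
    }

    /** Annotation path: a non-empty @QuerySqlField(name = "...") wins, otherwise the lower-cased field name. */
    public static String fromAnnotation(String fieldName, String sqlFieldName) {
        return sqlFieldName != null && !sqlFieldName.isEmpty() ? sqlFieldName : fieldName.toLowerCase();
    }
}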
+ */ + public Annotation getAnnotation(Class clazz) { + return accessor.getAnnotation(clazz); + } + + /** + * Sets object field value from a {@link com.datastax.driver.core.Row} returned by Cassandra CQL statement. + * + * @param row {@link com.datastax.driver.core.Row} + * @param obj object which field should be populated from {@link com.datastax.driver.core.Row} + * @param serializer {@link org.apache.ignite.cache.store.cassandra.serializer.Serializer} to use. + */ + public void setValueFromRow(Row row, Object obj, Serializer serializer) { + if (calculatedField()) + return; + + Object val = PropertyMappingHelper.getCassandraColumnValue(row, col, accessor.getFieldType(), serializer); + + accessor.setValue(obj, val); + } + + /** + * Initializes field info from property descriptor. + * + * @param accessor {@link PojoFieldAccessor} accessor. + */ + private void init(PojoFieldAccessor accessor) { + DataType.Name cassandraType = PropertyMappingHelper.getCassandraType(accessor.getFieldType()); + cassandraType = cassandraType == null ? DataType.Name.BLOB : cassandraType; + + this.colDDL = "\"" + col + "\" " + cassandraType.toString(); + + this.accessor = accessor; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoFieldAccessor.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoFieldAccessor.java new file mode 100644 index 0000000000000..c8ff3e54d0dbb --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoFieldAccessor.java @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.persistence; + +import java.beans.PropertyDescriptor; +import java.lang.annotation.Annotation; +import java.lang.reflect.Field; +import org.apache.ignite.IgniteException; + +/** + * Property accessor provides read/write access to POJO object properties defined through: + * 1) Getter/setter methods + * 2) Raw class members + */ +public class PojoFieldAccessor { + /** Java Bean property descriptor */ + private PropertyDescriptor desc; + + /** Object field associated with property descriptor. Used just to get annotations which + * applied not to property descriptor, but directly to object field associated with the property. */ + private Field descField; + + /** Object field */ + private Field field; + + /** + * Constructs object instance from Java Bean property descriptor, providing access to getter/setter. + * + * @param desc Java Bean property descriptor. + * @param field object field associated with property descriptor. 
+ */ + public PojoFieldAccessor(PropertyDescriptor desc, Field field) { + if (desc.getReadMethod() == null) { + throw new IllegalArgumentException("Field '" + desc.getName() + + "' of the class instance '" + desc.getPropertyType().getName() + + "' doesn't provide getter method"); + } + + desc.getReadMethod().setAccessible(true); + + if (desc.getWriteMethod() != null) + desc.getWriteMethod().setAccessible(true); + + this.desc = desc; + this.descField = field; + } + + /** + * Constructs object instance from Field, providing direct access to class member. + * + * @param field Field descriptor. + */ + public PojoFieldAccessor(Field field) { + field.setAccessible(true); + this.field = field; + } + + /** + * Returns POJO field name. + * + * @return field name. + */ + public String getName() { + return desc != null ? desc.getName() : field.getName(); + } + + /** + * Indicates if it's read-only field. + * + * @return true if field read-only, false if not. + */ + public boolean isReadOnly() { + return desc != null && desc.getWriteMethod() == null; + } + + /** + * Returns POJO field annotation. + * + * @param clazz Class of the annotation to get. + * @return annotation. + */ + public Annotation getAnnotation(Class clazz) { + if (field != null) + return field.getAnnotation(clazz); + + Annotation ann = desc.getReadMethod().getAnnotation(clazz); + + if (ann != null) + return ann; + + ann = desc.getWriteMethod() == null ? null : desc.getWriteMethod().getAnnotation(clazz); + + if (ann != null) + return ann; + + return descField == null ? null : descField.getAnnotation(clazz); + } + + /** + * Returns field value for the object instance. + * + * @param obj object instance. + * @return field value. + */ + public Object getValue(Object obj) { + try { + return desc != null ? desc.getReadMethod().invoke(obj) : field.get(obj); + } + catch (Throwable e) { + throw new IgniteException("Failed to get value of the field '" + getName() + "' from the instance " + + " of '" + obj.getClass().toString() + "' class", e); + } + } + + /** + * Assigns value for the object field. + * + * @param obj object instance. + * @param val value to assign. + */ + public void setValue(Object obj, Object val) { + if (isReadOnly()) + throw new IgniteException("Can't assign value to read-only field '" + getName() + "' of the instance " + + " of '" + obj.getClass().toString() + "' class"); + + try { + if (desc != null) + desc.getWriteMethod().invoke(obj, val); + else + field.set(obj, val); + } + catch (Throwable e) { + throw new IgniteException("Failed to set value of the field '" + getName() + "' of the instance " + + " of '" + obj.getClass().toString() + "' class", e); + } + } + + /** + * Returns field type. + * + * @return field type. + */ + public Class getFieldType() { + return desc != null ? desc.getPropertyType() : field.getType(); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoKeyField.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoKeyField.java new file mode 100644 index 0000000000000..2b02fe5c4c66e --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoKeyField.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.persistence; + +import org.apache.ignite.cache.query.annotations.QuerySqlField; +import org.w3c.dom.Element; + +/** + * Descriptor for Ignite key POJO class + */ +public class PojoKeyField extends PojoField { + /** + * Specifies sort order for POJO key field + */ + public enum SortOrder { + /** Ascending sort order. */ + ASC, + /** Descending sort order. */ + DESC + } + + /** Xml attribute specifying sort order. */ + private static final String SORT_ATTR = "sort"; + + /** Sort order. */ + private SortOrder sortOrder; + + /** + * Constructs Ignite cache key POJO object descriptor. + * + * @param el xml configuration element. + * @param pojoCls java class of key POJO field. + */ + public PojoKeyField(Element el, Class pojoCls) { + super(el, pojoCls); + + if (el.hasAttribute(SORT_ATTR)) { + try { + sortOrder = SortOrder.valueOf(el.getAttribute(SORT_ATTR).trim().toUpperCase()); + } + catch (IllegalArgumentException ignored) { + throw new IllegalArgumentException("Incorrect sort order '" + el.getAttribute(SORT_ATTR) + "' specified"); + } + } + } + + /** + * Constructs instance of {@code PojoKeyField} based on the other instance and java class + * to initialize accessor. + * + * @param field PojoKeyField instance + * @param pojoCls java class of the corresponding POJO + */ + public PojoKeyField(PojoKeyField field, Class pojoCls) { + super(field, pojoCls); + + sortOrder = field.sortOrder; + } + + /** + * Constructs Ignite cache key POJO object descriptor. + * + * @param accessor property descriptor. + */ + public PojoKeyField(PojoFieldAccessor accessor) { + super(accessor); + + QuerySqlField sqlField = (QuerySqlField)accessor.getAnnotation(QuerySqlField.class); + + if (sqlField != null && sqlField.descending()) + sortOrder = SortOrder.DESC; + } + + /** + * Returns sort order for the field. + * + * @return sort order. + */ + public SortOrder getSortOrder() { + return sortOrder; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoValueField.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoValueField.java new file mode 100644 index 0000000000000..0427e6cd77b6f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/PojoValueField.java @@ -0,0 +1,157 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.persistence; + +import org.apache.ignite.cache.query.annotations.QuerySqlField; +import org.w3c.dom.Element; + +/** + * Descriptor for Ignite value POJO class + */ +public class PojoValueField extends PojoField { + /** Xml attribute specifying that Cassandra column is static. */ + private static final String STATIC_ATTR = "static"; + + /** Xml attribute specifying that secondary index should be created for Cassandra column. */ + private static final String INDEX_ATTR = "index"; + + /** Xml attribute specifying secondary index custom class. */ + private static final String INDEX_CLASS_ATTR = "indexClass"; + + /** Xml attribute specifying secondary index options. */ + private static final String INDEX_OPTIONS_ATTR = "indexOptions"; + + /** Indicates if Cassandra column should be indexed. */ + private Boolean isIndexed; + + /** Custom java class for Cassandra secondary index. */ + private String idxCls; + + /** Secondary index options. */ + private String idxOptions; + + /** Indicates if Cassandra column is static. */ + private Boolean isStatic; + + /** + * Constructs Ignite cache value field descriptor. + * + * @param el field descriptor xml configuration element. + * @param pojoCls field java class + */ + public PojoValueField(Element el, Class pojoCls) { + super(el, pojoCls); + + if (el.hasAttribute(STATIC_ATTR)) + isStatic = Boolean.parseBoolean(el.getAttribute(STATIC_ATTR).trim().toLowerCase()); + + if (el.hasAttribute(INDEX_ATTR)) + isIndexed = Boolean.parseBoolean(el.getAttribute(INDEX_ATTR).trim().toLowerCase()); + + if (el.hasAttribute(INDEX_CLASS_ATTR)) + idxCls = el.getAttribute(INDEX_CLASS_ATTR).trim(); + + if (el.hasAttribute(INDEX_OPTIONS_ATTR)) { + idxOptions = el.getAttribute(INDEX_OPTIONS_ATTR).trim(); + + if (!idxOptions.toLowerCase().startsWith("with")) { + idxOptions = idxOptions.toLowerCase().startsWith("options") ? + "with " + idxOptions : + "with options = " + idxOptions; + } + } + } + + /** + * Constructs Ignite cache value field descriptor. + * + * @param accessor field property accessor. + */ + public PojoValueField(PojoFieldAccessor accessor) { + super(accessor); + + QuerySqlField sqlField = (QuerySqlField)accessor.getAnnotation(QuerySqlField.class); + + isIndexed = sqlField != null && sqlField.index(); + } + + /** + * Constructs instance of {@code PojoValueField} based on the other instance and java class + * to initialize accessor. + * + * @param field PojoValueField instance + * @param pojoCls java class of the corresponding POJO + */ + public PojoValueField(PojoValueField field, Class pojoCls) { + super(field, pojoCls); + + isStatic = field.isStatic; + isIndexed = field.isIndexed; + idxCls = field.idxCls; + idxOptions = field.idxOptions; + } + + /** {@inheritDoc} */ + @Override public String getColumnDDL() { + String colDDL = super.getColumnDDL(); + + if (isStatic != null && isStatic) + colDDL += " static"; + + return colDDL; + } + + /** + * Indicates if secondary index should be created for the field. + * + * @return true/false if secondary index should/shouldn't be created for the field. 
+ */ + public boolean isIndexed() { + return isIndexed != null && isIndexed; + } + + /** + * Returns DDL for the field secondary index. + * + * @param keyspace Cassandra keyspace where index should be created. + * @param tbl Cassandra table for which secondary index should be created. + * + * @return secondary index DDL. + */ + public String getIndexDDL(String keyspace, String tbl) { + if (isIndexed == null || !isIndexed) + return null; + + StringBuilder builder = new StringBuilder(); + + if (idxCls != null) + builder.append("create custom index if not exists on \"").append(keyspace).append("\".\"").append(tbl).append("\""); + else + builder.append("create index if not exists on \"").append(keyspace).append("\".\"").append(tbl).append("\""); + + builder.append(" (\"").append(getColumn()).append("\")"); + + if (idxCls != null) + builder.append(" using '").append(idxCls).append("'"); + + if (idxOptions != null) + builder.append(" ").append(idxOptions); + + return builder.append(";").toString(); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/ValuePersistenceSettings.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/ValuePersistenceSettings.java new file mode 100644 index 0000000000000..5e106af70555e --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/ValuePersistenceSettings.java @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.persistence; + +import java.io.IOException; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; + +import org.w3c.dom.Element; +import org.w3c.dom.NodeList; + +/** + * Stores persistence settings for Ignite cache value + */ +public class ValuePersistenceSettings extends PersistenceSettings { + /** XML element describing value field settings. */ + private static final String FIELD_ELEMENT = "field"; + + /** Value fields. */ + private List fields = new LinkedList<>(); + + /** + * Creates class instance from XML configuration. + * + * @param el XML element describing value persistence settings. + */ + public ValuePersistenceSettings(Element el) { + super(el); + + if (PersistenceStrategy.POJO != getStrategy()) { + init(); + + return; + } + + NodeList nodes = el.getElementsByTagName(FIELD_ELEMENT); + + fields = detectPojoFields(nodes); + + if (fields.isEmpty()) + throw new IllegalStateException("Failed to initialize value fields for class '" + getJavaClass().getName() + "'"); + + checkDuplicates(fields); + + init(); + } + + /** + * @return List of value fields. 
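To make the generated CQL concrete, the sketch below mirrors the plain (non-custom) branch of getIndexDDL(...) shown above; the keyspace, table and column names are placeholders.

public class IndexDdlIllustration {
    public static void main(String[] args) {
        String keyspace = "test";
        String tbl = "pojo_test";
        String col = "married";

        // Same string assembly as the non-custom index branch above, with no extra index options.
        String ddl = "create index if not exists on \"" + keyspace + "\".\"" + tbl + "\"" +
            " (\"" + col + "\");";

        // Prints: create index if not exists on "test"."pojo_test" ("married");
        System.out.println(ddl);
    }
}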
+ */ + @Override public List getFields() { + return fields == null ? null : Collections.unmodifiableList(fields); + } + + /** {@inheritDoc} */ + @Override protected String defaultColumnName() { + return "value"; + } + + /** {@inheritDoc} */ + @Override protected PojoValueField createPojoField(Element el, Class clazz) { + return new PojoValueField(el, clazz); + } + + /** {@inheritDoc} */ + @Override protected PojoValueField createPojoField(PojoFieldAccessor accessor) { + return new PojoValueField(accessor); + } + + /** {@inheritDoc} */ + @Override protected PojoValueField createPojoField(PojoValueField field, Class clazz) { + return new PojoValueField(field, clazz); + } + + /** + * @see java.io.Serializable + */ + private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + + fields = enrichFields(fields); + } + +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/package-info.java new file mode 100644 index 0000000000000..7dd0840ae00c7 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/persistence/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains persistent settings configuration + */ + +package org.apache.ignite.cache.store.cassandra.persistence; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/JavaSerializer.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/JavaSerializer.java new file mode 100644 index 0000000000000..44d2d47019f26 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/JavaSerializer.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.serializer; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.nio.ByteBuffer; +import org.apache.ignite.internal.util.typedef.internal.U; + +/** + * Serializer based on standard Java serialization. + */ +public class JavaSerializer implements Serializer { + /** */ + private static final int DFLT_BUFFER_SIZE = 4096; + + /** {@inheritDoc} */ + @Override public ByteBuffer serialize(Object obj) { + if (obj == null) + return null; + + ByteArrayOutputStream stream = null; + ObjectOutputStream out = null; + + try { + stream = new ByteArrayOutputStream(DFLT_BUFFER_SIZE); + + out = new ObjectOutputStream(stream); + out.writeObject(obj); + out.flush(); + + return ByteBuffer.wrap(stream.toByteArray()); + } + catch (IOException e) { + throw new IllegalStateException("Failed to serialize object of the class '" + obj.getClass().getName() + "'", e); + } + finally { + U.closeQuiet(out); + U.closeQuiet(stream); + } + } + + /** {@inheritDoc} */ + @Override public Object deserialize(ByteBuffer buf) { + ByteArrayInputStream stream = null; + ObjectInputStream in = null; + + try { + stream = new ByteArrayInputStream(buf.array()); + in = new ObjectInputStream(stream); + + return in.readObject(); + } + catch (Throwable e) { + throw new IllegalStateException("Failed to deserialize object from byte stream", e); + } + finally { + U.closeQuiet(in); + U.closeQuiet(stream); + } + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/Serializer.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/Serializer.java new file mode 100644 index 0000000000000..5b8d5422b32fb --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/Serializer.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.serializer; + +import java.io.Serializable; +import java.nio.ByteBuffer; + +/** + * Interface which should be implemented by all serializers responsible + * for writing/loading data to/from Cassandra in binary (BLOB) format. + */ +public interface Serializer extends Serializable { + /** + * Serializes object into byte buffer. + * + * @param obj Object to serialize. + * @return Byte buffer with binary data. + */ + public ByteBuffer serialize(Object obj); + + /** + * Deserializes object from byte buffer. 
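A quick round trip through the JavaSerializer above (a sketch; any Serializable value works):

import java.nio.ByteBuffer;
import org.apache.ignite.cache.store.cassandra.serializer.JavaSerializer;

public class JavaSerializerSketch {
    public static void main(String[] args) {
        JavaSerializer serializer = new JavaSerializer();

        // Serialize an arbitrary Serializable object into a ByteBuffer and restore it back.
        ByteBuffer buf = serializer.serialize("hello");
        Object restored = serializer.deserialize(buf);

        System.out.println(restored); // hello
    }
}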
+ * + * @param buf Byte buffer. + * @return Deserialized object. + */ + public Object deserialize(ByteBuffer buf); +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/package-info.java new file mode 100644 index 0000000000000..aa1bccfa07d7b --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/serializer/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains serializers implementation, to store BLOBs into Cassandra + */ + +package org.apache.ignite.cache.store.cassandra.serializer; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/BatchExecutionAssistant.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/BatchExecutionAssistant.java new file mode 100644 index 0000000000000..5d971e8779d43 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/BatchExecutionAssistant.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.session; + +import com.datastax.driver.core.BoundStatement; +import com.datastax.driver.core.PreparedStatement; +import com.datastax.driver.core.Row; +import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings; + +/** + * Provides information for batch operations (loadAll, deleteAll, writeAll) of Ignite cache + * backed by {@link org.apache.ignite.cache.store.cassandra.CassandraCacheStore}. + * + * @param type of the result returned from batch operation. + * @param type of the value used in batch operation. 
+ */ +public interface BatchExecutionAssistant { + /** + * Indicates if Cassandra tables existence is required for this batch operation. + * + * @return {@code true} true if table existence required. + */ + public boolean tableExistenceRequired(); + + /** + * Cassandra table to use for an operation. + * + * @return Table name. + */ + public String getTable(); + + /** + * Returns unbind CLQ statement for to be executed inside batch operation. + * + * @return Unbind CQL statement. + */ + public String getStatement(); + + /** + * Binds prepared statement to current Cassandra session. + * + * @param statement Statement. + * @param obj Parameters for statement binding. + * @return Bounded statement. + */ + public BoundStatement bindStatement(PreparedStatement statement, V obj); + + /** + * Returns Ignite cache key/value persistence settings. + * + * @return persistence settings. + */ + public KeyValuePersistenceSettings getPersistenceSettings(); + + /** + * Display name for the batch operation. + * + * @return Operation display name. + */ + public String operationName(); + + /** + * Processes particular row inside batch operation. + * + * @param row Row to process. + * @param seqNum Sequential number of the row. + */ + public void process(Row row, int seqNum); + + /** + * Checks if row/object with specified sequential number is already processed. + * + * @param seqNum object sequential number + * @return {@code true} if object is already processed + */ + public boolean alreadyProcessed(int seqNum); + + /** + * @return number of processed objects/rows. + */ + public int processedCount(); + + /** + * @return batch operation result. + */ + public R processedData(); +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/BatchLoaderAssistant.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/BatchLoaderAssistant.java new file mode 100644 index 0000000000000..387c98f4bdfdf --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/BatchLoaderAssistant.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.session; + +import com.datastax.driver.core.Row; +import com.datastax.driver.core.Statement; + +/** + * Provides information for loadCache operation of {@link org.apache.ignite.cache.store.cassandra.CassandraCacheStore}. + */ +public interface BatchLoaderAssistant { + /** + * Returns name of the batch load operation. + * + * @return operation name. + */ + public String operationName(); + + /** + * Returns CQL statement to use in batch load operation. 
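For example, a minimal BatchLoaderAssistant that collects a single column of every row could look like the sketch below; the keyspace, table and column names are placeholders, and the driver's SimpleStatement is assumed for wrapping the raw CQL.

import java.util.ArrayList;
import java.util.List;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.SimpleStatement;
import com.datastax.driver.core.Statement;
import org.apache.ignite.cache.store.cassandra.session.BatchLoaderAssistant;

/** Minimal sketch: loads the "id" column of every row of a placeholder table into a list. */
public class CollectIdsAssistant implements BatchLoaderAssistant {
    /** Collected column values. */
    private final List<Long> ids = new ArrayList<>();

    /** {@inheritDoc} */
    @Override public String operationName() {
        return "collectIds";
    }

    /** {@inheritDoc} */
    @Override public Statement getStatement() {
        return new SimpleStatement("select \"id\" from \"test\".\"my_table\"");
    }

    /** {@inheritDoc} */
    @Override public void process(Row row) {
        ids.add(row.getLong("id"));
    }

    /** Collected values, available once the session has finished the load. */
    public List<Long> ids() {
        return ids;
    }
}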
+ * + * @return CQL statement for batch load operation. + */ + public Statement getStatement(); + + /** + * Processes each row returned by batch load operation. + * + * @param row row selected from Cassandra table. + */ + public void process(Row row); +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/CassandraSession.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/CassandraSession.java new file mode 100644 index 0000000000000..facfa40a49487 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/CassandraSession.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.session; + +import java.io.Closeable; +import java.util.List; +import org.apache.ignite.cache.store.cassandra.session.transaction.Mutation; + +/** + * Wrapper around Cassandra driver session, to automatically handle: + *
<ul>
+ * <li>Keyspace and table absence exceptions</li>
+ * <li>Timeout exceptions</li>
+ * <li>Batch operations</li>
+ * </ul>
+ */ +public interface CassandraSession extends Closeable { + /** + * Execute single synchronous operation against Cassandra database. + * + * @param assistant execution assistance to perform the main operation logic. + * @param type of the result returned from operation. + * + * @return result of the operation. + */ + public V execute(ExecutionAssistant assistant); + + /** + * Executes batch asynchronous operation against Cassandra database. + * + * @param assistant execution assistance to perform the main operation logic. + * @param data data which should be processed in batch operation. + * @param type of the result returned from batch operation. + * @param type of the value used in batch operation. + * + * @return result of the operation. + */ + public R execute(BatchExecutionAssistant assistant, Iterable data); + + /** + * Executes batch asynchronous operation to load bunch of records + * specified by CQL statement from Cassandra database + * + * @param assistant execution assistance to perform the main operation logic. + */ + public void execute(BatchLoaderAssistant assistant); + + /** + * Executes all the mutations performed withing Ignite transaction against Cassandra database. + * + * @param mutations Mutations. + */ + public void execute(List mutations); +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/CassandraSessionImpl.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/CassandraSessionImpl.java new file mode 100644 index 0000000000000..53aa424744171 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/CassandraSessionImpl.java @@ -0,0 +1,1030 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
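Putting the pieces together, a session might be created and used roughly as follows (a sketch only: contact point, fetch size, consistency levels and timeout are placeholder values; the constructor signature is that of CassandraSessionImpl introduced below, and CollectIdsAssistant is the loader sketched earlier):

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.ConsistencyLevel;
import org.apache.ignite.cache.store.cassandra.session.CassandraSession;
import org.apache.ignite.cache.store.cassandra.session.CassandraSessionImpl;
import org.apache.ignite.logger.NullLogger;

public class SessionUsageSketch {
    public static void main(String[] args) {
        Cluster.Builder builder = Cluster.builder().addContactPoint("127.0.0.1");

        CassandraSession ses = new CassandraSessionImpl(
            builder,                  // Cassandra cluster builder
            1024,                     // fetch size
            ConsistencyLevel.QUORUM,  // read consistency
            ConsistencyLevel.QUORUM,  // write consistency
            60_000L,                  // expiration timeout, ms
            new NullLogger());        // logger (placeholder)

        // Run the batch load using the assistant sketched earlier.
        CollectIdsAssistant assistant = new CollectIdsAssistant();
        ses.execute(assistant);

        System.out.println(assistant.ids());
    }
}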
+ */ + +package org.apache.ignite.cache.store.cassandra.session; + +import java.io.IOException; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.locks.ReentrantLock; +import javax.cache.Cache; +import com.datastax.driver.core.BatchStatement; +import com.datastax.driver.core.BoundStatement; +import com.datastax.driver.core.Cluster; +import com.datastax.driver.core.ConsistencyLevel; +import com.datastax.driver.core.PreparedStatement; +import com.datastax.driver.core.ResultSet; +import com.datastax.driver.core.ResultSetFuture; +import com.datastax.driver.core.Row; +import com.datastax.driver.core.Session; +import com.datastax.driver.core.Statement; +import com.datastax.driver.core.exceptions.AlreadyExistsException; +import com.datastax.driver.core.exceptions.InvalidQueryException; +import com.datastax.driver.core.querybuilder.Batch; +import org.apache.ignite.IgniteException; +import org.apache.ignite.IgniteLogger; +import org.apache.ignite.cache.store.cassandra.common.CassandraHelper; +import org.apache.ignite.cache.store.cassandra.common.RandomSleeper; +import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings; +import org.apache.ignite.cache.store.cassandra.session.pool.SessionPool; +import org.apache.ignite.cache.store.cassandra.session.transaction.Mutation; +import org.apache.ignite.internal.processors.cache.CacheEntryImpl; +import org.apache.ignite.internal.util.typedef.internal.LT; + +/** + * Implementation for {@link org.apache.ignite.cache.store.cassandra.session.CassandraSession}. + */ +public class CassandraSessionImpl implements CassandraSession { + /** Number of CQL query execution attempts. */ + private static final int CQL_EXECUTION_ATTEMPTS_COUNT = 20; + + /** Min timeout between CQL query execution attempts. */ + private static final int CQL_EXECUTION_ATTEMPT_MIN_TIMEOUT = 100; + + /** Max timeout between CQL query execution attempts. */ + private static final int CQL_EXECUTION_ATTEMPT_MAX_TIMEOUT = 500; + + /** Timeout increment for CQL query execution attempts. */ + private static final int CQL_ATTEMPTS_TIMEOUT_INCREMENT = 100; + + /** Cassandra cluster builder. */ + private volatile Cluster.Builder builder; + + /** + * Current generation number of Cassandra session. Each time session recreated its generation will be incremented. + * The main idea behind session generation is to track prepared statements created with old Cassandra + * session (which is not valid anymore) and avoid extra refresh of Cassandra session by multiple threads. + **/ + private volatile Long generation = 0L; + + /** Wrapped Cassandra session. **/ + private volatile WrappedSession wrapperSes; + + /** Number of references to Cassandra driver session (for multithreaded environment). */ + private volatile int refCnt; + + /** Storage for the session prepared statements */ + private static final Map sesStatements = new HashMap<>(); + + /** Number of records to immediately fetch in CQL statement execution. */ + private Integer fetchSize; + + /** Consistency level for Cassandra READ operations (select). */ + private ConsistencyLevel readConsistency; + + /** Consistency level for Cassandra WRITE operations (insert/update/delete). */ + private ConsistencyLevel writeConsistency; + + /** Expiration timeout. */ + private long expirationTimeout; + + /** Logger. 
*/ + private IgniteLogger log; + + /** Table absence error handlers counter. */ + private final Map tblAbsenceHandlersCnt = new ConcurrentHashMap<>(); + + /** Lock used to synchronize multiple threads trying to do session refresh. **/ + private final ReentrantLock refreshLock = new ReentrantLock(); + + /** + * Creates instance of Cassandra driver session wrapper. + * + * @param builder Builder for Cassandra cluster. + * @param fetchSize Number of rows to immediately fetch in CQL statement execution. + * @param readConsistency Consistency level for Cassandra READ operations (select). + * @param writeConsistency Consistency level for Cassandra WRITE operations (insert/update/delete). + * @param expirationTimeout Expiration timout. + * @param log Logger. + */ + public CassandraSessionImpl(Cluster.Builder builder, Integer fetchSize, ConsistencyLevel readConsistency, + ConsistencyLevel writeConsistency, long expirationTimeout, IgniteLogger log) { + this.builder = builder; + this.fetchSize = fetchSize; + this.readConsistency = readConsistency; + this.writeConsistency = writeConsistency; + this.expirationTimeout = expirationTimeout; + this.log = log; + } + + /** {@inheritDoc} */ + @Override public V execute(ExecutionAssistant assistant) { + int attempt = 0; + Throwable error = null; + String errorMsg = "Failed to execute Cassandra CQL statement: " + assistant.getStatement(); + + RandomSleeper sleeper = newSleeper(); + + incrementSessionRefs(); + + try { + while (attempt < CQL_EXECUTION_ATTEMPTS_COUNT) { + if (attempt != 0) { + log.warning("Trying " + (attempt + 1) + " attempt to execute Cassandra CQL statement: " + + assistant.getStatement()); + } + + WrappedPreparedStatement preparedSt = null; + WrappedSession ses = null; + + try { + preparedSt = prepareStatement(assistant.getTable(), assistant.getStatement(), + assistant.getPersistenceSettings(), assistant.tableExistenceRequired()); + + if (preparedSt == null) + return null; + + Statement statement = tuneStatementExecutionOptions(assistant.bindStatement(preparedSt)); + + ses = session(); + + ResultSet res = ses.execute(statement); + + Row row = res == null || !res.iterator().hasNext() ? null : res.iterator().next(); + + return row == null ? null : assistant.process(row); + } + catch (Throwable e) { + error = e; + + if (CassandraHelper.isTableAbsenceError(e)) { + if (!assistant.tableExistenceRequired()) { + log.warning(errorMsg, e); + return null; + } + + handleTableAbsenceError(assistant.getTable(), assistant.getPersistenceSettings()); + } + else if (CassandraHelper.isHostsAvailabilityError(e)) + handleHostsAvailabilityError(ses == null ? -1 : ses.generation, e, attempt, errorMsg); + else if (CassandraHelper.isPreparedStatementClusterError(e)) + handlePreparedStatementClusterError(preparedSt == null ? -1 : preparedSt.generation, e); + else + // For an error which we don't know how to handle, we will not try next attempts and terminate. 
+ throw new IgniteException(errorMsg, e); + } + + if (!CassandraHelper.isTableAbsenceError(error)) + sleeper.sleep(); + + attempt++; + } + } + catch (Throwable e) { + error = e; + } + finally { + decrementSessionRefs(); + } + + log.error(errorMsg, error); + + throw new IgniteException(errorMsg, error); + } + + /** {@inheritDoc} */ + @Override public R execute(BatchExecutionAssistant assistant, Iterable data) { + if (data == null || !data.iterator().hasNext()) + return assistant.processedData(); + + int attempt = 0; + String errorMsg = "Failed to execute Cassandra " + assistant.operationName() + " operation"; + Throwable error = new IgniteException(errorMsg); + + RandomSleeper sleeper = newSleeper(); + + int dataSize = 0; + + incrementSessionRefs(); + + try { + while (attempt < CQL_EXECUTION_ATTEMPTS_COUNT) { + if (attempt != 0) { + log.warning("Trying " + (attempt + 1) + " attempt to execute Cassandra batch " + + assistant.operationName() + " operation to process rest " + + (dataSize - assistant.processedCount()) + " of " + dataSize + " elements"); + } + + //clean errors info before next communication with Cassandra + Throwable unknownEx = null; + Throwable tblAbsenceEx = null; + Throwable hostsAvailEx = null; + Throwable prepStatEx = null; + + List> futResults = new LinkedList<>(); + + WrappedPreparedStatement preparedSt = prepareStatement(assistant.getTable(), assistant.getStatement(), + assistant.getPersistenceSettings(), assistant.tableExistenceRequired()); + + if (preparedSt == null) + return null; + + WrappedSession ses = null; + + int seqNum = 0; + + for (V obj : data) { + if (!assistant.alreadyProcessed(seqNum)) { + try { + ses = session(); + Statement statement = tuneStatementExecutionOptions(assistant.bindStatement(preparedSt, obj)); + ResultSetFuture fut = ses.executeAsync(statement); + futResults.add(new CacheEntryImpl<>(seqNum, fut)); + } + catch (Throwable e) { + if (CassandraHelper.isTableAbsenceError(e)) { + // If there are table absence error and it is not required for the operation we can return. + if (!assistant.tableExistenceRequired()) + return assistant.processedData(); + + tblAbsenceEx = e; + handleTableAbsenceError(assistant.getTable(), assistant.getPersistenceSettings()); + } + else if (CassandraHelper.isHostsAvailabilityError(e)) { + hostsAvailEx = e; + + // Handle host availability only once. + if (hostsAvailEx == null) + handleHostsAvailabilityError(ses == null ? 0 : ses.generation, e, attempt, errorMsg); + } + else if (CassandraHelper.isPreparedStatementClusterError(e)) { + prepStatEx = e; + + handlePreparedStatementClusterError(preparedSt.generation, e); + + preparedSt = prepareStatement(assistant.getTable(), assistant.getStatement(), + assistant.getPersistenceSettings(), assistant.tableExistenceRequired()); + + if (preparedSt == null) + return null; + } + else + unknownEx = e; + } + } + + seqNum++; + } + + dataSize = seqNum; + + // For an error which we don't know how to handle, we will not try next attempts and terminate. + if (unknownEx != null) + throw new IgniteException(errorMsg, unknownEx); + + // Remembering any of last errors. + if (tblAbsenceEx != null) + error = tblAbsenceEx; + else if (hostsAvailEx != null) + error = hostsAvailEx; + else if (prepStatEx != null) + error = prepStatEx; + + // Clean errors info before next communication with Cassandra. 
+ unknownEx = null; + tblAbsenceEx = null; + hostsAvailEx = null; + prepStatEx = null; + + for (Cache.Entry futureResult : futResults) { + try { + ResultSet resSet = futureResult.getValue().getUninterruptibly(); + Row row = resSet != null && resSet.iterator().hasNext() ? resSet.iterator().next() : null; + + assistant.process(row, futureResult.getKey()); + } + catch (Throwable e) { + if (CassandraHelper.isTableAbsenceError(e)) + tblAbsenceEx = e; + else if (CassandraHelper.isHostsAvailabilityError(e)) + hostsAvailEx = e; + else if (CassandraHelper.isPreparedStatementClusterError(e)) + prepStatEx = e; + else + unknownEx = e; + } + } + + // For an error which we don't know how to handle, we will not try next attempts and terminate. + if (unknownEx != null) + throw new IgniteException(errorMsg, unknownEx); + + // If there are no errors occurred it means that operation successfully completed and we can return. + if (tblAbsenceEx == null && hostsAvailEx == null && prepStatEx == null && assistant.processedCount() == dataSize) + return assistant.processedData(); + + if (tblAbsenceEx != null) { + // If there are table absence error and it is not required for the operation we can return. + if (!assistant.tableExistenceRequired()) + return assistant.processedData(); + + error = tblAbsenceEx; + handleTableAbsenceError(assistant.getTable(), assistant.getPersistenceSettings()); + } + + if (hostsAvailEx != null) { + error = hostsAvailEx; + handleHostsAvailabilityError(ses.generation, hostsAvailEx, attempt, errorMsg); + } + + if (prepStatEx != null) { + error = prepStatEx; + handlePreparedStatementClusterError(preparedSt.generation, prepStatEx); + } + + if (!CassandraHelper.isTableAbsenceError(error)) + sleeper.sleep(); + + attempt++; + } + } + catch (Throwable e) { + error = e; + } + finally { + decrementSessionRefs(); + } + + errorMsg = "Failed to process " + (dataSize - assistant.processedCount()) + + " of " + dataSize + " elements, during " + assistant.operationName() + + " operation with Cassandra"; + + LT.warn(log, error, errorMsg, false, false); + + throw new IgniteException(errorMsg, error); + } + + /** {@inheritDoc} */ + @Override public void execute(BatchLoaderAssistant assistant) { + int attempt = 0; + String errorMsg = "Failed to execute Cassandra " + assistant.operationName() + " operation"; + Throwable error = new IgniteException(errorMsg); + + RandomSleeper sleeper = newSleeper(); + + incrementSessionRefs(); + + try { + while (attempt < CQL_EXECUTION_ATTEMPTS_COUNT) { + if (attempt != 0) + log.warning("Trying " + (attempt + 1) + " attempt to load Ignite cache"); + + Statement statement = tuneStatementExecutionOptions(assistant.getStatement()); + + WrappedSession ses = null; + + try { + ses = session(); + + ResultSetFuture fut = ses.executeAsync(statement); + ResultSet resSet = fut.getUninterruptibly(); + + if (resSet == null || !resSet.iterator().hasNext()) + return; + + for (Row row : resSet) + assistant.process(row); + + return; + } + catch (Throwable e) { + error = e; + + if (CassandraHelper.isTableAbsenceError(e)) + return; + else if (CassandraHelper.isHostsAvailabilityError(e)) + handleHostsAvailabilityError(ses == null ? 0 : ses.generation, e, attempt, errorMsg); + else + // For an error which we don't know how to handle, we will not try next attempts and terminate. 
+ throw new IgniteException(errorMsg, e); + } + + sleeper.sleep(); + + attempt++; + } + } + catch (Throwable e) { + error = e; + } + finally { + decrementSessionRefs(); + } + + log.error(errorMsg, error); + + throw new IgniteException(errorMsg, error); + } + + /** {@inheritDoc} */ + @Override public void execute(List<Mutation> mutations) { + if (mutations == null || mutations.isEmpty()) + return; + + Throwable error = null; + String errorMsg = "Failed to apply " + mutations.size() + " mutations performed within Ignite " + + "transaction into Cassandra"; + + int attempt = 0; + boolean tableExistenceRequired = false; + Map<String, WrappedPreparedStatement> statements = new HashMap<>(); + Map<String, KeyValuePersistenceSettings> tableSettings = new HashMap<>(); + RandomSleeper sleeper = newSleeper(); + + incrementSessionRefs(); + + try { + while (attempt < CQL_EXECUTION_ATTEMPTS_COUNT) { + if (attempt != 0) { + log.warning("Trying " + (attempt + 1) + " attempt to apply " + mutations.size() + " mutations " + + "performed within Ignite transaction into Cassandra"); + } + + WrappedPreparedStatement prepStatement = null; + WrappedSession ses = null; + + try { + BatchStatement batch = new BatchStatement(); + + // accumulating all the mutations into one Cassandra logged batch + for (Mutation mutation : mutations) { + String key = mutation.getTable() + mutation.getClass().getName(); + prepStatement = statements.get(key); + + if (prepStatement == null) { + prepStatement = prepareStatement(mutation.getTable(), mutation.getStatement(), + mutation.getPersistenceSettings(), mutation.tableExistenceRequired()); + + if (prepStatement != null) + statements.put(key, prepStatement); + } + + if (prepStatement != null) + batch.add(mutation.bindStatement(prepStatement)); + + if (attempt == 0) { + if (mutation.tableExistenceRequired()) { + tableExistenceRequired = true; + + if (!tableSettings.containsKey(mutation.getTable())) + tableSettings.put(mutation.getTable(), mutation.getPersistenceSettings()); + } + } + } + + // committing logged batch into Cassandra + if (batch.size() > 0) { + ses = session(); + ses.execute(tuneStatementExecutionOptions(batch)); + } + + return; + } + catch (Throwable e) { + error = e; + + if (CassandraHelper.isTableAbsenceError(e)) { + if (tableExistenceRequired) { + for (Map.Entry<String, KeyValuePersistenceSettings> entry : tableSettings.entrySet()) + handleTableAbsenceError(entry.getKey(), entry.getValue()); + } + else + return; + } + else if (CassandraHelper.isHostsAvailabilityError(e)) { + if (handleHostsAvailabilityError(ses == null ? 0 : ses.generation, e, attempt, errorMsg)) + statements.clear(); + } + else if (CassandraHelper.isPreparedStatementClusterError(e)) { + handlePreparedStatementClusterError(prepStatement == null ? 0 : prepStatement.generation, e); + statements.clear(); + } + else { + // For an error which we don't know how to handle, we will not try next attempts and terminate. + throw new IgniteException(errorMsg, e); + } + } + + if (!CassandraHelper.isTableAbsenceError(error)) + sleeper.sleep(); + + attempt++; + } + } + catch (Throwable e) { + error = e; + } + finally { + decrementSessionRefs(); + } + + log.error(errorMsg, error); + throw new IgniteException(errorMsg, error); + } + + /** {@inheritDoc} */ + @Override public synchronized void close() throws IOException { + if (decrementSessionRefs() == 0 && wrapperSes != null) { + SessionPool.put(this, wrapperSes.ses, expirationTimeout); + wrapperSes = null; + } + } + + /** + * Recreates Cassandra driver session.
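The execute(List<Mutation>) overload above is the entry point for flushing the changes collected during an Ignite transaction: each mutation is bound to its (cached) prepared statement, accumulated into one logged BatchStatement and committed in a single round trip. A hedged usage sketch; the session, persistence controller and the table name "ignite_cache" are assumptions supplied by the caller:

    import java.util.ArrayList;
    import java.util.List;
    import javax.cache.Cache;
    import org.apache.ignite.cache.store.cassandra.persistence.PersistenceController;
    import org.apache.ignite.cache.store.cassandra.session.CassandraSession;
    import org.apache.ignite.cache.store.cassandra.session.transaction.DeleteMutation;
    import org.apache.ignite.cache.store.cassandra.session.transaction.Mutation;
    import org.apache.ignite.cache.store.cassandra.session.transaction.WriteMutation;

    /** Usage sketch: apply one transaction's changes as a single logged batch. */
    class TxFlushSketch {
        static void flush(CassandraSession ses, PersistenceController ctrl,
            Cache.Entry<Object, Object> updatedEntry, Object removedKey) {
            List<Mutation> mutations = new ArrayList<>();

            // Writes require the target table to exist, deletes do not (see tableExistenceRequired()).
            mutations.add(new WriteMutation(updatedEntry, "ignite_cache", ctrl)); // Table name is illustrative.
            mutations.add(new DeleteMutation(removedKey, "ignite_cache", ctrl));

            // All mutations end up in one Cassandra logged batch committed by the call below.
            ses.execute(mutations);
        }
    }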
+ */ + private synchronized void refresh() { + //make sure that session removed from the pool + SessionPool.get(this); + + //closing and reopening session + if (wrapperSes != null) + CassandraHelper.closeSession(wrapperSes.ses); + + wrapperSes = null; + + session(); + } + + /** + * Returns Cassandra session and its generation number. + * + * @return Wrapper object providing Cassandra session and its generation number. + */ + private synchronized WrappedSession session() { + if (wrapperSes != null) + return wrapperSes; + + Session ses = SessionPool.get(this); + + if (ses != null) { + this.wrapperSes = new WrappedSession(ses, generation); + return this.wrapperSes; + } + + synchronized (sesStatements) { + sesStatements.clear(); + } + + try { + ses = builder.build().connect(); + generation++; + this.wrapperSes = new WrappedSession(ses, generation); + } + catch (Throwable e) { + throw new IgniteException("Failed to establish session with Cassandra database", e); + } + + return this.wrapperSes; + } + + /** + * Increments number of references to Cassandra driver session (required for multithreaded environment). + */ + private synchronized void incrementSessionRefs() { + refCnt++; + } + + /** + * Decrements number of references to Cassandra driver session (required for multithreaded environment). + */ + private synchronized int decrementSessionRefs() { + if (refCnt != 0) + refCnt--; + + return refCnt; + } + + /** + * Prepares CQL statement using current Cassandra driver session. + * + * @param statement CQL statement. + * @param settings Persistence settings. + * @param tblExistenceRequired Flag indicating if table existence is required for the statement. + * @return Prepared statement. + */ + private WrappedPreparedStatement prepareStatement(String table, String statement, KeyValuePersistenceSettings settings, + boolean tblExistenceRequired) { + + int attempt = 0; + Throwable error = null; + String errorMsg = "Failed to prepare Cassandra CQL statement: " + statement; + + RandomSleeper sleeper = newSleeper(); + + incrementSessionRefs(); + + try { + synchronized (sesStatements) { + WrappedPreparedStatement wrapper = sesStatements.get(statement); + + if (wrapper != null) { + // Prepared statement is still actual, cause it was created with the current Cassandra session. + if (generation == wrapper.generation) + return wrapper; + // Prepared statement is not actual anymore, cause it was created with the previous Cassandra session. + else + sesStatements.remove(statement); + } + } + + while (attempt < CQL_EXECUTION_ATTEMPTS_COUNT) { + WrappedSession ses = null; + + try { + ses = session(); + + WrappedPreparedStatement prepStatement = ses.prepare(statement); + + synchronized (sesStatements) { + sesStatements.put(statement, prepStatement); + } + + return prepStatement; + } + catch (Throwable e) { + if (CassandraHelper.isTableAbsenceError(e)) { + if (!tblExistenceRequired) + return null; + + handleTableAbsenceError(table, settings); + } + else if (CassandraHelper.isHostsAvailabilityError(e)) + handleHostsAvailabilityError(ses == null ? 0 : ses.generation, e, attempt, errorMsg); + else + throw new IgniteException(errorMsg, e); + + error = e; + } + + if (!CassandraHelper.isTableAbsenceError(error)) + sleeper.sleep(); + + attempt++; + } + } + finally { + decrementSessionRefs(); + } + + throw new IgniteException(errorMsg, error); + } + + /** + * Creates Cassandra keyspace. + * + * @param settings Persistence settings. 
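prepareStatement() above caches prepared statements per CQL string and tags each one with the session generation it was created under; whenever session() has to rebuild the driver session, the generation is bumped and statements from older generations are dropped and re-prepared. A simplified, self-contained illustration of that generation-guard idiom (not the production code):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    /** Illustration of a generation-guarded cache: stale entries are dropped on read. */
    class GenerationGuardedCache<K, V> {
        /** Cached value remembers the generation it was created under. */
        private static final class Entry<V> {
            private final V val;
            private final long gen;

            private Entry(V val, long gen) {
                this.val = val;
                this.gen = gen;
            }
        }

        private final Map<K, Entry<V>> cache = new ConcurrentHashMap<>();

        /** Returns the cached value only if it was produced under the current generation. */
        V get(K key, long curGen) {
            Entry<V> e = cache.get(key);

            if (e == null)
                return null;

            if (e.gen == curGen)
                return e.val;

            cache.remove(key); // Stale: created with a previous (already closed) session.

            return null;
        }

        void put(K key, V val, long curGen) {
            cache.put(key, new Entry<>(val, curGen));
        }
    }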
+ */ + private void createKeyspace(KeyValuePersistenceSettings settings) { + int attempt = 0; + Throwable error = null; + String errorMsg = "Failed to create Cassandra keyspace '" + settings.getKeyspace() + "'"; + + while (attempt < CQL_EXECUTION_ATTEMPTS_COUNT) { + WrappedSession ses = null; + + try { + ses = session(); + + if (log.isInfoEnabled()) { + log.info("-----------------------------------------------------------------------"); + log.info("Creating Cassandra keyspace '" + settings.getKeyspace() + "'"); + log.info("-----------------------------------------------------------------------\n\n" + + settings.getKeyspaceDDLStatement() + "\n"); + log.info("-----------------------------------------------------------------------"); + } + + ses.execute(settings.getKeyspaceDDLStatement()); + + if (log.isInfoEnabled()) + log.info("Cassandra keyspace '" + settings.getKeyspace() + "' was successfully created"); + + return; + } + catch (AlreadyExistsException ignored) { + if (log.isInfoEnabled()) + log.info("Cassandra keyspace '" + settings.getKeyspace() + "' already exist"); + + return; + } + catch (Throwable e) { + if (!CassandraHelper.isHostsAvailabilityError(e)) + throw new IgniteException(errorMsg, e); + + handleHostsAvailabilityError(ses == null ? 0 : ses.generation, e, attempt, errorMsg); + + error = e; + } + + attempt++; + } + + throw new IgniteException(errorMsg, error); + } + + /** + * Creates Cassandra table. + * + * @param settings Persistence settings. + */ + private void createTable(String table, KeyValuePersistenceSettings settings) { + int attempt = 0; + Throwable error = null; + String tableFullName = settings.getKeyspace() + "." + table; + String errorMsg = "Failed to create Cassandra table '" + tableFullName + "'"; + + while (attempt < CQL_EXECUTION_ATTEMPTS_COUNT) { + WrappedSession ses = null; + + try { + ses = session(); + + if (log.isInfoEnabled()) { + log.info("-----------------------------------------------------------------------"); + log.info("Creating Cassandra table '" + tableFullName + "'"); + log.info("-----------------------------------------------------------------------\n\n" + + settings.getTableDDLStatement(table) + "\n"); + log.info("-----------------------------------------------------------------------"); + } + + ses.execute(settings.getTableDDLStatement(table)); + + if (log.isInfoEnabled()) + log.info("Cassandra table '" + tableFullName + "' was successfully created"); + + return; + } + catch (AlreadyExistsException ignored) { + if (log.isInfoEnabled()) + log.info("Cassandra table '" + tableFullName + "' already exist"); + + return; + } + catch (Throwable e) { + if (!CassandraHelper.isHostsAvailabilityError(e) && !CassandraHelper.isKeyspaceAbsenceError(e)) + throw new IgniteException(errorMsg, e); + + if (CassandraHelper.isKeyspaceAbsenceError(e)) { + log.warning("Failed to create Cassandra table '" + tableFullName + + "' cause appropriate keyspace doesn't exist", e); + createKeyspace(settings); + } + else if (CassandraHelper.isHostsAvailabilityError(e)) + handleHostsAvailabilityError(ses == null ? 0 : ses.generation, e, attempt, errorMsg); + + error = e; + } + + attempt++; + } + + throw new IgniteException(errorMsg, error); + } + + /** + * Creates Cassandra table indexes. + * + * @param settings Persistence settings. 
+ */ + private void createTableIndexes(String table, KeyValuePersistenceSettings settings) { + List indexDDLStatements = settings.getIndexDDLStatements(table); + + if (indexDDLStatements == null || indexDDLStatements.isEmpty()) + return; + + int attempt = 0; + Throwable error = null; + String tableFullName = settings.getKeyspace() + "." + table; + String errorMsg = "Failed to create indexes for Cassandra table " + tableFullName; + + while (attempt < CQL_EXECUTION_ATTEMPTS_COUNT) { + WrappedSession ses = null; + + try { + ses = session(); + + if (log.isInfoEnabled()) { + log.info("-----------------------------------------------------------------------"); + log.info("Creating indexes for Cassandra table '" + tableFullName + "'"); + log.info("-----------------------------------------------------------------------"); + } + + for (String statement : indexDDLStatements) { + try { + if (log.isInfoEnabled()) { + log.info(statement); + log.info("-----------------------------------------------------------------------"); + } + + ses.execute(statement); + } + catch (AlreadyExistsException ignored) { + } + catch (Throwable e) { + if (!(e instanceof InvalidQueryException) || !"Index already exists".equals(e.getMessage())) + throw new IgniteException(errorMsg, e); + } + } + + if (log.isInfoEnabled()) + log.info("Indexes for Cassandra table '" + tableFullName + "' were successfully created"); + + return; + } + catch (Throwable e) { + if (CassandraHelper.isHostsAvailabilityError(e)) + handleHostsAvailabilityError(ses == null ? 0 : ses.generation, e, attempt, errorMsg); + else if (CassandraHelper.isTableAbsenceError(e)) + createTable(table, settings); + else + throw new IgniteException(errorMsg, e); + + error = e; + } + + attempt++; + } + + throw new IgniteException(errorMsg, error); + } + + /** + * Tunes CQL statement execution options (consistency level, fetch option and etc.). + * + * @param statement Statement. + * @return Modified statement. + */ + private Statement tuneStatementExecutionOptions(Statement statement) { + String qry = ""; + + if (statement instanceof BoundStatement) + qry = ((BoundStatement)statement).preparedStatement().getQueryString().trim().toLowerCase(); + else if (statement instanceof PreparedStatement) + qry = ((PreparedStatement)statement).getQueryString().trim().toLowerCase(); + + boolean readStatement = qry.startsWith("select"); + boolean writeStatement = statement instanceof Batch || statement instanceof BatchStatement || + qry.startsWith("insert") || qry.startsWith("delete") || qry.startsWith("update"); + + if (readStatement && readConsistency != null) + statement.setConsistencyLevel(readConsistency); + + if (writeStatement && writeConsistency != null) + statement.setConsistencyLevel(writeConsistency); + + if (fetchSize != null) + statement.setFetchSize(fetchSize); + + return statement; + } + + /** + * Handles situation when Cassandra table doesn't exist. + * + * @param settings Persistence settings. + */ + private void handleTableAbsenceError(String table, KeyValuePersistenceSettings settings) { + String tableFullName = settings.getKeyspace() + "." + table; + + AtomicInteger counter = tblAbsenceHandlersCnt.computeIfAbsent(tableFullName, k -> new AtomicInteger(-1)); + + int hndNum = counter.incrementAndGet(); + + try { + synchronized (counter) { + // Oooops... I am not the first thread who tried to handle table absence problem. + if (hndNum != 0) { + log.warning("Table " + tableFullName + " absence problem detected. 
" + + "Another thread already fixed it."); + return; + } + + log.warning("Table " + tableFullName + " absence problem detected. " + + "Trying to create table."); + + createKeyspace(settings); + createTable(table, settings); + createTableIndexes(table, settings); + } + } + finally { + if (hndNum == 0) + counter.set(-1); + } + } + + /** + * Handles situation when prepared statement execution failed cause session to the cluster was released. + * + * @param sesGeneration Generation of Cassandra session used to create prepared statement. + * @param e Exception thrown during statement execution. + */ + private void handlePreparedStatementClusterError(long sesGeneration, Throwable e) { + if (sesGeneration < generation) { + log.warning("Prepared statement cluster error detected, another thread already fixed the problem", e); + return; + } + + refreshLock.lock(); + + try { + if (sesGeneration < generation) { + log.warning("Prepared statement cluster error detected, another thread already fixed the problem", e); + return; + } + + log.warning("Prepared statement cluster error detected, refreshing Cassandra session", e); + + refresh(); + + log.warning("Cassandra session refreshed"); + } + finally { + refreshLock.unlock(); + } + } + + /** + * Handles situation when Cassandra host which is responsible for CQL query execution became unavailable. + * + * @param sesGeneration Generation of Cassandra session used to run CQL statement. + * @param e Exception to handle. + * @param attempt Number of attempts. + * @param msg Error message. + * @return {@code true} if host unavailability was successfully handled. + */ + private boolean handleHostsAvailabilityError(long sesGeneration, Throwable e, int attempt, String msg) { + if (attempt >= CQL_EXECUTION_ATTEMPTS_COUNT) { + log.error("Host availability problem detected. " + + "Number of CQL execution attempts reached maximum " + CQL_EXECUTION_ATTEMPTS_COUNT + + ", exception will be thrown to upper execution layer.", e); + throw msg == null ? new IgniteException(e) : new IgniteException(msg, e); + } + + if (attempt == CQL_EXECUTION_ATTEMPTS_COUNT / 4 || + attempt == CQL_EXECUTION_ATTEMPTS_COUNT / 2 || + attempt == CQL_EXECUTION_ATTEMPTS_COUNT / 2 + CQL_EXECUTION_ATTEMPTS_COUNT / 4 || + attempt == CQL_EXECUTION_ATTEMPTS_COUNT - 1) { + + refreshLock.lock(); + + try { + if (sesGeneration < generation) + log.warning("Host availability problem detected, but already handled by another thread"); + else { + log.warning("Host availability problem detected, CQL execution attempt " + (attempt + 1) + ", " + + "refreshing Cassandra session", e); + + refresh(); + + log.warning("Cassandra session refreshed"); + + return true; + } + } + finally { + refreshLock.unlock(); + } + } + + log.warning("Host availability problem detected, CQL execution attempt " + (attempt + 1) + ", " + + "sleeping extra " + CQL_EXECUTION_ATTEMPT_MAX_TIMEOUT + " milliseconds", e); + + try { + Thread.sleep(CQL_EXECUTION_ATTEMPT_MAX_TIMEOUT); + } + catch (InterruptedException ignored) { + } + + log.warning("Sleep completed"); + + return false; + } + + /** + * @return New random sleeper. 
+ */ + private RandomSleeper newSleeper() { + return new RandomSleeper(CQL_EXECUTION_ATTEMPT_MIN_TIMEOUT, + CQL_EXECUTION_ATTEMPT_MAX_TIMEOUT, + CQL_ATTEMPTS_TIMEOUT_INCREMENT, log); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/ExecutionAssistant.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/ExecutionAssistant.java new file mode 100644 index 0000000000000..b0dba8bf79f0a --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/ExecutionAssistant.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.session; + +import com.datastax.driver.core.BoundStatement; +import com.datastax.driver.core.PreparedStatement; +import com.datastax.driver.core.Row; +import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings; + +/** + * Provides information for single operations (load, delete, write) of Ignite cache + * backed by {@link org.apache.ignite.cache.store.cassandra.CassandraCacheStore}. + * + * @param type of the result returned from operation. + */ +public interface ExecutionAssistant { + /** + * Indicates if Cassandra table existence is required for an operation. + * + * @return true if table existence required. + */ + public boolean tableExistenceRequired(); + + /** + * Cassandra table to use for an operation. + * + * @return Table name. + */ + public String getTable(); + + /** + * Returns CQL statement to be used for an operation. + * + * @return CQL statement. + */ + public String getStatement(); + + /** + * Binds prepared statement. + * + * @param statement prepared statement. + * + * @return bound statement. + */ + public BoundStatement bindStatement(PreparedStatement statement); + + /** + * Persistence settings to use for an operation. + * + * @return persistence settings. + */ + public KeyValuePersistenceSettings getPersistenceSettings(); + + /** + * Returns operation name. + * + * @return operation name. + */ + public String operationName(); + + /** + * Processes Cassandra database table row returned by specified CQL statement. + * + * @param row Cassandra database table row. + * + * @return result of the operation. 
+ */ + public R process(Row row); +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/GenericBatchExecutionAssistant.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/GenericBatchExecutionAssistant.java new file mode 100644 index 0000000000000..1133caf3a5726 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/GenericBatchExecutionAssistant.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.session; + +import java.util.HashSet; +import java.util.Set; +import com.datastax.driver.core.Row; + +/** + * Implementation of the {@link org.apache.ignite.cache.store.cassandra.session.BatchExecutionAssistant}. + * + * @param Type of the result returned from batch operation + * @param Type of the value used in batch operation + */ +public abstract class GenericBatchExecutionAssistant implements BatchExecutionAssistant { + /** Identifiers of already processed objects. */ + private Set processed = new HashSet<>(); + + /** {@inheritDoc} */ + @Override public void process(Row row, int seqNum) { + if (processed.contains(seqNum)) + return; + + process(row); + + processed.add(seqNum); + } + + /** {@inheritDoc} */ + @Override public boolean alreadyProcessed(int seqNum) { + return processed.contains(seqNum); + } + + /** {@inheritDoc} */ + @Override public int processedCount() { + return processed.size(); + } + + /** {@inheritDoc} */ + @Override public R processedData() { + return null; + } + + /** {@inheritDoc} */ + @Override public boolean tableExistenceRequired() { + return false; + } + + /** + * Processes particular row inside batch operation. + * + * @param row Row to process. + */ + protected void process(Row row) { + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/LoadCacheCustomQueryWorker.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/LoadCacheCustomQueryWorker.java new file mode 100644 index 0000000000000..ab0795bf48108 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/LoadCacheCustomQueryWorker.java @@ -0,0 +1,121 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.session; + +import java.util.concurrent.Callable; +import com.datastax.driver.core.Row; +import com.datastax.driver.core.SimpleStatement; +import com.datastax.driver.core.Statement; +import org.apache.ignite.IgniteException; +import org.apache.ignite.IgniteLogger; +import org.apache.ignite.cache.store.cassandra.persistence.PersistenceController; +import org.apache.ignite.lang.IgniteBiInClosure; + +/** + * Worker for load cache using custom user query. + * + * @param Key type. + * @param Value type. + */ +public class LoadCacheCustomQueryWorker implements Callable { + /** Cassandra session to execute CQL query */ + private final CassandraSession ses; + + /** Statement. */ + private final Statement stmt; + + /** Persistence controller */ + private final PersistenceController ctrl; + + /** Logger */ + private final IgniteLogger log; + + /** Closure for loaded values. */ + private final IgniteBiInClosure clo; + + /** + * @param ses Session. + * @param qry Query. + * @param ctrl Control. + * @param log Logger. + * @param clo Closure for loaded values. + */ + public LoadCacheCustomQueryWorker(CassandraSession ses, String qry, PersistenceController ctrl, + IgniteLogger log, IgniteBiInClosure clo) { + this(ses, new SimpleStatement(qry.trim().endsWith(";") ? qry : qry + ';'), ctrl, log, clo); + } + + /** + * @param ses Session. + * @param stmt Statement. + * @param ctrl Control. + * @param log Logger. + * @param clo Closure for loaded values. 
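The GenericBatchExecutionAssistant shown above already tracks processed sequence numbers, so a concrete batch operation only has to supply the table, the CQL statement, the per-object bind and the persistence settings. A hypothetical subclass sketch; the class name and operation name are illustrative, the type parameters follow the upstream <R, V> declaration, and the remaining BatchExecutionAssistant methods are assumed to have the signatures implied by their use in CassandraSessionImpl:

    import com.datastax.driver.core.BoundStatement;
    import com.datastax.driver.core.PreparedStatement;
    import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings;
    import org.apache.ignite.cache.store.cassandra.persistence.PersistenceController;
    import org.apache.ignite.cache.store.cassandra.session.GenericBatchExecutionAssistant;

    /** Hypothetical assistant: deletes a batch of keys and ignores the returned rows. */
    class BulkDeleteAssistant extends GenericBatchExecutionAssistant<Void, Object> {
        private final String table;
        private final PersistenceController ctrl;

        BulkDeleteAssistant(String table, PersistenceController ctrl) {
            this.table = table;
            this.ctrl = ctrl;
        }

        @Override public String getTable() {
            return table;
        }

        @Override public String getStatement() {
            return ctrl.getDeleteStatement(table);
        }

        @Override public BoundStatement bindStatement(PreparedStatement statement, Object key) {
            return ctrl.bindKey(statement, key);
        }

        @Override public KeyValuePersistenceSettings getPersistenceSettings() {
            return ctrl.getPersistenceSettings();
        }

        @Override public String operationName() {
            return "BULK_DELETE";
        }
    }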
+ */ + public LoadCacheCustomQueryWorker(CassandraSession ses, Statement stmt, PersistenceController ctrl, + IgniteLogger log, IgniteBiInClosure clo) { + this.ses = ses; + this.stmt = stmt; + this.ctrl = ctrl; + this.log = log; + this.clo = clo; + } + + /** {@inheritDoc} */ + @Override public Void call() throws Exception { + ses.execute(new BatchLoaderAssistant() { + /** {@inheritDoc} */ + @Override public String operationName() { + return "loadCache"; + } + + /** {@inheritDoc} */ + @Override public Statement getStatement() { + return stmt; + } + + /** {@inheritDoc} */ + @Override public void process(Row row) { + K key; + V val; + + try { + key = (K)ctrl.buildKeyObject(row); + } + catch (Throwable e) { + log.error("Failed to build Ignite key object from provided Cassandra row", e); + + throw new IgniteException("Failed to build Ignite key object from provided Cassandra row", e); + } + + try { + val = (V)ctrl.buildValueObject(row); + } + catch (Throwable e) { + log.error("Failed to build Ignite value object from provided Cassandra row", e); + + throw new IgniteException("Failed to build Ignite value object from provided Cassandra row", e); + } + + clo.apply(key, val); + } + }); + + return null; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/WrappedPreparedStatement.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/WrappedPreparedStatement.java new file mode 100644 index 0000000000000..46d5306311066 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/WrappedPreparedStatement.java @@ -0,0 +1,180 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.session; + +import java.nio.ByteBuffer; +import java.util.Map; +import com.datastax.driver.core.BoundStatement; +import com.datastax.driver.core.CodecRegistry; +import com.datastax.driver.core.ColumnDefinitions; +import com.datastax.driver.core.ConsistencyLevel; +import com.datastax.driver.core.PreparedId; +import com.datastax.driver.core.PreparedStatement; +import com.datastax.driver.core.policies.RetryPolicy; + +/** + * Simple wrapper providing access to Cassandra prepared statement and generation of Cassandra + * session which was used to create this statement + */ +public class WrappedPreparedStatement implements PreparedStatement { + /** Prepared statement. **/ + private final PreparedStatement st; + + /** Generation of Cassandra session which was used to prepare this statement. **/ + final long generation; + + /** + * Constructor. + * + * @param st Prepared statement. 
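LoadCacheCustomQueryWorker above is a Callable, so several custom load queries can populate the cache in parallel by submitting one worker per query to a thread pool, which is presumably how CassandraCacheStore.loadCache drives it. A usage sketch under that assumption, with the session, controller, logger and closure supplied by the caller and an illustrative pool size:

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import org.apache.ignite.IgniteLogger;
    import org.apache.ignite.cache.store.cassandra.persistence.PersistenceController;
    import org.apache.ignite.cache.store.cassandra.session.CassandraSession;
    import org.apache.ignite.cache.store.cassandra.session.LoadCacheCustomQueryWorker;
    import org.apache.ignite.lang.IgniteBiInClosure;

    /** Usage sketch: run several custom load queries in parallel. */
    class LoadCacheSketch {
        static <K, V> void load(CassandraSession ses, PersistenceController ctrl, IgniteLogger log,
            IgniteBiInClosure<K, V> clo, Collection<String> loadQueries) throws Exception {
            ExecutorService pool = Executors.newFixedThreadPool(4); // Pool size is illustrative.

            try {
                List<Future<?>> futs = new ArrayList<>();

                for (String qry : loadQueries)
                    futs.add(pool.submit(new LoadCacheCustomQueryWorker<K, V>(ses, qry, ctrl, log, clo)));

                for (Future<?> fut : futs)
                    fut.get(); // Propagate any load failure to the caller.
            }
            finally {
                pool.shutdown();
            }
        }
    }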
+ * @param generation Generation of Cassandra session used to prepare this statement. + */ + WrappedPreparedStatement(PreparedStatement st, long generation) { + this.st = st; + this.generation = generation; + } + + /** + * Getter for wrapped statement. + * + * @return Wrapped original statement. + */ + public PreparedStatement getWrappedStatement() { + return st; + } + + /** {@inheritDoc} */ + @Override public ColumnDefinitions getVariables() { + return st.getVariables(); + } + + /** {@inheritDoc} */ + @Override public BoundStatement bind(Object... values) { + return st.bind(values); + } + + /** {@inheritDoc} */ + @Override public BoundStatement bind() { + return st.bind(); + } + + /** {@inheritDoc} */ + @Override public PreparedStatement setRoutingKey(ByteBuffer routingKey) { + return st.setRoutingKey(routingKey); + } + + /** {@inheritDoc} */ + @Override public PreparedStatement setRoutingKey(ByteBuffer... routingKeyComponents) { + return st.setRoutingKey(routingKeyComponents); + } + + /** {@inheritDoc} */ + @Override public ByteBuffer getRoutingKey() { + return st.getRoutingKey(); + } + + /** {@inheritDoc} */ + @Override public PreparedStatement setConsistencyLevel(ConsistencyLevel consistency) { + return st.setConsistencyLevel(consistency); + } + + /** {@inheritDoc} */ + @Override public ConsistencyLevel getConsistencyLevel() { + return st.getConsistencyLevel(); + } + + /** {@inheritDoc} */ + @Override public PreparedStatement setSerialConsistencyLevel(ConsistencyLevel serialConsistency) { + return st.setSerialConsistencyLevel(serialConsistency); + } + + /** {@inheritDoc} */ + @Override public ConsistencyLevel getSerialConsistencyLevel() { + return st.getSerialConsistencyLevel(); + } + + /** {@inheritDoc} */ + @Override public String getQueryString() { + return st.getQueryString(); + } + + /** {@inheritDoc} */ + @Override public String getQueryKeyspace() { + return st.getQueryKeyspace(); + } + + /** {@inheritDoc} */ + @Override public PreparedStatement enableTracing() { + return st.enableTracing(); + } + + /** {@inheritDoc} */ + @Override public PreparedStatement disableTracing() { + return st.disableTracing(); + } + + /** {@inheritDoc} */ + @Override public boolean isTracing() { + return st.isTracing(); + } + + /** {@inheritDoc} */ + @Override public PreparedStatement setRetryPolicy(RetryPolicy policy) { + return st.setRetryPolicy(policy); + } + + /** {@inheritDoc} */ + @Override public RetryPolicy getRetryPolicy() { + return st.getRetryPolicy(); + } + + /** {@inheritDoc} */ + @Override public PreparedId getPreparedId() { + return st.getPreparedId(); + } + + /** {@inheritDoc} */ + @Override public Map getIncomingPayload() { + return st.getIncomingPayload(); + } + + /** {@inheritDoc} */ + @Override public Map getOutgoingPayload() { + return st.getOutgoingPayload(); + } + + /** {@inheritDoc} */ + @Override public PreparedStatement setOutgoingPayload(Map payload) { + return st.setOutgoingPayload(payload); + } + + /** {@inheritDoc} */ + @Override public CodecRegistry getCodecRegistry() { + return st.getCodecRegistry(); + } + + /** {@inheritDoc} */ + @Override public PreparedStatement setIdempotent(Boolean idempotent) { + return st.setIdempotent(idempotent); + } + + /** {@inheritDoc} */ + @Override public Boolean isIdempotent() { + return st.isIdempotent(); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/WrappedSession.java 
b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/WrappedSession.java new file mode 100644 index 0000000000000..d9b722435aaf6 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/WrappedSession.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.session; + +import com.datastax.driver.core.ResultSet; +import com.datastax.driver.core.ResultSetFuture; +import com.datastax.driver.core.Session; +import com.datastax.driver.core.Statement; +import com.datastax.driver.core.exceptions.NoHostAvailableException; + +/** + * Simple container for Cassandra session and its generation number. + */ +public class WrappedSession { + /** Cassandra driver session. **/ + final Session ses; + + /** Cassandra session generation number. **/ + final long generation; + + /** + * Constructor. + * + * @param ses Cassandra session. + * @param generation Cassandra session generation number. + */ + WrappedSession(Session ses, long generation) { + this.ses = ses; + this.generation = generation; + } + + /** + * Prepares the provided query string. + * + * @param query the CQL query string to prepare + * @return the prepared statement corresponding to {@code query}. + * @throws NoHostAvailableException if no host in the cluster can be + * contacted successfully to prepare this query. + */ + WrappedPreparedStatement prepare(String query) { + return new WrappedPreparedStatement(ses.prepare(query), generation); + } + + /** + * Executes the provided query. + * + * @param statement The CQL query to execute (that can be any {@link Statement}). + * + * @return The result of the query. That result will never be null but can be empty. + */ + ResultSet execute(Statement statement) { + return ses.execute(statement); + } + + /** + * Executes the provided query. + * + * @param query The CQL query string to execute. + * + * @return The result of the query. That result will never be null but can be empty. + */ + ResultSet execute(String query) { + return ses.execute(query); + } + + /** + * Executes the provided query asynchronously. + * + * @param statement the CQL query to execute (that can be any {@code Statement}). + * + * @return a future on the result of the query.
+ */ + ResultSetFuture executeAsync(Statement statement) { + return ses.executeAsync(statement); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/package-info.java new file mode 100644 index 0000000000000..9c8b917babce7 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains classes responsible for handling sessions and communication with Cassandra + */ + +package org.apache.ignite.cache.store.cassandra.session; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/pool/IdleSession.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/pool/IdleSession.java new file mode 100644 index 0000000000000..0faf4d3a9981f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/pool/IdleSession.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.session.pool; + +import com.datastax.driver.core.Session; +import org.apache.ignite.cache.store.cassandra.common.CassandraHelper; + +/** + * Simple wrapper for idle Cassandra session returned to pool, responsible for monitoring session expiration and its closing. + */ +public class IdleSession { + /** Cassandra driver session. */ + private Session ses; + + /** Expiration timeout. */ + private long expirationTimeout; + + /** Wrapper creation time. */ + private long time; + + /** + * Creates instance of Cassandra driver session wrapper. + * + * @param ses Cassandra driver session. 
+ * @param expirationTimeout Session expiration timeout. + */ + public IdleSession(Session ses, long expirationTimeout) { + this.ses = ses; + this.expirationTimeout = expirationTimeout; + this.time = System.currentTimeMillis(); + } + + /** + * Checks if Cassandra driver session expired. + * + * @return true if session expired. + */ + public boolean expired() { + return expirationTimeout > 0 && System.currentTimeMillis() - time > expirationTimeout; + } + + /** + * Returns wrapped Cassandra driver session. + * + * @return Cassandra driver session. + */ + public Session driverSession() { + return ses; + } + + /** + * Closes wrapped Cassandra driver session + */ + public void release() { + CassandraHelper.closeSession(ses); + ses = null; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/pool/SessionPool.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/pool/SessionPool.java new file mode 100644 index 0000000000000..3fd48017036ba --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/pool/SessionPool.java @@ -0,0 +1,174 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.session.pool; + +import java.lang.Thread.State; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import com.datastax.driver.core.Session; +import org.apache.ignite.cache.store.cassandra.session.CassandraSessionImpl; + +/** + * Cassandra driver sessions pool. + */ +public class SessionPool { + /** + * Monitors session pool and closes unused session. + */ + private static class SessionMonitor extends Thread { + /** {@inheritDoc} */ + @Override public void run() { + try { + while (true) { + try { + Thread.sleep(SLEEP_TIMEOUT); + } + catch (InterruptedException ignored) { + return; + } + + List> expiredSessions = new LinkedList<>(); + + int sessionsCnt; + + synchronized (sessions) { + sessionsCnt = sessions.size(); + + for (Map.Entry entry : sessions.entrySet()) { + if (entry.getValue().expired()) + expiredSessions.add(entry); + } + + for (Map.Entry entry : expiredSessions) + sessions.remove(entry.getKey()); + } + + for (Map.Entry entry : expiredSessions) + entry.getValue().release(); + + // all sessions in the pool expired, thus we don't need additional thread to manage sessions in the pool + if (sessionsCnt == expiredSessions.size()) + return; + } + } + finally { + release(); + } + } + } + + /** Sessions monitor sleep timeout. */ + private static final long SLEEP_TIMEOUT = 60000; // 1 minute. 
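The monitor above relies on a simple contract of IdleSession: expired() turns true once the wrapper has sat idle in the pool longer than its expiration timeout, after which release() closes the underlying driver session. A minimal sketch of that contract, assuming a standalone Cluster instance purely for illustration:

    import com.datastax.driver.core.Cluster;
    import com.datastax.driver.core.Session;
    import org.apache.ignite.cache.store.cassandra.session.pool.IdleSession;

    /** Illustration of the IdleSession expiration contract used by SessionMonitor. */
    class IdleSessionSketch {
        static void demo(Cluster cluster) {
            Session driverSes = cluster.connect();

            // With a 60_000 ms timeout, expired() stays false until the wrapper is idle for over a minute.
            IdleSession idle = new IdleSession(driverSes, 60_000L);

            if (idle.expired())
                idle.release(); // Closes the wrapped driver session, as SessionMonitor does for expired entries.
        }
    }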
+ + /** Sessions which were returned to pool. */ + private static final Map sessions = new HashMap<>(); + + /** Singleton instance. */ + private static SessionMonitor monitorSingleton; + + static { + Runtime.getRuntime().addShutdownHook(new Thread() { + @Override public void run() { + release(); + } + }); + } + + /** + * Returns Cassandra driver session to sessions pool. + * + * @param cassandraSes Session wrapper. + * @param driverSes Driver session. + * @param expirationTimeout Expiration timeout. + */ + public static void put(CassandraSessionImpl cassandraSes, Session driverSes, long expirationTimeout) { + if (cassandraSes == null || driverSes == null) + return; + + IdleSession old; + + synchronized (sessions) { + old = sessions.put(cassandraSes, new IdleSession(driverSes, expirationTimeout)); + + if (monitorSingleton == null || State.TERMINATED.equals(monitorSingleton.getState())) { + monitorSingleton = new SessionMonitor(); + monitorSingleton.setDaemon(true); + monitorSingleton.setName("Cassandra-sessions-pool"); + monitorSingleton.start(); + } + } + + if (old != null) + old.release(); + } + + /** + * Extracts Cassandra driver session from pool. + * + * @param cassandraSes Session wrapper. + * @return Cassandra driver session. + */ + public static Session get(CassandraSessionImpl cassandraSes) { + if (cassandraSes == null) + return null; + + IdleSession wrapper; + + synchronized (sessions) { + wrapper = sessions.remove(cassandraSes); + } + + return wrapper == null ? null : wrapper.driverSession(); + } + + /** + * Releases all session from pool and closes all their connections to Cassandra database. + */ + public static void release() { + Collection wrappers; + + synchronized (sessions) { + try { + if (sessions.isEmpty()) + return; + + wrappers = new LinkedList<>(); + + for (IdleSession wrapper : sessions.values()) + wrappers.add(wrapper); + + sessions.clear(); + } + finally { + if (!(Thread.currentThread() instanceof SessionMonitor) && monitorSingleton != null) { + try { + monitorSingleton.interrupt(); + } + catch (Throwable ignored) { + } + } + } + } + + for (IdleSession wrapper : wrappers) + wrapper.release(); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/pool/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/pool/package-info.java new file mode 100644 index 0000000000000..4460793551f9f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/pool/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/** + * Contains session pool implementation for Cassandra sessions + */ + +package org.apache.ignite.cache.store.cassandra.session.pool; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/BaseMutation.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/BaseMutation.java new file mode 100644 index 0000000000000..2625e87619ea2 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/BaseMutation.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.session.transaction; + +import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings; +import org.apache.ignite.cache.store.cassandra.persistence.PersistenceController; + +/** + * Base class to inherit from to implement specific mutation operations. + */ +public abstract class BaseMutation implements Mutation { + /** Cassandra table to use. */ + private final String table; + + /** Persistence controller to be utilized for mutation. */ + private final PersistenceController ctrl; + + /** + * Creates instance of mutation operation. + * + * @param table Cassandra table which should be used for the mutation. + * @param ctrl Persistence controller to use.
+ */ + public BaseMutation(String table, PersistenceController ctrl) { + if (table == null || table.trim().isEmpty()) + throw new IllegalArgumentException("Table name should be specified"); + + if (ctrl == null) + throw new IllegalArgumentException("Persistence controller should be specified"); + + this.table = table; + this.ctrl = ctrl; + } + + /** {@inheritDoc} */ + @Override public String getTable() { + return table; + } + + /** {@inheritDoc} */ + @Override public KeyValuePersistenceSettings getPersistenceSettings() { + return ctrl.getPersistenceSettings(); + } + + /** + * Service method to get persistence controller instance + * + * @return Persistence controller to use for the mutation + */ + protected PersistenceController controller() { + return ctrl; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/DeleteMutation.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/DeleteMutation.java new file mode 100644 index 0000000000000..79c0bfe08186e --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/DeleteMutation.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.session.transaction; + +import com.datastax.driver.core.BoundStatement; +import com.datastax.driver.core.PreparedStatement; +import org.apache.ignite.cache.store.cassandra.persistence.PersistenceController; + +/** + * Mutation which deletes object from Cassandra. + */ +public class DeleteMutation extends BaseMutation { + /** Ignite cache key of the object which should be deleted. */ + private final Object key; + + /** + * Creates instance of delete mutation operation. + * + * @param key Ignite cache key of the object which should be deleted. + * @param table Cassandra table which should be used for the mutation. + * @param ctrl Persistence controller to use. 
+ */ + public DeleteMutation(Object key, String table, PersistenceController ctrl) { + super(table, ctrl); + this.key = key; + } + + /** {@inheritDoc} */ + @Override public boolean tableExistenceRequired() { + return false; + } + + /** {@inheritDoc} */ + @Override public String getStatement() { + return controller().getDeleteStatement(getTable()); + } + + /** {@inheritDoc} */ + @Override public BoundStatement bindStatement(PreparedStatement statement) { + return controller().bindKey(statement, key); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/Mutation.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/Mutation.java new file mode 100644 index 0000000000000..f3fb35480c8a1 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/Mutation.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.session.transaction; + +import com.datastax.driver.core.BoundStatement; +import com.datastax.driver.core.PreparedStatement; +import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings; + +/** + * Provides information about a particular mutation operation performed within a transaction. + */ +public interface Mutation { + /** + * Cassandra table to use for an operation. + * + * @return Table name. + */ + public String getTable(); + + /** + * Indicates whether Cassandra table existence is required for this operation. + * + * @return {@code true} if table existence is required. + */ + public boolean tableExistenceRequired(); + + /** + * Returns Ignite cache key/value persistence settings. + * + * @return Persistence settings. + */ + public KeyValuePersistenceSettings getPersistenceSettings(); + + /** + * Returns the unbound CQL statement to be executed. + * + * @return Unbound CQL statement. + */ + public String getStatement(); + + /** + * Binds prepared statement to current Cassandra session. + * + * @param statement Statement. + * @return Bound statement.
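+ *
+ * <p>Illustrative only (not part of the patch): a caller is assumed to prepare the CQL returned by
+ * {@link #getStatement()} with a {@code com.datastax.driver.core.Session} and then bind and execute it:
+ * <pre>
+ * PreparedStatement ps = session.prepare(mutation.getStatement());
+ * BoundStatement bound = mutation.bindStatement(ps);
+ * session.execute(bound);
+ * </pre>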
+ */ + public BoundStatement bindStatement(PreparedStatement statement); +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/WriteMutation.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/WriteMutation.java new file mode 100644 index 0000000000000..22ecf2a78b53d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/WriteMutation.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.session.transaction; + +import javax.cache.Cache; +import com.datastax.driver.core.BoundStatement; +import com.datastax.driver.core.PreparedStatement; +import org.apache.ignite.cache.store.cassandra.persistence.PersistenceController; + +/** + * Mutation which writes (inserts) an object into Cassandra. + */ +public class WriteMutation extends BaseMutation { + /** Ignite cache entry to be inserted into Cassandra. */ + private final Cache.Entry entry; + + /** + * Creates instance of write mutation operation. + * + * @param entry Ignite cache entry to be inserted into Cassandra. + * @param table Cassandra table which should be used for the mutation. + * @param ctrl Persistence controller to use. + */ + public WriteMutation(Cache.Entry entry, String table, PersistenceController ctrl) { + super(table, ctrl); + this.entry = entry; + } + + /** {@inheritDoc} */ + @Override public boolean tableExistenceRequired() { + return true; + } + + /** {@inheritDoc} */ + @Override public String getStatement() { + return controller().getWriteStatement(getTable()); + } + + /** {@inheritDoc} */ + @Override public BoundStatement bindStatement(PreparedStatement statement) { + return controller().bindKeyValue(statement, entry.getKey(), entry.getValue()); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/package-info.java new file mode 100644 index 0000000000000..e4d437716768a --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/session/transaction/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains mutations implementation, to store changes made inside Ignite transaction + */ + +package org.apache.ignite.cache.store.cassandra.session.transaction; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/utils/DDLGenerator.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/utils/DDLGenerator.java new file mode 100644 index 0000000000000..569c65de5eb71 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/utils/DDLGenerator.java @@ -0,0 +1,83 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.cassandra.utils; + +import java.io.File; +import java.util.List; + +import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings; + +/** + * Generates Cassandra DDL statements from persistence descriptor xml file. + */ +public class DDLGenerator { + /** + * DDLGenerator entry point. + * + * @param args Arguments for DDLGenerator. + */ + public static void main(String[] args) { + if (args == null || args.length == 0) + return; + + boolean success = true; + + for (String arg : args) { + File file = new File(arg); + if (!file.isFile()) { + success = false; + System.out.println("-------------------------------------------------------------"); + System.out.println("Incorrect file specified: " + arg); + System.out.println("-------------------------------------------------------------"); + continue; + } + + try { + KeyValuePersistenceSettings settings = new KeyValuePersistenceSettings(file); + String table = settings.getTable() != null ? 
settings.getTable() : "my_table"; + + System.out.println("-------------------------------------------------------------"); + System.out.println("DDL for keyspace/table from file: " + arg); + System.out.println("-------------------------------------------------------------"); + System.out.println(); + System.out.println(settings.getKeyspaceDDLStatement()); + System.out.println(); + System.out.println(settings.getTableDDLStatement(table)); + System.out.println(); + + List statements = settings.getIndexDDLStatements(table); + if (statements != null && !statements.isEmpty()) { + for (String st : statements) { + System.out.println(st); + System.out.println(); + } + } + } + catch (Throwable e) { + success = false; + System.out.println("-------------------------------------------------------------"); + System.out.println("Invalid file specified: " + arg); + System.out.println("-------------------------------------------------------------"); + e.printStackTrace(); + } + } + + if (!success) + throw new RuntimeException("Failed to process some of the specified files"); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/utils/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/utils/package-info.java new file mode 100644 index 0000000000000..3a2cd108f782c --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/main/java/org/apache/ignite/cache/store/cassandra/utils/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains utility classes + */ + +package org.apache.ignite.cache.store.cassandra.utils; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/README.txt b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/README.txt new file mode 100644 index 0000000000000..a61b235132fd2 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/README.txt @@ -0,0 +1,13 @@ +Shell scripts to spin up Ignite, Cassandra and Load tests clusters in AWS. 
+ +1) cassandra - bootstrap scripts for Cassandra cluster nodes +2) ganglia - bootstrap scripts for Ganglia master and agents +3) ignite - bootstrap scripts for Ignite cluster nodes +4) tests - bootstrap scripts for Load Tests cluster nodes +5) common.sh - definitions for common functions +6) env.sh - definitions for common variables +7) log-collector.sh - log collector daemon script, to collect logs and upload them to S3 + +For more details please look at the documentation: + + https://apacheignite.readme.io/docs/aws-infrastructure-deployment \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-bootstrap.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-bootstrap.sh new file mode 100644 index 0000000000000..017b1b13b1eea --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-bootstrap.sh @@ -0,0 +1,336 @@ +#!/bin/sh + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# ----------------------------------------------------------------------------------------------- +# Bootstrap script to spin up Cassandra cluster +# ----------------------------------------------------------------------------------------------- + +# URL to download AWS CLI tools +AWS_CLI_DOWNLOAD_URL=https://s3.amazonaws.com/aws-cli/awscli-bundle.zip + +# URL to download JDK +JDK_DOWNLOAD_URL=http://download.oracle.com/otn-pub/java/jdk/8u77-b03/jdk-8u77-linux-x64.tar.gz + +# URL to download Ignite-Cassandra tests package - you should previously package and upload it to this place +TESTS_PACKAGE_DONLOAD_URL=s3:////ignite-cassandra-tests-.zip + +# Terminates script execution and upload logs to S3 +terminate() +{ + SUCCESS_URL=$S3_CASSANDRA_BOOTSTRAP_SUCCESS + FAILURE_URL=$S3_CASSANDRA_BOOTSTRAP_FAILURE + + if [ -n "$SUCCESS_URL" ] && [[ "$SUCCESS_URL" != */ ]]; then + SUCCESS_URL=${SUCCESS_URL}/ + fi + + if [ -n "$FAILURE_URL" ] && [[ "$FAILURE_URL" != */ ]]; then + FAILURE_URL=${FAILURE_URL}/ + fi + + host_name=$(hostname -f | tr '[:upper:]' '[:lower:]') + msg=$host_name + + if [ -n "$1" ]; then + echo "[ERROR] $1" + echo "[ERROR]-----------------------------------------------------" + echo "[ERROR] Cassandra node bootstrap failed" + echo "[ERROR]-----------------------------------------------------" + msg=$1 + + if [ -z "$FAILURE_URL" ]; then + exit 1 + fi + + reportFolder=${FAILURE_URL}${host_name} + reportFile=$reportFolder/__error__ + else + echo "[INFO]-----------------------------------------------------" + echo "[INFO] Cassandra node bootstrap successfully completed" + echo "[INFO]-----------------------------------------------------" + + if [ -z "$SUCCESS_URL" ]; then + exit 0 + fi + + reportFolder=${SUCCESS_URL}${host_name} + reportFile=$reportFolder/__success__ + fi + + echo $msg > /opt/bootstrap-result + + aws s3 rm --recursive $reportFolder + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to drop report folder: $reportFolder" + fi + + aws s3 cp --sse AES256 /opt/bootstrap-result $reportFile + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to report bootstrap result to: $reportFile" + fi + + rm -f /opt/bootstrap-result + + if [ -n "$1" ]; then + exit 1 + fi + + exit 0 +} + +# Downloads specified package +downloadPackage() +{ + echo "[INFO] Downloading $3 package from $1 into $2" + + for i in 0 9; + do + if [[ "$1" == s3* ]]; then + aws s3 cp $1 $2 + code=$? + else + curl "$1" -o "$2" + code=$? + fi + + if [ $code -eq 0 ]; then + echo "[INFO] $3 package successfully downloaded from $1 into $2" + return 0 + fi + + echo "[WARN] Failed to download $3 package from $i attempt, sleeping extra 5sec" + sleep 5s + done + + terminate "All 10 attempts to download $3 package from $1 are failed" +} + +# Downloads and setup JDK +setupJava() +{ + rm -Rf /opt/java /opt/jdk.tar.gz + + echo "[INFO] Downloading 'jdk'" + wget --no-cookies --no-check-certificate --header "Cookie: gpw_e24=http%3A%2F%2Fwww.oracle.com%2F; oraclelicense=accept-securebackup-cookie" "$JDK_DOWNLOAD_URL" -O /opt/jdk.tar.gz + if [ $? -ne 0 ]; then + terminate "Failed to download 'jdk'" + fi + + echo "[INFO] Untaring 'jdk'" + tar -xvzf /opt/jdk.tar.gz -C /opt + if [ $? -ne 0 ]; then + terminate "Failed to untar 'jdk'" + fi + + rm -Rf /opt/jdk.tar.gz + + unzipDir=$(ls /opt | grep "jdk") + if [ "$unzipDir" != "java" ]; then + mv /opt/$unzipDir /opt/java + fi +} + +# Downloads and setup AWS CLI +setupAWSCLI() +{ + echo "[INFO] Installing 'awscli'" + pip install --upgrade awscli + if [ $? 
-eq 0 ]; then + return 0 + fi + + echo "[ERROR] Failed to install 'awscli' using pip" + echo "[INFO] Trying to install awscli using zip archive" + echo "[INFO] Downloading awscli zip" + + downloadPackage "$AWS_CLI_DOWNLOAD_URL" "/opt/awscli-bundle.zip" "awscli" + + echo "[INFO] Unzipping awscli zip" + unzip /opt/awscli-bundle.zip -d /opt + if [ $? -ne 0 ]; then + terminate "Failed to unzip awscli zip" + fi + + rm -Rf /opt/awscli-bundle.zip + + echo "[INFO] Installing awscli" + /opt/awscli-bundle/install -i /usr/local/aws -b /usr/local/bin/aws + if [ $? -ne 0 ]; then + terminate "Failed to install awscli" + fi + + echo "[INFO] Successfully installed awscli from zip archive" +} + +# Setup all the pre-requisites (packages, settings and etc.) +setupPreRequisites() +{ + echo "[INFO] Installing 'wget' package" + yum -y install wget + if [ $? -ne 0 ]; then + terminate "Failed to install 'wget' package" + fi + + echo "[INFO] Installing 'net-tools' package" + yum -y install net-tools + if [ $? -ne 0 ]; then + terminate "Failed to install 'net-tools' package" + fi + + echo "[INFO] Installing 'python' package" + yum -y install python + if [ $? -ne 0 ]; then + terminate "Failed to install 'python' package" + fi + + echo "[INFO] Installing 'unzip' package" + yum -y install unzip + if [ $? -ne 0 ]; then + terminate "Failed to install 'unzip' package" + fi + + downloadPackage "https://bootstrap.pypa.io/get-pip.py" "/opt/get-pip.py" "get-pip.py" + + echo "[INFO] Installing 'pip'" + python /opt/get-pip.py + if [ $? -ne 0 ]; then + terminate "Failed to install 'pip'" + fi +} + +# Downloads and setup tests package +setupTestsPackage() +{ + downloadPackage "$TESTS_PACKAGE_DONLOAD_URL" "/opt/ignite-cassandra-tests.zip" "Tests" + + rm -Rf /opt/ignite-cassandra-tests + + unzip /opt/ignite-cassandra-tests.zip -d /opt + if [ $? -ne 0 ]; then + terminate "Failed to unzip tests package" + fi + + rm -f /opt/ignite-cassandra-tests.zip + + unzipDir=$(ls /opt | grep "ignite-cassandra") + if [ "$unzipDir" != "ignite-cassandra-tests" ]; then + mv /opt/$unzipDir /opt/ignite-cassandra-tests + fi + + find /opt/ignite-cassandra-tests -type f -name "*.sh" -exec chmod ug+x {} \; + + . /opt/ignite-cassandra-tests/bootstrap/aws/common.sh "cassandra" + + setupNTP + + echo "[INFO] Starting logs collector daemon" + + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + /opt/ignite-cassandra-tests/bootstrap/aws/logs-collector.sh "$S3_LOGS_TRIGGER" "$S3_CASSANDRA_LOGS/$HOST_NAME" "/opt/cassandra/logs" "/opt/cassandra/cassandra-start.log" > /opt/logs-collector.log & + + echo "[INFO] Logs collector daemon started: $!" + + echo "----------------------------------------------------------------------------------------" + printInstanceInfo + echo "----------------------------------------------------------------------------------------" + tagInstance + bootstrapGangliaAgent "cassandra" 8641 +} + +# Downloads Cassandra package +downloadCassandra() +{ + downloadPackage "$CASSANDRA_DOWNLOAD_URL" "/opt/apache-cassandra.tar.gz" "Cassandra" + + rm -Rf /opt/cassandra + + echo "[INFO] Untaring Cassandra package" + tar -xvzf /opt/apache-cassandra.tar.gz -C /opt + if [ $? 
-ne 0 ]; then + terminate "Failed to untar Cassandra package" + fi + + rm -f /opt/apache-cassandra.tar.gz + + unzipDir=$(ls /opt | grep "cassandra" | grep "apache") + if [ "$unzipDir" != "cassandra" ]; then + mv /opt/$unzipDir /opt/cassandra + fi +} + +# Setups Cassandra +setupCassandra() +{ + echo "[INFO] Creating 'cassandra' group" + exists=$(cat /etc/group | grep cassandra) + if [ -z "$exists" ]; then + groupadd cassandra + if [ $? -ne 0 ]; then + terminate "Failed to create 'cassandra' group" + fi + fi + + echo "[INFO] Creating 'cassandra' user" + exists=$(cat /etc/passwd | grep cassandra) + if [ -z "$exists" ]; then + useradd -g cassandra cassandra + if [ $? -ne 0 ]; then + terminate "Failed to create 'cassandra' user" + fi + fi + + rm -f /opt/cassandra/conf/cassandra-env.sh /opt/cassandra/conf/cassandra-template.yaml + + cp /opt/ignite-cassandra-tests/bootstrap/aws/cassandra/cassandra-env.sh /opt/cassandra/conf + cp /opt/ignite-cassandra-tests/bootstrap/aws/cassandra/cassandra-template.yaml /opt/cassandra/conf + + chown -R cassandra:cassandra /opt/cassandra /opt/ignite-cassandra-tests + + createCassandraStorageLayout + + cat /opt/cassandra/conf/cassandra-template.yaml | sed -r "s/\\\$\{CASSANDRA_DATA_DIR\}/$CASSANDRA_DATA_DIR/g" > /opt/cassandra/conf/cassandra-template-1.yaml + cat /opt/cassandra/conf/cassandra-template-1.yaml | sed -r "s/\\\$\{CASSANDRA_COMMITLOG_DIR\}/$CASSANDRA_COMMITLOG_DIR/g" > /opt/cassandra/conf/cassandra-template-2.yaml + cat /opt/cassandra/conf/cassandra-template-2.yaml | sed -r "s/\\\$\{CASSANDRA_CACHES_DIR\}/$CASSANDRA_CACHES_DIR/g" > /opt/cassandra/conf/cassandra-template-3.yaml + + rm -f /opt/cassandra/conf/cassandra-template.yaml /opt/cassandra/conf/cassandra-template-1.yaml /opt/cassandra/conf/cassandra-template-2.yaml + mv /opt/cassandra/conf/cassandra-template-3.yaml /opt/cassandra/conf/cassandra-template.yaml + + echo "export JAVA_HOME=/opt/java" >> $1 + echo "export CASSANDRA_HOME=/opt/cassandra" >> $1 + echo "export PATH=\$JAVA_HOME/bin:\$CASSANDRA_HOME/bin:\$PATH" >> $1 +} + +################################################################################################################### + +echo "[INFO]-----------------------------------------------------------------" +echo "[INFO] Bootstrapping Cassandra node" +echo "[INFO]-----------------------------------------------------------------" + +setupPreRequisites +setupJava +setupAWSCLI +setupTestsPackage +downloadCassandra +setupCassandra "/root/.bash_profile" + +cmd="/opt/ignite-cassandra-tests/bootstrap/aws/cassandra/cassandra-start.sh" + +#sudo -u cassandra -g cassandra sh -c "$cmd | tee /opt/cassandra/cassandra-start.log" + +$cmd | tee /opt/cassandra/cassandra-start.log \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-env.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-env.sh new file mode 100644 index 0000000000000..ba764018bfe0a --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-env.sh @@ -0,0 +1,287 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# ----------------------------------------------------------------------------------------------- +# Environment setup script from Cassandra distribution +# ----------------------------------------------------------------------------------------------- + +calculate_heap_sizes() +{ + case "`uname`" in + Linux) + system_memory_in_mb=`free -m | awk '/:/ {print $2;exit}'` + system_cpu_cores=`egrep -c 'processor([[:space:]]+):.*' /proc/cpuinfo` + ;; + FreeBSD) + system_memory_in_bytes=`sysctl hw.physmem | awk '{print $2}'` + system_memory_in_mb=`expr $system_memory_in_bytes / 1024 / 1024` + system_cpu_cores=`sysctl hw.ncpu | awk '{print $2}'` + ;; + SunOS) + system_memory_in_mb=`prtconf | awk '/Memory size:/ {print $3}'` + system_cpu_cores=`psrinfo | wc -l` + ;; + Darwin) + system_memory_in_bytes=`sysctl hw.memsize | awk '{print $2}'` + system_memory_in_mb=`expr $system_memory_in_bytes / 1024 / 1024` + system_cpu_cores=`sysctl hw.ncpu | awk '{print $2}'` + ;; + *) + # assume reasonable defaults for e.g. a modern desktop or + # cheap server + system_memory_in_mb="2048" + system_cpu_cores="2" + ;; + esac + + # some systems like the raspberry pi don't report cores, use at least 1 + if [ "$system_cpu_cores" -lt "1" ] + then + system_cpu_cores="1" + fi + + # set max heap size based on the following + # max(min(1/2 ram, 1024MB), min(1/4 ram, 8GB)) + # calculate 1/2 ram and cap to 1024MB + # calculate 1/4 ram and cap to 8192MB + # pick the max + half_system_memory_in_mb=`expr $system_memory_in_mb / 2` + quarter_system_memory_in_mb=`expr $half_system_memory_in_mb / 2` + if [ "$half_system_memory_in_mb" -gt "1024" ] + then + half_system_memory_in_mb="1024" + fi + if [ "$quarter_system_memory_in_mb" -gt "8192" ] + then + quarter_system_memory_in_mb="8192" + fi + if [ "$half_system_memory_in_mb" -gt "$quarter_system_memory_in_mb" ] + then + max_heap_size_in_mb="$half_system_memory_in_mb" + else + max_heap_size_in_mb="$quarter_system_memory_in_mb" + fi + MAX_HEAP_SIZE="${max_heap_size_in_mb}M" + + # Young gen: min(max_sensible_per_modern_cpu_core * num_cores, 1/4 * heap size) + max_sensible_yg_per_core_in_mb="100" + max_sensible_yg_in_mb=`expr $max_sensible_yg_per_core_in_mb "*" $system_cpu_cores` + + desired_yg_in_mb=`expr $max_heap_size_in_mb / 4` + + if [ "$desired_yg_in_mb" -gt "$max_sensible_yg_in_mb" ] + then + HEAP_NEWSIZE="${max_sensible_yg_in_mb}M" + else + HEAP_NEWSIZE="${desired_yg_in_mb}M" + fi +} + +# Determine the sort of JVM we'll be running on. +java_ver_output=`"${JAVA:-java}" -version 2>&1` +jvmver=`echo "$java_ver_output" | grep '[openjdk|java] version' | awk -F'"' 'NR==1 {print $2}'` +JVM_VERSION=${jvmver%_*} +JVM_PATCH_VERSION=${jvmver#*_} + +if [ "$JVM_VERSION" \< "1.8" ] ; then + echo "Cassandra 3.0 and later require Java 8u40 or later." + exit 1; +fi + +if [ "$JVM_VERSION" \< "1.8" ] && [ "$JVM_PATCH_VERSION" \< "40" ] ; then + echo "Cassandra 3.0 and later require Java 8u40 or later." 
+ exit 1; +fi + +jvm=`echo "$java_ver_output" | grep -A 1 'java version' | awk 'NR==2 {print $1}'` +case "$jvm" in + OpenJDK) + JVM_VENDOR=OpenJDK + # this will be "64-Bit" or "32-Bit" + JVM_ARCH=`echo "$java_ver_output" | awk 'NR==3 {print $2}'` + ;; + "Java(TM)") + JVM_VENDOR=Oracle + # this will be "64-Bit" or "32-Bit" + JVM_ARCH=`echo "$java_ver_output" | awk 'NR==3 {print $3}'` + ;; + *) + # Help fill in other JVM values + JVM_VENDOR=other + JVM_ARCH=unknown + ;; +esac + +# Override these to set the amount of memory to allocate to the JVM at +# start-up. For production use you may wish to adjust this for your +# environment. MAX_HEAP_SIZE is the total amount of memory dedicated +# to the Java heap. HEAP_NEWSIZE refers to the size of the young +# generation. Both MAX_HEAP_SIZE and HEAP_NEWSIZE should be either set +# or not (if you set one, set the other). +# +# The main trade-off for the young generation is that the larger it +# is, the longer GC pause times will be. The shorter it is, the more +# expensive GC will be (usually). +# +# The example HEAP_NEWSIZE assumes a modern 8-core+ machine for decent pause +# times. If in doubt, and if you do not particularly want to tweak, go with +# 100 MB per physical CPU core. + +#MAX_HEAP_SIZE="4G" +#HEAP_NEWSIZE="800M" + +# Set this to control the amount of arenas per-thread in glibc +#export MALLOC_ARENA_MAX=4 + +# only calculate the size if it's not set manually +if [ "x$MAX_HEAP_SIZE" = "x" ] && [ "x$HEAP_NEWSIZE" = "x" ]; then + calculate_heap_sizes +else + if [ "x$MAX_HEAP_SIZE" = "x" ] || [ "x$HEAP_NEWSIZE" = "x" ]; then + echo "please set or unset MAX_HEAP_SIZE and HEAP_NEWSIZE in pairs (see cassandra-env.sh)" + exit 1 + fi +fi + +if [ "x$MALLOC_ARENA_MAX" = "x" ] ; then + export MALLOC_ARENA_MAX=4 +fi + +#GC log path has to be defined here because it needs to access CASSANDRA_HOME +JVM_OPTS="$JVM_OPTS -Xloggc:${CASSANDRA_HOME}/logs/gc.log" + +# Here we create the arguments that will get passed to the jvm when +# starting cassandra. + +# Read user-defined JVM options from jvm.options file +JVM_OPTS_FILE=$CASSANDRA_CONF/jvm.options +for opt in `grep "^-" $JVM_OPTS_FILE` +do + JVM_OPTS="$JVM_OPTS $opt" +done + +# Check what parameters were defined on jvm.options file to avoid conflicts +echo $JVM_OPTS | grep -q Xmn +DEFINED_XMN=$? +echo $JVM_OPTS | grep -q Xmx +DEFINED_XMX=$? +echo $JVM_OPTS | grep -q Xms +DEFINED_XMS=$? +echo $JVM_OPTS | grep -q UseConcMarkSweepGC +USING_CMS=$? + +# We only set -Xms and -Xmx if they were not defined on jvm.options file +# If defined, both Xmx and Xms should be defined together. +if [ $DEFINED_XMX -ne 0 ] && [ $DEFINED_XMS -ne 0 ]; then + JVM_OPTS="$JVM_OPTS -Xms${MAX_HEAP_SIZE}" + JVM_OPTS="$JVM_OPTS -Xmx${MAX_HEAP_SIZE}" +elif [ $DEFINED_XMX -ne 0 ] || [ $DEFINED_XMS -ne 0 ]; then + echo "Please set or unset -Xmx and -Xms flags in pairs on jvm.options file." + exit 1 +fi + +# We only set -Xmn flag if it was not defined in jvm.options file +# and if the CMS GC is being used +# If defined, both Xmn and Xmx should be defined together. +if [ $DEFINED_XMN -eq 0 ] && [ $DEFINED_XMX -ne 0 ]; then + echo "Please set or unset -Xmx and -Xmn flags in pairs on jvm.options file." 
+ exit 1 +elif [ $DEFINED_XMN -ne 0 ] && [ $USING_CMS -eq 0 ]; then + JVM_OPTS="$JVM_OPTS -Xmn${HEAP_NEWSIZE}" +fi + +if [ "$JVM_ARCH" = "64-Bit" ] && [ $USING_CMS -eq 0 ]; then + JVM_OPTS="$JVM_OPTS -XX:+UseCondCardMark" +fi + +# provides hints to the JIT compiler +JVM_OPTS="$JVM_OPTS -XX:CompileCommandFile=$CASSANDRA_CONF/hotspot_compiler" + +# add the jamm javaagent +JVM_OPTS="$JVM_OPTS -javaagent:$CASSANDRA_HOME/lib/jamm-0.3.0.jar" + +# set jvm HeapDumpPath with CASSANDRA_HEAPDUMP_DIR +if [ "x$CASSANDRA_HEAPDUMP_DIR" != "x" ]; then + JVM_OPTS="$JVM_OPTS -XX:HeapDumpPath=$CASSANDRA_HEAPDUMP_DIR/cassandra-`date +%s`-pid$$.hprof" +fi + +# jmx: metrics and administration interface +# +# add this if you're having trouble connecting: +# JVM_OPTS="$JVM_OPTS -Djava.rmi.server.hostname=" +# +# see +# https://blogs.oracle.com/jmxetc/entry/troubleshooting_connection_problems_in_jconsole +# for more on configuring JMX through firewalls, etc. (Short version: +# get it working with no firewall first.) +# +# Cassandra ships with JMX accessible *only* from localhost. +# To enable remote JMX connections, uncomment lines below +# with authentication and/or ssl enabled. See https://wiki.apache.org/cassandra/JmxSecurity +# +if [ "x$LOCAL_JMX" = "x" ]; then + LOCAL_JMX=yes +fi + +# Specifies the default port over which Cassandra will be available for +# JMX connections. +# For security reasons, you should not expose this port to the internet. Firewall it if needed. +JMX_PORT="7199" + +if [ "$LOCAL_JMX" = "yes" ]; then +# JVM_OPTS="$JVM_OPTS -Dcassandra.jmx.local.port=$JMX_PORT -XX:+DisableExplicitGC" + JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote" + JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.port=$JMX_PORT" + JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.rmi.port=$JMX_PORT" + JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.local.only=false" + JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.authenticate=false" + JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.ssl=false" + JVM_OPTS="$JVM_OPTS -XX:+UnlockCommercialFeatures" + JVM_OPTS="$JVM_OPTS -XX:+FlightRecorder" + JVM_OPTS="$JVM_OPTS -XX:FlightRecorderOptions=defaultrecording=true" +else + JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.port=$JMX_PORT" + JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.rmi.port=$JMX_PORT" + JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.ssl=false" + JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.authenticate=true" + JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.password.file=/etc/cassandra/jmxremote.password" +# JVM_OPTS="$JVM_OPTS -Djavax.net.ssl.keyStore=/path/to/keystore" +# JVM_OPTS="$JVM_OPTS -Djavax.net.ssl.keyStorePassword=" +# JVM_OPTS="$JVM_OPTS -Djavax.net.ssl.trustStore=/path/to/truststore" +# JVM_OPTS="$JVM_OPTS -Djavax.net.ssl.trustStorePassword=" +# JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.ssl.need.client.auth=true" +# JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.registry.ssl=true" +# JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.ssl.enabled.protocols=" +# JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.ssl.enabled.cipher.suites=" +fi + +# To use mx4j, an HTML interface for JMX, add mx4j-tools.jar to the lib/ +# directory. +# See http://wiki.apache.org/cassandra/Operations#Monitoring_with_MX4J +# By default mx4j listens on 0.0.0.0:8081. Uncomment the following lines +# to control its listen address and port. 
+#MX4J_ADDRESS="-Dmx4jaddress=127.0.0.1" +#MX4J_PORT="-Dmx4jport=8081" + +# Cassandra uses SIGAR to capture OS metrics CASSANDRA-7838 +# for SIGAR we have to set the java.library.path +# to the location of the native libraries. +JVM_OPTS="$JVM_OPTS -Djava.library.path=$CASSANDRA_HOME/lib/sigar-bin" + +JVM_OPTS="$JVM_OPTS $MX4J_ADDRESS" +JVM_OPTS="$JVM_OPTS $MX4J_PORT" +JVM_OPTS="$JVM_OPTS $JVM_EXTRA_OPTS" diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-start.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-start.sh new file mode 100644 index 0000000000000..4a6daef6cef39 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-start.sh @@ -0,0 +1,217 @@ +#!/bin/sh + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# ----------------------------------------------------------------------------------------------- +# Script to start Cassandra daemon (used by cassandra-bootstrap.sh) +# ----------------------------------------------------------------------------------------------- + +#profile=/home/cassandra/.bash_profile +profile=/root/.bash_profile + +. $profile +. /opt/ignite-cassandra-tests/bootstrap/aws/common.sh "cassandra" + +# Setups Cassandra seeds for this EC2 node. Looks for the information in S3 about +# already up and running Cassandra cluster nodes +setupCassandraSeeds() +{ + if [ "$FIRST_NODE_LOCK" == "true" ]; then + echo "[INFO] Setting up Cassandra seeds" + + CLUSTER_SEEDS=$(hostname -f | tr '[:upper:]' '[:lower:]') + + echo "[INFO] Using host address as a seed for the first Cassandra node: $CLUSTER_SEEDS" + + aws s3 rm --recursive ${S3_CASSANDRA_NODES_DISCOVERY::-1} + if [ $? -ne 0 ]; then + terminate "Failed to clean Cassandra node discovery URL: $S3_CASSANDRA_NODES_DISCOVERY" + fi + else + setupClusterSeeds "cassandra" "true" + CLUSTER_SEEDS=$(echo $CLUSTER_SEEDS | sed -r "s/ /,/g") + fi + + cat /opt/cassandra/conf/cassandra-template.yaml | sed -r "s/\\\$\{CASSANDRA_SEEDS\}/$CLUSTER_SEEDS/g" > /opt/cassandra/conf/cassandra.yaml +} + +# Gracefully starts Cassandra daemon and waits until it joins Cassandra cluster +startCassandra() +{ + echo "[INFO]-------------------------------------------------------------" + echo "[INFO] Trying attempt $START_ATTEMPT to start Cassandra daemon" + echo "[INFO]-------------------------------------------------------------" + echo "" + + setupCassandraSeeds + + waitToJoinCluster + + if [ "$FIRST_NODE_LOCK" == "true" ]; then + aws s3 rm --recursive ${S3_CASSANDRA_NODES_DISCOVERY::-1} + if [ $? 
-ne 0 ]; then + terminate "Failed to clean Cassandra node discovery URL: $S3_IGNITE_NODES_DISCOVERY" + fi + fi + + proc=$(ps -ef | grep java | grep "org.apache.cassandra.service.CassandraDaemon") + proc=($proc) + + if [ -n "${proc[1]}" ]; then + echo "[INFO] Terminating existing Cassandra process ${proc[1]}" + kill -9 ${proc[1]} + fi + + echo "[INFO] Starting Cassandra" + rm -Rf /opt/cassandra/logs/* /storage/cassandra/* + /opt/cassandra/bin/cassandra -R & + + echo "[INFO] Cassandra job id: $!" + + sleep 1m + + START_ATTEMPT=$(( $START_ATTEMPT+1 )) +} + +####################################################################################################### + +START_ATTEMPT=0 + +# Cleans all the previous metadata about this EC2 node +unregisterNode + +# Tries to get first-node lock +tryToGetFirstNodeLock + +echo "[INFO]-----------------------------------------------------------------" + +if [ "$FIRST_NODE_LOCK" == "true" ]; then + echo "[INFO] Starting first Cassandra node" +else + echo "[INFO] Starting Cassandra node" +fi + +echo "[INFO]-----------------------------------------------------------------" +printInstanceInfo +echo "[INFO]-----------------------------------------------------------------" + +if [ "$FIRST_NODE_LOCK" != "true" ]; then + waitFirstClusterNodeRegistered "true" +else + cleanupMetadata +fi + +# Start Cassandra daemon +startCassandra + +startTime=$(date +%s) + +# Trying multiple attempts to start Cassandra daemon +while true; do + proc=$(ps -ef | grep java | grep "org.apache.cassandra.service.CassandraDaemon") + + /opt/cassandra/bin/nodetool status &> /dev/null + + if [ $? -eq 0 ]; then + echo "[INFO]-----------------------------------------------------" + echo "[INFO] Cassandra daemon successfully started" + echo "[INFO]-----------------------------------------------------" + echo $proc + echo "[INFO]-----------------------------------------------------" + + # Once node joined the cluster we need to remove cluster-join lock + # to allow other EC2 nodes to acquire it and join cluster sequentially + removeClusterJoinLock + + break + fi + + currentTime=$(date +%s) + duration=$(( $currentTime-$startTime )) + duration=$(( $duration/60 )) + + if [ $duration -gt $SERVICE_STARTUP_TIME ]; then + if [ "$FIRST_NODE_LOCK" == "true" ]; then + # If the first node of Cassandra cluster failed to start Cassandra daemon in SERVICE_STARTUP_TIME min, + # we will not try any other attempts and just terminate with error. Terminate function itself, will + # take care about removing all the locks holding by this node. + terminate "${SERVICE_STARTUP_TIME}min timeout expired, but first Cassandra daemon is still not up and running" + else + # If node isn't the first node of Cassandra cluster and it failed to start we need to + # remove cluster-join lock to allow other EC2 nodes to acquire it + removeClusterJoinLock + + # If node failed all SERVICE_START_ATTEMPTS attempts to start Cassandra daemon we will not + # try anymore and terminate with error + if [ $START_ATTEMPT -gt $SERVICE_START_ATTEMPTS ]; then + terminate "${SERVICE_START_ATTEMPTS} attempts exceed, but Cassandra daemon is still not up and running" + fi + + # New attempt to start Cassandra daemon + startCassandra + fi + + continue + fi + + # Checking for the situation when two nodes trying to simultaneously join Cassandra cluster. + # This actually can happen only in not standard situation, when you are trying to start + # Cassandra daemon on some EC2 nodes manually and not using bootstrap script. 
+ concurrencyError=$(cat /opt/cassandra/logs/system.log | grep "java.lang.UnsupportedOperationException: Other bootstrapping/leaving/moving nodes detected, cannot bootstrap while cassandra.consistent.rangemovement is true") + + if [ -n "$concurrencyError" ] && [ "$FIRST_NODE_LOCK" != "true" ]; then + # Remove cluster-join lock to allow other EC2 nodes to acquire it + removeClusterJoinLock + + echo "[WARN] Failed to concurrently start Cassandra daemon. Sleeping for extra 30sec" + sleep 30s + + # New attempt to start Cassandra daemon + startCassandra + + continue + fi + + # Handling situation when Cassandra daemon process abnormally terminated + if [ -z "$proc" ]; then + # If this is the first node of Cassandra cluster just terminating with error + if [ "$FIRST_NODE_LOCK" == "true" ]; then + terminate "Failed to start Cassandra daemon" + fi + + # Remove cluster-join lock to allow other EC2 nodes to acquire it + removeClusterJoinLock + + echo "[WARN] Failed to start Cassandra daemon. Sleeping for extra 30sec" + sleep 30s + + # New attempt to start Cassandra daemon + startCassandra + + continue + fi + + echo "[INFO] Waiting for Cassandra daemon to start, time passed ${duration}min" + sleep 30s +done + +# Once Cassandra daemon successfully started we registering new Cassandra node in S3 +registerNode + +# Terminating script with zero exit code +terminate \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-template.yaml b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-template.yaml new file mode 100644 index 0000000000000..e621886d64310 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/cassandra/cassandra-template.yaml @@ -0,0 +1,888 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Cassandra storage config YAML + +# NOTE: +# See http://wiki.apache.org/cassandra/StorageConfiguration for +# full explanations of configuration directives +# /NOTE + +# The name of the cluster. This is mainly used to prevent machines in +# one logical cluster from joining another. +cluster_name: 'CassandraIgnite' + +# It makes new (non-seed) nodes automatically migrate the right data to themselves. +# When initializing a fresh cluster with no data, add auto_bootstrap: false +auto_bootstrap: false + +# This defines the number of tokens randomly assigned to this node on the ring +# The more tokens, relative to other nodes, the larger the proportion of data +# that this node will store. You probably want all nodes to have the same number +# of tokens assuming they have equal hardware capability. 
+# +# If you leave this unspecified, Cassandra will use the default of 1 token for legacy compatibility, +# and will use the initial_token as described below. +# +# Specifying initial_token will override this setting on the node's initial start, +# on subsequent starts, this setting will apply even if initial token is set. +# +# If you already have a cluster with 1 token per node, and wish to migrate to +# multiple tokens per node, see http://wiki.apache.org/cassandra/Operations +num_tokens: 256 + +# initial_token allows you to specify tokens manually. While you can use # it with +# vnodes (num_tokens > 1, above) -- in which case you should provide a +# comma-separated list -- it's primarily used when adding nodes # to legacy clusters +# that do not have vnodes enabled. +# initial_token: + +# See http://wiki.apache.org/cassandra/HintedHandoff +# May either be "true" or "false" to enable globally, or contain a list +# of data centers to enable per-datacenter. +# hinted_handoff_enabled: DC1,DC2 +hinted_handoff_enabled: true +# this defines the maximum amount of time a dead host will have hints +# generated. After it has been dead this long, new hints for it will not be +# created until it has been seen alive and gone down again. +max_hint_window_in_ms: 10800000 # 3 hours +# Maximum throttle in KBs per second, per delivery thread. This will be +# reduced proportionally to the number of nodes in the cluster. (If there +# are two nodes in the cluster, each delivery thread will use the maximum +# rate; if there are three, each will throttle to half of the maximum, +# since we expect two nodes to be delivering hints simultaneously.) +hinted_handoff_throttle_in_kb: 1024 +# Number of threads with which to deliver hints; +# Consider increasing this number when you have multi-dc deployments, since +# cross-dc handoff tends to be slower +max_hints_delivery_threads: 2 + +# Maximum throttle in KBs per second, total. This will be +# reduced proportionally to the number of nodes in the cluster. +batchlog_replay_throttle_in_kb: 1024 + +# Authentication backend, implementing IAuthenticator; used to identify users +# Out of the box, Cassandra provides org.apache.cassandra.auth.{AllowAllAuthenticator, +# PasswordAuthenticator}. +# +# - AllowAllAuthenticator performs no checks - set it to disable authentication. +# - PasswordAuthenticator relies on username/password pairs to authenticate +# users. It keeps usernames and hashed passwords in system_auth.credentials table. +# Please increase system_auth keyspace replication factor if you use this authenticator. +# If using PasswordAuthenticator, CassandraRoleManager must also be used (see below) +#authenticator: PasswordAuthenticator +authenticator: AllowAllAuthenticator + +# Authorization backend, implementing IAuthorizer; used to limit access/provide permissions +# Out of the box, Cassandra provides org.apache.cassandra.auth.{AllowAllAuthorizer, +# CassandraAuthorizer}. +# +# - AllowAllAuthorizer allows any action to any user - set it to disable authorization. +# - CassandraAuthorizer stores permissions in system_auth.permissions table. Please +# increase system_auth keyspace replication factor if you use this authorizer. +#authorizer: CassandraAuthorizer +authorizer: AllowAllAuthorizer + +# Part of the Authentication & Authorization backend, implementing IRoleManager; used +# to maintain grants and memberships between roles. 
+# Out of the box, Cassandra provides org.apache.cassandra.auth.CassandraRoleManager, +# which stores role information in the system_auth keyspace. Most functions of the +# IRoleManager require an authenticated login, so unless the configured IAuthenticator +# actually implements authentication, most of this functionality will be unavailable. +# +# - CassandraRoleManager stores role data in the system_auth keyspace. Please +# increase system_auth keyspace replication factor if you use this role manager. +role_manager: CassandraRoleManager + +# Validity period for roles cache (fetching permissions can be an +# expensive operation depending on the authorizer). Granted roles are cached for +# authenticated sessions in AuthenticatedUser and after the period specified +# here, become eligible for (async) reload. +# Defaults to 2000, set to 0 to disable. +# Will be disabled automatically for AllowAllAuthenticator. +roles_validity_in_ms: 2000 + +# Refresh interval for roles cache (if enabled). +# After this interval, cache entries become eligible for refresh. Upon next +# access, an async reload is scheduled and the old value returned until it +# completes. If roles_validity_in_ms is non-zero, then this must be +# also. +# Defaults to the same value as roles_validity_in_ms. +# roles_update_interval_in_ms: 1000 + +# Validity period for permissions cache (fetching permissions can be an +# expensive operation depending on the authorizer, CassandraAuthorizer is +# one example). Defaults to 2000, set to 0 to disable. +# Will be disabled automatically for AllowAllAuthorizer. +permissions_validity_in_ms: 2000 + +# Refresh interval for permissions cache (if enabled). +# After this interval, cache entries become eligible for refresh. Upon next +# access, an async reload is scheduled and the old value returned until it +# completes. If permissions_validity_in_ms is non-zero, then this must be +# also. +# Defaults to the same value as permissions_validity_in_ms. +# permissions_update_interval_in_ms: 1000 + +# The partitioner is responsible for distributing groups of rows (by +# partition key) across nodes in the cluster. You should leave this +# alone for new clusters. The partitioner can NOT be changed without +# reloading all data, so when upgrading you should set this to the +# same partitioner you were already using. +# +# Besides Murmur3Partitioner, partitioners included for backwards +# compatibility include RandomPartitioner, ByteOrderedPartitioner, and +# OrderPreservingPartitioner. +# +partitioner: org.apache.cassandra.dht.Murmur3Partitioner + +# Directories where Cassandra should store data on disk. Cassandra +# will spread data evenly across them, subject to the granularity of +# the configured compaction strategy. +# If not set, the default directory is $CASSANDRA_HOME/data/data. +data_file_directories: ${CASSANDRA_DATA_DIR} + +# commit log. when running on magnetic HDD, this should be a +# separate spindle than the data directories. +# If not set, the default directory is $CASSANDRA_HOME/data/commitlog. +commitlog_directory: ${CASSANDRA_COMMITLOG_DIR} + +# policy for data disk failures: +# die: shut down gossip and client transports and kill the JVM for any fs errors or +# single-sstable errors, so the node can be replaced. +# stop_paranoid: shut down gossip and client transports even for single-sstable errors, +# kill the JVM for errors during startup. 
+# stop: shut down gossip and client transports, leaving the node effectively dead, but +# can still be inspected via JMX, kill the JVM for errors during startup. +# best_effort: stop using the failed disk and respond to requests based on +# remaining available sstables. This means you WILL see obsolete +# data at CL.ONE! +# ignore: ignore fatal errors and let requests fail, as in pre-1.2 Cassandra +disk_failure_policy: stop + +# policy for commit disk failures: +# die: shut down gossip and Thrift and kill the JVM, so the node can be replaced. +# stop: shut down gossip and Thrift, leaving the node effectively dead, but +# can still be inspected via JMX. +# stop_commit: shutdown the commit log, letting writes collect but +# continuing to service reads, as in pre-2.0.5 Cassandra +# ignore: ignore fatal errors and let the batches fail +commit_failure_policy: stop + +# Maximum size of the key cache in memory. +# +# Each key cache hit saves 1 seek and each row cache hit saves 2 seeks at the +# minimum, sometimes more. The key cache is fairly tiny for the amount of +# time it saves, so it's worthwhile to use it at large numbers. +# The row cache saves even more time, but must contain the entire row, +# so it is extremely space-intensive. It's best to only use the +# row cache if you have hot rows or static rows. +# +# NOTE: if you reduce the size, you may not get you hottest keys loaded on startup. +# +# Default value is empty to make it "auto" (min(5% of Heap (in MB), 100MB)). Set to 0 to disable key cache. +key_cache_size_in_mb: + +# Duration in seconds after which Cassandra should +# save the key cache. Caches are saved to saved_caches_directory as +# specified in this configuration file. +# +# Saved caches greatly improve cold-start speeds, and is relatively cheap in +# terms of I/O for the key cache. Row cache saving is much more expensive and +# has limited use. +# +# Default is 14400 or 4 hours. +key_cache_save_period: 14400 + +# Number of keys from the key cache to save +# Disabled by default, meaning all keys are going to be saved +# key_cache_keys_to_save: 100 + +# Row cache implementation class name. +# Available implementations: +# org.apache.cassandra.cache.OHCProvider Fully off-heap row cache implementation (default). +# org.apache.cassandra.cache.SerializingCacheProvider This is the row cache implementation availabile +# in previous releases of Cassandra. +# row_cache_class_name: org.apache.cassandra.cache.OHCProvider + +# Maximum size of the row cache in memory. +# Please note that OHC cache implementation requires some additional off-heap memory to manage +# the map structures and some in-flight memory during operations before/after cache entries can be +# accounted against the cache capacity. This overhead is usually small compared to the whole capacity. +# Do not specify more memory that the system can afford in the worst usual situation and leave some +# headroom for OS block level cache. Do never allow your system to swap. +# +# Default value is 0, to disable row caching. +row_cache_size_in_mb: 0 + +# Duration in seconds after which Cassandra should save the row cache. +# Caches are saved to saved_caches_directory as specified in this configuration file. +# +# Saved caches greatly improve cold-start speeds, and is relatively cheap in +# terms of I/O for the key cache. Row cache saving is much more expensive and +# has limited use. +# +# Default is 0 to disable saving the row cache. +row_cache_save_period: 0 + +# Number of keys from the row cache to save. 
+# Specify 0 (which is the default), meaning all keys are going to be saved +# row_cache_keys_to_save: 100 + +# Maximum size of the counter cache in memory. +# +# Counter cache helps to reduce counter locks' contention for hot counter cells. +# In case of RF = 1 a counter cache hit will cause Cassandra to skip the read before +# write entirely. With RF > 1 a counter cache hit will still help to reduce the duration +# of the lock hold, helping with hot counter cell updates, but will not allow skipping +# the read entirely. Only the local (clock, count) tuple of a counter cell is kept +# in memory, not the whole counter, so it's relatively cheap. +# +# NOTE: if you reduce the size, you may not get you hottest keys loaded on startup. +# +# Default value is empty to make it "auto" (min(2.5% of Heap (in MB), 50MB)). Set to 0 to disable counter cache. +# NOTE: if you perform counter deletes and rely on low gcgs, you should disable the counter cache. +counter_cache_size_in_mb: + +# Duration in seconds after which Cassandra should +# save the counter cache (keys only). Caches are saved to saved_caches_directory as +# specified in this configuration file. +# +# Default is 7200 or 2 hours. +counter_cache_save_period: 7200 + +# Number of keys from the counter cache to save +# Disabled by default, meaning all keys are going to be saved +# counter_cache_keys_to_save: 100 + +# The off-heap memory allocator. Affects storage engine metadata as +# well as caches. Experiments show that JEMAlloc saves some memory +# than the native GCC allocator (i.e., JEMalloc is more +# fragmentation-resistant). +# +# Supported values are: NativeAllocator, JEMallocAllocator +# +# If you intend to use JEMallocAllocator you have to install JEMalloc as library and +# modify cassandra-env.sh as directed in the file. +# +# Defaults to NativeAllocator +# memory_allocator: NativeAllocator + +# saved caches +# If not set, the default directory is $CASSANDRA_HOME/data/saved_caches. +saved_caches_directory: ${CASSANDRA_CACHES_DIR} + +# commitlog_sync may be either "periodic" or "batch." +# +# When in batch mode, Cassandra won't ack writes until the commit log +# has been fsynced to disk. It will wait +# commitlog_sync_batch_window_in_ms milliseconds between fsyncs. +# This window should be kept short because the writer threads will +# be unable to do extra work while waiting. (You may need to increase +# concurrent_writes for the same reason.) +# +# commitlog_sync: batch +# commitlog_sync_batch_window_in_ms: 2 +# +# the other option is "periodic" where writes may be acked immediately +# and the CommitLog is simply synced every commitlog_sync_period_in_ms +# milliseconds. +commitlog_sync: periodic +commitlog_sync_period_in_ms: 10000 + +# The size of the individual commitlog file segments. A commitlog +# segment may be archived, deleted, or recycled once all the data +# in it (potentially from each columnfamily in the system) has been +# flushed to sstables. +# +# The default size is 32, which is almost always fine, but if you are +# archiving commitlog segments (see commitlog_archiving.properties), +# then you probably want a finer granularity of archiving; 8 or 16 MB +# is reasonable. +commitlog_segment_size_in_mb: 32 + +# Compression to apply to the commit log. If omitted, the commit log +# will be written uncompressed. LZ4, Snappy, and Deflate compressors +# are supported. 
+#commitlog_compression:
+#   - class_name: LZ4Compressor
+#     parameters:
+#         -
+
+# any class that implements the SeedProvider interface and has a
+# constructor that takes a Map<String, String> of parameters will do.
+seed_provider:
+    # Addresses of hosts that are deemed contact points.
+    # Cassandra nodes use this list of hosts to find each other and learn
+    # the topology of the ring. You must change this if you are running
+    # multiple nodes!
+    - class_name: org.apache.cassandra.locator.SimpleSeedProvider
+      parameters:
+          # seeds is actually a comma-delimited list of addresses.
+          # Ex: "<ip1>,<ip2>,<ip3>"
+          - seeds: "${CASSANDRA_SEEDS}"
+
+# For workloads with more data than can fit in memory, Cassandra's
+# bottleneck will be reads that need to fetch data from
+# disk. "concurrent_reads" should be set to (16 * number_of_drives) in
+# order to allow the operations to enqueue low enough in the stack
+# that the OS and drives can reorder them. Same applies to
+# "concurrent_counter_writes", since counter writes read the current
+# values before incrementing and writing them back.
+#
+# On the other hand, since writes are almost never IO bound, the ideal
+# number of "concurrent_writes" is dependent on the number of cores in
+# your system; (8 * number_of_cores) is a good rule of thumb.
+concurrent_reads: 32
+concurrent_writes: 32
+concurrent_counter_writes: 32
+
+# Total memory to use for sstable-reading buffers. Defaults to
+# the smaller of 1/4 of heap or 512MB.
+# file_cache_size_in_mb: 512
+
+# Total permitted memory to use for memtables. Cassandra will stop
+# accepting writes when the limit is exceeded until a flush completes,
+# and will trigger a flush based on memtable_cleanup_threshold
+# If omitted, Cassandra will set both to 1/4 the size of the heap.
+# memtable_heap_space_in_mb: 2048
+# memtable_offheap_space_in_mb: 2048
+
+# Ratio of occupied non-flushing memtable size to total permitted size
+# that will trigger a flush of the largest memtable. Larger mct will
+# mean larger flushes and hence less compaction, but also less concurrent
+# flush activity which can make it difficult to keep your disks fed
+# under heavy write load.
+#
+# memtable_cleanup_threshold defaults to 1 / (memtable_flush_writers + 1)
+# memtable_cleanup_threshold: 0.11
+
+# Specify the way Cassandra allocates and manages memtable memory.
+# Options are:
+#   heap_buffers:    on heap nio buffers
+#   offheap_buffers: off heap (direct) nio buffers
+#   offheap_objects: native memory, eliminating nio buffer heap overhead
+memtable_allocation_type: heap_buffers
+
+# Total space to use for commit logs on disk.
+#
+# If space gets above this value, Cassandra will flush every dirty CF
+# in the oldest segment and remove it. So a small total commitlog space
+# will tend to cause more flush activity on less-active columnfamilies.
+#
+# The default value is 8192.
+# commitlog_total_space_in_mb: 8192
+
+# This sets the number of memtable flush writer threads. These will
+# be blocked by disk io, and each one will hold a memtable in memory
+# while blocked.
+#
+# memtable_flush_writers defaults to the smaller of (number of disks,
+# number of cores), with a minimum of 2 and a maximum of 8.
+#
+# If your data directories are backed by SSD, you should increase this
+# to the number of cores.
+#memtable_flush_writers: 8
+
+# A fixed memory pool size in MB for SSTable index summaries. If left
+# empty, this will default to 5% of the heap size.
If the memory usage of +# all index summaries exceeds this limit, SSTables with low read rates will +# shrink their index summaries in order to meet this limit. However, this +# is a best-effort process. In extreme conditions Cassandra may need to use +# more than this amount of memory. +index_summary_capacity_in_mb: + +# How frequently index summaries should be resampled. This is done +# periodically to redistribute memory from the fixed-size pool to sstables +# proportional their recent read rates. Setting to -1 will disable this +# process, leaving existing index summaries at their current sampling level. +index_summary_resize_interval_in_minutes: 60 + +# Whether to, when doing sequential writing, fsync() at intervals in +# order to force the operating system to flush the dirty +# buffers. Enable this to avoid sudden dirty buffer flushing from +# impacting read latencies. Almost always a good idea on SSDs; not +# necessarily on platters. +trickle_fsync: false +trickle_fsync_interval_in_kb: 10240 + +# TCP port, for commands and data +# For security reasons, you should not expose this port to the internet. Firewall it if needed. +storage_port: 7000 + +# SSL port, for encrypted communication. Unused unless enabled in +# encryption_options +# For security reasons, you should not expose this port to the internet. Firewall it if needed. +ssl_storage_port: 7001 + +# Address or interface to bind to and tell other Cassandra nodes to connect to. +# You _must_ change this if you want multiple nodes to be able to communicate! +# +# Set listen_address OR listen_interface, not both. Interfaces must correspond +# to a single address, IP aliasing is not supported. +# +# Leaving it blank leaves it up to InetAddress.getLocalHost(). This +# will always do the Right Thing _if_ the node is properly configured +# (hostname, name resolution, etc), and the Right Thing is to use the +# address associated with the hostname (it might not be). +# +# Setting listen_address to 0.0.0.0 is always wrong. +# +# If you choose to specify the interface by name and the interface has an ipv4 and an ipv6 address +# you can specify which should be chosen using listen_interface_prefer_ipv6. If false the first ipv4 +# address will be used. If true the first ipv6 address will be used. Defaults to false preferring +# ipv4. If there is only one address it will be selected regardless of ipv4/ipv6. +listen_address: +# listen_interface: eth0 +# listen_interface_prefer_ipv6: false + +# Address to broadcast to other Cassandra nodes +# Leaving this blank will set it to the same value as listen_address +# broadcast_address: 1.2.3.4 + +# Internode authentication backend, implementing IInternodeAuthenticator; +# used to allow/disallow connections from peer nodes. +# internode_authenticator: org.apache.cassandra.auth.AllowAllInternodeAuthenticator + +# Whether to start the native transport server. +# Please note that the address on which the native transport is bound is the +# same as the rpc_address. The port however is different and specified below. +start_native_transport: true +# port for the CQL native transport to listen for clients on +# For security reasons, you should not expose this port to the internet. Firewall it if needed. +native_transport_port: 9042 +# The maximum threads for handling requests when the native transport is used. +# This is similar to rpc_max_threads though the default differs slightly (and +# there is no native_transport_min_threads, idle threads will always be stopped +# after 30 seconds). 
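+# To raise that cap, uncomment and adjust the setting below (the 128 shown is
+# only the sample value carried in this template):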
+# native_transport_max_threads: 128 +# +# The maximum size of allowed frame. Frame (requests) larger than this will +# be rejected as invalid. The default is 256MB. +# native_transport_max_frame_size_in_mb: 256 + +# The maximum number of concurrent client connections. +# The default is -1, which means unlimited. +# native_transport_max_concurrent_connections: -1 + +# The maximum number of concurrent client connections per source ip. +# The default is -1, which means unlimited. +# native_transport_max_concurrent_connections_per_ip: -1 + +# Whether to start the thrift rpc server. +start_rpc: true + +# The address or interface to bind the Thrift RPC service and native transport +# server to. +# +# Set rpc_address OR rpc_interface, not both. Interfaces must correspond +# to a single address, IP aliasing is not supported. +# +# Leaving rpc_address blank has the same effect as on listen_address +# (i.e. it will be based on the configured hostname of the node). +# +# Note that unlike listen_address, you can specify 0.0.0.0, but you must also +# set broadcast_rpc_address to a value other than 0.0.0.0. +# +# For security reasons, you should not expose this port to the internet. Firewall it if needed. +# +# If you choose to specify the interface by name and the interface has an ipv4 and an ipv6 address +# you can specify which should be chosen using rpc_interface_prefer_ipv6. If false the first ipv4 +# address will be used. If true the first ipv6 address will be used. Defaults to false preferring +# ipv4. If there is only one address it will be selected regardless of ipv4/ipv6. +rpc_address: +# rpc_interface: eth1 +# rpc_interface_prefer_ipv6: false + +# port for Thrift to listen for clients on +rpc_port: 9160 + +# RPC address to broadcast to drivers and other Cassandra nodes. This cannot +# be set to 0.0.0.0. If left blank, this will be set to the value of +# rpc_address. If rpc_address is set to 0.0.0.0, broadcast_rpc_address must +# be set. +broadcast_rpc_address: + +# enable or disable keepalive on rpc/native connections +rpc_keepalive: true + +# Cassandra provides two out-of-the-box options for the RPC Server: +# +# sync -> One thread per thrift connection. For a very large number of clients, memory +# will be your limiting factor. On a 64 bit JVM, 180KB is the minimum stack size +# per thread, and that will correspond to your use of virtual memory (but physical memory +# may be limited depending on use of stack space). +# +# hsha -> Stands for "half synchronous, half asynchronous." All thrift clients are handled +# asynchronously using a small number of threads that does not vary with the amount +# of thrift clients (and thus scales well to many clients). The rpc requests are still +# synchronous (one thread per active request). If hsha is selected then it is essential +# that rpc_max_threads is changed from the default value of unlimited. +# +# The default is sync because on Windows hsha is about 30% slower. On Linux, +# sync/hsha performance is about the same, with hsha of course using less memory. +# +# Alternatively, can provide your own RPC server by providing the fully-qualified class name +# of an o.a.c.t.TServerFactory that can create an instance of it. +rpc_server_type: sync + +# Uncomment rpc_min|max_thread to set request pool size limits. 
+#
+# Regardless of your choice of RPC server (see above), the number of maximum requests in the
+# RPC thread pool dictates how many concurrent requests are possible (but if you are using the sync
+# RPC server, it also dictates the number of clients that can be connected at all).
+#
+# The default is unlimited and thus provides no protection against clients overwhelming the server. You are
+# encouraged to set a maximum that makes sense for you in production, but do keep in mind that
+# rpc_max_threads represents the maximum number of client requests this server may execute concurrently.
+#
+# rpc_min_threads: 16
+# rpc_max_threads: 2048
+
+# uncomment to set socket buffer sizes on rpc connections
+# rpc_send_buff_size_in_bytes:
+# rpc_recv_buff_size_in_bytes:
+
+# Uncomment to set socket buffer size for internode communication
+# Note that when setting this, the buffer size is limited by net.core.wmem_max
+# and when not set it is defined by net.ipv4.tcp_wmem
+# See:
+# /proc/sys/net/core/wmem_max
+# /proc/sys/net/core/rmem_max
+# /proc/sys/net/ipv4/tcp_wmem
+# /proc/sys/net/ipv4/tcp_rmem
+# and: man tcp
+# internode_send_buff_size_in_bytes:
+# internode_recv_buff_size_in_bytes:
+
+# Frame size for thrift (maximum message length).
+thrift_framed_transport_size_in_mb: 15
+
+# Set to true to have Cassandra create a hard link to each sstable
+# flushed or streamed locally in a backups/ subdirectory of the
+# keyspace data. Removing these links is the operator's
+# responsibility.
+incremental_backups: false
+
+# Whether or not to take a snapshot before each compaction. Be
+# careful using this option, since Cassandra won't clean up the
+# snapshots for you. Mostly useful if you're paranoid when there
+# is a data format change.
+snapshot_before_compaction: false
+
+# Whether or not a snapshot is taken of the data before keyspace truncation
+# or dropping of column families. The STRONGLY advised default of true
+# should be used to provide data safety. If you set this flag to false, you will
+# lose data on truncation or drop.
+auto_snapshot: true
+
+# When executing a scan, within or across a partition, we need to keep the
+# tombstones seen in memory so we can return them to the coordinator, which
+# will use them to make sure other replicas also know about the deleted rows.
+# With workloads that generate a lot of tombstones, this can cause performance
+# problems and even exhaust the server heap.
+# (http://www.datastax.com/dev/blog/cassandra-anti-patterns-queues-and-queue-like-datasets)
+# Adjust the thresholds here if you understand the dangers and want to
+# scan more tombstones anyway. These thresholds may also be adjusted at runtime
+# using the StorageService mbean.
+tombstone_warn_threshold: 1000
+tombstone_failure_threshold: 100000
+
+# Granularity of the collation index of rows within a partition.
+# Increase if your rows are large, or if you have a very large
+# number of rows per partition. The competing goals are these:
+#   1) a smaller granularity means more index entries are generated
+#      and looking up rows within the partition by collation column
+#      is faster
+#   2) but, Cassandra will keep the collation index in memory for hot
+#      rows (as part of the key cache), so a larger granularity means
+#      you can cache more hot rows
+column_index_size_in_kb: 64
+
+
+# Log WARN on any batch size exceeding this value. 5kb per batch by default.
+# Caution should be taken on increasing the size of this threshold as it can lead to node instability.
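+# (Note that batch_size_fail_threshold_in_kb below defaults to 10x this warn
+# threshold; if you do raise one, it is usually sensible to scale the other
+# proportionally.)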
+batch_size_warn_threshold_in_kb: 5
+
+# Fail any batch exceeding this value. 50kb (10x warn threshold) by default.
+batch_size_fail_threshold_in_kb: 50
+
+# Number of simultaneous compactions to allow, NOT including
+# validation "compactions" for anti-entropy repair. Simultaneous
+# compactions can help preserve read performance in a mixed read/write
+# workload, by mitigating the tendency of small sstables to accumulate
+# during a single long-running compaction. The default is usually
+# fine and if you experience problems with compaction running too
+# slowly or too fast, you should look at
+# compaction_throughput_mb_per_sec first.
+#
+# concurrent_compactors defaults to the smaller of (number of disks,
+# number of cores), with a minimum of 2 and a maximum of 8.
+#
+# If your data directories are backed by SSD, you should increase this
+# to the number of cores.
+#concurrent_compactors: 1
+
+# Throttles compaction to the given total throughput across the entire
+# system. The faster you insert data, the faster you need to compact in
+# order to keep the sstable count down, but in general, setting this to
+# 16 to 32 times the rate you are inserting data is more than sufficient.
+# Setting this to 0 disables throttling. Note that this accounts for all types
+# of compaction, including validation compaction.
+compaction_throughput_mb_per_sec: 16
+
+# Log a warning when compacting partitions larger than this value
+compaction_large_partition_warning_threshold_mb: 100
+
+# When compacting, the replacement sstable(s) can be opened before they
+# are completely written, and used in place of the prior sstables for
+# any range that has been written. This helps to smoothly transfer reads
+# between the sstables, reducing page cache churn and keeping hot rows hot
+sstable_preemptive_open_interval_in_mb: 50
+
+# Throttles all outbound streaming file transfers on this node to the
+# given total throughput in Mbps. This is necessary because Cassandra does
+# mostly sequential IO when streaming data during bootstrap or repair, which
+# can lead to saturating the network connection and degrading rpc performance.
+# When unset, the default is 200 Mbps or 25 MB/s.
+# stream_throughput_outbound_megabits_per_sec: 200
+
+# Throttles all streaming file transfer between the datacenters;
+# this setting allows users to throttle inter dc stream throughput in addition
+# to throttling all network stream traffic as configured with
+# stream_throughput_outbound_megabits_per_sec
+# inter_dc_stream_throughput_outbound_megabits_per_sec:
+
+# How long the coordinator should wait for read operations to complete
+read_request_timeout_in_ms: 50000
+# How long the coordinator should wait for seq or index scans to complete
+range_request_timeout_in_ms: 10000
+# How long the coordinator should wait for writes to complete
+write_request_timeout_in_ms: 20000
+# How long the coordinator should wait for counter writes to complete
+counter_write_request_timeout_in_ms: 5000
+# How long a coordinator should continue to retry a CAS operation
+# that contends with other proposals for the same row
+cas_contention_timeout_in_ms: 1000
+# How long the coordinator should wait for truncates to complete
+# (This can be much longer, because unless auto_snapshot is disabled
+# we need to flush first so we can snapshot before removing the data.)
+truncate_request_timeout_in_ms: 60000 +# The default timeout for other, miscellaneous operations +request_timeout_in_ms: 10000 + +# Enable operation timeout information exchange between nodes to accurately +# measure request timeouts. If disabled, replicas will assume that requests +# were forwarded to them instantly by the coordinator, which means that +# under overload conditions we will waste that much extra time processing +# already-timed-out requests. +# +# Warning: before enabling this property make sure to ntp is installed +# and the times are synchronized between the nodes. +cross_node_timeout: false + +# Enable socket timeout for streaming operation. +# When a timeout occurs during streaming, streaming is retried from the start +# of the current file. This _can_ involve re-streaming an important amount of +# data, so you should avoid setting the value too low. +# Default value is 3600000, which means streams timeout after an hour. +# streaming_socket_timeout_in_ms: 3600000 + +# phi value that must be reached for a host to be marked down. +# most users should never need to adjust this. +# phi_convict_threshold: 8 + +# endpoint_snitch -- Set this to a class that implements +# IEndpointSnitch. The snitch has two functions: +# - it teaches Cassandra enough about your network topology to route +# requests efficiently +# - it allows Cassandra to spread replicas around your cluster to avoid +# correlated failures. It does this by grouping machines into +# "datacenters" and "racks." Cassandra will do its best not to have +# more than one replica on the same "rack" (which may not actually +# be a physical location) +# +# IF YOU CHANGE THE SNITCH AFTER DATA IS INSERTED INTO THE CLUSTER, +# YOU MUST RUN A FULL REPAIR, SINCE THE SNITCH AFFECTS WHERE REPLICAS +# ARE PLACED. +# +# Out of the box, Cassandra provides +# - SimpleSnitch: +# Treats Strategy order as proximity. This can improve cache +# locality when disabling read repair. Only appropriate for +# single-datacenter deployments. +# - GossipingPropertyFileSnitch +# This should be your go-to snitch for production use. The rack +# and datacenter for the local node are defined in +# cassandra-rackdc.properties and propagated to other nodes via +# gossip. If cassandra-topology.properties exists, it is used as a +# fallback, allowing migration from the PropertyFileSnitch. +# - PropertyFileSnitch: +# Proximity is determined by rack and data center, which are +# explicitly configured in cassandra-topology.properties. +# - Ec2Snitch: +# Appropriate for EC2 deployments in a single Region. Loads Region +# and Availability Zone information from the EC2 API. The Region is +# treated as the datacenter, and the Availability Zone as the rack. +# Only private IPs are used, so this will not work across multiple +# Regions. +# - Ec2MultiRegionSnitch: +# Uses public IPs as broadcast_address to allow cross-region +# connectivity. (Thus, you should set seed addresses to the public +# IP as well.) You will need to open the storage_port or +# ssl_storage_port on the public IP firewall. (For intra-Region +# traffic, Cassandra will switch to the private IP after +# establishing a connection.) +# - RackInferringSnitch: +# Proximity is determined by rack and data center, which are +# assumed to correspond to the 3rd and 2nd octet of each node's IP +# address, respectively. Unless this happens to match your +# deployment conventions, this is best used as an example of +# writing a custom Snitch class and is provided in that spirit. 
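+#
+# For example (illustrative only -- this template configures Ec2Snitch below), a
+# multi-datacenter deployment outside EC2 would typically use:
+#
+#     endpoint_snitch: GossipingPropertyFileSnitch
+#
+# with each node's dc= and rack= set in cassandra-rackdc.properties.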
+# +# You can use a custom Snitch by setting this to the full class name +# of the snitch, which will be assumed to be on your classpath. +endpoint_snitch: Ec2Snitch + +# controls how often to perform the more expensive part of host score +# calculation +dynamic_snitch_update_interval_in_ms: 100 +# controls how often to reset all host scores, allowing a bad host to +# possibly recover +dynamic_snitch_reset_interval_in_ms: 600000 +# if set greater than zero and read_repair_chance is < 1.0, this will allow +# 'pinning' of replicas to hosts in order to increase cache capacity. +# The badness threshold will control how much worse the pinned host has to be +# before the dynamic snitch will prefer other replicas over it. This is +# expressed as a double which represents a percentage. Thus, a value of +# 0.2 means Cassandra would continue to prefer the static snitch values +# until the pinned host was 20% worse than the fastest. +dynamic_snitch_badness_threshold: 0.1 + +# request_scheduler -- Set this to a class that implements +# RequestScheduler, which will schedule incoming client requests +# according to the specific policy. This is useful for multi-tenancy +# with a single Cassandra cluster. +# NOTE: This is specifically for requests from the client and does +# not affect inter node communication. +# org.apache.cassandra.scheduler.NoScheduler - No scheduling takes place +# org.apache.cassandra.scheduler.RoundRobinScheduler - Round robin of +# client requests to a node with a separate queue for each +# request_scheduler_id. The scheduler is further customized by +# request_scheduler_options as described below. +request_scheduler: org.apache.cassandra.scheduler.NoScheduler + +# Scheduler Options vary based on the type of scheduler +# NoScheduler - Has no options +# RoundRobin +# - throttle_limit -- The throttle_limit is the number of in-flight +# requests per client. Requests beyond +# that limit are queued up until +# running requests can complete. +# The value of 80 here is twice the number of +# concurrent_reads + concurrent_writes. +# - default_weight -- default_weight is optional and allows for +# overriding the default which is 1. +# - weights -- Weights are optional and will default to 1 or the +# overridden default_weight. The weight translates into how +# many requests are handled during each turn of the +# RoundRobin, based on the scheduler id. +# +# request_scheduler_options: +# throttle_limit: 80 +# default_weight: 5 +# weights: +# Keyspace1: 1 +# Keyspace2: 5 + +# request_scheduler_id -- An identifier based on which to perform +# the request scheduling. Currently the only valid option is keyspace. +# request_scheduler_id: keyspace + +# Enable or disable inter-node encryption +# Default settings are TLS v1, RSA 1024-bit keys (it is imperative that +# users generate their own keys) TLS_RSA_WITH_AES_128_CBC_SHA as the cipher +# suite for authentication, key exchange and encryption of the actual data transfers. +# Use the DHE/ECDHE ciphers if running in FIPS 140 compliant mode. +# NOTE: No custom encryption options are enabled at the moment +# The available internode options are : all, none, dc, rack +# +# If set to dc cassandra will encrypt the traffic between the DCs +# If set to rack cassandra will encrypt the traffic between the racks +# +# The passwords used in these options must match the passwords used when generating +# the keystore and truststore. 
For instructions on generating these files, see: +# http://download.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#CreateKeystore +# +server_encryption_options: + internode_encryption: none + keystore: conf/.keystore + keystore_password: cassandra + truststore: conf/.truststore + truststore_password: cassandra + # More advanced defaults below: + # protocol: TLS + # algorithm: SunX509 + # store_type: JKS + # cipher_suites: [TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA,TLS_DHE_RSA_WITH_AES_128_CBC_SHA,TLS_DHE_RSA_WITH_AES_256_CBC_SHA,TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA] + # require_client_auth: false + +# enable or disable client/server encryption. +client_encryption_options: + enabled: false + keystore: conf/.keystore + keystore_password: cassandra + # require_client_auth: false + # Set trustore and truststore_password if require_client_auth is true + # truststore: conf/.truststore + # truststore_password: cassandra + # More advanced defaults below: + # protocol: TLS + # algorithm: SunX509 + # store_type: JKS + # cipher_suites: [TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA,TLS_DHE_RSA_WITH_AES_128_CBC_SHA,TLS_DHE_RSA_WITH_AES_256_CBC_SHA,TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA] + +# internode_compression controls whether traffic between nodes is +# compressed. +# can be: all - all traffic is compressed +# dc - traffic between different datacenters is compressed +# none - nothing is compressed. +internode_compression: all + +# Enable or disable tcp_nodelay for inter-dc communication. +# Disabling it will result in larger (but fewer) network packets being sent, +# reducing overhead from the TCP protocol itself, at the cost of increasing +# latency if you block for cross-datacenter responses. +inter_dc_tcp_nodelay: false + +# TTL for different trace types used during logging of the repair process. +tracetype_query_ttl: 86400 +tracetype_repair_ttl: 604800 + +# UDFs (user defined functions) are disabled by default. +# As of Cassandra 2.2, there is no security manager or anything else in place that +# prevents execution of evil code. CASSANDRA-9402 will fix this issue for Cassandra 3.0. +# This will inherently be backwards-incompatible with any 2.2 UDF that perform insecure +# operations such as opening a socket or writing to the filesystem. +enable_user_defined_functions: false + +# The default Windows kernel timer and scheduling resolution is 15.6ms for power conservation. +# Lowering this value on Windows can provide much tighter latency and better throughput, however +# some virtualized environments may see a negative performance impact from changing this setting +# below their system default. The sysinternals 'clockres' tool can confirm your system's default +# setting. +windows_timer_interval: 1 diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/common.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/common.sh new file mode 100644 index 0000000000000..6469e951c3618 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/common.sh @@ -0,0 +1,1481 @@ +#!/bin/sh + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# ----------------------------------------------------------------------------------------------- +# Common purpose functions used by bootstrap scripts +# ----------------------------------------------------------------------------------------------- + +# Validates values of the main environment variables specified in env.sh +validate() +{ + if [ -n "$TESTS_TYPE" ] && [ "$TESTS_TYPE" != "ignite" ] && [ "$TESTS_TYPE" != "cassandra" ]; then + terminate "Incorrect tests type specified: $TESTS_TYPE" + fi + + if [ -z "$S3_TESTS_NODES_DISCOVERY" ]; then + terminate "Tests discovery URL doesn't specified" + fi + + if [[ "$S3_TESTS_NODES_DISCOVERY" != */ ]]; then + S3_TESTS_NODES_DISCOVERY=${S3_TESTS_NODES_DISCOVERY}/ + fi + + if [ -z "$S3_TESTS_SUCCESS" ]; then + terminate "Tests success URL doesn't specified" + fi + + if [[ "$S3_TESTS_SUCCESS" != */ ]]; then + S3_TESTS_SUCCESS=${S3_TESTS_SUCCESS}/ + fi + + if [ -z "$S3_TESTS_FAILURE" ]; then + terminate "Tests failure URL doesn't specified" + fi + + if [[ "$S3_TESTS_FAILURE" != */ ]]; then + S3_TESTS_FAILURE=${S3_TESTS_FAILURE}/ + fi + + if [ -z "$S3_TESTS_IDLE" ]; then + terminate "Tests idle URL doesn't specified" + fi + + if [[ "$S3_TESTS_IDLE" != */ ]]; then + S3_TESTS_IDLE=${S3_TESTS_IDLE}/ + fi + + if [ -z "$S3_TESTS_PREPARING" ]; then + terminate "Tests preparing URL doesn't specified" + fi + + if [[ "$S3_TESTS_PREPARING" != */ ]]; then + S3_TESTS_PREPARING=${S3_TESTS_PREPARING}/ + fi + + if [ -z "$S3_TESTS_RUNNING" ]; then + terminate "Tests running URL doesn't specified" + fi + + if [[ "$S3_TESTS_RUNNING" != */ ]]; then + S3_TESTS_RUNNING=${S3_TESTS_RUNNING}/ + fi + + if [ -z "$S3_TESTS_WAITING" ]; then + terminate "Tests waiting URL doesn't specified" + fi + + if [[ "$S3_TESTS_WAITING" != */ ]]; then + S3_TESTS_WAITING=${S3_TESTS_WAITING}/ + fi + + if [ -z "$S3_IGNITE_NODES_DISCOVERY" ]; then + terminate "Ignite discovery URL doesn't specified" + fi + + if [[ "$S3_IGNITE_NODES_DISCOVERY" != */ ]]; then + S3_IGNITE_NODES_DISCOVERY=${S3_IGNITE_NODES_DISCOVERY}/ + fi + + if [ -z "$S3_IGNITE_BOOTSTRAP_SUCCESS" ]; then + terminate "Ignite success URL doesn't specified" + fi + + if [[ "$S3_IGNITE_BOOTSTRAP_SUCCESS" != */ ]]; then + S3_IGNITE_BOOTSTRAP_SUCCESS=${S3_IGNITE_BOOTSTRAP_SUCCESS}/ + fi + + if [ -z "$S3_IGNITE_BOOTSTRAP_FAILURE" ]; then + terminate "Ignite failure URL doesn't specified" + fi + + if [[ "$S3_IGNITE_BOOTSTRAP_FAILURE" != */ ]]; then + S3_IGNITE_BOOTSTRAP_FAILURE=${S3_IGNITE_BOOTSTRAP_FAILURE}/ + fi + + if [ -z "$S3_CASSANDRA_NODES_DISCOVERY" ]; then + terminate "Cassandra discovery URL doesn't specified" + fi + + if [[ "$S3_CASSANDRA_NODES_DISCOVERY" != */ ]]; then + S3_CASSANDRA_NODES_DISCOVERY=${S3_CASSANDRA_NODES_DISCOVERY}/ + fi + + if [ -z "$S3_CASSANDRA_BOOTSTRAP_SUCCESS" ]; then + terminate "Cassandra success URL doesn't specified" + fi + + if [[ "$S3_CASSANDRA_BOOTSTRAP_SUCCESS" != */ ]]; then + 
S3_CASSANDRA_BOOTSTRAP_SUCCESS=${S3_CASSANDRA_BOOTSTRAP_SUCCESS}/ + fi + + if [ -z "$S3_CASSANDRA_BOOTSTRAP_FAILURE" ]; then + terminate "Cassandra failure URL doesn't specified" + fi + + if [[ "$S3_CASSANDRA_BOOTSTRAP_FAILURE" != */ ]]; then + S3_CASSANDRA_BOOTSTRAP_FAILURE=${S3_CASSANDRA_BOOTSTRAP_FAILURE}/ + fi + + if [ -z "$S3_GANGLIA_MASTER_DISCOVERY" ]; then + terminate "Ganglia master discovery URL doesn't specified" + fi + + if [[ "$S3_GANGLIA_MASTER_DISCOVERY" != */ ]]; then + S3_GANGLIA_MASTER_DISCOVERY=${S3_GANGLIA_MASTER_DISCOVERY}/ + fi + + if [ -z "$S3_GANGLIA_BOOTSTRAP_SUCCESS" ]; then + terminate "Ganglia master success URL doesn't specified" + fi + + if [[ "$S3_GANGLIA_BOOTSTRAP_SUCCESS" != */ ]]; then + S3_GANGLIA_BOOTSTRAP_SUCCESS=${S3_GANGLIA_BOOTSTRAP_SUCCESS}/ + fi + + if [ -z "$S3_GANGLIA_BOOTSTRAP_FAILURE" ]; then + terminate "Ganglia master failure URL doesn't specified" + fi + + if [[ "$S3_GANGLIA_BOOTSTRAP_FAILURE" != */ ]]; then + S3_GANGLIA_BOOTSTRAP_FAILURE=${S3_GANGLIA_BOOTSTRAP_FAILURE}/ + fi +} + +# Prints EC2 instance info +printInstanceInfo() +{ + if [ "$NODE_TYPE" == "cassandra" ]; then + echo "[INFO] Cassandra download URL: $CASSANDRA_DOWNLOAD_URL" + echo "[INFO] Tests package download URL: $TESTS_PACKAGE_DONLOAD_URL" + echo "[INFO] Ganglia Core download URL: $GANGLIA_CORE_DOWNLOAD_URL" + echo "[INFO] Ganglia Web download URL: $GANGLIA_WEB_DOWNLOAD_URL" + echo "[INFO] RRD download URL: $RRD_DOWNLOAD_URL" + echo "[INFO] Logs URL: $S3_CASSANDRA_LOGS" + echo "[INFO] Logs trigger URL: $S3_LOGS_TRIGGER" + echo "[INFO] Cassandra nodes discovery URL: $S3_CASSANDRA_NODES_DISCOVERY" + echo "[INFO] Ganglia master discovery URL: $S3_GANGLIA_MASTER_DISCOVERY" + echo "[INFO] Cassandra first node lock URL: $S3_CASSANDRA_FIRST_NODE_LOCK" + echo "[INFO] Cassandra nodes join lock URL: $S3_CASSANDRA_NODES_JOIN_LOCK" + echo "[INFO] Cassandra success URL: $S3_CASSANDRA_BOOTSTRAP_SUCCESS" + echo "[INFO] Cassandra failure URL: $S3_CASSANDRA_BOOTSTRAP_FAILURE" + fi + + if [ "$NODE_TYPE" == "ignite" ]; then + echo "[INFO] Ignite download URL: $IGNITE_DOWNLOAD_URL" + echo "[INFO] Tests package download URL: $TESTS_PACKAGE_DONLOAD_URL" + echo "[INFO] Ganglia Core download URL: $GANGLIA_CORE_DOWNLOAD_URL" + echo "[INFO] Ganglia Web download URL: $GANGLIA_WEB_DOWNLOAD_URL" + echo "[INFO] RRD download URL: $RRD_DOWNLOAD_URL" + echo "[INFO] Logs URL: $S3_IGNITE_LOGS" + echo "[INFO] Logs trigger URL: $S3_LOGS_TRIGGER" + echo "[INFO] Ignite node discovery URL: $S3_IGNITE_NODES_DISCOVERY" + echo "[INFO] Cassandra node discovery URL: $S3_CASSANDRA_NODES_DISCOVERY" + echo "[INFO] Ganglia master discovery URL: $S3_GANGLIA_MASTER_DISCOVERY" + echo "[INFO] Ignite first node lock URL: $S3_IGNITE_FIRST_NODE_LOCK" + echo "[INFO] Ignite nodes join lock URL: $S3_IGNITE_NODES_JOIN_LOCK" + echo "[INFO] Ignite success URL: $S3_IGNITE_BOOTSTRAP_SUCCESS" + echo "[INFO] Ignite failure URL: $S3_IGNITE_BOOTSTRAP_FAILURE" + fi + + if [ "$NODE_TYPE" == "test" ]; then + echo "[INFO] Tests type: $TESTS_TYPE" + echo "[INFO] Test nodes count: $TEST_NODES_COUNT" + echo "[INFO] Ignite nodes count: $IGNITE_NODES_COUNT" + echo "[INFO] Cassandra nodes count: $CASSANDRA_NODES_COUNT" + echo "[INFO] Tests summary URL: $S3_TESTS_SUMMARY" + echo "[INFO] ----------------------------------------------------" + echo "[INFO] Tests package download URL: $TESTS_PACKAGE_DONLOAD_URL" + echo "[INFO] Ganglia Core download URL: $GANGLIA_CORE_DOWNLOAD_URL" + echo "[INFO] Ganglia Web download URL: $GANGLIA_WEB_DOWNLOAD_URL" + echo 
"[INFO] RRD download URL: $RRD_DOWNLOAD_URL" + echo "[INFO] Logs URL: $S3_TESTS_LOGS" + echo "[INFO] Logs trigger URL: $S3_LOGS_TRIGGER" + echo "[INFO] Test node discovery URL: $S3_TESTS_NODES_DISCOVERY" + echo "[INFO] Ignite node discovery URL: $S3_IGNITE_NODES_DISCOVERY" + echo "[INFO] Cassandra node discovery URL: $S3_CASSANDRA_NODES_DISCOVERY" + echo "[INFO] Ganglia master discovery URL: $S3_GANGLIA_MASTER_DISCOVERY" + echo "[INFO] Tests trigger URL: $S3_TESTS_TRIGGER" + echo "[INFO] Tests idle URL: $S3_TESTS_IDLE" + echo "[INFO] Tests preparing URL: $S3_TESTS_PREPARING" + echo "[INFO] Tests waiting URL: $S3_TESTS_WAITING" + echo "[INFO] Tests running URL: $S3_TESTS_RUNNING" + echo "[INFO] Tests success URL: $S3_TESTS_SUCCESS" + echo "[INFO] Tests failure URL: $S3_TESTS_FAILURE" + echo "[INFO] Ignite success URL: $S3_IGNITE_BOOTSTRAP_SUCCESS" + echo "[INFO] Ignite failure URL: $S3_IGNITE_BOOTSTRAP_FAILURE" + echo "[INFO] Cassandra success URL: $S3_CASSANDRA_BOOTSTRAP_SUCCESS" + echo "[INFO] Cassandra failure URL: $S3_CASSANDRA_BOOTSTRAP_FAILURE" + fi + + if [ "$NODE_TYPE" == "ganglia" ]; then + echo "[INFO] Ganglia Core download URL: $GANGLIA_CORE_DOWNLOAD_URL" + echo "[INFO] Ganglia Web download URL: $GANGLIA_WEB_DOWNLOAD_URL" + echo "[INFO] RRD download URL: $RRD_DOWNLOAD_URL" + echo "[INFO] Tests package download URL: $TESTS_PACKAGE_DONLOAD_URL" + echo "[INFO] Logs URL: $S3_GANGLIA_LOGS" + echo "[INFO] Logs trigger URL: $S3_LOGS_TRIGGER" + echo "[INFO] Ganglia master discovery URL: $S3_GANGLIA_MASTER_DISCOVERY" + echo "[INFO] Ganglia success URL: $S3_GANGLIA_BOOTSTRAP_SUCCESS" + echo "[INFO] Ganglia failure URL: $S3_GANGLIA_BOOTSTRAP_FAILURE" + fi +} + +# Clone git repository +gitClone() +{ + echo "[INFO] Cloning git repository $1 to $2" + + rm -Rf $2 + + for i in 0 9; + do + git clone $1 $2 + + if [ $code -eq 0 ]; then + echo "[INFO] Git repository $1 was successfully cloned to $2" + return 0 + fi + + echo "[WARN] Failed to clone git repository $1 from $i attempt, sleeping extra 5sec" + rm -Rf $2 + sleep 5s + done + + terminate "All 10 attempts to clone git repository $1 are failed" +} + +# Applies specified tag to EC2 instance +createTag() +{ + if [ -z "$EC2_INSTANCE_REGION" ]; then + EC2_AVAIL_ZONE=`curl -s http://169.254.169.254/latest/meta-data/placement/availability-zone` + EC2_INSTANCE_REGION="`echo \"$EC2_AVAIL_ZONE\" | sed -e 's:\([0-9][0-9]*\)[a-z]*\$:\\1:'`" + export EC2_INSTANCE_REGION + echo "[INFO] EC2 instance region: $EC2_INSTANCE_REGION" + fi + + for i in 0 9; + do + aws ec2 create-tags --resources $1 --tags Key=$2,Value=$3 --region $EC2_INSTANCE_REGION + if [ $? -eq 0 ]; then + return 0 + fi + + echo "[WARN] $i attempt to tag EC2 instance $1 with $2=$3 is failed, sleeping extra 5sec" + sleep 5s + done + + terminate "All 10 attempts to tag EC2 instance $1 with $2=$3 are failed" +} + +# Applies 'owner', 'project' and 'Name' tags to EC2 instance +tagInstance() +{ + export EC2_HOME=/opt/aws/apitools/ec2 + export JAVA_HOME=/opt/java + export PATH=$JAVA_HOME/bin:$EC2_HOME/bin:$PATH + + INSTANCE_ID=$(curl http://169.254.169.254/latest/meta-data/instance-id) + if [ $? 
-ne 0 ]; then + echo "[ERROR] Failed to get instance metadata to tag it" + exit 1 + fi + + INSTANCE_NAME= + + if [ "$NODE_TYPE" == "cassandra" ]; then + INSTANCE_NAME=$EC2_CASSANDRA_TAG + elif [ "$NODE_TYPE" == "ignite" ]; then + INSTANCE_NAME=$EC2_IGNITE_TAG + elif [ "$NODE_TYPE" == "test" ]; then + INSTANCE_NAME=$EC2_TEST_TAG + elif [ "$NODE_TYPE" == "ganglia" ]; then + INSTANCE_NAME=$EC2_GANGLIA_TAG + fi + + if [ -n "$INSTANCE_NAME" ]; then + createTag "$INSTANCE_ID" "Name" "${INSTANCE_NAME}" + fi + + if [ -n "$EC2_OWNER_TAG" ]; then + createTag "$INSTANCE_ID" "owner" "${EC2_OWNER_TAG}" + fi + + if [ -n "$EC2_PROJECT_TAG" ]; then + createTag "$INSTANCE_ID" "project" "${EC2_PROJECT_TAG}" + fi +} + +# Sets NODE_TYPE env variable +setNodeType() +{ + if [ -n "$1" ]; then + NEW_NODE_TYPE=$NODE_TYPE + NODE_TYPE=$1 + else + NEW_NODE_TYPE= + fi +} + +# Reverts NODE_TYPE env variable to previous value +revertNodeType() +{ + if [ -n "$NEW_NODE_TYPE" ]; then + NODE_TYPE=$NEW_NODE_TYPE + NEW_NODE_TYPE= + fi +} + +# Returns logs folder for the node (Cassandra, Ignite, Tests) +getLocalLogsFolder() +{ + setNodeType $1 + + if [ "$NODE_TYPE" == "cassandra" ]; then + echo "/opt/cassandra/logs" + elif [ "$NODE_TYPE" == "ignite" ]; then + echo "/opt/ignite/work/log" + elif [ "$NODE_TYPE" == "test" ]; then + echo "/opt/ignite-cassandra-tests/logs" + elif [ "$NODE_TYPE" == "ganglia" ]; then + echo "" + fi + + revertNodeType +} + +# Returns S3 URL to discover this node +getDiscoveryUrl() +{ + setNodeType $1 + + if [ "$NODE_TYPE" == "cassandra" ]; then + echo "$S3_CASSANDRA_NODES_DISCOVERY" + elif [ "$NODE_TYPE" == "ignite" ]; then + echo "$S3_IGNITE_NODES_DISCOVERY" + elif [ "$NODE_TYPE" == "test" ]; then + echo "$S3_TESTS_NODES_DISCOVERY" + elif [ "$NODE_TYPE" == "ganglia" ]; then + echo "$S3_GANGLIA_MASTER_DISCOVERY" + fi + + revertNodeType +} + +# Returns S3 URL used as a join lock, used by nodes to join cluster sequentially +getJoinLockUrl() +{ + setNodeType $1 + + if [ "$NODE_TYPE" == "cassandra" ]; then + echo "$S3_CASSANDRA_NODES_JOIN_LOCK" + elif [ "$NODE_TYPE" == "ignite" ]; then + echo "$S3_IGNITE_NODES_JOIN_LOCK" + fi + + revertNodeType +} + +# Returns S3 URL used to select first node for the cluster. 
The first node is responsible +# for doing all routine work (clean S3 logs/test results from previous execution) on cluster startup +getFirstNodeLockUrl() +{ + setNodeType $1 + + if [ "$NODE_TYPE" == "cassandra" ]; then + echo "$S3_CASSANDRA_FIRST_NODE_LOCK" + elif [ "$NODE_TYPE" == "ignite" ]; then + echo "$S3_IGNITE_FIRST_NODE_LOCK" + elif [ "$NODE_TYPE" == "test" ]; then + echo "$S3_TESTS_FIRST_NODE_LOCK" + fi + + revertNodeType +} + +# Returns S3 success URL for the node - folder created in S3 in case node successfully started and containing node logs +getSucessUrl() +{ + setNodeType $1 + + if [ "$NODE_TYPE" == "cassandra" ]; then + echo "$S3_CASSANDRA_BOOTSTRAP_SUCCESS" + elif [ "$NODE_TYPE" == "ignite" ]; then + echo "$S3_IGNITE_BOOTSTRAP_SUCCESS" + elif [ "$NODE_TYPE" == "test" ]; then + echo "$S3_TESTS_SUCCESS" + elif [ "$NODE_TYPE" == "ganglia" ]; then + echo "$S3_GANGLIA_BOOTSTRAP_SUCCESS" + fi + + revertNodeType +} + +# Returns S3 failure URL for the node - folder created in S3 in case node failed to start and containing node logs +getFailureUrl() +{ + setNodeType $1 + + if [ "$NODE_TYPE" == "cassandra" ]; then + echo "$S3_CASSANDRA_BOOTSTRAP_FAILURE" + elif [ "$NODE_TYPE" == "ignite" ]; then + echo "$S3_IGNITE_BOOTSTRAP_FAILURE" + elif [ "$NODE_TYPE" == "test" ]; then + echo "$S3_TESTS_FAILURE" + elif [ "$NODE_TYPE" == "ganglia" ]; then + echo "$S3_GANGLIA_BOOTSTRAP_FAILURE" + fi + + revertNodeType +} + +# Terminates script execution, unregisters node and removes all the locks (join lock, first node lock) created by it +terminate() +{ + SUCCESS_URL=$(getSucessUrl) + FAILURE_URL=$(getFailureUrl) + + if [ -n "$SUCCESS_URL" ] && [[ "$SUCCESS_URL" != */ ]]; then + SUCCESS_URL=${SUCCESS_URL}/ + fi + + if [ -n "$FAILURE_URL" ] && [[ "$FAILURE_URL" != */ ]]; then + FAILURE_URL=${FAILURE_URL}/ + fi + + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + + msg=$HOST_NAME + + if [ -n "$1" ]; then + echo "[ERROR] $1" + echo "[ERROR]-----------------------------------------------------" + echo "[ERROR] Failed to start $NODE_TYPE node" + echo "[ERROR]-----------------------------------------------------" + msg=$1 + reportFolder=${FAILURE_URL}${HOST_NAME} + reportFile=$reportFolder/__error__ + else + echo "[INFO]-----------------------------------------------------" + echo "[INFO] $NODE_TYPE node successfully started" + echo "[INFO]-----------------------------------------------------" + reportFolder=${SUCCESS_URL}${HOST_NAME} + reportFile=$reportFolder/__success__ + fi + + echo $msg > /opt/ignite-cassandra-tests/bootstrap/start_result + + aws s3 rm --recursive $reportFolder + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to drop report folder: $reportFolder" + fi + + localLogs=$(getLocalLogsFolder) + + if [ -d "$localLogs" ]; then + aws s3 sync --sse AES256 $localLogs $reportFolder + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to export $NODE_TYPE logs to: $reportFolder" + fi + fi + + aws s3 cp --sse AES256 /opt/ignite-cassandra-tests/bootstrap/start_result $reportFile + if [ $? 
-ne 0 ]; then + echo "[ERROR] Failed to export node start result to: $reportFile" + fi + + rm -f /opt/ignite-cassandra-tests/bootstrap/start_result /opt/ignite-cassandra-tests/bootstrap/join-lock /opt/ignite-cassandra-tests/bootstrap/first-node-lock + + removeClusterJoinLock + + if [ "$NODE_TYPE" == "test" ]; then + aws s3 rm ${S3_TESTS_RUNNING}${HOST_NAME} + aws s3 rm ${S3_TESTS_WAITING}${HOST_NAME} + aws s3 rm ${S3_TESTS_IDLE}${HOST_NAME} + aws s3 rm ${S3_TESTS_PREPARING}${HOST_NAME} + unregisterNode + fi + + if [ -n "$1" ]; then + removeFirstNodeLock + unregisterNode + exit 1 + fi + + exit 0 +} + +# Registers node by creating a file having node hostname inside specific folder in S3 +registerNode() +{ + DISCOVERY_URL=$(getDiscoveryUrl) + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + + echo "[INFO] Registering $NODE_TYPE node: ${DISCOVERY_URL}${HOST_NAME}" + + aws s3 cp --sse AES256 /etc/hosts ${DISCOVERY_URL}${HOST_NAME} + if [ $? -ne 0 ]; then + terminate "Failed to register $NODE_TYPE node info in: ${DISCOVERY_URL}${HOST_NAME}" + fi + + echo "[INFO] $NODE_TYPE node successfully registered" +} + +# Unregisters node by removing a file having node hostname inside specific folder in S3 +unregisterNode() +{ + DISCOVERY_URL=$(getDiscoveryUrl) + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + + echo "[INFO] Removing $NODE_TYPE node registration from: ${DISCOVERY_URL}${HOST_NAME}" + + exists=$(aws s3 ls ${DISCOVERY_URL}${HOST_NAME}) + + if [ -n "$exists" ]; then + aws s3 rm ${DISCOVERY_URL}${HOST_NAME} + + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to remove $NODE_TYPE node registration" + else + echo "[INFO] $NODE_TYPE node registration removed" + fi + else + echo "[INFO] Node registration actually haven't been previously created" + fi +} + +# Cleans up all nodes metadata for particular cluster (Cassandra, Ignite, Tests). Performed only by the node acquired +# first node lock. +cleanupMetadata() +{ + DISCOVERY_URL=$(getDiscoveryUrl) + JOIN_LOCK_URL=$(getJoinLockUrl) + SUCCESS_URL=$(getSucessUrl) + FAILURE_URL=$(getFailureUrl) + + echo "[INFO] Running metadata cleanup" + + aws s3 rm $JOIN_LOCK_URL + aws s3 rm --recursive $DISCOVERY_URL + aws s3 rm --recursive $SUCCESS_URL + aws s3 rm --recursive $FAILURE_URL + + echo "[INFO] Metadata cleanup completed" +} + +# Tries to get first node lock for the node. Only one (first) node can have such lock and it will be responsible for +# cleanup process when starting cluster +tryToGetFirstNodeLock() +{ + if [ "$FIRST_NODE_LOCK" == "true" ]; then + return 0 + fi + + FIRST_NODE_LOCK_URL=$(getFirstNodeLockUrl) + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + + echo "[INFO] Trying to get first node lock: $FIRST_NODE_LOCK_URL" + + checkFirstNodeLockExist $FIRST_NODE_LOCK_URL + if [ $? -ne 0 ]; then + return 1 + fi + + echo "$HOST_NAME" > /opt/ignite-cassandra-tests/bootstrap/first-node-lock + + createFirstNodeLock $FIRST_NODE_LOCK_URL + + sleep 5s + + rm -Rf /opt/ignite-cassandra-tests/bootstrap/first-node-lock + + aws s3 cp $FIRST_NODE_LOCK_URL /opt/ignite-cassandra-tests/bootstrap/first-node-lock + if [ $? 
-ne 0 ]; then + echo "[WARN] Failed to check just created first node lock" + return 1 + fi + + first_host=$(cat /opt/ignite-cassandra-tests/bootstrap/first-node-lock) + + rm -f /opt/ignite-cassandra-tests/bootstrap/first-node-lock + + if [ "$first_host" != "$HOST_NAME" ]; then + echo "[INFO] Node $first_host has discarded previously created first node lock" + return 1 + fi + + echo "[INFO] Congratulations, got first node lock" + + FIRST_NODE_LOCK="true" + + return 0 +} + +# Checks if first node lock already exists in S3 +checkFirstNodeLockExist() +{ + echo "[INFO] Checking for the first node lock: $1" + + lockExists=$(aws s3 ls $1) + if [ -n "$lockExists" ]; then + echo "[INFO] First node lock already exists" + return 1 + fi + + echo "[INFO] First node lock doesn't exist" + + return 0 +} + +# Creates first node lock in S3 +createFirstNodeLock() +{ + aws s3 cp --sse AES256 /opt/ignite-cassandra-tests/bootstrap/first-node-lock $1 + + if [ $? -ne 0 ]; then + terminate "Failed to create first node lock: $1" + fi + + echo "[INFO] Created first node lock: $1" +} + +# Removes first node lock from S3 +removeFirstNodeLock() +{ + if [ "$FIRST_NODE_LOCK" != "true" ]; then + return 0 + fi + + FIRST_NODE_LOCK_URL=$(getFirstNodeLockUrl) + + echo "[INFO] Removing first node lock: $FIRST_NODE_LOCK_URL" + + aws s3 rm $FIRST_NODE_LOCK_URL + + if [ $? -ne 0 ]; then + terminate "Failed to remove first node lock: $FIRST_NODE_LOCK_URL" + fi + + echo "[INFO] Removed first node lock: $FIRST_NODE_LOCK_URL" + + FIRST_NODE_LOCK="false" +} + +# Tries to get cluster join lock. Nodes use this lock to join a cluster sequentially. +tryToGetClusterJoinLock() +{ + if [ "$JOIN_LOCK" == "true" ]; then + return 0 + fi + + JOIN_LOCK_URL=$(getJoinLockUrl) + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + + echo "[INFO] Trying to get cluster join lock" + + checkClusterJoinLockExist $JOIN_LOCK_URL + if [ $? -ne 0 ]; then + return 1 + fi + + echo "$HOST_NAME" > /opt/ignite-cassandra-tests/bootstrap/join-lock + + createClusterJoinLock $JOIN_LOCK_URL + + sleep 5s + + rm -Rf /opt/ignite-cassandra-tests/bootstrap/join-lock + + aws s3 cp $JOIN_LOCK_URL /opt/ignite-cassandra-tests/bootstrap/join-lock + if [ $? -ne 0 ]; then + echo "[WARN] Failed to check just created cluster join lock" + return 1 + fi + + join_host=$(cat /opt/ignite-cassandra-tests/bootstrap/join-lock) + + if [ "$join_host" != "$HOST_NAME" ]; then + echo "[INFO] Node $first_host has discarded previously created cluster join lock" + return 1 + fi + + echo "[INFO] Congratulations, got cluster join lock" + + JOIN_LOCK="true" + + return 0 +} + +# Checks if join lock already exists in S3 +checkClusterJoinLockExist() +{ + echo "[INFO] Checking for the cluster join lock: $1" + + lockExists=$(aws s3 ls $1) + if [ -n "$lockExists" ]; then + echo "[INFO] Cluster join lock already exists" + return 1 + fi + + if [ "$NODE_TYPE" == "cassandra" ]; then + status=$(/opt/cassandra/bin/nodetool -h $CASSANDRA_SEED status) + leaving=$(echo $status | grep UL) + moving=$(echo $status | grep UM) + joining=$(echo $status | grep UJ) + + if [ -n "$leaving" ] || [ -n "$moving" ] || [ -n "$joining" ]; then + echo "[INFO] Cluster join lock doesn't exist in S3, but some node still trying to join Cassandra cluster" + return 1 + fi + fi + + echo "[INFO] Cluster join lock doesn't exist" +} + +# Creates join lock in S3 +createClusterJoinLock() +{ + aws s3 cp --sse AES256 /opt/ignite-cassandra-tests/bootstrap/join-lock $1 + + if [ $? 
-ne 0 ]; then + terminate "Failed to create cluster join lock: $1" + fi + + echo "[INFO] Created cluster join lock: $1" +} + +# Removes join lock +removeClusterJoinLock() +{ + if [ "$JOIN_LOCK" != "true" ]; then + return 0 + fi + + JOIN_LOCK_URL=$(getJoinLockUrl) + + echo "[INFO] Removing cluster join lock: $JOIN_LOCK_URL" + + aws s3 rm $JOIN_LOCK_URL + + if [ $? -ne 0 ]; then + terminate "Failed to remove cluster join lock: $JOIN_LOCK_URL" + fi + + JOIN_LOCK="false" + + echo "[INFO] Removed cluster join lock: $JOIN_LOCK_URL" +} + +# Waits for the node to join cluster, periodically trying to acquire cluster join lock and exiting only when node +# successfully acquired the lock. Such mechanism used by nodes to join cluster sequentially (limitation of Cassandra). +waitToJoinCluster() +{ + echo "[INFO] Waiting to join $NODE_TYPE cluster" + + while true; do + tryToGetClusterJoinLock + + if [ $? -ne 0 ]; then + echo "[INFO] Another node is trying to join cluster. Waiting for extra 30sec." + sleep 30s + else + echo "[INFO]-------------------------------------------------------------" + echo "[INFO] Congratulations, got lock to join $NODE_TYPE cluster" + echo "[INFO]-------------------------------------------------------------" + break + fi + done +} + +# Wait for the cluster to register at least one node in S3, so that all other nodes will use already existing nodes +# to send them info about them and join the cluster +setupClusterSeeds() +{ + if [ "$1" != "cassandra" ] && [ "$1" != "ignite" ] && [ "$1" != "test" ]; then + terminate "Incorrect cluster type specified '$1' to setup seeds" + fi + + DISCOVERY_URL=$(getDiscoveryUrl $1) + + echo "[INFO] Setting up $1 seeds" + + echo "[INFO] Looking for $1 seeds in: $DISCOVERY_URL" + + startTime=$(date +%s) + + while true; do + seeds=$(aws s3 ls $DISCOVERY_URL | grep -v PRE | sed -r "s/^.* //g") + if [ -n "$seeds" ]; then + seeds=($seeds) + length=${#seeds[@]} + + if [ $length -lt 4 ]; then + seed1=${seeds[0]} + seed2=${seeds[1]} + seed3=${seeds[2]} + else + pos1=$(($RANDOM%$length)) + pos2=$(($RANDOM%$length)) + pos3=$(($RANDOM%$length)) + seed1=${seeds[${pos1}]} + seed2=${seeds[${pos2}]} + seed3=${seeds[${pos3}]} + fi + + CLUSTER_SEEDS=$seed1 + + if [ "$seed2" != "$seed1" ] && [ -n "$seed2" ]; then + CLUSTER_SEEDS="$CLUSTER_SEEDS $seed2" + fi + + if [ "$seed3" != "$seed2" ] && [ "$seed3" != "$seed1" ] && [ -n "$seed3" ]; then + CLUSTER_SEEDS="$CLUSTER_SEEDS $seed3" + fi + + echo "[INFO] Using $1 seeds: $CLUSTER_SEEDS" + + return 0 + fi + + currentTime=$(date +%s) + duration=$(( $currentTime-$startTime )) + duration=$(( $duration/60 )) + + if [ "$2" == "true" ]; then + if [ $duration -gt $SERVICE_STARTUP_TIME ]; then + terminate "${SERVICE_STARTUP_TIME}min timeout expired, but first $1 node is still not up and running" + fi + fi + + echo "[INFO] Waiting for the first $1 node to start and publish its seed, time passed ${duration}min" + + sleep 30s + done +} + +# Wait until first cluster node registered in S3 +waitFirstClusterNodeRegistered() +{ + DISCOVERY_URL=$(getDiscoveryUrl) + + echo "[INFO] Waiting for the first $NODE_TYPE node to register in: $DISCOVERY_URL" + + startTime=$(date +%s) + + while true; do + exists=$(aws s3 ls $DISCOVERY_URL) + if [ -n "$exists" ]; then + break + fi + + if [ "$1" == "true" ]; then + currentTime=$(date +%s) + duration=$(( $currentTime-$startTime )) + duration=$(( $duration/60 )) + + if [ $duration -gt $SERVICE_STARTUP_TIME ]; then + terminate "${SERVICE_STARTUP_TIME}min timeout expired, but first $type node is still 
not up and running" + fi + fi + + echo "[INFO] Waiting extra 30sec" + + sleep 30s + done + + echo "[INFO] First $type node registered" +} + +# Waits until all cluster nodes successfully bootstrapped. In case of Tests cluster also waits until all nodes +# switch to waiting state +waitAllClusterNodesReady() +{ + if [ "$1" == "cassandra" ]; then + NODES_COUNT=$CASSANDRA_NODES_COUNT + elif [ "$1" == "ignite" ]; then + NODES_COUNT=$IGNITE_NODES_COUNT + elif [ "$1" == "test" ]; then + NODES_COUNT=$TEST_NODES_COUNT + else + terminate "Incorrect cluster type specified '$1' to wait for all nodes up and running" + fi + + SUCCESS_URL=$(getSucessUrl $1) + + if [ $NODES_COUNT -eq 0 ]; then + return 0 + fi + + echo "[INFO] Waiting for all $NODES_COUNT $1 nodes ready" + + while true; do + if [ "$1" == "test" ]; then + count1=$(aws s3 ls $S3_TESTS_WAITING | wc -l) + count2=$(aws s3 ls $S3_TESTS_RUNNING | wc -l) + count=$(( $count1+$count2 )) + else + count=$(aws s3 ls $SUCCESS_URL | wc -l) + fi + + if [ $count -ge $NODES_COUNT ]; then + break + fi + + echo "[INFO] Waiting extra 30sec" + + sleep 30s + done + + sleep 30s + + echo "[INFO] Congratulation, all $NODES_COUNT $1 nodes are ready" +} + +# Wait untill all Tests cluster nodes completed their tests execution +waitAllTestNodesCompletedTests() +{ + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + + echo "[INFO] Waiting for all $TEST_NODES_COUNT test nodes to complete their tests" + + while true; do + + count=$(aws s3 ls $S3_TESTS_RUNNING | grep -v $HOST_NAME | wc -l) + + if [ $count -eq 0 ]; then + break + fi + + echo "[INFO] Waiting extra 30sec" + + sleep 30s + done + + echo "[INFO] Congratulation, all $TEST_NODES_COUNT test nodes have completed their tests" +} + +# Installs all required Ganglia packages +installGangliaPackages() +{ + if [ "$1" == "master" ]; then + echo "[INFO] Installing Ganglia master required packages" + else + echo "[INFO] Installing Ganglia agent required packages" + fi + + isAmazonLinux=$(cat "/etc/issue" | grep "Amazon Linux") + + if [ -z "$isAmazonLinux" ]; then + setenforce 0 + + if [ $? -ne 0 ]; then + terminate "Failed to turn off SELinux" + fi + + downloadPackage "$EPEL_DOWNLOAD_URL" "/opt/epel.rpm" "EPEL" + + rpm -Uvh /opt/epel.rpm + if [ $? -ne 0 ]; then + terminate "Failed to setup EPEL repository" + fi + + rm -f /opt/epel.rpm + fi + + yum -y install apr-devel apr-util check-devel cairo-devel pango-devel pango \ + libxml2-devel glib2-devel dbus-devel freetype-devel freetype \ + libpng-devel libart_lgpl-devel fontconfig-devel gcc-c++ expat-devel \ + python-devel libXrender-devel perl-devel perl-CPAN gettext git sysstat \ + automake autoconf ltmain.sh pkg-config gperf libtool pcre-devel libconfuse-devel + + if [ $? -ne 0 ]; then + terminate "Failed to install all Ganglia required packages" + fi + + if [ "$1" == "master" ]; then + yum -y install httpd php php-devel php-pear + + if [ $? -ne 0 ]; then + terminate "Failed to install all Ganglia required packages" + fi + + if [ -z "$isAmazonLinux" ]; then + yum -y install liberation-sans-fonts + + if [ $? -ne 0 ]; then + terminate "Failed to install liberation-sans-fonts package" + fi + fi + fi + + if [ -z "$isAmazonLinux" ]; then + downloadPackage "$GPERF_DOWNLOAD_URL" "/opt/gperf.tar.gz" "gperf" + + tar -xvzf /opt/gperf.tar.gz -C /opt + if [ $? -ne 0 ]; then + terminate "Failed to untar gperf tarball" + fi + + rm -Rf /opt/gperf.tar.gz + + unzipDir=$(ls /opt | grep "gperf") + + if [ $? 
-ne 0 ]; then + terminate "Failed to update creation date to current for all files inside: /opt/$unzipDir" + fi + + pushd /opt/$unzipDir + + cat ./configure | sed -r "s/test \"\\\$2\" = conftest.file/test 1 = 1/g" > ./configure1 + rm ./configure + mv ./configure1 ./configure + chmod a+x ./configure + + ./configure + if [ $? -ne 0 ]; then + terminate "Failed to configure gperf" + fi + + make + if [ $? -ne 0 ]; then + terminate "Failed to make gperf" + fi + + make install + if [ $? -ne 0 ]; then + terminate "Failed to install gperf" + fi + + echo "[INFO] gperf tool successfully installed" + + popd + fi + + echo "[INFO] Installing rrdtool" + + downloadPackage "$RRD_DOWNLOAD_URL" "/opt/rrdtool.tar.gz" "rrdtool" + + tar -xvzf /opt/rrdtool.tar.gz -C /opt + if [ $? -ne 0 ]; then + terminate "Failed to untar rrdtool tarball" + fi + + rm -Rf /opt/rrdtool.tar.gz + + unzipDir=$(ls /opt | grep "rrdtool") + if [ "$unzipDir" != "rrdtool" ]; then + mv /opt/$unzipDir /opt/rrdtool + fi + + if [ $? -ne 0 ]; then + terminate "Failed to update creation date to current for all files inside: /opt/rrdtool" + fi + + export PKG_CONFIG_PATH=/usr/lib/pkgconfig/ + + pushd /opt/rrdtool + + cat ./configure | sed -r "s/test \"\\\$2\" = conftest.file/test 1 = 1/g" > ./configure1 + rm ./configure + mv ./configure1 ./configure + chmod a+x ./configure + + ./configure --prefix=/usr/local/rrdtool + if [ $? -ne 0 ]; then + terminate "Failed to configure rrdtool" + fi + + make + if [ $? -ne 0 ]; then + terminate "Failed to make rrdtool" + fi + + make install + if [ $? -ne 0 ]; then + terminate "Failed to install rrdtool" + fi + + ln -s /usr/local/rrdtool/bin/rrdtool /usr/bin/rrdtool + mkdir -p /var/lib/ganglia/rrds + + chown -R nobody:nobody /usr/local/rrdtool /var/lib/ganglia/rrds /usr/bin/rrdtool + + rm -Rf /opt/rrdtool + + popd + + echo "[INFO] rrdtool successfully installed" + + echo "[INFO] Installig ganglia-core" + + gitClone $GANGLIA_CORE_DOWNLOAD_URL /opt/monitor-core + + if [ $? -ne 0 ]; then + terminate "Failed to update creation date to current for all files inside: /opt/monitor-core" + fi + + pushd /opt/monitor-core + + git checkout efe9b5e5712ea74c04e3b15a06eb21900e18db40 + + ./bootstrap + + if [ $? -ne 0 ]; then + terminate "Failed to prepare ganglia-core for compilation" + fi + + cat ./configure | sed -r "s/test \"\\\$2\" = conftest.file/test 1 = 1/g" > ./configure1 + rm ./configure + mv ./configure1 ./configure + chmod a+x ./configure + + ./configure --with-gmetad --with-librrd=/usr/local/rrdtool + + if [ $? -ne 0 ]; then + terminate "Failed to configure ganglia-core" + fi + + make + if [ $? -ne 0 ]; then + terminate "Failed to make ganglia-core" + fi + + make install + if [ $? -ne 0 ]; then + terminate "Failed to install ganglia-core" + fi + + rm -Rf /opt/monitor-core + + popd + + echo "[INFO] ganglia-core successfully installed" + + if [ "$1" != "master" ]; then + return 0 + fi + + echo "[INFO] Installing ganglia-web" + + gitClone $GANGLIA_WEB_DOWNLOAD_URL /opt/web + + if [ $? 
-ne 0 ]; then + terminate "Failed to update creation date to current for all files inside: /opt/web" + fi + + cat /opt/web/Makefile | sed -r "s/GDESTDIR = \/usr\/share\/ganglia-webfrontend/GDESTDIR = \/opt\/ganglia-web/g" > /opt/web/Makefile1 + cat /opt/web/Makefile1 | sed -r "s/GCONFDIR = \/etc\/ganglia-web/GCONFDIR = \/opt\/ganglia-web/g" > /opt/web/Makefile2 + cat /opt/web/Makefile2 | sed -r "s/GWEB_STATEDIR = \/var\/lib\/ganglia-web/GWEB_STATEDIR = \/opt\/ganglia-web/g" > /opt/web/Makefile3 + cat /opt/web/Makefile3 | sed -r "s/APACHE_USER = www-data/APACHE_USER = apache/g" > /opt/web/Makefile4 + + rm -f /opt/web/Makefile + cp /opt/web/Makefile4 /opt/web/Makefile + rm -f /opt/web/Makefile1 /opt/web/Makefile2 /opt/web/Makefile3 /opt/web/Makefile4 + + pushd /opt/web + + git checkout f2b19c7cacfc8c51921be801b92f8ed0bd4901ae + + make + + if [ $? -ne 0 ]; then + terminate "Failed to make ganglia-web" + fi + + make install + + if [ $? -ne 0 ]; then + terminate "Failed to install ganglia-web" + fi + + rm -Rf /opt/web + + popd + + echo "" >> /etc/httpd/conf/httpd.conf + echo "Alias /ganglia /opt/ganglia-web" >> /etc/httpd/conf/httpd.conf + echo "" >> /etc/httpd/conf/httpd.conf + echo " AllowOverride All" >> /etc/httpd/conf/httpd.conf + echo " Order allow,deny" >> /etc/httpd/conf/httpd.conf + + if [ -z "$isAmazonLinux" ]; then + echo " Require all granted" >> /etc/httpd/conf/httpd.conf + fi + + echo " Allow from all" >> /etc/httpd/conf/httpd.conf + echo " Deny from none" >> /etc/httpd/conf/httpd.conf + echo "" >> /etc/httpd/conf/httpd.conf + + echo "[INFO] ganglia-web successfully installed" +} + +# Setup ntpd service +setupNTP() +{ + echo "[INFO] Installing ntp package" + + yum -y install ntp + + if [ $? -ne 0 ]; then + terminate "Failed to install ntp package" + fi + + echo "[INFO] Starting ntpd service" + + service ntpd restart + + if [ $? -ne 0 ]; then + terminate "Failed to restart ntpd service" + fi +} + +# Installs and run Ganglia agent ('gmond' daemon) +bootstrapGangliaAgent() +{ + echo "[INFO]-----------------------------------------------------------------" + echo "[INFO] Bootstrapping Ganglia agent" + echo "[INFO]-----------------------------------------------------------------" + + installGangliaPackages + + echo "[INFO] Running ganglia agent daemon to discover Ganglia master" + + /opt/ignite-cassandra-tests/bootstrap/aws/ganglia/agent-start.sh $1 $2 > /opt/ganglia-agent.log & + + echo "[INFO] Ganglia daemon job id: $!" +} + +# Partitioning, formatting to ext4 and mounting all unpartitioned drives. +# As a result env array MOUNT_POINTS provides all newly created mount points. +mountUnpartitionedDrives() +{ + MOUNT_POINTS= + + echo "[INFO] Mounting unpartitioned drives" + + lsblk -V &> /dev/null + + if [ $? -ne 0 ]; then + echo "[WARN] lsblk utility doesn't exist" + echo "[INFO] Installing util-linux-ng package" + + yum -y install util-linux-ng + + if [ $? -ne 0 ]; then + terminate "Failed to install util-linux-ng package" + fi + fi + + parted -v &> /dev/null + + if [ $? -ne 0 ]; then + echo "[WARN] parted utility doesn't exist" + echo "[INFO] Installing parted package" + + yum -y install parted + + if [ $? 
-ne 0 ]; then + terminate "Failed to install parted package" + fi + fi + + drives=$(lsblk -io KNAME,TYPE | grep disk | sed -r "s/disk//g" | xargs) + + echo "[INFO] Found HDDs: $drives" + + unpartDrives= + partDrives=$(lsblk -io KNAME,TYPE | grep part | sed -r "s/[0-9]*//g" | sed -r "s/part//g" | xargs) + + drives=($drives) + count=${#drives[@]} + iter=1 + + for (( i=0; i<=$(( $count -1 )); i++ )) + do + drive=${drives[$i]} + + if [ -z "$drive" ]; then + continue + fi + + isPartitioned=$(echo $partDrives | grep "$drive") + + if [ -n "$isPartitioned" ]; then + continue + fi + + echo "[INFO] Creating partition for the drive: $drive" + + parted -s -a opt /dev/$drive mklabel gpt mkpart primary 0% 100% + + if [ $? -ne 0 ]; then + terminate "Failed to create partition for the drive: $drive" + fi + + partition=$(lsblk -io KNAME,TYPE | grep part | grep $drive | sed -r "s/part//g" | xargs) + + echo "[INFO] Successfully created partition $partition for the drive: $drive" + + echo "[INFO] Formatting partition /dev/$partition to ext4" + + mkfs.ext4 -F -q /dev/$partition + + if [ $? -ne 0 ]; then + terminate "Failed to format partition: /dev/$partition" + fi + + echo "[INFO] Partition /dev/$partition was successfully formatted to ext4" + + echo "[INFO] Mounting partition /dev/$partition to /storage$iter" + + mkdir -p /storage$iter + + if [ $? -ne 0 ]; then + terminate "Failed to create mount point directory: /storage$iter" + fi + + echo "/dev/$partition /storage$iter ext4 defaults 1 1" >> /etc/fstab + + mount /storage$iter + + if [ $? -ne 0 ]; then + terminate "Failed to mount /storage$iter mount point for partition /dev/$partition" + fi + + echo "[INFO] Partition /dev/$partition was successfully mounted to /storage$iter" + + if [ -n "$MOUNT_POINTS" ]; then + MOUNT_POINTS="$MOUNT_POINTS " + fi + + MOUNT_POINTS="${MOUNT_POINTS}/storage${iter}" + + iter=$(($iter+1)) + done + + if [ -z "$MOUNT_POINTS" ]; then + echo "[INFO] All drives already have partitions created" + fi + + MOUNT_POINTS=($MOUNT_POINTS) +} + +# Creates storage directories for Cassandra: data files, commit log, saved caches. +# As a result CASSANDRA_DATA_DIR, CASSANDRA_COMMITLOG_DIR, CASSANDRA_CACHES_DIR will point to appropriate directories. 
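+# Illustrative sketch of the resulting layout (the mount points here are assumed, not taken
+# from a real run). With three spare drives mounted by mountUnpartitionedDrives as
+# /storage1, /storage2 and /storage3, the function below resolves to:
+#   data dir         -> /storage1/cassandra_data
+#   commit log dir   -> /storage2/cassandra_commitlog
+#   saved caches dir -> /storage3/cassandra_caches
+# and with no spare drives it falls back to /storage/cassandra/{data,commitlog,saved_caches}.
+# At the end the data dir(s) are rewritten as a YAML-style list (one "- directory" entry per
+# line) and all three values have '/' escaped, so they can be injected into a Cassandra
+# config template with sed (presumably by the Cassandra start script used elsewhere in
+# these tests).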
+createCassandraStorageLayout() +{ + CASSANDRA_DATA_DIR= + CASSANDRA_COMMITLOG_DIR= + CASSANDRA_CACHES_DIR= + + mountUnpartitionedDrives + + echo "[INFO] Creating Cassandra storage layout" + + count=${#MOUNT_POINTS[@]} + + for (( i=0; i<=$(( $count -1 )); i++ )) + do + mountPoint=${MOUNT_POINTS[$i]} + + if [ -z "$CASSANDRA_DATA_DIR" ]; then + CASSANDRA_DATA_DIR=$mountPoint + elif [ -z "$CASSANDRA_COMMITLOG_DIR" ]; then + CASSANDRA_COMMITLOG_DIR=$mountPoint + elif [ -z "$CASSANDRA_CACHES_DIR" ]; then + CASSANDRA_CACHES_DIR=$mountPoint + else + CASSANDRA_DATA_DIR="$CASSANDRA_DATA_DIR $mountPoint" + fi + done + + if [ -z "$CASSANDRA_DATA_DIR" ]; then + CASSANDRA_DATA_DIR="/storage/cassandra/data" + else + CASSANDRA_DATA_DIR="$CASSANDRA_DATA_DIR/cassandra_data" + fi + + if [ -z "$CASSANDRA_COMMITLOG_DIR" ]; then + CASSANDRA_COMMITLOG_DIR="/storage/cassandra/commitlog" + else + CASSANDRA_COMMITLOG_DIR="$CASSANDRA_COMMITLOG_DIR/cassandra_commitlog" + fi + + if [ -z "$CASSANDRA_CACHES_DIR" ]; then + CASSANDRA_CACHES_DIR="/storage/cassandra/saved_caches" + else + CASSANDRA_CACHES_DIR="$CASSANDRA_CACHES_DIR/cassandra_caches" + fi + + echo "[INFO] Cassandra data dir: $CASSANDRA_DATA_DIR" + echo "[INFO] Cassandra commit log dir: $CASSANDRA_COMMITLOG_DIR" + echo "[INFO] Cassandra saved caches dir: $CASSANDRA_CACHES_DIR" + + dirs=("$CASSANDRA_DATA_DIR $CASSANDRA_COMMITLOG_DIR $CASSANDRA_CACHES_DIR") + + count=${#dirs[@]} + + for (( i=0; i<=$(( $count -1 )); i++ )) + do + directory=${dirs[$i]} + + mkdir -p $directory + + if [ $? -ne 0 ]; then + terminate "Failed to create directory: $directory" + fi + + chown -R cassandra:cassandra $directory + + if [ $? -ne 0 ]; then + terminate "Failed to assign cassandra:cassandra as an owner of directory $directory" + fi + done + + DATA_DIR_SPEC="\n" + + dirs=($CASSANDRA_DATA_DIR) + + count=${#dirs[@]} + + for (( i=0; i<=$(( $count -1 )); i++ )) + do + dataDir=${dirs[$i]} + DATA_DIR_SPEC="${DATA_DIR_SPEC} - ${dataDir}\n" + done + + CASSANDRA_DATA_DIR=$(echo $DATA_DIR_SPEC | sed -r "s/\//\\\\\//g") + CASSANDRA_COMMITLOG_DIR=$(echo $CASSANDRA_COMMITLOG_DIR | sed -r "s/\//\\\\\//g") + CASSANDRA_CACHES_DIR=$(echo $CASSANDRA_CACHES_DIR | sed -r "s/\//\\\\\//g") +} + +# Attaches environment configuration settings +. $( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/env.sh + +# Validates environment settings +validate + +# Validates node type of EC2 instance +if [ "$1" != "cassandra" ] && [ "$1" != "ignite" ] && [ "$1" != "test" ] && [ "$1" != "ganglia" ]; then + echo "[ERROR] Unsupported node type specified: $1" + exit 1 +fi + +# Sets node type of EC2 instance +export NODE_TYPE=$1 diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/env.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/env.sh new file mode 100644 index 0000000000000..031c5c3a1628a --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/env.sh @@ -0,0 +1,113 @@ +#!/bin/sh + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# ----------------------------------------------------------------------------------------------- +# This file specifies environment specific settings to bootstrap required infrastructure for: +# ----------------------------------------------------------------------------------------------- +# +# 1) Cassandra cluster +# 2) Ignite cluster +# 3) Tests cluster +# 4) Ganglia agents to be installed on each clusters machine +# 5) Ganglia master to collect metrics from agent and show graphs on Ganglia Web dashboard +# +# ----------------------------------------------------------------------------------------------- + +# EC2 tagging related settings +export EC2_OWNER_TAG=ignite@apache.org +export EC2_PROJECT_TAG=ignite +export EC2_CASSANDRA_TAG=CASSANDRA +export EC2_IGNITE_TAG=IGNITE +export EC2_TEST_TAG=TEST +export EC2_GANGLIA_TAG=GANGLIA + +# Tests summary settings +export CASSANDRA_NODES_COUNT=3 +export IGNITE_NODES_COUNT=3 +export TEST_NODES_COUNT=2 +export TESTS_TYPE="ignite" + +# Time (in minutes) to wait for Cassandra/Ignite node up and running and register it in S3 +export SERVICE_STARTUP_TIME=10 + +# Number of attempts to start Cassandra/Ignite daemon +export SERVICE_START_ATTEMPTS=3 + +# Root S3 folder +export S3_ROOT=s3:/// + +# S3 folder for downloads. You should put here ignite load tests jar archive +# (you can also download here other required artifacts like Cassandra, Ignite and etc) +export S3_DOWNLOADS=$S3_ROOT/test + +# S3 root system folders where to store all infrastructure info +export S3_SYSTEM=$S3_ROOT/test1 + +# S3 system folders to store cluster specific info +export S3_CASSANDRA_SYSTEM=$S3_SYSTEM/cassandra +export S3_IGNITE_SYSTEM=$S3_SYSTEM/ignite +export S3_TESTS_SYSTEM=$S3_SYSTEM/tests +export S3_GANGLIA_SYSTEM=$S3_SYSTEM/ganglia + +# Logs related settings +export S3_LOGS_TRIGGER=$S3_SYSTEM/logs-trigger +export S3_LOGS_ROOT=$S3_SYSTEM/logs +export S3_CASSANDRA_LOGS=$S3_LOGS_ROOT/cassandra +export S3_IGNITE_LOGS=$S3_LOGS_ROOT/ignite +export S3_TESTS_LOGS=$S3_LOGS_ROOT/tests +export S3_GANGLIA_LOGS=$S3_LOGS_ROOT/ganglia + +# Cassandra related settings +export CASSANDRA_DOWNLOAD_URL=http://archive.apache.org/dist/cassandra/3.5/apache-cassandra-3.5-bin.tar.gz +export S3_CASSANDRA_BOOTSTRAP_SUCCESS=$S3_CASSANDRA_SYSTEM/success +export S3_CASSANDRA_BOOTSTRAP_FAILURE=$S3_CASSANDRA_SYSTEM/failure +export S3_CASSANDRA_NODES_DISCOVERY=$S3_CASSANDRA_SYSTEM/discovery +export S3_CASSANDRA_FIRST_NODE_LOCK=$S3_CASSANDRA_SYSTEM/first-node-lock +export S3_CASSANDRA_NODES_JOIN_LOCK=$S3_CASSANDRA_SYSTEM/join-lock + +# Ignite related settings +export IGNITE_DOWNLOAD_URL=$S3_DOWNLOADS/apache-ignite-fabric-1.8.0-SNAPSHOT-bin.zip +export S3_IGNITE_BOOTSTRAP_SUCCESS=$S3_IGNITE_SYSTEM/success +export S3_IGNITE_BOOTSTRAP_FAILURE=$S3_IGNITE_SYSTEM/failure +export S3_IGNITE_NODES_DISCOVERY=$S3_IGNITE_SYSTEM/discovery +export S3_IGNITE_FIRST_NODE_LOCK=$S3_IGNITE_SYSTEM/first-node-lock +export S3_IGNITE_NODES_JOIN_LOCK=$S3_IGNITE_SYSTEM/i-join-lock + +# Tests related settings +export TESTS_PACKAGE_DONLOAD_URL=$S3_DOWNLOADS/ignite-cassandra-tests-1.8.0-SNAPSHOT.zip +export 
S3_TESTS_TRIGGER=$S3_SYSTEM/tests-trigger +export S3_TESTS_NODES_DISCOVERY=$S3_TESTS_SYSTEM/discovery +export S3_TESTS_SUCCESS=$S3_TESTS_SYSTEM/success +export S3_TESTS_FAILURE=$S3_TESTS_SYSTEM/failure +export S3_TESTS_IDLE=$S3_TESTS_SYSTEM/idle +export S3_TESTS_PREPARING=$S3_TESTS_SYSTEM/preparing +export S3_TESTS_WAITING=$S3_TESTS_SYSTEM/waiting +export S3_TESTS_RUNNING=$S3_TESTS_SYSTEM/running +export S3_TESTS_FIRST_NODE_LOCK=$S3_TESTS_SYSTEM/first-node-lock +export S3_TESTS_SUMMARY=$S3_SYSTEM/t-summary.zip + +# Ganglia related settings +export GANGLIA_CORE_DOWNLOAD_URL=https://github.com/ganglia/monitor-core.git +export GANGLIA_WEB_DOWNLOAD_URL=https://github.com/ganglia/ganglia-web.git +export RRD_DOWNLOAD_URL=http://oss.oetiker.ch/rrdtool/pub/rrdtool-1.3.1.tar.gz +export GPERF_DOWNLOAD_URL=http://ftp.gnu.org/gnu/gperf/gperf-3.0.3.tar.gz +export EPEL_DOWNLOAD_URL=https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm +export S3_GANGLIA_BOOTSTRAP_SUCCESS=$S3_GANGLIA_SYSTEM/success +export S3_GANGLIA_BOOTSTRAP_FAILURE=$S3_GANGLIA_SYSTEM/failure +export S3_GANGLIA_MASTER_DISCOVERY=$S3_GANGLIA_SYSTEM/discovery \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ganglia/agent-start.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ganglia/agent-start.sh new file mode 100644 index 0000000000000..8e49c1844bd9c --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ganglia/agent-start.sh @@ -0,0 +1,75 @@ +#!/bin/sh + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# ----------------------------------------------------------------------------------------------- +# Script to start Ganglia agent on EC2 node (used by agent-bootstrap.sh) +# ----------------------------------------------------------------------------------------------- + +. /opt/ignite-cassandra-tests/bootstrap/aws/common.sh "ganglia" + +echo "[INFO] Running Ganglia agent discovery daemon for '$1' cluster using $2 port" + +# Waiting for the Ganglia master node up and running +waitFirstClusterNodeRegistered + +DISCOVERY_URL=$(getDiscoveryUrl) + +masterNode=$(aws s3 ls $DISCOVERY_URL | head -1) +masterNode=($masterNode) +masterNode=${masterNode[3]} +masterNode=$(echo $masterNode | xargs) + +if [ $? 
-ne 0 ] || [ -z "$masterNode" ]; then + echo "[ERROR] Failed to get Ganglia master node from: $DISCOVERY_URL" +fi + +echo "[INFO] Got Ganglia master node: $masterNode" + +echo "[INFO] Creating gmond config file" + +/usr/local/sbin/gmond --default_config > /opt/gmond-default.conf + +cat /opt/gmond-default.conf | sed -r "s/deaf = no/deaf = yes/g" | \ +sed -r "s/name = \"unspecified\"/name = \"$1\"/g" | \ +sed -r "s/#bind_hostname/bind_hostname/g" | \ +sed "0,/mcast_join = 239.2.11.71/s/mcast_join = 239.2.11.71/host = $masterNode/g" | \ +sed -r "s/mcast_join = 239.2.11.71//g" | sed -r "s/bind = 239.2.11.71//g" | \ +sed -r "s/port = 8649/port = $2/g" | sed -r "s/retry_bind = true//g" > /opt/gmond.conf + +echo "[INFO] Running gmond daemon to report to gmetad on $masterNode" + +/usr/local/sbin/gmond --conf=/opt/gmond.conf -p /opt/gmond.pid + +sleep 2s + +if [ ! -f "/opt/gmond.pid" ]; then + echo "[ERROR] Failed to start gmond daemon, pid file doesn't exist" + exit 1 +fi + +pid=$(cat /opt/gmond.pid) + +echo "[INFO] gmond daemon started, pid=$pid" + +exists=$(ps $pid | grep gmond) + +if [ -z "$exists" ]; then + echo "[ERROR] gmond daemon abnormally terminated" + exit 1 +fi \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ganglia/ganglia-bootstrap.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ganglia/ganglia-bootstrap.sh new file mode 100644 index 0000000000000..15fa044550287 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ganglia/ganglia-bootstrap.sh @@ -0,0 +1,417 @@ +#!/bin/sh + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# ----------------------------------------------------------------------------------------------- +# Bootstrap script to spin up Ganglia master +# ----------------------------------------------------------------------------------------------- + +# URL to download AWS CLI tools +AWS_CLI_DOWNLOAD_URL=https://s3.amazonaws.com/aws-cli/awscli-bundle.zip + +# URL to download JDK +JDK_DOWNLOAD_URL=http://download.oracle.com/otn-pub/java/jdk/8u77-b03/jdk-8u77-linux-x64.tar.gz + +# URL to download Ignite-Cassandra tests package - you should previously package and upload it to this place +TESTS_PACKAGE_DONLOAD_URL=s3:////ignite-cassandra-tests-.zip + +# Terminates script execution and upload logs to S3 +terminate() +{ + SUCCESS_URL=$S3_GANGLIA_BOOTSTRAP_SUCCESS + FAILURE_URL=$S3_GANGLIA_BOOTSTRAP_FAILURE + + if [ -n "$SUCCESS_URL" ] && [[ "$SUCCESS_URL" != */ ]]; then + SUCCESS_URL=${SUCCESS_URL}/ + fi + + if [ -n "$FAILURE_URL" ] && [[ "$FAILURE_URL" != */ ]]; then + FAILURE_URL=${FAILURE_URL}/ + fi + + host_name=$(hostname -f | tr '[:upper:]' '[:lower:]') + msg=$host_name + + if [ -n "$1" ]; then + echo "[ERROR] $1" + echo "[ERROR]-----------------------------------------------------" + echo "[ERROR] Ganglia master node bootstrap failed" + echo "[ERROR]-----------------------------------------------------" + msg=$1 + + if [ -z "$FAILURE_URL" ]; then + exit 1 + fi + + reportFolder=${FAILURE_URL}${host_name} + reportFile=$reportFolder/__error__ + else + echo "[INFO]-----------------------------------------------------" + echo "[INFO] Ganglia master node bootstrap successfully completed" + echo "[INFO]-----------------------------------------------------" + + if [ -z "$SUCCESS_URL" ]; then + exit 0 + fi + + reportFolder=${SUCCESS_URL}${host_name} + reportFile=$reportFolder/__success__ + fi + + echo $msg > /opt/bootstrap-result + + aws s3 rm --recursive $reportFolder + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to drop report folder: $reportFolder" + fi + + aws s3 cp --sse AES256 /opt/bootstrap-result $reportFile + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to report bootstrap result to: $reportFile" + fi + + rm -f /opt/bootstrap-result + + if [ -n "$1" ]; then + exit 1 + fi + + exit 0 +} + +# Downloads specified package +downloadPackage() +{ + echo "[INFO] Downloading $3 package from $1 into $2" + + for i in 0 9; + do + if [[ "$1" == s3* ]]; then + aws s3 cp $1 $2 + code=$? + else + curl "$1" -o "$2" + code=$? + fi + + if [ $code -eq 0 ]; then + echo "[INFO] $3 package successfully downloaded from $1 into $2" + return 0 + fi + + echo "[WARN] Failed to download $3 package from $i attempt, sleeping extra 5sec" + sleep 5s + done + + terminate "All 10 attempts to download $3 package from $1 are failed" +} + +# Downloads and setup JDK +setupJava() +{ + rm -Rf /opt/java /opt/jdk.tar.gz + + echo "[INFO] Downloading 'jdk'" + wget --no-cookies --no-check-certificate --header "Cookie: gpw_e24=http%3A%2F%2Fwww.oracle.com%2F; oraclelicense=accept-securebackup-cookie" "$JDK_DOWNLOAD_URL" -O /opt/jdk.tar.gz + if [ $? -ne 0 ]; then + terminate "Failed to download 'jdk'" + fi + + echo "[INFO] Untaring 'jdk'" + tar -xvzf /opt/jdk.tar.gz -C /opt + if [ $? -ne 0 ]; then + terminate "Failed to untar 'jdk'" + fi + + rm -Rf /opt/jdk.tar.gz + + unzipDir=$(ls /opt | grep "jdk") + if [ "$unzipDir" != "java" ]; then + mv /opt/$unzipDir /opt/java + fi +} + +# Downloads and setup AWS CLI +setupAWSCLI() +{ + echo "[INFO] Installing 'awscli'" + pip install --upgrade awscli + if [ $? 
-eq 0 ]; then + return 0 + fi + + echo "[ERROR] Failed to install 'awscli' using pip" + echo "[INFO] Trying to install awscli using zip archive" + echo "[INFO] Downloading awscli zip" + + downloadPackage "$AWS_CLI_DOWNLOAD_URL" "/opt/awscli-bundle.zip" "awscli" + + echo "[INFO] Unzipping awscli zip" + unzip /opt/awscli-bundle.zip -d /opt + if [ $? -ne 0 ]; then + terminate "Failed to unzip awscli zip" + fi + + rm -Rf /opt/awscli-bundle.zip + + echo "[INFO] Installing awscli" + /opt/awscli-bundle/install -i /usr/local/aws -b /usr/local/bin/aws + if [ $? -ne 0 ]; then + terminate "Failed to install awscli" + fi + + echo "[INFO] Successfully installed awscli from zip archive" +} + +# Setup all the pre-requisites (packages, settings and etc.) +setupPreRequisites() +{ + echo "[INFO] Installing 'wget' package" + yum -y install wget + if [ $? -ne 0 ]; then + terminate "Failed to install 'wget' package" + fi + + echo "[INFO] Installing 'net-tools' package" + yum -y install net-tools + if [ $? -ne 0 ]; then + terminate "Failed to install 'net-tools' package" + fi + + echo "[INFO] Installing 'python' package" + yum -y install python + if [ $? -ne 0 ]; then + terminate "Failed to install 'python' package" + fi + + echo "[INFO] Installing 'unzip' package" + yum -y install unzip + if [ $? -ne 0 ]; then + terminate "Failed to install 'unzip' package" + fi + + downloadPackage "https://bootstrap.pypa.io/get-pip.py" "/opt/get-pip.py" "get-pip.py" + + echo "[INFO] Installing 'pip'" + python /opt/get-pip.py + if [ $? -ne 0 ]; then + terminate "Failed to install 'pip'" + fi +} + +# Downloads and setup tests package +setupTestsPackage() +{ + downloadPackage "$TESTS_PACKAGE_DONLOAD_URL" "/opt/ignite-cassandra-tests.zip" "Tests" + + rm -Rf /opt/ignite-cassandra-tests + + unzip /opt/ignite-cassandra-tests.zip -d /opt + if [ $? -ne 0 ]; then + terminate "Failed to unzip tests package" + fi + + rm -f /opt/ignite-cassandra-tests.zip + + unzipDir=$(ls /opt | grep "ignite-cassandra") + if [ "$unzipDir" != "ignite-cassandra-tests" ]; then + mv /opt/$unzipDir /opt/ignite-cassandra-tests + fi + + find /opt/ignite-cassandra-tests -type f -name "*.sh" -exec chmod ug+x {} \; + + . /opt/ignite-cassandra-tests/bootstrap/aws/common.sh "ganglia" + + setupNTP + + echo "[INFO] Starting logs collector daemon" + + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + /opt/ignite-cassandra-tests/bootstrap/aws/logs-collector.sh "$S3_LOGS_TRIGGER" "$S3_GANGLIA_LOGS/$HOST_NAME" "/var/log/httpd" > /opt/logs-collector.log & + + echo "[INFO] Logs collector daemon started: $!" + + echo "----------------------------------------------------------------------------------------" + printInstanceInfo + echo "----------------------------------------------------------------------------------------" + tagInstance +} + +# Creates config file for 'gmond' damon working in receiver mode +createGmondReceiverConfig() +{ + /usr/local/sbin/gmond --default_config > /opt/gmond-default.conf + if [ $? 
-ne 0 ]; then + terminate "Failed to create gmond default config in: /opt/gmond-default.txt" + fi + + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + + cat /opt/gmond-default.conf | sed -r "s/mute = no/mute = yes/g" | \ + sed -r "s/name = \"unspecified\"/name = \"$1\"/g" | \ + sed -r "s/#bind_hostname/bind_hostname/g" | \ + sed "0,/mcast_join = 239.2.11.71/s/mcast_join = 239.2.11.71/host = $HOST_NAME/g" | \ + sed -r "s/mcast_join = 239.2.11.71//g" | sed -r "s/bind = 239.2.11.71//g" | \ + sed -r "s/port = 8649/port = $2/g" | sed -r "s/retry_bind = true//g" > /opt/gmond-${1}.conf + + chmod a+r /opt/gmond-${1}.conf + + rm -f /opt/gmond-default.conf +} + +# Creates config file for 'gmond' damon working in sender-receiver mode +createGmondSenderReceiverConfig() +{ + /usr/local/sbin/gmond --default_config > /opt/gmond-default.conf + if [ $? -ne 0 ]; then + terminate "Failed to create gmond default config in: /opt/gmond-default.txt" + fi + + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + + cat /opt/gmond-default.conf | sed -r "s/name = \"unspecified\"/name = \"$1\"/g" | \ + sed -r "s/#bind_hostname/bind_hostname/g" | \ + sed "0,/mcast_join = 239.2.11.71/s/mcast_join = 239.2.11.71/host = $HOST_NAME/g" | \ + sed -r "s/mcast_join = 239.2.11.71//g" | sed -r "s/bind = 239.2.11.71//g" | \ + sed -r "s/port = 8649/port = $2/g" | sed -r "s/retry_bind = true//g" > /opt/gmond-${1}.conf + + chmod a+r /opt/gmond-${1}.conf + + rm -f /opt/gmond-default.conf +} + +# Downloads and setup Ganglia (and dependency) packages +setupGangliaPackages() +{ + installGangliaPackages "master" + + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + + echo "data_source \"cassandra\" ${HOST_NAME}:8641" > /opt/gmetad.conf + echo "data_source \"ignite\" ${HOST_NAME}:8642" >> /opt/gmetad.conf + echo "data_source \"test\" ${HOST_NAME}:8643" >> /opt/gmetad.conf + #echo "data_source \"ganglia\" ${HOST_NAME}:8644" >> /opt/gmetad.conf + echo "setuid_username \"nobody\"" >> /opt/gmetad.conf + echo "case_sensitive_hostnames 0" >> /opt/gmetad.conf + + chmod a+r /opt/gmetad.conf + + createGmondReceiverConfig cassandra 8641 + createGmondReceiverConfig ignite 8642 + createGmondReceiverConfig test 8643 + #createGmondSenderReceiverConfig ganglia 8644 +} + +# Starts 'gmond' receiver damon +startGmondReceiver() +{ + configFile=/opt/gmond-${1}.conf + pidFile=/opt/gmond-${1}.pid + + echo "[INFO] Starting gmond receiver daemon for $1 cluster using config file: $configFile" + + rm -f $pidFile + + /usr/local/sbin/gmond --conf=$configFile --pid-file=$pidFile + + sleep 2s + + if [ ! -f "$pidFile" ]; then + terminate "Failed to start gmond daemon for $1 cluster, pid file doesn't exist" + fi + + pid=$(cat $pidFile) + + echo "[INFO] gmond daemon for $1 cluster started, pid=$pid" + + exists=$(ps $pid | grep gmond) + + if [ -z "$exists" ]; then + terminate "gmond daemon for $1 cluster abnormally terminated" + fi +} + +# Starts 'gmetad' daemon +startGmetadCollector() +{ + echo "[INFO] Starting gmetad daemon" + + rm -f /opt/gmetad.pid + + /usr/local/sbin/gmetad --conf=/opt/gmetad.conf --pid-file=/opt/gmetad.pid + + sleep 2s + + if [ ! 
-f "/opt/gmetad.pid" ]; then + terminate "Failed to start gmetad daemon, pid file doesn't exist" + fi + + pid=$(cat /opt/gmetad.pid) + + echo "[INFO] gmetad daemon started, pid=$pid" + + exists=$(ps $pid | grep gmetad) + + if [ -z "$exists" ]; then + terminate "gmetad daemon abnormally terminated" + fi +} + +# Starts Apache 'httpd' service +startHttpdService() +{ + echo "[INFO] Starting httpd service" + + service httpd start + + if [ $? -ne 0 ]; then + terminate "Failed to start httpd service" + fi + + sleep 5s + + exists=$(service httpd status | grep running) + if [ -z "$exists" ]; then + terminate "httpd service process terminated" + fi + + echo "[INFO] httpd service successfully started" +} + +################################################################################################################### + +echo "[INFO]-----------------------------------------------------------------" +echo "[INFO] Bootstrapping Ganglia master server" +echo "[INFO]-----------------------------------------------------------------" + +setupPreRequisites +setupJava +setupAWSCLI +setupTestsPackage +setupGangliaPackages + +registerNode + +startGmondReceiver cassandra +startGmondReceiver ignite +startGmondReceiver test +#startGmondReceiver ganglia +startGmetadCollector +startHttpdService + +terminate diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-bootstrap.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-bootstrap.sh new file mode 100644 index 0000000000000..7f97ea1b71956 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-bootstrap.sh @@ -0,0 +1,336 @@ +#!/bin/sh + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# ----------------------------------------------------------------------------------------------- +# Bootstrap script to spin up Ignite cluster +# ----------------------------------------------------------------------------------------------- + +# URL to download AWS CLI tools +AWS_CLI_DOWNLOAD_URL=https://s3.amazonaws.com/aws-cli/awscli-bundle.zip + +# URL to download JDK +JDK_DOWNLOAD_URL=http://download.oracle.com/otn-pub/java/jdk/8u77-b03/jdk-8u77-linux-x64.tar.gz + +# URL to download Ignite-Cassandra tests package - you should previously package and upload it to this place +TESTS_PACKAGE_DONLOAD_URL=s3:////ignite-cassandra-tests-.zip + +# Terminates script execution and upload logs to S3 +terminate() +{ + SUCCESS_URL=$S3_IGNITE_BOOTSTRAP_SUCCESS + FAILURE_URL=$S3_IGNITE_BOOTSTRAP_FAILURE + + if [ -n "$SUCCESS_URL" ] && [[ "$SUCCESS_URL" != */ ]]; then + SUCCESS_URL=${SUCCESS_URL}/ + fi + + if [ -n "$FAILURE_URL" ] && [[ "$FAILURE_URL" != */ ]]; then + FAILURE_URL=${FAILURE_URL}/ + fi + + host_name=$(hostname -f | tr '[:upper:]' '[:lower:]') + msg=$host_name + + if [ -n "$1" ]; then + echo "[ERROR] $1" + echo "[ERROR]-----------------------------------------------------" + echo "[ERROR] Ignite node bootstrap failed" + echo "[ERROR]-----------------------------------------------------" + msg=$1 + + if [ -z "$FAILURE_URL" ]; then + exit 1 + fi + + reportFolder=${FAILURE_URL}${host_name} + reportFile=$reportFolder/__error__ + else + echo "[INFO]-----------------------------------------------------" + echo "[INFO] Ignite node bootstrap successfully completed" + echo "[INFO]-----------------------------------------------------" + + if [ -z "$SUCCESS_URL" ]; then + exit 0 + fi + + reportFolder=${SUCCESS_URL}${host_name} + reportFile=$reportFolder/__success__ + fi + + echo $msg > /opt/bootstrap-result + + aws s3 rm --recursive $reportFolder + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to drop report folder: $reportFolder" + fi + + aws s3 cp --sse AES256 /opt/bootstrap-result $reportFile + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to report bootstrap result to: $reportFile" + fi + + rm -f /opt/bootstrap-result + + if [ -n "$1" ]; then + exit 1 + fi + + exit 0 +} + +# Downloads specified package +downloadPackage() +{ + echo "[INFO] Downloading $3 package from $1 into $2" + + for i in 0 9; + do + if [[ "$1" == s3* ]]; then + aws s3 cp $1 $2 + code=$? + else + curl "$1" -o "$2" + code=$? + fi + + if [ $code -eq 0 ]; then + echo "[INFO] $3 package successfully downloaded from $1 into $2" + return 0 + fi + + echo "[WARN] Failed to download $3 package from $i attempt, sleeping extra 5sec" + sleep 5s + done + + terminate "All 10 attempts to download $3 package from $1 are failed" +} + +# Downloads and setup JDK +setupJava() +{ + rm -Rf /opt/java /opt/jdk.tar.gz + + echo "[INFO] Downloading 'jdk'" + wget --no-cookies --no-check-certificate --header "Cookie: gpw_e24=http%3A%2F%2Fwww.oracle.com%2F; oraclelicense=accept-securebackup-cookie" "$JDK_DOWNLOAD_URL" -O /opt/jdk.tar.gz + if [ $? -ne 0 ]; then + terminate "Failed to download 'jdk'" + fi + + echo "[INFO] Untaring 'jdk'" + tar -xvzf /opt/jdk.tar.gz -C /opt + if [ $? -ne 0 ]; then + terminate "Failed to untar 'jdk'" + fi + + rm -Rf /opt/jdk.tar.gz + + unzipDir=$(ls /opt | grep "jdk") + if [ "$unzipDir" != "java" ]; then + mv /opt/$unzipDir /opt/java + fi +} + +# Downloads and setup AWS CLI +setupAWSCLI() +{ + echo "[INFO] Installing 'awscli'" + pip install --upgrade awscli + if [ $? 
-eq 0 ]; then + return 0 + fi + + echo "[ERROR] Failed to install 'awscli' using pip" + echo "[INFO] Trying to install awscli using zip archive" + echo "[INFO] Downloading awscli zip" + + downloadPackage "$AWS_CLI_DOWNLOAD_URL" "/opt/awscli-bundle.zip" "awscli" + + echo "[INFO] Unzipping awscli zip" + unzip /opt/awscli-bundle.zip -d /opt + if [ $? -ne 0 ]; then + terminate "Failed to unzip awscli zip" + fi + + rm -Rf /opt/awscli-bundle.zip + + echo "[INFO] Installing awscli" + /opt/awscli-bundle/install -i /usr/local/aws -b /usr/local/bin/aws + if [ $? -ne 0 ]; then + terminate "Failed to install awscli" + fi + + echo "[INFO] Successfully installed awscli from zip archive" +} + +# Setup all the pre-requisites (packages, settings and etc.) +setupPreRequisites() +{ + echo "[INFO] Installing 'wget' package" + yum -y install wget + if [ $? -ne 0 ]; then + terminate "Failed to install 'wget' package" + fi + + echo "[INFO] Installing 'net-tools' package" + yum -y install net-tools + if [ $? -ne 0 ]; then + terminate "Failed to install 'net-tools' package" + fi + + echo "[INFO] Installing 'python' package" + yum -y install python + if [ $? -ne 0 ]; then + terminate "Failed to install 'python' package" + fi + + echo "[INFO] Installing 'unzip' package" + yum -y install unzip + if [ $? -ne 0 ]; then + terminate "Failed to install 'unzip' package" + fi + + downloadPackage "https://bootstrap.pypa.io/get-pip.py" "/opt/get-pip.py" "get-pip.py" + + echo "[INFO] Installing 'pip'" + python /opt/get-pip.py + if [ $? -ne 0 ]; then + terminate "Failed to install 'pip'" + fi +} + +# Downloads and setup tests package +setupTestsPackage() +{ + downloadPackage "$TESTS_PACKAGE_DONLOAD_URL" "/opt/ignite-cassandra-tests.zip" "Tests" + + rm -Rf /opt/ignite-cassandra-tests + + unzip /opt/ignite-cassandra-tests.zip -d /opt + if [ $? -ne 0 ]; then + terminate "Failed to unzip tests package" + fi + + rm -f /opt/ignite-cassandra-tests.zip + + unzipDir=$(ls /opt | grep "ignite-cassandra") + if [ "$unzipDir" != "ignite-cassandra-tests" ]; then + mv /opt/$unzipDir /opt/ignite-cassandra-tests + fi + + find /opt/ignite-cassandra-tests -type f -name "*.sh" -exec chmod ug+x {} \; + + . /opt/ignite-cassandra-tests/bootstrap/aws/common.sh "ignite" + + setupNTP + + echo "[INFO] Starting logs collector daemon" + + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + /opt/ignite-cassandra-tests/bootstrap/aws/logs-collector.sh "$S3_LOGS_TRIGGER" "$S3_IGNITE_LOGS/$HOST_NAME" "/opt/ignite/work/log" "/opt/ignite/ignite-start.log" > /opt/logs-collector.log & + + echo "[INFO] Logs collector daemon started: $!" + + echo "----------------------------------------------------------------------------------------" + printInstanceInfo + echo "----------------------------------------------------------------------------------------" + tagInstance + bootstrapGangliaAgent "ignite" 8642 +} + +# Downloads Ignite package +downloadIgnite() +{ + downloadPackage "$IGNITE_DOWNLOAD_URL" "/opt/ignite.zip" "Ignite" + + rm -Rf /opt/ignite + + echo "[INFO] Unzipping Ignite package" + unzip /opt/ignite.zip -d /opt + if [ $? -ne 0 ]; then + terminate "Failed to unzip Ignite package" + fi + + rm -f /opt/ignite.zip + + unzipDir=$(ls /opt | grep "ignite" | grep "apache") + if [ "$unzipDir" != "ignite" ]; then + mv /opt/$unzipDir /opt/ignite + fi +} + +# Setups Ignite +setupIgnite() +{ + echo "[INFO] Creating 'ignite' group" + exists=$(cat /etc/group | grep ignite) + if [ -z "$exists" ]; then + groupadd ignite + if [ $? 
-ne 0 ]; then + terminate "Failed to create 'ignite' group" + fi + fi + + echo "[INFO] Creating 'ignite' user" + exists=$(cat /etc/passwd | grep ignite) + if [ -z "$exists" ]; then + useradd -g ignite ignite + if [ $? -ne 0 ]; then + terminate "Failed to create 'ignite' user" + fi + fi + + testsJar=$(find /opt/ignite-cassandra-tests -type f -name "*.jar" | grep ignite-cassandra- | grep tests.jar) + if [ -n "$testsJar" ]; then + echo "[INFO] Coping tests jar $testsJar into /opt/ignite/libs/optional/ignite-cassandra" + cp $testsJar /opt/ignite/libs/optional/ignite-cassandra + if [ $? -ne 0 ]; then + terminate "Failed copy $testsJar into /opt/ignite/libs/optional/ignite-cassandra" + fi + fi + + rm -f /opt/ignite/config/ignite-cassandra-server-template.xml + mv -f /opt/ignite-cassandra-tests/bootstrap/aws/ignite/ignite-cassandra-server-template.xml /opt/ignite/config + + chown -R ignite:ignite /opt/ignite /opt/ignite-cassandra-tests + + echo "export JAVA_HOME=/opt/java" >> $1 + echo "export IGNITE_HOME=/opt/ignite" >> $1 + echo "export USER_LIBS=\$IGNITE_HOME/libs/optional/ignite-cassandra/*:\$IGNITE_HOME/libs/optional/ignite-slf4j/*" >> $1 + echo "export PATH=\$JAVA_HOME/bin:\$IGNITE_HOME/bin:\$PATH" >> $1 +} + +################################################################################################################### + +echo "[INFO]-----------------------------------------------------------------" +echo "[INFO] Bootstrapping Ignite node" +echo "[INFO]-----------------------------------------------------------------" + +setupPreRequisites +setupJava +setupAWSCLI +setupTestsPackage + +downloadIgnite +setupIgnite "/root/.bash_profile" + +cmd="/opt/ignite-cassandra-tests/bootstrap/aws/ignite/ignite-start.sh" + +#sudo -u ignite -g ignite sh -c "$cmd | tee /opt/ignite/ignite-start.log" + +$cmd | tee /opt/ignite/ignite-start.log \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-cassandra-server-template.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-cassandra-server-template.xml new file mode 100644 index 0000000000000..692cd8b0b5ce0 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-cassandra-server-template.xml @@ -0,0 +1,181 @@ + + + + + + + + + + + + + + + ${CASSANDRA_SEEDS} + + + + + + + + + + + + + + + + + +]]> + + + + + + + + + + +]]> + + + + + + + + + + REPLICATION = {'class' : 'SimpleStrategy', 'replication_factor' : 3} + AND DURABLE_WRITES = true + + + comment = 'A most excellent and useful table' + AND read_repair_chance = 0.2 + + + + + + + + + + + + + + + + + + + + +]]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ${IGNITE_SEEDS} + + + + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-env.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-env.sh new file mode 100644 index 0000000000000..bfe3371917077 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-env.sh @@ -0,0 +1,29 @@ +#!/bin/sh + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# ----------------------------------------------------------------------------------------------- +# Environment setup script from Ignite +# ----------------------------------------------------------------------------------------------- + +JVM_OPTS="-Xms10g -Xmx10g -server -XX:+AggressiveOpts -XX:MaxMetaspaceSize=256m" +JVM_OPTS="$JVM_OPTS -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:+UseTLAB -XX:NewSize=128m -XX:MaxNewSize=768m" +#JVM_OPTS="$JVM_OPTS -XX:MaxTenuringThreshold=0 -XX:SurvivorRatio=1024 -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=60" +JVM_OPTS="$JVM_OPTS -Xss16m" + +export JVM_OPTS diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-start.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-start.sh new file mode 100644 index 0000000000000..f2c15574a3669 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/ignite/ignite-start.sh @@ -0,0 +1,266 @@ +#!/bin/sh + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# ----------------------------------------------------------------------------------------------- +# Script to start Ignite daemon (used by ignite-bootstrap.sh) +# ----------------------------------------------------------------------------------------------- + +#profile=/home/ignite/.bash_profile +profile=/root/.bash_profile + +. $profile +. /opt/ignite-cassandra-tests/bootstrap/aws/common.sh "ignite" + +# Setups Cassandra seeds for this Ignite node being able to connect to Cassandra. +# Looks for the information in S3 about already up and running Cassandra cluster nodes. 
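+# A minimal sketch of the seed expansion performed below, with made-up addresses (the
+# opening XML tag is assumed here; only the sed-escaped closing tag appears in the code):
+#
+#   CLUSTER_SEEDS=(10.0.0.11 10.0.0.12)
+#   CASSANDRA_SEEDS=
+#   for seed in "${CLUSTER_SEEDS[@]}"; do
+#       CASSANDRA_SEEDS="${CASSANDRA_SEEDS}<value>${seed}<\/value>"
+#   done
+#   # CASSANDRA_SEEDS is then substituted for ${CASSANDRA_SEEDS} in
+#   # ignite-cassandra-server-template.xml via sed to produce ignite-cassandra-server.xml.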
+setupCassandraSeeds() +{ + setupClusterSeeds "cassandra" "true" + + CLUSTER_SEEDS=($CLUSTER_SEEDS) + count=${#CLUSTER_SEEDS[@]} + + CASSANDRA_SEEDS= + + for (( i=0; i<=$(( $count -1 )); i++ )) + do + seed=${CLUSTER_SEEDS[$i]} + CASSANDRA_SEEDS="${CASSANDRA_SEEDS}$seed<\/value>" + done + + cat /opt/ignite/config/ignite-cassandra-server-template.xml | sed -r "s/\\\$\{CASSANDRA_SEEDS\}/$CASSANDRA_SEEDS/g" > /opt/ignite/config/ignite-cassandra-server.xml +} + +# Setups Ignite nodes which this EC2 Ignite node will use to send its metadata and join Ignite cluster +setupIgniteSeeds() +{ + if [ "$FIRST_NODE_LOCK" == "true" ]; then + echo "[INFO] Setting up Ignite seeds" + + CLUSTER_SEEDS="127.0.0.1:47500..47509" + + echo "[INFO] Using localhost address as a seed for the first Ignite node: $CLUSTER_SEEDS" + + aws s3 rm --recursive ${S3_IGNITE_NODES_DISCOVERY::-1} + if [ $? -ne 0 ]; then + terminate "Failed to clean Ignite node discovery URL: $S3_IGNITE_NODES_DISCOVERY" + fi + else + setupClusterSeeds "ignite" "true" + fi + + CLUSTER_SEEDS=($CLUSTER_SEEDS) + count=${#CLUSTER_SEEDS[@]} + + IGNITE_SEEDS= + + for (( i=0; i<=$(( $count -1 )); i++ )) + do + seed=${CLUSTER_SEEDS[$i]} + IGNITE_SEEDS="${IGNITE_SEEDS}$seed<\/value>" + done + + cat /opt/ignite/config/ignite-cassandra-server.xml | sed -r "s/\\\$\{IGNITE_SEEDS\}/$IGNITE_SEEDS/g" > /opt/ignite/config/ignite-cassandra-server1.xml + mv -f /opt/ignite/config/ignite-cassandra-server1.xml /opt/ignite/config/ignite-cassandra-server.xml +} + +# Checks status of Ignite daemon +checkIgniteStatus() +{ + proc=$(ps -ef | grep java | grep "org.apache.ignite.startup.cmdline.CommandLineStartup") + + nodeId= + nodeAddrs= + nodePorts= + topology= + metrics= + + logFile=$(ls /opt/ignite/work/log/ | grep "\.log$") + if [ -n "$logFile" ]; then + logFile=/opt/ignite/work/log/$logFile + nodeId=$(cat $logFile | grep "Local node \[ID") + nodeAddrs=$(cat $logFile | grep "Local node addresses:") + nodePorts=$(cat $logFile | grep "Local ports:") + topology=$(cat $logFile | grep "Topology snapshot") + metrics=$(cat $logFile | grep "Metrics for local node" | head -n 1) + fi + + if [ -n "$nodeId" ] && [ -n "$nodeAddrs" ] && [ -n "$nodePorts" ] && [ -n "$topology" ] && [ -n "$metrics" ] && [ -n "$proc" ]; then + sleep 30s + return 0 + fi + + return 1 +} + +# Gracefully starts Ignite daemon and waits until it joins Ignite cluster +startIgnite() +{ + echo "[INFO]-------------------------------------------------------------" + echo "[INFO] Trying attempt $START_ATTEMPT to start Ignite daemon" + echo "[INFO]-------------------------------------------------------------" + echo "" + + setupCassandraSeeds + setupIgniteSeeds + + waitToJoinCluster + + if [ "$FIRST_NODE_LOCK" == "true" ]; then + aws s3 rm --recursive ${S3_IGNITE_NODES_DISCOVERY::-1} + if [ $? -ne 0 ]; then + terminate "Failed to clean Ignite node discovery URL: $S3_IGNITE_NODES_DISCOVERY" + fi + fi + + proc=$(ps -ef | grep java | grep "org.apache.ignite.startup.cmdline.CommandLineStartup") + proc=($proc) + + if [ -n "${proc[1]}" ]; then + echo "[INFO] Terminating existing Ignite process ${proc[1]}" + kill -9 ${proc[1]} + fi + + echo "[INFO] Starting Ignite" + rm -Rf /opt/ignite/work/* + /opt/ignite/bin/ignite.sh /opt/ignite/config/ignite-cassandra-server.xml & + + echo "[INFO] Ignite job id: $!" 
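+    # The daemon was just launched in the background, so pause before returning: the
+    # polling loop further down re-checks checkIgniteStatus() every 30sec, and startIgnite()
+    # may be invoked again up to SERVICE_START_ATTEMPTS times if the daemon does not come
+    # up within SERVICE_STARTUP_TIME minutes (START_ATTEMPT is incremented on every call).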
+ + sleep 1m + + START_ATTEMPT=$(( $START_ATTEMPT+1 )) +} + +####################################################################################################### + +START_ATTEMPT=0 + +# Cleans all the previous metadata about this EC2 node +unregisterNode + +# Tries to get first-node lock +tryToGetFirstNodeLock + +echo "[INFO]-----------------------------------------------------------------" + +if [ "$FIRST_NODE_LOCK" == "true" ]; then + echo "[INFO] Starting first Ignite node" +else + echo "[INFO] Starting Ignite node" +fi + +echo "[INFO]-----------------------------------------------------------------" +printInstanceInfo +echo "[INFO]-----------------------------------------------------------------" + +if [ "$FIRST_NODE_LOCK" != "true" ]; then + waitFirstClusterNodeRegistered "true" +else + cleanupMetadata +fi + +# Applies Ignite environment settings from ignite-env.sh +envScript=$(readlink -m $( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/ignite-env.sh) +if [ -f "$envScript" ]; then + . $envScript +fi + +# Start Ignite daemon +startIgnite + +startTime=$(date +%s) + +# Trying multiple attempts to start Ignite daemon +while true; do + proc=$(ps -ef | grep java | grep "org.apache.ignite.startup.cmdline.CommandLineStartup") + + checkIgniteStatus + + if [ $? -eq 0 ]; then + sleep 1m + echo "[INFO]-----------------------------------------------------" + echo "[INFO] Ignite daemon successfully started" + echo "[INFO]-----------------------------------------------------" + echo $proc + echo "[INFO]-----------------------------------------------------" + + # Once node joined the cluster we need to remove cluster-join lock + # to allow other EC2 nodes to acquire it and join cluster sequentially + removeClusterJoinLock + + break + fi + + currentTime=$(date +%s) + duration=$(( $currentTime-$startTime )) + duration=$(( $duration/60 )) + + if [ $duration -gt $SERVICE_STARTUP_TIME ]; then + if [ "$FIRST_NODE_LOCK" == "true" ]; then + # If the first node of Ignite cluster failed to start Ignite daemon in SERVICE_STARTUP_TIME min, + # we will not try any other attempts and just terminate with error. Terminate function itself, will + # take care about removing all the locks holding by this node. + terminate "${SERVICE_STARTUP_TIME}min timeout expired, but first Ignite daemon is still not up and running" + else + # If node isn't the first node of Ignite cluster and it failed to start we need to + # remove cluster-join lock to allow other EC2 nodes to acquire it + removeClusterJoinLock + + # If node failed all SERVICE_START_ATTEMPTS attempts to start Ignite daemon we will not + # try anymore and terminate with error + if [ $START_ATTEMPT -gt $SERVICE_START_ATTEMPTS ]; then + terminate "${SERVICE_START_ATTEMPTS} attempts exceed, but Ignite daemon is still not up and running" + fi + + # New attempt to start Ignite daemon + startIgnite + fi + + continue + fi + + # Handling situation when Ignite daemon process abnormally terminated + if [ -z "$proc" ]; then + # If this is the first node of Ignite cluster just terminating with error + if [ "$FIRST_NODE_LOCK" == "true" ]; then + terminate "Failed to start Ignite daemon" + fi + + # Remove cluster-join lock to allow other EC2 nodes to acquire it + removeClusterJoinLock + + echo "[WARN] Failed to start Ignite daemon. 
Sleeping for extra 30sec" + sleep 30s + + # New attempt to start Ignite daemon + startIgnite + + continue + fi + + echo "[INFO] Waiting for Ignite daemon to start, time passed ${duration}min" + sleep 30s +done + +# Once Ignite daemon successfully started we registering new Ignite node in S3 +registerNode + +# Terminating script with zero exit code +terminate \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/logs-collector.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/logs-collector.sh new file mode 100644 index 0000000000000..1634b89cff575 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/logs-collector.sh @@ -0,0 +1,173 @@ +#!/bin/sh + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# ----------------------------------------------------------------------------------------------- +# Logs collector daemon +# ----------------------------------------------------------------------------------------------- +# Script is launched in background by all EC2 nodes of all clusters (Cassandra, Ignite, Tests) and +# periodically (each 30 seconds) checks if specific S3 trigger file (specified by $S3_LOGS_TRIGGER_URL) +# was created or its timestamp was changed. Such an event serve as a trigger for the script +# to collect EC2 instance logs (from folder specified by $1) and upload them into specific +# S3 folder (specified by $S3_LOGS_FOLDER). +# ----------------------------------------------------------------------------------------------- + +uploadLogs() +{ + if [ ! -d "$1" ]; then + echo "[INFO] Logs directory doesn't exist: $1" + return 0 + fi + + echo "[INFO] Uploading logs from directory: $1" + + dirList=$(ls $1 | head -1) + + if [ -z "$dirList" ]; then + echo "[INFO] Directory is empty: $1" + fi + + for i in 0 9; + do + aws s3 sync --sse AES256 --delete "$1" "$S3_LOGS_FOLDER" + code=$? + + if [ $code -eq 0 ]; then + echo "[INFO] Successfully uploaded logs from directory: $1" + return 0 + fi + + echo "[WARN] Failed to upload logs from $i attempt, sleeping extra 30sec" + sleep 30s + done + + echo "[ERROR] All 10 attempts to upload logs are failed for the directory: $1" +} + +createNewLogsSnapshot() +{ + rm -f ~/logs-collector.snapshot.new + + for log_src in "$@" + do + if [ -d "$log_src" ] || [ -f "$log_src" ]; then + ls -alR $log_src >> ~/logs-collector.snapshot.new + + fi + done +} + +checkLogsChanged() +{ + createNewLogsSnapshot $@ + + if [ ! -f "~/logs-collector.snapshot" ]; then + return 1 + fi + + diff "~/logs-collector.snapshot" "~/logs-collector.snapshot.new" > /dev/null + + return $? +} + +updateLogsSnapshot() +{ + if [ ! 
-f "~/logs-collector.snapshot.new" ]; then + return 0 + fi + + rm -f "~/logs-collector.snapshot" + mv "~/logs-collector.snapshot.new" "~/logs-collector.snapshot" +} + +collectLogs() +{ + createNewLogsSnapshot + + rm -Rf ~/logs-collector-logs + mkdir -p ~/logs-collector-logs + + for log_src in "$@" + do + if [ -f "$log_src" ]; then + echo "[INFO] Collecting log file: $log_src" + cp -f $log_src ~/logs-collector-logs + elif [ -d "$log_src" ]; then + echo "[INFO] Collecting logs from folder: $log_src" + cp -Rf $log_src ~/logs-collector-logs + fi + done + + uploadLogs ~/logs-collector-logs + + rm -Rf ~/logs-collector-logs + + updateLogsSnapshot +} + +echo "[INFO] Running Logs collector service" + +if [ -z "$1" ]; then + echo "[ERROR] Logs collection S3 trigger URL doesn't specified" + exit 1 +fi + +S3_LOGS_TRIGGER_URL=$1 + +echo "[INFO] Logs collection S3 trigger URL: $S3_LOGS_TRIGGER_URL" + +if [ -z "$2" ]; then + echo "[ERROR] S3 folder where to upload logs doesn't specified" + exit 1 +fi + +S3_LOGS_FOLDER=$2 + +echo "[INFO] S3 logs upload folder: $S3_LOGS_FOLDER" + +shift 2 + +if [ -z "$1" ]; then + echo "[WARN] Local logs sources don't specified" +else + echo "[INFO] Local logs sources: $@" +fi + +echo "--------------------------------------------------------------------" + +TRIGGER_STATE= + +while true; do + sleep 30s + + STATE=$(aws s3 ls $S3_LOGS_TRIGGER_URL) + + if [ -z "$STATE" ] || [ "$STATE" == "$TRIGGER_STATE" ]; then + checkLogsChanged + + if [ $? -eq 0 ]; then + continue + fi + fi + + TRIGGER_STATE=$STATE + + collectLogs $@ /var/log/cloud-init.log /var/log/cloud-init-output.log + + echo "--------------------------------------------------------------------" +done diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/ignite-cassandra-client-template.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/ignite-cassandra-client-template.xml new file mode 100644 index 0000000000000..2989563ddaf1a --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/ignite-cassandra-client-template.xml @@ -0,0 +1,183 @@ + + + + + + + + + + + + + + ${CASSANDRA_SEEDS} + + + + + + + + + + + + + + + + + +]]> + + + + + + + + + + +]]> + + + + + + + + + + REPLICATION = {'class' : 'SimpleStrategy', 'replication_factor' : 3} + AND DURABLE_WRITES = true + + + comment = 'A most excellent and useful table' + AND read_repair_chance = 0.2 + + + + + + + + + + + + + + + + + + + + +]]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ${IGNITE_SEEDS} + + + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/tests-bootstrap.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/tests-bootstrap.sh new file mode 100644 index 0000000000000..8e6faff699b01 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/tests-bootstrap.sh @@ -0,0 +1,317 @@ +#!/bin/sh + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# ----------------------------------------------------------------------------------------------- +# Bootstrap script to spin up Tests cluster +# ----------------------------------------------------------------------------------------------- + +# URL to download AWS CLI tools +AWS_CLI_DOWNLOAD_URL=https://s3.amazonaws.com/aws-cli/awscli-bundle.zip + +# URL to download JDK +JDK_DOWNLOAD_URL=http://download.oracle.com/otn-pub/java/jdk/8u77-b03/jdk-8u77-linux-x64.tar.gz + +# URL to download Ignite-Cassandra tests package - you should previously package and upload it to this place +TESTS_PACKAGE_DONLOAD_URL=s3:////ignite-cassandra-tests-.zip + +# Terminates script execution and upload logs to S3 +terminate() +{ + SUCCESS_URL=$S3_TESTS_SUCCESS + FAILURE_URL=$S3_TESTS_FAILURE + + if [ -n "$SUCCESS_URL" ] && [[ "$SUCCESS_URL" != */ ]]; then + SUCCESS_URL=${SUCCESS_URL}/ + fi + + if [ -n "$FAILURE_URL" ] && [[ "$FAILURE_URL" != */ ]]; then + FAILURE_URL=${FAILURE_URL}/ + fi + + host_name=$(hostname -f | tr '[:upper:]' '[:lower:]') + msg=$host_name + + if [ -n "$1" ]; then + echo "[ERROR] $1" + echo "[ERROR]-----------------------------------------------------" + echo "[ERROR] Test node bootstrap failed" + echo "[ERROR]-----------------------------------------------------" + msg=$1 + + if [ -z "$FAILURE_URL" ]; then + exit 1 + fi + + reportFolder=${FAILURE_URL}${host_name} + reportFile=$reportFolder/__error__ + else + echo "[INFO]-----------------------------------------------------" + echo "[INFO] Test node bootstrap successfully completed" + echo "[INFO]-----------------------------------------------------" + + if [ -z "$SUCCESS_URL" ]; then + exit 0 + fi + + reportFolder=${SUCCESS_URL}${host_name} + reportFile=$reportFolder/__success__ + fi + + echo $msg > /opt/bootstrap-result + + aws s3 rm --recursive $reportFolder + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to drop report folder: $reportFolder" + fi + + aws s3 cp --sse AES256 /opt/bootstrap-result $reportFile + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to report bootstrap result to: $reportFile" + fi + + rm -f /opt/bootstrap-result + + if [ -n "$1" ]; then + exit 1 + fi + + exit 0 +} + +# Downloads specified package +downloadPackage() +{ + echo "[INFO] Downloading $3 package from $1 into $2" + + for i in 0 9; + do + if [[ "$1" == s3* ]]; then + aws s3 cp $1 $2 + code=$? + else + curl "$1" -o "$2" + code=$? + fi + + if [ $code -eq 0 ]; then + echo "[INFO] $3 package successfully downloaded from $1 into $2" + return 0 + fi + + echo "[WARN] Failed to download $3 package from $i attempt, sleeping extra 5sec" + sleep 5s + done + + terminate "All 10 attempts to download $3 package from $1 are failed" +} + +# Downloads and setup JDK +setupJava() +{ + rm -Rf /opt/java /opt/jdk.tar.gz + + echo "[INFO] Downloading 'jdk'" + wget --no-cookies --no-check-certificate --header "Cookie: gpw_e24=http%3A%2F%2Fwww.oracle.com%2F; oraclelicense=accept-securebackup-cookie" "$JDK_DOWNLOAD_URL" -O /opt/jdk.tar.gz + if [ $? 
-ne 0 ]; then + terminate "Failed to download 'jdk'" + fi + + echo "[INFO] Untaring 'jdk'" + tar -xvzf /opt/jdk.tar.gz -C /opt + if [ $? -ne 0 ]; then + terminate "Failed to untar 'jdk'" + fi + + rm -Rf /opt/jdk.tar.gz + + unzipDir=$(ls /opt | grep "jdk") + if [ "$unzipDir" != "java" ]; then + mv /opt/$unzipDir /opt/java + fi +} + +# Downloads and setup AWS CLI +setupAWSCLI() +{ + echo "[INFO] Installing 'awscli'" + pip install --upgrade awscli + if [ $? -eq 0 ]; then + return 0 + fi + + echo "[ERROR] Failed to install 'awscli' using pip" + echo "[INFO] Trying to install awscli using zip archive" + echo "[INFO] Downloading awscli zip" + + downloadPackage "$AWS_CLI_DOWNLOAD_URL" "/opt/awscli-bundle.zip" "awscli" + + echo "[INFO] Unzipping awscli zip" + unzip /opt/awscli-bundle.zip -d /opt + if [ $? -ne 0 ]; then + terminate "Failed to unzip awscli zip" + fi + + rm -Rf /opt/awscli-bundle.zip + + echo "[INFO] Installing awscli" + /opt/awscli-bundle/install -i /usr/local/aws -b /usr/local/bin/aws + if [ $? -ne 0 ]; then + terminate "Failed to install awscli" + fi + + echo "[INFO] Successfully installed awscli from zip archive" +} + +# Setup all the pre-requisites (packages, settings and etc.) +setupPreRequisites() +{ + echo "[INFO] Installing 'wget' package" + yum -y install wget + if [ $? -ne 0 ]; then + terminate "Failed to install 'wget' package" + fi + + echo "[INFO] Installing 'net-tools' package" + yum -y install net-tools + if [ $? -ne 0 ]; then + terminate "Failed to install 'net-tools' package" + fi + + echo "[INFO] Installing 'python' package" + yum -y install python + if [ $? -ne 0 ]; then + terminate "Failed to install 'python' package" + fi + + echo "[INFO] Installing 'unzip' package" + yum -y install unzip + if [ $? -ne 0 ]; then + terminate "Failed to install 'unzip' package" + fi + + downloadPackage "https://bootstrap.pypa.io/get-pip.py" "/opt/get-pip.py" "get-pip.py" + + echo "[INFO] Installing 'pip'" + python /opt/get-pip.py + if [ $? -ne 0 ]; then + terminate "Failed to install 'pip'" + fi +} + +# Downloads and setup tests package +setupTestsPackage() +{ + downloadPackage "$TESTS_PACKAGE_DONLOAD_URL" "/opt/ignite-cassandra-tests.zip" "Tests" + + rm -Rf /opt/ignite-cassandra-tests + + unzip /opt/ignite-cassandra-tests.zip -d /opt + if [ $? -ne 0 ]; then + terminate "Failed to unzip tests package" + fi + + rm -f /opt/ignite-cassandra-tests.zip + + unzipDir=$(ls /opt | grep "ignite-cassandra") + if [ "$unzipDir" != "ignite-cassandra-tests" ]; then + mv /opt/$unzipDir /opt/ignite-cassandra-tests + fi + + find /opt/ignite-cassandra-tests -type f -name "*.sh" -exec chmod ug+x {} \; + + . /opt/ignite-cassandra-tests/bootstrap/aws/common.sh "test" + + setupNTP + + echo "[INFO] Starting logs collector daemon" + + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + /opt/ignite-cassandra-tests/bootstrap/aws/logs-collector.sh "$S3_LOGS_TRIGGER" "$S3_TESTS_LOGS/$HOST_NAME" "/opt/ignite-cassandra-tests/logs" > /opt/logs-collector.log & + + echo "[INFO] Logs collector daemon started: $!" + + echo "----------------------------------------------------------------------------------------" + printInstanceInfo + echo "----------------------------------------------------------------------------------------" + tagInstance + bootstrapGangliaAgent "test" 8643 + + ################################################### + # Extra configuration specific only for test node # + ################################################### + + echo "[INFO] Installing bc package" + + yum -y install bc + + if [ $? 
-ne 0 ]; then + terminate "Failed to install bc package" + fi + + echo "[INFO] Installing zip package" + + yum -y install zip + + if [ $? -ne 0 ]; then + terminate "Failed to install zip package" + fi + + echo "[INFO] Creating 'ignite' group" + exists=$(cat /etc/group | grep ignite) + if [ -z "$exists" ]; then + groupadd ignite + if [ $? -ne 0 ]; then + terminate "Failed to create 'ignite' group" + fi + fi + + echo "[INFO] Creating 'ignite' user" + exists=$(cat /etc/passwd | grep ignite) + if [ -z "$exists" ]; then + useradd -g ignite ignite + if [ $? -ne 0 ]; then + terminate "Failed to create 'ignite' user" + fi + fi + + mkdir -p /opt/ignite-cassandra-tests/logs + chown -R ignite:ignite /opt/ignite-cassandra-tests + + echo "export JAVA_HOME=/opt/java" >> $1 + echo "export PATH=\$JAVA_HOME/bin:\$PATH" >> $1 +} + +################################################################################################################### + +echo "[INFO]-----------------------------------------------------------------" +echo "[INFO] Bootstrapping Tests node" +echo "[INFO]-----------------------------------------------------------------" + +setupPreRequisites +setupJava +setupAWSCLI +setupTestsPackage "/root/.bash_profile" + +cmd="/opt/ignite-cassandra-tests/bootstrap/aws/tests/tests-manager.sh" + +#sudo -u ignite -g ignite sh -c "$cmd > /opt/ignite-cassandra-tests/tests-manager" & + +$cmd > /opt/ignite-cassandra-tests/logs/tests-manager.log & + +terminate \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/tests-manager.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/tests-manager.sh new file mode 100644 index 0000000000000..c0f5d6b8cba9b --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/tests-manager.sh @@ -0,0 +1,458 @@ +#!/bin/sh + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# ----------------------------------------------------------------------------------------------- +# Tests manager daemon +# ----------------------------------------------------------------------------------------------- +# Script is launched in background by all nodes of Tests cluster and +# periodically (each 30 seconds) checks if specific S3 trigger file was created or +# its timestamp was changed. Such an event serve as a trigger for the script to start +# preparing to run load tests. +# ----------------------------------------------------------------------------------------------- + +#profile=/home/ignite/.bash_profile +profile=/root/.bash_profile + +. $profile +. 
/opt/ignite-cassandra-tests/bootstrap/aws/common.sh "test" + +# Switch test node to IDLE state +switchToIdleState() +{ + if [ "$NODE_STATE" != "IDLE" ]; then + echo "[INFO] Switching node to IDLE state" + dropStateFlag "$S3_TESTS_WAITING" "$S3_TESTS_PREPARING" "$S3_TESTS_RUNNING" + createStateFlag "$S3_TESTS_IDLE" + NODE_STATE="IDLE" + echo "[INFO] Node was switched to IDLE state" + fi +} + +# Switch test node to PREPARING state +switchToPreparingState() +{ + if [ "$NODE_STATE" != "PREPARING" ]; then + echo "[INFO] Switching node to PREPARING state" + dropStateFlag "$S3_TESTS_WAITING" "$S3_TESTS_IDLE" "$S3_TESTS_RUNNING" + createStateFlag "$S3_TESTS_PREPARING" + NODE_STATE="PREPARING" + echo "[INFO] Node was switched to PREPARING state" + fi +} + +# Switch test node to WAITING state +switchToWaitingState() +{ + if [ "$NODE_STATE" != "WAITING" ]; then + echo "[INFO] Switching node to WAITING state" + dropStateFlag "$S3_TESTS_IDLE" "$S3_TESTS_PREPARING" "$S3_TESTS_RUNNING" + createStateFlag "$S3_TESTS_WAITING" + NODE_STATE="WAITING" + echo "[INFO] Node was switched to WAITING state" + fi +} + +# Switch test node to RUNNING state +switchToRunningState() +{ + if [ "$NODE_STATE" != "RUNNING" ]; then + echo "[INFO] Switching node to RUNNING state" + dropStateFlag "$S3_TESTS_IDLE" "$S3_TESTS_PREPARING" "$S3_TESTS_WAITING" + createStateFlag "$S3_TESTS_RUNNING" + NODE_STATE="RUNNING" + echo "[INFO] Node was switched to RUNNING state" + fi +} + +# Creates appropriate state flag for the node in S3 +createStateFlag() +{ + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + + aws s3 cp --sse AES256 /etc/hosts ${1}${HOST_NAME} + if [ $? -ne 0 ]; then + terminate "Failed to create state flag: ${1}${HOST_NAME}" + fi +} + +# Drops appropriate state flag for the node in S3 +dropStateFlag() +{ + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + + for flagUrl in "$@" + do + exists=$(aws s3 ls ${flagUrl}${HOST_NAME}) + if [ -n "$exists" ]; then + aws s3 rm ${flagUrl}${HOST_NAME} + if [ $? -ne 0 ]; then + terminate "Failed to drop state flag: ${flagUrl}${HOST_NAME}" + fi + fi + done +} + +# Removes tests summary report from S3 +dropTestsSummary() +{ + exists=$(aws s3 ls $S3_TESTS_SUMMARY) + if [ -z "$exists" ]; then + return 0 + fi + + aws s3 rm $S3_TESTS_SUMMARY + if [ $? -ne 0 ]; then + terminate "Failed to drop tests summary info: $S3_TESTS_SUMMARY" + fi +} + +# Recreate all the necessary Cassandra artifacts before running Load tests +recreateCassandraArtifacts() +{ + /opt/ignite-cassandra-tests/recreate-cassandra-artifacts.sh + if [ $? -ne 0 ]; then + terminate "Failed to recreate Cassandra artifacts" + fi +} + +# Setups Cassandra seeds for this Tests node being able to connect to Cassandra. +# Looks for the information in S3 about already up and running Cassandra cluster nodes. 
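+# Illustration only (hypothetical addresses, not part of the original script): if the seeds
+# discovered in S3 were CLUSTER_SEEDS="10.0.0.1 10.0.0.2", the function below would produce
+# CASSANDRA_SEEDS1="10.0.0.1,10.0.0.2" (written as contact.points into connection.properties)
+# and CASSANDRA_SEEDS2 as the matching list of XML value elements that replaces the
+# ${CASSANDRA_SEEDS} placeholder in ignite-cassandra-client-template.xml.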
+setupCassandraSeeds() +{ + if [ $CASSANDRA_NODES_COUNT -eq 0 ]; then + return 0 + fi + + setupClusterSeeds "cassandra" + + CASSANDRA_SEEDS1=$(echo $CLUSTER_SEEDS | sed -r "s/ /,/g") + CASSANDRA_SEEDS2= + + CLUSTER_SEEDS=($CLUSTER_SEEDS) + count=${#CLUSTER_SEEDS[@]} + + for (( i=0; i<=$(( $count -1 )); i++ )) + do + seed=${CLUSTER_SEEDS[$i]} + CASSANDRA_SEEDS2="${CASSANDRA_SEEDS2}$seed<\/value>" + done + + echo "[INFO] Using Cassandra seeds: $CASSANDRA_SEEDS1" + + echo "contact.points=$CASSANDRA_SEEDS1" > /opt/ignite-cassandra-tests/settings/org/apache/ignite/tests/cassandra/connection.properties + + cat /opt/ignite-cassandra-tests/bootstrap/aws/tests/ignite-cassandra-client-template.xml | sed -r "s/\\\$\{CASSANDRA_SEEDS\}/$CASSANDRA_SEEDS2/g" > /opt/ignite-cassandra-tests/bootstrap/aws/tests/ignite-cassandra-client-template1.xml +} + +# Setups Ignite nodes for this Tests node being able to connect to Ignite. +# Looks for the information in S3 about already up and running Cassandra cluster nodes. +setupIgniteSeeds() +{ + if [ $IGNITE_NODES_COUNT -eq 0 ]; then + return 0 + fi + + setupClusterSeeds "ignite" + + CLUSTER_SEEDS=($CLUSTER_SEEDS) + count=${#CLUSTER_SEEDS[@]} + + IGNITE_SEEDS= + + for (( i=0; i<=$(( $count -1 )); i++ )) + do + seed=${CLUSTER_SEEDS[$i]} + IGNITE_SEEDS="${IGNITE_SEEDS}$seed<\/value>" + done + + echo "[INFO] Using Ignite seeds: $IGNITE_SEEDS" + + cat /opt/ignite-cassandra-tests/bootstrap/aws/tests/ignite-cassandra-client-template1.xml | sed -r "s/\\\$\{IGNITE_SEEDS\}/$IGNITE_SEEDS/g" > /opt/ignite-cassandra-tests/settings/org/apache/ignite/tests/persistence/primitive/ignite-remote-client-config.xml + rm -f /opt/ignite-cassandra-tests/bootstrap/aws/tests/ignite-cassandra-client-template1.xml +} + +# Setups Cassandra credentials to connect to Cassandra cluster +setupCassandraCredentials() +{ + echo "admin.user=cassandra" > /opt/ignite-cassandra-tests/settings/org/apache/ignite/tests/cassandra/credentials.properties + echo "admin.password=cassandra" >> /opt/ignite-cassandra-tests/settings/org/apache/ignite/tests/cassandra/credentials.properties + echo "regular.user=cassandra" >> /opt/ignite-cassandra-tests/settings/org/apache/ignite/tests/cassandra/credentials.properties + echo "regular.password=cassandra" >> /opt/ignite-cassandra-tests/settings/org/apache/ignite/tests/cassandra/credentials.properties +} + +# Triggering first time tests execution for all nodes in the Tests cluster +triggerFirstTimeTestsExecution() +{ + if [ -z "$TESTS_TYPE" ]; then + return 0 + fi + + tryToGetFirstNodeLock + if [ $? -ne 0 ]; then + return 0 + fi + + sleep 30s + + echo "[INFO] Triggering first time tests execution" + + echo "TESTS_TYPE=$TESTS_TYPE" > /opt/ignite-cassandra-tests/tests-trigger + echo "#--------------------------------------------------" >> /opt/ignite-cassandra-tests/tests-trigger + echo "" >> /opt/ignite-cassandra-tests/tests-trigger + cat /opt/ignite-cassandra-tests/settings/tests.properties >> /opt/ignite-cassandra-tests/tests-trigger + + aws s3 cp --sse AES256 /opt/ignite-cassandra-tests/tests-trigger $S3_TESTS_TRIGGER + code=$? 
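+    # The local trigger file is removed below whether or not the S3 upload succeeded;
+    # if the upload failed, the error is then reported through terminate().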
+ + rm -f /opt/ignite-cassandra-tests/tests-trigger + + if [ $code -ne 0 ]; then + terminate "Failed to create tests trigger: $S3_TESTS_TRIGGER" + fi +} + +# Cleans previously created logs from S3 +cleanPreviousLogs() +{ + for logFile in /opt/ignite-cassandra-tests/logs/* + do + managerLog=$(echo $logFile | grep "tests-manager") + if [ -z "$managerLog" ]; then + rm -Rf $logFile + fi + done + + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + + aws s3 rm --recursive ${S3_TESTS_FAILURE}${HOST_NAME} + aws s3 rm --recursive ${S3_TESTS_SUCCESS}${HOST_NAME} +} + +# Uploads tests logs to S3 +uploadTestsLogs() +{ + HOST_NAME=$(hostname -f | tr '[:upper:]' '[:lower:]') + + if [ -f "/opt/ignite-cassandra-tests/logs/__success__" ]; then + logsFolder=${S3_TESTS_SUCCESS}${HOST_NAME} + else + logsFolder=${S3_TESTS_FAILURE}${HOST_NAME} + fi + + aws s3 rm --recursive $logsFolder + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to drop logs folder: $logsFolder" + fi + + if [ -d "/opt/ignite-cassandra-tests/logs" ]; then + aws s3 sync --sse AES256 /opt/ignite-cassandra-tests/logs $logsFolder + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to export tests logs to: $logsFolder" + fi + fi +} + +# Runs tests-report.sh to prepare tests summary report +buildTestsSummaryReport() +{ + reportScript=$(readlink -m $( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/tests-report.sh) + $reportScript + + if [ -n "$S3_LOGS_TRIGGER" ]; then + aws s3 cp --sse AES256 /etc/hosts $S3_LOGS_TRIGGER + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to trigger logs collection" + fi + fi +} + +# Running load tests +runLoadTests() +{ + cd /opt/ignite-cassandra-tests + + if [ "$TESTS_TYPE" == "ignite" ]; then + echo "[INFO] Running Ignite load tests" + ./ignite-load-tests.sh & + else + echo "[INFO] Running Cassandra load tests" + ./cassandra-load-tests.sh & + fi + + testsJobId=$! 
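+    # The monitoring loop below tracks progress by diffing "ls -al" snapshots of the logs
+    # directory; if the listing stays unchanged for more than 5 minutes the load-test java
+    # process is treated as stuck, killed, and the run is reported as terminated.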
+ + echo "[INFO] Tests job id: $testsJobId" + + sleep 1m + + LOGS_SNAPSHOT=$(ls -al /opt/ignite-cassandra-tests/logs) + LOGS_SNAPSHOT_TIME=$(date +%s) + + TERMINATED= + + # tests monitoring + while true; do + proc=$(ps -ef | grep java | grep "org.apache.ignite.tests") + if [ -z "$proc" ]; then + break + fi + + NEW_LOGS_SNAPSHOT=$(ls -al /opt/ignite-cassandra-tests/logs) + NEW_LOGS_SNAPSHOT_TIME=$(date +%s) + + # if logs state updated it means that tests are running and not stuck + if [ "$LOGS_SNAPSHOT" != "$NEW_LOGS_SNAPSHOT" ]; then + LOGS_SNAPSHOT=$NEW_LOGS_SNAPSHOT + LOGS_SNAPSHOT_TIME=$NEW_LOGS_SNAPSHOT_TIME + continue + fi + + duration=$(( $NEW_LOGS_SNAPSHOT_TIME-$LOGS_SNAPSHOT_TIME )) + duration=$(( $duration/60 )) + + # if logs wasn't updated during 5min it means that load tests stuck + if [ $duration -gt 5 ]; then + proc=($proc) + kill -9 ${proc[1]} + TERMINATED="true" + break + fi + + echo "[INFO] Waiting extra 30sec for load tests to complete" + + sleep 30s + done + + rm -f /opt/ignite-cassandra-tests/logs/tests.properties + cp /opt/ignite-cassandra-tests/settings/tests.properties /opt/ignite-cassandra-tests/logs + + if [ "$TERMINATED" == "true" ]; then + echo "[ERROR] Load tests stuck, tests process terminated" + echo "Load tests stuck, tests process terminated" > /opt/ignite-cassandra-tests/logs/__error__ + return 0 + fi + + failed= + if [ "$TESTS_TYPE" == "cassandra" ]; then + failed=$(cat /opt/ignite-cassandra-tests/cassandra-load-tests.log | grep "load tests execution failed") + else + failed=$(cat /opt/ignite-cassandra-tests/ignite-load-tests.log | grep "load tests execution failed") + fi + + if [ -n "$failed" ]; then + echo "[ERROR] Load tests execution failed" + echo "Load tests execution failed" > /opt/ignite-cassandra-tests/logs/__error__ + else + echo "[INFO] Load tests execution successfully completed" + echo "Load tests execution successfully completed" > /opt/ignite-cassandra-tests/logs/__success__ + fi +} + +####################################################################################################### + +sleep 1m + +NODE_STATE= +TRIGGER_STATE= + +printInstanceInfo +setupCassandraCredentials +switchToIdleState + +triggerFirstTimeTestsExecution + +registerNode + +while true; do + # switching state to IDLE + switchToIdleState + + sleep 30s + + NEW_TRIGGER_STATE=$(aws s3 ls $S3_TESTS_TRIGGER | xargs) + if [ -z "$NEW_TRIGGER_STATE" ] || [ "$NEW_TRIGGER_STATE" == "$TRIGGER_STATE" ]; then + continue + fi + + echo "----------------------------------------------------------------------" + echo "[INFO] Tests trigger changed" + echo "----------------------------------------------------------------------" + echo "[INFO] Old trigger: $TRIGGER_STATE" + echo "----------------------------------------------------------------------" + echo "[INFO] New trigger: $NEW_TRIGGER_STATE" + echo "----------------------------------------------------------------------" + + TRIGGER_STATE=$NEW_TRIGGER_STATE + + aws s3 cp $S3_TESTS_TRIGGER /opt/ignite-cassandra-tests/tests-trigger + if [ $? 
-ne 0 ]; then + echo "[ERROR] Failed to download tests trigger info from: $S3_TESTS_TRIGGER" + continue + fi + + TESTS_TYPE=$(cat /opt/ignite-cassandra-tests/tests-trigger | grep TESTS_TYPE | xargs | sed -r "s/TESTS_TYPE=//g") + if [ "$TESTS_TYPE" != "ignite" ] && [ "$TESTS_TYPE" != "cassandra" ]; then + rm -f /opt/ignite-cassandra-tests/tests-trigger + echo "[ERROR] Incorrect tests type specified in the trigger info: $S3_TESTS_TRIGGER" + continue + fi + + rm -f /opt/ignite-cassandra-tests/settings/tests.properties + mv -f /opt/ignite-cassandra-tests/tests-trigger /opt/ignite-cassandra-tests/settings/tests.properties + + waitAllTestNodesCompletedTests + + # switching state to PREPARING + switchToPreparingState + + waitAllClusterNodesReady "cassandra" + waitAllClusterNodesReady "ignite" + setupCassandraSeeds + setupIgniteSeeds + + cleanPreviousLogs + + tryToGetFirstNodeLock + if [ $? -eq 0 ]; then + dropTestsSummary + recreateCassandraArtifacts + fi + + # switching state to WAITING + switchToWaitingState + + waitAllClusterNodesReady "test" + + if [ "$FIRST_NODE_LOCK" == "true" ]; then + aws s3 rm $S3_TESTS_TRIGGER + fi + + # switching state to RUNNING + switchToRunningState + + runLoadTests + uploadTestsLogs + + tryToGetFirstNodeLock + if [ $? -eq 0 ]; then + waitAllTestNodesCompletedTests + buildTestsSummaryReport + removeFirstNodeLock + fi +done \ No newline at end of file diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/tests-report.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/tests-report.sh new file mode 100644 index 0000000000000..1576d57376166 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/bootstrap/aws/tests/tests-report.sh @@ -0,0 +1,499 @@ +#!/bin/sh + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# ----------------------------------------------------------------------------------------------- +# Tests report builder +# ----------------------------------------------------------------------------------------------- +# Script is used to analyze load tests logs collected from all 'Tests' cluster nodes and build +# summary report +# ----------------------------------------------------------------------------------------------- + +#profile=/home/ignite/.bash_profile +profile=/root/.bash_profile + +. $profile +. 
/opt/ignite-cassandra-tests/bootstrap/aws/common.sh "test" + +# Building tests summary report +reportTestsSummary() +{ + echo "[INFO] Preparing tests results summary" + + TESTS_SUMMARY_DIR=/opt/ignite-cassandra-tests/tests-summary + SUCCEED_NODES_FILE=$TESTS_SUMMARY_DIR/succeed-nodes + SUCCEED_NODES_DIR=$TESTS_SUMMARY_DIR/succeed + FAILED_NODES_FILE=$TESTS_SUMMARY_DIR/failed-nodes + FAILED_NODES_DIR=$TESTS_SUMMARY_DIR/failed + REPORT_FILE=$TESTS_SUMMARY_DIR/report.txt + + rm -Rf $TESTS_SUMMARY_DIR + mkdir -p $TESTS_SUMMARY_DIR + mkdir -p $SUCCEED_NODES_DIR + mkdir -p $FAILED_NODES_DIR + + aws s3 ls $S3_TESTS_SUCCESS | sed -r "s/PRE //g" | sed -r "s/ //g" | sed -r "s/\///g" > $SUCCEED_NODES_FILE + aws s3 ls $S3_TESTS_FAILURE | sed -r "s/PRE //g" | sed -r "s/ //g" | sed -r "s/\///g" > $FAILED_NODES_FILE + + succeedCount=$(cat $SUCCEED_NODES_FILE | wc -l) + failedCount=$(cat $FAILED_NODES_FILE | wc -l) + count=$(( $succeedCount+$failedCount )) + + echo "Test type : $TESTS_TYPE" > $REPORT_FILE + echo "Test nodes count : $count" >> $REPORT_FILE + echo "Test nodes succeed: $succeedCount" >> $REPORT_FILE + echo "Test nodes failed : $failedCount" >> $REPORT_FILE + echo "----------------------------------------------------------------------------------------------" >> $REPORT_FILE + + if [ $succeedCount -gt 0 ]; then + echo "Succeed test nodes |" >> $REPORT_FILE + echo "-------------------" >> $REPORT_FILE + cat $SUCCEED_NODES_FILE >> $REPORT_FILE + echo "----------------------------------------------------------------------------------------------" >> $REPORT_FILE + + aws s3 sync --delete $S3_TESTS_SUCCESS $SUCCEED_NODES_DIR + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to get succeed tests details" + else + reportSucceedTestsStatistics "$REPORT_FILE" "$SUCCEED_NODES_DIR" + fi + fi + + if [ $failedCount -gt 0 ]; then + echo "Failed test nodes |" >> $REPORT_FILE + echo "------------------" >> $REPORT_FILE + cat $FAILED_NODES_FILE >> $REPORT_FILE + echo "----------------------------------------------------------------------------------------------" >> $REPORT_FILE + + aws sync --delete $S3_TESTS_FAILURE $FAILED_NODES_DIR + if [ $? -ne 0 ]; then + echo "[ERROR] Failed to get failed tests details" + else + reportFailedTestsDetailes "$REPORT_FILE" "$FAILED_NODES_DIR" + fi + fi + + rm -f $HOME/tests-summary.zip + + pushd $TESTS_SUMMARY_DIR + + zip -r -9 $HOME/tests-summary.zip . + code=$? + + rm -Rf $TESTS_SUMMARY_DIR + + popd + + if [ $code -ne 0 ]; then + echo "-------------------------------------------------------------------------------------" + echo "[ERROR] Failed to create tests summary zip archive $HOME/tests-summary.zip for $TESTS_SUMMARY_DIR" + echo "-------------------------------------------------------------------------------------" + return 1 + fi + + aws s3 cp --sse AES256 $HOME/tests-summary.zip $S3_TESTS_SUMMARY + if [ $? 
-ne 0 ]; then + echo "-------------------------------------------------------------------------------------" + echo "[ERROR] Failed to uploat tests summary archive to: $S3_TESTS_SUMMARY" + echo "-------------------------------------------------------------------------------------" + else + echo "-------------------------------------------------------------------------------------" + echo "[INFO] Tests results summary uploaded to: $S3_TESTS_SUMMARY" + echo "-------------------------------------------------------------------------------------" + fi + + rm -f $HOME/tests-summary.zip +} + +# Creates report for succeed tests +reportSucceedTestsStatistics() +{ + writeMsg="0" + writeErrors="0" + writeSpeed="0" + blkWriteMsg="0" + blkWriteErrors="0" + blkWriteSpeed="0" + readMsg="0" + readErrors="0" + readSpeed="0" + blkReadMsg="0" + blkReadErrors="0" + blkReadSpeed="0" + + writeErrNodes= + blkWriteErrNodes= + readErrNodes= + blkReadErrNodes= + + tmpFile=`mktemp` + + for dir in $2/* + do + node=$(echo $dir | sed -r "s/^.*\///g") + echo "-------------------------------------------------------------------------------------" + echo "[INFO] Gathering statistics from $node test node" + echo "-------------------------------------------------------------------------------------" + + logFile=$(ls $dir | grep "${TESTS_TYPE}-load-tests.log" | head -1) + if [ -z "$logFile" ]; then + echo "[WARN] Node $node marked as succeeded, but it doesn't have \"${TESTS_TYPE}-load-tests.log\" tests results summary file" + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "Node $node marked as succeeded," >> $tmpFile + echo "but it doesn't have \"${TESTS_TYPE}-load-tests.log\" tests results summary file" >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + continue + fi + + logFile=$dir/$logFile + if [ ! -f "$logFile" ]; then + echo "[WARN] Node $node marked as succeeded, but it doesn't have \"${TESTS_TYPE}-load-tests.log\" tests results summary file" + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "Node $node marked as succeeded," >> $tmpFile + echo "but it doesn't have \"${TESTS_TYPE}-load-tests.log\" tests results summary file" >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + continue + fi + + cnt=$(cat $logFile | grep "^WRITE messages" | sed -r "s/WRITE messages: //g" | xargs) + if [ -n "$cnt" ]; then + writeMsg=$(bc <<< "$writeMsg + $cnt") + if [ $cnt -ne 0 ]; then + echo "[INFO] WRITE messages: $cnt" + else + echo "[WARN] WRITE messages count is zero for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "WRITE messages count is zero for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + else + echo "[WARN] Failed to detect WRITE messages count for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "Failed to detect WRITE messages count for $node node. This test probably failed." 
>> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + + cnt=$(cat $logFile | grep "^WRITE errors" | sed -r "s/WRITE errors: //g" | sed -r "s/,.*//g" | xargs) + if [ -n "$cnt" ]; then + echo "[INFO] WRITE errors: $cnt" + writeErrors=$(bc <<< "$writeErrors + $cnt") + if [ $cnt -ne 0 ]; then + if [ -n "$writeErrNodes" ]; then + writeErrNodes="${writeErrNodes}, " + fi + writeErrNodes="${writeErrNodes}${node}" + fi + else + echo "[WARN] Failed to detect WRITE errors count for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "Failed to detect WRITE errors count for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + + cnt=$(cat $logFile | grep "^WRITE speed" | sed -r "s/WRITE speed: //g" | sed -r "s/ msg\/sec//g" | xargs) + if [ -n "$cnt" ]; then + writeSpeed=$(bc <<< "$writeSpeed + $cnt") + if [ $cnt -ne 0 ]; then + echo "[INFO] WRITE speed: $cnt msg/sec" + else + echo "[WARN] WRITE speed is zero for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "WRITE speed is zero for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + else + echo "[WARN] Failed to detect WRITE speed for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "Failed to detect WRITE speed for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + + cnt=$(cat $logFile | grep "^BULK_WRITE messages" | sed -r "s/BULK_WRITE messages: //g" | xargs) + if [ -n "$cnt" ]; then + blkWriteMsg=$(bc <<< "$blkWriteMsg + $cnt") + if [ $cnt -ne 0 ]; then + echo "[INFO] BULK_WRITE messages: $cnt" + else + echo "[WARN] BULK_WRITE messages count is zero for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "BULK_WRITE messages count is zero for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + else + echo "[WARN] Failed to detect BULK_WRITE messages count for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "Failed to detect BULK_WRITE messages count for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + + cnt=$(cat $logFile | grep "^BULK_WRITE errors" | sed -r "s/BULK_WRITE errors: //g" | sed -r "s/,.*//g" | xargs) + if [ -n "$cnt" ]; then + blkWriteErrors=$(bc <<< "$blkWriteErrors + $cnt") + echo "[INFO] BULK_WRITE errors: $cnt" + if [ $cnt -ne 0 ]; then + if [ -n "$blkWriteErrNodes" ]; then + blkWriteErrNodes="${blkWriteErrNodes}, " + fi + blkWriteErrNodes="${blkWriteErrNodes}${node}" + fi + else + echo "[WARN] Failed to detect BULK_WRITE errors count for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "Failed to detect BULK_WRITE errors count for $node node. This test probably failed." 
>> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + + cnt=$(cat $logFile | grep "^BULK_WRITE speed" | sed -r "s/BULK_WRITE speed: //g" | sed -r "s/ msg\/sec//g" | xargs) + if [ -n "$cnt" ]; then + blkWriteSpeed=$(bc <<< "$blkWriteSpeed + $cnt") + if [ $cnt -ne 0 ]; then + echo "[INFO] BULK_WRITE speed: $cnt msg/sec" + else + echo "[WARN] BULK_WRITE speed is zero for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "BULK_WRITE speed is zero for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + else + echo "[WARN] Failed to detect BULK_WRITE speed for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "Failed to detect BULK_WRITE speed for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + + cnt=$(cat $logFile | grep "^READ messages" | sed -r "s/READ messages: //g" | xargs) + if [ -n "$cnt" ]; then + readMsg=$(bc <<< "$readMsg + $cnt") + if [ $cnt -ne 0 ]; then + echo "[INFO] READ messages: $cnt" + else + echo "[WARN] READ messages count is zero for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "READ messages count is zero for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + else + echo "[WARN] Failed to detect READ messages count for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "Failed to detect READ messages count for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + + cnt=$(cat $logFile | grep "^READ errors" | sed -r "s/READ errors: //g" | sed -r "s/,.*//g" | xargs) + if [ -n "$cnt" ]; then + readErrors=$(bc <<< "$readErrors + $cnt") + echo "[INFO] READ errors: $cnt" + if [ $cnt -ne 0 ]; then + if [ -n "$readErrNodes" ]; then + blkWriteErrNodes="${readErrNodes}, " + fi + readErrNodes="${readErrNodes}${node}" + fi + else + echo "[WARN] Failed to detect READ errors count for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "Failed to detect READ errors count for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + + cnt=$(cat $logFile | grep "^READ speed" | sed -r "s/READ speed: //g" | sed -r "s/ msg\/sec//g" | xargs) + if [ -n "$cnt" ]; then + readSpeed=$(bc <<< "$readSpeed + $cnt") + if [ $cnt -ne 0 ]; then + echo "[INFO] READ speed: $cnt msg/sec" + else + echo "[WARN] READ speed is zero for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "READ speed is zero for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + else + echo "[WARN] Failed to detect READ speed for $node node. This test probably failed." 
+ echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "Failed to detect READ speed for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + + cnt=$(cat $logFile | grep "^BULK_READ messages" | sed -r "s/BULK_READ messages: //g" | xargs) + if [ -n "$cnt" ]; then + blkReadMsg=$(bc <<< "$blkReadMsg + $cnt") + if [ $cnt -ne 0 ]; then + echo "[INFO] BULK_READ messages: $cnt" + else + echo "[WARN] BULK_READ messages count is zero for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "BULK_READ messages count is zero for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + else + echo "[WARN] Failed to detect BULK_READ messages count for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "Failed to detect BULK_READ messages count for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + + cnt=$(cat $logFile | grep "^BULK_READ errors" | sed -r "s/BULK_READ errors: //g" | sed -r "s/,.*//g" | xargs) + if [ -n "$cnt" ]; then + blkReadErrors=$(bc <<< "$blkReadErrors + $cnt") + echo "[INFO] BULK_READ errors: $cnt" + if [ $cnt -ne 0 ]; then + if [ -n "$blkReadErrNodes" ]; then + blkReadErrNodes="${blkReadErrNodes}, " + fi + blkReadErrNodes="${blkReadErrNodes}${node}" + fi + else + echo "[WARN] Failed to detect BULK_READ errors count for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "Failed to detect BULK_READ errors count for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + + cnt=$(cat $logFile | grep "^BULK_READ speed" | sed -r "s/BULK_READ speed: //g" | sed -r "s/ msg\/sec//g" | xargs) + if [ -n "$cnt" ]; then + blkReadSpeed=$(bc <<< "$blkReadSpeed + $cnt") + if [ $cnt -ne 0 ]; then + echo "[INFO] BULK_READ speed: $cnt msg/sec" + else + echo "[WARN] BULK_READ speed is zero for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "BULK_READ speed is zero for $node node. This test probably failed." >> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + else + echo "[WARN] Failed to detect BULK_READ speed for $node node. This test probably failed." + echo "WARNING |" >> $tmpFile + echo "--------" >> $tmpFile + echo "Failed to detect BULK_READ speed for $node node. This test probably failed." 
>> $tmpFile + echo "----------------------------------------------------------------------------------------------" >> $tmpFile + fi + done + + echo "-------------------------------------------------------------------------------------" + + echo "WRITE test metrics |" >> $1 + echo "-------------------" >> $1 + echo "Messages: $writeMsg" >> $1 + echo "Speed : $writeSpeed msg/sec" >> $1 + echo "Errors : $writeErrors" >> $1 + echo "----------------------------------------------------------------------------------------------" >> $1 + + echo "BULK_WRITE test metrics |" >> $1 + echo "------------------------" >> $1 + echo "Messages: $blkWriteMsg" >> $1 + echo "Speed : $blkWriteSpeed msg/sec" >> $1 + echo "Errors : $blkWriteErrors" >> $1 + echo "----------------------------------------------------------------------------------------------" >> $1 + + echo "READ test metrics |" >> $1 + echo "------------------" >> $1 + echo "Messages: $readMsg" >> $1 + echo "Speed : $readSpeed msg/sec" >> $1 + echo "Errors : $readErrors" >> $1 + echo "----------------------------------------------------------------------------------------------" >> $1 + + echo "BULK_READ test metrics |" >> $1 + echo "-----------------------" >> $1 + echo "Messages: $blkReadMsg" >> $1 + echo "Speed : $blkReadSpeed msg/sec" >> $1 + echo "Errors : $blkReadErrors" >> $1 + echo "----------------------------------------------------------------------------------------------" >> $1 + + if [ -n "$writeErrNodes" ]; then + echo "Nodes having WRITE errors |" >> $1 + echo "-------------------------------" >> $1 + echo "$writeErrNodes" >> $1 + echo "----------------------------------------------------------------------------------------------" >> $1 + fi + + if [ -n "$blkWriteErrNodes" ]; then + echo "Nodes having BULK_WRITE errors |" >> $1 + echo "-------------------------------" >> $1 + echo "$blkWriteErrNodes" >> $1 + echo "----------------------------------------------------------------------------------------------" >> $1 + fi + + if [ -n "$readErrNodes" ]; then + echo "Nodes having READ errors |" >> $1 + echo "-------------------------------" >> $1 + echo "$readErrNodes" >> $1 + echo "----------------------------------------------------------------------------------------------" >> $1 + fi + + if [ -n "$blkReadErrNodes" ]; then + echo "Nodes having BULK_READ errors |" >> $1 + echo "-------------------------------" >> $1 + echo "$blkReadErrNodes" >> $1 + echo "----------------------------------------------------------------------------------------------" >> $1 + fi + + cat $tmpFile >> $1 + + rm -f $tmpFile +} + +# Creates report for failed tests +reportFailedTestsDetailes() +{ + for dir in $2/* + do + node=$(echo $dir | sed -r "s/^.*\///g") + if [ -z "$node" ]; then + continue + fi + + echo "----------------------------------------------------------------------------------------------" >> $1 + echo "Error details for node: $node" >> $1 + echo "----------------------------------------------------------------------------------------------" >> $1 + + if [ -f "$dir/__error__" ]; then + cat $dir/__error__ >> $1 + else + echo "N/A" >> $1 + fi + done +} + +####################################################################################################### + +if [ "$TESTS_TYPE" != "ignite" ] && [ "$TESTS_TYPE" != "cassandra" ]; then + terminate "Incorrect tests type specified: $TESTS_TYPE" +fi + +reportTestsSummary \ No newline at end of file diff --git 
a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraConfigTest.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraConfigTest.java new file mode 100644 index 0000000000000..48ac18050651f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraConfigTest.java @@ -0,0 +1,141 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests; + +import org.apache.ignite.cache.query.annotations.QuerySqlField; +import org.apache.ignite.cache.store.cassandra.persistence.KeyPersistenceSettings; +import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Simple test for DDL generator. + */ +public class CassandraConfigTest { + /** + * Check if same DDL generated for similar keys and same KeyPersistenceConfiguration. + */ + @Test + public void testDDLGeneration() { + KeyPersistenceSettings keyPersistenceSettingsA = getKeyPersistenceSettings(KeyA.class); + KeyPersistenceSettings keyPersistenceSettingsB = getKeyPersistenceSettings(KeyB.class); + + assertEquals(keyPersistenceSettingsB.getPrimaryKeyDDL(), + keyPersistenceSettingsA.getPrimaryKeyDDL()); + + assertEquals(keyPersistenceSettingsB.getClusteringDDL(), + keyPersistenceSettingsA.getClusteringDDL()); + } + + /** + * @return KeyPersistenceSetting + */ + private KeyPersistenceSettings getKeyPersistenceSettings(Class keyClass) { + String cfg = "" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " " + + " " + + " " + + ""; + + return new KeyValuePersistenceSettings(cfg).getKeyPersistenceSettings(); + } + + /** + * + */ + public static class BaseKey { + /** */ + @QuerySqlField + // Looks like next annotation is ignored when generating DDL, + // but Ignite supports this annotation in parent classes. +// @AffinityKeyMapped + private Integer contextId; + + /** */ + public Integer getContextId() { + return contextId; + } + + /** */ + public void setContextId(Integer contextId) { + this.contextId = contextId; + } + } + + /** + * + */ + public static class KeyA extends BaseKey { + /** */ + @QuerySqlField(index = true) + private String timestamp; + + /** */ + @QuerySqlField(index = true) + private String name; + + /** */ + @QuerySqlField + private String creationDate; + + /** + * Constructor. 
+ */ + public KeyA() { + } + } + + /** + * + */ + public static class KeyB { + + /** */ + @QuerySqlField(index = true) + private String timestamp; + + /** */ + @QuerySqlField(index = true) + private String name; + + /** */ + @QuerySqlField + private String creationDate; + + /** */ + @QuerySqlField +// @AffinityKeyMapped + private Integer contextId; + + /** + * Constructor. + */ + public KeyB() { + } + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraDirectPersistenceLoadTest.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraDirectPersistenceLoadTest.java new file mode 100644 index 0000000000000..48f85c3426e30 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraDirectPersistenceLoadTest.java @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests; + +import org.apache.ignite.tests.load.LoadTestDriver; +import org.apache.ignite.tests.load.cassandra.BulkReadWorker; +import org.apache.ignite.tests.load.cassandra.BulkWriteWorker; +import org.apache.ignite.tests.load.cassandra.ReadWorker; +import org.apache.ignite.tests.load.cassandra.WriteWorker; +import org.apache.ignite.tests.utils.CacheStoreHelper; +import org.apache.ignite.tests.utils.CassandraHelper; +import org.apache.ignite.tests.utils.TestsHelper; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * Load tests for {@link org.apache.ignite.cache.store.cassandra.CassandraCacheStore} implementation of + * {@link org.apache.ignite.cache.store.CacheStore} which allows to store Ignite cache data into Cassandra tables. + */ +public class CassandraDirectPersistenceLoadTest extends LoadTestDriver { + /** */ + private static final Logger LOGGER = LogManager.getLogger("CassandraLoadTests"); + + /** + * + * @param args Test arguments. + */ + public static void main(String[] args) { + try { + LOGGER.info("Cassandra load tests execution started"); + + LoadTestDriver driver = new CassandraDirectPersistenceLoadTest(); + + /** + * Load test scripts could be executed from several machines. Current implementation can correctly, + * handle situation when Cassandra keyspace/table was dropped - for example by the same load test + * started a bit later on another machine. Moreover there is a warm up period for each load test. + * Thus all the delays related to keyspaces/tables recreation actions will not affect performance metrics, + * but it will be produced lots of "trash" output in the logs (related to correct handling of such + * exceptional situation and keyspace/table recreation). 
+ * + * Thus dropping test keyspaces at the beginning of the tests makes sense only for Unit tests, + * but not for Load tests. + **/ + + //CassandraHelper.dropTestKeyspaces(); + + driver.runTest("WRITE", WriteWorker.class, WriteWorker.LOGGER_NAME); + + driver.runTest("BULK_WRITE", BulkWriteWorker.class, BulkWriteWorker.LOGGER_NAME); + + driver.runTest("READ", ReadWorker.class, ReadWorker.LOGGER_NAME); + + driver.runTest("BULK_READ", BulkReadWorker.class, BulkReadWorker.LOGGER_NAME); + + /** + * Load test script executed on one machine could complete earlier that the same load test executed from + * another machine. Current implementation can correctly handle situation when Cassandra keyspace/table + * was dropped (simply recreate it). But dropping keyspace/table during load tests execution and subsequent + * recreation of such objects can have SIGNIFICANT EFFECT on final performance metrics. + * + * Thus dropping test keyspaces at the end of the tests makes sense only for Unit tests, + * but not for Load tests. + */ + + //CassandraHelper.dropTestKeyspaces(); // REVIEW This line is commented by purpose? + + LOGGER.info("Cassandra load tests execution completed"); + } + catch (Throwable e) { + LOGGER.error("Cassandra load tests execution failed", e); + throw new RuntimeException("Cassandra load tests execution failed", e); + } + finally { + CassandraHelper.releaseCassandraResources(); + } + } + + /** {@inheritDoc} */ + @Override protected Logger logger() { + return LOGGER; + } + + /** {@inheritDoc} */ + @Override protected Object setup(String logName) { + return CacheStoreHelper.createCacheStore( + TestsHelper.getLoadTestsCacheName(), + TestsHelper.getLoadTestsPersistenceSettings(), + CassandraHelper.getAdminDataSrc(), + LogManager.getLogger(logName)); + } + +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraDirectPersistenceTest.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraDirectPersistenceTest.java new file mode 100644 index 0000000000000..0bc33677bb400 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraDirectPersistenceTest.java @@ -0,0 +1,767 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests; + +import java.util.Collection; +import java.util.List; +import java.util.Map; +import org.apache.ignite.cache.store.CacheStore; +import org.apache.ignite.internal.processors.cache.CacheEntryImpl; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.apache.ignite.tests.pojos.Person; +import org.apache.ignite.tests.pojos.PersonId; +import org.apache.ignite.tests.pojos.Product; +import org.apache.ignite.tests.pojos.ProductOrder; +import org.apache.ignite.tests.pojos.SimplePerson; +import org.apache.ignite.tests.pojos.SimplePersonId; +import org.apache.ignite.tests.utils.CacheStoreHelper; +import org.apache.ignite.tests.utils.CassandraHelper; +import org.apache.ignite.tests.utils.TestCacheSession; +import org.apache.ignite.tests.utils.TestTransaction; +import org.apache.ignite.tests.utils.TestsHelper; +import org.apache.ignite.transactions.Transaction; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.springframework.core.io.ClassPathResource; + +/** + * Unit tests for {@link org.apache.ignite.cache.store.cassandra.CassandraCacheStore} implementation of + * {@link org.apache.ignite.cache.store.CacheStore} which allows to store Ignite cache data into Cassandra tables. + */ +public class CassandraDirectPersistenceTest { + /** */ + private static final Logger LOGGER = LogManager.getLogger(CassandraDirectPersistenceTest.class.getName()); + + /** */ + @BeforeClass + public static void setUpClass() { + if (CassandraHelper.useEmbeddedCassandra()) { + try { + CassandraHelper.startEmbeddedCassandra(LOGGER); + } + catch (Throwable e) { + throw new RuntimeException("Failed to start embedded Cassandra instance", e); + } + } + + LOGGER.info("Testing admin connection to Cassandra"); + CassandraHelper.testAdminConnection(); + + LOGGER.info("Testing regular connection to Cassandra"); + CassandraHelper.testRegularConnection(); + + LOGGER.info("Dropping all artifacts from previous tests execution session"); + CassandraHelper.dropTestKeyspaces(); + + LOGGER.info("Start tests execution"); + } + + /** */ + @AfterClass + public static void tearDownClass() { + try { + CassandraHelper.dropTestKeyspaces(); + } + finally { + CassandraHelper.releaseCassandraResources(); + + if (CassandraHelper.useEmbeddedCassandra()) { + try { + CassandraHelper.stopEmbeddedCassandra(); + } + catch (Throwable e) { + LOGGER.error("Failed to stop embedded Cassandra instance", e); + } + } + } + } + + /** */ + @Test + @SuppressWarnings("unchecked") + public void primitiveStrategyTest() { + CacheStore store1 = CacheStoreHelper.createCacheStore("longTypes", + new ClassPathResource("org/apache/ignite/tests/persistence/primitive/persistence-settings-1.xml"), + CassandraHelper.getAdminDataSrc()); + + CacheStore store2 = CacheStoreHelper.createCacheStore("stringTypes", + new ClassPathResource("org/apache/ignite/tests/persistence/primitive/persistence-settings-2.xml"), + CassandraHelper.getAdminDataSrc()); + + Collection> longEntries = TestsHelper.generateLongsEntries(); + Collection> strEntries = TestsHelper.generateStringsEntries(); + + Collection fakeLongKeys = TestsHelper.getKeys(longEntries); + fakeLongKeys.add(-1L); + fakeLongKeys.add(-2L); + fakeLongKeys.add(-3L); + fakeLongKeys.add(-4L); + + Collection fakeStrKeys = TestsHelper.getKeys(strEntries); + fakeStrKeys.add("-1"); + fakeStrKeys.add("-2"); + fakeStrKeys.add("-3"); + fakeStrKeys.add("-4"); + + 
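+        // The "fake" keys added above are never written to Cassandra; they are used later in
+        // this test to verify that single loads of missing keys return null and that bulk
+        // loads only return the entries which actually exist.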
LOGGER.info("Running PRIMITIVE strategy write tests"); + + LOGGER.info("Running single write operation tests"); + store1.write(longEntries.iterator().next()); + store2.write(strEntries.iterator().next()); + LOGGER.info("Single write operation tests passed"); + + LOGGER.info("Running bulk write operation tests"); + store1.writeAll(longEntries); + store2.writeAll(strEntries); + LOGGER.info("Bulk write operation tests passed"); + + LOGGER.info("PRIMITIVE strategy write tests passed"); + + LOGGER.info("Running PRIMITIVE strategy read tests"); + + LOGGER.info("Running single read operation tests"); + + LOGGER.info("Running real keys read tests"); + + Long longVal = (Long)store1.load(longEntries.iterator().next().getKey()); + if (!longEntries.iterator().next().getValue().equals(longVal)) + throw new RuntimeException("Long values were incorrectly deserialized from Cassandra"); + + String strVal = (String)store2.load(strEntries.iterator().next().getKey()); + if (!strEntries.iterator().next().getValue().equals(strVal)) + throw new RuntimeException("String values were incorrectly deserialized from Cassandra"); + + LOGGER.info("Running fake keys read tests"); + + longVal = (Long)store1.load(-1L); + if (longVal != null) + throw new RuntimeException("Long value with fake key '-1' was found in Cassandra"); + + strVal = (String)store2.load("-1"); + if (strVal != null) + throw new RuntimeException("String value with fake key '-1' was found in Cassandra"); + + LOGGER.info("Single read operation tests passed"); + + LOGGER.info("Running bulk read operation tests"); + + LOGGER.info("Running real keys read tests"); + + Map longValues = store1.loadAll(TestsHelper.getKeys(longEntries)); + if (!TestsHelper.checkCollectionsEqual(longValues, longEntries)) + throw new RuntimeException("Long values were incorrectly deserialized from Cassandra"); + + Map strValues = store2.loadAll(TestsHelper.getKeys(strEntries)); + if (!TestsHelper.checkCollectionsEqual(strValues, strEntries)) + throw new RuntimeException("String values were incorrectly deserialized from Cassandra"); + + LOGGER.info("Running fake keys read tests"); + + longValues = store1.loadAll(fakeLongKeys); + if (!TestsHelper.checkCollectionsEqual(longValues, longEntries)) + throw new RuntimeException("Long values were incorrectly deserialized from Cassandra"); + + strValues = store2.loadAll(fakeStrKeys); + if (!TestsHelper.checkCollectionsEqual(strValues, strEntries)) + throw new RuntimeException("String values were incorrectly deserialized from Cassandra"); + + LOGGER.info("Bulk read operation tests passed"); + + LOGGER.info("PRIMITIVE strategy read tests passed"); + + LOGGER.info("Running PRIMITIVE strategy delete tests"); + + LOGGER.info("Deleting real keys"); + + store1.delete(longEntries.iterator().next().getKey()); + store1.deleteAll(TestsHelper.getKeys(longEntries)); + + store2.delete(strEntries.iterator().next().getKey()); + store2.deleteAll(TestsHelper.getKeys(strEntries)); + + LOGGER.info("Deleting fake keys"); + + store1.delete(-1L); + store2.delete("-1"); + + store1.deleteAll(fakeLongKeys); + store2.deleteAll(fakeStrKeys); + + LOGGER.info("PRIMITIVE strategy delete tests passed"); + } + + /** */ + @Test + @SuppressWarnings("unchecked") + public void blobStrategyTest() { + CacheStore store1 = CacheStoreHelper.createCacheStore("longTypes", + new ClassPathResource("org/apache/ignite/tests/persistence/blob/persistence-settings-1.xml"), + CassandraHelper.getAdminDataSrc()); + + CacheStore store2 = CacheStoreHelper.createCacheStore("personTypes", + new 
ClassPathResource("org/apache/ignite/tests/persistence/blob/persistence-settings-2.xml"), + CassandraHelper.getAdminDataSrc()); + + CacheStore store3 = CacheStoreHelper.createCacheStore("personTypes", + new ClassPathResource("org/apache/ignite/tests/persistence/blob/persistence-settings-3.xml"), + CassandraHelper.getAdminDataSrc()); + + Collection> longEntries = TestsHelper.generateLongsEntries(); + Collection> personEntries = TestsHelper.generateLongsPersonsEntries(); + + LOGGER.info("Running BLOB strategy write tests"); + + LOGGER.info("Running single write operation tests"); + store1.write(longEntries.iterator().next()); + store2.write(personEntries.iterator().next()); + store3.write(personEntries.iterator().next()); + LOGGER.info("Single write operation tests passed"); + + LOGGER.info("Running bulk write operation tests"); + store1.writeAll(longEntries); + store2.writeAll(personEntries); + store3.writeAll(personEntries); + LOGGER.info("Bulk write operation tests passed"); + + LOGGER.info("BLOB strategy write tests passed"); + + LOGGER.info("Running BLOB strategy read tests"); + + LOGGER.info("Running single read operation tests"); + + Long longVal = (Long)store1.load(longEntries.iterator().next().getKey()); + if (!longEntries.iterator().next().getValue().equals(longVal)) + throw new RuntimeException("Long values were incorrectly deserialized from Cassandra"); + + Person personVal = (Person)store2.load(personEntries.iterator().next().getKey()); + if (!personEntries.iterator().next().getValue().equals(personVal)) + throw new RuntimeException("Person values were incorrectly deserialized from Cassandra"); + + personVal = (Person)store3.load(personEntries.iterator().next().getKey()); + if (!personEntries.iterator().next().getValue().equals(personVal)) + throw new RuntimeException("Person values were incorrectly deserialized from Cassandra"); + + LOGGER.info("Single read operation tests passed"); + + LOGGER.info("Running bulk read operation tests"); + + Map longValues = store1.loadAll(TestsHelper.getKeys(longEntries)); + if (!TestsHelper.checkCollectionsEqual(longValues, longEntries)) + throw new RuntimeException("Long values were incorrectly deserialized from Cassandra"); + + Map personValues = store2.loadAll(TestsHelper.getKeys(personEntries)); + if (!TestsHelper.checkPersonCollectionsEqual(personValues, personEntries, false)) + throw new RuntimeException("Person values were incorrectly deserialized from Cassandra"); + + personValues = store3.loadAll(TestsHelper.getKeys(personEntries)); + if (!TestsHelper.checkPersonCollectionsEqual(personValues, personEntries, false)) + throw new RuntimeException("Person values were incorrectly deserialized from Cassandra"); + + LOGGER.info("Bulk read operation tests passed"); + + LOGGER.info("BLOB strategy read tests passed"); + + LOGGER.info("Running BLOB strategy delete tests"); + + store1.delete(longEntries.iterator().next().getKey()); + store1.deleteAll(TestsHelper.getKeys(longEntries)); + + store2.delete(personEntries.iterator().next().getKey()); + store2.deleteAll(TestsHelper.getKeys(personEntries)); + + store3.delete(personEntries.iterator().next().getKey()); + store3.deleteAll(TestsHelper.getKeys(personEntries)); + + LOGGER.info("BLOB strategy delete tests passed"); + } + + /** */ + @Test + @SuppressWarnings("unchecked") + public void pojoStrategyTest() { + CacheStore store1 = CacheStoreHelper.createCacheStore("longTypes", + new ClassPathResource("org/apache/ignite/tests/persistence/pojo/persistence-settings-1.xml"), + 
CassandraHelper.getAdminDataSrc()); + + CacheStore store2 = CacheStoreHelper.createCacheStore("personTypes", + new ClassPathResource("org/apache/ignite/tests/persistence/pojo/persistence-settings-2.xml"), + CassandraHelper.getAdminDataSrc()); + + CacheStore store3 = CacheStoreHelper.createCacheStore("personTypes", + new ClassPathResource("org/apache/ignite/tests/persistence/pojo/persistence-settings-3.xml"), + CassandraHelper.getAdminDataSrc()); + + CacheStore store4 = CacheStoreHelper.createCacheStore("persons", + new ClassPathResource("org/apache/ignite/tests/persistence/pojo/persistence-settings-4.xml"), + CassandraHelper.getAdminDataSrc()); + + CacheStore productStore = CacheStoreHelper.createCacheStore("product", + new ClassPathResource("org/apache/ignite/tests/persistence/pojo/product.xml"), + CassandraHelper.getAdminDataSrc()); + + CacheStore orderStore = CacheStoreHelper.createCacheStore("order", + new ClassPathResource("org/apache/ignite/tests/persistence/pojo/order.xml"), + CassandraHelper.getAdminDataSrc()); + + Collection> entries1 = TestsHelper.generateLongsPersonsEntries(); + Collection> entries2 = TestsHelper.generatePersonIdsPersonsEntries(); + Collection> entries3 = TestsHelper.generatePersonIdsPersonsEntries(); + Collection> productEntries = TestsHelper.generateProductEntries(); + Collection> orderEntries = TestsHelper.generateOrderEntries(); + + LOGGER.info("Running POJO strategy write tests"); + + LOGGER.info("Running single write operation tests"); + store1.write(entries1.iterator().next()); + store2.write(entries2.iterator().next()); + store3.write(entries3.iterator().next()); + store4.write(entries3.iterator().next()); + productStore.write(productEntries.iterator().next()); + orderStore.write(orderEntries.iterator().next()); + LOGGER.info("Single write operation tests passed"); + + LOGGER.info("Running bulk write operation tests"); + store1.writeAll(entries1); + store2.writeAll(entries2); + store3.writeAll(entries3); + store4.writeAll(entries3); + productStore.writeAll(productEntries); + orderStore.writeAll(orderEntries); + LOGGER.info("Bulk write operation tests passed"); + + LOGGER.info("POJO strategy write tests passed"); + + LOGGER.info("Running POJO strategy read tests"); + + LOGGER.info("Running single read operation tests"); + + Person person = (Person)store1.load(entries1.iterator().next().getKey()); + if (!entries1.iterator().next().getValue().equalsPrimitiveFields(person)) + throw new RuntimeException("Person values were incorrectly deserialized from Cassandra"); + + person = (Person)store2.load(entries2.iterator().next().getKey()); + if (!entries2.iterator().next().getValue().equalsPrimitiveFields(person)) + throw new RuntimeException("Person values were incorrectly deserialized from Cassandra"); + + person = (Person)store3.load(entries3.iterator().next().getKey()); + if (!entries3.iterator().next().getValue().equals(person)) + throw new RuntimeException("Person values were incorrectly deserialized from Cassandra"); + + person = (Person)store4.load(entries3.iterator().next().getKey()); + if (!entries3.iterator().next().getValue().equals(person)) + throw new RuntimeException("Person values were incorrectly deserialized from Cassandra"); + + Product product = (Product)productStore.load(productEntries.iterator().next().getKey()); + if (!productEntries.iterator().next().getValue().equals(product)) + throw new RuntimeException("Product values were incorrectly deserialized from Cassandra"); + + ProductOrder order = 
(ProductOrder)orderStore.load(orderEntries.iterator().next().getKey()); + if (!orderEntries.iterator().next().getValue().equals(order)) + throw new RuntimeException("Order values were incorrectly deserialized from Cassandra"); + + LOGGER.info("Single read operation tests passed"); + + LOGGER.info("Running bulk read operation tests"); + + Map persons = store1.loadAll(TestsHelper.getKeys(entries1)); + if (!TestsHelper.checkPersonCollectionsEqual(persons, entries1, true)) + throw new RuntimeException("Person values were incorrectly deserialized from Cassandra"); + + persons = store2.loadAll(TestsHelper.getKeys(entries2)); + if (!TestsHelper.checkPersonCollectionsEqual(persons, entries2, true)) + throw new RuntimeException("Person values were incorrectly deserialized from Cassandra"); + + persons = store3.loadAll(TestsHelper.getKeys(entries3)); + if (!TestsHelper.checkPersonCollectionsEqual(persons, entries3, false)) + throw new RuntimeException("Person values were incorrectly deserialized from Cassandra"); + + persons = store4.loadAll(TestsHelper.getKeys(entries3)); + if (!TestsHelper.checkPersonCollectionsEqual(persons, entries3, false)) + throw new RuntimeException("Person values were incorrectly deserialized from Cassandra"); + + Map products = productStore.loadAll(TestsHelper.getKeys(productEntries)); + if (!TestsHelper.checkProductCollectionsEqual(products, productEntries)) + throw new RuntimeException("Product values were incorrectly deserialized from Cassandra"); + + Map orders = orderStore.loadAll(TestsHelper.getKeys(orderEntries)); + if (!TestsHelper.checkOrderCollectionsEqual(orders, orderEntries)) + throw new RuntimeException("Order values were incorrectly deserialized from Cassandra"); + + LOGGER.info("Bulk read operation tests passed"); + + LOGGER.info("POJO strategy read tests passed"); + + LOGGER.info("Running POJO strategy delete tests"); + + store1.delete(entries1.iterator().next().getKey()); + store1.deleteAll(TestsHelper.getKeys(entries1)); + + store2.delete(entries2.iterator().next().getKey()); + store2.deleteAll(TestsHelper.getKeys(entries2)); + + store3.delete(entries3.iterator().next().getKey()); + store3.deleteAll(TestsHelper.getKeys(entries3)); + + store4.delete(entries3.iterator().next().getKey()); + store4.deleteAll(TestsHelper.getKeys(entries3)); + + productStore.delete(productEntries.iterator().next().getKey()); + productStore.deleteAll(TestsHelper.getKeys(productEntries)); + + orderStore.delete(orderEntries.iterator().next().getKey()); + orderStore.deleteAll(TestsHelper.getKeys(orderEntries)); + + LOGGER.info("POJO strategy delete tests passed"); + } + + /** */ + @Test + @SuppressWarnings("unchecked") + public void pojoStrategySimpleObjectsTest() { + CacheStore store5 = CacheStoreHelper.createCacheStore("persons5", + new ClassPathResource("org/apache/ignite/tests/persistence/pojo/persistence-settings-5.xml"), + CassandraHelper.getAdminDataSrc()); + + CacheStore store6 = CacheStoreHelper.createCacheStore("persons6", + new ClassPathResource("org/apache/ignite/tests/persistence/pojo/persistence-settings-6.xml"), + CassandraHelper.getAdminDataSrc()); + + Collection> entries5 = TestsHelper.generateSimplePersonIdsPersonsEntries(); + Collection> entries6 = TestsHelper.generateSimplePersonIdsPersonsEntries(); + + LOGGER.info("Running POJO strategy write tests for simple objects"); + + LOGGER.info("Running single write operation tests"); + store5.write(entries5.iterator().next()); + store6.write(entries6.iterator().next()); + LOGGER.info("Single write operation tests 
passed"); + + LOGGER.info("Running bulk write operation tests"); + store5.writeAll(entries5); + store6.writeAll(entries6); + LOGGER.info("Bulk write operation tests passed"); + + LOGGER.info("POJO strategy write tests for simple objects passed"); + + LOGGER.info("Running POJO simple objects strategy read tests"); + + LOGGER.info("Running single read operation tests"); + + SimplePerson person = (SimplePerson)store5.load(entries5.iterator().next().getKey()); + if (!entries5.iterator().next().getValue().equalsPrimitiveFields(person)) + throw new RuntimeException("SimplePerson values were incorrectly deserialized from Cassandra"); + + person = (SimplePerson)store6.load(entries6.iterator().next().getKey()); + if (!entries6.iterator().next().getValue().equalsPrimitiveFields(person)) + throw new RuntimeException("SimplePerson values were incorrectly deserialized from Cassandra"); + + LOGGER.info("Single read operation tests passed"); + + LOGGER.info("Running bulk read operation tests"); + + Map persons = store5.loadAll(TestsHelper.getKeys(entries5)); + if (!TestsHelper.checkSimplePersonCollectionsEqual(persons, entries5, true)) + throw new RuntimeException("SimplePerson values were incorrectly deserialized from Cassandra"); + + persons = store6.loadAll(TestsHelper.getKeys(entries6)); + if (!TestsHelper.checkSimplePersonCollectionsEqual(persons, entries6, true)) + throw new RuntimeException("SimplePerson values were incorrectly deserialized from Cassandra"); + + LOGGER.info("Bulk read operation tests passed"); + + LOGGER.info("POJO strategy read tests for simple objects passed"); + + LOGGER.info("Running POJO strategy delete tests for simple objects"); + + store5.delete(entries5.iterator().next().getKey()); + store5.deleteAll(TestsHelper.getKeys(entries5)); + + store6.delete(entries6.iterator().next().getKey()); + store6.deleteAll(TestsHelper.getKeys(entries6)); + + LOGGER.info("POJO strategy delete tests for simple objects passed"); + } + + /** */ + @Test + @SuppressWarnings("unchecked") + public void pojoStrategyTransactionTest() { + Map sessionProps = U.newHashMap(1); + Transaction sessionTx = new TestTransaction(); + + CacheStore productStore = CacheStoreHelper.createCacheStore("product", + new ClassPathResource("org/apache/ignite/tests/persistence/pojo/product.xml"), + CassandraHelper.getAdminDataSrc(), new TestCacheSession("product", sessionTx, sessionProps)); + + CacheStore orderStore = CacheStoreHelper.createCacheStore("order", + new ClassPathResource("org/apache/ignite/tests/persistence/pojo/order.xml"), + CassandraHelper.getAdminDataSrc(), new TestCacheSession("order", sessionTx, sessionProps)); + + List> productEntries = TestsHelper.generateProductEntries(); + Map>> ordersPerProduct = + TestsHelper.generateOrdersPerProductEntries(productEntries, 2); + + Collection productIds = TestsHelper.getProductIds(productEntries); + Collection orderIds = TestsHelper.getOrderIds(ordersPerProduct); + + LOGGER.info("Running POJO strategy transaction write tests"); + + LOGGER.info("Running single write operation tests"); + + CassandraHelper.dropTestKeyspaces(); + + Product product = productEntries.iterator().next().getValue(); + ProductOrder order = ordersPerProduct.get(product.getId()).iterator().next().getValue(); + + productStore.write(productEntries.iterator().next()); + orderStore.write(ordersPerProduct.get(product.getId()).iterator().next()); + + if (productStore.load(product.getId()) != null || orderStore.load(order.getId()) != null) { + throw new RuntimeException("Single write operation test 
failed. Transaction wasn't committed yet, but " + + "objects were already persisted into Cassandra"); + } + + Map products = (Map)productStore.loadAll(productIds); + Map orders = (Map)orderStore.loadAll(orderIds); + + if ((products != null && !products.isEmpty()) || (orders != null && !orders.isEmpty())) { + throw new RuntimeException("Single write operation test failed. Transaction wasn't committed yet, but " + + "objects were already persisted into Cassandra"); + } + + //noinspection deprecation + orderStore.sessionEnd(true); + //noinspection deprecation + productStore.sessionEnd(true); + + Product product1 = (Product)productStore.load(product.getId()); + ProductOrder order1 = (ProductOrder)orderStore.load(order.getId()); + + if (product1 == null || order1 == null) { + throw new RuntimeException("Single write operation test failed. Transaction was committed, but " + + "no objects were persisted into Cassandra"); + } + + if (!product.equals(product1) || !order.equals(order1)) { + throw new RuntimeException("Single write operation test failed. Transaction was committed, but " + + "objects were incorrectly persisted/loaded to/from Cassandra"); + } + + products = (Map)productStore.loadAll(productIds); + orders = (Map)orderStore.loadAll(orderIds); + + if (products == null || products.isEmpty() || orders == null || orders.isEmpty()) { + throw new RuntimeException("Single write operation test failed. Transaction was committed, but " + + "no objects were persisted into Cassandra"); + } + + if (products.size() > 1 || orders.size() > 1) { + throw new RuntimeException("Single write operation test failed. There were committed more objects " + + "into Cassandra than expected"); + } + + product1 = products.entrySet().iterator().next().getValue(); + order1 = orders.entrySet().iterator().next().getValue(); + + if (!product.equals(product1) || !order.equals(order1)) { + throw new RuntimeException("Single write operation test failed. Transaction was committed, but " + + "objects were incorrectly persisted/loaded to/from Cassandra"); + } + + LOGGER.info("Single write operation tests passed"); + + LOGGER.info("Running bulk write operation tests"); + + CassandraHelper.dropTestKeyspaces(); + sessionProps.clear(); + + productStore.writeAll(productEntries); + + for (Long productId : ordersPerProduct.keySet()) + orderStore.writeAll(ordersPerProduct.get(productId)); + + for (Long productId : productIds) { + if (productStore.load(productId) != null) { + throw new RuntimeException("Bulk write operation test failed. Transaction wasn't committed yet, but " + + "objects were already persisted into Cassandra"); + } + } + + for (Long orderId : orderIds) { + if (orderStore.load(orderId) != null) { + throw new RuntimeException("Bulk write operation test failed. Transaction wasn't committed yet, but " + + "objects were already persisted into Cassandra"); + } + } + + products = (Map)productStore.loadAll(productIds); + orders = (Map)orderStore.loadAll(orderIds); + + if ((products != null && !products.isEmpty()) || (orders != null && !orders.isEmpty())) { + throw new RuntimeException("Bulk write operation test failed. 
Transaction wasn't committed yet, but " + + "objects were already persisted into Cassandra"); + } + + //noinspection deprecation + productStore.sessionEnd(true); + //noinspection deprecation + orderStore.sessionEnd(true); + + for (CacheEntryImpl entry : productEntries) { + product = (Product)productStore.load(entry.getKey()); + + if (!entry.getValue().equals(product)) { + throw new RuntimeException("Bulk write operation test failed. Transaction was committed, but " + + "not all objects were persisted into Cassandra"); + } + } + + for (Long productId : ordersPerProduct.keySet()) { + for (CacheEntryImpl entry : ordersPerProduct.get(productId)) { + order = (ProductOrder)orderStore.load(entry.getKey()); + + if (!entry.getValue().equals(order)) { + throw new RuntimeException("Bulk write operation test failed. Transaction was committed, but " + + "not all objects were persisted into Cassandra"); + } + } + } + + products = (Map)productStore.loadAll(productIds); + orders = (Map)orderStore.loadAll(orderIds); + + if (products == null || products.isEmpty() || orders == null || orders.isEmpty()) { + throw new RuntimeException("Bulk write operation test failed. Transaction was committed, but " + + "no objects were persisted into Cassandra"); + } + + if (products.size() < productIds.size() || orders.size() < orderIds.size()) { + throw new RuntimeException("Bulk write operation test failed. There were committed less objects " + + "into Cassandra than expected"); + } + + if (products.size() > productIds.size() || orders.size() > orderIds.size()) { + throw new RuntimeException("Bulk write operation test failed. There were committed more objects " + + "into Cassandra than expected"); + } + + for (CacheEntryImpl entry : productEntries) { + product = products.get(entry.getKey()); + + if (!entry.getValue().equals(product)) { + throw new RuntimeException("Bulk write operation test failed. Transaction was committed, but " + + "some objects were incorrectly persisted/loaded to/from Cassandra"); + } + } + + for (Long productId : ordersPerProduct.keySet()) { + for (CacheEntryImpl entry : ordersPerProduct.get(productId)) { + order = orders.get(entry.getKey()); + + if (!entry.getValue().equals(order)) { + throw new RuntimeException("Bulk write operation test failed. Transaction was committed, but " + + "some objects were incorrectly persisted/loaded to/from Cassandra"); + } + } + } + + LOGGER.info("Bulk write operation tests passed"); + + LOGGER.info("POJO strategy transaction write tests passed"); + + LOGGER.info("Running POJO strategy transaction delete tests"); + + LOGGER.info("Running single delete tests"); + + sessionProps.clear(); + + Product deletedProduct = productEntries.remove(0).getValue(); + ProductOrder deletedOrder = ordersPerProduct.get(deletedProduct.getId()).remove(0).getValue(); + + productStore.delete(deletedProduct.getId()); + orderStore.delete(deletedOrder.getId()); + + if (productStore.load(deletedProduct.getId()) == null || orderStore.load(deletedOrder.getId()) == null) { + throw new RuntimeException("Single delete operation test failed. Transaction wasn't committed yet, but " + + "objects were already deleted from Cassandra"); + } + + products = (Map)productStore.loadAll(productIds); + orders = (Map)orderStore.loadAll(orderIds); + + if (products.size() != productIds.size() || orders.size() != orderIds.size()) { + throw new RuntimeException("Single delete operation test failed. 
Transaction wasn't committed yet, but " + + "objects were already deleted from Cassandra"); + } + + //noinspection deprecation + productStore.sessionEnd(true); + //noinspection deprecation + orderStore.sessionEnd(true); + + if (productStore.load(deletedProduct.getId()) != null || orderStore.load(deletedOrder.getId()) != null) { + throw new RuntimeException("Single delete operation test failed. Transaction was committed, but " + + "objects were not deleted from Cassandra"); + } + + products = (Map)productStore.loadAll(productIds); + orders = (Map)orderStore.loadAll(orderIds); + + if (products.get(deletedProduct.getId()) != null || orders.get(deletedOrder.getId()) != null) { + throw new RuntimeException("Single delete operation test failed. Transaction was committed, but " + + "objects were not deleted from Cassandra"); + } + + LOGGER.info("Single delete tests passed"); + + LOGGER.info("Running bulk delete tests"); + + sessionProps.clear(); + + productStore.deleteAll(productIds); + orderStore.deleteAll(orderIds); + + products = (Map)productStore.loadAll(productIds); + orders = (Map)orderStore.loadAll(orderIds); + + if (products == null || products.isEmpty() || orders == null || orders.isEmpty()) { + throw new RuntimeException("Bulk delete operation test failed. Transaction wasn't committed yet, but " + + "objects were already deleted from Cassandra"); + } + + //noinspection deprecation + orderStore.sessionEnd(true); + //noinspection deprecation + productStore.sessionEnd(true); + + products = (Map)productStore.loadAll(productIds); + orders = (Map)orderStore.loadAll(orderIds); + + if ((products != null && !products.isEmpty()) || (orders != null && !orders.isEmpty())) { + throw new RuntimeException("Bulk delete operation test failed. Transaction was committed, but " + + "objects were not deleted from Cassandra"); + } + + LOGGER.info("Bulk delete tests passed"); + + LOGGER.info("POJO strategy transaction delete tests passed"); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraLocalServer.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraLocalServer.java new file mode 100644 index 0000000000000..a229d955e300c --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraLocalServer.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests; + +import org.apache.ignite.tests.utils.CassandraHelper; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * Simple helper class to run Cassandra on localhost + */ +public class CassandraLocalServer { + /** */ + private static final Logger LOGGER = LogManager.getLogger(CassandraLocalServer.class.getName()); + + /** */ + public static void main(String[] args) { + try { + CassandraHelper.startEmbeddedCassandra(LOGGER); + } + catch (Throwable e) { + throw new RuntimeException("Failed to start embedded Cassandra instance", e); + } + + LOGGER.info("Testing admin connection to Cassandra"); + CassandraHelper.testAdminConnection(); + + LOGGER.info("Testing regular connection to Cassandra"); + CassandraHelper.testRegularConnection(); + + LOGGER.info("Dropping all artifacts from previous tests execution session"); + CassandraHelper.dropTestKeyspaces(); + + while (true) { + try { + System.out.println("Cassandra server running"); + + Thread.sleep(10000); + } + catch (Throwable e) { + throw new RuntimeException("Cassandra server terminated", e); + } + } + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraSessionImplTest.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraSessionImplTest.java new file mode 100644 index 0000000000000..a3a2bcdb7114e --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/CassandraSessionImplTest.java @@ -0,0 +1,211 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; +import com.datastax.driver.core.BoundStatement; +import com.datastax.driver.core.Cluster; +import com.datastax.driver.core.ColumnDefinitions; +import com.datastax.driver.core.ConsistencyLevel; +import com.datastax.driver.core.PreparedId; +import com.datastax.driver.core.PreparedStatement; +import com.datastax.driver.core.ResultSet; +import com.datastax.driver.core.ResultSetFuture; +import com.datastax.driver.core.Row; +import com.datastax.driver.core.Session; +import com.datastax.driver.core.Statement; +import com.datastax.driver.core.exceptions.InvalidQueryException; +import org.apache.ignite.IgniteLogger; +import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings; +import org.apache.ignite.cache.store.cassandra.session.BatchExecutionAssistant; +import org.apache.ignite.cache.store.cassandra.session.CassandraSessionImpl; +import org.apache.ignite.cache.store.cassandra.session.WrappedPreparedStatement; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.nullable; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** */ +public class CassandraSessionImplTest { + + /** */ + private PreparedStatement preparedStatement1 = mockPreparedStatement(); + + /** */ + private PreparedStatement preparedStatement2 = mockPreparedStatement(); + + /** */ + private MyBoundStatement1 boundStatement1 = new MyBoundStatement1(preparedStatement1); + + /** */ + private MyBoundStatement2 boundStatement2 = new MyBoundStatement2(preparedStatement2); + + /** */ + @SuppressWarnings("unchecked") + @Test + public void executeFailureTest() { + Session session1 = mock(Session.class); + Session session2 = mock(Session.class); + when(session1.prepare(nullable(String.class))).thenReturn(preparedStatement1); + when(session2.prepare(nullable(String.class))).thenReturn(preparedStatement2); + + ResultSetFuture rsFuture = mock(ResultSetFuture.class); + ResultSet rs = mock(ResultSet.class); + Iterator it = mock(Iterator.class); + when(it.hasNext()).thenReturn(true); + when(it.next()).thenReturn(mock(Row.class)); + when(rs.iterator()).thenReturn(it); + when(rsFuture.getUninterruptibly()).thenReturn(rs); + /* @formatter:off */ + when(session1.executeAsync(any(Statement.class))) + .thenThrow(new InvalidQueryException("You may have used a PreparedStatement that was created with another Cluster instance")) + .thenThrow(new RuntimeException("this session should be refreshed / recreated")); + when(session2.executeAsync(boundStatement1)) + .thenThrow(new InvalidQueryException("You may have used a PreparedStatement that was created with another Cluster instance")); + when(session2.executeAsync(boundStatement2)).thenReturn(rsFuture); + /* @formatter:on */ + + Cluster cluster = mock(Cluster.class); + when(cluster.connect()).thenReturn(session1).thenReturn(session2); + when(session1.getCluster()).thenReturn(cluster); + when(session2.getCluster()).thenReturn(cluster); + + Cluster.Builder builder = mock(Cluster.Builder.class); + when(builder.build()).thenReturn(cluster); + + CassandraSessionImpl cassandraSession = new CassandraSessionImpl(builder, null, + ConsistencyLevel.ONE, ConsistencyLevel.ONE, 0, mock(IgniteLogger.class)); + + 
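// Descriptive note on the mock wiring above: the first Cluster.connect() hands out session1, whose
// executeAsync(...) always fails with InvalidQueryException ("PreparedStatement ... created with another
// Cluster instance"); the second connect() hands out session2, which still rejects boundStatement1 (bound
// to the statement prepared on session1) but accepts boundStatement2. The execute(...) call below can
// therefore only complete if CassandraSessionImpl reacts to that error by reconnecting through the
// Cluster.Builder and re-preparing the statement, which is what the verifications at the end assert:
// connect() invoked twice, prepare(...) invoked once per session, and all 10 batch items processed.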
BatchExecutionAssistant batchExecutionAssistant = new MyBatchExecutionAssistant(); + ArrayList data = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + data.add(String.valueOf(i)); + } + cassandraSession.execute(batchExecutionAssistant, data); + + verify(cluster, times(2)).connect(); + verify(session1, times(1)).prepare(nullable(String.class)); + verify(session2, times(1)).prepare(nullable(String.class)); + assertEquals(10, batchExecutionAssistant.processedCount()); + } + + /** */ + private static PreparedStatement mockPreparedStatement() { + PreparedStatement ps = mock(PreparedStatement.class); + when(ps.getVariables()).thenReturn(mock(ColumnDefinitions.class)); + when(ps.getPreparedId()).thenReturn(mock(PreparedId.class)); + when(ps.getQueryString()).thenReturn("insert into xxx"); + return ps; + } + + /** */ + private class MyBatchExecutionAssistant implements BatchExecutionAssistant { + /** */ + private Set processed = new HashSet<>(); + + /** {@inheritDoc} */ + @Override public void process(Row row, int seqNum) { + if (processed.contains(seqNum)) + return; + + processed.add(seqNum); + } + + /** {@inheritDoc} */ + @Override public boolean alreadyProcessed(int seqNum) { + return processed.contains(seqNum); + } + + /** {@inheritDoc} */ + @Override public int processedCount() { + return processed.size(); + } + + /** {@inheritDoc} */ + @Override public boolean tableExistenceRequired() { + return false; + } + + /** {@inheritDoc} */ + @Override public String getTable() { + return null; + } + + /** {@inheritDoc} */ + @Override public String getStatement() { + return null; + } + + /** {@inheritDoc} */ + @Override public BoundStatement bindStatement(PreparedStatement statement, Object obj) { + if (statement instanceof WrappedPreparedStatement) + statement = ((WrappedPreparedStatement)statement).getWrappedStatement(); + + if (statement == preparedStatement1) { + return boundStatement1; + } + else if (statement == preparedStatement2) { + return boundStatement2; + } + + throw new RuntimeException("unexpected"); + } + + /** {@inheritDoc} */ + @Override public KeyValuePersistenceSettings getPersistenceSettings() { + return null; + } + + /** {@inheritDoc} */ + @Override public String operationName() { + return null; + } + + /** {@inheritDoc} */ + @Override public Object processedData() { + return null; + } + + } + + /** */ + private static class MyBoundStatement1 extends BoundStatement { + /** */ + MyBoundStatement1(PreparedStatement ps) { + super(ps); + } + + } + + /** */ + private static class MyBoundStatement2 extends BoundStatement { + /** */ + MyBoundStatement2(PreparedStatement ps) { + super(ps); + } + } + +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/DDLGeneratorTest.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/DDLGeneratorTest.java new file mode 100644 index 0000000000000..60169e7c82d78 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/DDLGeneratorTest.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests; + +import java.net.URL; +import org.apache.ignite.cache.store.cassandra.utils.DDLGenerator; +import org.junit.Test; + +/** + * DDLGenerator test. + */ +public class DDLGeneratorTest { + /** */ + private static final String[] RESOURCES = new String[] { + "org/apache/ignite/tests/persistence/primitive/persistence-settings-1.xml", + "org/apache/ignite/tests/persistence/pojo/persistence-settings-1.xml", + "org/apache/ignite/tests/persistence/pojo/persistence-settings-2.xml", + "org/apache/ignite/tests/persistence/pojo/persistence-settings-3.xml", + "org/apache/ignite/tests/persistence/pojo/persistence-settings-4.xml", + "org/apache/ignite/tests/persistence/pojo/persistence-settings-5.xml", + "org/apache/ignite/tests/persistence/pojo/persistence-settings-6.xml", + "org/apache/ignite/tests/persistence/pojo/product.xml", + "org/apache/ignite/tests/persistence/pojo/order.xml" + }; + + /** + * Test DDL generator. + */ + @Test + public void generatorTest() { + String[] files = new String[RESOURCES.length]; + + ClassLoader clsLdr = DDLGeneratorTest.class.getClassLoader(); + + for (int i = 0; i < RESOURCES.length; i++) { + URL url = clsLdr.getResource(RESOURCES[i]); + if (url == null) + throw new IllegalStateException("Failed to find resource: " + RESOURCES[i]); + + files[i] = url.getFile(); + } + + DDLGenerator.main(files); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/DatasourceSerializationTest.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/DatasourceSerializationTest.java new file mode 100644 index 0000000000000..e981dea8c833c --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/DatasourceSerializationTest.java @@ -0,0 +1,155 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests; + +import java.io.Serializable; +import java.lang.reflect.Field; +import java.net.InetAddress; +import java.nio.ByteBuffer; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import com.datastax.driver.core.Cluster; +import com.datastax.driver.core.ConsistencyLevel; +import com.datastax.driver.core.Host; +import com.datastax.driver.core.HostDistance; +import com.datastax.driver.core.Statement; +import com.datastax.driver.core.policies.LoadBalancingPolicy; +import com.datastax.driver.core.policies.RoundRobinPolicy; +import com.datastax.driver.core.policies.TokenAwarePolicy; +import org.apache.ignite.cache.store.cassandra.datasource.Credentials; +import org.apache.ignite.cache.store.cassandra.datasource.DataSource; +import org.apache.ignite.cache.store.cassandra.serializer.JavaSerializer; +import org.apache.ignite.tests.utils.CassandraAdminCredentials; +import org.junit.Test; + +import static org.junit.Assert.assertTrue; + +/** + * Test for datasource serialization. + */ +public class DatasourceSerializationTest { + /** + * Sample class for serialization test. + */ + private static class MyLoadBalancingPolicy implements LoadBalancingPolicy, Serializable { + /** */ + private transient LoadBalancingPolicy plc = new TokenAwarePolicy(new RoundRobinPolicy()); + + /** {@inheritDoc} */ + @Override public void init(Cluster cluster, Collection hosts) { + plc.init(cluster, hosts); + } + + /** {@inheritDoc} */ + @Override public HostDistance distance(Host host) { + return plc.distance(host); + } + + /** {@inheritDoc} */ + @Override public Iterator newQueryPlan(String loggedKeyspace, Statement statement) { + return plc.newQueryPlan(loggedKeyspace, statement); + } + + /** {@inheritDoc} */ + @Override public void onAdd(Host host) { + plc.onAdd(host); + } + + /** {@inheritDoc} */ + @Override public void onUp(Host host) { + plc.onUp(host); + } + + /** {@inheritDoc} */ + @Override public void onDown(Host host) { + plc.onDown(host); + } + + /** {@inheritDoc} */ + @Override public void onRemove(Host host) { + plc.onRemove(host); + } + + /** {@inheritDoc} */ + @Override public void close() { + plc.close(); + } + } + + /** + * Serialization test. 
+ */ + @Test + public void serializationTest() { + DataSource src = new DataSource(); + + Credentials cred = new CassandraAdminCredentials(); + String[] points = new String[]{"127.0.0.1", "10.0.0.2", "10.0.0.3"}; + LoadBalancingPolicy plc = new MyLoadBalancingPolicy(); + + src.setCredentials(cred); + src.setContactPoints(points); + src.setReadConsistency("ONE"); + src.setWriteConsistency("QUORUM"); + src.setLoadBalancingPolicy(plc); + + JavaSerializer serializer = new JavaSerializer(); + + ByteBuffer buff = serializer.serialize(src); + DataSource _src = (DataSource)serializer.deserialize(buff); + + Credentials _cred = (Credentials)getFieldValue(_src, "creds"); + List _points = (List)getFieldValue(_src, "contactPoints"); + ConsistencyLevel _readCons = (ConsistencyLevel)getFieldValue(_src, "readConsistency"); + ConsistencyLevel _writeCons = (ConsistencyLevel)getFieldValue(_src, "writeConsistency"); + LoadBalancingPolicy _plc = (LoadBalancingPolicy)getFieldValue(_src, "loadBalancingPlc"); + + assertTrue("Incorrectly serialized/deserialized credentials for Cassandra DataSource", + cred.getPassword().equals(_cred.getPassword()) && cred.getUser().equals(_cred.getUser())); + + assertTrue("Incorrectly serialized/deserialized contact points for Cassandra DataSource", + "/127.0.0.1".equals(_points.get(0).toString()) && + "/10.0.0.2".equals(_points.get(1).toString()) && + "/10.0.0.3".equals(_points.get(2).toString())); + + assertTrue("Incorrectly serialized/deserialized consistency levels for Cassandra DataSource", + ConsistencyLevel.ONE == _readCons && ConsistencyLevel.QUORUM == _writeCons); + + assertTrue("Incorrectly serialized/deserialized load balancing policy for Cassandra DataSource", + _plc instanceof MyLoadBalancingPolicy); + } + + /** + * @param obj Object. + * @param field Field name. + * @return Field value. + */ + private Object getFieldValue(Object obj, String field) { + try { + Field f = obj.getClass().getDeclaredField(field); + + f.setAccessible(true); + + return f.get(obj); + } + catch (Throwable e) { + throw new RuntimeException("Failed to get field '" + field + "' value", e); + } + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/IgnitePersistentStoreLoadTest.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/IgnitePersistentStoreLoadTest.java new file mode 100644 index 0000000000000..9a759bc3c0563 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/IgnitePersistentStoreLoadTest.java @@ -0,0 +1,112 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests; + +import org.apache.ignite.Ignite; +import org.apache.ignite.Ignition; +import org.apache.ignite.tests.load.LoadTestDriver; +import org.apache.ignite.tests.load.ignite.BulkReadWorker; +import org.apache.ignite.tests.load.ignite.BulkWriteWorker; +import org.apache.ignite.tests.load.ignite.ReadWorker; +import org.apache.ignite.tests.load.ignite.WriteWorker; +import org.apache.ignite.tests.utils.CassandraHelper; +import org.apache.ignite.tests.utils.TestsHelper; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * Load tests for Ignite caches which utilize {@link org.apache.ignite.cache.store.cassandra.CassandraCacheStore} + * to store cache data into Cassandra tables. + */ +public class IgnitePersistentStoreLoadTest extends LoadTestDriver { + /** */ + private static final Logger LOGGER = LogManager.getLogger("IgniteLoadTests"); + + /** + * Test starter. + * + * @param args Test arguments. + */ + public static void main(String[] args) { + try { + LOGGER.info("Ignite load tests execution started"); + + LoadTestDriver driver = new IgnitePersistentStoreLoadTest(); + + /** + * Load test scripts could be executed from several machines. The current implementation can correctly + * handle the situation when a Cassandra keyspace/table was dropped - for example, by the same load test + * started a bit later on another machine. Moreover, there is a warm-up period for each load test. + * Thus all the delays related to keyspace/table recreation will not affect the performance metrics, + * but lots of "trash" output will be produced in the logs (related to correct handling of such an + * exceptional situation and keyspace/table recreation). + * + * Thus dropping test keyspaces makes sense only for Unit tests, but not for Load tests. + **/ + + //CassandraHelper.dropTestKeyspaces(); + + driver.runTest("WRITE", WriteWorker.class, WriteWorker.LOGGER_NAME); + + driver.runTest("BULK_WRITE", BulkWriteWorker.class, BulkWriteWorker.LOGGER_NAME); + + driver.runTest("READ", ReadWorker.class, ReadWorker.LOGGER_NAME); + + driver.runTest("BULK_READ", BulkReadWorker.class, BulkReadWorker.LOGGER_NAME); + + /** + * A load test script executed on one machine could complete earlier than the same load test executed from + * another machine. The current implementation can correctly handle the situation when a Cassandra keyspace/table + * was dropped (it is simply recreated). But dropping a keyspace/table during load test execution and the subsequent + * recreation of such objects can have a SIGNIFICANT EFFECT on the final performance metrics. + * + * Thus dropping test keyspaces at the end of the tests makes sense only for Unit tests, + * but not for Load tests. 
+ */ + + //CassandraHelper.dropTestKeyspaces(); + + LOGGER.info("Ignite load tests execution completed"); + } + catch (Throwable e) { + LOGGER.error("Ignite load tests execution failed", e); + throw new RuntimeException("Ignite load tests execution failed", e); + } + finally { + CassandraHelper.releaseCassandraResources(); + } + } + + /** {@inheritDoc} */ + @Override protected Logger logger() { + return LOGGER; + } + + /** {@inheritDoc} */ + @Override protected Object setup(String logName) { + return Ignition.start(TestsHelper.getLoadTestsIgniteConfig()); + } + + /** {@inheritDoc} */ + @Override protected void tearDown(Object obj) { + Ignite ignite = (Ignite)obj; + + if (ignite != null) + ignite.close(); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/IgnitePersistentStorePrimitiveTest.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/IgnitePersistentStorePrimitiveTest.java new file mode 100644 index 0000000000000..5b2799a2b59c1 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/IgnitePersistentStorePrimitiveTest.java @@ -0,0 +1,143 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests; + +import java.io.IOException; +import java.net.URL; +import com.datastax.driver.core.policies.RoundRobinPolicy; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.Ignition; +import org.apache.ignite.cache.store.cassandra.CassandraCacheStoreFactory; +import org.apache.ignite.cache.store.cassandra.datasource.DataSource; +import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.apache.ignite.tests.utils.CassandraAdminCredentials; +import org.apache.ignite.tests.utils.CassandraHelper; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Unit test for Ignite caches which utilizing {@link org.apache.ignite.cache.store.cassandra.CassandraCacheStore} + * to store primitive type cache data into Cassandra table. 
+ */ +public class IgnitePersistentStorePrimitiveTest { + /** */ + private static final Logger LOGGER = LogManager.getLogger(IgnitePersistentStorePrimitiveTest.class.getName()); + + /** */ + @BeforeClass + public static void setUpClass() { + if (CassandraHelper.useEmbeddedCassandra()) { + try { + CassandraHelper.startEmbeddedCassandra(LOGGER); + } + catch (Throwable e) { + throw new RuntimeException("Failed to start embedded Cassandra instance", e); + } + } + + LOGGER.info("Testing admin connection to Cassandra"); + CassandraHelper.testAdminConnection(); + + LOGGER.info("Testing regular connection to Cassandra"); + CassandraHelper.testRegularConnection(); + + LOGGER.info("Dropping all artifacts from previous tests execution session"); + CassandraHelper.dropTestKeyspaces(); + + LOGGER.info("Start tests execution"); + } + + /** */ + @AfterClass + public static void tearDownClass() { + try { + CassandraHelper.dropTestKeyspaces(); + } + finally { + CassandraHelper.releaseCassandraResources(); + + if (CassandraHelper.useEmbeddedCassandra()) { + try { + CassandraHelper.stopEmbeddedCassandra(); + } + catch (Throwable e) { + LOGGER.error("Failed to stop embedded Cassandra instance", e); + } + } + } + } + + /** */ + @Test + public void test() throws IOException { + IgniteConfiguration config = igniteConfig(); + + Ignition.stopAll(true); + + try (Ignite ignite = Ignition.start(config)) { + IgniteCache cache = ignite.getOrCreateCache("cache1"); + cache.put(12L, 12L); + } + + Ignition.stopAll(true); + + try (Ignite ignite = Ignition.start(config)) { + IgniteCache cache = ignite.getOrCreateCache("cache1"); + + assertEquals(12L, (long)cache.get(12L)); + + cache.remove(12L); + } + } + + /** */ + private IgniteConfiguration igniteConfig() throws IOException { + URL url = getClass().getClassLoader().getResource("org/apache/ignite/tests/persistence/blob/persistence-settings-1.xml"); + String persistence = U.readFileToString(url.getFile(), "UTF-8"); + KeyValuePersistenceSettings persistenceSettings = new KeyValuePersistenceSettings(persistence); + + DataSource dataSource = new DataSource(); + dataSource.setContactPoints(CassandraHelper.getContactPointsArray()); + dataSource.setCredentials(new CassandraAdminCredentials()); + dataSource.setLoadBalancingPolicy(new RoundRobinPolicy()); + + CassandraCacheStoreFactory storeFactory = new CassandraCacheStoreFactory<>(); + storeFactory.setDataSource(dataSource); + storeFactory.setPersistenceSettings(persistenceSettings); + + CacheConfiguration cacheConfiguration = new CacheConfiguration<>(); + cacheConfiguration.setName("cache1"); + cacheConfiguration.setReadThrough(true); + cacheConfiguration.setWriteThrough(true); + cacheConfiguration.setCacheStoreFactory(storeFactory); + + IgniteConfiguration config = new IgniteConfiguration(); + config.setCacheConfiguration(cacheConfiguration); + + return config; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/IgnitePersistentStoreTest.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/IgnitePersistentStoreTest.java new file mode 100644 index 0000000000000..1aeade57aab11 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/IgnitePersistentStoreTest.java @@ -0,0 +1,920 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests; + +import java.io.IOException; +import java.net.URL; +import java.time.Instant; +import java.util.Collection; +import java.util.Map; +import com.datastax.driver.core.SimpleStatement; +import com.datastax.driver.core.policies.RoundRobinPolicy; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.IgniteTransactions; +import org.apache.ignite.Ignition; +import org.apache.ignite.binary.BinaryObject; +import org.apache.ignite.cache.CachePeekMode; +import org.apache.ignite.cache.store.CacheStore; +import org.apache.ignite.cache.store.cassandra.CassandraCacheStoreFactory; +import org.apache.ignite.cache.store.cassandra.datasource.DataSource; +import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.internal.binary.BinaryMarshaller; +import org.apache.ignite.internal.processors.cache.CacheEntryImpl; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.apache.ignite.tests.pojos.Person; +import org.apache.ignite.tests.pojos.PersonId; +import org.apache.ignite.tests.pojos.Product; +import org.apache.ignite.tests.pojos.ProductOrder; +import org.apache.ignite.tests.pojos.SimplePerson; +import org.apache.ignite.tests.pojos.SimplePersonId; +import org.apache.ignite.tests.utils.CacheStoreHelper; +import org.apache.ignite.tests.utils.CassandraAdminCredentials; +import org.apache.ignite.tests.utils.CassandraHelper; +import org.apache.ignite.tests.utils.TestsHelper; +import org.apache.ignite.transactions.Transaction; +import org.apache.ignite.transactions.TransactionConcurrency; +import org.apache.ignite.transactions.TransactionIsolation; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import org.springframework.core.io.ClassPathResource; + +/** + * Unit tests for Ignite caches which utilizing {@link org.apache.ignite.cache.store.cassandra.CassandraCacheStore} + * to store cache data into Cassandra tables + */ +public class IgnitePersistentStoreTest { + /** */ + private static final Logger LOGGER = LogManager.getLogger(IgnitePersistentStoreTest.class.getName()); + + /** */ + @BeforeClass + public static void setUpClass() { + if (CassandraHelper.useEmbeddedCassandra()) { + try { + CassandraHelper.startEmbeddedCassandra(LOGGER); + } + catch (Throwable e) { + throw new RuntimeException("Failed to start embedded Cassandra instance", e); + } + } + + LOGGER.info("Testing admin connection to Cassandra"); + CassandraHelper.testAdminConnection(); + + LOGGER.info("Testing regular connection to Cassandra"); + 
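// Note: the two connectivity probes here presumably cover the two credential sets used by these tests -
// the admin connection that the test helpers use to create and drop keyspaces/tables, and the regular
// connection that the cache store data sources are configured with for ordinary reads and writes
// (an assumption based on the CassandraAdminCredentials usage elsewhere in this patch).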
CassandraHelper.testRegularConnection(); + + LOGGER.info("Dropping all artifacts from previous tests execution session"); + CassandraHelper.dropTestKeyspaces(); + + LOGGER.info("Start tests execution"); + } + + /** */ + @AfterClass + public static void tearDownClass() { + try { + CassandraHelper.dropTestKeyspaces(); + } + finally { + CassandraHelper.releaseCassandraResources(); + + if (CassandraHelper.useEmbeddedCassandra()) { + try { + CassandraHelper.stopEmbeddedCassandra(); + } + catch (Throwable e) { + LOGGER.error("Failed to stop embedded Cassandra instance", e); + } + } + } + } + + /** */ + @Test + public void primitiveStrategyTest() { + Ignition.stopAll(true); + + Map longMap = TestsHelper.generateLongsMap(); + Map strMap = TestsHelper.generateStringsMap(); + + LOGGER.info("Running PRIMITIVE strategy write tests"); + + try (Ignite ignite = Ignition.start("org/apache/ignite/tests/persistence/primitive/ignite-config.xml")) { + IgniteCache longCache = ignite.getOrCreateCache(new CacheConfiguration("cache1")); + IgniteCache strCache = ignite.getOrCreateCache(new CacheConfiguration("cache2")); + + LOGGER.info("Running single operation write tests"); + longCache.put(1L, 1L); + strCache.put("1", "1"); + LOGGER.info("Single operation write tests passed"); + + LOGGER.info("Running bulk operation write tests"); + longCache.putAll(longMap); + strCache.putAll(strMap); + LOGGER.info("Bulk operation write tests passed"); + } + + LOGGER.info("PRIMITIVE strategy write tests passed"); + + Ignition.stopAll(true); + + try (Ignite ignite = Ignition.start("org/apache/ignite/tests/persistence/primitive/ignite-config.xml")) { + LOGGER.info("Running PRIMITIVE strategy read tests"); + + IgniteCache longCache = ignite.getOrCreateCache(new CacheConfiguration("cache1")); + IgniteCache strCache = ignite.getOrCreateCache(new CacheConfiguration("cache2")); + + LOGGER.info("Running single operation read tests"); + + Long longVal = longCache.get(1L); + if (!longVal.equals(longMap.get(1L))) + throw new RuntimeException("Long value was incorrectly deserialized from Cassandra"); + + String strVal = strCache.get("1"); + if (!strVal.equals(strMap.get("1"))) + throw new RuntimeException("String value was incorrectly deserialized from Cassandra"); + + LOGGER.info("Single operation read tests passed"); + + LOGGER.info("Running bulk operation read tests"); + + Map longMap1 = longCache.getAll(longMap.keySet()); + if (!TestsHelper.checkMapsEqual(longMap, longMap1)) + throw new RuntimeException("Long values batch was incorrectly deserialized from Cassandra"); + + Map strMap1 = strCache.getAll(strMap.keySet()); + if (!TestsHelper.checkMapsEqual(strMap, strMap1)) + throw new RuntimeException("String values batch was incorrectly deserialized from Cassandra"); + + LOGGER.info("Bulk operation read tests passed"); + + LOGGER.info("PRIMITIVE strategy read tests passed"); + + LOGGER.info("Running PRIMITIVE strategy delete tests"); + + longCache.remove(1L); + longCache.removeAll(longMap.keySet()); + + strCache.remove("1"); + strCache.removeAll(strMap.keySet()); + + LOGGER.info("PRIMITIVE strategy delete tests passed"); + } + } + + /** */ + @Test + public void blobStrategyTest() { + Ignition.stopAll(true); + + Map longMap = TestsHelper.generateLongsMap(); + Map personMap = TestsHelper.generateLongsPersonsMap(); + + LOGGER.info("Running BLOB strategy write tests"); + + try (Ignite ignite = Ignition.start("org/apache/ignite/tests/persistence/blob/ignite-config.xml")) { + IgniteCache longCache = ignite.getOrCreateCache(new 
CacheConfiguration("cache1")); + IgniteCache personCache = ignite.getOrCreateCache(new CacheConfiguration("cache2")); + + LOGGER.info("Running single operation write tests"); + longCache.put(1L, 1L); + personCache.put(1L, TestsHelper.generateRandomPerson(1L)); + LOGGER.info("Single operation write tests passed"); + + LOGGER.info("Running bulk operation write tests"); + longCache.putAll(longMap); + personCache.putAll(personMap); + LOGGER.info("Bulk operation write tests passed"); + } + + LOGGER.info("BLOB strategy write tests passed"); + + Ignition.stopAll(true); + + try (Ignite ignite = Ignition.start("org/apache/ignite/tests/persistence/blob/ignite-config.xml")) { + LOGGER.info("Running BLOB strategy read tests"); + + IgniteCache longCache = ignite.getOrCreateCache(new CacheConfiguration("cache1")); + IgniteCache personCache = ignite.getOrCreateCache(new CacheConfiguration("cache2")); + + LOGGER.info("Running single operation read tests"); + + Long longVal = longCache.get(1L); + if (!longVal.equals(longMap.get(1L))) + throw new RuntimeException("Long value was incorrectly deserialized from Cassandra"); + + Person person = personCache.get(1L); + if (!person.equals(personMap.get(1L))) + throw new RuntimeException("Person value was incorrectly deserialized from Cassandra"); + + LOGGER.info("Single operation read tests passed"); + + LOGGER.info("Running bulk operation read tests"); + + Map longMap1 = longCache.getAll(longMap.keySet()); + if (!TestsHelper.checkMapsEqual(longMap, longMap1)) + throw new RuntimeException("Long values batch was incorrectly deserialized from Cassandra"); + + Map personMap1 = personCache.getAll(personMap.keySet()); + if (!TestsHelper.checkPersonMapsEqual(personMap, personMap1, false)) + throw new RuntimeException("Person values batch was incorrectly deserialized from Cassandra"); + + LOGGER.info("Bulk operation read tests passed"); + + LOGGER.info("BLOB strategy read tests passed"); + + LOGGER.info("Running BLOB strategy delete tests"); + + longCache.remove(1L); + longCache.removeAll(longMap.keySet()); + + personCache.remove(1L); + personCache.removeAll(personMap.keySet()); + + LOGGER.info("BLOB strategy delete tests passed"); + } + } + + /** */ + @Test + public void blobBinaryLoadCacheTest() { + Ignition.stopAll(true); + + try (Ignite ignite = Ignition.start("org/apache/ignite/tests/persistence/loadall_blob/ignite-config.xml")) { + IgniteCache personCache = ignite.getOrCreateCache("cache2"); + + assert ignite.configuration().getMarshaller() instanceof BinaryMarshaller; + + personCache.put(1L, new PojoPerson(1, "name")); + + assert personCache.withKeepBinary().get(1L) instanceof BinaryObject; + } + + Ignition.stopAll(true); + + try (Ignite ignite = Ignition.start("org/apache/ignite/tests/persistence/loadall_blob/ignite-config.xml")) { + IgniteCache personCache = ignite.getOrCreateCache("cache2"); + + personCache.loadCache(null, null); + + PojoPerson person = personCache.get(1L); + + LOGGER.info("loadCache tests passed"); + } + } + + /** */ + @Test + public void pojoStrategyTest() { + Ignition.stopAll(true); + + LOGGER.info("Running POJO strategy write tests"); + + Map personMap1 = TestsHelper.generateLongsPersonsMap(); + Map personMap2 = TestsHelper.generatePersonIdsPersonsMap(); + Map productsMap = TestsHelper.generateProductsMap(); + Map ordersMap = TestsHelper.generateOrdersMap(); + + Product product = TestsHelper.generateRandomProduct(-1L); + ProductOrder order = TestsHelper.generateRandomOrder(-1L); + + try (Ignite ignite = 
Ignition.start("org/apache/ignite/tests/persistence/pojo/ignite-config.xml")) { + IgniteCache personCache1 = ignite.getOrCreateCache(new CacheConfiguration("cache1")); + IgniteCache personCache2 = ignite.getOrCreateCache(new CacheConfiguration("cache2")); + IgniteCache personCache3 = ignite.getOrCreateCache(new CacheConfiguration("cache3")); + IgniteCache personCache4 = ignite.getOrCreateCache(new CacheConfiguration("cache4")); + IgniteCache productCache = ignite.getOrCreateCache(new CacheConfiguration("product")); + IgniteCache orderCache = ignite.getOrCreateCache(new CacheConfiguration("order")); + + LOGGER.info("Running single operation write tests"); + + personCache1.put(1L, TestsHelper.generateRandomPerson(1L)); + + PersonId id = TestsHelper.generateRandomPersonId(); + personCache2.put(id, TestsHelper.generateRandomPerson(id.getPersonNumber())); + + id = TestsHelper.generateRandomPersonId(); + personCache3.put(id, TestsHelper.generateRandomPerson(id.getPersonNumber())); + personCache4.put(id, TestsHelper.generateRandomPerson(id.getPersonNumber())); + + productCache.put(product.getId(), product); + orderCache.put(order.getId(), order); + + LOGGER.info("Single operation write tests passed"); + + LOGGER.info("Running bulk operation write tests"); + personCache1.putAll(personMap1); + personCache2.putAll(personMap2); + personCache3.putAll(personMap2); + personCache4.putAll(personMap2); + productCache.putAll(productsMap); + orderCache.putAll(ordersMap); + LOGGER.info("Bulk operation write tests passed"); + } + + LOGGER.info("POJO strategy write tests passed"); + + Ignition.stopAll(true); + + try (Ignite ignite = Ignition.start("org/apache/ignite/tests/persistence/pojo/ignite-config.xml")) { + LOGGER.info("Running POJO strategy read tests"); + + IgniteCache personCache1 = ignite.getOrCreateCache(new CacheConfiguration("cache1")); + IgniteCache personCache2 = ignite.getOrCreateCache(new CacheConfiguration("cache2")); + IgniteCache personCache3 = ignite.getOrCreateCache(new CacheConfiguration("cache3")); + IgniteCache personCache4 = ignite.getOrCreateCache(new CacheConfiguration("cache4")); + IgniteCache productCache = ignite.getOrCreateCache(new CacheConfiguration("product")); + IgniteCache orderCache = ignite.getOrCreateCache(new CacheConfiguration("order")); + + LOGGER.info("Running single operation read tests"); + Person person = personCache1.get(1L); + if (!person.equalsPrimitiveFields(personMap1.get(1L))) + throw new RuntimeException("Person value was incorrectly deserialized from Cassandra"); + + PersonId id = personMap2.keySet().iterator().next(); + + person = personCache2.get(id); + if (!person.equalsPrimitiveFields(personMap2.get(id))) + throw new RuntimeException("Person value was incorrectly deserialized from Cassandra"); + + person = personCache3.get(id); + if (!person.equals(personMap2.get(id))) + throw new RuntimeException("Person value was incorrectly deserialized from Cassandra"); + + person = personCache4.get(id); + if (!person.equals(personMap2.get(id))) + throw new RuntimeException("Person value was incorrectly deserialized from Cassandra"); + + Product product1 = productCache.get(product.getId()); + if (!product.equals(product1)) + throw new RuntimeException("Product value was incorrectly deserialized from Cassandra"); + + ProductOrder order1 = orderCache.get(order.getId()); + if (!order.equals(order1)) + throw new RuntimeException("Order value was incorrectly deserialized from Cassandra"); + + LOGGER.info("Single operation read tests passed"); + + LOGGER.info("Running 
bulk operation read tests"); + + Map persons1 = personCache1.getAll(personMap1.keySet()); + if (!TestsHelper.checkPersonMapsEqual(persons1, personMap1, true)) + throw new RuntimeException("Persons values batch was incorrectly deserialized from Cassandra"); + + Map persons2 = personCache2.getAll(personMap2.keySet()); + if (!TestsHelper.checkPersonMapsEqual(persons2, personMap2, true)) + throw new RuntimeException("Person values batch was incorrectly deserialized from Cassandra"); + + Map persons3 = personCache3.getAll(personMap2.keySet()); + if (!TestsHelper.checkPersonMapsEqual(persons3, personMap2, false)) + throw new RuntimeException("Person values batch was incorrectly deserialized from Cassandra"); + + Map persons4 = personCache4.getAll(personMap2.keySet()); + if (!TestsHelper.checkPersonMapsEqual(persons4, personMap2, false)) + throw new RuntimeException("Person values batch was incorrectly deserialized from Cassandra"); + + Map productsMap1 = productCache.getAll(productsMap.keySet()); + if (!TestsHelper.checkProductMapsEqual(productsMap, productsMap1)) + throw new RuntimeException("Product values batch was incorrectly deserialized from Cassandra"); + + Map ordersMap1 = orderCache.getAll(ordersMap.keySet()); + if (!TestsHelper.checkOrderMapsEqual(ordersMap, ordersMap1)) + throw new RuntimeException("Order values batch was incorrectly deserialized from Cassandra"); + + LOGGER.info("Bulk operation read tests passed"); + + LOGGER.info("POJO strategy read tests passed"); + + LOGGER.info("Running POJO strategy delete tests"); + + personCache1.remove(1L); + personCache1.removeAll(personMap1.keySet()); + + personCache2.remove(id); + personCache2.removeAll(personMap2.keySet()); + + personCache3.remove(id); + personCache3.removeAll(personMap2.keySet()); + + personCache4.remove(id); + personCache4.removeAll(personMap2.keySet()); + + productCache.remove(product.getId()); + productCache.removeAll(productsMap.keySet()); + + orderCache.remove(order.getId()); + orderCache.removeAll(ordersMap.keySet()); + + LOGGER.info("POJO strategy delete tests passed"); + } + } + + /** */ + @Test + public void pojoStrategySimpleObjectsTest() { + Ignition.stopAll(true); + + LOGGER.info("Running POJO strategy write tests for simple objects"); + + Map personMap5 = TestsHelper.generateSimplePersonIdsPersonsMap(); + Map personMap6 = TestsHelper.generateSimplePersonIdsPersonsMap(); + + try (Ignite ignite = Ignition.start("org/apache/ignite/tests/persistence/pojo/ignite-config.xml")) { + IgniteCache personCache5 = + ignite.getOrCreateCache(new CacheConfiguration("cache5")); + IgniteCache personCache6 = + ignite.getOrCreateCache(new CacheConfiguration("cache6")); + + LOGGER.info("Running single operation write tests"); + + SimplePersonId id = TestsHelper.generateRandomSimplePersonId(); + personCache5.put(id, TestsHelper.generateRandomSimplePerson(id.personNum)); + personCache6.put(id, TestsHelper.generateRandomSimplePerson(id.personNum)); + + LOGGER.info("Single operation write tests passed"); + + LOGGER.info("Running bulk operation write tests"); + personCache5.putAll(personMap5); + personCache6.putAll(personMap6); + LOGGER.info("Bulk operation write tests passed"); + } + + LOGGER.info("POJO strategy write tests for simple objects passed"); + + Ignition.stopAll(true); + + try (Ignite ignite = Ignition.start("org/apache/ignite/tests/persistence/pojo/ignite-config.xml")) { + LOGGER.info("Running POJO strategy read tests for simple objects"); + + IgniteCache personCache5 = + ignite.getOrCreateCache(new 
CacheConfiguration("cache5")); + IgniteCache personCache6 = + ignite.getOrCreateCache(new CacheConfiguration("cache6")); + + LOGGER.info("Running single operation read tests"); + + SimplePersonId id = personMap5.keySet().iterator().next(); + + SimplePerson person = personCache5.get(id); + if (!person.equalsPrimitiveFields(personMap5.get(id))) + throw new RuntimeException("SimplePerson value was incorrectly deserialized from Cassandra"); + + id = personMap6.keySet().iterator().next(); + + person = personCache6.get(id); + if (!person.equals(personMap6.get(id))) + throw new RuntimeException("SimplePerson value was incorrectly deserialized from Cassandra"); + + LOGGER.info("Single operation read tests passed"); + + LOGGER.info("Running bulk operation read tests"); + + Map persons5 = personCache5.getAll(personMap5.keySet()); + if (!TestsHelper.checkSimplePersonMapsEqual(persons5, personMap5, true)) + throw new RuntimeException("SimplePerson values batch was incorrectly deserialized from Cassandra"); + + Map persons6 = personCache6.getAll(personMap6.keySet()); + if (!TestsHelper.checkSimplePersonMapsEqual(persons6, personMap6, false)) + throw new RuntimeException("SimplePerson values batch was incorrectly deserialized from Cassandra"); + + LOGGER.info("Bulk operation read tests passed"); + + LOGGER.info("POJO strategy read tests for simple objects passed"); + + LOGGER.info("Running POJO strategy delete tests for simple objects"); + + personCache5.remove(id); + personCache5.removeAll(personMap5.keySet()); + + personCache6.remove(id); + personCache6.removeAll(personMap6.keySet()); + + LOGGER.info("POJO strategy delete tests for simple objects passed"); + } + } + + /** */ + @Test + public void pojoStrategyTransactionTest() { + CassandraHelper.dropTestKeyspaces(); + + Ignition.stopAll(true); + + try (Ignite ignite = Ignition.start("org/apache/ignite/tests/persistence/pojo/ignite-config.xml")) { + pojoStrategyTransactionTest(ignite, TransactionConcurrency.OPTIMISTIC, TransactionIsolation.READ_COMMITTED); + pojoStrategyTransactionTest(ignite, TransactionConcurrency.OPTIMISTIC, TransactionIsolation.REPEATABLE_READ); + pojoStrategyTransactionTest(ignite, TransactionConcurrency.OPTIMISTIC, TransactionIsolation.SERIALIZABLE); + pojoStrategyTransactionTest(ignite, TransactionConcurrency.PESSIMISTIC, TransactionIsolation.READ_COMMITTED); + pojoStrategyTransactionTest(ignite, TransactionConcurrency.PESSIMISTIC, TransactionIsolation.REPEATABLE_READ); + pojoStrategyTransactionTest(ignite, TransactionConcurrency.PESSIMISTIC, TransactionIsolation.SERIALIZABLE); + } + } + + /** */ + @Test + public void loadCacheTest() { + Ignition.stopAll(true); + + LOGGER.info("Running loadCache test"); + + LOGGER.info("Filling Cassandra table with test data"); + + CacheStore store = CacheStoreHelper.createCacheStore("personTypes", + new ClassPathResource("org/apache/ignite/tests/persistence/pojo/persistence-settings-3.xml"), + CassandraHelper.getAdminDataSrc()); + + Collection> entries = TestsHelper.generatePersonIdsPersonsEntries(); + + //noinspection unchecked + store.writeAll(entries); + + LOGGER.info("Cassandra table filled with test data"); + + LOGGER.info("Running loadCache test"); + + try (Ignite ignite = Ignition.start("org/apache/ignite/tests/persistence/pojo/ignite-config.xml")) { + CacheConfiguration ccfg = new CacheConfiguration<>("cache3"); + + IgniteCache personCache3 = ignite.getOrCreateCache(ccfg); + + int size = personCache3.size(CachePeekMode.ALL); + + LOGGER.info("Initial cache size " + size); + + 
LOGGER.info("Loading cache data from Cassandra table"); + + String qry = "select * from test1.pojo_test3 limit 3"; + + personCache3.loadCache(null, qry); + + size = personCache3.size(CachePeekMode.ALL); + Assert.assertEquals("Cache data was incorrectly loaded from Cassandra table by '" + qry + "'", 3, size); + + personCache3.clear(); + + personCache3.loadCache(null, new SimpleStatement(qry)); + + size = personCache3.size(CachePeekMode.ALL); + Assert.assertEquals("Cache data was incorrectly loaded from Cassandra table by statement", 3, size); + + personCache3.clear(); + + personCache3.loadCache(null); + + size = personCache3.size(CachePeekMode.ALL); + Assert.assertEquals("Cache data was incorrectly loaded from Cassandra. " + + "Expected number of records is " + TestsHelper.getBulkOperationSize() + + ", but loaded number of records is " + size, + TestsHelper.getBulkOperationSize(), size); + + LOGGER.info("Cache data loaded from Cassandra table"); + } + + LOGGER.info("loadCache test passed"); + } + + /** */ + @SuppressWarnings("unchecked") + private void pojoStrategyTransactionTest(Ignite ignite, TransactionConcurrency concurrency, + TransactionIsolation isolation) { + LOGGER.info("-----------------------------------------------------------------------------------"); + LOGGER.info("Running POJO transaction tests using " + concurrency + + " concurrency and " + isolation + " isolation level"); + LOGGER.info("-----------------------------------------------------------------------------------"); + + CacheStore productStore = CacheStoreHelper.createCacheStore("product", + new ClassPathResource("org/apache/ignite/tests/persistence/pojo/product.xml"), + CassandraHelper.getAdminDataSrc()); + + CacheStore orderStore = CacheStoreHelper.createCacheStore("order", + new ClassPathResource("org/apache/ignite/tests/persistence/pojo/order.xml"), + CassandraHelper.getAdminDataSrc()); + + Map productsMap = TestsHelper.generateProductsMap(5); + Map productsMap1; + Map ordersMap = TestsHelper.generateOrdersMap(5); + Map ordersMap1; + Product product = TestsHelper.generateRandomProduct(-1L); + ProductOrder order = TestsHelper.generateRandomOrder(-1L, -1L, Instant.now()); + + IgniteTransactions txs = ignite.transactions(); + + IgniteCache productCache = ignite.getOrCreateCache(new CacheConfiguration("product")); + IgniteCache orderCache = ignite.getOrCreateCache(new CacheConfiguration("order")); + + LOGGER.info("Running POJO strategy write tests"); + + LOGGER.info("Running single operation write tests"); + + Transaction tx = txs.txStart(concurrency, isolation); + + try { + productCache.put(product.getId(), product); + orderCache.put(order.getId(), order); + + if (productStore.load(product.getId()) != null || orderStore.load(order.getId()) != null) { + throw new RuntimeException("Single write operation test failed. Transaction wasn't committed yet, but " + + "objects were already persisted into Cassandra"); + } + + Map products = (Map)productStore.loadAll(productsMap.keySet()); + Map orders = (Map)orderStore.loadAll(ordersMap.keySet()); + + if ((products != null && !products.isEmpty()) || (orders != null && !orders.isEmpty())) { + throw new RuntimeException("Single write operation test failed. 
Transaction wasn't committed yet, but " + + "objects were already persisted into Cassandra"); + } + + tx.commit(); + } + finally { + U.closeQuiet(tx); + } + + Product product1 = (Product)productStore.load(product.getId()); + ProductOrder order1 = (ProductOrder)orderStore.load(order.getId()); + + if (product1 == null || order1 == null) { + throw new RuntimeException("Single write operation test failed. Transaction was committed, but " + + "no objects were persisted into Cassandra"); + } + + if (!product.equals(product1) || !order.equals(order1)) { + throw new RuntimeException("Single write operation test failed. Transaction was committed, but " + + "objects were incorrectly persisted/loaded to/from Cassandra"); + } + + LOGGER.info("Single operation write tests passed"); + + LOGGER.info("Running bulk operation write tests"); + + tx = txs.txStart(concurrency, isolation); + + try { + productCache.putAll(productsMap); + orderCache.putAll(ordersMap); + + productsMap1 = (Map)productStore.loadAll(productsMap.keySet()); + ordersMap1 = (Map)orderStore.loadAll(ordersMap.keySet()); + + if ((productsMap1 != null && !productsMap1.isEmpty()) || (ordersMap1 != null && !ordersMap1.isEmpty())) { + throw new RuntimeException("Bulk write operation test failed. Transaction wasn't committed yet, but " + + "objects were already persisted into Cassandra"); + } + + tx.commit(); + } + finally { + U.closeQuiet(tx); + } + + productsMap1 = (Map)productStore.loadAll(productsMap.keySet()); + ordersMap1 = (Map)orderStore.loadAll(ordersMap.keySet()); + + if (productsMap1 == null || productsMap1.isEmpty() || ordersMap1 == null || ordersMap1.isEmpty()) { + throw new RuntimeException("Bulk write operation test failed. Transaction was committed, but " + + "no objects were persisted into Cassandra"); + } + + if (productsMap1.size() < productsMap.size() || ordersMap1.size() < ordersMap.size()) { + throw new RuntimeException("Bulk write operation test failed. There were committed less objects " + + "into Cassandra than expected"); + } + + if (productsMap1.size() > productsMap.size() || ordersMap1.size() > ordersMap.size()) { + throw new RuntimeException("Bulk write operation test failed. There were committed more objects " + + "into Cassandra than expected"); + } + + for (Map.Entry entry : productsMap.entrySet()) { + product = productsMap1.get(entry.getKey()); + + if (!entry.getValue().equals(product)) { + throw new RuntimeException("Bulk write operation test failed. Transaction was committed, but " + + "some objects were incorrectly persisted/loaded to/from Cassandra"); + } + } + + for (Map.Entry entry : ordersMap.entrySet()) { + order = ordersMap1.get(entry.getKey()); + + if (!entry.getValue().equals(order)) { + throw new RuntimeException("Bulk write operation test failed. Transaction was committed, but " + + "some objects were incorrectly persisted/loaded to/from Cassandra"); + } + } + + LOGGER.info("Bulk operation write tests passed"); + + LOGGER.info("POJO strategy write tests passed"); + + LOGGER.info("Running POJO strategy delete tests"); + + LOGGER.info("Running single delete tests"); + + tx = txs.txStart(concurrency, isolation); + + try { + productCache.remove(-1L); + orderCache.remove(-1L); + + if (productStore.load(-1L) == null || orderStore.load(-1L) == null) { + throw new RuntimeException("Single delete operation test failed. 
Transaction wasn't committed yet, but " + + "objects were already deleted from Cassandra"); + } + + tx.commit(); + } + finally { + U.closeQuiet(tx); + } + + if (productStore.load(-1L) != null || orderStore.load(-1L) != null) { + throw new RuntimeException("Single delete operation test failed. Transaction was committed, but " + + "objects were not deleted from Cassandra"); + } + + LOGGER.info("Single delete tests passed"); + + LOGGER.info("Running bulk delete tests"); + + tx = txs.txStart(concurrency, isolation); + + try { + productCache.removeAll(productsMap.keySet()); + orderCache.removeAll(ordersMap.keySet()); + + productsMap1 = (Map)productStore.loadAll(productsMap.keySet()); + ordersMap1 = (Map)orderStore.loadAll(ordersMap.keySet()); + + if (productsMap1.size() != productsMap.size() || ordersMap1.size() != ordersMap.size()) { + throw new RuntimeException("Bulk delete operation test failed. Transaction wasn't committed yet, but " + + "objects were already deleted from Cassandra"); + } + + tx.commit(); + } + finally { + U.closeQuiet(tx); + } + + productsMap1 = (Map)productStore.loadAll(productsMap.keySet()); + ordersMap1 = (Map)orderStore.loadAll(ordersMap.keySet()); + + if ((productsMap1 != null && !productsMap1.isEmpty()) || (ordersMap1 != null && !ordersMap1.isEmpty())) { + throw new RuntimeException("Bulk delete operation test failed. Transaction was committed, but " + + "objects were not deleted from Cassandra"); + } + + LOGGER.info("Bulk delete tests passed"); + + LOGGER.info("POJO strategy delete tests passed"); + + LOGGER.info("-----------------------------------------------------------------------------------"); + LOGGER.info("Passed POJO transaction tests for " + concurrency + + " concurrency and " + isolation + " isolation level"); + LOGGER.info("-----------------------------------------------------------------------------------"); + } + + /** + * KeyValuePersistenceSettings is passed directly, not as a bean and should be + * serialized and deserialized correctly + */ + @Test + public void directPersistenceConfigTest() throws IOException { + Map personMap = TestsHelper.generatePersonIdsPersonsMap(); + PersonId id = TestsHelper.generateRandomPersonId(); + Person person = TestsHelper.generateRandomPerson(id.getPersonNumber()); + + IgniteConfiguration config = igniteConfig(); + + Ignition.stopAll(true); + + try (Ignite ignite = Ignition.start(config)) { + LOGGER.info("Running POJO strategy write tests"); + IgniteCache cache = ignite.getOrCreateCache("cache1"); + + LOGGER.info("Running single operation write tests"); + cache.put(id, TestsHelper.generateRandomPerson(id.getPersonNumber())); + cache.put(id, person); + LOGGER.info("Single operation write tests passed"); + + LOGGER.info("Running bulk operation write tests"); + cache.putAll(personMap); + LOGGER.info("Bulk operation write tests passed"); + } + + LOGGER.info("POJO strategy write tests passed"); + + Ignition.stopAll(true); + + try (Ignite ignite = Ignition.start(config)) { + LOGGER.info("Running POJO strategy read tests"); + IgniteCache cache = ignite.getOrCreateCache("cache1"); + + Person actualPerson = cache.get(id); + if (!person.equals(actualPerson)) + throw new RuntimeException("Person value was incorrectly deserialized from Cassandra"); + + LOGGER.info("Single operation read tests passed"); + + LOGGER.info("Running bulk operation read tests"); + + Map actualPersonMap = cache.getAll(personMap.keySet()); + if (!TestsHelper.checkPersonMapsEqual(actualPersonMap, personMap, true)) + throw new RuntimeException("Person 
values batch was incorrectly deserialized from Cassandra"); + + LOGGER.info("Bulk operation read tests passed"); + + LOGGER.info("POJO strategy read tests passed"); + + LOGGER.info("Running POJO strategy delete tests"); + + cache.remove(id); + cache.removeAll(personMap.keySet()); + + LOGGER.info("POJO strategy delete tests passed"); + } + } + + /** */ + private IgniteConfiguration igniteConfig() throws IOException { + URL url = getClass().getClassLoader().getResource("org/apache/ignite/tests/persistence/pojo/persistence-settings-3.xml"); + String persistence = U.readFileToString(url.getFile(), "UTF-8"); + + KeyValuePersistenceSettings persistenceSettings = new KeyValuePersistenceSettings(persistence); + + DataSource dataSource = new DataSource(); + dataSource.setContactPoints(CassandraHelper.getContactPointsArray()); + dataSource.setCredentials(new CassandraAdminCredentials()); + dataSource.setLoadBalancingPolicy(new RoundRobinPolicy()); + + CassandraCacheStoreFactory storeFactory = new CassandraCacheStoreFactory<>(); + storeFactory.setDataSource(dataSource); + storeFactory.setPersistenceSettings(persistenceSettings); + + CacheConfiguration cacheConfiguration = new CacheConfiguration<>(); + cacheConfiguration.setName("cache1"); + cacheConfiguration.setReadThrough(true); + cacheConfiguration.setWriteThrough(true); + cacheConfiguration.setCacheStoreFactory(storeFactory); + + IgniteConfiguration config = new IgniteConfiguration(); + config.setCacheConfiguration(cacheConfiguration); + + return config; + } + + /** */ + public static class PojoPerson { + /** */ + private int id; + + /** */ + private String name; + + /** */ + public PojoPerson() { + // No-op. + } + + /** */ + public PojoPerson(int id, String name) { + this.id = id; + this.name = name; + } + + /** */ + public int getId() { + return id; + } + + /** */ + public String getName() { + return name; + } + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/LoadTestsCassandraArtifactsCreator.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/LoadTestsCassandraArtifactsCreator.java new file mode 100644 index 0000000000000..42cfd9d4b6185 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/LoadTestsCassandraArtifactsCreator.java @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
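The igniteConfig() helper above builds the store factory programmatically from the raw XML string; an equivalent, slightly more compact wiring loads the persistence settings straight from a classpath resource. The sketch below is illustrative only: the contact point and the PlainCredentials user/password are placeholders, and the class name is hypothetical.

import com.datastax.driver.core.policies.RoundRobinPolicy;
import org.apache.ignite.cache.store.cassandra.CassandraCacheStoreFactory;
import org.apache.ignite.cache.store.cassandra.datasource.DataSource;
import org.apache.ignite.cache.store.cassandra.datasource.PlainCredentials;
import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.springframework.core.io.ClassPathResource;

public class CassandraStoreConfigSketch {
    public static IgniteConfiguration igniteConfig() {
        // Persistence settings loaded directly from the classpath instead of via URL + readFileToString.
        KeyValuePersistenceSettings persistenceSettings = new KeyValuePersistenceSettings(
            new ClassPathResource("org/apache/ignite/tests/persistence/pojo/persistence-settings-3.xml"));

        DataSource dataSource = new DataSource();
        dataSource.setContactPoints("127.0.0.1");                                  // placeholder contact point
        dataSource.setCredentials(new PlainCredentials("cassandra", "cassandra")); // placeholder credentials
        dataSource.setLoadBalancingPolicy(new RoundRobinPolicy());

        CassandraCacheStoreFactory<Object, Object> storeFactory = new CassandraCacheStoreFactory<>();
        storeFactory.setDataSource(dataSource);
        storeFactory.setPersistenceSettings(persistenceSettings);

        CacheConfiguration<Object, Object> cacheCfg = new CacheConfiguration<>("cache1");
        cacheCfg.setReadThrough(true);
        cacheCfg.setWriteThrough(true);
        cacheCfg.setCacheStoreFactory(storeFactory);

        return new IgniteConfiguration().setCacheConfiguration(cacheCfg);
    }
}
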
+ */ + +package org.apache.ignite.tests; + +import java.util.LinkedList; +import java.util.List; +import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings; +import org.apache.ignite.tests.utils.CassandraHelper; +import org.apache.ignite.tests.utils.TestsHelper; + +/** + * Recreates all required Cassandra database objects (keyspace, table, indexes) for load tests + */ +public class LoadTestsCassandraArtifactsCreator { + /** + * Recreates Cassandra artifacts required for load tests + * @param args not used + */ + public static void main(String[] args) { + try { + System.out.println("[INFO] Recreating Cassandra artifacts (keyspace, table, indexes) for load tests"); + + KeyValuePersistenceSettings perSettings = + new KeyValuePersistenceSettings(TestsHelper.getLoadTestsPersistenceSettings()); + + System.out.println("[INFO] Dropping test keyspace: " + perSettings.getKeyspace()); + + try { + CassandraHelper.dropTestKeyspaces(); + } + catch (Throwable e) { + throw new RuntimeException("Failed to drop test keyspace: " + perSettings.getKeyspace(), e); + } + + System.out.println("[INFO] Test keyspace '" + perSettings.getKeyspace() + "' was successfully dropped"); + + System.out.println("[INFO] Creating test keyspace: " + perSettings.getKeyspace()); + + try { + CassandraHelper.executeWithAdminCredentials(perSettings.getKeyspaceDDLStatement()); + } + catch (Throwable e) { + throw new RuntimeException("Failed to create test keyspace: " + perSettings.getKeyspace(), e); + } + + System.out.println("[INFO] Test keyspace '" + perSettings.getKeyspace() + "' was successfully created"); + + System.out.println("[INFO] Creating test table: " + perSettings.getTable()); + + try { + CassandraHelper.executeWithAdminCredentials(perSettings.getTableDDLStatement(perSettings.getTable())); + } + catch (Throwable e) { + throw new RuntimeException("Failed to create test table: " + perSettings.getTable(), e); + } + + System.out.println("[INFO] Test table '" + perSettings.getTable() + "' was successfully created"); + + List statements = perSettings.getIndexDDLStatements(perSettings.getTable()); + if (statements == null) + statements = new LinkedList<>(); + + for (String statement : statements) { + System.out.println("[INFO] Creating test table index:"); + System.out.println(statement); + + try { + CassandraHelper.executeWithAdminCredentials(statement); + } + catch (Throwable e) { + throw new RuntimeException("Failed to create test table index", e); + } + + System.out.println("[INFO] Test table index was successfully created"); + } + + System.out.println("[INFO] All required Cassandra artifacts were successfully recreated"); + } + catch (Throwable e) { + System.out.println("[ERROR] Failed to recreate Cassandra artifacts"); + e.printStackTrace(System.out); + + if (e instanceof RuntimeException) + throw (RuntimeException)e; + else + throw new RuntimeException(e); + } + finally { + CassandraHelper.releaseCassandraResources(); + } + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/Generator.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/Generator.java new file mode 100644 index 0000000000000..0c18bc0e2a43c --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/Generator.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.tests.load;
+
+/**
+ * Generator abstraction used by load tests to produce the next key/value pair for an Ignite cache
+ * from the provided long counter (which is sequentially incremented in the load test driver loop).
+ */
+public interface Generator {
+    /** */
+    public Object generate(long i);
+}
diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/IntGenerator.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/IntGenerator.java
new file mode 100644
index 0000000000000..21490f6cce699
--- /dev/null
+++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/IntGenerator.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.tests.load;
+
+/**
+ * Implementation of {@link org.apache.ignite.tests.load.Generator} generating {@link Integer} instances.
+ */
+public class IntGenerator implements Generator {
+    /** {@inheritDoc} */
+    @Override public Object generate(long i) {
+        long val = i / 10000;
+
+        while (val > Integer.MAX_VALUE)
+            val /= 2;
+
+        return (int)val;
+    }
+}
diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/LoadTestDriver.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/LoadTestDriver.java
new file mode 100644
index 0000000000000..a244da1f8bde7
--- /dev/null
+++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/LoadTestDriver.java
@@ -0,0 +1,238 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.load; + +import java.lang.reflect.Constructor; +import java.util.LinkedList; +import java.util.List; +import org.apache.ignite.Ignite; +import org.apache.ignite.cache.store.CacheStore; +import org.apache.ignite.cache.store.cassandra.common.SystemHelper; +import org.apache.ignite.tests.utils.TestsHelper; +import org.apache.logging.log4j.Logger; + +/** + * Basic load test driver to be inherited by specific implementation for particular use-case. + */ +public abstract class LoadTestDriver { + /** Number of attempts to setup load test */ + private static final int NUMBER_OF_SETUP_ATTEMPTS = 10; + + /** Timeout between load test setup attempts */ + private static final int SETUP_ATTEMPT_TIMEOUT = 1000; + + /** */ + public void runTest(String testName, Class clazz, String logName) { + logger().info("Running " + testName + " test"); + + Object cfg = null; + + int attempt; + + logger().info("Setting up load tests driver"); + + for (attempt = 0; attempt < NUMBER_OF_SETUP_ATTEMPTS; attempt++) { + try { + cfg = setup(logName); + break; + } + catch (Throwable e) { + logger().error((attempt + 1) + " attempt to setup load test '" + testName + "' failed", e); + } + + if (attempt + 1 != NUMBER_OF_SETUP_ATTEMPTS) { + logger().info("Sleeping for " + SETUP_ATTEMPT_TIMEOUT + " seconds before trying next attempt " + + "to setup '" + testName + "' load test"); + + try { + Thread.sleep(SETUP_ATTEMPT_TIMEOUT); + } + catch (InterruptedException ignored) { + // No-op. 
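A concrete load test uses this driver by subclassing it and handing runTest(...) a Worker implementation whose constructor matches the (Ignite|CacheStore, long, long) lookup performed in createWorker() below. The following is a rough, illustrative sketch only: the class and logger names are hypothetical, the "personTypes" store name and settings file are borrowed from the store tests above, and BulkReadWorker is assumed to expose the (CacheStore, long, long) constructor.

package org.apache.ignite.tests.load;

import org.apache.ignite.tests.load.cassandra.BulkReadWorker;
import org.apache.ignite.tests.utils.CacheStoreHelper;
import org.apache.ignite.tests.utils.CassandraHelper;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.core.io.ClassPathResource;

public class CassandraBulkReadLoadTestSketch extends LoadTestDriver {
    /** Hypothetical logger name. */
    private static final Logger LOGGER = LogManager.getLogger("CassandraLoadTests");

    /** {@inheritDoc} */
    @Override protected Logger logger() {
        return LOGGER;
    }

    /** {@inheritDoc} */
    @Override protected Object setup(String logName) {
        // The object returned here is passed to every worker constructor; for Cassandra-level
        // workers that is a CacheStore built from the same settings used by the store tests.
        return CacheStoreHelper.createCacheStore("personTypes",
            new ClassPathResource("org/apache/ignite/tests/persistence/pojo/persistence-settings-3.xml"),
            CassandraHelper.getAdminDataSrc());
    }

    /** */
    public static void main(String[] args) {
        new CassandraBulkReadLoadTestSketch().runTest("BULK_READ", BulkReadWorker.class, "CassandraBulkReadLoadTest");
    }
}
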
+ } + } + } + + if (cfg == null && attempt == NUMBER_OF_SETUP_ATTEMPTS) { + throw new RuntimeException("All " + NUMBER_OF_SETUP_ATTEMPTS + " attempts to setup load test '" + + testName + "' have failed"); + } + + // calculates host unique prefix based on its subnet IP address + long hostUniquePrefix = getHostUniquePrefix(); + + logger().info("Load tests driver setup successfully completed"); + + try { + + List workers = new LinkedList<>(); + long startPosition = 0; + + logger().info("Starting workers"); + + for (int i = 0; i < TestsHelper.getLoadTestsThreadsCount(); i++) { + Worker worker = createWorker(clazz, cfg, + hostUniquePrefix + startPosition, + hostUniquePrefix + startPosition + 100000000); + workers.add(worker); + worker.setName(testName + "-worker-" + i); + worker.start(); + startPosition += 100000001; + } + + logger().info("Workers started"); + logger().info("Waiting for workers to complete"); + + List failedWorkers = new LinkedList<>(); + + for (Worker worker : workers) { + boolean failed = false; + + try { + worker.join(); + } + catch (Throwable e) { + logger().error("Worker " + worker.getName() + " waiting interrupted", e); + failed = true; + } + + if (failed || worker.isFailed()) { + failedWorkers.add(worker.getName()); + logger().info("Worker " + worker.getName() + " execution failed"); + } + else + logger().info("Worker " + worker.getName() + " successfully completed"); + } + + printTestResultsHeader(testName, failedWorkers); + printTestResultsStatistics(testName, workers); + } + finally { + tearDown(cfg); + } + } + + /** */ + protected abstract Logger logger(); + + /** */ + protected abstract Object setup(String logName); + + /** */ + protected void tearDown(Object obj) { + } + + /** */ + @SuppressWarnings("unchecked") + private Worker createWorker(Class clazz, Object cfg, long startPosition, long endPosition) { + try { + Class cfgCls = cfg instanceof Ignite ? Ignite.class : CacheStore.class; + + Constructor ctor = clazz.getConstructor(cfgCls, long.class, long.class); + + return (Worker)ctor.newInstance(cfg, startPosition, endPosition); + } + catch (Throwable e) { + logger().error("Failed to instantiate worker of class '" + clazz.getName() + "'", e); + throw new RuntimeException("Failed to instantiate worker of class '" + clazz.getName() + "'", e); + } + } + + /** */ + private void printTestResultsHeader(String testName, List failedWorkers) { + if (failedWorkers.isEmpty()) { + logger().info(testName + " test execution successfully completed."); + return; + } + + if (failedWorkers.size() == TestsHelper.getLoadTestsThreadsCount()) { + logger().error(testName + " test execution totally failed."); + return; + } + + String strFailedWorkers = ""; + + for (String workerName : failedWorkers) { + if (!strFailedWorkers.isEmpty()) + strFailedWorkers += ", "; + + strFailedWorkers += workerName; + } + + logger().warn(testName + " test execution completed, but " + failedWorkers.size() + " of " + + TestsHelper.getLoadTestsThreadsCount() + " workers failed. Failed workers: " + strFailedWorkers); + } + + /** */ + @SuppressWarnings("StringBufferReplaceableByString") + private void printTestResultsStatistics(String testName, List workers) { + long cnt = 0; + long errCnt = 0; + long speed = 0; + + for (Worker worker : workers) { + cnt += worker.getMsgProcessed(); + errCnt += worker.getErrorsCount(); + speed += worker.getSpeed(); + } + + float errPercent = errCnt == 0 ? + 0 : + cnt + errCnt == 0 ? 
0 : (float)(errCnt * 100 ) / (float)(cnt + errCnt); + + StringBuilder builder = new StringBuilder(); + builder.append(SystemHelper.LINE_SEPARATOR); + builder.append("-------------------------------------------------"); + builder.append(SystemHelper.LINE_SEPARATOR); + builder.append(testName).append(" test statistics").append(SystemHelper.LINE_SEPARATOR); + builder.append(testName).append(" messages: ").append(cnt).append(SystemHelper.LINE_SEPARATOR); + builder.append(testName).append(" errors: ").append(errCnt).append(", "). + append(String.format("%.2f", errPercent).replace(",", ".")). + append("%").append(SystemHelper.LINE_SEPARATOR); + builder.append(testName).append(" speed: ").append(speed).append(" msg/sec").append(SystemHelper.LINE_SEPARATOR); + builder.append("-------------------------------------------------"); + + logger().info(builder.toString()); + } + + /** */ + private long getHostUniquePrefix() { + String[] parts = SystemHelper.HOST_IP.split("\\."); + + if (parts[2].equals("0")) + parts[2] = "777"; + + if (parts[3].equals("0")) + parts[3] = "777"; + + long part3 = Long.parseLong(parts[2]); + long part4 = Long.parseLong(parts[3]); + + if (part3 < 10) + part3 *= 100; + else if (part4 < 100) + part3 *= 10; + + if (part4 < 10) + part4 *= 100; + else if (part4 < 100) + part4 *= 10; + + return (part4 * 100000000000000L) + (part3 * 100000000000L) + Thread.currentThread().getId(); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/LongGenerator.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/LongGenerator.java new file mode 100644 index 0000000000000..0398f98cbe2cf --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/LongGenerator.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.load; + +/** + * Implementation of {@link org.apache.ignite.tests.load.Generator} generating {@link Long} instance. + */ +public class LongGenerator implements Generator { + /** {@inheritDoc} */ + @Override public Object generate(long i) { + return i; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/PersonGenerator.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/PersonGenerator.java new file mode 100644 index 0000000000000..054c1661ac1f3 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/PersonGenerator.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.load; + +import java.util.Date; +import java.util.LinkedList; +import java.util.List; +import org.apache.ignite.tests.pojos.Person; + +/** + * Implementation of {@link Generator} generating {@link Person} instance. + */ +public class PersonGenerator implements Generator { + /** */ + private static final Date DATE = new Date(); + + /** */ + private static final List PHONES = new LinkedList(); + + static { + PHONES.add("1234567"); + PHONES.add("7654321"); + PHONES.add("1289054"); + } + + /** {@inheritDoc} */ + @Override public Object generate(long i) { + return new Person(i, Long.toString(i), Long.toString(i), (short)(i % 100), i % 2 == 0, i, i, DATE, PHONES); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/PersonIdGenerator.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/PersonIdGenerator.java new file mode 100644 index 0000000000000..a11e0d81d4318 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/PersonIdGenerator.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.load; + +import org.apache.ignite.tests.pojos.PersonId; + +/** + * Implementation of {@link org.apache.ignite.tests.load.Generator} generating + * {@link org.apache.ignite.tests.pojos.PersonId} instance. 
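Whatever the produced type, these Generator implementations are expected to be deterministic functions of the supplied counter, so that read-oriented workers can re-derive the same keys and values the write workers stored from the same counter sequence. A small illustrative check (hypothetical test class; it assumes Person and PersonId keep their field-based equals(), which the store tests above already rely on):

package org.apache.ignite.tests.load;

import org.junit.Assert;
import org.junit.Test;

public class GeneratorDeterminismSketchTest {
    /** Generating twice from the same counter must yield equal keys and values. */
    @Test
    public void generatorsAreDeterministic() {
        Generator keyGen = new PersonIdGenerator();
        Generator valGen = new PersonGenerator();

        for (long i = 0; i < 1000; i++) {
            Assert.assertEquals(keyGen.generate(i), keyGen.generate(i));
            Assert.assertEquals(valGen.generate(i), valGen.generate(i));
        }
    }
}
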
+ */ +public class PersonIdGenerator implements Generator { + /** {@inheritDoc} */ + @Override public Object generate(long i) { + return new PersonId(Long.toString(i), Long.toString(i), i); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/StringGenerator.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/StringGenerator.java new file mode 100644 index 0000000000000..cfaf34ae50def --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/StringGenerator.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.load; + +/** + * Implementation of {@link org.apache.ignite.tests.load.Generator} generating {@link String} instance. + */ +public class StringGenerator implements Generator { + /** {@inheritDoc} */ + @Override public Object generate(long i) { + return Long.toString(i); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/Worker.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/Worker.java new file mode 100644 index 0000000000000..0aa20c0b2de3a --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/Worker.java @@ -0,0 +1,432 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
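The Worker base class that follows exposes three extension points: loggerName(), batchMode(), and the process(...) overload matching the configured mode (Ignite cache vs. raw CacheStore, single operation vs. batch). A minimal single-operation, cache-based subclass might look like the sketch below (illustrative only; the class and logger names are hypothetical):

package org.apache.ignite.tests.load;

import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;

public class IgniteWriteWorkerSketch extends Worker {
    /** Matches the (Ignite, long, long) constructor looked up by LoadTestDriver.createWorker(). */
    public IgniteWriteWorkerSketch(Ignite ignite, long startPosition, long endPosition) {
        super(ignite, startPosition, endPosition);
    }

    /** {@inheritDoc} */
    @Override protected String loggerName() {
        return "IgniteWriteLoadTest"; // hypothetical logger name
    }

    /** Single-operation mode: the driver loop feeds one key/value pair at a time. */
    @Override protected boolean batchMode() {
        return false;
    }

    /** {@inheritDoc} */
    @Override protected void process(IgniteCache cache, Object key, Object val) {
        cache.put(key, val);
    }
}
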
+ */ + +package org.apache.ignite.tests.load; + +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.cache.store.CacheStore; +import org.apache.ignite.cache.store.cassandra.common.SystemHelper; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.internal.processors.cache.CacheEntryImpl; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.tests.utils.TestsHelper; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * Worker thread abstraction to be inherited by specific load test implementation + */ +public abstract class Worker extends Thread { + /** */ + private long testStartTime; + + /** */ + boolean warmup = TestsHelper.getLoadTestsWarmupPeriod() != 0; + + /** */ + private volatile long warmupStartTime; + + /** */ + private volatile long warmupFinishTime; + + /** */ + private volatile long startTime; + + /** */ + private volatile long finishTime; + + /** */ + private volatile long warmupMsgProcessed; + + /** */ + private volatile long warmupSleepCnt; + + /** */ + private volatile long msgProcessed; + + /** */ + private volatile long msgFailed; + + /** */ + private volatile long sleepCnt; + + /** */ + private Throwable executionError; + + /** */ + private long statReportedTime; + + /** */ + private CacheStore cacheStore; + + /** */ + private Ignite ignite; + + /** */ + private IgniteCache igniteCache; + + /** */ + private Logger log; + + /** */ + private long startPosition; + + /** */ + private long endPosition; + + /** */ + public Worker(CacheStore cacheStore, long startPosition, long endPosition) { + this.cacheStore = cacheStore; + this.log = LogManager.getLogger(loggerName()); + this.startPosition = startPosition; + this.endPosition = endPosition; + } + + /** */ + public Worker(Ignite ignite, long startPosition, long endPosition) { + this.ignite = ignite; + this.log = LogManager.getLogger(loggerName()); + this.startPosition = startPosition; + this.endPosition = endPosition; + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override public void run() { + try { + if (ignite != null) + igniteCache = ignite.getOrCreateCache(new CacheConfiguration(TestsHelper.getLoadTestsCacheName())); + + execute(); + } + catch (Throwable e) { + executionError = e; + throw new RuntimeException("Test execution abnormally terminated", e); + } + finally { + reportTestCompletion(); + } + } + + /** */ + public boolean isFailed() { + return executionError != null; + } + + /** */ + public long getSpeed() { + if (msgProcessed == 0) + return 0; + + long finish = finishTime != 0 ? finishTime : System.currentTimeMillis(); + long duration = (finish - startTime - sleepCnt * TestsHelper.getLoadTestsRequestsLatency()) / 1000; + + return duration == 0 ? msgProcessed : msgProcessed / duration; + } + + /** */ + public long getErrorsCount() { + return msgFailed; + } + + /** */ + public float getErrorsPercent() { + if (msgFailed == 0) + return 0; + + return msgProcessed + msgFailed == 0 ? 
0 : (float)(msgFailed * 100 ) / (float)(msgProcessed + msgFailed); + } + + /** */ + public long getMsgCountTotal() { + return warmupMsgProcessed + msgProcessed; + } + + /** */ + public long getWarmupMsgProcessed() { + return warmupMsgProcessed; + } + + /** */ + public long getMsgProcessed() { + return msgProcessed; + } + + /** */ + protected abstract String loggerName(); + + /** */ + protected abstract boolean batchMode(); + + /** */ + protected void process(CacheStore cacheStore, CacheEntryImpl entry) { + throw new UnsupportedOperationException("Single message processing is not supported"); + } + + /** */ + protected void process(IgniteCache cache, Object key, Object val) { + throw new UnsupportedOperationException("Single message processing is not supported"); + } + + /** */ + protected void process(CacheStore cacheStore, Collection entries) { + throw new UnsupportedOperationException("Batch processing is not supported"); + } + + /** */ + protected void process(IgniteCache cache, Map map) { + throw new UnsupportedOperationException("Batch processing is not supported"); + } + + /** */ + @SuppressWarnings("unchecked") + private void execute() throws InterruptedException { + testStartTime = System.currentTimeMillis(); + + log.info("Test execution started"); + + if (warmup) + log.info("Warm up period started"); + + warmupStartTime = warmup ? testStartTime : 0; + startTime = !warmup ? testStartTime : 0; + + statReportedTime = testStartTime; + + long cntr = startPosition; + Object key = TestsHelper.generateLoadTestsKey(cntr); + Object val = TestsHelper.generateLoadTestsValue(cntr); + List batchList = new ArrayList<>(TestsHelper.getBulkOperationSize()); + Map batchMap = new HashMap(TestsHelper.getBulkOperationSize()); + + int execTime = TestsHelper.getLoadTestsWarmupPeriod() + TestsHelper.getLoadTestsExecutionTime(); + + try { + while (true) { + if (System.currentTimeMillis() - testStartTime > execTime) + break; + + if (warmup && System.currentTimeMillis() - testStartTime > TestsHelper.getLoadTestsWarmupPeriod()) { + warmupFinishTime = System.currentTimeMillis(); + startTime = warmupFinishTime; + statReportedTime = warmupFinishTime; + warmup = false; + log.info("Warm up period completed"); + } + + if (!batchMode()) { + if (cacheStore != null) + doWork(new CacheEntryImpl(key, val)); + else + doWork(key, val); + } + else if (batchList.size() == TestsHelper.getBulkOperationSize() || + batchMap.size() == TestsHelper.getBulkOperationSize()) { + if (cacheStore != null) + doWork(batchList); + else + doWork(batchMap); + + batchMap.clear(); + batchList.clear(); + } + + if (cntr == endPosition) + cntr = startPosition; + else + cntr++; + + key = TestsHelper.generateLoadTestsKey(cntr); + val = TestsHelper.generateLoadTestsValue(cntr); + + if (batchMode()) { + if (cacheStore != null) + batchList.add(new CacheEntryImpl(key, val)); + else + batchMap.put(key, val); + } + + reportStatistics(); + } + } + finally { + warmupFinishTime = warmupFinishTime != 0 ? 
warmupFinishTime : System.currentTimeMillis(); + finishTime = System.currentTimeMillis(); + } + } + + /** */ + private void doWork(CacheEntryImpl entry) { + try { + process(cacheStore, entry); + updateMetrics(1); + } + catch (Throwable e) { + log.error("Failed to perform single operation", e); + updateErrorMetrics(1); + } + } + + /** */ + private void doWork(Object key, Object val) { + try { + process(igniteCache, key, val); + updateMetrics(1); + } + catch (Throwable e) { + log.error("Failed to perform single operation", e); + updateErrorMetrics(1); + } + } + + /** */ + private void doWork(Collection entries) { + try { + process(cacheStore, entries); + updateMetrics(entries.size()); + } + catch (Throwable e) { + log.error("Failed to perform batch operation", e); + updateErrorMetrics(entries.size()); + } + } + + /** */ + private void doWork(Map entries) { + try { + process(igniteCache, entries); + updateMetrics(entries.size()); + } + catch (Throwable e) { + log.error("Failed to perform batch operation", e); + updateErrorMetrics(entries.size()); + } + } + + /** */ + private long getWarmUpSpeed() { + if (warmupMsgProcessed == 0) + return 0; + + long finish = warmupFinishTime != 0 ? warmupFinishTime : System.currentTimeMillis(); + long duration = (finish - warmupStartTime - warmupSleepCnt * TestsHelper.getLoadTestsRequestsLatency()) / 1000; + + return duration == 0 ? warmupMsgProcessed : warmupMsgProcessed / duration; + } + + /** */ + private void updateMetrics(int itemsProcessed) { + if (warmup) + warmupMsgProcessed += itemsProcessed; + else + msgProcessed += itemsProcessed; + + if (TestsHelper.getLoadTestsRequestsLatency() > 0) { + try { + Thread.sleep(TestsHelper.getLoadTestsRequestsLatency()); + + if (warmup) + warmupSleepCnt++; + else + sleepCnt++; + } + catch (Throwable ignored) { + } + } + } + + /** + * TODO IGNITE-1371 Comment absent. + * + * @param itemsFailed Failed item. + */ + private void updateErrorMetrics(int itemsFailed) { + if (!warmup) + msgFailed += itemsFailed; + } + + /** */ + private void reportStatistics() { + // statistics should be reported only every 30 seconds + if (System.currentTimeMillis() - statReportedTime < 30000) + return; + + statReportedTime = System.currentTimeMillis(); + + int completed = warmup ? + (int)(statReportedTime - warmupStartTime) * 100 / TestsHelper.getLoadTestsWarmupPeriod() : + (int)(statReportedTime - startTime) * 100 / TestsHelper.getLoadTestsExecutionTime(); + + if (completed > 100) + completed = 100; + + if (warmup) { + log.info("Warm up messages processed " + warmupMsgProcessed + ", " + + "speed " + getWarmUpSpeed() + " msg/sec, " + completed + "% completed"); + } + else { + log.info("Messages processed " + msgProcessed + ", " + + "speed " + getSpeed() + " msg/sec, " + completed + "% completed, " + + "errors " + msgFailed + " / " + String.format("%.2f", getErrorsPercent()).replace(",", ".") + "%"); + } + } + + /** */ + private void reportTestCompletion() { + StringBuilder builder = new StringBuilder(); + + if (executionError != null) + builder.append("Test execution abnormally terminated. "); + else + builder.append("Test execution successfully completed. 
"); + + builder.append("Statistics: ").append(SystemHelper.LINE_SEPARATOR); + builder.append("Start time: ") + .append(IgniteUtils.SHORT_DATE_FMT.format(Instant.ofEpochMilli(testStartTime))) + .append(SystemHelper.LINE_SEPARATOR); + builder.append("Finish time: ") + .append(IgniteUtils.SHORT_DATE_FMT.format(Instant.ofEpochMilli(finishTime))) + .append(SystemHelper.LINE_SEPARATOR); + builder.append("Duration: ").append((finishTime - testStartTime) / 1000).append(" sec") + .append(SystemHelper.LINE_SEPARATOR); + + if (TestsHelper.getLoadTestsWarmupPeriod() > 0) { + builder.append("Warm up period: ").append(TestsHelper.getLoadTestsWarmupPeriod() / 1000) + .append(" sec").append(SystemHelper.LINE_SEPARATOR); + builder.append("Warm up processed messages: ").append(warmupMsgProcessed).append(SystemHelper.LINE_SEPARATOR); + builder.append("Warm up processing speed: ").append(getWarmUpSpeed()) + .append(" msg/sec").append(SystemHelper.LINE_SEPARATOR); + } + + builder.append("Processed messages: ").append(msgProcessed).append(SystemHelper.LINE_SEPARATOR); + builder.append("Processing speed: ").append(getSpeed()).append(" msg/sec").append(SystemHelper.LINE_SEPARATOR); + builder.append("Errors: ").append(msgFailed).append(" / "). + append(String.format("%.2f", getErrorsPercent()).replace(",", ".")).append("%"); + + if (executionError != null) + log.error(builder.toString(), executionError); + else + log.info(builder.toString()); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/BulkReadWorker.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/BulkReadWorker.java new file mode 100644 index 0000000000000..38f0db86674fd --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/BulkReadWorker.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests.load.cassandra; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import org.apache.ignite.cache.store.CacheStore; +import org.apache.ignite.internal.processors.cache.CacheEntryImpl; +import org.apache.ignite.tests.load.Worker; +import org.apache.ignite.tests.utils.TestsHelper; + +/** + * Cassandra direct load tests worker for bulk read operation CacheStore.load + */ +public class BulkReadWorker extends Worker { + /** */ + public static final String LOGGER_NAME = "CassandraBulkReadLoadTest"; + + /** */ + private List keys = new ArrayList<>(TestsHelper.getBulkOperationSize()); + + /** */ + public BulkReadWorker(CacheStore cacheStore, long startPosition, long endPosition) { + super(cacheStore, startPosition, endPosition); + } + + /** {@inheritDoc} */ + @Override protected String loggerName() { + return LOGGER_NAME; + } + + /** {@inheritDoc} */ + @Override protected boolean batchMode() { + return true; + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override protected void process(CacheStore cacheStore, Collection entries) { + keys.clear(); + + for (CacheEntryImpl entry : entries) + keys.add(entry.getKey()); + + cacheStore.loadAll(keys); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/BulkWriteWorker.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/BulkWriteWorker.java new file mode 100644 index 0000000000000..c71728f767320 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/BulkWriteWorker.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests.load.cassandra; + +import java.util.Collection; +import org.apache.ignite.cache.store.CacheStore; +import org.apache.ignite.internal.processors.cache.CacheEntryImpl; +import org.apache.ignite.tests.load.Worker; + +/** + * Cassandra direct load tests worker for bulk write operation CacheStore.writeAll + */ +public class BulkWriteWorker extends Worker { + /** */ + public static final String LOGGER_NAME = "CassandraBulkWriteLoadTest"; + + /** */ + public BulkWriteWorker(CacheStore cacheStore, long startPosition, long endPosition) { + super(cacheStore, startPosition, endPosition); + } + + /** {@inheritDoc} */ + @Override protected String loggerName() { + return LOGGER_NAME; + } + + /** {@inheritDoc} */ + @Override protected boolean batchMode() { + return true; + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override protected void process(CacheStore cacheStore, Collection entries) { + cacheStore.writeAll(entries); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/ReadWorker.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/ReadWorker.java new file mode 100644 index 0000000000000..051b55fb49e95 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/ReadWorker.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests.load.cassandra; + +import org.apache.ignite.cache.store.CacheStore; +import org.apache.ignite.internal.processors.cache.CacheEntryImpl; +import org.apache.ignite.tests.load.Worker; + +/** + * Cassandra direct load tests worker for read operation CacheStore.load + */ +public class ReadWorker extends Worker { + /** */ + public static final String LOGGER_NAME = "CassandraReadLoadTest"; + + /** */ + public ReadWorker(CacheStore cacheStore, long startPosition, long endPosition) { + super(cacheStore, startPosition, endPosition); + } + + /** {@inheritDoc} */ + @Override protected String loggerName() { + return LOGGER_NAME; + } + + /** {@inheritDoc} */ + @Override protected boolean batchMode() { + return false; + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override protected void process(CacheStore cacheStore, CacheEntryImpl entry) { + cacheStore.load(entry.getKey()); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/WriteWorker.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/WriteWorker.java new file mode 100644 index 0000000000000..2b10bcdcd1698 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/WriteWorker.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests.load.cassandra; + +import org.apache.ignite.cache.store.CacheStore; +import org.apache.ignite.internal.processors.cache.CacheEntryImpl; +import org.apache.ignite.tests.load.Worker; + +/** + * Cassandra direct load tests worker for write operation CacheStore.write + */ +public class WriteWorker extends Worker { + /** */ + public static final String LOGGER_NAME = "CassandraWriteLoadTest"; + + /** */ + public WriteWorker(CacheStore cacheStore, long startPosition, long endPosition) { + super(cacheStore, startPosition, endPosition); + } + + /** {@inheritDoc} */ + @Override protected String loggerName() { + return LOGGER_NAME; + } + + /** {@inheritDoc} */ + @Override protected boolean batchMode() { + return false; + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override protected void process(CacheStore cacheStore, CacheEntryImpl entry) { + cacheStore.write(entry); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/package-info.java new file mode 100644 index 0000000000000..74204eed6e2a5 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/cassandra/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains load tests workers implementation for Cassandra cluster + */ + +package org.apache.ignite.tests.load.cassandra; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/BulkReadWorker.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/BulkReadWorker.java new file mode 100644 index 0000000000000..c20d0cef220bd --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/BulkReadWorker.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.load.ignite; + +import java.util.Map; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.tests.load.Worker; + +/** + * Ignite load tests worker for bulk read operation CacheStore.loadAll + */ +public class BulkReadWorker extends Worker { + /** */ + public static final String LOGGER_NAME = "IgniteBulkReadLoadTest"; + + /** */ + public BulkReadWorker(Ignite ignite, long startPosition, long endPosition) { + super(ignite, startPosition, endPosition); + } + + /** {@inheritDoc} */ + @Override protected String loggerName() { + return LOGGER_NAME; + } + + /** {@inheritDoc} */ + @Override protected boolean batchMode() { + return true; + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override protected void process(IgniteCache cache, Map entries) { + cache.getAll(entries.keySet()); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/BulkWriteWorker.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/BulkWriteWorker.java new file mode 100644 index 0000000000000..1ce7be3622fc3 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/BulkWriteWorker.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests.load.ignite; + +import java.util.Map; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.tests.load.Worker; + +/** + * Ignite load tests worker for bulk read operation CacheStore.writeAll + */ +public class BulkWriteWorker extends Worker { + /** */ + public static final String LOGGER_NAME = "IgniteBulkWriteLoadTest"; + + /** */ + public BulkWriteWorker(Ignite ignite, long startPosition, long endPosition) { + super(ignite, startPosition, endPosition); + } + + /** {@inheritDoc} */ + @Override protected String loggerName() { + return LOGGER_NAME; + } + + /** {@inheritDoc} */ + @Override protected boolean batchMode() { + return true; + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override protected void process(IgniteCache cache, Map entries) { + cache.putAll(entries); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/ReadWorker.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/ReadWorker.java new file mode 100644 index 0000000000000..35f7d3959c2a5 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/ReadWorker.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.load.ignite; + +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.tests.load.Worker; + +/** + * Ignite load tests worker for read operation CacheStore.load + */ +public class ReadWorker extends Worker { + /** */ + public static final String LOGGER_NAME = "IgniteReadLoadTest"; + + /** */ + public ReadWorker(Ignite ignite, long startPosition, long endPosition) { + super(ignite, startPosition, endPosition); + } + + /** {@inheritDoc} */ + @Override protected String loggerName() { + return LOGGER_NAME; + } + + /** {@inheritDoc} */ + @Override protected boolean batchMode() { + return false; + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override protected void process(IgniteCache cache, Object key, Object val) { + cache.get(key); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/WriteWorker.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/WriteWorker.java new file mode 100644 index 0000000000000..bed709973597e --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/WriteWorker.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.load.ignite; + +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.tests.load.Worker; + +/** + * Ignite load tests worker for write operation CacheStore.write + */ +public class WriteWorker extends Worker { + /** */ + public static final String LOGGER_NAME = "IgniteWriteLoadTest"; + + /** */ + public WriteWorker(Ignite ignite, long startPosition, long endPosition) { + super(ignite, startPosition, endPosition); + } + + /** {@inheritDoc} */ + @Override protected String loggerName() { + return LOGGER_NAME; + } + + /** {@inheritDoc} */ + @Override protected boolean batchMode() { + return false; + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override protected void process(IgniteCache cache, Object key, Object val) { + cache.put(key, val); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/package-info.java new file mode 100644 index 0000000000000..2beab56c5ba7b --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/ignite/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains load tests workers implementation for Ignite-Cassandra cluster + */ + +package org.apache.ignite.tests.load.ignite; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/package-info.java new file mode 100644 index 0000000000000..890e3dffd7a10 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/load/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains load tests classes + */ + +package org.apache.ignite.tests.load; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/package-info.java new file mode 100644 index 0000000000000..52a34daea09bd --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains unit tests + */ + +package org.apache.ignite.tests; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/Person.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/Person.java new file mode 100644 index 0000000000000..2bec7071c76cb --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/Person.java @@ -0,0 +1,261 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests.pojos; + +import java.io.Externalizable; +import java.io.IOException; +import java.io.ObjectInput; +import java.io.ObjectOutput; +import java.util.Date; +import java.util.List; + +/** + * Simple POJO which could be stored as a value in Ignite cache + */ +public class Person implements Externalizable { + /** */ + private long personNum; + + /** */ + private String firstName; + + /** */ + private String lastName; + + /** */ + private String fullName; + + /** */ + private short age; + + /** */ + private boolean married; + + /** */ + private long height; + + /** */ + private float weight; + + /** */ + private Date birthDate; + + /** */ + private List phones; + + /** */ + public Person() { + } + + /** */ + public Person(long personNum, String firstName, String lastName, short age, boolean married, + long height, float weight, Date birthDate, List phones) { + this.personNum = personNum; + this.firstName = firstName; + this.lastName = lastName; + this.age = age; + this.married = married; + this.height = height; + this.weight = weight; + this.birthDate = birthDate; + this.phones = phones; + } + + + /** {@inheritDoc} */ + @Override public void writeExternal(ObjectOutput out) throws IOException { + out.writeLong(personNum); + out.writeObject(firstName); + out.writeObject(lastName); + out.writeShort(age); + out.writeBoolean(married); + out.writeLong(height); + out.writeFloat(weight); + out.writeObject(birthDate); + out.writeObject(phones); + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { + personNum = in.readLong(); + firstName = (String)in.readObject(); + lastName = (String)in.readObject(); + age = in.readShort(); + married = in.readBoolean(); + height = in.readLong(); + weight = in.readFloat(); + birthDate = (Date)in.readObject(); + phones = (List)in.readObject(); + } + + /** {@inheritDoc} */ + @SuppressWarnings("SimplifiableIfStatement") + @Override public boolean equals(Object obj) { + if (obj == null || !(obj instanceof Person)) + return false; + + Person person = (Person)obj; + + if (personNum != person.personNum) + return false; + + if ((firstName != null && !firstName.equals(person.firstName)) || + (person.firstName != null && !person.firstName.equals(firstName))) + return false; + + if ((lastName != null && !lastName.equals(person.lastName)) || + (person.lastName != null && !person.lastName.equals(lastName))) + return false; + + if ((birthDate != null && !birthDate.equals(person.birthDate)) || + (person.birthDate != null && !person.birthDate.equals(birthDate))) + return false; + + if ((phones != null && !phones.equals(person.phones)) || + (person.phones != null && !person.phones.equals(phones))) + return false; + + return age == person.age && married == person.married && + height == person.height && weight == person.weight; + } + + /** */ + @SuppressWarnings("SimplifiableIfStatement") + public boolean equalsPrimitiveFields(Object obj) { + if (obj == null || !(obj instanceof Person)) + return false; + + Person person = (Person)obj; + + if (personNum != person.personNum) + return false; + + if ((firstName != null && !firstName.equals(person.firstName)) || + (person.firstName != null && !person.firstName.equals(firstName))) + return false; + + if ((lastName != null && !lastName.equals(person.lastName)) || + (person.lastName != null && !person.lastName.equals(lastName))) + return false; + + if ((birthDate != null && 
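+            // null-safe comparison: birth dates differ when only one side is null or equals() reports a mismatch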
!birthDate.equals(person.birthDate)) || + (person.birthDate != null && !person.birthDate.equals(birthDate))) + return false; + + return age == person.age && married == person.married && + height == person.height && weight == person.weight; + } + + /** */ + public void setPersonNumber(long personNum) { + this.personNum = personNum; + } + + /** */ + public long getPersonNumber() { + return personNum; + } + + /** */ + public void setFirstName(String name) { + firstName = name; + fullName = firstName + " " + lastName; + } + + /** */ + public String getFirstName() { + return firstName; + } + + /** */ + public void setLastName(String name) { + lastName = name; + fullName = firstName + " " + lastName; + } + + /** */ + public String getLastName() { + return lastName; + } + + /** */ + public String getFullName() { + return fullName; + } + + /** */ + public void setAge(short age) { + this.age = age; + } + + /** */ + public short getAge() { + return age; + } + + /** */ + public void setMarried(boolean married) { + this.married = married; + } + + /** */ + public boolean getMarried() { + return married; + } + + /** */ + public void setHeight(long height) { + this.height = height; + } + + /** */ + public long getHeight() { + return height; + } + + /** */ + public void setWeight(float weight) { + this.weight = weight; + } + + /** */ + public float getWeight() { + return weight; + } + + /** */ + public void setBirthDate(Date date) { + birthDate = date; + } + + /** */ + public Date getBirthDate() { + return birthDate; + } + + /** */ + public void setPhones(List phones) { + this.phones = phones; + } + + /** */ + public List getPhones() { + return phones; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/PersonId.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/PersonId.java new file mode 100644 index 0000000000000..530e09b3da073 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/PersonId.java @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests.pojos; + +import java.io.Serializable; + +/** + * Simple POJO which could be stored as a key in Ignite cache + */ +public class PersonId implements Serializable { + /** */ + private String companyCode; + + /** */ + private String departmentCode; + + /** */ + private long personNum; + + /** */ + public PersonId() { + } + + /** */ + public PersonId(String companyCode, String departmentCode, long personNum) { + this.companyCode = companyCode; + this.departmentCode = departmentCode; + this.personNum = personNum; + } + + /** {@inheritDoc} */ + @SuppressWarnings("SimplifiableIfStatement") + @Override public boolean equals(Object obj) { + if (obj == null || !(obj instanceof PersonId)) + return false; + + PersonId id = (PersonId)obj; + + if ((companyCode != null && !companyCode.equals(id.companyCode)) || + (id.companyCode != null && !id.companyCode.equals(companyCode))) + return false; + + if ((companyCode != null && !companyCode.equals(id.companyCode)) || + (id.companyCode != null && !id.companyCode.equals(companyCode))) + return false; + + return personNum == id.personNum; + } + + /** {@inheritDoc} */ + @Override public int hashCode() { + String code = (companyCode == null ? "" : companyCode) + + (departmentCode == null ? "" : departmentCode) + + personNum; + + return code.hashCode(); + } + + /** */ + public void setCompanyCode(String code) { + companyCode = code; + } + + /** */ + public String getCompanyCode() { + return companyCode; + } + + /** */ + public void setDepartmentCode(String code) { + departmentCode = code; + } + + /** */ + public String getDepartmentCode() { + return departmentCode; + } + + /** */ + public void setPersonNumber(long personNum) { + this.personNum = personNum; + } + + /** */ + public long getPersonNumber() { + return personNum; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/Product.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/Product.java new file mode 100644 index 0000000000000..acdb10c922d49 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/Product.java @@ -0,0 +1,116 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests.pojos; + +/** + * Simple POJO to store information about product + */ +public class Product { + /** */ + private long id; + + /** */ + private String type; + + /** */ + private String title; + + /** */ + private String description; + + /** */ + private float price; + + /** */ + public Product() { + } + + /** */ + public Product(long id, String type, String title, String description, float price) { + this.id = id; + this.type = type; + this.title = title; + this.description = description; + this.price = price; + } + + /** {@inheritDoc} */ + @Override public int hashCode() { + return ((Long)id).hashCode(); + } + + /** {@inheritDoc} */ + @Override public boolean equals(Object obj) { + return obj instanceof Product && id == ((Product)obj).id; + } + + /** {@inheritDoc} */ + @Override public String toString() { + return id + ", " + price + ", " + type + ", " + title + ", " + description; + } + + /** */ + public void setId(long id) { + this.id = id; + } + + /** */ + public long getId() { + return id; + } + + /** */ + public void setType(String type) { + this.type = type; + } + + /** */ + public String getType() { + return type; + } + + /** */ + public void setTitle(String title) { + this.title = title; + } + + /** */ + public String getTitle() { + return title; + } + + /** */ + public void setDescription(String description) { + this.description = description; + } + + /** */ + public String getDescription() { + return description; + } + + /** */ + public void setPrice(float price) { + this.price = price; + } + + /** */ + public float getPrice() { + return price; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/ProductOrder.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/ProductOrder.java new file mode 100644 index 0000000000000..0c7ba679cbd06 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/ProductOrder.java @@ -0,0 +1,142 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests.pojos; + +import java.time.Instant; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; + +/** + * Simple POJO to store information about product order + */ +public class ProductOrder { + /** */ + private static final DateTimeFormatter FORMAT = + DateTimeFormatter.ofPattern("MM/dd/yyyy/S").withZone(ZoneId.systemDefault()); + + /** */ + private static final DateTimeFormatter FULL_FORMAT = + DateTimeFormatter.ofPattern("MM/dd/yyyy HH:mm:ss:S").withZone(ZoneId.systemDefault()); + + /** */ + private long id; + + /** */ + private long productId; + + /** */ + private Instant date; + + /** */ + private int amount; + + /** */ + private float price; + + /** */ + public ProductOrder() { + } + + /** */ + public ProductOrder(long id, Product product, Instant date, int amount) { + this(id, product.getId(), product.getPrice(), date, amount); + } + + /** */ + public ProductOrder(long id, long productId, float productPrice, Instant date, int amount) { + this.id = id; + this.productId = productId; + this.date = date; + this.amount = amount; + this.price = productPrice * amount; + + // if user ordered more than 10 items provide 5% discount + if (amount > 10) + price *= 0.95F; + } + + /** {@inheritDoc} */ + @Override public int hashCode() { + return ((Long)id).hashCode(); + } + + /** {@inheritDoc} */ + @Override public boolean equals(Object obj) { + return obj instanceof ProductOrder && id == ((ProductOrder)obj).id; + } + + /** {@inheritDoc} */ + @Override public String toString() { + return id + ", " + productId + ", " + FULL_FORMAT.format(date) + ", " + getDayMillisecond() + ", " + amount + ", " + price; + } + + /** */ + public void setId(long id) { + this.id = id; + } + + /** */ + public long getId() { + return id; + } + + /** */ + public void setProductId(long productId) { + this.productId = productId; + } + + /** */ + public long getProductId() { + return productId; + } + + /** */ + public void setDate(Instant date) { + this.date = date; + } + + /** */ + public Instant getDate() { + return date; + } + + /** */ + public void setAmount(int amount) { + this.amount = amount; + } + + /** */ + public int getAmount() { + return amount; + } + + /** */ + public void setPrice(float price) { + this.price = price; + } + + /** */ + public float getPrice() { + return price; + } + + /** */ + public String getDayMillisecond() { + return FORMAT.format(date); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/SimplePerson.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/SimplePerson.java new file mode 100644 index 0000000000000..dbafde942b96d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/SimplePerson.java @@ -0,0 +1,184 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.pojos; + +import java.io.Externalizable; +import java.io.IOException; +import java.io.ObjectInput; +import java.io.ObjectOutput; +import java.util.Date; +import java.util.List; +import org.apache.ignite.cache.query.annotations.QuerySqlField; + +/** + * Simple POJO without getters/setters which could be stored as a value in Ignite cache + */ +public class SimplePerson implements Externalizable { + /** */ + @QuerySqlField(name = "person_num") + private long personNum; + + /** */ + @QuerySqlField(name = "first_name") + private String firstName; + + /** */ + @QuerySqlField(name = "last_name") + private String lastName; + + /** */ + @QuerySqlField(name = "age") + private short age; + + /** */ + @QuerySqlField(name = "married", index = true) + private boolean married; + + /** */ + @QuerySqlField(name = "height") + private long height; + + /** */ + @QuerySqlField(name = "weight") + private float weight; + + /** */ + @QuerySqlField(name = "birth_date") + private Date birthDate; + + /** */ + @QuerySqlField(name = "phones") + private List phones; + + /** */ + public SimplePerson() { + } + + /** */ + public SimplePerson(Person person) { + this.personNum = person.getPersonNumber(); + this.firstName = person.getFirstName(); + this.lastName = person.getLastName(); + this.age = person.getAge(); + this.married = person.getMarried(); + this.height = person.getHeight(); + this.weight = person.getWeight(); + this.birthDate = person.getBirthDate(); + this.phones = person.getPhones(); + } + + /** */ + public SimplePerson(long personNum, String firstName, String lastName, short age, boolean married, + long height, float weight, Date birthDate, List phones) { + this.personNum = personNum; + this.firstName = firstName; + this.lastName = lastName; + this.age = age; + this.married = married; + this.height = height; + this.weight = weight; + this.birthDate = birthDate; + this.phones = phones; + } + + + /** {@inheritDoc} */ + @Override public void writeExternal(ObjectOutput out) throws IOException { + out.writeLong(personNum); + out.writeObject(firstName); + out.writeObject(lastName); + out.writeShort(age); + out.writeBoolean(married); + out.writeLong(height); + out.writeFloat(weight); + out.writeObject(birthDate); + out.writeObject(phones); + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { + personNum = in.readLong(); + firstName = (String)in.readObject(); + lastName = (String)in.readObject(); + age = in.readShort(); + married = in.readBoolean(); + height = in.readLong(); + weight = in.readFloat(); + birthDate = (Date)in.readObject(); + phones = (List)in.readObject(); + } + + /** {@inheritDoc} */ + @SuppressWarnings("SimplifiableIfStatement") + @Override public boolean equals(Object obj) { + if (obj == null || !(obj instanceof SimplePerson)) + return false; + + SimplePerson person = (SimplePerson)obj; + + if (personNum != person.personNum) + return false; + + if ((firstName != null && !firstName.equals(person.firstName)) || + (person.firstName != null && 
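+            // symmetric null-safe check on first names (either side may be null)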
!person.firstName.equals(firstName))) + return false; + + if ((lastName != null && !lastName.equals(person.lastName)) || + (person.lastName != null && !person.lastName.equals(lastName))) + return false; + + if ((birthDate != null && !birthDate.equals(person.birthDate)) || + (person.birthDate != null && !person.birthDate.equals(birthDate))) + return false; + + if ((phones != null && !phones.equals(person.phones)) || + (person.phones != null && !person.phones.equals(phones))) + return false; + + return age == person.age && married == person.married && + height == person.height && weight == person.weight; + } + + /** */ + @SuppressWarnings("SimplifiableIfStatement") + public boolean equalsPrimitiveFields(Object obj) { + if (obj == null || !(obj instanceof SimplePerson)) + return false; + + SimplePerson person = (SimplePerson)obj; + + if (personNum != person.personNum) + return false; + + if ((firstName != null && !firstName.equals(person.firstName)) || + (person.firstName != null && !person.firstName.equals(firstName))) + return false; + + if ((lastName != null && !lastName.equals(person.lastName)) || + (person.lastName != null && !person.lastName.equals(lastName))) + return false; + + if ((birthDate != null && !birthDate.equals(person.birthDate)) || + (person.birthDate != null && !person.birthDate.equals(birthDate))) + return false; + + return age == person.age && married == person.married && + height == person.height && weight == person.weight; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/SimplePersonId.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/SimplePersonId.java new file mode 100644 index 0000000000000..75e7c4eeb897b --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/SimplePersonId.java @@ -0,0 +1,87 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests.pojos; + +import java.io.Serializable; +import org.apache.ignite.cache.affinity.AffinityKeyMapped; +import org.apache.ignite.cache.query.annotations.QuerySqlField; + +/** + * Simple POJO without getters/setters which could be stored as a key in Ignite cache + */ +public class SimplePersonId implements Serializable { + /** */ + @AffinityKeyMapped + @QuerySqlField(name = "company_code") + public String companyCode; + + /** */ + @AffinityKeyMapped + @QuerySqlField(name = "department_code") + public String departmentCode; + + /** */ + @QuerySqlField(name = "person_num") + public long personNum; + + /** */ + public SimplePersonId() { + } + + /** */ + public SimplePersonId(PersonId personId) { + this.companyCode = personId.getCompanyCode(); + this.departmentCode = personId.getDepartmentCode(); + this.personNum = personId.getPersonNumber(); + } + + /** */ + public SimplePersonId(String companyCode, String departmentCode, long personNum) { + this.companyCode = companyCode; + this.departmentCode = departmentCode; + this.personNum = personNum; + } + + /** {@inheritDoc} */ + @SuppressWarnings("SimplifiableIfStatement") + @Override public boolean equals(Object obj) { + if (obj == null || !(obj instanceof SimplePersonId)) + return false; + + SimplePersonId id = (SimplePersonId)obj; + + if ((companyCode != null && !companyCode.equals(id.companyCode)) || + (id.companyCode != null && !id.companyCode.equals(companyCode))) + return false; + + if ((companyCode != null && !companyCode.equals(id.companyCode)) || + (id.companyCode != null && !id.companyCode.equals(companyCode))) + return false; + + return personNum == id.personNum; + } + + /** {@inheritDoc} */ + @Override public int hashCode() { + String code = (companyCode == null ? "" : companyCode) + + (departmentCode == null ? "" : departmentCode) + + personNum; + + return code.hashCode(); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/package-info.java new file mode 100644 index 0000000000000..daa86ad31d850 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/pojos/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/** + * Contains sample POJO objects used in unit tests + */ + +package org.apache.ignite.tests.pojos; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CacheStoreHelper.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CacheStoreHelper.java new file mode 100644 index 0000000000000..bf996cdd258a3 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CacheStoreHelper.java @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.utils; + +import java.lang.reflect.Field; +import org.apache.ignite.cache.store.CacheStore; +import org.apache.ignite.cache.store.CacheStoreSession; +import org.apache.ignite.cache.store.cassandra.CassandraCacheStore; +import org.apache.ignite.cache.store.cassandra.datasource.DataSource; +import org.apache.ignite.cache.store.cassandra.persistence.KeyValuePersistenceSettings; +import org.apache.ignite.testframework.junits.logger.GridTestLog4jLogger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.core.io.Resource; + +/** + * Helper class utilized by unit tests to get appropriate instance of {@link CacheStore} + */ +public class CacheStoreHelper { + /** */ + private static final Logger LOGGER = LogManager.getLogger(CacheStoreHelper.class.getName()); + + /** */ + public static CacheStore createCacheStore(String cacheName, Resource persistenceSettings, DataSource conn) { + return createCacheStore(cacheName, persistenceSettings, conn, null, LOGGER); + } + + /** */ + public static CacheStore createCacheStore(String cacheName, Resource persistenceSettings, DataSource conn, + CacheStoreSession session) { + return createCacheStore(cacheName, persistenceSettings, conn, session, LOGGER); + } + + /** */ + public static CacheStore createCacheStore(String cacheName, Resource persistenceSettings, DataSource conn, + Logger log) { + return createCacheStore(cacheName, persistenceSettings, conn, null, log); + } + + /** */ + public static CacheStore createCacheStore(String cacheName, Resource persistenceSettings, DataSource conn, + CacheStoreSession session, Logger log) { + CassandraCacheStore cacheStore = + new CassandraCacheStore<>(conn, new KeyValuePersistenceSettings(persistenceSettings), + Runtime.getRuntime().availableProcessors()); + + try { + Field sesField = CassandraCacheStore.class.getDeclaredField("storeSes"); + Field logField = CassandraCacheStore.class.getDeclaredField("log"); + + sesField.setAccessible(true); + logField.setAccessible(true); + + sesField.set(cacheStore, session != null ? 
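+                // use the supplied store session when given, otherwise a fresh TestCacheSession bound to the cache name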
session : new TestCacheSession(cacheName)); + logField.set(cacheStore, new GridTestLog4jLogger(log)); + } + catch (Throwable e) { + throw new RuntimeException("Failed to initialize test Ignite cache store", e); + } + + return cacheStore; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraAdminCredentials.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraAdminCredentials.java new file mode 100644 index 0000000000000..e7047f315bb45 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraAdminCredentials.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.utils; + +import org.apache.ignite.cache.store.cassandra.datasource.Credentials; + +/** + * Implementation of {@link Credentials} providing admin user/password to establish Cassandra session. + */ +public class CassandraAdminCredentials implements Credentials { + /** */ + private static final long serialVersionUID = 0L; + + /** {@inheritDoc} */ + @Override public String getUser() { + return CassandraHelper.getAdminUser(); + } + + /** {@inheritDoc} */ + @Override public String getPassword() { + return CassandraHelper.getAdminPassword(); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraHelper.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraHelper.java new file mode 100644 index 0000000000000..559294a99068f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraHelper.java @@ -0,0 +1,366 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
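Editor's sketch (not part of the patch): how a test might obtain a CacheStore through the reflection-based CacheStoreHelper above, wiring it to the admin DataSource exposed by CassandraHelper. The persistence-settings resource path and the cache name below are placeholders, not names taken from this patch.

import org.apache.ignite.cache.store.CacheStore;
import org.apache.ignite.tests.utils.CacheStoreHelper;
import org.apache.ignite.tests.utils.CassandraHelper;
import org.springframework.core.io.ClassPathResource;

public class CacheStoreHelperUsageSketch {
    /** Builds a store wired to the admin data source; the settings resource is a placeholder. */
    @SuppressWarnings("unchecked")
    public static CacheStore<Long, Long> primitiveStore() {
        return CacheStoreHelper.createCacheStore(
            "cache1",                                                  // cache name passed to TestCacheSession
            new ClassPathResource("placeholder/persistence-settings.xml"),
            CassandraHelper.getAdminDataSrc());                        // admin connection from connection-settings.xml
    }
}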
+ */ + +package org.apache.ignite.tests.utils; + +import java.lang.reflect.Field; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.URL; +import java.util.LinkedList; +import java.util.List; +import java.util.ResourceBundle; +import java.util.concurrent.atomic.AtomicInteger; +import com.datastax.driver.core.Cluster; +import com.datastax.driver.core.PreparedStatement; +import com.datastax.driver.core.ResultSet; +import com.datastax.driver.core.Session; +import com.datastax.driver.core.Statement; +import org.apache.ignite.cache.store.cassandra.datasource.DataSource; +import org.apache.ignite.cache.store.cassandra.session.pool.SessionPool; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.apache.ignite.lifecycle.LifecycleEventType; +import org.apache.ignite.testframework.junits.logger.GridTestLog4jLogger; +import org.apache.logging.log4j.Logger; +import org.springframework.context.ApplicationContext; +import org.springframework.context.support.ClassPathXmlApplicationContext; + +/** + * Helper class providing bunch of utility methods to work with Cassandra + */ +public class CassandraHelper { + /** */ + private static final ResourceBundle CREDENTIALS = ResourceBundle.getBundle("org/apache/ignite/tests/cassandra/credentials"); + + /** */ + private static final ResourceBundle CONNECTION = ResourceBundle.getBundle("org/apache/ignite/tests/cassandra/connection"); + + /** */ + private static final ResourceBundle KEYSPACES = ResourceBundle.getBundle("org/apache/ignite/tests/cassandra/keyspaces"); + + /** */ + private static final String EMBEDDED_CASSANDRA_YAML = "org/apache/ignite/tests/cassandra/embedded-cassandra.yaml"; + + /** */ + private static final ApplicationContext connectionContext = + new ClassPathXmlApplicationContext("org/apache/ignite/tests/cassandra/connection-settings.xml"); + + /** */ + private static DataSource adminDataSrc; + + /** */ + private static DataSource regularDataSrc; + + /** */ + private static Cluster adminCluster; + + /** */ + private static Cluster regularCluster; + + /** */ + private static Session adminSes; + + /** */ + private static Session regularSes; + + /** */ + private static CassandraLifeCycleBean embeddedCassandraBean; + + /** */ + public static String getAdminUser() { + return CREDENTIALS.getString("admin.user"); + } + + /** */ + public static String getAdminPassword() { + return CREDENTIALS.getString("admin.password"); + } + + /** */ + public static String getRegularUser() { + return CREDENTIALS.getString("regular.user"); + } + + /** */ + public static String getRegularPassword() { + return CREDENTIALS.getString("regular.password"); + } + + /** */ + public static String[] getTestKeyspaces() { + return KEYSPACES.getString("keyspaces").split(","); + } + + /** */ + private static AtomicInteger refCounter = new AtomicInteger(0); + + /** */ + public static String[] getContactPointsArray() { + String[] points = CONNECTION.getString("contact.points").split(","); + + if (points.length == 0) + throw new RuntimeException("No Cassandra contact points specified"); + + for (int i = 0; i < points.length; i++) + points[i] = points[i].trim(); + + return points; + } + + /** */ + public static List getContactPoints() { + String[] points = getContactPointsArray(); + + List contactPoints = new LinkedList<>(); + + for (String point : points) { + if (point.contains(":")) + continue; + + try { + contactPoints.add(InetAddress.getByName(point)); + } + catch (Throwable e) { + throw new IllegalArgumentException("Incorrect 
contact point '" + point + + "' specified for Cassandra cache storage", e); + } + } + + return contactPoints; + } + + /** */ + public static List getContactPointsWithPorts() { + String[] points = getContactPointsArray(); + + List contactPoints = new LinkedList<>(); + + for (String point : points) { + if (!point.contains(":")) + continue; + + String[] chunks = point.split(":"); + + try { + contactPoints.add(InetSocketAddress.createUnresolved(chunks[0].trim(), Integer.parseInt(chunks[1].trim()))); + } + catch (Throwable e) { + throw new IllegalArgumentException("Incorrect contact point '" + point + + "' specified for Cassandra cache storage", e); + } + } + + return contactPoints; + } + + /** + * Checks if embedded Cassandra should be used for unit tests + * @return true if embedded Cassandra should be used + */ + public static boolean useEmbeddedCassandra() { + String[] contactPoints = getContactPointsArray(); + + return contactPoints != null && contactPoints.length == 1 && contactPoints[0].trim().startsWith("127.0.0.1"); + } + + /** */ + public static void dropTestKeyspaces() { + String[] keyspaces = getTestKeyspaces(); + + for (String keyspace : keyspaces) { + try { + executeWithAdminCredentials("DROP KEYSPACE IF EXISTS " + keyspace + ";"); + } + catch (Throwable e) { + throw new RuntimeException("Failed to drop keyspace: " + keyspace, e); + } + } + } + + /** */ + public static ResultSet executeWithAdminCredentials(String statement, Object... args) { + if (args == null || args.length == 0) + return adminSession().execute(statement); + + PreparedStatement ps = adminSession().prepare(statement); + return adminSession().execute(ps.bind(args)); + } + + /** */ + public static ResultSet executeWithRegularCredentials(String statement, Object... args) { + if (args == null || args.length == 0) + return regularSession().execute(statement); + + PreparedStatement ps = regularSession().prepare(statement); + return regularSession().execute(ps.bind(args)); + } + + /** */ + public static ResultSet executeWithAdminCredentials(Statement statement) { + return adminSession().execute(statement); + } + + /** */ + public static ResultSet executeWithRegularCredentials(Statement statement) { + return regularSession().execute(statement); + } + + /** */ + public static synchronized DataSource getAdminDataSrc() { + if (adminDataSrc != null) + return adminDataSrc; + + return adminDataSrc = (DataSource)connectionContext.getBean("cassandraAdminDataSource"); + } + + /** */ + public static synchronized DataSource getRegularDataSrc() { + if (regularDataSrc != null) + return regularDataSrc; + + return regularDataSrc = (DataSource)connectionContext.getBean("cassandraRegularDataSource"); + } + + /** */ + public static void testAdminConnection() { + try { + adminSession(); + } + catch (Throwable e) { + throw new RuntimeException("Failed to check admin connection to Cassandra", e); + } + } + + /** */ + public static void testRegularConnection() { + try { + regularSession(); + } + catch (Throwable e) { + throw new RuntimeException("Failed to check regular connection to Cassandra", e); + } + } + + /** */ + public static synchronized void releaseCassandraResources() { + try { + if (adminSes != null && !adminSes.isClosed()) + U.closeQuiet(adminSes); + } + finally { + adminSes = null; + } + + try { + if (adminCluster != null && !adminCluster.isClosed()) + U.closeQuiet(adminCluster); + } + finally { + adminCluster = null; + } + + try { + if (regularSes != null && !regularSes.isClosed()) + U.closeQuiet(regularSes); + } + finally { + 
regularSes = null; + } + + try { + if (regularCluster != null && !regularCluster.isClosed()) + U.closeQuiet(regularCluster); + } + finally { + regularCluster = null; + } + + SessionPool.release(); + } + + /** */ + private static synchronized Session adminSession() { + if (adminSes != null) + return adminSes; + + try { + Cluster.Builder builder = Cluster.builder(); + builder = builder.withCredentials(getAdminUser(), getAdminPassword()); + builder.addContactPoints(getContactPoints()); + builder.addContactPointsWithPorts(getContactPointsWithPorts()); + + adminCluster = builder.build(); + return adminSes = adminCluster.connect(); + } + catch (Throwable e) { + throw new RuntimeException("Failed to create admin session to Cassandra database", e); + } + } + + /** */ + private static synchronized Session regularSession() { + if (regularSes != null) + return regularSes; + + try { + Cluster.Builder builder = Cluster.builder(); + builder = builder.withCredentials(getRegularUser(), getRegularPassword()); + builder.addContactPoints(getContactPoints()); + builder.addContactPointsWithPorts(getContactPointsWithPorts()); + + regularCluster = builder.build(); + return regularSes = regularCluster.connect(); + } + catch (Throwable e) { + throw new RuntimeException("Failed to create regular session to Cassandra database", e); + } + } + + /** + * Note that setting of cassandra.storagedir property is expected. + */ + public static void startEmbeddedCassandra(Logger log) { + if (refCounter.getAndIncrement() > 0) + return; + + ClassLoader clsLdr = CassandraHelper.class.getClassLoader(); + URL url = clsLdr.getResource(EMBEDDED_CASSANDRA_YAML); + + embeddedCassandraBean = new CassandraLifeCycleBean(); + embeddedCassandraBean.setCassandraConfigFile(url.getFile()); + + try { + Field logField = CassandraLifeCycleBean.class.getDeclaredField("log"); + logField.setAccessible(true); + logField.set(embeddedCassandraBean, new GridTestLog4jLogger(log)); + } + catch (Throwable e) { + throw new RuntimeException("Failed to initialize logger for CassandraLifeCycleBean", e); + } + + embeddedCassandraBean.onLifecycleEvent(LifecycleEventType.BEFORE_NODE_START); + } + + /** */ + public static void stopEmbeddedCassandra() { + if (refCounter.decrementAndGet() > 0) + return; + + if (embeddedCassandraBean != null) + embeddedCassandraBean.onLifecycleEvent(LifecycleEventType.BEFORE_NODE_STOP); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraLifeCycleBean.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraLifeCycleBean.java new file mode 100644 index 0000000000000..6ddc7544b0593 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraLifeCycleBean.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
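Editor's sketch (not part of the patch): a typical call sequence against the CassandraHelper utility above. The keyspace and table names are invented for the example.

import org.apache.ignite.tests.utils.CassandraHelper;

public class CassandraHelperUsageSketch {
    public static void main(String[] args) {
        try {
            // Fails fast if the admin credentials/contact points are not usable.
            CassandraHelper.testAdminConnection();

            CassandraHelper.executeWithAdminCredentials(
                "CREATE KEYSPACE IF NOT EXISTS demo_ks WITH replication = " +
                    "{'class' : 'SimpleStrategy', 'replication_factor' : 1};");

            CassandraHelper.executeWithAdminCredentials(
                "CREATE TABLE IF NOT EXISTS demo_ks.demo (id bigint PRIMARY KEY, val text);");

            // Positional arguments are bound through a prepared statement.
            CassandraHelper.executeWithAdminCredentials(
                "INSERT INTO demo_ks.demo (id, val) VALUES (?, ?);", 1L, "one");
        }
        finally {
            // Closes the cached sessions/clusters and the driver session pool.
            CassandraHelper.releaseCassandraResources();
        }
    }
}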
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.utils; + +import org.apache.cassandra.service.CassandraDaemon; +import org.apache.ignite.IgniteLogger; +import org.apache.ignite.lifecycle.LifecycleBean; +import org.apache.ignite.lifecycle.LifecycleEventType; +import org.apache.ignite.resources.LoggerResource; + +/** + * Implementation of {@link LifecycleBean} to start embedded Cassandra instance on Ignite cluster startup + */ +public class CassandraLifeCycleBean implements LifecycleBean { + /** System property specifying Cassandra jmx port */ + private static final String CASSANDRA_JMX_PORT_PROP = "cassandra.jmx.local.port"; + + /** System property specifying Cassandra YAML config file */ + private static final String CASSANDRA_CONFIG_PROP = "cassandra.config"; + + /** Prefix for file path syntax */ + private static final String FILE_PREFIX = "file:///"; + + /** Auto-injected logger instance. */ + @LoggerResource + private IgniteLogger log; + + /** Instance of embedded Cassandra database */ + private CassandraDaemon embeddedCassandraDaemon; + + /** JMX port for embedded Cassandra instance */ + private String jmxPort; + + /** YAML config file for embedded Cassandra */ + private String cassandraCfgFile; + + /** + * Returns JMX port for embedded Cassandra + * @return JMX port + */ + public String getJmxPort() { + return jmxPort; + } + + /** + * Setter for embedded Cassandra JMX port + * @param jmxPort embedded Cassandra JMX port + */ + public void setJmxPort(String jmxPort) { + this.jmxPort = jmxPort; + } + + /** + * Returns embedded Cassandra YAML config file + * @return YAML config file + */ + public String getCassandraConfigFile() { + return cassandraCfgFile; + } + + /** + * Setter for embedded Cassandra YAML config file + * @param cassandraCfgFile YAML config file + */ + public void setCassandraConfigFile(String cassandraCfgFile) { + this.cassandraCfgFile = cassandraCfgFile; + } + + /** {@inheritDoc} */ + @Override public void onLifecycleEvent(LifecycleEventType evt) { + if (evt == LifecycleEventType.BEFORE_NODE_START) + startEmbeddedCassandra(); + else if (evt == LifecycleEventType.BEFORE_NODE_STOP) + stopEmbeddedCassandra(); + } + + /** + * Starts embedded Cassandra instance + */ + private void startEmbeddedCassandra() { + if (log != null) { + log.info("-------------------------------"); + log.info("| Starting embedded Cassandra |"); + log.info("-------------------------------"); + } + + try { + if (jmxPort != null) + System.setProperty(CASSANDRA_JMX_PORT_PROP, jmxPort); + + if (cassandraCfgFile != null) + System.setProperty(CASSANDRA_CONFIG_PROP, FILE_PREFIX + cassandraCfgFile); + + embeddedCassandraDaemon = new CassandraDaemon(true); + embeddedCassandraDaemon.applyConfig(); + embeddedCassandraDaemon.init(null); + embeddedCassandraDaemon.start(); + } + catch (Exception e) { + throw new RuntimeException("Failed to start embedded Cassandra", e); + } + + if (log != null) { + log.info("------------------------------"); + log.info("| Embedded Cassandra started |"); + log.info("------------------------------"); + } + } + + /** + * Stops embedded Cassandra instance + */ + private void 
stopEmbeddedCassandra() { + if (log != null) { + log.info("-------------------------------"); + log.info("| Stopping embedded Cassandra |"); + log.info("-------------------------------"); + } + + if (embeddedCassandraDaemon != null) { + try { + embeddedCassandraDaemon.deactivate(); + } + catch (Throwable e) { + throw new RuntimeException("Failed to stop embedded Cassandra", e); + } + } + + if (log != null) { + log.info("------------------------------"); + log.info("| Embedded Cassandra stopped |"); + log.info("------------------------------"); + } + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraRegularCredentials.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraRegularCredentials.java new file mode 100644 index 0000000000000..7546c9bb5d3f0 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/CassandraRegularCredentials.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.utils; + +import org.apache.ignite.cache.store.cassandra.datasource.Credentials; + +/** + * Implementation of {@link Credentials} providing regular user/password to establish Cassandra session. + */ +public class CassandraRegularCredentials implements Credentials { + /** */ + private static final long serialVersionUID = 0L; + + /** {@inheritDoc} */ + @Override public String getUser() { + return CassandraHelper.getRegularUser(); + } + + /** {@inheritDoc} */ + @Override public String getPassword() { + return CassandraHelper.getRegularPassword(); + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/TestCacheSession.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/TestCacheSession.java new file mode 100644 index 0000000000000..c4272bdc577c3 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/TestCacheSession.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
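Editor's sketch (not part of the patch): attaching the lifecycle bean above to an Ignite node so that an embedded Cassandra instance starts and stops with it. The JMX port and YAML path are placeholder values.

import org.apache.ignite.Ignite;
import org.apache.ignite.Ignition;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.tests.utils.CassandraLifeCycleBean;

public class EmbeddedCassandraNodeSketch {
    public static void main(String[] args) {
        CassandraLifeCycleBean cassandra = new CassandraLifeCycleBean();
        cassandra.setJmxPort("9876");                                     // placeholder JMX port
        cassandra.setCassandraConfigFile("/tmp/embedded-cassandra.yaml"); // placeholder YAML path

        IgniteConfiguration cfg = new IgniteConfiguration();
        // BEFORE_NODE_START boots Cassandra, BEFORE_NODE_STOP shuts it down.
        cfg.setLifecycleBeans(cassandra);

        try (Ignite ignite = Ignition.start(cfg)) {
            // The node and the embedded Cassandra are now both running.
        }
    }
}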
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.utils; + +import java.util.Map; +import org.apache.ignite.cache.store.CacheStoreSession; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.apache.ignite.transactions.Transaction; +import org.jetbrains.annotations.Nullable; + +/** + * Test implementation of {@link CacheStoreSession} for the unit tests purposes + */ +public class TestCacheSession implements CacheStoreSession { + /** */ + private String cacheName; + + /** */ + private Transaction tx; + + /** */ + private Map props = U.newHashMap(1); + + /** */ + private Object attach; + + /** */ + public TestCacheSession(String cacheName) { + this.cacheName = cacheName; + } + + /** */ + public TestCacheSession(String cacheName, Transaction tx, Map props) { + this.cacheName = cacheName; + this.tx = tx; + this.props = props; + } + + /** */ + public void newSession(@Nullable Transaction tx) { + this.tx = tx; + props = null; + } + + /** {@inheritDoc} */ + @Nullable @Override public Transaction transaction() { + return tx; + } + + /** {@inheritDoc} */ + @Override public boolean isWithinTransaction() { + return transaction() != null; + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override public Object attach(@Nullable Object attach) { + Object prev = this.attach; + this.attach = attach; + return prev; + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Nullable @Override public T attachment() { + return (T)attach; + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override public Map properties() { + return (Map)props; + } + + /** {@inheritDoc} */ + @Nullable @Override public String cacheName() { + return cacheName; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/TestTransaction.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/TestTransaction.java new file mode 100644 index 0000000000000..6681dd6a224d0 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/TestTransaction.java @@ -0,0 +1,157 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.tests.utils; + +import java.util.UUID; +import org.apache.ignite.IgniteException; +import org.apache.ignite.lang.IgniteAsyncSupport; +import org.apache.ignite.lang.IgniteFuture; +import org.apache.ignite.lang.IgniteUuid; +import org.apache.ignite.transactions.Transaction; +import org.apache.ignite.transactions.TransactionConcurrency; +import org.apache.ignite.transactions.TransactionIsolation; +import org.apache.ignite.transactions.TransactionState; +import org.jetbrains.annotations.Nullable; + +/** + * Dummy transaction for test purposes. + */ +public class TestTransaction implements Transaction { + /** */ + private final IgniteUuid xid = IgniteUuid.randomUuid(); + + /** {@inheritDoc} */ + @Nullable @Override public IgniteUuid xid() { + return xid; + } + + /** {@inheritDoc} */ + @Nullable @Override public UUID nodeId() { + return null; + } + + /** {@inheritDoc} */ + @Override public long threadId() { + return 0; + } + + /** {@inheritDoc} */ + @Override public long startTime() { + return 0; + } + + /** {@inheritDoc} */ + @Nullable @Override public TransactionIsolation isolation() { + return null; + } + + /** {@inheritDoc} */ + @Nullable @Override public TransactionConcurrency concurrency() { + return null; + } + + /** {@inheritDoc} */ + @Override public boolean implicit() { + return false; + } + + /** {@inheritDoc} */ + @Override public boolean isInvalidate() { + return false; + } + + /** {@inheritDoc} */ + @Nullable @Override public TransactionState state() { + return null; + } + + /** {@inheritDoc} */ + @Override public long timeout() { + return 0; + } + + /** {@inheritDoc} */ + @Override public long timeout(long timeout) { + return 0; + } + + /** {@inheritDoc} */ + @Override public boolean setRollbackOnly() { + return false; + } + + /** {@inheritDoc} */ + @Override public boolean isRollbackOnly() { + return false; + } + + /** {@inheritDoc} */ + @Override public void commit() { + // No-op. + } + + /** {@inheritDoc} */ + @Override public IgniteFuture commitAsync() throws IgniteException { + return null; + } + + /** {@inheritDoc} */ + @Override public void close() { + // No-op. + } + + /** {@inheritDoc} */ + @Override public IgniteAsyncSupport withAsync() { + throw new UnsupportedOperationException(); + } + + /** {@inheritDoc} */ + @Override public boolean isAsync() { + return false; + } + + /** {@inheritDoc} */ + @Override public IgniteFuture future() { + return null; + } + + /** {@inheritDoc} */ + @Override public void rollback() { + // No-op. + } + + /** {@inheritDoc} */ + @Override public IgniteFuture rollbackAsync() throws IgniteException { + return null; + } + + /** {@inheritDoc} */ + @Override public void suspend() throws IgniteException{ + // No-op. + } + + /** {@inheritDoc} */ + @Nullable @Override public String label() { + return null; + } + + /** {@inheritDoc} */ + @Override public void resume() throws IgniteException { + // No-op. + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/TestsHelper.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/TestsHelper.java new file mode 100644 index 0000000000000..67c00f8aa3373 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/TestsHelper.java @@ -0,0 +1,752 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.tests.utils; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.ZoneOffset; +import java.util.Collection; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.ResourceBundle; +import java.util.Set; +import org.apache.ignite.cache.store.cassandra.common.SystemHelper; +import org.apache.ignite.internal.processors.cache.CacheEntryImpl; +import org.apache.ignite.tests.load.Generator; +import org.apache.ignite.tests.pojos.Person; +import org.apache.ignite.tests.pojos.PersonId; +import org.apache.ignite.tests.pojos.Product; +import org.apache.ignite.tests.pojos.ProductOrder; +import org.apache.ignite.tests.pojos.SimplePerson; +import org.apache.ignite.tests.pojos.SimplePersonId; +import org.springframework.core.io.ClassPathResource; + +/** + * Helper class for all tests + */ +public class TestsHelper { + /** */ + private static final String LETTERS_ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; + + /** */ + private static final String NUMBERS_ALPHABET = "0123456789"; + + /** */ + private static final Random RANDOM = new Random(System.currentTimeMillis()); + + /** */ + private static final ResourceBundle TESTS_SETTINGS = ResourceBundle.getBundle("tests"); + + /** */ + private static final int BULK_OPERATION_SIZE = parseTestSettings("bulk.operation.size"); + + /** */ + private static final String LOAD_TESTS_CACHE_NAME = TESTS_SETTINGS.getString("load.tests.cache.name"); + + /** */ + private static final int LOAD_TESTS_THREADS_COUNT = parseTestSettings("load.tests.threads.count"); + + /** */ + private static final int LOAD_TESTS_WARMUP_PERIOD = parseTestSettings("load.tests.warmup.period"); + + /** */ + private static final int LOAD_TESTS_EXECUTION_TIME = parseTestSettings("load.tests.execution.time"); + + /** */ + private static final int LOAD_TESTS_REQUESTS_LATENCY = parseTestSettings("load.tests.requests.latency"); + + /** */ + private static final int TRANSACTION_PRODUCTS_COUNT = parseTestSettings("transaction.products.count"); + + /** */ + private static final int TRANSACTION_ORDERS_COUNT = parseTestSettings("transaction.orders.count"); + + /** */ + private static final int ORDERS_YEAR; + + /** */ + private static final int ORDERS_MONTH; + + /** */ + private static final int ORDERS_DAY; + + /** */ + private static final String LOAD_TESTS_PERSISTENCE_SETTINGS = TESTS_SETTINGS.getString("load.tests.persistence.settings"); + + /** */ + private static final String LOAD_TESTS_IGNITE_CONFIG = TESTS_SETTINGS.getString("load.tests.ignite.config"); + + /** */ + private static final Generator LOAD_TESTS_KEY_GENERATOR; + + /** */ + private static final Generator LOAD_TESTS_VALUE_GENERATOR; + + /** */ + private static final 
String HOST_PREFIX; + + static { + try { + LOAD_TESTS_KEY_GENERATOR = (Generator)Class.forName(TESTS_SETTINGS.getString("load.tests.key.generator")).newInstance(); + LOAD_TESTS_VALUE_GENERATOR = (Generator)Class.forName(TESTS_SETTINGS.getString("load.tests.value.generator")).newInstance(); + + String[] parts = SystemHelper.HOST_IP.split("\\."); + + String prefix = parts[3]; + prefix = prefix.length() > 2 ? prefix.substring(prefix.length() - 2) : prefix; + + HOST_PREFIX = prefix; + + LocalDate date = LocalDate.now(); + + String year = TESTS_SETTINGS.getString("orders.year"); + ORDERS_YEAR = !year.trim().isEmpty() ? Integer.parseInt(year) : date.getYear(); + + String month = TESTS_SETTINGS.getString("orders.month"); + ORDERS_MONTH = !month.trim().isEmpty() ? (Integer.parseInt(month) + 1) : date.getMonthValue(); + + String day = TESTS_SETTINGS.getString("orders.day"); + ORDERS_DAY = !day.trim().isEmpty() ? Integer.parseInt(day) : date.getDayOfMonth(); + } + catch (Throwable e) { + throw new RuntimeException("Failed to initialize TestsHelper", e); + } + } + + /** */ + private static int parseTestSettings(String name) { + return Integer.parseInt(TESTS_SETTINGS.getString(name)); + } + + /** */ + public static int getLoadTestsThreadsCount() { + return LOAD_TESTS_THREADS_COUNT; + } + + /** */ + public static int getLoadTestsWarmupPeriod() { + return LOAD_TESTS_WARMUP_PERIOD; + } + + /** */ + public static int getLoadTestsExecutionTime() { + return LOAD_TESTS_EXECUTION_TIME; + } + + /** */ + public static int getLoadTestsRequestsLatency() { + return LOAD_TESTS_REQUESTS_LATENCY; + } + + /** */ + public static ClassPathResource getLoadTestsPersistenceSettings() { + return new ClassPathResource(LOAD_TESTS_PERSISTENCE_SETTINGS); + } + + /** */ + public static String getLoadTestsIgniteConfig() { + return LOAD_TESTS_IGNITE_CONFIG; + } + + /** */ + public static int getBulkOperationSize() { + return BULK_OPERATION_SIZE; + } + + /** */ + public static String getLoadTestsCacheName() { + return LOAD_TESTS_CACHE_NAME; + } + + /** */ + public static Object generateLoadTestsKey(long i) { + return LOAD_TESTS_KEY_GENERATOR.generate(i); + } + + /** */ + public static Object generateLoadTestsValue(long i) { + return LOAD_TESTS_VALUE_GENERATOR.generate(i); + } + + /** */ + @SuppressWarnings("unchecked") + public static CacheEntryImpl generateLoadTestsEntry(long i) { + return new CacheEntryImpl(TestsHelper.generateLoadTestsKey(i), TestsHelper.generateLoadTestsValue(i)); + } + + /** */ + public static Collection getKeys(Collection> entries) { + List list = new LinkedList<>(); + + for (CacheEntryImpl entry : entries) + list.add(entry.getKey()); + + return list; + } + + /** */ + public static Map generateLongsMap() { + return generateLongsMap(BULK_OPERATION_SIZE); + } + + /** */ + public static Map generateLongsMap(int cnt) { + Map map = new HashMap<>(); + + for (long i = 0; i < cnt; i++) + map.put(i, i + 123); + + return map; + } + + /** */ + public static Collection> generateLongsEntries() { + return generateLongsEntries(BULK_OPERATION_SIZE); + } + + /** */ + public static Collection> generateLongsEntries(int cnt) { + Collection> entries = new LinkedList<>(); + + for (long i = 0; i < cnt; i++) + entries.add(new CacheEntryImpl<>(i, i + 123)); + + return entries; + } + + /** */ + public static Map generateStringsMap() { + return generateStringsMap(BULK_OPERATION_SIZE); + } + + /** */ + public static Map generateStringsMap(int cnt) { + Map map = new HashMap<>(); + + for (int i = 0; i < cnt; i++) + 
map.put(Integer.toString(i), randomString(5)); + + return map; + } + + /** */ + public static Collection> generateStringsEntries() { + return generateStringsEntries(BULK_OPERATION_SIZE); + } + + /** */ + public static Collection> generateStringsEntries(int cnt) { + Collection> entries = new LinkedList<>(); + + for (int i = 0; i < cnt; i++) + entries.add(new CacheEntryImpl<>(Integer.toString(i), randomString(5))); + + return entries; + } + + /** */ + public static Map generateLongsPersonsMap() { + Map map = new HashMap<>(); + + for (long i = 0; i < BULK_OPERATION_SIZE; i++) + map.put(i, generateRandomPerson(i)); + + return map; + } + + /** */ + public static Collection> generateLongsPersonsEntries() { + Collection> entries = new LinkedList<>(); + + for (long i = 0; i < BULK_OPERATION_SIZE; i++) + entries.add(new CacheEntryImpl<>(i, generateRandomPerson(i))); + + return entries; + } + + /** */ + public static Map generateSimplePersonIdsPersonsMap() { + return generateSimplePersonIdsPersonsMap(BULK_OPERATION_SIZE); + } + + /** */ + public static Map generateSimplePersonIdsPersonsMap(int cnt) { + Map map = new HashMap<>(); + + for (int i = 0; i < cnt; i++) { + PersonId id = generateRandomPersonId(); + + map.put(new SimplePersonId(id), new SimplePerson(generateRandomPerson(id.getPersonNumber()))); + } + + return map; + } + + /** */ + public static Map generatePersonIdsPersonsMap() { + return generatePersonIdsPersonsMap(BULK_OPERATION_SIZE); + } + + /** */ + public static Map generatePersonIdsPersonsMap(int cnt) { + Map map = new HashMap<>(); + + for (int i = 0; i < cnt; i++) { + PersonId id = generateRandomPersonId(); + + map.put(id, generateRandomPerson(id.getPersonNumber())); + } + + return map; + } + + /** */ + public static Collection> generateSimplePersonIdsPersonsEntries() { + return generateSimplePersonIdsPersonsEntries(BULK_OPERATION_SIZE); + } + + /** */ + public static Collection> generateSimplePersonIdsPersonsEntries(int cnt) { + Collection> entries = new LinkedList<>(); + + for (int i = 0; i < cnt; i++) { + PersonId id = generateRandomPersonId(); + + entries.add(new CacheEntryImpl<>(new SimplePersonId(id), new SimplePerson(generateRandomPerson(id.getPersonNumber())))); + } + + return entries; + } + + /** */ + public static Collection> generatePersonIdsPersonsEntries() { + return generatePersonIdsPersonsEntries(BULK_OPERATION_SIZE); + } + + /** */ + public static Collection> generatePersonIdsPersonsEntries(int cnt) { + Collection> entries = new LinkedList<>(); + + for (int i = 0; i < cnt; i++) { + PersonId id = generateRandomPersonId(); + + entries.add(new CacheEntryImpl<>(id, generateRandomPerson(id.getPersonNumber()))); + } + + return entries; + } + + /** */ + public static List> generateProductEntries() { + List> entries = new LinkedList<>(); + + for (long i = 0; i < BULK_OPERATION_SIZE; i++) + entries.add(new CacheEntryImpl<>(i, generateRandomProduct(i))); + + return entries; + } + + /** */ + public static Collection getProductIds(Collection> entries) { + List ids = new LinkedList<>(); + + for (CacheEntryImpl entry : entries) + ids.add(entry.getKey()); + + return ids; + } + + /** */ + public static Map generateProductsMap() { + return generateProductsMap(BULK_OPERATION_SIZE); + } + + /** */ + public static Map generateProductsMap(int count) { + Map map = new HashMap<>(); + + for (long i = 0; i < count; i++) + map.put(i, generateRandomProduct(i)); + + return map; + } + + /** */ + public static Collection> generateOrderEntries() { + Collection> entries = new LinkedList<>(); + + for 
(long i = 0; i < BULK_OPERATION_SIZE; i++) { + ProductOrder order = generateRandomOrder(i); + entries.add(new CacheEntryImpl<>(order.getId(), order)); + } + + return entries; + } + + /** */ + public static Map generateOrdersMap() { + return generateOrdersMap(BULK_OPERATION_SIZE); + } + + /** */ + public static Map generateOrdersMap(int count) { + Map map = new HashMap<>(); + + for (long i = 0; i < count; i++) { + ProductOrder order = generateRandomOrder(i); + map.put(order.getId(), order); + } + + return map; + } + + /** */ + public static Map>> generateOrdersPerProductEntries( + Collection> products) { + return generateOrdersPerProductEntries(products, TRANSACTION_ORDERS_COUNT); + } + + /** */ + public static Map>> generateOrdersPerProductEntries( + Collection> products, int ordersPerProductCount) { + Map>> map = new HashMap<>(); + + for (CacheEntryImpl entry : products) { + List> orders = new LinkedList<>(); + + for (long i = 0; i < ordersPerProductCount; i++) { + ProductOrder order = generateRandomOrder(entry.getKey()); + orders.add(new CacheEntryImpl<>(order.getId(), order)); + } + + map.put(entry.getKey(), orders); + } + + return map; + } + + /** */ + public static Map> generateOrdersPerProductMap(Map products) { + return generateOrdersPerProductMap(products, TRANSACTION_ORDERS_COUNT); + } + + /** */ + public static Map> generateOrdersPerProductMap(Map products, + int ordersPerProductCount) { + Map> map = new HashMap<>(); + + for (Map.Entry entry : products.entrySet()) { + Map orders = new HashMap<>(); + + for (long i = 0; i < ordersPerProductCount; i++) { + ProductOrder order = generateRandomOrder(entry.getKey()); + orders.put(order.getId(), order); + } + + map.put(entry.getKey(), orders); + } + + return map; + } + + /** */ + public static Collection getOrderIds(Map>> orders) { + Set ids = new HashSet<>(); + + for (Long key : orders.keySet()) { + for (CacheEntryImpl entry : orders.get(key)) + ids.add(entry.getKey()); + } + + return ids; + } + + /** */ + public static SimplePerson generateRandomSimplePerson(long personNum) { + int phonesCnt = RANDOM.nextInt(4); + + List phones = new LinkedList<>(); + + for (int i = 0; i < phonesCnt; i++) + phones.add(randomNumber(4)); + + return new SimplePerson(personNum, randomString(4), randomString(4), (short)RANDOM.nextInt(100), + RANDOM.nextBoolean(), RANDOM.nextLong(), RANDOM.nextFloat(), new Date(), phones); + } + + /** */ + public static SimplePersonId generateRandomSimplePersonId() { + return new SimplePersonId(randomString(4), randomString(4), RANDOM.nextInt(100)); + } + + /** */ + public static Person generateRandomPerson(long personNum) { + int phonesCnt = RANDOM.nextInt(4); + + List phones = new LinkedList<>(); + + for (int i = 0; i < phonesCnt; i++) + phones.add(randomNumber(4)); + + return new Person(personNum, randomString(4), randomString(4), (short)RANDOM.nextInt(100), + RANDOM.nextBoolean(), RANDOM.nextLong(), RANDOM.nextFloat(), new Date(), phones); + } + + /** */ + public static PersonId generateRandomPersonId() { + return new PersonId(randomString(4), randomString(4), RANDOM.nextInt(100)); + } + + /** */ + public static Product generateRandomProduct(long id) { + return new Product(id, randomString(2), randomString(6), randomString(20), generateProductPrice(id)); + } + + /** */ + public static ProductOrder generateRandomOrder(long productId) { + return generateRandomOrder(productId, RANDOM.nextInt(10000)); + } + + /** */ + private static ProductOrder generateRandomOrder(long productId, int saltedNumber) { + LocalDate date = 
LocalDate.of(ORDERS_YEAR, ORDERS_MONTH, ORDERS_DAY); + + long id = Long.parseLong(productId + System.currentTimeMillis() + HOST_PREFIX + saltedNumber); + + return generateRandomOrder(id, productId, date.atStartOfDay().toInstant(ZoneOffset.UTC)); + } + + /** */ + public static ProductOrder generateRandomOrder(long id, long productId, Instant date) { + return new ProductOrder(id, productId, generateProductPrice(productId), date, 1 + RANDOM.nextInt(20)); + } + + /** */ + public static boolean checkMapsEqual(Map map1, Map map2) { + if (map1 == null || map2 == null || map1.size() != map2.size()) + return false; + + for (Object key : map1.keySet()) { + Object obj1 = map1.get(key); + Object obj2 = map2.get(key); + + if (obj1 == null || obj2 == null || !obj1.equals(obj2)) + return false; + } + + return true; + } + + /** */ + public static boolean checkCollectionsEqual(Map map, Collection> col) { + if (map == null || col == null || map.size() != col.size()) + return false; + + for (CacheEntryImpl entry : col) { + if (!entry.getValue().equals(map.get(entry.getKey()))) + return false; + } + + return true; + } + + /** */ + public static boolean checkSimplePersonMapsEqual(Map map1, Map map2, + boolean primitiveFieldsOnly) { + if (map1 == null || map2 == null || map1.size() != map2.size()) + return false; + + for (K key : map1.keySet()) { + SimplePerson person1 = map1.get(key); + SimplePerson person2 = map2.get(key); + + boolean equals = person1 != null && person2 != null && + (primitiveFieldsOnly ? person1.equalsPrimitiveFields(person2) : person1.equals(person2)); + + if (!equals) + return false; + } + + return true; + } + + /** */ + public static boolean checkPersonMapsEqual(Map map1, Map map2, + boolean primitiveFieldsOnly) { + if (map1 == null || map2 == null || map1.size() != map2.size()) + return false; + + for (K key : map1.keySet()) { + Person person1 = map1.get(key); + Person person2 = map2.get(key); + + boolean equals = person1 != null && person2 != null && + (primitiveFieldsOnly ? person1.equalsPrimitiveFields(person2) : person1.equals(person2)); + + if (!equals) + return false; + } + + return true; + } + + /** */ + public static boolean checkSimplePersonCollectionsEqual(Map map, Collection> col, + boolean primitiveFieldsOnly) { + if (map == null || col == null || map.size() != col.size()) + return false; + + for (CacheEntryImpl entry : col) { + boolean equals = primitiveFieldsOnly ? + entry.getValue().equalsPrimitiveFields(map.get(entry.getKey())) : + entry.getValue().equals(map.get(entry.getKey())); + + if (!equals) + return false; + } + + return true; + } + + /** */ + public static boolean checkPersonCollectionsEqual(Map map, Collection> col, + boolean primitiveFieldsOnly) { + if (map == null || col == null || map.size() != col.size()) + return false; + + for (CacheEntryImpl entry : col) { + boolean equals = primitiveFieldsOnly ? 
+ entry.getValue().equalsPrimitiveFields(map.get(entry.getKey())) : + entry.getValue().equals(map.get(entry.getKey())); + + if (!equals) + return false; + } + + return true; + } + + /** */ + public static boolean checkProductCollectionsEqual(Map map, Collection> col) { + if (map == null || col == null || map.size() != col.size()) + return false; + + for (CacheEntryImpl entry : col) + if (!entry.getValue().equals(map.get(entry.getKey()))) + return false; + + return true; + } + + /** */ + public static boolean checkProductMapsEqual(Map map1, Map map2) { + if (map1 == null || map2 == null || map1.size() != map2.size()) + return false; + + for (K key : map1.keySet()) { + Product product1 = map1.get(key); + Product product2 = map2.get(key); + + boolean equals = product1 != null && product2 != null && product1.equals(product2); + + if (!equals) + return false; + } + + return true; + } + + /** */ + public static boolean checkOrderCollectionsEqual(Map map, Collection> col) { + if (map == null || col == null || map.size() != col.size()) + return false; + + for (CacheEntryImpl entry : col) + if (!entry.getValue().equals(map.get(entry.getKey()))) + return false; + + return true; + } + + /** */ + public static boolean checkOrderMapsEqual(Map map1, Map map2) { + if (map1 == null || map2 == null || map1.size() != map2.size()) + return false; + + for (K key : map1.keySet()) { + ProductOrder order1 = map1.get(key); + ProductOrder order2 = map2.get(key); + + boolean equals = order1 != null && order2 != null && order1.equals(order2); + + if (!equals) + return false; + } + + return true; + } + + /** */ + public static String randomString(int len) { + StringBuilder builder = new StringBuilder(len); + + for (int i = 0; i < len; i++) + builder.append(LETTERS_ALPHABET.charAt(RANDOM.nextInt(LETTERS_ALPHABET.length()))); + + return builder.toString(); + } + + /** */ + public static String randomNumber(int len) { + StringBuilder builder = new StringBuilder(len); + + for (int i = 0; i < len; i++) + builder.append(NUMBERS_ALPHABET.charAt(RANDOM.nextInt(NUMBERS_ALPHABET.length()))); + + return builder.toString(); + } + + /** */ + private static float generateProductPrice(long productId) { + long id = productId < 1000 ? + (((productId + 1) * (productId + 1) * 1000) / 2) * 10 : + (productId / 20) * (productId / 20); + + id = id == 0 ? 24 : id; + + float price = Long.parseLong(Long.toString(id).replace("0", "")); + + int i = 0; + + while (price > 100) { + if (i % 2 != 0) + price = price / 2; + else + price = (float)Math.sqrt(price); + + i++; + } + + return ((float)((int)(price * 100))) / 100.0F; + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/package-info.java new file mode 100644 index 0000000000000..2c8fec165a6bf --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/tests/utils/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
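Editor's sketch (not part of the patch): the generator and comparison helpers above are typically combined as below; here the generated map is only compared to itself to show the call shapes, whereas a real test would round-trip the data through a Cassandra-backed cache first.

import java.util.Map;
import org.apache.ignite.tests.pojos.Person;
import org.apache.ignite.tests.pojos.PersonId;
import org.apache.ignite.tests.utils.TestsHelper;

public class TestsHelperUsageSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        // Size is driven by bulk.operation.size from the tests resource bundle.
        Map<PersonId, Person> expected = TestsHelper.generatePersonIdsPersonsMap();

        // Compare full POJO state (primitiveFieldsOnly = false).
        boolean equal = TestsHelper.checkPersonMapsEqual(expected, expected, false);

        System.out.println("Generated " + expected.size() + " persons, equal: " + equal);
    }
}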
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains utility classes for unit tests + */ + +package org.apache.ignite.tests.utils; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/testsuites/cassandra/store/IgniteCassandraStoreTestSuite.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/testsuites/cassandra/store/IgniteCassandraStoreTestSuite.java new file mode 100644 index 0000000000000..deeb1291b892f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/testsuites/cassandra/store/IgniteCassandraStoreTestSuite.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.testsuites.cassandra.store; + +import org.apache.ignite.tests.CassandraConfigTest; +import org.apache.ignite.tests.CassandraDirectPersistenceTest; +import org.apache.ignite.tests.CassandraSessionImplTest; +import org.apache.ignite.tests.DDLGeneratorTest; +import org.apache.ignite.tests.DatasourceSerializationTest; +import org.apache.ignite.tests.IgnitePersistentStorePrimitiveTest; +import org.apache.ignite.tests.IgnitePersistentStoreTest; +import org.apache.ignite.tests.utils.CassandraHelper; +import org.apache.ignite.tools.junit.JUnitTeamcityReporter; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; + +/** + * Cache suite for Cassandra store. 
+ * + * Running with -DforkMode=always is recommended + */ +@RunWith(Suite.class) +@SuiteClasses({ + CassandraConfigTest.class, + CassandraDirectPersistenceTest.class, + CassandraSessionImplTest.class, + DatasourceSerializationTest.class, + DDLGeneratorTest.class, + IgnitePersistentStoreTest.class, + IgnitePersistentStorePrimitiveTest.class}) +public class IgniteCassandraStoreTestSuite { + /** */ + private static final Logger LOGGER = LogManager.getLogger(IgniteCassandraStoreTestSuite.class.getName()); + + /** */ + @BeforeClass + public static void setUpClass() { + JUnitTeamcityReporter.suite = IgniteCassandraStoreTestSuite.class.getName(); + + if (CassandraHelper.useEmbeddedCassandra()) { + try { + CassandraHelper.startEmbeddedCassandra(LOGGER); + } + catch (Throwable e) { + throw new RuntimeException("Failed to start embedded Cassandra instance", e); + } + } + } + + /** */ + @AfterClass + public static void tearDownClass() { + if (CassandraHelper.useEmbeddedCassandra()) { + try { + CassandraHelper.stopEmbeddedCassandra(); + } + catch (Throwable e) { + LOGGER.error("Failed to stop embedded Cassandra instance", e); + } + } + } +} diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/testsuites/cassandra/store/package-info.java b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/testsuites/cassandra/store/package-info.java new file mode 100644 index 0000000000000..b7447b0b4659c --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/java/org/apache/ignite/testsuites/cassandra/store/package-info.java @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Contains test suite for unit tests + */ +package org.apache.ignite.testsuites.cassandra.store; diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/log4j2.properties b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/log4j2.properties new file mode 100644 index 0000000000000..9e79b91ffc1c0 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/log4j2.properties @@ -0,0 +1,178 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +rootLogger.level=info +rootLogger.appenderRef.$1.ref=stdout + +# Direct log messages to stdout +appender.stdout.name=stdout +appender.stdout.type=Console +appender.stdout.target=SYSTEM_OUT +appender.stdout.layout.type=PatternLayout +appender.stdout.layout.pattern=%d{HH:mm:ss,SSS} %5p [%t] - %m%n + +# ------ LOAD TESTS LOGGING ------ + +# Cassandra load tests loggers +logger.CassandraLoadTests.name=CassandraLoadTests +logger.CassandraLoadTests.level=INFO +logger.CassandraLoadTests.appenderRef.$1.ref=stdout +logger.CassandraLoadTests.appenderRef.$2.ref=cassandraLoadTests + +logger.CassandraWriteLoadTest.name=CassandraWriteLoadTest +logger.CassandraWriteLoadTest.level=INFO +logger.CassandraWriteLoadTest.appenderRef.$1.ref=csWrite + +logger.CassandraBulkWriteLoadTest.name=CassandraBulkWriteLoadTest +logger.CassandraBulkWriteLoadTest.level=INFO +logger.CassandraBulkWriteLoadTest.appenderRef.$1.ref =csBulkWrite + +logger.CassandraReadLoadTest.name=CassandraReadLoadTest +logger.CassandraReadLoadTest.level=INFO +logger.CassandraReadLoadTest.appenderRef.$1.ref=csRead + +logger.CassandraBulkReadLoadTest.name=CassandraBulkReadLoadTest +logger.CassandraBulkReadLoadTest.level=INFO +logger.CassandraBulkReadLoadTest.appenderRef.$1.ref=csRead + +appender.csWrite.type=RollingFile +appender.csWrite.name=csWrite +appender.csWrite.fileName=logs/cassandra-write.log +appender.csWrite.filePattern=logs/cassandra-write.log%i +appender.csWrite.policies.type=SizeBasedTriggeringPolicy +appender.csWrite.policies.size=10MB +appender.csWrite.strategy.type=DefaultRolloverStrategy +appender.csWrite.strategy.max=10 +appender.csWrite.append=true +appender.csWrite.layout.type=PatternLayout +appender.csWrite.layout.pattern=%d{HH:mm:ss,SSS} %5p [%t] - %m%n + +appender.csBulkWrite.type=RollingFile +appender.csBulkWrite.name=csBulkWrite +appender.csBulkWrite.fileName=logs/cassandra-bulk-write.log +appender.csBulkWrite.filePattern=logs/cassandra-bulk-write.log%i +appender.csBulkWrite.policies.type=SizeBasedTriggeringPolicy +appender.csBulkWrite.policies.size=10MB +appender.csBulkWrite.strategy.type=DefaultRolloverStrategy +appender.csBulkWrite.strategy.max=10 +appender.csBulkWrite.append=true +appender.csBulkWrite.layout.type=PatternLayout +appender.csBulkWrite.layout.pattern=%d{HH:mm:ss,SSS} %5p [%t] - %m%n + +appender.csRead.type=RollingFile +appender.csRead.name=csRead +appender.csRead.fileName=logs/cassandra-read.log +appender.csRead.filePattern=logs/cassandra-read.log%i +appender.csRead.policies.type=SizeBasedTriggeringPolicy +appender.csRead.policies.size=10MB +appender.csRead.strategy.type=DefaultRolloverStrategy +appender.csRead.strategy.max=10 +appender.csRead.append=true +appender.csRead.layout.type=PatternLayout +appender.csRead.layout.pattern=%d{HH:mm:ss,SSS} %5p [%t] - %m%n + +appender.csBulkRead.type=RollingFile +appender.csBulkRead.name=csBulkRead +appender.csBulkRead.fileName=logs/cassandra-bulk-read.log +appender.csBulkRead.filePattern=logs/cassandra-bulk-read.log%i +appender.csBulkRead.policies.type=SizeBasedTriggeringPolicy +appender.csBulkRead.policies.size=10MB 
+appender.csBulkRead.strategy.type=DefaultRolloverStrategy
+appender.csBulkRead.strategy.max=10
+appender.csBulkRead.append=true
+appender.csBulkRead.layout.type=PatternLayout
+appender.csBulkRead.layout.pattern=%d{HH:mm:ss,SSS} %5p [%t] - %m%n
+
+# Ignite load tests loggers
+logger.IgniteLoadTests.name=IgniteLoadTests
+logger.IgniteLoadTests.level=INFO
+logger.IgniteLoadTests.appenderRef.$1.ref=stdout
+logger.IgniteLoadTests.appenderRef.$2.ref=igniteLoadTests
+
+logger.IgniteWriteLoadTest.name=IgniteWriteLoadTest
+logger.IgniteWriteLoadTest.level=INFO
+logger.IgniteWriteLoadTest.appenderRef.$1.ref=igWrite
+
+logger.IgniteBulkWriteLoadTest.name=IgniteBulkWriteLoadTest
+logger.IgniteBulkWriteLoadTest.level=INFO
+logger.IgniteBulkWriteLoadTest.appenderRef.$1.ref=igBulkWrite
+
+logger.IgniteReadLoadTest.name=IgniteReadLoadTest
+logger.IgniteReadLoadTest.level=INFO
+logger.IgniteReadLoadTest.appenderRef.$1.ref=igRead
+
+logger.IgniteBulkReadLoadTest.name=IgniteBulkReadLoadTest
+logger.IgniteBulkReadLoadTest.level=INFO
+logger.IgniteBulkReadLoadTest.appenderRef.$1.ref=igBulkRead
+
+appender.igniteLoadTests.type=RollingFile
+appender.igniteLoadTests.name=igniteLoadTests
+appender.igniteLoadTests.fileName=logs/ignite-load-tests.log
+appender.igniteLoadTests.filePattern=logs/ignite-load-tests.log%i
+appender.igniteLoadTests.policies.type=SizeBasedTriggeringPolicy
+appender.igniteLoadTests.policies.size=10MB
+appender.igniteLoadTests.strategy.type=DefaultRolloverStrategy
+appender.igniteLoadTests.strategy.max=10
+appender.igniteLoadTests.append=true
+appender.igniteLoadTests.layout.type=PatternLayout
+appender.igniteLoadTests.layout.pattern=%d{HH:mm:ss,SSS} %5p [%t] - %m%n
+
+appender.igWrite.type=RollingFile
+appender.igWrite.name=igWrite
+appender.igWrite.fileName=logs/ignite-write.log
+appender.igWrite.filePattern=logs/ignite-write.log%i
+appender.igWrite.policies.type=SizeBasedTriggeringPolicy
+appender.igWrite.policies.size=10MB
+appender.igWrite.strategy.type=DefaultRolloverStrategy
+appender.igWrite.strategy.max=10
+appender.igWrite.append=true
+appender.igWrite.layout.type=PatternLayout
+appender.igWrite.layout.pattern=%d{HH:mm:ss,SSS} %5p [%t] - %m%n
+
+appender.igBulkWrite.type=RollingFile
+appender.igBulkWrite.name=igBulkWrite
+appender.igBulkWrite.fileName=logs/ignite-bulk-write.log
+appender.igBulkWrite.filePattern=logs/ignite-bulk-write.log%i
+appender.igBulkWrite.policies.type=SizeBasedTriggeringPolicy
+appender.igBulkWrite.policies.size=10MB
+appender.igBulkWrite.strategy.type=DefaultRolloverStrategy
+appender.igBulkWrite.strategy.max=10
+appender.igBulkWrite.append=true
+appender.igBulkWrite.layout.type=PatternLayout
+appender.igBulkWrite.layout.pattern=%d{HH:mm:ss,SSS} %5p [%t] - %m%n
+
+appender.igRead.type=RollingFile
+appender.igRead.name=igRead
+appender.igRead.fileName=logs/ignite-read.log
+appender.igRead.filePattern=logs/ignite-read.log%i
+appender.igRead.policies.type=SizeBasedTriggeringPolicy
+appender.igRead.policies.size=10MB
+appender.igRead.strategy.type=DefaultRolloverStrategy
+appender.igRead.strategy.max=10
+appender.igRead.append=true
+appender.igRead.layout.type=PatternLayout
+appender.igRead.layout.pattern=%d{HH:mm:ss,SSS} %5p [%t] - %m%n
+
+appender.igBulkRead.type=RollingFile
+appender.igBulkRead.name=igBulkRead
+appender.igBulkRead.fileName=logs/ignite-bulk-read.log
+appender.igBulkRead.filePattern=logs/ignite-bulk-read.log%i
+appender.igBulkRead.policies.type=SizeBasedTriggeringPolicy
+appender.igBulkRead.policies.size=10MB
+appender.igBulkRead.strategy.type=DefaultRolloverStrategy +appender.igBulkRead.strategy.max=10 +appender.igBulkRead.append=true +appender.igBulkRead.layout.type=PatternLayout +appender.igBulkRead.layout.pattern=%d{HH:mm:ss,SSS} %5p [%t] - %m%n diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/connection-settings.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/connection-settings.xml new file mode 100644 index 0000000000000..aec602ecb365e --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/connection-settings.xml @@ -0,0 +1,52 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/connection.properties b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/connection.properties new file mode 100644 index 0000000000000..ef150189a8992 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/connection.properties @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Comma delimited Cassandra contact points in format: host[:port] +contact.points=127.0.0.1 diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/credentials.properties b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/credentials.properties new file mode 100644 index 0000000000000..f011bcc3b338c --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/credentials.properties @@ -0,0 +1,22 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
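+
+# Illustrative note: these values are typically wrapped into a Credentials implementation
+# and attached to the store's Cassandra DataSource. A minimal sketch (class and setter
+# names taken from the store module and shown here only as an example, not as the
+# canonical wiring):
+#
+#   Credentials adminCreds = new PlainCredentials(adminUser, adminPwd);
+#   DataSource ds = new DataSource();
+#   ds.setCredentials(adminCreds);
+#   ds.setContactPoints(contactPoints); // e.g. the value from connection.properties
+#
+# Leave the properties below empty when authentication is disabled (the embedded test
+# instance uses AllowAllAuthenticator).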
+ +# Cassandra admin user/password +admin.user= +admin.password= + +# Cassandra regular user/password +regular.user= +regular.password= diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/embedded-cassandra.yaml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/embedded-cassandra.yaml new file mode 100644 index 0000000000000..5089f074a41da --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/embedded-cassandra.yaml @@ -0,0 +1,120 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +cluster_name: 'Test Cluster' + +listen_address: 127.0.0.1 +native_transport_port: 9042 +rpc_address: 127.0.0.1 +rpc_port: 9160 + +seed_provider: + - class_name: org.apache.cassandra.locator.SimpleSeedProvider + parameters: + - seeds: "127.0.0.1" +saved_caches_directory: ./data/saved_caches +commitlog_directory: ./data/commitlog +hints_directory: ./data/hints +data_file_directories: + - ./data/data +cdc_raw_directory: ./data/cdc + +num_tokens: 256 +hinted_handoff_enabled: true +max_hint_window_in_ms: 10800000 # 3 hours +hinted_handoff_throttle_in_kb: 1024 +max_hints_delivery_threads: 2 +hints_flush_period_in_ms: 10000 +max_hints_file_size_in_mb: 128 +batchlog_replay_throttle_in_kb: 1024 +authenticator: AllowAllAuthenticator +authorizer: AllowAllAuthorizer +role_manager: CassandraRoleManager +roles_validity_in_ms: 2000 +permissions_validity_in_ms: 2000 +partitioner: org.apache.cassandra.dht.Murmur3Partitioner +disk_failure_policy: stop +commit_failure_policy: stop +key_cache_size_in_mb: +key_cache_save_period: 14400 +row_cache_size_in_mb: 0 +row_cache_save_period: 0 +counter_cache_size_in_mb: +counter_cache_save_period: 7200 +commitlog_sync: periodic +commitlog_sync_period_in_ms: 10000 +commitlog_segment_size_in_mb: 32 +concurrent_reads: 32 +concurrent_writes: 32 +concurrent_counter_writes: 32 +concurrent_materialized_view_writes: 32 +memtable_allocation_type: heap_buffers +index_summary_capacity_in_mb: +index_summary_resize_interval_in_minutes: 60 +trickle_fsync: false +trickle_fsync_interval_in_kb: 10240 +storage_port: 7000 +ssl_storage_port: 7001 +start_native_transport: true +start_rpc: false +rpc_keepalive: true +rpc_server_type: sync +thrift_framed_transport_size_in_mb: 15 +incremental_backups: false +snapshot_before_compaction: false +auto_snapshot: true +tombstone_warn_threshold: 1000 +tombstone_failure_threshold: 100000 +column_index_size_in_kb: 64 +batch_size_warn_threshold_in_kb: 5 +batch_size_fail_threshold_in_kb: 50 +compaction_throughput_mb_per_sec: 16 +compaction_large_partition_warning_threshold_mb: 100 +sstable_preemptive_open_interval_in_mb: 50 +read_request_timeout_in_ms: 5000 
+range_request_timeout_in_ms: 10000 +write_request_timeout_in_ms: 2000 +counter_write_request_timeout_in_ms: 5000 +cas_contention_timeout_in_ms: 1000 +truncate_request_timeout_in_ms: 60000 +request_timeout_in_ms: 10000 +cross_node_timeout: false +endpoint_snitch: SimpleSnitch +dynamic_snitch_update_interval_in_ms: 100 +dynamic_snitch_reset_interval_in_ms: 600000 +dynamic_snitch_badness_threshold: 0.1 +request_scheduler: org.apache.cassandra.scheduler.NoScheduler + +server_encryption_options: + internode_encryption: none + keystore: conf/.keystore + keystore_password: cassandra + truststore: conf/.truststore + truststore_password: cassandra + +client_encryption_options: + enabled: false + optional: false + keystore: conf/.keystore + keystore_password: cassandra + +internode_compression: all +inter_dc_tcp_nodelay: false +tracetype_query_ttl: 86400 +tracetype_repair_ttl: 604800 +gc_warn_threshold_in_ms: 1000 +enable_user_defined_functions: false +enable_scripted_user_defined_functions: false +windows_timer_interval: 1 diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/keyspaces.properties b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/keyspaces.properties new file mode 100644 index 0000000000000..9205cc1ed1e46 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/cassandra/keyspaces.properties @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
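+
+# Illustrative note: the listed keyspaces are normally created on demand by the store/tests;
+# if one has to be prepared manually, plain CQL of roughly this shape is enough (the
+# replication settings here are an example, not the values used by the persistence-settings
+# XML files):
+#
+#   CREATE KEYSPACE IF NOT EXISTS test1
+#     WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}
+#     AND durable_writes = true;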
+ +# Cassandra keyspaces used for tests +keyspaces=test1 diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/ignite-config.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/ignite-config.xml new file mode 100644 index 0000000000000..cde4becdb3e6a --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/ignite-config.xml @@ -0,0 +1,96 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 127.0.0.1:47500..47509 + + + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/persistence-settings-1.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/persistence-settings-1.xml new file mode 100644 index 0000000000000..1c1951d4e0557 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/persistence-settings-1.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/persistence-settings-2.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/persistence-settings-2.xml new file mode 100644 index 0000000000000..49b3caf7e0103 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/persistence-settings-2.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/persistence-settings-3.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/persistence-settings-3.xml new file mode 100644 index 0000000000000..e872201fcc180 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/blob/persistence-settings-3.xml @@ -0,0 +1,29 @@ + + + + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/loadall_blob/ignite-config.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/loadall_blob/ignite-config.xml new file mode 100644 index 0000000000000..115e263e017d8 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/loadall_blob/ignite-config.xml @@ -0,0 +1,90 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 127.0.0.1:47500..47509 + + + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/loadall_blob/persistence-settings.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/loadall_blob/persistence-settings.xml new file mode 100644 index 0000000000000..e872201fcc180 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/loadall_blob/persistence-settings.xml @@ -0,0 +1,29 @@ + + + + + + + + + diff --git 
a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/ignite-config.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/ignite-config.xml new file mode 100644 index 0000000000000..4105b3dfbea4f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/ignite-config.xml @@ -0,0 +1,212 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 127.0.0.1:47500..47509 + + + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/order.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/order.xml new file mode 100644 index 0000000000000..d6163643fae44 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/order.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-1.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-1.xml new file mode 100644 index 0000000000000..b39578c0991ed --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-1.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-2.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-2.xml new file mode 100644 index 0000000000000..10a2d9f6244d3 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-2.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-3.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-3.xml new file mode 100644 index 0000000000000..f602508cf1afb --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-3.xml @@ -0,0 +1,175 @@ + + + + + + + REPLICATION = {'class' : 'SimpleStrategy', 'replication_factor' : 3} + AND DURABLE_WRITES = true + + + + + comment = 'A most excellent and useful table' + AND read_repair_chance = 0.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-4.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-4.xml new file mode 100644 index 0000000000000..490d8e76dc8ef --- /dev/null +++ 
b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-4.xml @@ -0,0 +1,175 @@ + + + + + + + REPLICATION = {'class' : 'SimpleStrategy', 'replication_factor' : 3} + AND DURABLE_WRITES = true + + + + + comment = 'A most excellent and useful table' + AND read_repair_chance = 0.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-5.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-5.xml new file mode 100644 index 0000000000000..f4210b8ba4844 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-5.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-6.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-6.xml new file mode 100644 index 0000000000000..340f64615a82c --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/persistence-settings-6.xml @@ -0,0 +1,174 @@ + + + + + + + REPLICATION = {'class' : 'SimpleStrategy', 'replication_factor' : 3} + AND DURABLE_WRITES = true + + + + + comment = 'A most excellent and useful table' + AND read_repair_chance = 0.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/product.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/product.xml new file mode 100644 index 0000000000000..c761e1c357191 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/pojo/product.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/ignite-config.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/ignite-config.xml new file mode 100644 index 0000000000000..99091fa8d274f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/ignite-config.xml @@ -0,0 +1,96 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 127.0.0.1:47500..47509 + + + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/ignite-remote-client-config.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/ignite-remote-client-config.xml new file mode 100644 index 0000000000000..5b5bb597aee82 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/ignite-remote-client-config.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + cassandra-node-1.abc.com + cassandra-node-2.abc.com + + + + + + + + + + + + + + + + +]]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + ignite-node-1 + ignite-node-2 + ignite-node-3 + + + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/ignite-remote-server-config.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/ignite-remote-server-config.xml new file mode 100644 index 0000000000000..e8852c020a0d1 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/ignite-remote-server-config.xml @@ -0,0 +1,110 @@ + + + + + + + + + + + + + + + cassandra-node-1.abc.com + cassandra-node-2.abc.com + + + + + + + + + + + + + + + + + +]]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 127.0.0.1:47500..47509 + + + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/persistence-settings-1.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/persistence-settings-1.xml new file mode 100644 index 0000000000000..27882e631f96c --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/persistence-settings-1.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/persistence-settings-2.xml b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/persistence-settings-2.xml new file mode 100644 index 0000000000000..62c1f4a6004cd --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/org/apache/ignite/tests/persistence/primitive/persistence-settings-2.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/resources/tests.properties b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/tests.properties new file mode 100644 index 0000000000000..b11f2c847b27f --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/resources/tests.properties @@ -0,0 +1,65 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
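+
+# Illustrative note: load.tests.key.generator / load.tests.value.generator below point to
+# generator classes such as org.apache.ignite.tests.load.LongGenerator. A custom generator
+# is, roughly, a small class mapping a sequence number to a key or value (sketch only;
+# see the Generator interface in the test sources for the exact contract):
+#
+#   public class MyIdGenerator implements Generator {
+#       @Override public Object generate(long i) {
+#           return "id-" + i; // hypothetical key format
+#       }
+#   }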
+ +# Number of elements for CacheStore bulk operations: loadAll, writeAll, deleteAll +bulk.operation.size=100 + +# Number of product per transaction +transaction.products.count=2 + +# Number of orders per transaction +transaction.orders.count=10 + +# Year to use for generating new orders +orders.year= + +# Month to use for generating new orders +orders.month= + +# Day of month to use for generating new orders +orders.day= + +# ----- Load tests settings ----- + +# Ignite cache to be used by load tests +load.tests.cache.name=cache1 +#load.tests.cache.name=cache3 + +# Number of simultaneous threads for each load test +load.tests.threads.count=10 + +# Warm up period (in milliseconds) for each load test before starting any measurements +load.tests.warmup.period=180000 + +# Time for each load test execution excluding warm up period (in milliseconds) +load.tests.execution.time=300000 + +# Latency (in milliseconds) between two sequential requests to Cassandra/Ignite +load.tests.requests.latency=0 + +# Resource specifying persistence settings for all load tests +load.tests.persistence.settings=org/apache/ignite/tests/persistence/primitive/persistence-settings-1.xml +#load.tests.persistence.settings=org/apache/ignite/tests/persistence/pojo/persistence-settings-3.xml + +# Resource specifying Ignite configuration for all load tests +load.tests.ignite.config=org/apache/ignite/tests/persistence/primitive/ignite-remote-client-config.xml + +# Key generator for load tests +load.tests.key.generator=org.apache.ignite.tests.load.LongGenerator +#load.tests.key.generator=org.apache.ignite.tests.load.PersonIdGenerator + +# Value generator for load tests +load.tests.value.generator=org.apache.ignite.tests.load.LongGenerator +#load.tests.value.generator=org.apache.ignite.tests.load.PersonGenerator diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/cassandra-load-tests.bat b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/cassandra-load-tests.bat new file mode 100644 index 0000000000000..c64de1e20b9f8 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/cassandra-load-tests.bat @@ -0,0 +1,41 @@ +:: +:: Licensed to the Apache Software Foundation (ASF) under one or more +:: contributor license agreements. See the NOTICE file distributed with +:: this work for additional information regarding copyright ownership. +:: The ASF licenses this file to You under the Apache License, Version 2.0 +:: (the "License"); you may not use this file except in compliance with +:: the License. You may obtain a copy of the License at +:: +:: http://www.apache.org/licenses/LICENSE-2.0 +:: +:: Unless required by applicable law or agreed to in writing, software +:: distributed under the License is distributed on an "AS IS" BASIS, +:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +:: See the License for the specific language governing permissions and +:: limitations under the License. +:: + +echo off + +echo. + +set TESTS_CLASSPATH="%~dp0lib\*;%~dp0settings" + +call %~dp0jvm-opts.bat %* + +call java %JVM_OPTS% -cp "%TESTS_CLASSPATH%" "org.apache.ignite.tests.CassandraDirectPersistenceLoadTest" + +if %errorLevel% NEQ 0 ( + echo. + echo -------------------------------------------------------------------------------- + echo [ERROR] Tests execution failed + echo -------------------------------------------------------------------------------- + echo. + exit /b %errorLevel% +) + +echo. 
+echo -------------------------------------------------------------------------------- +echo [INFO] Tests execution succeed +echo -------------------------------------------------------------------------------- +echo. diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/cassandra-load-tests.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/cassandra-load-tests.sh new file mode 100644 index 0000000000000..dda25dc97e91d --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/cassandra-load-tests.sh @@ -0,0 +1,39 @@ +#!/bin/sh +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +TESTS_ROOT=$(readlink -m $( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )) +TESTS_CLASSPATH="$TESTS_ROOT/lib/*:$TESTS_ROOT/settings" + +. $TESTS_ROOT/jvm-opt.sh $@ + +java $JVM_OPTS -cp "$TESTS_CLASSPATH" "org.apache.ignite.tests.CassandraDirectPersistenceLoadTest" + +if [ $? -ne 0 ]; then + echo + echo "--------------------------------------------------------------------------------" + echo "[ERROR] Tests execution failed" + echo "--------------------------------------------------------------------------------" + echo + exit 1 +fi + +echo +echo "--------------------------------------------------------------------------------" +echo "[INFO] Tests execution succeed" +echo "--------------------------------------------------------------------------------" +echo diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/ignite-load-tests.bat b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/ignite-load-tests.bat new file mode 100644 index 0000000000000..5a45ffcb6a758 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/ignite-load-tests.bat @@ -0,0 +1,41 @@ +:: +:: Licensed to the Apache Software Foundation (ASF) under one or more +:: contributor license agreements. See the NOTICE file distributed with +:: this work for additional information regarding copyright ownership. +:: The ASF licenses this file to You under the Apache License, Version 2.0 +:: (the "License"); you may not use this file except in compliance with +:: the License. You may obtain a copy of the License at +:: +:: http://www.apache.org/licenses/LICENSE-2.0 +:: +:: Unless required by applicable law or agreed to in writing, software +:: distributed under the License is distributed on an "AS IS" BASIS, +:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +:: See the License for the specific language governing permissions and +:: limitations under the License. +:: + +echo off + +echo. 
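+
+:: Assumed layout, same as the other load-test scripts: the packaged test jars are expected
+:: in a lib\ directory next to this script, and the resources above (tests.properties,
+:: Ignite configs, persistence settings) in a settings\ directory; jvm-opts.bat only sets
+:: JVM_OPTS. With that layout the script is simply run from its own directory:
+::
+::   ignite-load-tests.bat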
+ +set TESTS_CLASSPATH="%~dp0\lib*;%~dp0settings" + +call %~dp0jvm-opts.bat %* + +call java %JVM_OPTS% -cp "%TESTS_CLASSPATH%" "org.apache.ignite.tests.IgnitePersistentStoreLoadTest" + +if %errorLevel% NEQ 0 ( + echo. + echo -------------------------------------------------------------------------------- + echo [ERROR] Tests execution failed + echo -------------------------------------------------------------------------------- + echo. + exit /b %errorLevel% +) + +echo. +echo -------------------------------------------------------------------------------- +echo [INFO] Tests execution succeed +echo -------------------------------------------------------------------------------- +echo. diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/ignite-load-tests.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/ignite-load-tests.sh new file mode 100644 index 0000000000000..c2e1a419af7bf --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/ignite-load-tests.sh @@ -0,0 +1,39 @@ +#!/bin/sh +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +TESTS_ROOT=$(readlink -m $( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )) +TESTS_CLASSPATH="$TESTS_ROOT/lib/*:$TESTS_ROOT/settings" + +. $TESTS_ROOT/jvm-opt.sh $@ + +java $JVM_OPTS -cp "$TESTS_CLASSPATH" "org.apache.ignite.tests.IgnitePersistentStoreLoadTest" + +if [ $? -ne 0 ]; then + echo + echo "--------------------------------------------------------------------------------" + echo "[ERROR] Tests execution failed" + echo "--------------------------------------------------------------------------------" + echo + exit 1 +fi + +echo +echo "--------------------------------------------------------------------------------" +echo "[INFO] Tests execution succeed" +echo "--------------------------------------------------------------------------------" +echo diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/jvm-opt.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/jvm-opt.sh new file mode 100644 index 0000000000000..d4e70f005e593 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/jvm-opt.sh @@ -0,0 +1,21 @@ +#!/bin/sh +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +JVM_OPTS="-Xms6g -Xmx6g -XX:+AggressiveOpts -XX:MaxMetaspaceSize=256m" +JVM_OPTS="$JVM_OPTS -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:+UseTLAB -XX:NewSize=128m -XX:MaxNewSize=768m" +JVM_OPTS="$JVM_OPTS -Xss16m" diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/jvm-opts.bat b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/jvm-opts.bat new file mode 100644 index 0000000000000..1937efbc9ed57 --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/jvm-opts.bat @@ -0,0 +1,24 @@ +:: +:: Licensed to the Apache Software Foundation (ASF) under one or more +:: contributor license agreements. See the NOTICE file distributed with +:: this work for additional information regarding copyright ownership. +:: The ASF licenses this file to You under the Apache License, Version 2.0 +:: (the "License"); you may not use this file except in compliance with +:: the License. You may obtain a copy of the License at +:: +:: http://www.apache.org/licenses/LICENSE-2.0 +:: +:: Unless required by applicable law or agreed to in writing, software +:: distributed under the License is distributed on an "AS IS" BASIS, +:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +:: See the License for the specific language governing permissions and +:: limitations under the License. +:: + +echo off + +echo. + +set "JVM_OPTS=-Xms6g -Xmx6g -XX:+AggressiveOpts -XX:MaxMetaspaceSize=256m" +set "JVM_OPTS=%JVM_OPTS% -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:+UseTLAB -XX:NewSize=128m -XX:MaxNewSize=768m" +set "JVM_OPTS=%JVM_OPTS% -Xss16m" diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/recreate-cassandra-artifacts.bat b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/recreate-cassandra-artifacts.bat new file mode 100644 index 0000000000000..d538ea425e54e --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/recreate-cassandra-artifacts.bat @@ -0,0 +1,41 @@ +:: +:: Licensed to the Apache Software Foundation (ASF) under one or more +:: contributor license agreements. See the NOTICE file distributed with +:: this work for additional information regarding copyright ownership. +:: The ASF licenses this file to You under the Apache License, Version 2.0 +:: (the "License"); you may not use this file except in compliance with +:: the License. You may obtain a copy of the License at +:: +:: http://www.apache.org/licenses/LICENSE-2.0 +:: +:: Unless required by applicable law or agreed to in writing, software +:: distributed under the License is distributed on an "AS IS" BASIS, +:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +:: See the License for the specific language governing permissions and +:: limitations under the License. +:: + +echo off + +echo. + +set TESTS_CLASSPATH="%~dp0\lib*;%~dp0settings" + +call %~dp0jvm-opts.bat %* + +call java %JVM_OPTS% -cp "%TESTS_CLASSPATH%" "org.apache.ignite.tests.LoadTestsCassandraArtifactsCreator" + +if %errorLevel% NEQ 0 ( + echo. 
+ echo -------------------------------------------------------------------------------- + echo [ERROR] Failed to recreate Cassandra artifacts + echo -------------------------------------------------------------------------------- + echo. + exit /b %errorLevel% +) + +echo. +echo -------------------------------------------------------------------------------- +echo [INFO] Cassandra artifacts were successfully recreated +echo -------------------------------------------------------------------------------- +echo. diff --git a/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/recreate-cassandra-artifacts.sh b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/recreate-cassandra-artifacts.sh new file mode 100644 index 0000000000000..b0f99be218b8e --- /dev/null +++ b/ignite-extensions/modules/cassandra-ext/store/src/test/scripts/recreate-cassandra-artifacts.sh @@ -0,0 +1,39 @@ +#!/bin/sh +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +TESTS_ROOT=$(readlink -m $( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )) +TESTS_CLASSPATH="$TESTS_ROOT/lib/*:$TESTS_ROOT/settings" + +. $TESTS_ROOT/jvm-opt.sh $@ + +java $JVM_OPTS -cp "$TESTS_CLASSPATH" "org.apache.ignite.tests.LoadTestsCassandraArtifactsCreator" + +if [ $? 
-ne 0 ]; then + echo + echo "--------------------------------------------------------------------------------" + echo "[ERROR] Failed to recreate Cassandra artifacts" + echo "--------------------------------------------------------------------------------" + echo + exit 1 +fi + +echo +echo "--------------------------------------------------------------------------------" +echo "[INFO] Cassandra artifacts were successfully recreated" +echo "--------------------------------------------------------------------------------" +echo diff --git a/ignite-extensions/modules/elasticsearch-relay/pom.xml b/ignite-extensions/modules/elasticsearch-relay/pom.xml index 7a944764e0e13..d96121c53d758 100644 --- a/ignite-extensions/modules/elasticsearch-relay/pom.xml +++ b/ignite-extensions/modules/elasticsearch-relay/pom.xml @@ -4,8 +4,8 @@ org.apache.ignite - ignite-parent-internal - 2.16.999-SNAPSHOT + ignite-parent-ext-internal + 1 ../../parent-internal/pom.xml diff --git a/ignite-extensions/modules/ml-ext/examples/pom.xml b/ignite-extensions/modules/ml-ext/examples/pom.xml index 9035d08d2795a..9f5ce01a307b2 100644 --- a/ignite-extensions/modules/ml-ext/examples/pom.xml +++ b/ignite-extensions/modules/ml-ext/examples/pom.xml @@ -69,24 +69,12 @@ ${project.parent.version} - - org.apache.ignite - ignite-ml-h2o-model-parser-ext - ${project.parent.version} - - org.apache.ignite ignite-ml-catboost-model-parser-ext ${project.parent.version} - - - org.apache.ignite - ignite-ml-spark-model-parser-ext - ${project.parent.version} - - + org.jpmml pmml-model diff --git a/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/structures/LabeledVectorSet.java b/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/structures/LabeledVectorSet.java index 6ae058665dc86..3355ab325e6d7 100644 --- a/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/structures/LabeledVectorSet.java +++ b/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/structures/LabeledVectorSet.java @@ -234,7 +234,6 @@ public static Vector emptyVector(int size) { public LabeledVectorSet copy() { LabeledVectorSet res = new LabeledVectorSet<>(this.data, this.colSize); res.meta = this.meta; - return res; } diff --git a/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/tree/DecisionTreeTrainer.java b/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/tree/DecisionTreeTrainer.java index d567eaef5d245..0692ec62ac853 100644 --- a/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/tree/DecisionTreeTrainer.java +++ b/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/tree/DecisionTreeTrainer.java @@ -224,13 +224,13 @@ private SplitPoint calculateBestSplitPoint(StepFunction[] criterionFunctions) for (int col = 0; col < criterionFunctions.length; col++) { StepFunction criterionFunctionForCol = criterionFunctions[col]; - double[] args = criterionFunctionForCol.getX(); + double[] arguments = criterionFunctionForCol.getX(); T[] values = criterionFunctionForCol.getY(); for (int leftSize = 1; leftSize < values.length - 1; leftSize++) { if ((values[0].impurity() - values[leftSize].impurity()) > minImpurityDecrease && (res == null || values[leftSize].compareTo(res.val) < 0)) - res = new SplitPoint<>(values[leftSize], col, calculateThreshold(args, leftSize)); + res = new SplitPoint<>(values[leftSize], col, calculateThreshold(arguments, leftSize)); } } diff --git 
a/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/tree/impurity/util/SimpleStepFunctionCompressor.java b/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/tree/impurity/util/SimpleStepFunctionCompressor.java index 39fe986bbf6fc..2418571089ba6 100644 --- a/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/tree/impurity/util/SimpleStepFunctionCompressor.java +++ b/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/tree/impurity/util/SimpleStepFunctionCompressor.java @@ -63,14 +63,14 @@ public SimpleStepFunctionCompressor(int minSizeToBeCompressed, double minImpurit /** {@inheritDoc} */ @Override public StepFunction compress(StepFunction function) { - double[] args = function.getX(); + double[] arguments = function.getX(); T[] values = function.getY(); - if (args.length >= minSizeToBeCompressed) { + if (arguments.length >= minSizeToBeCompressed) { List points = new ArrayList<>(); - for (int i = 0; i < args.length; i++) - points.add(new StepFunctionPoint(args[i], values[i])); + for (int i = 0; i < arguments.length; i++) + points.add(new StepFunctionPoint(arguments[i], values[i])); points = compress(points); diff --git a/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/tree/randomforest/RandomForestClassifierTrainer.java b/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/tree/randomforest/RandomForestClassifierTrainer.java index 9679906c69d1d..ab8db2e563114 100644 --- a/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/tree/randomforest/RandomForestClassifierTrainer.java +++ b/ignite-extensions/modules/ml-ext/ml/src/main/java/org/apache/ignite/ml/tree/randomforest/RandomForestClassifierTrainer.java @@ -90,8 +90,8 @@ public RandomForestClassifierTrainer(List meta) { return false; int i = 0; - for (Double lbl : uniqLabels) - lblMapping.put(lbl, i++); + for (Double label : uniqLabels) + lblMapping.put(label, i++); return super.init(dataset); } diff --git a/ignite-extensions/modules/ml-ext/ml/src/main/resources/META-INF/services/org.apache.ignite.plugin.PluginProvider b/ignite-extensions/modules/ml-ext/ml/src/main/resources/META-INF/services/org.apache.ignite.plugin.PluginProvider new file mode 100644 index 0000000000000..03cd54b214ad5 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/main/resources/META-INF/services/org.apache.ignite.plugin.PluginProvider @@ -0,0 +1 @@ +org.apache.ignite.ml.util.plugin.MLPluginProvider \ No newline at end of file diff --git a/ignite-extensions/modules/ml-ext/ml/src/main/resources/org/apache/ignite/ml/math/d3-dataset-template.html b/ignite-extensions/modules/ml-ext/ml/src/main/resources/org/apache/ignite/ml/math/d3-dataset-template.html new file mode 100644 index 0000000000000..03e9c894d7278 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/main/resources/org/apache/ignite/ml/math/d3-dataset-template.html @@ -0,0 +1,112 @@ + + + +IgniteML + + + + + diff --git a/ignite-extensions/modules/ml-ext/ml/src/main/resources/org/apache/ignite/ml/math/d3-matrix-template.html b/ignite-extensions/modules/ml-ext/ml/src/main/resources/org/apache/ignite/ml/math/d3-matrix-template.html new file mode 100644 index 0000000000000..055344b90c60f --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/main/resources/org/apache/ignite/ml/math/d3-matrix-template.html @@ -0,0 +1,121 @@ + + + +IgniteML + + + + + \ No newline at end of file diff --git 
a/ignite-extensions/modules/ml-ext/ml/src/main/resources/org/apache/ignite/ml/math/d3-vector-template.html b/ignite-extensions/modules/ml-ext/ml/src/main/resources/org/apache/ignite/ml/math/d3-vector-template.html new file mode 100644 index 0000000000000..a615b7c5fceb8 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/main/resources/org/apache/ignite/ml/math/d3-vector-template.html @@ -0,0 +1,106 @@ + + + +IgniteML + + + + + \ No newline at end of file diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/IgniteMLTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/IgniteMLTestSuite.java new file mode 100644 index 0000000000000..b613ade79701d --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/IgniteMLTestSuite.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml; + +import org.apache.ignite.ml.clustering.ClusteringTestSuite; +import org.apache.ignite.ml.common.CommonTestSuite; +import org.apache.ignite.ml.composition.CompositionTestSuite; +import org.apache.ignite.ml.dataset.DatasetTestSuite; +import org.apache.ignite.ml.environment.EnvironmentTestSuite; +import org.apache.ignite.ml.inference.InferenceTestSuite; +import org.apache.ignite.ml.knn.KNNTestSuite; +import org.apache.ignite.ml.math.MathImplMainTestSuite; +import org.apache.ignite.ml.multiclass.MultiClassTestSuite; +import org.apache.ignite.ml.nn.MLPTestSuite; +import org.apache.ignite.ml.pipeline.PipelineTestSuite; +import org.apache.ignite.ml.preprocessing.PreprocessingTestSuite; +import org.apache.ignite.ml.recommendation.RecommendationTestSuite; +import org.apache.ignite.ml.regressions.RegressionsTestSuite; +import org.apache.ignite.ml.selection.SelectionTestSuite; +import org.apache.ignite.ml.structures.StructuresTestSuite; +import org.apache.ignite.ml.svm.SVMTestSuite; +import org.apache.ignite.ml.tree.DecisionTreeTestSuite; +import org.apache.ignite.ml.tree.randomforest.RandomForestTreeTestSuite; +import org.apache.ignite.ml.util.UtilTestSuite; +import org.apache.ignite.ml.util.generators.DataStreamGeneratorTestSuite; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all module tests. IMPL NOTE tests in {@code org.apache.ignite.ml.tree.performance} are not + * included here because these are intended only for manual execution. 
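+ *
+ * <p>Illustrative invocation (standard Surefire usage, run from the ml module directory):
+ * {@code mvn test -Dtest=IgniteMLTestSuite}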
+ */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + MathImplMainTestSuite.class, + RegressionsTestSuite.class, + SVMTestSuite.class, + ClusteringTestSuite.class, + KNNTestSuite.class, + PipelineTestSuite.class, + PreprocessingTestSuite.class, + CompositionTestSuite.class, + EnvironmentTestSuite.class, + StructuresTestSuite.class, + CommonTestSuite.class, + MultiClassTestSuite.class, + DataStreamGeneratorTestSuite.class, + UtilTestSuite.class, + RandomForestTreeTestSuite.class, + RecommendationTestSuite.class, + + /** JUnit 3 tests. */ + DecisionTreeTestSuite.class, + MLPTestSuite.class, + InferenceTestSuite.class, + DatasetTestSuite.class, + SelectionTestSuite.class +}) +public class IgniteMLTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/TestUtils.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/TestUtils.java new file mode 100644 index 0000000000000..f30000170f9d3 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/TestUtils.java @@ -0,0 +1,481 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml; + +import java.io.Serializable; +import java.util.stream.IntStream; +import org.apache.ignite.ml.dataset.DatasetBuilder; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.environment.LearningEnvironmentBuilder; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.preprocessing.Preprocessor; +import org.apache.ignite.ml.trainers.DatasetTrainer; +import org.junit.Assert; + +import static org.junit.Assert.assertTrue; + +/** */ +public class TestUtils { + /** + * Collection of static methods used in math unit tests. + */ + private TestUtils() { + } + + /** + * Verifies that expected and actual are within delta, or are both NaN or + * infinities of the same sign. + * + * @param exp Expected value. + * @param actual Actual value. + * @param delta Maximum allowed delta between {@code exp} and {@code actual}. + */ + public static void assertEquals(double exp, double actual, double delta) { + Assert.assertEquals(null, exp, actual, delta); + } + + /** + * Verifies that expected and actual are within delta, or are both NaN or + * infinities of the same sign. + */ + public static void assertEquals(String msg, double exp, double actual, double delta) { + // Check for NaN. + if (Double.isNaN(exp)) + Assert.assertTrue("" + actual + " is not NaN.", Double.isNaN(actual)); + else + Assert.assertEquals(msg, exp, actual, delta); + } + + /** + * Verifies that two double arrays have equal entries, up to tolerance. 
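+ *
+ * <p>Illustrative usage (delegates to the element-wise array comparison defined below):
+ * {@code TestUtils.assertEquals(new double[] {1.0, 2.0}, new double[] {1.0, 2.0000001}, 1e-6)}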
+ */ + public static void assertEquals(double exp[], double observed[], double tolerance) { + assertEquals("Array comparison failure", exp, observed, tolerance); + } + + /** + * Asserts that all entries of the specified vectors are equal to within a + * positive {@code delta}. + * + * @param msg The identifying message for the assertion error (can be {@code null}). + * @param exp Expected value. + * @param actual Actual value. + * @param delta The maximum difference between the entries of the expected and actual vectors for which both entries + * are still considered equal. + */ + public static void assertEquals(final String msg, + final double[] exp, final Vector actual, final double delta) { + final String msgAndSep = msg.equals("") ? "" : msg + ", "; + + Assert.assertEquals(msgAndSep + "dimension", exp.length, actual.size()); + + for (int i = 0; i < exp.length; i++) + Assert.assertEquals(msgAndSep + "entry #" + i, exp[i], actual.getX(i), delta); + } + + /** + * Asserts that all entries of the specified vectors are equal to within a + * positive {@code delta}. + * + * @param msg The identifying message for the assertion error (can be {@code null}). + * @param exp Expected value. + * @param actual Actual value. + * @param delta The maximum difference between the entries of the expected and actual vectors for which both entries + * are still considered equal. + */ + public static void assertEquals(final String msg, + final Vector exp, final Vector actual, final double delta) { + final String msgAndSep = msg.equals("") ? "" : msg + ", "; + + Assert.assertEquals(msgAndSep + "dimension", exp.size(), actual.size()); + + final int dim = exp.size(); + for (int i = 0; i < dim; i++) + Assert.assertEquals(msgAndSep + "entry #" + i, exp.getX(i), actual.getX(i), delta); + } + + /** + * Verifies that two matrices are close (1-norm). + * + * @param msg The identifying message for the assertion error. + * @param exp Expected matrix. + * @param actual Actual matrix. + * @param tolerance Comparison tolerance value. + */ + public static void assertEquals(String msg, Matrix exp, Matrix actual, double tolerance) { + Assert.assertNotNull(msg + "\nObserved should not be null", actual); + + if (exp.columnSize() != actual.columnSize() || exp.rowSize() != actual.rowSize()) { + String msgBuff = msg + "\nObserved has incorrect dimensions." + + "\nobserved is " + actual.rowSize() + + " x " + actual.columnSize() + + "\nexpected " + exp.rowSize() + + " x " + exp.columnSize(); + + Assert.fail(msgBuff); + } + + Matrix delta = exp.minus(actual); + + if (maximumAbsoluteRowSum(delta) >= tolerance) { + String msgBuff = msg + "\nExpected: " + exp + + "\nObserved: " + actual + + "\nexpected - observed: " + delta; + + Assert.fail(msgBuff); + } + } + + /** + * Verifies that two matrices are equal. + * + * @param exp Expected matrix. + * @param actual Actual matrix. + */ + public static void assertEquals(Matrix exp, Matrix actual) { + Assert.assertNotNull("Observed should not be null", actual); + + if (exp.columnSize() != actual.columnSize() || exp.rowSize() != actual.rowSize()) { + String msgBuff = "Observed has incorrect dimensions." + + "\nobserved is " + actual.rowSize() + + " x " + actual.columnSize() + + "\nexpected " + exp.rowSize() + + " x " + exp.columnSize(); + + Assert.fail(msgBuff); + } + + for (int i = 0; i < exp.rowSize(); ++i) + for (int j = 0; j < exp.columnSize(); ++j) { + double eij = exp.getX(i, j); + double aij = actual.getX(i, j); + + // TODO: IGNITE-5824, Check precision here. 
+ Assert.assertEquals(eij, aij, 0.0); + } + } + + /** + * Verifies that two vectors are equal. + * + * @param exp Expected vector. + * @param observed Actual vector. + */ + public static void assertEquals(Vector exp, Vector observed, double eps) { + Assert.assertNotNull("Observed should not be null", observed); + + if (exp.size() != observed.size()) { + String msgBuff = "Observed has incorrect dimensions." + + "\nobserved is " + observed.size() + + " x " + observed.size(); + + Assert.fail(msgBuff); + } + + for (int i = 0; i < exp.size(); ++i) { + double eij = exp.getX(i); + double aij = observed.getX(i); + + Assert.assertEquals(eij, aij, eps); + } + } + + /** + * Verifies that two double arrays are close (sup norm). + * + * @param msg The identifying message for the assertion error. + * @param exp Expected array. + * @param actual Actual array. + * @param tolerance Comparison tolerance value. + */ + public static void assertEquals(String msg, double[] exp, double[] actual, double tolerance) { + StringBuilder out = new StringBuilder(msg); + + if (exp.length != actual.length) { + out.append("\n Arrays not same length. \n"); + out.append("expected has length "); + out.append(exp.length); + out.append(" observed length = "); + out.append(actual.length); + Assert.fail(out.toString()); + } + + boolean failure = false; + + for (int i = 0; i < exp.length; i++) + if (!Precision.equalsIncludingNaN(exp[i], actual[i], tolerance)) { + failure = true; + out.append("\n Elements at index "); + out.append(i); + out.append(" differ. "); + out.append(" expected = "); + out.append(exp[i]); + out.append(" observed = "); + out.append(actual[i]); + } + + if (failure) + Assert.fail(out.toString()); + } + + /** */ + public static double maximumAbsoluteRowSum(Matrix mtx) { + return IntStream.range(0, mtx.rowSize()).mapToObj(mtx::viewRow).map(v -> Math.abs(v.sum())).reduce(Math::max).get(); + } + + /** */ + public static void checkIsInEpsilonNeighbourhood(Vector[] v1s, Vector[] v2s, double epsilon) { + for (int i = 0; i < v1s.length; i++) { + assertTrue("Not in epsilon neighbourhood (index " + i + ") ", + v1s[i].minus(v2s[i]).kNorm(2) < epsilon); + } + } + + /** */ + public static void checkIsInEpsilonNeighbourhood(Vector v1, Vector v2, double epsilon) { + checkIsInEpsilonNeighbourhood(new Vector[] {v1}, new Vector[] {v2}, epsilon); + } + + /** */ + public static boolean checkIsInEpsilonNeighbourhoodBoolean(Vector v1, Vector v2, double epsilon) { + try { + checkIsInEpsilonNeighbourhood(new Vector[] {v1}, new Vector[] {v2}, epsilon); + } + catch (Throwable e) { + return false; + } + + return true; + } + + /** */ + private static class Precision { + /** Offset to order signed double numbers lexicographically. */ + private static final long SGN_MASK = 0x8000000000000000L; + + /** Positive zero bits. */ + private static final long POSITIVE_ZERO_DOUBLE_BITS = Double.doubleToRawLongBits(+0.0); + + /** Negative zero bits. */ + private static final long NEGATIVE_ZERO_DOUBLE_BITS = Double.doubleToRawLongBits(-0.0); + + /** + * Returns true if the arguments are both NaN, are equal or are within the range + * of allowed error (inclusive). + * + * @param x first value + * @param y second value + * @param eps the amount of absolute error to allow. + * @return {@code true} if the values are equal or within range of each other, or both are NaN. 
+ * @since 2.2 + */ + static boolean equalsIncludingNaN(double x, double y, double eps) { + return equalsIncludingNaN(x, y) || (Math.abs(y - x) <= eps); + } + + /** + * Returns true if the arguments are both NaN or they are + * equal as defined by {@link #equals(double, double, int) equals(x, y, 1)}. + * + * @param x first value + * @param y second value + * @return {@code true} if the values are equal or both are NaN. + * @since 2.2 + */ + private static boolean equalsIncludingNaN(double x, double y) { + return (x != x || y != y) ? !(x != x ^ y != y) : equals(x, y, 1); + } + + /** + * Returns true if the arguments are equal or within the range of allowed + * error (inclusive). + *
+ * Two float numbers are considered equal if there are {@code (maxUlps - 1)}
+ * (or fewer) floating point numbers between them, i.e. two adjacent
+ * floating point numbers are considered equal.
+ *
+ * Adapted from Bruce Dawson. Returns {@code false} if either of the arguments is NaN.
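+ *
+ * As an illustrative consequence of the ULP rule (not part of the original comment): the raw bit patterns of
+ * {@code 1.0} and {@code Math.nextUp(1.0)} differ by exactly one, so {@code equals(1.0, Math.nextUp(1.0), 1)}
+ * holds, while {@code equals(1.0, Math.nextUp(Math.nextUp(1.0)), 1)} does not.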
+ * + * @param x first value + * @param y second value + * @param maxUlps {@code (maxUlps - 1)} is the number of floating point values between {@code x} and {@code y}. + * @return {@code true} if there are fewer than {@code maxUlps} floating point values between {@code x} and {@code + * y}. + */ + private static boolean equals(final double x, final double y, final int maxUlps) { + + final long xInt = Double.doubleToRawLongBits(x); + final long yInt = Double.doubleToRawLongBits(y); + + final boolean isEqual; + if (((xInt ^ yInt) & SGN_MASK) == 0L) { + // number have same sign, there is no risk of overflow + isEqual = Math.abs(xInt - yInt) <= maxUlps; + } + else { + // number have opposite signs, take care of overflow + final long deltaPlus; + final long deltaMinus; + if (xInt < yInt) { + deltaPlus = yInt - POSITIVE_ZERO_DOUBLE_BITS; + deltaMinus = xInt - NEGATIVE_ZERO_DOUBLE_BITS; + } + else { + deltaPlus = xInt - POSITIVE_ZERO_DOUBLE_BITS; + deltaMinus = yInt - NEGATIVE_ZERO_DOUBLE_BITS; + } + + if (deltaPlus > maxUlps) + isEqual = false; + else + isEqual = deltaMinus <= (maxUlps - deltaPlus); + + } + + return isEqual && !Double.isNaN(x) && !Double.isNaN(y); + + } + } + + /** + * Gets test learning environment builder. + * + * @return Test learning environment builder. + */ + public static LearningEnvironmentBuilder testEnvBuilder() { + return testEnvBuilder(123L); + } + + /** + * Gets test learning environment builder with a given seed. + * + * @param seed Seed. + * @return Test learning environment builder. + */ + public static LearningEnvironmentBuilder testEnvBuilder(long seed) { + return LearningEnvironmentBuilder.defaultBuilder().withRNGSeed(seed); + } + + /** + * Simple wrapper class which adds {@link AutoCloseable} to given type. + * + * @param Type to wrap. + */ + public static class DataWrapper implements AutoCloseable { + /** + * Value to wrap. + */ + T val; + + /** + * Wrap given value in {@link AutoCloseable}. + * + * @param val Value to wrap. + * @param Type of value to wrap. + * @return Value wrapped as {@link AutoCloseable}. + */ + public static DataWrapper of(T val) { + return new DataWrapper<>(val); + } + + /** + * Construct instance of this class from given value. + * + * @param val Value to wrap. + */ + public DataWrapper(T val) { + this.val = val; + } + + /** + * Get wrapped value. + * + * @return Wrapped value. + */ + public T val() { + return val; + } + + /** {@inheritDoc} */ + @Override public void close() throws Exception { + if (val instanceof AutoCloseable) + ((AutoCloseable)val).close(); + } + } + + /** + * Return model which returns given constant. + * + * @param v Constant value. + * @param Type of input. + * @param Type of output. + * @return Model which returns given constant. + */ + public static IgniteModel constantModel(V v) { + return t -> v; + } + + /** + * Returns trainer which independently of dataset outputs given model. + * + * @param ml Model. + * @param Type of model input. + * @param Type of model output. + * @param Type of model. + * @param Type of dataset labels. + * @return Trainer which independently of dataset outputs given model. 
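+ *
+ * Illustrative use (hypothetical names): {@code TestUtils.constantTrainer(mdl).fit(datasetBuilder, vectorizer)}
+ * simply hands back {@code mdl}, whatever the dataset contains.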
+ */ + public static , L> DatasetTrainer constantTrainer(M ml) { + return new DatasetTrainer() { + /** */ + public M fit(DatasetBuilder datasetBuilder, + Vectorizer extractor) { + return ml; + } + + /** {@inheritDoc} */ + @Override public M fitWithInitializedDeployingContext( + DatasetBuilder datasetBuilder, + Preprocessor preprocessor + ) { + return null; + } + + /** {@inheritDoc} */ + @Override public boolean isUpdateable(M mdl) { + return true; + } + + @Override protected M updateModel(M mdl, DatasetBuilder datasetBuilder, + Preprocessor preprocessor) { + return null; + } + + /** */ + public M updateModel(M mdl, DatasetBuilder datasetBuilder, + Vectorizer extractor) { + return ml; + } + }; + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/ClusteringTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/ClusteringTestSuite.java new file mode 100644 index 0000000000000..d22198caa3c26 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/ClusteringTestSuite.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.clustering; + +import org.apache.ignite.ml.clustering.gmm.CovarianceMatricesAggregatorTest; +import org.apache.ignite.ml.clustering.gmm.GmmModelTest; +import org.apache.ignite.ml.clustering.gmm.GmmPartitionDataTest; +import org.apache.ignite.ml.clustering.gmm.GmmTrainerIntegrationTest; +import org.apache.ignite.ml.clustering.gmm.GmmTrainerTest; +import org.apache.ignite.ml.clustering.gmm.MeanWithClusterProbAggregatorTest; +import org.apache.ignite.ml.clustering.gmm.NewComponentStatisticsAggregatorTest; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in org.apache.ignite.ml.clustering package. 
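+ *
+ * The suite is a plain JUnit 4 {@code @RunWith(Suite.class)} holder, so it can also be launched
+ * programmatically, e.g. {@code org.junit.runner.JUnitCore.runClasses(ClusteringTestSuite.class)}.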
+ */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + //k-means tests + KMeansTrainerTest.class, + KMeansModelTest.class, + + //GMM tests + CovarianceMatricesAggregatorTest.class, + GmmModelTest.class, + GmmPartitionDataTest.class, + MeanWithClusterProbAggregatorTest.class, + GmmTrainerTest.class, + GmmTrainerIntegrationTest.class, + NewComponentStatisticsAggregatorTest.class +}) +public class ClusteringTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansModelTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansModelTest.java new file mode 100644 index 0000000000000..5c7f8dad79905 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansModelTest.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.clustering; + +import org.apache.ignite.ml.clustering.kmeans.KMeansModel; +import org.apache.ignite.ml.math.distances.DistanceMeasure; +import org.apache.ignite.ml.math.distances.EuclideanDistance; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Assert; +import org.junit.Test; + +/** + * Tests for {@link KMeansModel}. + */ +public class KMeansModelTest { + /** Precision in test checks. 
*/ + private static final double PRECISION = 1e-6; + + /** */ + @Test + public void predictClusters() { + DistanceMeasure distanceMeasure = new EuclideanDistance(); + + Vector[] centers = new DenseVector[4]; + + centers[0] = new DenseVector(new double[]{1.0, 1.0}); + centers[1] = new DenseVector(new double[]{-1.0, 1.0}); + centers[2] = new DenseVector(new double[]{1.0, -1.0}); + centers[3] = new DenseVector(new double[]{-1.0, -1.0}); + + KMeansModel mdl = new KMeansModel(centers, distanceMeasure); + + Assert.assertTrue(mdl.toString().contains("KMeansModel")); + + Assert.assertEquals(mdl.predict(new DenseVector(new double[]{1.1, 1.1})), 0.0, PRECISION); + Assert.assertEquals(mdl.predict(new DenseVector(new double[]{-1.1, 1.1})), 1.0, PRECISION); + Assert.assertEquals(mdl.predict(new DenseVector(new double[]{1.1, -1.1})), 2.0, PRECISION); + Assert.assertEquals(mdl.predict(new DenseVector(new double[]{-1.1, -1.1})), 3.0, PRECISION); + + Assert.assertEquals(mdl.distanceMeasure(), distanceMeasure); + Assert.assertEquals(mdl.amountOfClusters(), 4); + Assert.assertArrayEquals(mdl.centers(), centers); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansTrainerTest.java new file mode 100644 index 0000000000000..43a37c59777fb --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansTrainerTest.java @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.clustering; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.clustering.kmeans.KMeansModel; +import org.apache.ignite.ml.clustering.kmeans.KMeansTrainer; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.distances.EuclideanDistance; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.jetbrains.annotations.NotNull; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link KMeansTrainer}. + */ +public class KMeansTrainerTest extends TrainerTest { + /** Precision in test checks. */ + private static final double PRECISION = 1e-2; + + /** Data. 
*/ + private static final Map data = new HashMap<>(); + + static { + data.put(0, new double[] {1.0, 1.0, 1.0}); + data.put(1, new double[] {1.0, 2.0, 1.0}); + data.put(2, new double[] {2.0, 1.0, 1.0}); + data.put(3, new double[] {-1.0, -1.0, 2.0}); + data.put(4, new double[] {-1.0, -2.0, 2.0}); + data.put(5, new double[] {-2.0, -1.0, 2.0}); + } + + /** + * A few points, one cluster, one iteration + */ + @Test + public void findOneClusters() { + KMeansTrainer trainer = createAndCheckTrainer(); + KMeansModel knnMdl = trainer.withAmountOfClusters(1).fit( + new LocalDatasetBuilder<>(data, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + Vector firstVector = new DenseVector(new double[] {2.0, 2.0}); + assertEquals(knnMdl.predict(firstVector), 0.0, PRECISION); + Vector secondVector = new DenseVector(new double[] {-2.0, -2.0}); + assertEquals(knnMdl.predict(secondVector), 0.0, PRECISION); + assertEquals(trainer.getMaxIterations(), 1); + assertEquals(trainer.getEpsilon(), PRECISION, PRECISION); + } + + /** */ + @Test + public void testUpdateMdl() { + KMeansTrainer trainer = createAndCheckTrainer(); + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST); + KMeansModel originalMdl = trainer.withAmountOfClusters(1).fit( + new LocalDatasetBuilder<>(data, parts), + vectorizer + ); + KMeansModel updatedMdlOnSameDataset = trainer.update( + originalMdl, + new LocalDatasetBuilder<>(data, parts), + vectorizer + ); + KMeansModel updatedMdlOnEmptyDataset = trainer.update( + originalMdl, + new LocalDatasetBuilder<>(new HashMap<>(), parts), + vectorizer + ); + + Vector firstVector = new DenseVector(new double[] {2.0, 2.0}); + Vector secondVector = new DenseVector(new double[] {-2.0, -2.0}); + assertEquals(originalMdl.predict(firstVector), updatedMdlOnSameDataset.predict(firstVector), PRECISION); + assertEquals(originalMdl.predict(secondVector), updatedMdlOnSameDataset.predict(secondVector), PRECISION); + assertEquals(originalMdl.predict(firstVector), updatedMdlOnEmptyDataset.predict(firstVector), PRECISION); + assertEquals(originalMdl.predict(secondVector), updatedMdlOnEmptyDataset.predict(secondVector), PRECISION); + } + + /** */ + @NotNull private KMeansTrainer createAndCheckTrainer() { + KMeansTrainer trainer = new KMeansTrainer() + .withDistance(new EuclideanDistance()) + .withAmountOfClusters(10) + .withMaxIterations(1) + .withEpsilon(PRECISION); + assertEquals(10, trainer.getAmountOfClusters()); + assertTrue(trainer.getDistance() instanceof EuclideanDistance); + return trainer; + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/CovarianceMatricesAggregatorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/CovarianceMatricesAggregatorTest.java new file mode 100644 index 0000000000000..b9753b51eecf7 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/CovarianceMatricesAggregatorTest.java @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.clustering.gmm; + +import java.util.Arrays; +import java.util.List; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.structures.LabeledVector; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link CovarianceMatricesAggregator}. + */ +public class CovarianceMatricesAggregatorTest { + /** */ + @Test + public void testAdd() { + CovarianceMatricesAggregator agg = new CovarianceMatricesAggregator(VectorUtils.of(1., 0.)); + assertEquals(0, agg.rowCount()); + + agg.add(VectorUtils.of(1., 0.), 100.); + assertArrayEquals(VectorUtils.of(1., 0.).asArray(), agg.mean().asArray(), 1e-4); + assertArrayEquals( + agg.weightedSum().getStorage().data(), + fromArray(2, 0., 0., 0., 0.).getStorage().data(), + 1e-4 + ); + assertEquals(1, agg.rowCount()); + + agg.add(VectorUtils.of(0., 1.), 10.); + assertArrayEquals(VectorUtils.of(1., 0.).asArray(), agg.mean().asArray(), 1e-4); + assertArrayEquals( + agg.weightedSum().getStorage().data(), + fromArray(2, 10., -10., -10., 10.).getStorage().data(), + 1e-4 + ); + assertEquals(2, agg.rowCount()); + } + + /** */ + @Test + public void testPlus() { + Vector mean = VectorUtils.of(1, 0); + + CovarianceMatricesAggregator agg1 = new CovarianceMatricesAggregator(mean, identity(2), 1); + CovarianceMatricesAggregator agg2 = new CovarianceMatricesAggregator(mean, identity(2).times(2), 3); + CovarianceMatricesAggregator res = agg1.plus(agg2); + + assertArrayEquals(mean.asArray(), res.mean().asArray(), 1e-4); + assertArrayEquals(identity(2).times(3).getStorage().data(), res.weightedSum().getStorage().data(), 1e-4); + assertEquals(4, res.rowCount()); + } + + /** */ + @Test + public void testReduce() { + Vector mean1 = VectorUtils.of(1, 0); + Vector mean2 = VectorUtils.of(0, 1); + + CovarianceMatricesAggregator agg11 = new CovarianceMatricesAggregator(mean1, identity(2), 1); + CovarianceMatricesAggregator agg12 = new CovarianceMatricesAggregator(mean1, identity(2), 1); + + CovarianceMatricesAggregator agg21 = new CovarianceMatricesAggregator(mean2, identity(2), 2); + CovarianceMatricesAggregator agg22 = new CovarianceMatricesAggregator(mean2, identity(2), 2); + + List result = CovarianceMatricesAggregator.reduce( + Arrays.asList(agg11, agg21), + Arrays.asList(agg12, agg22) + ); + + assertEquals(2, result.size()); + CovarianceMatricesAggregator res1 = result.get(0); + CovarianceMatricesAggregator res2 = result.get(1); + + assertArrayEquals(mean1.asArray(), res1.mean().asArray(), 1e-4); + assertArrayEquals(identity(2).times(2).getStorage().data(), res1.weightedSum().getStorage().data(), 1e-4); + assertEquals(2, res1.rowCount()); + + assertArrayEquals(mean2.asArray(), res2.mean().asArray(), 1e-4); + assertArrayEquals(identity(2).times(2).getStorage().data(), 
res2.weightedSum().getStorage().data(), 1e-4); + assertEquals(4, res2.rowCount()); + } + + /** */ + @Test + public void testMap() { + List> xs = Arrays.asList( + new LabeledVector<>(VectorUtils.of(1, 0), 0.), + new LabeledVector<>(VectorUtils.of(0, 1), 0.), + new LabeledVector<>(VectorUtils.of(1, 1), 0.) + ); + + double[][] pcxi = new double[][] { + new double[] {0.1, 0.2}, + new double[] {0.4, 0.3}, + new double[] {0.5, 0.6} + }; + + GmmPartitionData data = new GmmPartitionData(xs, pcxi); + Vector mean1 = VectorUtils.of(1, 1); + Vector mean2 = VectorUtils.of(0, 1); + List result = CovarianceMatricesAggregator.map(data, new Vector[] {mean1, mean2}); + + assertEquals(pcxi[0].length, result.size()); + + CovarianceMatricesAggregator res1 = result.get(0); + assertArrayEquals(mean1.asArray(), res1.mean().asArray(), 1e-4); + assertArrayEquals( + res1.weightedSum().getStorage().data(), + fromArray(2, 0.4, 0., 0., 0.1).getStorage().data(), + 1e-4 + ); + assertEquals(3, res1.rowCount()); + + CovarianceMatricesAggregator res2 = result.get(1); + assertArrayEquals(mean2.asArray(), res2.mean().asArray(), 1e-4); + assertArrayEquals( + res2.weightedSum().getStorage().data(), + fromArray(2, 0.8, -0.2, -0.2, 0.2).getStorage().data(), + 1e-4 + ); + assertEquals(3, res2.rowCount()); + } + + /** */ + private Matrix identity(int n) { + DenseMatrix matrix = new DenseMatrix(n, n); + for (int i = 0; i < n; i++) + matrix.set(i, i, 1.); + return matrix; + } + + /** */ + private Matrix fromArray(int n, double... values) { + assertTrue(n == values.length / n); + + return new DenseMatrix(values, n); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmModelTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmModelTest.java new file mode 100644 index 0000000000000..0c90738530dda --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmModelTest.java @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.clustering.gmm; + +import java.util.Arrays; +import java.util.Collections; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.math.stat.MultivariateGaussianDistribution; +import org.apache.ignite.ml.math.util.MatrixUtil; +import org.junit.Assert; +import org.junit.Test; + +/** + * Tests for {@link GmmModelTest}. 
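+ *
+ * Sanity check for the expected values below: a bivariate Gaussian's density at its mean is
+ * {@code 1 / (2 * Math.PI * Math.sqrt(det))}; with {@code det = 1 - 0.5 * 0.5 = 0.75} that is roughly 0.1838,
+ * matching the 0.183 asserted for both {@code likelihood} and {@code prob}.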
+ */ +public class GmmModelTest { + /** */ + @Test + public void testTrivialCasesWithOneComponent() { + Vector mean = VectorUtils.of(1., 2.); + DenseMatrix covariance = MatrixUtil.fromList(Arrays.asList( + VectorUtils.of(1, -0.5), + VectorUtils.of(-0.5, 1)), + true + ); + + GmmModel gmm = new GmmModel( + VectorUtils.of(1.0), + Collections.singletonList(new MultivariateGaussianDistribution(mean, covariance)) + ); + + Assert.assertEquals(2, gmm.dimension()); + Assert.assertEquals(1, gmm.countOfComponents()); + Assert.assertEquals(VectorUtils.of(1.), gmm.componentsProbs()); + Assert.assertEquals(0., gmm.predict(mean), 0.01); + Assert.assertEquals(1, gmm.likelihood(mean).size()); + Assert.assertEquals(0.183, gmm.likelihood(mean).get(0), 0.01); + Assert.assertEquals(0.183, gmm.prob(mean), 0.01); + } + + /** */ + @Test + public void testTwoComponents() { + Vector mean1 = VectorUtils.of(1., 2.); + DenseMatrix covariance1 = MatrixUtil.fromList(Arrays.asList( + VectorUtils.of(1, -0.25), + VectorUtils.of(-0.25, 1)), + true + ); + + Vector mean2 = VectorUtils.of(2., 1.); + DenseMatrix covariance2 = MatrixUtil.fromList(Arrays.asList( + VectorUtils.of(1, 0.5), + VectorUtils.of(0.5, 1)), + true + ); + + GmmModel gmm = new GmmModel( + VectorUtils.of(0.5, 0.5), + Arrays.asList( + new MultivariateGaussianDistribution(mean1, covariance1), + new MultivariateGaussianDistribution(mean2, covariance2) + ) + ); + + Assert.assertEquals(0., gmm.predict(mean1), 0.01); + Assert.assertEquals(1., gmm.predict(mean2), 0.01); + Assert.assertEquals(0., gmm.predict(VectorUtils.of(1.5, 1.5)), 0.01); + Assert.assertEquals(1., gmm.predict(VectorUtils.of(3., 0.)), 0.01); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmPartitionDataTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmPartitionDataTest.java new file mode 100644 index 0000000000000..cd84d68a6ca12 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmPartitionDataTest.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.clustering.gmm; + +import java.util.Arrays; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.math.stat.MultivariateGaussianDistribution; +import org.apache.ignite.ml.structures.LabeledVector; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link GmmPartitionDataTest}. + */ +public class GmmPartitionDataTest { + /** Data. 
*/ + private GmmPartitionData data; + + /** */ + @Before + public void setUp() throws Exception { + data = new GmmPartitionData( + Arrays.asList( + new LabeledVector<>(VectorUtils.of(1, 0), 0.), + new LabeledVector<>(VectorUtils.of(0, 1), 0.), + new LabeledVector<>(VectorUtils.of(1, 1), 0.) + ), + new double[3][2] + ); + } + + /** */ + @Test + public void testEstimateLikelihoodClusters() { + GmmPartitionData.estimateLikelihoodClusters(data, new Vector[] { + VectorUtils.of(1.0, 0.5), + VectorUtils.of(0.0, 0.5) + }); + + assertEquals(1.0, data.pcxi(0, 0), 1e-4); + assertEquals(0.0, data.pcxi(1, 0), 1e-4); + + assertEquals(0.0, data.pcxi(0, 1), 1e-4); + assertEquals(1.0, data.pcxi(1, 1), 1e-4); + + assertEquals(1.0, data.pcxi(0, 2), 1e-4); + assertEquals(0.0, data.pcxi(1, 2), 1e-4); + } + + /** */ + @Test + public void testUpdatePcxi() { + GmmPartitionData.updatePcxi( + data, + VectorUtils.of(0.3, 0.7), + Arrays.asList( + new MultivariateGaussianDistribution(VectorUtils.of(1.0, 0.5), new DenseMatrix(new double[] {0.5, 0., 0., 1.}, 2)), + new MultivariateGaussianDistribution(VectorUtils.of(0.0, 0.5), new DenseMatrix(new double[] {1.0, 0., 0., 1.}, 2)) + ) + ); + + assertEquals(0.49, data.pcxi(0, 0), 1e-2); + assertEquals(0.50, data.pcxi(1, 0), 1e-2); + + assertEquals(0.18, data.pcxi(0, 1), 1e-2); + assertEquals(0.81, data.pcxi(1, 1), 1e-2); + + assertEquals(0.49, data.pcxi(0, 2), 1e-2); + assertEquals(0.50, data.pcxi(1, 2), 1e-2); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmTrainerIntegrationTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmTrainerIntegrationTest.java new file mode 100644 index 0000000000000..0c4bd9b3ae7e9 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmTrainerIntegrationTest.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.clustering.gmm; + +import java.util.Arrays; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.dataset.impl.cache.CacheBasedDatasetBuilder; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.junit.Assert; +import org.junit.Test; + +/** + * Integration test for GmmTrainer. 
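+ *
+ * The six training vectors form two groups of three whose coordinate-wise means are (4/3, 4/3) and
+ * (-4/3, -4/3); the 1.33 / -1.33 assertions on the fitted component means check exactly that.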
+ */ +public class GmmTrainerIntegrationTest extends GridCommonAbstractTest { + /** Number of nodes in grid */ + private static final int NODE_COUNT = 3; + + /** Ignite instance. */ + private Ignite ignite; + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + for (int i = 1; i <= NODE_COUNT; i++) + startGrid(i); + } + + /** {@inheritDoc} */ + @Override protected void afterTestsStopped() { + stopAllGrids(); + } + + /** + * {@inheritDoc} + */ + @Override protected void beforeTest() { + /* Grid instance. */ + ignite = grid(NODE_COUNT); + ignite.configuration().setPeerClassLoadingEnabled(true); + IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); + } + + /** */ + @Test + public void testFit() { + CacheConfiguration trainingSetCacheCfg = new CacheConfiguration<>(); + trainingSetCacheCfg.setAffinity(new RendezvousAffinityFunction(false, 3)); + trainingSetCacheCfg.setName("TRAINING_SET"); + + IgniteCache data = ignite.createCache(trainingSetCacheCfg); + data.put(0, new double[] {1.0, 1.0, 1.0}); + data.put(1, new double[] {1.0, 2.0, 1.0}); + data.put(2, new double[] {2.0, 1.0, 1.0}); + data.put(3, new double[] {-1.0, -1.0, 2.0}); + data.put(4, new double[] {-1.0, -2.0, 2.0}); + data.put(5, new double[] {-2.0, -1.0, 2.0}); + + GmmTrainer trainer = new GmmTrainer(2, 1) + .withInitialMeans(Arrays.asList( + VectorUtils.of(1.0, 2.0), + VectorUtils.of(-1.0, -2.0))); + GmmModel model = trainer.fit( + new CacheBasedDatasetBuilder<>(ignite, data), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + Assert.assertEquals(2, model.countOfComponents()); + Assert.assertEquals(2, model.dimension()); + Assert.assertArrayEquals(new double[] {1.33, 1.33}, model.distributions().get(0).mean().asArray(), 1e-2); + Assert.assertArrayEquals(new double[] {-1.33, -1.33}, model.distributions().get(1).mean().asArray(), 1e-2); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmTrainerTest.java new file mode 100644 index 0000000000000..529e0ca712a84 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/GmmTrainerTest.java @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.clustering.gmm; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Assert; +import org.junit.Test; + +/** + * Tests for GMM trainer. + */ +public class GmmTrainerTest extends TrainerTest { + /** Data. */ + private static final Map data = new HashMap<>(); + + static { + data.put(0, new double[] {1.0, 1.0, 1.0}); + data.put(1, new double[] {1.0, 2.0, 1.0}); + data.put(2, new double[] {2.0, 1.0, 1.0}); + data.put(3, new double[] {-1.0, -1.0, 2.0}); + data.put(4, new double[] {-1.0, -2.0, 2.0}); + data.put(5, new double[] {-2.0, -1.0, 2.0}); + } + + /** */ + @Test + public void testFit() { + GmmTrainer trainer = new GmmTrainer(2, 1) + .withInitialMeans(Arrays.asList( + VectorUtils.of(1.0, 2.0), + VectorUtils.of(-1.0, -2.0))); + + GmmModel mdl = trainer.fit( + new LocalDatasetBuilder<>(data, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + Assert.assertEquals(2, mdl.countOfComponents()); + Assert.assertEquals(2, mdl.dimension()); + Assert.assertArrayEquals(new double[] {1.33, 1.33}, mdl.distributions().get(0).mean().asArray(), 1e-2); + Assert.assertArrayEquals(new double[] {-1.33, -1.33}, mdl.distributions().get(1).mean().asArray(), 1e-2); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testOnEmptyPartition() throws Throwable { + GmmTrainer trainer = new GmmTrainer(2, 1) + .withInitialMeans(Arrays.asList(VectorUtils.of(1.0, 2.0), VectorUtils.of(-1.0, -2.0))); + + try { + trainer.fit( + new LocalDatasetBuilder<>(new HashMap<>(), parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + } + catch (RuntimeException e) { + throw e.getCause(); + } + } + + /** */ + @Test + public void testUpdateOnEmptyDataset() { + GmmTrainer trainer = new GmmTrainer(2, 1) + .withInitialMeans(Arrays.asList( + VectorUtils.of(1.0, 2.0), + VectorUtils.of(-1.0, -2.0))); + + GmmModel mdl = trainer.fit( + new LocalDatasetBuilder<>(data, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + mdl = trainer.updateModel(mdl, + new LocalDatasetBuilder<>(new HashMap<>(), parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + Assert.assertEquals(2, mdl.countOfComponents()); + Assert.assertEquals(2, mdl.dimension()); + Assert.assertArrayEquals(new double[] {1.33, 1.33}, mdl.distributions().get(0).mean().asArray(), 1e-2); + Assert.assertArrayEquals(new double[] {-1.33, -1.33}, mdl.distributions().get(1).mean().asArray(), 1e-2); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/MeanWithClusterProbAggregatorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/MeanWithClusterProbAggregatorTest.java new file mode 100644 index 0000000000000..7a3043dc88e52 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/MeanWithClusterProbAggregatorTest.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.clustering.gmm; + +import java.util.Arrays; +import java.util.List; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.structures.LabeledVector; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link MeanWithClusterProbAggregator}. + */ +public class MeanWithClusterProbAggregatorTest { + /** */ + private MeanWithClusterProbAggregator aggregator1 = new MeanWithClusterProbAggregator(); + + /** */ + private MeanWithClusterProbAggregator aggregator2 = new MeanWithClusterProbAggregator(); + + /** + * Default constructor. + */ + public MeanWithClusterProbAggregatorTest() { + aggregator1.add(VectorUtils.of(1., 1.), 0.5); + aggregator1.add(VectorUtils.of(0., 1.), 0.25); + aggregator1.add(VectorUtils.of(1., 0.), 0.75); + aggregator1.add(VectorUtils.of(0., 0.), 0.10); + + aggregator2.add(VectorUtils.of(1., 1.), 1.0); + aggregator2.add(VectorUtils.of(0., 1.), 1.0); + aggregator2.add(VectorUtils.of(1., 0.), 1.0); + aggregator2.add(VectorUtils.of(0., 0.), 1.0); + } + + /** */ + @Test + public void testAdd() { + assertArrayEquals(new double[] {0.781, 0.468}, aggregator1.mean().asArray(), 1e-2); + assertArrayEquals(new double[] {0.5, 0.5}, aggregator2.mean().asArray(), 1e-2); + + assertEquals(0.4, aggregator1.clusterProb(), 1e-4); + assertEquals(1.0, aggregator2.clusterProb(), 1e-4); + } + + /** */ + @Test + public void testPlus() { + MeanWithClusterProbAggregator res = aggregator1.plus(aggregator2); + + assertEquals(0.7, res.clusterProb(), 1e-4); + assertArrayEquals(new double[] {0.580, 0.491}, res.mean().asArray(), 1e-2); + } + + /** */ + @Test + public void testReduce() { + MeanWithClusterProbAggregator aggregator3 = new MeanWithClusterProbAggregator(); + MeanWithClusterProbAggregator aggregator4 = new MeanWithClusterProbAggregator(); + + aggregator3.add(VectorUtils.of(1., 1.), 0.5); + aggregator3.add(VectorUtils.of(0., 1.), 0.25); + aggregator3.add(VectorUtils.of(1., 0.), 0.25); + aggregator3.add(VectorUtils.of(0., 0.), 0.5); + + aggregator4.add(VectorUtils.of(1., 1.), 1.0); + aggregator4.add(VectorUtils.of(0., 1.), 1.0); + aggregator4.add(VectorUtils.of(1., 0.), 1.0); + aggregator4.add(VectorUtils.of(0., 0.), 1.0); + + List res = MeanWithClusterProbAggregator.reduce( + Arrays.asList(aggregator1, aggregator3), + Arrays.asList(aggregator2, aggregator4) + ); + + MeanWithClusterProbAggregator res1 = res.get(0); + assertEquals(0.70, res1.clusterProb(), 1e-2); + assertArrayEquals(new double[] {0.580, 0.491}, res1.mean().asArray(), 1e-2); + + MeanWithClusterProbAggregator res2 = res.get(1); + assertEquals(0.68, res2.clusterProb(), 1e-2); + assertArrayEquals(new double[] {0.50, 0.50}, res2.mean().asArray(), 1e-2); + } + + /** */ + @Test + public void 
testMap() { + GmmPartitionData data = new GmmPartitionData( + Arrays.asList( + new LabeledVector<>(VectorUtils.of(1, 0), 0.), + new LabeledVector<>(VectorUtils.of(0, 1), 0.), + new LabeledVector<>(VectorUtils.of(1, 1), 0.) + ), + + new double[][] { + new double[] {0.5, 0.1}, + new double[] {1.0, 0.4}, + new double[] {0.3, 0.2} + } + ); + + List res = MeanWithClusterProbAggregator.map(data, 2); + assertEquals(2, res.size()); + + MeanWithClusterProbAggregator agg1 = res.get(0); + assertEquals(0.6, agg1.clusterProb(), 1e-2); + assertArrayEquals(new double[] {0.44, 0.72}, agg1.mean().asArray(), 1e-2); + + MeanWithClusterProbAggregator agg2 = res.get(1); + assertEquals(0.23, agg2.clusterProb(), 1e-2); + assertArrayEquals(new double[] {0.42, 0.85}, agg2.mean().asArray(), 1e-2); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/NewComponentStatisticsAggregatorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/NewComponentStatisticsAggregatorTest.java new file mode 100644 index 0000000000000..83a4f17a19806 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/clustering/gmm/NewComponentStatisticsAggregatorTest.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.clustering.gmm; + +import java.util.Arrays; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.structures.LabeledVector; +import org.junit.Before; +import org.junit.Test; + +import static org.apache.ignite.ml.clustering.gmm.NewComponentStatisticsAggregator.computeNewMeanMap; +import static org.apache.ignite.ml.clustering.gmm.NewComponentStatisticsAggregator.computeNewMeanReduce; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * Tests for {@link NewComponentStatisticsAggregator} class. 
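+ *
+ * The {@link GmmModel} is mocked (Mockito), so every row carries a fixed probability; presumably rows whose
+ * probability falls below {@code maxXsProb / maxProbDivergence} (0.5 in testMap) are attributed to the new
+ * component, which leaves (1, 0) and (0, 1) and hence the expected mean (0.5, 0.5) with a row count of 2.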
+ */ +public class NewComponentStatisticsAggregatorTest { + /** */ + GmmPartitionData data1 = new GmmPartitionData( + Arrays.asList( + vec(1, 0), + vec(0, 1), + vec(3, 7) + ), + new double[3][] + ); + + /** */ + GmmPartitionData data2 = new GmmPartitionData( + Arrays.asList( + vec(3, 1), + vec(1, 4), + vec(1, 3) + ), + new double[3][] + ); + + /** */ + GmmModel model; + + /** */ + @Before + public void before() { + model = mock(GmmModel.class); + when(model.prob(data1.getX(0))).thenReturn(0.1); + when(model.prob(data1.getX(1))).thenReturn(0.4); + when(model.prob(data1.getX(2))).thenReturn(0.9); + + when(model.prob(data2.getX(0))).thenReturn(0.2); + when(model.prob(data2.getX(1))).thenReturn(0.6); + when(model.prob(data2.getX(2))).thenReturn(0.1); + } + + /** */ + @Test + public void testAdd() { + NewComponentStatisticsAggregator agg = new NewComponentStatisticsAggregator(); + int rowCount = 10; + for (int i = 0; i < rowCount; i++) + agg.add(VectorUtils.of(0, 1, 2), i % 2 == 0); + + assertEquals(rowCount / 2, agg.rowCountForNewCluster()); + assertEquals(rowCount, agg.totalRowCount()); + assertArrayEquals(new double[] {0, 1, 2}, agg.mean().asArray(), 1e-4); + } + + /** */ + @Test + public void testPlus() { + NewComponentStatisticsAggregator agg1 = new NewComponentStatisticsAggregator(); + NewComponentStatisticsAggregator agg2 = new NewComponentStatisticsAggregator(); + int rowCount = 10; + for (int i = 0; i < rowCount; i++) + agg1.add(VectorUtils.of(0, 1, 2), i % 2 == 0); + + for (int i = 0; i < rowCount; i++) + agg2.add(VectorUtils.of(2, 1, 0), i % 2 == 1); + + NewComponentStatisticsAggregator sum = agg1.plus(agg2); + assertEquals(rowCount, sum.rowCountForNewCluster()); + assertEquals(rowCount * 2, sum.totalRowCount()); + assertArrayEquals(new double[] {1, 1, 1}, sum.mean().asArray(), 1e-4); + } + + /** */ + @Test + public void testMap() { + NewComponentStatisticsAggregator agg = computeNewMeanMap(data1, 1.0, 2, model); + + assertEquals(2, agg.rowCountForNewCluster()); + assertEquals(data1.size(), agg.totalRowCount()); + assertArrayEquals(new double[] {0.5, 0.5}, agg.mean().asArray(), 1e-4); + } + + /** */ + @Test + public void testReduce() { + double maxXsProb = 1.0; + int maxProbDivergence = 2; + NewComponentStatisticsAggregator agg1 = computeNewMeanMap(data1, maxXsProb, maxProbDivergence, model); + NewComponentStatisticsAggregator agg2 = computeNewMeanMap(data2, maxXsProb, maxProbDivergence, model); + + NewComponentStatisticsAggregator res = computeNewMeanReduce(agg1, null); + assertEquals(agg1.rowCountForNewCluster(), res.rowCountForNewCluster()); + assertEquals(agg1.totalRowCount(), res.totalRowCount()); + assertArrayEquals(agg1.mean().asArray(), res.mean().asArray(), 1e-4); + + res = computeNewMeanReduce(null, agg1); + assertEquals(agg1.rowCountForNewCluster(), res.rowCountForNewCluster()); + assertEquals(agg1.totalRowCount(), res.totalRowCount()); + assertArrayEquals(agg1.mean().asArray(), res.mean().asArray(), 1e-4); + + res = computeNewMeanReduce(agg2, agg1); + assertEquals(4, res.rowCountForNewCluster()); + assertEquals(6, res.totalRowCount()); + assertArrayEquals(new double[] {1.25, 1.25}, res.mean().asArray(), 1e-4); + + res = computeNewMeanReduce(agg1, agg2); + assertEquals(4, res.rowCountForNewCluster()); + assertEquals(6, res.totalRowCount()); + assertArrayEquals(new double[] {1.25, 1.25}, res.mean().asArray(), 1e-4); + } + + /** */ + private LabeledVector vec(double... 
values) { + return new LabeledVector<>(VectorUtils.of(values), 1.0); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/CollectionsTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/CollectionsTest.java new file mode 100644 index 0000000000000..5e9db3a0be9a4 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/CollectionsTest.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.common; + +import java.util.HashSet; +import java.util.Set; +import org.apache.ignite.ml.clustering.kmeans.KMeansModel; +import org.apache.ignite.ml.clustering.kmeans.KMeansModelFormat; +import org.apache.ignite.ml.knn.ann.ANNClassificationModel; +import org.apache.ignite.ml.knn.ann.ANNClassificationTrainer; +import org.apache.ignite.ml.knn.ann.ANNModelFormat; +import org.apache.ignite.ml.math.distances.EuclideanDistance; +import org.apache.ignite.ml.math.distances.HammingDistance; +import org.apache.ignite.ml.math.distances.ManhattanDistance; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.math.primitives.vector.impl.VectorizedViewMatrix; +import org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel; +import org.apache.ignite.ml.structures.Dataset; +import org.apache.ignite.ml.structures.DatasetRow; +import org.apache.ignite.ml.structures.FeatureMetadata; +import org.apache.ignite.ml.structures.LabeledVector; +import org.apache.ignite.ml.structures.LabeledVectorSet; +import org.apache.ignite.ml.svm.SVMLinearClassificationModel; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; + +/** + * Tests for equals and hashCode methods in classes that provide own implementations of these. 
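+ *
+ * The generic helper below leans on the {@link java.util.HashSet} contract: with consistent {@code equals}
+ * and {@code hashCode}, adding the same object twice keeps the size at one, while a non-equal object bumps it to two.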
+ */ +public class CollectionsTest { + /** */ + @Test + @SuppressWarnings("unchecked") + public void test() { + test(new VectorizedViewMatrix(new DenseMatrix(2, 2), 1, 1, 1, 1), + new VectorizedViewMatrix(new DenseMatrix(3, 2), 2, 1, 1, 1)); + + specialTest(new ManhattanDistance(), new ManhattanDistance()); + + specialTest(new HammingDistance(), new HammingDistance()); + + specialTest(new EuclideanDistance(), new EuclideanDistance()); + + FeatureMetadata data = new FeatureMetadata("name2"); + data.setName("name1"); + test(data, new FeatureMetadata("name2")); + + test(new DatasetRow<>(new DenseVector()), new DatasetRow<>(new DenseVector(1))); + + test(new LabeledVector<>(new DenseVector(), null), new LabeledVector<>(new DenseVector(1), null)); + + test(new Dataset>(new DatasetRow[] {}, new FeatureMetadata[] {}), + new Dataset>(new DatasetRow[] {new DatasetRow()}, + new FeatureMetadata[] {new FeatureMetadata()})); + + test(new LogisticRegressionModel(new DenseVector(), 1.0), + new LogisticRegressionModel(new DenseVector(), 0.5)); + + test(new KMeansModelFormat(new Vector[] {}, new ManhattanDistance()), + new KMeansModelFormat(new Vector[] {}, new HammingDistance())); + + test(new KMeansModel(new Vector[] {}, new ManhattanDistance()), + new KMeansModel(new Vector[] {}, new HammingDistance())); + + test(new SVMLinearClassificationModel(null, 1.0), new SVMLinearClassificationModel(null, 0.5)); + + test(new ANNClassificationModel(new LabeledVectorSet<>(), new ANNClassificationTrainer.CentroidStat()), + new ANNClassificationModel(new LabeledVectorSet<>(1, 1), new ANNClassificationTrainer.CentroidStat())); + + test(new ANNModelFormat(1, new ManhattanDistance(), false, new LabeledVectorSet<>(), + new ANNClassificationTrainer.CentroidStat()), + new ANNModelFormat(2, new ManhattanDistance(), false, new LabeledVectorSet<>(), + new ANNClassificationTrainer.CentroidStat())); + } + + /** Test classes that have all instances equal (eg, metrics). */ + private void specialTest(T o1, T o2) { + assertEquals(o1, o2); + + test(o1, new Object()); + } + + /** */ + private void test(T o1, T o2) { + assertNotEquals(o1, null); + assertNotEquals(o2, null); + + assertEquals(o1, o1); + assertEquals(o2, o2); + + assertNotEquals(o1, o2); + + Set set = new HashSet<>(); + set.add(o1); + set.add(o1); + assertEquals(1, set.size()); + + set.add(o2); + set.add(o2); + assertEquals(2, set.size()); + + set.remove(o1); + assertEquals(1, set.size()); + + set.remove(o2); + assertEquals(0, set.size()); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/CommonTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/CommonTestSuite.java new file mode 100644 index 0000000000000..2f42dd5d61f6d --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/CommonTestSuite.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.common; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in org.apache.ignite.ml.trees package. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + LocalModelsTest.class, + CollectionsTest.class, + ExternalizeTest.class, + ModelTest.class, + KeepBinaryTest.class +}) +public class CommonTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/ExternalizeTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/ExternalizeTest.java new file mode 100644 index 0000000000000..72aca07836622 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/ExternalizeTest.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.common; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import org.apache.ignite.ml.math.Destroyable; +import org.apache.ignite.ml.math.distances.EuclideanDistance; +import org.apache.ignite.ml.math.distances.HammingDistance; +import org.apache.ignite.ml.math.distances.ManhattanDistance; +import org.apache.ignite.ml.math.primitives.MathTestConstants; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DelegatingVector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.math.primitives.vector.impl.VectorizedViewMatrix; +import org.apache.ignite.ml.structures.Dataset; +import org.apache.ignite.ml.structures.DatasetRow; +import org.apache.ignite.ml.structures.FeatureMetadata; +import org.apache.ignite.ml.structures.LabeledVector; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +/** + * Tests for externalizable classes. 
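+ *
+ * Each fixture is round-tripped through {@link java.io.ObjectOutputStream} / {@link java.io.ObjectInputStream}
+ * backed by byte arrays; the restored object must be {@code equals} to the original and share its hash code.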
+ */
+public class ExternalizeTest {
+    /** */
+    @Test
+    @SuppressWarnings("unchecked")
+    public void test() {
+        externalizeTest(new DelegatingVector(new DenseVector(1)));
+
+        externalizeTest(new VectorizedViewMatrix(new DenseMatrix(2, 2), 1, 1, 1, 1));
+
+        externalizeTest(new ManhattanDistance());
+
+        externalizeTest(new HammingDistance());
+
+        externalizeTest(new EuclideanDistance());
+
+        externalizeTest(new FeatureMetadata());
+
+        externalizeTest(new VectorizedViewMatrix(new DenseMatrix(2, 2), 1, 1, 1, 1));
+
+        externalizeTest(new DatasetRow<>(new DenseVector()));
+
+        externalizeTest(new LabeledVector<>(new DenseVector(), null));
+
+        externalizeTest(new Dataset<DatasetRow<Vector>>(new DatasetRow[] {}, new FeatureMetadata[] {}));
+    }
+
+    /** */
+    @SuppressWarnings("unchecked")
+    private <T> void externalizeTest(T initObj) {
+        T objRestored = null;
+
+        try {
+            ByteArrayOutputStream byteArrOutputStream = new ByteArrayOutputStream();
+            ObjectOutputStream objOutputStream = new ObjectOutputStream(byteArrOutputStream);
+
+            objOutputStream.writeObject(initObj);
+
+            ByteArrayInputStream byteArrInputStream = new ByteArrayInputStream(byteArrOutputStream.toByteArray());
+            ObjectInputStream objInputStream = new ObjectInputStream(byteArrInputStream);
+
+            objRestored = (T)objInputStream.readObject();
+
+            assertEquals(MathTestConstants.VAL_NOT_EQUALS, initObj, objRestored);
+
+            assertEquals(MathTestConstants.VAL_NOT_EQUALS, 0, Integer.compare(initObj.hashCode(), objRestored.hashCode()));
+        }
+        catch (ClassNotFoundException | IOException e) {
+            fail(e + " [" + e.getMessage() + "]");
+        }
+        finally {
+            if (objRestored instanceof Destroyable)
+                ((Destroyable)objRestored).destroy();
+        }
+    }
+}
diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/KeepBinaryTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/KeepBinaryTest.java
new file mode 100644
index 0000000000000..ef33acae1a6ed
--- /dev/null
+++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/KeepBinaryTest.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.common;
+
+import java.util.UUID;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.binary.BinaryObject;
+import org.apache.ignite.binary.BinaryObjectBuilder;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.internal.util.IgniteUtils;
+import org.apache.ignite.ml.clustering.kmeans.KMeansModel;
+import org.apache.ignite.ml.clustering.kmeans.KMeansTrainer;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.BinaryObjectVectorizer;
+import org.apache.ignite.ml.dataset.impl.cache.CacheBasedDatasetBuilder;
+import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Test;
+
+/**
+ * Test for IGNITE-10700.
+ */
+public class KeepBinaryTest extends GridCommonAbstractTest {
+    /** Number of nodes in grid. */
+    private static final int NODE_COUNT = 2;
+
+    /** Number of samples. */
+    public static final int NUMBER_OF_SAMPLES = 1000;
+
+    /** Half of samples. */
+    public static final int HALF = NUMBER_OF_SAMPLES / 2;
+
+    /** Ignite instance. */
+    private Ignite ignite;
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTestsStarted() throws Exception {
+        for (int i = 1; i <= NODE_COUNT; i++)
+            startGrid(i);
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void afterTestsStopped() {
+        stopAllGrids();
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTest() {
+        /* Grid instance. */
+        ignite = grid(NODE_COUNT);
+        ignite.configuration().setPeerClassLoadingEnabled(true);
+        IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+    }
+
+    /**
+     * Startup Ignite, populate cache and train some model.
+     */
+    @Test
+    public void test() {
+        IgniteCache<Integer, BinaryObject> dataCache = populateCache(ignite);
+
+        KMeansTrainer trainer = new KMeansTrainer();
+
+        CacheBasedDatasetBuilder<Integer, BinaryObject> datasetBuilder =
+            new CacheBasedDatasetBuilder<>(ignite, dataCache).withKeepBinary(true);
+
+        KMeansModel mdl = trainer.fit(datasetBuilder, new BinaryObjectVectorizer<Integer>("feature1").labeled("label"));
+
+        Integer zeroCentre = mdl.predict(VectorUtils.num2Vec(0.0));
+
+        assertTrue(mdl.centers()[zeroCentre].get(0) == 0);
+    }
+
+    /**
+     * Populate cache with binary objects.
+     */
+    private IgniteCache<Integer, BinaryObject> populateCache(Ignite ignite) {
+        CacheConfiguration<Integer, BinaryObject> cacheConfiguration = new CacheConfiguration<>();
+        cacheConfiguration.setName("TEST_" + UUID.randomUUID());
+
+        IgniteCache<Integer, BinaryObject> cache = ignite.createCache(cacheConfiguration).withKeepBinary();
+
+        BinaryObjectBuilder builder = ignite.binary().builder("testType");
+
+        for (int i = 0; i < NUMBER_OF_SAMPLES; i++) {
+            if (i < HALF)
+                cache.put(i, builder.setField("feature1", 0.0).setField("label", 0.0).build());
+            else
+                cache.put(i, builder.setField("feature1", 10.0).setField("label", 1.0).build());
+        }
+
+        return cache;
+    }
+}
diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/LocalModelsTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/LocalModelsTest.java
new file mode 100644
index 0000000000000..5e1e0813b765f
--- /dev/null
+++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/LocalModelsTest.java
@@ -0,0 +1,188 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.common;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Function;
+import org.apache.ignite.ml.Exporter;
+import org.apache.ignite.ml.FileExporter;
+import org.apache.ignite.ml.clustering.kmeans.KMeansModel;
+import org.apache.ignite.ml.clustering.kmeans.KMeansModelFormat;
+import org.apache.ignite.ml.clustering.kmeans.KMeansTrainer;
+import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer;
+import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder;
+import org.apache.ignite.ml.knn.NNClassificationModel;
+import org.apache.ignite.ml.knn.ann.ANNClassificationModel;
+import org.apache.ignite.ml.knn.ann.ANNClassificationTrainer;
+import org.apache.ignite.ml.knn.ann.ANNModelFormat;
+import org.apache.ignite.ml.knn.ann.KNNModelFormat;
+import org.apache.ignite.ml.math.distances.ManhattanDistance;
+import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector;
+import org.apache.ignite.ml.regressions.linear.LinearRegressionModel;
+import org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel;
+import org.apache.ignite.ml.structures.LabeledVector;
+import org.apache.ignite.ml.structures.LabeledVectorSet;
+import org.apache.ignite.ml.svm.SVMLinearClassificationModel;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Tests for models import/export functionality.
+ */
+public class LocalModelsTest {
+    /** */
+    @Test
+    public void importExportKMeansModelTest() throws IOException {
+        executeModelTest(mdlFilePath -> {
+            KMeansModel mdl = getClusterModel();
+
+            Exporter<KMeansModelFormat, String> exporter = new FileExporter<>();
+
+            mdl.saveModel(exporter, mdlFilePath);
+
+            KMeansModelFormat load = exporter.load(mdlFilePath);
+
+            Assert.assertNotNull(load);
+
+            KMeansModel importedMdl = new KMeansModel(load.getCenters(), load.getDistance());
+
+            Assert.assertEquals("", mdl, importedMdl);
+
+            return null;
+        });
+    }
+
+    /** */
+    @Test
+    public void importExportLinearRegressionModelTest() throws IOException {
+        executeModelTest(mdlFilePath -> {
+            LinearRegressionModel mdl = new LinearRegressionModel(new DenseVector(new double[]{1, 2}), 3);
+            Exporter<LinearRegressionModel, String> exporter = new FileExporter<>();
+            mdl.saveModel(exporter, mdlFilePath);
+
+            LinearRegressionModel load = exporter.load(mdlFilePath);
+
+            Assert.assertNotNull(load);
+            Assert.assertEquals("", mdl, load);
+
+            return null;
+        });
+    }
+
+    /** */
+    @Test
+    public void importExportSVMBinaryClassificationModelTest() throws IOException {
+        executeModelTest(mdlFilePath -> {
+            SVMLinearClassificationModel mdl = new SVMLinearClassificationModel(new DenseVector(new double[] {1, 2}), 3);
+            Exporter<SVMLinearClassificationModel, String> exporter = new FileExporter<>();
+            mdl.saveModel(exporter, mdlFilePath);
+
+            SVMLinearClassificationModel load = exporter.load(mdlFilePath);
+
+            Assert.assertNotNull(load);
+            Assert.assertEquals("", mdl, load);
+
+            return null;
+        });
+    }
+
+    /** */
+    @Test
+    public void importExportLogisticRegressionModelTest() throws IOException {
+        executeModelTest(mdlFilePath -> {
+            LogisticRegressionModel mdl = new LogisticRegressionModel(new DenseVector(new double[]{1, 2}), 3);
+            Exporter<LogisticRegressionModel, String> exporter = new FileExporter<>();
+            mdl.saveModel(exporter, mdlFilePath);
+
+            LogisticRegressionModel load = exporter.load(mdlFilePath);
+
+            Assert.assertNotNull(load);
+            Assert.assertEquals("", mdl, load);
+
+            return null;
+        });
+    }
+
+    /** */
+    private void executeModelTest(Function<String, Void> code) throws IOException {
+        Path mdlPath = Files.createTempFile(null, null);
+
+        Assert.assertNotNull(mdlPath);
+
+        try {
+            String mdlFilePath = mdlPath.toAbsolutePath().toString();
+
+            Assert.assertTrue(String.format("File %s not found.", mdlFilePath), Files.exists(mdlPath));
+
+            code.apply(mdlFilePath);
+        }
+        finally {
+            Files.deleteIfExists(mdlPath);
+        }
+    }
+
+    /** */
+    private KMeansModel getClusterModel() {
+        Map<Integer, double[]> data = new HashMap<>();
+        data.put(0, new double[] {1.0, 1959, 325100});
+        data.put(1, new double[] {1.0, 1960, 373200});
+
+        KMeansTrainer trainer = new KMeansTrainer()
+            .withAmountOfClusters(1);
+
+        return trainer.fit(
+            new LocalDatasetBuilder<>(data, 2),
+            new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST)
+        );
+    }
+
+    /** */
+    @Test
+    public void importExportANNModelTest() throws IOException {
+        executeModelTest(mdlFilePath -> {
+            final LabeledVectorSet<LabeledVector> centers = new LabeledVectorSet<>();
+
+            NNClassificationModel mdl = new ANNClassificationModel(centers, new ANNClassificationTrainer.CentroidStat())
+                .withK(4)
+                .withDistanceMeasure(new ManhattanDistance())
+                .withWeighted(true);
+
+            Exporter<KNNModelFormat, String> exporter = new FileExporter<>();
+            mdl.saveModel(exporter, mdlFilePath);
+
+            ANNModelFormat load = (ANNModelFormat)exporter.load(mdlFilePath);
+
+            Assert.assertNotNull(load);
+
+            NNClassificationModel importedMdl =
+                new ANNClassificationModel(load.getCandidates(), new ANNClassificationTrainer.CentroidStat())
+                    .withK(load.getK())
+                    .withDistanceMeasure(load.getDistanceMeasure())
+                    .withWeighted(true);
+
+            Assert.assertEquals("", mdl, importedMdl);
+
+            return null;
+        });
+    }
+}
diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/ModelTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/ModelTest.java
new file mode 100644
index 0000000000000..66be960cd4def
--- /dev/null
+++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/ModelTest.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.common;
+
+import org.apache.ignite.ml.IgniteModel;
+import org.junit.Test;
+
+import static org.junit.Assert.assertNotNull;
+
+/**
+ * Tests for {@link IgniteModel} functionality.
+ */
+public class ModelTest {
+    /** */
+    @Test
+    public void testCombine() {
+        IgniteModel<Object, Object> mdl = new TestModel<>().combine(new TestModel<>(), (x, y) -> x);
+
+        assertNotNull(mdl.toString(true));
+        assertNotNull(mdl.toString(false));
+    }
+
+    /** */
+    private static class TestModel<T, V> implements IgniteModel<T, V> {
+        /** {@inheritDoc} */
+        @Override public V predict(T t) {
+            return null;
+        }
+    }
+}
diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/TrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/TrainerTest.java
new file mode 100644
index 0000000000000..fc196e957ea4c
--- /dev/null
+++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/common/TrainerTest.java
@@ -0,0 +1,1189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.common;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+/**
+ * Basic fields and methods for the trainer tests.
+ */
+@RunWith(Parameterized.class)
+public class TrainerTest {
+    /** Number of parts to be tested.
*/ + private static final int[] partsToBeTested = new int[] {1, 2, 3, 4, 13}; + + /** Parameters. */ + @Parameterized.Parameters(name = "Data divided on {0} partitions, training with batch size {1}") + public static Iterable data() { + List res = new ArrayList<>(); + + for (int part : partsToBeTested) + res.add(new Integer[] {part}); + + return res; + } + + /** Number of partitions. */ + @Parameterized.Parameter + public int parts; + + /** Precision in test checks. */ + protected static final double PRECISION = 1e-2; + + /** Two-easy clustered data. */ + protected static final double[][] twoClusters = { + {0, 519.9017766224466, 554.4100892224841}, + {0, 563.5609233456146, 558.5857619285702}, + {0, 503.5549215892729, 594.3825404658926}, + {0, 584.7460223841858, 515.2243614011547}, + {0, 575.095839624477, 590.8556618187845}, + {0, 594.3592060102463, 554.2221434279162}, + {0, 583.432820535236, 504.66164764881523}, + {0, 599.0963460154512, 534.1774623344388}, + {0, 568.9703081604248, 543.2226391011388}, + {0, 586.6698629586531, 529.5241964168969}, + {0, 551.1051323168858, 539.1885401513679}, + {0, 508.4609024546371, 504.35073029226396}, + {0, 599.0470661333914, 569.4595846036917}, + {0, 570.5493551454197, 526.7253349784085}, + {0, 534.2832458435303, 550.3000463382016}, + {0, 594.4616179647461, 536.3197487506842}, + {0, 565.3197172280577, 506.3293991999001}, + {0, 592.6602122456759, 513.646808538896}, + {0, 509.8216048850749, 509.4973240875119}, + {0, 502.3878128815718, 570.9482197992043}, + {0, 594.6632085763065, 547.9275009326266}, + {0, 529.6467177083762, 547.9107158851994}, + {0, 544.9626346641528, 567.3832919235468}, + {0, 511.4105135690089, 578.1849565872583}, + {0, 501.01584549257973, 570.6868576016038}, + {0, 595.8080144542582, 512.03499265368}, + {0, 528.786843178995, 502.8166496868458}, + {0, 528.6621082789842, 560.8712577770658}, + {0, 510.8974224808237, 596.4667253000505}, + {0, 583.8947380467763, 547.9688139648637}, + {0, 561.4766784411281, 531.2449896695659}, + {0, 560.6943663394893, 566.9710095676068}, + {0, 517.393777179133, 588.7651419118193}, + {0, 500.4713974957799, 528.0769354138976}, + {0, 545.8783916658755, 586.1791106273984}, + {0, 587.1987551324714, 552.7968581692342}, + {0, 504.14955324617733, 502.9202365190475}, + {0, 589.118356537786, 567.5453447798067}, + {0, 581.0404600079042, 524.3383641814191}, + {0, 578.836850556919, 519.0303628080188}, + {0, 532.684541905037, 592.0373074571884}, + {0, 539.631541540315, 500.86701934899133}, + {0, 585.080559785121, 559.185605736917}, + {0, 557.6130747490417, 586.9060188494332}, + {0, 511.4069711786483, 505.20182772247955}, + {0, 543.3420695017039, 589.0522243776551}, + {0, 545.7836567392021, 545.9829264066165}, + {0, 587.4404520697882, 566.2450515524025}, + {0, 598.0352806197182, 592.9871556855218}, + {0, 599.1191676869415, 517.072913155282}, + {0, 598.7990121325806, 542.5922389368699}, + {0, 567.9157541778169, 508.8637304888606}, + {0, 516.9141893487038, 504.5333015373364}, + {0, 528.2650000284832, 592.3618290091457}, + {0, 577.0877824827497, 572.106440915086}, + {0, 569.5034479656674, 513.1883531774486}, + {0, 587.7126777761002, 568.9323649263932}, + {0, 565.9489368582279, 516.9745616328178}, + {0, 557.5589060305804, 515.2687667913198}, + {0, 503.1554198985989, 509.09477188561954}, + {0, 550.0203572858189, 595.1223421437577}, + {0, 524.7913631016987, 523.3640528148924}, + {0, 552.7246513026029, 546.2810129784725}, + {0, 586.3892191983499, 552.7576239548819}, + {0, 526.0748315118926, 573.804342015302}, + {0, 565.1398123093003, 
539.6854465576956}, + {0, 527.0537447563926, 595.2059572407275}, + {0, 598.4431244531863, 518.7675712573573}, + {0, 518.1347648644486, 571.2772685572616}, + {0, 522.0665003535328, 597.4691949058798}, + {0, 559.3717433904218, 507.63523020707987}, + {0, 517.7519710704423, 595.9228343205995}, + {0, 557.1028047052068, 513.67799332853}, + {0, 527.9783249961056, 596.5923404246605}, + {0, 508.9548667053109, 583.3851484560171}, + {0, 597.3054599709918, 572.0492942719156}, + {0, 506.48170301986886, 545.2749213691201}, + {0, 569.5215580939445, 552.2362437646713}, + {0, 530.5232047696994, 517.814585379635}, + {0, 582.7447646378554, 554.0837636670908}, + {0, 510.04656659835496, 548.308864572033}, + {0, 517.0884034675382, 503.6293035255885}, + {0, 547.4077952612713, 521.8105170207767}, + {0, 525.2452470246204, 565.7690087891091}, + {0, 525.726872006642, 592.172865284197}, + {0, 598.311246268818, 506.29428096115674}, + {0, 599.4974643204109, 579.8062124124598}, + {0, 584.7506624741848, 592.2505541944379}, + {0, 598.7379007956142, 561.8346831647877}, + {0, 553.9325403298083, 540.4895037718127}, + {0, 577.4868596401562, 533.9482256583582}, + {0, 524.7729276101758, 523.3563039535018}, + {0, 513.6033305233657, 572.2592770048955}, + {0, 574.5120210087475, 557.5521505158835}, + {0, 573.951281294893, 527.3670057739082}, + {0, 548.1326423460839, 551.1839666791825}, + {0, 508.2214563147455, 521.2342805765958}, + {0, 515.93448815859, 511.17271820377954}, + {0, 586.8712784936447, 571.3833808148395}, + {0, 557.5242762492126, 527.4051948485309}, + {1, -527.9820655500421, -597.0614987497938}, + {1, -594.7423576008234, -570.0387215442279}, + {1, -545.604557338824, -554.0763169557739}, + {1, -502.35172702595014, -586.8484342087179}, + {1, -587.293337705269, -588.0796352216714}, + {1, -587.0516505340747, -517.7300179102016}, + {1, -597.0360062250987, -547.9934802704281}, + {1, -540.578489505472, -519.8075273206096}, + {1, -530.4922286462058, -523.234050745461}, + {1, -570.1324748254381, -584.3427934817109}, + {1, -508.71765087148526, -521.2260165247377}, + {1, -506.10153233039114, -546.0469706912013}, + {1, -587.6311232069863, -500.8789962962048}, + {1, -585.9407497123008, -593.6250426349442}, + {1, -597.7192354774427, -504.3968636076061}, + {1, -587.6912279656732, -587.810549281485}, + {1, -567.4906024676383, -529.7889328775241}, + {1, -510.5883782383144, -564.6056218025714}, + {1, -545.5877634339324, -503.13342363625316}, + {1, -595.491952236763, -526.4157102337199}, + {1, -565.8931103880244, -512.3930396698607}, + {1, -564.9817304867518, -518.5421568025347}, + {1, -528.5838433236987, -590.2716385768655}, + {1, -568.3038165320794, -523.2037657971182}, + {1, -513.579781599134, -540.7083264768794}, + {1, -577.5234177434545, -574.4083212880694}, + {1, -566.4331360533965, -529.8498325039095}, + {1, -517.1862636590681, -544.9513758919965}, + {1, -534.6578726508548, -515.7113551681354}, + {1, -531.5918919225953, -508.0051177928042}, + {1, -521.335920134657, -549.8508399779365}, + {1, -587.6565547672371, -500.40617781899505}, + {1, -502.89297655657947, -550.0462820641452}, + {1, -565.9529549834383, -570.5296426883887}, + {1, -539.695184660248, -566.3720803092855}, + {1, -557.2412994794262, -516.6673702747074}, + {1, -548.3193140374153, -511.0113251963232}, + {1, -599.1568790407902, -559.2622714664305}, + {1, -571.755520275542, -554.0839358749181}, + {1, -544.964945135059, -564.448243523719}, + {1, -574.7985361688525, -593.6384131471896}, + {1, -563.642288502551, -538.3721218790038}, + {1, -500.4279098845297, 
-583.9340798923859}, + {1, -569.917708080877, -550.7162526230916}, + {1, -549.8345448125123, -565.7759787232027}, + {1, -527.8248193430064, -562.9256751876678}, + {1, -508.69265110570973, -544.8174395269017}, + {1, -561.7662650395065, -534.6799220439667}, + {1, -510.11351976460816, -567.17615864117}, + {1, -592.6464340868883, -546.7679954740394}, + {1, -591.5566687475105, -516.335391669214}, + {1, -598.1620280980214, -563.5663494736577}, + {1, -571.6540085024682, -514.5024112396218}, + {1, -597.0973739353884, -518.6402453320493}, + {1, -597.971879649216, -541.9911785849602}, + {1, -502.7804400334985, -527.9041465965335}, + {1, -502.24013032418287, -596.8646708140396}, + {1, -598.4180305891012, -535.013864017069}, + {1, -575.018281589379, -596.0252991207353}, + {1, -593.1939727679464, -557.8288153478848}, + {1, -552.9384213856413, -579.3694486320592}, + {1, -559.9203621818546, -554.0072497905501}, + {1, -588.3411365623961, -575.9606196770269}, + {1, -517.0844394937534, -547.9291196136605}, + {1, -509.32764537741576, -591.737729755405}, + {1, -557.2674260753181, -543.5864572972603}, + {1, -565.1475139126333, -559.4796022645727}, + {1, -556.0025119789701, -572.6261174533101}, + {1, -590.7960121205607, -517.0840963139137}, + {1, -580.3696729031607, -541.5331163469414}, + {1, -519.8369954073894, -599.1883519701099}, + {1, -590.5570159829517, -587.4602437344656}, + {1, -502.5275914906194, -540.3454217852702}, + {1, -584.1282872304774, -593.2194019651928}, + {1, -557.8446121942737, -558.0626917521755}, + {1, -580.209165096907, -588.7259851212183}, + {1, -510.90874302504056, -591.5091481352281}, + {1, -514.1724729381817, -595.1020401318071}, + {1, -552.5076612804402, -548.397966879673}, + {1, -565.2070083573942, -536.1826380211752}, + {1, -565.9469212749985, -561.5506672108052}, + {1, -526.4398083538586, -507.1913169678737}, + {1, -595.2594496218172, -594.98464576562}, + {1, -530.6904491548875, -519.0678635750138}, + {1, -547.9945700155467, -597.6557660417575}, + {1, -554.9468747569997, -591.1678311453294}, + {1, -593.9678599910096, -518.9397714406934}, + {1, -580.6827396967085, -541.1770564720399}, + {1, -526.2991394747967, -595.5353558464069}, + {1, -532.0567052472832, -547.7555982808492}, + {1, -506.550640897891, -501.44148884553215}, + {1, -537.7945174903881, -539.9517392521116}, + {1, -588.1139279080066, -572.5589261656883}, + {1, -598.4030676856231, -528.8036722121387}, + {1, -532.6970859002654, -567.13898500018}, + {1, -564.8245220213231, -595.6981311004888}, + {1, -568.8669962693484, -516.5125158739406}, + {1, -549.1709908638323, -558.8129291840139}, + {1, -510.85336064345756, -575.3635308154353}, + {1, -583.9245510800588, -536.793806117792} + }; + + /** The data is easy separated with classifier by y = x. 
*/ + public static final double[][] twoLinearlySeparableClasses = { + {0.0, -122.69914721554494, -152.90003228835155}, + {1.0, -988.7803093110984, 39.64498230320555}, + {1.0, -721.0342526056645, -167.29469954420483}, + {1.0, 606.0603250738964, 612.4505657575703}, + {1.0, -435.7428098964267, 749.26660250907}, + {0.0, 977.0266542119459, 906.2797731011997}, + {0.0, 442.79191352401017, 99.68443783203702}, + {1.0, -984.4696576079481, 98.58983213854299}, + {0.0, 950.3560064579242, -54.087172588871226}, + {0.0, 989.1247453182418, -942.9228555672748}, + {1.0, -950.3830359669219, 720.9427578590175}, + {0.0, -263.7437828854337, -369.67762228969286}, + {1.0, -837.771820186008, 966.2671117206883}, + {1.0, -101.63051923258354, 135.30595977925213}, + {0.0, 927.4068611376827, 552.576689560276}, + {1.0, 671.674613544031, 867.0342619845135}, + {0.0, 489.04809639359723, -371.80622025525497}, + {1.0, -577.8620591314951, -561.9793202960524}, + {1.0, -628.699903999805, 746.9179933415019}, + {0.0, 787.7955413710754, 729.8880998762927}, + {1.0, -160.9905826731191, 597.1342309929371}, + {1.0, -661.7582546189365, 294.3559610458383}, + {0.0, 992.067372280372, -586.7840785767917}, + {0.0, -229.6963941046797, -860.6481903559245}, + {1.0, -459.91823406828814, 174.31002243199828}, + {0.0, 132.09417954527203, -203.6015836943012}, + {0.0, 458.8315635996389, -109.92869423399452}, + {1.0, 424.63154498678796, 581.7436424491116}, + {0.0, 606.7777384705123, 382.51034075942744}, + {1.0, 133.97732363544492, 810.4293150045719}, + {1.0, -752.3792672455503, 902.3533215842801}, + {0.0, 124.02578589031486, -242.0045741962906}, + {0.0, 65.95100120357665, -362.9563512717997}, + {1.0, -975.7825688109236, -724.6782664271469}, + {1.0, -885.3333915784285, -166.8285153252507}, + {1.0, -242.89869955409756, 878.9999767933075}, + {0.0, 271.2149993049329, -490.0480096390996}, + {0.0, -74.16302081043352, -824.0859586265949}, + {1.0, -520.4108075793048, 751.6954919374432}, + {0.0, 104.03293413801771, -631.0663974778311}, + {0.0, 179.4274025610996, -610.9764997543232}, + {1.0, 291.2686412591502, 892.1178988173092}, + {0.0, 723.1240938478552, -291.3765504086348}, + {0.0, 12.575218418479949, -307.36975804125973}, + {1.0, -397.44825972130786, -295.76021536144117}, + {1.0, -163.90291786947955, 501.6868597449188}, + {0.0, 513.9232732684154, -287.4072243396091}, + {1.0, 146.81987289015547, 293.1152654799746}, + {1.0, -422.734205503476, 154.09536939552663}, + {0.0, 293.2607563043757, -141.65822134246525}, + {1.0, -93.46771747630169, 73.91086927080437}, + {1.0, -972.6525030120272, -867.0819061818511}, + {1.0, -636.136018043414, 55.4840372628596}, + {1.0, -821.240801777343, -750.3407912999469}, + {0.0, 826.9598934792543, -48.17510971836464}, + {0.0, -737.5399357047692, -834.168742619978}, + {0.0, 910.2286110591372, -321.2153303241547}, + {1.0, -539.8385115026349, -204.624635929521}, + {0.0, 710.9811829617875, 156.53494004963864}, + {1.0, -576.1327147891295, -255.98030417689222}, + {0.0, -406.9117225223731, -568.1674835571359}, + {1.0, 786.4324782672932, 879.9433045727255}, + {0.0, 655.1507253229393, -931.0320133380443}, + {1.0, 920.1359556509667, 975.4010808044634}, + {0.0, 340.9923780361835, -791.6415124130187}, + {0.0, 789.0326432258107, 101.45600150894029}, + {0.0, 301.62354598942807, -263.0383267796972}, + {0.0, -196.75683699829483, -759.6731432356696}, + {1.0, 104.36756752228234, 362.6645930627608}, + {0.0, -110.09892045131369, -522.6327938767872}, + {0.0, 983.058982063912, -853.6685099856713}, + {0.0, 853.0396544144112, -373.6430440893963}, + {0.0, 
894.5396176478532, -259.3520478430646}, + {0.0, -59.540445910742505, -405.2785421154832}, + {1.0, -195.02204474289272, -98.01099074578019}, + {1.0, -400.33845881394757, 517.4826371806812}, + {0.0, 998.8721163227847, 658.7589886248159}, + {1.0, -739.9839264739526, 281.7808456690698}, + {0.0, 225.2955438875149, -240.13571797647785}, + {0.0, 415.36363610958847, 119.2467848060553}, + {1.0, -430.93611072673775, 953.9339020518189}, + {0.0, 695.641934652828, -613.3163270715312}, + {1.0, -977.0662561296275, 44.1566618295617}, + {0.0, 894.0074404584143, 115.97551230630302}, + {1.0, -256.65810543256225, 121.31432413171797}, + {1.0, -745.2570475473517, 144.83266177886867}, + {0.0, 865.266441371979, -329.08860770412593}, + {1.0, -262.69924145366974, 196.52256942501003}, + {0.0, 858.8703536921596, -755.3718265129426}, + {1.0, -620.7574721811682, 744.695289706485}, + {1.0, 526.9918067706062, 622.6110941283573}, + {1.0, 30.51838905352247, 451.84360857486945}, + {1.0, -886.670070825786, 955.5438997547349}, + {0.0, -419.85446648529296, -904.4363933507589}, + {1.0, -19.357361515996104, 288.3545217146416}, + {1.0, 425.807567480902, 617.3859577708511}, + {1.0, -369.8197242330872, 428.4625522196195}, + {1.0, -540.2030619980012, 980.1078500916262}, + {0.0, 963.0216885940265, -999.6718455904652}, + {0.0, -36.084390168692494, -930.2210871204579}, + {0.0, 686.7777019875359, 274.083830555807}, + {1.0, -798.5755214306325, -292.6360310433025}, + {0.0, -302.49374189510456, -979.2873514693756}, + {1.0, -473.88156240514184, 290.3700442022921}, + {1.0, -619.3422333592813, -203.62900604757556}, + {1.0, -603.8165620304862, 433.7049783716991}, + {0.0, -394.9003601369652, -423.49571094476414}, + {0.0, -297.5499912778255, -379.6966117627778}, + {0.0, 914.6350307682171, 395.0639307730339}, + {1.0, 302.2432544019764, 420.03068857885364}, + {1.0, -486.2192439106092, 504.61160963291354}, + {0.0, -80.9055582464382, -999.3540019713568}, + {1.0, -808.7735610468485, -600.3003616235419}, + {1.0, 559.7216432827174, 573.1410775962665}, + {0.0, 107.25054731907449, 56.68399536280276}, + {1.0, -986.8173329580039, 955.5975873551458}, + {0.0, -28.898975148538057, -764.5914349235939}, + {0.0, 544.5435587517745, 541.7144224905855}, + {1.0, -733.3388961452514, 995.0625378143936}, + {0.0, -424.0376248679678, -808.8197992783022}, + {0.0, 69.10888994619336, -596.3814493832142}, + {0.0, 668.7563898645246, -309.5338641095864}, + {1.0, -664.6829023895461, -421.3131122742957}, + {0.0, 34.30209430645755, -10.50945210920679}, + {0.0, -370.6335997213754, -510.2102646234516}, + {1.0, 430.4223842649294, 947.0324231650752}, + {1.0, -561.4417521638584, 912.0398180862007}, + {0.0, -529.1099093762112, -787.9426065835444}, + {0.0, -784.2287272477402, -950.6749150482902}, + {1.0, -292.2382923363127, 29.73057963193787}, + {1.0, 543.8216641288004, 574.9668960406921}, + {0.0, 492.70797586385834, -508.7411915523603}, + {0.0, 847.4958582226334, 141.27775112134555}, + {0.0, -294.9950818964355, -539.6512583592041}, + {1.0, -731.3440778046363, -194.13179207217638}, + {0.0, -26.21276485761848, -177.1382736912766}, + {0.0, 169.10051967522577, -877.8835027096119}, + {0.0, 869.7338657560076, -216.14439990877327}, + {0.0, 676.9668800100419, 487.3264255975398}, + {0.0, 340.2086777131092, -483.69798685778176}, + {0.0, 177.05787101614578, -187.8731928010908}, + {0.0, 514.0064634256835, -838.309309799528}, + {1.0, -945.6616134661633, -892.0662652148447}, + {0.0, 706.7531607568874, 584.875678987067}, + {0.0, 996.1691889712217, -381.420741757301}, + {0.0, 846.3827047328193, 
138.5937078747695}, + {1.0, -579.1773394655615, -551.6157981896823}, + {1.0, -379.8315393213704, 376.240073123181}, + {0.0, 416.70241675343345, -762.0460887999392}, + {0.0, 784.4659593773504, -476.3450292459248}, + {0.0, -328.2495971471759, -797.0282102006712}, + {1.0, 427.63385513313506, 691.0529822653089}, + {0.0, 478.22491887051683, 368.08172770775104}, + {0.0, -194.5486491952804, -635.7562271928532}, + {1.0, 462.9118544444739, 546.477694721709}, + {1.0, -364.33646342640543, -16.525517700831642}, + {1.0, 191.5538518885253, 534.4886561736935}, + {1.0, 162.29801970257063, 204.07339353277848}, + {1.0, 359.87375962515307, 510.4390321509045}, + {0.0, 906.0920707478278, 518.474366833321}, + {0.0, -23.926514764001354, -545.5535138792807}, + {1.0, -457.5490330216003, 462.75697632384026}, + {1.0, 361.19368061986074, 602.0833438729098}, + {1.0, 240.82404813916537, 903.8580437547587}, + {0.0, 682.9887385477937, -575.5748494609797}, + {0.0, -524.9683035626636, -643.4995281011295}, + {1.0, -868.3907344133812, 687.0334981662659}, + {0.0, 483.1046447412375, 425.5242965675352}, + {0.0, 441.7390582141493, -178.6473657093535}, + {0.0, 857.9901628015248, -725.079106653412}, + {1.0, 3.9407370946466926, 501.36916187999213}, + {0.0, 987.6165576421165, -870.7792926909152}, + {0.0, 38.550394080002434, -316.2460756905849}, + {1.0, 259.98559430828277, 779.1704474238529}, + {1.0, -772.0783930084303, 457.81379891960387}, + {0.0, 965.2460667816263, -900.5906154928432}, + {0.0, 435.8488975524808, -807.3179393158829}, + {1.0, -414.9097308847265, 663.2091519493613}, + {0.0, -692.3369071358595, -853.7674486529854}, + {1.0, -527.6968945977544, -89.29268231562753}, + {0.0, 98.58509375449921, -812.2575242800065}, + {1.0, -246.4858612821199, 690.7736181778389}, + {0.0, 306.0413673433336, 50.36342267895475}, + {0.0, -326.3755954952927, -630.9271581822045}, + {0.0, 435.3759701541835, -478.72141764190417}, + {0.0, 150.07627192243012, -126.16495181072969}, + {0.0, 999.2382522208045, 293.8336213483592}, + {1.0, -970.7818229850416, 559.8116781984274}, + {0.0, 321.62133209742956, -446.07065722044115}, + {1.0, 387.61470906465297, 809.9877801153038}, + {1.0, 375.48380231362376, 548.1340438996276}, + {0.0, 198.31962497327982, -841.3407638914643}, + {0.0, -59.75027524961797, -196.91881794207666}, + {0.0, 539.4390329297466, 265.73233936446013}, + {1.0, 161.7769611006779, 420.4911194344545}, + {1.0, -422.73262266569805, 305.27632230640575}, + {0.0, 419.7041783295376, 384.4277361814418}, + {1.0, -384.80122335064925, 128.84723939702212}, + {0.0, 345.8732451410485, -634.6766931661393}, + {1.0, -753.0957875425104, 162.043321600848}, + {1.0, -721.0825943433963, -647.1437151757809}, + {0.0, 737.8179495142201, -612.9000146979762}, + {0.0, 165.62609685662937, -209.04556534374638}, + {1.0, 211.75025757991534, 762.4363190775396}, + {0.0, -282.0707259050812, -631.5669067165459}, + {0.0, -10.649387489441551, -11.742073063187377}, + {0.0, 532.2273317939553, -714.4637938741703}, + {0.0, 851.6255007653094, -428.168617931829}, + {0.0, -650.2303513768155, -701.0819971407498}, + {0.0, 486.19072881419584, 17.642342348021202}, + {0.0, 937.5878660613639, 253.91073899684488}, + {1.0, -481.7837261941776, 386.0515070365086}, + {1.0, 898.8591491398315, 960.3282479515362}, + {1.0, -795.2119099095994, -52.442255260638944}, + {1.0, -832.14760576095, 406.48368080778823}, + {1.0, 317.3610961002403, 475.88090137988934}, + {1.0, -543.9941239514503, 937.9571974443777}, + {1.0, -737.7149868841586, 412.02870959820666}, + {1.0, -86.04799530647608, 764.2717139104996}, + 
{1.0, -908.3441434769735, -52.62148904481751}, + {1.0, -558.4878652128368, 975.5017115797407}, + {1.0, -120.28961819893993, 58.60059810912276}, + {0.0, 797.7665926374921, -530.0884822652556}, + {0.0, -248.62486746176887, -983.5555931167586}, + {0.0, 910.1931415438364, 35.953135142478914}, + {1.0, -304.741023136228, 253.0138864886694}, + {0.0, -510.13133519018925, -642.3600729680307}, + {0.0, 988.5683650098642, -751.2030447890847}, + {1.0, -118.0142080751416, 352.20209758019996}, + {0.0, -638.757222741898, -685.6631975937353}, + {0.0, 759.5622347453971, -722.2769348273996}, + {0.0, -740.3498419247273, -974.2677201928796}, + {0.0, -776.6102763008262, -993.7697826767383}, + {1.0, -895.9448277148887, -462.29125820523006}, + {0.0, -311.8810163384071, -318.9742942085709}, + {0.0, 368.78035230644787, -273.65009131252566}, + {0.0, 731.1488644867686, -184.2725009666142}, + {1.0, 240.0262332913362, 544.8792933528591}, + {1.0, -129.8786600652611, 122.64122390591797}, + {1.0, -998.8693504661202, -989.3959455521401}, + {0.0, 358.9021702584721, -372.46195332982563}, + {0.0, 423.66170839399, -3.6733713507491075}, + {0.0, 320.08527272511014, -267.49487239617406}, + {1.0, 628.8557340365153, 716.1736088420723}, + {1.0, 87.0852622927755, 191.08205494997515}, + {0.0, -163.5535634273158, -401.43333064263857}, + {1.0, 241.57291015127043, 354.07473809573935}, + {0.0, 425.42982178930424, -659.6389818980119}, + {1.0, -513.057622632338, -150.48805414197307}, + {0.0, 435.2888705572377, -500.4699931158425}, + {1.0, -761.2341202466506, 919.1637075257438}, + {1.0, -254.8539665845866, 711.5522826694619}, + {0.0, -350.2587997576785, -911.7842377194485}, + {0.0, 588.5547568621123, -16.003674634160916}, + {0.0, -557.7880688291352, -939.7740734026603}, + {0.0, 683.6988697659988, -285.8831419034458}, + {0.0, 782.8461154585116, 426.91516285206694}, + {1.0, -792.3388875152918, 361.1342300030676}, + {1.0, -673.792921360787, 820.8934158286147}, + {1.0, -15.357504282120772, 15.275909249335541}, + {0.0, -99.22050275699814, -249.077767711845}, + {1.0, -820.111231678807, -320.1107983145504}, + {0.0, 911.7878651586336, 825.2998851049153}, + {1.0, -750.2941326911656, -629.1546336560141}, + {1.0, -890.6374102685097, -804.5407239545832}, + {1.0, -204.75148861468108, 722.1116624961337}, + {0.0, 519.1714356909579, 154.07772492651725}, + {0.0, 982.2450336212896, 897.8824490832485}, + {0.0, 554.4793545664838, 335.7541373769475}, + {1.0, -339.90247025178235, 47.02715071976445}, + {0.0, 901.2543768759774, -662.3275399668249}, + {1.0, -942.3762411246095, -875.0025895092708}, + {0.0, 418.20256050104604, -414.3102074305251}, + {0.0, 625.0294460702908, -625.6315794655841}, + {1.0, -449.74570685873516, 937.185777575773}, + {0.0, 508.2386960118979, 454.0962431757914}, + {1.0, 331.4089009636193, 589.2741722009719}, + {1.0, 99.06469391982864, 187.0394494146019}, + {1.0, -982.3370248476699, 322.0973186273661}, + {1.0, 548.6443983489316, 708.7265431968447}, + {0.0, 918.9454013804204, -383.8602043941679}, + {1.0, 47.025960736300476, 171.219298464468}, + {0.0, 378.2597384891858, 163.1492885941102}, + {0.0, 438.65288112462554, -139.6734662005057}, + {1.0, -831.8875659762939, 892.6667556591465}, + {0.0, 883.0433572247841, -405.08376291753257}, + {0.0, 885.9349479866808, -577.4873262774219}, + {1.0, -614.7099535083557, -133.06983968843338}, + {0.0, 111.7257364798395, -585.9016094589116}, + {1.0, 453.9214560104581, 999.4093349063546}, + {1.0, -660.6080448479984, -558.4295455433598}, + {0.0, -466.8209751830958, -591.196870091049}, + {0.0, -964.7665601618734, 
-997.9800903796079}, + {0.0, -236.07763234295055, -450.41129146522917}, + {0.0, -621.6876241277605, -797.4500041783042}, + {0.0, -773.3591978507126, -890.0043590247606}, + {1.0, -41.04699663875965, 822.3779367276668}, + {0.0, -88.10853803965915, -192.37350885363378}, + {0.0, 663.981740050287, -508.81572667480236}, + {0.0, 15.59472374839936, -806.7541810675616}, + {1.0, -892.7104844234832, -708.5235867565298}, + {1.0, -484.65491520217245, 386.6430150137869}, + {0.0, 865.0610549279427, 615.8811284084713}, + {1.0, -824.4355093837889, 655.3234320109748}, + {1.0, -274.68139814419976, -239.53727115479273}, + {0.0, -86.4277464637313, -881.0777192437689}, + {1.0, -581.4932661460668, 769.3538369247574}, + {0.0, -432.5850223289913, -577.4260081674186}, + {1.0, 166.76522990130684, 582.4331818363789}, + {0.0, 396.8182460459341, 248.34183939490367}, + {1.0, -509.8701926143476, 368.8796357552451}, + {1.0, -482.54152901054886, -248.83959837521047}, + {1.0, -300.50297994358345, 742.4139758199028}, + {0.0, 163.28493788474384, -61.41706872692157}, + {0.0, -399.2277405988791, -930.6519043114885}, + {0.0, 44.13900477801826, -571.5314250642764}, + {0.0, 457.8794897532496, -505.99693186447195}, + {0.0, 16.85880382123935, -451.1811783607169}, + {1.0, -743.4540696447744, 325.39937301862096}, + {1.0, 57.40459247973081, 106.58399169789641}, + {1.0, 183.98880310846016, 499.74779967287395}, + {1.0, 567.0903172389608, 820.4986606446041}, + {0.0, 672.4806526088855, 300.601012280614}, + {0.0, -343.8894522407976, -761.4942297431235}, + {0.0, 870.247864223385, -168.14608036197296}, + {1.0, 593.005455426467, 673.1630290763387}, + {0.0, -625.9494316959813, -983.6968015830237}, + {1.0, 494.1754094118269, 992.2691899024903}, + {0.0, 61.401789304312615, -773.2837841463802}, + {1.0, -194.76742246565573, 69.77988116139159}, + {0.0, 206.82364861578685, 121.15474801344544}, + {1.0, -265.964495521001, 50.790074285276205}, + {0.0, 818.3873132702915, 36.49793444927877}, + {0.0, 99.81409878465752, -628.0274914181116}, + {0.0, 464.149315901346, -321.29715928735277}, + {1.0, -164.52462729937565, 952.4896905712137}, + {0.0, -63.17364851415209, -149.49056773721736}, + {0.0, 882.9288293898815, 171.00117804059573}, + {0.0, 473.3733180102365, -689.3426862684687}, + {0.0, 165.7220875180078, -354.71003889056044}, + {0.0, 525.5517697849327, 415.84107073078167}, + {0.0, -38.184721358457864, -99.36030799911896}, + {0.0, 242.96729902384163, -156.16029387422054}, + {0.0, 448.4711090805122, -495.01683482080705}, + {1.0, -80.15226220702493, 970.6850105496733}, + {0.0, 870.3328249998483, 583.0363909361256}, + {0.0, -238.61798549246464, -430.95739845768026}, + {0.0, -153.01230031899092, -482.12077718764306}, + {1.0, -118.06183953458049, 40.44154430898425}, + {1.0, -876.8968143885145, -370.6419068924105}, + {0.0, 989.8165746071368, -943.0636134966381}, + {0.0, 448.68476431428917, 44.44832374987436}, + {0.0, -5.562631397638029, -594.7883897866259}, + {0.0, 880.7175397337289, 786.6444839355895}, + {0.0, 476.3278235630439, -756.8025350513306}, + {0.0, -209.1261948306602, -366.9709734757247}, + {1.0, -1.5342655753494228, 295.69953419777266}, + {1.0, 98.88194946977887, 709.984198980128}, + {1.0, -102.4522435336255, 348.55854643990347}, + {1.0, 431.6422144084929, 488.26608578711966}, + {1.0, -629.5648689407153, -389.98821373225144}, + {1.0, -655.6263155228037, 89.12505314113082}, + {0.0, -201.6475575882739, -902.9470477574147}, + {1.0, -342.30143560116915, 157.21169053018912}, + {1.0, -671.4797028289656, -49.48397951858112}, + {1.0, -993.3541982679827, 
428.50119148048657}, + {0.0, 158.95824836793054, 115.93705315336206}, + {1.0, -858.292999815246, 946.8912002937116}, + {1.0, -223.10861890967476, 190.7507270694814}, + {0.0, -147.9091707330915, -899.2785339400244}, + {0.0, 254.55648822491457, -260.9331332388332}, + {0.0, 560.3172529225217, 388.76836664538814}, + {0.0, 924.1007767093995, 56.69156104001263}, + {0.0, 62.42705110549082, -888.0360838024912}, + {0.0, 222.43761905783595, 88.18795871018938}, + {0.0, 489.8756173625022, 421.3474970424486}, + {0.0, 246.6646015601891, -506.3175818566548}, + {0.0, -620.5001534479718, -774.7836865370457}, + {1.0, -654.0153133260937, -369.1547696738236}, + {0.0, 853.1429595371762, -87.56985188355861}, + {1.0, -226.84561483455388, 122.80144293902458}, + {1.0, 335.09779003775316, 731.0032200516428}, + {1.0, 87.90214612318391, 724.8989520503376}, + {0.0, -51.792728592205776, -298.0103777307395}, + {1.0, -421.181682827191, 41.01565470282776}, + {1.0, -626.6392286104665, 227.98017875883284}, + {1.0, -839.0341042344045, 990.7893877153003}, + {1.0, -9.321936022159207, 125.24249479969853}, + {0.0, 665.2916192497585, 314.9312297793483}, + {1.0, -236.71130814979108, 41.56269468081973}, + {1.0, -695.4935496704909, -364.376100277162}, + {0.0, 60.90303121087936, -525.9732822401365}, + {1.0, -740.9211189318623, 328.1577766746841}, + {0.0, 636.7728693761635, 231.63887313030887}, + {0.0, 783.8640093145868, -86.94016828207737}, + {1.0, -122.79445443476675, 446.8427679254348}, + {0.0, -599.127065456006, -641.9946421169902}, + {0.0, -133.3932116798295, -715.8087793479069}, + {0.0, 868.1768857382554, -356.8832640029416}, + {1.0, -729.5079555062296, 48.18869346933934}, + {1.0, -323.311327276945, 51.37289795053448}, + {1.0, -863.9094602749768, -526.3307161874084}, + {0.0, -172.237643059304, -545.395840196842}, + {1.0, 379.0803154405653, 860.9286051762328}, + {0.0, 646.3490077056538, 221.13771257535495}, + {1.0, -493.2329575593668, 938.8602740452263}, + {0.0, 852.1508064390962, 186.42129731281898}, + {0.0, -105.17633183875978, -819.8477185986328}, + {0.0, 794.7790444633961, 225.19911969860573}, + {0.0, 306.4485552684148, 290.3991023596727}, + {1.0, -348.52545404552563, -302.8538669615166}, + {1.0, -621.5896829696857, -586.764214213187}, + {0.0, -360.9052184666539, -501.2314262330038}, + {0.0, 512.0475423578778, -968.4211685736286}, + {0.0, -1.0553261239787162, -649.1131987920394}, + {1.0, -353.0059560079317, -343.82940709059096}, + {0.0, 281.71038662642286, -536.6960537047482}, + {1.0, -919.2355704939898, 782.9875939766282}, + {1.0, -554.7648476025646, 670.76664941987}, + {0.0, 287.54041983444336, 106.2628262971964}, + {1.0, -71.36414070058743, 481.00905876949264}, + {1.0, -525.4581932812421, 507.16990298296923}, + {0.0, 510.1084615227803, -813.3443471544821}, + {0.0, -515.8000398448883, -551.1523846072581}, + {1.0, -941.5905835281701, 178.53493537516124}, + {1.0, -826.4320007540575, -391.32308974320074}, + {1.0, -362.25207668798646, 711.1776477575349}, + {1.0, -363.13146140965796, 58.76850122459791}, + {1.0, -637.0939514034111, -57.18171960880602}, + {1.0, 811.8537434287423, 893.8406118576338}, + {1.0, -351.36128471993413, -164.8367432830371}, + {0.0, -625.8073644486308, -938.5091097468568}, + {0.0, 131.36904305993585, 59.945922200265386}, + {1.0, 300.49666138667953, 544.089396622054}, + {1.0, 150.9533638033147, 943.667562848439}, + {1.0, -232.3556550990304, 976.0470122102599}, + {1.0, 135.8097187722467, 262.21166985817695}, + {0.0, -97.51353115825805, -890.6273287611524}, + {1.0, -711.4020131465077, -20.13806627790268}, + 
{0.0, 917.1543030685937, -872.6562190191934}, + {1.0, -657.7632592299774, -596.4956657628013}, + {0.0, 806.7273372492091, 154.3973882475018}, + {0.0, 371.7932221354017, -847.5721372522485}, + {0.0, 887.0251089691258, -306.6059397900773}, + {1.0, -171.52557116367404, 819.6507572581761}, + {0.0, 632.2374116222845, -635.8014704304069}, + {0.0, -213.33363068356653, -639.038384943213}, + {0.0, 737.7847710201636, -843.291366957395}, + {0.0, -430.7114667797465, -665.7014140302028}, + {0.0, 18.317432837854085, -309.1307864153605}, + {0.0, 689.3196508440624, 398.22692583132357}, + {0.0, 908.6965655126414, -321.7431267700932}, + {0.0, 604.2361606207025, -174.1208906780612}, + {1.0, -816.014328616853, -468.5728222442267}, + {1.0, -124.50677921712554, 439.4225345583168}, + {0.0, -736.4729915358428, -745.435394454091}, + {1.0, -201.1314081356761, 132.070557003796}, + {1.0, -538.2469045343253, 719.2630473774586}, + {1.0, -579.3039091203984, 961.7644587928542}, + {1.0, -131.07569768983058, -14.067659190625022}, + {1.0, -961.9324831150435, 815.7775199747161}, + {0.0, 959.0805916122792, 210.22031178108682}, + {0.0, 537.3004634155134, -821.1232504829824}, + {1.0, -525.577776451393, 523.8546325250404}, + {1.0, -490.37425007561785, 613.9247103792861}, + {1.0, 725.2941641152454, 924.7691776631311}, + {0.0, 850.5191959199387, -911.7156754307339}, + {1.0, -535.3827552133765, -256.1333041657481}, + {1.0, 93.24441210512305, 980.899958839474}, + {1.0, 125.58210878499744, 489.9200659506546}, + {1.0, -265.0907509361897, -181.36232727265053}, + {1.0, -805.0528978104943, -774.3428711441273}, + {0.0, 299.481029365769, 274.2467784888322}, + {1.0, -872.6432839751412, -724.9692038478101}, + {0.0, -327.77109720806027, -346.06090524099113}, + {0.0, -769.9407295518204, -947.4499512111647}, + {0.0, 708.176001237056, -701.9900242821255}, + {0.0, 429.7900423607498, -767.8607100772805}, + {0.0, 514.9666605063433, -252.09527799878242}, + {1.0, -392.6943024744394, 943.3642876383242}, + {0.0, -171.97676164837765, -964.9749845719992}, + {0.0, 25.3949751703301, -761.3459408840288}, + {0.0, 327.0516125752938, -81.26274312696592}, + {0.0, -926.4851014957853, -970.9563176084357}, + {1.0, -985.2416286372801, -758.6127879964147}, + {0.0, 338.7854869375187, -231.37122411100802}, + {1.0, -995.9157184785086, -310.8674450540059}, + {0.0, 485.52790893379097, 7.909018196822899}, + {1.0, -289.76601009744377, -93.43411467378803}, + {1.0, -352.91681813664957, 970.6609344632727}, + {1.0, -634.2596635738871, 478.54324561131875}, + {1.0, -496.623286353002, 526.7778661797483}, + {0.0, 837.0404771301767, 671.1823960639354}, + {0.0, -284.5931069950618, -893.2503900000672}, + {0.0, 739.6925158457948, -572.886151546864}, + {1.0, 505.37418939555437, 914.4939776238757}, + {0.0, 65.79978723030536, -59.26282586191303}, + {0.0, 775.1318885055389, -698.3367782064498}, + {1.0, -871.3166585822554, -351.74555670546727} + }; + + /** 4 sets grouped around of square vertices. 
*/ + protected static final double[][] fourSetsInSquareVertices = { + {0, 9.35096604945605, 9.946073797069054}, + {0, 9.135109633114403, 9.962676066205383}, + {0, 9.046654725589521, 9.610699793950662}, + {0, 9.827221553421282, 9.4176319880153}, + {0, 9.277441430833566, 9.502990699976}, + {0, 9.444827307967367, 9.903310367805602}, + {0, 9.911404997680545, 9.226246217883297}, + {0, 9.950231642973769, 9.453518533258803}, + {0, 9.281545278543017, 9.438272102773379}, + {0, 9.032306746555102, 9.517675092676706}, + {0, 9.286542956290456, 9.15288903978334}, + {0, 9.896451632473255, 9.019751070009821}, + {0, 9.611642481367562, 9.17209652044495}, + {0, 9.592540623266126, 9.306160678545629}, + {0, 9.817470117880873, 9.838651444371973}, + {0, 9.263220850397941, 9.139179322873582}, + {0, 9.949097640181272, 9.624710378790242}, + {0, 9.616004097319287, 9.421557303733453}, + {0, 9.512900976289933, 9.28642137092367}, + {0, 9.207793663546337, 9.40094289636865}, + {0, 9.079279410265883, 9.76978559451163}, + {0, 9.328945661288095, 9.645773710532888}, + {0, 9.80101696222916, 9.511903913501255}, + {0, 9.882593127029741, 9.73545127073394}, + {0, 9.75372887212885, 9.435141350132769}, + {0, 9.288527674365598, 9.055665753045206}, + {0, 9.88272159816372, 9.055932205550423}, + {0, 9.385642321423624, 9.922172934733265}, + {0, 9.830217517055729, 9.415174260405154}, + {0, 9.184970761195489, 9.03515483431538}, + {0, 9.747503155479809, 9.38708759338332}, + {0, 9.953962908254736, 9.483949174467012}, + {0, 9.271685731881993, 9.128890010491494}, + {0, 9.441240324686845, 9.07960435205457}, + {0, 9.168560731741703, 9.256530860101089}, + {0, 9.010517147230432, 9.94335328515589}, + {0, 9.1749227239244, 9.018681913631386}, + {0, 9.413360501729251, 9.302212703700196}, + {0, 9.439461439481182, 9.318631395882242}, + {0, 9.531551691985907, 9.232525664308465}, + {0, 9.466805772615563, 9.511711890834333}, + {0, 9.633242901042053, 9.972778102570045}, + {0, 9.517692290376388, 9.73537462150143}, + {0, 9.187046049036134, 9.059073377533783}, + {0, 9.121523234392956, 9.504221886903101}, + {0, 9.493957951674021, 9.608201135992367}, + {0, 9.981993764415321, 9.333278989889811}, + {0, 9.371277571698762, 9.110041365023866}, + {0, 9.681446270907697, 9.7870063720198}, + {0, 9.639466883264246, 9.434768030033164}, + {0, 9.391982858267035, 9.934707093985823}, + {0, 9.550060071547726, 9.473132681990514}, + {0, 9.256562054384402, 9.211913854106896}, + {0, 9.46408385327689, 9.158869250798142}, + {0, 9.442994981367162, 9.189227375629654}, + {0, 9.697833866121318, 9.21112449845501}, + {0, 9.115534908153043, 9.115227178046245}, + {0, 9.835218474137239, 9.98174155822633}, + {0, 9.026698146309743, 9.248759846540965}, + {0, 9.68118581769866, 9.40512628823504}, + {0, 9.81721640069966, 9.369105145483651}, + {0, 9.975877208452287, 9.640693828024975}, + {0, 9.823272242807437, 9.46823993908653}, + {0, 9.638281188176519, 9.534774307683374}, + {0, 9.597003178481613, 9.84238115941204}, + {0, 9.941999007792681, 9.331877359355289}, + {0, 9.050540877852525, 9.244472301490417}, + {0, 9.358931306187054, 9.900809398285286}, + {0, 9.170247599517836, 9.87585551194908}, + {0, 9.461705027907554, 9.167319400226486}, + {0, 9.076729207165052, 9.677578134220534}, + {0, 9.488544686081216, 9.62380634923249}, + {0, 9.929150661994122, 9.152491122614597}, + {0, 9.890051482992417, 9.1709621079536}, + {0, 9.839485513056095, 9.643849781319778}, + {0, 9.749461922180853, 9.045432748127462}, + {0, 9.58439542919333, 9.225044809549836}, + {0, 9.479465134364697, 9.706551666966702}, + {0, 
9.00707492076871, 9.839317970534172}, + {0, 9.948409701102793, 9.380261430658763}, + {0, 9.264850115578076, 9.696516344063658}, + {0, 9.977078194073387, 9.213405339955512}, + {0, 9.648087669569941, 9.898977891084664}, + {0, 9.724090075117749, 9.876133066062916}, + {0, 9.445249316659568, 9.373023119966643}, + {0, 9.995541563884071, 9.57923804140667}, + {0, 9.667359233860397, 9.720098746660245}, + {0, 9.379303845088474, 9.520602789251743}, + {0, 9.996287800651865, 9.838061655335768}, + {0, 9.318835567328465, 9.009915558605616}, + {0, 9.103894679089793, 9.674971708485224}, + {0, 9.346826400314828, 9.888779618232787}, + {0, 9.659116962016478, 9.608712473271416}, + {0, 9.661516337354719, 9.416786365864226}, + {0, 9.642593770590324, 9.251344999039574}, + {0, 9.134003475979116, 9.551760245909657}, + {0, 9.524862003327057, 9.307789887454172}, + {0, 9.883705581666579, 9.325086464359684}, + {0, 9.96076863440133, 9.81636527085299}, + {0, 9.995704158311584, 9.544553004819253}, + {1, -9.094953387232211, 9.06233128328723}, + {1, -9.304897363378368, 9.143926554861004}, + {1, -9.03524958020074, 9.370326522034881}, + {1, -9.120893310395626, 9.271851530835537}, + {1, -9.510902040922451, 9.2470398948938}, + {1, -9.6525973741057, 9.725355730393005}, + {1, -9.65730261326345, 9.757814601272596}, + {1, -9.597463454487615, 9.870093256106818}, + {1, -9.190101362739775, 9.594505054154807}, + {1, -9.72020516663928, 9.49084494643775}, + {1, -9.723347588431338, 9.129139508430457}, + {1, -9.33996314024198, 9.525934956132764}, + {1, -9.824803485424123, 9.128546700002982}, + {1, -9.346973220919576, 9.934992542662958}, + {1, -9.685940369418338, 9.30810392592615}, + {1, -9.064058121381708, 9.846942888423445}, + {1, -9.368987058951426, 9.557135466015499}, + {1, -9.782353308524383, 9.857550405413855}, + {1, -9.281500887267686, 9.056968941046172}, + {1, -9.514451522447168, 9.713696846961527}, + {1, -9.607099689382135, 9.682075033940093}, + {1, -9.144871412854759, 9.146320338346246}, + {1, -9.54203309158306, 9.220014377847022}, + {1, -9.238269645840251, 9.948063795512258}, + {1, -9.286942806777112, 9.522342489392214}, + {1, -9.591474157985536, 9.240285207594253}, + {1, -9.652843973116592, 9.557983695755953}, + {1, -9.126794849562028, 9.452966323026885}, + {1, -9.877221229728452, 9.151312939643672}, + {1, -9.170379066479606, 9.381576400806694}, + {1, -9.411298671068392, 9.133322302544746}, + {1, -9.666443924685849, 9.66428867311317}, + {1, -9.347964494643556, 9.012849397302583}, + {1, -9.493681117964078, 9.332240464982554}, + {1, -9.623975723800413, 9.419921503264844}, + {1, -9.292219487063763, 9.00214102314859}, + {1, -9.194419464738496, 9.640048387436925}, + {1, -9.886720923292938, 9.834939723803704}, + {1, -9.90520284610924, 9.17595267606471}, + {1, -9.284829868633738, 9.268795876426012}, + {1, -9.498878372098952, 9.5997098342015}, + {1, -9.359302922869169, 9.47880701571168}, + {1, -9.258562740082393, 9.497531680793207}, + {1, -9.895388929537848, 9.00756585816333}, + {1, -9.627928477333924, 9.391262771761872}, + {1, -9.525281129279826, 9.796892255719904}, + {1, -9.59598592778135, 9.067874949457092}, + {1, -9.110283105135892, 9.821653780489235}, + {1, -9.343973780672988, 9.63557812382392}, + {1, -9.87812414314095, 9.978007969979139}, + {1, -9.98832246915748, 9.623150872300222}, + {1, -9.115997082508613, 9.965470531748467}, + {1, -9.874391718339105, 9.214113577543877}, + {1, -9.671664494678888, 9.15862012290195}, + {1, -9.031596433460688, 9.616814958480965}, + {1, -9.758627761132653, 9.511908952613643}, + {1, 
-9.205087108977219, 9.840949306240816}, + {1, -9.171734592697309, 9.702842939318314}, + {1, -9.082886085070493, 9.524201651321903}, + {1, -9.74595864484071, 9.219346103723025}, + {1, -9.898468941378516, 9.994402484197503}, + {1, -9.341582531784448, 9.193680038418634}, + {1, -9.570090524257228, 9.201198104295603}, + {1, -9.88361320124743, 9.027615263347323}, + {1, -9.154222720481965, 9.799927021695417}, + {1, -9.364221227791875, 9.042090834574182}, + {1, -9.333131749015948, 9.790442620484125}, + {1, -9.286700941581561, 9.89073867458494}, + {1, -9.348737197099151, 9.637939060929087}, + {1, -9.442420524656606, 9.07802294456236}, + {1, -9.069329135123306, 9.658515489139848}, + {1, -9.306682910312364, 9.20831776028291}, + {1, -9.033846541544232, 9.32904963306478}, + {1, -9.706767953982897, 9.9204656840812}, + {1, -9.855922299233484, 9.212398390928783}, + {1, -9.31778377138365, 9.001381041592891}, + {1, -9.498262395904716, 9.627240779587641}, + {1, -9.165515191167106, 9.8269942856602}, + {1, -9.975445549855277, 9.940934989111799}, + {1, -9.083105286998059, 9.006127740460453}, + {1, -9.570145038082837, 9.682155599203648}, + {1, -9.61392195996382, 9.417864984298848}, + {1, -9.274771331302999, 9.641773516631659}, + {1, -9.296296304670749, 9.782496135034126}, + {1, -9.906415110246952, 9.754391405446135}, + {1, -9.401887484923442, 9.177845637020802}, + {1, -9.530971211940608, 9.165119804525942}, + {1, -9.82379861350907, 9.79567065636976}, + {1, -9.652776399686564, 9.905939382705197}, + {1, -9.876593047451918, 9.945310791455892}, + {1, -9.663611565135188, 9.362793091580434}, + {1, -9.199103361444621, 9.635196006461447}, + {1, -9.190013322848332, 9.124127000468004}, + {1, -9.29736354578434, 9.717999298890678}, + {1, -9.220547853711237, 9.559927412569595}, + {1, -9.300431356958706, 9.76396216541998}, + {1, -9.157649670754807, 9.990846988919046}, + {1, -9.681918677002109, 9.68618286595764}, + {1, -9.309195235661146, 9.312880801021818}, + {1, -9.061160475710913, 9.076614202325946}, + {2, -9.062489260904384, -9.29639290758419}, + {2, -9.228543182338143, -9.678377216077045}, + {2, -9.058090832908235, -9.193945883550121}, + {2, -9.133051729493113, -9.591373007767894}, + {2, -9.287844094445548, -9.551255004015}, + {2, -9.007505358739156, -9.364102496975889}, + {2, -9.573448348548297, -9.721351111009751}, + {2, -9.839063104064442, -9.913376420693114}, + {2, -9.009615911555375, -9.726047024128608}, + {2, -9.101017317976435, -9.704243867142955}, + {2, -9.982108914119253, -9.16651010251761}, + {2, -9.446194150458751, -9.254956921695555}, + {2, -9.189473272816354, -9.810681137049205}, + {2, -9.118077427599777, -9.540094810610913}, + {2, -9.771250464767986, -9.523914718655663}, + {2, -9.66962428717098, -9.363171620624835}, + {2, -9.312167530669402, -9.343252976723711}, + {2, -9.464223946364095, -9.030677424225916}, + {2, -9.360044171938823, -9.307110078788382}, + {2, -9.011501658023455, -9.36530250968901}, + {2, -9.775885771959839, -9.99889314514122}, + {2, -9.674611861667914, -9.258187855592231}, + {2, -9.738640777018995, -9.111785670315005}, + {2, -9.246690988432968, -9.721028941948624}, + {2, -9.390261807995243, -9.588861735182837}, + {2, -9.291113352727827, -9.269267155328981}, + {2, -9.851335630543913, -9.706611637556188}, + {2, -9.585157995064394, -9.405552049981731}, + {2, -9.918436572526948, -9.16760421314763}, + {2, -9.845493743518675, -9.355482956823167}, + {2, -9.731220848845956, -9.225343258111073}, + {2, -9.222705334863235, -9.494812693860784}, + {2, -9.981016698450784, -9.905493543993186}, + {2, 
-9.46735837748333, -9.4826245649012}, + {2, -9.244414641225871, -9.747631208358092}, + {2, -9.055383358563462, -9.531078141057671}, + {2, -9.769432919539609, -9.61352749756392}, + {2, -9.369900693663043, -9.108143337018905}, + {2, -9.607028386780009, -9.114073621581822}, + {2, -9.777391839524553, -9.011542490337462}, + {2, -9.006992341646022, -9.807142738339437}, + {2, -9.268800709859363, -9.64049865255139}, + {2, -9.675247117678266, -9.59986912340877}, + {2, -9.64637138569114, -9.373492063216789}, + {2, -9.107966171477159, -9.89296888054194}, + {2, -9.844813041035149, -9.265286980535892}, + {2, -9.741557572466677, -9.332262102684087}, + {2, -9.877113842998332, -9.236779890169021}, + {2, -9.717067250147496, -9.064661056318842}, + {2, -9.621588376526242, -9.877688077281952}, + {2, -9.517814042484112, -9.540587835450802}, + {2, -9.301056957528804, -9.825047942369075}, + {2, -9.571496535251406, -9.7886645523611}, + {2, -9.720509286872675, -9.391715190333258}, + {2, -9.440573147395899, -9.788983529514448}, + {2, -9.26187156355727, -9.6495064067468}, + {2, -9.658496105019307, -9.56612823492413}, + {2, -9.380443710902496, -9.68085867523561}, + {2, -9.942337341048844, -9.051311192273833}, + {2, -9.078217384202866, -9.916249447505033}, + {2, -9.040049870218203, -9.034931091928817}, + {2, -9.38309299369458, -9.652061835126116}, + {2, -9.2185666133056, -9.230952753648268}, + {2, -9.556045604713985, -9.68622417688499}, + {2, -9.763408055045888, -9.879577482698977}, + {2, -9.4247203087675, -9.639176769093654}, + {2, -9.87358328609414, -9.895570735983382}, + {2, -9.819441742886346, -9.8365613475581}, + {2, -9.658089225310723, -9.489731885421016}, + {2, -9.943990436893316, -9.452660725226394}, + {2, -9.499576083220616, -9.936796975306573}, + {2, -9.209278737078256, -9.515912871664437}, + {2, -9.822627739746856, -9.208467277950026}, + {2, -9.250697491903084, -9.388580147580788}, + {2, -9.499425743259364, -9.350980719673753}, + {2, -9.275926339651928, -9.617104986484284}, + {2, -9.1796228747286, -9.600489090237376}, + {2, -9.349551823375743, -9.006466892950566}, + {2, -9.894633921415739, -9.68766261225829}, + {2, -9.65858550958029, -9.981852526887}, + {2, -9.260496691277194, -9.809097777192473}, + {2, -9.819512412109138, -9.897278497873733}, + {2, -9.095722203640902, -9.373361177409254}, + {2, -9.350211015838992, -9.8070103689666}, + {2, -9.666932714082296, -9.012476306081684}, + {2, -9.808494394881976, -9.141856503904373}, + {2, -9.659369482494562, -9.813220865668578}, + {2, -9.695328684452264, -9.348824074007899}, + {2, -9.559852026507784, -9.874175917293163}, + {2, -9.09372192117967, -9.409697201374975}, + {2, -9.287303427948462, -9.501710345463191}, + {2, -9.559530752361578, -9.915461534620048}, + {2, -9.582664553428488, -9.403076102016477}, + {2, -9.52173572568699, -9.741375773070464}, + {2, -9.65354706029232, -9.818082622224445}, + {2, -9.415838021477068, -9.532580879297706}, + {2, -9.574004758496413, -9.086286237660188}, + {2, -9.270611925252807, -9.702167164438746}, + {2, -9.95686463396123, -9.162427711243494}, + {2, -9.274599236711888, -9.877754856511778}, + {3, 9.14689232210878, -9.775341371070157}, + {3, 9.023355885230728, -9.650091265039629}, + {3, 9.445914402990603, -9.367844134021585}, + {3, 9.12739637867819, -9.790557561179597}, + {3, 9.041303217790349, -9.586261899702581}, + {3, 9.205210383417626, -9.985844424729768}, + {3, 9.760747183322884, -9.739749414833623}, + {3, 9.43601987318095, -9.299718258558077}, + {3, 9.14356842517825, -9.295462642558103}, + {3, 9.833809705258039, 
-9.468846417300268}, + {3, 9.718477547371677, -9.823866211360837}, + {3, 9.922658697442182, -9.769889056711964}, + {3, 9.33663363137869, -9.088267105115708}, + {3, 9.137230799593524, -9.764401780467223}, + {3, 9.858088980083506, -9.304992329702712}, + {3, 9.382828855133841, -9.493306421449871}, + {3, 9.798884510277261, -9.380868512833228}, + {3, 9.62129491417874, -9.539240839001467}, + {3, 9.779444510688629, -9.192918853610157}, + {3, 9.200804425227417, -9.214343851278091}, + {3, 9.775531213188497, -9.073023597174036}, + {3, 9.390609731389022, -9.977531450420052}, + {3, 9.852766749781729, -9.994823748876888}, + {3, 9.206238360247045, -9.67091791978384}, + {3, 9.188602950870685, -9.510463637602879}, + {3, 9.331589555754434, -9.922823935052168}, + {3, 9.476697182752012, -9.313064140116326}, + {3, 9.356805613304504, -9.835977587036306}, + {3, 9.339818380404573, -9.255810669018475}, + {3, 9.17366847248557, -9.947584334056048}, + {3, 9.53360390823212, -9.795041609984915}, + {3, 9.609560038477422, -9.285015745600694}, + {3, 9.577553857280723, -9.96914900300197}, + {3, 9.464374595524664, -9.618239089480822}, + {3, 9.398719356212853, -9.7406758194444}, + {3, 9.154688949078198, -9.248998548314239}, + {3, 9.679073636776373, -9.965328464852867}, + {3, 9.47893626848198, -9.9671543632786}, + {3, 9.068547258387513, -9.297377035663166}, + {3, 9.076923603177063, -9.914463831030272}, + {3, 9.976578331543791, -9.360722370503666}, + {3, 9.089452654960278, -9.675962954595512}, + {3, 9.070526769096297, -9.878206691195288}, + {3, 9.930847945955737, -9.07583308430197}, + {3, 9.241217613699337, -9.631175172125698}, + {3, 9.124100921554351, -9.228953372107389}, + {3, 9.508344880276217, -9.860603437908713}, + {3, 9.11156100183317, -9.325392997885503}, + {3, 9.817235693989044, -9.39425968469714}, + {3, 9.001600449220064, -9.425174755596974}, + {3, 9.548114105927628, -9.808330723888258}, + {3, 9.26226050324015, -9.767116578977086}, + {3, 9.614597629315545, -9.041844364395292}, + {3, 9.538354218499835, -9.098393947752555}, + {3, 9.103392813936214, -9.09952673162608}, + {3, 9.420097750306217, -9.098700662928707}, + {3, 9.751668557712422, -9.38794903932924}, + {3, 9.931275926738792, -9.567768498966414}, + {3, 9.046080675655736, -9.638494792341994}, + {3, 9.770315794108765, -9.43037261292599}, + {3, 9.752980345824852, -9.748629501818872}, + {3, 9.451157497026747, -9.122294173303064}, + {3, 9.8842318143802, -9.26500677925286}, + {3, 9.757618739984443, -9.43137249310142}, + {3, 9.312628300108653, -9.35390228978602}, + {3, 9.290443903557156, -9.235565486135597}, + {3, 9.006123561818931, -9.152783217337547}, + {3, 9.570084759165916, -9.927282503148907}, + {3, 9.421900208122063, -9.081045753111953}, + {3, 9.653736596553786, -9.901709124803725}, + {3, 9.18417654510616, -9.251983632346962}, + {3, 9.528620521688604, -9.153806541933662}, + {3, 9.804333603959915, -9.140503586471738}, + {3, 9.450969957775413, -9.158071229394206}, + {3, 9.20163405176059, -9.485982651544383}, + {3, 9.54780101021909, -9.037382999154193}, + {3, 9.075028540176401, -9.398829949196564}, + {3, 9.874199751417624, -9.811808331246274}, + {3, 9.954958362231842, -9.233648957978966}, + {3, 9.03258466527879, -9.432021155003355}, + {3, 9.611337142970585, -9.18050106929956}, + {3, 9.748186934551581, -9.283395815931486}, + {3, 9.203357880317562, -9.734448423320636}, + {3, 9.918736141570848, -9.139927237002235}, + {3, 9.923607379931829, -9.747509729243488}, + {3, 9.11966639233584, -9.144799648581206}, + {3, 9.332910738465808, -9.836313230806809}, + {3, 
9.72782406722705, -9.636979470475252}, + {3, 9.587733884348484, -9.429388313887008}, + {3, 9.713001308076503, -9.378155762534723}, + {3, 9.553879064305177, -9.456119811781296}, + {3, 9.326716553614768, -9.398273985573583}, + {3, 9.845721054911849, -9.2609414976378}, + {3, 9.43826634715752, -9.226109072709436}, + {3, 9.46721793264904, -9.959943210987339}, + {3, 9.47560676057465, -9.963482009295927}, + {3, 9.006435968586619, -9.202759792205478}, + {3, 9.053062605095485, -9.798289703474865}, + {3, 9.959296741639132, -9.762961500922069}, + {3, 9.882357321966778, -9.069477551120192} + }; + + /** xor truth table. */ + protected static final double[][] xor = { + {0.0, 0.0, 0.0}, + {0.0, 1.0, 1.0}, + {1.0, 0.0, 1.0}, + {1.0, 1.0, 0.0} + }; + + /** + * Create cache mock. + * + * @param vals Values for cache mock. + * @return Cache mock. + */ + protected Map getCacheMock(double[][] vals) { + Map cacheMock = new HashMap<>(); + + for (int i = 0; i < vals.length; i++) { + double[] row = vals[i]; + double[] convertedRow = new double[row.length]; + System.arraycopy(row, 0, convertedRow, 0, row.length); + cacheMock.put(i, convertedRow); + } + return cacheMock; + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/CompositionTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/CompositionTestSuite.java new file mode 100644 index 0000000000000..eb741eac19617 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/CompositionTestSuite.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.composition; + +import org.apache.ignite.ml.composition.bagging.BaggingTest; +import org.apache.ignite.ml.composition.boosting.GDBTrainerTest; +import org.apache.ignite.ml.composition.boosting.convergence.mean.MeanAbsValueConvergenceCheckerTest; +import org.apache.ignite.ml.composition.boosting.convergence.median.MedianOfMedianConvergenceCheckerTest; +import org.apache.ignite.ml.composition.predictionsaggregator.MeanValuePredictionsAggregatorTest; +import org.apache.ignite.ml.composition.predictionsaggregator.OnMajorityPredictionsAggregatorTest; +import org.apache.ignite.ml.composition.predictionsaggregator.WeightedPredictionsAggregatorTest; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all ensemble models tests. 
+ */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + GDBTrainerTest.class, + MeanValuePredictionsAggregatorTest.class, + OnMajorityPredictionsAggregatorTest.class, + BaggingTest.class, + StackingTest.class, + WeightedPredictionsAggregatorTest.class, + MedianOfMedianConvergenceCheckerTest.class, + MeanAbsValueConvergenceCheckerTest.class +}) +public class CompositionTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/StackingTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/StackingTest.java new file mode 100644 index 0000000000000..49407ea89cac6 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/StackingTest.java @@ -0,0 +1,168 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.composition; + +import org.apache.ignite.IgniteCache; +import org.apache.ignite.ml.IgniteModel; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.composition.stacking.StackedDatasetTrainer; +import org.apache.ignite.ml.composition.stacking.StackedModel; +import org.apache.ignite.ml.composition.stacking.StackedVectorDatasetTrainer; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.math.functions.IgniteFunction; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.nn.Activators; +import org.apache.ignite.ml.nn.MLPTrainer; +import org.apache.ignite.ml.nn.MultilayerPerceptron; +import org.apache.ignite.ml.nn.UpdatesStrategy; +import org.apache.ignite.ml.nn.architecture.MLPArchitecture; +import org.apache.ignite.ml.optimization.LossFunctions; +import org.apache.ignite.ml.optimization.SmoothParametrized; +import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDParameterUpdate; +import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator; +import org.apache.ignite.ml.regressions.linear.LinearRegressionLSQRTrainer; +import org.apache.ignite.ml.regressions.linear.LinearRegressionModel; +import org.apache.ignite.ml.trainers.AdaptableDatasetModel; +import org.apache.ignite.ml.trainers.AdaptableDatasetTrainer; +import org.apache.ignite.ml.trainers.DatasetTrainer; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import static org.junit.Assert.assertEquals; + +/** + * Tests stacked trainers. 
+ */ +public class StackingTest extends TrainerTest { + /** Rule to check exceptions. */ + @Rule + public ExpectedException thrown = ExpectedException.none(); + + /** + * Tests simple stack training. + */ + @Test + public void testSimpleStack() { + StackedDatasetTrainer trainer = + new StackedDatasetTrainer<>(); + + UpdatesStrategy updatesStgy = new UpdatesStrategy<>( + new SimpleGDUpdateCalculator(0.2), + SimpleGDParameterUpdate.SUM_LOCAL, + SimpleGDParameterUpdate.AVG + ); + + MLPArchitecture arch = new MLPArchitecture(2). + withAddedLayer(10, true, Activators.RELU). + withAddedLayer(1, false, Activators.SIGMOID); + + MLPTrainer trainer1 = new MLPTrainer<>( + arch, + LossFunctions.MSE, + updatesStgy, + 3000, + 10, + 50, + 123L + ); + + // Convert model trainer to produce Vector -> Vector model + DatasetTrainer, Double> mlpTrainer = + AdaptableDatasetTrainer.of(trainer1) + .beforeTrainedModel((Vector v) -> new DenseMatrix(v.asArray(), 1)) + .afterTrainedModel((Matrix mtx) -> mtx.getRow(0)) + .withConvertedLabels(VectorUtils::num2Arr); + + final double factor = 3; + + StackedModel mdl = trainer + .withAggregatorTrainer(new LinearRegressionLSQRTrainer().withConvertedLabels(x -> x * factor)) + .addTrainer(mlpTrainer) + .withAggregatorInputMerger(VectorUtils::concat) + .withSubmodelOutput2VectorConverter(IgniteFunction.identity()) + .withVector2SubmodelInputConverter(IgniteFunction.identity()) + .withOriginalFeaturesKept(IgniteFunction.identity()) + .withEnvironmentBuilder(TestUtils.testEnvBuilder()) + .fit(getCacheMock(xor), parts, new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST)); + + assertEquals(0.0 * factor, mdl.predict(VectorUtils.of(0.0, 0.0)), 0.3); + assertEquals(1.0 * factor, mdl.predict(VectorUtils.of(0.0, 1.0)), 0.3); + assertEquals(1.0 * factor, mdl.predict(VectorUtils.of(1.0, 0.0)), 0.3); + assertEquals(0.0 * factor, mdl.predict(VectorUtils.of(1.0, 1.0)), 0.3); + } + + /** + * Tests simple stack training. + */ + @Test + public void testSimpleVectorStack() { + StackedVectorDatasetTrainer trainer = + new StackedVectorDatasetTrainer<>(); + + UpdatesStrategy updatesStgy = new UpdatesStrategy<>( + new SimpleGDUpdateCalculator(0.2), + SimpleGDParameterUpdate.SUM_LOCAL, + SimpleGDParameterUpdate.AVG + ); + + MLPArchitecture arch = new MLPArchitecture(2). + withAddedLayer(10, true, Activators.RELU). + withAddedLayer(1, false, Activators.SIGMOID); + + DatasetTrainer mlpTrainer = new MLPTrainer<>( + arch, + LossFunctions.MSE, + updatesStgy, + 3000, + 10, + 50, + 123L + ).withConvertedLabels(VectorUtils::num2Arr); + + final double factor = 3; + + StackedModel mdl = trainer + .withAggregatorTrainer(new LinearRegressionLSQRTrainer().withConvertedLabels(x -> x * factor)) + .addMatrix2MatrixTrainer(mlpTrainer) + .withEnvironmentBuilder(TestUtils.testEnvBuilder()) + .fit(getCacheMock(xor), parts, new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST)); + + assertEquals(0.0 * factor, mdl.predict(VectorUtils.of(0.0, 0.0)), 0.3); + assertEquals(1.0 * factor, mdl.predict(VectorUtils.of(0.0, 1.0)), 0.3); + assertEquals(1.0 * factor, mdl.predict(VectorUtils.of(1.0, 0.0)), 0.3); + assertEquals(0.0 * factor, mdl.predict(VectorUtils.of(1.0, 1.0)), 0.3); + } + + /** + * Tests that if there is no any way for input of first layer to propagate to second layer, + * exception will be thrown. 
+ */ + @Test + public void testINoWaysOfPropagation() { + StackedDatasetTrainer, Void> trainer = + new StackedDatasetTrainer<>(); + thrown.expect(IllegalStateException.class); + trainer.fit(null, (IgniteCache)null, null); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/bagging/BaggingTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/bagging/BaggingTest.java new file mode 100644 index 0000000000000..d31b8e4dc851b --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/bagging/BaggingTest.java @@ -0,0 +1,239 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.composition.bagging; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.IgniteModel; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.composition.combinators.parallel.ModelsParallelComposition; +import org.apache.ignite.ml.composition.predictionsaggregator.MeanValuePredictionsAggregator; +import org.apache.ignite.ml.composition.predictionsaggregator.OnMajorityPredictionsAggregator; +import org.apache.ignite.ml.dataset.Dataset; +import org.apache.ignite.ml.dataset.DatasetBuilder; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.environment.LearningEnvironment; +import org.apache.ignite.ml.environment.LearningEnvironmentBuilder; +import org.apache.ignite.ml.math.functions.IgniteTriFunction; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.nn.UpdatesStrategy; +import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDParameterUpdate; +import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator; +import org.apache.ignite.ml.preprocessing.Preprocessor; +import org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel; +import org.apache.ignite.ml.regressions.logistic.LogisticRegressionSGDTrainer; +import org.apache.ignite.ml.trainers.AdaptableDatasetModel; +import org.apache.ignite.ml.trainers.DatasetTrainer; +import org.apache.ignite.ml.trainers.TrainerTransformers; +import org.junit.Test; + +/** + * Tests for bagging algorithm. + */ +public class BaggingTest extends TrainerTest { + /** + * Dependency of weights of first model in ensemble after training in + * {@link BaggingTest#testNaiveBaggingLogRegression()}. This dependency is tested to ensure that it is + * fully determined by provided seeds. 
+ */ + private static Map firstMdlWeights; + + static { + firstMdlWeights = new HashMap<>(); + + firstMdlWeights.put(1, VectorUtils.of(-0.14721735583126058, 4.366377931980097)); + firstMdlWeights.put(2, VectorUtils.of(0.37824664453495443, 2.9422474282114495)); + firstMdlWeights.put(3, VectorUtils.of(-1.584467989609169, 2.8467326345685824)); + firstMdlWeights.put(4, VectorUtils.of(-2.543461229777167, 0.1317660102621108)); + firstMdlWeights.put(13, VectorUtils.of(-1.6329364937353634, 0.39278455436019116)); + } + + /** + * Test that count of entries in context is equal to initial dataset size * subsampleRatio. + */ + @Test + public void testBaggingContextCount() { + count((ctxCount, countData, integer) -> ctxCount); + } + + /** + * Test that count of entries in data is equal to initial dataset size * subsampleRatio. + */ + @Test + public void testBaggingDataCount() { + count((ctxCount, countData, integer) -> countData.cnt); + } + + /** + * Test that bagged log regression makes correct predictions. + */ + @Test + public void testNaiveBaggingLogRegression() { + Map cacheMock = getCacheMock(twoLinearlySeparableClasses); + + DatasetTrainer trainer = + new LogisticRegressionSGDTrainer() + .withUpdatesStgy(new UpdatesStrategy<>(new SimpleGDUpdateCalculator(0.2), + SimpleGDParameterUpdate.SUM_LOCAL, SimpleGDParameterUpdate.AVG)) + .withMaxIterations(30000) + .withLocIterations(100) + .withBatchSize(10) + .withSeed(123L); + + BaggedTrainer baggedTrainer = TrainerTransformers.makeBagged( + trainer, + 7, + 0.7, + 2, + 2, + new OnMajorityPredictionsAggregator()) + .withEnvironmentBuilder(TestUtils.testEnvBuilder()); + + BaggedModel mdl = baggedTrainer.fit( + cacheMock, + parts, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST) + ); + + Vector weights = ((LogisticRegressionModel)((AdaptableDatasetModel)((ModelsParallelComposition)((AdaptableDatasetModel)mdl + .model()).innerModel()).submodels().get(0)).innerModel()).weights(); + + TestUtils.assertEquals(firstMdlWeights.get(parts), weights, 0.0); + TestUtils.assertEquals(0, mdl.predict(VectorUtils.of(100, 10)), PRECISION); + TestUtils.assertEquals(1, mdl.predict(VectorUtils.of(10, 100)), PRECISION); + } + + /** + * Method used to test counts of data passed in context and in data builders. + * + * @param cntr Function specifying which data we should count. + */ + protected void count(IgniteTriFunction cntr) { + Map cacheMock = getCacheMock(twoLinearlySeparableClasses); + + CountTrainer cntTrainer = new CountTrainer(cntr); + + double subsampleRatio = 0.3; + + BaggedModel mdl = TrainerTransformers.makeBagged( + cntTrainer, + 100, + subsampleRatio, + 2, + 2, + new MeanValuePredictionsAggregator()) + .fit(cacheMock, parts, new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST)); + + Double res = mdl.predict(null); + + TestUtils.assertEquals(twoLinearlySeparableClasses.length * subsampleRatio, res, twoLinearlySeparableClasses.length / 10); + } + + /** + * Get sum of two Long values each of which can be null. + * + * @param a First value. + * @param b Second value. + * @return Sum of parameters. + */ + protected static Long plusOfNullables(Long a, Long b) { + if (a == null) + return b; + + if (b == null) + return a; + + return a + b; + } + + /** + * Trainer used to count entries in context or in data. + */ + protected static class CountTrainer extends DatasetTrainer, Double> { + /** + * Function specifying which entries to count. + */ + private final IgniteTriFunction cntr; + + /** + * Construct instance of this class. 
+ * + * @param cntr Function specifying which entries to count. + */ + public CountTrainer(IgniteTriFunction cntr) { + this.cntr = cntr; + } + + /** {@inheritDoc} */ + @Override public IgniteModel fitWithInitializedDeployingContext( + DatasetBuilder datasetBuilder, + Preprocessor extractor) { + Dataset dataset = datasetBuilder.build( + TestUtils.testEnvBuilder(), + (env, upstreamData, upstreamDataSize) -> upstreamDataSize, + (env, upstreamData, upstreamDataSize, ctx) -> new CountData(upstreamDataSize), + TestUtils.testEnvBuilder().buildForTrainer() + ); + + Long cnt = dataset.computeWithCtx(cntr, BaggingTest::plusOfNullables); + + return x -> Double.valueOf(cnt); + } + + /** {@inheritDoc} */ + @Override public boolean isUpdateable(IgniteModel mdl) { + return true; + } + + /** {@inheritDoc} */ + @Override protected IgniteModel updateModel( + IgniteModel mdl, + DatasetBuilder datasetBuilder, + Preprocessor extractor) { + return fit(datasetBuilder, extractor); + } + + /** {@inheritDoc} */ + @Override public CountTrainer withEnvironmentBuilder(LearningEnvironmentBuilder envBuilder) { + return (CountTrainer)super.withEnvironmentBuilder(envBuilder); + } + } + + /** Data for count trainer. */ + protected static class CountData implements AutoCloseable { + /** Counter. */ + private long cnt; + + /** + * Construct instance of this class. + * + * @param cnt Counter. + */ + public CountData(long cnt) { + this.cnt = cnt; + } + + /** {@inheritDoc} */ + @Override public void close() { + // No-op + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/GDBTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/GDBTrainerTest.java new file mode 100644 index 0000000000000..950e59d172cfc --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/GDBTrainerTest.java @@ -0,0 +1,210 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.composition.boosting; + +import java.util.HashMap; +import java.util.Map; +import java.util.function.BiFunction; +import org.apache.ignite.ml.IgniteModel; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.composition.ModelsComposition; +import org.apache.ignite.ml.composition.boosting.convergence.mean.MeanAbsValueConvergenceCheckerFactory; +import org.apache.ignite.ml.composition.boosting.convergence.simple.ConvergenceCheckerStubFactory; +import org.apache.ignite.ml.composition.predictionsaggregator.WeightedPredictionsAggregator; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.functions.IgniteBiFunction; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.tree.DecisionTreeModel; +import org.apache.ignite.ml.tree.boosting.GDBBinaryClassifierOnTreesTrainer; +import org.apache.ignite.ml.tree.boosting.GDBRegressionOnTreesTrainer; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** */ +public class GDBTrainerTest extends TrainerTest { + /** */ + @Test + public void testFitRegression() { + int size = 100; + double[] xs = new double[size]; + double[] ys = new double[size]; + double from = -5.0; + double to = 5.0; + double step = Math.abs(from - to) / size; + + Map learningSample = new HashMap<>(); + for (int i = 0; i < size; i++) { + xs[i] = from + step * i; + ys[i] = 2 * xs[i]; + learningSample.put(i, new double[] {xs[i], ys[i]}); + } + + GDBTrainer trainer = new GDBRegressionOnTreesTrainer(1.0, 2000, 3, 0.0) + .withUsingIdx(true); + + IgniteModel mdl = trainer.fit( + learningSample, 1, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + double mse = 0.0; + for (int j = 0; j < size; j++) { + double x = xs[j]; + double y = ys[j]; + double p = mdl.predict(VectorUtils.of(x)); + mse += Math.pow(y - p, 2); + } + mse /= size; + + assertEquals(0.0, mse, 0.0001); + + ModelsComposition composition = (ModelsComposition)mdl; + assertTrue(!composition.toString().isEmpty()); + assertTrue(!composition.toString(true).isEmpty()); + assertTrue(!composition.toString(false).isEmpty()); + + composition.getModels().forEach(m -> assertTrue(m instanceof DecisionTreeModel)); + + assertEquals(2000, composition.getModels().size()); + assertTrue(composition.getPredictionsAggregator() instanceof WeightedPredictionsAggregator); + + trainer = trainer.withCheckConvergenceStgyFactory(new MeanAbsValueConvergenceCheckerFactory(0.1)); + assertTrue(trainer.fit(learningSample, 1, new DoubleArrayVectorizer().labeled(1)).getModels().size() < 2000); + } + + /** */ + @Test + public void testFitClassifier() { + testClassifier((trainer, learningSample) -> trainer.fit( + learningSample, 1, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + )); + } + + /** */ + @Test + public void testFitClassifierWithLearningStrategy() { + testClassifier((trainer, learningSample) -> trainer.fit( + new LocalDatasetBuilder<>(learningSample, 1), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + )); + } + + /** */ + private void testClassifier(BiFunction, + IgniteModel> fitter) { + int sampleSize = 100; + double[] xs = new double[sampleSize]; + double[] 
ys = new double[sampleSize]; + + for (int i = 0; i < sampleSize; i++) { + xs[i] = i; + ys[i] = ((int)(xs[i] / 10.0) % 2) == 0 ? -1.0 : 1.0; + } + + Map learningSample = new HashMap<>(); + for (int i = 0; i < sampleSize; i++) + learningSample.put(i, new double[] {xs[i], ys[i]}); + + GDBTrainer trainer = new GDBBinaryClassifierOnTreesTrainer(0.3, 500, 3, 0.0) + .withUsingIdx(true) + .withCheckConvergenceStgyFactory(new MeanAbsValueConvergenceCheckerFactory(0.3)); + + IgniteModel mdl = fitter.apply(trainer, learningSample); + + int errorsCnt = 0; + for (int j = 0; j < sampleSize; j++) { + double x = xs[j]; + double y = ys[j]; + double p = mdl.predict(VectorUtils.of(x)); + if (p != y) + errorsCnt++; + } + + assertEquals(0, errorsCnt); + + assertTrue(mdl instanceof ModelsComposition); + ModelsComposition composition = (ModelsComposition)mdl; + composition.getModels().forEach(m -> assertTrue(m instanceof DecisionTreeModel)); + + assertTrue(composition.getModels().size() < 500); + assertTrue(composition.getPredictionsAggregator() instanceof WeightedPredictionsAggregator); + + trainer = trainer.withCheckConvergenceStgyFactory(new ConvergenceCheckerStubFactory()); + assertEquals(500, ((ModelsComposition)fitter.apply(trainer, learningSample)).getModels().size()); + } + + /** */ + @Test + public void testUpdate() { + int sampleSize = 100; + double[] xs = new double[sampleSize]; + double[] ys = new double[sampleSize]; + + for (int i = 0; i < sampleSize; i++) { + xs[i] = i; + ys[i] = ((int)(xs[i] / 10.0) % 2) == 0 ? -1.0 : 1.0; + } + + Map learningSample = new HashMap<>(); + for (int i = 0; i < sampleSize; i++) + learningSample.put(i, new double[] {xs[i], ys[i]}); + IgniteBiFunction fExtr = (k, v) -> VectorUtils.of(v[0]); + IgniteBiFunction lExtr = (k, v) -> v[1]; + + GDBTrainer classifTrainer = new GDBBinaryClassifierOnTreesTrainer(0.3, 500, 3, 0.0) + .withUsingIdx(true) + .withCheckConvergenceStgyFactory(new MeanAbsValueConvergenceCheckerFactory(0.3)); + GDBTrainer regressTrainer = new GDBRegressionOnTreesTrainer(0.3, 500, 3, 0.0) + .withUsingIdx(true) + .withCheckConvergenceStgyFactory(new MeanAbsValueConvergenceCheckerFactory(0.3)); + + //testUpdate(learningSample, fExtr, lExtr, classifTrainer); + //testUpdate(learningSample, fExtr, lExtr, regressTrainer); + } + + /** */ + /* private void testUpdate(Map dataset, IgniteBiFunction fExtr, + IgniteBiFunction lExtr, GDBTrainer trainer) { + + FeatureLabelExtractorWrapper vectorizer = FeatureLabelExtractorWrapper.wrap(fExtr, lExtr); + ModelsComposition originalMdl = trainer.fit(dataset, 1, vectorizer); + ModelsComposition updatedOnSameDataset = trainer.update(originalMdl, dataset, 1, vectorizer); + + LocalDatasetBuilder epmtyDataset = new LocalDatasetBuilder<>(new HashMap<>(), 1); + FeatureLabelExtractor extractor = (k, v) -> new LabeledVector<>(fExtr.apply(k, v), lExtr.apply(k, v)); + ModelsComposition updatedOnEmptyDataset = + trainer.updateModel(originalMdl, emptyDataset, new FeatureLabelExtractorWrapper<>(extractor)); + + dataset.forEach((k,v) -> { + Vector features = fExtr.apply(k, v); + + Double originalAnswer = originalMdl.predict(features); + Double updatedMdlAnswer1 = updatedOnSameDataset.predict(features); + Double updatedMdlAnswer2 = updatedOnEmptyDataset.predict(features); + + assertEquals(originalAnswer, updatedMdlAnswer1, 0.01); + assertEquals(originalAnswer, updatedMdlAnswer2, 0.01); + }); + }*/ +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/convergence/ConvergenceCheckerTest.java 
b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/convergence/ConvergenceCheckerTest.java new file mode 100644 index 0000000000000..64905c0a2f871 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/convergence/ConvergenceCheckerTest.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.composition.boosting.convergence; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.composition.ModelsComposition; +import org.apache.ignite.ml.composition.boosting.loss.Loss; +import org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.structures.LabeledVector; +import org.junit.Before; + +/** */ +public abstract class ConvergenceCheckerTest { + /** Not converged model. */ + protected ModelsComposition notConvergedMdl = new ModelsComposition(Collections.emptyList(), null) { + @Override public Double predict(Vector features) { + return 2.1 * features.get(0); + } + }; + + /** Converged model. */ + protected ModelsComposition convergedMdl = new ModelsComposition(Collections.emptyList(), null) { + @Override public Double predict(Vector features) { + return 2 * (features.get(0) + 1); + } + }; + + /** Features extractor. */ + protected LabeledDummyVectorizer vectorizer = new LabeledDummyVectorizer<>(); + + /** Data. 
 */
+    protected Map<Integer, LabeledVector<Double>> data;
+
+    /** */
+    @Before
+    public void setUp() {
+        data = new HashMap<>();
+        for (int i = 0; i < 10; i++)
+            data.put(i, VectorUtils.of(i, i + 1).labeled((double)(2 * (i + 1))));
+    }
+
+    /** */
+    public ConvergenceChecker<Integer, LabeledVector<Double>> createChecker(ConvergenceCheckerFactory factory,
+        LocalDatasetBuilder<Integer, LabeledVector<Double>> datasetBuilder) {
+
+        return factory.create(data.size(),
+            x -> x,
+            new Loss() {
+                @Override public double error(long sampleSize, double lb, double mdlAnswer) {
+                    return mdlAnswer - lb;
+                }
+
+                @Override public double gradient(long sampleSize, double lb, double mdlAnswer) {
+                    return mdlAnswer - lb;
+                }
+            },
+            datasetBuilder, vectorizer
+        );
+    }
+}
diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/convergence/mean/MeanAbsValueConvergenceCheckerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/convergence/mean/MeanAbsValueConvergenceCheckerTest.java
new file mode 100644
index 0000000000000..926244dee237d
--- /dev/null
+++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/convergence/mean/MeanAbsValueConvergenceCheckerTest.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.composition.boosting.convergence.mean;
+
+import org.apache.ignite.ml.TestUtils;
+import org.apache.ignite.ml.composition.boosting.convergence.ConvergenceChecker;
+import org.apache.ignite.ml.composition.boosting.convergence.ConvergenceCheckerTest;
+import org.apache.ignite.ml.dataset.impl.local.LocalDataset;
+import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder;
+import org.apache.ignite.ml.dataset.primitive.FeatureMatrixWithLabelsOnHeapData;
+import org.apache.ignite.ml.dataset.primitive.FeatureMatrixWithLabelsOnHeapDataBuilder;
+import org.apache.ignite.ml.dataset.primitive.builder.context.EmptyContextBuilder;
+import org.apache.ignite.ml.dataset.primitive.context.EmptyContext;
+import org.apache.ignite.ml.environment.LearningEnvironmentBuilder;
+import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
+import org.apache.ignite.ml.structures.LabeledVector;
+import org.junit.Assert;
+import org.junit.Test;
+
+/** */
+public class MeanAbsValueConvergenceCheckerTest extends ConvergenceCheckerTest {
+    /** */
+    @Test
+    public void testConvergenceChecking() {
+        LocalDatasetBuilder<Integer, LabeledVector<Double>> datasetBuilder = new LocalDatasetBuilder<>(data, 1);
+        ConvergenceChecker<Integer, LabeledVector<Double>> checker = createChecker(
+            new MeanAbsValueConvergenceCheckerFactory(0.1), datasetBuilder);
+
+        double error = checker.computeError(VectorUtils.of(1, 2), 4.0, notConvergedMdl);
+        LearningEnvironmentBuilder envBuilder = TestUtils.testEnvBuilder();
+
+        Assert.assertEquals(1.9, error, 0.01);
+        Assert.assertFalse(checker.isConverged(envBuilder, datasetBuilder, notConvergedMdl));
+        Assert.assertTrue(checker.isConverged(envBuilder, datasetBuilder, convergedMdl));
+
+        try (LocalDataset<EmptyContext, FeatureMatrixWithLabelsOnHeapData> dataset = datasetBuilder.build(
+            envBuilder,
+            new EmptyContextBuilder<>(), new FeatureMatrixWithLabelsOnHeapDataBuilder<>(vectorizer),
+            envBuilder.buildForTrainer())) {
+
+            double onDSError = checker.computeMeanErrorOnDataset(dataset, notConvergedMdl);
+            Assert.assertEquals(1.55, onDSError, 0.01);
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /** Mean error is more sensitive to anomalies in the data. */
+    @Test
+    public void testConvergenceCheckingWithAnomaliesInData() {
+        data.put(666, VectorUtils.of(10, 11).labeled(100000.0));
+        LocalDatasetBuilder<Integer, LabeledVector<Double>> datasetBuilder = new LocalDatasetBuilder<>(data, 1);
+        ConvergenceChecker<Integer, LabeledVector<Double>> checker = createChecker(
+            new MeanAbsValueConvergenceCheckerFactory(0.1), datasetBuilder);
+
+        try (LocalDataset<EmptyContext, FeatureMatrixWithLabelsOnHeapData> dataset = datasetBuilder.build(
+            TestUtils.testEnvBuilder(),
+            new EmptyContextBuilder<>(), new FeatureMatrixWithLabelsOnHeapDataBuilder<>(vectorizer),
+            TestUtils.testEnvBuilder().buildForTrainer())) {
+
+            double onDSError = checker.computeMeanErrorOnDataset(dataset, notConvergedMdl);
+            Assert.assertEquals(9090.41, onDSError, 0.01);
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+}
diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/convergence/median/MedianOfMedianConvergenceCheckerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/convergence/median/MedianOfMedianConvergenceCheckerTest.java
new file mode 100644
index 0000000000000..9370df75edf6e
--- /dev/null
+++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/boosting/convergence/median/MedianOfMedianConvergenceCheckerTest.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.composition.boosting.convergence.median;
+
+import org.apache.ignite.ml.TestUtils;
+import org.apache.ignite.ml.composition.boosting.convergence.ConvergenceChecker;
+import org.apache.ignite.ml.composition.boosting.convergence.ConvergenceCheckerTest;
+import org.apache.ignite.ml.dataset.impl.local.LocalDataset;
+import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder;
+import org.apache.ignite.ml.dataset.primitive.FeatureMatrixWithLabelsOnHeapData;
+import org.apache.ignite.ml.dataset.primitive.FeatureMatrixWithLabelsOnHeapDataBuilder;
+import org.apache.ignite.ml.dataset.primitive.builder.context.EmptyContextBuilder;
+import org.apache.ignite.ml.dataset.primitive.context.EmptyContext;
+import org.apache.ignite.ml.environment.LearningEnvironmentBuilder;
+import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
+import org.apache.ignite.ml.structures.LabeledVector;
+import org.junit.Assert;
+import org.junit.Test;
+
+/** */
+public class MedianOfMedianConvergenceCheckerTest extends ConvergenceCheckerTest {
+    /** */
+    @Test
+    public void testConvergenceChecking() {
+        data.put(666, VectorUtils.of(10, 11).labeled(100000.0));
+        LocalDatasetBuilder<Integer, LabeledVector<Double>> datasetBuilder = new LocalDatasetBuilder<>(data, 1);
+
+        ConvergenceChecker<Integer, LabeledVector<Double>> checker = createChecker(
+            new MedianOfMedianConvergenceCheckerFactory(0.1), datasetBuilder);
+
+        double error = checker.computeError(VectorUtils.of(1, 2), 4.0, notConvergedMdl);
+        Assert.assertEquals(1.9, error, 0.01);
+
+        LearningEnvironmentBuilder envBuilder = TestUtils.testEnvBuilder();
+
+        Assert.assertFalse(checker.isConverged(envBuilder, datasetBuilder, notConvergedMdl));
+        Assert.assertTrue(checker.isConverged(envBuilder, datasetBuilder, convergedMdl));
+
+        try (LocalDataset<EmptyContext, FeatureMatrixWithLabelsOnHeapData> dataset = datasetBuilder.build(
+            envBuilder,
+            new EmptyContextBuilder<>(), new FeatureMatrixWithLabelsOnHeapDataBuilder<>(vectorizer),
+            TestUtils.testEnvBuilder().buildForTrainer())) {
+
+            double onDSError = checker.computeMeanErrorOnDataset(dataset, notConvergedMdl);
+            Assert.assertEquals(1.6, onDSError, 0.01);
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+}
diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/predictionsaggregator/MeanValuePredictionsAggregatorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/predictionsaggregator/MeanValuePredictionsAggregatorTest.java
new file mode 100644
index 0000000000000..0d46361255b3a
--- /dev/null
+++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/predictionsaggregator/MeanValuePredictionsAggregatorTest.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.composition.predictionsaggregator; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** */ +public class MeanValuePredictionsAggregatorTest { + /** Aggregator. */ + private PredictionsAggregator aggregator = new MeanValuePredictionsAggregator(); + + /** */ + @Test + public void testApply() { + assertEquals(0.75, aggregator.apply(new double[]{1.0, 1.0, 1.0, 0.0}), 0.001); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/predictionsaggregator/OnMajorityPredictionsAggregatorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/predictionsaggregator/OnMajorityPredictionsAggregatorTest.java new file mode 100644 index 0000000000000..b98a452c141ba --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/predictionsaggregator/OnMajorityPredictionsAggregatorTest.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.composition.predictionsaggregator; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link OnMajorityPredictionsAggregator}. + */ +public class OnMajorityPredictionsAggregatorTest { + /** Aggregator. 
*/ + private PredictionsAggregator aggregator = new OnMajorityPredictionsAggregator(); + + /** */ + @Test + public void testApply() { + assertEquals(1.0, aggregator.apply(new double[]{1.0, 1.0, 1.0, 0.0}), 0.001); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/predictionsaggregator/WeightedPredictionsAggregatorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/predictionsaggregator/WeightedPredictionsAggregatorTest.java new file mode 100644 index 0000000000000..79668d24a1f3c --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/composition/predictionsaggregator/WeightedPredictionsAggregatorTest.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.composition.predictionsaggregator; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** */ +public class WeightedPredictionsAggregatorTest { + /** */ + public static final double[] EMPTY_DOUBLE_ARRAY = {}; + + /** */ + @Test + public void testApply1() { + WeightedPredictionsAggregator aggregator = new WeightedPredictionsAggregator(new double[] {}); + assertEquals(0.0, aggregator.apply(new double[] {}), 0.001); + } + + /** */ + @Test + public void testApply2() { + WeightedPredictionsAggregator aggregator = new WeightedPredictionsAggregator(new double[] {1.0, 0.5, 0.25}); + assertEquals(3.0, aggregator.apply(new double[] {1.0, 2.0, 4.0}), 0.001); + } + + /** Non-equal weight vector and predictions case */ + @Test(expected = IllegalArgumentException.class) + public void testIllegalArguments() { + WeightedPredictionsAggregator aggregator = new WeightedPredictionsAggregator(new double[] {1.0, 0.5, 0.25}); + aggregator.apply(new double[] { }); + } + + /** */ + @Test + public void testToString() { + PredictionsAggregator aggr = (PredictionsAggregator)doubles -> null; + assertTrue(!aggr.toString().isEmpty()); + assertTrue(!aggr.toString(true).isEmpty()); + assertTrue(!aggr.toString(false).isEmpty()); + + WeightedPredictionsAggregator aggregator = new WeightedPredictionsAggregator(EMPTY_DOUBLE_ARRAY); + assertTrue(!aggregator.toString().isEmpty()); + assertTrue(!aggregator.toString(true).isEmpty()); + assertTrue(!aggregator.toString(false).isEmpty()); + } + +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/DatasetTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/DatasetTestSuite.java new file mode 100644 index 0000000000000..7c6c516127c56 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/DatasetTestSuite.java @@ -0,0 
+1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.dataset; + +import org.apache.ignite.ml.dataset.feature.ObjectHistogramTest; +import org.apache.ignite.ml.dataset.feature.extractor.VectorizerTest; +import org.apache.ignite.ml.dataset.impl.cache.CacheBasedDatasetBuilderTest; +import org.apache.ignite.ml.dataset.impl.cache.CacheBasedDatasetTest; +import org.apache.ignite.ml.dataset.impl.cache.util.ComputeUtilsTest; +import org.apache.ignite.ml.dataset.impl.cache.util.DatasetAffinityFunctionWrapperTest; +import org.apache.ignite.ml.dataset.impl.cache.util.IteratorWithConcurrentModificationCheckerTest; +import org.apache.ignite.ml.dataset.impl.cache.util.PartitionDataStorageTest; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilderTest; +import org.apache.ignite.ml.dataset.primitive.DatasetWrapperTest; +import org.apache.ignite.ml.dataset.primitive.SimpleDatasetTest; +import org.apache.ignite.ml.dataset.primitive.SimpleLabeledDatasetTest; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in org.apache.ignite.ml.dataset.* package. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + DatasetWrapperTest.class, + DatasetAffinityFunctionWrapperTest.class, + IteratorWithConcurrentModificationCheckerTest.class, + PartitionDataStorageTest.class, + LocalDatasetBuilderTest.class, + SimpleDatasetTest.class, + SimpleLabeledDatasetTest.class, + DatasetWrapperTest.class, + ObjectHistogramTest.class, + ComputeUtilsTest.class, + CacheBasedDatasetBuilderTest.class, + CacheBasedDatasetTest.class, + VectorizerTest.class +}) +public class DatasetTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/feature/ObjectHistogramTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/feature/ObjectHistogramTest.java new file mode 100644 index 0000000000000..91e8b125e97d5 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/feature/ObjectHistogramTest.java @@ -0,0 +1,215 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.dataset.feature; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.Random; +import java.util.TreeMap; +import org.apache.ignite.ml.math.functions.IgniteFunction; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** */ +public class ObjectHistogramTest { + /** Data first partition. */ + private double[] dataFirstPart = new double[] {0., 0., 0., 0., 1., 1., 1, 2., 2., 3., 4., 5.}; + + /** Data second partition. */ + private double[] dataSecondPart = new double[] {0., 1., 0., 1., 0., 1., 0, 1., 0., 1., 0., 5., 6.}; + + /** */ + private ObjectHistogram hist1; + + /** */ + private ObjectHistogram hist2; + + /** + * + */ + @Before + public void setUp() throws Exception { + hist1 = new TestHist1(); + hist2 = new TestHist1(); + + fillHist(hist1, dataFirstPart); + fillHist(hist2, dataSecondPart); + } + + /** + * @param hist History. + * @param data Data. + */ + private void fillHist(ObjectHistogram hist, double[] data) { + for (int i = 0; i < data.length; i++) + hist.addElement(data[i]); + } + + /** + * + */ + @Test + public void testBuckets() { + testBuckets(hist1, new int[] {0, 1, 2, 3, 4, 5}, new int[] {4, 3, 2, 1, 1, 1}); + testBuckets(hist2, new int[] {0, 1, 5, 6}, new int[] {6, 5, 1, 1}); + } + + /** + * @param hist History. + * @param expBuckets Expected buckets. + * @param expCounters Expected counters. 
+     */
+    private void testBuckets(ObjectHistogram<Double> hist, int[] expBuckets, int[] expCounters) {
+        int size = hist.buckets().size();
+        int[] buckets = new int[size];
+        int[] counters = new int[size];
+        int ptr = 0;
+        for (int bucket : hist.buckets()) {
+            counters[ptr] = hist.getValue(bucket).get().intValue();
+            buckets[ptr++] = bucket;
+        }
+
+        assertArrayEquals(expBuckets, buckets);
+        assertArrayEquals(expCounters, counters);
+    }
+
+    /**
+     *
+     */
+    @Test
+    public void testAdd() {
+        double val = 100.0;
+        hist1.addElement(val);
+        Optional<Double> cntr = hist1.getValue(computeBucket(val));
+
+        assertTrue(cntr.isPresent());
+        assertEquals(1, cntr.get().intValue());
+    }
+
+    /**
+     *
+     */
+    @Test
+    public void testAddHist() {
+        ObjectHistogram<Double> res = hist1.plus(hist2);
+        testBuckets(res, new int[] {0, 1, 2, 3, 4, 5, 6}, new int[] {10, 8, 2, 1, 1, 2, 1});
+    }
+
+    /**
+     *
+     */
+    @Test
+    public void testDistributionFunction() {
+        TreeMap<Integer, Double> distribution = hist1.computeDistributionFunction();
+
+        int[] buckets = new int[distribution.size()];
+        double[] sums = new double[distribution.size()];
+
+        int ptr = 0;
+        for (int bucket : distribution.keySet()) {
+            sums[ptr] = distribution.get(bucket);
+            buckets[ptr++] = bucket;
+        }
+
+        assertArrayEquals(new int[] {0, 1, 2, 3, 4, 5}, buckets);
+        assertArrayEquals(new double[] {4., 7., 9., 10., 11., 12.}, sums, 0.01);
+    }
+
+    /** */
+    @Test
+    public void testOfSum() {
+        IgniteFunction<Double, Integer> bucketMap = x -> (int)(Math.ceil(x * 100) % 100);
+        IgniteFunction<Double, Double> cntrMap = x -> Math.pow(x, 2);
+
+        ObjectHistogram<Double> forAllHistogram = new TestHist2();
+        Random rnd = new Random();
+        List<ObjectHistogram<Double>> partitions = new ArrayList<>();
+        int cntOfPartitions = rnd.nextInt(100) + 1;
+        int sizeOfDataset = rnd.nextInt(10000) + 1;
+        for (int i = 0; i < cntOfPartitions; i++)
+            partitions.add(new TestHist2());
+
+        for (int i = 0; i < sizeOfDataset; i++) {
+            double objVal = rnd.nextDouble();
+            forAllHistogram.addElement(objVal);
+            partitions.get(rnd.nextInt(partitions.size())).addElement(objVal);
+        }
+
+        Optional<ObjectHistogram<Double>> leftSum = partitions.stream().reduce(ObjectHistogram::plus);
+        Optional<ObjectHistogram<Double>> rightSum = partitions.stream().reduce((x, y) -> y.plus(x));
+        assertTrue(leftSum.isPresent());
+        assertTrue(rightSum.isPresent());
+        assertTrue(forAllHistogram.isEqualTo(leftSum.get()));
+        assertTrue(forAllHistogram.isEqualTo(rightSum.get()));
+        assertTrue(leftSum.get().isEqualTo(rightSum.get()));
+    }
+
+    /**
+     * @param val Value.
+     */
+    private int computeBucket(Double val) {
+        return (int)Math.rint(val);
+    }
+
+    /** */
+    private static class TestHist1 extends ObjectHistogram<Double> {
+        /** Serial version uid. */
+        private static final long serialVersionUID = 2397005559193012602L;
+
+        /** {@inheritDoc} */
+        @Override public Integer mapToBucket(Double obj) {
+            return (int)Math.rint(obj);
+        }
+
+        /** {@inheritDoc} */
+        @Override public Double mapToCounter(Double obj) {
+            return 1.;
+        }
+
+        /** {@inheritDoc} */
+        @Override public ObjectHistogram<Double> newInstance() {
+            return new TestHist1();
+        }
+    }
+
+    /** */
+    private static class TestHist2 extends ObjectHistogram<Double> {
+        /** Serial version uid.
*/ + private static final long serialVersionUID = -2080037140817825107L; + + /** {@inheritDoc} */ + @Override public Integer mapToBucket(Double x) { + return (int)(Math.ceil(x * 100) % 100); + } + + /** {@inheritDoc} */ + @Override public Double mapToCounter(Double x) { + return Math.pow(x, 2); + } + + /** {@inheritDoc} */ + @Override public ObjectHistogram newInstance() { + return new TestHist2(); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/feature/extractor/VectorizerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/feature/extractor/VectorizerTest.java new file mode 100644 index 0000000000000..ba445fd87f0d9 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/feature/extractor/VectorizerTest.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.dataset.feature.extractor; + +import java.util.Arrays; +import java.util.stream.IntStream; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.structures.LabeledVector; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +/** + * Tests for vectorizer API. 
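+ * A minimal usage sketch mirroring the cases below (type parameters as assumed here:
+ * cache key, raw upstream value, coordinate index, label):
+ * <pre>{@code
+ * Vectorizer<Integer, double[], Integer, Double> vectorizer =
+ *     new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);
+ * LabeledVector<Double> row = vectorizer.apply(1, new double[] {7., 1., 2.});
+ * // row.label() == 7., row.features() == [1., 2.]
+ * }</pre>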
+ */
+public class VectorizerTest {
+    /** */
+    @Test
+    public void vectorizerShouldReturnAllFeaturesByDefault() {
+        double[] features = {1., 2., 3.};
+        DoubleArrayVectorizer<Integer> vectorizer = new DoubleArrayVectorizer<>();
+        LabeledVector<Double> res = vectorizer.apply(1, features);
+        assertEquals(res.features().size(), 3);
+        assertArrayEquals(res.features().asArray(), features, 0.);
+        assertEquals(0., res.label(), 0.); //for doubles zero by default
+    }
+
+    /** */
+    @Test
+    public void vectorizerShouldSetLabelByCoordinate() {
+        double[] features = {0., 1., 2.};
+        for (int i = 0; i < features.length; i++) {
+            Vectorizer<Integer, double[], Integer, Double> vectorizer = new DoubleArrayVectorizer<Integer>().labeled(i);
+            LabeledVector<Double> res = vectorizer.apply(1, features);
+            assertEquals(res.features().size(), 2);
+
+            final int filteredId = i;
+            double[] expFeatures = Arrays.stream(features).filter(f -> Math.abs(f - features[filteredId]) > 0.01).toArray();
+            assertArrayEquals(res.features().asArray(), expFeatures, 0.);
+            assertEquals((double)i, res.label(), 0.);
+        }
+    }
+
+    /** */
+    @Test
+    public void vectorizerShouldSetLabelByEnum() {
+        double[] features = {0., 1., 2.};
+        Vectorizer<Integer, double[], Integer, Double> vectorizer = new DoubleArrayVectorizer<Integer>()
+            .labeled(Vectorizer.LabelCoordinate.FIRST);
+        LabeledVector<Double> res = vectorizer.apply(1, features);
+        assertEquals(res.features().size(), 2);
+        assertArrayEquals(res.features().asArray(), new double[] {1., 2.}, 0.);
+        assertEquals(0., res.label(), 0.);
+
+        vectorizer = new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST);
+        res = vectorizer.apply(1, features);
+        assertEquals(res.features().size(), 2);
+        assertArrayEquals(res.features().asArray(), new double[] {0., 1.}, 0.);
+        assertEquals(2., res.label(), 0.);
+    }
+
+    /** */
+    @Test
+    public void vectorizerShouldBeAbleExcludeFeatures() {
+        double[] features = IntStream.range(0, 100).mapToDouble(Double::valueOf).toArray();
+        Integer[] excludedIds = IntStream.range(2, 99).boxed().toArray(Integer[]::new);
+        Vectorizer<Integer, double[], Integer, Double> vectorizer = new DoubleArrayVectorizer<Integer>()
+            .exclude(excludedIds)
+            .labeled(Vectorizer.LabelCoordinate.FIRST);
+
+        LabeledVector<Double> res = vectorizer.apply(1, features);
+        assertEquals(res.features().size(), 2);
+        assertArrayEquals(res.features().asArray(), new double[] {1., 99.}, 0.);
+        assertEquals(0., res.label(), 0.);
+    }
+}
diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/CacheBasedDatasetBuilderTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/CacheBasedDatasetBuilderTest.java
new file mode 100644
index 0000000000000..55c9a16016854
--- /dev/null
+++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/CacheBasedDatasetBuilderTest.java
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.dataset.impl.cache; + +import java.util.Collection; +import java.util.UUID; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.cache.affinity.Affinity; +import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; +import org.apache.ignite.cluster.ClusterNode; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.dataset.UpstreamEntry; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.junit.Test; + +/** + * Tests for {@link CacheBasedDatasetBuilder}. + */ +public class CacheBasedDatasetBuilderTest extends GridCommonAbstractTest { + /** Number of nodes in grid. */ + private static final int NODE_COUNT = 10; + + /** Ignite instance. */ + private Ignite ignite; + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + for (int i = 1; i <= NODE_COUNT; i++) + startGrid(i); + } + + + /** {@inheritDoc} */ + @Override protected void beforeTest() { + /* Grid instance. */ + ignite = grid(NODE_COUNT); + ignite.configuration().setPeerClassLoadingEnabled(true); + IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); + } + + /** + * Tests that partitions of the dataset cache are placed on the same nodes as upstream cache. + */ + @Test + public void testBuild() { + IgniteCache upstreamCache = createTestCache(100, 10); + CacheBasedDatasetBuilder builder = new CacheBasedDatasetBuilder<>(ignite, upstreamCache); + + CacheBasedDataset dataset = builder.build( + TestUtils.testEnvBuilder(), + (env, upstream, upstreamSize) -> upstreamSize, + (env, upstream, upstreamSize, ctx) -> null, + TestUtils.testEnvBuilder().buildForTrainer() + ); + + Affinity upstreamAffinity = ignite.affinity(upstreamCache.getName()); + Affinity datasetAffinity = ignite.affinity(dataset.getDatasetCache().getName()); + + int upstreamPartitions = upstreamAffinity.partitions(); + int datasetPartitions = datasetAffinity.partitions(); + + assertEquals(upstreamPartitions, datasetPartitions); + + for (int part = 0; part < upstreamPartitions; part++) { + Collection upstreamPartNodes = upstreamAffinity.mapPartitionToPrimaryAndBackups(part); + Collection datasetPartNodes = datasetAffinity.mapPartitionToPrimaryAndBackups(part); + + assertEqualsCollections(upstreamPartNodes, datasetPartNodes); + } + } + + /** + * Tests that predicate works correctly. 
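+     * The upstream cache holds keys 1 and 2, so the {@code (k, v) -> k % 2 == 0} filter is expected
+     * to leave only the entry with key 2 visible to the partition context and data builders.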
+ */ + @Test + public void testBuildWithPredicate() { + CacheConfiguration upstreamCacheConfiguration = new CacheConfiguration<>(); + upstreamCacheConfiguration.setAffinity(new RendezvousAffinityFunction(false, 1)); + upstreamCacheConfiguration.setName(UUID.randomUUID().toString()); + + IgniteCache upstreamCache = ignite.createCache(upstreamCacheConfiguration); + upstreamCache.put(1, 1); + upstreamCache.put(2, 2); + + CacheBasedDatasetBuilder builder = new CacheBasedDatasetBuilder<>( + ignite, + upstreamCache, + (k, v) -> k % 2 == 0 + ); + + CacheBasedDataset dataset = builder.build( + TestUtils.testEnvBuilder(), + (env, upstream, upstreamSize) -> { + UpstreamEntry entry = upstream.next(); + assertEquals(Integer.valueOf(2), entry.getKey()); + assertEquals(Integer.valueOf(2), entry.getValue()); + assertFalse(upstream.hasNext()); + return 0L; + }, + (env, upstream, upstreamSize, ctx) -> { + UpstreamEntry entry = upstream.next(); + assertEquals(Integer.valueOf(2), entry.getKey()); + assertEquals(Integer.valueOf(2), entry.getValue()); + assertFalse(upstream.hasNext()); + return null; + }, + TestUtils.testEnvBuilder().buildForTrainer() + ); + + dataset.compute(data -> {}); + } + + /** + * Generate an Ignite Cache with the specified size and number of partitions for testing purposes. + * + * @param size Size of an Ignite Cache. + * @param parts Number of partitions. + * @return Ignite Cache instance. + */ + private IgniteCache createTestCache(int size, int parts) { + CacheConfiguration cacheConfiguration = new CacheConfiguration<>(); + cacheConfiguration.setName(UUID.randomUUID().toString()); + cacheConfiguration.setAffinity(new RendezvousAffinityFunction(false, parts)); + + IgniteCache cache = ignite.createCache(cacheConfiguration); + + for (int i = 0; i < size; i++) + cache.put(i, "DATA_" + i); + + return cache; + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/CacheBasedDatasetTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/CacheBasedDatasetTest.java new file mode 100644 index 0000000000000..1a951a8fceaba --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/CacheBasedDatasetTest.java @@ -0,0 +1,363 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.dataset.impl.cache; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.locks.LockSupport; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteAtomicLong; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.IgniteLock; +import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.internal.IgniteKernal; +import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; +import org.apache.ignite.internal.processors.cache.IgniteCacheProxy; +import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtCacheAdapter; +import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition; +import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionTopology; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.internal.util.typedef.G; +import org.apache.ignite.lang.IgnitePredicate; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.dataset.primitive.data.SimpleDatasetData; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.junit.Test; + +/** + * Tests for {@link CacheBasedDataset}. + */ +public class CacheBasedDatasetTest extends GridCommonAbstractTest { + /** Number of nodes in grid. */ + private static final int NODE_COUNT = 4; + + /** Ignite instance. */ + private Ignite ignite; + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + for (int i = 1; i <= NODE_COUNT; i++) + startGrid(i); + } + + /** {@inheritDoc} */ + @Override protected void afterTestsStopped() { + stopAllGrids(); + } + + /** {@inheritDoc} */ + @Override protected void beforeTest() { + /* Grid instance. */ + ignite = grid(NODE_COUNT); + ignite.configuration().setPeerClassLoadingEnabled(true); + IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); + } + + /** + * Tests that partitions of the upstream cache and the partition {@code context} cache are reserved during + * computations on dataset. Reservation means that partitions won't be unloaded from the node before computation is + * completed. 
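+     * The compute job is parked on a distributed reentrant lock while the reservation counters
+     * of the local partitions are polled on every node.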
+ */ + @Test + public void testPartitionExchangeDuringComputeCall() { + int partitions = 4; + + IgniteCache upstreamCache = generateTestData(4, 0); + + CacheBasedDatasetBuilder builder = new CacheBasedDatasetBuilder<>(ignite, upstreamCache); + + CacheBasedDataset dataset = builder.build( + TestUtils.testEnvBuilder(), + (env, upstream, upstreamSize) -> upstreamSize, + (env, upstream, upstreamSize, ctx) -> new SimpleDatasetData(new double[0], 0), + TestUtils.testEnvBuilder().buildForTrainer() + ); + + assertEquals("Upstream cache name from dataset", + upstreamCache.getName(), dataset.getUpstreamCache().getName()); + + assertTrue("Before computation all partitions should not be reserved", + areAllPartitionsNotReserved(upstreamCache.getName(), dataset.getDatasetCache().getName())); + + UUID numOfStartedComputationsId = UUID.randomUUID(); + IgniteAtomicLong numOfStartedComputations = ignite.atomicLong(numOfStartedComputationsId.toString(), 0, true); + + UUID computationsLockId = UUID.randomUUID(); + IgniteLock computationsLock = ignite.reentrantLock(computationsLockId.toString(), false, true, true); + + // lock computations lock to stop computations in the middle + computationsLock.lock(); + + try { + new Thread(() -> dataset.compute((data, partIndex) -> { + // track number of started computations + ignite.atomicLong(numOfStartedComputationsId.toString(), 0, false).incrementAndGet(); + ignite.reentrantLock(computationsLockId.toString(), false, true, false).lock(); + ignite.reentrantLock(computationsLockId.toString(), false, true, false).unlock(); + })).start(); + // wait all computations to start + + while (numOfStartedComputations.get() < partitions) { + } + + assertTrue("During computation all partitions should be reserved", + areAllPartitionsReserved(upstreamCache.getName(), dataset.getDatasetCache().getName())); + } + finally { + computationsLock.unlock(); + } + + assertTrue("All partitions should be released", + areAllPartitionsNotReserved(upstreamCache.getName(), dataset.getDatasetCache().getName())); + } + + /** + * Tests that partitions of the upstream cache and the partition {@code context} cache are reserved during + * computations on dataset. Reservation means that partitions won't be unloaded from the node before computation is + * completed. 
+ */ + @Test + public void testPartitionExchangeDuringComputeWithCtxCall() { + int partitions = 4; + + IgniteCache upstreamCache = generateTestData(4, 0); + + CacheBasedDatasetBuilder builder = new CacheBasedDatasetBuilder<>(ignite, upstreamCache); + + CacheBasedDataset dataset = builder.build( + TestUtils.testEnvBuilder(), + (env, upstream, upstreamSize) -> upstreamSize, + (env, upstream, upstreamSize, ctx) -> new SimpleDatasetData(new double[0], 0), + TestUtils.testEnvBuilder().buildForTrainer() + ); + + assertTrue("Before computation all partitions should not be reserved", + areAllPartitionsNotReserved(upstreamCache.getName(), dataset.getDatasetCache().getName())); + + UUID numOfStartedComputationsId = UUID.randomUUID(); + IgniteAtomicLong numOfStartedComputations = ignite.atomicLong(numOfStartedComputationsId.toString(), 0, true); + + UUID computationsLockId = UUID.randomUUID(); + IgniteLock computationsLock = ignite.reentrantLock(computationsLockId.toString(), false, true, true); + + // lock computations lock to stop computations in the middle + computationsLock.lock(); + + try { + new Thread(() -> dataset.computeWithCtx((ctx, data, partIndex) -> { + // track number of started computations + ignite.atomicLong(numOfStartedComputationsId.toString(), 0, false).incrementAndGet(); + ignite.reentrantLock(computationsLockId.toString(), false, true, false).lock(); + ignite.reentrantLock(computationsLockId.toString(), false, true, false).unlock(); + })).start(); + // wait all computations to start + + while (numOfStartedComputations.get() < partitions) { + } + + assertTrue("During computation all partitions should be reserved", + areAllPartitionsReserved(upstreamCache.getName(), dataset.getDatasetCache().getName())); + } + finally { + computationsLock.unlock(); + } + + assertTrue("All partitions should be released", + areAllPartitionsNotReserved(upstreamCache.getName(), dataset.getDatasetCache().getName())); + } + + /** + * Checks that all partitions of all specified caches are not reserved. + * + * @param cacheNames Cache names to be checked. + * @return {@code true} if all partitions are not reserved, otherwise {@code false}. + */ + private boolean areAllPartitionsNotReserved(String... cacheNames) { + return checkAllPartitions(partition -> partition.reservations() == 0, cacheNames); + } + + /** + * Checks that all partitions of all specified caches not reserved. + * + * @param cacheNames Cache names to be checked. + * @return {@code true} if all partitions are reserved, otherwise {@code false}. + */ + private boolean areAllPartitionsReserved(String... cacheNames) { + return checkAllPartitions(partition -> partition.reservations() != 0, cacheNames); + } + + /** + * Checks that all partitions of all specified caches satisfies the given predicate. + * + * @param pred Predicate. + * @param cacheNames Cache names. + * @return {@code true} if all partitions satisfies the given predicate. + */ + private boolean checkAllPartitions(IgnitePredicate pred, String... 
cacheNames) { + boolean flag = false; + long checkingStartTs = System.currentTimeMillis(); + + while (!flag && (System.currentTimeMillis() - checkingStartTs) < 30_000) { + LockSupport.parkNanos(200L * 1000 * 1000); + flag = true; + + for (String cacheName : cacheNames) { + IgniteClusterPartitionsState state = IgniteClusterPartitionsState.getCurrentState(cacheName); + + for (IgniteInstancePartitionsState instanceState : state.instances.values()) + for (GridDhtLocalPartition partition : instanceState.parts) + if (partition != null) + flag &= pred.apply(partition); + } + } + + return flag; + } + + /** + * Aggregated data about cache partitions in Ignite cluster. + */ + private static class IgniteClusterPartitionsState { + /** */ + private final String cacheName; + + /** */ + private final Map instances; + + /** */ + static IgniteClusterPartitionsState getCurrentState(String cacheName) { + Map instances = new HashMap<>(); + + for (Ignite ignite : G.allGrids()) { + IgniteKernal igniteKernal = (IgniteKernal)ignite; + IgniteCacheProxy cache = igniteKernal.context().cache().jcache(cacheName); + + GridDhtCacheAdapter dht = dht(cache); + + GridDhtPartitionTopology top = dht.topology(); + + AffinityTopologyVersion topVer = dht.context().shared().exchange().readyAffinityVersion(); + List parts = new ArrayList<>(); + for (int p = 0; p < cache.context().config().getAffinity().partitions(); p++) { + GridDhtLocalPartition part = top.localPartition(p, AffinityTopologyVersion.NONE, false); + parts.add(part); + } + instances.put(ignite.cluster().localNode().id(), new IgniteInstancePartitionsState(topVer, parts)); + } + + return new IgniteClusterPartitionsState(cacheName, instances); + } + + /** */ + IgniteClusterPartitionsState(String cacheName, + Map instances) { + this.cacheName = cacheName; + this.instances = instances; + } + + /** */ + @Override public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("Cache ").append(cacheName).append(" is in following state:").append("\n"); + for (Map.Entry e : instances.entrySet()) { + UUID instanceId = e.getKey(); + IgniteInstancePartitionsState instanceState = e.getValue(); + builder.append("\n\t") + .append("Node ") + .append(instanceId) + .append(" with topology version [") + .append(instanceState.topVer.topologyVersion()) + .append(", ") + .append(instanceState.topVer.minorTopologyVersion()) + .append("] contains following partitions:") + .append("\n\n"); + builder.append("\t\t---------------------------------------------------------------------------------"); + builder.append("--------------------\n"); + builder.append("\t\t| ID | STATE | RELOAD | RESERVATIONS | SHOULD BE RENTING | PRIMARY |"); + builder.append(" DATA STORE SIZE |\n"); + builder.append("\t\t---------------------------------------------------------------------------------"); + builder.append("--------------------\n"); + for (GridDhtLocalPartition partition : instanceState.parts) + if (partition != null) { + builder.append("\t\t") + .append(String.format("| %3d |", partition.id())) + .append(String.format(" %7s |", partition.state())) + .append(String.format(" %13s |", partition.reservations())) + .append(String.format(" %8s |", partition.primary(instanceState.topVer))) + .append(String.format(" %16d |", partition.dataStore().fullSize())) + .append("\n"); + builder.append("\t\t-------------------------------------------------------------------------"); + builder.append("----------------------------\n"); + } + } + return builder.toString(); + } + } + + /** + * 
Aggregated data about cache partitions in Ignite instance. + */ + private static class IgniteInstancePartitionsState { + /** */ + private final AffinityTopologyVersion topVer; + + /** */ + private final List parts; + + /** */ + IgniteInstancePartitionsState(AffinityTopologyVersion topVer, + List parts) { + this.topVer = topVer; + this.parts = parts; + } + + /** */ + public AffinityTopologyVersion getTopVer() { + return topVer; + } + + /** */ + public List getParts() { + return parts; + } + } + + /** + * Generates Ignite Cache with data for tests. + * + * @return Ignite Cache with data for tests. + */ + private IgniteCache generateTestData(int partitions, int backups) { + CacheConfiguration cacheConfiguration = new CacheConfiguration<>(); + + cacheConfiguration.setName(UUID.randomUUID().toString()); + cacheConfiguration.setAffinity(new RendezvousAffinityFunction(false, partitions)); + cacheConfiguration.setBackups(backups); + + IgniteCache cache = ignite.createCache(cacheConfiguration); + + for (int i = 0; i < 1000; i++) + cache.put(i, "TEST" + i); + + return cache; + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/ComputeUtilsTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/ComputeUtilsTest.java new file mode 100644 index 0000000000000..668860b7b1209 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/ComputeUtilsTest.java @@ -0,0 +1,323 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.dataset.impl.cache.util; + +import java.io.Serializable; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.UUID; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteAtomicLong; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.Ignition; +import org.apache.ignite.cache.affinity.AffinityFunction; +import org.apache.ignite.cache.affinity.AffinityFunctionContext; +import org.apache.ignite.cluster.ClusterNode; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.dataset.UpstreamEntry; +import org.apache.ignite.ml.dataset.UpstreamTransformerBuilder; +import org.apache.ignite.ml.environment.deploy.DeployingContext; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.junit.Test; + +/** + * Tests for {@link ComputeUtils}. + */ +public class ComputeUtilsTest extends GridCommonAbstractTest { + /** Number of nodes in grid. 
*/ + private static final int NODE_COUNT = 10; + + /** Ignite instance. */ + private Ignite ignite; + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + for (int i = 1; i <= NODE_COUNT; i++) + startGrid(i); + } + + /** {@inheritDoc} */ + @Override protected void beforeTest() { + /* Grid instance. */ + ignite = grid(NODE_COUNT); + ignite.configuration().setPeerClassLoadingEnabled(true); + IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); + } + + /** + * Tests that in case two caches maintain their partitions on different nodes, affinity call won't be completed. + */ + @Test + public void testAffinityCallWithRetriesNegative() { + ClusterNode node1 = grid(1).cluster().localNode(); + ClusterNode node2 = grid(2).cluster().localNode(); + + String firstCacheName = "CACHE_1_" + UUID.randomUUID(); + String secondCacheName = "CACHE_2_" + UUID.randomUUID(); + + CacheConfiguration cacheConfiguration1 = new CacheConfiguration<>(); + cacheConfiguration1.setName(firstCacheName); + cacheConfiguration1.setAffinity(new TestAffinityFunction(node1)); + IgniteCache cache1 = ignite.createCache(cacheConfiguration1); + + CacheConfiguration cacheConfiguration2 = new CacheConfiguration<>(); + cacheConfiguration2.setName(secondCacheName); + cacheConfiguration2.setAffinity(new TestAffinityFunction(node2)); + IgniteCache cache2 = ignite.createCache(cacheConfiguration2); + + try { + try { + ComputeUtils.affinityCallWithRetries( + ignite, + Arrays.asList(firstCacheName, secondCacheName), + part -> part, + 0, + DeployingContext.unitialized() + ); + } + catch (IllegalStateException expectedException) { + return; + } + + fail("Missing IllegalStateException"); + } + finally { + cache1.destroy(); + cache2.destroy(); + } + } + + /** + * Test that in case two caches maintain their partitions on the same node, affinity call will be completed. + */ + @Test + public void testAffinityCallWithRetriesPositive() { + ClusterNode node = grid(1).cluster().localNode(); + + String firstCacheName = "CACHE_1_" + UUID.randomUUID(); + String secondCacheName = "CACHE_2_" + UUID.randomUUID(); + + CacheConfiguration cacheConfiguration1 = new CacheConfiguration<>(); + cacheConfiguration1.setName(firstCacheName); + cacheConfiguration1.setAffinity(new TestAffinityFunction(node)); + IgniteCache cache1 = ignite.createCache(cacheConfiguration1); + + CacheConfiguration cacheConfiguration2 = new CacheConfiguration<>(); + cacheConfiguration2.setName(secondCacheName); + cacheConfiguration2.setAffinity(new TestAffinityFunction(node)); + IgniteCache cache2 = ignite.createCache(cacheConfiguration2); + + try (IgniteAtomicLong cnt = ignite.atomicLong("COUNTER_" + UUID.randomUUID(), 0, true)) { + + ComputeUtils.affinityCallWithRetries(ignite, Arrays.asList(firstCacheName, secondCacheName), part -> { + Ignite locIgnite = Ignition.localIgnite(); + + assertEquals(node, locIgnite.cluster().localNode()); + + cnt.incrementAndGet(); + + return part; + }, 0, DeployingContext.unitialized()); + + assertEquals(1, cnt.get()); + } + finally { + cache1.destroy(); + cache2.destroy(); + } + } + + /** + * Tests {@code getData()} method. 
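+     * The partition data builder is expected to run exactly once; the repeated {@code getData()}
+     * calls below should return the locally cached partition data instead of rebuilding it.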
+ */ + @Test + public void testGetData() { + ClusterNode node = grid(1).cluster().localNode(); + + String upstreamCacheName = "CACHE_1_" + UUID.randomUUID(); + String datasetCacheName = "CACHE_2_" + UUID.randomUUID(); + + CacheConfiguration upstreamCacheConfiguration = new CacheConfiguration<>(); + upstreamCacheConfiguration.setName(upstreamCacheName); + upstreamCacheConfiguration.setAffinity(new TestAffinityFunction(node)); + IgniteCache upstreamCache = ignite.createCache(upstreamCacheConfiguration); + + CacheConfiguration datasetCacheConfiguration = new CacheConfiguration<>(); + datasetCacheConfiguration.setName(datasetCacheName); + datasetCacheConfiguration.setAffinity(new TestAffinityFunction(node)); + IgniteCache datasetCache = ignite.createCache(datasetCacheConfiguration); + + upstreamCache.put(42, 42); + datasetCache.put(0, 0); + + UUID datasetId = UUID.randomUUID(); + + IgniteAtomicLong cnt = ignite.atomicLong("CNT_" + datasetId, 0, true); + + for (int i = 0; i < 10; i++) { + Collection data = ComputeUtils.affinityCallWithRetries( + ignite, + Arrays.asList(datasetCacheName, upstreamCacheName), + part -> ComputeUtils.getData( + ignite, + upstreamCacheName, + (k, v) -> true, + UpstreamTransformerBuilder.identity(), + datasetCacheName, + datasetId, + (env, upstream, upstreamSize, ctx) -> { + cnt.incrementAndGet(); + + assertEquals(1, upstreamSize); + + UpstreamEntry e = upstream.next(); + return new TestPartitionData(e.getKey() + e.getValue()); + }, + TestUtils.testEnvBuilder().buildForWorker(part), + false + ), + 0, + DeployingContext.unitialized() + ); + + assertEquals(1, data.size()); + + TestPartitionData dataElement = data.iterator().next(); + assertEquals(84, dataElement.val.intValue()); + } + + assertEquals(1, cnt.get()); + } + + /** + * Tests {@code initContext()} method. + */ + @Test + public void testInitContext() { + ClusterNode node = grid(1).cluster().localNode(); + + String upstreamCacheName = "CACHE_1_" + UUID.randomUUID(); + String datasetCacheName = "CACHE_2_" + UUID.randomUUID(); + + CacheConfiguration upstreamCacheConfiguration = new CacheConfiguration<>(); + upstreamCacheConfiguration.setName(upstreamCacheName); + upstreamCacheConfiguration.setAffinity(new TestAffinityFunction(node)); + IgniteCache upstreamCache = ignite.createCache(upstreamCacheConfiguration); + + CacheConfiguration datasetCacheConfiguration = new CacheConfiguration<>(); + datasetCacheConfiguration.setName(datasetCacheName); + datasetCacheConfiguration.setAffinity(new TestAffinityFunction(node)); + IgniteCache datasetCache = ignite.createCache(datasetCacheConfiguration); + + upstreamCache.put(42, 42); + + ComputeUtils.initContext( + ignite, + upstreamCacheName, + UpstreamTransformerBuilder.identity(), + (k, v) -> true, + datasetCacheName, + (env, upstream, upstreamSize) -> { + + assertEquals(1, upstreamSize); + + UpstreamEntry e = upstream.next(); + return e.getKey() + e.getValue(); + }, + TestUtils.testEnvBuilder(), + 0, + 0, + false, + DeployingContext.unitialized() + ); + + assertEquals(1, datasetCache.size()); + assertEquals(84, datasetCache.get(0).intValue()); + } + + /** + * Test partition data. + */ + private static class TestPartitionData implements AutoCloseable { + /** Value. */ + private final Integer val; + + /** + * Constructs a new instance of test partition data. + * + * @param val Value. + */ + TestPartitionData(Integer val) { + this.val = val; + } + + /** {@inheritDoc} */ + @Override public void close() { + // Do nothing, GC will clean up. 
+ } + } + + /** + * Affinity function used in tests in this class. Defines one partition and assign it on the specified cluster node. + */ + private static class TestAffinityFunction implements AffinityFunction { + /** */ + private static final long serialVersionUID = -1353725303983563094L; + + /** Cluster node partition will be assigned on. */ + private final ClusterNode node; + + /** + * Constructs a new instance of test affinity function. + * + * @param node Cluster node partition will be assigned on. + */ + TestAffinityFunction(ClusterNode node) { + this.node = node; + } + + /** {@inheritDoc} */ + @Override public void reset() { + // Do nothing. + } + + /** {@inheritDoc} */ + @Override public int partitions() { + return 1; + } + + /** {@inheritDoc} */ + @Override public int partition(Object key) { + return 0; + } + + /** {@inheritDoc} */ + @Override public List> assignPartitions(AffinityFunctionContext affCtx) { + return Collections.singletonList(Collections.singletonList(node)); + } + + /** {@inheritDoc} */ + @Override public void removeNode(UUID nodeId) { + // Do nothing. + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/DatasetAffinityFunctionWrapperTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/DatasetAffinityFunctionWrapperTest.java new file mode 100644 index 0000000000000..efa6f95661715 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/DatasetAffinityFunctionWrapperTest.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.dataset.impl.cache.util; + +import java.util.Collections; +import java.util.List; +import java.util.UUID; +import org.apache.ignite.cache.affinity.AffinityFunction; +import org.apache.ignite.cache.affinity.AffinityFunctionContext; +import org.apache.ignite.cluster.ClusterNode; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +/** + * Tests for {@link DatasetAffinityFunctionWrapper}. + */ +@RunWith(MockitoJUnitRunner.Silent.class) +public class DatasetAffinityFunctionWrapperTest { + /** Mocked affinity function. */ + @Mock + private AffinityFunction affinityFunction; + + /** Wrapper. 
*/ + private DatasetAffinityFunctionWrapper wrapper; + + /** Initialization. */ + @Before + public void beforeTest() { + wrapper = new DatasetAffinityFunctionWrapper(affinityFunction); + } + + /** Tests {@code reset()} method. */ + @Test + public void testReset() { + wrapper.reset(); + + verify(affinityFunction, times(1)).reset(); + } + + /** Tests {@code partitions()} method. */ + @Test + public void testPartitions() { + doReturn(42).when(affinityFunction).partitions(); + + int partitions = wrapper.partitions(); + + assertEquals(42, partitions); + verify(affinityFunction, times(1)).partitions(); + } + + /** Tests {@code partition} method. */ + @Test + public void testPartition() { + doReturn(0).when(affinityFunction).partition(eq(42)); + + int part = wrapper.partition(42); + + assertEquals(42, part); + verify(affinityFunction, times(0)).partition(any()); + } + + /** Tests {@code assignPartitions()} method. */ + @Test + public void testAssignPartitions() { + List> nodes = Collections.singletonList(Collections.singletonList(mock(ClusterNode.class))); + + doReturn(nodes).when(affinityFunction).assignPartitions(any()); + + List> resNodes = wrapper.assignPartitions(mock(AffinityFunctionContext.class)); + + assertEquals(nodes, resNodes); + verify(affinityFunction, times(1)).assignPartitions(any()); + } + + /** Tests {@code removeNode()} method. */ + @Test + public void testRemoveNode() { + UUID nodeId = UUID.randomUUID(); + + wrapper.removeNode(nodeId); + + verify(affinityFunction, times(1)).removeNode(eq(nodeId)); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/IteratorWithConcurrentModificationCheckerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/IteratorWithConcurrentModificationCheckerTest.java new file mode 100644 index 0000000000000..232281ee5fa4b --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/IteratorWithConcurrentModificationCheckerTest.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.dataset.impl.cache.util; + +import java.util.Arrays; +import java.util.ConcurrentModificationException; +import java.util.Iterator; +import java.util.List; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link IteratorWithConcurrentModificationChecker}. 
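+ * A minimal usage sketch mirroring the cases below (the message argument is arbitrary):
+ * <pre>{@code
+ * Iterator<Integer> iter = new IteratorWithConcurrentModificationChecker<>(
+ *     Arrays.asList(1, 2, 3).iterator(), 3, "Upstream was modified");
+ * while (iter.hasNext())
+ *     iter.next(); // Completes normally only if exactly 3 elements are produced.
+ * }</pre>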
+ */ +public class IteratorWithConcurrentModificationCheckerTest { + /** */ + @Test(expected = ConcurrentModificationException.class) + public void testNextWhenIteratorHasLessElementsThanExpected() { + List list = Arrays.asList(1, 2, 3); + + Iterator iter = new IteratorWithConcurrentModificationChecker<>(list.iterator(), 4, "Exception"); + + assertEquals(Integer.valueOf(1), iter.next()); + assertEquals(Integer.valueOf(2), iter.next()); + assertEquals(Integer.valueOf(3), iter.next()); + + iter.next(); // Should throw an exception. + } + + /** */ + @Test(expected = ConcurrentModificationException.class) + public void testNextWhenIteratorHasMoreElementsThanExpected() { + List list = Arrays.asList(1, 2, 3); + + Iterator iter = new IteratorWithConcurrentModificationChecker<>(list.iterator(), 2, "Exception"); + + assertEquals(Integer.valueOf(1), iter.next()); + assertEquals(Integer.valueOf(2), iter.next()); + + iter.next(); // Should throw an exception. + } + + /** */ + @Test(expected = ConcurrentModificationException.class) + public void testHasNextWhenIteratorHasLessElementsThanExpected() { + List list = Arrays.asList(1, 2, 3); + + Iterator iter = new IteratorWithConcurrentModificationChecker<>(list.iterator(), 4, "Exception"); + + assertTrue(iter.hasNext()); + iter.next(); + assertTrue(iter.hasNext()); + iter.next(); + assertTrue(iter.hasNext()); + iter.next(); + + iter.hasNext(); // Should throw an exception. + } + + /** */ + @Test(expected = ConcurrentModificationException.class) + public void testHasNextWhenIteratorHasMoreElementsThanExpected() { + List list = Arrays.asList(1, 2, 3); + + Iterator iter = new IteratorWithConcurrentModificationChecker<>(list.iterator(), 2, "Exception"); + + assertTrue(iter.hasNext()); + iter.next(); + assertTrue(iter.hasNext()); + iter.next(); + + iter.hasNext(); // Should throw an exception. + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/PartitionDataStorageTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/PartitionDataStorageTest.java new file mode 100644 index 0000000000000..d20830aeb700f --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/cache/util/PartitionDataStorageTest.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.dataset.impl.cache.util; + +import java.util.concurrent.atomic.AtomicLong; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link PartitionDataStorage}. + */ +public class PartitionDataStorageTest { + /** Data storage. 
*/ + private PartitionDataStorage dataStorage = new PartitionDataStorage(); + + /** Tests {@code computeDataIfAbsent()} method. */ + @Test + public void testComputeDataIfAbsent() { + AtomicLong cnt = new AtomicLong(); + + for (int i = 0; i < 10; i++) { + Integer res = dataStorage.computeDataIfAbsent(0, () -> { + cnt.incrementAndGet(); + + return 42; + }); + + assertEquals(42, res.intValue()); + } + + assertEquals(1, cnt.intValue()); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/local/LocalDatasetBuilderTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/local/LocalDatasetBuilderTest.java new file mode 100644 index 0000000000000..ed3bdd5af5ffb --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/impl/local/LocalDatasetBuilderTest.java @@ -0,0 +1,137 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.dataset.impl.local; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicLong; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.dataset.PartitionContextBuilder; +import org.apache.ignite.ml.dataset.PartitionDataBuilder; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link LocalDatasetBuilder}. + */ +public class LocalDatasetBuilderTest { + /** Tests {@code build()} method. */ + @Test + public void testBuild() { + Map data = new HashMap<>(); + for (int i = 0; i < 100; i++) + data.put(i, i); + + LocalDatasetBuilder builder = new LocalDatasetBuilder<>(data, 10); + + LocalDataset dataset = buildDataset(builder); + + assertEquals(10, dataset.getCtx().size()); + assertEquals(10, dataset.getData().size()); + + AtomicLong cnt = new AtomicLong(); + + dataset.compute((partData, env) -> { + cnt.incrementAndGet(); + + int[] arr = partData.data; + + assertEquals(10, arr.length); + + for (int i = 0; i < 10; i++) + assertEquals(env.partition() * 10 + i, arr[i]); + }); + + assertEquals(10, cnt.intValue()); + } + + /** Tests {@code build()} method with predicate. 
*/ + @Test + public void testBuildWithPredicate() { + Map data = new HashMap<>(); + for (int i = 0; i < 100; i++) + data.put(i, i); + + LocalDatasetBuilder builder = new LocalDatasetBuilder<>(data, (k, v) -> k % 2 == 0, 10); + + LocalDataset dataset = buildDataset(builder); + + AtomicLong cnt = new AtomicLong(); + + dataset.compute((partData, env) -> { + cnt.incrementAndGet(); + + int[] arr = partData.data; + + assertEquals(5, arr.length); + + for (int i = 0; i < 5; i++) + assertEquals((env.partition() * 5 + i) * 2, arr[i]); + }); + + assertEquals(10, cnt.intValue()); + } + + /** */ + private LocalDataset buildDataset( + LocalDatasetBuilder builder) { + PartitionContextBuilder partCtxBuilder = (env, upstream, upstreamSize) -> null; + + PartitionDataBuilder partDataBuilder = + (env, upstream, upstreamSize, ctx) -> { + int[] arr = new int[Math.toIntExact(upstreamSize)]; + + int ptr = 0; + while (upstream.hasNext()) + arr[ptr++] = upstream.next().getValue(); + + return new TestPartitionData(arr); + }; + + return builder.build( + TestUtils.testEnvBuilder(), + partCtxBuilder.andThen(x -> null), + partDataBuilder.andThen((x, y) -> x), + TestUtils.testEnvBuilder().buildForTrainer() + ); + } + + /** + * Test partition {@code data}. + */ + private static class TestPartitionData implements AutoCloseable { + /** Data. */ + private int[] data; + + /** + * Constructs a new test partition data instance. + * + * @param data Data. + */ + TestPartitionData(int[] data) { + this.data = data; + } + + /** {@inheritDoc} */ + @Override public void close() { + // Do nothing, GC will clean up. + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/primitive/DatasetWrapperTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/primitive/DatasetWrapperTest.java new file mode 100644 index 0000000000000..7aef033533b66 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/primitive/DatasetWrapperTest.java @@ -0,0 +1,138 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.dataset.primitive; + +import java.io.Serializable; +import org.apache.ignite.ml.dataset.Dataset; +import org.apache.ignite.ml.math.functions.IgniteBiFunction; +import org.apache.ignite.ml.math.functions.IgniteBinaryOperator; +import org.apache.ignite.ml.math.functions.IgniteFunction; +import org.apache.ignite.ml.math.functions.IgniteTriFunction; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +/** + * Tests for {@link DatasetWrapper}. + */ +@RunWith(MockitoJUnitRunner.class) +public class DatasetWrapperTest { + /** Mocked dataset. */ + @Mock + private Dataset dataset; + + /** Dataset wrapper. */ + private DatasetWrapper wrapper; + + /** Initialization. */ + @Before + public void beforeTest() { + wrapper = new DatasetWrapper<>(dataset); + } + + /** Tests {@code computeWithCtx()} method. */ + @Test + @SuppressWarnings("unchecked") + public void testComputeWithCtx() { + doReturn(42).when(dataset).computeWithCtx(any(IgniteTriFunction.class), any(), any()); + + //NOTE: don't remove this cast due to Java9+ compilation failure on different Java versions and OS. + Integer res = (Integer)wrapper.computeWithCtx(mock(IgniteTriFunction.class), mock(IgniteBinaryOperator.class), + null); + + assertEquals(42, res.intValue()); + + verify(dataset, times(1)).computeWithCtx(any(IgniteTriFunction.class), any(), any()); + } + + /** Tests {@code computeWithCtx()} method. */ + @Test + @SuppressWarnings("unchecked") + public void testComputeWithCtx2() { + doReturn(42).when(dataset).computeWithCtx(any(IgniteTriFunction.class), any(), any()); + + //NOTE: don't remove this cast due to Java9+ compilation failure on different Java versions and OS. + Integer res = (Integer)wrapper.computeWithCtx(mock(IgniteBiFunction.class), mock(IgniteBinaryOperator.class), + null); + + assertEquals(42, res.intValue()); + + verify(dataset, times(1)).computeWithCtx(any(IgniteTriFunction.class), any(), any()); + } + + /** Tests {@code computeWithCtx()} method. */ + @Test + @SuppressWarnings("unchecked") + public void testComputeWithCtx3() { + wrapper.computeWithCtx((ctx, data) -> { + assertNotNull(ctx); + assertNotNull(data); + }); + + verify(dataset, times(1)).computeWithCtx(any(IgniteTriFunction.class), + any(IgniteBinaryOperator.class), any()); + } + + /** Tests {@code compute()} method. */ + @Test + @SuppressWarnings("unchecked") + public void testCompute() { + doReturn(42).when(dataset).compute(any(IgniteBiFunction.class), any(), any()); + + //NOTE: don't remove this cast due to Java9+ compilation failure on different Java versions and OS. + Integer res = (Integer)wrapper.compute(mock(IgniteBiFunction.class), mock(IgniteBinaryOperator.class), + null); + + assertEquals(42, res.intValue()); + + verify(dataset, times(1)).compute(any(IgniteBiFunction.class), any(), any()); + } + + /** Tests {@code compute()} method. 
*/ + @Test + @SuppressWarnings("unchecked") + public void testCompute2() { + doReturn(42).when(dataset).compute(any(IgniteBiFunction.class), any(IgniteBinaryOperator.class), any()); + + //NOTE: don't remove this cast due to Java9+ compilation failure on different Java versions and OS. + Integer res = (Integer)wrapper.compute(mock(IgniteFunction.class), mock(IgniteBinaryOperator.class), + null); + + assertEquals(42, res.intValue()); + + verify(dataset, times(1)).compute(any(IgniteBiFunction.class), any(IgniteBinaryOperator.class), any()); + } + + /** Tests {@code close()} method. */ + @Test + public void testClose() throws Exception { + wrapper.close(); + + verify(dataset, times(1)).close(); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/primitive/SimpleDatasetTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/primitive/SimpleDatasetTest.java new file mode 100644 index 0000000000000..edd7ca1a398fc --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/primitive/SimpleDatasetTest.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.dataset.primitive; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.dataset.DatasetFactory; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link SimpleDataset}. + */ +public class SimpleDatasetTest { + /** Basic test for SimpleDataset features. IMPL NOTE derived from LocalDatasetExample. */ + @Test + public void basicTest() throws Exception { + Map dataPoints = new HashMap<>(); + + dataPoints.put(5, VectorUtils.of(42, 10000)); + dataPoints.put(6, VectorUtils.of(32, 64000)); + dataPoints.put(7, VectorUtils.of(53, 120000)); + dataPoints.put(8, VectorUtils.of(24, 70000)); + + Vectorizer vectorizer = new DummyVectorizer<>(); + + // Creates a local simple dataset containing features and providing standard dataset API. 
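+        // The four 2-dimensional points above are split into 2 partitions; the assertions below
+        // check the per-column mean and standard deviation plus the 2x2 covariance and
+        // correlation matrices computed by the dataset.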
+ try (SimpleDataset dataset = DatasetFactory.createSimpleDataset( + dataPoints, + 2, + TestUtils.testEnvBuilder(), + vectorizer + )) { + assertArrayEquals("Mean values.", new double[] {37.75, 66000.0}, dataset.mean(), 0); + + assertArrayEquals("Standard deviation values.", + new double[] {10.871407452579449, 38961.519477556314}, dataset.std(), 0); + + double[][] covExp = new double[][] { + new double[] {118.1875, 135500.0}, + new double[] {135500.0, 1.518E9} + }; + double[][] cov = dataset.cov(); + int rowCov = 0; + for (double[] row : cov) + assertArrayEquals("Covariance matrix row " + rowCov, + covExp[rowCov++], row, 0); + + double[][] corrExp = new double[][] { + new double[] {1.0000000000000002, 0.31990250167874007}, + new double[] {0.31990250167874007, 1.0} + }; + double[][] corr = dataset.corr(); + int rowCorr = 0; + for (double[] row : corr) + assertArrayEquals("Correlation matrix row " + rowCorr, + corrExp[rowCorr++], row, 0); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/primitive/SimpleLabeledDatasetTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/primitive/SimpleLabeledDatasetTest.java new file mode 100644 index 0000000000000..54fdfb0acdf63 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/dataset/primitive/SimpleLabeledDatasetTest.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.dataset.primitive; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.dataset.DatasetFactory; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.math.functions.IgniteFunction; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.preprocessing.developer.PatchedPreprocessor; +import org.apache.ignite.ml.structures.LabeledVector; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertNull; + +/** + * Tests for {@link SimpleLabeledDataset}. + */ +public class SimpleLabeledDatasetTest { + /** Basic test for SimpleLabeledDataset features. 
*/ + @Test + public void basicTest() throws Exception { + Map dataPoints = new HashMap(); + + dataPoints.put(5, VectorUtils.of(42, 10000)); + dataPoints.put(6, VectorUtils.of(32, 64000)); + dataPoints.put(7, VectorUtils.of(53, 120000)); + dataPoints.put(8, VectorUtils.of(24, 70000)); + + double[][] actualFeatures = new double[2][]; + double[][] actualLabels = new double[2][]; + int[] actualRows = new int[2]; + + Vectorizer vectorizer = new DummyVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST); + + IgniteFunction, LabeledVector> func = + lv -> new LabeledVector<>(lv.features(), new double[] { lv.label()}); + + PatchedPreprocessor patchedPreprocessor = new PatchedPreprocessor<>(func, vectorizer); + + // Creates a local simple dataset containing features and providing standard dataset API. + try (SimpleLabeledDataset dataset = DatasetFactory.createSimpleLabeledDataset( + dataPoints, + TestUtils.testEnvBuilder(), + 2, + patchedPreprocessor + )) { + assertNull(dataset.compute((data, env) -> { + int part = env.partition(); + actualFeatures[part] = data.getFeatures(); + actualLabels[part] = data.getLabels(); + actualRows[part] = data.getRows(); + return null; + }, (k, v) -> null)); + } + + double[][] expFeatures = new double[][] { + new double[] {10000.0, 64000.0}, + new double[] {120000.0, 70000.0} + }; + int rowFeat = 0; + for (double[] row : actualFeatures) + assertArrayEquals("Features partition index " + rowFeat, + expFeatures[rowFeat++], row, 0); + + double[][] expLabels = new double[][] { + new double[] {42.0, 32.0}, + new double[] {53.0, 24.0} + }; + int rowLbl = 0; + for (double[] row : actualLabels) + assertArrayEquals("Labels partition index " + rowLbl, + expLabels[rowLbl++], row, 0); + + assertArrayEquals("Rows per partitions", new int[] {2, 2}, actualRows); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/EnvironmentTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/EnvironmentTestSuite.java new file mode 100644 index 0000000000000..cdbd9357371da --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/EnvironmentTestSuite.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.environment; + +import org.apache.ignite.ml.environment.deploy.DeployingContextImplTest; +import org.apache.ignite.ml.environment.deploy.MLDeployingTest; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in org.apache.ignite.ml.trees package. 
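+ *
+ * <p>One way to run the whole suite programmatically with plain JUnit 4 (a hypothetical
+ * usage sketch, not part of the module's build setup):
+ * <pre>{@code
+ * org.junit.runner.Result res = org.junit.runner.JUnitCore.runClasses(EnvironmentTestSuite.class);
+ * System.out.println("All environment tests passed: " + res.wasSuccessful());
+ * }</pre>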
+ */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + LearningEnvironmentBuilderTest.class, + LearningEnvironmentTest.class, + PromiseTest.class, + DeployingContextImplTest.class, + MLDeployingTest.class +}) +public class EnvironmentTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/LearningEnvironmentBuilderTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/LearningEnvironmentBuilderTest.java new file mode 100644 index 0000000000000..dd440dc7211bf --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/LearningEnvironmentBuilderTest.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.environment; + +import org.apache.ignite.logger.NullLogger; +import org.apache.ignite.ml.environment.logging.ConsoleLogger; +import org.apache.ignite.ml.environment.logging.CustomMLLogger; +import org.apache.ignite.ml.environment.logging.MLLogger; +import org.apache.ignite.ml.environment.logging.NoOpLogger; +import org.apache.ignite.ml.environment.parallelism.DefaultParallelismStrategy; +import org.apache.ignite.ml.environment.parallelism.NoParallelismStrategy; +import org.junit.Test; + +import static org.apache.ignite.ml.environment.parallelism.ParallelismStrategy.Type.NO_PARALLELISM; +import static org.apache.ignite.ml.environment.parallelism.ParallelismStrategy.Type.ON_DEFAULT_POOL; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link LearningEnvironmentBuilder}. 
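+ *
+ * <p>A minimal builder-usage sketch, assuming only the calls exercised by the tests below:
+ * <pre>{@code
+ * LearningEnvironment env = LearningEnvironmentBuilder.defaultBuilder()
+ *     .withParallelismStrategyType(ParallelismStrategy.Type.NO_PARALLELISM)
+ *     .withLoggingFactoryDependency(part -> NoOpLogger.factory())
+ *     .buildForTrainer();
+ * }</pre>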
+ */ +public class LearningEnvironmentBuilderTest { + /** */ + @Test + public void basic() { + LearningEnvironment env = LearningEnvironment.DEFAULT_TRAINER_ENV; + + assertNotNull("Strategy", env.parallelismStrategy()); + assertNotNull("Logger", env.logger()); + assertNotNull("Logger for class", env.logger(this.getClass())); + } + + /** */ + @Test + public void withParallelismStrategy() { + assertTrue(LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyDependency(part -> NoParallelismStrategy.INSTANCE) + .buildForTrainer() + .parallelismStrategy() instanceof NoParallelismStrategy); + + assertTrue(LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyDependency(part -> new DefaultParallelismStrategy()) + .buildForTrainer() + .parallelismStrategy() instanceof DefaultParallelismStrategy); + } + + /** */ + @Test + public void withParallelismStrategyType() { + assertTrue(LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyType(NO_PARALLELISM).buildForTrainer() + .parallelismStrategy() instanceof NoParallelismStrategy); + + assertTrue(LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyType(ON_DEFAULT_POOL).buildForTrainer() + .parallelismStrategy() instanceof DefaultParallelismStrategy); + } + + /** */ + @Test + public void withLoggingFactory() { + assertTrue(LearningEnvironmentBuilder.defaultBuilder().withLoggingFactoryDependency( + part -> ConsoleLogger.factory(MLLogger.VerboseLevel.HIGH)).buildForTrainer().logger() instanceof ConsoleLogger); + + assertTrue(LearningEnvironmentBuilder.defaultBuilder().withLoggingFactoryDependency( + part -> ConsoleLogger.factory(MLLogger.VerboseLevel.HIGH)).buildForTrainer().logger(this.getClass()) instanceof ConsoleLogger); + + assertTrue(LearningEnvironmentBuilder.defaultBuilder().withLoggingFactoryDependency(part -> NoOpLogger.factory()) + .buildForTrainer().logger() instanceof NoOpLogger); + + assertTrue(LearningEnvironmentBuilder.defaultBuilder().withLoggingFactoryDependency(part -> NoOpLogger.factory()) + .buildForTrainer().logger(this.getClass()) instanceof NoOpLogger); + + assertTrue(LearningEnvironmentBuilder.defaultBuilder().withLoggingFactoryDependency( + part -> CustomMLLogger.factory(new NullLogger())).buildForTrainer().logger() instanceof CustomMLLogger); + + assertTrue(LearningEnvironmentBuilder.defaultBuilder().withLoggingFactoryDependency( + part -> CustomMLLogger.factory(new NullLogger())).buildForTrainer().logger(this.getClass()) instanceof CustomMLLogger); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/LearningEnvironmentTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/LearningEnvironmentTest.java new file mode 100644 index 0000000000000..54d869bc3c2fb --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/LearningEnvironmentTest.java @@ -0,0 +1,179 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.environment; + +import java.util.Map; +import java.util.Random; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import org.apache.ignite.ml.IgniteModel; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.dataset.Dataset; +import org.apache.ignite.ml.dataset.DatasetBuilder; +import org.apache.ignite.ml.dataset.PartitionDataBuilder; +import org.apache.ignite.ml.dataset.feature.FeatureMeta; +import org.apache.ignite.ml.dataset.primitive.builder.context.EmptyContextBuilder; +import org.apache.ignite.ml.dataset.primitive.context.EmptyContext; +import org.apache.ignite.ml.environment.logging.ConsoleLogger; +import org.apache.ignite.ml.environment.logging.MLLogger; +import org.apache.ignite.ml.environment.parallelism.DefaultParallelismStrategy; +import org.apache.ignite.ml.environment.parallelism.ParallelismStrategy; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.preprocessing.Preprocessor; +import org.apache.ignite.ml.trainers.DatasetTrainer; +import org.apache.ignite.ml.tree.randomforest.RandomForestRegressionTrainer; +import org.apache.ignite.ml.tree.randomforest.data.FeaturesCountSelectionStrategies; +import org.junit.Test; + +import static org.apache.ignite.ml.TestUtils.constantModel; +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link LearningEnvironment} that require to start the whole Ignite infrastructure. IMPL NOTE based on + * RandomForestRegressionExample example. + */ +public class LearningEnvironmentTest { + /** */ + @Test + public void testBasic() { + RandomForestRegressionTrainer trainer = new RandomForestRegressionTrainer( + IntStream.range(0, 0).mapToObj( + x -> new FeatureMeta("", 0, false)).collect(Collectors.toList()) + ).withAmountOfTrees(101) + .withFeaturesCountSelectionStrgy(FeaturesCountSelectionStrategies.ONE_THIRD) + .withMaxDepth(4) + .withMinImpurityDelta(0.) + .withSubSampleSize(0.3) + .withSeed(0); + + LearningEnvironmentBuilder envBuilder = LearningEnvironmentBuilder.defaultBuilder() + .withParallelismStrategyType(ParallelismStrategy.Type.ON_DEFAULT_POOL) + .withLoggingFactoryDependency(part -> ConsoleLogger.factory(MLLogger.VerboseLevel.LOW)); + + trainer.withEnvironmentBuilder(envBuilder); + + assertEquals(DefaultParallelismStrategy.class, trainer.learningEnvironment().parallelismStrategy().getClass()); + assertEquals(ConsoleLogger.class, trainer.learningEnvironment().logger().getClass()); + } + + /** + * Test random number generator provided by {@link LearningEnvironment}. + * We test that: + * 1. Correct random generator is returned for each partition. + * 2. Its state is saved between compute calls (for this we do several iterations of compute). 
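+     *
+     * <p>Concretely (derived from the mock generator and the assertions below): partition
+     * {@code i} gets a generator whose {@code nextInt()} returns {@code i * iter} on the
+     * {@code iter}-th call, so after two compute iterations the collected vector holds
+     * {@code i * 2} at component {@code i}.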
+ */ + @Test + public void testRandomNumbersGenerator() { + // We make such builders that provide as functions returning partition index * iteration as random number generator nextInt + LearningEnvironmentBuilder envBuilder = TestUtils.testEnvBuilder().withRandomDependency(MockRandom::new); + int partitions = 10; + int iterations = 2; + + DatasetTrainer, Void> trainer = new DatasetTrainer, Void>() { + /** {@inheritDoc} */ + @Override public IgniteModel fitWithInitializedDeployingContext( + DatasetBuilder datasetBuilder, + Preprocessor preprocessor + ) { + Dataset> ds = datasetBuilder.build(envBuilder, + new EmptyContextBuilder<>(), + (PartitionDataBuilder>)(env, upstreamData, upstreamDataSize, ctx) -> + TestUtils.DataWrapper.of(env.partition()), + envBuilder.buildForTrainer()); + + Vector v = null; + for (int iter = 0; iter < iterations; iter++) { + v = ds.compute( + (dw, env) -> VectorUtils.fill(-1, partitions).set(env.partition(), env.randomNumbersGenerator().nextInt()), + (v1, v2) -> zipOverridingEmpty(v1, v2, -1) + ); + } + return constantModel(v); + } + + /** {@inheritDoc} */ + @Override public boolean isUpdateable(IgniteModel mdl) { + return false; + } + + /** {@inheritDoc} */ + @Override protected IgniteModel updateModel(IgniteModel mdl, + DatasetBuilder datasetBuilder, Preprocessor preprocessor) { + return null; + } + }; + trainer.withEnvironmentBuilder(envBuilder); + IgniteModel mdl = trainer.fit(getCacheMock(partitions), partitions, null); + + Vector exp = VectorUtils.zeroes(partitions); + for (int i = 0; i < partitions; i++) + exp.set(i, i * iterations); + + Vector res = mdl.predict(null); + assertEquals(exp, res); + } + + /** + * For given two vectors {@code v2, v2} produce vector {@code v} where each component of {@code v} + * is produced from corresponding components {@code c1, c2} of {@code v1, v2} respectfully in following way + * {@code c = c1 != empty ? c1 : c2}. For example, zipping [2, -1, -1], [-1, 3, -1] will result in [2, 3, -1]. + * + * @param v1 First vector. + * @param v2 Second vector. + * @param empty Value treated as empty. + * @return Result of zipping as described above. + */ + private static Vector zipOverridingEmpty(Vector v1, Vector v2, double empty) { + return v1 != null ? (v2 != null ? VectorUtils.zipWith(v1, v2, (d1, d2) -> d1 != empty ? d1 : d2) : v1) : v2; + } + + /** Get cache mock */ + private Map getCacheMock(int partsCnt) { + return IntStream.range(0, partsCnt).boxed().collect(Collectors.toMap(x -> x, x -> x)); + } + + /** Mock random numbers generator. */ + private static class MockRandom extends Random { + /** Serial version uuid. */ + private static final long serialVersionUID = -7738558243461112988L; + + /** Start value. */ + private int startVal; + + /** Iteration. */ + private int iter; + + /** + * Constructs instance of this class with a specified start value. + * + * @param startVal Start value. 
+ */ + MockRandom(int startVal) { + this.startVal = startVal; + iter = 0; + } + + /** {@inheritDoc} */ + @Override public int nextInt() { + iter++; + return startVal * iter; + } + } +} + diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/PromiseTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/PromiseTest.java new file mode 100644 index 0000000000000..50b0f1e36b8b5 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/PromiseTest.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.environment; + +import java.util.Optional; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import org.apache.ignite.ml.environment.parallelism.Promise; +import org.jetbrains.annotations.NotNull; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +/** + * Tests for {@link Promise} functionality. + */ +public class PromiseTest { + /** */ + @Test + public void testUnsafeGet() { + assertNull("Strategy", new TestPromise().unsafeGet()); + } + + /** */ + @Test + public void testGetOpt() { + assertEquals(Optional.empty(), (new TestPromise() { + /** {@inheritDoc} */ + @Override public Object get() throws ExecutionException { + throw new ExecutionException("test", new RuntimeException("test cause")); + } + }).getOpt()); + } + + /** */ + private static class TestPromise implements Promise { + /** {@inheritDoc} */ + @Override public boolean cancel(boolean mayInterruptIfRunning) { + return false; + } + + /** {@inheritDoc} */ + @Override public boolean isCancelled() { + return false; + } + + /** {@inheritDoc} */ + @Override public boolean isDone() { + return true; + } + + /** {@inheritDoc} */ + @Override public Object get() throws ExecutionException { + return null; + } + + /** {@inheritDoc} */ + @Override public Object get(long timeout, @NotNull TimeUnit unit) { + return null; + } + } +} + diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/deploy/DeployingContextImplTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/deploy/DeployingContextImplTest.java new file mode 100644 index 0000000000000..ade3c99688ad7 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/deploy/DeployingContextImplTest.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +package org.apache.ignite.ml.environment.deploy; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import org.apache.ignite.ml.environment.LearningEnvironment; +import org.apache.ignite.ml.environment.LearningEnvironmentBuilder; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for DeployingContextImpl. + */ +public class DeployingContextImplTest { + /** */ + private LearningEnvironment environment; + + /** */ + @Before + public void setUp() { + environment = LearningEnvironmentBuilder.defaultBuilder().buildForTrainer(); + } + + /** */ + @Test + public void testSimpleCase() { + environment.initDeployingContext(new A()); + assertEquals(A.class, environment.deployingContext().userClass()); + assertEquals(A.class.getClassLoader(), environment.deployingContext().clientClassLoader()); + } + + /** */ + @Test + public void testStraightDependency() { + environment.initDeployingContext(new C(new A())); + assertEquals(A.class, environment.deployingContext().userClass()); + assertEquals(A.class.getClassLoader(), environment.deployingContext().clientClassLoader()); + } + + /** */ + @Test + public void testNestedDependencies() { + environment.initDeployingContext(new C(new C(new C(new A())))); + assertEquals(A.class, environment.deployingContext().userClass()); + assertEquals(A.class.getClassLoader(), environment.deployingContext().clientClassLoader()); + } + + /** */ + @Test + public void testClassWithoutDependencies() { + environment.initDeployingContext(new C(new C(new B(new A())))); + assertEquals(B.class, environment.deployingContext().userClass()); + assertEquals(B.class.getClassLoader(), environment.deployingContext().clientClassLoader()); + } + + /** */ + @Test + public void testClassWithSeveralDeps1() { + environment.initDeployingContext(new C(new C(new D(new C(new C(new A())), new B(new A()))))); + // in this case we should get only first dependency + assertEquals(A.class, environment.deployingContext().userClass()); + assertEquals(A.class.getClassLoader(), environment.deployingContext().clientClassLoader()); + } + + /** */ + @Test + public void testClassWithSeveralDeps2() { + environment.initDeployingContext(new C(new C(new D(new B(new A()), new C(new C(new A())))))); + // in this case we should get only first dependency + assertEquals(B.class, environment.deployingContext().userClass()); + assertEquals(B.class.getClassLoader(), environment.deployingContext().clientClassLoader()); + } + + /** */ + private static class A { + + } + + /** */ + private static class B implements DeployableObject { + /** */ + private final Object obj; + + /** */ + public B(Object obj) { + this.obj = obj; + } + + /** {@inheritDoc} */ + @Override public List getDependencies() { + return Collections.emptyList(); + } + } + + /** */ + private static class C implements DeployableObject { + /** */ + private final Object obj; + + /** */ + 
public C(Object obj) { + this.obj = obj; + } + + /** {@inheritDoc} */ + @Override public List getDependencies() { + return Collections.singletonList(obj); + } + } + + /** */ + private static class D implements DeployableObject { + /** */ + private final Object obj1; + + /** */ + private final Object obj2; + + /** */ + public D(Object obj1, Object obj2) { + this.obj1 = obj1; + this.obj2 = obj2; + } + + /** {@inheritDoc} */ + @Override public List getDependencies() { + return Arrays.asList(obj1, obj2); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/deploy/MLDeployingTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/deploy/MLDeployingTest.java new file mode 100644 index 0000000000000..3702d6b542a1b --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/environment/deploy/MLDeployingTest.java @@ -0,0 +1,245 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.environment.deploy; + +import java.lang.reflect.Constructor; +import java.util.Arrays; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.ml.dataset.DatasetBuilder; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.dataset.impl.cache.CacheBasedDatasetBuilder; +import org.apache.ignite.ml.environment.LearningEnvironmentBuilder; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.pipeline.Pipeline; +import org.apache.ignite.ml.pipeline.PipelineMdl; +import org.apache.ignite.ml.preprocessing.PreprocessingTrainer; +import org.apache.ignite.ml.preprocessing.Preprocessor; +import org.apache.ignite.ml.preprocessing.binarization.BinarizationPreprocessor; +import org.apache.ignite.ml.preprocessing.imputing.ImputerTrainer; +import org.apache.ignite.ml.preprocessing.minmaxscaling.MinMaxScalerTrainer; +import org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer; +import org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel; +import org.apache.ignite.ml.regressions.logistic.LogisticRegressionSGDTrainer; +import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer; +import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; +import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import 
org.jetbrains.annotations.NotNull; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** */ +public class MLDeployingTest extends GridCommonAbstractTest { + /** */ + private static final String EXT_VECTORIZER = "org.apache.ignite.tests.p2p.ml.CustomVectorizer"; + + /** */ + private static final String EXT_PREPROCESSOR_1 = "org.apache.ignite.tests.p2p.ml.CustomPreprocessor1"; + + /** */ + private static final String EXT_PREPROCESSOR_2 = "org.apache.ignite.tests.p2p.ml.CustomPreprocessor2"; + + /** */ + private static final int NUMBER_OF_COMPUTE_RETRIES = 3; + + /** */ + private Ignite ignite1; + + /** */ + private Ignite ignite2; + + /** */ + private Ignite ignite3; + + /** {@inheritDoc} */ + @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { + IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); + + cfg.setDiscoverySpi(new TcpDiscoverySpi() + .setIpFinder(new TcpDiscoveryVmIpFinder() + .setAddresses(Arrays.asList("127.0.0.1:47500..47509")))); + + cfg.setPeerClassLoadingEnabled(true); + + return cfg; + } + + /** */ + @Before + public void setUp() throws Exception { + ignite1 = startGrid(1); + ignite2 = startGrid(2); + } + + /** */ + @After + public void tearDown() throws Exception { + stopAllGrids(); + } + + /** */ + @Test + public void testCustomVectorizer() throws Exception { + testOnCache("testCustomVectorizer", cache -> { + Vectorizer vectorizer = createVectorizer(); + fitAndTestModel(cache, vectorizer); + }); + } + + /** */ + @Test + public void testCustomPreprocessor() throws Exception { + testOnCache("testCustomPreprocessor", cache -> { + Vectorizer vectorizer = new DummyVectorizer<>(); + vectorizer = vectorizer.labeled(Vectorizer.LabelCoordinate.LAST); + + Preprocessor customPreprocessor1 = createPreprocessor(vectorizer, EXT_PREPROCESSOR_1); + Preprocessor customPreprocessor2 = createPreprocessor(customPreprocessor1, EXT_PREPROCESSOR_2); + + Preprocessor knownPreprocessor1 = new BinarizationPreprocessor<>(0.5, customPreprocessor1); + Preprocessor knownPreprocessor2 = new BinarizationPreprocessor<>(0.5, customPreprocessor2); + + Preprocessor customPreprocessor3 = createPreprocessor(knownPreprocessor2, EXT_PREPROCESSOR_1); + Preprocessor customPreprocessor4 = createPreprocessor(customPreprocessor3, EXT_PREPROCESSOR_2); + + fitAndTestModel(cache, customPreprocessor1); + fitAndTestModel(cache, customPreprocessor2); + fitAndTestModel(cache, knownPreprocessor1); + fitAndTestModel(cache, knownPreprocessor2); + fitAndTestModel(cache, customPreprocessor3); + fitAndTestModel(cache, customPreprocessor4); + }); + } + + /** */ + @Test + public void testPipeline() throws Exception { + testOnCache("testPipeline", cache -> { + Vectorizer vectorizer = createVectorizer(); + + PipelineMdl mdl = new Pipeline() + .addVectorizer(vectorizer) + .addPreprocessingTrainer(new ImputerTrainer()) + .addPreprocessingTrainer(makePreprocessorTrainer(EXT_PREPROCESSOR_2)) + .addPreprocessingTrainer(new MinMaxScalerTrainer()) + .addPreprocessingTrainer(makePreprocessorTrainer(EXT_PREPROCESSOR_1)) + .addPreprocessingTrainer(new NormalizationTrainer() + .withP(1)) + .addTrainer(new DecisionTreeClassificationTrainer(5, 0)) + .fit(cache); + + assertEquals(0., mdl.predict(VectorUtils.of(0., 0.)), 1.); + }); + } + + /** */ + private void testOnCache(String cacheName, TestCacheConsumer body) throws Exception { + IgniteCache cache = null; + try { + cache = prepareCache(ignite1, cacheName); + body.accept(new 
CacheBasedDatasetBuilder<>(ignite1, cache) + .withRetriesNumber(NUMBER_OF_COMPUTE_RETRIES)); + } + finally { + if (cache != null) + cache.destroy(); + } + } + + /** */ + private void fitAndTestModel(CacheBasedDatasetBuilder datasetBuilder, + Preprocessor preprocessor) { + LogisticRegressionSGDTrainer trainer = new LogisticRegressionSGDTrainer(); + LogisticRegressionModel mdl = trainer.fit(datasetBuilder, preprocessor); + + // For this case any answer is valid. + assertEquals(0., mdl.predict(VectorUtils.of(0., 0.)), 1.); + } + + /** */ + private Vectorizer createVectorizer() + throws ClassNotFoundException, NoSuchMethodException, InstantiationException, IllegalAccessException, + java.lang.reflect.InvocationTargetException { + ClassLoader ldr = getExternalClassLoader(); + Class clazz = ldr.loadClass(EXT_VECTORIZER); + + Constructor ctor = clazz.getConstructor(); + Vectorizer vectorizer = + (Vectorizer)ctor.newInstance(); + vectorizer = vectorizer.labeled(Vectorizer.LabelCoordinate.LAST); + return vectorizer; + } + + /** */ + private Preprocessor createPreprocessor(Preprocessor basePreprocessor, + String clsName) throws Exception { + ClassLoader ldr = getExternalClassLoader(); + Class clazz = ldr.loadClass(clsName); + + Constructor ctor = clazz.getConstructor(Preprocessor.class); + return (Preprocessor)ctor.newInstance(basePreprocessor); + } + + /** */ + @NotNull private PreprocessingTrainer makePreprocessorTrainer(String preprocessorClsName) throws Exception { + return new PreprocessingTrainer() { + @Override public Preprocessor fit(LearningEnvironmentBuilder envBuilder, DatasetBuilder datasetBuilder, + Preprocessor basePreprocessor) { + try { + return createPreprocessor(basePreprocessor, preprocessorClsName); + } + catch (Exception e) { + throw new RuntimeException(e); + } + } + }; + } + + /** xor truth table. */ + private static final double[][] xor = { + {0.0, 0.0, 0.0}, + {0.0, 1.0, 1.0}, + {1.0, 0.0, 1.0}, + {1.0, 1.0, 0.0} + }; + + /** */ + private IgniteCache prepareCache(Ignite ignite, String cacheName) { + IgniteCache cache = ignite.getOrCreateCache(new CacheConfiguration<>(cacheName)); + + for (int i = 0; i < xor.length; i++) + cache.put(i, VectorUtils.of(xor[i])); + + return cache; + } + + /** */ + @FunctionalInterface + private static interface TestCacheConsumer { + /** + * @param datasetBuilder Dataset builder. + */ + public void accept(CacheBasedDatasetBuilder datasetBuilder) throws Exception; + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/IgniteModelStorageUtilTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/IgniteModelStorageUtilTest.java new file mode 100644 index 0000000000000..2feca69445faf --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/IgniteModelStorageUtilTest.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.inference; + +import org.apache.ignite.Ignite; +import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.util.plugin.MLPluginConfiguration; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.junit.Test; + +/** + * Tests for {@link IgniteModelStorageUtil}. + */ +public class IgniteModelStorageUtilTest extends GridCommonAbstractTest { + /** Ignite configuration. */ + private final IgniteConfiguration cfg; + + /** + * Constructs a new instance of Ignite model storage util test. + */ + public IgniteModelStorageUtilTest() { + cfg = new IgniteConfiguration(); + + MLPluginConfiguration mlCfg = new MLPluginConfiguration(); + mlCfg.setWithMdlDescStorage(true); + mlCfg.setWithMdlStorage(true); + + cfg.setPluginConfigurations(mlCfg); + } + + /** */ + @Test + public void testSaveAndGet() throws Exception { + try (Ignite ignite = startGrid(cfg)) { + IgniteModelStorageUtil.saveModel(ignite, i -> 0.42, "mdl"); + Model infMdl = IgniteModelStorageUtil.getModel(ignite, "mdl"); + + assertEquals(0.42, infMdl.predict(VectorUtils.of())); + } + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testSaveModelWithTheSameName() throws Exception { + try (Ignite ignite = startGrid(cfg)) { + IgniteModelStorageUtil.saveModel(ignite, i -> 0.42, "mdl"); + IgniteModelStorageUtil.saveModel(ignite, i -> 0.42, "mdl"); + } + } + + /** */ + @Test + public void testSaveRemoveSaveModel() throws Exception { + try (Ignite ignite = startGrid(cfg)) { + IgniteModelStorageUtil.saveModel(ignite, i -> 0.42, "mdl"); + IgniteModelStorageUtil.removeModel(ignite, "mdl"); + IgniteModelStorageUtil.saveModel(ignite, i -> 0.43, "mdl"); + + Model infMdl = IgniteModelStorageUtil.getModel(ignite, "mdl"); + + assertEquals(0.43, infMdl.predict(VectorUtils.of())); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/InferenceTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/InferenceTestSuite.java new file mode 100644 index 0000000000000..27179ded126fd --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/InferenceTestSuite.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.inference; + +import org.apache.ignite.ml.inference.builder.IgniteDistributedModelBuilderTest; +import org.apache.ignite.ml.inference.builder.SingleModelBuilderTest; +import org.apache.ignite.ml.inference.builder.ThreadedModelBuilderTest; +import org.apache.ignite.ml.inference.storage.model.DefaultModelStorageTest; +import org.apache.ignite.ml.inference.util.DirectorySerializerTest; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in {@link org.apache.ignite.ml.inference} package. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + SingleModelBuilderTest.class, + ThreadedModelBuilderTest.class, + DirectorySerializerTest.class, + DefaultModelStorageTest.class, + IgniteDistributedModelBuilderTest.class, + IgniteModelStorageUtilTest.class +}) +public class InferenceTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/IgniteDistributedModelBuilderTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/IgniteDistributedModelBuilderTest.java new file mode 100644 index 0000000000000..ba097ad9f63bc --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/IgniteDistributedModelBuilderTest.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.inference.builder; + +import java.util.concurrent.Future; +import org.apache.ignite.Ignite; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.ml.inference.Model; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.junit.Test; + +/** + * Tests for {@link IgniteDistributedModelBuilder} class. + */ +public class IgniteDistributedModelBuilderTest extends GridCommonAbstractTest { + /** Number of nodes in grid */ + private static final int NODE_COUNT = 3; + + /** Ignite instance. */ + private Ignite ignite; + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + for (int i = 1; i <= NODE_COUNT; i++) + startGrid(i); + } + + /** + * {@inheritDoc} + */ + @Override protected void beforeTest() { + /* Grid instance. 
*/ + ignite = grid(NODE_COUNT); + ignite.configuration().setPeerClassLoadingEnabled(true); + IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); + } + + /** */ + @Test + public void testBuild() { + AsyncModelBuilder mdlBuilder = new IgniteDistributedModelBuilder(ignite, 1, 1); + + Model> infMdl = mdlBuilder.build( + ModelBuilderTestUtil.getReader(), + ModelBuilderTestUtil.getParser() + ); + + // TODO: IGNITE-10250: Test hangs sometimes because of Ignite queue issue. + // for (int i = 0; i < 100; i++) + // assertEquals(Integer.valueOf(i), infMdl.predict(i).get()); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/ModelBuilderTestUtil.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/ModelBuilderTestUtil.java new file mode 100644 index 0000000000000..4ff501ac39e10 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/ModelBuilderTestUtil.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.inference.builder; + +import org.apache.ignite.ml.inference.Model; +import org.apache.ignite.ml.inference.parser.ModelParser; +import org.apache.ignite.ml.inference.reader.ModelReader; + +/** + * Util class for model builder tests. + */ +class ModelBuilderTestUtil { + /** + * Creates dummy model reader used in tests. + * + * @return Dummy model reader used in tests. + */ + static ModelReader getReader() { + return () -> new byte[0]; + } + + /** + * Creates dummy model parser used in tests. + * + * @return Dummy model parser used in tests. + */ + static ModelParser> getParser() { + return m -> new Model() { + @Override public Integer predict(Integer input) { + return input; + } + + @Override public void close() { + // Do nothing. + } + }; + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/SingleModelBuilderTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/SingleModelBuilderTest.java new file mode 100644 index 0000000000000..c09e9971c2b3c --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/SingleModelBuilderTest.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.inference.builder; + +import org.apache.ignite.ml.inference.Model; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link SingleModelBuilder}. + */ +public class SingleModelBuilderTest { + /** */ + @Test + public void testBuild() { + SyncModelBuilder mdlBuilder = new SingleModelBuilder(); + + Model infMdl = mdlBuilder.build( + ModelBuilderTestUtil.getReader(), + ModelBuilderTestUtil.getParser() + ); + + for (int i = 0; i < 100; i++) + assertEquals(Integer.valueOf(i), infMdl.predict(i)); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/ThreadedModelBuilderTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/ThreadedModelBuilderTest.java new file mode 100644 index 0000000000000..46862cc05b329 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/builder/ThreadedModelBuilderTest.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.inference.builder; + +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import org.apache.ignite.ml.inference.Model; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link ThreadedModelBuilder} class. 
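+ *
+ * <p>A minimal usage sketch based on the test below; the {@code Integer}/{@code Future<Integer>}
+ * type parameters are an assumption taken from the dummy reader and parser in
+ * {@code ModelBuilderTestUtil}, and checked exceptions from {@code Future.get()} are omitted:
+ * <pre>{@code
+ * AsyncModelBuilder mdlBuilder = new ThreadedModelBuilder(10);
+ * Model<Integer, Future<Integer>> mdl = mdlBuilder.build(
+ *     ModelBuilderTestUtil.getReader(), ModelBuilderTestUtil.getParser());
+ * Integer res = mdl.predict(42).get();
+ * }</pre>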
+ */ +public class ThreadedModelBuilderTest { + /** */ + @Test + public void testBuild() throws ExecutionException, InterruptedException { + AsyncModelBuilder mdlBuilder = new ThreadedModelBuilder(10); + + Model> infMdl = mdlBuilder.build( + ModelBuilderTestUtil.getReader(), + ModelBuilderTestUtil.getParser() + ); + + for (int i = 0; i < 100; i++) + assertEquals(Integer.valueOf(i), infMdl.predict(i).get()); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/storage/model/AbstractModelStorageTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/storage/model/AbstractModelStorageTest.java new file mode 100644 index 0000000000000..84e2a857fc632 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/storage/model/AbstractModelStorageTest.java @@ -0,0 +1,142 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.inference.storage.model; + +import java.util.Set; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +/** + * Base tests for all implementation of {@link ModelStorage}. + */ +public abstract class AbstractModelStorageTest { + /** + * Returns model storage to be tested. + * + * @return Model storage to be tested. 
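+     *         (For instance, the default-storage subclass below simply returns
+     *         {@code new DefaultModelStorage(new LocalModelStorageProvider())}.)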
+ */ + abstract ModelStorage getModelStorage(); + + /** */ + @Test + public void testPutGetRemoveFile() { + ModelStorage mdlStorage = getModelStorage(); + + byte[] data = new byte[]{1, 2, 3, 4, 5}; + + mdlStorage.mkdirs("/"); + mdlStorage.putFile("/test", data); + + assertTrue(mdlStorage.exists("/test")); + assertArrayEquals(data, mdlStorage.getFile("/test")); + + mdlStorage.remove("/test"); + + assertFalse(mdlStorage.exists("/test")); + } + + /** */ + @Test + public void testListDirectory() { + ModelStorage mdlStorage = getModelStorage(); + + mdlStorage.mkdirs("/a/b"); + mdlStorage.mkdirs("/a/c"); + mdlStorage.putFile("/a/test", new byte[0]); + + Set aFiles = mdlStorage.listFiles("/a"); + Set bFiles = mdlStorage.listFiles("/a/b"); + Set cFiles = mdlStorage.listFiles("/a/c"); + + assertEquals(3, aFiles.size()); + assertTrue(bFiles.isEmpty()); + assertTrue(cFiles.isEmpty()); + + assertTrue(aFiles.contains("/a/b")); + assertTrue(aFiles.contains("/a/c")); + assertTrue(aFiles.contains("/a/test")); + } + + /** */ + @Test + public void testIsDirectory() { + ModelStorage mdlStorage = getModelStorage(); + + mdlStorage.mkdirs("/a"); + + assertTrue(mdlStorage.exists("/a")); + assertTrue(mdlStorage.isDirectory("/a")); + assertFalse(mdlStorage.isFile("/a")); + } + + /** */ + @Test + public void testIsFile() { + ModelStorage mdlStorage = getModelStorage(); + + mdlStorage.mkdirs("/"); + mdlStorage.putFile("/test", new byte[0]); + + assertTrue(mdlStorage.exists("/test")); + assertTrue(mdlStorage.isFile("/test")); + assertFalse(mdlStorage.isDirectory("/test")); + } + + /** */ + @Test + public void testRemoveDirectory() { + ModelStorage mdlStorage = getModelStorage(); + + mdlStorage.mkdirs("/a/b/c"); + mdlStorage.mkdirs("/a/b/d"); + mdlStorage.mkdirs("/a/c"); + mdlStorage.putFile("/a/b/c/test", new byte[0]); + mdlStorage.putFile("/a/b/test", new byte[0]); + + mdlStorage.remove("/a/b"); + + assertFalse(mdlStorage.exists("/a/b")); + assertFalse(mdlStorage.exists("/a/b/c")); + assertFalse(mdlStorage.exists("/a/b/d")); + assertFalse(mdlStorage.exists("/a/b/test")); + assertFalse(mdlStorage.exists("/a/b/c/test")); + + assertTrue(mdlStorage.exists("/a")); + assertTrue(mdlStorage.exists("/a/c")); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testPutFileIntoNonExistingDirectory() { + ModelStorage mdlStorage = getModelStorage(); + + mdlStorage.putFile("/test", new byte[0]); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testMakeDirInNonExistingDirectory() { + ModelStorage mdlStorage = getModelStorage(); + + mdlStorage.mkdir("/test"); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/storage/model/DefaultModelStorageTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/storage/model/DefaultModelStorageTest.java new file mode 100644 index 0000000000000..ef3daa2963193 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/storage/model/DefaultModelStorageTest.java @@ -0,0 +1,145 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.inference.storage.model; + +import java.util.concurrent.locks.Lock; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +/** + * Tests for {@link DefaultModelStorage}. + */ +public class DefaultModelStorageTest extends AbstractModelStorageTest { + /** {@inheritDoc} */ + @Override ModelStorage getModelStorage() { + ModelStorageProvider provider = new LocalModelStorageProvider(); + return new DefaultModelStorage(provider); + } + + /** */ + @Test + public void testSynchronize() { + Lock[] locks = new Lock[10]; + for (int i = 0; i < locks.length; i++) + locks[i] = mock(Lock.class); + + DefaultModelStorage.synchronize(() -> {}, locks); + + for (Lock lock : locks) { + verify(lock, times(1)).lock(); + verify(lock, times(1)).unlock(); + verifyNoMoreInteractions(lock); + } + } + + /** */ + @Test + public void testSynchronizeWithExceptionInTask() { + Lock[] locks = new Lock[10]; + for (int i = 0; i < locks.length; i++) + locks[i] = mock(Lock.class); + + RuntimeException ex = new RuntimeException(); + + try { + DefaultModelStorage.synchronize(() -> { + throw ex; + }, locks); + fail(); + } + catch (RuntimeException e) { + assertEquals(ex, e); + } + + for (Lock lock : locks) { + verify(lock, times(1)).lock(); + verify(lock, times(1)).unlock(); + verifyNoMoreInteractions(lock); + } + } + + /** */ + @Test + public void testSynchronizeWithExceptionInLock() { + Lock[] locks = new Lock[10]; + for (int i = 0; i < locks.length; i++) + locks[i] = mock(Lock.class); + + RuntimeException ex = new RuntimeException(); + + doThrow(ex).when(locks[5]).lock(); + + try { + DefaultModelStorage.synchronize(() -> {}, locks); + fail(); + } + catch (RuntimeException e) { + assertEquals(ex, e); + } + + for (int i = 0; i < locks.length; i++) { + if (i <= 4) { + verify(locks[i], times(1)).lock(); + verify(locks[i], times(1)).unlock(); + } + else if (i > 5) { + verify(locks[i], times(0)).lock(); + verify(locks[i], times(0)).unlock(); + } + else { + verify(locks[i], times(1)).lock(); + verify(locks[i], times(0)).unlock(); + } + + verifyNoMoreInteractions(locks[i]); + } + } + + /** */ + @Test + public void testSynchronizeWithExceptionInUnlock() { + Lock[] locks = new Lock[10]; + for (int i = 0; i < locks.length; i++) + locks[i] = mock(Lock.class); + + RuntimeException ex = new RuntimeException(); + + doThrow(ex).when(locks[5]).unlock(); + + try { + DefaultModelStorage.synchronize(() -> {}, locks); + fail(); + } + catch (RuntimeException e) { + assertEquals(ex, e); + } + + for (Lock lock : locks) { + verify(lock, times(1)).lock(); + verify(lock, times(1)).unlock(); + verifyNoMoreInteractions(lock); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/util/DirectorySerializerTest.java 
b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/util/DirectorySerializerTest.java new file mode 100644 index 0000000000000..87a515c3c7457 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/inference/util/DirectorySerializerTest.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.inference.util; + +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Scanner; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link DirectorySerializer} class. + */ +public class DirectorySerializerTest { + /** Source directory prefix. */ + private static final String SRC_DIRECTORY_PREFIX = "directory_serializer_test_src"; + + /** Destination directory prefix. 
*/ + private static final String DST_DIRECTORY_PREFIX = "directory_serializer_test_dst"; + + /** */ + @Test + public void testSerializeDeserializeWithFile() throws IOException, ClassNotFoundException { + Path src = Files.createTempDirectory(SRC_DIRECTORY_PREFIX); + Path dst = Files.createTempDirectory(DST_DIRECTORY_PREFIX); + try { + File file = new File(src.toString() + "/test.txt"); + Files.createFile(file.toPath()); + try (FileWriter fw = new FileWriter(file)) { + fw.write("Hello, world!"); + fw.flush(); + } + + byte[] serialized = DirectorySerializer.serialize(src); + DirectorySerializer.deserialize(dst, serialized); + + File[] files = dst.toFile().listFiles(); + + assertNotNull(files); + assertEquals(1, files.length); + assertEquals("test.txt", files[0].getName()); + + try (Scanner scanner = new Scanner(files[0])) { + assertTrue(scanner.hasNextLine()); + assertEquals("Hello, world!", scanner.nextLine()); + assertFalse(scanner.hasNextLine()); + } + } + finally { + DirectorySerializer.deleteDirectory(src); + DirectorySerializer.deleteDirectory(dst); + } + } + + /** */ + @Test + public void testSerializeDeserializeWithDirectory() throws IOException, ClassNotFoundException { + Path src = Files.createTempDirectory(SRC_DIRECTORY_PREFIX); + Path dst = Files.createTempDirectory(DST_DIRECTORY_PREFIX); + try { + Files.createDirectories(Paths.get(src.toString() + "/a/b/")); + File file = new File(src.toString() + "/a/b/test.txt"); + Files.createFile(file.toPath()); + try (FileWriter fw = new FileWriter(file)) { + fw.write("Hello, world!"); + fw.flush(); + } + + byte[] serialized = DirectorySerializer.serialize(src); + DirectorySerializer.deserialize(dst, serialized); + + File[] files = dst.toFile().listFiles(); + + assertNotNull(files); + assertEquals(1, files.length); + assertEquals("a", files[0].getName()); + assertTrue(files[0].isDirectory()); + + files = files[0].listFiles(); + + assertNotNull(files); + assertEquals(1, files.length); + assertEquals("b", files[0].getName()); + assertTrue(files[0].isDirectory()); + + files = files[0].listFiles(); + + assertNotNull(files); + assertEquals(1, files.length); + assertEquals("test.txt", files[0].getName()); + + try (Scanner scanner = new Scanner(files[0])) { + assertTrue(scanner.hasNextLine()); + assertEquals("Hello, world!", scanner.nextLine()); + assertFalse(scanner.hasNextLine()); + } + } + finally { + DirectorySerializer.deleteDirectory(src); + DirectorySerializer.deleteDirectory(dst); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/KNNRegressionTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/KNNRegressionTest.java new file mode 100644 index 0000000000000..7a95cfde20ee0 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/KNNRegressionTest.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.knn; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.knn.regression.KNNRegressionModel; +import org.apache.ignite.ml.knn.regression.KNNRegressionTrainer; +import org.apache.ignite.ml.math.distances.EuclideanDistance; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +/** + * Tests for {@link KNNRegressionTrainer}. + */ +public class KNNRegressionTest extends TrainerTest { + /** */ + @Test + public void testSimpleRegressionWithOneNeighbour() { + Map data = new HashMap<>(); + data.put(0, new double[] {11.0, 0.0, 0.0, 0.0, 0.0, 0.0}); + data.put(1, new double[] {12.0, 2.0, 0.0, 0.0, 0.0, 0.0}); + data.put(2, new double[] {13.0, 0.0, 3.0, 0.0, 0.0, 0.0}); + data.put(3, new double[] {14.0, 0.0, 0.0, 4.0, 0.0, 0.0}); + data.put(4, new double[] {15.0, 0.0, 0.0, 0.0, 5.0, 0.0}); + data.put(5, new double[] {16.0, 0.0, 0.0, 0.0, 0.0, 6.0}); + + KNNRegressionTrainer trainer = new KNNRegressionTrainer() + .withK(1) + .withDistanceMeasure(new EuclideanDistance()) + .withWeighted(false); + + KNNRegressionModel knnMdl = trainer.fit( + new LocalDatasetBuilder<>(data, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST) + ); + + assertEquals(15, knnMdl.predict(VectorUtils.of(0.0, 0.0, 0.0, 5.0, 0.0)), 1E-12); + } + + /** */ + @Test + public void testLongly() { + testLongly(false); + } + + /** */ + @Test + public void testLonglyWithWeightedStrategy() { + testLongly(true); + } + + /** */ + private void testLongly(boolean weighted) { + Map data = new HashMap<>(); + data.put(0, new double[] {60323, 83.0, 234289, 2356, 1590, 107608, 1947}); + data.put(1, new double[] {61122, 88.5, 259426, 2325, 1456, 108632, 1948}); + data.put(2, new double[] {60171, 88.2, 258054, 3682, 1616, 109773, 1949}); + data.put(3, new double[] {61187, 89.5, 284599, 3351, 1650, 110929, 1950}); + data.put(4, new double[] {63221, 96.2, 328975, 2099, 3099, 112075, 1951}); + data.put(5, new double[] {63639, 98.1, 346999, 1932, 3594, 113270, 1952}); + data.put(6, new double[] {64989, 99.0, 365385, 1870, 3547, 115094, 1953}); + data.put(7, new double[] {63761, 100.0, 363112, 3578, 3350, 116219, 1954}); + data.put(8, new double[] {66019, 101.2, 397469, 2904, 3048, 117388, 1955}); + data.put(9, new double[] {68169, 108.4, 442769, 2936, 2798, 120445, 1957}); + data.put(10, new double[] {66513, 110.8, 444546, 4681, 2637, 121950, 1958}); + data.put(11, new double[] {68655, 112.6, 482704, 3813, 2552, 123366, 1959}); + data.put(12, new double[] {69564, 114.2, 502601, 3931, 2514, 125368, 1960}); + data.put(13, new double[] {69331, 115.7, 518173, 
4806, 2572, 127852, 1961}); + data.put(14, new double[] {70551, 116.9, 554894, 4007, 2827, 130081, 1962}); + + KNNRegressionTrainer trainer = new KNNRegressionTrainer() + .withK(3) + .withDistanceMeasure(new EuclideanDistance()) + .withWeighted(weighted); + + KNNRegressionModel knnMdl = trainer.fit( + new LocalDatasetBuilder<>(data, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST) + ); + + Vector vector = VectorUtils.of(104.6, 419180.0, 2822.0, 2857.0, 118734.0, 1956.0); + + assertNotNull(knnMdl.predict(vector)); + + assertEquals(67857, knnMdl.predict(vector), 2000); + +// Assert.assertTrue(knnMdl.toString().contains(stgy.name())); +// Assert.assertTrue(knnMdl.toString(true).contains(stgy.name())); +// Assert.assertTrue(knnMdl.toString(false).contains(stgy.name())); + } + + /** */ + @Test + public void testUpdate() { + Map data = new HashMap<>(); + data.put(0, new double[] {11.0, 0, 0, 0, 0, 0}); + data.put(1, new double[] {12.0, 2.0, 0, 0, 0, 0}); + data.put(2, new double[] {13.0, 0, 3.0, 0, 0, 0}); + data.put(3, new double[] {14.0, 0, 0, 4.0, 0, 0}); + data.put(4, new double[] {15.0, 0, 0, 0, 5.0, 0}); + data.put(5, new double[] {16.0, 0, 0, 0, 0, 6.0}); + + KNNRegressionTrainer trainer = new KNNRegressionTrainer() + .withK(1) + .withDistanceMeasure(new EuclideanDistance()) + .withWeighted(false); + + KNNRegressionModel originalMdlOnEmptyDataset = trainer.fit( + new HashMap<>(), + parts, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST) + ); + + KNNRegressionModel updatedOnDataset = trainer.update( + originalMdlOnEmptyDataset, + data, + parts, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST) + ); + + Vector vector = VectorUtils.of(0.0, 0.0, 0.0, 5.0, 0.0); + assertNull(originalMdlOnEmptyDataset.predict(vector)); + assertEquals(Double.valueOf(15.0), updatedOnDataset.predict(vector)); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/KNNTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/KNNTestSuite.java new file mode 100644 index 0000000000000..73fd6ee252458 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/KNNTestSuite.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.knn; + +import org.apache.ignite.ml.knn.utils.ArraySpatialIndexTest; +import org.apache.ignite.ml.knn.utils.BallTreeSpatialIndexTest; +import org.apache.ignite.ml.knn.utils.KDTreeSpatialIndexTest; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in org.apache.ignite.ml.trees package. 
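+ * Covers ANN/KNN classification, KNN regression and the spatial index tests listed below.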
+ */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + ANNClassificationTest.class, + KNNClassificationTest.class, + KNNRegressionTest.class, + ArraySpatialIndexTest.class, + BallTreeSpatialIndexTest.class, + KDTreeSpatialIndexTest.class +}) +public class KNNTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/LabeledDatasetHelper.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/LabeledDatasetHelper.java new file mode 100644 index 0000000000000..dfc0532a5068e --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/LabeledDatasetHelper.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.knn; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.nio.file.Path; +import java.nio.file.Paths; +import org.apache.ignite.ml.structures.LabeledVectorSet; +import org.apache.ignite.ml.structures.preprocessing.LabeledDatasetLoader; + +/** + * Base class for decision trees test. + */ +public class LabeledDatasetHelper { + /** Separator. */ + private static final String SEPARATOR = "\t"; + + /** + * Loads labeled dataset from file with .txt extension. + * + * @param rsrcPath path to dataset. + * @return Null if path is incorrect. + */ + public static LabeledVectorSet loadDatasetFromTxt(String rsrcPath, boolean isFallOnBadData) { + try { + Path path = Paths.get(LabeledDatasetHelper.class.getClassLoader().getResource(rsrcPath).toURI()); + try { + return LabeledDatasetLoader.loadFromTxtFile(path, SEPARATOR, isFallOnBadData); + } + catch (IOException e) { + e.printStackTrace(); + } + } + catch (URISyntaxException e) { + e.printStackTrace(); + return null; + } + return null; + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/utils/ArraySpatialIndexTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/utils/ArraySpatialIndexTest.java new file mode 100644 index 0000000000000..aa1bea23012ab --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/utils/ArraySpatialIndexTest.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.knn.utils; + +import org.apache.ignite.ml.knn.utils.indices.ArraySpatialIndex; + +/** + * Test for {@link ArraySpatialIndex}. + */ +public class ArraySpatialIndexTest extends SpatialIndexTest { + /** + * Constructs a new instance of array spatial index test. + */ + public ArraySpatialIndexTest() { + super(ArraySpatialIndex::new); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/utils/BallTreeSpatialIndexTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/utils/BallTreeSpatialIndexTest.java new file mode 100644 index 0000000000000..97f2688899228 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/utils/BallTreeSpatialIndexTest.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.knn.utils; + +import org.apache.ignite.ml.knn.utils.indices.BallTreeSpatialIndex; + +/** + * Tests for {@link BallTreeSpatialIndex}. + */ +public class BallTreeSpatialIndexTest extends SpatialIndexTest { + /** + * Constructs a new instance of Ball tree spatial index test. + */ + public BallTreeSpatialIndexTest() { + super(BallTreeSpatialIndex::new); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/utils/KDTreeSpatialIndexTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/utils/KDTreeSpatialIndexTest.java new file mode 100644 index 0000000000000..a3284e1d6af2d --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/knn/utils/KDTreeSpatialIndexTest.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.knn.utils; + +import org.apache.ignite.ml.knn.utils.indices.KDTreeSpatialIndex; + +/** + * Tests for {@link KDTreeSpatialIndex}. + */ +public class KDTreeSpatialIndexTest extends SpatialIndexTest { + /** + * Constructs a new instance of KD tree spatial index test. + */ + public KDTreeSpatialIndexTest() { + super(KDTreeSpatialIndex::new); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/ExternalizableTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/ExternalizableTest.java new file mode 100644 index 0000000000000..aed737589bc1f --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/ExternalizableTest.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.Externalizable; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import org.apache.ignite.ml.math.primitives.MathTestConstants; +import org.junit.Test; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +/** + * Common test for externalization. 
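+ * The default implementation below writes the object through an ObjectOutputStream/ObjectInputStream round trip and then checks equals() and hashCode() on the restored copy.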
+ */ +public interface ExternalizableTest { + /** */ + @SuppressWarnings("unchecked") + public default void externalizeTest(T initObj) { + T objRestored = null; + + try { + ByteArrayOutputStream byteArrOutputStream = new ByteArrayOutputStream(); + ObjectOutputStream objOutputStream = new ObjectOutputStream(byteArrOutputStream); + + objOutputStream.writeObject(initObj); + + ByteArrayInputStream byteArrInputStream = new ByteArrayInputStream(byteArrOutputStream.toByteArray()); + ObjectInputStream objInputStream = new ObjectInputStream(byteArrInputStream); + + objRestored = (T)objInputStream.readObject(); + + assertTrue(MathTestConstants.VAL_NOT_EQUALS, initObj.equals(objRestored)); + assertTrue(MathTestConstants.VAL_NOT_EQUALS, Integer.compare(initObj.hashCode(), objRestored.hashCode()) == 0); + } + catch (ClassNotFoundException | IOException e) { + fail(e + " [" + e.getMessage() + "]"); + } + finally { + if (objRestored != null && objRestored instanceof Destroyable) + ((Destroyable)objRestored).destroy(); + } + } + + /** */ + @Test + public void testExternalization(); +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/ExternalizeTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/ExternalizeTest.java new file mode 100644 index 0000000000000..e98d4dd33b285 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/ExternalizeTest.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.Externalizable; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import org.apache.ignite.ml.math.primitives.MathTestConstants; +import org.junit.Test; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +/** + * Common test for externalization. 
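+ * Performs the same serialization round-trip check as {@link ExternalizableTest}, kept as an abstract base class for existing tests.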
+ * TODO: IGNITE-7325 remove this class from all test and change on ExternalizableTest + */ +public abstract class ExternalizeTest { + /** */ + @SuppressWarnings("unchecked") + protected void externalizeTest(T initObj) { + T objRestored = null; + + try { + ByteArrayOutputStream byteArrOutputStream = new ByteArrayOutputStream(); + ObjectOutputStream objOutputStream = new ObjectOutputStream(byteArrOutputStream); + + objOutputStream.writeObject(initObj); + + ByteArrayInputStream byteArrInputStream = new ByteArrayInputStream(byteArrOutputStream.toByteArray()); + ObjectInputStream objInputStream = new ObjectInputStream(byteArrInputStream); + + objRestored = (T)objInputStream.readObject(); + + assertTrue(MathTestConstants.VAL_NOT_EQUALS, initObj.equals(objRestored)); + assertTrue(MathTestConstants.VAL_NOT_EQUALS, Integer.compare(initObj.hashCode(), objRestored.hashCode()) == 0); + } + catch (ClassNotFoundException | IOException e) { + fail(e + " [" + e.getMessage() + "]"); + } + finally { + if (objRestored != null) + objRestored.destroy(); + } + } + + /** */ + @Test + public abstract void externalizeTest(); +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/MathImplMainTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/MathImplMainTestSuite.java new file mode 100644 index 0000000000000..500900ae68da3 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/MathImplMainTestSuite.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math; + +import org.apache.ignite.ml.math.stat.StatsTestSuite; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for local and distributed math tests. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + StatsTestSuite.class, + MathImplLocalTestSuite.class, + TracerTest.class, + BlasTest.class +}) +public class MathImplMainTestSuite { + // No-op. +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/TracerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/TracerTest.java new file mode 100644 index 0000000000000..1b78390d8f6bd --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/TracerTest.java @@ -0,0 +1,248 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math; + +import java.awt.Color; +import java.awt.Desktop; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.Optional; +import org.apache.ignite.ml.math.primitives.MathTestConstants; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Test; + +import static java.nio.file.Files.createTempFile; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +/** + * Tests for {@link Tracer}. + */ +public class TracerTest { + /** */ + private static final String DEFAULT_FORMAT = "%.10f"; + + /** */ + private static final double DEFAULT_DELTA = 0.000000001d; + + /** + * Color mapper that maps [0, 1] range into three distinct RGB segments. + */ + private static final Tracer.ColorMapper COLOR_MAPPER = new Tracer.ColorMapper() { + /** {@inheritDoc} */ + @Override public Color apply(Double d) { + if (d <= 0.33) + return Color.RED; + else if (d <= 0.66) + return Color.GREEN; + else + return Color.BLUE; + } + }; + + /** + * @param size Vector size. + */ + private Vector makeRandomVector(int size) { + DenseVector vec = new DenseVector(size); + + vec.assign((idx) -> Math.random()); + + return vec; + } + + /** + * @param rows Amount of rows in matrix. + * @param cols Amount of columns in matrix. + */ + private Matrix makeRandomMatrix(int rows, int cols) { + DenseMatrix mtx = new DenseMatrix(rows, cols); + + // Missing assign(f)? + mtx.map((d) -> Math.random()); + + return mtx; + } + + /** */ + @Test + public void testAsciiVectorTracer() { + Vector vec = makeRandomVector(20); + + Tracer.showAscii(vec); + Tracer.showAscii(vec, "%2f"); + Tracer.showAscii(vec, "%.3g"); + } + + /** */ + @Test + public void testAsciiMatrixTracer() { + Matrix mtx = makeRandomMatrix(10, 10); + + Tracer.showAscii(mtx); + Tracer.showAscii(mtx, "%2f"); + Tracer.showAscii(mtx, "%.3g"); + } + + /** */ + @Test + public void testHtmlVectorTracer() throws IOException { + Vector vec1 = makeRandomVector(1000); + + // Default color mapping. + verifyShowHtml(() -> Tracer.showHtml(vec1)); + + // Custom color mapping. + verifyShowHtml(() -> Tracer.showHtml(vec1, COLOR_MAPPER)); + + // Default color mapping with sorted vector. + verifyShowHtml(() -> Tracer.showHtml(vec1.copy().sort())); + } + + /** */ + @Test + public void testHtmlMatrixTracer() throws IOException { + Matrix mtx1 = makeRandomMatrix(100, 100); + + // Custom color mapping. + verifyShowHtml(() -> Tracer.showHtml(mtx1, COLOR_MAPPER)); + + Matrix mtx2 = new DenseMatrix(100, 100); + + double MAX = (double)(mtx2.rowSize() * mtx2.columnSize()); + + mtx2.assign((x, y) -> (double)(x * y) / MAX); + + verifyShowHtml(() -> Tracer.showHtml(mtx2)); + } + + /** */ + @Test + public void testHtmlVectorTracerWithAsciiFallback() throws IOException { + Vector vec1 = makeRandomVector(1000); + + // Default color mapping. 
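+ // The 'true' argument requests the ASCII fallback, so these calls are expected to succeed even in a headless environment without a desktop browser.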
+ Tracer.showHtml(vec1, true); + + // Custom color mapping. + Tracer.showHtml(vec1, COLOR_MAPPER, true); + + // Default color mapping with sorted vector. + Tracer.showHtml(vec1.copy().sort(), true); + } + + /** */ + @Test + public void testHtmlMatrixTracerWithAsciiFallback() throws IOException { + Matrix mtx1 = makeRandomMatrix(100, 100); + + // Custom color mapping. + Tracer.showHtml(mtx1, COLOR_MAPPER, true); + + Matrix mtx2 = new DenseMatrix(100, 100); + + double MAX = (double)(mtx2.rowSize() * mtx2.columnSize()); + + mtx2.assign((x, y) -> (double)(x * y) / MAX); + + Tracer.showHtml(mtx2, true); + } + + /** */ + @Test + public void testWriteVectorToCSVFile() throws IOException { + DenseVector vector = new DenseVector(MathTestConstants.STORAGE_SIZE); + + for (int i = 0; i < vector.size(); i++) + vector.set(i, Math.random()); + + Path file = createTempFile("vector", ".csv"); + + Tracer.saveAsCsv(vector, DEFAULT_FORMAT, file.toString()); + + System.out.println("Vector exported: " + file.getFileName()); + + List strings = Files.readAllLines(file); + Optional reduce = strings.stream().reduce((s1, s2) -> s1 + s2); + String[] csvVals = reduce.orElse("").split(","); + + for (int i = 0; i < vector.size(); i++) { + Double csvVal = Double.valueOf(csvVals[i]); + + assertEquals("Unexpected value.", csvVal, vector.get(i), DEFAULT_DELTA); + } + + Files.deleteIfExists(file); + } + + /** */ + @Test + public void testWriteMatrixToCSVFile() throws IOException { + DenseMatrix matrix = new DenseMatrix(MathTestConstants.STORAGE_SIZE, MathTestConstants.STORAGE_SIZE); + + for (int i = 0; i < matrix.rowSize(); i++) + for (int j = 0; j < matrix.columnSize(); j++) + matrix.set(i, j, Math.random()); + + Path file = createTempFile("matrix", ".csv"); + + Tracer.saveAsCsv(matrix, DEFAULT_FORMAT, file.toString()); + + System.out.println("Matrix exported: " + file.getFileName()); + + List strings = Files.readAllLines(file); + Optional reduce = strings.stream().reduce((s1, s2) -> s1 + s2); + String[] csvVals = reduce.orElse("").split(","); + + for (int i = 0; i < matrix.rowSize(); i++) + for (int j = 0; j < matrix.columnSize(); j++) { + Double csvVal = Double.valueOf(csvVals[i * matrix.rowSize() + j]); + + assertEquals("Unexpected value.", csvVal, matrix.get(i, j), DEFAULT_DELTA); + } + + Files.deleteIfExists(file); + } + + /** */ + private void verifyShowHtml(ShowHtml code) throws IOException { + final boolean browseSupported = Desktop.isDesktopSupported() + && Desktop.getDesktop().isSupported(Desktop.Action.BROWSE); + + try { + code.showHtml(); + if (!browseSupported) + fail("Expected exception was not caught: " + UnsupportedOperationException.class.getSimpleName()); + } + catch (UnsupportedOperationException uoe) { + if (browseSupported) + throw uoe; + } + } + + /** */ + @FunctionalInterface private interface ShowHtml { + /** */ + void showHtml() throws IOException; + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/VectorUtilsTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/VectorUtilsTest.java new file mode 100644 index 0000000000000..42d7efd0bbdf0 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/VectorUtilsTest.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math; + +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link VectorUtils } + */ +public class VectorUtilsTest { + /** */ + @Test + public void testOf1() { + double[] values = {1.0, 2.0, 3.0}; + Vector vector = VectorUtils.of(values); + + assertEquals(3, vector.size()); + assertEquals(3, vector.nonZeroElements()); + for (int i = 0; i < values.length; i++) + assertEquals(values[i], vector.get(i), 0.001); + } + + /** */ + @Test + public void testOf2() { + Double[] values = {1.0, null, 3.0}; + Vector vector = VectorUtils.of(values); + + assertEquals(3, vector.size()); + assertEquals(2, vector.nonZeroElements()); + for (int i = 0; i < values.length; i++) { + if (values[i] == null) + assertEquals(0.0, vector.get(i), 0.001); + else + assertEquals(values[i], vector.get(i), 0.001); + } + } + + /** */ + @Test(expected = NullPointerException.class) + public void testFails1() { + VectorUtils.of((double[])null); + } + + /** */ + @Test(expected = NullPointerException.class) + public void testFails2() { + VectorUtils.of((Double[])null); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/BrayCurtisDistanceTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/BrayCurtisDistanceTest.java new file mode 100644 index 0000000000000..e798c3cc288cc --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/BrayCurtisDistanceTest.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.ignite.ml.math.distances; + +import java.util.Arrays; +import java.util.Collection; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import static org.junit.Assert.assertEquals; + +/** + * Evaluate BrayCurtisDistance in multiple test datasets + */ +@RunWith(Parameterized.class) +public class BrayCurtisDistanceTest { + /** Precision. */ + private static final double PRECISION = 0.01; + + /** */ + @Parameterized.Parameters(name = "{0}") + public static Collection data() { + return Arrays.asList( + new TestData( + new double[] {0, 0, 0}, + new double[] {2, 1, 0}, + 1.0 + ), + new TestData( + new double[] {1, 2, 3}, + new double[] {2, 1, 0}, + 0.55 + ), + new TestData( + new double[] {1, 2, 3}, + new double[] {2, 1, 50}, + 0.83 + ), + new TestData( + new double[] {1, -100, 3}, + new double[] {2, 1, -50}, + 1.04 + ) + ); + } + + /** */ + private final TestData testData; + + /** */ + public BrayCurtisDistanceTest(TestData testData) { + this.testData = testData; + } + + /** */ + @Test + public void testBrayCurtisDistance() { + DistanceMeasure distanceMeasure = new BrayCurtisDistance(); + + assertEquals(testData.expRes, + distanceMeasure.compute(testData.vectorA, testData.vectorB), PRECISION); + assertEquals(testData.expRes, + distanceMeasure.compute(testData.vectorA, testData.vectorB), PRECISION); + } + + /** */ + private static class TestData { + /** */ + public final Vector vectorA; + + /** */ + public final Vector vectorB; + + /** */ + public final double expRes; + + /** */ + private TestData(double[] vectorA, double[] vectorB, double expRes) { + this.vectorA = new DenseVector(vectorA); + this.vectorB = new DenseVector(vectorB); + this.expRes = expRes; + } + + /** {@inheritDoc} */ + @Override public String toString() { + return String.format("d(%s,%s) = %s", + Arrays.toString(vectorA.asArray()), + Arrays.toString(vectorB.asArray()), + expRes + ); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/CosineSimilarityTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/CosineSimilarityTest.java new file mode 100644 index 0000000000000..b83c81c4e8915 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/CosineSimilarityTest.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.ignite.ml.math.distances; + +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** Test for {@code CosineSimilarity}. */ +public class CosineSimilarityTest { + /** Precision. */ + private static final double PRECISION = 0.0; + + /** */ + @Test + public void cosineSimilarityDistance() { + double expRes = 0.9449111825230682d; + DenseVector a = new DenseVector(new double[] {1, 2, 3}); + double[] b = {1, 1, 4}; + + DistanceMeasure distanceMeasure = new CosineSimilarity(); + + assertEquals(expRes, distanceMeasure.compute(a, b), PRECISION); + assertEquals(expRes, distanceMeasure.compute(a, new DenseVector(b)), PRECISION); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/DistanceTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/DistanceTest.java new file mode 100644 index 0000000000000..40949c493b125 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/DistanceTest.java @@ -0,0 +1,230 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.distances; + +import java.util.Arrays; +import java.util.List; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Before; +import org.junit.Test; + +import static java.util.Arrays.asList; +import static java.util.stream.Collectors.joining; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** */ +public class DistanceTest { + /** Precision. */ + private static final double PRECISION = 0.0; + + /** All distace measures for distace properties tests. 
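+ * Each measure is checked for the basic metric properties: d(x, x) = 0, symmetry, and positivity for distinct random vectors.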
*/ + private static final List DISTANCE_MEASURES = asList( + new ChebyshevDistance(), + new EuclideanDistance(), + new HammingDistance(), + new ManhattanDistance(), + new BrayCurtisDistance(), + new CanberraDistance(), + new JensenShannonDistance(), + new WeightedMinkowskiDistance(4, new double[]{1, 1, 1}), + new MinkowskiDistance(Math.random())); + + /** */ + private Vector v1; + + /** */ + private Vector v2; + + /** */ + private double[] data2; + + /** */ + @Before + public void setup() { + data2 = new double[] {2.0, 1.0, 0.0}; + v1 = new DenseVector(new double[] {0.0, 0.0, 0.0}); + v2 = new DenseVector(data2); + } + + /** */ + @Test + public void distanceFromPointToItselfIsZero() { + DISTANCE_MEASURES.forEach(distance -> { + Vector vector = randomVector(3); + String errorMessage = errorMessage(distance, vector, vector); + + assertEquals(errorMessage, 0d, distance.compute(vector, vector), PRECISION); + }); + } + + /** */ + @Test + public void distanceFromAToBIsTheSameAsDistanceFromBToA() { + DISTANCE_MEASURES.forEach(distance -> { + Vector vector1 = randomVector(3); + Vector vector2 = randomVector(3); + String errorMessage = errorMessage(distance, vector1, vector2); + + assertEquals(errorMessage, + distance.compute(vector1, vector2), distance.compute(vector2, vector1), PRECISION); + }); + } + + /** */ + @Test + public void distanceBetweenTwoDistinctPointsIsPositive() { + DISTANCE_MEASURES.forEach(distance -> { + Vector vector1 = randomVector(3); + Vector vector2 = randomVector(3); + String errorMessage = errorMessage(distance, vector1, vector2); + + assertTrue(errorMessage, distance.compute(vector1, vector2) > 0); + }); + } + + /** */ + @Test + public void euclideanDistance() { + double expRes = Math.pow(5, 0.5); + + DistanceMeasure distanceMeasure = new EuclideanDistance(); + + assertEquals(expRes, distanceMeasure.compute(v1, v2), PRECISION); + assertEquals(expRes, new EuclideanDistance().compute(v1, data2), PRECISION); + } + + /** */ + @Test + public void manhattanDistance() { + double expRes = 3; + + DistanceMeasure distanceMeasure = new ManhattanDistance(); + + assertEquals(expRes, distanceMeasure.compute(v1, data2), PRECISION); + assertEquals(expRes, distanceMeasure.compute(v1, v2), PRECISION); + } + + /** */ + @Test + public void hammingDistance() { + double expRes = 2; + + DistanceMeasure distanceMeasure = new HammingDistance(); + + assertEquals(expRes, distanceMeasure.compute(v1, data2), PRECISION); + assertEquals(expRes, distanceMeasure.compute(v1, v2), PRECISION); + } + + /** */ + @Test + public void chebyshevDistance() { + double expRes = 2d; + + DistanceMeasure distanceMeasure = new ChebyshevDistance(); + + assertEquals(expRes, distanceMeasure.compute(v1, data2), PRECISION); + assertEquals(expRes, distanceMeasure.compute(v1, v2), PRECISION); + } + + /** */ + @Test + public void minkowskiDistance() { + double expRes = Math.pow(5, 0.5); + + DistanceMeasure distanceMeasure = new MinkowskiDistance(2d); + + assertEquals(expRes, distanceMeasure.compute(v1, data2), PRECISION); + assertEquals(expRes, distanceMeasure.compute(v1, v2), PRECISION); + } + + /** */ + @Test + public void brayCurtisDistance() { + double expRes = 1.0; + + DistanceMeasure distanceMeasure = new BrayCurtisDistance(); + + assertEquals(expRes, distanceMeasure.compute(v1, data2), PRECISION); + assertEquals(expRes, distanceMeasure.compute(v1, v2), PRECISION); + } + + /** */ + @Test + public void canberraDistance() { + double expRes = 2.0; + + DistanceMeasure distanceMeasure = new CanberraDistance(); + + 
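+ // Canberra distance sums |x_i - y_i| / (|x_i| + |y_i|) over coordinates, with the 0/0 term contributing nothing: here 1 + 1 + 0 = 2.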
assertEquals(expRes, distanceMeasure.compute(v1, data2), PRECISION); + assertEquals(expRes, distanceMeasure.compute(v1, v2), PRECISION); + } + + /** */ + @Test + public void jensenShannonDistance() { + double precistion = 0.01; + double expRes = 0.83; + double[] pData = new double[] {1.0, 0.0, 0.0}; + Vector pV1 = new DenseVector(new double[] {0.0, 1.0, 0.0}); + Vector pV2 = new DenseVector(pData); + + DistanceMeasure distanceMeasure = new JensenShannonDistance(); + + assertEquals(expRes, distanceMeasure.compute(pV1, pData), precistion); + assertEquals(expRes, distanceMeasure.compute(pV1, pV2), precistion); + } + + /** */ + @Test + public void weightedMinkowskiDistance() { + double precistion = 0.01; + int p = 2; + double expRes = 5.0; + double[] weights = new double[]{2, 3, 4}; + + DistanceMeasure distanceMeasure = new WeightedMinkowskiDistance(p, weights); + + assertEquals(expRes, distanceMeasure.compute(v1, data2), precistion); + assertEquals(expRes, distanceMeasure.compute(v1, v2), precistion); + } + + /** Returns a random vector */ + private static Vector randomVector(int length) { + double[] vec = new double[length]; + + for (int i = 0; i < vec.length; i++) { + vec[i] = Math.random(); + } + return new DenseVector(vec); + } + + /** Creates an assertion error message from a distsnce measure and params. */ + private static String errorMessage(DistanceMeasure measure, Vector param1, Vector param2) { + return String.format("%s(%s, %s)", measure.getClass().getSimpleName(), + vectorToString(param1), vectorToString(param2)); + } + + /** Converts vector to string. */ + private static String vectorToString(Vector vector) { + return "[" + Arrays.stream(vector.asArray()).boxed() + .map(Object::toString) + .collect(joining(",")) + "]"; + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/DistancesTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/DistancesTestSuite.java new file mode 100644 index 0000000000000..7ac7787f373d1 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/DistancesTestSuite.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.math.distances; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + JensenShannonDistanceTest.class, + JensenShannonDistanceTest.class, + CanberraDistanceTest.class, + BrayCurtisDistanceTest.class, + WeightedMinkowskiDistanceTest.class +}) +public class DistancesTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/JaccardIndexTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/JaccardIndexTest.java new file mode 100644 index 0000000000000..7d7f75959bc1b --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/JaccardIndexTest.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.ignite.ml.math.distances; + +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** Test for {@code JaccardIndex}. */ +public class JaccardIndexTest { + /** Precision. */ + private static final double PRECISION = 0.0; + + /** */ + @Test + public void jaccardIndex() { + double expRes = 0.2; + double[] data2 = new double[] {2.0, 1.0, 0.0}; + Vector v1 = new DenseVector(new double[] {0.0, 0.0, 0.0}); + Vector v2 = new DenseVector(data2); + + DistanceMeasure distanceMeasure = new JaccardIndex(); + + assertEquals(expRes, distanceMeasure.compute(v1, data2), PRECISION); + assertEquals(expRes, distanceMeasure.compute(v1, v2), PRECISION); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/JensenShannonDistanceTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/JensenShannonDistanceTest.java new file mode 100644 index 0000000000000..1c94563575248 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/JensenShannonDistanceTest.java @@ -0,0 +1,113 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.ignite.ml.math.distances; + +import java.util.Arrays; +import java.util.Collection; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import static org.junit.Assert.assertEquals; + +/** + * Evaluate JensenShannonDistance in multiple test datasets + */ +@RunWith(Parameterized.class) +public class JensenShannonDistanceTest { + /** Precision. */ + private static final double PRECISION = 0.01; + + /** */ + @Parameterized.Parameters(name = "{0}") + public static Collection data() { + return Arrays.asList( + new TestData( + new double[] {1.0, 0.0, 0.0}, + new double[] {0.0, 1.0, 0.0}, + 2.0, + 1.0 + ), + new TestData( + new double[] {1.0, 0.0}, + new double[] {0.5, 0.5}, + Math.E, + 0.46 + ), + new TestData( + new double[] {1.0, 0.0, 0.0}, + new double[] {1.0, 0.5, 0.0}, + Math.E, + 0.36 + ) + ); + } + + /** */ + private final TestData testData; + + /** */ + public JensenShannonDistanceTest(TestData testData) { + this.testData = testData; + } + + /** */ + @Test + public void testJensenShannonDistance() { + DistanceMeasure distanceMeasure = new JensenShannonDistance(testData.base); + + assertEquals(testData.expRes, + distanceMeasure.compute(testData.vectorA, testData.vectorB), PRECISION); + assertEquals(testData.expRes, + distanceMeasure.compute(testData.vectorA, testData.vectorB), PRECISION); + } + + /** */ + private static class TestData { + /** */ + public final Vector vectorA; + + /** */ + public final Vector vectorB; + + /** */ + public final Double expRes; + + /** */ + public final Double base; + + /** */ + private TestData(double[] vectorA, double[] vectorB, Double base, Double expRes) { + this.vectorA = new DenseVector(vectorA); + this.vectorB = new DenseVector(vectorB); + this.base = base; + this.expRes = expRes; + } + + /** {@inheritDoc} */ + @Override public String toString() { + return String.format("d(%s,%s;%s) = %s", + Arrays.toString(vectorA.asArray()), + Arrays.toString(vectorB.asArray()), + base, + expRes + ); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/WeightedMinkowskiDistanceTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/WeightedMinkowskiDistanceTest.java new file mode 100644 index 0000000000000..256141121dbb8 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/distances/WeightedMinkowskiDistanceTest.java @@ -0,0 +1,121 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.ignite.ml.math.distances; + +import java.util.Arrays; +import java.util.Collection; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import static org.junit.Assert.assertEquals; + +/** + * Evaluate WeightedMinkowski in multiple test datasets + */ +@RunWith(Parameterized.class) +public class WeightedMinkowskiDistanceTest { + /** Precision. */ + private static final double PRECISION = 0.01; + + /** */ + @Parameterized.Parameters(name = "{0}") + public static Collection data() { + return Arrays.asList( + new TestData( + new double[] {1.0, 0.0, 0.0}, + new double[] {0.0, 1.0, 0.0}, + 1, + new double[] {2.0, 3.0, 4.0}, + 5.0 + ), + new TestData( + new double[] {1.0, 0.0, 0.0}, + new double[] {0.0, 1.0, 0.0}, + 2, + new double[] {2.0, 3.0, 4.0}, + 3.60 + ), + new TestData( + new double[] {1.0, 0.0, 0.0}, + new double[] {0.0, 1.0, 0.0}, + 3, + new double[] {2.0, 3.0, 4.0}, + 3.27 + ) + ); + } + + /** */ + private final TestData testData; + + /** */ + public WeightedMinkowskiDistanceTest(TestData testData) { + this.testData = testData; + } + + /** */ + @Test + public void testWeightedMinkowski() { + DistanceMeasure distanceMeasure = new WeightedMinkowskiDistance(testData.p, testData.weights); + + assertEquals(testData.expRes, + distanceMeasure.compute(testData.vectorA, testData.vectorB), PRECISION); + assertEquals(testData.expRes, + distanceMeasure.compute(testData.vectorA, testData.vectorB), PRECISION); + } + + /** */ + private static class TestData { + /** */ + public final Vector vectorA; + + /** */ + public final Vector vectorB; + + /** */ + public final Integer p; + + /** */ + public final double[] weights; + + /** */ + public final Double expRes; + + /** */ + private TestData(double[] vectorA, double[] vectorB, Integer p, double[] weights, double expRes) { + this.vectorA = new DenseVector(vectorA); + this.vectorB = new DenseVector(vectorB); + this.p = p; + this.weights = weights; + this.expRes = expRes; + } + + /** {@inheritDoc} */ + @Override public String toString() { + return String.format("d(%s,%s;%s,%s) = %s", + Arrays.toString(vectorA.asArray()), + Arrays.toString(vectorB.asArray()), + p, + Arrays.toString(weights), + expRes + ); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/isolve/lsqr/LSQROnHeapTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/isolve/lsqr/LSQROnHeapTest.java new file mode 100644 index 0000000000000..3fe1104205e51 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/isolve/lsqr/LSQROnHeapTest.java @@ -0,0 +1,154 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.isolve.lsqr; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.DatasetBuilder; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.dataset.primitive.builder.data.SimpleLabeledDatasetDataBuilder; +import org.apache.ignite.ml.math.functions.IgniteFunction; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.preprocessing.Preprocessor; +import org.apache.ignite.ml.preprocessing.developer.PatchedPreprocessor; +import org.apache.ignite.ml.structures.LabeledVector; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link LSQROnHeap}. + */ +public class LSQROnHeapTest extends TrainerTest { + /** Tests solving simple linear system. */ + @Test + public void testSolveLinearSystem() { + Map data = new HashMap<>(); + data.put(0, VectorUtils.of(3.0, 2.0, -1.0, 1.0)); + data.put(1, VectorUtils.of(2.0, -2.0, 4.0, -2.0)); + data.put(2, VectorUtils.of(-1.0, 0.5, -1.0, 0.0)); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + Vectorizer vectorizer = new DummyVectorizer() + .labeled(Vectorizer.LabelCoordinate.LAST); + + IgniteFunction, LabeledVector> func = + lv -> new LabeledVector<>(lv.features(), new double[]{lv.label()}); + + Preprocessor prerocessor = new PatchedPreprocessor<>(func, vectorizer); + + LSQROnHeap lsqr = new LSQROnHeap<>( + datasetBuilder, + TestUtils.testEnvBuilder(), + new SimpleLabeledDatasetDataBuilder<>(prerocessor), + TestUtils.testEnvBuilder().buildForTrainer() + ); + + LSQRResult res = lsqr.solve(0, 1e-12, 1e-12, 1e8, -1, false, null); + + assertEquals(3, res.getIterations()); + assertEquals(1, res.getIsstop()); + assertEquals(7.240617907140957E-14, res.getR1norm(), 0.0001); + assertEquals(7.240617907140957E-14, res.getR2norm(), 0.0001); + assertEquals(6.344288770224759, res.getAnorm(), 0.0001); + assertEquals(40.540617492419464, res.getAcond(), 0.0001); + assertEquals(3.4072322214704627E-13, res.getArnorm(), 0.0001); + assertEquals(3.000000000000001, res.getXnorm(), 0.0001); + assertArrayEquals(new double[]{0.0, 0.0, 0.0}, res.getVar(), 1e-6); + assertArrayEquals(new double[]{1, -2, -2}, res.getX(), 1e-6); + assertTrue(!res.toString().isEmpty()); + } + + /** Tests solving simple linear system with specified x0. 
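+ * For reference, the rows of this fixture encode the square system A*x = b with A = {{3, 2, -1}, {2, -2, 4}, {-1, 0.5, -1}} and b = (1, -2, 0) taken from the label column; its exact solution is x = (1, -2, -2), so the run seeded with x0 = (999, 999, 999) below is still expected to converge to that vector.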
*/ + @Test + public void testSolveLinearSystemWithX0() { + Map data = new HashMap<>(); + data.put(0, VectorUtils.of(3.0, 2.0, -1.0, 1.0)); + data.put(1, VectorUtils.of(2.0, -2.0, 4.0, -2.0)); + data.put(2, VectorUtils.of(-1.0, 0.5, -1.0, 0.0)); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + Vectorizer vectorizer = new DummyVectorizer() + .labeled(Vectorizer.LabelCoordinate.LAST); + + IgniteFunction, LabeledVector> func = + lv -> new LabeledVector<>(lv.features(), new double[]{lv.label()}); + + Preprocessor prerocessor = new PatchedPreprocessor<>(func, vectorizer); + + LSQROnHeap lsqr = new LSQROnHeap<>( + datasetBuilder, + TestUtils.testEnvBuilder(), + new SimpleLabeledDatasetDataBuilder<>(prerocessor), + TestUtils.testEnvBuilder().buildForTrainer() + ); + + LSQRResult res = lsqr.solve(0, 1e-12, 1e-12, 1e8, -1, false, + new double[] {999, 999, 999}); + + assertEquals(3, res.getIterations()); + + assertArrayEquals(new double[]{1, -2, -2}, res.getX(), 1e-6); + } + + /** Tests solving least squares problem. */ + @Test + public void testSolveLeastSquares() throws Exception { + Map data = new HashMap<>(); + data.put(0, VectorUtils.of(-1.0915526, 1.81983527, -0.91409478, 0.70890712, -24.55724107)); + data.put(1, VectorUtils.of(-0.61072904, 0.37545517, 0.21705352, 0.09516495, -26.57226867)); + data.put(2, VectorUtils.of(0.05485406, 0.88219898, -0.80584547, 0.94668307, 61.80919728)); + data.put(3, VectorUtils.of(-0.24835094, -0.34000053, -1.69984651, -1.45902635, -161.65525991)); + data.put(4, VectorUtils.of(0.63675392, 0.31675535, 0.38837437, -1.1221971, -14.46432611)); + data.put(5, VectorUtils.of(0.14194017, 2.18158997, -0.28397346, -0.62090588, -3.2122197)); + data.put(6, VectorUtils.of(-0.53487507, 1.4454797, 0.21570443, -0.54161422, -46.5469012)); + data.put(7, VectorUtils.of(-1.58812173, -0.73216803, -2.15670676, -1.03195988, -247.23559889)); + data.put(8, VectorUtils.of(0.20702671, 0.92864654, 0.32721202, -0.09047503, 31.61484949)); + data.put(9, VectorUtils.of(-0.37890345, -0.04846179, -0.84122753, -1.14667474, -124.92598583)); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, 1); + + Vectorizer vectorizer = new DummyVectorizer() + .labeled(Vectorizer.LabelCoordinate.LAST); + + IgniteFunction, LabeledVector> func = + lv -> new LabeledVector<>(lv.features(), new double[]{lv.label()}); + + Preprocessor prerocessor = new PatchedPreprocessor<>(func, vectorizer); + + try (LSQROnHeap lsqr = new LSQROnHeap<>( + datasetBuilder, + TestUtils.testEnvBuilder(), + new SimpleLabeledDatasetDataBuilder<>(prerocessor), TestUtils.testEnvBuilder().buildForTrainer())) { + LSQRResult res = lsqr.solve(0, 1e-12, 1e-12, 1e8, -1, false, null); + + assertEquals(8, res.getIterations()); + + assertArrayEquals(new double[]{72.26948107, 15.95144674, 24.07403921, 66.73038781}, res.getX(), 1e-6); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/MathTestConstants.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/MathTestConstants.java new file mode 100644 index 0000000000000..57da5a7e41be4 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/MathTestConstants.java @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.primitives; + +/** + * Collect constants for org.apache.ignite.math tests + */ +public interface MathTestConstants { + /** */ + public double SECOND_ARG = 1d; + + /** + * We assume that we will check calculation precision in other tests. + */ + public double EXP_DELTA = 0.1d; + + /** */ + public String UNEXPECTED_VAL = "Unexpected value."; + + /** */ + public String NULL_GUID = "Null GUID."; + + /** */ + public String UNEXPECTED_GUID_VAL = "Unexpected GUID value."; + + /** */ + public String EMPTY_GUID = "Empty GUID."; + + /** */ + public String VALUES_SHOULD_BE_NOT_EQUALS = "Values should be not equals."; + + /** */ + public String NULL_VAL = "Null value."; + + /** */ + public String NULL_VALUES = "Null values."; + + /** */ + public String NOT_NULL_VAL = "Not null value."; + + /** */ + public double TEST_VAL = 1d; + + /** */ + public String VAL_NOT_EQUALS = "Values not equals."; + + /** */ + public String NO_NEXT_ELEMENT = "No next element."; + + /** */ + public int STORAGE_SIZE = 100; + + /** */ + public String WRONG_ATTRIBUTE_VAL = "Wrong attribute value."; + + /** */ + public String NULL_DATA_ELEMENT = "Null data element."; + + /** */ + public String WRONG_DATA_ELEMENT = "Wrong data element."; + + /** */ + public double NIL_DELTA = 0d; + + /** */ + public String NULL_DATA_STORAGE = "Null data storage."; + + /** */ + public String WRONG_DATA_SIZE = "Wrong data size."; + + /** */ + public String UNEXPECTED_DATA_VAL = "Unexpected data value."; +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/DenseMatrixConstructorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/DenseMatrixConstructorTest.java new file mode 100644 index 0000000000000..fc4ca166fa351 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/DenseMatrixConstructorTest.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.math.primitives.matrix; + +import java.util.function.Supplier; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.fail; + +/** */ +public class DenseMatrixConstructorTest { + /** */ + @Test + public void invalidArgsTest() { + verifyAssertionError(() -> new DenseMatrix(0, 1), "invalid row parameter"); + + verifyAssertionError(() -> new DenseMatrix(1, 0), "invalid col parameter"); + + //noinspection ConstantConditions + verifyAssertionError(() -> new DenseMatrix(null), "null matrix parameter"); + + verifyAssertionError(() -> new DenseMatrix(new double[][] {null, new double[1]}), + "null row in matrix"); + } + + /** */ + @Test + public void basicTest() { + assertEquals("Expected number of rows, int parameters.", 1, + new DenseMatrix(1, 2).rowSize()); + + assertEquals("Expected number of rows, double[][] parameter.", 1, + new DenseMatrix(new double[][] {new double[2]}).rowSize()); + + assertEquals("Expected number of cols, int parameters.", 1, + new DenseMatrix(2, 1).columnSize()); + + assertEquals("Expected number of cols, double[][] parameter.", 1, + new DenseMatrix(new double[][] {new double[1], new double[1]}).columnSize()); + } + + /** */ + static void verifyAssertionError(Supplier ctor, String desc) { + try { + assertNotNull("Unexpected null matrix in " + desc, ctor.get()); + } + catch (AssertionError ae) { + return; + } + + fail("Expected error not caught in " + desc); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/LUDecompositionTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/LUDecompositionTest.java new file mode 100644 index 0000000000000..822e5f825de77 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/LUDecompositionTest.java @@ -0,0 +1,254 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.primitives.matrix; + +import org.apache.ignite.ml.math.exceptions.math.CardinalityException; +import org.apache.ignite.ml.math.exceptions.math.SingularMatrixException; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.math.util.MatrixUtil; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link LUDecomposition}. 
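+ * + * <p>For orientation: the fixture built in {@code setUp()} is chosen so that P * A = L * U, with L unit lower triangular, U upper triangular and the pivot array giving the 1-based row order (3, 4, 2, 1). A quick by-hand cross-check looks like the following (a sketch only, assuming the {@code Matrix#times(Matrix)} operation of these matrices): + * <pre> + *     LUDecomposition dec = new LUDecomposition(testMatrix); + *     Matrix lhs = dec.getP().times(testMatrix); // P * A + *     Matrix rhs = dec.getL().times(dec.getU()); // L * U, expected to match lhs element-wise + * </pre>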
+ */ +public class LUDecompositionTest { + /** */ + private Matrix testL; + + /** */ + private Matrix testU; + + /** */ + private Matrix testP; + + /** */ + private Matrix testMatrix; + + /** */ + private int[] rawPivot; + + /** */ + @Before + public void setUp() { + double[][] rawMatrix = new double[][] { + {2.0d, 1.0d, 1.0d, 0.0d}, + {4.0d, 3.0d, 3.0d, 1.0d}, + {8.0d, 7.0d, 9.0d, 5.0d}, + {6.0d, 7.0d, 9.0d, 8.0d}}; + double[][] rawL = { + {1.0d, 0.0d, 0.0d, 0.0d}, + {3.0d / 4.0d, 1.0d, 0.0d, 0.0d}, + {1.0d / 2.0d, -2.0d / 7.0d, 1.0d, 0.0d}, + {1.0d / 4.0d, -3.0d / 7.0d, 1.0d / 3.0d, 1.0d}}; + double[][] rawU = { + {8.0d, 7.0d, 9.0d, 5.0d}, + {0.0d, 7.0d / 4.0d, 9.0d / 4.0d, 17.0d / 4.0d}, + {0.0d, 0.0d, -6.0d / 7.0d, -2.0d / 7.0d}, + {0.0d, 0.0d, 0.0d, 2.0d / 3.0d}}; + double[][] rawP = new double[][] { + {0, 0, 1.0d, 0}, + {0, 0, 0, 1.0d}, + {0, 1.0d, 0, 0}, + {1.0d, 0, 0, 0}}; + + rawPivot = new int[] {3, 4, 2, 1}; + + testMatrix = new DenseMatrix(rawMatrix); + testL = new DenseMatrix(rawL); + testU = new DenseMatrix(rawU); + testP = new DenseMatrix(rawP); + } + + /** */ + @Test + public void getL() throws Exception { + Matrix luDecompositionL = new LUDecomposition(testMatrix).getL(); + + assertEquals("Unexpected row size.", testL.rowSize(), luDecompositionL.rowSize()); + assertEquals("Unexpected column size.", testL.columnSize(), luDecompositionL.columnSize()); + + for (int i = 0; i < testL.rowSize(); i++) + for (int j = 0; j < testL.columnSize(); j++) + assertEquals("Unexpected value at (" + i + "," + j + ").", + testL.getX(i, j), luDecompositionL.getX(i, j), 0.0000001d); + + luDecompositionL.destroy(); + } + + /** */ + @Test + public void getU() throws Exception { + Matrix luDecompositionU = new LUDecomposition(testMatrix).getU(); + + assertEquals("Unexpected row size.", testU.rowSize(), luDecompositionU.rowSize()); + assertEquals("Unexpected column size.", testU.columnSize(), luDecompositionU.columnSize()); + + for (int i = 0; i < testU.rowSize(); i++) + for (int j = 0; j < testU.columnSize(); j++) + assertEquals("Unexpected value at (" + i + "," + j + ").", + testU.getX(i, j), luDecompositionU.getX(i, j), 0.0000001d); + + luDecompositionU.destroy(); + } + + /** */ + @Test + public void getP() throws Exception { + Matrix luDecompositionP = new LUDecomposition(testMatrix).getP(); + + assertEquals("Unexpected row size.", testP.rowSize(), luDecompositionP.rowSize()); + assertEquals("Unexpected column size.", testP.columnSize(), luDecompositionP.columnSize()); + + for (int i = 0; i < testP.rowSize(); i++) + for (int j = 0; j < testP.columnSize(); j++) + assertEquals("Unexpected value at (" + i + "," + j + ").", + testP.getX(i, j), luDecompositionP.getX(i, j), 0.0000001d); + + luDecompositionP.destroy(); + } + + /** */ + @Test + public void getPivot() throws Exception { + Vector pivot = new LUDecomposition(testMatrix).getPivot(); + + assertEquals("Unexpected pivot size.", rawPivot.length, pivot.size()); + + for (int i = 0; i < testU.rowSize(); i++) + assertEquals("Unexpected value at " + i, rawPivot[i], (int)pivot.get(i) + 1); + } + + /** + * Test for {@link MatrixUtil} features (more specifically, we test matrix which does not have a native like/copy + * methods support). 
+ */ + @Test + public void matrixUtilTest() { + LUDecomposition dec = new LUDecomposition(testMatrix); + Matrix luDecompositionL = dec.getL(); + + assertEquals("Unexpected L row size.", testL.rowSize(), luDecompositionL.rowSize()); + assertEquals("Unexpected L column size.", testL.columnSize(), luDecompositionL.columnSize()); + + for (int i = 0; i < testL.rowSize(); i++) + for (int j = 0; j < testL.columnSize(); j++) + assertEquals("Unexpected L value at (" + i + "," + j + ").", + testL.getX(i, j), luDecompositionL.getX(i, j), 0.0000001d); + + Matrix luDecompositionU = dec.getU(); + + assertEquals("Unexpected U row size.", testU.rowSize(), luDecompositionU.rowSize()); + assertEquals("Unexpected U column size.", testU.columnSize(), luDecompositionU.columnSize()); + + for (int i = 0; i < testU.rowSize(); i++) + for (int j = 0; j < testU.columnSize(); j++) + assertEquals("Unexpected U value at (" + i + "," + j + ").", + testU.getX(i, j), luDecompositionU.getX(i, j), 0.0000001d); + + Matrix luDecompositionP = dec.getP(); + + assertEquals("Unexpected P row size.", testP.rowSize(), luDecompositionP.rowSize()); + assertEquals("Unexpected P column size.", testP.columnSize(), luDecompositionP.columnSize()); + + for (int i = 0; i < testP.rowSize(); i++) + for (int j = 0; j < testP.columnSize(); j++) + assertEquals("Unexpected P value at (" + i + "," + j + ").", + testP.getX(i, j), luDecompositionP.getX(i, j), 0.0000001d); + + dec.close(); + } + + /** */ + @Test + public void singularDeterminant() throws Exception { + assertEquals("Unexpected determinant for singular matrix decomposition.", + 0d, new LUDecomposition(new DenseMatrix(2, 2)).determinant(), 0d); + } + + /** */ + @Test(expected = CardinalityException.class) + public void solveVecWrongSize() throws Exception { + new LUDecomposition(testMatrix).solve(new DenseVector(testMatrix.rowSize() + 1)); + } + + /** */ + @Test(expected = SingularMatrixException.class) + public void solveVecSingularMatrix() throws Exception { + new LUDecomposition(new DenseMatrix(testMatrix.rowSize(), testMatrix.rowSize())) + .solve(new DenseVector(testMatrix.rowSize())); + } + + /** */ + @Test + public void solveVec() throws Exception { + Vector sol = new LUDecomposition(testMatrix) + .solve(new DenseVector(testMatrix.rowSize())); + + assertEquals("Wrong solution vector size.", testMatrix.rowSize(), sol.size()); + + for (int i = 0; i < sol.size(); i++) + assertEquals("Unexpected value at index " + i, 0d, sol.getX(i), 0.0000001d); + } + + /** */ + @Test(expected = CardinalityException.class) + public void solveMtxWrongSize() throws Exception { + new LUDecomposition(testMatrix).solve( + new DenseMatrix(testMatrix.rowSize() + 1, testMatrix.rowSize())); + } + + /** */ + @Test(expected = SingularMatrixException.class) + public void solveMtxSingularMatrix() throws Exception { + new LUDecomposition(new DenseMatrix(testMatrix.rowSize(), testMatrix.rowSize())) + .solve(new DenseMatrix(testMatrix.rowSize(), testMatrix.rowSize())); + } + + /** */ + @Test + public void solveMtx() throws Exception { + Matrix sol = new LUDecomposition(testMatrix) + .solve(new DenseMatrix(testMatrix.rowSize(), testMatrix.rowSize())); + + assertEquals("Wrong solution matrix row size.", testMatrix.rowSize(), sol.rowSize()); + + assertEquals("Wrong solution matrix column size.", testMatrix.rowSize(), sol.columnSize()); + + for (int row = 0; row < sol.rowSize(); row++) + for (int col = 0; col < sol.columnSize(); col++) + assertEquals("Unexpected P value at (" + row + "," + col + ").", + 0d, 
sol.getX(row, col), 0.0000001d); + } + + /** */ + @Test(expected = AssertionError.class) + public void nullMatrixTest() { + new LUDecomposition(null); + } + + /** */ + @Test(expected = CardinalityException.class) + public void nonSquareMatrixTest() { + new LUDecomposition(new DenseMatrix(2, 3)); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixArrayStorageTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixArrayStorageTest.java new file mode 100644 index 0000000000000..99e29caf4fd32 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixArrayStorageTest.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.primitives.matrix; + +import org.apache.ignite.ml.math.primitives.MathTestConstants; +import org.apache.ignite.ml.math.primitives.matrix.storage.DenseMatrixStorage; +import org.junit.Test; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +/** + * Unit tests for {@link DenseMatrixStorage}. + */ +public class MatrixArrayStorageTest extends MatrixBaseStorageTest { + /** {@inheritDoc} */ + @Override public void setUp() { + storage = new DenseMatrixStorage(MathTestConstants.STORAGE_SIZE, MathTestConstants.STORAGE_SIZE); + } + + /** */ + @Test + public void isDense() throws Exception { + assertTrue(MathTestConstants.UNEXPECTED_VAL, storage.isDense()); + } + + /** */ + @Test + public void isArrayBased() throws Exception { + assertTrue(MathTestConstants.UNEXPECTED_VAL, storage.isArrayBased()); + } + + /** */ + @Test + public void data() throws Exception { + double[] data = storage.data(); + assertNotNull(MathTestConstants.NULL_VAL, data); + assertTrue(MathTestConstants.UNEXPECTED_VAL, data.length == MathTestConstants.STORAGE_SIZE * + MathTestConstants.STORAGE_SIZE); + } + +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixAttributeTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixAttributeTest.java new file mode 100644 index 0000000000000..247a024e8095e --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixAttributeTest.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.primitives.matrix; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.matrix.impl.SparseMatrix; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Attribute tests for matrices. + */ +public class MatrixAttributeTest { + /** */ + private final List attrCfgs = Arrays.asList( + new AttrCfg("isDense", Matrix::isDense, + DenseMatrix.class), + new AttrCfg("isArrayBased", Matrix::isArrayBased, DenseMatrix.class) + ); + + /** */ + private final List specFixture = Arrays.asList( + new Specification(new DenseMatrix(1, 1)), + new Specification(new SparseMatrix(1, 1)) + ); + + /** */ + @Test + public void isDenseTest() { + assertAttribute("isDense"); + } + + /** */ + @Test + public void isArrayBasedTest() { + assertAttribute("isArrayBased"); + } + + /** */ + private void assertAttribute(String name) { + final MatrixAttributeTest.AttrCfg attr = attrCfg(name); + + for (MatrixAttributeTest.Specification spec : specFixture) + spec.verify(attr); + } + + /** */ + private MatrixAttributeTest.AttrCfg attrCfg(String name) { + for (MatrixAttributeTest.AttrCfg attr : attrCfgs) + if (attr.name.equals(name)) + return attr; + + throw new IllegalArgumentException("Undefined attribute " + name); + } + + /** See http://en.wikipedia.org/wiki/Specification_pattern */ + private static class Specification { + /** */ + private final Matrix m; + + /** */ + private final Class underlyingType; + + /** */ + private final List attrsFromUnderlying; + + /** */ + final String desc; + + /** */ + Specification(Matrix m, Class underlyingType, String... attrsFromUnderlying) { + this.m = m; + this.underlyingType = underlyingType; + this.attrsFromUnderlying = Arrays.asList(attrsFromUnderlying); + final Class clazz = m.getClass(); + desc = clazz.getSimpleName() + (clazz.equals(underlyingType) + ? "" : " (underlying type " + underlyingType.getSimpleName() + ")"); + } + + /** */ + Specification(Matrix m) { + this(m, m.getClass()); + } + + /** */ + void verify(MatrixAttributeTest.AttrCfg attr) { + final boolean obtained = attr.obtain.apply(m); + + final Class typeToCheck + = attrsFromUnderlying.contains(attr.name) ? underlyingType : m.getClass(); + + final boolean exp = attr.trueInTypes.contains(typeToCheck); + + assertEquals("Unexpected " + attr.name + " value for " + desc, exp, obtained); + } + } + + /** */ + private static class AttrCfg { + /** */ + final String name; + + /** */ + final Function obtain; + + /** */ + final List trueInTypes; + + /** */ + AttrCfg(String name, Function obtain, Class... 
trueInTypes) { + this.name = name; + this.obtain = obtain; + this.trueInTypes = Arrays.asList(trueInTypes); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixBaseStorageTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixBaseStorageTest.java new file mode 100644 index 0000000000000..983ea93131bcc --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixBaseStorageTest.java @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.primitives.matrix; + +import org.apache.ignite.ml.math.ExternalizeTest; +import org.apache.ignite.ml.math.primitives.MathTestConstants; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Abstract class with base tests for each matrix storage. 
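+ * + * <p>Concrete subclasses only assign the protected {@code storage} field in {@code setUp()}, as {@code MatrixArrayStorageTest} above does with a {@code DenseMatrixStorage}; the get/set round trip, row/column size and externalization checks defined here then run unchanged against that storage.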
+ */ +public abstract class MatrixBaseStorageTest extends ExternalizeTest { + /** */ + protected T storage; + + /** */ + @Before + public abstract void setUp(); + + /** */ + @After + public void tearDown() throws Exception { + storage.destroy(); + } + + /** */ + @Test + public void getSet() throws Exception { + int rows = MathTestConstants.STORAGE_SIZE; + int cols = MathTestConstants.STORAGE_SIZE; + + for (int i = 0; i < rows; i++) { + for (int j = 0; j < cols; j++) { + double data = Math.random(); + + storage.set(i, j, data); + + Assert.assertEquals(MathTestConstants.VAL_NOT_EQUALS, storage.get(i, j), data, MathTestConstants.NIL_DELTA); + } + } + } + + /** */ + @Test + public void columnSize() throws Exception { + assertEquals(MathTestConstants.VAL_NOT_EQUALS, storage.columnSize(), MathTestConstants.STORAGE_SIZE); + } + + /** */ + @Test + public void rowSize() throws Exception { + assertEquals(MathTestConstants.VAL_NOT_EQUALS, storage.rowSize(), MathTestConstants.STORAGE_SIZE); + } + + /** */ + @Override public void externalizeTest() { + fillMatrix(); + externalizeTest(storage); + } + + /** */ + protected void fillMatrix() { + for (int i = 0; i < storage.rowSize(); i++) { + for (int j = 0; j < storage.columnSize(); j++) + storage.set(i, j, Math.random()); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixStorageImplementationTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixStorageImplementationTest.java new file mode 100644 index 0000000000000..91fef7f8e42aa --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixStorageImplementationTest.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.primitives.matrix; + +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; +import org.apache.ignite.ml.math.ExternalizeTest; +import org.junit.Test; + +import static org.junit.Assert.assertTrue; + +/** + * Unit tests for {@link MatrixStorage} implementations. + * + * TODO: IGNITE-5723, add attribute tests. + */ +public class MatrixStorageImplementationTest extends ExternalizeTest { + /** + * The columnSize() and the rowSize() test. 
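+ * Each sample storage is created with known row and column counts that are captured via the params consumer, and the assertion then requires {@code rowSize()} and {@code columnSize()} to report exactly those captured values.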
+ */ + @Test + public void sizeTest() { + final AtomicReference expRowSize = new AtomicReference<>(0); + final AtomicReference expColSize = new AtomicReference<>(0); + + consumeSampleStorages( + (x, y) -> { + expRowSize.set(x); + expColSize.set(y); + }, + (ms, desc) -> assertTrue( + "Expected size for " + desc, + expColSize.get().equals(ms.columnSize()) && expRowSize.get().equals(ms.rowSize()) + ) + ); + } + + /** */ + @Test + public void getSetTest() { + consumeSampleStorages(null, (ms, desc) -> { + for (int i = 0; i < ms.rowSize(); i++) { + for (int j = 0; j < ms.columnSize(); j++) { + double random = Math.random(); + ms.set(i, j, random); + assertTrue("Unexpected value for " + desc + " x:" + i + ", y:" + j, Double.compare(random, ms.get(i, j)) == 0); + } + } + }); + } + + /** */ + @Override public void externalizeTest() { + consumeSampleStorages(null, (ms, desc) -> externalizeTest(ms)); + } + + /** */ + private void consumeSampleStorages(BiConsumer paramsConsumer, + BiConsumer consumer) { + new MatrixStorageFixtures().consumeSampleStorages(paramsConsumer, consumer); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixViewConstructorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixViewConstructorTest.java new file mode 100644 index 0000000000000..6633f746fc3f2 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/MatrixViewConstructorTest.java @@ -0,0 +1,112 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.math.primitives.matrix; + +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.matrix.impl.ViewMatrix; +import org.apache.ignite.ml.math.primitives.matrix.storage.ViewMatrixStorage; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +/** */ +public class MatrixViewConstructorTest { + /** */ + @Test + public void invalidArgsTest() { + Matrix m = new DenseMatrix(1, 1); + + DenseMatrixConstructorTest.verifyAssertionError(() -> new ViewMatrix((Matrix)null, 0, 0, 1, 1), + "Null parent matrix."); + + DenseMatrixConstructorTest.verifyAssertionError(() -> new ViewMatrix(m, -1, 0, 1, 1), + "Invalid row offset."); + + DenseMatrixConstructorTest.verifyAssertionError(() -> new ViewMatrix(m, 0, -1, 1, 1), + "Invalid col offset."); + + DenseMatrixConstructorTest.verifyAssertionError(() -> new ViewMatrix(m, 0, 0, 0, 1), + "Invalid rows."); + + DenseMatrixConstructorTest.verifyAssertionError(() -> new ViewMatrix(m, 0, 0, 1, 0), + "Invalid cols."); + } + + /** */ + @Test + public void basicTest() { + for (Matrix m : new Matrix[] { + new DenseMatrix(3, 3), + new DenseMatrix(3, 4), new DenseMatrix(4, 3)}) + for (int rowOff : new int[] {0, 1}) + for (int colOff : new int[] {0, 1}) + for (int rows : new int[] {1, 2}) + for (int cols : new int[] {1, 2}) + basicTest(m, rowOff, colOff, rows, cols); + } + + /** */ + private void basicTest(Matrix parent, int rowOff, int colOff, int rows, int cols) { + for (int row = 0; row < parent.rowSize(); row++) + for (int col = 0; col < parent.columnSize(); col++) + parent.set(row, col, row * parent.columnSize() + col + 1); + + Matrix view = new ViewMatrix(parent, rowOff, colOff, rows, cols); + + assertEquals("Rows in view.", rows, view.rowSize()); + assertEquals("Cols in view.", cols, view.columnSize()); + + for (int row = 0; row < rows; row++) + for (int col = 0; col < cols; col++) + assertEquals("Unexpected value at " + row + "x" + col, + parent.get(row + rowOff, col + colOff), view.get(row, col), 0d); + + for (int row = 0; row < rows; row++) + for (int col = 0; col < cols; col++) + view.set(row, col, 0d); + + for (int row = 0; row < rows; row++) + for (int col = 0; col < cols; col++) + assertEquals("Unexpected value set at " + row + "x" + col, + 0d, parent.get(row + rowOff, col + colOff), 0d); + } + + /** */ + @Test + public void attributeTest() { + for (Matrix m : new Matrix[] { + new DenseMatrix(3, 3), + new DenseMatrix(3, 4), new DenseMatrix(4, 3)}) { + ViewMatrix matrixView = new ViewMatrix(m, 0, 0, m.rowSize(), m.columnSize()); + + ViewMatrixStorage delegateStorage = (ViewMatrixStorage)matrixView.getStorage(); + + assertEquals(m.rowSize(), matrixView.rowSize()); + assertEquals(m.columnSize(), matrixView.columnSize()); + + assertEquals(m.rowSize(), (delegateStorage).rowsLength()); + assertEquals(m.columnSize(), (delegateStorage).columnsLength()); + + assertEquals(m.isDense(), delegateStorage.isDense()); + assertEquals(m.isArrayBased(), delegateStorage.isArrayBased()); + + assertArrayEquals(m.getStorage().data(), delegateStorage.data(), 0.0); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/SparseMatrixConstructorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/SparseMatrixConstructorTest.java new file mode 100644 index 0000000000000..f66fad0fc8e22 --- /dev/null +++ 
b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/matrix/SparseMatrixConstructorTest.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.primitives.matrix; + +import org.apache.ignite.ml.math.primitives.matrix.impl.SparseMatrix; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +/** */ +public class SparseMatrixConstructorTest { + /** */ + @Test + public void invalidArgsTest() { + DenseMatrixConstructorTest.verifyAssertionError(() -> new SparseMatrix(0, 1), + "invalid row parameter"); + + DenseMatrixConstructorTest.verifyAssertionError(() -> new SparseMatrix(1, 0), + "invalid col parameter"); + } + + /** */ + @Test + public void basicTest() { + assertEquals("Expected number of rows.", 1, + new SparseMatrix(1, 2).rowSize()); + + assertEquals("Expected number of cols, int parameters.", 1, + new SparseMatrix(2, 1).columnSize()); + + SparseMatrix m = new SparseMatrix(1, 1); + //noinspection EqualsWithItself + assertTrue("Matrix is expected to be equal to self.", m.equals(m)); + assertFalse("Matrix is expected to be not equal to null.", m.equals(null)); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/DelegatingVectorConstructorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/DelegatingVectorConstructorTest.java new file mode 100644 index 0000000000000..fbe6db888386a --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/DelegatingVectorConstructorTest.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.math.primitives.vector; + +import org.apache.ignite.ml.math.primitives.vector.impl.DelegatingVector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** */ +public class DelegatingVectorConstructorTest { + /** */ + private static final int IMPOSSIBLE_SIZE = -1; + + /** */ + @Test + public void basicTest() { + final Vector parent = new DenseVector(new double[] {0, 1}); + + final DelegatingVector delegate = new DelegatingVector(parent); + + final int size = parent.size(); + + assertEquals("Delegate size differs from expected.", size, delegate.size()); + + assertEquals("Delegate vector differs from expected.", parent, delegate.getVector()); + + for (int idx = 0; idx < size; idx++) + assertDelegate(parent, delegate, idx); + } + + /** */ + private void assertDelegate(Vector parent, Vector delegate, int idx) { + assertValue(parent, delegate, idx); + + parent.set(idx, parent.get(idx) + 1); + + assertValue(parent, delegate, idx); + + delegate.set(idx, delegate.get(idx) + 2); + + assertValue(parent, delegate, idx); + } + + /** */ + private void assertValue(Vector parent, Vector delegate, int idx) { + assertEquals("Unexpected value at index " + idx, parent.get(idx), delegate.get(idx), 0d); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/MatrixVectorViewTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/MatrixVectorViewTest.java new file mode 100644 index 0000000000000..7b407c04ac1de --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/MatrixVectorViewTest.java @@ -0,0 +1,218 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.primitives.vector; + +import org.apache.ignite.ml.math.exceptions.math.IndexException; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.impl.VectorizedViewMatrix; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link VectorizedViewMatrix}. 
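+ * + * <p>The view walks its parent matrix starting at (row, col) with the given strides, so view element {@code idx} maps to parent cell (row + idx * rowStride, col + idx * colStride); {@code assertMatrixVectorView} below relies on exactly that mapping. For example (a sketch reusing the constructor exercised in these tests): + * <pre> + *     Vector diagonal = new VectorizedViewMatrix(parent, 0, 0, 1, 1); // element idx reads parent.get(idx, idx) + * </pre>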
+ */ +public class MatrixVectorViewTest { + /** */ + private static final String UNEXPECTED_VALUE = "Unexpected value"; + + /** */ + private static final int SMALL_SIZE = 3; + + /** */ + private static final int IMPOSSIBLE_SIZE = -1; + + /** */ + private Matrix parent; + + /** */ + @Before + public void setup() { + parent = newMatrix(SMALL_SIZE, SMALL_SIZE); + } + + /** */ + @Test + public void testDiagonal() { + Vector vector = parent.viewDiagonal(); + + for (int i = 0; i < SMALL_SIZE; i++) + assertView(i, i, vector, i); + } + + /** */ + @Test + public void testRow() { + for (int i = 0; i < SMALL_SIZE; i++) { + Vector viewRow = parent.viewRow(i); + + for (int j = 0; j < SMALL_SIZE; j++) + assertView(i, j, viewRow, j); + } + } + + /** */ + @Test + public void testCols() { + for (int i = 0; i < SMALL_SIZE; i++) { + Vector viewCol = parent.viewColumn(i); + + for (int j = 0; j < SMALL_SIZE; j++) + assertView(j, i, viewCol, j); + } + } + + /** */ + @Test + public void basicTest() { + for (int rowSize : new int[] {1, 2, 3, 4}) + for (int colSize : new int[] {1, 2, 3, 4}) + for (int row = 0; row < rowSize; row++) + for (int col = 0; col < colSize; col++) + for (int rowStride = 0; rowStride < rowSize; rowStride++) + for (int colStride = 0; colStride < colSize; colStride++) + if (rowStride != 0 || colStride != 0) + assertMatrixVectorView(newMatrix(rowSize, colSize), row, col, rowStride, colStride); + } + + /** */ + @Test(expected = AssertionError.class) + public void parentNullTest() { + assertEquals(IMPOSSIBLE_SIZE, + new VectorizedViewMatrix(null, 1, 1, 1, 1).size()); + } + + /** */ + @Test(expected = IndexException.class) + public void rowNegativeTest() { + assertEquals(IMPOSSIBLE_SIZE, + new VectorizedViewMatrix(parent, -1, 1, 1, 1).size()); + } + + /** */ + @Test(expected = IndexException.class) + public void colNegativeTest() { + assertEquals(IMPOSSIBLE_SIZE, + new VectorizedViewMatrix(parent, 1, -1, 1, 1).size()); + } + + /** */ + @Test(expected = IndexException.class) + public void rowTooLargeTest() { + assertEquals(IMPOSSIBLE_SIZE, + new VectorizedViewMatrix(parent, parent.rowSize() + 1, 1, 1, 1).size()); + } + + /** */ + @Test(expected = IndexException.class) + public void colTooLargeTest() { + assertEquals(IMPOSSIBLE_SIZE, + new VectorizedViewMatrix(parent, 1, parent.columnSize() + 1, 1, 1).size()); + } + + /** */ + @Test(expected = AssertionError.class) + public void rowStrideNegativeTest() { + assertEquals(IMPOSSIBLE_SIZE, + new VectorizedViewMatrix(parent, 1, 1, -1, 1).size()); + } + + /** */ + @Test(expected = AssertionError.class) + public void colStrideNegativeTest() { + assertEquals(IMPOSSIBLE_SIZE, + new VectorizedViewMatrix(parent, 1, 1, 1, -1).size()); + } + + /** */ + @Test(expected = AssertionError.class) + public void rowStrideTooLargeTest() { + assertEquals(IMPOSSIBLE_SIZE, + new VectorizedViewMatrix(parent, 1, 1, parent.rowSize() + 1, 1).size()); + } + + /** */ + @Test(expected = AssertionError.class) + public void colStrideTooLargeTest() { + assertEquals(IMPOSSIBLE_SIZE, + new VectorizedViewMatrix(parent, 1, 1, 1, parent.columnSize() + 1).size()); + } + + /** */ + @Test(expected = AssertionError.class) + public void bothStridesZeroTest() { + assertEquals(IMPOSSIBLE_SIZE, + new VectorizedViewMatrix(parent, 1, 1, 0, 0).size()); + } + + /** */ + private void assertMatrixVectorView(Matrix parent, int row, int col, int rowStride, int colStride) { + VectorizedViewMatrix view = new VectorizedViewMatrix(parent, row, col, rowStride, colStride); + + String desc = "parent [" + 
parent.rowSize() + "x" + parent.columnSize() + "], view [" + + row + "x" + col + "], strides [" + rowStride + ", " + colStride + "]"; + + final int size = view.size(); + + final int sizeByRows = rowStride == 0 ? IMPOSSIBLE_SIZE : (parent.rowSize() - row) / rowStride; + final int sizeByCols = colStride == 0 ? IMPOSSIBLE_SIZE : (parent.columnSize() - col) / colStride; + + assertTrue("Size " + size + " differs from expected for " + desc, + size == sizeByRows || size == sizeByCols); + + for (int idx = 0; idx < size; idx++) { + final int rowIdx = row + idx * rowStride; + final int colIdx = col + idx * colStride; + + assertEquals(UNEXPECTED_VALUE + " at view index " + idx + desc, + parent.get(rowIdx, colIdx), view.get(idx), 0d); + } + } + + /** */ + private Matrix newMatrix(int rowSize, int colSize) { + Matrix res = new DenseMatrix(rowSize, colSize); + + for (int i = 0; i < res.rowSize(); i++) + for (int j = 0; j < res.columnSize(); j++) + res.set(i, j, i * res.rowSize() + j); + + return res; + } + + /** */ + private void assertView(int row, int col, Vector view, int viewIdx) { + assertValue(row, col, view, viewIdx); + + parent.set(row, col, parent.get(row, col) + 1); + + assertValue(row, col, view, viewIdx); + + view.set(viewIdx, view.get(viewIdx) + 2); + + assertValue(row, col, view, viewIdx); + } + + /** */ + private void assertValue(int row, int col, Vector view, int viewIdx) { + assertEquals(UNEXPECTED_VALUE + " at row " + row + " col " + col, parent.get(row, col), view.get(viewIdx), 0d); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/SparseVectorConstructorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/SparseVectorConstructorTest.java new file mode 100644 index 0000000000000..a626b7601aa8e --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/SparseVectorConstructorTest.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.math.primitives.vector; + +import org.apache.ignite.ml.math.primitives.vector.impl.SparseVector; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +/** */ +public class SparseVectorConstructorTest { + /** */ + private static final int IMPOSSIBLE_SIZE = -1; + + /** */ + @Test(expected = AssertionError.class) + public void negativeSizeTest() { + assertEquals("Negative size.", IMPOSSIBLE_SIZE, + new SparseVector(-1).size()); + } + + /** */ + @Test(expected = AssertionError.class) + public void zeroSizeTest() { + assertEquals("0 size.", IMPOSSIBLE_SIZE, + new SparseVector(0).size()); + } + + /** */ + @Test + public void primitiveTest() { + assertEquals("1 size, random access.", 1, + new SparseVector(1).size()); + } + + /** */ + @Test + public void noParamsCtorTest() { + assertNotNull(new SparseVector().nonZeroSpliterator()); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorBaseStorageTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorBaseStorageTest.java new file mode 100644 index 0000000000000..0285b485d534c --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorBaseStorageTest.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.primitives.vector; + +import org.apache.ignite.ml.math.ExternalizeTest; +import org.apache.ignite.ml.math.primitives.MathTestConstants; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Abstract class with base tests for each vector storage. 
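+ * + * <p>As with the matrix storage tests, concrete subclasses only assign the protected {@code storage} field in {@code setUp()}; the element get/set, {@code size()} and externalization checks below then exercise that concrete vector storage.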
+ */ +public abstract class VectorBaseStorageTest extends ExternalizeTest { + /** */ + protected T storage; + + /** */ + @Before + public abstract void setUp(); + + /** */ + @After + public void tearDown() throws Exception { + storage.destroy(); + } + + /** */ + @Test + public void getSet() throws Exception { + for (int i = 0; i < MathTestConstants.STORAGE_SIZE; i++) { + double random = Math.random(); + + storage.set(i, random); + + assertEquals(MathTestConstants.WRONG_DATA_ELEMENT, storage.get(i), random, MathTestConstants.NIL_DELTA); + } + } + + /** */ + @Test + public void size() { + assertTrue(MathTestConstants.UNEXPECTED_VAL, storage.size() == MathTestConstants.STORAGE_SIZE); + } + + /** */ + @Override public void externalizeTest() { + externalizeTest(storage); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorImplementationsTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorImplementationsTest.java new file mode 100644 index 0000000000000..9917ffd663c20 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorImplementationsTest.java @@ -0,0 +1,850 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.primitives.vector; + +import java.util.Arrays; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; +import java.util.function.BiFunction; +import java.util.function.Consumer; +import java.util.function.Function; +import org.apache.ignite.IgniteException; +import org.apache.ignite.ml.math.ExternalizeTest; +import org.apache.ignite.ml.math.exceptions.UnsupportedOperationException; +import org.apache.ignite.ml.math.exceptions.math.CardinalityException; +import org.apache.ignite.ml.math.primitives.vector.impl.DelegatingVector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.math.primitives.vector.impl.SparseVector; +import org.apache.ignite.ml.math.primitives.vector.impl.VectorizedViewMatrix; +import org.junit.Assert; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +/** See also: {@link AbstractVectorTest} and {@link VectorToMatrixTest}. 
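+ * Most tests below follow one pattern: iterate every fixture vector produced by
+ * {@code VectorImplementationsFixtures}, mutate or query it, and check values through the
+ * {@code Metric} helper defined at the bottom of this class. Roughly (a sketch, not an exact
+ * excerpt from any single test):
+ * <pre>{@code
+ * consumeSampleVectors((v, desc) -> {
+ *     v.set(0, 1.0); // mutate the fixture vector in place
+ *     assertTrue("Mismatch in " + desc, new Metric(1.0, v.get(0)).closeEnough());
+ * });
+ * }</pre>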
*/ +public class VectorImplementationsTest { // TODO: IGNITE-5723, split this to smaller cohesive test classes + /** */ + @Test + public void setGetTest() { + consumeSampleVectors((v, desc) -> mutateAtIdxTest(v, desc, (vec, idx, val) -> { + vec.set(idx, val); + + return val; + })); + } + + /** */ + @Test + public void setXTest() { + consumeSampleVectors((v, desc) -> mutateAtIdxTest(v, desc, (vec, idx, val) -> { + vec.setX(idx, val); + + return val; + })); + } + + /** */ + @Test + public void incrementTest() { + consumeSampleVectors((v, desc) -> mutateAtIdxTest(v, desc, (vec, idx, val) -> { + double old = vec.get(idx); + + vec.increment(idx, val); + + return old + val; + })); + } + + /** */ + @Test + public void incrementXTest() { + consumeSampleVectors((v, desc) -> mutateAtIdxTest(v, desc, (vec, idx, val) -> { + double old = vec.getX(idx); + + vec.incrementX(idx, val); + + return old + val; + })); + } + + /** */ + @Test + public void operateXOutOfBoundsTest() { + consumeSampleVectors((v, desc) -> { + if (v instanceof SparseVector) + return; // TODO: IGNITE-5723, find out if it's OK to skip by instances here + + boolean expECaught = false; + + try { + v.getX(-1); + } + catch (ArrayIndexOutOfBoundsException | IgniteException e) { + expECaught = true; + } + + if (!getXOutOfBoundsOK(v)) + assertTrue("Expect exception at negative index getX in " + desc, expECaught); + + expECaught = false; + + try { + v.setX(-1, 0); + } + catch (ArrayIndexOutOfBoundsException | IgniteException e) { + expECaught = true; + } + + assertTrue("Expect exception at negative index setX in " + desc, expECaught); + + expECaught = false; + + try { + v.incrementX(-1, 1); + } + catch (ArrayIndexOutOfBoundsException | IgniteException e) { + expECaught = true; + } + + assertTrue("Expect exception at negative index incrementX in " + desc, expECaught); + + expECaught = false; + + try { + v.getX(v.size()); + } + catch (ArrayIndexOutOfBoundsException | IgniteException e) { + expECaught = true; + } + + if (!getXOutOfBoundsOK(v)) + assertTrue("Expect exception at too large index getX in " + desc, expECaught); + + expECaught = false; + + try { + v.setX(v.size(), 1); + } + catch (ArrayIndexOutOfBoundsException | IgniteException e) { + expECaught = true; + } + + assertTrue("Expect exception at too large index setX in " + desc, expECaught); + + expECaught = false; + + try { + v.incrementX(v.size(), 1); + } + catch (ArrayIndexOutOfBoundsException | IgniteException e) { + expECaught = true; + } + + assertTrue("Expect exception at too large index incrementX in " + desc, expECaught); + }); + } + + /** */ + @Test + public void sizeTest() { + final AtomicReference expSize = new AtomicReference<>(0); + + consumeSampleVectors( + expSize::set, + (v, desc) -> Assert.assertEquals("Expected size for " + desc, + (int)expSize.get(), v.size()) + ); + } + + /** */ + @Test + public void getElementTest() { + consumeSampleVectors((v, desc) -> new ElementsChecker(v, desc).assertCloseEnough(v)); + } + + /** */ + @Test + public void copyTest() { + consumeSampleVectors((v, desc) -> new ElementsChecker(v, desc).assertCloseEnough(v.copy())); + } + + /** */ + @Test + public void divideTest() { + operationTest((val, operand) -> val / operand, Vector::divide); + } + + /** */ + @Test + public void likeTest() { + for (int card : new int[] {1, 2, 4, 8, 16, 32, 64, 128}) + consumeSampleVectors((v, desc) -> { + Class expType = expLikeType(v); + + if (expType == null) { + try { + v.like(card); + } + catch (UnsupportedOperationException uoe) { + return; + } + + 
fail("Expected exception wasn't caught for " + desc); + + return; + } + + Vector vLike = v.like(card); + + assertNotNull("Expect non-null like vector for " + expType.getSimpleName() + " in " + desc, vLike); + assertEquals("Expect size equal to cardinality at " + desc, card, vLike.size()); + + Class actualType = vLike.getClass(); + + assertTrue("Actual vector type " + actualType.getSimpleName() + + " should be assignable from expected type " + expType.getSimpleName() + " in " + desc, + actualType.isAssignableFrom(expType)); + }); + } + + /** */ + @Test + public void minusTest() { + operationVectorTest((operand1, operand2) -> operand1 - operand2, Vector::minus); + } + + /** */ + @Test + public void plusVectorTest() { + operationVectorTest((operand1, operand2) -> operand1 + operand2, Vector::plus); + } + + /** */ + @Test + public void plusDoubleTest() { + operationTest((val, operand) -> val + operand, Vector::plus); + } + + /** */ + @Test + public void timesVectorTest() { + operationVectorTest((operand1, operand2) -> operand1 * operand2, Vector::times); + } + + /** */ + @Test + public void timesDoubleTest() { + operationTest((val, operand) -> val * operand, Vector::times); + } + + /** */ + @Test + public void viewPartTest() { + consumeSampleVectors((v, desc) -> { + final int size = v.size(); + final double[] ref = new double[size]; + final int delta = size > 32 ? 3 : 1; // IMPL NOTE this is for faster test execution + + final ElementsChecker checker = new ElementsChecker(v, ref, desc); + + for (int off = 0; off < size; off += delta) + for (int len = 1; len < size - off; len += delta) + checker.assertCloseEnough(v.viewPart(off, len), Arrays.copyOfRange(ref, off, off + len)); + }); + } + + /** */ + @Test + public void sumTest() { + toDoubleTest( + ref -> Arrays.stream(ref).sum(), + Vector::sum); + } + + /** */ + @Test + public void minValueTest() { + toDoubleTest( + ref -> Arrays.stream(ref).min().getAsDouble(), + Vector::minValue); + } + + /** */ + @Test + public void maxValueTest() { + toDoubleTest( + ref -> Arrays.stream(ref).max().getAsDouble(), + Vector::maxValue); + } + + /** */ + @Test + public void sortTest() { + consumeSampleVectors((v, desc) -> { + if (readOnly() || !v.isArrayBased()) { + boolean expECaught = false; + + try { + v.sort(); + } + catch (UnsupportedOperationException uoe) { + expECaught = true; + } + + assertTrue("Expected exception was not caught for sort in " + desc, expECaught); + + return; + } + + final int size = v.size(); + final double[] ref = new double[size]; + + new ElementsChecker(v, ref, desc).assertCloseEnough(v.sort(), Arrays.stream(ref).sorted().toArray()); + }); + } + + /** */ + @Test + public void metaAttributesTest() { + consumeSampleVectors((v, desc) -> { + assertNotNull("Null meta storage in " + desc, v.getMetaStorage()); + + final String key = "test key"; + final String val = "test value"; + final String details = "key [" + key + "] for " + desc; + + v.setAttribute(key, val); + assertTrue("Expect to have meta attribute for " + details, v.hasAttribute(key)); + assertEquals("Unexpected meta attribute value for " + details, val, v.getAttribute(key)); + + v.removeAttribute(key); + assertFalse("Expect not to have meta attribute for " + details, v.hasAttribute(key)); + assertNull("Unexpected meta attribute value for " + details, v.getAttribute(key)); + }); + } + + /** */ + @Test + public void assignDoubleTest() { + consumeSampleVectors((v, desc) -> { + if (readOnly()) + return; + + for (double val : new double[] {0, -1, 0, 1}) { + v.assign(val); + + for 
(int idx = 0; idx < v.size(); idx++) { + final Metric metric = new Metric(val, v.get(idx)); + + assertTrue("Not close enough at index " + idx + ", val " + val + ", " + metric + + ", " + desc, metric.closeEnough()); + } + } + }); + } + + /** */ + @Test + public void assignDoubleArrTest() { + consumeSampleVectors((v, desc) -> { + if (readOnly()) + return; + + final int size = v.size(); + final double[] ref = new double[size]; + + final ElementsChecker checker = new ElementsChecker(v, ref, desc); + + for (int idx = 0; idx < size; idx++) + ref[idx] = -ref[idx]; + + v.assign(ref); + + checker.assertCloseEnough(v, ref); + + assignDoubleArrWrongCardinality(v, desc); + }); + } + + /** */ + @Test + public void assignVectorTest() { + consumeSampleVectors((v, desc) -> { + if (readOnly()) + return; + + final int size = v.size(); + final double[] ref = new double[size]; + + final ElementsChecker checker = new ElementsChecker(v, ref, desc); + + for (int idx = 0; idx < size; idx++) + ref[idx] = -ref[idx]; + + v.assign(new DenseVector(ref)); + + checker.assertCloseEnough(v, ref); + + assignVectorWrongCardinality(v, desc); + }); + } + + /** */ + @Test + public void assignFunctionTest() { + consumeSampleVectors((v, desc) -> { + if (readOnly()) + return; + + final int size = v.size(); + final double[] ref = new double[size]; + + final ElementsChecker checker = new ElementsChecker(v, ref, desc); + + for (int idx = 0; idx < size; idx++) + ref[idx] = -ref[idx]; + + v.assign((idx) -> ref[idx]); + + checker.assertCloseEnough(v, ref); + }); + } + + /** */ + @Test + public void minElementTest() { + consumeSampleVectors((v, desc) -> { + final ElementsChecker checker = new ElementsChecker(v, desc); + + final Vector.Element minE = v.minElement(); + + final int minEIdx = minE.index(); + + assertTrue("Unexpected index from minElement " + minEIdx + ", " + desc, + minEIdx >= 0 && minEIdx < v.size()); + + final Metric metric = new Metric(minE.get(), v.minValue()); + + assertTrue("Not close enough minElement at index " + minEIdx + ", " + metric + + ", " + desc, metric.closeEnough()); + + checker.assertNewMinElement(v); + }); + } + + /** */ + @Test + public void maxElementTest() { + consumeSampleVectors((v, desc) -> { + final ElementsChecker checker = new ElementsChecker(v, desc); + + final Vector.Element maxE = v.maxElement(); + + final int minEIdx = maxE.index(); + + assertTrue("Unexpected index from minElement " + minEIdx + ", " + desc, + minEIdx >= 0 && minEIdx < v.size()); + + final Metric metric = new Metric(maxE.get(), v.maxValue()); + + assertTrue("Not close enough maxElement at index " + minEIdx + ", " + metric + + ", " + desc, metric.closeEnough()); + + checker.assertNewMaxElement(v); + }); + } + + /** */ + @Test + public void externalizeTest() { + (new ExternalizeTest() { + /** {@inheritDoc} */ + @Override public void externalizeTest() { + consumeSampleVectors((v, desc) -> externalizeTest(v)); + } + }).externalizeTest(); + } + + /** */ + @Test + public void hashCodeTest() { + consumeSampleVectors((v, desc) -> assertTrue("Zero hash code for " + desc, v.hashCode() != 0)); + } + + /** */ + private boolean getXOutOfBoundsOK(Vector v) { + // TODO: IGNITE-5723, find out if this is indeed OK + return false; + } + + /** */ + private void mutateAtIdxTest(Vector v, String desc, MutateAtIdx operation) { + if (readOnly()) { + if (v.size() < 1) + return; + + boolean expECaught = false; + + try { + operation.apply(v, 0, 1); + } + catch (UnsupportedOperationException uoe) { + expECaught = true; + } + + assertTrue("Expect 
exception at attempt to mutate element in " + desc, expECaught); + + return; + } + + for (double val : new double[] {0, -1, 0, 1}) + for (int idx = 0; idx < v.size(); idx++) { + double exp = operation.apply(v, idx, val); + + final Metric metric = new Metric(exp, v.get(idx)); + + assertTrue("Not close enough at index " + idx + ", val " + val + ", " + metric + + ", " + desc, metric.closeEnough()); + } + } + + /** */ + private Class expLikeType(Vector v) { + Class clazz = v.getClass(); + + if (clazz.isAssignableFrom(VectorizedViewMatrix.class) || clazz.isAssignableFrom(DelegatingVector.class)) + return DenseVector.class; // IMPL NOTE per fixture + + return clazz; + } + + /** */ + private void toDoubleTest(Function calcRef, Function calcVec) { + consumeSampleVectors((v, desc) -> { + final int size = v.size(); + final double[] ref = new double[size]; + + new ElementsChecker(v, ref, desc); // IMPL NOTE this initialises vector and reference array + + final Metric metric = new Metric(calcRef.apply(ref), calcVec.apply(v)); + + assertTrue("Not close enough at " + desc + + ", " + metric, metric.closeEnough()); + }); + } + + /** */ + private void operationVectorTest(BiFunction operation, + BiFunction vecOperation) { + consumeSampleVectors((v, desc) -> { + // TODO : IGNITE-5723, find out if more elaborate testing scenario is needed or it's okay as is. + final int size = v.size(); + final double[] ref = new double[size]; + + final ElementsChecker checker = new ElementsChecker(v, ref, desc); + final Vector operand = v.copy(); + + for (int idx = 0; idx < size; idx++) + ref[idx] = operation.apply(ref[idx], ref[idx]); + + checker.assertCloseEnough(vecOperation.apply(v, operand), ref); + + assertWrongCardinality(v, desc, vecOperation); + }); + } + + /** */ + private void assignDoubleArrWrongCardinality(Vector v, String desc) { + boolean expECaught = false; + + try { + v.assign(new double[v.size() + 1]); + } + catch (CardinalityException ce) { + expECaught = true; + } + + assertTrue("Expect exception at too large size in " + desc, expECaught); + + if (v.size() < 2) + return; + + expECaught = false; + + try { + v.assign(new double[v.size() - 1]); + } + catch (CardinalityException ce) { + expECaught = true; + } + + assertTrue("Expect exception at too small size in " + desc, expECaught); + } + + /** */ + private void assignVectorWrongCardinality(Vector v, String desc) { + boolean expECaught = false; + + try { + v.assign(new DenseVector(v.size() + 1)); + } + catch (CardinalityException ce) { + expECaught = true; + } + + assertTrue("Expect exception at too large size in " + desc, expECaught); + + if (v.size() < 2) + return; + + expECaught = false; + + try { + v.assign(new DenseVector(v.size() - 1)); + } + catch (CardinalityException ce) { + expECaught = true; + } + + assertTrue("Expect exception at too small size in " + desc, expECaught); + } + + /** */ + private void assertWrongCardinality( + Vector v, String desc, BiFunction vecOperation) { + boolean expECaught = false; + + try { + vecOperation.apply(v, new DenseVector(v.size() + 1)); + } + catch (CardinalityException ce) { + expECaught = true; + } + + assertTrue("Expect exception at too large size in " + desc, expECaught); + + if (v.size() < 2) + return; + + expECaught = false; + + try { + vecOperation.apply(v, new DenseVector(v.size() - 1)); + } + catch (CardinalityException ce) { + expECaught = true; + } + + assertTrue("Expect exception at too small size in " + desc, expECaught); + } + + /** */ + private void operationTest(BiFunction operation, + BiFunction 
vecOperation) { + for (double val : new double[] {0, 0.1, 1, 2, 10}) + consumeSampleVectors((v, desc) -> { + final int size = v.size(); + final double[] ref = new double[size]; + + final ElementsChecker checker = new ElementsChecker(v, ref, "val " + val + ", " + desc); + + for (int idx = 0; idx < size; idx++) + ref[idx] = operation.apply(ref[idx], val); + + Vector apply = vecOperation.apply(v, val); + checker.assertCloseEnough(apply, ref); + }); + } + + /** */ + private void consumeSampleVectors(BiConsumer consumer) { + consumeSampleVectors(null, consumer); + } + + /** */ + private void consumeSampleVectors(Consumer paramsConsumer, BiConsumer consumer) { + new VectorImplementationsFixtures().consumeSampleVectors(paramsConsumer, consumer); + } + + /** */ + private static boolean readOnly() { + return false; + } + + /** */ + private interface MutateAtIdx { + /** */ + double apply(Vector v, int idx, double val); + } + + /** */ + static class ElementsChecker { + /** */ + private final String fixtureDesc; + + /** */ + private final double[] refReadOnly; + + /** */ + private final boolean nonNegative; + + /** */ + ElementsChecker(Vector v, double[] ref, String fixtureDesc, boolean nonNegative) { + this.fixtureDesc = fixtureDesc; + + this.nonNegative = nonNegative; + + refReadOnly = readOnly() && ref == null ? new double[v.size()] : null; + + init(v, ref); + } + + /** */ + ElementsChecker(Vector v, double[] ref, String fixtureDesc) { + this(v, ref, fixtureDesc, false); + } + + /** */ + ElementsChecker(Vector v, String fixtureDesc) { + this(v, null, fixtureDesc); + } + + /** */ + void assertCloseEnough(Vector obtained, double[] exp) { + final int size = obtained.size(); + + for (int i = 0; i < size; i++) { + final Vector.Element e = obtained.getElement(i); + + if (refReadOnly != null && exp == null) + exp = refReadOnly; + + final Metric metric = new Metric(exp == null ? generated(i) : exp[i], e.get()); + + assertEquals("Unexpected vector index at " + fixtureDesc, i, e.index()); + assertTrue("Not close enough at index " + i + ", size " + size + ", " + metric + + ", " + fixtureDesc, metric.closeEnough()); + } + } + + /** */ + void assertCloseEnough(Vector obtained) { + assertCloseEnough(obtained, null); + } + + /** */ + void assertNewMinElement(Vector v) { + if (readOnly()) + return; + + int exp = v.size() / 2; + + v.set(exp, -(v.size() * 2 + 1)); + + assertEquals("Unexpected minElement index at " + fixtureDesc, exp, v.minElement().index()); + } + + /** */ + void assertNewMaxElement(Vector v) { + if (readOnly()) + return; + + int exp = v.size() / 2; + + v.set(exp, v.size() * 2 + 1); + + assertEquals("Unexpected minElement index at " + fixtureDesc, exp, v.maxElement().index()); + } + + /** */ + private void init(Vector v, double[] ref) { + if (readOnly()) { + initReadonly(v, ref); + + return; + } + + for (Vector.Element e : v.all()) { + int idx = e.index(); + + // IMPL NOTE introduce negative values because their absence + // blocked catching an ugly bug in AbstractVector#kNorm + int val = generated(idx); + + e.set(val); + + if (ref != null) + ref[idx] = val; + } + } + + /** */ + private void initReadonly(Vector v, double[] ref) { + if (refReadOnly != null) + for (Vector.Element e : v.all()) + refReadOnly[e.index()] = e.get(); + + if (ref != null) + for (Vector.Element e : v.all()) + ref[e.index()] = e.get(); + } + + /** */ + private int generated(int idx) { + return nonNegative || (idx & 1) == 0 ? 
idx : -idx; + } + } + + /** */ + static class Metric { //TODO: IGNITE-5824, consider if softer tolerance (like say 0.1 or 0.01) would make sense here + /** */ + private final double exp; + + /** */ + private final double obtained; + + /** **/ + Metric(double exp, double obtained) { + this.exp = exp; + this.obtained = obtained; + } + + /** */ + boolean closeEnough() { + return new Double(exp).equals(obtained) || closeEnoughToZero(); + } + + /** {@inheritDoc} */ + @Override public String toString() { + return "Metric{" + "expected=" + exp + + ", obtained=" + obtained + + '}'; + } + + /** */ + private boolean closeEnoughToZero() { + return (new Double(exp).equals(0.0) && new Double(obtained).equals(-0.0)) + || (new Double(exp).equals(-0.0) && new Double(obtained).equals(0.0)); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorNormCasesTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorNormCasesTest.java new file mode 100644 index 0000000000000..ca737b9cddda6 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorNormCasesTest.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.primitives.vector; + +import java.util.Arrays; +import java.util.Collection; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import static org.junit.Assert.assertEquals; + +/** */ +@RunWith(Parameterized.class) +public class VectorNormCasesTest { + /** + * Precision. 
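+ * The expected results in {@code data()} are p-norms of the vector {1.0, -1.0, 0.0}, i.e.
+ * (|1|^p + |-1|^p + |0|^p)^(1/p) = 2^(1/p): exactly 2.0 for p = 1, then 1.4142..., 1.2599...,
+ * 1.1892... and 1.1487... for p = 2..5. The table lists them truncated to two decimal places,
+ * which keeps every case within this 0.01 tolerance.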
+ */ + private static final double PRECISION = 0.01; + + /** */ + @Parameterized.Parameters(name = "{0}") + public static Collection data() { + return Arrays.asList( + new TestData( + new double[] {1.0, -1.0, 0.0}, + 1, + 2.0 + ), + new TestData( + new double[] {1.0, -1.0, 0.0}, + 2, + 1.41 + ), + new TestData( + new double[] {1.0, -1.0, 0.0}, + 3, + 1.25 + ), + new TestData( + new double[] {1.0, -1.0, 0.0}, + 4, + 1.18 + ), + new TestData( + new double[] {1.0, -1.0, 0.0}, + 5, + 1.14 + ) + ); + } + + /** */ + private final TestData testData; + + /** */ + public VectorNormCasesTest(TestData testData) { + this.testData = testData; + } + + /** */ + @Test + public void test() { + assertEquals( + testData.vector.kNorm(testData.p), + testData.expRes, + PRECISION + ); + } + + /** */ + private static class TestData { + /** */ + public final Vector vector; + + /** */ + public final Double p; + + /** */ + public final Double expRes; + + /** */ + private TestData(double[] vector, double p, double expRes) { + this.vector = new DenseVector(vector); + this.p = p; + this.expRes = expRes; + } + + /** {@inheritDoc} */ + @Override public String toString() { + return String.format("norm(%s, %s) = %s", + Arrays.toString(vector.asArray()), + p, + expRes + ); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorNormTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorNormTest.java new file mode 100644 index 0000000000000..23c9e6f35e59c --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorNormTest.java @@ -0,0 +1,239 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.math.primitives.vector; + +import java.util.function.BiConsumer; +import java.util.function.BiFunction; +import java.util.function.Function; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Test; + +import static org.junit.Assert.assertTrue; + +/** */ +public class VectorNormTest { + /** */ + @Test + public void normalizeTest() { + normalizeTest(2, (val, len) -> val / len, Vector::normalize); + } + + /** */ + @Test + public void normalizePowerTest() { + for (double pow : new double[] {0, 0.5, 1, 2, 2.5, Double.POSITIVE_INFINITY}) + normalizeTest(pow, (val, norm) -> val / norm, (v) -> v.normalize(pow)); + } + + /** */ + @Test + public void logNormalizeTest() { + normalizeTest(2, (val, len) -> Math.log1p(val) / (len * Math.log(2)), Vector::logNormalize); + } + + /** */ + @Test + public void logNormalizePowerTest() { + for (double pow : new double[] {1.1, 2, 2.5}) + normalizeTest(pow, (val, norm) -> Math.log1p(val) / (norm * Math.log(pow)), (v) -> v.logNormalize(pow)); + } + + /** */ + @Test + public void kNormTest() { + for (double pow : new double[] {0, 0.5, 1, 2, 2.5, Double.POSITIVE_INFINITY}) + toDoubleTest(pow, ref -> new Norm(ref, pow).calculate(), v -> v.kNorm(pow)); + } + + /** */ + @Test + public void getLengthSquaredTest() { + toDoubleTest(2.0, ref -> new Norm(ref, 2).sumPowers(), Vector::getLengthSquared); + } + + /** */ + @Test + public void getDistanceSquaredTest() { + consumeSampleVectors((v, desc) -> { + new VectorImplementationsTest.ElementsChecker(v, desc); // IMPL NOTE this initialises vector + + final int size = v.size(); + final Vector vOnHeap = new DenseVector(size); + + invertValues(v, vOnHeap); + + for (int idx = 0; idx < size; idx++) { + final double exp = v.get(idx); + final int idxMirror = size - 1 - idx; + + assertTrue("On heap vector difference at " + desc + ", idx " + idx, + exp - vOnHeap.get(idxMirror) == 0); + } + + final double exp = vOnHeap.minus(v).getLengthSquared(); // IMPL NOTE this won't mutate vOnHeap + final VectorImplementationsTest.Metric metric = new VectorImplementationsTest.Metric(exp, v.getDistanceSquared(vOnHeap)); + + assertTrue("On heap vector not close enough at " + desc + ", " + metric, + metric.closeEnough()); + }); + } + + /** */ + @Test + public void dotTest() { + consumeSampleVectors((v, desc) -> { + new VectorImplementationsTest.ElementsChecker(v, desc); // IMPL NOTE this initialises vector + + final int size = v.size(); + final Vector v1 = new DenseVector(size); + + invertValues(v, v1); + + final double actual = v.dot(v1); + + double exp = 0; + + for (Vector.Element e : v.all()) + exp += e.get() * v1.get(e.index()); + + final VectorImplementationsTest.Metric metric = new VectorImplementationsTest.Metric(exp, actual); + + assertTrue("Dot product not close enough at " + desc + ", " + metric, + metric.closeEnough()); + }); + } + + /** */ + private void invertValues(Vector src, Vector dst) { + final int size = src.size(); + + for (Vector.Element e : src.all()) { + final int idx = size - 1 - e.index(); + final double val = e.get(); + + dst.set(idx, val); + } + } + + /** */ + private void toDoubleTest(Double val, Function calcRef, Function calcVec) { + consumeSampleVectors((v, desc) -> { + final int size = v.size(); + final double[] ref = new double[size]; + + new VectorImplementationsTest.ElementsChecker(v, ref, desc); // IMPL NOTE this initialises vector and reference array + + final double exp = calcRef.apply(ref); + final double obtained = calcVec.apply(v); + 
final VectorImplementationsTest.Metric metric = new VectorImplementationsTest.Metric(exp, obtained); + + assertTrue("Not close enough at " + desc + + (val == null ? "" : ", value " + val) + ", " + metric, metric.closeEnough()); + }); + } + + /** */ + private void normalizeTest(double pow, BiFunction operation, + Function vecOperation) { + consumeSampleVectors((v, desc) -> { + final int size = v.size(); + final double[] ref = new double[size]; + final boolean nonNegative = pow != (int)pow; + + final VectorImplementationsTest.ElementsChecker checker = + new VectorImplementationsTest.ElementsChecker(v, ref, desc + ", pow = " + pow, nonNegative); + final double norm = new Norm(ref, pow).calculate(); + + for (int idx = 0; idx < size; idx++) + ref[idx] = operation.apply(ref[idx], norm); + + checker.assertCloseEnough(vecOperation.apply(v), ref); + }); + } + + /** */ + private void consumeSampleVectors(BiConsumer consumer) { + new VectorImplementationsFixtures().consumeSampleVectors(null, consumer); + } + + /** */ + private static class Norm { + /** */ + private final double[] arr; + + /** */ + private final Double pow; + + /** */ + Norm(double[] arr, double pow) { + this.arr = arr; + this.pow = pow; + } + + /** */ + double calculate() { + if (pow.equals(0.0)) + return countNonZeroes(); // IMPL NOTE this is beautiful if you think of it + + if (pow.equals(Double.POSITIVE_INFINITY)) + return maxAbs(); + + return Math.pow(sumPowers(), 1 / pow); + } + + /** */ + double sumPowers() { + if (pow.equals(0.0)) + return countNonZeroes(); + + double norm = 0; + + for (double val : arr) + norm += pow == 1 ? Math.abs(val) : Math.pow(Math.abs(val), pow); + + return norm; + } + + /** */ + private int countNonZeroes() { + int cnt = 0; + + final Double zero = 0.0; + + for (double val : arr) + if (!zero.equals(val)) + cnt++; + + return cnt; + } + + /** */ + private double maxAbs() { + double res = 0; + + for (double val : arr) { + final double abs = Math.abs(val); + + if (abs > res) + res = abs; + } + + return res; + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorToMatrixTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorToMatrixTest.java new file mode 100644 index 0000000000000..a288f146e7207 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorToMatrixTest.java @@ -0,0 +1,261 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.math.primitives.vector; + +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.function.BiConsumer; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.matrix.impl.SparseMatrix; +import org.apache.ignite.ml.math.primitives.vector.impl.DelegatingVector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.math.primitives.vector.impl.SparseVector; +import org.apache.ignite.ml.math.primitives.vector.impl.VectorizedViewMatrix; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +/** Tests for methods of Vector that involve Matrix. */ +public class VectorToMatrixTest { + /** */ + private static final Map, Class> typesMap = typesMap(); + + /** */ + @Test + public void testHaveLikeMatrix() { + for (Class key : typesMap.keySet()) { + Class val = typesMap.get(key); + + if (val == null) + System.out.println("Missing test for implementation of likeMatrix for " + key.getSimpleName()); + } + } + + /** */ + @Test + public void testLikeMatrix() { + consumeSampleVectors((v, desc) -> { + if (!availableForTesting(v)) + return; + + final Matrix matrix = v.likeMatrix(1, 1); + + Class key = v.getClass(); + + Class expMatrixType = typesMap.get(key); + + assertNotNull("Expect non-null matrix for " + key.getSimpleName() + " in " + desc, matrix); + + Class actualMatrixType = matrix.getClass(); + + assertTrue("Expected matrix type " + expMatrixType.getSimpleName() + + " should be assignable from actual type " + actualMatrixType.getSimpleName() + " in " + desc, + expMatrixType.isAssignableFrom(actualMatrixType)); + + for (int rows : new int[] {1, 2}) + for (int cols : new int[] {1, 2}) { + final Matrix actualMatrix = v.likeMatrix(rows, cols); + + String details = "rows " + rows + " cols " + cols; + + assertNotNull("Expect non-null matrix for " + details + " in " + desc, + actualMatrix); + + assertEquals("Unexpected number of rows in " + desc, rows, actualMatrix.rowSize()); + + assertEquals("Unexpected number of cols in " + desc, cols, actualMatrix.columnSize()); + } + }); + } + + /** */ + @Test + public void testToMatrix() { + consumeSampleVectors((v, desc) -> { + if (!availableForTesting(v)) + return; + + fillWithNonZeroes(v); + + final Matrix matrixRow = v.toMatrix(true); + + final Matrix matrixCol = v.toMatrix(false); + + for (Vector.Element e : v.all()) + assertToMatrixValue(desc, matrixRow, matrixCol, e.get(), e.index()); + }); + } + + /** */ + @Test + public void testToMatrixPlusOne() { + consumeSampleVectors((v, desc) -> { + if (!availableForTesting(v)) + return; + + fillWithNonZeroes(v); + + for (double zeroVal : new double[] {-1, 0, 1, 2}) { + final Matrix matrixRow = v.toMatrixPlusOne(true, zeroVal); + + final Matrix matrixCol = v.toMatrixPlusOne(false, zeroVal); + + final Metric metricRow0 = new Metric(zeroVal, matrixRow.get(0, 0)); + + assertTrue("Not close enough row like " + metricRow0 + " at index 0 in " + desc, + metricRow0.closeEnough()); + + final Metric metricCol0 = new Metric(zeroVal, matrixCol.get(0, 0)); + + assertTrue("Not close enough cols like " + metricCol0 + " at index 0 in " + desc, + metricCol0.closeEnough()); + + for (Vector.Element e : v.all()) + assertToMatrixValue(desc, matrixRow, matrixCol, e.get(), e.index() + 1); + } + }); + } + + /** */ + @Test + 
public void testCross() { + consumeSampleVectors((v, desc) -> { + if (!availableForTesting(v)) + return; + + fillWithNonZeroes(v); + + for (int delta : new int[] {-1, 0, 1}) { + final int size2 = v.size() + delta; + + if (size2 < 1) + return; + + final Vector v2 = new DenseVector(size2); + + for (Vector.Element e : v2.all()) + e.set(size2 - e.index()); + + assertCross(v, v2, desc); + } + }); + } + + /** */ + private void assertCross(Vector v1, Vector v2, String desc) { + assertNotNull(v1); + assertNotNull(v2); + + final Matrix res = v1.cross(v2); + + assertNotNull("Cross matrix is expected to be not null in " + desc, res); + + assertEquals("Unexpected number of rows in cross Matrix in " + desc, v1.size(), res.rowSize()); + + assertEquals("Unexpected number of cols in cross Matrix in " + desc, v2.size(), res.columnSize()); + + for (int row = 0; row < v1.size(); row++) + for (int col = 0; col < v2.size(); col++) { + final Metric metric = new Metric(v1.get(row) * v2.get(col), res.get(row, col)); + + assertTrue("Not close enough cross " + metric + " at row " + row + " at col " + col + + " in " + desc, metric.closeEnough()); + } + } + + /** */ + private void assertToMatrixValue(String desc, Matrix matrixRow, Matrix matrixCol, double exp, int idx) { + final Metric metricRow = new Metric(exp, matrixRow.get(0, idx)); + + assertTrue("Not close enough row like " + metricRow + " at index " + idx + " in " + desc, + metricRow.closeEnough()); + + final Metric metricCol = new Metric(exp, matrixCol.get(idx, 0)); + + assertTrue("Not close enough cols like " + matrixCol + " at index " + idx + " in " + desc, + metricCol.closeEnough()); + } + + /** */ + private void fillWithNonZeroes(Vector sample) { + for (Vector.Element e : sample.all()) + e.set(1 + e.index()); + } + + /** */ + private boolean availableForTesting(Vector v) { + assertNotNull("Error in test: vector is null", v); + + final boolean availableForTesting = typesMap.get(v.getClass()) != null; + + final Matrix actualLikeMatrix = v.likeMatrix(1, 1); + + assertTrue("Need to enable matrix testing for vector type " + v.getClass().getSimpleName(), + availableForTesting || actualLikeMatrix == null); + + return availableForTesting; + } + + /** */ + private void consumeSampleVectors(BiConsumer consumer) { + new VectorImplementationsFixtures().consumeSampleVectors(null, consumer); + } + + /** */ + private static Map, Class> typesMap() { + LinkedHashMap, Class> typesMap = new LinkedHashMap<>(); + + typesMap.put(DenseVector.class, DenseMatrix.class); + typesMap.put(SparseVector.class, SparseMatrix.class); + typesMap.put(VectorizedViewMatrix.class, DenseMatrix.class); // IMPL NOTE per fixture + typesMap.put(DelegatingVector.class, DenseMatrix.class); // IMPL NOTE per fixture + // IMPL NOTE check for presence of all implementations here will be done in testHaveLikeMatrix via Fixture + + return typesMap; + } + + /** */ + private static class Metric { //TODO: IGNITE-5824, consider if softer tolerance (like say 0.1 or 0.01) would make sense here. 
+ /** */ + private final double exp; + + /** */ + private final double obtained; + + /** **/ + Metric(double exp, double obtained) { + this.exp = exp; + this.obtained = obtained; + } + + /** */ + boolean closeEnough() { + return new Double(exp).equals(obtained); + } + + /** {@inheritDoc} */ + @Override public String toString() { + return "Metric{" + "expected=" + exp + + ", obtained=" + obtained + + '}'; + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorViewTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorViewTest.java new file mode 100644 index 0000000000000..7471540d8215a --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorViewTest.java @@ -0,0 +1,163 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.primitives.vector; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.function.BiConsumer; +import java.util.stream.IntStream; +import org.apache.ignite.ml.math.exceptions.UnsupportedOperationException; +import org.apache.ignite.ml.math.primitives.MathTestConstants; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.math.primitives.vector.impl.VectorView; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +/** + * Unit tests for {@link VectorView}. 
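+ * The fixture wraps a slice of a {@code DenseVector} parent, so reads and writes on the view are
+ * expected to go through to the parent's storage. A sketch of what {@code setup()} below builds,
+ * using the constants declared in this class:
+ * <pre>{@code
+ * DenseVector parent = new DenseVector(MathTestConstants.STORAGE_SIZE);
+ * VectorView view = new VectorView(parent, OFFSET, VIEW_LENGTH);
+ * view.set(0, 42.0); // expected to be visible as parent.get(OFFSET)
+ * }</pre>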
+ */ +public class VectorViewTest { + /** */ + private static final int OFFSET = 10; + + /** */ + private static final int VIEW_LENGTH = 80; + + /** */ + private static final String EXTERNALIZE_TEST_FILE_NAME = "externalizeTest"; + + /** */ + private VectorView testVector; + + /** */ + private DenseVector parentVector; + + /** */ + private double[] parentData; + + /** */ + @Before + public void setup() { + parentVector = new DenseVector(MathTestConstants.STORAGE_SIZE); + + IntStream.range(0, MathTestConstants.STORAGE_SIZE).forEach(idx -> parentVector.set(idx, Math.random())); + + parentData = parentVector.getStorage().data().clone(); + + testVector = new VectorView(parentVector, OFFSET, VIEW_LENGTH); + } + + /** */ + @AfterClass + public static void cleanup() throws IOException { + Files.deleteIfExists(Paths.get(EXTERNALIZE_TEST_FILE_NAME)); + } + + /** */ + @Test + public void testCopy() throws Exception { + Vector cp = testVector.copy(); + + assertTrue(MathTestConstants.VAL_NOT_EQUALS, cp.equals(testVector)); + } + + /** */ + @Test(expected = UnsupportedOperationException.class) + public void testLike() throws Exception { + for (int card : new int[] {1, 2, 4, 8, 16, 32, 64, 128}) + consumeSampleVectors((v, desc) -> { + Vector vLike = new VectorView(v, 0, 1).like(card); + + Class expType = v.getClass(); + + assertNotNull("Expect non-null like vector for " + expType.getSimpleName() + " in " + desc, vLike); + + assertEquals("Expect size equal to cardinality at " + desc, card, vLike.size()); + + Class actualType = vLike.getClass(); + + assertTrue("Expected matrix type " + expType.getSimpleName() + + " should be assignable from actual type " + actualType.getSimpleName() + " in " + desc, + expType.isAssignableFrom(actualType)); + + }); + } + + /** See also {@link VectorToMatrixTest#testLikeMatrix()}. 
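+ * In contrast to the dense and sparse implementations covered by the linked test, a plain view is
+ * expected to have no matrix counterpart here, so the test asserts that {@code likeMatrix(1, 1)}
+ * throws {@link UnsupportedOperationException} for every fixture vector.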
*/ + @Test + public void testLikeMatrix() { + consumeSampleVectors((v, desc) -> { + boolean expECaught = false; + + try { + assertNull("Null view instead of exception in " + desc, new VectorView(v, 0, 1).likeMatrix(1, 1)); + } + catch (UnsupportedOperationException uoe) { + expECaught = true; + } + + assertTrue("Expected exception was not caught in " + desc, expECaught); + }); + } + + /** */ + @Test + public void testWriteReadExternal() throws Exception { + assertNotNull("Unexpected null parent data", parentData); + + File f = new File(EXTERNALIZE_TEST_FILE_NAME); + + try { + ObjectOutputStream objOutputStream = new ObjectOutputStream(new FileOutputStream(f)); + + objOutputStream.writeObject(testVector); + + objOutputStream.close(); + + ObjectInputStream objInputStream = new ObjectInputStream(new FileInputStream(f)); + + VectorView readVector = (VectorView)objInputStream.readObject(); + + objInputStream.close(); + + assertTrue(MathTestConstants.VAL_NOT_EQUALS, testVector.equals(readVector)); + } + catch (ClassNotFoundException | IOException e) { + fail(e.getMessage()); + } + } + + /** */ + private void consumeSampleVectors(BiConsumer consumer) { + new VectorImplementationsFixtures().consumeSampleVectors(null, consumer); + } + +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/storage/SparseVectorStorageTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/storage/SparseVectorStorageTest.java new file mode 100644 index 0000000000000..f16be195a9114 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/storage/SparseVectorStorageTest.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.primitives.vector.storage; + +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorStorage; +import org.apache.ignite.ml.math.primitives.vector.impl.SparseVector; + +/** + * Tests for a few properties of Sparse Storage. 
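+ * A sketch of the storage behaviour exercised through {@code AbstractStorageTest} (assuming the
+ * usual sparse contract that indices never written read back as {@code 0.0}; that default is an
+ * assumption of this note, not something asserted in this file):
+ * <pre>{@code
+ * VectorStorage st = new SparseVectorStorage(8);
+ * st.set(3, 42.0);
+ * double present = st.get(3); // 42.0
+ * double absent = st.get(0);  // expected 0.0 under the sparse default
+ * }</pre>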
+ */ +public class SparseVectorStorageTest extends AbstractStorageTest { + /** {@inheritDoc} */ + @Override protected boolean isNumericVector(VectorStorage storage) { + return true; + } + + /** {@inheritDoc} */ + @Override protected boolean isRaw(VectorStorage storage) { + return true; + } + + /** {@inheritDoc} */ + @Override protected VectorStorage createStorage(int size) { + return new SparseVectorStorage(size); + } + + /** {@inheritDoc} */ + @Override protected Vector createVector(int size) { + return new SparseVector(size); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/stat/DistributionMixtureTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/stat/DistributionMixtureTest.java new file mode 100644 index 0000000000000..694914f4e83d2 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/stat/DistributionMixtureTest.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.stat; + +import java.util.Arrays; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +/** + * + */ +public class DistributionMixtureTest { + /** */ + private DistributionMixture mixture; + + /** */ + @Before + public void setUp() { + mixture = new DistributionMixture( + VectorUtils.of(0.3, 0.3, 0.4), + Arrays.asList(new Constant(0.5), new Constant(1.0), new Constant(0.)) + ) { + }; + + assertEquals(1, mixture.dimension()); + assertEquals(3, mixture.countOfComponents()); + } + + /** */ + @Test + public void testLikelihood() { + assertArrayEquals( + new double[] {0.15, 0.3, 0.}, + mixture.likelihood(VectorUtils.of(1.)).asArray(), 1e-4 + ); + } + + /** */ + @Test + public void testProb() { + assertEquals(0.45, mixture.prob(VectorUtils.of(1.)), 1e-4); + } + + /** */ + private static class Constant implements Distribution { + /** Value. 
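+ * Each component simply returns this constant as its density, which makes the numbers asserted
+ * above easy to verify by hand: with weights {0.3, 0.3, 0.4} and constants {0.5, 1.0, 0.0}, the
+ * per-component likelihood at any point is {0.3 * 0.5, 0.3 * 1.0, 0.4 * 0.0} = {0.15, 0.3, 0.0},
+ * and the total probability is their sum, 0.45.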
*/ + private final double val; + + /** */ + public Constant(double value) { + this.val = value; + } + + /** {@inheritDoc} */ + @Override public double prob(Vector x) { + return val; + } + + /** {@inheritDoc} */ + @Override public int dimension() { + return 1; + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/stat/MultivariateGaussianDistributionTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/stat/MultivariateGaussianDistributionTest.java new file mode 100644 index 0000000000000..415bc18b34b28 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/stat/MultivariateGaussianDistributionTest.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.math.stat; + +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Assert; +import org.junit.Test; + +/** + * Tests for {@link MultivariateGaussianDistribution}. + */ +public class MultivariateGaussianDistributionTest { + /** */ + @Test + public void testApply() { + MultivariateGaussianDistribution distribution = new MultivariateGaussianDistribution( + VectorUtils.of(1, 2), + new DenseMatrix(new double[][] {new double[] {1, -0.5}, new double[] {-0.5, 1}}) + ); + + Assert.assertEquals(0.183, distribution.prob(VectorUtils.of(1, 2)), 0.01); + Assert.assertEquals(0.094, distribution.prob(VectorUtils.of(0, 2)), 0.01); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/stat/StatsTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/stat/StatsTestSuite.java new file mode 100644 index 0000000000000..5b4c80ed2dadd --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/math/stat/StatsTestSuite.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.math.stat; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for stat package. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + DistributionMixtureTest.class, + MultivariateGaussianDistributionTest.class +}) +public class StatsTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/multiclass/MultiClassTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/multiclass/MultiClassTestSuite.java new file mode 100644 index 0000000000000..551597fd7f733 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/multiclass/MultiClassTestSuite.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.multiclass; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for multilayer perceptrons. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + OneVsRestTrainerTest.class +}) +public class MultiClassTestSuite { + // No-op. +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/multiclass/OneVsRestTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/multiclass/OneVsRestTrainerTest.java new file mode 100644 index 0000000000000..d9c97b71b8c1d --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/multiclass/OneVsRestTrainerTest.java @@ -0,0 +1,124 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.multiclass; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.nn.UpdatesStrategy; +import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDParameterUpdate; +import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator; +import org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel; +import org.apache.ignite.ml.regressions.logistic.LogisticRegressionSGDTrainer; +import org.junit.Assert; +import org.junit.Test; + +/** + * Tests for {@link OneVsRestTrainer}. + */ +public class OneVsRestTrainerTest extends TrainerTest { + /** + * Test trainer on 2 linearly separable sets. + */ + @Test + public void testTrainWithTheLinearlySeparableCase() { + Map cacheMock = new HashMap<>(); + + for (int i = 0; i < twoLinearlySeparableClasses.length; i++) + cacheMock.put(i, twoLinearlySeparableClasses[i]); + + LogisticRegressionSGDTrainer binaryTrainer = new LogisticRegressionSGDTrainer() + .withUpdatesStgy(new UpdatesStrategy<>(new SimpleGDUpdateCalculator(0.2), + SimpleGDParameterUpdate.SUM_LOCAL, SimpleGDParameterUpdate.AVG)) + .withMaxIterations(1000) + .withLocIterations(10) + .withBatchSize(100) + .withSeed(123L); + + OneVsRestTrainer trainer = new OneVsRestTrainer<>(binaryTrainer); + + MultiClassModel mdl = trainer.fit( + cacheMock, parts, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST) + ); + + Assert.assertTrue(!mdl.toString().isEmpty()); + Assert.assertTrue(!mdl.toString(true).isEmpty()); + Assert.assertTrue(!mdl.toString(false).isEmpty()); + + TestUtils.assertEquals(1, mdl.predict(VectorUtils.of(-100, 0)), PRECISION); + TestUtils.assertEquals(0, mdl.predict(VectorUtils.of(100, 0)), PRECISION); + } + + /** */ + @Test + public void testUpdate() { + Map cacheMock = new HashMap<>(); + + for (int i = 0; i < twoLinearlySeparableClasses.length; i++) + cacheMock.put(i, twoLinearlySeparableClasses[i]); + + LogisticRegressionSGDTrainer binaryTrainer = new LogisticRegressionSGDTrainer() + .withUpdatesStgy(new UpdatesStrategy<>(new SimpleGDUpdateCalculator(0.2), + SimpleGDParameterUpdate.SUM_LOCAL, SimpleGDParameterUpdate.AVG)) + .withMaxIterations(1000) + .withLocIterations(10) + .withBatchSize(100) + .withSeed(123L); + + OneVsRestTrainer trainer = new OneVsRestTrainer<>(binaryTrainer); + + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST); + MultiClassModel originalMdl = trainer.fit( + cacheMock, parts, + vectorizer + ); + + MultiClassModel updatedOnSameDS = trainer.update( + originalMdl, + cacheMock, + parts, + vectorizer + ); + + MultiClassModel updatedOnEmptyDS = trainer.update( + originalMdl, + new HashMap<>(), + parts, + vectorizer + ); + + List vectors = Arrays.asList( + VectorUtils.of(-100, 0), + VectorUtils.of(100, 0) + ); + + for (Vector vec : vectors) { + TestUtils.assertEquals(originalMdl.predict(vec), updatedOnSameDS.predict(vec), PRECISION); + TestUtils.assertEquals(originalMdl.predict(vec), updatedOnEmptyDS.predict(vec), PRECISION); + } + } +} diff --git 
a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/NaiveBayesTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/NaiveBayesTestSuite.java new file mode 100644 index 0000000000000..380f7276a1457 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/NaiveBayesTestSuite.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.naivebayes; + +import org.apache.ignite.ml.naivebayes.compound.CompoundNaiveBayesModelTest; +import org.apache.ignite.ml.naivebayes.compound.CompoundNaiveBayesTest; +import org.apache.ignite.ml.naivebayes.compound.CompoundNaiveBayesTrainerTest; +import org.apache.ignite.ml.naivebayes.discrete.DiscreteNaiveBayesModelTest; +import org.apache.ignite.ml.naivebayes.discrete.DiscreteNaiveBayesTest; +import org.apache.ignite.ml.naivebayes.discrete.DiscreteNaiveBayesTrainerTest; +import org.apache.ignite.ml.naivebayes.gaussian.GaussianNaiveBayesModelTest; +import org.apache.ignite.ml.naivebayes.gaussian.GaussianNaiveBayesTest; +import org.apache.ignite.ml.naivebayes.gaussian.GaussianNaiveBayesTrainerTest; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in the org.apache.ignite.ml.naivebayes package. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + GaussianNaiveBayesModelTest.class, + GaussianNaiveBayesTest.class, + GaussianNaiveBayesTrainerTest.class, + DiscreteNaiveBayesModelTest.class, + DiscreteNaiveBayesTest.class, + DiscreteNaiveBayesTrainerTest.class, + CompoundNaiveBayesModelTest.class, + CompoundNaiveBayesTest.class, + CompoundNaiveBayesTrainerTest.class +}) +public class NaiveBayesTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/CompoundNaiveBayesModelTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/CompoundNaiveBayesModelTest.java new file mode 100644 index 0000000000000..4792262e134b0 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/CompoundNaiveBayesModelTest.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.naivebayes.compound; + +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.naivebayes.discrete.DiscreteNaiveBayesModel; +import org.apache.ignite.ml.naivebayes.gaussian.GaussianNaiveBayesModel; +import org.junit.Test; + +import static java.util.Arrays.asList; +import static org.apache.ignite.ml.naivebayes.compound.Data.LABEL_2; +import static org.apache.ignite.ml.naivebayes.compound.Data.binarizedDataThresholds; +import static org.apache.ignite.ml.naivebayes.compound.Data.classProbabilities; +import static org.apache.ignite.ml.naivebayes.compound.Data.labels; +import static org.apache.ignite.ml.naivebayes.compound.Data.means; +import static org.apache.ignite.ml.naivebayes.compound.Data.probabilities; +import static org.apache.ignite.ml.naivebayes.compound.Data.variances; +import static org.junit.Assert.assertEquals; + +/** Tests for {@link CompoundNaiveBayesModel} */ +public class CompoundNaiveBayesModelTest { + /** Precision in test checks. */ + private static final double PRECISION = 1e-2; + + /** Test. */ + @Test + public void testPredictOnlyGauss() { + GaussianNaiveBayesModel gaussianModel = + new GaussianNaiveBayesModel(means, variances, classProbabilities, labels, null); + + Vector observation = VectorUtils.of(6, 130, 8); + + CompoundNaiveBayesModel model = new CompoundNaiveBayesModel() + .withPriorProbabilities(classProbabilities) + .withLabels(labels) + .withGaussianModel(gaussianModel); + + assertEquals(LABEL_2, model.predict(observation), PRECISION); + } + + /** Test. */ + @Test + public void testPredictOnlyDiscrete() { + DiscreteNaiveBayesModel discreteModel = + new DiscreteNaiveBayesModel(probabilities, classProbabilities, labels, binarizedDataThresholds, null); + + Vector observation = VectorUtils.of(1, 0, 1, 1, 0); + + CompoundNaiveBayesModel model = new CompoundNaiveBayesModel() + .withPriorProbabilities(classProbabilities) + .withLabels(labels) + .withDiscreteModel(discreteModel); + + assertEquals(LABEL_2, model.predict(observation), PRECISION); + } + + /** Test. 
*/ + @Test + public void testPredictGaussAndDiscrete() { + DiscreteNaiveBayesModel discreteMdl = + new DiscreteNaiveBayesModel(probabilities, classProbabilities, labels, binarizedDataThresholds, null); + + GaussianNaiveBayesModel gaussianMdl = + new GaussianNaiveBayesModel(means, variances, classProbabilities, labels, null); + + CompoundNaiveBayesModel mdl = new CompoundNaiveBayesModel() + .withPriorProbabilities(classProbabilities) + .withLabels(labels) + .withGaussianModel(gaussianMdl) + .withGaussianFeatureIdsToSkip(asList(3, 4, 5, 6, 7)) + .withDiscreteModel(discreteMdl) + .withDiscreteFeatureIdsToSkip( asList(0, 1, 2)); + + Vector observation = VectorUtils.of(6, 130, 8, 1, 0, 1, 1, 0); + + assertEquals(LABEL_2, mdl.predict(observation), PRECISION); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/CompoundNaiveBayesTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/CompoundNaiveBayesTest.java new file mode 100644 index 0000000000000..596a5a5347ba5 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/CompoundNaiveBayesTest.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.naivebayes.compound; + +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.naivebayes.discrete.DiscreteNaiveBayesTrainer; +import org.apache.ignite.ml.naivebayes.gaussian.GaussianNaiveBayesTrainer; +import org.junit.Test; + +import static java.util.Arrays.asList; +import static org.apache.ignite.ml.naivebayes.compound.Data.LABEL_1; +import static org.apache.ignite.ml.naivebayes.compound.Data.LABEL_2; +import static org.apache.ignite.ml.naivebayes.compound.Data.binarizedDataThresholds; +import static org.apache.ignite.ml.naivebayes.compound.Data.classProbabilities; +import static org.apache.ignite.ml.naivebayes.compound.Data.data; +import static org.junit.Assert.assertEquals; + +/** Integration tests for Compound naive Bayes algorithm with different datasets. */ +public class CompoundNaiveBayesTest { + /** Precision in test checks. */ + private static final double PRECISION = 1e-2; + + /** Test. 
*/ + @Test + public void testLearnsAndPredictCorrectly() { + CompoundNaiveBayesTrainer trainer = new CompoundNaiveBayesTrainer() + .withPriorProbabilities(classProbabilities) + .withGaussianNaiveBayesTrainer(new GaussianNaiveBayesTrainer()) + .withGaussianFeatureIdsToSkip(asList(3, 4, 5, 6, 7)) + .withDiscreteNaiveBayesTrainer(new DiscreteNaiveBayesTrainer() + .setBucketThresholds(binarizedDataThresholds)) + .withDiscreteFeatureIdsToSkip(asList(0, 1, 2)); + + CompoundNaiveBayesModel mdl = trainer.fit( + new LocalDatasetBuilder<>(data, 2), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + Vector observation1 = VectorUtils.of(5.92, 165, 10, 1, 1, 0, 0, 0); + assertEquals(LABEL_1, mdl.predict(observation1), PRECISION); + + Vector observation2 = VectorUtils.of(6, 130, 8, 1, 0, 1, 1, 0); + assertEquals(LABEL_2, mdl.predict(observation2), PRECISION); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/CompoundNaiveBayesTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/CompoundNaiveBayesTrainerTest.java new file mode 100644 index 0000000000000..867cf8da64bf4 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/CompoundNaiveBayesTrainerTest.java @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.naivebayes.compound; + +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.naivebayes.discrete.DiscreteNaiveBayesModel; +import org.apache.ignite.ml.naivebayes.discrete.DiscreteNaiveBayesTrainer; +import org.apache.ignite.ml.naivebayes.gaussian.GaussianNaiveBayesModel; +import org.apache.ignite.ml.naivebayes.gaussian.GaussianNaiveBayesTrainer; +import org.junit.Before; +import org.junit.Test; + +import static java.util.Arrays.asList; +import static org.apache.ignite.ml.naivebayes.compound.Data.binarizedDataThresholds; +import static org.apache.ignite.ml.naivebayes.compound.Data.classProbabilities; +import static org.apache.ignite.ml.naivebayes.compound.Data.data; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +/** Test for {@link CompoundNaiveBayesTrainer} */ +public class CompoundNaiveBayesTrainerTest extends TrainerTest { + /** Precision in test checks. */ + private static final double PRECISION = 1e-2; + + /** Trainer under test. 
*/ + private CompoundNaiveBayesTrainer trainer; + + /** Initialization {@code CompoundNaiveBayesTrainer}. */ + @Before + public void createTrainer() { + trainer = new CompoundNaiveBayesTrainer() + .withPriorProbabilities(classProbabilities) + .withGaussianNaiveBayesTrainer(new GaussianNaiveBayesTrainer()) + .withGaussianFeatureIdsToSkip(asList(3, 4, 5, 6, 7)) + .withDiscreteNaiveBayesTrainer(new DiscreteNaiveBayesTrainer() + .setBucketThresholds(binarizedDataThresholds)) + .withDiscreteFeatureIdsToSkip(asList(0, 1, 2)); + } + + /** Test. */ + @Test + public void test() { + CompoundNaiveBayesModel model = trainer.fit( + new LocalDatasetBuilder<>(data, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + assertDiscreteModel(model.getDiscreteModel()); + assertGaussianModel(model.getGaussianModel()); + } + + /** Discrete model assertions. */ + private void assertDiscreteModel(DiscreteNaiveBayesModel model) { + double[][][] expectedProbabilities = new double[][][] { + { + {.25, .75}, + {.5, .5}, + {.5, .5}, + {.5, .5}, + {.5, .5} + }, + { + {.0, 1}, + {.25, .75}, + {.75, .25}, + {.25, .75}, + {.5, .5} + } + }; + + for (int i = 0; i < expectedProbabilities.length; i++) { + for (int j = 0; j < expectedProbabilities[i].length; j++) + assertArrayEquals(expectedProbabilities[i][j], model.getProbabilities()[i][j], PRECISION); + } + assertArrayEquals(new double[] {.5, .5}, model.getClsProbabilities(), PRECISION); + } + + /** Gaussian model assertions. */ + private void assertGaussianModel(GaussianNaiveBayesModel model) { + double[] priorProbabilities = new double[] {.5, .5}; + + assertEquals(priorProbabilities[0], model.getClassProbabilities()[0], PRECISION); + assertEquals(priorProbabilities[1], model.getClassProbabilities()[1], PRECISION); + assertArrayEquals(new double[] {5.855, 176.25, 11.25}, model.getMeans()[0], PRECISION); + assertArrayEquals(new double[] {5.4175, 132.5, 7.5}, model.getMeans()[1], PRECISION); + double[] expectedVars = {0.026274999999999, 92.1875, 0.6875}; + assertArrayEquals(expectedVars, model.getVariances()[0], PRECISION); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/Data.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/Data.java new file mode 100644 index 0000000000000..ab41b543577fc --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/compound/Data.java @@ -0,0 +1,87 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.naivebayes.compound; + +import java.util.HashMap; +import java.util.Map; + +/** Data class which contains test data with precalculated statistics. 
*/ +final class Data { + /** Private constructor. */ + private Data() { + } + + /** The first label. */ + static final double LABEL_1 = 1.; + + /** The second label. */ + static final double LABEL_2 = 2.; + + /** Labels. */ + static final double[] labels = {LABEL_1, LABEL_2}; + + /** */ + static final Map data = new HashMap<>(); + + /** Means for gaussian data part. */ + static double[][] means; + + /** Variances for gaussian data part. */ + static double[][] variances; + + /** */ + static double[] classProbabilities; + + /** Thresholds to binarize discrete data. */ + static double[][] binarizedDataThresholds; + + /** Discrete probabilities. */ + static double[][][] probabilities; + + static { + data.put(0, new double[] {6, 180, 12, 0, 0, 1, 1, 1, LABEL_1}); + data.put(1, new double[] {5.92, 190, 11, 1, 0, 1, 1, 0, LABEL_1}); + data.put(2, new double[] {5.58, 170, 12, 1, 1, 0, 0, 1, LABEL_1}); + data.put(3, new double[] {5.92, 165, 10, 1, 1, 0, 0, 0, LABEL_1}); + + data.put(4, new double[] {5, 100, 6, 1, 0, 0, 1, 1, LABEL_2}); + data.put(5, new double[] {5.5, 150, 8, 1, 1, 0, 0, 1, LABEL_2}); + data.put(6, new double[] {5.42, 130, 7, 1, 1, 1, 1, 0, LABEL_2}); + data.put(7, new double[] {5.75, 150, 9, 1, 1, 0, 1, 0, LABEL_2}); + + classProbabilities = new double[] {.5, .5}; + + means = new double[][] { + {5.855, 176.25, 11.25}, + {5.4175, 132.5, 7.5}, + }; + + variances = new double[][] { + {3.5033E-2, 1.2292E2, 9.1667E-1}, + {9.7225E-2, 5.5833E2, 1.6667}, + }; + + binarizedDataThresholds = new double[][] {{.5}, {.5}, {.5}, {.5}, {.5}}; + + probabilities = new double[][][] { + {{.25, .75}, {.25, .75}, {.5, .5}, {.5, .5}, {.5, .5}}, + {{0, 1}, {.25, .75}, {.75, .25}, {.25, .75}, {.5, .5}} + }; + } + +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/discrete/DiscreteNaiveBayesModelTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/discrete/DiscreteNaiveBayesModelTest.java new file mode 100644 index 0000000000000..7d6d4940e022b --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/discrete/DiscreteNaiveBayesModelTest.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.naivebayes.discrete; + +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Assert; +import org.junit.Test; + +/** Tests for {@code DiscreteNaiveBayesModel} */ +public class DiscreteNaiveBayesModelTest { + /** Test. */ + @Test + public void testPredictWithTwoClasses() { + double first = 1; + double second = 2; + double[][][] probabilities = new double[][][] { + {{.5, .5}, {.2, .3, .5}, {2. / 3., 1. 
/ 3.}, {.4, .1, .5}, {.5, .5}}, + {{0, 1}, {1. / 7, 2. / 7, 4. / 7}, {4. / 7, 3. / 7}, {2. / 7, 3. / 7, 2. / 7}, {4. / 7, 3. / 7}} + }; + + double[] classProbabilities = new double[] {6. / 13, 7. / 13}; + double[][] thresholds = new double[][] {{.5}, {.2, .7}, {.5}, {.5, 1.5}, {.5}}; + DiscreteNaiveBayesModel mdl = new DiscreteNaiveBayesModel(probabilities, classProbabilities, + new double[] {first, second}, thresholds, new DiscreteNaiveBayesSumsHolder()); + Vector observation = VectorUtils.of(2, 0, 1, 2, 0); + + Assert.assertEquals(second, mdl.predict(observation), 0.0001); + } + +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/discrete/DiscreteNaiveBayesTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/discrete/DiscreteNaiveBayesTest.java new file mode 100644 index 0000000000000..8ca299851049b --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/discrete/DiscreteNaiveBayesTest.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.naivebayes.discrete; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Assert; +import org.junit.Test; + +/** + * Integration tests for Bernoulli naive Bayes algorithm with different datasets. + */ +public class DiscreteNaiveBayesTest { + /** Precision in test checks. */ + private static final double PRECISION = 1e-2; + + /** Example from book Barber D. Bayesian reasoning and machine learning. Chapter 10. 
*/ + @Test + public void testLearnsAndPredictCorrently() { + double english = 1.; + double scottish = 2.; + + Map data = new HashMap<>(); + data.put(0, new double[] {0, 0, 1, 1, 1, english}); + data.put(1, new double[] {1, 0, 1, 1, 0, english}); + data.put(2, new double[] {1, 1, 0, 0, 1, english}); + data.put(3, new double[] {1, 1, 0, 0, 0, english}); + data.put(4, new double[] {0, 1, 0, 0, 1, english}); + data.put(5, new double[] {0, 0, 0, 1, 0, english}); + data.put(6, new double[] {1, 0, 0, 1, 1, scottish}); + data.put(7, new double[] {1, 1, 0, 0, 1, scottish}); + data.put(8, new double[] {1, 1, 1, 1, 0, scottish}); + data.put(9, new double[] {1, 1, 0, 1, 0, scottish}); + data.put(10, new double[] {1, 1, 0, 1, 1, scottish}); + data.put(11, new double[] {1, 0, 1, 1, 0, scottish}); + data.put(12, new double[] {1, 0, 1, 0, 0, scottish}); + double[][] thresholds = new double[][] {{.5}, {.5}, {.5}, {.5}, {.5}}; + + DiscreteNaiveBayesTrainer trainer = new DiscreteNaiveBayesTrainer().setBucketThresholds(thresholds); + + DiscreteNaiveBayesModel mdl = trainer.fit( + new LocalDatasetBuilder<>(data, 2), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + Vector observation = VectorUtils.of(1, 0, 1, 1, 0); + + Assert.assertEquals(scottish, mdl.predict(observation), PRECISION); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/discrete/DiscreteNaiveBayesTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/discrete/DiscreteNaiveBayesTrainerTest.java new file mode 100644 index 0000000000000..89e18b78e4022 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/discrete/DiscreteNaiveBayesTrainerTest.java @@ -0,0 +1,183 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.ignite.ml.naivebayes.discrete; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +/** Test for {@link DiscreteNaiveBayesTrainer} */ +public class DiscreteNaiveBayesTrainerTest extends TrainerTest { + /** Precision in test checks. */ + private static final double PRECISION = 1e-2; + + /** */ + private static final double LABEL_1 = 1.; + + /** */ + private static final double LABEL_2 = 2.; + + /** Binary data. */ + private static final Map binarizedData = new HashMap<>(); + + /** Data. 
*/ + private static final Map data = new HashMap<>(); + + /** */ + private static final double[][] binarizedDatathresholds = new double[][] {{.5}, {.5}, {.5}, {.5}, {.5}}; + + /** */ + private static final double[][] thresholds = new double[][] {{4, 8}, {.5}, {.3, .4, .5}, {250, 500, 750}}; + + static { + binarizedData.put(0, new double[] {0, 0, 1, 1, 1, LABEL_1}); + binarizedData.put(1, new double[] {1, 0, 1, 1, 0, LABEL_1}); + binarizedData.put(2, new double[] {1, 1, 0, 0, 1, LABEL_1}); + binarizedData.put(3, new double[] {1, 1, 0, 0, 0, LABEL_1}); + binarizedData.put(4, new double[] {0, 1, 0, 0, 1, LABEL_1}); + binarizedData.put(5, new double[] {0, 0, 0, 1, 0, LABEL_1}); + + binarizedData.put(6, new double[] {1, 0, 0, 1, 1, LABEL_2}); + binarizedData.put(7, new double[] {1, 1, 0, 0, 1, LABEL_2}); + binarizedData.put(8, new double[] {1, 1, 1, 1, 0, LABEL_2}); + binarizedData.put(9, new double[] {1, 1, 0, 1, 0, LABEL_2}); + binarizedData.put(10, new double[] {1, 1, 0, 1, 1, LABEL_2}); + binarizedData.put(11, new double[] {1, 0, 1, 1, 0, LABEL_2}); + binarizedData.put(12, new double[] {1, 0, 1, 0, 0, LABEL_2}); + + data.put(0, new double[] {2, 0, .34, 123, LABEL_1}); + data.put(1, new double[] {8, 0, .37, 561, LABEL_1}); + data.put(2, new double[] {5, 1, .01, 678, LABEL_1}); + data.put(3, new double[] {2, 1, .32, 453, LABEL_1}); + data.put(4, new double[] {7, 1, .67, 980, LABEL_1}); + data.put(5, new double[] {2, 1, .69, 912, LABEL_1}); + data.put(6, new double[] {8, 0, .43, 453, LABEL_1}); + data.put(7, new double[] {2, 0, .45, 752, LABEL_1}); + data.put(8, new double[] {7, 1, .01, 132, LABEL_2}); + data.put(9, new double[] {2, 1, .68, 169, LABEL_2}); + data.put(10, new double[] {8, 0, .43, 453, LABEL_2}); + data.put(11, new double[] {2, 1, .45, 748, LABEL_2}); + } + + /** Trainer under test. */ + private DiscreteNaiveBayesTrainer trainer; + + /** Initialization {@code DiscreteNaiveBayesTrainer}. */ + @Before + public void createTrainer() { + trainer = new DiscreteNaiveBayesTrainer().setBucketThresholds(binarizedDatathresholds); + } + + /** Test. */ + @Test + public void testReturnsCorrectLabelProbalities() { + + DiscreteNaiveBayesModel mdl = trainer.fit( + new LocalDatasetBuilder<>(binarizedData, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + double[] expectedProbabilities = {6. / binarizedData.size(), 7. / binarizedData.size()}; + Assert.assertArrayEquals(expectedProbabilities, mdl.getClsProbabilities(), PRECISION); + } + + /** Test. */ + @Test + public void testReturnsEquivalentProbalitiesWhenSetEquiprobableClasses_() { + DiscreteNaiveBayesTrainer trainer = new DiscreteNaiveBayesTrainer() + .setBucketThresholds(binarizedDatathresholds) + .withEquiprobableClasses(); + + DiscreteNaiveBayesModel mdl = trainer.fit( + new LocalDatasetBuilder<>(binarizedData, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + Assert.assertArrayEquals(new double[] {.5, .5}, mdl.getClsProbabilities(), PRECISION); + } + + /** Test. 
*/ + @Test + public void testReturnsPresetProbalitiesWhenSetPriorProbabilities() { + double[] priorProbabilities = new double[] {.35, .65}; + DiscreteNaiveBayesTrainer trainer = new DiscreteNaiveBayesTrainer() + .setBucketThresholds(binarizedDatathresholds) + .setPriorProbabilities(priorProbabilities); + + DiscreteNaiveBayesModel mdl = trainer.fit( + new LocalDatasetBuilder<>(binarizedData, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + Assert.assertArrayEquals(priorProbabilities, mdl.getClsProbabilities(), PRECISION); + } + + /** Test. */ + @Test + public void testReturnsCorrectPriorProbabilities() { + double[][][] expectedPriorProbabilites = new double[][][] { + {{.5, .5}, {.5, .5}, {2. / 3., 1. / 3.}, {.5, .5}, {.5, .5}}, + {{0, 1}, {3. / 7, 4. / 7}, {4. / 7, 3. / 7}, {2. / 7, 5. / 7}, {4. / 7, 3. / 7}} + }; + + DiscreteNaiveBayesModel mdl = trainer.fit( + new LocalDatasetBuilder<>(binarizedData, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + for (int i = 0; i < expectedPriorProbabilites.length; i++) { + for (int j = 0; j < expectedPriorProbabilites[i].length; j++) + Assert.assertArrayEquals(expectedPriorProbabilites[i][j], mdl.getProbabilities()[i][j], PRECISION); + } + } + + /** Test. */ + @Test + public void testReturnsCorrectPriorProbabilitiesWithDefferentThresholds() { + double[][][] expectedPriorProbabilites = new double[][][] { + { + {4. / 8, 2. / 8, 2. / 8}, + {.5, .5}, + {1. / 8, 3. / 8, 2. / 8, 2. / 8}, + {1. / 8, 2. / 8, 2. / 8, 3. / 8}}, + { + {2. / 4, 1. / 4, 1. / 4}, + {1. / 4, 3. / 4}, + {1. / 4, 0, 2. / 4, 1. / 4}, + {2. / 4, 1. / 4, 1. / 4, 0}} + }; + + DiscreteNaiveBayesModel mdl = trainer + .setBucketThresholds(thresholds) + .fit( + new LocalDatasetBuilder<>(data, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + for (int i = 0; i < expectedPriorProbabilites.length; i++) { + for (int j = 0; j < expectedPriorProbabilites[i].length; j++) + Assert.assertArrayEquals(expectedPriorProbabilites[i][j], mdl.getProbabilities()[i][j], PRECISION); + } + } + +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/gaussian/GaussianNaiveBayesModelTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/gaussian/GaussianNaiveBayesModelTest.java new file mode 100644 index 0000000000000..2117eb8f1ac5f --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/gaussian/GaussianNaiveBayesModelTest.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.naivebayes.gaussian; + +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Assert; +import org.junit.Test; + +/** + * Tests for {@link GaussianNaiveBayesModel}. + */ +public class GaussianNaiveBayesModelTest { + /** Test. */ + @Test + public void testPredictWithTwoClasses() { + double first = 1; + double second = 2; + double[][] means = new double[][] { + {5.855, 176.25, 11.25}, + {5.4175, 132.5, 7.5}, + }; + double[][] variances = new double[][] { + {3.5033E-2, 1.2292E2, 9.1667E-1}, + {9.7225E-2, 5.5833E2, 1.6667}, + }; + double[] probabilities = new double[] {.5, .5}; + GaussianNaiveBayesModel mdl = new GaussianNaiveBayesModel(means, variances, probabilities, new double[] {first, second}, null); + Vector observation = VectorUtils.of(6, 130, 8); + + Assert.assertEquals(second, mdl.predict(observation), 0.0001); + } + +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/gaussian/GaussianNaiveBayesTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/gaussian/GaussianNaiveBayesTest.java new file mode 100644 index 0000000000000..d46874035078e --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/gaussian/GaussianNaiveBayesTest.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.naivebayes.gaussian; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Assert; +import org.junit.Test; + +/** + * Complex tests for naive Bayes algorithm with different datasets. + */ +public class GaussianNaiveBayesTest { + /** Precision in test checks. 
*/ + private static final double PRECISION = 1e-2; + + /** + * An example data set from wikipedia article about Naive Bayes https://en.wikipedia.org/wiki/Naive_Bayes_classifier#Sex_classification + */ + @Test + public void wikipediaSexClassificationDataset() { + Map data = new HashMap<>(); + double male = 0.; + double female = 1.; + data.put(0, new double[] {male, 6, 180, 12}); + data.put(2, new double[] {male, 5.92, 190, 11}); + data.put(3, new double[] {male, 5.58, 170, 12}); + data.put(4, new double[] {male, 5.92, 165, 10}); + data.put(5, new double[] {female, 5, 100, 6}); + data.put(6, new double[] {female, 5.5, 150, 8}); + data.put(7, new double[] {female, 5.42, 130, 7}); + data.put(8, new double[] {female, 5.75, 150, 9}); + GaussianNaiveBayesTrainer trainer = new GaussianNaiveBayesTrainer(); + GaussianNaiveBayesModel model = trainer.fit( + new LocalDatasetBuilder<>(data, 2), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST) + ); + Vector observation = VectorUtils.of(6, 130, 8); + + Assert.assertEquals(female, model.predict(observation), PRECISION); + } + + /** Dataset from Gaussian NB example in the scikit-learn documentation */ + @Test + public void scikitLearnExample() { + Map data = new HashMap<>(); + double one = 1.; + double two = 2.; + data.put(0, new double[] {one, -1, 1}); + data.put(2, new double[] {one, -2, -1}); + data.put(3, new double[] {one, -3, -2}); + data.put(4, new double[] {two, 1, 1}); + data.put(5, new double[] {two, 2, 1}); + data.put(6, new double[] {two, 3, 2}); + GaussianNaiveBayesTrainer trainer = new GaussianNaiveBayesTrainer(); + GaussianNaiveBayesModel model = trainer.fit( + new LocalDatasetBuilder<>(data, 2), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST) + ); + Vector observation = VectorUtils.of(-0.8, -1); + + Assert.assertEquals(one, model.predict(observation), PRECISION); + } + +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/gaussian/GaussianNaiveBayesTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/gaussian/GaussianNaiveBayesTrainerTest.java new file mode 100644 index 0000000000000..2d13a72835f0c --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/naivebayes/gaussian/GaussianNaiveBayesTrainerTest.java @@ -0,0 +1,183 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.naivebayes.gaussian; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +/** + * Tests for {@link GaussianNaiveBayesTrainer}. + */ +public class GaussianNaiveBayesTrainerTest extends TrainerTest { + /** Precision in test checks. */ + private static final double PRECISION = 1e-2; + + /** Label. */ + private static final double LABEL_1 = 1.; + + /** Label. */ + private static final double LABEL_2 = 2.; + + /** Data. */ + private static final Map data = new HashMap<>(); + + /** {@code LABEL_1} data. */ + private static final Map singleLabeldata1 = new HashMap<>(); + + /** {@code LABEL_2} data. */ + private static final Map singleLabeldata2 = new HashMap<>(); + + static { + data.put(0, new double[] {1.0, -1.0, LABEL_1}); + data.put(1, new double[] {-1.0, 2.0, LABEL_1}); + data.put(2, new double[] {6.0, 1.0, LABEL_1}); + data.put(3, new double[] {-3.0, 2.0, LABEL_2}); + data.put(4, new double[] {-5.0, -2.0, LABEL_2}); + + singleLabeldata1.put(0, new double[] {1.0, -1.0, LABEL_1}); + singleLabeldata1.put(1, new double[] {-1.0, 2.0, LABEL_1}); + singleLabeldata1.put(2, new double[] {6.0, 1.0, LABEL_1}); + + singleLabeldata2.put(0, new double[] {-3.0, 2.0, LABEL_2}); + singleLabeldata2.put(1, new double[] {-5.0, -2.0, LABEL_2}); + } + + /** Trainer. */ + private GaussianNaiveBayesTrainer trainer; + + /** Initialization {@code GaussianNaiveBayesTrainer}.*/ + @Before + public void createTrainer() { + trainer = new GaussianNaiveBayesTrainer(); + } + + /** Test. */ + @Test + public void testWithLinearlySeparableData() { + Map cacheMock = new HashMap<>(); + for (int i = 0; i < twoLinearlySeparableClasses.length; i++) + cacheMock.put(i, twoLinearlySeparableClasses[i]); + + GaussianNaiveBayesModel mdl = trainer.fit( + cacheMock, + parts, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST) + ); + + TestUtils.assertEquals(0, mdl.predict(VectorUtils.of(100, 10)), PRECISION); + TestUtils.assertEquals(1, mdl.predict(VectorUtils.of(10, 100)), PRECISION); + } + + /** Test. */ + @Test + public void testReturnsCorrectLabelProbabilities() { + + GaussianNaiveBayesModel mdl = trainer.fit( + new LocalDatasetBuilder<>(data, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + Assert.assertEquals(3. / data.size(), mdl.getClassProbabilities()[0], PRECISION); + Assert.assertEquals(2. / data.size(), mdl.getClassProbabilities()[1], PRECISION); + } + + /** Test. */ + @Test + public void testReturnsEquivalentProbabilitiesWhenSetEquitableClasses_() { + GaussianNaiveBayesTrainer trainer = new GaussianNaiveBayesTrainer() + .withEquiprobableClasses(); + + GaussianNaiveBayesModel model = trainer.fit( + new LocalDatasetBuilder<>(data, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + Assert.assertEquals(.5, model.getClassProbabilities()[0], PRECISION); + Assert.assertEquals(.5, model.getClassProbabilities()[1], PRECISION); + } + + /** Test. 
*/ + @Test + public void testReturnsPresetProbabilitiesWhenSetPriorProbabilities() { + double[] priorProbabilities = new double[] {.35, .65}; + GaussianNaiveBayesTrainer trainer = new GaussianNaiveBayesTrainer() + .setPriorProbabilities(priorProbabilities); + + GaussianNaiveBayesModel mdl = trainer.fit( + new LocalDatasetBuilder<>(data, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + Assert.assertEquals(priorProbabilities[0], mdl.getClassProbabilities()[0], PRECISION); + Assert.assertEquals(priorProbabilities[1], mdl.getClassProbabilities()[1], PRECISION); + } + + /** Test. */ + @Test + public void testReturnsCorrectMeans() { + + GaussianNaiveBayesModel model = trainer.fit( + new LocalDatasetBuilder<>(singleLabeldata1, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + Assert.assertArrayEquals(new double[] {2.0, 2. / 3.}, model.getMeans()[0], PRECISION); + } + + /** Test. */ + @Test + public void testReturnsCorrectVariances() { + + GaussianNaiveBayesModel model = trainer.fit( + new LocalDatasetBuilder<>(singleLabeldata1, parts), + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + double[] expectedVars = {8.666666666666666, 1.5555555555555556}; + Assert.assertArrayEquals(expectedVars, model.getVariances()[0], PRECISION); + } + + /** Test. */ + @Test + public void testUpdatingModel() { + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST); + + GaussianNaiveBayesModel mdl = trainer.fit( + new LocalDatasetBuilder<>(singleLabeldata1, parts), + vectorizer + ); + + GaussianNaiveBayesModel updatedMdl = trainer.updateModel(mdl, + new LocalDatasetBuilder<>(singleLabeldata2, parts), + vectorizer + ); + + Assert.assertEquals(3. / data.size(), updatedMdl.getClassProbabilities()[0], PRECISION); + Assert.assertEquals(2. / data.size(), updatedMdl.getClassProbabilities()[1], PRECISION); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/LossFunctionsTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/LossFunctionsTest.java new file mode 100644 index 0000000000000..bef05ec65a290 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/LossFunctionsTest.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.nn; + +import org.apache.ignite.ml.math.functions.IgniteDifferentiableVectorToDoubleFunction; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.optimization.LossFunctions; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertNotNull; + +/** + * Tests for {@link LossFunctions}. + */ +public class LossFunctionsTest { + /** */ + @Test + public void testMSE() { + IgniteDifferentiableVectorToDoubleFunction f = LossFunctions.MSE.apply(new DenseVector(new double[] {2.0, 1.0})); + + assertNotNull(f); + + test(new double[] {1.0, 3.0}, f); + } + + /** */ + @Test + public void testLOG() { + IgniteDifferentiableVectorToDoubleFunction f = LossFunctions.LOG.apply(new DenseVector(new double[] {2.0, 1.0})); + + assertNotNull(f); + + test(new double[] {1.0, 3.0}, f); + } + + /** */ + @Test + public void testL2() { + IgniteDifferentiableVectorToDoubleFunction f = LossFunctions.L2.apply(new DenseVector(new double[] {2.0, 1.0})); + + assertNotNull(f); + + test(new double[] {1.0, 3.0}, f); + } + + /** */ + @Test + public void testL1() { + IgniteDifferentiableVectorToDoubleFunction f = LossFunctions.L1.apply(new DenseVector(new double[] {2.0, 1.0})); + + assertNotNull(f); + + test(new double[] {1.0, 3.0}, f); + } + + /** */ + @Test + public void testHINGE() { + IgniteDifferentiableVectorToDoubleFunction f = LossFunctions.HINGE.apply(new DenseVector(new double[] {2.0, 1.0})); + + assertNotNull(f); + + test(new double[] {1.0, 3.0}, f); + } + + /** */ + private void test(double[] expData, IgniteDifferentiableVectorToDoubleFunction f) { + verify(expData, f.differential(new DenseVector(new double[] {3.0, 4.0}))); + } + + /** */ + private void verify(double[] expData, Vector actual) { + assertArrayEquals(expData, new DenseVector(actual.size()).assign(actual).getStorage().data(), 0); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPConstInitializer.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPConstInitializer.java new file mode 100644 index 0000000000000..729fc3080a90c --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPConstInitializer.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.nn; + +import org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.nn.initializers.MLPInitializer; + +/** + * Initialize weights and biases with specified constant. 
+ */ +class MLPConstInitializer implements MLPInitializer { + /** + * Constant to be used as bias for all layers. + */ + private final double bias; + + /** + * Constant to be used as weight from any neuron to any neuron in next layer. + */ + private final double weight; + + /** + * Construct MLPConstInitializer. + * + * @param weight Constant to be used as weight from any neuron to any neuron in next layer. + * @param bias Constant to be used as bias for all layers. + */ + MLPConstInitializer(double weight, double bias) { + this.bias = bias; + this.weight = weight; + } + + /** + * Construct MLPConstInitializer with biases constant equal to 0.0. + * + * @param weight Constant to be used as weight from any neuron to any neuron in next layer. + */ + MLPConstInitializer(double weight) { + this(weight, 0.0); + } + + /** {@inheritDoc} */ + @Override public void initWeights(Matrix weights) { + weights.assign(weight); + } + + /** {@inheritDoc} */ + @Override public void initBiases(Vector biases) { + biases.assign(bias); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTest.java new file mode 100644 index 0000000000000..1bb3b62bebbba --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTest.java @@ -0,0 +1,265 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.nn; + +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.math.Tracer; +import org.apache.ignite.ml.math.functions.IgniteTriFunction; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.nn.architecture.MLPArchitecture; +import org.apache.ignite.ml.optimization.LossFunctions; +import org.junit.Assert; +import org.junit.Test; + +/** + * Tests for {@link MultilayerPerceptron}. + */ +public class MLPTest { + /** + * Tests that MLP with 2 layer, 1 neuron in each layer and weight equal to 1 is equivalent to sigmoid function. 
+ */ + @Test + public void testSimpleMLPPrediction() { + MLPArchitecture conf = new MLPArchitecture(1).withAddedLayer(1, false, Activators.SIGMOID); + + MultilayerPerceptron mlp = new MultilayerPerceptron(conf, new MLPConstInitializer(1)); + + int input = 2; + + Matrix predict = mlp.predict(new DenseMatrix(new double[][] {{input}})); + + Assert.assertEquals(predict, new DenseMatrix(new double[][] {{Activators.SIGMOID.apply(input)}})); + } + + /** + * Test that MLP with parameters that should produce function close to 'XOR' is close to 'XOR' on 'XOR' domain. + */ + @Test + public void testXOR() { + MLPArchitecture conf = new MLPArchitecture(2). + withAddedLayer(2, true, Activators.SIGMOID). + withAddedLayer(1, true, Activators.SIGMOID); + + MultilayerPerceptron mlp1 = new MultilayerPerceptron(conf, new MLPConstInitializer(1, 2)); + + mlp1.setWeights(1, new DenseMatrix(new double[][] {{20.0, 20.0}, {-20.0, -20.0}})); + mlp1.setBiases(1, new DenseVector(new double[] {-10.0, 30.0})); + + MultilayerPerceptron mlp2 = mlp1.setWeights(2, new DenseMatrix(new double[][] {{20.0, 20.0}})); + MultilayerPerceptron mlp = mlp2.setBiases(2, new DenseVector(new double[] {-30.0})); + + Matrix input = new DenseMatrix(new double[][] {{0.0, 0.0}, {0.0, 1.0}, {1.0, 0.0}, {1.0, 1.0}}); + + Matrix predict = mlp.predict(input); + Matrix truth = new DenseMatrix(new double[][] {{0.0}, {1.0}, {1.0}, {0.0}}); + + TestUtils.checkIsInEpsilonNeighbourhood(predict.getRow(0), truth.getRow(0), 1E-4); + } + + /** + * Test that two layer MLP is equivalent to it's subparts stacked on each other. + */ + @Test + public void testStackedMLP() { + int firstLayerNeuronsCnt = 3; + int secondLayerNeuronsCnt = 2; + MLPConstInitializer initer = new MLPConstInitializer(1, 2); + + MLPArchitecture conf = new MLPArchitecture(4). + withAddedLayer(firstLayerNeuronsCnt, false, Activators.SIGMOID). + withAddedLayer(secondLayerNeuronsCnt, false, Activators.SIGMOID); + + MultilayerPerceptron mlp = new MultilayerPerceptron(conf, initer); + + MLPArchitecture mlpLayer1Conf = new MLPArchitecture(4). + withAddedLayer(firstLayerNeuronsCnt, false, Activators.SIGMOID); + MLPArchitecture mlpLayer2Conf = new MLPArchitecture(firstLayerNeuronsCnt). + withAddedLayer(secondLayerNeuronsCnt, false, Activators.SIGMOID); + + MultilayerPerceptron mlp1 = new MultilayerPerceptron(mlpLayer1Conf, initer); + MultilayerPerceptron mlp2 = new MultilayerPerceptron(mlpLayer2Conf, initer); + + MultilayerPerceptron stackedMLP = mlp1.add(mlp2); + + Matrix predict = mlp.predict(new DenseMatrix(new double[][] {{1}, {2}, {3}, {4}}).transpose()); + Matrix stackedPredict = stackedMLP.predict(new DenseMatrix(new double[][] {{1}, {2}, {3}, {4}}).transpose()); + + Assert.assertEquals(predict, stackedPredict); + } + + /** + * Test three layer MLP. + */ + @Test + public void testStackedTwiceMLP() { + int firstLayerNeuronsCnt = 3; + int secondLayerNeuronsCnt = 2; + int thirdLayerNeuronsCnt = 4; + MLPConstInitializer initer = new MLPConstInitializer(1, 2); + + MLPArchitecture mlpLayer1Conf = new MLPArchitecture(4). + withAddedLayer(firstLayerNeuronsCnt, true, Activators.SIGMOID); + MLPArchitecture mlpLayer2Conf = new MLPArchitecture(firstLayerNeuronsCnt). + withAddedLayer(secondLayerNeuronsCnt, false, Activators.SIGMOID); + MLPArchitecture mlpLayer3Conf = new MLPArchitecture(secondLayerNeuronsCnt). 
+ withAddedLayer(thirdLayerNeuronsCnt, false, Activators.SIGMOID); + + MultilayerPerceptron mlp1 = new MultilayerPerceptron(mlpLayer1Conf, initer); + MultilayerPerceptron mlp2 = new MultilayerPerceptron(mlpLayer2Conf, initer); + MultilayerPerceptron mlp3 = new MultilayerPerceptron(mlpLayer3Conf, initer); + + Assert.assertEquals(1., mlp1.weight(1, 0, 1), 0); + + MultilayerPerceptron stackedMLP = mlp1.add(mlp2).add(mlp3); + + Assert.assertTrue(!stackedMLP.toString().isEmpty()); + Assert.assertTrue(!stackedMLP.toString(true).isEmpty()); + Assert.assertTrue(!stackedMLP.toString(false).isEmpty()); + + Assert.assertEquals(4, stackedMLP.architecture().outputSize()); + Assert.assertEquals(8, stackedMLP.architecture().layersCount()); + } + + /** + * Test parameters count works well. + */ + @Test + public void paramsCountTest() { + int inputSize = 10; + int layerWithBiasNeuronsCnt = 13; + int layerWithoutBiasNeuronsCnt = 17; + + MLPArchitecture conf = new MLPArchitecture(inputSize). + withAddedLayer(layerWithBiasNeuronsCnt, true, Activators.SIGMOID). + withAddedLayer(layerWithoutBiasNeuronsCnt, false, Activators.SIGMOID); + + Assert.assertEquals( + layerWithBiasNeuronsCnt * inputSize + layerWithBiasNeuronsCnt + (layerWithoutBiasNeuronsCnt * layerWithBiasNeuronsCnt), + conf.parametersCount() + ); + } + + /** + * Test methods related to parameters flattening. + */ + @Test + public void setParamsFlattening() { + int inputSize = 3; + int firstLayerNeuronsCnt = 2; + int secondLayerNeurons = 1; + + DenseVector paramsVector = new DenseVector(new double[] { + 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, // First layer weight matrix. + 7.0, 8.0, // Second layer weight matrix. + 9.0 // Second layer biases. + }); + + DenseMatrix firstLayerWeights = new DenseMatrix(new double[][] {{1.0, 2.0, 3.0}, {4.0, 5.0, 6.0}}); + DenseMatrix secondLayerWeights = new DenseMatrix(new double[][] {{7.0, 8.0}}); + DenseVector secondLayerBiases = new DenseVector(new double[] {9.0}); + + MLPArchitecture conf = new MLPArchitecture(inputSize). + withAddedLayer(firstLayerNeuronsCnt, false, Activators.SIGMOID). + withAddedLayer(secondLayerNeurons, true, Activators.SIGMOID); + + MultilayerPerceptron mlp = new MultilayerPerceptron(conf, new MLPConstInitializer(100, 200)); + + mlp.setParameters(paramsVector); + Assert.assertEquals(paramsVector, mlp.parameters()); + + Assert.assertEquals(mlp.weights(1), firstLayerWeights); + Assert.assertEquals(mlp.weights(2), secondLayerWeights); + Assert.assertEquals(mlp.biases(2), secondLayerBiases); + } + + /** + * Test differentiation. + */ + @Test + public void testDifferentiation() { + int inputSize = 2; + int firstLayerNeuronsCnt = 1; + + double w10 = 0.1; + double w11 = 0.2; + + MLPArchitecture conf = new MLPArchitecture(inputSize). + withAddedLayer(firstLayerNeuronsCnt, false, Activators.SIGMOID); + + MultilayerPerceptron mlp1 = new MultilayerPerceptron(conf); + + mlp1.setWeight(1, 0, 0, w10); + MultilayerPerceptron mlp = mlp1.setWeight(1, 1, 0, w11); + double x0 = 1.0; + double x1 = 3.0; + + Matrix inputs = new DenseMatrix(new double[][] {{x0, x1}}).transpose(); + double ytt = 1.0; + Matrix truth = new DenseMatrix(new double[][] {{ytt}}).transpose(); + + Vector grad = mlp.differentiateByParameters(LossFunctions.MSE, inputs, truth); + + // Let yt be y ground truth value. 
+ // d/dw1i [(yt - sigma(w10 * x0 + w11 * x1))^2] = + // 2 * (yt - sigma(w10 * x0 + w11 * x1)) * (-1) * (sigma(w10 * x0 + w11 * x1)) * (1 - sigma(w10 * x0 + w11 * x1)) * xi = + // let z = sigma(w10 * x0 + w11 * x1) + // - 2* (yt - z) * (z) * (1 - z) * xi. + + IgniteTriFunction partialDer = (yt, w, x) -> { + Double z = Activators.SIGMOID.apply(w.dot(x)); + + return x.copy().map(xi -> -2 * (yt - z) * z * (1 - z) * xi); + }; + + Vector weightsVec = mlp.weights(1).getRow(0); + Tracer.showAscii(weightsVec); + + Vector trueGrad = partialDer.andThen(x -> x).apply(ytt, weightsVec, inputs.getCol(0)); + + Tracer.showAscii(trueGrad); + Tracer.showAscii(grad); + + Assert.assertEquals(mlp.architecture().parametersCount(), grad.size()); + Assert.assertEquals(trueGrad, grad); + } + + /** + * Test methods related to per-neuron bias. + */ + @Test + public void testNeuronBias() { + int inputSize = 3; + int firstLayerNeuronsCnt = 2; + int secondLayerNeurons = 1; + + MLPArchitecture conf = new MLPArchitecture(inputSize). + withAddedLayer(firstLayerNeuronsCnt, false, Activators.SIGMOID). + withAddedLayer(secondLayerNeurons, true, Activators.SIGMOID); + + MultilayerPerceptron mlp1 = new MultilayerPerceptron(conf, new MLPConstInitializer(100, 200)); + + MultilayerPerceptron mlp = mlp1.setBias(2, 0, 1.); + Assert.assertEquals(1., mlp.bias(2, 0), 0); + + mlp.setBias(2, 0, 0.5); + Assert.assertEquals(0.5, mlp.bias(2, 0), 0); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTestSuite.java new file mode 100644 index 0000000000000..6f01b7cfe3ab8 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTestSuite.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.nn; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for multilayer perceptrons. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + MLPTest.class, + MLPTrainerTest.class, + LossFunctionsTest.class, + MLPTrainerIntegrationTest.class +}) +public class MLPTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerIntegrationTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerIntegrationTest.java new file mode 100644 index 0000000000000..cf19b0747badb --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerIntegrationTest.java @@ -0,0 +1,158 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.nn; + +import java.io.Serializable; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.internal.util.typedef.X; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorizer; +import org.apache.ignite.ml.math.Tracer; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.nn.architecture.MLPArchitecture; +import org.apache.ignite.ml.optimization.LossFunctions; +import org.apache.ignite.ml.optimization.updatecalculators.NesterovParameterUpdate; +import org.apache.ignite.ml.optimization.updatecalculators.NesterovUpdateCalculator; +import org.apache.ignite.ml.optimization.updatecalculators.RPropParameterUpdate; +import org.apache.ignite.ml.optimization.updatecalculators.RPropUpdateCalculator; +import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDParameterUpdate; +import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator; +import org.apache.ignite.ml.structures.LabeledVector; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.junit.Test; + +/** + * Tests for {@link MLPTrainer} that require to start the whole Ignite infrastructure. + */ +public class MLPTrainerIntegrationTest extends GridCommonAbstractTest { + /** Number of nodes in grid */ + private static final int NODE_COUNT = 3; + + /** Ignite instance. */ + private Ignite ignite; + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + for (int i = 1; i <= NODE_COUNT; i++) + startGrid(i); + } + + /** + * {@inheritDoc} + */ + @Override protected void beforeTest() { + /* Grid instance. */ + ignite = grid(NODE_COUNT); + ignite.configuration().setPeerClassLoadingEnabled(true); + IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); + } + + /** + * Test 'XOR' operation training with {@link SimpleGDUpdateCalculator}. + */ + @Test + public void testXORSimpleGD() { + xorTest(new UpdatesStrategy<>( + new SimpleGDUpdateCalculator(0.3), + SimpleGDParameterUpdate.SUM_LOCAL, + SimpleGDParameterUpdate.AVG + )); + } + + /** + * Test 'XOR' operation training with {@link RPropUpdateCalculator}. 
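+     * (RProp adapts every parameter's step size from the sign of successive gradients,
+     * which is presumably why no explicit learning rate is passed to the calculator below.)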
+ */ + @Test + public void testXORRProp() { + xorTest(new UpdatesStrategy<>( + new RPropUpdateCalculator(), + RPropParameterUpdate.SUM_LOCAL, + RPropParameterUpdate.AVG + )); + } + + /** + * Test 'XOR' operation training with {@link NesterovUpdateCalculator}. + */ + @Test + public void testXORNesterov() { + xorTest(new UpdatesStrategy<>( + new NesterovUpdateCalculator(0.1, 0.7), + NesterovParameterUpdate::sum, + NesterovParameterUpdate::avg + )); + } + + /** + * Common method for testing 'XOR' with various updaters. + * + * @param updatesStgy Update strategy. + * @param

<P> Updater parameters type. + */ + private <P extends Serializable>

void xorTest(UpdatesStrategy<MultilayerPerceptron, P> updatesStgy) { + CacheConfiguration<Integer, LabeledVector<double[]>> xorCacheCfg = new CacheConfiguration<>(); + xorCacheCfg.setName("XorData"); + xorCacheCfg.setAffinity(new RendezvousAffinityFunction(false, 5)); + IgniteCache<Integer, LabeledVector<double[]>> xorCache = ignite.createCache(xorCacheCfg); + + try { + xorCache.put(0, VectorUtils.of(0.0, 0.0).labeled(new double[] {0.0})); + xorCache.put(1, VectorUtils.of(0.0, 1.0).labeled(new double[] {1.0})); + xorCache.put(2, VectorUtils.of(1.0, 0.0).labeled(new double[] {1.0})); + xorCache.put(3, VectorUtils.of(1.0, 1.0).labeled(new double[] {0.0})); + + MLPArchitecture arch = new MLPArchitecture(2). + withAddedLayer(10, true, Activators.RELU). + withAddedLayer(1, false, Activators.SIGMOID); + + MLPTrainer<P>

trainer = new MLPTrainer<>( + arch, + LossFunctions.MSE, + updatesStgy, + 2500, + 4, + 50, + 123L + ); + + MultilayerPerceptron mlp = trainer.fit(ignite, xorCache, new LabeledDummyVectorizer<>()); + Matrix predict = mlp.predict(new DenseMatrix(new double[][] { + {0.0, 0.0}, + {0.0, 1.0}, + {1.0, 0.0}, + {1.0, 1.0} + })); + + Tracer.showAscii(predict); + + X.println(new DenseVector(new double[] {0.0}).minus(predict.getRow(0)).kNorm(2) + ""); + + TestUtils.checkIsInEpsilonNeighbourhood(new DenseVector(new double[] {0.0}), predict.getRow(0), 1E-1); + } + finally { + xorCache.destroy(); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerTest.java new file mode 100644 index 0000000000000..e1185b35aca5f --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerTest.java @@ -0,0 +1,260 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.nn; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorizer; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.nn.architecture.MLPArchitecture; +import org.apache.ignite.ml.optimization.LossFunctions; +import org.apache.ignite.ml.optimization.SmoothParametrized; +import org.apache.ignite.ml.optimization.updatecalculators.NesterovParameterUpdate; +import org.apache.ignite.ml.optimization.updatecalculators.NesterovUpdateCalculator; +import org.apache.ignite.ml.optimization.updatecalculators.RPropParameterUpdate; +import org.apache.ignite.ml.optimization.updatecalculators.RPropUpdateCalculator; +import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDParameterUpdate; +import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator; +import org.apache.ignite.ml.structures.LabeledVector; +import org.junit.Before; +import org.junit.Test; +import org.junit.experimental.runners.Enclosed; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +/** + * Tests for {@link MLPTrainer} that don't require to start the whole Ignite infrastructure. + */ +@RunWith(Enclosed.class) +public class MLPTrainerTest { + /** + * Parameterized tests. 
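+     * Every test in this class is executed once per combination of partition count and
+     * batch size taken from {@code partsToBeTested} and {@code batchSizesToBeTested} below.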
+ */ + @RunWith(Parameterized.class) + public static class ComponentParamTests { + /** Number of parts to be tested. */ + private static final int[] partsToBeTested = new int[] {1, 2, 3, 4, 5, 7}; + + /** Batch sizes to be tested. */ + private static final int[] batchSizesToBeTested = new int[] {1, 2, 3, 4}; + + /** Parameters. */ + @Parameterized.Parameters(name = "Data divided on {0} partitions, training with batch size {1}") + public static Iterable data() { + List res = new ArrayList<>(); + for (int part : partsToBeTested) + for (int batchSize1 : batchSizesToBeTested) + res.add(new Integer[] {part, batchSize1}); + + return res; + } + + /** Number of partitions. */ + @Parameterized.Parameter + public int parts; + + /** Batch size. */ + @Parameterized.Parameter(1) + public int batchSize; + + /** + * Test 'XOR' operation training with {@link SimpleGDUpdateCalculator} updater. + */ + @Test + public void testXORSimpleGD() { + xorTest(new UpdatesStrategy<>( + new SimpleGDUpdateCalculator(0.2), + SimpleGDParameterUpdate.SUM_LOCAL, + SimpleGDParameterUpdate.AVG + )); + } + + /** + * Test 'XOR' operation training with {@link RPropUpdateCalculator}. + */ + @Test + public void testXORRProp() { + xorTest(new UpdatesStrategy<>( + new RPropUpdateCalculator(), + RPropParameterUpdate.SUM_LOCAL, + RPropParameterUpdate.AVG + )); + } + + /** + * Test 'XOR' operation training with {@link NesterovUpdateCalculator}. + */ + @Test + public void testXORNesterov() { + xorTest(new UpdatesStrategy<>( + new NesterovUpdateCalculator(0.1, 0.7), + NesterovParameterUpdate::sum, + NesterovParameterUpdate::avg + )); + } + + /** + * Common method for testing 'XOR' with various updaters. + * @param updatesStgy Update strategy. + * @param

<P> Updater parameters type. + */ + private <P extends Serializable>

void xorTest(UpdatesStrategy<MultilayerPerceptron, P> updatesStgy) { + Map<Integer, LabeledVector<double[]>> xorData = new HashMap<>(); + xorData.put(0, VectorUtils.of(0.0, 0.0).labeled(new double[]{0.0})); + xorData.put(1, VectorUtils.of(0.0, 1.0).labeled(new double[]{1.0})); + xorData.put(2, VectorUtils.of(1.0, 0.0).labeled(new double[]{1.0})); + xorData.put(3, VectorUtils.of(1.0, 1.0).labeled(new double[]{0.0})); + + MLPArchitecture arch = new MLPArchitecture(2). + withAddedLayer(10, true, Activators.RELU). + withAddedLayer(1, false, Activators.SIGMOID); + + MLPTrainer<P>

trainer = new MLPTrainer<>( + arch, + LossFunctions.MSE, + updatesStgy, + 3000, + batchSize, + 50, + 123L + ); + + MultilayerPerceptron mlp = trainer.fit(xorData, parts, new LabeledDummyVectorizer<>()); + + Matrix predict = mlp.predict(new DenseMatrix(new double[][]{ + {0.0, 0.0}, + {0.0, 1.0}, + {1.0, 0.0}, + {1.0, 1.0} + })); + + TestUtils.checkIsInEpsilonNeighbourhood(new DenseVector(new double[]{0.0}), predict.getRow(0), 1E-1); + } + + /** */ + @Test + public void testUpdate() { + UpdatesStrategy updatesStgy = new UpdatesStrategy<>( + new SimpleGDUpdateCalculator(0.2), + SimpleGDParameterUpdate.SUM_LOCAL, + SimpleGDParameterUpdate.AVG + ); + + Map> xorData = new HashMap<>(); + xorData.put(0, VectorUtils.of(0.0, 0.0).labeled(new double[]{0.0})); + xorData.put(1, VectorUtils.of(0.0, 1.0).labeled(new double[]{1.0})); + xorData.put(2, VectorUtils.of(1.0, 0.0).labeled(new double[]{1.0})); + xorData.put(3, VectorUtils.of(1.0, 1.0).labeled(new double[]{0.0})); + + MLPArchitecture arch = new MLPArchitecture(2). + withAddedLayer(10, true, Activators.RELU). + withAddedLayer(1, false, Activators.SIGMOID); + + MLPTrainer trainer = new MLPTrainer<>( + arch, + LossFunctions.MSE, + updatesStgy, + 3000, + batchSize, + 50, + 123L + ); + + MultilayerPerceptron originalMdl = trainer.fit(xorData, parts, new LabeledDummyVectorizer<>()); + + MultilayerPerceptron updatedOnSameDS = trainer.update( + originalMdl, + xorData, + parts, + new LabeledDummyVectorizer<>() + ); + + MultilayerPerceptron updatedOnEmptyDS = trainer.update( + originalMdl, + new HashMap>(), + parts, + new LabeledDummyVectorizer<>() + ); + + DenseMatrix matrix = new DenseMatrix(new double[][] { + {0.0, 0.0}, + {0.0, 1.0}, + {1.0, 0.0}, + {1.0, 1.0} + }); + + TestUtils.checkIsInEpsilonNeighbourhood( + originalMdl.predict(matrix).getRow(0), + updatedOnSameDS.predict(matrix).getRow(0), + 1E-1 + ); + TestUtils.checkIsInEpsilonNeighbourhood( + originalMdl.predict(matrix).getRow(0), + updatedOnEmptyDS.predict(matrix).getRow(0), + 1E-1 + ); + } + } + + /** + * Non-parameterized tests. + */ + public static class ComponentSingleTests { + /** Data. */ + private double[] data; + + /** Initialization. */ + @Before + public void init() { + data = new double[10]; + for (int i = 0; i < 10; i++) + data[i] = i; + } + + /** */ + @Test + public void testBatchWithSingleColumnAndSingleRow() { + double[] res = MLPTrainer.batch(data, new int[]{1}, 10); + + TestUtils.assertEquals(new double[]{1.0}, res, 1e-12); + } + + /** */ + @Test + public void testBatchWithMultiColumnAndSingleRow() { + double[] res = MLPTrainer.batch(data, new int[]{1}, 5); + + TestUtils.assertEquals(new double[]{1.0, 6.0}, res, 1e-12); + } + + /** */ + @Test + public void testBatchWithMultiColumnAndMultiRow() { + double[] res = MLPTrainer.batch(data, new int[]{1, 3}, 5); + + TestUtils.assertEquals(new double[]{1.0, 3.0, 6.0, 8.0}, res, 1e-12); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistIntegrationTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistIntegrationTest.java new file mode 100644 index 0000000000000..7aacd0def83e8 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistIntegrationTest.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.nn.performance; + +import org.apache.ignite.Ignite; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.ml.nn.MLPTrainer; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; + +/** + * Tests {@link MLPTrainer} on the MNIST dataset that require to start the whole Ignite infrastructure. + */ +public class MLPTrainerMnistIntegrationTest extends GridCommonAbstractTest { + /** Number of nodes in grid */ + private static final int NODE_COUNT = 3; + + /** Ignite instance. */ + private Ignite ignite; + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + for (int i = 1; i <= NODE_COUNT; i++) + startGrid(i); + } + + /** + * {@inheritDoc} + */ + @Override protected void beforeTest() { + /* Grid instance. */ + ignite = grid(NODE_COUNT); + ignite.configuration().setPeerClassLoadingEnabled(true); + IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); + } + + /** Tests on the MNIST dataset. */ + /* @Test + public void testMNIST() throws IOException { + int featCnt = 28 * 28; + int hiddenNeuronsCnt = 100; + + CacheConfiguration trainingSetCacheCfg = new CacheConfiguration<>(); + trainingSetCacheCfg.setAffinity(new RendezvousAffinityFunction(false, 10)); + trainingSetCacheCfg.setName("MNIST_TRAINING_SET"); + IgniteCache trainingSet = ignite.createCache(trainingSetCacheCfg); + + int i = 0; + for (MnistUtils.MnistLabeledImage e : MnistMLPTestUtil.loadTrainingSet(6_000)) + trainingSet.put(i++, e); + + MLPArchitecture arch = new MLPArchitecture(featCnt). + withAddedLayer(hiddenNeuronsCnt, true, Activators.SIGMOID). 
+ withAddedLayer(10, false, Activators.SIGMOID); + + MLPTrainer trainer = new MLPTrainer<>( + arch, + LossFunctions.MSE, + new UpdatesStrategy<>( + new RPropUpdateCalculator(), + RPropParameterUpdate.SUM, + RPropParameterUpdate.AVG + ), + 200, + 2000, + 10, + 123L + ); + + System.out.println("Start training..."); + long start = System.currentTimeMillis(); + MultilayerPerceptron mdl = trainer.fit( + ignite, + trainingSet, + FeatureLabelExtractorWrapper.wrap( + (k, v) -> VectorUtils.of(v.getPixels()), + (k, v) -> VectorUtils.oneHot(v.getLabel(), 10).getStorage().data() + ) + ); + System.out.println("Training completed in " + (System.currentTimeMillis() - start) + "ms"); + + int correctAnswers = 0; + int incorrectAnswers = 0; + + for (MnistUtils.MnistLabeledImage e : MnistMLPTestUtil.loadTestSet(1_000)) { + Matrix input = new DenseMatrix(new double[][] {e.getPixels()}); + Matrix outputMatrix = mdl.predict(input); + + int predicted = (int)VectorUtils.vec2Num(outputMatrix.getRow(0)); + + if (predicted == e.getLabel()) + correctAnswers++; + else + incorrectAnswers++; + } + + double accuracy = 1.0 * correctAnswers / (correctAnswers + incorrectAnswers); + assertTrue("Accuracy should be >= 80%", accuracy >= 0.8); + }*/ +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistTest.java new file mode 100644 index 0000000000000..d1132670bd2a9 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistTest.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.nn.performance; + +import org.apache.ignite.ml.nn.MLPTrainer; + +/** + * Tests {@link MLPTrainer} on the MNIST dataset using locally stored data. + */ +public class MLPTrainerMnistTest { + /** Tests on the MNIST dataset. */ +/* @Test + public void testMNIST() throws IOException { + int featCnt = 28 * 28; + int hiddenNeuronsCnt = 100; + + Map trainingSet = new HashMap<>(); + + int i = 0; + for (MnistUtils.MnistLabeledImage e : MnistMLPTestUtil.loadTrainingSet(60_000)) + trainingSet.put(i++, e); + + MLPArchitecture arch = new MLPArchitecture(featCnt). + withAddedLayer(hiddenNeuronsCnt, true, Activators.SIGMOID). 
+ withAddedLayer(10, false, Activators.SIGMOID); + + MLPTrainer trainer = new MLPTrainer<>( + arch, + LossFunctions.MSE, + new UpdatesStrategy<>( + new RPropUpdateCalculator(), + RPropParameterUpdate.SUM, + RPropParameterUpdate.AVG + ), + 200, + 2000, + 10, + 123L + ); + + System.out.println("Start training..."); + long start = System.currentTimeMillis(); + MultilayerPerceptron mdl = trainer.fit( + trainingSet, + 1, + FeatureLabelExtractorWrapper.wrap( + (k, v) -> VectorUtils.of(v.getPixels()), + (k, v) -> VectorUtils.oneHot(v.getLabel(), 10).getStorage().data() + ) + ); + System.out.println("Training completed in " + (System.currentTimeMillis() - start) + "ms"); + + int correctAnswers = 0; + int incorrectAnswers = 0; + + for (MnistUtils.MnistLabeledImage e : MnistMLPTestUtil.loadTestSet(10_000)) { + Matrix input = new DenseMatrix(new double[][] {e.getPixels()}); + Matrix outputMatrix = mdl.predict(input); + + int predicted = (int)VectorUtils.vec2Num(outputMatrix.getRow(0)); + + if (predicted == e.getLabel()) + correctAnswers++; + else + incorrectAnswers++; + } + + double accuracy = 1.0 * correctAnswers / (correctAnswers + incorrectAnswers); + assertTrue("Accuracy should be >= 80% (not " + accuracy * 100 + "%)", accuracy >= 0.8); + }*/ +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/performance/MnistMLPTestUtil.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/performance/MnistMLPTestUtil.java new file mode 100644 index 0000000000000..ed6ce71b58768 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/nn/performance/MnistMLPTestUtil.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.nn.performance; + +import java.io.IOException; +import java.io.InputStream; +import java.util.List; +import java.util.Properties; +import java.util.Random; +import java.util.stream.Stream; +import org.apache.ignite.lang.IgniteBiTuple; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.util.MnistUtils; + +/** */ +public class MnistMLPTestUtil { + /** Name of the property specifying path to training set images. */ + private static final String PROP_TRAINING_IMAGES = "mnist.training.images"; + + /** Name of property specifying path to training set labels. */ + private static final String PROP_TRAINING_LABELS = "mnist.training.labels"; + + /** Name of property specifying path to test set images. */ + private static final String PROP_TEST_IMAGES = "mnist.test.images"; + + /** Name of property specifying path to test set labels. 
*/ + private static final String PROP_TEST_LABELS = "mnist.test.labels"; + + /** */ + static IgniteBiTuple, Stream> loadMnist(int samplesCnt) throws IOException { + Properties props = loadMNISTProperties(); + + Stream trainingMnistStream = MnistUtils.mnistAsStream(props.getProperty(PROP_TRAINING_IMAGES), + props.getProperty(PROP_TRAINING_LABELS), new Random(123L), samplesCnt); + + Stream testMnistStream = MnistUtils.mnistAsStream(props.getProperty(PROP_TEST_IMAGES), + props.getProperty(PROP_TEST_LABELS), new Random(123L), 10_000); + + return new IgniteBiTuple<>(trainingMnistStream, testMnistStream); + } + + /** + * Loads training set. + * + * @param cnt Count of objects. + * @return List of MNIST images. + * @throws IOException In case of exception. + */ + public static List loadTrainingSet(int cnt) throws IOException { + Properties props = loadMNISTProperties(); + return MnistUtils.mnistAsList( + props.getProperty(PROP_TRAINING_IMAGES), + props.getProperty(PROP_TRAINING_LABELS), + new Random(123L), + cnt + ); + } + + /** + * Loads test set. + * + * @param cnt Count of objects. + * @return List of MNIST images. + * @throws IOException In case of exception. + */ + public static List loadTestSet(int cnt) throws IOException { + Properties props = loadMNISTProperties(); + return MnistUtils.mnistAsList(props.getProperty(PROP_TEST_IMAGES), props.getProperty(PROP_TEST_LABELS), new Random(123L), cnt); + } + + /** Load properties for MNIST tests. */ + private static Properties loadMNISTProperties() throws IOException { + Properties res = new Properties(); + + InputStream is = MnistMLPTestUtil.class.getClassLoader().getResourceAsStream("manualrun/trees/columntrees.manualrun.properties"); + + res.load(is); + + return res; + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/pipeline/PipelineMdlTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/pipeline/PipelineMdlTest.java new file mode 100644 index 0000000000000..3347a6a87fef5 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/pipeline/PipelineMdlTest.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.pipeline; + +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel; +import org.junit.Test; + +/** + * Tests for {@link PipelineMdl}. + */ +public class PipelineMdlTest { + /** Precision in test checks. 
*/ + private static final double PRECISION = 1e-6; + + /** */ + @Test + public void testPredict() { + Vector weights = new DenseVector(new double[] {2.0, 3.0}); + + verifyPredict(getMdl(new LogisticRegressionModel(weights, 1.0).withRawLabels(true))); + } + + /** + * Get the empty internal model. + * + * @param internalMdl Internal model. + */ + private PipelineMdl getMdl(LogisticRegressionModel internalMdl) { + return new PipelineMdl() + .withPreprocessor(null) + .withInternalMdl(internalMdl); + } + + /** */ + private void verifyPredict(PipelineMdl mdl) { + Vector observation = new DenseVector(new double[] {1.0, 1.0}); + TestUtils.assertEquals(sigmoid(1.0 + 2.0 * 1.0 + 3.0 * 1.0), mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[] {2.0, 1.0}); + TestUtils.assertEquals(sigmoid(1.0 + 2.0 * 2.0 + 3.0 * 1.0), mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[] {1.0, 2.0}); + TestUtils.assertEquals(sigmoid(1.0 + 2.0 * 1.0 + 3.0 * 2.0), mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[] {-2.0, 1.0}); + TestUtils.assertEquals(sigmoid(1.0 - 2.0 * 2.0 + 3.0 * 1.0), mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[] {1.0, -2.0}); + TestUtils.assertEquals(sigmoid(1.0 + 2.0 * 1.0 - 3.0 * 2.0), mdl.predict(observation), PRECISION); + } + + /** + * Sigmoid function. + * + * @param z The regression value. + * @return The result. + */ + private static double sigmoid(double z) { + return 1.0 / (1.0 + Math.exp(-z)); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/pipeline/PipelineTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/pipeline/PipelineTest.java new file mode 100644 index 0000000000000..25d4a12152ae5 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/pipeline/PipelineTest.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.pipeline; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.preprocessing.minmaxscaling.MinMaxScalerTrainer; +import org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer; +import org.apache.ignite.ml.svm.SVMLinearClassificationTrainer; +import org.junit.Test; + +/** + * Tests for {@link Pipeline}. 
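+ * The first test below chains a vectorizer, min-max scaling, normalization and an SVM trainer;
+ * the second omits the final trainer stage and therefore expects an {@link IllegalStateException}.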
+ */ +public class PipelineTest extends TrainerTest { + /** + * Test trainer on classification model y = x. + */ + @Test + public void testTrainWithTheLinearlySeparableCase() { + Map cacheMock = new HashMap<>(); + + for (int i = 0; i < twoLinearlySeparableClasses.length; i++) + cacheMock.put(i, VectorUtils.of(twoLinearlySeparableClasses[i])); + + Vectorizer vectorizer = new DummyVectorizer() + .labeled(Vectorizer.LabelCoordinate.FIRST); + + PipelineMdl mdl = new Pipeline() + .addVectorizer(vectorizer) + .addPreprocessingTrainer(new MinMaxScalerTrainer()) + .addPreprocessingTrainer(new NormalizationTrainer() + .withP(1)) + .addTrainer(new SVMLinearClassificationTrainer()) + .fit(cacheMock, parts); + + TestUtils.assertEquals(0, mdl.predict(VectorUtils.of(100, 10)), PRECISION); + TestUtils.assertEquals(1, mdl.predict(VectorUtils.of(10, 100)), PRECISION); + } + + /** + * Test the missed final state. + */ + @Test(expected = IllegalStateException.class) + public void testTrainWithMissedFinalStage() { + Map cacheMock = new HashMap<>(); + + for (int i = 0; i < twoLinearlySeparableClasses.length; i++) + cacheMock.put(i, VectorUtils.of(twoLinearlySeparableClasses[i])); + + Vectorizer vectorizer = new DummyVectorizer() + .labeled(Vectorizer.LabelCoordinate.FIRST); + + PipelineMdl mdl = new Pipeline() + .addVectorizer(vectorizer) + .addPreprocessingTrainer(new MinMaxScalerTrainer()) + .addPreprocessingTrainer(new NormalizationTrainer() + .withP(1)) + .fit(cacheMock, parts); + + TestUtils.assertEquals(0, mdl.predict(VectorUtils.of(100, 10)), PRECISION); + TestUtils.assertEquals(1, mdl.predict(VectorUtils.of(10, 100)), PRECISION); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/pipeline/PipelineTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/pipeline/PipelineTestSuite.java new file mode 100644 index 0000000000000..4c28db9f3f2dc --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/pipeline/PipelineTestSuite.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.pipeline; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for the pipeline. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + PipelineTest.class, + PipelineMdlTest.class +}) +public class PipelineTestSuite { + // No-op. 
+} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/PreprocessingTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/PreprocessingTestSuite.java new file mode 100644 index 0000000000000..6b43b762677cc --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/PreprocessingTestSuite.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing; + +import org.apache.ignite.ml.preprocessing.binarization.BinarizationPreprocessorTest; +import org.apache.ignite.ml.preprocessing.binarization.BinarizationTrainerTest; +import org.apache.ignite.ml.preprocessing.encoding.EncoderTrainerTest; +import org.apache.ignite.ml.preprocessing.encoding.FrequencyEncoderPreprocessorTest; +import org.apache.ignite.ml.preprocessing.encoding.LabelEncoderPreprocessorTest; +import org.apache.ignite.ml.preprocessing.encoding.OneHotEncoderPreprocessorTest; +import org.apache.ignite.ml.preprocessing.encoding.StringEncoderPreprocessorTest; +import org.apache.ignite.ml.preprocessing.encoding.TargetEncoderPreprocessorTest; +import org.apache.ignite.ml.preprocessing.imputing.ImputerPreprocessorTest; +import org.apache.ignite.ml.preprocessing.imputing.ImputerTrainerTest; +import org.apache.ignite.ml.preprocessing.maxabsscaling.MaxAbsScalerPreprocessorTest; +import org.apache.ignite.ml.preprocessing.maxabsscaling.MaxAbsScalerTrainerTest; +import org.apache.ignite.ml.preprocessing.minmaxscaling.MinMaxScalerPreprocessorTest; +import org.apache.ignite.ml.preprocessing.minmaxscaling.MinMaxScalerTrainerTest; +import org.apache.ignite.ml.preprocessing.normalization.NormalizationPreprocessorTest; +import org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainerTest; +import org.apache.ignite.ml.preprocessing.standardscaling.StandardScalerPreprocessorTest; +import org.apache.ignite.ml.preprocessing.standardscaling.StandardScalerTrainerTest; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in org.apache.ignite.ml.preprocessing.* package. 
+ */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + MinMaxScalerPreprocessorTest.class, + MinMaxScalerTrainerTest.class, + BinarizationPreprocessorTest.class, + BinarizationTrainerTest.class, + ImputerPreprocessorTest.class, + ImputerTrainerTest.class, + EncoderTrainerTest.class, + OneHotEncoderPreprocessorTest.class, + FrequencyEncoderPreprocessorTest.class, + StringEncoderPreprocessorTest.class, + TargetEncoderPreprocessorTest.class, + LabelEncoderPreprocessorTest.class, + NormalizationTrainerTest.class, + NormalizationPreprocessorTest.class, + + StandardScalerTrainerTest.class, + StandardScalerPreprocessorTest.class, + + MaxAbsScalerTrainerTest.class, + MaxAbsScalerPreprocessorTest.class +}) +public class PreprocessingTestSuite { + // No-op. +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationPreprocessorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationPreprocessorTest.java new file mode 100644 index 0000000000000..aba1db2755086 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationPreprocessorTest.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.binarization; + +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link BinarizationPreprocessor}. + */ +public class BinarizationPreprocessorTest { + /** Tests {@code apply()} method. 
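+     * With the threshold set to 7, every feature value above 7 becomes 1 and the rest become 0,
+     * so, for example, the input row {1, 8, 16} is expected to map to {0, 1, 1}.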
*/ + @Test + public void testApply() { + double[][] data = new double[][]{ + {1, 2, 3}, + {1, 4, 8}, + {1, 8, 16}, + }; + + Vectorizer vectorizer = new DoubleArrayVectorizer<>(0, 1, 2); + + BinarizationPreprocessor preprocessor = new BinarizationPreprocessor<>( + 7, + vectorizer + ); + + double[][] postProcessedData = new double[][]{ + {0, 0, 0}, + {0, 0, 1}, + {0, 1, 1} + }; + + for (int i = 0; i < data.length; i++) + assertArrayEquals(postProcessedData[i], preprocessor.apply(i, data[i]).features().asArray(), 1e-8); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationTrainerTest.java new file mode 100644 index 0000000000000..c7aa45dd16f32 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationTrainerTest.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.binarization; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.DatasetBuilder; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link BinarizationTrainer}. + */ +public class BinarizationTrainerTest extends TrainerTest { + /** Tests {@code fit()} method. 
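+     * The trainer is configured with threshold 10, so the probe vector (1, 10, 100) is expected
+     * to map to (0, 0, 1): judging by the assertion, a value equal to the threshold stays 0.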
*/ + @Test + public void testFit() { + Map data = new HashMap<>(); + data.put(1, VectorUtils.of(2, 4, 1)); + data.put(2, VectorUtils.of(1, 8, 22)); + data.put(3, VectorUtils.of(4, 10, 100)); + data.put(4, VectorUtils.of(0, 22, 300)); + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + BinarizationTrainer binarizationTrainer = new BinarizationTrainer() + .withThreshold(10); + + assertEquals(10., binarizationTrainer.getThreshold(), 0); + + BinarizationPreprocessor preprocessor = binarizationTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertEquals(binarizationTrainer.getThreshold(), preprocessor.getThreshold(), 0); + + assertArrayEquals(new double[] {0, 0, 1}, preprocessor.apply(5, VectorUtils.of(1, 10, 100)).features().asArray(), 1e-8); + } + + /** Tests default implementation of {@code fit()} method. */ + @Test + public void testFitDefault() { + Map data = new HashMap<>(); + data.put(1, VectorUtils.of(2, 4, 1)); + data.put(2, VectorUtils.of(1, 8, 22)); + data.put(3, VectorUtils.of(4, 10, 100)); + data.put(4, VectorUtils.of(0, 22, 300)); + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + BinarizationTrainer binarizationTrainer = new BinarizationTrainer() + .withThreshold(10); + + assertEquals(10., binarizationTrainer.getThreshold(), 0); + + BinarizationPreprocessor preprocessor = (BinarizationPreprocessor)binarizationTrainer.fit( + TestUtils.testEnvBuilder(), + data, + parts, + vectorizer + ); + + assertArrayEquals(new double[] {0, 0, 1}, preprocessor.apply(5, VectorUtils.of(1, 10, 100)).features().asArray(), 1e-8); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/EncoderTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/EncoderTrainerTest.java new file mode 100644 index 0000000000000..faae5602e3108 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/EncoderTrainerTest.java @@ -0,0 +1,285 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.preprocessing.encoding; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.DatasetBuilder; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.ObjectArrayVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.exceptions.preprocessing.UnknownCategorialValueException; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +/** + * Tests for {@link EncoderTrainer}. + */ +public class EncoderTrainerTest extends TrainerTest { + /** Tests {@code fit()} method. */ + @Test + public void testFitOnStringCategorialFeatures() { + Map data = new HashMap<>(); + data.put(1, new DenseVector(new Serializable[] {1.0, "Monday", "September"})); + data.put(2, new DenseVector(new Serializable[] {2.0, "Monday", "August"})); + data.put(3, new DenseVector(new Serializable[] {3.0, "Monday", "August"})); + data.put(4, new DenseVector(new Serializable[] {4.0, "Friday", "June"})); + data.put(5, new DenseVector(new Serializable[] {5.0, "Friday", "June"})); + data.put(6, new DenseVector(new Serializable[] {6.0, "Sunday", "August"})); + + final Vectorizer vectorizer = new DummyVectorizer(1, 2).labeled(0); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + EncoderTrainer strEncoderTrainer = new EncoderTrainer() + .withEncoderType(EncoderType.STRING_ENCODER) + .withEncodedFeature(0) + .withEncodedFeature(1); + + EncoderPreprocessor preprocessor = strEncoderTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertArrayEquals( + new double[] {0.0, 2.0}, + preprocessor.apply(7, new DenseVector(new Serializable[] {7.0, "Monday", "September"})).features().asArray(), + 1e-8 + ); + } + + /** Tests {@code fit()} method. */ + @Test + public void testFitOnIntegerCategorialFeatures() { + Map data = new HashMap<>(); + data.put(1, VectorUtils.of(3.0, 0.0)); + data.put(2, VectorUtils.of(3.0, 12.0)); + data.put(3, VectorUtils.of(3.0, 12.0)); + data.put(4, VectorUtils.of(2.0, 45.0)); + data.put(5, VectorUtils.of(2.0, 45.0)); + data.put(6, VectorUtils.of(14.0, 12.0)); + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + EncoderTrainer strEncoderTrainer = new EncoderTrainer() + .withEncoderType(EncoderType.ONE_HOT_ENCODER) + .withEncodedFeature(0) + .withEncodedFeature(1); + + EncoderPreprocessor preprocessor = strEncoderTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + assertArrayEquals( + new double[] {1.0, 0.0, 0.0, 0.0, 0.0, 1.0}, + preprocessor.apply(7, VectorUtils.of(3.0, 0.0)).features().asArray(), + 1e-8 + ); + assertArrayEquals( + new double[] {0.0, 1.0, 0.0, 1.0, 0.0, 0.0}, + preprocessor.apply(8, VectorUtils.of(2.0, 12.0)).features().asArray(), + 1e-8 + ); + } + + /** Tests {@code fit()} method. 
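+     * The encoder is fit on purely numeric categories, so applying it afterwards to the unseen
+     * values "Monday"/"September" is expected to fail with {@link UnknownCategorialValueException}.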
*/ + @Test + public void testFitWithUnknownStringValueInTheGivenData() { + Map data = new HashMap<>(); + data.put(1, VectorUtils.of(3.0, 0.0)); + data.put(2, VectorUtils.of(3.0, 12.0)); + data.put(3, VectorUtils.of(3.0, 12.0)); + data.put(4, VectorUtils.of(2.0, 45.0)); + data.put(5, VectorUtils.of(2.0, 45.0)); + data.put(6, VectorUtils.of(14.0, 12.0)); + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + EncoderTrainer strEncoderTrainer = new EncoderTrainer() + .withEncoderType(EncoderType.STRING_ENCODER) + .withEncodedFeature(0) + .withEncodedFeature(1); + + EncoderPreprocessor preprocessor = strEncoderTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + try { + preprocessor.apply(7, new DenseVector(new Serializable[] {"Monday", "September"})).features().asArray(); + fail("UnknownCategorialFeatureValue"); + } + catch (UnknownCategorialValueException e) { + return; + } + fail("UnknownCategorialFeatureValue"); + } + + /** Tests {@code fit()} method. */ + @Test + public void testFitOnStringCategorialFeaturesWithReversedOrder() { + Map data = new HashMap<>(); + data.put(1, new DenseVector(new Serializable[] {"Monday", "September"})); + data.put(2, new DenseVector(new Serializable[] {"Monday", "August"})); + data.put(3, new DenseVector(new Serializable[] {"Monday", "August"})); + data.put(4, new DenseVector(new Serializable[] {"Friday", "June"})); + data.put(5, new DenseVector(new Serializable[] {"Friday", "June"})); + data.put(6, new DenseVector(new Serializable[] {"Sunday", "August"})); + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + EncoderTrainer strEncoderTrainer = new EncoderTrainer() + .withEncoderType(EncoderType.STRING_ENCODER) + .withEncoderIndexingStrategy(EncoderSortingStrategy.FREQUENCY_ASC) + .withEncodedFeature(0) + .withEncodedFeature(1); + + EncoderPreprocessor preprocessor = strEncoderTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertArrayEquals( + new double[] {2.0, 0.0}, + preprocessor.apply(7, new DenseVector(new Serializable[] {"Monday", "September"})).features().asArray(), + 1e-8 + ); + } + + /** Tests {@code fit()} method. 
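+     * Frequency encoding replaces a category with its relative frequency in the fitted data:
+     * "Monday" appears in 3 of 6 rows (0.5), "Friday" in 2 of 6 (~0.33), "Sunday" in 1 of 6 (~0.17),
+     * and likewise "August" ~0.5, "June" ~0.33, "September" ~0.17, matching the expected arrays below.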
*/ + @Test + public void testFitOnStringCategorialFeaturesWithFrequencyEncoding() { + Map data = new HashMap<>(); + data.put(1, new DenseVector(new Serializable[] {"Monday", "September"})); + data.put(2, new DenseVector(new Serializable[] {"Monday", "August"})); + data.put(3, new DenseVector(new Serializable[] {"Monday", "August"})); + data.put(4, new DenseVector(new Serializable[] {"Friday", "June"})); + data.put(5, new DenseVector(new Serializable[] {"Friday", "June"})); + data.put(6, new DenseVector(new Serializable[] {"Sunday", "August"})); + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + EncoderTrainer strEncoderTrainer = new EncoderTrainer() + .withEncoderType(EncoderType.FREQUENCY_ENCODER) + .withEncodedFeature(0) + .withEncodedFeature(1); + + EncoderPreprocessor preprocessor = strEncoderTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertArrayEquals( + new double[] {0.5, 0.166}, + preprocessor.apply(7, new DenseVector(new Serializable[] {"Monday", "September"})).features().asArray(), + 0.1 + ); + assertArrayEquals( + new double[] {0.33, 0.5}, + preprocessor.apply(7, new DenseVector(new Serializable[] {"Friday", "August"})).features().asArray(), + 0.1 + ); + assertArrayEquals( + new double[] {0.166, 0.33}, + preprocessor.apply(7, new DenseVector(new Serializable[] {"Sunday", "June"})).features().asArray(), + 0.1 + ); + } + + /** Tests {@code fit()} method. */ + @Test + public void testFitOnStringCategorialFeaturesAndLabels() { + Map data = new HashMap<>(); + data.put(1, new Object[] {"1.0", 1.0, 2.0}); + data.put(2, new Object[] {"2.0", 2.0, 3.0}); + data.put(3, new Object[] {"1.0", 3.0, 1.0}); + data.put(4, new Object[] {"1.0", 2.0, 1.0}); + data.put(5, new Object[] {"1.0", 1.0, 1.0}); + data.put(6, new Object[] {"2.0", 1.0, 2.2}); + + final Vectorizer vectorizer = new ObjectArrayVectorizer(1, 2).labeled(0); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + EncoderTrainer labelEncoderTrainer = new EncoderTrainer() + .withEncoderType(EncoderType.LABEL_ENCODER); + + EncoderPreprocessor preprocessor = labelEncoderTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertEquals(0.0, (Double)preprocessor.apply(8, new Object[] {"1.0", 2.0, 3.0}).label(), 1e-8); + } + + /** Tests {@code fit()} method. 
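+     * Only feature 0 is registered for string encoding while feature 1 also holds string values,
+     * so the test expects an {@code IllegalFeatureTypeException}.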
*/ + @Test(expected = org.apache.ignite.ml.math.exceptions.preprocessing.IllegalFeatureTypeException.class) + public void testFitWithExceptionOnMissedEncodedFeatureIndex() { + Map data = new HashMap<>(); + data.put(1, new DenseVector(new Serializable[] {1.0, "Monday", "September"})); + data.put(2, new DenseVector(new Serializable[] {2.0, "Monday", "August"})); + data.put(3, new DenseVector(new Serializable[] {3.0, "Monday", "August"})); + data.put(4, new DenseVector(new Serializable[] {4.0, "Friday", "June"})); + data.put(5, new DenseVector(new Serializable[] {5.0, "Friday", "June"})); + data.put(6, new DenseVector(new Serializable[] {6.0, "Sunday", "August"})); + + final Vectorizer vectorizer = new DummyVectorizer(1, 2).labeled(0); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + EncoderTrainer strEncoderTrainer = new EncoderTrainer() + .withEncoderType(EncoderType.STRING_ENCODER) + .withEncodedFeature(0); + + EncoderPreprocessor preprocessor = strEncoderTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertArrayEquals( + new double[] {0.0, 2.0}, + preprocessor.apply(7, new DenseVector(new Serializable[] {7.0, "Monday", "September"})).features().asArray(), + 1e-8 + ); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/FrequencyEncoderPreprocessorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/FrequencyEncoderPreprocessorTest.java new file mode 100644 index 0000000000000..b3fc07aa9eb11 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/FrequencyEncoderPreprocessorTest.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.encoding; + +import java.io.Serializable; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.preprocessing.encoding.frequency.FrequencyEncoderPreprocessor; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link FrequencyEncoderPreprocessor}. + */ +public class FrequencyEncoderPreprocessorTest { + /** Tests {@code apply()} method. 
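+     * Applies a preprocessor built from hand-written frequency maps ("1" -> 0.33, "2" -> 0.66,
+     * "Moscow" -> 1.0, "A" -> 0.33, "B" -> 0.66) and checks each encoded row against those values.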
*/ + @Test + public void testApply() { + Vector[] data = new Vector[] { + new DenseVector(new Serializable[] {"1", "Moscow", "A"}), + new DenseVector(new Serializable[] {"2", "Moscow", "B"}), + new DenseVector(new Serializable[] {"2", "Moscow", "B"}), + }; + + Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + HashMap map = new HashMap(); + + map.put("1", 0.33); + map.put("2", 0.66); + + HashMap map1 = new HashMap(); + + map1.put("A", 0.33); + map1.put("B", 0.66); + + FrequencyEncoderPreprocessor preprocessor = new FrequencyEncoderPreprocessor( + new Map[] {map, Collections.singletonMap("Moscow", 1.0), map1}, + vectorizer, + new HashSet(Arrays.asList(0, 1, 2))); + + double[][] postProcessedData = new double[][] { + {0.33, 1.0, 0.33}, + {0.66, 1.0, 0.66}, + {0.66, 1.0, 0.66}, + }; + + for (int i = 0; i < data.length; i++) + assertArrayEquals(postProcessedData[i], preprocessor.apply(i, data[i]).features().asArray(), 0.1); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/LabelEncoderPreprocessorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/LabelEncoderPreprocessorTest.java new file mode 100644 index 0000000000000..f8340afa80542 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/LabelEncoderPreprocessorTest.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.encoding; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.ObjectArrayVectorizer; +import org.apache.ignite.ml.preprocessing.encoding.label.LabelEncoderPreprocessor; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link LabelEncoderPreprocessor}. + */ +public class LabelEncoderPreprocessorTest { + /** Tests {@code apply()} method. 
*/ + @Test + public void testApply() { + Map data = new HashMap<>(); + data.put(0, new Object[] {1, "A"}); + data.put(1, new Object[] {2, "B"}); + data.put(2, new Object[] {3, "B"}); + + final Vectorizer vectorizer = new ObjectArrayVectorizer(0).labeled(1); + + LabelEncoderPreprocessor preprocessor = new LabelEncoderPreprocessor( + new HashMap() { + { + put("A", 1); + put("B", 0); + } + }, + vectorizer + ); + + double[] postProcessedData = new double[] { + 1.0, + 0.0, + 0.0 + }; + + for (int i = 0; i < data.size(); i++) + assertEquals(postProcessedData[i], (Double)preprocessor.apply(i, data.get(i)).label(), 1e-8); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/OneHotEncoderPreprocessorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/OneHotEncoderPreprocessorTest.java new file mode 100644 index 0000000000000..3226d95e9ad50 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/OneHotEncoderPreprocessorTest.java @@ -0,0 +1,207 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.encoding; + +import java.io.Serializable; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.math.exceptions.preprocessing.UnknownCategorialValueException; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.preprocessing.encoding.onehotencoder.OneHotEncoderPreprocessor; +import org.apache.ignite.ml.preprocessing.encoding.stringencoder.StringEncoderPreprocessor; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.fail; + +/** + * Tests for {@link OneHotEncoderPreprocessor}. + */ +public class OneHotEncoderPreprocessorTest { + /** Tests {@code apply()} method.
*/ + @Test + public void testApplyWithStringValues() { + Vector[] data = new Vector[] { + new DenseVector(new Serializable[] {"1", "Moscow", "A"}), + new DenseVector(new Serializable[] {"2", "Moscow", "A"}), + new DenseVector(new Serializable[] {"2", "Moscow", "B"}), + }; + + Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + Map map1 = new HashMap(); + + map1.put("1", 1); + map1.put("2", 0); + + Map map2 = new HashMap(); + + map2.put("A", 0); + map2.put("B", 1); + + OneHotEncoderPreprocessor preprocessor = new OneHotEncoderPreprocessor( + new Map[] {map1, Collections.singletonMap("Moscow", 0), map2}, + vectorizer, + new HashSet(Arrays.asList(0, 1, 2))); + + double[][] postProcessedData = new double[][] { + {0.0, 1.0, 1.0, 1.0, 0.0}, + {1.0, 0.0, 1.0, 1.0, 0.0}, + {1.0, 0.0, 1.0, 0.0, 1.0}, + }; + + for (int i = 0; i < data.length; i++) + assertArrayEquals(postProcessedData[i], preprocessor.apply(i, data[i]).features().asArray(), 1e-8); + } + + /** */ + @Test + public void testOneCategorialFeature() { + Vector[] data = new Vector[] { + new DenseVector(new Serializable[] {"42"}), + new DenseVector(new Serializable[] {"43"}), + new DenseVector(new Serializable[] {"42"}), + }; + + Vectorizer vectorizer = new DummyVectorizer<>(0); + + OneHotEncoderPreprocessor preprocessor = new OneHotEncoderPreprocessor( + new HashMap[] { + new HashMap() { + { + put("42", 0); + put("43", 1); + } + } }, + vectorizer, + new HashSet() { + { + add(0); + } + }); + + double[][] postProcessedData = new double[][] { + {1.0, 0.0}, + {0.0, 1.0}, + {1.0, 0.0}, + }; + + for (int i = 0; i < data.length; i++) + assertArrayEquals(postProcessedData[i], preprocessor.apply(i, data[i]).features().asArray(), 1e-8); + } + + /** */ + @Test + public void testTwoCategorialFeatureAndTwoDoubleFeatures() { + Vector[] data = new Vector[] { + new DenseVector(new Serializable[] {"42", 1.0, "M", 2.0}), + new DenseVector(new Serializable[] {"43", 2.0, "F", 3.0}), + new DenseVector(new Serializable[] {"42", 3.0, Double.NaN, 4.0}), + new DenseVector(new Serializable[] {"42", 4.0, "F", 5.0}) + }; + + Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2, 3); + + HashMap[] encodingValues = new HashMap[4]; + encodingValues[0] = new HashMap() { + { + put("42", 0); + put("43", 1); + } + }; + + encodingValues[2] = new HashMap() { + { + put("F", 0); + put("M", 1); + put("", 2); + } + }; + + OneHotEncoderPreprocessor preprocessor = new OneHotEncoderPreprocessor( + encodingValues, + vectorizer, + new HashSet() { + { + add(0); + add(2); + } + }); + + double[][] postProcessedData = new double[][] { + {1.0, 2.0, 1.0, 0.0, 0.0, 1.0, 0.0}, + {2.0, 3.0, 0.0, 1.0, 1.0, 0.0, 0.0}, + {3.0, 4.0, 1.0, 0.0, 0.0, 0.0, 1.0}, + {4.0, 5.0, 1.0, 0.0, 1.0, 0.0, 0.0}, + }; + + for (int i = 0; i < data.length; i++) + assertArrayEquals(postProcessedData[i], preprocessor.apply(i, data[i]).features().asArray(), 1e-8); + } + + /** + * The {@code apply()} method is expected to fail with an {@code UnknownCategorialValueException}. + * + * The reason is missing information in {@code encodingValues}.
+ * + * @see UnknownCategorialValueException + */ + @Test + public void testApplyWithUnknownCategorialValues() { + Vector[] data = new Vector[] { + new DenseVector(new Serializable[] {"1", "Moscow", "A"}), + new DenseVector(new Serializable[] {"2", "Moscow", "A"}), + new DenseVector(new Serializable[] {"2", "Moscow", "B"}), + }; + + Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + Map map1 = new HashMap(); + + map1.put("A", 0); + map1.put("B", 1); + + OneHotEncoderPreprocessor preprocessor = new OneHotEncoderPreprocessor( + new Map[] {Collections.singletonMap("2", 0), Collections.singletonMap("Moscow", 0), map1}, + vectorizer, + new HashSet(Arrays.asList(0, 1, 2))); + + double[][] postProcessedData = new double[][] { + {0.0, 1.0, 1.0, 1.0, 0.0}, + {1.0, 0.0, 1.0, 1.0, 0.0}, + {1.0, 0.0, 1.0, 0.0, 1.0}, + }; + + try { + for (int i = 0; i < data.length; i++) + assertArrayEquals(postProcessedData[i], preprocessor.apply(i, data[i]).features().asArray(), 1e-8); + + fail("UnknownCategorialFeatureValue"); + } + catch (UnknownCategorialValueException e) { + return; + } + fail("UnknownCategorialFeatureValue"); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/StringEncoderPreprocessorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/StringEncoderPreprocessorTest.java new file mode 100644 index 0000000000000..1f37703ab7613 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/StringEncoderPreprocessorTest.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.encoding; + +import java.io.Serializable; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.preprocessing.encoding.stringencoder.StringEncoderPreprocessor; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link StringEncoderPreprocessor}. + */ +public class StringEncoderPreprocessorTest { + /** Tests {@code apply()} method. 
*/ + @Test + public void testApply() { + Vector[] data = new Vector[] { + new DenseVector(new Serializable[] {"1", "Moscow", "A"}), + new DenseVector(new Serializable[] {"2", "Moscow", "B"}), + new DenseVector(new Serializable[] {"2", "Moscow", "B"}), + }; + + Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + Map map1 = new HashMap(); + + map1.put("1", 1); + map1.put("2", 0); + + Map map2 = new HashMap(); + + map2.put("A", 1); + map2.put("B", 0); + + StringEncoderPreprocessor preprocessor = new StringEncoderPreprocessor( + new Map[] {map1, Collections.singletonMap("Moscow", 0), map2}, + vectorizer, + new HashSet(Arrays.asList(0, 1, 2))); + + double[][] postProcessedData = new double[][] { + {1.0, 0.0, 1.0}, + {0.0, 0.0, 0.0}, + {0.0, 0.0, 0.0}, + }; + + for (int i = 0; i < data.length; i++) + assertArrayEquals(postProcessedData[i], preprocessor.apply(i, data[i]).features().asArray(), 1e-8); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/TargetEncoderPreprocessorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/TargetEncoderPreprocessorTest.java new file mode 100644 index 0000000000000..9390503a514f2 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/encoding/TargetEncoderPreprocessorTest.java @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.encoding; + +import java.io.Serializable; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.preprocessing.encoding.target.TargetEncoderPreprocessor; +import org.apache.ignite.ml.preprocessing.encoding.target.TargetEncodingMeta; +import org.junit.Test; +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link TargetEncoderPreprocessor}. + */ +public class TargetEncoderPreprocessorTest { + /** Tests {@code apply()} method. 
*/ + @Test + public void testApply() { + Vector[] data = new Vector[] { + new DenseVector(new Serializable[] {"1", "Moscow", "A"}), + new DenseVector(new Serializable[] {"2", "Moscow", "B"}), + new DenseVector(new Serializable[] {"3", "Moscow", "B"}), + }; + + Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + Map mean = new HashMap<>(); + + mean.put("1", 1.0); // category "1" avg mean = 1.0 + mean.put("2", 0.0); // category "2" avg mean = 0.0 + + Map mean1 = new HashMap<>(); + + mean1.put("A", 1.0); // category "A" avg mean 1.0 + mean1.put("B", 2.0); // category "B" avg mean 2.0 + + TargetEncoderPreprocessor preprocessor = new TargetEncoderPreprocessor<>( + new TargetEncodingMeta[]{ + // feature 0 + new TargetEncodingMeta() + .withGlobalMean(0.5) + .withCategoryMean(mean), + // feature 1 + new TargetEncodingMeta() + .withGlobalMean(0.1) + .withCategoryMean(Collections.emptyMap()), + // feature 2 + new TargetEncodingMeta() + .withGlobalMean(0.1) + .withCategoryMean(mean1) + }, + vectorizer, + new HashSet() { + { + add(0); + add(1); + add(2); + } + }); + + double[][] postProcessedData = new double[][] { + { + 1.0, // "1" contains in dict => use category mean 1.0 + 0.1, // "Moscow" not contains in dict => use global 0.1 + 1.0 // "A" contains in dict => use category mean 1.0 + }, + { + 0.0, // "2" contains in dict => use category mean 0.0 + 0.1, // "Moscow" not contains in dict => use global 0.1 + 2.0 // "B" contains in dict => use category mean 2.0 + }, + { + 0.5, // "3" not contains in dict => use global mean 0.5 + 0.1, // "Moscow" not contains in dict => use global 0.1 + 2.0 // "B" contains in dict => use category mean 2.0 + }, + }; + + for (int i = 0; i < data.length; i++) + assertArrayEquals(postProcessedData[i], preprocessor.apply(i, data[i]).features().asArray(), 1e-8); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerPreprocessorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerPreprocessorTest.java new file mode 100644 index 0000000000000..6cfb18f66c256 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerPreprocessorTest.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.imputing; + +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link ImputerPreprocessor}. 
+ */ +public class ImputerPreprocessorTest { + /** Tests {@code apply()} method. */ + @Test + public void testApply() { + double[][] data = new double[][]{ + {Double.NaN, 20, 3}, + {2, Double.NaN, 8}, + {Double.NaN, Double.NaN, Double.NaN}, + }; + + Vectorizer vectorizer = new DoubleArrayVectorizer<>(0, 1, 2); + + ImputerPreprocessor preprocessor = new ImputerPreprocessor<>( + VectorUtils.of(1.1, 10.1, 100.1), + vectorizer + ); + + double[][] postProcessedData = new double[][]{ + {1.1, 20, 3}, + {2, 10.1, 8}, + {1.1, 10.1, 100.1}, + }; + + for (int i = 0; i < data.length; i++) + assertArrayEquals(postProcessedData[i], preprocessor.apply(i, data[i]).features().asArray(), 1e-8); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerTrainerTest.java new file mode 100644 index 0000000000000..3ed20497f23cd --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerTrainerTest.java @@ -0,0 +1,183 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.imputing; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.DatasetBuilder; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link ImputerTrainer}. + */ +public class ImputerTrainerTest extends TrainerTest { + /** Tests {@code fit()} method. 
*/ + @Test + public void testMostFrequent() { + Map data = new HashMap<>(); + data.put(1, VectorUtils.of(1, 2, Double.NaN)); + data.put(2, VectorUtils.of(1, Double.NaN, 22)); + data.put(3, VectorUtils.of(Double.NaN, 10, 100)); + data.put(4, VectorUtils.of(0, 2, 100)); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + ImputerTrainer imputerTrainer = new ImputerTrainer() + .withImputingStrategy(ImputingStrategy.MOST_FREQUENT); + + ImputerPreprocessor preprocessor = imputerTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertArrayEquals( + new double[] {1, 0, 100}, + preprocessor.apply(5, VectorUtils.of(Double.NaN, 0, Double.NaN)).features().asArray(), + 1e-8 + ); + } + + + /** Tests {@code fit()} method. */ + @Test + public void testLeastFrequent() { + Map data = new HashMap<>(); + data.put(1, VectorUtils.of(1, 2, Double.NaN)); + data.put(2, VectorUtils.of(1, Double.NaN, 22)); + data.put(3, VectorUtils.of(Double.NaN, 10, 100)); + data.put(4, VectorUtils.of(0, 2, 100)); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + ImputerTrainer imputerTrainer = new ImputerTrainer() + .withImputingStrategy(ImputingStrategy.LEAST_FREQUENT); + + ImputerPreprocessor preprocessor = imputerTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertArrayEquals( + new double[] {0, 0, 22}, + preprocessor.apply(5, VectorUtils.of(Double.NaN, 0, Double.NaN)).features().asArray(), + 1e-8 + ); + } + + /** Tests {@code fit()} method. */ + @Test + public void testMin() { + Map data = new HashMap<>(); + data.put(1, VectorUtils.of(-1, 2, Double.NaN)); + data.put(2, VectorUtils.of(-1, Double.NaN, 22)); + data.put(3, VectorUtils.of(Double.NaN, 10, 100)); + data.put(4, VectorUtils.of(0, 2, 100)); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + ImputerTrainer imputerTrainer = new ImputerTrainer() + .withImputingStrategy(ImputingStrategy.MIN); + + ImputerPreprocessor preprocessor = imputerTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertArrayEquals( + new double[] {-1, 0, 22}, + preprocessor.apply(5, VectorUtils.of(Double.NaN, 0, Double.NaN)).features().asArray(), + 1e-8 + ); + } + + /** Tests {@code fit()} method. */ + @Test + public void testMax() { + Map data = new HashMap<>(); + data.put(1, VectorUtils.of(-1, 2, Double.NaN)); + data.put(2, VectorUtils.of(-1, Double.NaN, 22)); + data.put(3, VectorUtils.of(Double.NaN, 10, 100)); + data.put(4, VectorUtils.of(0, 2, 100)); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + ImputerTrainer imputerTrainer = new ImputerTrainer() + .withImputingStrategy(ImputingStrategy.MAX); + + ImputerPreprocessor preprocessor = imputerTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertArrayEquals( + new double[] {0, 0, 100}, + preprocessor.apply(5, VectorUtils.of(Double.NaN, 0, Double.NaN)).features().asArray(), + 1e-8 + ); + } + + /** Tests {@code fit()} method. 
*/ + @Test + public void testCount() { + Map data = new HashMap<>(); + data.put(1, VectorUtils.of(-1, 2, Double.NaN)); + data.put(2, VectorUtils.of(-1, Double.NaN, 22)); + data.put(3, VectorUtils.of(Double.NaN, 10, 100)); + data.put(4, VectorUtils.of(0, 2, 100)); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + ImputerTrainer imputerTrainer = new ImputerTrainer() + .withImputingStrategy(ImputingStrategy.COUNT); + + ImputerPreprocessor preprocessor = imputerTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertArrayEquals( + new double[] {3, 0, 3}, + preprocessor.apply(5, VectorUtils.of(Double.NaN, 0, Double.NaN)).features().asArray(), + 1e-8 + ); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/maxabsscaling/MaxAbsScalerPreprocessorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/maxabsscaling/MaxAbsScalerPreprocessorTest.java new file mode 100644 index 0000000000000..90777465caf5b --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/maxabsscaling/MaxAbsScalerPreprocessorTest.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.maxabsscaling; + +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link MaxAbsScalerPreprocessor}. + */ +public class MaxAbsScalerPreprocessorTest { + /** Tests {@code apply()} method. */ + @Test + public void testApply() { + double[][] data = new double[][] { + {2., 4., 1.}, + {1., 8., 22.}, + {-4., 10., 100.}, + {0., 22., 300.} + }; + double[] maxAbs = new double[] {4, 22, 300}; + + Vectorizer vectorizer = new DoubleArrayVectorizer<>(0, 1, 2); + + MaxAbsScalerPreprocessor preprocessor = new MaxAbsScalerPreprocessor<>( + maxAbs, + vectorizer + ); + + double[][] expData = new double[][] { + {.5, 4. / 22, 1. / 300}, + {.25, 8. / 22, 22. / 300}, + {-1., 10. / 22, 100. / 300}, + {0., 22. / 22, 300. 
/ 300} + }; + + for (int i = 0; i < data.length; i++) + assertArrayEquals(expData[i], preprocessor.apply(i, data[i]).features().asArray(), 1e-8); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/maxabsscaling/MaxAbsScalerTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/maxabsscaling/MaxAbsScalerTrainerTest.java new file mode 100644 index 0000000000000..4f3253387320d --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/maxabsscaling/MaxAbsScalerTrainerTest.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.maxabsscaling; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.DatasetBuilder; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link MaxAbsScalerTrainer}. + */ +public class MaxAbsScalerTrainerTest extends TrainerTest { + /** Tests {@code fit()} method. 
*/ + @Test + public void testFit() { + Map data = new HashMap<>(); + data.put(1, VectorUtils.of(2, -4, 1)); + data.put(2, VectorUtils.of(1, -8, 22)); + data.put(3, VectorUtils.of(-4, 10, 100)); + data.put(4, VectorUtils.of(0, 22, 300)); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + MaxAbsScalerTrainer standardizationTrainer = new MaxAbsScalerTrainer<>(); + + MaxAbsScalerPreprocessor preprocessor = standardizationTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertArrayEquals(new double[] {4, 22, 300}, preprocessor.getMaxAbs(), 1e-8); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerPreprocessorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerPreprocessorTest.java new file mode 100644 index 0000000000000..b35fa9e87cbd8 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerPreprocessorTest.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.minmaxscaling; + +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link MinMaxScalerPreprocessor}. + */ +public class MinMaxScalerPreprocessorTest { + /** Tests {@code apply()} method. */ + @Test + public void testApply() { + double[][] data = new double[][]{ + {2., 4., 1.}, + {1., 8., 22.}, + {4., 10., 100.}, + {0., 22., 300.} + }; + + Vectorizer vectorizer = new DoubleArrayVectorizer<>(0, 1, 2); + + MinMaxScalerPreprocessor preprocessor = new MinMaxScalerPreprocessor<>( + new double[] {0, 4, 1}, + new double[] {4, 22, 300}, + vectorizer + ); + + double[][] standardData = new double[][]{ + {2. / 4, (4. - 4.) / 18., 0.}, + {1. / 4, (8. - 4.) / 18., (22. - 1.) / 299.}, + {1., (10. - 4.) / 18., (100. - 1.) / 299.}, + {0., (22. - 4.) / 18., (300. - 1.) / 299.} + }; + + for (int i = 0; i < data.length; i++) + assertArrayEquals(standardData[i], preprocessor.apply(i, data[i]).features().asArray(), 1e-8); + } + + /** Test {@code apply()} method with division by zero. 
*/ + @Test + public void testApplyDivisionByZero() { + double[][] data = new double[][]{{1.}, {1.}, {1.}, {1.}}; + + MinMaxScalerPreprocessor preprocessor = new MinMaxScalerPreprocessor<>( + new double[] {1.}, + new double[] {1.}, + new DoubleArrayVectorizer<>(0) + ); + + double[][] standardData = new double[][]{{0.}, {0.}, {0.}, {0.}}; + + for (int i = 0; i < data.length; i++) + assertArrayEquals(standardData[i], preprocessor.apply(i, data[i]).features().asArray(), 1e-8); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerTrainerTest.java new file mode 100644 index 0000000000000..c607eefb28140 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerTrainerTest.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.minmaxscaling; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.DatasetBuilder; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link MinMaxScalerTrainer}. + */ +public class MinMaxScalerTrainerTest extends TrainerTest { + /** Tests {@code fit()} method. 
*/ + @Test + public void testFit() { + Map data = new HashMap<>(); + data.put(1, VectorUtils.of(2, 4, 1)); + data.put(2, VectorUtils.of(1, 8, 22)); + data.put(3, VectorUtils.of(4, 10, 100)); + data.put(4, VectorUtils.of(0, 22, 300)); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + MinMaxScalerTrainer standardizationTrainer = new MinMaxScalerTrainer<>(); + + MinMaxScalerPreprocessor preprocessor = standardizationTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertArrayEquals(new double[] {0, 4, 1}, preprocessor.getMin(), 1e-8); + assertArrayEquals(new double[] {4, 22, 300}, preprocessor.getMax(), 1e-8); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationPreprocessorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationPreprocessorTest.java new file mode 100644 index 0000000000000..891854437d3ba --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationPreprocessorTest.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.normalization; + +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.preprocessing.binarization.BinarizationPreprocessor; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link NormalizationPreprocessor}. + */ +public class NormalizationPreprocessorTest { + /** Tests {@code apply()} method.
*/ + @Test + public void testApply() { + double[][] data = new double[][]{ + {1, 2, 1}, + {1, 1, 1}, + {1, 0, 0}, + }; + + Vectorizer vectorizer = new DoubleArrayVectorizer<>(0, 1, 2); + + NormalizationPreprocessor preprocessor = new NormalizationPreprocessor<>( + 1, + vectorizer + ); + + double[][] postProcessedData = new double[][]{ + {0.25, 0.5, 0.25}, + {0.33, 0.33, 0.33}, + {1, 0, 0} + }; + + for (int i = 0; i < data.length; i++) + assertArrayEquals(postProcessedData[i], preprocessor.apply(i, data[i]).features().asArray(), 1e-2); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationTrainerTest.java new file mode 100644 index 0000000000000..929a45bbea089 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationTrainerTest.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.normalization; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.DatasetBuilder; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.preprocessing.binarization.BinarizationTrainer; +import org.junit.Test; + +import static org.apache.ignite.ml.TestUtils.assertEquals; +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link NormalizationTrainer}. + */ +public class NormalizationTrainerTest extends TrainerTest { + /** Tests {@code fit()} method.
*/ + @Test + public void testFit() { + Map data = new HashMap<>(); + data.put(1, VectorUtils.of(2, 4, 1)); + data.put(2, VectorUtils.of(1, 8, 22)); + data.put(3, VectorUtils.of(4, 10, 100)); + data.put(4, VectorUtils.of(0, 22, 300)); + + DatasetBuilder datasetBuilder = new LocalDatasetBuilder<>(data, parts); + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2); + + NormalizationTrainer normalizationTrainer = new NormalizationTrainer() + .withP(3); + + assertEquals(3., normalizationTrainer.p(), 0); + + NormalizationPreprocessor preprocessor = normalizationTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertEquals(normalizationTrainer.p(), preprocessor.p(), 0); + + assertArrayEquals(new double[] {0.125, 0.99, 0.125}, preprocessor.apply(5, VectorUtils.of(1., 8., 1.)).features().asArray(), 1e-2); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/standardscaling/StandardScalerPreprocessorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/standardscaling/StandardScalerPreprocessorTest.java new file mode 100644 index 0000000000000..8b7e695fe0e46 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/standardscaling/StandardScalerPreprocessorTest.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.standardscaling; + +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link StandardScalerPreprocessor}. + */ +public class StandardScalerPreprocessorTest { + /** Test {@code apply()} method. 
*/ + @Test + public void testApply() { + double[][] inputData = new double[][] { + {0, 2., 4., .1}, + {0, 1., -18., 2.2}, + {1, 4., 10., -.1}, + {1, 0., 22., 1.3} + }; + double[] means = new double[] {0.5, 1.75, 4.5, 0.875}; + double[] sigmas = new double[] {0.5, 1.47901995, 14.51723114, 0.93374247}; + + final Vectorizer vectorizer = new DummyVectorizer(0, 1, 2, 3).labeled(0); + + StandardScalerPreprocessor preprocessor = new StandardScalerPreprocessor<>( + means, + sigmas, + vectorizer + ); + + double[][] expectedData = new double[][] { + {-1., 0.16903085, -0.03444183, -0.82999331}, + {-1., -0.50709255, -1.54988233, 1.41902081}, + {1., 1.52127766, 0.37886012, -1.04418513}, + {1., -1.18321596, 1.20546403, 0.45515762} + }; + + for (int i = 0; i < inputData.length; i++) + assertArrayEquals(expectedData[i], preprocessor.apply(i, VectorUtils.of(inputData[i])).features().asArray(), 1e-8); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/standardscaling/StandardScalerTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/standardscaling/StandardScalerTrainerTest.java new file mode 100644 index 0000000000000..0ba3794bfc2fb --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/preprocessing/standardscaling/StandardScalerTrainerTest.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.preprocessing.standardscaling; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.DatasetBuilder; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link StandardScalerTrainer}. + */ +public class StandardScalerTrainerTest extends TrainerTest { + /** Data. */ + private DatasetBuilder datasetBuilder; + + /** Trainer to be tested. 
*/ + private StandardScalerTrainer standardizationTrainer; + + /** */ + @Before + public void prepareDataset() { + Map data = new HashMap<>(); + data.put(1, VectorUtils.of(0, 2., 4., .1)); + data.put(2, VectorUtils.of(0, 1., -18., 2.2)); + data.put(3, VectorUtils.of(1, 4., 10., -.1)); + data.put(4, VectorUtils.of(1, 0., 22., 1.3)); + datasetBuilder = new LocalDatasetBuilder<>(data, parts); + } + + /** */ + @Before + public void createTrainer() { + standardizationTrainer = new StandardScalerTrainer<>(); + } + + /** Test {@code fit()} method. */ + @Test + public void testCalculatesCorrectMeans() { + double[] expectedMeans = new double[] {0.5, 1.75, 4.5, 0.875}; + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2, 3); + + StandardScalerPreprocessor preprocessor = standardizationTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertArrayEquals(expectedMeans, preprocessor.getMeans(), 1e-8); + } + + /** Test {@code fit()} method. */ + @Test + public void testCalculatesCorrectStandardDeviations() { + double[] expectedSigmas = new double[] {0.5, 1.47901995, 14.51723114, 0.93374247}; + + final Vectorizer vectorizer = new DummyVectorizer<>(0, 1, 2, 3); + + StandardScalerPreprocessor preprocessor = standardizationTrainer.fit( + TestUtils.testEnvBuilder(), + datasetBuilder, + vectorizer + ); + + assertArrayEquals(expectedSigmas, preprocessor.getSigmas(), 1e-8); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/recommendation/RecommendationTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/recommendation/RecommendationTestSuite.java new file mode 100644 index 0000000000000..a567d9dd7655b --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/recommendation/RecommendationTestSuite.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.recommendation; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in org.apache.ignite.ml.recommendation.* package. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + RecommendationTrainerTest.class, + RecommendationTrainerSQLTest.class + +}) +public class RecommendationTestSuite { + // No-op.
+} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/recommendation/RecommendationTrainerSQLTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/recommendation/RecommendationTrainerSQLTest.java new file mode 100644 index 0000000000000..7cdf1372068ed --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/recommendation/RecommendationTrainerSQLTest.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.recommendation; + +import java.io.Serializable; +import java.util.Random; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.cache.query.SqlFieldsQuery; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.ml.environment.LearningEnvironmentBuilder; +import org.apache.ignite.ml.sql.SQLFunctions; +import org.apache.ignite.ml.sql.SqlDatasetBuilder; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.junit.Test; + +/** Tests for {@link RecommendationTrainer} with binary objects and SQL. */ +public class RecommendationTrainerSQLTest extends GridCommonAbstractTest { + /** Dummy cache name. */ + private static final String DUMMY_CACHE_NAME = "dummy_cache"; + + /** Number of nodes in grid. */ + private static final int NODE_COUNT = 3; + + /** Ignite instance. */ + private Ignite ignite; + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + for (int i = 1; i <= NODE_COUNT; i++) + startGrid(i); + } + + /** {@inheritDoc} */ + @Override protected void beforeTest() { + /* Grid instance. */ + ignite = grid(NODE_COUNT); + ignite.configuration().setPeerClassLoadingEnabled(true); + IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); + } + + /** */ + @Test + public void testFit() { + // Dummy cache is required to perform SQL queries. 
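+ // The 'ratings' table is created below via SQL DDL, filled with synthetic random ratings, and dropped (together with the dummy cache) in the finally block.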
+ CacheConfiguration cacheCfg = new CacheConfiguration<>(DUMMY_CACHE_NAME) + .setSqlSchema("PUBLIC") + .setSqlFunctionClasses(SQLFunctions.class); + + IgniteCache cache = null; + try { + cache = ignite.getOrCreateCache(cacheCfg); + + System.out.println(">>> Creating table with training data..."); + cache.query(new SqlFieldsQuery("create table ratings (\n" + + " rating_id int primary key,\n" + + " obj_id int,\n" + + " subj_id int,\n" + + " rating float\n" + + ") with \"template=partitioned\";")).getAll(); + + int size = 100; + Random rnd = new Random(0L); + SqlFieldsQuery qry = new SqlFieldsQuery("insert into ratings (rating_id, obj_id, subj_id, rating) values (?, ?, ?, ?)"); + // Quadrant I contains "0", quadrant II contains "1", quadrant III contains "0", quadrant IV contains "1". + for (int i = 0; i < size; i++) { + for (int j = 0; j < size; j++) { + if (rnd.nextBoolean()) { + double rating = ((i > size / 2) ^ (j > size / 2)) ? 1.0 : 0.0; + qry.setArgs(i * size + j, i, j, rating); + cache.query(qry); + } + } + } + + RecommendationTrainer trainer = new RecommendationTrainer() + .withMaxIterations(100) + .withLearningRate(50.0) + .withBatchSize(10) + .withK(2) + .withLearningEnvironmentBuilder(LearningEnvironmentBuilder.defaultBuilder().withRNGSeed(1)) + .withTrainerEnvironment(LearningEnvironmentBuilder.defaultBuilder().withRNGSeed(1).buildForTrainer()); + + RecommendationModel mdl = trainer.fit( + new SqlDatasetBuilder(ignite, "SQL_PUBLIC_RATINGS"), + "obj_id", + "subj_id", + "rating" + ); + + int incorrect = 0; + for (int i = 0; i < size; i++) { + for (int j = 0; j < size; j++) { + if (rnd.nextBoolean()) { + double rating = ((i > size / 2) ^ (j > size / 2)) ? 1.0 : 0.0; + double prediction = mdl.predict(new ObjectSubjectPair<>(i, j)); + if (Math.abs(prediction - rating) >= 1e-5) + incorrect++; + } + } + } + + assertEquals(0, incorrect); + } + finally { + cache.query(new SqlFieldsQuery("DROP TABLE ratings")); + cache.destroy(); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/recommendation/RecommendationTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/recommendation/RecommendationTrainerTest.java new file mode 100644 index 0000000000000..60d79fdab9599 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/recommendation/RecommendationTrainerTest.java @@ -0,0 +1,211 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.recommendation; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.environment.LearningEnvironmentBuilder; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; + +/** Tests for {@link RecommendationTrainer}. */ +public class RecommendationTrainerTest { + /** */ + @Test + public void testFit() { + int size = 100; + Random rnd = new Random(0L); + Double[][] ratings = new Double[size][size]; + // Quadrant I contains "0", quadrant II contains "1", quadrant III contains "0", quadrant IV contains "1". + for (int i = 0; i < size; i++) { + for (int j = 0; j < size; j++) { + if (rnd.nextBoolean()) + ratings[i][j] = ((i > size / 2) ^ (j > size / 2)) ? 1.0 : 0.0; + } + } + + int seq = 0; + Map> data = new HashMap<>(); + for (ObjectSubjectRatingTriplet triplet : toList(ratings)) + data.put(seq++, triplet); + + RecommendationTrainer trainer = new RecommendationTrainer() + .withLearningRate(50.0) + .withBatchSize(10) + .withK(2) + .withMaxIterations(-1) + .withMinMdlImprovement(0.5) + .withLearningEnvironmentBuilder(LearningEnvironmentBuilder.defaultBuilder().withRNGSeed(1)) + .withTrainerEnvironment(LearningEnvironmentBuilder.defaultBuilder().withRNGSeed(1).buildForTrainer()); + + RecommendationModel mdl = trainer.fit(new LocalDatasetBuilder<>(data, 10)); + + int incorrect = 0; + for (ObjectSubjectRatingTriplet triplet : toList(ratings)) { + double prediction = Math.round(mdl.predict(triplet)); + if (Math.abs(prediction - triplet.getRating( + + )) >= 1e-5) + incorrect++; + } + + assertEquals(0, incorrect); + } + + /** */ + @Test + public void testUpdate() { + int size = 100; + Random rnd = new Random(0L); + Double[][] ratings = new Double[size][size]; + // Quadrant I contains "0", quadrant II contains "1", quadrant III contains "0", quadrant IV contains "1". + for (int i = 0; i < size; i++) { + for (int j = 0; j < size; j++) { + if (rnd.nextBoolean()) + ratings[i][j] = ((i > size / 2) ^ (j > size / 2)) ? 
1.0 : 0.0; + } + } + + int seq = 0; + Map> data = new HashMap<>(); + for (ObjectSubjectRatingTriplet triplet : toList(ratings)) + data.put(seq++, triplet); + + RecommendationTrainer trainer = new RecommendationTrainer() + .withLearningRate(50.0) + .withBatchSize(10) + .withK(2) + .withMaxIterations(25) + .withLearningEnvironmentBuilder(LearningEnvironmentBuilder.defaultBuilder().withRNGSeed(1)) + .withTrainerEnvironment(LearningEnvironmentBuilder.defaultBuilder().withRNGSeed(1).buildForTrainer()); + + RecommendationModel mdl = trainer.fit(new LocalDatasetBuilder<>(data, 10)); + + int incorrect = 0; + for (ObjectSubjectRatingTriplet triplet : toList(ratings)) { + double prediction = Math.round(mdl.predict(triplet)); + if (Math.abs(prediction - triplet.getRating()) >= 1e-5) + incorrect++; + } + + assertNotEquals(0, incorrect); + + RecommendationModel updatedMdl = trainer.update(new LocalDatasetBuilder<>(data, 10), mdl); + + incorrect = 0; + for (ObjectSubjectRatingTriplet triplet : toList(ratings)) { + double prediction = Math.round(updatedMdl.predict(triplet)); + if (Math.abs(prediction - triplet.getRating()) >= 1e-5) + incorrect++; + } + + assertEquals(0, incorrect); + } + + /** */ + @Test + public void testUpdateWithChangedData() { + int size = 100; + Random rnd = new Random(0L); + Double[][] ratings = new Double[size][size]; + // Quadrant I contains "0", quadrant II contains "1", quadrant III contains "0", quadrant IV contains "1". + for (int i = 0; i < size; i++) { + for (int j = 0; j < size; j++) { + if (rnd.nextBoolean()) + ratings[i][j] = ((i > size / 2) ^ (j > size / 2)) ? 1.0 : 0.0; + } + } + + int seq = 0; + Map> data = new HashMap<>(); + for (ObjectSubjectRatingTriplet triplet : toList(ratings)) + data.put(seq++, triplet); + + RecommendationTrainer trainer = new RecommendationTrainer() + .withLearningRate(50.0) + .withBatchSize(10) + .withK(2) + .withMaxIterations(-1) + .withMinMdlImprovement(0.5) + .withLearningEnvironmentBuilder(LearningEnvironmentBuilder.defaultBuilder().withRNGSeed(1)) + .withTrainerEnvironment(LearningEnvironmentBuilder.defaultBuilder().withRNGSeed(1).buildForTrainer()); + + RecommendationModel mdl = trainer.fit(new LocalDatasetBuilder<>(data, 10)); + + int incorrect = 0; + for (ObjectSubjectRatingTriplet triplet : toList(ratings)) { + double prediction = Math.round(mdl.predict(triplet)); + if (Math.abs(prediction - triplet.getRating( + + )) >= 1e-5) + incorrect++; + } + + assertEquals(0, incorrect); + + ratings = new Double[size][size]; + // Quadrant I contains "1", quadrant II contains "0", quadrant III contains "1", quadrant IV contains "0". + for (int i = 0; i < size; i++) { + for (int j = 0; j < size; j++) { + if (rnd.nextBoolean()) + ratings[i][j] = ((i > size / 2) ^ (j > size / 2)) ? 0.0 : 1.0; + } + } + + seq = 0; + data = new HashMap<>(); + for (ObjectSubjectRatingTriplet triplet : toList(ratings)) + data.put(seq++, triplet); + + RecommendationModel updatedMdl = trainer.update(new LocalDatasetBuilder<>(data, 10), mdl); + + incorrect = 0; + for (ObjectSubjectRatingTriplet triplet : toList(ratings)) { + double prediction = Math.round(updatedMdl.predict(triplet)); + if (Math.abs(prediction - triplet.getRating()) >= 1e-5) + incorrect++; + } + + assertEquals(0, incorrect); + } + + /** + * Converts rating matrix to list of {@link ObjectSubjectRatingTriplet} objects. + * + * @param ratings Rating matrix. + * @return List of {@link ObjectSubjectRatingTriplet} objects. 
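+     *     Cells that are {@code null} in the matrix are skipped.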
+ */ + private static List> toList(Double[][] ratings) { + List> res = new ArrayList<>(); + + for (int i = 0; i < ratings.length; i++) { + for (int j = 0; j < ratings[i].length; j++) { + if (ratings[i][j] != null) + res.add(new ObjectSubjectRatingTriplet<>(i, j, ratings[i][j])); + } + } + + return res; + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/RegressionsTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/RegressionsTestSuite.java new file mode 100644 index 0000000000000..2fa69ef20c007 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/RegressionsTestSuite.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.regressions; + +import org.apache.ignite.ml.regressions.linear.LinearRegressionLSQRTrainerTest; +import org.apache.ignite.ml.regressions.linear.LinearRegressionModelTest; +import org.apache.ignite.ml.regressions.linear.LinearRegressionSGDTrainerTest; +import org.apache.ignite.ml.regressions.logistic.LogisticRegressionModelTest; +import org.apache.ignite.ml.regressions.logistic.LogisticRegressionSGDTrainerTest; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in org.apache.ignite.ml.regressions.* package. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + LinearRegressionModelTest.class, + LinearRegressionLSQRTrainerTest.class, + LinearRegressionSGDTrainerTest.class, + LogisticRegressionModelTest.class, + LogisticRegressionSGDTrainerTest.class +}) +public class RegressionsTestSuite { + // No-op. +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java new file mode 100644 index 0000000000000..0325d37472c83 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java @@ -0,0 +1,152 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.regressions.linear; + +import java.util.HashMap; +import java.util.Map; +import java.util.Random; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link LinearRegressionLSQRTrainer}. + */ +public class LinearRegressionLSQRTrainerTest extends TrainerTest { + /** + * Tests {@code fit()} method on a simple small dataset. + */ + @Test + public void testSmallDataFit() { + Map data = new HashMap<>(); + data.put(0, new double[]{-1.0915526, 1.81983527, -0.91409478, 0.70890712, -24.55724107}); + data.put(1, new double[]{-0.61072904, 0.37545517, 0.21705352, 0.09516495, -26.57226867}); + data.put(2, new double[]{0.05485406, 0.88219898, -0.80584547, 0.94668307, 61.80919728}); + data.put(3, new double[]{-0.24835094, -0.34000053, -1.69984651, -1.45902635, -161.65525991}); + data.put(4, new double[]{0.63675392, 0.31675535, 0.38837437, -1.1221971, -14.46432611}); + data.put(5, new double[]{0.14194017, 2.18158997, -0.28397346, -0.62090588, -3.2122197}); + data.put(6, new double[]{-0.53487507, 1.4454797, 0.21570443, -0.54161422, -46.5469012}); + data.put(7, new double[]{-1.58812173, -0.73216803, -2.15670676, -1.03195988, -247.23559889}); + data.put(8, new double[]{0.20702671, 0.92864654, 0.32721202, -0.09047503, 31.61484949}); + data.put(9, new double[]{-0.37890345, -0.04846179, -0.84122753, -1.14667474, -124.92598583}); + + LinearRegressionLSQRTrainer trainer = new LinearRegressionLSQRTrainer(); + + LinearRegressionModel mdl = trainer.fit( + data, + parts, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + assertArrayEquals( + new double[]{72.26948107, 15.95144674, 24.07403921, 66.73038781}, + mdl.weights().getStorage().data(), + 1e-6 + ); + + assertEquals(2.8421709430404007e-14, mdl.intercept(), 1e-6); + } + + /** + * Tests {@code fit()} method on a big (100000 x 100) dataset. 
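+     * <p>
+     * Every generated row uses the constant {@code intercept} as its label, so the trainer is
+     * expected to recover near-zero weights together with that intercept, which is what the
+     * assertions verify (within {@code 1e-6}).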
+ */ + @Test + public void testBigDataFit() { + Random rnd = new Random(0); + Map data = new HashMap<>(); + double[] coef = new double[100]; + double intercept = rnd.nextDouble() * 10; + + for (int i = 0; i < 100000; i++) { + double[] x = new double[coef.length + 1]; + + for (int j = 0; j < coef.length; j++) + x[j] = rnd.nextDouble() * 10; + + x[coef.length] = intercept; + + data.put(i, x); + } + + LinearRegressionLSQRTrainer trainer = new LinearRegressionLSQRTrainer(); + + LinearRegressionModel mdl = trainer.fit( + data, + parts, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + assertArrayEquals(coef, mdl.weights().getStorage().data(), 1e-6); + + assertEquals(intercept, mdl.intercept(), 1e-6); + } + + /** */ + @Test + public void testUpdate() { + Random rnd = new Random(0); + Map data = new HashMap<>(); + double[] coef = new double[100]; + double intercept = rnd.nextDouble() * 10; + + for (int i = 0; i < 100000; i++) { + double[] x = new double[coef.length + 1]; + + for (int j = 0; j < coef.length; j++) + x[j] = rnd.nextDouble() * 10; + + x[coef.length] = intercept; + + data.put(i, x); + } + + LinearRegressionLSQRTrainer trainer = new LinearRegressionLSQRTrainer(); + + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST); + LinearRegressionModel originalMdl = trainer.fit( + data, + parts, + vectorizer + ); + + LinearRegressionModel updatedOnSameDS = trainer.update( + originalMdl, + data, + parts, + vectorizer + ); + + LinearRegressionModel updatedOnEmptyDS = trainer.update( + originalMdl, + new HashMap<>(), + parts, + vectorizer + ); + + assertArrayEquals(originalMdl.weights().getStorage().data(), updatedOnSameDS.weights().getStorage().data(), 1e-6); + assertEquals(originalMdl.intercept(), updatedOnSameDS.intercept(), 1e-6); + + assertArrayEquals(originalMdl.weights().getStorage().data(), updatedOnEmptyDS.weights().getStorage().data(), 1e-6); + assertEquals(originalMdl.intercept(), updatedOnEmptyDS.intercept(), 1e-6); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionModelTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionModelTest.java new file mode 100644 index 0000000000000..78a5766bf0f74 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionModelTest.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.regressions.linear; + +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.math.exceptions.math.CardinalityException; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Test; + +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link LinearRegressionModel}. + */ +public class LinearRegressionModelTest { + /** */ + private static final double PRECISION = 1e-6; + + /** */ + @Test + public void testPredict() { + Vector weights = new DenseVector(new double[]{2.0, 3.0}); + LinearRegressionModel mdl = new LinearRegressionModel(weights, 1.0); + + assertTrue(!mdl.toString().isEmpty()); + assertTrue(!mdl.toString(true).isEmpty()); + assertTrue(!mdl.toString(false).isEmpty()); + + Vector observation = new DenseVector(new double[]{1.0, 1.0}); + TestUtils.assertEquals(1.0 + 2.0 * 1.0 + 3.0 * 1.0, mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[]{2.0, 1.0}); + TestUtils.assertEquals(1.0 + 2.0 * 2.0 + 3.0 * 1.0, mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[]{1.0, 2.0}); + TestUtils.assertEquals(1.0 + 2.0 * 1.0 + 3.0 * 2.0, mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[]{-2.0, 1.0}); + TestUtils.assertEquals(1.0 - 2.0 * 2.0 + 3.0 * 1.0, mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[]{1.0, -2.0}); + TestUtils.assertEquals(1.0 + 2.0 * 1.0 - 3.0 * 2.0, mdl.predict(observation), PRECISION); + } + + /** */ + @Test(expected = CardinalityException.class) + public void testPredictOnAnObservationWithWrongCardinality() { + Vector weights = new DenseVector(new double[]{2.0, 3.0}); + + LinearRegressionModel mdl = new LinearRegressionModel(weights, 1.0); + + Vector observation = new DenseVector(new double[]{1.0}); + + mdl.predict(observation); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionSGDTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionSGDTrainerTest.java new file mode 100644 index 0000000000000..9f503697697af --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionSGDTrainerTest.java @@ -0,0 +1,130 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.regressions.linear; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.nn.UpdatesStrategy; +import org.apache.ignite.ml.optimization.updatecalculators.RPropParameterUpdate; +import org.apache.ignite.ml.optimization.updatecalculators.RPropUpdateCalculator; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link LinearRegressionSGDTrainer}. + */ +public class LinearRegressionSGDTrainerTest extends TrainerTest { + /** + * Tests {@code fit()} method on a simple small dataset. + */ + @Test + public void testSmallDataFit() { + Map data = new HashMap<>(); + data.put(0, new double[]{-1.0915526, 1.81983527, -0.91409478, 0.70890712, -24.55724107}); + data.put(1, new double[]{-0.61072904, 0.37545517, 0.21705352, 0.09516495, -26.57226867}); + data.put(2, new double[]{0.05485406, 0.88219898, -0.80584547, 0.94668307, 61.80919728}); + data.put(3, new double[]{-0.24835094, -0.34000053, -1.69984651, -1.45902635, -161.65525991}); + data.put(4, new double[]{0.63675392, 0.31675535, 0.38837437, -1.1221971, -14.46432611}); + data.put(5, new double[]{0.14194017, 2.18158997, -0.28397346, -0.62090588, -3.2122197}); + data.put(6, new double[]{-0.53487507, 1.4454797, 0.21570443, -0.54161422, -46.5469012}); + data.put(7, new double[]{-1.58812173, -0.73216803, -2.15670676, -1.03195988, -247.23559889}); + data.put(8, new double[]{0.20702671, 0.92864654, 0.32721202, -0.09047503, 31.61484949}); + data.put(9, new double[]{-0.37890345, -0.04846179, -0.84122753, -1.14667474, -124.92598583}); + + LinearRegressionSGDTrainer trainer = new LinearRegressionSGDTrainer<>(new UpdatesStrategy<>( + new RPropUpdateCalculator(), + RPropParameterUpdate.SUM_LOCAL, + RPropParameterUpdate.AVG + ), 100000, 10, 100, 123L); + + LinearRegressionModel mdl = trainer.fit( + data, parts, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + assertArrayEquals( + new double[]{72.26948107, 15.95144674, 24.07403921, 66.73038781}, + mdl.weights().getStorage().data(), + 1e-1 + ); + + assertEquals(2.8421709430404007e-14, mdl.intercept(), 1e-1); + } + + /** */ + @Test + public void testUpdate() { + Map data = new HashMap<>(); + data.put(0, new double[]{-1.0915526, 1.81983527, -0.91409478, 0.70890712, -24.55724107}); + data.put(1, new double[]{-0.61072904, 0.37545517, 0.21705352, 0.09516495, -26.57226867}); + data.put(2, new double[]{0.05485406, 0.88219898, -0.80584547, 0.94668307, 61.80919728}); + data.put(3, new double[]{-0.24835094, -0.34000053, -1.69984651, -1.45902635, -161.65525991}); + data.put(4, new double[]{0.63675392, 0.31675535, 0.38837437, -1.1221971, -14.46432611}); + data.put(5, new double[]{0.14194017, 2.18158997, -0.28397346, -0.62090588, -3.2122197}); + data.put(6, new double[]{-0.53487507, 1.4454797, 0.21570443, -0.54161422, -46.5469012}); + data.put(7, new double[]{-1.58812173, -0.73216803, -2.15670676, -1.03195988, -247.23559889}); + data.put(8, new double[]{0.20702671, 0.92864654, 0.32721202, -0.09047503, 31.61484949}); + data.put(9, new double[]{-0.37890345, -0.04846179, -0.84122753, -1.14667474, -124.92598583}); + + LinearRegressionSGDTrainer trainer = new LinearRegressionSGDTrainer<>(new UpdatesStrategy<>( + new RPropUpdateCalculator(), + 
RPropParameterUpdate.SUM_LOCAL, + RPropParameterUpdate.AVG + ), 100000, 10, 100, 0L); + + LinearRegressionModel originalMdl = trainer.withSeed(0).fit( + data, parts, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + LinearRegressionModel updatedOnSameDS = trainer.withSeed(0).update( + originalMdl, + data, + parts, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + LinearRegressionModel updatedOnEmptyDS = trainer.withSeed(0).update( + originalMdl, + new HashMap<>(), + parts, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ); + + assertArrayEquals( + originalMdl.weights().getStorage().data(), + updatedOnSameDS.weights().getStorage().data(), + 1.0 + ); + + assertEquals(originalMdl.intercept(), updatedOnSameDS.intercept(), 1.0); + + assertArrayEquals( + originalMdl.weights().getStorage().data(), + updatedOnEmptyDS.weights().getStorage().data(), + 1e-1 + ); + + assertEquals(originalMdl.intercept(), updatedOnEmptyDS.intercept(), 1e-1); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionModelTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionModelTest.java new file mode 100644 index 0000000000000..cf37ad9a4e614 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionModelTest.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.regressions.logistic; + +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.math.exceptions.math.CardinalityException; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link LogisticRegressionModel}. 
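+ * <p>
+ * The expected raw-label predictions are the logistic (sigmoid) function applied to the linear
+ * term, mirroring the {@code sigmoid} helper at the bottom of this class. A minimal sketch with
+ * illustrative names (not part of the model API):
+ * <pre>{@code
+ * double z = intercept + 2.0 * x1 + 3.0 * x2;      // linear term for weights {2.0, 3.0}
+ * double rawLbl = 1.0 / (1.0 + Math.exp(-z));      // sigmoid(z)
+ * }</pre>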
+ */ +public class LogisticRegressionModelTest { + /** */ + private static final double PRECISION = 1e-6; + + /** */ + @Test + public void testPredict() { + Vector weights = new DenseVector(new double[] {2.0, 3.0}); + + assertFalse(new LogisticRegressionModel(weights, 1.0).isKeepingRawLabels()); + + assertEquals(0.1, new LogisticRegressionModel(weights, 1.0).withThreshold(0.1).threshold(), 0); + + assertTrue(!new LogisticRegressionModel(weights, 1.0).toString().isEmpty()); + assertTrue(!new LogisticRegressionModel(weights, 1.0).toString(true).isEmpty()); + assertTrue(!new LogisticRegressionModel(weights, 1.0).toString(false).isEmpty()); + + verifyPredict(new LogisticRegressionModel(weights, 1.0).withRawLabels(true)); + verifyPredict(new LogisticRegressionModel(null, 1.0).withRawLabels(true).withWeights(weights)); + verifyPredict(new LogisticRegressionModel(weights, 1.0).withRawLabels(true).withThreshold(0.5)); + verifyPredict(new LogisticRegressionModel(weights, 0.0).withRawLabels(true).withIntercept(1.0)); + } + + /** */ + @Test(expected = CardinalityException.class) + public void testPredictOnAnObservationWithWrongCardinality() { + Vector weights = new DenseVector(new double[] {2.0, 3.0}); + + LogisticRegressionModel mdl = new LogisticRegressionModel(weights, 1.0); + + Vector observation = new DenseVector(new double[] {1.0}); + + mdl.predict(observation); + } + + /** */ + private void verifyPredict(LogisticRegressionModel mdl) { + Vector observation = new DenseVector(new double[] {1.0, 1.0}); + TestUtils.assertEquals(sigmoid(1.0 + 2.0 * 1.0 + 3.0 * 1.0), mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[] {2.0, 1.0}); + TestUtils.assertEquals(sigmoid(1.0 + 2.0 * 2.0 + 3.0 * 1.0), mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[] {1.0, 2.0}); + TestUtils.assertEquals(sigmoid(1.0 + 2.0 * 1.0 + 3.0 * 2.0), mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[] {-2.0, 1.0}); + TestUtils.assertEquals(sigmoid(1.0 - 2.0 * 2.0 + 3.0 * 1.0), mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[] {1.0, -2.0}); + TestUtils.assertEquals(sigmoid(1.0 + 2.0 * 1.0 - 3.0 * 2.0), mdl.predict(observation), PRECISION); + } + + /** + * Sigmoid function. + * + * @param z The regression value. + * @return The result. + */ + private static double sigmoid(double z) { + return 1.0 / (1.0 + Math.exp(-z)); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionSGDTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionSGDTrainerTest.java new file mode 100644 index 0000000000000..1c7e6473fd4a0 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionSGDTrainerTest.java @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.regressions.logistic; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.nn.UpdatesStrategy; +import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDParameterUpdate; +import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator; +import org.junit.Test; + +/** + * Tests for {@link LogisticRegressionSGDTrainer}. + */ +public class LogisticRegressionSGDTrainerTest extends TrainerTest { + /** + * Test trainer on classification model y = x. + */ + @Test + public void trainWithTheLinearlySeparableCase() { + Map cacheMock = new HashMap<>(); + + for (int i = 0; i < twoLinearlySeparableClasses.length; i++) + cacheMock.put(i, twoLinearlySeparableClasses[i]); + + LogisticRegressionSGDTrainer trainer = new LogisticRegressionSGDTrainer() + .withUpdatesStgy(new UpdatesStrategy<>(new SimpleGDUpdateCalculator(0.2), + SimpleGDParameterUpdate.SUM_LOCAL, SimpleGDParameterUpdate.AVG)) + .withMaxIterations(100000) + .withLocIterations(100) + .withBatchSize(14) + .withSeed(123L); + + LogisticRegressionModel mdl = trainer.fit(cacheMock, parts, new DoubleArrayVectorizer().labeled(0)); + + TestUtils.assertEquals(0, mdl.predict(VectorUtils.of(100, 10)), PRECISION); + TestUtils.assertEquals(1, mdl.predict(VectorUtils.of(10, 100)), PRECISION); + } + + /** */ + @Test + public void testUpdate() { + Map cacheMock = new HashMap<>(); + + for (int i = 0; i < twoLinearlySeparableClasses.length; i++) + cacheMock.put(i, twoLinearlySeparableClasses[i]); + + LogisticRegressionSGDTrainer trainer = new LogisticRegressionSGDTrainer() + .withUpdatesStgy(new UpdatesStrategy<>(new SimpleGDUpdateCalculator(0.2), + SimpleGDParameterUpdate.SUM_LOCAL, SimpleGDParameterUpdate.AVG)) + .withMaxIterations(100000) + .withLocIterations(100) + .withBatchSize(10) + .withSeed(123L); + + LogisticRegressionModel originalMdl = trainer.fit( + cacheMock, + parts, + new DoubleArrayVectorizer().labeled(0) + ); + + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST); + LogisticRegressionModel updatedOnSameDS = trainer.update( + originalMdl, + cacheMock, + parts, + vectorizer + ); + + LogisticRegressionModel updatedOnEmptyDS = trainer.update( + originalMdl, + new HashMap<>(), + parts, + vectorizer + ); + + Vector v1 = VectorUtils.of(100, 10); + Vector v2 = VectorUtils.of(10, 100); + TestUtils.assertEquals(originalMdl.predict(v1), updatedOnSameDS.predict(v1), PRECISION); + TestUtils.assertEquals(originalMdl.predict(v2), updatedOnSameDS.predict(v2), PRECISION); + TestUtils.assertEquals(originalMdl.predict(v2), updatedOnEmptyDS.predict(v2), PRECISION); + TestUtils.assertEquals(originalMdl.predict(v1), updatedOnEmptyDS.predict(v1), PRECISION); + 
} +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/SelectionTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/SelectionTestSuite.java new file mode 100644 index 0000000000000..08072ca899f9f --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/SelectionTestSuite.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.selection; + +import org.apache.ignite.ml.selection.cv.CrossValidationTest; +import org.apache.ignite.ml.selection.paramgrid.ParameterSetGeneratorTest; +import org.apache.ignite.ml.selection.scoring.cursor.CacheBasedLabelPairCursorTest; +import org.apache.ignite.ml.selection.scoring.cursor.LocalLabelPairCursorTest; +import org.apache.ignite.ml.selection.scoring.evaluator.BinaryClassificationEvaluatorTest; +import org.apache.ignite.ml.selection.scoring.evaluator.RegressionEvaluatorTest; +import org.apache.ignite.ml.selection.scoring.evaluator.aggregator.BinaryClassificationPointwiseMetricStatsAggregatorTest; +import org.apache.ignite.ml.selection.scoring.evaluator.aggregator.RegressionMetricStatsAggregatorTest; +import org.apache.ignite.ml.selection.scoring.evaluator.context.BinaryClassificationEvaluationContextTest; +import org.apache.ignite.ml.selection.scoring.metric.classification.BinaryClassificationMetricsTest; +import org.apache.ignite.ml.selection.scoring.metric.regression.RegressionMetricsTest; +import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitterTest; +import org.apache.ignite.ml.selection.split.mapper.SHA256UniformMapperTest; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in org.apache.ignite.ml.selection.* package. 
+ */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + CrossValidationTest.class, + ParameterSetGeneratorTest.class, + LocalLabelPairCursorTest.class, + SHA256UniformMapperTest.class, + TrainTestDatasetSplitterTest.class, + CacheBasedLabelPairCursorTest.class, + BinaryClassificationEvaluatorTest.class, + RegressionEvaluatorTest.class, + BinaryClassificationPointwiseMetricStatsAggregatorTest.class, + RegressionMetricStatsAggregatorTest.class, + BinaryClassificationEvaluationContextTest.class, + BinaryClassificationMetricsTest.class, + RegressionMetricsTest.class +}) +public class SelectionTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/cv/CrossValidationTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/cv/CrossValidationTest.java new file mode 100644 index 0000000000000..87addb0697dfd --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/cv/CrossValidationTest.java @@ -0,0 +1,344 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.selection.cv; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.nn.UpdatesStrategy; +import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDParameterUpdate; +import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator; +import org.apache.ignite.ml.pipeline.Pipeline; +import org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel; +import org.apache.ignite.ml.regressions.logistic.LogisticRegressionSGDTrainer; +import org.apache.ignite.ml.selection.paramgrid.ParamGrid; +import org.apache.ignite.ml.selection.paramgrid.RandomStrategy; +import org.apache.ignite.ml.selection.scoring.metric.MetricName; +import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer; +import org.apache.ignite.ml.tree.DecisionTreeModel; +import org.junit.Test; + +import static org.apache.ignite.ml.common.TrainerTest.twoLinearlySeparableClasses; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link CrossValidation}. + */ +public class CrossValidationTest { + /** */ + @Test + public void testScoreWithGoodDataset() { + Map data = new HashMap<>(); + + for (int i = 0; i < 1000; i++) + data.put(i, new double[] {i > 500 ? 
1.0 : 0.0, i}); + + DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(1, 0); + + DebugCrossValidation scoreCalculator = + new DebugCrossValidation<>(); + + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST); + + int folds = 4; + + scoreCalculator + .withUpstreamMap(data) + .withAmountOfParts(1) + .withTrainer(trainer) + .withMetric(MetricName.ACCURACY) + .withPreprocessor(vectorizer) + .withAmountOfFolds(folds) + .isRunningOnPipeline(false); + + verifyScores(folds, scoreCalculator.scoreByFolds()); + } + + /** */ + @Test + public void testScoreWithGoodDatasetAndBinaryMetrics() { + Map data = new HashMap<>(); + + for (int i = 0; i < 1000; i++) + data.put(i, new double[] {i > 500 ? 1.0 : 0.0, i}); + + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST); + + DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(1, 0); + + DebugCrossValidation scoreCalculator = + new DebugCrossValidation<>(); + + int folds = 4; + + scoreCalculator + .withUpstreamMap(data) + .withAmountOfParts(1) + .withTrainer(trainer) + .withMetric(MetricName.ACCURACY) + .withPreprocessor(vectorizer) + .withAmountOfFolds(folds) + .isRunningOnPipeline(false); + + verifyScores(folds, scoreCalculator.scoreByFolds()); + } + + /** + * + */ + @Test + public void testBasicFunctionality() { + Map data = new HashMap<>(); + + for (int i = 0; i < twoLinearlySeparableClasses.length; i++) + data.put(i, twoLinearlySeparableClasses[i]); + + LogisticRegressionSGDTrainer trainer = new LogisticRegressionSGDTrainer() + .withUpdatesStgy(new UpdatesStrategy<>(new SimpleGDUpdateCalculator(0.2), + SimpleGDParameterUpdate.SUM_LOCAL, SimpleGDParameterUpdate.AVG)) + .withMaxIterations(100000) + .withLocIterations(100) + .withBatchSize(14) + .withSeed(123L); + + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST); + + DebugCrossValidation scoreCalculator = + new DebugCrossValidation<>(); + + int folds = 4; + + scoreCalculator + .withUpstreamMap(data) + .withAmountOfParts(1) + .withTrainer(trainer) + .withMetric(MetricName.ACCURACY) + .withPreprocessor(vectorizer) + .withAmountOfFolds(folds) + .isRunningOnPipeline(false); + + double[] scores = scoreCalculator.scoreByFolds(); + + assertEquals(0.8389830508474576, scores[0], 1e-6); + assertEquals(0.9402985074626866, scores[1], 1e-6); + assertEquals(0.8809523809523809, scores[2], 1e-6); + assertEquals(0.9921259842519685, scores[3], 1e-6); + } + + /** + * + */ + @Test + public void testGridSearch() { + Map data = new HashMap<>(); + + for (int i = 0; i < twoLinearlySeparableClasses.length; i++) + data.put(i, twoLinearlySeparableClasses[i]); + + LogisticRegressionSGDTrainer trainer = new LogisticRegressionSGDTrainer() + .withUpdatesStgy(new UpdatesStrategy<>(new SimpleGDUpdateCalculator(0.2), + SimpleGDParameterUpdate.SUM_LOCAL, SimpleGDParameterUpdate.AVG)) + .withMaxIterations(100000) + .withLocIterations(100) + .withBatchSize(14) + .withSeed(123L); + + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST); + + ParamGrid paramGrid = new ParamGrid() + .addHyperParam("maxIterations", trainer::withMaxIterations, new Double[]{10.0, 100.0, 1000.0, 10000.0}) + .addHyperParam("locIterations", trainer::withLocIterations, new Double[]{10.0, 100.0, 1000.0, 10000.0}) + .addHyperParam("batchSize", trainer::withBatchSize, new Double[]{1.0, 2.0, 4.0, 8.0, 16.0}); + + DebugCrossValidation 
scoreCalculator = + (DebugCrossValidation) + new DebugCrossValidation() + .withUpstreamMap(data) + .withAmountOfParts(1) + .withTrainer(trainer) + .withMetric(MetricName.ACCURACY) + .withPreprocessor(vectorizer) + .withAmountOfFolds(4) + .isRunningOnPipeline(false) + .withParamGrid(paramGrid); + + CrossValidationResult crossValidationRes = scoreCalculator.tuneHyperParameters(); + + assertArrayEquals( + crossValidationRes.getBestScore(), + new double[]{0.9745762711864406, 1.0, 0.8968253968253969, 0.8661417322834646}, + 1e-6 + ); + assertEquals(crossValidationRes.getBestAvgScore(), 0.9343858500738256, 1e-6); + assertEquals(crossValidationRes.getScoringBoard().size(), 80); + } + + /** + * + */ + @Test + public void testRandomSearch() { + Map data = new HashMap<>(); + + for (int i = 0; i < twoLinearlySeparableClasses.length; i++) + data.put(i, twoLinearlySeparableClasses[i]); + + LogisticRegressionSGDTrainer trainer = new LogisticRegressionSGDTrainer() + .withUpdatesStgy(new UpdatesStrategy<>(new SimpleGDUpdateCalculator(0.2), + SimpleGDParameterUpdate.SUM_LOCAL, SimpleGDParameterUpdate.AVG)) + .withMaxIterations(100000) + .withLocIterations(100) + .withBatchSize(14) + .withSeed(123L); + + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST); + + ParamGrid paramGrid = new ParamGrid() + .withParameterSearchStrategy( + new RandomStrategy() + .withMaxTries(10) + .withSeed(1234L) + .withSatisfactoryFitness(0.9) + ) + .addHyperParam("maxIterations", trainer::withMaxIterations, new Double[]{10.0, 100.0, 1000.0, 10000.0}) + .addHyperParam("locIterations", trainer::withLocIterations, new Double[]{10.0, 100.0, 1000.0, 10000.0}) + .addHyperParam("batchSize", trainer::withBatchSize, new Double[]{1.0, 2.0, 4.0, 8.0, 16.0}); + + DebugCrossValidation scoreCalculator = + (DebugCrossValidation) + new DebugCrossValidation() + .withUpstreamMap(data) + .withAmountOfParts(1) + .withTrainer(trainer) + .withMetric(MetricName.ACCURACY) + .withPreprocessor(vectorizer) + .withAmountOfFolds(4) + .isRunningOnPipeline(false) + .withParamGrid(paramGrid); + + CrossValidationResult crossValidationRes = scoreCalculator.tuneHyperParameters(); + + assertEquals(crossValidationRes.getBestAvgScore(), 0.9343858500738256, 1e-6); + assertEquals(crossValidationRes.getScoringBoard().size(), 10); + } + + /** + * + */ + @Test + public void testRandomSearchWithPipeline() { + Map data = new HashMap<>(); + + for (int i = 0; i < twoLinearlySeparableClasses.length; i++) + data.put(i, twoLinearlySeparableClasses[i]); + + LogisticRegressionSGDTrainer trainer = new LogisticRegressionSGDTrainer() + .withUpdatesStgy(new UpdatesStrategy<>(new SimpleGDUpdateCalculator(0.2), + SimpleGDParameterUpdate.SUM_LOCAL, SimpleGDParameterUpdate.AVG)) + .withMaxIterations(100000) + .withLocIterations(100) + .withBatchSize(14) + .withSeed(123L); + + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST); + + ParamGrid paramGrid = new ParamGrid() + .withParameterSearchStrategy( + new RandomStrategy() + .withMaxTries(10) + .withSeed(1234L) + .withSatisfactoryFitness(0.9) + ) + .addHyperParam("maxIterations", trainer::withMaxIterations, new Double[]{10.0, 100.0, 1000.0, 10000.0}) + .addHyperParam("locIterations", trainer::withLocIterations, new Double[]{10.0, 100.0, 1000.0, 10000.0}) + .addHyperParam("batchSize", trainer::withBatchSize, new Double[]{1.0, 2.0, 4.0, 8.0, 16.0}); + + Pipeline pipeline = new Pipeline() + .addVectorizer(vectorizer) + .addTrainer(trainer); + + 
DebugCrossValidation scoreCalculator = + (DebugCrossValidation) + new DebugCrossValidation() + .withUpstreamMap(data) + .withAmountOfParts(1) + .withPipeline(pipeline) + .withMetric(MetricName.ACCURACY) + .withPreprocessor(vectorizer) + .withAmountOfFolds(4) + .isRunningOnPipeline(true) + .withParamGrid(paramGrid); + + CrossValidationResult crossValidationRes = scoreCalculator.tuneHyperParameters(); + + assertEquals(crossValidationRes.getBestAvgScore(), 0.9343858500738256, 1e-6); + assertEquals(crossValidationRes.getScoringBoard().size(), 10); + } + + /** */ + @Test + public void testScoreWithBadDataset() { + Map data = new HashMap<>(); + + for (int i = 0; i < 1000; i++) + data.put(i, new double[] { i, i % 2 == 0 ? 1.0 : 0.0}); + + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST); + + DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(1, 0); + + DebugCrossValidation scoreCalculator = + new DebugCrossValidation<>(); + + int folds = 4; + + scoreCalculator + .withUpstreamMap(data) + .withAmountOfParts(1) + .withTrainer(trainer) + .withMetric(MetricName.ACCURACY) + .withPreprocessor(vectorizer) + .withAmountOfFolds(folds) + .isRunningOnPipeline(false); + + double[] scores = scoreCalculator.scoreByFolds(); + + assertEquals(folds, scores.length); + + for (int i = 0; i < folds; i++) + assertTrue(scores[i] < 0.6); + } + + /** */ + private void verifyScores(int folds, double[] scores) { + assertEquals(folds, scores.length); + + for (int i = 0; i < folds; i++) + assertEquals(1, scores[i], 1e-1); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/paramgrid/ParameterSetGeneratorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/paramgrid/ParameterSetGeneratorTest.java new file mode 100644 index 0000000000000..7f5d3e3cde11c --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/paramgrid/ParameterSetGeneratorTest.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.selection.paramgrid; + +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link ParameterSetGenerator}. 
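+ * <p>
+ * The expected count in {@code testParamSetGenerator()} assumes the generator enumerates every
+ * combination of the per-parameter values (a Cartesian product), i.e. 2 * 4 * 2 * 1 = 16 sets for
+ * the four value arrays used there.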
+ */ +public class ParameterSetGeneratorTest { + /** */ + @Test + public void testParamSetGenerator() { + Map map = new TreeMap<>(); + map.put(0, new Double[]{1.1, 2.1}); + map.put(1, new Double[]{1.2, 2.2, 3.2, 4.2}); + map.put(2, new Double[]{1.3, 2.3}); + map.put(3, new Double[]{1.4}); + + List res = new ParameterSetGenerator(map).generate(); + assertEquals(res.size(), 16); + } + + /** */ + @Test(expected = java.lang.AssertionError.class) + public void testParamSetGeneratorWithEmptyMap() { + Map map = new TreeMap<>(); + new ParameterSetGenerator(map).generate(); + + } + + /** */ + @Test(expected = java.lang.AssertionError.class) + public void testNullHandling() { + new ParameterSetGenerator(null).generate(); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/TestLabelPairCursor.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/TestLabelPairCursor.java new file mode 100644 index 0000000000000..84cfd3b5dbbea --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/TestLabelPairCursor.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.selection.scoring; + +import java.util.Iterator; +import java.util.List; +import org.apache.ignite.ml.selection.scoring.cursor.LabelPairCursor; +import org.jetbrains.annotations.NotNull; + +/** + * Util truth with prediction cursor to be used in tests. + * + * @param Type of a label (truth or prediction). + */ +public class TestLabelPairCursor implements LabelPairCursor { + /** List of truth values. */ + private final List truth; + + /** List of predicted values. */ + private final List predicted; + + /** + * Constructs a new instance of test truth with prediction cursor. + * + * @param truth List of truth values. + * @param predicted List of predicted values. + */ + public TestLabelPairCursor(List truth, List predicted) { + this.truth = truth; + this.predicted = predicted; + } + + /** {@inheritDoc} */ + @Override public void close() throws Exception { + /* Do nothing. */ + } + + /** {@inheritDoc} */ + @NotNull @Override public Iterator> iterator() { + return new TestTruthWithPredictionIterator<>(truth.iterator(), predicted.iterator()); + } + + /** + * Util truth with prediction iterator to be used in tests. + * + * @param Type of a label (truth or prediction). + */ + private static final class TestTruthWithPredictionIterator implements Iterator> { + /** Iterator of truth values. */ + private final Iterator truthIter; + + /** Iterator of predicted values. */ + private final Iterator predictedIter; + + /** + * Constructs a new instance of test truth with prediction iterator. 
+ * + * @param truthIter Iterator of truth values. + * @param predictedIter Iterator of predicted values. + */ + public TestTruthWithPredictionIterator(Iterator truthIter, Iterator predictedIter) { + this.truthIter = truthIter; + this.predictedIter = predictedIter; + } + + /** {@inheritDoc} */ + @Override public boolean hasNext() { + return truthIter.hasNext() && predictedIter.hasNext(); + } + + /** {@inheritDoc} */ + @Override public LabelPair next() { + return new LabelPair<>(truthIter.next(), predictedIter.next()); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/cursor/CacheBasedLabelPairCursorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/cursor/CacheBasedLabelPairCursorTest.java new file mode 100644 index 0000000000000..3b7d98cf859ac --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/cursor/CacheBasedLabelPairCursorTest.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.selection.scoring.cursor; + +import java.util.UUID; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.selection.scoring.LabelPair; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.junit.Test; + +/** + * Tests for {@link CacheBasedLabelPairCursor}. + */ +public class CacheBasedLabelPairCursorTest extends GridCommonAbstractTest { + /** Number of nodes in grid. */ + private static final int NODE_COUNT = 4; + + /** Ignite instance. */ + private Ignite ignite; + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + for (int i = 1; i <= NODE_COUNT; i++) + startGrid(i); + } + + /** {@inheritDoc} */ + @Override protected void beforeTest() { + /* Grid instance. 
*/ + ignite = grid(NODE_COUNT); + ignite.configuration().setPeerClassLoadingEnabled(true); + IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); + } + + /** */ + @Test + public void testIterate() { + IgniteCache data = ignite.createCache(UUID.randomUUID().toString()); + + for (int i = 0; i < 1000; i++) + data.put(i, new double[] { i, i}); + + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST); + + LabelPairCursor cursor = new CacheBasedLabelPairCursor<>( + data, + (k, v) -> v[1] % 2 == 0, + vectorizer, + vec -> vec.get(0) + ); + + int cnt = 0; + for (LabelPair e : cursor) { + assertEquals(e.getPrediction(), e.getTruth()); + cnt++; + } + assertEquals(500, cnt); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/cursor/LocalLabelPairCursorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/cursor/LocalLabelPairCursorTest.java new file mode 100644 index 0000000000000..f9f6a9f487284 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/cursor/LocalLabelPairCursorTest.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.selection.scoring.cursor; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.selection.scoring.LabelPair; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link LocalLabelPairCursor}. 
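+ * <p>
+ * In {@code testIterate()} the label is the first array element and the stand-in model simply
+ * echoes the remaining feature, so truth and prediction should match for every emitted pair; the
+ * filter on even values keeps 500 of the 1000 entries.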
+ */ +public class LocalLabelPairCursorTest { + /** */ + @Test + public void testIterate() { + Map data = new HashMap<>(); + + for (int i = 0; i < 1000; i++) + data.put(i, new double[] {i, i}); + + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST); + + LabelPairCursor cursor = new LocalLabelPairCursor<>( + data, + (k, v) -> v[1] % 2 == 0, + vectorizer, + vec -> vec.get(0) + ); + + int cnt = 0; + for (LabelPair e : cursor) { + assertEquals(e.getPrediction(), e.getTruth()); + cnt++; + } + assertEquals(500, cnt); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/evaluator/aggregator/BinaryClassificationPointwiseMetricStatsAggregatorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/evaluator/aggregator/BinaryClassificationPointwiseMetricStatsAggregatorTest.java new file mode 100644 index 0000000000000..c39e3cb9d6930 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/evaluator/aggregator/BinaryClassificationPointwiseMetricStatsAggregatorTest.java @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.selection.scoring.evaluator.aggregator; + +import org.apache.ignite.ml.IgniteModel; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.selection.scoring.evaluator.context.BinaryClassificationEvaluationContext; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Test class for {@link BinaryClassificationPointwiseMetricStatsAggregator} class. 
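+ * <p>
+ * The stub model at the bottom of this class maps a zero feature to the false label and any other
+ * feature to the truth label, so the four samples aggregated in {@code testAggregate()} are
+ * expected to land one in each confusion-matrix cell:
+ * <pre>{@code
+ * // (prediction, truth): (0, 0) -> TN, (1, 0) -> FP, (1, 1) -> TP, (0, 1) -> FN
+ * }</pre>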
+ */ +public class BinaryClassificationPointwiseMetricStatsAggregatorTest { + /** + * + */ + @Test + public void testAggregate() { + BinaryClassificationPointwiseMetricStatsAggregator aggregator = new BinaryClassificationPointwiseMetricStatsAggregator<>(); + assertEquals(null, aggregator.getFalseLabel()); + assertEquals(null, aggregator.getTruthLabel()); + + aggregator.initByContext(new BinaryClassificationEvaluationContext<>(0., 1.)); + assertEquals(0., aggregator.getFalseLabel(), 0.); + assertEquals(1., aggregator.getTruthLabel(), 0.); + assertEquals(0, aggregator.getTrueNegative()); + assertEquals(0, aggregator.getFalseNegative()); + assertEquals(0, aggregator.getTruePositive()); + assertEquals(0, aggregator.getFalsePositive()); + + aggregator.aggregate(mdl, VectorUtils.of(0.).labeled(0.)); + aggregator.aggregate(mdl, VectorUtils.of(1.).labeled(0.)); + aggregator.aggregate(mdl, VectorUtils.of(1.).labeled(1.)); + aggregator.aggregate(mdl, VectorUtils.of(0.).labeled(1.)); + + assertEquals(1, aggregator.getTrueNegative()); + assertEquals(1, aggregator.getFalseNegative()); + assertEquals(1, aggregator.getTruePositive()); + assertEquals(1, aggregator.getFalsePositive()); + } + + /** + * + */ + @Test + public void testMerge() { + BinaryClassificationPointwiseMetricStatsAggregator agg1 = new BinaryClassificationPointwiseMetricStatsAggregator<>(); + BinaryClassificationPointwiseMetricStatsAggregator agg2 = new BinaryClassificationPointwiseMetricStatsAggregator<>(); + + agg1.initByContext(new BinaryClassificationEvaluationContext<>(0., 1.)); + agg2.initByContext(new BinaryClassificationEvaluationContext<>(0., 1.)); + + agg1.aggregate(mdl, VectorUtils.of(0.).labeled(0.)); + agg1.aggregate(mdl, VectorUtils.of(1.).labeled(0.)); + agg2.aggregate(mdl, VectorUtils.of(1.).labeled(1.)); + agg2.aggregate(mdl, VectorUtils.of(0.).labeled(1.)); + + BinaryClassificationPointwiseMetricStatsAggregator res = agg1.mergeWith(agg2); + assertEquals(1, res.getTrueNegative()); + assertEquals(1, res.getFalseNegative()); + assertEquals(1, res.getTruePositive()); + assertEquals(1, res.getFalsePositive()); + } + + /** + * + */ + @Test(expected = IllegalArgumentException.class) + public void testMergeInequalAggreagators() { + BinaryClassificationPointwiseMetricStatsAggregator agg1 = new BinaryClassificationPointwiseMetricStatsAggregator<>(); + BinaryClassificationPointwiseMetricStatsAggregator agg2 = new BinaryClassificationPointwiseMetricStatsAggregator<>(); + + agg1.initByContext(new BinaryClassificationEvaluationContext<>(0., 0.)); + agg2.initByContext(new BinaryClassificationEvaluationContext<>(1., 1.)); + agg1.mergeWith(agg2); + } + + /** + * + */ + private IgniteModel mdl = new IgniteModel() { + @Override public Double predict(Vector input) { + return input.get(0) == 0.0 ? 0. : 1.; + } + }; +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/evaluator/aggregator/RegressionMetricStatsAggregatorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/evaluator/aggregator/RegressionMetricStatsAggregatorTest.java new file mode 100644 index 0000000000000..f4c54ca069cd1 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/evaluator/aggregator/RegressionMetricStatsAggregatorTest.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.selection.scoring.evaluator.aggregator; + +import org.apache.ignite.ml.IgniteModel; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link RegressionMetricStatsAggregator} class. + */ +public class RegressionMetricStatsAggregatorTest { + /** + * + */ + @Test + public void itShouldBeCarefulWithNaNs() { + RegressionMetricStatsAggregator agg = new RegressionMetricStatsAggregator(); + assertEquals(Double.NaN, agg.getMAE(), 0.); + assertEquals(Double.NaN, agg.getMSE(), 0.); + assertEquals(Double.NaN, agg.getRss(), 0.); + assertEquals(Double.NaN, agg.ysVariance(), 0.); + assertEquals(Double.NaN, agg.ysRss(), 0.); + + agg.aggregate(mdl, VectorUtils.of(1.).labeled(1.)); + assertEquals(0., agg.getMAE(), 0.); + assertEquals(0., agg.getMSE(), 0.); + assertEquals(0., agg.getRss(), 0.); + assertEquals(0., agg.ysVariance(), 0.); + assertEquals(0., agg.ysRss(), 0.); + } + + /** + * + */ + @Test + public void testAggregate() { + RegressionMetricStatsAggregator agg = new RegressionMetricStatsAggregator(); + + agg.aggregate(mdl, VectorUtils.of(1.).labeled(2.)); + agg.aggregate(mdl, VectorUtils.of(2.).labeled(4.)); + agg.aggregate(mdl, VectorUtils.of(3.).labeled(6.)); + assertEquals(6. / 3, agg.getMAE(), 0.); + assertEquals(14. / 3, agg.getMSE(), 0.); + assertEquals(14., agg.getRss(), 0.); + assertEquals(2.6, agg.ysVariance(), 0.1); + assertEquals(8.0, agg.ysRss(), 0.1); + } + + /** + * + */ + @Test + public void testMerge() { + RegressionMetricStatsAggregator agg1 = new RegressionMetricStatsAggregator(); + RegressionMetricStatsAggregator agg2 = new RegressionMetricStatsAggregator(); + + agg1.aggregate(mdl, VectorUtils.of(1.).labeled(2.)); + agg2.aggregate(mdl, VectorUtils.of(2.).labeled(4.)); + agg1.aggregate(mdl, VectorUtils.of(3.).labeled(6.)); + + RegressionMetricStatsAggregator res = agg1.mergeWith(agg2); + assertEquals(6. / 3, res.getMAE(), 0.); + assertEquals(14. 
/ 3, res.getMSE(), 0.); + assertEquals(14., res.getRss(), 0.); + assertEquals(2.6, res.ysVariance(), 0.1); + assertEquals(8.0, res.ysRss(), 0.1); + } + + /** + * + */ + private static IgniteModel mdl = new IgniteModel() { + @Override public Double predict(Vector input) { + return input.get(0); + } + }; +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/evaluator/context/BinaryClassificationEvaluationContextTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/evaluator/context/BinaryClassificationEvaluationContextTest.java new file mode 100644 index 0000000000000..f150bfeca22ea --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/evaluator/context/BinaryClassificationEvaluationContextTest.java @@ -0,0 +1,149 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.selection.scoring.evaluator.context; + +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link BinaryClassificationEvaluationContext} class. 
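+ * <p>
+ * Taken together, the assertions below suggest the intended contract: the context collects at most two
+ * distinct labels, orders them so that the smaller one becomes the first class label, and both
+ * {@code aggregate} and {@code mergeWith} are expected to fail with {@code IllegalArgumentException}
+ * once more than two distinct labels are involved.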
+ */ +public class BinaryClassificationEvaluationContextTest { + /** + * + */ + @Test + public void testAggregate() { + BinaryClassificationEvaluationContext ctx = new BinaryClassificationEvaluationContext<>(); + ctx.aggregate(VectorUtils.of().labeled(1.0)); + assertEquals(ctx.getFirstClsLbl(), 1., 0.); + assertEquals(ctx.getSecondClsLbl(), null); + + ctx.aggregate(VectorUtils.of().labeled(0.0)); + assertEquals(ctx.getFirstClsLbl(), 0., 0.); + assertEquals(ctx.getSecondClsLbl(), 1., 0.); + } + + /** + * + */ + @Test(expected = IllegalArgumentException.class) + public void testAggregateWithThreeLabels() { + BinaryClassificationEvaluationContext ctx = new BinaryClassificationEvaluationContext<>(); + ctx.aggregate(VectorUtils.of().labeled(-1.0)); + ctx.aggregate(VectorUtils.of().labeled(1.0)); + + assertEquals(ctx.getFirstClsLbl(), -1., 0.); + assertEquals(ctx.getSecondClsLbl(), 1., 0.); + + ctx.aggregate(VectorUtils.of().labeled(0.0)); + } + + /** + * + */ + @Test + public void testMerge1() { + BinaryClassificationEvaluationContext left = new BinaryClassificationEvaluationContext<>(); + BinaryClassificationEvaluationContext right = new BinaryClassificationEvaluationContext<>(); + + BinaryClassificationEvaluationContext res = left.mergeWith(right); + assertEquals(res.getFirstClsLbl(), null); + assertEquals(res.getSecondClsLbl(), null); + } + + /** + * + */ + @Test + public void testMerge2() { + BinaryClassificationEvaluationContext left = new BinaryClassificationEvaluationContext<>(0., null); + BinaryClassificationEvaluationContext right = new BinaryClassificationEvaluationContext<>(); + + BinaryClassificationEvaluationContext res = left.mergeWith(right); + assertEquals(res.getFirstClsLbl(), 0., 0.); + assertEquals(res.getSecondClsLbl(), null); + + res = right.mergeWith(left); + assertEquals(res.getFirstClsLbl(), 0., 0.); + assertEquals(res.getSecondClsLbl(), null); + } + + /** + * + */ + @Test + public void testMerge3() { + BinaryClassificationEvaluationContext left = new BinaryClassificationEvaluationContext<>(null, 0.); + BinaryClassificationEvaluationContext right = new BinaryClassificationEvaluationContext<>(); + + BinaryClassificationEvaluationContext res = left.mergeWith(right); + assertEquals(res.getFirstClsLbl(), 0., 0.); + assertEquals(res.getSecondClsLbl(), null); + + res = right.mergeWith(left); + assertEquals(res.getFirstClsLbl(), 0., 0.); + assertEquals(res.getSecondClsLbl(), null); + } + + /** + * + */ + @Test + public void testMerge4() { + BinaryClassificationEvaluationContext left = new BinaryClassificationEvaluationContext<>(1., 0.); + BinaryClassificationEvaluationContext right = new BinaryClassificationEvaluationContext<>(); + + BinaryClassificationEvaluationContext res = left.mergeWith(right); + assertEquals(res.getFirstClsLbl(), 0., 0.); + assertEquals(res.getSecondClsLbl(), 1., 0.); + + res = right.mergeWith(left); + assertEquals(res.getFirstClsLbl(), 0., 0.); + assertEquals(res.getSecondClsLbl(), 1., 0.); + } + + /** + * + */ + @Test + public void testMerge5() { + BinaryClassificationEvaluationContext left = new BinaryClassificationEvaluationContext<>(1., 0.); + BinaryClassificationEvaluationContext right = new BinaryClassificationEvaluationContext<>(0., 1.); + + BinaryClassificationEvaluationContext res = left.mergeWith(right); + assertEquals(res.getFirstClsLbl(), 0., 0.); + assertEquals(res.getSecondClsLbl(), 1., 0.); + + res = right.mergeWith(left); + assertEquals(res.getFirstClsLbl(), 0., 0.); + assertEquals(res.getSecondClsLbl(), 1., 0.); + } + + /** + * + */ + 
@Test(expected = IllegalArgumentException.class) + public void testMerge6() { + BinaryClassificationEvaluationContext left = new BinaryClassificationEvaluationContext<>(1., 0.); + BinaryClassificationEvaluationContext right = new BinaryClassificationEvaluationContext<>(2., 1.); + BinaryClassificationEvaluationContext res = left.mergeWith(right); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/metric/classification/BinaryClassificationMetricsTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/metric/classification/BinaryClassificationMetricsTest.java new file mode 100644 index 0000000000000..f0a51f9e144c8 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/metric/classification/BinaryClassificationMetricsTest.java @@ -0,0 +1,87 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.selection.scoring.metric.classification; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.IgniteModel; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.selection.scoring.evaluator.EvaluationResult; +import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator; +import org.apache.ignite.ml.selection.scoring.metric.MetricName; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for binary classification metrics. 
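+ * <p>
+ * The expected values follow from plain confusion-matrix arithmetic over the four XOR points with 1 as the
+ * positive class: the AND-like model makes a single positive prediction and it is wrong (TP = 0, FP = 1,
+ * FN = 2, TN = 1), hence accuracy 1/4, zero precision and recall and an undefined (NaN) F-measure; the OR
+ * model gives TP = 2, FP = 1, FN = 0, TN = 1, hence accuracy 3/4, precision 2/3, recall 1 and
+ * F-measure 2 * (2/3) * 1 / (2/3 + 1) = 0.8.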
+ */
+public class BinaryClassificationMetricsTest {
+ /**
+ *
+ */
+ @Test
+ public void testCalculation() {
+ Map<Vector, Double> xorset = new HashMap<>();
+
+ xorset.put(VectorUtils.of(0., 0.), 0.);
+ xorset.put(VectorUtils.of(0., 1.), 1.);
+ xorset.put(VectorUtils.of(1., 0.), 1.);
+ xorset.put(VectorUtils.of(1., 1.), 0.);
+
+ IgniteModel<Vector, Double> xorFunction = v -> {
+ if (Math.abs(v.get(0) - v.get(1)) < 0.01)
+ return 0.;
+ else
+ return 1.;
+ };
+
+ IgniteModel<Vector, Double> andFunction = v -> {
+ if (Math.abs(v.get(0) - v.get(1)) < 0.01 && v.get(0) > 0)
+ return 1.;
+ else
+ return 0.;
+ };
+
+ IgniteModel<Vector, Double> orFunction = v -> {
+ if (v.get(0) > 0 || v.get(1) > 0)
+ return 1.;
+ else
+ return 0.;
+ };
+
+ EvaluationResult xorResult = Evaluator.evaluateBinaryClassification(xorset, xorFunction, Vector::labeled);
+ assertEquals(1., xorResult.get(MetricName.ACCURACY), 0.01);
+ assertEquals(1., xorResult.get(MetricName.PRECISION), 0.01);
+ assertEquals(1., xorResult.get(MetricName.RECALL), 0.01);
+ assertEquals(1., xorResult.get(MetricName.F_MEASURE), 0.01);
+
+ EvaluationResult andResult = Evaluator.evaluateBinaryClassification(xorset, andFunction, Vector::labeled);
+ assertEquals(0.25, andResult.get(MetricName.ACCURACY), 0.01);
+ assertEquals(0., andResult.get(MetricName.PRECISION), 0.01); // there is no TP
+ assertEquals(0., andResult.get(MetricName.RECALL), 0.01); // there is no TP
+ assertEquals(Double.NaN, andResult.get(MetricName.F_MEASURE), 0.01); // there is no TP and zero in denominator
+
+ EvaluationResult orResult = Evaluator.evaluateBinaryClassification(xorset, orFunction, Vector::labeled);
+ assertEquals(0.75, orResult.get(MetricName.ACCURACY), 0.01);
+ assertEquals(0.66, orResult.get(MetricName.PRECISION), 0.01); // 2 TP against 1 FP
+ assertEquals(1., orResult.get(MetricName.RECALL), 0.01); // 2 TP, no FN
+ assertEquals(0.8, orResult.get(MetricName.F_MEASURE), 0.01); // harmonic mean of precision 2/3 and recall 1
+ }
+}
diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/metric/regression/RegressionMetricsTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/metric/regression/RegressionMetricsTest.java
new file mode 100644
index 0000000000000..177767736d17b
--- /dev/null
+++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/scoring/metric/regression/RegressionMetricsTest.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.ignite.ml.selection.scoring.metric.regression; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.IgniteModel; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.selection.scoring.evaluator.EvaluationResult; +import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator; +import org.apache.ignite.ml.selection.scoring.metric.MetricName; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for regression metrics. + */ +public class RegressionMetricsTest { + /** + * + */ + @Test + public void testCalculation() { + Map linearSet = new HashMap<>(); + + linearSet.put(VectorUtils.of(0.), 0.); + linearSet.put(VectorUtils.of(1.), 1.); + linearSet.put(VectorUtils.of(2.), 2.); + linearSet.put(VectorUtils.of(3.), 3.); + + IgniteModel linearModel = v -> v.get(0); + IgniteModel squareModel = v -> Math.pow(v.get(0), 2); + + EvaluationResult linearRes = Evaluator.evaluateRegression(linearSet, linearModel, Vector::labeled); + assertEquals(0., linearRes.get(MetricName.MAE), 0.01); + assertEquals(0., linearRes.get(MetricName.MSE), 0.01); + assertEquals(0., linearRes.get(MetricName.R2), 0.01); + assertEquals(0., linearRes.get(MetricName.RSS), 0.01); + assertEquals(0., linearRes.get(MetricName.RMSE), 0.01); + + EvaluationResult squareRes = Evaluator.evaluateRegression(linearSet, squareModel, Vector::labeled); + assertEquals(2., squareRes.get(MetricName.MAE), 0.01); + assertEquals(10., squareRes.get(MetricName.MSE), 0.01); + assertEquals(8., squareRes.get(MetricName.R2), 0.01); + assertEquals(40., squareRes.get(MetricName.RSS), 0.01); + assertEquals(Math.sqrt(10), squareRes.get(MetricName.RMSE), 0.01); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/split/TrainTestDatasetSplitterTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/split/TrainTestDatasetSplitterTest.java new file mode 100644 index 0000000000000..25ac74e7470f0 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/split/TrainTestDatasetSplitterTest.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.selection.split; + +import org.junit.Test; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link TrainTestDatasetSplitter}. 
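+ * <p>
+ * With the identity-like mapper used here ({@code (k, v) -> k}) a {@code split(0.4, 0.4)} is expected to
+ * behave roughly as follows: the train filter accepts keys mapped into [0, 0.4) and the test filter accepts
+ * keys mapped into [0.4, 0.8), which is what the boundary checks below probe.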
+ */ +public class TrainTestDatasetSplitterTest { + /** */ + @Test + public void testSplitWithSpecifiedTrainAndTestSize() { + TrainTestDatasetSplitter splitter = new TrainTestDatasetSplitter<>((k, v) -> k); + + TrainTestSplit split = splitter.split(0.4, 0.4); + + assertTrue(split.getTrainFilter().apply(0.0, 0.0)); + assertTrue(split.getTrainFilter().apply(0.2, 0.0)); + assertFalse(split.getTrainFilter().apply(0.4, 0.0)); + assertFalse(split.getTrainFilter().apply(0.6, 0.0)); + + assertFalse(split.getTestFilter().apply(0.0, 0.0)); + assertFalse(split.getTestFilter().apply(0.2, 0.0)); + assertTrue(split.getTestFilter().apply(0.4, 0.0)); + assertTrue(split.getTestFilter().apply(0.6, 0.0)); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/split/mapper/SHA256UniformMapperTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/split/mapper/SHA256UniformMapperTest.java new file mode 100644 index 0000000000000..f1f1774070555 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/selection/split/mapper/SHA256UniformMapperTest.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.selection.split.mapper; + +import java.util.Random; +import org.junit.Test; + +import static org.junit.Assert.assertTrue; + +/** + * Test for {@link SHA256UniformMapper}. + */ +public class SHA256UniformMapperTest { + /** */ + @Test + public void testMap() { + UniformMapper mapper = new SHA256UniformMapper<>(new Random(42)); + + int cnt = 0; + + for (int i = 0; i < 100_000; i++) { + double pnt = mapper.map(i, i); + + if (pnt < 0.2) + cnt++; + } + + double err = 1.0 * Math.abs(cnt - 20_000) / 20_000; + + // Hash function should provide a good distribution so that error should be less that 2% in case 10^5 tests. + assertTrue(err < 0.02); + } + + /** */ + @Test + public void testMapAndMapAgain() { + UniformMapper firstMapper = new SHA256UniformMapper<>(new Random(42)); + UniformMapper secondMapper = new SHA256UniformMapper<>(new Random(21)); + + int cnt = 0; + + for (int i = 0; i < 100_000; i++) { + double firstPnt = firstMapper.map(i, i); + double secondPnt = secondMapper.map(i, i); + + if (firstPnt < 0.5 && secondPnt < 0.5) + cnt++; + } + + double err = 1.0 * Math.abs(cnt - 25_000) / 25_000; + + // Hash function should provide a good distribution so that error should be less that 2% in case 10^5 tests. 
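+ // (Roughly: 10^5 trials with success probability 0.25 give an expected count of 25 000 and a standard
+ // deviation of about sqrt(100000 * 0.25 * 0.75) ~ 137, so the 2% tolerance of 500 is a loose multi-sigma bound.)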
+ assertTrue(err < 0.02);
+ }
+}
diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/structures/DatasetStructureTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/structures/DatasetStructureTest.java
new file mode 100644
index 0000000000000..3b9a9f478ae5f
--- /dev/null
+++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/structures/DatasetStructureTest.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.structures;
+
+import org.apache.ignite.ml.math.primitives.vector.Vector;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Tests for {@link Dataset} basic features.
+ */
+public class DatasetStructureTest {
+ /**
+ * Basic test.
+ */
+ @Test
+ @SuppressWarnings("unchecked")
+ public void testBasic() {
+ Assert.assertNull("Feature names constructor", new Dataset<DatasetRow<Vector>>(1, 1,
+ new String[] {"tests"}).data());
+
+ Dataset<DatasetRow<Vector>> dataset = new Dataset<DatasetRow<Vector>>(new DatasetRow[] {},
+ new FeatureMetadata[] {});
+
+ Assert.assertEquals("Expect empty data", 0, dataset.data().length);
+ Assert.assertEquals("Expect empty meta", 0, dataset.meta().length);
+
+ dataset.setData(new DatasetRow[] {new DatasetRow()});
+ dataset.setMeta(new FeatureMetadata[] {new FeatureMetadata()});
+
+ Assert.assertEquals("Expect non empty data", 1, dataset.data().length);
+ Assert.assertEquals("Expect non empty meta", 1, dataset.meta().length);
+ Assert.assertEquals(1, dataset.meta().length);
+ }
+}
diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/structures/LabeledVectorSetTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/structures/LabeledVectorSetTest.java
new file mode 100644
index 0000000000000..c7cf824e6fc25
--- /dev/null
+++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/structures/LabeledVectorSetTest.java
@@ -0,0 +1,291 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.ignite.ml.structures; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Objects; +import org.apache.ignite.ml.knn.LabeledDatasetHelper; +import org.apache.ignite.ml.math.ExternalizableTest; +import org.apache.ignite.ml.math.exceptions.datastructures.EmptyFileException; +import org.apache.ignite.ml.math.exceptions.datastructures.FileParsingException; +import org.apache.ignite.ml.math.exceptions.datastructures.NoLabelVectorException; +import org.apache.ignite.ml.math.exceptions.math.CardinalityException; +import org.apache.ignite.ml.math.exceptions.math.NoDataException; +import org.apache.ignite.ml.structures.preprocessing.LabeledDatasetLoader; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +/** Tests behaviour of KNNClassificationTest. */ +public class LabeledVectorSetTest implements ExternalizableTest { + /** */ + private static final String KNN_IRIS_TXT = "datasets/knn/iris.txt"; + + /** */ + private static final String NO_DATA_TXT = "datasets/knn/no_data.txt"; + + /** */ + private static final String EMPTY_TXT = "datasets/knn/empty.txt"; + + /** */ + private static final String IRIS_INCORRECT_TXT = "datasets/knn/iris_incorrect.txt"; + + /** */ + private static final String IRIS_MISSED_DATA = "datasets/knn/missed_data.txt"; + + /** */ + @Test + public void testFeatureNames() { + double[][] mtx = + new double[][] { + {1.0, 1.0}, + {1.0, 2.0}, + {2.0, 1.0}, + {-1.0, -1.0}, + {-1.0, -2.0}, + {-2.0, -1.0}}; + double[] lbs = new double[] {1.0, 1.0, 1.0, 2.0, 2.0, 2.0}; + + String[] featureNames = new String[] {"x", "y"}; + final LabeledVectorSet dataset = new LabeledVectorSet(mtx, lbs, featureNames); + + assertEquals(dataset.getFeatureName(0), "x"); + } + + /** */ + @Test + public void testAccessMethods() { + double[][] mtx = + new double[][] { + {1.0, 1.0}, + {1.0, 2.0}, + {2.0, 1.0}, + {-1.0, -1.0}, + {-1.0, -2.0}, + {-2.0, -1.0}}; + double[] lbs = new double[] {1.0, 1.0, 1.0, 2.0, 2.0, 2.0}; + + final LabeledVectorSet dataset = new LabeledVectorSet(mtx, lbs, null); + + assertEquals(dataset.colSize(), 2); + assertEquals(dataset.rowSize(), 6); + + assertEquals(dataset.label(0), lbs[0], 0); + + assertEquals(dataset.copy().colSize(), 2); + + final LabeledVector row = (LabeledVector)dataset.getRow(0); + + assertEquals(1.0, row.features().get(0), 0); + assertEquals(1.0, row.label(), 0); + dataset.setLabel(0, 2.0); + assertEquals(2.0, row.label(), 0); + + assertEquals(0, new LabeledVectorSet().rowSize()); + assertEquals(1, new LabeledVectorSet(1, 2).rowSize()); + assertEquals(1, new LabeledVectorSet(1, 2).rowSize()); + assertEquals(1, new LabeledVectorSet(1, 2, null).rowSize()); + } + + /** */ + @Test + public void testFailOnYNull() { + double[][] mtx = + new double[][] { + {1.0, 1.0}, + {1.0, 2.0}, + {2.0, 1.0}, + {-1.0, -1.0}, + {-1.0, -2.0}, + {-2.0, -1.0}}; + double[] lbs = new double[] {}; + + try { + new LabeledVectorSet(mtx, lbs); + fail("CardinalityException"); + } + catch (CardinalityException e) { + return; + } + fail("CardinalityException"); + } + + /** */ + @Test + public void testFailOnXNull() { + double[][] mtx = + new double[][] {}; + double[] lbs = new double[] {1.0, 1.0, 1.0, 2.0, 2.0, 2.0}; + + try { + new LabeledVectorSet(mtx, lbs); + fail("CardinalityException"); + } + catch (CardinalityException e) { + return; + } + fail("CardinalityException"); + } + + /** */ + @Test + public void 
testLoadingCorrectTxtFile() { + LabeledVectorSet training = LabeledDatasetHelper.loadDatasetFromTxt(KNN_IRIS_TXT, false); + assertEquals(training.rowSize(), 150); + } + + /** */ + @Test + public void testLoadingEmptyFile() { + try { + LabeledDatasetHelper.loadDatasetFromTxt(EMPTY_TXT, false); + fail("EmptyFileException"); + } + catch (EmptyFileException e) { + return; + } + fail("EmptyFileException"); + } + + /** */ + @Test + public void testLoadingFileWithFirstEmptyRow() { + try { + LabeledDatasetHelper.loadDatasetFromTxt(NO_DATA_TXT, false); + fail("NoDataException"); + } + catch (NoDataException e) { + return; + } + fail("NoDataException"); + } + + /** */ + @Test + public void testLoadingFileWithIncorrectData() { + LabeledVectorSet training = LabeledDatasetHelper.loadDatasetFromTxt(IRIS_INCORRECT_TXT, false); + assertEquals(149, training.rowSize()); + } + + /** */ + @Test + public void testFailOnLoadingFileWithIncorrectData() { + try { + LabeledDatasetHelper.loadDatasetFromTxt(IRIS_INCORRECT_TXT, true); + fail("FileParsingException"); + } + catch (FileParsingException e) { + return; + } + fail("FileParsingException"); + + } + + /** */ + @Test + public void testLoadingFileWithMissedData() throws URISyntaxException, IOException { + Path path = Paths.get(Objects.requireNonNull(getClass().getClassLoader().getResource(IRIS_MISSED_DATA)).toURI()); + + LabeledVectorSet training = LabeledDatasetLoader.loadFromTxtFile(path, ",", false); + + assertEquals(training.features(2).get(1), 0.0, 0); + } + + /** */ + @Test + public void testSplitting() { + double[][] mtx = + new double[][] { + {1.0, 1.0}, + {1.0, 2.0}, + {2.0, 1.0}, + {-1.0, -1.0}, + {-1.0, -2.0}, + {-2.0, -1.0}}; + double[] lbs = new double[] {1.0, 1.0, 1.0, 2.0, 2.0, 2.0}; + + LabeledVectorSet training = new LabeledVectorSet(mtx, lbs); + + LabeledVectorSetTestTrainPair split1 = new LabeledVectorSetTestTrainPair(training, 0.67); + + assertEquals(4, split1.test().rowSize()); + assertEquals(2, split1.train().rowSize()); + + LabeledVectorSetTestTrainPair split2 = new LabeledVectorSetTestTrainPair(training, 0.65); + + assertEquals(3, split2.test().rowSize()); + assertEquals(3, split2.train().rowSize()); + + LabeledVectorSetTestTrainPair split3 = new LabeledVectorSetTestTrainPair(training, 0.4); + + assertEquals(2, split3.test().rowSize()); + assertEquals(4, split3.train().rowSize()); + + LabeledVectorSetTestTrainPair split4 = new LabeledVectorSetTestTrainPair(training, 0.3); + + assertEquals(1, split4.test().rowSize()); + assertEquals(5, split4.train().rowSize()); + } + + /** */ + @Test + public void testLabels() { + double[][] mtx = + new double[][] { + {1.0, 1.0}, + {1.0, 2.0}, + {2.0, 1.0}, + {-1.0, -1.0}, + {-1.0, -2.0}, + {-2.0, -1.0}}; + double[] lbs = new double[] {1.0, 1.0, 1.0, 2.0, 2.0, 2.0}; + + LabeledVectorSet dataset = new LabeledVectorSet(mtx, lbs); + final double[] labels = dataset.labels(); + for (int i = 0; i < lbs.length; i++) + assertEquals(lbs[i], labels[i], 0); + } + + /** */ + @Test(expected = NoLabelVectorException.class) + @SuppressWarnings("unchecked") + public void testSetLabelInvalid() { + new LabeledVectorSet(new LabeledVector[1]).setLabel(0, 2.0); + } + + /** */ + @Test + @Override public void testExternalization() { + double[][] mtx = + new double[][] { + {1.0, 1.0}, + {1.0, 2.0}, + {2.0, 1.0}, + {-1.0, -1.0}, + {-1.0, -2.0}, + {-2.0, -1.0}}; + double[] lbs = new double[] {1.0, 1.0, 1.0, 2.0, 2.0, 2.0}; + + LabeledVectorSet dataset = new LabeledVectorSet(mtx, lbs); + externalizeTest(dataset); + } +} diff --git 
a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/structures/StructuresTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/structures/StructuresTestSuite.java new file mode 100644 index 0000000000000..0788733a936ec --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/structures/StructuresTestSuite.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.structures; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in org.apache.ignite.ml.trees package. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + DatasetStructureTest.class, + LabeledVectorSetTest.class +}) +public class StructuresTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java new file mode 100644 index 0000000000000..ede489fd82053 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.svm; + +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +/** + * Tests for {@link SVMLinearClassificationTrainer}. + */ +public class SVMBinaryTrainerTest extends TrainerTest { + /** + * Test trainer on classification model y = x. 
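+ * (Here y = x is the separating line: samples whose second coordinate exceeds the first are expected to be
+ * classified as 1 and the rest as 0, as the two spot checks at the end assert.)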
+ */ + @Test + public void testTrainWithTheLinearlySeparableCase() { + Map cacheMock = new HashMap<>(); + + for (int i = 0; i < twoLinearlySeparableClasses.length; i++) + cacheMock.put(i, twoLinearlySeparableClasses[i]); + + SVMLinearClassificationTrainer trainer = new SVMLinearClassificationTrainer() + .withSeed(1234L); + + SVMLinearClassificationModel mdl = trainer.fit( + cacheMock, parts, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST) + ); + + TestUtils.assertEquals(0, mdl.predict(VectorUtils.of(100, 10)), PRECISION); + TestUtils.assertEquals(1, mdl.predict(VectorUtils.of(10, 100)), PRECISION); + } + + /** */ + @Test + public void testUpdate() { + Map cacheMock = new HashMap<>(); + + for (int i = 0; i < twoLinearlySeparableClasses.length; i++) + cacheMock.put(i, twoLinearlySeparableClasses[i]); + + SVMLinearClassificationTrainer trainer = new SVMLinearClassificationTrainer() + .withAmountOfIterations(1000) + .withSeed(1234L); + + Vectorizer vectorizer = + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST); + SVMLinearClassificationModel originalMdl = trainer.fit( + cacheMock, parts, + vectorizer + ); + + SVMLinearClassificationModel updatedOnSameDS = trainer.update( + originalMdl, + cacheMock, + parts, + vectorizer + ); + + SVMLinearClassificationModel updatedOnEmptyDS = trainer.update( + originalMdl, + new HashMap<>(), + parts, + vectorizer + ); + + Vector v = VectorUtils.of(100, 10); + TestUtils.assertEquals(originalMdl.predict(v), updatedOnSameDS.predict(v), PRECISION); + TestUtils.assertEquals(originalMdl.predict(v), updatedOnEmptyDS.predict(v), PRECISION); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/svm/SVMModelTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/svm/SVMModelTest.java new file mode 100644 index 0000000000000..bc0f935171d77 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/svm/SVMModelTest.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.svm; + +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.math.exceptions.math.CardinalityException; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.regressions.linear.LinearRegressionModel; +import org.junit.Assert; +import org.junit.Test; + +/** + * Tests for {@link LinearRegressionModel}. + */ +public class SVMModelTest { + /** Precision in test checks. 
*/ + private static final double PRECISION = 1e-6; + + /** */ + @Test + public void testPredictWithRawLabels() { + Vector weights = new DenseVector(new double[]{2.0, 3.0}); + SVMLinearClassificationModel mdl = new SVMLinearClassificationModel(weights, 1.0).withRawLabels(true); + + Vector observation = new DenseVector(new double[]{1.0, 1.0}); + TestUtils.assertEquals(1.0 + 2.0 * 1.0 + 3.0 * 1.0, mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[]{2.0, 1.0}); + TestUtils.assertEquals(1.0 + 2.0 * 2.0 + 3.0 * 1.0, mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[]{1.0, 2.0}); + TestUtils.assertEquals(1.0 + 2.0 * 1.0 + 3.0 * 2.0, mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[]{-2.0, 1.0}); + TestUtils.assertEquals(1.0 - 2.0 * 2.0 + 3.0 * 1.0, mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[]{1.0, -2.0}); + TestUtils.assertEquals(1.0 + 2.0 * 1.0 - 3.0 * 2.0, mdl.predict(observation), PRECISION); + + Assert.assertTrue(mdl.isKeepingRawLabels()); + + Assert.assertTrue(!mdl.toString().isEmpty()); + Assert.assertTrue(!mdl.toString(true).isEmpty()); + Assert.assertTrue(!mdl.toString(false).isEmpty()); + } + + /** */ + @Test + public void testPredictWithErasedLabels() { + Vector weights = new DenseVector(new double[]{1.0, 1.0}); + SVMLinearClassificationModel mdl = new SVMLinearClassificationModel(weights, 1.0); + + Vector observation = new DenseVector(new double[]{1.0, 1.0}); + TestUtils.assertEquals(1.0, mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[]{3.0, 4.0}); + TestUtils.assertEquals(1.0, mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[]{-1.0, -1.0}); + TestUtils.assertEquals(0.0, mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[]{-2.0, 1.0}); + TestUtils.assertEquals(0.0, mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[]{-1.0, -2.0}); + TestUtils.assertEquals(0.0, mdl.predict(observation), PRECISION); + + final SVMLinearClassificationModel mdlWithNewData = mdl.withIntercept(-2.0).withWeights(new DenseVector(new double[] {-2.0, -2.0})); + System.out.println("The SVM model is " + mdlWithNewData); + + observation = new DenseVector(new double[]{-1.0, -2.0}); + TestUtils.assertEquals(1.0, mdl.predict(observation), PRECISION); + TestUtils.assertEquals(-2.0, mdl.intercept(), PRECISION); + } + + /** */ + @Test + public void testPredictWithErasedLabelsAndChangedThreshold() { + Vector weights = new DenseVector(new double[]{1.0, 1.0}); + SVMLinearClassificationModel mdl = new SVMLinearClassificationModel(weights, 1.0).withThreshold(5); + + Vector observation = new DenseVector(new double[]{1.0, 1.0}); + TestUtils.assertEquals(0.0, mdl.predict(observation), PRECISION); + + observation = new DenseVector(new double[]{3.0, 4.0}); + TestUtils.assertEquals(1.0, mdl.predict(observation), PRECISION); + + TestUtils.assertEquals(5, mdl.threshold(), PRECISION); + } + + /** */ + @Test(expected = CardinalityException.class) + public void testPredictOnAnObservationWithWrongCardinality() { + Vector weights = new DenseVector(new double[]{2.0, 3.0}); + + SVMLinearClassificationModel mdl = new SVMLinearClassificationModel(weights, 1.0); + + Vector observation = new DenseVector(new double[]{1.0}); + + mdl.predict(observation); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/svm/SVMTestSuite.java 
b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/svm/SVMTestSuite.java new file mode 100644 index 0000000000000..a2aea6ef9798b --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/svm/SVMTestSuite.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.svm; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in org.apache.ignite.ml.svm.* package. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + SVMModelTest.class, + SVMBinaryTrainerTest.class, +}) +public class SVMTestSuite { + // No-op. +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeClassificationTrainerIntegrationTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeClassificationTrainerIntegrationTest.java new file mode 100644 index 0000000000000..2e665c9bdfe70 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeClassificationTrainerIntegrationTest.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree; + +import java.util.Random; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.junit.Test; + +/** + * Tests for {@link DecisionTreeClassificationTrainer} that require to start the whole Ignite infrastructure. 
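+ * <p>
+ * The training data is one-dimensional with label {@code x > 0 ? 1 : 0}, so the depth-1 tree trained below
+ * is expected to have a single split with threshold near 0, a "then" leaf predicting 1 and an "else" leaf
+ * predicting 0; the assertions check exactly that shape.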
+ */ +public class DecisionTreeClassificationTrainerIntegrationTest extends GridCommonAbstractTest { + /** Number of nodes in grid */ + private static final int NODE_COUNT = 3; + + /** Ignite instance. */ + private Ignite ignite; + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + for (int i = 1; i <= NODE_COUNT; i++) + startGrid(i); + } + + /** + * {@inheritDoc} + */ + @Override protected void beforeTest() { + /* Grid instance. */ + ignite = grid(NODE_COUNT); + ignite.configuration().setPeerClassLoadingEnabled(true); + IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); + } + + /** */ + @Test + public void testFit() { + int size = 100; + + CacheConfiguration trainingSetCacheCfg = new CacheConfiguration<>(); + trainingSetCacheCfg.setAffinity(new RendezvousAffinityFunction(false, 10)); + trainingSetCacheCfg.setName("TRAINING_SET"); + + IgniteCache data = ignite.createCache(trainingSetCacheCfg); + + Random rnd = new Random(0); + for (int i = 0; i < size; i++) { + double x = rnd.nextDouble() - 0.5; + data.put(i, new double[]{x, x > 0 ? 1 : 0}); + } + + DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(1, 0); + + DecisionTreeModel tree = trainer.fit(ignite, data, new DoubleArrayVectorizer().labeled(1)); + + DecisionTreeNode decisionTreeNode = tree.getRootNode(); + assertTrue(decisionTreeNode instanceof DecisionTreeConditionalNode); + + DecisionTreeConditionalNode node = (DecisionTreeConditionalNode)decisionTreeNode; + + assertEquals(0, node.getThreshold(), 1e-3); + + assertTrue(node.getThenNode() instanceof DecisionTreeLeafNode); + assertTrue(node.getElseNode() instanceof DecisionTreeLeafNode); + + DecisionTreeLeafNode thenNode = (DecisionTreeLeafNode)node.getThenNode(); + DecisionTreeLeafNode elseNode = (DecisionTreeLeafNode)node.getElseNode(); + + assertEquals(1, thenNode.getVal(), 1e-10); + assertEquals(0, elseNode.getVal(), 1e-10); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeClassificationTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeClassificationTrainerTest.java new file mode 100644 index 0000000000000..e618f634c0234 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeClassificationTrainerTest.java @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.tree; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link DecisionTreeClassificationTrainer}. + */ +@RunWith(Parameterized.class) +public class DecisionTreeClassificationTrainerTest { + /** Number of parts to be tested. */ + private static final int[] partsToBeTested = new int[] {1, 2, 3, 4, 5, 7}; + + /** Number of partitions. */ + @Parameterized.Parameter() + public int parts; + + /** Use index [= 1 if true]. */ + @Parameterized.Parameter(1) + public int useIdx; + + /** Test parameters. */ + @Parameterized.Parameters(name = "Data divided on {0} partitions. Use index = {1}.") + public static Iterable data() { + List res = new ArrayList<>(); + for (int i = 0; i < 2; i++) { + for (int part : partsToBeTested) + res.add(new Integer[] {part, i}); + } + + return res; + } + + /** */ + @Test + public void testFit() { + int size = 100; + + Map data = new HashMap<>(); + + Random rnd = new Random(0); + for (int i = 0; i < size; i++) { + double x = rnd.nextDouble() - 0.5; + data.put(i, new double[] {x, x > 0 ? 1 : 0}); + } + + DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(1, 0) + .withUseIndex(useIdx == 1); + + DecisionTreeNode treeNode = trainer.fit(data, parts, new DoubleArrayVectorizer().labeled(1)).getRootNode(); + + assertTrue(treeNode instanceof DecisionTreeConditionalNode); + + DecisionTreeConditionalNode node = (DecisionTreeConditionalNode)treeNode; + + assertEquals(0, node.getThreshold(), 1e-3); + assertEquals(0, node.getCol()); + assertNotNull(node.toString()); + assertNotNull(node.toString(true)); + assertNotNull(node.toString(false)); + + assertTrue(node.getThenNode() instanceof DecisionTreeLeafNode); + assertTrue(node.getElseNode() instanceof DecisionTreeLeafNode); + + DecisionTreeLeafNode thenNode = (DecisionTreeLeafNode)node.getThenNode(); + DecisionTreeLeafNode elseNode = (DecisionTreeLeafNode)node.getElseNode(); + + assertEquals(1, thenNode.getVal(), 1e-10); + assertEquals(0, elseNode.getVal(), 1e-10); + + assertNotNull(thenNode.toString()); + assertNotNull(thenNode.toString(true)); + assertNotNull(thenNode.toString(false)); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeRegressionTrainerIntegrationTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeRegressionTrainerIntegrationTest.java new file mode 100644 index 0000000000000..e38fbf4f61b2f --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeRegressionTrainerIntegrationTest.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree; + +import java.util.Random; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.junit.Test; + +/** + * Tests for {@link DecisionTreeRegressionTrainer} that require to start the whole Ignite infrastructure. + */ +public class DecisionTreeRegressionTrainerIntegrationTest extends GridCommonAbstractTest { + /** Number of nodes in grid */ + private static final int NODE_COUNT = 3; + + /** Ignite instance. */ + private Ignite ignite; + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + for (int i = 1; i <= NODE_COUNT; i++) + startGrid(i); + } + + /** {@inheritDoc} */ + @Override protected void afterTestsStopped() { + stopAllGrids(); + } + + /** + * {@inheritDoc} + */ + @Override protected void beforeTest() { + /* Grid instance. */ + ignite = grid(NODE_COUNT); + ignite.configuration().setPeerClassLoadingEnabled(true); + IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); + } + + /** */ + @Test + public void testFit() { + int size = 100; + + CacheConfiguration trainingSetCacheCfg = new CacheConfiguration<>(); + trainingSetCacheCfg.setAffinity(new RendezvousAffinityFunction(false, 10)); + trainingSetCacheCfg.setName("TRAINING_SET"); + + IgniteCache data = ignite.createCache(trainingSetCacheCfg); + + Random rnd = new Random(0); + for (int i = 0; i < size; i++) { + double x = rnd.nextDouble() - 0.5; + data.put(i, new double[]{x, x > 0 ? 
1 : 0}); + } + + DecisionTreeRegressionTrainer trainer = new DecisionTreeRegressionTrainer(1, 0); + + DecisionTreeNode treeNode = trainer.fit( + ignite, + data, + new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST) + ).getRootNode(); + + assertTrue(treeNode instanceof DecisionTreeConditionalNode); + + DecisionTreeConditionalNode node = (DecisionTreeConditionalNode)treeNode; + + assertEquals(0, node.getThreshold(), 1e-3); + + assertTrue(node.getThenNode() instanceof DecisionTreeLeafNode); + assertTrue(node.getElseNode() instanceof DecisionTreeLeafNode); + + DecisionTreeLeafNode thenNode = (DecisionTreeLeafNode)node.getThenNode(); + DecisionTreeLeafNode elseNode = (DecisionTreeLeafNode)node.getElseNode(); + + assertEquals(1, thenNode.getVal(), 1e-10); + assertEquals(0, elseNode.getVal(), 1e-10); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeRegressionTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeRegressionTrainerTest.java new file mode 100644 index 0000000000000..252f453c9246a --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeRegressionTrainerTest.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link DecisionTreeRegressionTrainer}. + */ +@RunWith(Parameterized.class) +public class DecisionTreeRegressionTrainerTest { + /** Number of parts to be tested. */ + private static final int[] partsToBeTested = new int[] {1, 2, 3, 4, 5, 7}; + + /** Number of partitions. */ + @Parameterized.Parameter() + public int parts; + + /** Use index [= 1 if true]. */ + @Parameterized.Parameter(1) + public int useIdx; + + /** Test parameters. */ + @Parameterized.Parameters(name = "Data divided on {0} partitions. Use index = {1}.") + public static Iterable data() { + List res = new ArrayList<>(); + for (int i = 0; i < 2; i++) { + for (int part : partsToBeTested) + res.add(new Integer[] {part, i}); + } + + return res; + } + + /** */ + @Test + public void testFit() { + int size = 100; + + Map data = new HashMap<>(); + + Random rnd = new Random(0); + for (int i = 0; i < size; i++) { + double x = rnd.nextDouble() - 0.5; + data.put(i, new double[]{x, x > 0 ? 
1 : 0}); + } + + DecisionTreeRegressionTrainer trainer = new DecisionTreeRegressionTrainer(1, 0) + .withUsingIdx(useIdx == 1); + + DecisionTreeNode treeNode = trainer.fit(data, parts, new DoubleArrayVectorizer().labeled(1)).getRootNode(); + + assertTrue(treeNode instanceof DecisionTreeConditionalNode); + + DecisionTreeConditionalNode node = (DecisionTreeConditionalNode)treeNode; + + assertEquals(0, node.getThreshold(), 1e-3); + + assertTrue(node.getThenNode() instanceof DecisionTreeLeafNode); + assertTrue(node.getElseNode() instanceof DecisionTreeLeafNode); + + DecisionTreeLeafNode thenNode = (DecisionTreeLeafNode)node.getThenNode(); + DecisionTreeLeafNode elseNode = (DecisionTreeLeafNode)node.getElseNode(); + + assertEquals(1, thenNode.getVal(), 1e-10); + assertEquals(0, elseNode.getVal(), 1e-10); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeTestSuite.java new file mode 100644 index 0000000000000..eb832fa7b57bc --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/DecisionTreeTestSuite.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree; + +import org.apache.ignite.ml.tree.data.DecisionTreeDataTest; +import org.apache.ignite.ml.tree.data.TreeDataIndexTest; +import org.apache.ignite.ml.tree.impurity.gini.GiniImpurityMeasureCalculatorTest; +import org.apache.ignite.ml.tree.impurity.gini.GiniImpurityMeasureTest; +import org.apache.ignite.ml.tree.impurity.mse.MSEImpurityMeasureCalculatorTest; +import org.apache.ignite.ml.tree.impurity.mse.MSEImpurityMeasureTest; +import org.apache.ignite.ml.tree.impurity.util.SimpleStepFunctionCompressorTest; +import org.apache.ignite.ml.tree.impurity.util.StepFunctionTest; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in {@link org.apache.ignite.ml.tree} package. 
+ */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + DecisionTreeClassificationTrainerTest.class, + DecisionTreeRegressionTrainerTest.class, + DecisionTreeDataTest.class, + GiniImpurityMeasureCalculatorTest.class, + GiniImpurityMeasureTest.class, + MSEImpurityMeasureCalculatorTest.class, + MSEImpurityMeasureTest.class, + StepFunctionTest.class, + SimpleStepFunctionCompressorTest.class, + DecisionTreeRegressionTrainerIntegrationTest.class, + DecisionTreeClassificationTrainerIntegrationTest.class, + TreeDataIndexTest.class +}) +public class DecisionTreeTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/data/DecisionTreeDataTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/data/DecisionTreeDataTest.java new file mode 100644 index 0000000000000..7405c164faab6 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/data/DecisionTreeDataTest.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.data; + +import java.util.Arrays; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link DecisionTreeData}. + */ +@RunWith(Parameterized.class) +public class DecisionTreeDataTest { + /** Parameters. */ + @Parameterized.Parameters(name = "Use index {0}") + public static Iterable data() { + return Arrays.asList( + new Boolean[] {true}, + new Boolean[] {false} + ); + } + + /** Use index. 
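+     * When {@code true}, the run presumably exercises the {@link TreeDataIndex}-backed code path of
+     * {@link DecisionTreeData}; when {@code false}, the plain array-based path (inferred from the
+     * constructor argument used in the tests below).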
*/ + @Parameterized.Parameter + public boolean useIdx; + + /** */ + @Test + public void testFilter() { + double[][] features = new double[][]{{0}, {1}, {2}, {3}, {4}, {5}}; + double[] labels = new double[]{0, 1, 2, 3, 4, 5}; + + DecisionTreeData data = new DecisionTreeData(features, labels, useIdx); + DecisionTreeData filteredData = data.filter(obj -> obj[0] > 2); + + assertArrayEquals(new double[][]{{3}, {4}, {5}}, filteredData.getFeatures()); + assertArrayEquals(new double[]{3, 4, 5}, filteredData.getLabels(), 1e-10); + } + + /** */ + @Test + public void testSort() { + double[][] features = new double[][]{{4, 1}, {3, 3}, {2, 0}, {1, 4}, {0, 2}}; + double[] labels = new double[]{0, 1, 2, 3, 4}; + + DecisionTreeData data = new DecisionTreeData(features, labels, useIdx); + + data.sort(0); + + assertArrayEquals(new double[][]{{0, 2}, {1, 4}, {2, 0}, {3, 3}, {4, 1}}, features); + assertArrayEquals(new double[]{4, 3, 2, 1, 0}, labels, 1e-10); + + data.sort(1); + + assertArrayEquals(new double[][]{{2, 0}, {4, 1}, {0, 2}, {3, 3}, {1, 4}}, features); + assertArrayEquals(new double[]{2, 0, 4, 1, 3}, labels, 1e-10); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/data/TreeDataIndexTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/data/TreeDataIndexTest.java new file mode 100644 index 0000000000000..b8ad49a589b53 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/data/TreeDataIndexTest.java @@ -0,0 +1,159 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.data; + +import org.apache.ignite.ml.tree.TreeFilter; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +/** + * Test for {@link TreeDataIndex}. 
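+ * A reading aid for the fixtures below, inferred from the expected values rather than from the index
+ * implementation itself: {@code labelsInSortedOrder[k][f]} holds the label of the row with the k-th
+ * smallest value of feature {@code f}, and {@code featuresInSortedOrder[k][f]} holds the complete
+ * feature row of that same observation.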
+ */ +public class TreeDataIndexTest { + /** */ + private double[][] features = { + {1., 2., 3., 4.}, + {2., 3., 4., 1.}, + {3., 4., 1., 2.}, + {4., 1., 2., 3.} + }; + + /** */ + private double[] labels = {1., 2., 3, 4.}; + + /** */ + private double[][] labelsInSortedOrder = { + {1., 4., 3., 2.}, + {2., 1., 4., 3.}, + {3., 2., 1., 4.}, + {4., 3., 2., 1.} + }; + + /** */ + private double[][][] featuresInSortedOrder = { + { + {1., 2., 3., 4.}, + {4., 1., 2., 3.}, + {3., 4., 1., 2.}, + {2., 3., 4., 1.}, + }, + { + {2., 3., 4., 1.}, + {1., 2., 3., 4.}, + {4., 1., 2., 3.}, + {3., 4., 1., 2.}, + }, + { + {3., 4., 1., 2.}, + {2., 3., 4., 1.}, + {1., 2., 3., 4.}, + {4., 1., 2., 3.}, + }, + { + {4., 1., 2., 3.}, + {3., 4., 1., 2.}, + {2., 3., 4., 1.}, + {1., 2., 3., 4.}, + } + }; + + /** */ + private TreeDataIndex idx = new TreeDataIndex(features, labels); + + /** */ + @Test + public void labelInSortedOrderTest() { + assertEquals(features.length, idx.rowsCount()); + assertEquals(features[0].length, idx.columnsCount()); + + for (int k = 0; k < idx.rowsCount(); k++) { + for (int featureId = 0; featureId < idx.columnsCount(); featureId++) + assertEquals(labelsInSortedOrder[k][featureId], idx.labelInSortedOrder(k, featureId), 0.01); + } + } + + /** */ + @Test + public void featuresInSortedOrderTest() { + assertEquals(features.length, idx.rowsCount()); + assertEquals(features[0].length, idx.columnsCount()); + + for (int k = 0; k < idx.rowsCount(); k++) { + for (int featureId = 0; featureId < idx.columnsCount(); featureId++) + assertArrayEquals(featuresInSortedOrder[k][featureId], idx.featuresInSortedOrder(k, featureId), 0.01); + } + } + + /** */ + @Test + public void featureInSortedOrderTest() { + assertEquals(features.length, idx.rowsCount()); + assertEquals(features[0].length, idx.columnsCount()); + + for (int k = 0; k < idx.rowsCount(); k++) { + for (int featureId = 0; featureId < idx.columnsCount(); featureId++) + assertEquals((double)k + 1, idx.featureInSortedOrder(k, featureId), 0.01); + } + } + + /** */ + @Test + public void filterTest() { + TreeFilter filter1 = features -> features[0] > 2; + TreeFilter filter2 = features -> features[1] > 2; + TreeFilter filterAnd = filter1.and(features -> features[1] > 2); + + TreeDataIndex filtered1 = idx.filter(filter1); + TreeDataIndex filtered2 = filtered1.filter(filter2); + TreeDataIndex filtered3 = idx.filter(filterAnd); + + assertEquals(2, filtered1.rowsCount()); + assertEquals(4, filtered1.columnsCount()); + assertEquals(1, filtered2.rowsCount()); + assertEquals(4, filtered2.columnsCount()); + assertEquals(1, filtered3.rowsCount()); + assertEquals(4, filtered3.columnsCount()); + + double[] obj1 = {3, 4, 1, 2}; + double[] obj2 = {4, 1, 2, 3}; + double[][] restObjs = new double[][] {obj1, obj2}; + int[][] restObjIndxInSortedOrderPerFeatures = new int[][] { + {0, 1}, //feature 0 + {1, 0}, //feature 1 + {0, 1}, //feature 2 + {0, 1}, //feature 3 + }; + + for (int featureId = 0; featureId < filtered1.columnsCount(); featureId++) { + for (int k = 0; k < filtered1.rowsCount(); k++) { + int objId = restObjIndxInSortedOrderPerFeatures[featureId][k]; + double[] obj = restObjs[objId]; + assertArrayEquals(obj, filtered1.featuresInSortedOrder(k, featureId), 0.01); + } + } + + for (int featureId = 0; featureId < filtered2.columnsCount(); featureId++) { + for (int k = 0; k < filtered2.rowsCount(); k++) { + assertArrayEquals(obj1, filtered2.featuresInSortedOrder(k, featureId), 0.01); + assertArrayEquals(obj1, filtered3.featuresInSortedOrder(k, featureId), 0.01); + } + } 
+ } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/gini/GiniImpurityMeasureCalculatorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/gini/GiniImpurityMeasureCalculatorTest.java new file mode 100644 index 0000000000000..63d3f6c73a18c --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/gini/GiniImpurityMeasureCalculatorTest.java @@ -0,0 +1,120 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.impurity.gini; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.tree.data.DecisionTreeData; +import org.apache.ignite.ml.tree.impurity.util.StepFunction; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link GiniImpurityMeasureCalculator}. + */ +@RunWith(Parameterized.class) +public class GiniImpurityMeasureCalculatorTest { + /** Parameters. */ + @Parameterized.Parameters(name = "Use index {0}") + public static Iterable data() { + return Arrays.asList( + new Boolean[] {true}, + new Boolean[] {false} + ); + } + + /** Use index. */ + @Parameterized.Parameter + public boolean useIdx; + + /** */ + @Test + public void testCalculate() { + double[][] data = new double[][]{{0, 1}, {1, 0}, {2, 2}, {3, 3}}; + double[] labels = new double[]{0, 1, 1, 1}; + + Map encoder = new HashMap<>(); + encoder.put(0.0, 0); + encoder.put(1.0, 1); + GiniImpurityMeasureCalculator calculator = new GiniImpurityMeasureCalculator(encoder, useIdx); + + StepFunction[] impurity = calculator.calculate(new DecisionTreeData(data, labels, useIdx), fs -> true, 0); + + assertEquals(2, impurity.length); + + // Check Gini calculated for the first column. + assertArrayEquals(new double[]{Double.NEGATIVE_INFINITY, 0, 1, 2, 3}, impurity[0].getX(), 1e-10); + assertEquals(-2.500, impurity[0].getY()[0].impurity(), 1e-3); + assertEquals(-4.000, impurity[0].getY()[1].impurity(), 1e-3); + assertEquals(-3.000, impurity[0].getY()[2].impurity(), 1e-3); + assertEquals(-2.666, impurity[0].getY()[3].impurity(), 1e-3); + assertEquals(-2.500, impurity[0].getY()[4].impurity(), 1e-3); + + // Check Gini calculated for the second column. 
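+        // A sketch of where the expected values in this method come from, assuming the measure scores
+        // each side of a split as -(sum of squared per-class counts) / side size and sums both sides
+        // (consistent with GiniImpurityMeasureTest): the labels are {0, 1, 1, 1}, so keeping all four
+        // rows on one side gives -(1*1 + 3*3) / 4 = -2.5; splitting off only the 0-labelled row gives
+        // -(1*1)/1 - (3*3)/3 = -4.0; a 2/2 split that mixes the classes gives -(1 + 1)/2 - (2*2)/2 = -3.0.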
+ assertArrayEquals(new double[]{Double.NEGATIVE_INFINITY, 0, 1, 2, 3}, impurity[1].getX(), 1e-10); + assertEquals(-2.500, impurity[1].getY()[0].impurity(), 1e-3); + assertEquals(-2.666, impurity[1].getY()[1].impurity(), 1e-3); + assertEquals(-3.000, impurity[1].getY()[2].impurity(), 1e-3); + assertEquals(-2.666, impurity[1].getY()[3].impurity(), 1e-3); + assertEquals(-2.500, impurity[1].getY()[4].impurity(), 1e-3); + } + + /** */ + @Test + public void testCalculateWithRepeatedData() { + double[][] data = new double[][]{{0}, {1}, {2}, {2}, {3}}; + double[] labels = new double[]{0, 1, 1, 1, 1}; + + Map encoder = new HashMap<>(); + encoder.put(0.0, 0); + encoder.put(1.0, 1); + GiniImpurityMeasureCalculator calculator = new GiniImpurityMeasureCalculator(encoder, useIdx); + + StepFunction[] impurity = calculator.calculate(new DecisionTreeData(data, labels, useIdx), fs -> true, 0); + + assertEquals(1, impurity.length); + + // Check Gini calculated for the first column. + assertArrayEquals(new double[]{Double.NEGATIVE_INFINITY, 0, 1, 2, 3}, impurity[0].getX(), 1e-10); + assertEquals(-3.400, impurity[0].getY()[0].impurity(), 1e-3); + assertEquals(-5.000, impurity[0].getY()[1].impurity(), 1e-3); + assertEquals(-4.000, impurity[0].getY()[2].impurity(), 1e-3); + assertEquals(-3.500, impurity[0].getY()[3].impurity(), 1e-3); + assertEquals(-3.400, impurity[0].getY()[4].impurity(), 1e-3); + } + + /** */ + @Test + public void testGetLabelCode() { + Map encoder = new HashMap<>(); + encoder.put(0.0, 0); + encoder.put(1.0, 1); + encoder.put(2.0, 2); + + GiniImpurityMeasureCalculator calculator = new GiniImpurityMeasureCalculator(encoder, useIdx); + + assertEquals(0, calculator.getLabelCode(0.0)); + assertEquals(1, calculator.getLabelCode(1.0)); + assertEquals(2, calculator.getLabelCode(2.0)); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/gini/GiniImpurityMeasureTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/gini/GiniImpurityMeasureTest.java new file mode 100644 index 0000000000000..25f635d285ce6 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/gini/GiniImpurityMeasureTest.java @@ -0,0 +1,129 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.impurity.gini; + +import java.util.Random; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link GiniImpurityMeasure}. 
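+ * The expected values below are consistent with scoring each part as minus the sum of squared
+ * per-class counts divided by the part size, summed over the left and right parts, with an empty
+ * part contributing nothing. For example, left counts {3, 0, 0} give -(3*3)/3 = -3, and adding
+ * right counts {0, 3, 0} contributes another -3, for a total of -6.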
+ */ +public class GiniImpurityMeasureTest { + /** */ + @Test + public void testImpurityOnEmptyData() { + long[] left = new long[]{0, 0, 0}; + long[] right = new long[]{0, 0, 0}; + + GiniImpurityMeasure impurity = new GiniImpurityMeasure(left, right); + + assertEquals(0.0, impurity.impurity(), 1e-10); + } + + /** */ + @Test + public void testImpurityLeftPart() { + long[] left = new long[]{3, 0, 0}; + long[] right = new long[]{0, 0, 0}; + + GiniImpurityMeasure impurity = new GiniImpurityMeasure(left, right); + + assertEquals(-3, impurity.impurity(), 1e-10); + } + + /** */ + @Test + public void testImpurityRightPart() { + long[] left = new long[]{0, 0, 0}; + long[] right = new long[]{3, 0, 0}; + + GiniImpurityMeasure impurity = new GiniImpurityMeasure(left, right); + + assertEquals(-3, impurity.impurity(), 1e-10); + } + + /** */ + @Test + public void testImpurityLeftAndRightPart() { + long[] left = new long[]{3, 0, 0}; + long[] right = new long[]{0, 3, 0}; + + GiniImpurityMeasure impurity = new GiniImpurityMeasure(left, right); + + assertEquals(-6, impurity.impurity(), 1e-10); + } + + /** */ + @Test + public void testAdd() { + Random rnd = new Random(0); + + GiniImpurityMeasure a = new GiniImpurityMeasure( + new long[]{randCnt(rnd), randCnt(rnd), randCnt(rnd)}, + new long[]{randCnt(rnd), randCnt(rnd), randCnt(rnd)} + ); + + GiniImpurityMeasure b = new GiniImpurityMeasure( + new long[]{randCnt(rnd), randCnt(rnd), randCnt(rnd)}, + new long[]{randCnt(rnd), randCnt(rnd), randCnt(rnd)} + ); + + GiniImpurityMeasure c = a.add(b); + + assertEquals(a.getLeft()[0] + b.getLeft()[0], c.getLeft()[0]); + assertEquals(a.getLeft()[1] + b.getLeft()[1], c.getLeft()[1]); + assertEquals(a.getLeft()[2] + b.getLeft()[2], c.getLeft()[2]); + + assertEquals(a.getRight()[0] + b.getRight()[0], c.getRight()[0]); + assertEquals(a.getRight()[1] + b.getRight()[1], c.getRight()[1]); + assertEquals(a.getRight()[2] + b.getRight()[2], c.getRight()[2]); + } + + /** */ + @Test + public void testSubtract() { + Random rnd = new Random(0); + + GiniImpurityMeasure a = new GiniImpurityMeasure( + new long[]{randCnt(rnd), randCnt(rnd), randCnt(rnd)}, + new long[]{randCnt(rnd), randCnt(rnd), randCnt(rnd)} + ); + + GiniImpurityMeasure b = new GiniImpurityMeasure( + new long[]{randCnt(rnd), randCnt(rnd), randCnt(rnd)}, + new long[]{randCnt(rnd), randCnt(rnd), randCnt(rnd)} + ); + + GiniImpurityMeasure c = a.subtract(b); + + assertEquals(a.getLeft()[0] - b.getLeft()[0], c.getLeft()[0]); + assertEquals(a.getLeft()[1] - b.getLeft()[1], c.getLeft()[1]); + assertEquals(a.getLeft()[2] - b.getLeft()[2], c.getLeft()[2]); + + assertEquals(a.getRight()[0] - b.getRight()[0], c.getRight()[0]); + assertEquals(a.getRight()[1] - b.getRight()[1], c.getRight()[1]); + assertEquals(a.getRight()[2] - b.getRight()[2], c.getRight()[2]); + } + + /** Generates random count. */ + private long randCnt(Random rnd) { + return Math.abs(rnd.nextInt()); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/mse/MSEImpurityMeasureCalculatorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/mse/MSEImpurityMeasureCalculatorTest.java new file mode 100644 index 0000000000000..aa097815dd6b1 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/mse/MSEImpurityMeasureCalculatorTest.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.impurity.mse; + +import java.util.Arrays; +import org.apache.ignite.ml.tree.data.DecisionTreeData; +import org.apache.ignite.ml.tree.impurity.util.StepFunction; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link MSEImpurityMeasureCalculator}. + */ +@RunWith(Parameterized.class) +public class MSEImpurityMeasureCalculatorTest { + /** Parameters. */ + @Parameterized.Parameters(name = "Use index {0}") + public static Iterable data() { + return Arrays.asList( + new Boolean[] {true}, + new Boolean[] {false} + ); + } + + /** Use index. */ + @Parameterized.Parameter + public boolean useIdx; + + /** */ + @Test + public void testCalculate() { + double[][] data = new double[][]{{0, 2}, {1, 1}, {2, 0}, {3, 3}}; + double[] labels = new double[]{1, 2, 2, 1}; + + MSEImpurityMeasureCalculator calculator = new MSEImpurityMeasureCalculator(useIdx); + + StepFunction[] impurity = calculator.calculate(new DecisionTreeData(data, labels, useIdx), fs -> true, 0); + + assertEquals(2, impurity.length); + + // Test MSE calculated for the first column. + assertArrayEquals(new double[]{Double.NEGATIVE_INFINITY, 0, 1, 2, 3}, impurity[0].getX(), 1e-10); + assertEquals(1.000, impurity[0].getY()[0].impurity(), 1e-3); + assertEquals(0.666, impurity[0].getY()[1].impurity(), 1e-3); + assertEquals(1.000, impurity[0].getY()[2].impurity(), 1e-3); + assertEquals(0.666, impurity[0].getY()[3].impurity(), 1e-3); + assertEquals(1.000, impurity[0].getY()[4].impurity(), 1e-3); + + // Test MSE calculated for the second column. + assertArrayEquals(new double[]{Double.NEGATIVE_INFINITY, 0, 1, 2, 3}, impurity[1].getX(), 1e-10); + assertEquals(1.000, impurity[1].getY()[0].impurity(), 1e-3); + assertEquals(0.666, impurity[1].getY()[1].impurity(), 1e-3); + assertEquals(0.000, impurity[1].getY()[2].impurity(), 1e-3); + assertEquals(0.666, impurity[1].getY()[3].impurity(), 1e-3); + assertEquals(1.000, impurity[1].getY()[4].impurity(), 1e-3); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/mse/MSEImpurityMeasureTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/mse/MSEImpurityMeasureTest.java new file mode 100644 index 0000000000000..e1520d9670191 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/mse/MSEImpurityMeasureTest.java @@ -0,0 +1,109 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.impurity.mse; + +import java.util.Random; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link MSEImpurityMeasure}. + */ +public class MSEImpurityMeasureTest { + /** */ + @Test + public void testImpurityOnEmptyData() { + MSEImpurityMeasure impurity = new MSEImpurityMeasure(0, 0, 0, 0, 0, 0); + + assertEquals(0.0, impurity.impurity(), 1e-10); + } + + /** */ + @Test + public void testImpurityLeftPart() { + // Test on left part [1, 2, 2, 1, 1, 1]. + MSEImpurityMeasure impurity = new MSEImpurityMeasure(8, 12, 6, 0, 0, 0); + + assertEquals(1.333, impurity.impurity(), 1e-3); + } + + /** */ + @Test + public void testImpurityRightPart() { + // Test on right part [1, 2, 2, 1, 1, 1]. + MSEImpurityMeasure impurity = new MSEImpurityMeasure(0, 0, 0, 8, 12, 6); + + assertEquals(1.333, impurity.impurity(), 1e-3); + } + + /** */ + @Test + public void testImpurityLeftAndRightPart() { + // Test on left part [1, 2, 2] and right part [1, 1, 1]. + MSEImpurityMeasure impurity = new MSEImpurityMeasure(5, 9, 3, 3, 3, 3); + + assertEquals(0.666, impurity.impurity(), 1e-3); + } + + /** */ + @Test + public void testAdd() { + Random rnd = new Random(0); + + MSEImpurityMeasure a = new MSEImpurityMeasure( + rnd.nextDouble(), rnd.nextDouble(), rnd.nextInt(), rnd.nextDouble(), rnd.nextDouble(), rnd.nextInt() + ); + + MSEImpurityMeasure b = new MSEImpurityMeasure( + rnd.nextDouble(), rnd.nextDouble(), rnd.nextInt(), rnd.nextDouble(), rnd.nextDouble(), rnd.nextInt() + ); + + MSEImpurityMeasure c = a.add(b); + + assertEquals(a.getLeftY() + b.getLeftY(), c.getLeftY(), 1e-10); + assertEquals(a.getLeftY2() + b.getLeftY2(), c.getLeftY2(), 1e-10); + assertEquals(a.getLeftCnt() + b.getLeftCnt(), c.getLeftCnt()); + assertEquals(a.getRightY() + b.getRightY(), c.getRightY(), 1e-10); + assertEquals(a.getRightY2() + b.getRightY2(), c.getRightY2(), 1e-10); + assertEquals(a.getRightCnt() + b.getRightCnt(), c.getRightCnt()); + } + + /** */ + @Test + public void testSubtract() { + Random rnd = new Random(0); + + MSEImpurityMeasure a = new MSEImpurityMeasure( + rnd.nextDouble(), rnd.nextDouble(), rnd.nextInt(), rnd.nextDouble(), rnd.nextDouble(), rnd.nextInt() + ); + + MSEImpurityMeasure b = new MSEImpurityMeasure( + rnd.nextDouble(), rnd.nextDouble(), rnd.nextInt(), rnd.nextDouble(), rnd.nextDouble(), rnd.nextInt() + ); + + MSEImpurityMeasure c = a.subtract(b); + + assertEquals(a.getLeftY() - b.getLeftY(), c.getLeftY(), 1e-10); + assertEquals(a.getLeftY2() - b.getLeftY2(), c.getLeftY2(), 1e-10); + assertEquals(a.getLeftCnt() - b.getLeftCnt(), c.getLeftCnt()); + assertEquals(a.getRightY() - b.getRightY(), c.getRightY(), 1e-10); + assertEquals(a.getRightY2() - b.getRightY2(), c.getRightY2(), 1e-10); + assertEquals(a.getRightCnt() - b.getRightCnt(), c.getRightCnt()); + } +} diff --git 
a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/util/SimpleStepFunctionCompressorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/util/SimpleStepFunctionCompressorTest.java new file mode 100644 index 0000000000000..cca8a9bb72a30 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/util/SimpleStepFunctionCompressorTest.java @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.impurity.util; + +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link SimpleStepFunctionCompressor}. + */ +public class SimpleStepFunctionCompressorTest { + /** */ + @Test + @SuppressWarnings("unchecked") + public void testDefaultCompress() { + StepFunction function = new StepFunction<>( + new double[]{1, 2, 3, 4}, + TestImpurityMeasure.asTestImpurityMeasures(1, 2, 3, 4) + ); + + SimpleStepFunctionCompressor compressor = new SimpleStepFunctionCompressor<>(); + + StepFunction resFunction = compressor.compress(new StepFunction[] {function})[0]; + + assertArrayEquals(new double[]{1, 2, 3, 4}, resFunction.getX(), 1e-10); + assertArrayEquals(TestImpurityMeasure.asTestImpurityMeasures(1, 2, 3, 4), resFunction.getY()); + } + + /** */ + @Test + public void testDefaults() { + StepFunction function = new StepFunction<>( + new double[]{1, 2, 3, 4}, + TestImpurityMeasure.asTestImpurityMeasures(1, 2, 3, 4) + ); + + SimpleStepFunctionCompressor compressor = new SimpleStepFunctionCompressor<>(); + + StepFunction resFunction = compressor.compress(function); + + assertArrayEquals(new double[]{1, 2, 3, 4}, resFunction.getX(), 1e-10); + assertArrayEquals(TestImpurityMeasure.asTestImpurityMeasures(1, 2, 3, 4), resFunction.getY()); + } + + /** */ + @Test + public void testCompressSmallFunction() { + StepFunction function = new StepFunction<>( + new double[]{1, 2, 3, 4}, + TestImpurityMeasure.asTestImpurityMeasures(1, 2, 3, 4) + ); + + SimpleStepFunctionCompressor compressor = new SimpleStepFunctionCompressor<>(5, 0, 0); + + StepFunction resFunction = compressor.compress(function); + + assertArrayEquals(new double[]{1, 2, 3, 4}, resFunction.getX(), 1e-10); + assertArrayEquals(TestImpurityMeasure.asTestImpurityMeasures(1, 2, 3, 4), resFunction.getY()); + } + + /** */ + @Test + public void testCompressIncreasingFunction() { + StepFunction function = new StepFunction<>( + new double[]{1, 2, 3, 4, 5}, + TestImpurityMeasure.asTestImpurityMeasures(1, 2, 3, 4, 5) + ); + + SimpleStepFunctionCompressor compressor = new SimpleStepFunctionCompressor<>(1, 0.4, 0); + + StepFunction resFunction = compressor.compress(function); + + assertArrayEquals(new double[]{1, 3, 
5}, resFunction.getX(), 1e-10); + assertArrayEquals(TestImpurityMeasure.asTestImpurityMeasures(1, 3, 5), resFunction.getY()); + } + + /** */ + @Test + public void testCompressDecreasingFunction() { + StepFunction function = new StepFunction<>( + new double[]{1, 2, 3, 4, 5}, + TestImpurityMeasure.asTestImpurityMeasures(5, 4, 3, 2, 1) + ); + + SimpleStepFunctionCompressor compressor = new SimpleStepFunctionCompressor<>(1, 0, 0.4); + + StepFunction resFunction = compressor.compress(function); + + assertArrayEquals(new double[]{1, 3, 5}, resFunction.getX(), 1e-10); + assertArrayEquals(TestImpurityMeasure.asTestImpurityMeasures(5, 3, 1), resFunction.getY()); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/util/StepFunctionTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/util/StepFunctionTest.java new file mode 100644 index 0000000000000..2a0279cec0eef --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/util/StepFunctionTest.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.impurity.util; + +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; + +/** + * Tests for {@link StepFunction}. 
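+ * The expected arrays in these tests are consistent with point-wise addition of step functions: the
+ * x-axes of the operands are merged, and at each merged point every operand contributes the value of
+ * its most recent step (or nothing before its first point). For example, adding {1, 3, 5} -> {1, 2, 3}
+ * to {0, 2, 4} -> {1, 2, 3} yields {0, 1, 2, 3, 4, 5} -> {1, 2, 3, 4, 5, 6}.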
+ */ +public class StepFunctionTest { + /** */ + @Test + public void testAddIncreasingFunctions() { + StepFunction a = new StepFunction<>( + new double[]{1, 3, 5}, + TestImpurityMeasure.asTestImpurityMeasures(1, 2, 3) + ); + + StepFunction b = new StepFunction<>( + new double[]{0, 2, 4}, + TestImpurityMeasure.asTestImpurityMeasures(1, 2, 3) + ); + + StepFunction c = a.add(b); + + assertArrayEquals(new double[]{0, 1, 2, 3, 4, 5}, c.getX(), 1e-10); + assertArrayEquals( + TestImpurityMeasure.asTestImpurityMeasures(1, 2, 3, 4, 5, 6), + c.getY() + ); + } + + /** */ + @Test + public void testAddDecreasingFunctions() { + StepFunction a = new StepFunction<>( + new double[]{1, 3, 5}, + TestImpurityMeasure.asTestImpurityMeasures(3, 2, 1) + ); + + StepFunction b = new StepFunction<>( + new double[]{0, 2, 4}, + TestImpurityMeasure.asTestImpurityMeasures(3, 2, 1) + ); + + StepFunction c = a.add(b); + + assertArrayEquals(new double[]{0, 1, 2, 3, 4, 5}, c.getX(), 1e-10); + assertArrayEquals( + TestImpurityMeasure.asTestImpurityMeasures(3, 6, 5, 4, 3, 2), + c.getY() + ); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/util/TestImpurityMeasure.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/util/TestImpurityMeasure.java new file mode 100644 index 0000000000000..c0d1911a586e8 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/impurity/util/TestImpurityMeasure.java @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.impurity.util; + +import java.util.Objects; +import org.apache.ignite.ml.tree.impurity.ImpurityMeasure; + +/** + * Utils class used as impurity measure in tests. + */ +class TestImpurityMeasure implements ImpurityMeasure { + /** */ + private static final long serialVersionUID = 2414020770162797847L; + + /** Impurity. */ + private final double impurity; + + /** + * Constructs a new instance of test impurity measure. + * + * @param impurity Impurity. + */ + private TestImpurityMeasure(double impurity) { + this.impurity = impurity; + } + + /** + * Convert doubles to array of test impurity measures. + * + * @param impurity Impurity as array of doubles. + * @return Test impurity measure objects as array. + */ + static TestImpurityMeasure[] asTestImpurityMeasures(double... 
impurity) { + TestImpurityMeasure[] res = new TestImpurityMeasure[impurity.length]; + + for (int i = 0; i < impurity.length; i++) + res[i] = new TestImpurityMeasure(impurity[i]); + + return res; + } + + /** {@inheritDoc} */ + @Override public double impurity() { + return impurity; + } + + /** {@inheritDoc} */ + @Override public TestImpurityMeasure add(TestImpurityMeasure measure) { + return new TestImpurityMeasure(impurity + measure.impurity); + } + + /** {@inheritDoc} */ + @Override public TestImpurityMeasure subtract(TestImpurityMeasure measure) { + return new TestImpurityMeasure(impurity - measure.impurity); + } + + /** */ + @Override public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + TestImpurityMeasure measure = (TestImpurityMeasure)o; + + return Double.compare(measure.impurity, impurity) == 0; + } + + /** */ + @Override public int hashCode() { + + return Objects.hash(impurity); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/performance/DecisionTreeMNISTIntegrationTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/performance/DecisionTreeMNISTIntegrationTest.java new file mode 100644 index 0000000000000..f10da9a8ba6c4 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/performance/DecisionTreeMNISTIntegrationTest.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.performance; + +import org.apache.ignite.Ignite; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; + +/** + * Tests {@link DecisionTreeClassificationTrainer} on the MNIST dataset that require to start the whole Ignite + * infrastructure. For manual run. + */ +public class DecisionTreeMNISTIntegrationTest extends GridCommonAbstractTest { + /** Number of nodes in grid */ + private static final int NODE_COUNT = 3; + + /** Ignite instance. */ + private Ignite ignite; + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + for (int i = 1; i <= NODE_COUNT; i++) + startGrid(i); + } + + /** + * {@inheritDoc} + */ + @Override protected void beforeTest() { + /* Grid instance. */ + ignite = grid(NODE_COUNT); + ignite.configuration().setPeerClassLoadingEnabled(true); + IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); + } + + /** Tests on the MNIST dataset. For manual run. 
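+     * The body below is intentionally left as a commented-out block: it presumably requires the MNIST
+     * image files to be available locally, so it is not meant to run as part of the automated suite.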
*/ + /*@Test + public void testMNIST() throws IOException { + CacheConfiguration trainingSetCacheCfg = new CacheConfiguration<>(); + trainingSetCacheCfg.setAffinity(new RendezvousAffinityFunction(false, 10)); + trainingSetCacheCfg.setName("MNIST_TRAINING_SET"); + + IgniteCache trainingSet = ignite.createCache(trainingSetCacheCfg); + + int i = 0; + for (MnistUtils.MnistLabeledImage e : MnistMLPTestUtil.loadTrainingSet(60_000)) + trainingSet.put(i++, e); + + DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer( + 8, + 0, + new SimpleStepFunctionCompressor<>()); + + DecisionTreeNode mdl = trainer.fit( + ignite, + trainingSet, + FeatureLabelExtractorWrapper.wrap( + (k, v) -> VectorUtils.of(v.getPixels()), + (k, v) -> (double)v.getLabel() + ) + ); + + int correctAnswers = 0; + int incorrectAnswers = 0; + + for (MnistUtils.MnistLabeledImage e : MnistMLPTestUtil.loadTestSet(10_000)) { + double res = mdl.predict(new DenseVector(e.getPixels())); + + if (res == e.getLabel()) + correctAnswers++; + else + incorrectAnswers++; + } + + double accuracy = 1.0 * correctAnswers / (correctAnswers + incorrectAnswers); + + assertTrue(accuracy > 0.8); + }*/ +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/performance/DecisionTreeMNISTTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/performance/DecisionTreeMNISTTest.java new file mode 100644 index 0000000000000..7de268fcc4675 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/performance/DecisionTreeMNISTTest.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.performance; + +import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer; + +/** + * Tests {@link DecisionTreeClassificationTrainer} on the MNIST dataset using locally stored data. For manual run. + */ +public class DecisionTreeMNISTTest { + /** Tests on the MNIST dataset. For manual run. 
*/ +/* @Test + public void testMNIST() throws IOException { + Map trainingSet = new HashMap<>(); + + int i = 0; + for (MnistUtils.MnistLabeledImage e : MnistMLPTestUtil.loadTrainingSet(60_000)) + trainingSet.put(i++, e); + + DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer( + 8, + 0, + new SimpleStepFunctionCompressor<>()); + + DecisionTreeNode mdl = trainer.fit( + trainingSet, + 10, FeatureLabelExtractorWrapper.wrap( + (k, v) -> VectorUtils.of(v.getPixels()), + (k, v) -> (double)v.getLabel() + ) + ); + + int correctAnswers = 0; + int incorrectAnswers = 0; + + for (MnistUtils.MnistLabeledImage e : MnistMLPTestUtil.loadTestSet(10_000)) { + double res = mdl.predict(new DenseVector(e.getPixels())); + + if (res == e.getLabel()) + correctAnswers++; + else + incorrectAnswers++; + } + + double accuracy = 1.0 * correctAnswers / (correctAnswers + incorrectAnswers); + + assertTrue(accuracy > 0.8); + }*/ +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestClassifierTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestClassifierTrainerTest.java new file mode 100644 index 0000000000000..4d2ef46fd807f --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestClassifierTrainerTest.java @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.randomforest; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.composition.predictionsaggregator.OnMajorityPredictionsAggregator; +import org.apache.ignite.ml.dataset.feature.FeatureMeta; +import org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorizer; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.structures.LabeledVector; +import org.apache.ignite.ml.trainers.DatasetTrainer; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link RandomForestClassifierTrainer}. 
+ */ +public class RandomForestClassifierTrainerTest extends TrainerTest { + /** */ + @Test + public void testFit() { + int sampleSize = 1000; + Map> sample = new HashMap<>(); + for (int i = 0; i < sampleSize; i++) { + double x1 = i; + double x2 = x1 / 10.0; + double x3 = x2 / 10.0; + double x4 = x3 / 10.0; + + sample.put(i, VectorUtils.of(x1, x2, x3, x4).labeled((double)i % 2)); + } + + ArrayList meta = new ArrayList<>(); + for (int i = 0; i < 4; i++) + meta.add(new FeatureMeta("", i, false)); + DatasetTrainer trainer = new RandomForestClassifierTrainer(meta) + .withAmountOfTrees(5) + .withFeaturesCountSelectionStrgy(x -> 2) + .withEnvironmentBuilder(TestUtils.testEnvBuilder()); + + RandomForestModel mdl = trainer.fit(sample, parts, new LabeledDummyVectorizer<>()); + + assertTrue(mdl.getPredictionsAggregator() instanceof OnMajorityPredictionsAggregator); + assertEquals(5, mdl.getModels().size()); + } + + /** */ + @Test + public void testUpdate() { + int sampleSize = 1000; + Map> sample = new HashMap<>(); + for (int i = 0; i < sampleSize; i++) { + double x1 = i; + double x2 = x1 / 10.0; + double x3 = x2 / 10.0; + double x4 = x3 / 10.0; + + sample.put(i, VectorUtils.of(x1, x2, x3, x4).labeled((double)i % 2)); + } + + ArrayList meta = new ArrayList<>(); + for (int i = 0; i < 4; i++) + meta.add(new FeatureMeta("", i, false)); + DatasetTrainer trainer = new RandomForestClassifierTrainer(meta) + .withAmountOfTrees(100) + .withFeaturesCountSelectionStrgy(x -> 2) + .withEnvironmentBuilder(TestUtils.testEnvBuilder()); + + RandomForestModel originalMdl = trainer.fit(sample, parts, new LabeledDummyVectorizer<>()); + RandomForestModel updatedOnSameDS = trainer.update(originalMdl, sample, parts, new LabeledDummyVectorizer<>()); + RandomForestModel updatedOnEmptyDS = + trainer.update(originalMdl, new HashMap>(), parts, new LabeledDummyVectorizer<>()); + + Vector v = VectorUtils.of(5, 0.5, 0.05, 0.005); + assertEquals(originalMdl.predict(v), updatedOnSameDS.predict(v), 0.01); + assertEquals(originalMdl.predict(v), updatedOnEmptyDS.predict(v), 0.01); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestIntegrationTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestIntegrationTest.java new file mode 100644 index 0000000000000..dc2be8536dd0f --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestIntegrationTest.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.tree.randomforest; + +import java.util.ArrayList; +import java.util.Random; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.ml.composition.predictionsaggregator.MeanValuePredictionsAggregator; +import org.apache.ignite.ml.dataset.feature.FeatureMeta; +import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.junit.Test; + +/** + * Tests for {@link RandomForestTrainer}. + */ +public class RandomForestIntegrationTest extends GridCommonAbstractTest { + /** Number of nodes in grid */ + private static final int NODE_COUNT = 3; + + /** Ignite instance. */ + private Ignite ignite; + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + for (int i = 1; i <= NODE_COUNT; i++) + startGrid(i); + } + + /** {@inheritDoc} */ + @Override protected void afterTestsStopped() { + stopAllGrids(); + } + + /** + * {@inheritDoc} + */ + @Override protected void beforeTest() { + /* Grid instance. */ + ignite = grid(NODE_COUNT); + ignite.configuration().setPeerClassLoadingEnabled(true); + IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); + } + + /** */ + @Test + public void testFit() { + int size = 100; + + CacheConfiguration trainingSetCacheCfg = new CacheConfiguration<>(); + trainingSetCacheCfg.setAffinity(new RendezvousAffinityFunction(false, 10)); + trainingSetCacheCfg.setName("TRAINING_SET"); + + IgniteCache data = ignite.createCache(trainingSetCacheCfg); + + Random rnd = new Random(0); + for (int i = 0; i < size; i++) { + double x = rnd.nextDouble() - 0.5; + data.put(i, new double[] {x, x > 0 ? 1 : 0}); + } + + ArrayList meta = new ArrayList<>(); + meta.add(new FeatureMeta("", 0, false)); + RandomForestRegressionTrainer trainer = new RandomForestRegressionTrainer(meta) + .withAmountOfTrees(5) + .withFeaturesCountSelectionStrgy(x -> 2); + + RandomForestModel mdl = trainer.fit(ignite, data, new DoubleArrayVectorizer().labeled(1)); + + assertTrue(mdl.getPredictionsAggregator() instanceof MeanValuePredictionsAggregator); + assertEquals(5, mdl.getModels().size()); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestRegressionTrainerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestRegressionTrainerTest.java new file mode 100644 index 0000000000000..c7fc98130ea29 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestRegressionTrainerTest.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.randomforest; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import org.apache.ignite.ml.common.TrainerTest; +import org.apache.ignite.ml.composition.predictionsaggregator.MeanValuePredictionsAggregator; +import org.apache.ignite.ml.dataset.feature.FeatureMeta; +import org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorizer; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.structures.LabeledVector; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link RandomForestRegressionTrainer}. + */ +public class RandomForestRegressionTrainerTest extends TrainerTest { + /** */ + @Test + public void testFit() { + int sampleSize = 1000; + Map> sample = new HashMap<>(); + for (int i = 0; i < sampleSize; i++) { + double x1 = i; + double x2 = x1 / 10.0; + double x3 = x2 / 10.0; + double x4 = x3 / 10.0; + + sample.put(x1 * x2 + x3 * x4, VectorUtils.of(x1, x2, x3, x4).labeled((double)i % 2)); + } + + ArrayList meta = new ArrayList<>(); + for (int i = 0; i < 4; i++) + meta.add(new FeatureMeta("", i, false)); + RandomForestRegressionTrainer trainer = new RandomForestRegressionTrainer(meta) + .withAmountOfTrees(5) + .withFeaturesCountSelectionStrgy(x -> 2); + + RandomForestModel mdl = trainer.fit(sample, parts, new LabeledDummyVectorizer<>()); + assertTrue(mdl.getPredictionsAggregator() instanceof MeanValuePredictionsAggregator); + assertEquals(5, mdl.getModels().size()); + } + + /** */ + @Test + public void testUpdate() { + int sampleSize = 1000; + Map> sample = new HashMap<>(); + for (int i = 0; i < sampleSize; i++) { + double x1 = i; + double x2 = x1 / 10.0; + double x3 = x2 / 10.0; + double x4 = x3 / 10.0; + + sample.put(x1 * x2 + x3 * x4, VectorUtils.of(x1, x2, x3, x4).labeled((double)i % 2)); + } + + ArrayList meta = new ArrayList<>(); + for (int i = 0; i < 4; i++) + meta.add(new FeatureMeta("", i, false)); + RandomForestRegressionTrainer trainer = new RandomForestRegressionTrainer(meta) + .withAmountOfTrees(100) + .withFeaturesCountSelectionStrgy(x -> 2); + + RandomForestModel originalMdl = trainer.fit(sample, parts, new LabeledDummyVectorizer<>()); + RandomForestModel updatedOnSameDS = trainer.update(originalMdl, sample, parts, new LabeledDummyVectorizer<>()); + RandomForestModel updatedOnEmptyDS = + trainer.update(originalMdl, new HashMap>(), parts, new LabeledDummyVectorizer<>()); + + Vector v = VectorUtils.of(5, 0.5, 0.05, 0.005); + assertEquals(originalMdl.predict(v), updatedOnSameDS.predict(v), 0.1); + assertEquals(originalMdl.predict(v), updatedOnEmptyDS.predict(v), 0.1); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestTest.java new file mode 100644 index 0000000000000..eb81b36a7b159 --- /dev/null +++ 
b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestTest.java @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.randomforest; + +import java.util.Arrays; +import java.util.List; +import java.util.Optional; +import org.apache.ignite.ml.dataset.feature.FeatureMeta; +import org.apache.ignite.ml.tree.randomforest.data.NodeSplit; +import org.apache.ignite.ml.tree.randomforest.data.TreeNode; +import org.junit.Test; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +/** */ +public class RandomForestTest { + /** Seed. */ + private final long seed = 0; + + /** Count of trees. */ + private final int cntOfTrees = 10; + + /** Min imp delta. */ + private final double minImpDelta = 1.0; + + /** Max depth. */ + private final int maxDepth = 1; + + /** Meta. */ + private final List meta = Arrays.asList( + new FeatureMeta("", 0, false), + new FeatureMeta("", 1, true), + new FeatureMeta("", 2, false), + new FeatureMeta("", 3, true), + new FeatureMeta("", 4, false), + new FeatureMeta("", 5, true), + new FeatureMeta("", 6, false) + ); + + /** Rf. */ + private RandomForestClassifierTrainer rf = new RandomForestClassifierTrainer(meta) + .withAmountOfTrees(cntOfTrees) + .withSeed(seed) + .withFeaturesCountSelectionStrgy(x -> 4) + .withMaxDepth(maxDepth) + .withMinImpurityDelta(minImpDelta) + .withSubSampleSize(0.1); + + /** */ + @Test + public void testNeedSplit() { + TreeNode node = new TreeNode(1, 1); + node.setImpurity(1000); + assertTrue(rf.needSplit(node, Optional.of(new NodeSplit(0, 0, node.getImpurity() - minImpDelta * 1.01)))); + assertFalse(rf.needSplit(node, Optional.of(new NodeSplit(0, 0, node.getImpurity() - minImpDelta * 0.5)))); + assertFalse(rf.needSplit(node, Optional.of(new NodeSplit(0, 0, node.getImpurity())))); + + TreeNode child = node.toConditional(0, 0).get(0); + child.setImpurity(1000); + assertFalse(rf.needSplit(child, Optional.of(new NodeSplit(0, 0, child.getImpurity() - minImpDelta * 1.01)))); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestTreeTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestTreeTestSuite.java new file mode 100644 index 0000000000000..5610a6c411f00 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/RandomForestTreeTestSuite.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.randomforest; + +import org.apache.ignite.ml.tree.randomforest.data.TreeNodeTest; +import org.apache.ignite.ml.tree.randomforest.data.impurity.GiniFeatureHistogramTest; +import org.apache.ignite.ml.tree.randomforest.data.impurity.MSEHistogramTest; +import org.apache.ignite.ml.tree.randomforest.data.statistics.NormalDistributionStatisticsComputerTest; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in {@link org.apache.ignite.ml.tree} package. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + RandomForestClassifierTrainerTest.class, + RandomForestRegressionTrainerTest.class, + GiniFeatureHistogramTest.class, + MSEHistogramTest.class, + NormalDistributionStatisticsComputerTest.class, + RandomForestTest.class, + RandomForestIntegrationTest.class, + TreeNodeTest.class +}) +public class RandomForestTreeTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/TreeNodeTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/TreeNodeTest.java new file mode 100644 index 0000000000000..0550eca187d31 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/TreeNodeTest.java @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.randomforest.data; + +import java.util.List; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** */ +public class TreeNodeTest { + /** Features 1. */ + private final Vector features1 = VectorUtils.of(0., 1.); + + /** Features 2. 
*/ + private final Vector features2 = VectorUtils.of(1., 0.); + + /** */ + @Test + public void testPredictNextIdCondNodeAtTreeCorner() { + TreeNode node = new TreeNode(5, 1); + + assertEquals(TreeNode.Type.UNKNOWN, node.getType()); + assertEquals(5, node.predictNextNodeKey(features1).getNodeId()); + assertEquals(5, node.predictNextNodeKey(features2).getNodeId()); + } + + /** */ + @Test + public void testPredictNextIdForLeaf() { + TreeNode node = new TreeNode(5, 1); + node.toLeaf(0.5); + + assertEquals(TreeNode.Type.LEAF, node.getType()); + assertEquals(5, node.predictNextNodeKey(features1).getNodeId()); + assertEquals(5, node.predictNextNodeKey(features2).getNodeId()); + } + + /** */ + @Test + public void testPredictNextIdForTree() { + TreeNode root = new TreeNode(1, 1); + root.toConditional(0, 0.1); + + assertEquals(TreeNode.Type.CONDITIONAL, root.getType()); + assertEquals(2, root.predictNextNodeKey(features1).getNodeId()); + assertEquals(3, root.predictNextNodeKey(features2).getNodeId()); + } + + /** */ + @Test + public void testPredictProba() { + TreeNode root = new TreeNode(1, 1); + List leaves = root.toConditional(0, 0.1); + leaves.forEach(leaf -> leaf.toLeaf(leaf.getId().getNodeId() % 2)); + + assertEquals(TreeNode.Type.CONDITIONAL, root.getType()); + assertEquals(0.0, root.predict(features1), 0.001); + assertEquals(1.0, root.predict(features2), 0.001); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/impurity/GiniFeatureHistogramTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/impurity/GiniFeatureHistogramTest.java new file mode 100644 index 0000000000000..6400dc3c9cf5a --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/impurity/GiniFeatureHistogramTest.java @@ -0,0 +1,257 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.randomforest.data.impurity; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.apache.ignite.ml.dataset.feature.BucketMeta; +import org.apache.ignite.ml.dataset.feature.FeatureMeta; +import org.apache.ignite.ml.dataset.impl.bootstrapping.BootstrappedVector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.tree.randomforest.data.NodeSplit; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +/** */ +public class GiniFeatureHistogramTest extends ImpurityHistogramTest { + /** Feature 1 meta. 
*/ + private BucketMeta feature1Meta = new BucketMeta(new FeatureMeta("", 0, true)); + + /** Feature 2 meta. */ + private BucketMeta feature2Meta = new BucketMeta(new FeatureMeta("", 1, false)); + + /** Feature 3 meta. */ + private BucketMeta feature3Meta = new BucketMeta(new FeatureMeta("", 2, true)); + + /** */ + @Before + public void setUp() { + feature2Meta.setMinVal(-5); + feature2Meta.setBucketSize(1); + } + + /** */ + @Test + public void testAddVector() { + Map lblMapping = new HashMap<>(); + lblMapping.put(1.0, 0); + lblMapping.put(2.0, 1); + lblMapping.put(3.0, 2); + + GiniHistogram catFeatureSmpl1 = new GiniHistogram(0, lblMapping, feature1Meta); + GiniHistogram catFeatureSmpl2 = new GiniHistogram(1, lblMapping, feature1Meta); + + GiniHistogram contFeatureSmpl1 = new GiniHistogram(0, lblMapping, feature2Meta); + GiniHistogram contFeatureSmpl2 = new GiniHistogram(1, lblMapping, feature2Meta); + + for (BootstrappedVector vec : dataset) { + catFeatureSmpl1.addElement(vec); + catFeatureSmpl2.addElement(vec); + contFeatureSmpl1.addElement(vec); + contFeatureSmpl2.addElement(vec); + } + + checkBucketIds(catFeatureSmpl1.buckets(), new Integer[] {0, 1}); + checkBucketIds(catFeatureSmpl2.buckets(), new Integer[] {0, 1}); + checkBucketIds(contFeatureSmpl1.buckets(), new Integer[] {1, 4, 6, 7, 8}); + checkBucketIds(contFeatureSmpl2.buckets(), new Integer[] {1, 4, 6, 7, 8}); + + //categorical feature + checkCounters(catFeatureSmpl1.getHistForLabel(1.0), new double[] {2, 1}); //for feature values 0 and 1 + checkBucketIds(catFeatureSmpl1.getHistForLabel(1.0).buckets(), new Integer[] {0, 1}); + checkCounters(catFeatureSmpl1.getHistForLabel(2.0), new double[] {3}); //for feature value 1 + checkBucketIds(catFeatureSmpl1.getHistForLabel(2.0).buckets(), new Integer[] {1}); + checkCounters(catFeatureSmpl1.getHistForLabel(3.0), new double[] {2}); //for feature value 0 + checkBucketIds(catFeatureSmpl1.getHistForLabel(3.0).buckets(), new Integer[] {0}); + + checkCounters(catFeatureSmpl2.getHistForLabel(1.0), new double[] {1, 2}); //for feature values 0 and 1 + checkBucketIds(catFeatureSmpl2.getHistForLabel(1.0).buckets(), new Integer[] {0, 1}); + checkCounters(catFeatureSmpl2.getHistForLabel(2.0), new double[] {3}); //for feature value 1 + checkBucketIds(catFeatureSmpl2.getHistForLabel(2.0).buckets(), new Integer[] {1}); + checkCounters(catFeatureSmpl2.getHistForLabel(3.0), new double[] {0}); //for feature value 0 + checkBucketIds(catFeatureSmpl2.getHistForLabel(3.0).buckets(), new Integer[] {0}); + + //continuous feature + checkCounters(contFeatureSmpl1.getHistForLabel(1.0), new double[] {1, 2}); //for feature values 0 and 1 + checkBucketIds(contFeatureSmpl1.getHistForLabel(1.0).buckets(), new Integer[] {4, 6}); + checkCounters(contFeatureSmpl1.getHistForLabel(2.0), new double[] {1, 2}); //for feature value 1 + checkBucketIds(contFeatureSmpl1.getHistForLabel(2.0).buckets(), new Integer[] {1, 7}); + checkCounters(contFeatureSmpl1.getHistForLabel(3.0), new double[] {2}); //for feature value 0 + checkBucketIds(contFeatureSmpl1.getHistForLabel(3.0).buckets(), new Integer[] {8}); + + checkCounters(contFeatureSmpl2.getHistForLabel(1.0), new double[] {2, 1}); //for feature values 0 and 1 + checkBucketIds(contFeatureSmpl2.getHistForLabel(1.0).buckets(), new Integer[] {4, 6}); + checkCounters(contFeatureSmpl2.getHistForLabel(2.0), new double[] {2, 1}); //for feature value 1 + checkBucketIds(contFeatureSmpl2.getHistForLabel(2.0).buckets(), new Integer[] {1, 7}); + 
checkCounters(contFeatureSmpl2.getHistForLabel(3.0), new double[] {0}); //for feature value 0 + checkBucketIds(contFeatureSmpl2.getHistForLabel(3.0).buckets(), new Integer[] {8}); + } + + /** */ + @Test + public void testSplit() { + Map lblMapping = new HashMap<>(); + lblMapping.put(1.0, 0); + lblMapping.put(2.0, 1); + + GiniHistogram catFeatureSmpl1 = new GiniHistogram(0, lblMapping, feature1Meta); + GiniHistogram contFeatureSmpl1 = new GiniHistogram(0, lblMapping, feature2Meta); + GiniHistogram emptyHist = new GiniHistogram(0, lblMapping, feature3Meta); + GiniHistogram catFeatureSmpl2 = new GiniHistogram(0, lblMapping, feature3Meta); + + feature2Meta.setMinVal(-5); + feature2Meta.setBucketSize(1); + + for (BootstrappedVector vec : toSplitDataset) { + catFeatureSmpl1.addElement(vec); + contFeatureSmpl1.addElement(vec); + catFeatureSmpl2.addElement(vec); + } + + NodeSplit catSplit = catFeatureSmpl1.findBestSplit().get(); + NodeSplit contSplit = contFeatureSmpl1.findBestSplit().get(); + assertEquals(1.0, catSplit.getVal(), 0.01); + assertEquals(-0.5, contSplit.getVal(), 0.01); + assertFalse(emptyHist.findBestSplit().isPresent()); + assertFalse(catFeatureSmpl2.findBestSplit().isPresent()); + } + + /** */ + @Test + public void testOfSums() { + int sampleId = 0; + BucketMeta bucketMeta1 = new BucketMeta(new FeatureMeta("", 0, false)); + bucketMeta1.setMinVal(0.); + bucketMeta1.setBucketSize(0.1); + BucketMeta bucketMeta2 = new BucketMeta(new FeatureMeta("", 1, true)); + + GiniHistogram forAllHist1 = new GiniHistogram(sampleId, lblMapping, bucketMeta1); + GiniHistogram forAllHist2 = new GiniHistogram(sampleId, lblMapping, bucketMeta2); + + List partitions1 = new ArrayList<>(); + List partitions2 = new ArrayList<>(); + int cntOfPartitions = rnd.nextInt(1000); + for (int i = 0; i < cntOfPartitions; i++) { + partitions1.add(new GiniHistogram(sampleId, lblMapping, bucketMeta1)); + partitions2.add(new GiniHistogram(sampleId, lblMapping, bucketMeta2)); + } + + int datasetSize = rnd.nextInt(10000); + for (int i = 0; i < datasetSize; i++) { + BootstrappedVector vec = randomVector(true); + vec.features().set(1, (vec.features().get(1) * 100) % 100); + + forAllHist1.addElement(vec); + forAllHist2.addElement(vec); + int partId = rnd.nextInt(cntOfPartitions); + partitions1.get(partId).addElement(vec); + partitions2.get(partId).addElement(vec); + } + + checkSums(forAllHist1, partitions1); + checkSums(forAllHist2, partitions2); + + GiniHistogram emptyHist1 = new GiniHistogram(sampleId, lblMapping, bucketMeta1); + GiniHistogram emptyHist2 = new GiniHistogram(sampleId, lblMapping, bucketMeta2); + assertTrue(forAllHist1.isEqualTo(forAllHist1.plus(emptyHist1))); + assertTrue(forAllHist2.isEqualTo(forAllHist2.plus(emptyHist2))); + assertTrue(forAllHist1.isEqualTo(emptyHist1.plus(forAllHist1))); + assertTrue(forAllHist2.isEqualTo(emptyHist2.plus(forAllHist2))); + } + + /** */ + @Test + public void testJoin() { + Map lblMapping = new HashMap<>(); + lblMapping.put(1.0, 0); + lblMapping.put(2.0, 1); + lblMapping.put(3.0, 2); + + GiniHistogram catFeatureSmpl1 = new GiniHistogram(0, lblMapping, feature1Meta); + GiniHistogram catFeatureSmpl2 = new GiniHistogram(0, lblMapping, feature1Meta); + + GiniHistogram contFeatureSmpl1 = new GiniHistogram(0, lblMapping, feature2Meta); + GiniHistogram contFeatureSmpl2 = new GiniHistogram(0, lblMapping, feature2Meta); + + for (BootstrappedVector vec : dataset) { + catFeatureSmpl1.addElement(vec); + contFeatureSmpl1.addElement(vec); + } + + for (BootstrappedVector vec : 
toSplitDataset) { + catFeatureSmpl2.addElement(vec); + contFeatureSmpl2.addElement(vec); + } + + GiniHistogram res1 = catFeatureSmpl1.plus(catFeatureSmpl2); + GiniHistogram res2 = contFeatureSmpl1.plus(contFeatureSmpl2); + + checkBucketIds(res1.buckets(), new Integer[] {0, 1, 2}); + checkBucketIds(res2.buckets(), new Integer[] {1, 4, 6, 7, 8}); + + //categorical feature + checkCounters(res1.getHistForLabel(1.0), new double[] {3, 2, 6}); //for feature values 0 and 1 + checkBucketIds(res1.getHistForLabel(1.0).buckets(), new Integer[] {0, 1, 2}); + checkCounters(res1.getHistForLabel(2.0), new double[] {4, 6}); //for feature value 1 + checkBucketIds(res1.getHistForLabel(2.0).buckets(), new Integer[] {0, 1}); + checkCounters(res1.getHistForLabel(3.0), new double[] {2}); //for feature value 0 + checkBucketIds(res1.getHistForLabel(3.0).buckets(), new Integer[] {0}); + + //continuous feature + checkCounters(res2.getHistForLabel(1.0), new double[] {1, 1, 8, 1}); //for feature values 0 and 1 + checkBucketIds(res2.getHistForLabel(1.0).buckets(), new Integer[] {1, 4, 6, 8}); + checkCounters(res2.getHistForLabel(2.0), new double[] {1, 4, 0, 5}); //for feature value 1 + checkBucketIds(res2.getHistForLabel(2.0).buckets(), new Integer[] {1, 4, 6, 7}); + checkCounters(res2.getHistForLabel(3.0), new double[] {2}); //for feature value 0 + checkBucketIds(res2.getHistForLabel(3.0).buckets(), new Integer[] {8}); + } + + /** Dataset. */ + private BootstrappedVector[] dataset = new BootstrappedVector[] { + new BootstrappedVector(VectorUtils.of(1, -1), 1, new int[] {1, 2}), + new BootstrappedVector(VectorUtils.of(1, 2), 2, new int[] {2, 1}), + new BootstrappedVector(VectorUtils.of(0, 3), 3, new int[] {2, 0}), + new BootstrappedVector(VectorUtils.of(0, 1), 1, new int[] {2, 1}), + new BootstrappedVector(VectorUtils.of(1, -4), 2, new int[] {1, 2}), + }; + + /** To split dataset. 
*/ + private BootstrappedVector[] toSplitDataset = new BootstrappedVector[] { + new BootstrappedVector(VectorUtils.of(0, -1, 0, 0), 2, new int[] {2}), + new BootstrappedVector(VectorUtils.of(0, -1, 0, 0), 2, new int[] {1}), + new BootstrappedVector(VectorUtils.of(0, -1, 0, 0), 2, new int[] {1}), + new BootstrappedVector(VectorUtils.of(0, 3, 0, 0), 1, new int[] {1}), + new BootstrappedVector(VectorUtils.of(0, 1, 0, 0), 2, new int[] {0}), + new BootstrappedVector(VectorUtils.of(1, 2, 0, 0), 2, new int[] {1}), + new BootstrappedVector(VectorUtils.of(1, 2, 0, 0), 2, new int[] {1}), + new BootstrappedVector(VectorUtils.of(1, 2, 0, 0), 2, new int[] {1}), + new BootstrappedVector(VectorUtils.of(1, -4, 0, 0), 1, new int[] {1}), + new BootstrappedVector(VectorUtils.of(2, 1, 0, 0), 1, new int[] {1}), + new BootstrappedVector(VectorUtils.of(2, 1, 0, 0), 1, new int[] {1}), + new BootstrappedVector(VectorUtils.of(2, 1, 0, 0), 1, new int[] {1}), + new BootstrappedVector(VectorUtils.of(2, 1, 0, 0), 1, new int[] {1}), + new BootstrappedVector(VectorUtils.of(2, 1, 0, 0), 1, new int[] {1}), + new BootstrappedVector(VectorUtils.of(2, 1, 0, 1), 1, new int[] {1}), + }; +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/impurity/ImpurityHistogramTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/impurity/ImpurityHistogramTest.java new file mode 100644 index 0000000000000..0d5dc456b8f3e --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/impurity/ImpurityHistogramTest.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.randomforest.data.impurity; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.stream.DoubleStream; +import java.util.stream.IntStream; +import org.apache.ignite.ml.dataset.feature.Histogram; +import org.apache.ignite.ml.dataset.feature.ObjectHistogram; +import org.apache.ignite.ml.dataset.impl.bootstrapping.BootstrappedVector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link ImpurityHistogram}. + */ +public class ImpurityHistogramTest { + /** Count of classes. */ + private static final int COUNT_OF_CLASSES = 3; + + /** Lbl mapping. */ + static final Map lblMapping = new HashMap<>(); + + /** Random generator. 
*/ + protected Random rnd = new Random(); + + static { + for (int i = 0; i < COUNT_OF_CLASSES; i++) + lblMapping.put((double)i, i); + } + + /** */ + void checkBucketIds(Set<Integer> bucketIdsSet, Integer[] exp) { + Integer[] bucketIds = new Integer[bucketIdsSet.size()]; + bucketIdsSet.toArray(bucketIds); + assertArrayEquals(exp, bucketIds); + } + + /** */ + void checkCounters(ObjectHistogram<BootstrappedVector> hist, double[] exp) { + double[] counters = hist.buckets().stream().mapToDouble(x -> hist.getValue(x).get()).toArray(); + assertArrayEquals(exp, counters, 0.01); + } + + /** + * Generates random vector. + * + * @param isClassification Is classification. + */ + BootstrappedVector randomVector(boolean isClassification) { + double[] features = DoubleStream.generate(() -> rnd.nextDouble()).limit(2).toArray(); + int[] counters = IntStream.generate(() -> rnd.nextInt(10)).limit(1).toArray(); + double lbl = isClassification ? Math.abs(rnd.nextInt() % COUNT_OF_CLASSES) : rnd.nextDouble(); + return new BootstrappedVector(VectorUtils.of(features), lbl, counters); + } + + /** + * Check sums. + * + * @param exp Expected value. + * @param partitions Partitions. + */ + <T extends Histogram<BootstrappedVector, T>> void checkSums(T exp, List<T> partitions) { + T leftSum = partitions.stream().reduce((x, y) -> x.plus(y)).get(); + T rightSum = partitions.stream().reduce((x, y) -> y.plus(x)).get(); + assertTrue(exp.isEqualTo(leftSum)); + assertTrue(exp.isEqualTo(rightSum)); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/impurity/MSEHistogramTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/impurity/MSEHistogramTest.java new file mode 100644 index 0000000000000..2d4e1d6708ce2 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/impurity/MSEHistogramTest.java @@ -0,0 +1,139 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.randomforest.data.impurity; + +import java.util.ArrayList; +import java.util.List; +import org.apache.ignite.ml.dataset.feature.BucketMeta; +import org.apache.ignite.ml.dataset.feature.FeatureMeta; +import org.apache.ignite.ml.dataset.impl.bootstrapping.BootstrappedVector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertTrue; + +/** */ +public class MSEHistogramTest extends ImpurityHistogramTest { + /** Feature 1 meta. */ + private BucketMeta feature1Meta = new BucketMeta(new FeatureMeta("", 0, true)); + + /** Feature 2 meta.
*/ + private BucketMeta feature2Meta = new BucketMeta(new FeatureMeta("", 1, false)); + + /** */ + @Before + public void setUp() throws Exception { + feature2Meta.setMinVal(-5); + feature2Meta.setBucketSize(1); + } + + /** */ + @Test + public void testAdd() { + MSEHistogram catHist1 = new MSEHistogram(0, feature1Meta); + MSEHistogram contHist1 = new MSEHistogram(0, feature2Meta); + + MSEHistogram catHist2 = new MSEHistogram(1, feature1Meta); + MSEHistogram contHist2 = new MSEHistogram(1, feature2Meta); + + for (BootstrappedVector vec : dataset) { + catHist1.addElement(vec); + catHist2.addElement(vec); + contHist1.addElement(vec); + contHist2.addElement(vec); + } + + checkBucketIds(catHist1.buckets(), new Integer[] {0, 1}); + checkBucketIds(catHist2.buckets(), new Integer[] {0, 1}); + checkBucketIds(contHist1.buckets(), new Integer[] {1, 4, 6, 7, 8}); + checkBucketIds(contHist2.buckets(), new Integer[] {1, 4, 6, 7, 8}); + + //counters + checkCounters(catHist1.getCounters(), new double[] {4, 4}); + checkCounters(catHist2.getCounters(), new double[] {1, 5}); + checkCounters(contHist1.getCounters(), new double[] {1, 1, 2, 2, 2}); + checkCounters(contHist2.getCounters(), new double[] {2, 2, 1, 1, 0}); + + //ys + checkCounters(catHist1.getSumOfLabels(), new double[] {2 * 4 + 2 * 3, 5 + 1 + 2 * 2}); + checkCounters(catHist2.getSumOfLabels(), new double[] {4, 2 * 5 + 2 * 1 + 2}); + checkCounters(contHist1.getSumOfLabels(), new double[] {5 * 1, 1 * 1, 4 * 2, 2 * 2, 3 * 2}); + checkCounters(contHist2.getSumOfLabels(), new double[]{ 2 * 5, 2 * 1, 1 * 4, 2 * 1, 0 * 3 }); + + //y2s + checkCounters(catHist1.getSumOfSquaredLabels(), new double[] {2 * 4 * 4 + 2 * 3 * 3, 5 * 5 + 1 + 2 * 2 * 2}); + checkCounters(catHist2.getSumOfSquaredLabels(), new double[] {4 * 4, 2 * 5 * 5 + 2 * 1 * 1 + 2 * 2}); + checkCounters(contHist1.getSumOfSquaredLabels(), new double[] {1 * 5 * 5, 1 * 1 * 1, 2 * 4 * 4, 2 * 2 * 2, 2 * 3 * 3}); + checkCounters(contHist2.getSumOfSquaredLabels(), new double[]{ 2 * 5 * 5, 2 * 1 * 1, 1 * 4 * 4, 1 * 2 * 2, 0 * 3 * 3 }); + } + + /** */ + @Test + public void testOfSums() { + int sampleId = 0; + BucketMeta bucketMeta1 = new BucketMeta(new FeatureMeta("", 0, false)); + bucketMeta1.setMinVal(0.); + bucketMeta1.setBucketSize(0.1); + BucketMeta bucketMeta2 = new BucketMeta(new FeatureMeta("", 1, true)); + + MSEHistogram forAllHist1 = new MSEHistogram(sampleId, bucketMeta1); + MSEHistogram forAllHist2 = new MSEHistogram(sampleId, bucketMeta2); + + List partitions1 = new ArrayList<>(); + List partitions2 = new ArrayList<>(); + + int cntOfPartitions = rnd.nextInt(100) + 1; + + for (int i = 0; i < cntOfPartitions; i++) { + partitions1.add(new MSEHistogram(sampleId, bucketMeta1)); + partitions2.add(new MSEHistogram(sampleId, bucketMeta2)); + } + + int datasetSize = rnd.nextInt(1000) + 1; + for (int i = 0; i < datasetSize; i++) { + BootstrappedVector vec = randomVector(false); + vec.features().set(1, (vec.features().get(1) * 100) % 100); + + forAllHist1.addElement(vec); + forAllHist2.addElement(vec); + int partId = rnd.nextInt(cntOfPartitions); + partitions1.get(partId).addElement(vec); + partitions2.get(partId).addElement(vec); + } + + checkSums(forAllHist1, partitions1); + checkSums(forAllHist2, partitions2); + + MSEHistogram emptyHist1 = new MSEHistogram(sampleId, bucketMeta1); + MSEHistogram emptyHist2 = new MSEHistogram(sampleId, bucketMeta2); + assertTrue(forAllHist1.isEqualTo(forAllHist1.plus(emptyHist1))); + assertTrue(forAllHist2.isEqualTo(forAllHist2.plus(emptyHist2))); + 
assertTrue(forAllHist1.isEqualTo(emptyHist1.plus(forAllHist1))); + assertTrue(forAllHist2.isEqualTo(emptyHist2.plus(forAllHist2))); + } + + /** Dataset. */ + private BootstrappedVector[] dataset = new BootstrappedVector[] { + new BootstrappedVector(VectorUtils.of(1, -4), 5, new int[] {1, 2}), + new BootstrappedVector(VectorUtils.of(1, -1), 1, new int[] {1, 2}), + new BootstrappedVector(VectorUtils.of(0, 1), 4, new int[] {2, 1}), + new BootstrappedVector(VectorUtils.of(1, 2), 2, new int[] {2, 1}), + new BootstrappedVector(VectorUtils.of(0, 3), 3, new int[] {2, 0}), + }; +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/statistics/NormalDistributionStatisticsComputerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/statistics/NormalDistributionStatisticsComputerTest.java new file mode 100644 index 0000000000000..c65a9ac0dc670 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/statistics/NormalDistributionStatisticsComputerTest.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.tree.randomforest.data.statistics; + +import java.util.Arrays; +import java.util.List; +import org.apache.ignite.ml.dataset.feature.FeatureMeta; +import org.apache.ignite.ml.dataset.impl.bootstrapping.BootstrappedDatasetPartition; +import org.apache.ignite.ml.dataset.impl.bootstrapping.BootstrappedVector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** */ +public class NormalDistributionStatisticsComputerTest { + /** Features Meta. */ + private final List meta = Arrays.asList( + new FeatureMeta("", 0, false), + new FeatureMeta("", 1, true), + new FeatureMeta("", 2, false), + new FeatureMeta("", 3, true), + new FeatureMeta("", 4, false), + new FeatureMeta("", 5, true), + new FeatureMeta("", 6, false) + ); + + /** Partition. 
*/ + private BootstrappedDatasetPartition partition = new BootstrappedDatasetPartition(new BootstrappedVector[] { + new BootstrappedVector(VectorUtils.of(0, 1, 2, 1, 4, 2, 6), 0., null), + new BootstrappedVector(VectorUtils.of(1, 0, 3, 2, 5, 3, 7), 0., null), + new BootstrappedVector(VectorUtils.of(2, 1, 4, 1, 6, 2, 8), 0., null), + new BootstrappedVector(VectorUtils.of(3, 0, 5, 2, 7, 3, 9), 0., null), + new BootstrappedVector(VectorUtils.of(4, 1, 6, 1, 8, 2, 10), 0., null), + new BootstrappedVector(VectorUtils.of(5, 0, 7, 2, 9, 3, 11), 0., null), + new BootstrappedVector(VectorUtils.of(6, 1, 8, 1, 10, 2, 12), 0., null), + new BootstrappedVector(VectorUtils.of(7, 0, 9, 2, 11, 3, 13), 0., null), + new BootstrappedVector(VectorUtils.of(8, 1, 10, 1, 12, 2, 14), 0., null), + new BootstrappedVector(VectorUtils.of(9, 0, 11, 2, 13, 3, 15), 0., null), + }); + + /** Normal Distribution Statistics Computer. */ + private NormalDistributionStatisticsComputer computer = new NormalDistributionStatisticsComputer(); + + /** */ + @Test + public void computeStatsOnPartitionTest() { + List res = computer.computeStatsOnPartition(partition, meta); + NormalDistributionStatistics[] exp = new NormalDistributionStatistics[] { + new NormalDistributionStatistics(0, 9, 285, 45, 10), + new NormalDistributionStatistics(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0, 10), + new NormalDistributionStatistics(2, 11, 505, 65, 10), + new NormalDistributionStatistics(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0, 10), + new NormalDistributionStatistics(4, 13, 805, 85, 10), + new NormalDistributionStatistics(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0, 10), + new NormalDistributionStatistics(6, 15, 1185, 105, 10), + }; + + assertEquals(exp.length, res.size()); + for (int i = 0; i < exp.length; i++) { + NormalDistributionStatistics expStat = exp[i]; + NormalDistributionStatistics resStat = res.get(i); + assertEquals(expStat.mean(), resStat.mean(), 0.01); + assertEquals(expStat.variance(), resStat.variance(), 0.01); + assertEquals(expStat.std(), resStat.std(), 0.01); + assertEquals(expStat.min(), resStat.min(), 0.01); + assertEquals(expStat.max(), resStat.max(), 0.01); + } + } + + /** */ + @Test + public void reduceStatsTest() { + List left = Arrays.asList( + new NormalDistributionStatistics(0, 9, 285, 45, 10), + new NormalDistributionStatistics(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0, 10), + new NormalDistributionStatistics(2, 11, 505, 65, 10), + new NormalDistributionStatistics(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0, 10), + new NormalDistributionStatistics(4, 13, 805, 85, 10), + new NormalDistributionStatistics(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0, 10), + new NormalDistributionStatistics(6, 15, 1185, 105, 10) + ); + + List right = Arrays.asList( + new NormalDistributionStatistics(6, 15, 1185, 105, 10), + new NormalDistributionStatistics(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0, 10), + new NormalDistributionStatistics(4, 13, 805, 85, 10), + new NormalDistributionStatistics(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0, 10), + new NormalDistributionStatistics(2, 11, 505, 65, 10), + new NormalDistributionStatistics(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0, 10), + new NormalDistributionStatistics(0, 9, 285, 45, 10) + ); + + List res = computer.reduceStats(left, right, meta); + NormalDistributionStatistics[] exp = new NormalDistributionStatistics[] { + new NormalDistributionStatistics(0, 15, 1470, 150, 
20), + new NormalDistributionStatistics(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0, 10), + new NormalDistributionStatistics(2, 13, 1310, 150, 20), + new NormalDistributionStatistics(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0, 10), + new NormalDistributionStatistics(2, 13, 1310, 150, 20), + new NormalDistributionStatistics(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0, 10), + new NormalDistributionStatistics(0, 15, 1470, 150, 20) + }; + + assertEquals(exp.length, res.size()); + for (int i = 0; i < exp.length; i++) { + NormalDistributionStatistics expStat = exp[i]; + NormalDistributionStatistics resStat = res.get(i); + assertEquals(expStat.mean(), resStat.mean(), 0.01); + assertEquals(expStat.variance(), resStat.variance(), 0.01); + assertEquals(expStat.std(), resStat.std(), 0.01); + assertEquals(expStat.min(), resStat.min(), 0.01); + assertEquals(expStat.max(), resStat.max(), 0.01); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/LRUCacheTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/LRUCacheTest.java new file mode 100644 index 0000000000000..2b8d01d0f50d2 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/LRUCacheTest.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.util; + +import java.util.ArrayList; +import java.util.List; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link LRUCache}. 
*/ +public class LRUCacheTest { + /** */ + @Test + public void testSize() { + LRUCache<Integer, Integer> cache = new LRUCache<>(10); + for (int i = 0; i < 100; i++) + cache.put(i, i); + + assertEquals(10, cache.size()); + } + + /** */ + @Test + public void testValues() { + LRUCache<Integer, Integer> cache = new LRUCache<>(10); + for (int i = 0; i < 100; i++) { + cache.get(0); + cache.put(i, i); + } + + assertTrue(cache.containsKey(0)); + + for (int i = 91; i < 100; i++) + assertTrue(cache.containsKey(i)); + } + + /** */ + @Test + public void testExpirationListener() { + List<Integer> expired = new ArrayList<>(); + + LRUCache<Integer, Integer> cache = new LRUCache<>(10, expired::add); + for (int i = 0; i < 100; i++) + cache.put(i, i); + + for (int i = 0; i < 90; i++) + assertEquals(i, expired.get(i).longValue()); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/UtilTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/UtilTestSuite.java new file mode 100644 index 0000000000000..08dbf132fa6cd --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/UtilTestSuite.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.util; + +import org.apache.ignite.ml.util.genetic.GeneticAlgorithmTest; +import org.apache.ignite.ml.util.genetic.PopulationTest; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in {@link org.apache.ignite.ml.util} package. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + LRUCacheTest.class, + GeneticAlgorithmTest.class, + PopulationTest.class +}) +public class UtilTestSuite { +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/DataStreamGeneratorFillCacheTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/DataStreamGeneratorFillCacheTest.java new file mode 100644 index 0000000000000..96edca168c889 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/DataStreamGeneratorFillCacheTest.java @@ -0,0 +1,123 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.util.generators; + +import java.util.Arrays; +import java.util.UUID; +import java.util.stream.DoubleStream; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.Ignition; +import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorizer; +import org.apache.ignite.ml.dataset.impl.cache.CacheBasedDataset; +import org.apache.ignite.ml.dataset.impl.cache.CacheBasedDatasetBuilder; +import org.apache.ignite.ml.dataset.primitive.builder.context.EmptyContextBuilder; +import org.apache.ignite.ml.dataset.primitive.builder.data.SimpleDatasetDataBuilder; +import org.apache.ignite.ml.dataset.primitive.context.EmptyContext; +import org.apache.ignite.ml.dataset.primitive.data.SimpleDatasetData; +import org.apache.ignite.ml.environment.LearningEnvironment; +import org.apache.ignite.ml.environment.LearningEnvironmentBuilder; +import org.apache.ignite.ml.math.functions.IgniteFunction; +import org.apache.ignite.ml.structures.LabeledVector; +import org.apache.ignite.ml.util.generators.primitives.scalar.GaussRandomProducer; +import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; +import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.junit.Test; + +/** + * Test for {@link DataStreamGenerator} cache filling. 
+ */ +public class DataStreamGeneratorFillCacheTest extends GridCommonAbstractTest { + /** */ + @Test + public void testCacheFilling() { + IgniteConfiguration configuration = new IgniteConfiguration() + .setDiscoverySpi(new TcpDiscoverySpi() + .setIpFinder(new TcpDiscoveryVmIpFinder() + .setAddresses(Arrays.asList("127.0.0.1:47500..47509")))); + + String cacheName = "TEST_CACHE"; + CacheConfiguration> cacheConfiguration = + new CacheConfiguration>(cacheName) + .setAffinity(new RendezvousAffinityFunction(false, 10)); + int datasetSize = 5000; + + try (Ignite ignite = Ignition.start(configuration)) { + IgniteCache> cache = ignite.getOrCreateCache(cacheConfiguration); + DataStreamGenerator generator = new GaussRandomProducer(0).vectorize(1).asDataStream(); + generator.fillCacheWithVecUUIDAsKey(datasetSize, cache); + + LabeledDummyVectorizer vectorizer = new LabeledDummyVectorizer<>(); + CacheBasedDatasetBuilder> datasetBuilder = new CacheBasedDatasetBuilder<>(ignite, cache); + + IgniteFunction map = data -> + new StatPair(DoubleStream.of(data.getFeatures()).sum(), data.getRows()); + LearningEnvironment env = LearningEnvironmentBuilder.defaultBuilder().buildForTrainer(); + env.deployingContext().initByClientObject(map); + + try (CacheBasedDataset, EmptyContext, SimpleDatasetData> dataset = + datasetBuilder.build( + LearningEnvironmentBuilder.defaultBuilder(), + new EmptyContextBuilder<>(), + new SimpleDatasetDataBuilder<>(vectorizer), + env + )) { + + StatPair res = dataset.compute(map, StatPair::sum); + assertEquals(datasetSize, res.cntOfRows); + assertEquals(0.0, res.elementsSum / res.cntOfRows, 1e-2); + } + + ignite.destroyCache(cacheName); + } + } + + /** */ + static class StatPair { + /** */ + private double elementsSum; + + /** */ + private int cntOfRows; + + /** */ + public StatPair(double elementsSum, int cntOfRows) { + this.elementsSum = elementsSum; + this.cntOfRows = cntOfRows; + } + + /** */ + static StatPair sum(StatPair left, StatPair right) { + if (left == null && right == null) + return new StatPair(0, 0); + else if (left == null) + return right; + else if (right == null) + return left; + else + return new StatPair( + right.elementsSum + left.elementsSum, + right.cntOfRows + left.cntOfRows + ); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/DataStreamGeneratorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/DataStreamGeneratorTest.java new file mode 100644 index 0000000000000..b3f74e022a5be --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/DataStreamGeneratorTest.java @@ -0,0 +1,208 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.util.generators; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.Stream; +import org.apache.ignite.ml.dataset.Dataset; +import org.apache.ignite.ml.dataset.DatasetBuilder; +import org.apache.ignite.ml.dataset.UpstreamEntry; +import org.apache.ignite.ml.dataset.UpstreamTransformer; +import org.apache.ignite.ml.dataset.UpstreamTransformerBuilder; +import org.apache.ignite.ml.dataset.primitive.builder.context.EmptyContextBuilder; +import org.apache.ignite.ml.dataset.primitive.context.EmptyContext; +import org.apache.ignite.ml.environment.LearningEnvironment; +import org.apache.ignite.ml.environment.LearningEnvironmentBuilder; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.preprocessing.Preprocessor; +import org.apache.ignite.ml.structures.LabeledVector; +import org.apache.ignite.ml.structures.LabeledVectorSet; +import org.apache.ignite.ml.structures.partition.LabeledDatasetPartitionDataBuilderOnHeap; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link DataStreamGenerator}. + */ +public class DataStreamGeneratorTest { + /** */ + @Test + public void testUnlabeled() { + DataStreamGenerator generator = new DataStreamGenerator() { + @Override public Stream> labeled() { + return Stream.generate(() -> new LabeledVector<>(VectorUtils.of(1., 2.), 100.)); + } + }; + + generator.unlabeled().limit(100).forEach(v -> assertArrayEquals(new double[] {1., 2.}, v.asArray(), 1e-7)); + } + + /** */ + @Test + public void testLabeled() { + DataStreamGenerator generator = new DataStreamGenerator() { + @Override public Stream> labeled() { + return Stream.generate(() -> new LabeledVector<>(VectorUtils.of(1., 2.), 100.)); + } + }; + + generator.labeled(v -> -100.).limit(100).forEach(v -> { + assertArrayEquals(new double[] {1., 2.}, v.features().asArray(), 1e-7); + assertEquals(-100., v.label(), 1e-7); + }); + } + + /** */ + @Test + public void testMapVectors() { + DataStreamGenerator generator = new DataStreamGenerator() { + @Override public Stream> labeled() { + return Stream.generate(() -> new LabeledVector<>(VectorUtils.of(1., 2.), 100.)); + } + }; + + generator.mapVectors(v -> VectorUtils.of(2., 1.)).labeled().limit(100).forEach(v -> { + assertArrayEquals(new double[] {2., 1.}, v.features().asArray(), 1e-7); + assertEquals(100., v.label(), 1e-7); + }); + } + + /** */ + @Test + public void testBlur() { + DataStreamGenerator generator = new DataStreamGenerator() { + @Override public Stream> labeled() { + return Stream.generate(() -> new LabeledVector<>(VectorUtils.of(1., 2.), 100.)); + } + }; + + generator.blur(() -> 1.).labeled().limit(100).forEach(v -> { + assertArrayEquals(new double[] {2., 3.}, v.features().asArray(), 1e-7); + assertEquals(100., v.label(), 1e-7); + }); + } + + /** */ + @Test + public void testAsMap() { + DataStreamGenerator generator = new DataStreamGenerator() { + @Override public Stream> labeled() { + return Stream.generate(() -> new LabeledVector<>(VectorUtils.of(1., 2.), 100.)); + } + }; + + int N = 100; + Map dataset = generator.asMap(N); + assertEquals(N, 
dataset.size()); + dataset.forEach(((vector, label) -> { + assertArrayEquals(new double[] {1., 2.}, vector.asArray(), 1e-7); + assertEquals(100., label, 1e-7); + })); + } + + /** */ + @Test + public void testAsDatasetBuilder() throws Exception { + AtomicInteger cntr = new AtomicInteger(); + + DataStreamGenerator generator = new DataStreamGenerator() { + @Override public Stream> labeled() { + return Stream.generate(() -> { + int val = cntr.getAndIncrement(); + return new LabeledVector<>(VectorUtils.of(val), (double)val % 2); + }); + } + }; + + int N = 100; + cntr.set(0); + DatasetBuilder b1 = generator.asDatasetBuilder(N, 2); + cntr.set(0); + DatasetBuilder b2 = generator.asDatasetBuilder(N, (v, l) -> l == 0, 2); + cntr.set(0); + DatasetBuilder b3 = generator.asDatasetBuilder(N, (v, l) -> l == 1, 2, + new UpstreamTransformerBuilder() { + @Override public UpstreamTransformer build(LearningEnvironment env) { + return new UpstreamTransformerForTest(); + } + }); + + checkDataset(N, b1, v -> (Double)v.label() == 0 || (Double)v.label() == 1); + checkDataset(N / 2, b2, v -> (Double)v.label() == 0); + checkDataset(N / 2, b3, v -> (Double)v.label() < 0); + } + + /** */ + private void checkDataset(int sampleSize, DatasetBuilder datasetBuilder, + Predicate lbCheck) throws Exception { + + try (Dataset> dataset = buildDataset(datasetBuilder)) { + List res = dataset.compute(this::map, this::reduce); + assertEquals(sampleSize, res.size()); + + res.forEach(v -> assertTrue(lbCheck.test(v))); + } + } + + /** */ + private Dataset> buildDataset( + DatasetBuilder b1) { + return b1.build(LearningEnvironmentBuilder.defaultBuilder(), + new EmptyContextBuilder<>(), + new LabeledDatasetPartitionDataBuilderOnHeap<>((Preprocessor)LabeledVector::new), + LearningEnvironmentBuilder.defaultBuilder().buildForTrainer() + ); + } + + /** */ + private List map(LabeledVectorSet d) { + return IntStream.range(0, d.rowSize()).mapToObj(d::getRow).collect(Collectors.toList()); + } + + /** */ + private List reduce(List l, List r) { + if (l == null) + return r == null ? Collections.emptyList() : r; + else { + List res = new ArrayList<>(); + res.addAll(l); + res.addAll(r); + return res; + } + } + + /** */ + private static class UpstreamTransformerForTest implements UpstreamTransformer { + /** {@inheritDoc} */ + @Override public Stream transform( + Stream upstream) { + return upstream.map(entry -> new UpstreamEntry<>(entry.getKey(), -((double)entry.getValue()))); + } + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/DataStreamGeneratorTestSuite.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/DataStreamGeneratorTestSuite.java new file mode 100644 index 0000000000000..b9d47fe395480 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/DataStreamGeneratorTestSuite.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.util.generators; + +import org.apache.ignite.ml.util.generators.primitives.scalar.DiscreteRandomProducerTest; +import org.apache.ignite.ml.util.generators.primitives.scalar.GaussRandomProducerTest; +import org.apache.ignite.ml.util.generators.primitives.scalar.RandomProducerTest; +import org.apache.ignite.ml.util.generators.primitives.scalar.UniformRandomProducerTest; +import org.apache.ignite.ml.util.generators.primitives.vector.ParametricVectorGeneratorTest; +import org.apache.ignite.ml.util.generators.primitives.vector.VectorGeneratorPrimitivesTest; +import org.apache.ignite.ml.util.generators.primitives.vector.VectorGeneratorTest; +import org.apache.ignite.ml.util.generators.primitives.vector.VectorGeneratorsFamilyTest; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** + * Test suite for all tests located in {@link org.apache.ignite.ml.util.generators} package. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + DiscreteRandomProducerTest.class, + GaussRandomProducerTest.class, + RandomProducerTest.class, + UniformRandomProducerTest.class, + ParametricVectorGeneratorTest.class, + VectorGeneratorPrimitivesTest.class, + VectorGeneratorsFamilyTest.class, + VectorGeneratorTest.class, + DataStreamGeneratorTest.class, + DataStreamGeneratorFillCacheTest.class +}) +public class DataStreamGeneratorTestSuite { +} + + diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/DiscreteRandomProducerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/DiscreteRandomProducerTest.java new file mode 100644 index 0000000000000..83178ac230fbf --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/DiscreteRandomProducerTest.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.util.generators.primitives.scalar; + +import java.util.HashMap; +import java.util.Map; +import java.util.stream.IntStream; +import java.util.stream.Stream; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link DiscreteRandomProducer}. 
*/ +public class DiscreteRandomProducerTest { + /** */ + @Test + public void testGet() { + double[] probs = new double[] {0.1, 0.2, 0.3, 0.4}; + DiscreteRandomProducer producer = new DiscreteRandomProducer(0L, probs); + + Map<Integer, Double> counters = new HashMap<>(); + IntStream.range(0, probs.length).forEach(i -> counters.put(i, 0.0)); + + final int N = 500000; + Stream.generate(producer::getInt).limit(N).forEach(i -> counters.put(i, counters.get(i) + 1)); + IntStream.range(0, probs.length).forEach(i -> counters.put(i, counters.get(i) / N)); + + for (int i = 0; i < probs.length; i++) + assertEquals(probs[i], counters.get(i), 0.01); + + assertEquals(probs.length, producer.size()); + } + + /** */ + @Test + public void testSeedConsidering() { + DiscreteRandomProducer producer1 = new DiscreteRandomProducer(0L, 0.1, 0.2, 0.3, 0.4); + DiscreteRandomProducer producer2 = new DiscreteRandomProducer(0L, 0.1, 0.2, 0.3, 0.4); + + assertEquals(producer1.get(), producer2.get(), 0.0001); + } + + /** */ + @Test + public void testUniformGeneration() { + int N = 10; + DiscreteRandomProducer producer = DiscreteRandomProducer.uniform(N); + + Map<Integer, Double> counters = new HashMap<>(); + IntStream.range(0, N).forEach(i -> counters.put(i, 0.0)); + + final int sampleSize = 500000; + Stream.generate(producer::getInt).limit(sampleSize).forEach(i -> counters.put(i, counters.get(i) + 1)); + IntStream.range(0, N).forEach(i -> counters.put(i, counters.get(i) / sampleSize)); + + for (int i = 0; i < N; i++) + assertEquals(1.0 / N, counters.get(i), 0.01); + } + + /** */ + @Test + public void testDistributionGeneration() { + double[] probs = DiscreteRandomProducer.randomDistribution(5, 0L); + assertArrayEquals(new double[] {0.23, 0.27, 0.079, 0.19, 0.20}, probs, 0.01); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testInvalidDistribution1() { + new DiscreteRandomProducer(0L, 0.1, 0.2, 0.3, 0.0); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testInvalidDistribution2() { + new DiscreteRandomProducer(0L, 0.1, 0.2, 0.3, 1.0); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testInvalidDistribution3() { + new DiscreteRandomProducer(0L, 0.1, 0.2, 0.3, 1.0, -0.6); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/GaussRandomProducerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/GaussRandomProducerTest.java new file mode 100644 index 0000000000000..845c28409f0cc --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/GaussRandomProducerTest.java @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.util.generators.primitives.scalar; + +import java.util.Random; +import java.util.stream.IntStream; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link GaussRandomProducer}. + */ +public class GaussRandomProducerTest { + /** */ + @Test + public void testGet() { + Random random = new Random(0L); + final double mean = random.nextInt(5) - 2.5; + final double variance = random.nextInt(5); + GaussRandomProducer producer = new GaussRandomProducer(mean, variance, 1L); + + final int N = 50000; + double meanStat = IntStream.range(0, N).mapToDouble(i -> producer.get()).sum() / N; + double varianceStat = IntStream.range(0, N).mapToDouble(i -> Math.pow(producer.get() - mean, 2)).sum() / N; + + assertEquals(mean, meanStat, 0.01); + assertEquals(variance, varianceStat, 0.1); + } + + /** */ + @Test + public void testSeedConsidering() { + GaussRandomProducer producer1 = new GaussRandomProducer(0L); + GaussRandomProducer producer2 = new GaussRandomProducer(0L); + + assertEquals(producer1.get(), producer2.get(), 0.0001); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testIllegalVariance1() { + new GaussRandomProducer(0, 0.); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testIllegalVariance2() { + new GaussRandomProducer(0, -1.); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/RandomProducerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/RandomProducerTest.java new file mode 100644 index 0000000000000..34e44b32ea38b --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/RandomProducerTest.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.util.generators.primitives.scalar; + +import org.apache.ignite.ml.math.functions.IgniteFunction; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link RandomProducer}. 
+ */ +public class RandomProducerTest { + /** */ + @Test + public void testVectorize() { + RandomProducer p = () -> 1.0; + Vector vec = p.vectorize(3).get(); + + assertEquals(3, vec.size()); + assertArrayEquals(new double[] {1., 1., 1.}, vec.asArray(), 1e-7); + } + + /** */ + @Test + public void testVectorize2() { + Vector vec = RandomProducer.vectorize( + () -> 1.0, + () -> 2.0, + () -> 3.0 + ).get(); + + assertEquals(3, vec.size()); + assertArrayEquals(new double[] {1., 2., 3.}, vec.asArray(), 1e-7); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testVectorizeFail() { + RandomProducer.vectorize(); + } + + /** */ + @Test + public void testNoizify1() { + IgniteFunction<Double, Double> f = v -> 2 * v; + RandomProducer p = () -> 1.0; + + IgniteFunction<Double, Double> res = p.noizify(f); + + for (int i = 0; i < 10; i++) + assertEquals(2 * i + 1.0, res.apply((double)i), 1e-7); + } + + /** */ + @Test + public void testNoizify2() { + RandomProducer p = () -> 1.0; + assertArrayEquals(new double[] {1., 2.}, p.noizify(VectorUtils.of(0., 1.)).asArray(), 1e-7); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/UniformRandomProducerTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/UniformRandomProducerTest.java new file mode 100644 index 0000000000000..24408b3c52c6a --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/scalar/UniformRandomProducerTest.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.util.generators.primitives.scalar; + +import java.util.Random; +import java.util.stream.IntStream; +import java.util.stream.Stream; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link UniformRandomProducer}.
+ */ +public class UniformRandomProducerTest { + /** */ + @Test + public void testGet() { + Random random = new Random(0L); + double[] bounds = Stream.of(random.nextInt(10) - 5, random.nextInt(10) - 5).sorted().mapToDouble(x -> x) + .toArray(); + + double min = Math.min(bounds[0], bounds[1]); + double max = Math.max(bounds[0], bounds[1]); + + double mean = (min + max) / 2; + double variance = Math.pow(min - max, 2) / 12; + UniformRandomProducer producer = new UniformRandomProducer(min, max, 0L); + + final int N = 500000; + double meanStat = IntStream.range(0, N).mapToDouble(i -> producer.get()).sum() / N; + double varianceStat = IntStream.range(0, N).mapToDouble(i -> Math.pow(producer.get() - mean, 2)).sum() / N; + + assertEquals(mean, meanStat, 0.01); + assertEquals(variance, varianceStat, 0.1); + } + + /** */ + @Test + public void testSeedConsidering() { + UniformRandomProducer producer1 = new UniformRandomProducer(0, 1, 0L); + UniformRandomProducer producer2 = new UniformRandomProducer(0, 1, 0L); + + assertEquals(producer1.get(), producer2.get(), 0.0001); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testFail() { + new UniformRandomProducer(1, 0, 0L); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/ParametricVectorGeneratorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/ParametricVectorGeneratorTest.java new file mode 100644 index 0000000000000..70ae237a93684 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/ParametricVectorGeneratorTest.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.util.generators.primitives.vector; + +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +/** + * Tests for {@link ParametricVectorGenerator}. + */ +public class ParametricVectorGeneratorTest { + /** */ + @Test + public void testGet() { + Vector vec = new ParametricVectorGenerator( + () -> 2., + t -> t, + t -> 2 * t, + t -> 3 * t, + t -> 100. 
+ ).get(); + + assertEquals(4, vec.size()); + assertArrayEquals(new double[] {2., 4., 6., 100.}, vec.asArray(), 1e-7); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testIllegalArguments() { + new ParametricVectorGenerator(() -> 2.).get(); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/VectorGeneratorPrimitivesTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/VectorGeneratorPrimitivesTest.java new file mode 100644 index 0000000000000..85dd6df6945be --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/VectorGeneratorPrimitivesTest.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.util.generators.primitives.vector; + +import java.util.stream.IntStream; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link VectorGeneratorPrimitives}. + */ +public class VectorGeneratorPrimitivesTest { + /** */ + @Test + public void testConstant() { + Vector vec = VectorUtils.of(1.0, 0.0); + assertArrayEquals(vec.copy().asArray(), VectorGeneratorPrimitives.constant(vec).get().asArray(), 1e-7); + } + + /** */ + @Test + public void testZero() { + assertArrayEquals(new double[] {0., 0.}, VectorGeneratorPrimitives.zero(2).get().asArray(), 1e-7); + } + + /** */ + @Test + public void testRing() { + VectorGeneratorPrimitives.ring(1., 0, 2 * Math.PI) + .asDataStream().unlabeled().limit(1000) + .forEach(v -> assertEquals(v.getLengthSquared(), 1., 1e-7)); + + VectorGeneratorPrimitives.ring(1., 0, Math.PI / 2) + .asDataStream().unlabeled().limit(1000) + .forEach(v -> { + assertTrue(v.get(0) >= 0.); + assertTrue(v.get(1) >= 0.); + }); + } + + /** */ + @Test + public void testCircle() { + VectorGeneratorPrimitives.circle(1.) 
+ .asDataStream().unlabeled().limit(1000) + .forEach(v -> assertTrue(Math.sqrt(v.getLengthSquared()) <= 1.)); + } + + /** */ + @Test + public void testParallelogram() { + VectorGeneratorPrimitives.parallelogram(VectorUtils.of(2., 100.)) + .asDataStream().unlabeled().limit(1000) + .forEach(v -> { + assertTrue(v.get(0) <= 2.); + assertTrue(v.get(0) >= -2.); + assertTrue(v.get(1) <= 100.); + assertTrue(v.get(1) >= -100.); + }); + } + + /** */ + @Test + public void testGauss() { + VectorGenerator gen = VectorGeneratorPrimitives.gauss(VectorUtils.of(2., 100.), VectorUtils.of(20., 1.), 10L); + + final double[] mean = new double[] {2., 100.}; + final double[] variance = new double[] {20., 1.}; + + final int N = 50000; + Vector meanStat = IntStream.range(0, N).mapToObj(i -> gen.get()).reduce(Vector::plus).get().times(1. / N); + Vector varianceStat = IntStream.range(0, N).mapToObj(i -> gen.get().minus(meanStat)) + .map(v -> v.times(v)).reduce(Vector::plus).get().times(1. / N); + + assertArrayEquals(mean, meanStat.asArray(), 0.1); + assertArrayEquals(variance, varianceStat.asArray(), 0.1); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testGaussFail1() { + VectorGeneratorPrimitives.gauss(VectorUtils.of(), VectorUtils.of()); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testGaussFail2() { + VectorGeneratorPrimitives.gauss(VectorUtils.of(0.5, -0.5), VectorUtils.of(1.0, -1.0)); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/VectorGeneratorTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/VectorGeneratorTest.java new file mode 100644 index 0000000000000..ddac9c80c494e --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/VectorGeneratorTest.java @@ -0,0 +1,194 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.util.generators.primitives.vector; + +import org.apache.ignite.ml.math.exceptions.math.CardinalityException; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.util.generators.primitives.scalar.UniformRandomProducer; +import org.junit.Test; +import org.junit.internal.ArrayComparisonFailure; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link VectorGenerator}. 
+ */ +public class VectorGeneratorTest { + /** */ + @Test + public void testMap() { + Vector originalVec = new UniformRandomProducer(-1, 1).vectorize(2).get(); + Vector doubledVec = VectorGeneratorPrimitives.constant(originalVec).map(v -> v.times(2.)).get(); + assertArrayEquals(originalVec.times(2.).asArray(), doubledVec.asArray(), 1e-7); + } + + /** */ + @Test + public void testFilter() { + new UniformRandomProducer(-1, 1).vectorize(2) + .filter(v -> v.get(0) < 0.5) + .filter(v -> v.get(1) > -0.5) + .asDataStream().unlabeled().limit(100) + .forEach(v -> assertTrue(v.get(0) < 0.5 && v.get(1) > -0.5)); + } + + /** */ + @Test + public void concat1() { + VectorGenerator g1 = VectorGeneratorPrimitives.constant(VectorUtils.of(1., 2.)); + VectorGenerator g2 = VectorGeneratorPrimitives.constant(VectorUtils.of(3., 4.)); + VectorGenerator g12 = g1.concat(g2); + VectorGenerator g21 = g2.concat(g1); + + assertArrayEquals(new double[] {1., 2., 3., 4.}, g12.get().asArray(), 1e-7); + assertArrayEquals(new double[] {3., 4., 1., 2.}, g21.get().asArray(), 1e-7); + } + + /** */ + @Test + public void concat2() { + VectorGenerator g1 = VectorGeneratorPrimitives.constant(VectorUtils.of(1., 2.)); + VectorGenerator g2 = g1.concat(() -> 1.0); + + assertArrayEquals(new double[] {1., 2., 1.}, g2.get().asArray(), 1e-7); + } + + /** */ + @Test + public void plus() { + VectorGenerator g1 = VectorGeneratorPrimitives.constant(VectorUtils.of(1., 2.)); + VectorGenerator g2 = VectorGeneratorPrimitives.constant(VectorUtils.of(3., 4.)); + VectorGenerator g12 = g1.plus(g2); + VectorGenerator g21 = g2.plus(g1); + + assertArrayEquals(new double[] {4., 6.}, g21.get().asArray(), 1e-7); + assertArrayEquals(g21.get().asArray(), g12.get().asArray(), 1e-7); + } + + /** */ + @Test(expected = CardinalityException.class) + public void testPlusForDifferentSizes1() { + VectorGenerator g1 = VectorGeneratorPrimitives.constant(VectorUtils.of(1., 2.)); + VectorGenerator g2 = VectorGeneratorPrimitives.constant(VectorUtils.of(3.)); + g1.plus(g2).get(); + } + + /** */ + @Test(expected = CardinalityException.class) + public void testPlusForDifferentSizes2() { + VectorGenerator g1 = VectorGeneratorPrimitives.constant(VectorUtils.of(1., 2.)); + VectorGenerator g2 = VectorGeneratorPrimitives.constant(VectorUtils.of(3.)); + g2.plus(g1).get(); + } + + /** */ + @Test + public void shuffle() { + VectorGenerator g1 = VectorGeneratorPrimitives.constant(VectorUtils.of(1., 2., 3., 4.)) + .shuffle(0L); + + double[] exp = {4., 1., 2., 3.}; + Vector v1 = g1.get(); + Vector v2 = g1.get(); + assertArrayEquals(exp, v1.asArray(), 1e-7); + assertArrayEquals(v1.asArray(), v2.asArray(), 1e-7); + } + + /** */ + @Test + public void duplicateRandomFeatures() { + VectorGenerator g1 = VectorGeneratorPrimitives.constant(VectorUtils.of(1., 2., 3., 4.)) + .duplicateRandomFeatures(2, 1L); + + double[] exp = {1., 2., 3., 4., 3., 1.}; + Vector v1 = g1.get(); + Vector v2 = g1.get(); + + assertArrayEquals(exp, v1.asArray(), 1e-7); + + try { + assertArrayEquals(v1.asArray(), v2.asArray(), 1e-7); + } + catch (ArrayComparisonFailure e) { + //this is valid situation - duplicator should get different features + } + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testWithNegativeIncreaseSize() { + VectorGeneratorPrimitives.constant(VectorUtils.of(1., 2., 3., 4.)) + .duplicateRandomFeatures(-2, 1L).get(); + } + + /** */ + @Test + public void move() { + Vector res = VectorGeneratorPrimitives.constant(VectorUtils.of(1., 1.)) + .move(VectorUtils.of(2., 4.)) + 
.get(); + + assertArrayEquals(new double[] {3., 5.}, res.asArray(), 1e-7); + } + + /** */ + @Test(expected = CardinalityException.class) + public void testMoveWithDifferentSizes1() { + VectorGeneratorPrimitives.constant(VectorUtils.of(1., 1.)) + .move(VectorUtils.of(2.)) + .get(); + } + + /** */ + @Test(expected = CardinalityException.class) + public void testMoveWithDifferentSizes2() { + VectorGeneratorPrimitives.constant(VectorUtils.of(1.)) + .move(VectorUtils.of(2., 1.)) + .get(); + } + + /** */ + @Test + public void rotate() { + double[] angles = {0., Math.PI / 2, -Math.PI / 2, Math.PI, 2 * Math.PI, Math.PI / 4}; + Vector[] exp = new Vector[] { + VectorUtils.of(1., 0., 100.), + VectorUtils.of(0., -1., 100.), + VectorUtils.of(0., 1., 100.), + VectorUtils.of(-1., 0., 100.), + VectorUtils.of(1., 0., 100.), + VectorUtils.of(0.707, -0.707, 100.) + }; + + for (int i = 0; i < angles.length; i++) { + Vector res = VectorGeneratorPrimitives.constant(VectorUtils.of(1., 0., 100.)) + .rotate(angles[i]).get(); + assertArrayEquals(exp[i].asArray(), res.asArray(), 1e-3); + } + } + + /** */ + @Test + public void noisify() { + Vector res = VectorGeneratorPrimitives.constant(VectorUtils.of(1., 0.)) + .noisify(() -> 0.5).get(); + assertArrayEquals(new double[] {1.5, 0.5}, res.asArray(), 1e-7); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/VectorGeneratorsFamilyTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/VectorGeneratorsFamilyTest.java new file mode 100644 index 0000000000000..c11d32c912d6c --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/generators/primitives/vector/VectorGeneratorsFamilyTest.java @@ -0,0 +1,116 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.util.generators.primitives.vector; + +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.DoubleStream; +import java.util.stream.IntStream; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Tests for {@link VectorGeneratorsFamily}. 
+ */ +public class VectorGeneratorsFamilyTest { + /** */ + @Test + public void testSelection() { + VectorGeneratorsFamily family = new VectorGeneratorsFamily.Builder() + .add(() -> VectorUtils.of(1., 2.), 0.5) + .add(() -> VectorUtils.of(1., 2.), 0.25) + .add(() -> VectorUtils.of(1., 4.), 0.25) + .build(0L); + + Map<Integer, Vector> counters = new HashMap<>(); + for (int i = 0; i < 3; i++) + counters.put(i, VectorUtils.zeroes(2)); + + int N = 50000; + IntStream.range(0, N).forEach(i -> { + VectorGeneratorsFamily.VectorWithDistributionId vector = family.getWithId(); + int id = vector.distributionId(); + counters.put(id, counters.get(id).plus(vector.vector())); + }); + + for (int i = 0; i < 3; i++) + counters.put(i, counters.get(i).divide(N)); + + assertArrayEquals(new double[] {0.5, 1.0}, counters.get(0).asArray(), 1e-2); + assertArrayEquals(new double[] {0.25, .5}, counters.get(1).asArray(), 1e-2); + assertArrayEquals(new double[] {0.25, 1.}, counters.get(2).asArray(), 1e-2); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testInvalidParameters1() { + new VectorGeneratorsFamily.Builder().build(); + } + + /** */ + @Test(expected = IllegalArgumentException.class) + public void testInvalidParameters2() { + new VectorGeneratorsFamily.Builder().add(() -> VectorUtils.of(1.), -1.).build(); + } + + /** */ + @Test + public void testMap() { + VectorGeneratorsFamily family = new VectorGeneratorsFamily.Builder() + .add(() -> VectorUtils.of(1., 2.)) + .map(g -> g.move(VectorUtils.of(1, -1))) + .build(0L); + + assertArrayEquals(new double[] {2., 1.}, family.get().asArray(), 1e-7); + } + + /** */ + @Test + public void testGet() { + VectorGeneratorsFamily family = new VectorGeneratorsFamily.Builder() + .add(() -> VectorUtils.of(0.)) + .add(() -> VectorUtils.of(1.)) + .add(() -> VectorUtils.of(2.)) + .build(0L); + + Set<Double> validValues = DoubleStream.of(0., 1., 2.).boxed().collect(Collectors.toSet()); + for (int i = 0; i < 100; i++) { + Vector vector = family.get(); + assertTrue(validValues.contains(vector.get(0))); + } + } + + /** */ + @Test + public void testAsDataStream() { + VectorGeneratorsFamily family = new VectorGeneratorsFamily.Builder() + .add(() -> VectorUtils.of(0.)) + .add(() -> VectorUtils.of(1.)) + .add(() -> VectorUtils.of(2.)) + .build(0L); + + family.asDataStream().labeled().limit(100).forEach(v -> assertEquals(v.features().get(0), v.label(), 1e-7)); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/genetic/GeneticAlgorithmTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/genetic/GeneticAlgorithmTest.java new file mode 100644 index 0000000000000..34cef84c2381d --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/genetic/GeneticAlgorithmTest.java @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.util.genetic; + +import java.util.ArrayList; +import java.util.List; +import java.util.Random; +import java.util.function.BiFunction; +import java.util.function.Function; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +/** + * Tests for {@link GeneticAlgorithm}. + */ +public class GeneticAlgorithmTest { + /** Amount of genes in chromosome. */ + public static final int AMOUNT_OF_GENES_IN_CHROMOSOME = 8; + + /** Precision. */ + private static final double PRECISION = 0.00000001; + + /** Fitness function. */ + Function<Chromosome, Double> fitnessFunction = (Chromosome ch) -> { + double fitness = 0; + for (int i = 0; i < ch.size(); i += 2) fitness += ch.getGene(i); + return fitness; + }; + + /** Random. */ + Random rnd = new Random(1234L); + + /** Genetic algorithm instance. */ + private GeneticAlgorithm ga; + + /** + * + */ + @Before + public void setUp() { + List<Double[]> rawData = new ArrayList<>(); + for (int i = 0; i < 100; i++) { + Double[] chromosomeData = new Double[AMOUNT_OF_GENES_IN_CHROMOSOME]; + for (int j = 0; j < AMOUNT_OF_GENES_IN_CHROMOSOME; j++) + chromosomeData[j] = rnd.nextDouble(); + rawData.add(i, chromosomeData); + } + + ga = new GeneticAlgorithm(rawData); + BiFunction<Integer, Double, Double> mutator = + (integer, aDouble) -> rnd.nextDouble() > 0.5 ? aDouble + (rnd.nextDouble() / 100) : aDouble - (rnd.nextDouble() / 100); + + ga.withFitnessFunction(fitnessFunction) + .withMutationOperator(mutator) + .withAmountOfEliteChromosomes(10) + .withCrossingoverProbability(0.01) + .withCrossoverStgy(CrossoverStrategy.ONE_POINT) + .withAmountOfGenerations(100) + .withSelectionStgy(SelectionStrategy.ROULETTE_WHEEL) + .withMutationProbability(0.05); + } + + /** + * + */ + @Test + public void runGeneticAlgorithm() { + ga.run(); + double[] expBestSolution = {0.9227093559081438, 0.8716316379636383, 0.9393034992555963, 0.9264946442527818, + 0.8030164650964057, 0.41871505180713764, 1.0294056830181408, 0.5760945730781087}; + Assert.assertArrayEquals(ga.getTheBestSolution(), expBestSolution, PRECISION); + } + + /** + * + */ + @Test + public void runParallelGeneticAlgorithm() { + ga.run(); + double[] expBestSolution = {0.9227093559081438, 0.8716316379636383, 0.9393034992555963, 0.9264946442527818, + 0.8030164650964057, 0.41871505180713764, 1.0294056830181408, 0.5760945730781087}; + Assert.assertArrayEquals(ga.getTheBestSolution(), expBestSolution, PRECISION); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/genetic/PopulationTest.java b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/genetic/PopulationTest.java new file mode 100644 index 0000000000000..803f1f4aec9e6 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/java/org/apache/ignite/ml/util/genetic/PopulationTest.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.util.genetic; + +import java.util.function.Function; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +/** + * Tests for {@link Population}. + */ +public class PopulationTest { + /** Amount of the best chromosomes. */ + public static final int K = 10; + + /** Test population. */ + Population population; + + /** Precision. */ + private static final double PRECISION = 0.00000001; + + /** Fitness function. */ + Function<Chromosome, Double> fitnessFunction = (Chromosome ch) -> ch.getGene(0) + ch.getGene(1); + + /** + * + */ + @Before + public void setUp() { + population = new Population(100); + Double[] chromosomeData = new Double[2]; + for (int i = 0; i < population.size(); i++) { + chromosomeData[0] = (double)i; + chromosomeData[1] = (double)i; + population.setChromosome(i, new Chromosome(chromosomeData)); + } + } + + /** + * + */ + @Test + public void calculateFitnessForChromosome() { + population.calculateFitnessForChromosome(0, fitnessFunction); + Assert.assertEquals(population.getChromosome(0).getFitness(), 0, PRECISION); + } + + /** + * + */ + @Test + public void calculateFitnessForAll() { + population.calculateFitnessForAll(fitnessFunction); + Assert.assertEquals(population.getChromosome(0).getFitness(), 0, PRECISION); + } + + /** + * + */ + @Test + public void selectBestKChromosomeWithoutFitnessCalculation() { + Assert.assertNull(population.selectBestKChromosome(K)); + } + + /** + * + */ + @Test + public void selectBestKChromosomeWithPartiallyFitnessCalculation() { + population.calculateFitnessForChromosome(0, fitnessFunction); + population.calculateFitnessForChromosome(1, fitnessFunction); + Assert.assertNull(population.selectBestKChromosome(K)); + } + + /** + * + */ + @Test + public void selectBestKChromosome() { + population.calculateFitnessForAll(fitnessFunction); + Chromosome[] res = population.selectBestKChromosome(K); + Assert.assertEquals(res[0].getFitness(), 180, PRECISION); + } + + /** + * + */ + @Test + public void getTotalFitness() { + double res = population.getTotalFitness(); + Assert.assertEquals(res, Double.NaN, PRECISION); + + population.calculateFitnessForAll(fitnessFunction); + res = population.getTotalFitness(); + Assert.assertEquals(res, 9900.0, PRECISION); + } + + /** + * + */ + @Test + public void getAverageFitness() { + double res = population.getAverageFitness(); + Assert.assertEquals(res, Double.NaN, PRECISION); + + population.calculateFitnessForAll(fitnessFunction); + res = population.getAverageFitness(); + Assert.assertEquals(res, 99.0, PRECISION); + } +} diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/README.md b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/README.md new file mode 100644 index 0000000000000..128626ec4df49 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/README.md @@ -0,0 +1,5 @@ +iris.txt and cleared_machines are from Lichman, M. (2013). UCI Machine Learning Repository [http://archive.ics.uci.edu/ml]. Irvine, CA: University of California, School of Information and Computer Science.
+Read more about machine dataset http://archive.ics.uci.edu/ml/machine-learning-databases/cpu-performance/machine.names + + +titanic dataset has next columns survived pclass sex age sibsp parch fare diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/cleared_machines.txt b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/cleared_machines.txt new file mode 100644 index 0000000000000..cf8b6b08b3117 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/cleared_machines.txt @@ -0,0 +1,209 @@ +199,125,256,6000,256,16,128 +253,29,8000,32000,32,8,32 +253,29,8000,32000,32,8,32 +253,29,8000,32000,32,8,32 +132,29,8000,16000,32,8,16 +290,26,8000,32000,64,8,32 +381,23,16000,32000,64,16,32 +381,23,16000,32000,64,16,32 +749,23,16000,64000,64,16,32 +1238,23,32000,64000,128,32,64 +23,400,1000,3000,0,1,2 +24,400,512,3500,4,1,6 +70,60,2000,8000,65,1,8 +117,50,4000,16000,65,1,8 +15,350,64,64,0,1,4 +64,200,512,16000,0,4,32 +23,167,524,2000,8,4,15 +29,143,512,5000,0,7,32 +22,143,1000,2000,0,5,16 +124,110,5000,5000,142,8,64 +35,143,1500,6300,0,5,32 +39,143,3100,6200,0,5,20 +40,143,2300,6200,0,6,64 +45,110,3100,6200,0,6,64 +28,320,128,6000,0,1,12 +21,320,512,2000,4,1,3 +28,320,256,6000,0,1,6 +22,320,256,3000,4,1,3 +28,320,512,5000,4,1,5 +27,320,256,5000,4,1,6 +102,25,1310,2620,131,12,24 +102,25,1310,2620,131,12,24 +74,50,2620,10480,30,12,24 +74,50,2620,10480,30,12,24 +138,56,5240,20970,30,12,24 +136,64,5240,20970,30,12,24 +23,50,500,2000,8,1,4 +29,50,1000,4000,8,1,5 +44,50,2000,8000,8,1,5 +30,50,1000,4000,8,3,5 +41,50,1000,8000,8,3,5 +74,50,2000,16000,8,3,5 +74,50,2000,16000,8,3,6 +74,50,2000,16000,8,3,6 +54,133,1000,12000,9,3,12 +41,133,1000,8000,9,3,12 +18,810,512,512,8,1,1 +28,810,1000,5000,0,1,1 +36,320,512,8000,4,1,5 +38,200,512,8000,8,1,8 +34,700,384,8000,0,1,1 +19,700,256,2000,0,1,1 +72,140,1000,16000,16,1,3 +36,200,1000,8000,0,1,2 +30,110,1000,4000,16,1,2 +56,110,1000,12000,16,1,2 +42,220,1000,8000,16,1,2 +34,800,256,8000,0,1,4 +34,800,256,8000,0,1,4 +34,800,256,8000,0,1,4 +34,800,256,8000,0,1,4 +34,800,256,8000,0,1,4 +19,125,512,1000,0,8,20 +75,75,2000,8000,64,1,38 +113,75,2000,16000,64,1,38 +157,75,2000,16000,128,1,38 +18,90,256,1000,0,3,10 +20,105,256,2000,0,3,10 +28,105,1000,4000,0,3,24 +33,105,2000,4000,8,3,19 +47,75,2000,8000,8,3,24 +54,75,3000,8000,8,3,48 +20,175,256,2000,0,3,24 +23,300,768,3000,0,6,24 +25,300,768,3000,6,6,24 +52,300,768,12000,6,6,24 +27,300,768,4500,0,1,24 +50,300,384,12000,6,1,24 +18,300,192,768,6,6,24 +53,180,768,12000,6,1,31 +23,330,1000,3000,0,2,4 +30,300,1000,4000,8,3,64 +73,300,1000,16000,8,2,112 +20,330,1000,2000,0,1,2 +25,330,1000,4000,0,3,6 +28,140,2000,4000,0,3,6 +29,140,2000,4000,0,4,8 +32,140,2000,4000,8,1,20 +175,140,2000,32000,32,1,20 +57,140,2000,8000,32,1,54 +181,140,2000,32000,32,1,54 +181,140,2000,32000,32,1,54 +32,140,2000,4000,8,1,20 +82,57,4000,16000,1,6,12 +171,57,4000,24000,64,12,16 +361,26,16000,32000,64,16,24 +350,26,16000,32000,64,8,24 +220,26,8000,32000,0,8,24 +113,26,8000,16000,0,8,16 +15,480,96,512,0,1,1 +21,203,1000,2000,0,1,5 +35,115,512,6000,16,1,6 +18,1100,512,1500,0,1,1 +20,1100,768,2000,0,1,1 +20,600,768,2000,0,1,1 +28,400,2000,4000,0,1,1 +45,400,4000,8000,0,1,1 +18,900,1000,1000,0,1,2 +17,900,512,1000,0,1,2 +26,900,1000,4000,4,1,2 +28,900,1000,4000,8,1,2 +28,900,2000,4000,0,3,6 +31,225,2000,4000,8,3,6 +31,225,2000,4000,8,3,6 +42,180,2000,8000,8,1,6 +76,185,2000,16000,16,1,6 +76,180,2000,16000,16,1,6 +26,225,1000,4000,2,3,6 +59,25,2000,12000,8,1,4 
+65,25,2000,12000,16,3,5 +101,17,4000,16000,8,6,12 +116,17,4000,16000,32,6,12 +18,1500,768,1000,0,0,0 +20,1500,768,2000,0,0,0 +20,800,768,2000,0,0,0 +30,50,2000,4000,0,3,6 +44,50,2000,8000,8,3,6 +44,50,2000,8000,8,1,6 +82,50,2000,16000,24,1,6 +82,50,2000,16000,24,1,6 +128,50,8000,16000,48,1,10 +37,100,1000,8000,0,2,6 +46,100,1000,8000,24,2,6 +46,100,1000,8000,24,3,6 +80,50,2000,16000,12,3,16 +88,50,2000,16000,24,6,16 +88,50,2000,16000,24,6,16 +33,150,512,4000,0,8,128 +46,115,2000,8000,16,1,3 +29,115,2000,4000,2,1,5 +53,92,2000,8000,32,1,6 +53,92,2000,8000,32,1,6 +41,92,2000,8000,4,1,6 +86,75,4000,16000,16,1,6 +95,60,4000,16000,32,1,6 +107,60,2000,16000,64,5,8 +117,60,4000,16000,64,5,8 +119,50,4000,16000,64,5,10 +120,72,4000,16000,64,8,16 +48,72,2000,8000,16,6,8 +126,40,8000,16000,32,8,16 +266,40,8000,32000,64,8,24 +270,35,8000,32000,64,8,24 +426,38,16000,32000,128,16,32 +151,48,4000,24000,32,8,24 +267,38,8000,32000,64,8,24 +603,30,16000,32000,256,16,24 +19,112,1000,1000,0,1,4 +21,84,1000,2000,0,1,6 +26,56,1000,4000,0,1,6 +35,56,2000,6000,0,1,8 +41,56,2000,8000,0,1,8 +47,56,4000,8000,0,1,8 +62,56,4000,12000,0,1,8 +78,56,4000,16000,0,1,8 +80,38,4000,8000,32,16,32 +80,38,4000,8000,32,16,32 +142,38,8000,16000,64,4,8 +281,38,8000,24000,160,4,8 +190,38,4000,16000,128,16,32 +21,200,1000,2000,0,1,2 +25,200,1000,4000,0,1,4 +67,200,2000,8000,64,1,5 +24,250,512,4000,0,1,7 +24,250,512,4000,0,4,7 +64,250,1000,16000,1,1,8 +25,160,512,4000,2,1,5 +20,160,512,2000,2,3,8 +29,160,1000,4000,8,1,14 +43,160,1000,8000,16,1,14 +53,160,2000,8000,32,1,13 +19,240,512,1000,8,1,3 +22,240,512,2000,8,1,5 +31,105,2000,4000,8,3,8 +41,105,2000,6000,16,6,16 +47,105,2000,8000,16,4,14 +99,52,4000,16000,32,4,12 +67,70,4000,12000,8,6,8 +81,59,4000,12000,32,6,12 +149,59,8000,16000,64,12,24 +183,26,8000,24000,32,8,16 +275,26,8000,32000,64,12,16 +382,26,8000,32000,128,24,32 +56,116,2000,8000,32,5,28 +182,50,2000,32000,24,6,26 +227,50,2000,32000,48,26,52 +341,50,2000,32000,112,52,104 +360,50,4000,32000,112,52,104 +919,30,8000,64000,96,12,176 +978,30,8000,64000,128,12,176 +24,180,262,4000,0,1,3 +24,180,512,4000,0,1,3 +24,180,262,4000,0,1,3 +24,180,512,4000,0,1,3 +37,124,1000,8000,0,1,8 +50,98,1000,8000,32,2,8 +41,125,2000,8000,0,2,14 +47,480,512,8000,32,0,0 +25,480,1000,4000,0,0,0 diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/empty.txt b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/empty.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/iris.txt b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/iris.txt new file mode 100644 index 0000000000000..18f5f7c886472 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/iris.txt @@ -0,0 +1,150 @@ +1.0 5.1 3.5 1.4 0.2 +1.0 4.9 3.0 1.4 0.2 +1.0 4.7 3.2 1.3 0.2 +1.0 4.6 3.1 1.5 0.2 +1.0 5.0 3.6 1.4 0.2 +1.0 5.4 3.9 1.7 0.4 +1.0 4.6 3.4 1.4 0.3 +1.0 5.0 3.4 1.5 0.2 +1.0 4.4 2.9 1.4 0.2 +1.0 4.9 3.1 1.5 0.1 +1.0 5.4 3.7 1.5 0.2 +1.0 4.8 3.4 1.6 0.2 +1.0 4.8 3.0 1.4 0.1 +1.0 4.3 3.0 1.1 0.1 +1.0 5.8 4.0 1.2 0.2 +1.0 5.7 4.4 1.5 0.4 +1.0 5.4 3.9 1.3 0.4 +1.0 5.1 3.5 1.4 0.3 +1.0 5.7 3.8 1.7 0.3 +1.0 5.1 3.8 1.5 0.3 +1.0 5.4 3.4 1.7 0.2 +1.0 5.1 3.7 1.5 0.4 +1.0 4.6 3.6 1.0 0.2 +1.0 5.1 3.3 1.7 0.5 +1.0 4.8 3.4 1.9 0.2 +1.0 5.0 3.0 1.6 0.2 +1.0 5.0 3.4 1.6 0.4 +1.0 5.2 3.5 1.5 0.2 +1.0 5.2 3.4 1.4 0.2 +1.0 4.7 3.2 1.6 0.2 +1.0 4.8 3.1 1.6 0.2 +1.0 5.4 3.4 1.5 0.4 +1.0 5.2 4.1 1.5 0.1 +1.0 5.5 4.2 1.4 
0.2 +1.0 4.9 3.1 1.5 0.1 +1.0 5.0 3.2 1.2 0.2 +1.0 5.5 3.5 1.3 0.2 +1.0 4.9 3.1 1.5 0.1 +1.0 4.4 3.0 1.3 0.2 +1.0 5.1 3.4 1.5 0.2 +1.0 5.0 3.5 1.3 0.3 +1.0 4.5 2.3 1.3 0.3 +1.0 4.4 3.2 1.3 0.2 +1.0 5.0 3.5 1.6 0.6 +1.0 5.1 3.8 1.9 0.4 +1.0 4.8 3.0 1.4 0.3 +1.0 5.1 3.8 1.6 0.2 +1.0 4.6 3.2 1.4 0.2 +1.0 5.3 3.7 1.5 0.2 +1.0 5.0 3.3 1.4 0.2 +2.0 7.0 3.2 4.7 1.4 +2.0 6.4 3.2 4.5 1.5 +2.0 6.9 3.1 4.9 1.5 +2.0 5.5 2.3 4.0 1.3 +2.0 6.5 2.8 4.6 1.5 +2.0 5.7 2.8 4.5 1.3 +2.0 6.3 3.3 4.7 1.6 +2.0 4.9 2.4 3.3 1.0 +2.0 6.6 2.9 4.6 1.3 +2.0 5.2 2.7 3.9 1.4 +2.0 5.0 2.0 3.5 1.0 +2.0 5.9 3.0 4.2 1.5 +2.0 6.0 2.2 4.0 1.0 +2.0 6.1 2.9 4.7 1.4 +2.0 5.6 2.9 3.6 1.3 +2.0 6.7 3.1 4.4 1.4 +2.0 5.6 3.0 4.5 1.5 +2.0 5.8 2.7 4.1 1.0 +2.0 6.2 2.2 4.5 1.5 +2.0 5.6 2.5 3.9 1.1 +2.0 5.9 3.2 4.8 1.8 +2.0 6.1 2.8 4.0 1.3 +2.0 6.3 2.5 4.9 1.5 +2.0 6.1 2.8 4.7 1.2 +2.0 6.4 2.9 4.3 1.3 +2.0 6.6 3.0 4.4 1.4 +2.0 6.8 2.8 4.8 1.4 +2.0 6.7 3.0 5.0 1.7 +2.0 6.0 2.9 4.5 1.5 +2.0 5.7 2.6 3.5 1.0 +2.0 5.5 2.4 3.8 1.1 +2.0 5.5 2.4 3.7 1.0 +2.0 5.8 2.7 3.9 1.2 +2.0 6.0 2.7 5.1 1.6 +2.0 5.4 3.0 4.5 1.5 +2.0 6.0 3.4 4.5 1.6 +2.0 6.7 3.1 4.7 1.5 +2.0 6.3 2.3 4.4 1.3 +2.0 5.6 3.0 4.1 1.3 +2.0 5.5 2.5 4.0 1.3 +2.0 5.5 2.6 4.4 1.2 +2.0 6.1 3.0 4.6 1.4 +2.0 5.8 2.6 4.0 1.2 +2.0 5.0 2.3 3.3 1.0 +2.0 5.6 2.7 4.2 1.3 +2.0 5.7 3.0 4.2 1.2 +2.0 5.7 2.9 4.2 1.3 +2.0 6.2 2.9 4.3 1.3 +2.0 5.1 2.5 3.0 1.1 +2.0 5.7 2.8 4.1 1.3 +3.0 6.3 3.3 6.0 2.5 +3.0 5.8 2.7 5.1 1.9 +3.0 7.1 3.0 5.9 2.1 +3.0 6.3 2.9 5.6 1.8 +3.0 6.5 3.0 5.8 2.2 +3.0 7.6 3.0 6.6 2.1 +3.0 4.9 2.5 4.5 1.7 +3.0 7.3 2.9 6.3 1.8 +3.0 6.7 2.5 5.8 1.8 +3.0 7.2 3.6 6.1 2.5 +3.0 6.5 3.2 5.1 2.0 +3.0 6.4 2.7 5.3 1.9 +3.0 6.8 3.0 5.5 2.1 +3.0 5.7 2.5 5.0 2.0 +3.0 5.8 2.8 5.1 2.4 +3.0 6.4 3.2 5.3 2.3 +3.0 6.5 3.0 5.5 1.8 +3.0 7.7 3.8 6.7 2.2 +3.0 7.7 2.6 6.9 2.3 +3.0 6.0 2.2 5.0 1.5 +3.0 6.9 3.2 5.7 2.3 +3.0 5.6 2.8 4.9 2.0 +3.0 7.7 2.8 6.7 2.0 +3.0 6.3 2.7 4.9 1.8 +3.0 6.7 3.3 5.7 2.1 +3.0 7.2 3.2 6.0 1.8 +3.0 6.2 2.8 4.8 1.8 +3.0 6.1 3.0 4.9 1.8 +3.0 6.4 2.8 5.6 2.1 +3.0 7.2 3.0 5.8 1.6 +3.0 7.4 2.8 6.1 1.9 +3.0 7.9 3.8 6.4 2.0 +3.0 6.4 2.8 5.6 2.2 +3.0 6.3 2.8 5.1 1.5 +3.0 6.1 2.6 5.6 1.4 +3.0 7.7 3.0 6.1 2.3 +3.0 6.3 3.4 5.6 2.4 +3.0 6.4 3.1 5.5 1.8 +3.0 6.0 3.0 4.8 1.8 +3.0 6.9 3.1 5.4 2.1 +3.0 6.7 3.1 5.6 2.4 +3.0 6.9 3.1 5.1 2.3 +3.0 5.8 2.7 5.1 1.9 +3.0 6.8 3.2 5.9 2.3 +3.0 6.7 3.3 5.7 2.5 +3.0 6.7 3.0 5.2 2.3 +3.0 6.3 2.5 5.0 1.9 +3.0 6.5 3.0 5.2 2.0 +3.0 6.2 3.4 5.4 2.3 +3.0 5.9 3.0 5.1 1.8 diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/iris_incorrect.txt b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/iris_incorrect.txt new file mode 100644 index 0000000000000..7bb42c6afee98 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/iris_incorrect.txt @@ -0,0 +1,150 @@ +1.0 5.1 3.5 1.4 13ls +ss 4.9 3.0 1.4 0.2 +1.0 4.7 3.2 1.3 0.2 +1.0 4.6 3.1 1.5 0.2 +1.0 5.0 3.6 1.4 0.2 +1.0 5.4 3.9 1.7 0.4 +1.0 4.6 3.4 1.4 0.3 +1.0 5.0 3.4 1.5 0.2 +1.0 4.4 2.9 1.4 0.2 +1.0 4.9 3.1 1.5 0.1 +1.0 5.4 3.7 1.5 0.2 +1.0 4.8 3.4 1.6 0.2 +1.0 4.8 3.0 1.4 0.1 +1.0 4.3 3.0 1.1 0.1 +1.0 5.8 4.0 1.2 0.2 +1.0 5.7 4.4 1.5 0.4 +1.0 5.4 3.9 1.3 0.4 +1.0 5.1 3.5 1.4 0.3 +1.0 5.7 3.8 1.7 0.3 +1.0 5.1 3.8 1.5 0.3 +1.0 5.4 3.4 1.7 0.2 +1.0 5.1 3.7 1.5 0.4 +1.0 4.6 3.6 1.0 0.2 +1.0 5.1 3.3 1.7 0.5 +1.0 4.8 3.4 1.9 0.2 +1.0 5.0 3.0 1.6 0.2 +1.0 5.0 3.4 1.6 0.4 +1.0 5.2 3.5 1.5 0.2 +1.0 5.2 3.4 1.4 0.2 +1.0 4.7 3.2 1.6 0.2 +1.0 4.8 3.1 1.6 0.2 +1.0 5.4 3.4 1.5 0.4 +1.0 5.2 4.1 1.5 0.1 +1.0 5.5 4.2 1.4 0.2 +1.0 4.9 3.1 1.5 0.1 +1.0 5.0 
3.2 1.2 0.2 +1.0 5.5 3.5 1.3 0.2 +1.0 4.9 3.1 1.5 0.1 +1.0 4.4 3.0 1.3 0.2 +1.0 5.1 3.4 1.5 0.2 +1.0 5.0 3.5 1.3 0.3 +1.0 4.5 2.3 1.3 0.3 +1.0 4.4 3.2 1.3 0.2 +1.0 5.0 3.5 1.6 0.6 +1.0 5.1 3.8 1.9 0.4 +1.0 4.8 3.0 1.4 0.3 +1.0 5.1 3.8 1.6 0.2 +1.0 4.6 3.2 1.4 0.2 +1.0 5.3 3.7 1.5 0.2 +1.0 5.0 3.3 1.4 0.2 +2.0 7.0 3.2 4.7 1.4 +2.0 6.4 3.2 4.5 1.5 +2.0 6.9 3.1 4.9 1.5 +2.0 5.5 2.3 4.0 1.3 +2.0 6.5 2.8 4.6 1.5 +2.0 5.7 2.8 4.5 1.3 +2.0 6.3 3.3 4.7 1.6 +2.0 4.9 2.4 3.3 1.0 +2.0 6.6 2.9 4.6 1.3 +2.0 5.2 2.7 3.9 1.4 +2.0 5.0 2.0 3.5 1.0 +2.0 5.9 3.0 4.2 1.5 +2.0 6.0 2.2 4.0 1.0 +2.0 6.1 2.9 4.7 1.4 +2.0 5.6 2.9 3.6 1.3 +2.0 6.7 3.1 4.4 1.4 +2.0 5.6 3.0 4.5 1.5 +2.0 5.8 2.7 4.1 1.0 +2.0 6.2 2.2 4.5 1.5 +2.0 5.6 2.5 3.9 1.1 +2.0 5.9 3.2 4.8 1.8 +2.0 6.1 2.8 4.0 1.3 +2.0 6.3 2.5 4.9 1.5 +2.0 6.1 2.8 4.7 1.2 +2.0 6.4 2.9 4.3 1.3 +2.0 6.6 3.0 4.4 1.4 +2.0 6.8 2.8 4.8 1.4 +2.0 6.7 3.0 5.0 1.7 +2.0 6.0 2.9 4.5 1.5 +2.0 5.7 2.6 3.5 1.0 +2.0 5.5 2.4 3.8 1.1 +2.0 5.5 2.4 3.7 1.0 +2.0 5.8 2.7 3.9 1.2 +2.0 6.0 2.7 5.1 1.6 +2.0 5.4 3.0 4.5 1.5 +2.0 6.0 3.4 4.5 1.6 +2.0 6.7 3.1 4.7 1.5 +2.0 6.3 2.3 4.4 1.3 +2.0 5.6 3.0 4.1 1.3 +2.0 5.5 2.5 4.0 1.3 +2.0 5.5 2.6 4.4 1.2 +2.0 6.1 3.0 4.6 1.4 +2.0 5.8 2.6 4.0 1.2 +2.0 5.0 2.3 3.3 1.0 +2.0 5.6 2.7 4.2 1.3 +2.0 5.7 3.0 4.2 1.2 +2.0 5.7 2.9 4.2 1.3 +2.0 6.2 2.9 4.3 1.3 +2.0 5.1 2.5 3.0 1.1 +2.0 5.7 2.8 4.1 1.3 +3.0 6.3 3.3 6.0 2.5 +3.0 5.8 2.7 5.1 1.9 +3.0 7.1 3.0 5.9 2.1 +3.0 6.3 2.9 5.6 1.8 +3.0 6.5 3.0 5.8 2.2 +3.0 7.6 3.0 6.6 2.1 +3.0 4.9 2.5 4.5 1.7 +3.0 7.3 2.9 6.3 1.8 +3.0 6.7 2.5 5.8 1.8 +3.0 7.2 3.6 6.1 2.5 +3.0 6.5 3.2 5.1 2.0 +3.0 6.4 2.7 5.3 1.9 +3.0 6.8 3.0 5.5 2.1 +3.0 5.7 2.5 5.0 2.0 +3.0 5.8 2.8 5.1 2.4 +3.0 6.4 3.2 5.3 2.3 +3.0 6.5 3.0 5.5 1.8 +3.0 7.7 3.8 6.7 2.2 +3.0 7.7 2.6 6.9 2.3 +3.0 6.0 2.2 5.0 1.5 +3.0 6.9 3.2 5.7 2.3 +3.0 5.6 2.8 4.9 2.0 +3.0 7.7 2.8 6.7 2.0 +3.0 6.3 2.7 4.9 1.8 +3.0 6.7 3.3 5.7 2.1 +3.0 7.2 3.2 6.0 1.8 +3.0 6.2 2.8 4.8 1.8 +3.0 6.1 3.0 4.9 1.8 +3.0 6.4 2.8 5.6 2.1 +3.0 7.2 3.0 5.8 1.6 +3.0 7.4 2.8 6.1 1.9 +3.0 7.9 3.8 6.4 2.0 +3.0 6.4 2.8 5.6 2.2 +3.0 6.3 2.8 5.1 1.5 +3.0 6.1 2.6 5.6 1.4 +3.0 7.7 3.0 6.1 2.3 +3.0 6.3 3.4 5.6 2.4 +3.0 6.4 3.1 5.5 1.8 +3.0 6.0 3.0 4.8 1.8 +3.0 6.9 3.1 5.4 2.1 +3.0 6.7 3.1 5.6 2.4 +3.0 6.9 3.1 5.1 2.3 +3.0 5.8 2.7 5.1 1.9 +3.0 6.8 3.2 5.9 2.3 +3.0 6.7 3.3 5.7 2.5 +3.0 6.7 3.0 5.2 2.3 +3.0 6.3 2.5 5.0 1.9 +3.0 6.5 3.0 5.2 2.0 +3.0 6.2 3.4 5.4 2.3 +3.0 5.9 3.0 5.1 1.8 diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/machine.data.txt b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/machine.data.txt new file mode 100644 index 0000000000000..656ed8cd1f8f7 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/machine.data.txt @@ -0,0 +1,209 @@ +adviser,32/60,125,256,6000,256,16,128,198,199 +amdahl,470v/7,29,8000,32000,32,8,32,269,253 +amdahl,470v/7a,29,8000,32000,32,8,32,220,253 +amdahl,470v/7b,29,8000,32000,32,8,32,172,253 +amdahl,470v/7c,29,8000,16000,32,8,16,132,132 +amdahl,470v/b,26,8000,32000,64,8,32,318,290 +amdahl,580-5840,23,16000,32000,64,16,32,367,381 +amdahl,580-5850,23,16000,32000,64,16,32,489,381 +amdahl,580-5860,23,16000,64000,64,16,32,636,749 +amdahl,580-5880,23,32000,64000,128,32,64,1144,1238 +apollo,dn320,400,1000,3000,0,1,2,38,23 +apollo,dn420,400,512,3500,4,1,6,40,24 +basf,7/65,60,2000,8000,65,1,8,92,70 +basf,7/68,50,4000,16000,65,1,8,138,117 +bti,5000,350,64,64,0,1,4,10,15 +bti,8000,200,512,16000,0,4,32,35,64 +burroughs,b1955,167,524,2000,8,4,15,19,23 +burroughs,b2900,143,512,5000,0,7,32,28,29 
+burroughs,b2925,143,1000,2000,0,5,16,31,22 +burroughs,b4955,110,5000,5000,142,8,64,120,124 +burroughs,b5900,143,1500,6300,0,5,32,30,35 +burroughs,b5920,143,3100,6200,0,5,20,33,39 +burroughs,b6900,143,2300,6200,0,6,64,61,40 +burroughs,b6925,110,3100,6200,0,6,64,76,45 +c.r.d,68/10-80,320,128,6000,0,1,12,23,28 +c.r.d,universe:2203t,320,512,2000,4,1,3,69,21 +c.r.d,universe:68,320,256,6000,0,1,6,33,28 +c.r.d,universe:68/05,320,256,3000,4,1,3,27,22 +c.r.d,universe:68/137,320,512,5000,4,1,5,77,28 +c.r.d,universe:68/37,320,256,5000,4,1,6,27,27 +cdc,cyber:170/750,25,1310,2620,131,12,24,274,102 +cdc,cyber:170/760,25,1310,2620,131,12,24,368,102 +cdc,cyber:170/815,50,2620,10480,30,12,24,32,74 +cdc,cyber:170/825,50,2620,10480,30,12,24,63,74 +cdc,cyber:170/835,56,5240,20970,30,12,24,106,138 +cdc,cyber:170/845,64,5240,20970,30,12,24,208,136 +cdc,omega:480-i,50,500,2000,8,1,4,20,23 +cdc,omega:480-ii,50,1000,4000,8,1,5,29,29 +cdc,omega:480-iii,50,2000,8000,8,1,5,71,44 +cambex,1636-1,50,1000,4000,8,3,5,26,30 +cambex,1636-10,50,1000,8000,8,3,5,36,41 +cambex,1641-1,50,2000,16000,8,3,5,40,74 +cambex,1641-11,50,2000,16000,8,3,6,52,74 +cambex,1651-1,50,2000,16000,8,3,6,60,74 +dec,decsys:10:1091,133,1000,12000,9,3,12,72,54 +dec,decsys:20:2060,133,1000,8000,9,3,12,72,41 +dec,microvax-1,810,512,512,8,1,1,18,18 +dec,vax:11/730,810,1000,5000,0,1,1,20,28 +dec,vax:11/750,320,512,8000,4,1,5,40,36 +dec,vax:11/780,200,512,8000,8,1,8,62,38 +dg,eclipse:c/350,700,384,8000,0,1,1,24,34 +dg,eclipse:m/600,700,256,2000,0,1,1,24,19 +dg,eclipse:mv/10000,140,1000,16000,16,1,3,138,72 +dg,eclipse:mv/4000,200,1000,8000,0,1,2,36,36 +dg,eclipse:mv/6000,110,1000,4000,16,1,2,26,30 +dg,eclipse:mv/8000,110,1000,12000,16,1,2,60,56 +dg,eclipse:mv/8000-ii,220,1000,8000,16,1,2,71,42 +formation,f4000/100,800,256,8000,0,1,4,12,34 +formation,f4000/200,800,256,8000,0,1,4,14,34 +formation,f4000/200ap,800,256,8000,0,1,4,20,34 +formation,f4000/300,800,256,8000,0,1,4,16,34 +formation,f4000/300ap,800,256,8000,0,1,4,22,34 +four-phase,2000/260,125,512,1000,0,8,20,36,19 +gould,concept:32/8705,75,2000,8000,64,1,38,144,75 +gould,concept:32/8750,75,2000,16000,64,1,38,144,113 +gould,concept:32/8780,75,2000,16000,128,1,38,259,157 +hp,3000/30,90,256,1000,0,3,10,17,18 +hp,3000/40,105,256,2000,0,3,10,26,20 +hp,3000/44,105,1000,4000,0,3,24,32,28 +hp,3000/48,105,2000,4000,8,3,19,32,33 +hp,3000/64,75,2000,8000,8,3,24,62,47 +hp,3000/88,75,3000,8000,8,3,48,64,54 +hp,3000/iii,175,256,2000,0,3,24,22,20 +harris,100,300,768,3000,0,6,24,36,23 +harris,300,300,768,3000,6,6,24,44,25 +harris,500,300,768,12000,6,6,24,50,52 +harris,600,300,768,4500,0,1,24,45,27 +harris,700,300,384,12000,6,1,24,53,50 +harris,80,300,192,768,6,6,24,36,18 +harris,800,180,768,12000,6,1,31,84,53 +honeywell,dps:6/35,330,1000,3000,0,2,4,16,23 +honeywell,dps:6/92,300,1000,4000,8,3,64,38,30 +honeywell,dps:6/96,300,1000,16000,8,2,112,38,73 +honeywell,dps:7/35,330,1000,2000,0,1,2,16,20 +honeywell,dps:7/45,330,1000,4000,0,3,6,22,25 +honeywell,dps:7/55,140,2000,4000,0,3,6,29,28 +honeywell,dps:7/65,140,2000,4000,0,4,8,40,29 +honeywell,dps:8/44,140,2000,4000,8,1,20,35,32 +honeywell,dps:8/49,140,2000,32000,32,1,20,134,175 +honeywell,dps:8/50,140,2000,8000,32,1,54,66,57 +honeywell,dps:8/52,140,2000,32000,32,1,54,141,181 +honeywell,dps:8/62,140,2000,32000,32,1,54,189,181 +honeywell,dps:8/20,140,2000,4000,8,1,20,22,32 +ibm,3033:s,57,4000,16000,1,6,12,132,82 +ibm,3033:u,57,4000,24000,64,12,16,237,171 +ibm,3081,26,16000,32000,64,16,24,465,361 +ibm,3081:d,26,16000,32000,64,8,24,465,350 
+ibm,3083:b,26,8000,32000,0,8,24,277,220 +ibm,3083:e,26,8000,16000,0,8,16,185,113 +ibm,370/125-2,480,96,512,0,1,1,6,15 +ibm,370/148,203,1000,2000,0,1,5,24,21 +ibm,370/158-3,115,512,6000,16,1,6,45,35 +ibm,38/3,1100,512,1500,0,1,1,7,18 +ibm,38/4,1100,768,2000,0,1,1,13,20 +ibm,38/5,600,768,2000,0,1,1,16,20 +ibm,38/7,400,2000,4000,0,1,1,32,28 +ibm,38/8,400,4000,8000,0,1,1,32,45 +ibm,4321,900,1000,1000,0,1,2,11,18 +ibm,4331-1,900,512,1000,0,1,2,11,17 +ibm,4331-11,900,1000,4000,4,1,2,18,26 +ibm,4331-2,900,1000,4000,8,1,2,22,28 +ibm,4341,900,2000,4000,0,3,6,37,28 +ibm,4341-1,225,2000,4000,8,3,6,40,31 +ibm,4341-10,225,2000,4000,8,3,6,34,31 +ibm,4341-11,180,2000,8000,8,1,6,50,42 +ibm,4341-12,185,2000,16000,16,1,6,76,76 +ibm,4341-2,180,2000,16000,16,1,6,66,76 +ibm,4341-9,225,1000,4000,2,3,6,24,26 +ibm,4361-4,25,2000,12000,8,1,4,49,59 +ibm,4361-5,25,2000,12000,16,3,5,66,65 +ibm,4381-1,17,4000,16000,8,6,12,100,101 +ibm,4381-2,17,4000,16000,32,6,12,133,116 +ibm,8130-a,1500,768,1000,0,0,0,12,18 +ibm,8130-b,1500,768,2000,0,0,0,18,20 +ibm,8140,800,768,2000,0,0,0,20,20 +ipl,4436,50,2000,4000,0,3,6,27,30 +ipl,4443,50,2000,8000,8,3,6,45,44 +ipl,4445,50,2000,8000,8,1,6,56,44 +ipl,4446,50,2000,16000,24,1,6,70,82 +ipl,4460,50,2000,16000,24,1,6,80,82 +ipl,4480,50,8000,16000,48,1,10,136,128 +magnuson,m80/30,100,1000,8000,0,2,6,16,37 +magnuson,m80/31,100,1000,8000,24,2,6,26,46 +magnuson,m80/32,100,1000,8000,24,3,6,32,46 +magnuson,m80/42,50,2000,16000,12,3,16,45,80 +magnuson,m80/43,50,2000,16000,24,6,16,54,88 +magnuson,m80/44,50,2000,16000,24,6,16,65,88 +microdata,seq.ms/3200,150,512,4000,0,8,128,30,33 +nas,as/3000,115,2000,8000,16,1,3,50,46 +nas,as/3000-n,115,2000,4000,2,1,5,40,29 +nas,as/5000,92,2000,8000,32,1,6,62,53 +nas,as/5000-e,92,2000,8000,32,1,6,60,53 +nas,as/5000-n,92,2000,8000,4,1,6,50,41 +nas,as/6130,75,4000,16000,16,1,6,66,86 +nas,as/6150,60,4000,16000,32,1,6,86,95 +nas,as/6620,60,2000,16000,64,5,8,74,107 +nas,as/6630,60,4000,16000,64,5,8,93,117 +nas,as/6650,50,4000,16000,64,5,10,111,119 +nas,as/7000,72,4000,16000,64,8,16,143,120 +nas,as/7000-n,72,2000,8000,16,6,8,105,48 +nas,as/8040,40,8000,16000,32,8,16,214,126 +nas,as/8050,40,8000,32000,64,8,24,277,266 +nas,as/8060,35,8000,32000,64,8,24,370,270 +nas,as/9000-dpc,38,16000,32000,128,16,32,510,426 +nas,as/9000-n,48,4000,24000,32,8,24,214,151 +nas,as/9040,38,8000,32000,64,8,24,326,267 +nas,as/9060,30,16000,32000,256,16,24,510,603 +ncr,v8535:ii,112,1000,1000,0,1,4,8,19 +ncr,v8545:ii,84,1000,2000,0,1,6,12,21 +ncr,v8555:ii,56,1000,4000,0,1,6,17,26 +ncr,v8565:ii,56,2000,6000,0,1,8,21,35 +ncr,v8565:ii-e,56,2000,8000,0,1,8,24,41 +ncr,v8575:ii,56,4000,8000,0,1,8,34,47 +ncr,v8585:ii,56,4000,12000,0,1,8,42,62 +ncr,v8595:ii,56,4000,16000,0,1,8,46,78 +ncr,v8635,38,4000,8000,32,16,32,51,80 +ncr,v8650,38,4000,8000,32,16,32,116,80 +ncr,v8655,38,8000,16000,64,4,8,100,142 +ncr,v8665,38,8000,24000,160,4,8,140,281 +ncr,v8670,38,4000,16000,128,16,32,212,190 +nixdorf,8890/30,200,1000,2000,0,1,2,25,21 +nixdorf,8890/50,200,1000,4000,0,1,4,30,25 +nixdorf,8890/70,200,2000,8000,64,1,5,41,67 +perkin-elmer,3205,250,512,4000,0,1,7,25,24 +perkin-elmer,3210,250,512,4000,0,4,7,50,24 +perkin-elmer,3230,250,1000,16000,1,1,8,50,64 +prime,50-2250,160,512,4000,2,1,5,30,25 +prime,50-250-ii,160,512,2000,2,3,8,32,20 +prime,50-550-ii,160,1000,4000,8,1,14,38,29 +prime,50-750-ii,160,1000,8000,16,1,14,60,43 +prime,50-850-ii,160,2000,8000,32,1,13,109,53 +siemens,7.521,240,512,1000,8,1,3,6,19 +siemens,7.531,240,512,2000,8,1,5,11,22 +siemens,7.536,105,2000,4000,8,3,8,22,31 
+siemens,7.541,105,2000,6000,16,6,16,33,41 +siemens,7.551,105,2000,8000,16,4,14,58,47 +siemens,7.561,52,4000,16000,32,4,12,130,99 +siemens,7.865-2,70,4000,12000,8,6,8,75,67 +siemens,7.870-2,59,4000,12000,32,6,12,113,81 +siemens,7.872-2,59,8000,16000,64,12,24,188,149 +siemens,7.875-2,26,8000,24000,32,8,16,173,183 +siemens,7.880-2,26,8000,32000,64,12,16,248,275 +siemens,7.881-2,26,8000,32000,128,24,32,405,382 +sperry,1100/61-h1,116,2000,8000,32,5,28,70,56 +sperry,1100/81,50,2000,32000,24,6,26,114,182 +sperry,1100/82,50,2000,32000,48,26,52,208,227 +sperry,1100/83,50,2000,32000,112,52,104,307,341 +sperry,1100/84,50,4000,32000,112,52,104,397,360 +sperry,1100/93,30,8000,64000,96,12,176,915,919 +sperry,1100/94,30,8000,64000,128,12,176,1150,978 +sperry,80/3,180,262,4000,0,1,3,12,24 +sperry,80/4,180,512,4000,0,1,3,14,24 +sperry,80/5,180,262,4000,0,1,3,18,24 +sperry,80/6,180,512,4000,0,1,3,21,24 +sperry,80/8,124,1000,8000,0,1,8,42,37 +sperry,90/80-model-3,98,1000,8000,32,2,8,46,50 +sratus,32,125,2000,8000,0,2,14,52,41 +wang,vs-100,480,512,8000,32,0,0,67,47 +wang,vs-90,480,1000,4000,0,0,0,45,25 diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/missed_data.txt b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/missed_data.txt new file mode 100644 index 0000000000000..83ce9a5f1b7ee --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/missed_data.txt @@ -0,0 +1,3 @@ +1.0,5.1,3.5,1.4,0.2 +1.0,4.9,3.0,1.4,0.2 +1.0,4.7,,1.3,0.2 diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/no_data.txt b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/no_data.txt new file mode 100644 index 0000000000000..d1d4c7b4f3646 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/knn/no_data.txt @@ -0,0 +1,6 @@ + + +2 + + +323 diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/regression/README.md b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/regression/README.md new file mode 100644 index 0000000000000..b4d57cf866775 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/regression/README.md @@ -0,0 +1,98 @@ +This package contains two datasets: + +Boston House Prices dataset +=========================== + +Notes +------ +Data Set Characteristics: + + :Number of Instances: 506 + + :Number of Attributes: 13 numeric/categorical predictive + + :Median Value (attribute 14) is usually the target + + :Attribute Information (in order): + - CRIM per capita crime rate by town + - ZN proportion of residential land zoned for lots over 25,000 sq.ft. + - INDUS proportion of non-retail business acres per town + - CHAS Charles River dummy variable (= 1 if tract bounds river; 0 otherwise) + - NOX nitric oxides concentration (parts per 10 million) + - RM average number of rooms per dwelling + - AGE proportion of owner-occupied units built prior to 1940 + - DIS weighted distances to five Boston employment centres + - RAD index of accessibility to radial highways + - TAX full-value property-tax rate per $10,000 + - PTRATIO pupil-teacher ratio by town + - B 1000(Bk - 0.63)^2 where Bk is the proportion of blacks by town + - LSTAT % lower status of the population + - MEDV Median value of owner-occupied homes in $1000's + + :Missing Attribute Values: None + + :Creator: Harrison, D. and Rubinfeld, D.L. + +This is a copy of UCI ML housing dataset. 
+http://archive.ics.uci.edu/ml/datasets/Housing + + +This dataset was taken from the StatLib library which is maintained at Carnegie Mellon University. + +The Boston house-price data of Harrison, D. and Rubinfeld, D.L. 'Hedonic +prices and the demand for clean air', J. Environ. Economics & Management, +vol.5, 81-102, 1978. Used in Belsley, Kuh & Welsch, 'Regression diagnostics +...', Wiley, 1980. N.B. Various transformations are used in the table on +pages 244-261 of the latter. + +The Boston house-price data has been used in many machine learning papers that address regression +problems. + +**References** + + - Belsley, Kuh & Welsch, 'Regression diagnostics: Identifying Influential Data and Sources of Collinearity', Wiley, 1980. 244-261. + - Quinlan, R. (1993). Combining Instance-Based and Model-Based Learning. In Proceedings of the Tenth International Conference on Machine Learning, 236-243, University of Massachusetts, Amherst. Morgan Kaufmann. + - many more! (see http://archive.ics.uci.edu/ml/datasets/Housing) + + +Diabetes dataset +================ + +Notes +----- + +Ten baseline variables, age, sex, body mass index, average blood +pressure, and six blood serum measurements were obtained for each of n = +442 diabetes patients, as well as the response of interest, a +quantitative measure of disease progression one year after baseline. + +Data Set Characteristics: + + :Number of Instances: 442 + + :Number of Attributes: First 10 columns are numeric predictive values + + :Target: Column 11 is a quantitative measure of disease progression one year after baseline + + :Attributes: + :Age: + :Sex: + :Body mass index: + :Average blood pressure: + :S1: + :S2: + :S3: + :S4: + :S5: + :S6: + +Note: Each of these 10 feature variables has been mean centered and scaled by the standard deviation times the square root of n_samples (i.e. the sum of squares of each column totals 1); a formula sketch follows the references below. + +Source URL: +http://www4.stat.ncsu.edu/~boos/var.select/diabetes.html + +For more information see: +Bradley Efron, Trevor Hastie, Iain Johnstone and Robert Tibshirani (2004) "Least Angle Regression," Annals of Statistics (with discussion), 407-499 (full text linked below).
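+As a sketch of that scaling (assuming the population standard deviation and n = 442 samples; the symbols below are not part of the original dataset description), each raw feature column x_j is transformed as
+
+    \tilde{x}_{ij} = \frac{x_{ij} - \bar{x}_j}{\sigma_j \sqrt{n}},
+    \qquad \bar{x}_j = \frac{1}{n} \sum_{i=1}^{n} x_{ij},
+    \qquad \sigma_j^2 = \frac{1}{n} \sum_{i=1}^{n} \left( x_{ij} - \bar{x}_j \right)^2,
+
+which gives every stored column zero mean and unit sum of squares, matching the note above.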
+(http://web.stanford.edu/~hastie/Papers/LARS/LeastAngle_2002.pdf) \ No newline at end of file diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/regression/boston.csv b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/regression/boston.csv new file mode 100644 index 0000000000000..b43afa959b4e4 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/regression/boston.csv @@ -0,0 +1,506 @@ +24.0, 0.00632, 18.0, 2.31, 0.0, 0.538, 6.575, 65.2, 4.09, 1.0, 296.0, 15.3, 396.9, 4.98 +21.6, 0.02731, 0.0, 7.07, 0.0, 0.469, 6.421, 78.9, 4.9671, 2.0, 242.0, 17.8, 396.9, 9.14 +34.7, 0.02729, 0.0, 7.07, 0.0, 0.469, 7.185, 61.1, 4.9671, 2.0, 242.0, 17.8, 392.83, 4.03 +33.4, 0.03237, 0.0, 2.18, 0.0, 0.458, 6.998, 45.8, 6.0622, 3.0, 222.0, 18.7, 394.63, 2.94 +36.2, 0.06905, 0.0, 2.18, 0.0, 0.458, 7.147, 54.2, 6.0622, 3.0, 222.0, 18.7, 396.9, 5.33 +28.7, 0.02985, 0.0, 2.18, 0.0, 0.458, 6.43, 58.7, 6.0622, 3.0, 222.0, 18.7, 394.12, 5.21 +22.9, 0.08829, 12.5, 7.87, 0.0, 0.524, 6.012, 66.6, 5.5605, 5.0, 311.0, 15.2, 395.6, 12.43 +27.1, 0.14455, 12.5, 7.87, 0.0, 0.524, 6.172, 96.1, 5.9505, 5.0, 311.0, 15.2, 396.9, 19.15 +16.5, 0.21124, 12.5, 7.87, 0.0, 0.524, 5.631, 100.0, 6.0821, 5.0, 311.0, 15.2, 386.63, 29.93 +18.9, 0.17004, 12.5, 7.87, 0.0, 0.524, 6.004, 85.9, 6.5921, 5.0, 311.0, 15.2, 386.71, 17.1 +15.0, 0.22489, 12.5, 7.87, 0.0, 0.524, 6.377, 94.3, 6.3467, 5.0, 311.0, 15.2, 392.52, 20.45 +18.9, 0.11747, 12.5, 7.87, 0.0, 0.524, 6.009, 82.9, 6.2267, 5.0, 311.0, 15.2, 396.9, 13.27 +21.7, 0.09378, 12.5, 7.87, 0.0, 0.524, 5.889, 39.0, 5.4509, 5.0, 311.0, 15.2, 390.5, 15.71 +20.4, 0.62976, 0.0, 8.14, 0.0, 0.538, 5.949, 61.8, 4.7075, 4.0, 307.0, 21.0, 396.9, 8.26 +18.2, 0.63796, 0.0, 8.14, 0.0, 0.538, 6.096, 84.5, 4.4619, 4.0, 307.0, 21.0, 380.02, 10.26 +19.9, 0.62739, 0.0, 8.14, 0.0, 0.538, 5.834, 56.5, 4.4986, 4.0, 307.0, 21.0, 395.62, 8.47 +23.1, 1.05393, 0.0, 8.14, 0.0, 0.538, 5.935, 29.3, 4.4986, 4.0, 307.0, 21.0, 386.85, 6.58 +17.5, 0.7842, 0.0, 8.14, 0.0, 0.538, 5.99, 81.7, 4.2579, 4.0, 307.0, 21.0, 386.75, 14.67 +20.2, 0.80271, 0.0, 8.14, 0.0, 0.538, 5.456, 36.6, 3.7965, 4.0, 307.0, 21.0, 288.99, 11.69 +18.2, 0.7258, 0.0, 8.14, 0.0, 0.538, 5.727, 69.5, 3.7965, 4.0, 307.0, 21.0, 390.95, 11.28 +13.6, 1.25179, 0.0, 8.14, 0.0, 0.538, 5.57, 98.1, 3.7979, 4.0, 307.0, 21.0, 376.57, 21.02 +19.6, 0.85204, 0.0, 8.14, 0.0, 0.538, 5.965, 89.2, 4.0123, 4.0, 307.0, 21.0, 392.53, 13.83 +15.2, 1.23247, 0.0, 8.14, 0.0, 0.538, 6.142, 91.7, 3.9769, 4.0, 307.0, 21.0, 396.9, 18.72 +14.5, 0.98843, 0.0, 8.14, 0.0, 0.538, 5.813, 100.0, 4.0952, 4.0, 307.0, 21.0, 394.54, 19.88 +15.6, 0.75026, 0.0, 8.14, 0.0, 0.538, 5.924, 94.1, 4.3996, 4.0, 307.0, 21.0, 394.33, 16.3 +13.9, 0.84054, 0.0, 8.14, 0.0, 0.538, 5.599, 85.7, 4.4546, 4.0, 307.0, 21.0, 303.42, 16.51 +16.6, 0.67191, 0.0, 8.14, 0.0, 0.538, 5.813, 90.3, 4.682, 4.0, 307.0, 21.0, 376.88, 14.81 +14.8, 0.95577, 0.0, 8.14, 0.0, 0.538, 6.047, 88.8, 4.4534, 4.0, 307.0, 21.0, 306.38, 17.28 +18.4, 0.77299, 0.0, 8.14, 0.0, 0.538, 6.495, 94.4, 4.4547, 4.0, 307.0, 21.0, 387.94, 12.8 +21.0, 1.00245, 0.0, 8.14, 0.0, 0.538, 6.674, 87.3, 4.239, 4.0, 307.0, 21.0, 380.23, 11.98 +12.7, 1.13081, 0.0, 8.14, 0.0, 0.538, 5.713, 94.1, 4.233, 4.0, 307.0, 21.0, 360.17, 22.6 +14.5, 1.35472, 0.0, 8.14, 0.0, 0.538, 6.072, 100.0, 4.175, 4.0, 307.0, 21.0, 376.73, 13.04 +13.2, 1.38799, 0.0, 8.14, 0.0, 0.538, 5.95, 82.0, 3.99, 4.0, 307.0, 21.0, 232.6, 27.71 +13.1, 1.15172, 0.0, 8.14, 0.0, 0.538, 5.701, 95.0, 3.7872, 4.0, 307.0, 21.0, 
358.77, 18.35 +13.5, 1.61282, 0.0, 8.14, 0.0, 0.538, 6.096, 96.9, 3.7598, 4.0, 307.0, 21.0, 248.31, 20.34 +18.9, 0.06417, 0.0, 5.96, 0.0, 0.499, 5.933, 68.2, 3.3603, 5.0, 279.0, 19.2, 396.9, 9.68 +20.0, 0.09744, 0.0, 5.96, 0.0, 0.499, 5.841, 61.4, 3.3779, 5.0, 279.0, 19.2, 377.56, 11.41 +21.0, 0.08014, 0.0, 5.96, 0.0, 0.499, 5.85, 41.5, 3.9342, 5.0, 279.0, 19.2, 396.9, 8.77 +24.7, 0.17505, 0.0, 5.96, 0.0, 0.499, 5.966, 30.2, 3.8473, 5.0, 279.0, 19.2, 393.43, 10.13 +30.8, 0.02763, 75.0, 2.95, 0.0, 0.428, 6.595, 21.8, 5.4011, 3.0, 252.0, 18.3, 395.63, 4.32 +34.9, 0.03359, 75.0, 2.95, 0.0, 0.428, 7.024, 15.8, 5.4011, 3.0, 252.0, 18.3, 395.62, 1.98 +26.6, 0.12744, 0.0, 6.91, 0.0, 0.448, 6.77, 2.9, 5.7209, 3.0, 233.0, 17.9, 385.41, 4.84 +25.3, 0.1415, 0.0, 6.91, 0.0, 0.448, 6.169, 6.6, 5.7209, 3.0, 233.0, 17.9, 383.37, 5.81 +24.7, 0.15936, 0.0, 6.91, 0.0, 0.448, 6.211, 6.5, 5.7209, 3.0, 233.0, 17.9, 394.46, 7.44 +21.2, 0.12269, 0.0, 6.91, 0.0, 0.448, 6.069, 40.0, 5.7209, 3.0, 233.0, 17.9, 389.39, 9.55 +19.3, 0.17142, 0.0, 6.91, 0.0, 0.448, 5.682, 33.8, 5.1004, 3.0, 233.0, 17.9, 396.9, 10.21 +20.0, 0.18836, 0.0, 6.91, 0.0, 0.448, 5.786, 33.3, 5.1004, 3.0, 233.0, 17.9, 396.9, 14.15 +16.6, 0.22927, 0.0, 6.91, 0.0, 0.448, 6.03, 85.5, 5.6894, 3.0, 233.0, 17.9, 392.74, 18.8 +14.4, 0.25387, 0.0, 6.91, 0.0, 0.448, 5.399, 95.3, 5.87, 3.0, 233.0, 17.9, 396.9, 30.81 +19.4, 0.21977, 0.0, 6.91, 0.0, 0.448, 5.602, 62.0, 6.0877, 3.0, 233.0, 17.9, 396.9, 16.2 +19.7, 0.08873, 21.0, 5.64, 0.0, 0.439, 5.963, 45.7, 6.8147, 4.0, 243.0, 16.8, 395.56, 13.45 +20.5, 0.04337, 21.0, 5.64, 0.0, 0.439, 6.115, 63.0, 6.8147, 4.0, 243.0, 16.8, 393.97, 9.43 +25.0, 0.0536, 21.0, 5.64, 0.0, 0.439, 6.511, 21.1, 6.8147, 4.0, 243.0, 16.8, 396.9, 5.28 +23.4, 0.04981, 21.0, 5.64, 0.0, 0.439, 5.998, 21.4, 6.8147, 4.0, 243.0, 16.8, 396.9, 8.43 +18.9, 0.0136, 75.0, 4.0, 0.0, 0.41, 5.888, 47.6, 7.3197, 3.0, 469.0, 21.1, 396.9, 14.8 +35.4, 0.01311, 90.0, 1.22, 0.0, 0.403, 7.249, 21.9, 8.6966, 5.0, 226.0, 17.9, 395.93, 4.81 +24.7, 0.02055, 85.0, 0.74, 0.0, 0.41, 6.383, 35.7, 9.1876, 2.0, 313.0, 17.3, 396.9, 5.77 +31.6, 0.01432, 100.0, 1.32, 0.0, 0.411, 6.816, 40.5, 8.3248, 5.0, 256.0, 15.1, 392.9, 3.95 +23.3, 0.15445, 25.0, 5.13, 0.0, 0.453, 6.145, 29.2, 7.8148, 8.0, 284.0, 19.7, 390.68, 6.86 +19.6, 0.10328, 25.0, 5.13, 0.0, 0.453, 5.927, 47.2, 6.932, 8.0, 284.0, 19.7, 396.9, 9.22 +18.7, 0.14932, 25.0, 5.13, 0.0, 0.453, 5.741, 66.2, 7.2254, 8.0, 284.0, 19.7, 395.11, 13.15 +16.0, 0.17171, 25.0, 5.13, 0.0, 0.453, 5.966, 93.4, 6.8185, 8.0, 284.0, 19.7, 378.08, 14.44 +22.2, 0.11027, 25.0, 5.13, 0.0, 0.453, 6.456, 67.8, 7.2255, 8.0, 284.0, 19.7, 396.9, 6.73 +25.0, 0.1265, 25.0, 5.13, 0.0, 0.453, 6.762, 43.4, 7.9809, 8.0, 284.0, 19.7, 395.58, 9.5 +33.0, 0.01951, 17.5, 1.38, 0.0, 0.4161, 7.104, 59.5, 9.2229, 3.0, 216.0, 18.6, 393.24, 8.05 +23.5, 0.03584, 80.0, 3.37, 0.0, 0.398, 6.29, 17.8, 6.6115, 4.0, 337.0, 16.1, 396.9, 4.67 +19.4, 0.04379, 80.0, 3.37, 0.0, 0.398, 5.787, 31.1, 6.6115, 4.0, 337.0, 16.1, 396.9, 10.24 +22.0, 0.05789, 12.5, 6.07, 0.0, 0.409, 5.878, 21.4, 6.498, 4.0, 345.0, 18.9, 396.21, 8.1 +17.4, 0.13554, 12.5, 6.07, 0.0, 0.409, 5.594, 36.8, 6.498, 4.0, 345.0, 18.9, 396.9, 13.09 +20.9, 0.12816, 12.5, 6.07, 0.0, 0.409, 5.885, 33.0, 6.498, 4.0, 345.0, 18.9, 396.9, 8.79 +24.2, 0.08826, 0.0, 10.81, 0.0, 0.413, 6.417, 6.6, 5.2873, 4.0, 305.0, 19.2, 383.73, 6.72 +21.7, 0.15876, 0.0, 10.81, 0.0, 0.413, 5.961, 17.5, 5.2873, 4.0, 305.0, 19.2, 376.94, 9.88 +22.8, 0.09164, 0.0, 10.81, 0.0, 0.413, 6.065, 7.8, 5.2873, 4.0, 305.0, 19.2, 390.91, 
5.52 +23.4, 0.19539, 0.0, 10.81, 0.0, 0.413, 6.245, 6.2, 5.2873, 4.0, 305.0, 19.2, 377.17, 7.54 +24.1, 0.07896, 0.0, 12.83, 0.0, 0.437, 6.273, 6.0, 4.2515, 5.0, 398.0, 18.7, 394.92, 6.78 +21.4, 0.09512, 0.0, 12.83, 0.0, 0.437, 6.286, 45.0, 4.5026, 5.0, 398.0, 18.7, 383.23, 8.94 +20.0, 0.10153, 0.0, 12.83, 0.0, 0.437, 6.279, 74.5, 4.0522, 5.0, 398.0, 18.7, 373.66, 11.97 +20.8, 0.08707, 0.0, 12.83, 0.0, 0.437, 6.14, 45.8, 4.0905, 5.0, 398.0, 18.7, 386.96, 10.27 +21.2, 0.05646, 0.0, 12.83, 0.0, 0.437, 6.232, 53.7, 5.0141, 5.0, 398.0, 18.7, 386.4, 12.34 +20.3, 0.08387, 0.0, 12.83, 0.0, 0.437, 5.874, 36.6, 4.5026, 5.0, 398.0, 18.7, 396.06, 9.1 +28.0, 0.04113, 25.0, 4.86, 0.0, 0.426, 6.727, 33.5, 5.4007, 4.0, 281.0, 19.0, 396.9, 5.29 +23.9, 0.04462, 25.0, 4.86, 0.0, 0.426, 6.619, 70.4, 5.4007, 4.0, 281.0, 19.0, 395.63, 7.22 +24.8, 0.03659, 25.0, 4.86, 0.0, 0.426, 6.302, 32.2, 5.4007, 4.0, 281.0, 19.0, 396.9, 6.72 +22.9, 0.03551, 25.0, 4.86, 0.0, 0.426, 6.167, 46.7, 5.4007, 4.0, 281.0, 19.0, 390.64, 7.51 +23.9, 0.05059, 0.0, 4.49, 0.0, 0.449, 6.389, 48.0, 4.7794, 3.0, 247.0, 18.5, 396.9, 9.62 +26.6, 0.05735, 0.0, 4.49, 0.0, 0.449, 6.63, 56.1, 4.4377, 3.0, 247.0, 18.5, 392.3, 6.53 +22.5, 0.05188, 0.0, 4.49, 0.0, 0.449, 6.015, 45.1, 4.4272, 3.0, 247.0, 18.5, 395.99, 12.86 +22.2, 0.07151, 0.0, 4.49, 0.0, 0.449, 6.121, 56.8, 3.7476, 3.0, 247.0, 18.5, 395.15, 8.44 +23.6, 0.0566, 0.0, 3.41, 0.0, 0.489, 7.007, 86.3, 3.4217, 2.0, 270.0, 17.8, 396.9, 5.5 +28.7, 0.05302, 0.0, 3.41, 0.0, 0.489, 7.079, 63.1, 3.4145, 2.0, 270.0, 17.8, 396.06, 5.7 +22.6, 0.04684, 0.0, 3.41, 0.0, 0.489, 6.417, 66.1, 3.0923, 2.0, 270.0, 17.8, 392.18, 8.81 +22.0, 0.03932, 0.0, 3.41, 0.0, 0.489, 6.405, 73.9, 3.0921, 2.0, 270.0, 17.8, 393.55, 8.2 +22.9, 0.04203, 28.0, 15.04, 0.0, 0.464, 6.442, 53.6, 3.6659, 4.0, 270.0, 18.2, 395.01, 8.16 +25.0, 0.02875, 28.0, 15.04, 0.0, 0.464, 6.211, 28.9, 3.6659, 4.0, 270.0, 18.2, 396.33, 6.21 +20.6, 0.04294, 28.0, 15.04, 0.0, 0.464, 6.249, 77.3, 3.615, 4.0, 270.0, 18.2, 396.9, 10.59 +28.4, 0.12204, 0.0, 2.89, 0.0, 0.445, 6.625, 57.8, 3.4952, 2.0, 276.0, 18.0, 357.98, 6.65 +21.4, 0.11504, 0.0, 2.89, 0.0, 0.445, 6.163, 69.6, 3.4952, 2.0, 276.0, 18.0, 391.83, 11.34 +38.7, 0.12083, 0.0, 2.89, 0.0, 0.445, 8.069, 76.0, 3.4952, 2.0, 276.0, 18.0, 396.9, 4.21 +43.8, 0.08187, 0.0, 2.89, 0.0, 0.445, 7.82, 36.9, 3.4952, 2.0, 276.0, 18.0, 393.53, 3.57 +33.2, 0.0686, 0.0, 2.89, 0.0, 0.445, 7.416, 62.5, 3.4952, 2.0, 276.0, 18.0, 396.9, 6.19 +27.5, 0.14866, 0.0, 8.56, 0.0, 0.52, 6.727, 79.9, 2.7778, 5.0, 384.0, 20.9, 394.76, 9.42 +26.5, 0.11432, 0.0, 8.56, 0.0, 0.52, 6.781, 71.3, 2.8561, 5.0, 384.0, 20.9, 395.58, 7.67 +18.6, 0.22876, 0.0, 8.56, 0.0, 0.52, 6.405, 85.4, 2.7147, 5.0, 384.0, 20.9, 70.8, 10.63 +19.3, 0.21161, 0.0, 8.56, 0.0, 0.52, 6.137, 87.4, 2.7147, 5.0, 384.0, 20.9, 394.47, 13.44 +20.1, 0.1396, 0.0, 8.56, 0.0, 0.52, 6.167, 90.0, 2.421, 5.0, 384.0, 20.9, 392.69, 12.33 +19.5, 0.13262, 0.0, 8.56, 0.0, 0.52, 5.851, 96.7, 2.1069, 5.0, 384.0, 20.9, 394.05, 16.47 +19.5, 0.1712, 0.0, 8.56, 0.0, 0.52, 5.836, 91.9, 2.211, 5.0, 384.0, 20.9, 395.67, 18.66 +20.4, 0.13117, 0.0, 8.56, 0.0, 0.52, 6.127, 85.2, 2.1224, 5.0, 384.0, 20.9, 387.69, 14.09 +19.8, 0.12802, 0.0, 8.56, 0.0, 0.52, 6.474, 97.1, 2.4329, 5.0, 384.0, 20.9, 395.24, 12.27 +19.4, 0.26363, 0.0, 8.56, 0.0, 0.52, 6.229, 91.2, 2.5451, 5.0, 384.0, 20.9, 391.23, 15.55 +21.7, 0.10793, 0.0, 8.56, 0.0, 0.52, 6.195, 54.4, 2.7778, 5.0, 384.0, 20.9, 393.49, 13.0 +22.8, 0.10084, 0.0, 10.01, 0.0, 0.547, 6.715, 81.6, 2.6775, 6.0, 432.0, 17.8, 395.59, 10.16 
+18.8, 0.12329, 0.0, 10.01, 0.0, 0.547, 5.913, 92.9, 2.3534, 6.0, 432.0, 17.8, 394.95, 16.21 +18.7, 0.22212, 0.0, 10.01, 0.0, 0.547, 6.092, 95.4, 2.548, 6.0, 432.0, 17.8, 396.9, 17.09 +18.5, 0.14231, 0.0, 10.01, 0.0, 0.547, 6.254, 84.2, 2.2565, 6.0, 432.0, 17.8, 388.74, 10.45 +18.3, 0.17134, 0.0, 10.01, 0.0, 0.547, 5.928, 88.2, 2.4631, 6.0, 432.0, 17.8, 344.91, 15.76 +21.2, 0.13158, 0.0, 10.01, 0.0, 0.547, 6.176, 72.5, 2.7301, 6.0, 432.0, 17.8, 393.3, 12.04 +19.2, 0.15098, 0.0, 10.01, 0.0, 0.547, 6.021, 82.6, 2.7474, 6.0, 432.0, 17.8, 394.51, 10.3 +20.4, 0.13058, 0.0, 10.01, 0.0, 0.547, 5.872, 73.1, 2.4775, 6.0, 432.0, 17.8, 338.63, 15.37 +19.3, 0.14476, 0.0, 10.01, 0.0, 0.547, 5.731, 65.2, 2.7592, 6.0, 432.0, 17.8, 391.5, 13.61 +22.0, 0.06899, 0.0, 25.65, 0.0, 0.581, 5.87, 69.7, 2.2577, 2.0, 188.0, 19.1, 389.15, 14.37 +20.3, 0.07165, 0.0, 25.65, 0.0, 0.581, 6.004, 84.1, 2.1974, 2.0, 188.0, 19.1, 377.67, 14.27 +20.5, 0.09299, 0.0, 25.65, 0.0, 0.581, 5.961, 92.9, 2.0869, 2.0, 188.0, 19.1, 378.09, 17.93 +17.3, 0.15038, 0.0, 25.65, 0.0, 0.581, 5.856, 97.0, 1.9444, 2.0, 188.0, 19.1, 370.31, 25.41 +18.8, 0.09849, 0.0, 25.65, 0.0, 0.581, 5.879, 95.8, 2.0063, 2.0, 188.0, 19.1, 379.38, 17.58 +21.4, 0.16902, 0.0, 25.65, 0.0, 0.581, 5.986, 88.4, 1.9929, 2.0, 188.0, 19.1, 385.02, 14.81 +15.7, 0.38735, 0.0, 25.65, 0.0, 0.581, 5.613, 95.6, 1.7572, 2.0, 188.0, 19.1, 359.29, 27.26 +16.2, 0.25915, 0.0, 21.89, 0.0, 0.624, 5.693, 96.0, 1.7883, 4.0, 437.0, 21.2, 392.11, 17.19 +18.0, 0.32543, 0.0, 21.89, 0.0, 0.624, 6.431, 98.8, 1.8125, 4.0, 437.0, 21.2, 396.9, 15.39 +14.3, 0.88125, 0.0, 21.89, 0.0, 0.624, 5.637, 94.7, 1.9799, 4.0, 437.0, 21.2, 396.9, 18.34 +19.2, 0.34006, 0.0, 21.89, 0.0, 0.624, 6.458, 98.9, 2.1185, 4.0, 437.0, 21.2, 395.04, 12.6 +19.6, 1.19294, 0.0, 21.89, 0.0, 0.624, 6.326, 97.7, 2.271, 4.0, 437.0, 21.2, 396.9, 12.26 +23.0, 0.59005, 0.0, 21.89, 0.0, 0.624, 6.372, 97.9, 2.3274, 4.0, 437.0, 21.2, 385.76, 11.12 +18.4, 0.32982, 0.0, 21.89, 0.0, 0.624, 5.822, 95.4, 2.4699, 4.0, 437.0, 21.2, 388.69, 15.03 +15.6, 0.97617, 0.0, 21.89, 0.0, 0.624, 5.757, 98.4, 2.346, 4.0, 437.0, 21.2, 262.76, 17.31 +18.1, 0.55778, 0.0, 21.89, 0.0, 0.624, 6.335, 98.2, 2.1107, 4.0, 437.0, 21.2, 394.67, 16.96 +17.4, 0.32264, 0.0, 21.89, 0.0, 0.624, 5.942, 93.5, 1.9669, 4.0, 437.0, 21.2, 378.25, 16.9 +17.1, 0.35233, 0.0, 21.89, 0.0, 0.624, 6.454, 98.4, 1.8498, 4.0, 437.0, 21.2, 394.08, 14.59 +13.3, 0.2498, 0.0, 21.89, 0.0, 0.624, 5.857, 98.2, 1.6686, 4.0, 437.0, 21.2, 392.04, 21.32 +17.8, 0.54452, 0.0, 21.89, 0.0, 0.624, 6.151, 97.9, 1.6687, 4.0, 437.0, 21.2, 396.9, 18.46 +14.0, 0.2909, 0.0, 21.89, 0.0, 0.624, 6.174, 93.6, 1.6119, 4.0, 437.0, 21.2, 388.08, 24.16 +14.4, 1.62864, 0.0, 21.89, 0.0, 0.624, 5.019, 100.0, 1.4394, 4.0, 437.0, 21.2, 396.9, 34.41 +13.4, 3.32105, 0.0, 19.58, 1.0, 0.871, 5.403, 100.0, 1.3216, 5.0, 403.0, 14.7, 396.9, 26.82 +15.6, 4.0974, 0.0, 19.58, 0.0, 0.871, 5.468, 100.0, 1.4118, 5.0, 403.0, 14.7, 396.9, 26.42 +11.8, 2.77974, 0.0, 19.58, 0.0, 0.871, 4.903, 97.8, 1.3459, 5.0, 403.0, 14.7, 396.9, 29.29 +13.8, 2.37934, 0.0, 19.58, 0.0, 0.871, 6.13, 100.0, 1.4191, 5.0, 403.0, 14.7, 172.91, 27.8 +15.6, 2.15505, 0.0, 19.58, 0.0, 0.871, 5.628, 100.0, 1.5166, 5.0, 403.0, 14.7, 169.27, 16.65 +14.6, 2.36862, 0.0, 19.58, 0.0, 0.871, 4.926, 95.7, 1.4608, 5.0, 403.0, 14.7, 391.71, 29.53 +17.8, 2.33099, 0.0, 19.58, 0.0, 0.871, 5.186, 93.8, 1.5296, 5.0, 403.0, 14.7, 356.99, 28.32 +15.4, 2.73397, 0.0, 19.58, 0.0, 0.871, 5.597, 94.9, 1.5257, 5.0, 403.0, 14.7, 351.85, 21.45 +21.5, 1.6566, 0.0, 19.58, 0.0, 0.871, 
6.122, 97.3, 1.618, 5.0, 403.0, 14.7, 372.8, 14.1 +19.6, 1.49632, 0.0, 19.58, 0.0, 0.871, 5.404, 100.0, 1.5916, 5.0, 403.0, 14.7, 341.6, 13.28 +15.3, 1.12658, 0.0, 19.58, 1.0, 0.871, 5.012, 88.0, 1.6102, 5.0, 403.0, 14.7, 343.28, 12.12 +19.4, 2.14918, 0.0, 19.58, 0.0, 0.871, 5.709, 98.5, 1.6232, 5.0, 403.0, 14.7, 261.95, 15.79 +17.0, 1.41385, 0.0, 19.58, 1.0, 0.871, 6.129, 96.0, 1.7494, 5.0, 403.0, 14.7, 321.02, 15.12 +15.6, 3.53501, 0.0, 19.58, 1.0, 0.871, 6.152, 82.6, 1.7455, 5.0, 403.0, 14.7, 88.01, 15.02 +13.1, 2.44668, 0.0, 19.58, 0.0, 0.871, 5.272, 94.0, 1.7364, 5.0, 403.0, 14.7, 88.63, 16.14 +41.3, 1.22358, 0.0, 19.58, 0.0, 0.605, 6.943, 97.4, 1.8773, 5.0, 403.0, 14.7, 363.43, 4.59 +24.3, 1.34284, 0.0, 19.58, 0.0, 0.605, 6.066, 100.0, 1.7573, 5.0, 403.0, 14.7, 353.89, 6.43 +23.3, 1.42502, 0.0, 19.58, 0.0, 0.871, 6.51, 100.0, 1.7659, 5.0, 403.0, 14.7, 364.31, 7.39 +27.0, 1.27346, 0.0, 19.58, 1.0, 0.605, 6.25, 92.6, 1.7984, 5.0, 403.0, 14.7, 338.92, 5.5 +50.0, 1.46336, 0.0, 19.58, 0.0, 0.605, 7.489, 90.8, 1.9709, 5.0, 403.0, 14.7, 374.43, 1.73 +50.0, 1.83377, 0.0, 19.58, 1.0, 0.605, 7.802, 98.2, 2.0407, 5.0, 403.0, 14.7, 389.61, 1.92 +50.0, 1.51902, 0.0, 19.58, 1.0, 0.605, 8.375, 93.9, 2.162, 5.0, 403.0, 14.7, 388.45, 3.32 +22.7, 2.24236, 0.0, 19.58, 0.0, 0.605, 5.854, 91.8, 2.422, 5.0, 403.0, 14.7, 395.11, 11.64 +25.0, 2.924, 0.0, 19.58, 0.0, 0.605, 6.101, 93.0, 2.2834, 5.0, 403.0, 14.7, 240.16, 9.81 +50.0, 2.01019, 0.0, 19.58, 0.0, 0.605, 7.929, 96.2, 2.0459, 5.0, 403.0, 14.7, 369.3, 3.7 +23.8, 1.80028, 0.0, 19.58, 0.0, 0.605, 5.877, 79.2, 2.4259, 5.0, 403.0, 14.7, 227.61, 12.14 +23.8, 2.3004, 0.0, 19.58, 0.0, 0.605, 6.319, 96.1, 2.1, 5.0, 403.0, 14.7, 297.09, 11.1 +22.3, 2.44953, 0.0, 19.58, 0.0, 0.605, 6.402, 95.2, 2.2625, 5.0, 403.0, 14.7, 330.04, 11.32 +17.4, 1.20742, 0.0, 19.58, 0.0, 0.605, 5.875, 94.6, 2.4259, 5.0, 403.0, 14.7, 292.29, 14.43 +19.1, 2.3139, 0.0, 19.58, 0.0, 0.605, 5.88, 97.3, 2.3887, 5.0, 403.0, 14.7, 348.13, 12.03 +23.1, 0.13914, 0.0, 4.05, 0.0, 0.51, 5.572, 88.5, 2.5961, 5.0, 296.0, 16.6, 396.9, 14.69 +23.6, 0.09178, 0.0, 4.05, 0.0, 0.51, 6.416, 84.1, 2.6463, 5.0, 296.0, 16.6, 395.5, 9.04 +22.6, 0.08447, 0.0, 4.05, 0.0, 0.51, 5.859, 68.7, 2.7019, 5.0, 296.0, 16.6, 393.23, 9.64 +29.4, 0.06664, 0.0, 4.05, 0.0, 0.51, 6.546, 33.1, 3.1323, 5.0, 296.0, 16.6, 390.96, 5.33 +23.2, 0.07022, 0.0, 4.05, 0.0, 0.51, 6.02, 47.2, 3.5549, 5.0, 296.0, 16.6, 393.23, 10.11 +24.6, 0.05425, 0.0, 4.05, 0.0, 0.51, 6.315, 73.4, 3.3175, 5.0, 296.0, 16.6, 395.6, 6.29 +29.9, 0.06642, 0.0, 4.05, 0.0, 0.51, 6.86, 74.4, 2.9153, 5.0, 296.0, 16.6, 391.27, 6.92 +37.2, 0.0578, 0.0, 2.46, 0.0, 0.488, 6.98, 58.4, 2.829, 3.0, 193.0, 17.8, 396.9, 5.04 +39.8, 0.06588, 0.0, 2.46, 0.0, 0.488, 7.765, 83.3, 2.741, 3.0, 193.0, 17.8, 395.56, 7.56 +36.2, 0.06888, 0.0, 2.46, 0.0, 0.488, 6.144, 62.2, 2.5979, 3.0, 193.0, 17.8, 396.9, 9.45 +37.9, 0.09103, 0.0, 2.46, 0.0, 0.488, 7.155, 92.2, 2.7006, 3.0, 193.0, 17.8, 394.12, 4.82 +32.5, 0.10008, 0.0, 2.46, 0.0, 0.488, 6.563, 95.6, 2.847, 3.0, 193.0, 17.8, 396.9, 5.68 +26.4, 0.08308, 0.0, 2.46, 0.0, 0.488, 5.604, 89.8, 2.9879, 3.0, 193.0, 17.8, 391.0, 13.98 +29.6, 0.06047, 0.0, 2.46, 0.0, 0.488, 6.153, 68.8, 3.2797, 3.0, 193.0, 17.8, 387.11, 13.15 +50.0, 0.05602, 0.0, 2.46, 0.0, 0.488, 7.831, 53.6, 3.1992, 3.0, 193.0, 17.8, 392.63, 4.45 +32.0, 0.07875, 45.0, 3.44, 0.0, 0.437, 6.782, 41.1, 3.7886, 5.0, 398.0, 15.2, 393.87, 6.68 +29.8, 0.12579, 45.0, 3.44, 0.0, 0.437, 6.556, 29.1, 4.5667, 5.0, 398.0, 15.2, 382.84, 4.56 +34.9, 0.0837, 45.0, 3.44, 0.0, 0.437, 
7.185, 38.9, 4.5667, 5.0, 398.0, 15.2, 396.9, 5.39 +37.0, 0.09068, 45.0, 3.44, 0.0, 0.437, 6.951, 21.5, 6.4798, 5.0, 398.0, 15.2, 377.68, 5.1 +30.5, 0.06911, 45.0, 3.44, 0.0, 0.437, 6.739, 30.8, 6.4798, 5.0, 398.0, 15.2, 389.71, 4.69 +36.4, 0.08664, 45.0, 3.44, 0.0, 0.437, 7.178, 26.3, 6.4798, 5.0, 398.0, 15.2, 390.49, 2.87 +31.1, 0.02187, 60.0, 2.93, 0.0, 0.401, 6.8, 9.9, 6.2196, 1.0, 265.0, 15.6, 393.37, 5.03 +29.1, 0.01439, 60.0, 2.93, 0.0, 0.401, 6.604, 18.8, 6.2196, 1.0, 265.0, 15.6, 376.7, 4.38 +50.0, 0.01381, 80.0, 0.46, 0.0, 0.422, 7.875, 32.0, 5.6484, 4.0, 255.0, 14.4, 394.23, 2.97 +33.3, 0.04011, 80.0, 1.52, 0.0, 0.404, 7.287, 34.1, 7.309, 2.0, 329.0, 12.6, 396.9, 4.08 +30.3, 0.04666, 80.0, 1.52, 0.0, 0.404, 7.107, 36.6, 7.309, 2.0, 329.0, 12.6, 354.31, 8.61 +34.6, 0.03768, 80.0, 1.52, 0.0, 0.404, 7.274, 38.3, 7.309, 2.0, 329.0, 12.6, 392.2, 6.62 +34.9, 0.0315, 95.0, 1.47, 0.0, 0.403, 6.975, 15.3, 7.6534, 3.0, 402.0, 17.0, 396.9, 4.56 +32.9, 0.01778, 95.0, 1.47, 0.0, 0.403, 7.135, 13.9, 7.6534, 3.0, 402.0, 17.0, 384.3, 4.45 +24.1, 0.03445, 82.5, 2.03, 0.0, 0.415, 6.162, 38.4, 6.27, 2.0, 348.0, 14.7, 393.77, 7.43 +42.3, 0.02177, 82.5, 2.03, 0.0, 0.415, 7.61, 15.7, 6.27, 2.0, 348.0, 14.7, 395.38, 3.11 +48.5, 0.0351, 95.0, 2.68, 0.0, 0.4161, 7.853, 33.2, 5.118, 4.0, 224.0, 14.7, 392.78, 3.81 +50.0, 0.02009, 95.0, 2.68, 0.0, 0.4161, 8.034, 31.9, 5.118, 4.0, 224.0, 14.7, 390.55, 2.88 +22.6, 0.13642, 0.0, 10.59, 0.0, 0.489, 5.891, 22.3, 3.9454, 4.0, 277.0, 18.6, 396.9, 10.87 +24.4, 0.22969, 0.0, 10.59, 0.0, 0.489, 6.326, 52.5, 4.3549, 4.0, 277.0, 18.6, 394.87, 10.97 +22.5, 0.25199, 0.0, 10.59, 0.0, 0.489, 5.783, 72.7, 4.3549, 4.0, 277.0, 18.6, 389.43, 18.06 +24.4, 0.13587, 0.0, 10.59, 1.0, 0.489, 6.064, 59.1, 4.2392, 4.0, 277.0, 18.6, 381.32, 14.66 +20.0, 0.43571, 0.0, 10.59, 1.0, 0.489, 5.344, 100.0, 3.875, 4.0, 277.0, 18.6, 396.9, 23.09 +21.7, 0.17446, 0.0, 10.59, 1.0, 0.489, 5.96, 92.1, 3.8771, 4.0, 277.0, 18.6, 393.25, 17.27 +19.3, 0.37578, 0.0, 10.59, 1.0, 0.489, 5.404, 88.6, 3.665, 4.0, 277.0, 18.6, 395.24, 23.98 +22.4, 0.21719, 0.0, 10.59, 1.0, 0.489, 5.807, 53.8, 3.6526, 4.0, 277.0, 18.6, 390.94, 16.03 +28.1, 0.14052, 0.0, 10.59, 0.0, 0.489, 6.375, 32.3, 3.9454, 4.0, 277.0, 18.6, 385.81, 9.38 +23.7, 0.28955, 0.0, 10.59, 0.0, 0.489, 5.412, 9.8, 3.5875, 4.0, 277.0, 18.6, 348.93, 29.55 +25.0, 0.19802, 0.0, 10.59, 0.0, 0.489, 6.182, 42.4, 3.9454, 4.0, 277.0, 18.6, 393.63, 9.47 +23.3, 0.0456, 0.0, 13.89, 1.0, 0.55, 5.888, 56.0, 3.1121, 5.0, 276.0, 16.4, 392.8, 13.51 +28.7, 0.07013, 0.0, 13.89, 0.0, 0.55, 6.642, 85.1, 3.4211, 5.0, 276.0, 16.4, 392.78, 9.69 +21.5, 0.11069, 0.0, 13.89, 1.0, 0.55, 5.951, 93.8, 2.8893, 5.0, 276.0, 16.4, 396.9, 17.92 +23.0, 0.11425, 0.0, 13.89, 1.0, 0.55, 6.373, 92.4, 3.3633, 5.0, 276.0, 16.4, 393.74, 10.5 +26.7, 0.35809, 0.0, 6.2, 1.0, 0.507, 6.951, 88.5, 2.8617, 8.0, 307.0, 17.4, 391.7, 9.71 +21.7, 0.40771, 0.0, 6.2, 1.0, 0.507, 6.164, 91.3, 3.048, 8.0, 307.0, 17.4, 395.24, 21.46 +27.5, 0.62356, 0.0, 6.2, 1.0, 0.507, 6.879, 77.7, 3.2721, 8.0, 307.0, 17.4, 390.39, 9.93 +30.1, 0.6147, 0.0, 6.2, 0.0, 0.507, 6.618, 80.8, 3.2721, 8.0, 307.0, 17.4, 396.9, 7.6 +44.8, 0.31533, 0.0, 6.2, 0.0, 0.504, 8.266, 78.3, 2.8944, 8.0, 307.0, 17.4, 385.05, 4.14 +50.0, 0.52693, 0.0, 6.2, 0.0, 0.504, 8.725, 83.0, 2.8944, 8.0, 307.0, 17.4, 382.0, 4.63 +37.6, 0.38214, 0.0, 6.2, 0.0, 0.504, 8.04, 86.5, 3.2157, 8.0, 307.0, 17.4, 387.38, 3.13 +31.6, 0.41238, 0.0, 6.2, 0.0, 0.504, 7.163, 79.9, 3.2157, 8.0, 307.0, 17.4, 372.08, 6.36 +46.7, 0.29819, 0.0, 6.2, 0.0, 0.504, 7.686, 
17.0, 3.3751, 8.0, 307.0, 17.4, 377.51, 3.92 +31.5, 0.44178, 0.0, 6.2, 0.0, 0.504, 6.552, 21.4, 3.3751, 8.0, 307.0, 17.4, 380.34, 3.76 +24.3, 0.537, 0.0, 6.2, 0.0, 0.504, 5.981, 68.1, 3.6715, 8.0, 307.0, 17.4, 378.35, 11.65 +31.7, 0.46296, 0.0, 6.2, 0.0, 0.504, 7.412, 76.9, 3.6715, 8.0, 307.0, 17.4, 376.14, 5.25 +41.7, 0.57529, 0.0, 6.2, 0.0, 0.507, 8.337, 73.3, 3.8384, 8.0, 307.0, 17.4, 385.91, 2.47 +48.3, 0.33147, 0.0, 6.2, 0.0, 0.507, 8.247, 70.4, 3.6519, 8.0, 307.0, 17.4, 378.95, 3.95 +29.0, 0.44791, 0.0, 6.2, 1.0, 0.507, 6.726, 66.5, 3.6519, 8.0, 307.0, 17.4, 360.2, 8.05 +24.0, 0.33045, 0.0, 6.2, 0.0, 0.507, 6.086, 61.5, 3.6519, 8.0, 307.0, 17.4, 376.75, 10.88 +25.1, 0.52058, 0.0, 6.2, 1.0, 0.507, 6.631, 76.5, 4.148, 8.0, 307.0, 17.4, 388.45, 9.54 +31.5, 0.51183, 0.0, 6.2, 0.0, 0.507, 7.358, 71.6, 4.148, 8.0, 307.0, 17.4, 390.07, 4.73 +23.7, 0.08244, 30.0, 4.93, 0.0, 0.428, 6.481, 18.5, 6.1899, 6.0, 300.0, 16.6, 379.41, 6.36 +23.3, 0.09252, 30.0, 4.93, 0.0, 0.428, 6.606, 42.2, 6.1899, 6.0, 300.0, 16.6, 383.78, 7.37 +22.0, 0.11329, 30.0, 4.93, 0.0, 0.428, 6.897, 54.3, 6.3361, 6.0, 300.0, 16.6, 391.25, 11.38 +20.1, 0.10612, 30.0, 4.93, 0.0, 0.428, 6.095, 65.1, 6.3361, 6.0, 300.0, 16.6, 394.62, 12.4 +22.2, 0.1029, 30.0, 4.93, 0.0, 0.428, 6.358, 52.9, 7.0355, 6.0, 300.0, 16.6, 372.75, 11.22 +23.7, 0.12757, 30.0, 4.93, 0.0, 0.428, 6.393, 7.8, 7.0355, 6.0, 300.0, 16.6, 374.71, 5.19 +17.6, 0.20608, 22.0, 5.86, 0.0, 0.431, 5.593, 76.5, 7.9549, 7.0, 330.0, 19.1, 372.49, 12.5 +18.5, 0.19133, 22.0, 5.86, 0.0, 0.431, 5.605, 70.2, 7.9549, 7.0, 330.0, 19.1, 389.13, 18.46 +24.3, 0.33983, 22.0, 5.86, 0.0, 0.431, 6.108, 34.9, 8.0555, 7.0, 330.0, 19.1, 390.18, 9.16 +20.5, 0.19657, 22.0, 5.86, 0.0, 0.431, 6.226, 79.2, 8.0555, 7.0, 330.0, 19.1, 376.14, 10.15 +24.5, 0.16439, 22.0, 5.86, 0.0, 0.431, 6.433, 49.1, 7.8265, 7.0, 330.0, 19.1, 374.71, 9.52 +26.2, 0.19073, 22.0, 5.86, 0.0, 0.431, 6.718, 17.5, 7.8265, 7.0, 330.0, 19.1, 393.74, 6.56 +24.4, 0.1403, 22.0, 5.86, 0.0, 0.431, 6.487, 13.0, 7.3967, 7.0, 330.0, 19.1, 396.28, 5.9 +24.8, 0.21409, 22.0, 5.86, 0.0, 0.431, 6.438, 8.9, 7.3967, 7.0, 330.0, 19.1, 377.07, 3.59 +29.6, 0.08221, 22.0, 5.86, 0.0, 0.431, 6.957, 6.8, 8.9067, 7.0, 330.0, 19.1, 386.09, 3.53 +42.8, 0.36894, 22.0, 5.86, 0.0, 0.431, 8.259, 8.4, 8.9067, 7.0, 330.0, 19.1, 396.9, 3.54 +21.9, 0.04819, 80.0, 3.64, 0.0, 0.392, 6.108, 32.0, 9.2203, 1.0, 315.0, 16.4, 392.89, 6.57 +20.9, 0.03548, 80.0, 3.64, 0.0, 0.392, 5.876, 19.1, 9.2203, 1.0, 315.0, 16.4, 395.18, 9.25 +44.0, 0.01538, 90.0, 3.75, 0.0, 0.394, 7.454, 34.2, 6.3361, 3.0, 244.0, 15.9, 386.34, 3.11 +50.0, 0.61154, 20.0, 3.97, 0.0, 0.647, 8.704, 86.9, 1.801, 5.0, 264.0, 13.0, 389.7, 5.12 +36.0, 0.66351, 20.0, 3.97, 0.0, 0.647, 7.333, 100.0, 1.8946, 5.0, 264.0, 13.0, 383.29, 7.79 +30.1, 0.65665, 20.0, 3.97, 0.0, 0.647, 6.842, 100.0, 2.0107, 5.0, 264.0, 13.0, 391.93, 6.9 +33.8, 0.54011, 20.0, 3.97, 0.0, 0.647, 7.203, 81.8, 2.1121, 5.0, 264.0, 13.0, 392.8, 9.59 +43.1, 0.53412, 20.0, 3.97, 0.0, 0.647, 7.52, 89.4, 2.1398, 5.0, 264.0, 13.0, 388.37, 7.26 +48.8, 0.52014, 20.0, 3.97, 0.0, 0.647, 8.398, 91.5, 2.2885, 5.0, 264.0, 13.0, 386.86, 5.91 +31.0, 0.82526, 20.0, 3.97, 0.0, 0.647, 7.327, 94.5, 2.0788, 5.0, 264.0, 13.0, 393.42, 11.25 +36.5, 0.55007, 20.0, 3.97, 0.0, 0.647, 7.206, 91.6, 1.9301, 5.0, 264.0, 13.0, 387.89, 8.1 +22.8, 0.76162, 20.0, 3.97, 0.0, 0.647, 5.56, 62.8, 1.9865, 5.0, 264.0, 13.0, 392.4, 10.45 +30.7, 0.7857, 20.0, 3.97, 0.0, 0.647, 7.014, 84.6, 2.1329, 5.0, 264.0, 13.0, 384.07, 14.79 +50.0, 0.57834, 20.0, 3.97, 0.0, 0.575, 
8.297, 67.0, 2.4216, 5.0, 264.0, 13.0, 384.54, 7.44 +43.5, 0.5405, 20.0, 3.97, 0.0, 0.575, 7.47, 52.6, 2.872, 5.0, 264.0, 13.0, 390.3, 3.16 +20.7, 0.09065, 20.0, 6.96, 1.0, 0.464, 5.92, 61.5, 3.9175, 3.0, 223.0, 18.6, 391.34, 13.65 +21.1, 0.29916, 20.0, 6.96, 0.0, 0.464, 5.856, 42.1, 4.429, 3.0, 223.0, 18.6, 388.65, 13.0 +25.2, 0.16211, 20.0, 6.96, 0.0, 0.464, 6.24, 16.3, 4.429, 3.0, 223.0, 18.6, 396.9, 6.59 +24.4, 0.1146, 20.0, 6.96, 0.0, 0.464, 6.538, 58.7, 3.9175, 3.0, 223.0, 18.6, 394.96, 7.73 +35.2, 0.22188, 20.0, 6.96, 1.0, 0.464, 7.691, 51.8, 4.3665, 3.0, 223.0, 18.6, 390.77, 6.58 +32.4, 0.05644, 40.0, 6.41, 1.0, 0.447, 6.758, 32.9, 4.0776, 4.0, 254.0, 17.6, 396.9, 3.53 +32.0, 0.09604, 40.0, 6.41, 0.0, 0.447, 6.854, 42.8, 4.2673, 4.0, 254.0, 17.6, 396.9, 2.98 +33.2, 0.10469, 40.0, 6.41, 1.0, 0.447, 7.267, 49.0, 4.7872, 4.0, 254.0, 17.6, 389.25, 6.05 +33.1, 0.06127, 40.0, 6.41, 1.0, 0.447, 6.826, 27.6, 4.8628, 4.0, 254.0, 17.6, 393.45, 4.16 +29.1, 0.07978, 40.0, 6.41, 0.0, 0.447, 6.482, 32.1, 4.1403, 4.0, 254.0, 17.6, 396.9, 7.19 +35.1, 0.21038, 20.0, 3.33, 0.0, 0.4429, 6.812, 32.2, 4.1007, 5.0, 216.0, 14.9, 396.9, 4.85 +45.4, 0.03578, 20.0, 3.33, 0.0, 0.4429, 7.82, 64.5, 4.6947, 5.0, 216.0, 14.9, 387.31, 3.76 +35.4, 0.03705, 20.0, 3.33, 0.0, 0.4429, 6.968, 37.2, 5.2447, 5.0, 216.0, 14.9, 392.23, 4.59 +46.0, 0.06129, 20.0, 3.33, 1.0, 0.4429, 7.645, 49.7, 5.2119, 5.0, 216.0, 14.9, 377.07, 3.01 +50.0, 0.01501, 90.0, 1.21, 1.0, 0.401, 7.923, 24.8, 5.885, 1.0, 198.0, 13.6, 395.52, 3.16 +32.2, 0.00906, 90.0, 2.97, 0.0, 0.4, 7.088, 20.8, 7.3073, 1.0, 285.0, 15.3, 394.72, 7.85 +22.0, 0.01096, 55.0, 2.25, 0.0, 0.389, 6.453, 31.9, 7.3073, 1.0, 300.0, 15.3, 394.72, 8.23 +20.1, 0.01965, 80.0, 1.76, 0.0, 0.385, 6.23, 31.5, 9.0892, 1.0, 241.0, 18.2, 341.6, 12.93 +23.2, 0.03871, 52.5, 5.32, 0.0, 0.405, 6.209, 31.3, 7.3172, 6.0, 293.0, 16.6, 396.9, 7.14 +22.3, 0.0459, 52.5, 5.32, 0.0, 0.405, 6.315, 45.6, 7.3172, 6.0, 293.0, 16.6, 396.9, 7.6 +24.8, 0.04297, 52.5, 5.32, 0.0, 0.405, 6.565, 22.9, 7.3172, 6.0, 293.0, 16.6, 371.72, 9.51 +28.5, 0.03502, 80.0, 4.95, 0.0, 0.411, 6.861, 27.9, 5.1167, 4.0, 245.0, 19.2, 396.9, 3.33 +37.3, 0.07886, 80.0, 4.95, 0.0, 0.411, 7.148, 27.7, 5.1167, 4.0, 245.0, 19.2, 396.9, 3.56 +27.9, 0.03615, 80.0, 4.95, 0.0, 0.411, 6.63, 23.4, 5.1167, 4.0, 245.0, 19.2, 396.9, 4.7 +23.9, 0.08265, 0.0, 13.92, 0.0, 0.437, 6.127, 18.4, 5.5027, 4.0, 289.0, 16.0, 396.9, 8.58 +21.7, 0.08199, 0.0, 13.92, 0.0, 0.437, 6.009, 42.3, 5.5027, 4.0, 289.0, 16.0, 396.9, 10.4 +28.6, 0.12932, 0.0, 13.92, 0.0, 0.437, 6.678, 31.1, 5.9604, 4.0, 289.0, 16.0, 396.9, 6.27 +27.1, 0.05372, 0.0, 13.92, 0.0, 0.437, 6.549, 51.0, 5.9604, 4.0, 289.0, 16.0, 392.85, 7.39 +20.3, 0.14103, 0.0, 13.92, 0.0, 0.437, 5.79, 58.0, 6.32, 4.0, 289.0, 16.0, 396.9, 15.84 +22.5, 0.06466, 70.0, 2.24, 0.0, 0.4, 6.345, 20.1, 7.8278, 5.0, 358.0, 14.8, 368.24, 4.97 +29.0, 0.05561, 70.0, 2.24, 0.0, 0.4, 7.041, 10.0, 7.8278, 5.0, 358.0, 14.8, 371.58, 4.74 +24.8, 0.04417, 70.0, 2.24, 0.0, 0.4, 6.871, 47.4, 7.8278, 5.0, 358.0, 14.8, 390.86, 6.07 +22.0, 0.03537, 34.0, 6.09, 0.0, 0.433, 6.59, 40.4, 5.4917, 7.0, 329.0, 16.1, 395.75, 9.5 +26.4, 0.09266, 34.0, 6.09, 0.0, 0.433, 6.495, 18.4, 5.4917, 7.0, 329.0, 16.1, 383.61, 8.67 +33.1, 0.1, 34.0, 6.09, 0.0, 0.433, 6.982, 17.7, 5.4917, 7.0, 329.0, 16.1, 390.43, 4.86 +36.1, 0.05515, 33.0, 2.18, 0.0, 0.472, 7.236, 41.1, 4.022, 7.0, 222.0, 18.4, 393.68, 6.93 +28.4, 0.05479, 33.0, 2.18, 0.0, 0.472, 6.616, 58.1, 3.37, 7.0, 222.0, 18.4, 393.36, 8.93 +33.4, 0.07503, 33.0, 2.18, 0.0, 0.472, 7.42, 
71.9, 3.0992, 7.0, 222.0, 18.4, 396.9, 6.47 +28.2, 0.04932, 33.0, 2.18, 0.0, 0.472, 6.849, 70.3, 3.1827, 7.0, 222.0, 18.4, 396.9, 7.53 +22.8, 0.49298, 0.0, 9.9, 0.0, 0.544, 6.635, 82.5, 3.3175, 4.0, 304.0, 18.4, 396.9, 4.54 +20.3, 0.3494, 0.0, 9.9, 0.0, 0.544, 5.972, 76.7, 3.1025, 4.0, 304.0, 18.4, 396.24, 9.97 +16.1, 2.63548, 0.0, 9.9, 0.0, 0.544, 4.973, 37.8, 2.5194, 4.0, 304.0, 18.4, 350.45, 12.64 +22.1, 0.79041, 0.0, 9.9, 0.0, 0.544, 6.122, 52.8, 2.6403, 4.0, 304.0, 18.4, 396.9, 5.98 +19.4, 0.26169, 0.0, 9.9, 0.0, 0.544, 6.023, 90.4, 2.834, 4.0, 304.0, 18.4, 396.3, 11.72 +21.6, 0.26938, 0.0, 9.9, 0.0, 0.544, 6.266, 82.8, 3.2628, 4.0, 304.0, 18.4, 393.39, 7.9 +23.8, 0.3692, 0.0, 9.9, 0.0, 0.544, 6.567, 87.3, 3.6023, 4.0, 304.0, 18.4, 395.69, 9.28 +16.2, 0.25356, 0.0, 9.9, 0.0, 0.544, 5.705, 77.7, 3.945, 4.0, 304.0, 18.4, 396.42, 11.5 +17.8, 0.31827, 0.0, 9.9, 0.0, 0.544, 5.914, 83.2, 3.9986, 4.0, 304.0, 18.4, 390.7, 18.33 +19.8, 0.24522, 0.0, 9.9, 0.0, 0.544, 5.782, 71.7, 4.0317, 4.0, 304.0, 18.4, 396.9, 15.94 +23.1, 0.40202, 0.0, 9.9, 0.0, 0.544, 6.382, 67.2, 3.5325, 4.0, 304.0, 18.4, 395.21, 10.36 +21.0, 0.47547, 0.0, 9.9, 0.0, 0.544, 6.113, 58.8, 4.0019, 4.0, 304.0, 18.4, 396.23, 12.73 +23.8, 0.1676, 0.0, 7.38, 0.0, 0.493, 6.426, 52.3, 4.5404, 5.0, 287.0, 19.6, 396.9, 7.2 +23.1, 0.18159, 0.0, 7.38, 0.0, 0.493, 6.376, 54.3, 4.5404, 5.0, 287.0, 19.6, 396.9, 6.87 +20.4, 0.35114, 0.0, 7.38, 0.0, 0.493, 6.041, 49.9, 4.7211, 5.0, 287.0, 19.6, 396.9, 7.7 +18.5, 0.28392, 0.0, 7.38, 0.0, 0.493, 5.708, 74.3, 4.7211, 5.0, 287.0, 19.6, 391.13, 11.74 +25.0, 0.34109, 0.0, 7.38, 0.0, 0.493, 6.415, 40.1, 4.7211, 5.0, 287.0, 19.6, 396.9, 6.12 +24.6, 0.19186, 0.0, 7.38, 0.0, 0.493, 6.431, 14.7, 5.4159, 5.0, 287.0, 19.6, 393.68, 5.08 +23.0, 0.30347, 0.0, 7.38, 0.0, 0.493, 6.312, 28.9, 5.4159, 5.0, 287.0, 19.6, 396.9, 6.15 +22.2, 0.24103, 0.0, 7.38, 0.0, 0.493, 6.083, 43.7, 5.4159, 5.0, 287.0, 19.6, 396.9, 12.79 +19.3, 0.06617, 0.0, 3.24, 0.0, 0.46, 5.868, 25.8, 5.2146, 4.0, 430.0, 16.9, 382.44, 9.97 +22.6, 0.06724, 0.0, 3.24, 0.0, 0.46, 6.333, 17.2, 5.2146, 4.0, 430.0, 16.9, 375.21, 7.34 +19.8, 0.04544, 0.0, 3.24, 0.0, 0.46, 6.144, 32.2, 5.8736, 4.0, 430.0, 16.9, 368.57, 9.09 +17.1, 0.05023, 35.0, 6.06, 0.0, 0.4379, 5.706, 28.4, 6.6407, 1.0, 304.0, 16.9, 394.02, 12.43 +19.4, 0.03466, 35.0, 6.06, 0.0, 0.4379, 6.031, 23.3, 6.6407, 1.0, 304.0, 16.9, 362.25, 7.83 +22.2, 0.05083, 0.0, 5.19, 0.0, 0.515, 6.316, 38.1, 6.4584, 5.0, 224.0, 20.2, 389.71, 5.68 +20.7, 0.03738, 0.0, 5.19, 0.0, 0.515, 6.31, 38.5, 6.4584, 5.0, 224.0, 20.2, 389.4, 6.75 +21.1, 0.03961, 0.0, 5.19, 0.0, 0.515, 6.037, 34.5, 5.9853, 5.0, 224.0, 20.2, 396.9, 8.01 +19.5, 0.03427, 0.0, 5.19, 0.0, 0.515, 5.869, 46.3, 5.2311, 5.0, 224.0, 20.2, 396.9, 9.8 +18.5, 0.03041, 0.0, 5.19, 0.0, 0.515, 5.895, 59.6, 5.615, 5.0, 224.0, 20.2, 394.81, 10.56 +20.6, 0.03306, 0.0, 5.19, 0.0, 0.515, 6.059, 37.3, 4.8122, 5.0, 224.0, 20.2, 396.14, 8.51 +19.0, 0.05497, 0.0, 5.19, 0.0, 0.515, 5.985, 45.4, 4.8122, 5.0, 224.0, 20.2, 396.9, 9.74 +18.7, 0.06151, 0.0, 5.19, 0.0, 0.515, 5.968, 58.5, 4.8122, 5.0, 224.0, 20.2, 396.9, 9.29 +32.7, 0.01301, 35.0, 1.52, 0.0, 0.442, 7.241, 49.3, 7.0379, 1.0, 284.0, 15.5, 394.74, 5.49 +16.5, 0.02498, 0.0, 1.89, 0.0, 0.518, 6.54, 59.7, 6.2669, 1.0, 422.0, 15.9, 389.96, 8.65 +23.9, 0.02543, 55.0, 3.78, 0.0, 0.484, 6.696, 56.4, 5.7321, 5.0, 370.0, 17.6, 396.9, 7.18 +31.2, 0.03049, 55.0, 3.78, 0.0, 0.484, 6.874, 28.1, 6.4654, 5.0, 370.0, 17.6, 387.97, 4.61 +17.5, 0.03113, 0.0, 4.39, 0.0, 0.442, 6.014, 48.5, 8.0136, 3.0, 352.0, 18.8, 
385.64, 10.53 +17.2, 0.06162, 0.0, 4.39, 0.0, 0.442, 5.898, 52.3, 8.0136, 3.0, 352.0, 18.8, 364.61, 12.67 +23.1, 0.0187, 85.0, 4.15, 0.0, 0.429, 6.516, 27.7, 8.5353, 4.0, 351.0, 17.9, 392.43, 6.36 +24.5, 0.01501, 80.0, 2.01, 0.0, 0.435, 6.635, 29.7, 8.344, 4.0, 280.0, 17.0, 390.94, 5.99 +26.6, 0.02899, 40.0, 1.25, 0.0, 0.429, 6.939, 34.5, 8.7921, 1.0, 335.0, 19.7, 389.85, 5.89 +22.9, 0.06211, 40.0, 1.25, 0.0, 0.429, 6.49, 44.4, 8.7921, 1.0, 335.0, 19.7, 396.9, 5.98 +24.1, 0.0795, 60.0, 1.69, 0.0, 0.411, 6.579, 35.9, 10.7103, 4.0, 411.0, 18.3, 370.78, 5.49 +18.6, 0.07244, 60.0, 1.69, 0.0, 0.411, 5.884, 18.5, 10.7103, 4.0, 411.0, 18.3, 392.33, 7.79 +30.1, 0.01709, 90.0, 2.02, 0.0, 0.41, 6.728, 36.1, 12.1265, 5.0, 187.0, 17.0, 384.46, 4.5 +18.2, 0.04301, 80.0, 1.91, 0.0, 0.413, 5.663, 21.9, 10.5857, 4.0, 334.0, 22.0, 382.8, 8.05 +20.6, 0.10659, 80.0, 1.91, 0.0, 0.413, 5.936, 19.5, 10.5857, 4.0, 334.0, 22.0, 376.04, 5.57 +17.8, 8.98296, 0.0, 18.1, 1.0, 0.77, 6.212, 97.4, 2.1222, 24.0, 666.0, 20.2, 377.73, 17.6 +21.7, 3.8497, 0.0, 18.1, 1.0, 0.77, 6.395, 91.0, 2.5052, 24.0, 666.0, 20.2, 391.34, 13.27 +22.7, 5.20177, 0.0, 18.1, 1.0, 0.77, 6.127, 83.4, 2.7227, 24.0, 666.0, 20.2, 395.43, 11.48 +22.6, 4.26131, 0.0, 18.1, 0.0, 0.77, 6.112, 81.3, 2.5091, 24.0, 666.0, 20.2, 390.74, 12.67 +25.0, 4.54192, 0.0, 18.1, 0.0, 0.77, 6.398, 88.0, 2.5182, 24.0, 666.0, 20.2, 374.56, 7.79 +19.9, 3.83684, 0.0, 18.1, 0.0, 0.77, 6.251, 91.1, 2.2955, 24.0, 666.0, 20.2, 350.65, 14.19 +20.8, 3.67822, 0.0, 18.1, 0.0, 0.77, 5.362, 96.2, 2.1036, 24.0, 666.0, 20.2, 380.79, 10.19 +16.8, 4.22239, 0.0, 18.1, 1.0, 0.77, 5.803, 89.0, 1.9047, 24.0, 666.0, 20.2, 353.04, 14.64 +21.9, 3.47428, 0.0, 18.1, 1.0, 0.718, 8.78, 82.9, 1.9047, 24.0, 666.0, 20.2, 354.55, 5.29 +27.5, 4.55587, 0.0, 18.1, 0.0, 0.718, 3.561, 87.9, 1.6132, 24.0, 666.0, 20.2, 354.7, 7.12 +21.9, 3.69695, 0.0, 18.1, 0.0, 0.718, 4.963, 91.4, 1.7523, 24.0, 666.0, 20.2, 316.03, 14.0 +23.1, 13.5222, 0.0, 18.1, 0.0, 0.631, 3.863, 100.0, 1.5106, 24.0, 666.0, 20.2, 131.42, 13.33 +50.0, 4.89822, 0.0, 18.1, 0.0, 0.631, 4.97, 100.0, 1.3325, 24.0, 666.0, 20.2, 375.52, 3.26 +50.0, 5.66998, 0.0, 18.1, 1.0, 0.631, 6.683, 96.8, 1.3567, 24.0, 666.0, 20.2, 375.33, 3.73 +50.0, 6.53876, 0.0, 18.1, 1.0, 0.631, 7.016, 97.5, 1.2024, 24.0, 666.0, 20.2, 392.05, 2.96 +50.0, 9.2323, 0.0, 18.1, 0.0, 0.631, 6.216, 100.0, 1.1691, 24.0, 666.0, 20.2, 366.15, 9.53 +50.0, 8.26725, 0.0, 18.1, 1.0, 0.668, 5.875, 89.6, 1.1296, 24.0, 666.0, 20.2, 347.88, 8.88 +13.8, 11.1081, 0.0, 18.1, 0.0, 0.668, 4.906, 100.0, 1.1742, 24.0, 666.0, 20.2, 396.9, 34.77 +13.8, 18.4982, 0.0, 18.1, 0.0, 0.668, 4.138, 100.0, 1.137, 24.0, 666.0, 20.2, 396.9, 37.97 +15.0, 19.6091, 0.0, 18.1, 0.0, 0.671, 7.313, 97.9, 1.3163, 24.0, 666.0, 20.2, 396.9, 13.44 +13.9, 15.288, 0.0, 18.1, 0.0, 0.671, 6.649, 93.3, 1.3449, 24.0, 666.0, 20.2, 363.02, 23.24 +13.3, 9.82349, 0.0, 18.1, 0.0, 0.671, 6.794, 98.8, 1.358, 24.0, 666.0, 20.2, 396.9, 21.24 +13.1, 23.6482, 0.0, 18.1, 0.0, 0.671, 6.38, 96.2, 1.3861, 24.0, 666.0, 20.2, 396.9, 23.69 +10.2, 17.8667, 0.0, 18.1, 0.0, 0.671, 6.223, 100.0, 1.3861, 24.0, 666.0, 20.2, 393.74, 21.78 +10.4, 88.9762, 0.0, 18.1, 0.0, 0.671, 6.968, 91.9, 1.4165, 24.0, 666.0, 20.2, 396.9, 17.21 +10.9, 15.8744, 0.0, 18.1, 0.0, 0.671, 6.545, 99.1, 1.5192, 24.0, 666.0, 20.2, 396.9, 21.08 +11.3, 9.18702, 0.0, 18.1, 0.0, 0.7, 5.536, 100.0, 1.5804, 24.0, 666.0, 20.2, 396.9, 23.6 +12.3, 7.99248, 0.0, 18.1, 0.0, 0.7, 5.52, 100.0, 1.5331, 24.0, 666.0, 20.2, 396.9, 24.56 +8.8, 20.0849, 0.0, 18.1, 0.0, 0.7, 4.368, 91.2, 
1.4395, 24.0, 666.0, 20.2, 285.83, 30.63 +7.2, 16.8118, 0.0, 18.1, 0.0, 0.7, 5.277, 98.1, 1.4261, 24.0, 666.0, 20.2, 396.9, 30.81 +10.5, 24.3938, 0.0, 18.1, 0.0, 0.7, 4.652, 100.0, 1.4672, 24.0, 666.0, 20.2, 396.9, 28.28 +7.4, 22.5971, 0.0, 18.1, 0.0, 0.7, 5.0, 89.5, 1.5184, 24.0, 666.0, 20.2, 396.9, 31.99 +10.2, 14.3337, 0.0, 18.1, 0.0, 0.7, 4.88, 100.0, 1.5895, 24.0, 666.0, 20.2, 372.92, 30.62 +11.5, 8.15174, 0.0, 18.1, 0.0, 0.7, 5.39, 98.9, 1.7281, 24.0, 666.0, 20.2, 396.9, 20.85 +15.1, 6.96215, 0.0, 18.1, 0.0, 0.7, 5.713, 97.0, 1.9265, 24.0, 666.0, 20.2, 394.43, 17.11 +23.2, 5.29305, 0.0, 18.1, 0.0, 0.7, 6.051, 82.5, 2.1678, 24.0, 666.0, 20.2, 378.38, 18.76 +9.7, 11.5779, 0.0, 18.1, 0.0, 0.7, 5.036, 97.0, 1.77, 24.0, 666.0, 20.2, 396.9, 25.68 +13.8, 8.64476, 0.0, 18.1, 0.0, 0.693, 6.193, 92.6, 1.7912, 24.0, 666.0, 20.2, 396.9, 15.17 +12.7, 13.3598, 0.0, 18.1, 0.0, 0.693, 5.887, 94.7, 1.7821, 24.0, 666.0, 20.2, 396.9, 16.35 +13.1, 8.71675, 0.0, 18.1, 0.0, 0.693, 6.471, 98.8, 1.7257, 24.0, 666.0, 20.2, 391.98, 17.12 +12.5, 5.87205, 0.0, 18.1, 0.0, 0.693, 6.405, 96.0, 1.6768, 24.0, 666.0, 20.2, 396.9, 19.37 +8.5, 7.67202, 0.0, 18.1, 0.0, 0.693, 5.747, 98.9, 1.6334, 24.0, 666.0, 20.2, 393.1, 19.92 +5.0, 38.3518, 0.0, 18.1, 0.0, 0.693, 5.453, 100.0, 1.4896, 24.0, 666.0, 20.2, 396.9, 30.59 +6.3, 9.91655, 0.0, 18.1, 0.0, 0.693, 5.852, 77.8, 1.5004, 24.0, 666.0, 20.2, 338.16, 29.97 +5.6, 25.0461, 0.0, 18.1, 0.0, 0.693, 5.987, 100.0, 1.5888, 24.0, 666.0, 20.2, 396.9, 26.77 +7.2, 14.2362, 0.0, 18.1, 0.0, 0.693, 6.343, 100.0, 1.5741, 24.0, 666.0, 20.2, 396.9, 20.32 +12.1, 9.59571, 0.0, 18.1, 0.0, 0.693, 6.404, 100.0, 1.639, 24.0, 666.0, 20.2, 376.11, 20.31 +8.3, 24.8017, 0.0, 18.1, 0.0, 0.693, 5.349, 96.0, 1.7028, 24.0, 666.0, 20.2, 396.9, 19.77 +8.5, 41.5292, 0.0, 18.1, 0.0, 0.693, 5.531, 85.4, 1.6074, 24.0, 666.0, 20.2, 329.46, 27.38 +5.0, 67.9208, 0.0, 18.1, 0.0, 0.693, 5.683, 100.0, 1.4254, 24.0, 666.0, 20.2, 384.97, 22.98 +11.9, 20.7162, 0.0, 18.1, 0.0, 0.659, 4.138, 100.0, 1.1781, 24.0, 666.0, 20.2, 370.22, 23.34 +27.9, 11.9511, 0.0, 18.1, 0.0, 0.659, 5.608, 100.0, 1.2852, 24.0, 666.0, 20.2, 332.09, 12.13 +17.2, 7.40389, 0.0, 18.1, 0.0, 0.597, 5.617, 97.9, 1.4547, 24.0, 666.0, 20.2, 314.64, 26.4 +27.5, 14.4383, 0.0, 18.1, 0.0, 0.597, 6.852, 100.0, 1.4655, 24.0, 666.0, 20.2, 179.36, 19.78 +15.0, 51.1358, 0.0, 18.1, 0.0, 0.597, 5.757, 100.0, 1.413, 24.0, 666.0, 20.2, 2.6, 10.11 +17.2, 14.0507, 0.0, 18.1, 0.0, 0.597, 6.657, 100.0, 1.5275, 24.0, 666.0, 20.2, 35.05, 21.22 +17.9, 18.811, 0.0, 18.1, 0.0, 0.597, 4.628, 100.0, 1.5539, 24.0, 666.0, 20.2, 28.79, 34.37 +16.3, 28.6558, 0.0, 18.1, 0.0, 0.597, 5.155, 100.0, 1.5894, 24.0, 666.0, 20.2, 210.97, 20.08 +7.0, 45.7461, 0.0, 18.1, 0.0, 0.693, 4.519, 100.0, 1.6582, 24.0, 666.0, 20.2, 88.27, 36.98 +7.2, 18.0846, 0.0, 18.1, 0.0, 0.679, 6.434, 100.0, 1.8347, 24.0, 666.0, 20.2, 27.25, 29.05 +7.5, 10.8342, 0.0, 18.1, 0.0, 0.679, 6.782, 90.8, 1.8195, 24.0, 666.0, 20.2, 21.57, 25.79 +10.4, 25.9406, 0.0, 18.1, 0.0, 0.679, 5.304, 89.1, 1.6475, 24.0, 666.0, 20.2, 127.36, 26.64 +8.8, 73.5341, 0.0, 18.1, 0.0, 0.679, 5.957, 100.0, 1.8026, 24.0, 666.0, 20.2, 16.45, 20.62 +8.4, 11.8123, 0.0, 18.1, 0.0, 0.718, 6.824, 76.5, 1.794, 24.0, 666.0, 20.2, 48.45, 22.74 +16.7, 11.0874, 0.0, 18.1, 0.0, 0.718, 6.411, 100.0, 1.8589, 24.0, 666.0, 20.2, 318.75, 15.02 +14.2, 7.02259, 0.0, 18.1, 0.0, 0.718, 6.006, 95.3, 1.8746, 24.0, 666.0, 20.2, 319.98, 15.7 +20.8, 12.0482, 0.0, 18.1, 0.0, 0.614, 5.648, 87.6, 1.9512, 24.0, 666.0, 20.2, 291.55, 14.1 +13.4, 7.05042, 0.0, 18.1, 
0.0, 0.614, 6.103, 85.1, 2.0218, 24.0, 666.0, 20.2, 2.52, 23.29 +11.7, 8.79212, 0.0, 18.1, 0.0, 0.584, 5.565, 70.6, 2.0635, 24.0, 666.0, 20.2, 3.65, 17.16 +8.3, 15.8603, 0.0, 18.1, 0.0, 0.679, 5.896, 95.4, 1.9096, 24.0, 666.0, 20.2, 7.68, 24.39 +10.2, 12.2472, 0.0, 18.1, 0.0, 0.584, 5.837, 59.7, 1.9976, 24.0, 666.0, 20.2, 24.65, 15.69 +10.9, 37.6619, 0.0, 18.1, 0.0, 0.679, 6.202, 78.7, 1.8629, 24.0, 666.0, 20.2, 18.82, 14.52 +11.0, 7.36711, 0.0, 18.1, 0.0, 0.679, 6.193, 78.1, 1.9356, 24.0, 666.0, 20.2, 96.73, 21.52 +9.5, 9.33889, 0.0, 18.1, 0.0, 0.679, 6.38, 95.6, 1.9682, 24.0, 666.0, 20.2, 60.72, 24.08 +14.5, 8.49213, 0.0, 18.1, 0.0, 0.584, 6.348, 86.1, 2.0527, 24.0, 666.0, 20.2, 83.45, 17.64 +14.1, 10.0623, 0.0, 18.1, 0.0, 0.584, 6.833, 94.3, 2.0882, 24.0, 666.0, 20.2, 81.33, 19.69 +16.1, 6.44405, 0.0, 18.1, 0.0, 0.584, 6.425, 74.8, 2.2004, 24.0, 666.0, 20.2, 97.95, 12.03 +14.3, 5.58107, 0.0, 18.1, 0.0, 0.713, 6.436, 87.9, 2.3158, 24.0, 666.0, 20.2, 100.19, 16.22 +11.7, 13.9134, 0.0, 18.1, 0.0, 0.713, 6.208, 95.0, 2.2222, 24.0, 666.0, 20.2, 100.63, 15.17 +13.4, 11.1604, 0.0, 18.1, 0.0, 0.74, 6.629, 94.6, 2.1247, 24.0, 666.0, 20.2, 109.85, 23.27 +9.6, 14.4208, 0.0, 18.1, 0.0, 0.74, 6.461, 93.3, 2.0026, 24.0, 666.0, 20.2, 27.49, 18.05 +8.7, 15.1772, 0.0, 18.1, 0.0, 0.74, 6.152, 100.0, 1.9142, 24.0, 666.0, 20.2, 9.32, 26.45 +8.4, 13.6781, 0.0, 18.1, 0.0, 0.74, 5.935, 87.9, 1.8206, 24.0, 666.0, 20.2, 68.95, 34.02 +12.8, 9.39063, 0.0, 18.1, 0.0, 0.74, 5.627, 93.9, 1.8172, 24.0, 666.0, 20.2, 396.9, 22.88 +10.5, 22.0511, 0.0, 18.1, 0.0, 0.74, 5.818, 92.4, 1.8662, 24.0, 666.0, 20.2, 391.45, 22.11 +17.1, 9.72418, 0.0, 18.1, 0.0, 0.74, 6.406, 97.2, 2.0651, 24.0, 666.0, 20.2, 385.96, 19.52 +18.4, 5.66637, 0.0, 18.1, 0.0, 0.74, 6.219, 100.0, 2.0048, 24.0, 666.0, 20.2, 395.69, 16.59 +15.4, 9.96654, 0.0, 18.1, 0.0, 0.74, 6.485, 100.0, 1.9784, 24.0, 666.0, 20.2, 386.73, 18.85 +10.8, 12.8023, 0.0, 18.1, 0.0, 0.74, 5.854, 96.6, 1.8956, 24.0, 666.0, 20.2, 240.52, 23.79 +11.8, 0.6718, 0.0, 18.1, 0.0, 0.74, 6.459, 94.8, 1.9879, 24.0, 666.0, 20.2, 43.06, 23.98 +14.9, 6.28807, 0.0, 18.1, 0.0, 0.74, 6.341, 96.4, 2.072, 24.0, 666.0, 20.2, 318.01, 17.79 +12.6, 9.92485, 0.0, 18.1, 0.0, 0.74, 6.251, 96.6, 2.198, 24.0, 666.0, 20.2, 388.52, 16.44 +14.1, 9.32909, 0.0, 18.1, 0.0, 0.713, 6.185, 98.7, 2.2616, 24.0, 666.0, 20.2, 396.9, 18.13 +13.0, 7.52601, 0.0, 18.1, 0.0, 0.713, 6.417, 98.3, 2.185, 24.0, 666.0, 20.2, 304.21, 19.31 +13.4, 6.71772, 0.0, 18.1, 0.0, 0.713, 6.749, 92.6, 2.3236, 24.0, 666.0, 20.2, 0.32, 17.44 +15.2, 5.44114, 0.0, 18.1, 0.0, 0.713, 6.655, 98.2, 2.3552, 24.0, 666.0, 20.2, 355.29, 17.73 +16.1, 5.09017, 0.0, 18.1, 0.0, 0.713, 6.297, 91.8, 2.3682, 24.0, 666.0, 20.2, 385.09, 17.27 +17.8, 8.24809, 0.0, 18.1, 0.0, 0.713, 7.393, 99.3, 2.4527, 24.0, 666.0, 20.2, 375.87, 16.74 +14.9, 9.51363, 0.0, 18.1, 0.0, 0.713, 6.728, 94.1, 2.4961, 24.0, 666.0, 20.2, 6.68, 18.71 +14.1, 4.75237, 0.0, 18.1, 0.0, 0.713, 6.525, 86.5, 2.4358, 24.0, 666.0, 20.2, 50.92, 18.13 +12.7, 4.66883, 0.0, 18.1, 0.0, 0.713, 5.976, 87.9, 2.5806, 24.0, 666.0, 20.2, 10.48, 19.01 +13.5, 8.20058, 0.0, 18.1, 0.0, 0.713, 5.936, 80.3, 2.7792, 24.0, 666.0, 20.2, 3.5, 16.94 +14.9, 7.75223, 0.0, 18.1, 0.0, 0.713, 6.301, 83.7, 2.7831, 24.0, 666.0, 20.2, 272.21, 16.23 +20.0, 6.80117, 0.0, 18.1, 0.0, 0.713, 6.081, 84.4, 2.7175, 24.0, 666.0, 20.2, 396.9, 14.7 +16.4, 4.81213, 0.0, 18.1, 0.0, 0.713, 6.701, 90.0, 2.5975, 24.0, 666.0, 20.2, 255.23, 16.42 +17.7, 3.69311, 0.0, 18.1, 0.0, 0.713, 6.376, 88.4, 2.5671, 24.0, 666.0, 20.2, 391.43, 14.65 
+19.5, 6.65492, 0.0, 18.1, 0.0, 0.713, 6.317, 83.0, 2.7344, 24.0, 666.0, 20.2, 396.9, 13.99 +20.2, 5.82115, 0.0, 18.1, 0.0, 0.713, 6.513, 89.9, 2.8016, 24.0, 666.0, 20.2, 393.82, 10.29 +21.4, 7.83932, 0.0, 18.1, 0.0, 0.655, 6.209, 65.4, 2.9634, 24.0, 666.0, 20.2, 396.9, 13.22 +19.9, 3.1636, 0.0, 18.1, 0.0, 0.655, 5.759, 48.2, 3.0665, 24.0, 666.0, 20.2, 334.4, 14.13 +19.0, 3.77498, 0.0, 18.1, 0.0, 0.655, 5.952, 84.7, 2.8715, 24.0, 666.0, 20.2, 22.01, 17.15 +19.1, 4.42228, 0.0, 18.1, 0.0, 0.584, 6.003, 94.5, 2.5403, 24.0, 666.0, 20.2, 331.29, 21.32 +19.1, 15.5757, 0.0, 18.1, 0.0, 0.58, 5.926, 71.0, 2.9084, 24.0, 666.0, 20.2, 368.74, 18.13 +20.1, 13.0751, 0.0, 18.1, 0.0, 0.58, 5.713, 56.7, 2.8237, 24.0, 666.0, 20.2, 396.9, 14.76 +19.9, 4.34879, 0.0, 18.1, 0.0, 0.58, 6.167, 84.0, 3.0334, 24.0, 666.0, 20.2, 396.9, 16.29 +19.6, 4.03841, 0.0, 18.1, 0.0, 0.532, 6.229, 90.7, 3.0993, 24.0, 666.0, 20.2, 395.33, 12.87 +23.2, 3.56868, 0.0, 18.1, 0.0, 0.58, 6.437, 75.0, 2.8965, 24.0, 666.0, 20.2, 393.37, 14.36 +29.8, 4.64689, 0.0, 18.1, 0.0, 0.614, 6.98, 67.6, 2.5329, 24.0, 666.0, 20.2, 374.68, 11.66 +13.8, 8.05579, 0.0, 18.1, 0.0, 0.584, 5.427, 95.4, 2.4298, 24.0, 666.0, 20.2, 352.58, 18.14 +13.3, 6.39312, 0.0, 18.1, 0.0, 0.584, 6.162, 97.4, 2.206, 24.0, 666.0, 20.2, 302.76, 24.1 +16.7, 4.87141, 0.0, 18.1, 0.0, 0.614, 6.484, 93.6, 2.3053, 24.0, 666.0, 20.2, 396.21, 18.68 +12.0, 15.0234, 0.0, 18.1, 0.0, 0.614, 5.304, 97.3, 2.1007, 24.0, 666.0, 20.2, 349.48, 24.91 +14.6, 10.233, 0.0, 18.1, 0.0, 0.614, 6.185, 96.7, 2.1705, 24.0, 666.0, 20.2, 379.7, 18.03 +21.4, 14.3337, 0.0, 18.1, 0.0, 0.614, 6.229, 88.0, 1.9512, 24.0, 666.0, 20.2, 383.32, 13.11 +23.0, 5.82401, 0.0, 18.1, 0.0, 0.532, 6.242, 64.7, 3.4242, 24.0, 666.0, 20.2, 396.9, 10.74 +23.7, 5.70818, 0.0, 18.1, 0.0, 0.532, 6.75, 74.9, 3.3317, 24.0, 666.0, 20.2, 393.07, 7.74 +25.0, 5.73116, 0.0, 18.1, 0.0, 0.532, 7.061, 77.0, 3.4106, 24.0, 666.0, 20.2, 395.28, 7.01 +21.8, 2.81838, 0.0, 18.1, 0.0, 0.532, 5.762, 40.3, 4.0983, 24.0, 666.0, 20.2, 392.92, 10.42 +20.6, 2.37857, 0.0, 18.1, 0.0, 0.583, 5.871, 41.9, 3.724, 24.0, 666.0, 20.2, 370.73, 13.34 +21.2, 3.67367, 0.0, 18.1, 0.0, 0.583, 6.312, 51.9, 3.9917, 24.0, 666.0, 20.2, 388.62, 10.58 +19.1, 5.69175, 0.0, 18.1, 0.0, 0.583, 6.114, 79.8, 3.5459, 24.0, 666.0, 20.2, 392.68, 14.98 +20.6, 4.83567, 0.0, 18.1, 0.0, 0.583, 5.905, 53.2, 3.1523, 24.0, 666.0, 20.2, 388.22, 11.45 +15.2, 0.15086, 0.0, 27.74, 0.0, 0.609, 5.454, 92.7, 1.8209, 4.0, 711.0, 20.1, 395.09, 18.06 +7.0, 0.18337, 0.0, 27.74, 0.0, 0.609, 5.414, 98.3, 1.7554, 4.0, 711.0, 20.1, 344.05, 23.97 +8.1, 0.20746, 0.0, 27.74, 0.0, 0.609, 5.093, 98.0, 1.8226, 4.0, 711.0, 20.1, 318.43, 29.68 +13.6, 0.10574, 0.0, 27.74, 0.0, 0.609, 5.983, 98.8, 1.8681, 4.0, 711.0, 20.1, 390.11, 18.07 +20.1, 0.11132, 0.0, 27.74, 0.0, 0.609, 5.983, 83.5, 2.1099, 4.0, 711.0, 20.1, 396.9, 13.35 +21.8, 0.17331, 0.0, 9.69, 0.0, 0.585, 5.707, 54.0, 2.3817, 6.0, 391.0, 19.2, 396.9, 12.01 +24.5, 0.27957, 0.0, 9.69, 0.0, 0.585, 5.926, 42.6, 2.3817, 6.0, 391.0, 19.2, 396.9, 13.59 +23.1, 0.17899, 0.0, 9.69, 0.0, 0.585, 5.67, 28.8, 2.7986, 6.0, 391.0, 19.2, 393.29, 17.6 +19.7, 0.2896, 0.0, 9.69, 0.0, 0.585, 5.39, 72.9, 2.7986, 6.0, 391.0, 19.2, 396.9, 21.14 +18.3, 0.26838, 0.0, 9.69, 0.0, 0.585, 5.794, 70.6, 2.8927, 6.0, 391.0, 19.2, 396.9, 14.1 +21.2, 0.23912, 0.0, 9.69, 0.0, 0.585, 6.019, 65.3, 2.4091, 6.0, 391.0, 19.2, 396.9, 12.92 +17.5, 0.17783, 0.0, 9.69, 0.0, 0.585, 5.569, 73.5, 2.3999, 6.0, 391.0, 19.2, 395.77, 15.1 +16.8, 0.22438, 0.0, 9.69, 0.0, 0.585, 6.027, 79.7, 2.4982, 
6.0, 391.0, 19.2, 396.9, 14.33 +22.4, 0.06263, 0.0, 11.93, 0.0, 0.573, 6.593, 69.1, 2.4786, 1.0, 273.0, 21.0, 391.99, 9.67 +20.6, 0.04527, 0.0, 11.93, 0.0, 0.573, 6.12, 76.7, 2.2875, 1.0, 273.0, 21.0, 396.9, 9.08 +23.9, 0.06076, 0.0, 11.93, 0.0, 0.573, 6.976, 91.0, 2.1675, 1.0, 273.0, 21.0, 396.9, 5.64 +22.0, 0.10959, 0.0, 11.93, 0.0, 0.573, 6.794, 89.3, 2.3889, 1.0, 273.0, 21.0, 393.45, 6.48 +11.9, 0.04741, 0.0, 11.93, 0.0, 0.573, 6.03, 80.8, 2.505, 1.0, 273.0, 21.0, 396.9, 7.88 \ No newline at end of file diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/regression/diabetes.csv b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/regression/diabetes.csv new file mode 100644 index 0000000000000..634208119eae3 --- /dev/null +++ b/ignite-extensions/modules/ml-ext/ml/src/test/resources/datasets/regression/diabetes.csv @@ -0,0 +1,442 @@ +151.0, 0.0380759064334, 0.0506801187398, 0.0616962065187, 0.021872354995, -0.0442234984244, -0.0348207628377, -0.043400845652, -0.00259226199818, 0.0199084208763, -0.0176461251598 +75.0, -0.00188201652779, -0.044641636507, -0.0514740612388, -0.0263278347174, -0.00844872411122, -0.0191633397482, 0.0744115640788, -0.0394933828741, -0.0683297436244, -0.0922040496268 +141.0, 0.0852989062967, 0.0506801187398, 0.0444512133366, -0.00567061055493, -0.0455994512826, -0.0341944659141, -0.0323559322398, -0.00259226199818, 0.00286377051894, -0.0259303389895 +206.0, -0.0890629393523, -0.044641636507, -0.0115950145052, -0.0366564467986, 0.0121905687618, 0.0249905933641, -0.0360375700439, 0.0343088588777, 0.0226920225667, -0.00936191133014 +135.0, 0.00538306037425, -0.044641636507, -0.0363846922045, 0.021872354995, 0.00393485161259, 0.0155961395104, 0.00814208360519, -0.00259226199818, -0.0319914449414, -0.0466408735636 +97.0, -0.0926954778033, -0.044641636507, -0.0406959405, -0.0194420933299, -0.0689906498721, -0.0792878444118, 0.041276823842, -0.07639450375, -0.041180385188, -0.0963461565417 +138.0, -0.04547247794, 0.0506801187398, -0.0471628129433, -0.0159992226361, -0.0400956398498, -0.0248000120604, 0.000778807997018, -0.0394933828741, -0.0629129499163, -0.038356659734 +63.0, 0.0635036755906, 0.0506801187398, -0.00189470584028, 0.0666296740135, 0.0906198816793, 0.108914381124, 0.0228686348215, 0.0177033544836, -0.0358167281015, 0.00306440941437 +110.0, 0.0417084448844, 0.0506801187398, 0.0616962065187, -0.0400993174923, -0.013952535544, 0.00620168565673, -0.0286742944357, -0.00259226199818, -0.0149564750249, 0.011348623244 +310.0, -0.0709002470972, -0.044641636507, 0.0390621529672, -0.0332135761048, -0.0125765826858, -0.0345076143759, -0.0249926566316, -0.00259226199818, 0.0677363261103, -0.013504018245 +101.0, -0.0963280162543, -0.044641636507, -0.0838084234552, 0.00810087222001, -0.103389471327, -0.0905611890362, -0.0139477432193, -0.07639450375, -0.0629129499163, -0.0342145528191 +69.0, 0.0271782910804, 0.0506801187398, 0.0175059114896, -0.0332135761048, -0.00707277125302, 0.045971540304, -0.0654906724765, 0.0712099797536, -0.0964332228918, -0.0590671943082 +179.0, 0.0162806757273, -0.044641636507, -0.0288400076873, -0.00911348124867, -0.00432086553661, -0.00976888589454, 0.0449584616461, -0.0394933828741, -0.0307512098646, -0.0424987666488 +185.0, 0.00538306037425, 0.0506801187398, -0.00189470584028, 0.00810087222001, -0.00432086553661, -0.0157187066685, -0.00290282980707, -0.00259226199818, 0.0383932482117, -0.013504018245 +118.0, 0.0453409833355, -0.044641636507, -0.0256065714657, -0.0125563519424, 0.0176943801946, 
-6.12835790605e-05, 0.0817748396869, -0.0394933828741, -0.0319914449414, -0.0756356219675 +171.0, -0.0527375548421, 0.0506801187398, -0.0180618869485, 0.0804011567885, 0.0892439288211, 0.107661787277, -0.0397192078479, 0.10811110063, 0.0360557900898, -0.0424987666488 +166.0, -0.00551455497881, -0.044641636507, 0.0422955891888, 0.0494153205448, 0.0245741444856, -0.0238605666751, 0.0744115640788, -0.0394933828741, 0.0522799997968, 0.0279170509034 +144.0, 0.0707687524926, 0.0506801187398, 0.0121168511202, 0.0563010619323, 0.034205814493, 0.0494161733837, -0.0397192078479, 0.0343088588777, 0.0273677075426, -0.00107769750047 +97.0, -0.038207401038, -0.044641636507, -0.0105172024313, -0.0366564467986, -0.0373437341334, -0.01947648821, -0.0286742944357, -0.00259226199818, -0.0181182673079, -0.0176461251598 +168.0, -0.0273097856849, -0.044641636507, -0.0180618869485, -0.0400993174923, -0.00294491267841, -0.0113346282035, 0.0375951860379, -0.0394933828741, -0.0089440189578, -0.0549250873933 +68.0, -0.049105016391, -0.044641636507, -0.0568631216082, -0.043542188186, -0.0455994512826, -0.043275771306, 0.000778807997018, -0.0394933828741, -0.0119006848015, 0.0154907301589 +49.0, -0.0854304009012, 0.0506801187398, -0.022373135244, 0.00121513083254, -0.0373437341334, -0.0263657543694, 0.0155053592134, -0.0394933828741, -0.072128454602, -0.0176461251598 +68.0, -0.0854304009012, -0.044641636507, -0.00405032998805, -0.00911348124867, -0.00294491267841, 0.00776742796568, 0.0228686348215, -0.0394933828741, -0.0611765950943, -0.013504018245 +245.0, 0.0453409833355, 0.0506801187398, 0.0606183944448, 0.0310533436263, 0.0287020030602, -0.0473467013093, -0.0544457590643, 0.0712099797536, 0.133598980013, 0.135611830689 +184.0, -0.0636351701951, -0.044641636507, 0.0358287167455, -0.0228849640236, -0.0304639698424, -0.0188501912864, -0.00658446761116, -0.00259226199818, -0.0259524244352, -0.0549250873933 +202.0, -0.0672677086461, 0.0506801187398, -0.0126728265791, -0.0400993174923, -0.0153284884022, 0.00463594334778, -0.0581273968684, 0.0343088588777, 0.0191990330786, -0.0342145528191 +137.0, -0.107225631607, -0.044641636507, -0.0773415510119, -0.0263278347174, -0.0896299427451, -0.0961978613484, 0.0265502726256, -0.07639450375, -0.0425721049228, -0.0052198044153 +85.0, -0.0236772472339, -0.044641636507, 0.0595405823709, -0.0400993174923, -0.0428475455662, -0.0435889197678, 0.0118237214093, -0.0394933828741, -0.0159982677581, 0.0403433716479 +131.0, 0.0526060602375, -0.044641636507, -0.0212953231701, -0.0745280244297, -0.0400956398498, -0.0376390989938, -0.00658446761116, -0.0394933828741, -0.000609254186102, -0.0549250873933 +283.0, 0.0671362140416, 0.0506801187398, -0.00620595413581, 0.0631868033198, -0.0428475455662, -0.0958847128867, 0.0523217372542, -0.07639450375, 0.0594238004448, 0.0527696923924 +129.0, -0.0600026317441, -0.044641636507, 0.0444512133366, -0.0194420933299, -0.00982467696942, -0.00757684666201, 0.0228686348215, -0.0394933828741, -0.0271286455543, -0.00936191133014 +59.0, -0.0236772472339, -0.044641636507, -0.0654856181993, -0.0814137658171, -0.0387196869916, -0.0536096705451, 0.0596850128624, -0.07639450375, -0.0371283460105, -0.0424987666488 +341.0, 0.0344433679824, 0.0506801187398, 0.125287118878, 0.0287580963824, -0.0538551684319, -0.0129003705124, -0.102307050517, 0.10811110063, 0.000271485727907, 0.0279170509034 +87.0, 0.0308108295314, -0.044641636507, -0.0503962491649, -0.0022277398612, -0.0442234984244, -0.0899348921127, 0.118591217728, -0.07639450375, -0.0181182673079, 0.00306440941437 
+65.0, 0.0162806757273, -0.044641636507, -0.0633299940515, -0.057313670961, -0.0579830270065, -0.0489124436182, 0.00814208360519, -0.0394933828741, -0.0594726974107, -0.0673514081378 +102.0, 0.0489735217865, 0.0506801187398, -0.0309956318351, -0.0492803060204, 0.0493412959332, -0.00413221358232, 0.133317768944, -0.0535158088069, 0.0213108465682, 0.0196328370737 +265.0, 0.0126481372763, -0.044641636507, 0.022894971859, 0.0528581912386, 0.0080627101872, -0.0285577936019, 0.0375951860379, -0.0394933828741, 0.0547240033482, -0.0259303389895 +276.0, -0.00914709342983, -0.044641636507, 0.0110390390463, -0.057313670961, -0.0249601584096, -0.0429626228442, 0.0302319104297, -0.0394933828741, 0.0170371324148, -0.0052198044153 +252.0, -0.00188201652779, 0.0506801187398, 0.0713965151836, 0.0976155102572, 0.0878679759629, 0.0754074957122, -0.0213110188275, 0.0712099797536, 0.0714240327806, 0.0237749439885 +90.0, -0.00188201652779, 0.0506801187398, 0.0142724752679, -0.0745280244297, 0.00255889875439, 0.00620168565673, -0.0139477432193, -0.00259226199818, 0.0191990330786, 0.00306440941437 +100.0, 0.00538306037425, 0.0506801187398, -0.00836157828357, 0.021872354995, 0.054845107366, 0.0732154564797, -0.0249926566316, 0.0343088588777, 0.0125531528134, 0.0941907615407 +55.0, -0.0999605547053, -0.044641636507, -0.067641242347, -0.108956731367, -0.0744944613049, -0.0727117267142, 0.0155053592134, -0.0394933828741, -0.0498684677352, -0.00936191133014 +61.0, -0.0600026317441, 0.0506801187398, -0.0105172024313, -0.014851599083, -0.0497273098573, -0.0235474182133, -0.0581273968684, 0.0158582984398, -0.00991895736315, -0.0342145528191 +92.0, 0.0199132141783, -0.044641636507, -0.0234509473179, -0.0710851537359, 0.020446285911, -0.0100820343563, 0.118591217728, -0.07639450375, -0.0425721049228, 0.0734802269666 +259.0, 0.0453409833355, 0.0506801187398, 0.068163078962, 0.00810087222001, -0.0167044412604, 0.00463594334778, -0.0765355858888, 0.0712099797536, 0.0324332257796, -0.0176461251598 +53.0, 0.0271782910804, 0.0506801187398, -0.0353068801306, 0.0322009670762, -0.0112006298276, 0.00150445872989, -0.0102661054152, -0.00259226199818, -0.0149564750249, -0.0507829804785 +190.0, -0.0563700932931, -0.044641636507, -0.0115950145052, -0.0332135761048, -0.0469754041408, -0.0476598497711, 0.00446044580111, -0.0394933828741, -0.00797939755454, -0.088061942712 +142.0, -0.0781653239992, -0.044641636507, -0.0730303027164, -0.057313670961, -0.0841261313123, -0.0742774690232, -0.0249926566316, -0.0394933828741, -0.0181182673079, -0.0839198357972 +75.0, 0.0671362140416, 0.0506801187398, -0.0417737525739, 0.0115437429137, 0.00255889875439, 0.00588853719494, 0.041276823842, -0.0394933828741, -0.0594726974107, -0.0217882320746 +142.0, -0.041839939489, 0.0506801187398, 0.0142724752679, -0.00567061055493, -0.0125765826858, 0.00620168565673, -0.0728539480847, 0.0712099797536, 0.0354619386608, -0.013504018245 +155.0, 0.0344433679824, -0.044641636507, -0.00728376620969, 0.0149866136075, -0.0442234984244, -0.037325950532, -0.00290282980707, -0.0394933828741, -0.0213936809404, 0.0072065163292 +225.0, 0.0598711371395, 0.0506801187398, 0.0164280994157, 0.0287580963824, -0.041471592708, -0.0291840905255, -0.0286742944357, -0.00259226199818, -0.00239668149341, -0.0217882320746 +59.0, -0.0527375548421, -0.044641636507, -0.00943939035745, -0.00567061055493, 0.0397096259258, 0.0447189464568, 0.0265502726256, -0.00259226199818, -0.0181182673079, -0.013504018245 +104.0, -0.00914709342983, -0.044641636507, -0.0159062628007, 0.0700725447073, 
0.0121905687618, 0.022172257208, 0.0155053592134, -0.00259226199818, -0.0332487872476, 0.0486275854776 +182.0, -0.049105016391, -0.044641636507, 0.0250505960067, 0.00810087222001, 0.020446285911, 0.0177881787429, 0.0523217372542, -0.0394933828741, -0.041180385188, 0.0072065163292 +128.0, -0.041839939489, -0.044641636507, -0.049318437091, -0.0366564467986, -0.00707277125302, -0.0226079728279, 0.085456477491, -0.0394933828741, -0.0664881482228, 0.0072065163292 +52.0, -0.041839939489, -0.044641636507, 0.041217777115, -0.0263278347174, -0.0318399227006, -0.0304366843726, -0.0360375700439, 0.0029429061332, 0.0336568129024, -0.0176461251598 +37.0, -0.0273097856849, -0.044641636507, -0.0633299940515, -0.0504279295735, -0.0896299427451, -0.104339721355, 0.0523217372542, -0.07639450375, -0.056157573095, -0.0673514081378 +170.0, 0.0417084448844, -0.044641636507, -0.0644078061254, 0.0356438377699, 0.0121905687618, -0.0579937490101, 0.181179060397, -0.07639450375, -0.000609254186102, -0.0507829804785 +170.0, 0.0635036755906, 0.0506801187398, -0.0256065714657, 0.0115437429137, 0.0644767773734, 0.0484767279983, 0.0302319104297, -0.00259226199818, 0.0383932482117, 0.0196328370737 +61.0, -0.0709002470972, -0.044641636507, -0.00405032998805, -0.0400993174923, -0.0662387441557, -0.0786615474882, 0.0523217372542, -0.07639450375, -0.0514005352606, -0.0342145528191 +144.0, -0.041839939489, 0.0506801187398, 0.004572166603, -0.0538708002672, -0.0442234984244, -0.0273051997547, -0.0802172236929, 0.0712099797536, 0.0366457977934, 0.0196328370737 +52.0, -0.0273097856849, 0.0506801187398, -0.00728376620969, -0.0400993174923, -0.0112006298276, -0.0138398158978, 0.0596850128624, -0.0394933828741, -0.0823814832581, -0.0259303389895 +128.0, -0.034574862587, -0.044641636507, -0.0374625042784, -0.0607565416547, 0.020446285911, 0.0434663526097, -0.0139477432193, -0.00259226199818, -0.0307512098646, -0.0714935150527 +71.0, 0.0671362140416, 0.0506801187398, -0.0256065714657, -0.0400993174923, -0.0634868384393, -0.0598726397809, -0.00290282980707, -0.0394933828741, -0.0191970476139, 0.011348623244 +163.0, -0.04547247794, 0.0506801187398, -0.0245287593918, 0.0597439326261, 0.00531080447079, 0.0149698425868, -0.0544457590643, 0.0712099797536, 0.0423448954496, 0.0154907301589 +150.0, -0.00914709342983, 0.0506801187398, -0.0180618869485, -0.0332135761048, -0.020832299835, 0.0121515064307, -0.0728539480847, 0.0712099797536, 0.000271485727907, 0.0196328370737 +97.0, 0.0417084448844, 0.0506801187398, -0.0148284507269, -0.0171468461892, -0.00569681839481, 0.00839372488926, -0.0139477432193, -0.00185423958066, -0.0119006848015, 0.00306440941437 +160.0, 0.0380759064334, 0.0506801187398, -0.0299178197612, -0.0400993174923, -0.0332158755588, -0.0241737151369, -0.0102661054152, -0.00259226199818, -0.0129079422542, 0.00306440941437 +178.0, 0.0162806757273, -0.044641636507, -0.0460850008694, -0.00567061055493, -0.0758704141631, -0.0614383820898, -0.0139477432193, -0.0394933828741, -0.0514005352606, 0.0196328370737 +48.0, -0.00188201652779, -0.044641636507, -0.0697968664948, -0.0125563519424, -0.00019300696201, -0.00914258897096, 0.0707299262747, -0.0394933828741, -0.0629129499163, 0.0403433716479 +270.0, -0.00188201652779, -0.044641636507, 0.0336730925978, 0.125158475807, 0.0245741444856, 0.0262431872113, -0.0102661054152, -0.00259226199818, 0.0267142576335, 0.0610539062221 +202.0, 0.0635036755906, 0.0506801187398, -0.00405032998805, -0.0125563519424, 0.103003457403, 0.0487898764601, 0.0560033750583, -0.00259226199818, 0.0844952822124, 
-0.0176461251598 +111.0, 0.0126481372763, 0.0506801187398, -0.0202175110963, -0.0022277398612, 0.0383336730676, 0.0531739549252, -0.00658446761116, 0.0343088588777, -0.00514530798026, -0.00936191133014 +85.0, 0.0126481372763, 0.0506801187398, 0.00241654245524, 0.0563010619323, 0.027326050202, 0.0171618818194, 0.041276823842, -0.0394933828741, 0.00371173823344, 0.0734802269666 +42.0, -0.00914709342983, 0.0506801187398, -0.0309956318351, -0.0263278347174, -0.0112006298276, -0.00100072896443, -0.0213110188275, -0.00259226199818, 0.00620931561651, 0.0279170509034 +170.0, -0.0309423241359, 0.0506801187398, 0.0282840322284, 0.0700725447073, -0.126780669917, -0.106844909049, -0.0544457590643, -0.0479806406756, -0.0307512098646, 0.0154907301589 +200.0, -0.0963280162543, -0.044641636507, -0.0363846922045, -0.0745280244297, -0.0387196869916, -0.0276183482165, 0.0155053592134, -0.0394933828741, -0.0740888714915, -0.00107769750047 +252.0, 0.00538306037425, -0.044641636507, -0.0579409336821, -0.0228849640236, -0.0676146970139, -0.0683276482492, -0.0544457590643, -0.00259226199818, 0.0428956878925, -0.0839198357972 +113.0, -0.103593093156, -0.044641636507, -0.0374625042784, -0.0263278347174, 0.00255889875439, 0.0199802179755, 0.0118237214093, -0.00259226199818, -0.0683297436244, -0.0259303389895 +143.0, 0.0707687524926, -0.044641636507, 0.0121168511202, 0.0425295791574, 0.0713565416644, 0.0534871033869, 0.0523217372542, -0.00259226199818, 0.0253931349154, -0.0052198044153 +51.0, 0.0126481372763, 0.0506801187398, -0.022373135244, -0.0297707054111, 0.0108146159036, 0.0284352264438, -0.0213110188275, 0.0343088588777, -0.00608024819631, -0.00107769750047 +52.0, -0.0164121703319, -0.044641636507, -0.0353068801306, -0.0263278347174, 0.0328298616348, 0.0171618818194, 0.100183028707, -0.0394933828741, -0.0702093127287, -0.0797777288823 +210.0, -0.038207401038, -0.044641636507, 0.00996122697241, -0.0469850588798, -0.0593589798647, -0.0529833736215, -0.0102661054152, -0.0394933828741, -0.0159982677581, -0.0424987666488 +65.0, 0.00175052192323, -0.044641636507, -0.0396181284261, -0.100923366426, -0.0290880169842, -0.0301235359109, 0.0449584616461, -0.0501947079281, -0.0683297436244, -0.12948301186 +141.0, 0.0453409833355, -0.044641636507, 0.0713965151836, 0.00121513083254, -0.00982467696942, -0.00100072896443, 0.0155053592134, -0.0394933828741, -0.041180385188, -0.0714935150527 +55.0, -0.0709002470972, 0.0506801187398, -0.0751859268642, -0.0400993174923, -0.0511032627155, -0.015092409745, -0.0397192078479, -0.00259226199818, -0.0964332228918, -0.0342145528191 +134.0, 0.0453409833355, -0.044641636507, -0.00620595413581, 0.0115437429137, 0.0631008245152, 0.016222436434, 0.0965013909033, -0.0394933828741, 0.0428956878925, -0.038356659734 +42.0, -0.0527375548421, 0.0506801187398, -0.0406959405, -0.0676422830422, -0.0318399227006, -0.0370128020702, 0.0375951860379, -0.0394933828741, -0.0345237153303, 0.0693381200517 +111.0, -0.04547247794, -0.044641636507, -0.0482406250172, -0.0194420933299, -0.00019300696201, -0.0160318551303, 0.0670482884706, -0.0394933828741, -0.0247911874325, 0.0196328370737 +98.0, 0.0126481372763, -0.044641636507, -0.0256065714657, -0.0400993174923, -0.0304639698424, -0.0451546620768, 0.0780932018828, -0.07639450375, -0.072128454602, 0.011348623244 +164.0, 0.0453409833355, -0.044641636507, 0.0519958978538, -0.0538708002672, 0.0631008245152, 0.0647604480114, -0.0102661054152, 0.0343088588777, 0.037232011209, 0.0196328370737 +48.0, -0.0200447087829, -0.044641636507, 0.004572166603, 0.0976155102572, 
0.00531080447079, -0.0207290820572, 0.0633666506665, -0.0394933828741, 0.0125531528134, 0.011348623244 +96.0, -0.049105016391, -0.044641636507, -0.0644078061254, -0.10207098998, -0.00294491267841, -0.0154055582067, 0.0633666506665, -0.047242618258, -0.0332487872476, -0.0549250873933 +90.0, -0.0781653239992, -0.044641636507, -0.0169840748746, -0.0125563519424, -0.00019300696201, -0.013526667436, 0.0707299262747, -0.0394933828741, -0.041180385188, -0.0922040496268 +162.0, -0.0709002470972, -0.044641636507, -0.0579409336821, -0.0814137658171, -0.0455994512826, -0.0288709420637, -0.043400845652, -0.00259226199818, 0.00114379737951, -0.0052198044153 +150.0, 0.0562385986885, 0.0506801187398, 0.00996122697241, 0.0494153205448, -0.00432086553661, -0.0122740735889, -0.043400845652, 0.0343088588777, 0.0607877541507, 0.0320591578182 +279.0, -0.0273097856849, -0.044641636507, 0.0886415083657, -0.0251802111642, 0.0218222387692, 0.0425269072243, -0.0323559322398, 0.0343088588777, 0.00286377051894, 0.0776223338814 +92.0, 0.00175052192323, 0.0506801187398, -0.00512814206193, -0.0125563519424, -0.0153284884022, -0.0138398158978, 0.00814208360519, -0.0394933828741, -0.00608024819631, -0.0673514081378 +83.0, -0.00188201652779, -0.044641636507, -0.0644078061254, 0.0115437429137, 0.027326050202, 0.0375165318357, -0.0139477432193, 0.0343088588777, 0.0117839003836, -0.0549250873933 +128.0, 0.0162806757273, -0.044641636507, 0.0175059114896, -0.0228849640236, 0.0603489187988, 0.0444057979951, 0.0302319104297, -0.00259226199818, 0.037232011209, -0.00107769750047 +102.0, 0.0162806757273, 0.0506801187398, -0.0450071887955, 0.0631868033198, 0.0108146159036, -0.00037443204085, 0.0633666506665, -0.0394933828741, -0.0307512098646, 0.036201264733 +302.0, -0.0926954778033, -0.044641636507, 0.0282840322284, -0.0159992226361, 0.0369577202094, 0.0249905933641, 0.0560033750583, -0.0394933828741, -0.00514530798026, -0.00107769750047 +198.0, 0.0598711371395, 0.0506801187398, 0.041217777115, 0.0115437429137, 0.041085578784, 0.0707102687854, -0.0360375700439, 0.0343088588777, -0.0109044358474, -0.0300724459043 +95.0, -0.0273097856849, -0.044641636507, 0.0649296427403, -0.0022277398612, -0.0249601584096, -0.0172844489775, 0.0228686348215, -0.0394933828741, -0.0611765950943, -0.063209301223 +53.0, 0.0235457526293, 0.0506801187398, -0.0320734439089, -0.0400993174923, -0.0318399227006, -0.0216685274425, -0.0139477432193, -0.00259226199818, -0.0109044358474, 0.0196328370737 +134.0, -0.0963280162543, -0.044641636507, -0.0762637389381, -0.043542188186, -0.0455994512826, -0.0348207628377, 0.00814208360519, -0.0394933828741, -0.0594726974107, -0.0839198357972 +144.0, 0.0271782910804, -0.044641636507, 0.049840273706, -0.0550184238203, -0.00294491267841, 0.0406480164536, -0.0581273968684, 0.0527594193157, -0.0529587932392, -0.0052198044153 +232.0, 0.0199132141783, 0.0506801187398, 0.0455290254105, 0.0299057198322, -0.0621108855811, -0.0558017097776, -0.0728539480847, 0.0269286347025, 0.0456008084141, 0.0403433716479 +81.0, 0.0380759064334, 0.0506801187398, -0.00943939035745, 0.00236275438564, 0.00118294589619, 0.0375165318357, -0.0544457590643, 0.0501763408544, -0.0259524244352, 0.106617082285 +104.0, 0.0417084448844, 0.0506801187398, -0.0320734439089, -0.0228849640236, -0.0497273098573, -0.0401442866881, 0.0302319104297, -0.0394933828741, -0.12609738556, 0.0154907301589 +59.0, 0.0199132141783, -0.044641636507, 0.004572166603, -0.0263278347174, 0.0231981916274, 0.01027261566, 0.0670482884706, -0.0394933828741, -0.0236445575721, 
-0.0466408735636 +246.0, -0.0854304009012, -0.044641636507, 0.0207393477112, -0.0263278347174, 0.00531080447079, 0.0196670695137, -0.00290282980707, -0.00259226199818, -0.0236445575721, 0.00306440941437 +297.0, 0.0199132141783, 0.0506801187398, 0.0142724752679, 0.0631868033198, 0.0149424744782, 0.0202933664373, -0.0470824834561, 0.0343088588777, 0.0466607723568, 0.0900486546259 +258.0, 0.0235457526293, -0.044641636507, 0.110197749843, 0.0631868033198, 0.01356652162, -0.032941872067, -0.0249926566316, 0.0206554441536, 0.099240225734, 0.0237749439885 +229.0, -0.0309423241359, 0.0506801187398, 0.00133873038136, -0.00567061055493, 0.0644767773734, 0.0494161733837, -0.0470824834561, 0.10811110063, 0.0837967663655, 0.00306440941437 +275.0, 0.0489735217865, 0.0506801187398, 0.058462770297, 0.0700725447073, 0.01356652162, 0.020606514899, -0.0213110188275, 0.0343088588777, 0.0220040504562, 0.0279170509034 +281.0, 0.0598711371395, -0.044641636507, -0.0212953231701, 0.0872868981759, 0.0452134373586, 0.0315667110617, -0.0470824834561, 0.0712099797536, 0.0791210813897, 0.135611830689 +179.0, -0.0563700932931, 0.0506801187398, -0.0105172024313, 0.0253152256887, 0.0231981916274, 0.04002171953, -0.0397192078479, 0.0343088588777, 0.0206123307214, 0.0569117993072 +200.0, 0.0162806757273, -0.044641636507, -0.0471628129433, -0.0022277398612, -0.0194563469768, -0.0429626228442, 0.0339135482338, -0.0394933828741, 0.0273677075426, 0.0279170509034 +200.0, -0.049105016391, -0.044641636507, 0.004572166603, 0.0115437429137, -0.0373437341334, -0.0185370428246, -0.0176293810234, -0.00259226199818, -0.0398095943643, -0.0217882320746 +173.0, 0.0635036755906, -0.044641636507, 0.0175059114896, 0.021872354995, 0.0080627101872, 0.0215459602844, -0.0360375700439, 0.0343088588777, 0.0199084208763, 0.011348623244 +180.0, 0.0489735217865, 0.0506801187398, 0.0810968238485, 0.021872354995, 0.0438374845004, 0.0641341510878, -0.0544457590643, 0.0712099797536, 0.0324332257796, 0.0486275854776 +84.0, 0.00538306037425, 0.0506801187398, 0.0347509046717, -0.0010801163081, 0.152537760298, 0.198787989657, -0.0618090346725, 0.18523444326, 0.0155668445407, 0.0734802269666 +121.0, -0.00551455497881, -0.044641636507, 0.0239727839329, 0.00810087222001, -0.034591828417, -0.038891692841, 0.0228686348215, -0.0394933828741, -0.0159982677581, -0.013504018245 +161.0, -0.00551455497881, 0.0506801187398, -0.00836157828357, -0.0022277398612, -0.0332158755588, -0.0636304213223, -0.0360375700439, -0.00259226199818, 0.0805854642387, 0.0072065163292 +99.0, -0.0890629393523, -0.044641636507, -0.0611743699037, -0.0263278347174, -0.0552311212901, -0.0545491159304, 0.041276823842, -0.07639450375, -0.0939356455087, -0.0549250873933 +109.0, 0.0344433679824, 0.0506801187398, -0.00189470584028, -0.0125563519424, 0.0383336730676, 0.0137172487397, 0.0780932018828, -0.0394933828741, 0.00455189046613, -0.0963461565417 +115.0, -0.0527375548421, -0.044641636507, -0.0622521819776, -0.0263278347174, -0.00569681839481, -0.00507165896769, 0.0302319104297, -0.0394933828741, -0.0307512098646, -0.0714935150527 +268.0, 0.00901559882527, -0.044641636507, 0.0164280994157, 0.00465800152627, 0.0094386630454, 0.0105857641218, -0.0286742944357, 0.0343088588777, 0.0389683660309, 0.11904340303 +274.0, -0.0636351701951, 0.0506801187398, 0.0961861928829, 0.104501251645, -0.00294491267841, -0.0047585105059, -0.00658446761116, -0.00259226199818, 0.0226920225667, 0.0734802269666 +158.0, -0.0963280162543, -0.044641636507, -0.0697968664948, -0.0676422830422, -0.0194563469768, 
-0.0107083312799, 0.0155053592134, -0.0394933828741, -0.0468794828442, -0.0797777288823 +107.0, 0.0162806757273, 0.0506801187398, -0.0212953231701, -0.00911348124867, 0.034205814493, 0.0478504310747, 0.000778807997018, -0.00259226199818, -0.0129079422542, 0.0237749439885 +83.0, -0.041839939489, 0.0506801187398, -0.0536296853866, -0.0400993174923, -0.0841261313123, -0.0717722813289, -0.00290282980707, -0.0394933828741, -0.072128454602, -0.0300724459043 +103.0, -0.0745327855482, -0.044641636507, 0.0433734012627, -0.0332135761048, 0.0121905687618, 0.000251864882729, 0.0633666506665, -0.0394933828741, -0.0271286455543, -0.0466408735636 +272.0, -0.00551455497881, -0.044641636507, 0.0563071461493, -0.0366564467986, -0.048351356999, -0.0429626228442, -0.0728539480847, 0.0379989709653, 0.050781513363, 0.0569117993072 +85.0, -0.0926954778033, -0.044641636507, -0.0816527993075, -0.057313670961, -0.0607349327229, -0.0680144997874, 0.0486400994501, -0.07639450375, -0.0664881482228, -0.0217882320746 +280.0, 0.00538306037425, -0.044641636507, 0.049840273706, 0.0976155102572, -0.0153284884022, -0.0163450035921, -0.00658446761116, -0.00259226199818, 0.0170371324148, -0.013504018245 +336.0, 0.0344433679824, 0.0506801187398, 0.111275561917, 0.0769582860947, -0.0318399227006, -0.0338813174523, -0.0213110188275, -0.00259226199818, 0.0280165065233, 0.0734802269666 +281.0, 0.0235457526293, -0.044641636507, 0.0616962065187, 0.0528581912386, -0.034591828417, -0.0489124436182, -0.0286742944357, -0.00259226199818, 0.0547240033482, -0.0052198044153 +118.0, 0.0417084448844, 0.0506801187398, 0.0142724752679, 0.0425295791574, -0.0304639698424, -0.00131387742622, -0.043400845652, -0.00259226199818, -0.0332487872476, 0.0154907301589 +317.0, -0.0273097856849, -0.044641636507, 0.0476846495582, -0.0469850588798, 0.034205814493, 0.0572448849284, -0.0802172236929, 0.130251773155, 0.0450661683363, 0.131469723774 +235.0, 0.0417084448844, 0.0506801187398, 0.0121168511202, 0.0390867084636, 0.054845107366, 0.0444057979951, 0.00446044580111, -0.00259226199818, 0.0456008084141, -0.00107769750047 +60.0, -0.0309423241359, -0.044641636507, 0.00564997867688, -0.00911348124867, 0.0190703330528, 0.00682798258031, 0.0744115640788, -0.0394933828741, -0.041180385188, -0.0424987666488 +174.0, 0.0308108295314, 0.0506801187398, 0.0466068374844, -0.0159992226361, 0.020446285911, 0.0506687672308, -0.0581273968684, 0.0712099797536, 0.00620931561651, 0.0072065163292 +259.0, -0.041839939489, -0.044641636507, 0.128520555099, 0.0631868033198, -0.0332158755588, -0.0326287236052, 0.0118237214093, -0.0394933828741, -0.0159982677581, -0.0507829804785 +178.0, -0.0309423241359, 0.0506801187398, 0.0595405823709, 0.00121513083254, 0.0121905687618, 0.0315667110617, -0.043400845652, 0.0343088588777, 0.0148227108413, 0.0072065163292 +128.0, -0.0563700932931, -0.044641636507, 0.0929527566612, -0.0194420933299, 0.0149424744782, 0.0234248510552, -0.0286742944357, 0.0254525898675, 0.0260560896337, 0.0403433716479 +96.0, -0.0600026317441, 0.0506801187398, 0.0153502873418, -0.0194420933299, 0.0369577202094, 0.0481635795365, 0.0191869970175, -0.00259226199818, -0.0307512098646, -0.00107769750047 +126.0, -0.049105016391, 0.0506801187398, -0.00512814206193, -0.0469850588798, -0.020832299835, -0.0204159335954, -0.0691723102806, 0.0712099797536, 0.0612379075197, -0.038356659734 +288.0, 0.0235457526293, -0.044641636507, 0.0703187031097, 0.0253152256887, -0.034591828417, -0.0144661128214, -0.0323559322398, -0.00259226199818, -0.0191970476139, -0.00936191133014 +88.0, 
0.00175052192323, -0.044641636507, -0.00405032998805, -0.00567061055493, -0.00844872411122, -0.0238605666751, 0.0523217372542, -0.0394933828741, -0.0089440189578, -0.013504018245 +292.0, -0.034574862587, 0.0506801187398, -0.000816893766404, 0.0700725447073, 0.0397096259258, 0.0669524872439, -0.0654906724765, 0.10811110063, 0.0267142576335, 0.0734802269666 +71.0, 0.0417084448844, 0.0506801187398, -0.0439293767216, 0.0631868033198, -0.00432086553661, 0.016222436434, -0.0139477432193, -0.00259226199818, -0.0345237153303, 0.011348623244 +197.0, 0.0671362140416, 0.0506801187398, 0.0207393477112, -0.00567061055493, 0.020446285911, 0.0262431872113, -0.00290282980707, -0.00259226199818, 0.00864028293306, 0.00306440941437 +186.0, -0.0273097856849, 0.0506801187398, 0.0606183944448, 0.0494153205448, 0.0851160702465, 0.0863676918749, -0.00290282980707, 0.0343088588777, 0.0378144788263, 0.0486275854776 +25.0, -0.0164121703319, -0.044641636507, -0.0105172024313, 0.00121513083254, -0.0373437341334, -0.0357602082231, 0.0118237214093, -0.0394933828741, -0.0213936809404, -0.0342145528191 +84.0, -0.00188201652779, 0.0506801187398, -0.0331512559828, -0.0182944697768, 0.0314539087766, 0.0428400556861, -0.0139477432193, 0.0199174217361, 0.010225642405, 0.0279170509034 +96.0, -0.0127796318808, -0.044641636507, -0.0654856181993, -0.0699375301828, 0.00118294589619, 0.0168487333576, -0.00290282980707, -0.00702039650329, -0.0307512098646, -0.0507829804785 +195.0, -0.00551455497881, -0.044641636507, 0.0433734012627, 0.0872868981759, 0.01356652162, 0.0071411310421, -0.0139477432193, -0.00259226199818, 0.0423448954496, -0.0176461251598 +53.0, -0.00914709342983, -0.044641636507, -0.0622521819776, -0.0745280244297, -0.0235842055514, -0.0132135189742, 0.00446044580111, -0.0394933828741, -0.0358167281015, -0.0466408735636 +217.0, -0.04547247794, 0.0506801187398, 0.0638518306665, 0.0700725447073, 0.133274420283, 0.131461070373, -0.0397192078479, 0.10811110063, 0.0757375884575, 0.0859065477111 +172.0, -0.0527375548421, -0.044641636507, 0.0304396563761, -0.0745280244297, -0.0235842055514, -0.0113346282035, -0.00290282980707, -0.00259226199818, -0.0307512098646, -0.00107769750047 +131.0, 0.0162806757273, 0.0506801187398, 0.0724743272575, 0.0769582860947, -0.00844872411122, 0.00557538873315, -0.00658446761116, -0.00259226199818, -0.0236445575721, 0.0610539062221 +214.0, 0.0453409833355, -0.044641636507, -0.0191396990224, 0.021872354995, 0.027326050202, -0.013526667436, 0.100183028707, -0.0394933828741, 0.0177634778671, -0.013504018245 +59.0, -0.041839939489, -0.044641636507, -0.0665634302731, -0.0469850588798, -0.0373437341334, -0.043275771306, 0.0486400994501, -0.0394933828741, -0.056157573095, -0.013504018245 +70.0, -0.0563700932931, 0.0506801187398, -0.0600965578299, -0.0366564467986, -0.0882539898869, -0.0708328359435, -0.0139477432193, -0.0394933828741, -0.0781409106691, -0.104630370371 +220.0, 0.0707687524926, -0.044641636507, 0.0692408910359, 0.0379390850138, 0.0218222387692, 0.00150445872989, -0.0360375700439, 0.0391060045916, 0.0776327891956, 0.106617082285 +268.0, 0.00175052192323, 0.0506801187398, 0.0595405823709, -0.0022277398612, 0.061724871657, 0.0631947057024, -0.0581273968684, 0.10811110063, 0.0689822116363, 0.127327616859 +152.0, -0.00188201652779, -0.044641636507, -0.0266843835395, 0.0494153205448, 0.0589729659406, -0.0160318551303, -0.0470824834561, 0.0712099797536, 0.133598980013, 0.0196328370737 +47.0, 0.0235457526293, 0.0506801187398, -0.0202175110963, -0.0366564467986, -0.013952535544, -0.015092409745, 
0.0596850128624, -0.0394933828741, -0.0964332228918, -0.0176461251598 +74.0, -0.0200447087829, -0.044641636507, -0.0460850008694, -0.0986281192858, -0.0758704141631, -0.0598726397809, -0.0176293810234, -0.0394933828741, -0.0514005352606, -0.0466408735636 +295.0, 0.0417084448844, 0.0506801187398, 0.0713965151836, 0.00810087222001, 0.0383336730676, 0.0159092879722, -0.0176293810234, 0.0343088588777, 0.0734100780491, 0.0859065477111 +101.0, -0.0636351701951, 0.0506801187398, -0.0794971751597, -0.00567061055493, -0.0717425555885, -0.0664487574784, -0.0102661054152, -0.0394933828741, -0.0181182673079, -0.0549250873933 +151.0, 0.0162806757273, 0.0506801187398, 0.00996122697241, -0.043542188186, -0.0965097070361, -0.0946321190395, -0.0397192078479, -0.0394933828741, 0.0170371324148, 0.0072065163292 +127.0, 0.0671362140416, -0.044641636507, -0.0385403163522, -0.0263278347174, -0.0318399227006, -0.0263657543694, 0.00814208360519, -0.0394933828741, -0.0271286455543, 0.00306440941437 +237.0, 0.0453409833355, 0.0506801187398, 0.0196615356373, 0.0390867084636, 0.020446285911, 0.0259300387495, 0.00814208360519, -0.00259226199818, -0.00330371257868, 0.0196328370737 +225.0, 0.0489735217865, -0.044641636507, 0.0272062201545, -0.0251802111642, 0.0231981916274, 0.0184144756665, -0.0618090346725, 0.0800662487639, 0.0722236508199, 0.0320591578182 +81.0, 0.0417084448844, -0.044641636507, -0.00836157828357, -0.0263278347174, 0.0245741444856, 0.016222436434, 0.0707299262747, -0.0394933828741, -0.0483617248029, -0.0300724459043 +151.0, -0.0236772472339, -0.044641636507, -0.0159062628007, -0.0125563519424, 0.020446285911, 0.0412743133772, -0.043400845652, 0.0343088588777, 0.0140724525158, -0.00936191133014 +107.0, -0.038207401038, 0.0506801187398, 0.004572166603, 0.0356438377699, -0.0112006298276, 0.00588853719494, -0.0470824834561, 0.0343088588777, 0.0163049527999, -0.00107769750047 +64.0, 0.0489735217865, -0.044641636507, -0.0428515646478, -0.0538708002672, 0.0452134373586, 0.0500424703073, 0.0339135482338, -0.00259226199818, -0.0259524244352, -0.063209301223 +138.0, 0.0453409833355, 0.0506801187398, 0.00564997867688, 0.0563010619323, 0.0644767773734, 0.089186028031, -0.0397192078479, 0.0712099797536, 0.0155668445407, -0.00936191133014 +185.0, 0.0453409833355, 0.0506801187398, -0.0353068801306, 0.0631868033198, -0.00432086553661, -0.00162702588801, -0.0102661054152, -0.00259226199818, 0.0155668445407, 0.0569117993072 +265.0, 0.0162806757273, -0.044641636507, 0.0239727839329, -0.0228849640236, -0.0249601584096, -0.0260526059076, -0.0323559322398, -0.00259226199818, 0.037232011209, 0.0320591578182 +101.0, -0.0745327855482, 0.0506801187398, -0.0180618869485, 0.00810087222001, -0.0194563469768, -0.0248000120604, -0.0654906724765, 0.0343088588777, 0.0673172179147, -0.0176461251598 +137.0, -0.0817978624502, 0.0506801187398, 0.0422955891888, -0.0194420933299, 0.0397096259258, 0.0575580333902, -0.0691723102806, 0.10811110063, 0.047186167886, -0.038356659734 +143.0, -0.0672677086461, -0.044641636507, -0.0547074974604, -0.0263278347174, -0.0758704141631, -0.0821061805679, 0.0486400994501, -0.07639450375, -0.0868289932163, -0.104630370371 +141.0, 0.00538306037425, -0.044641636507, -0.00297251791417, 0.0494153205448, 0.0741084473809, 0.0707102687854, 0.0449584616461, -0.00259226199818, -0.00149858682029, -0.00936191133014 +79.0, -0.00188201652779, -0.044641636507, -0.0665634302731, 0.00121513083254, -0.00294491267841, 0.00307020103883, 0.0118237214093, -0.00259226199818, -0.0202887477516, -0.0259303389895 +292.0, 
0.00901559882527, -0.044641636507, -0.0126728265791, 0.0287580963824, -0.0180803941186, -0.00507165896769, -0.0470824834561, 0.0343088588777, 0.0233748412798, -0.0052198044153 +178.0, -0.00551455497881, 0.0506801187398, -0.0417737525739, -0.043542188186, -0.0799982727377, -0.0761563597939, -0.0323559322398, -0.0394933828741, 0.010225642405, -0.00936191133014 +91.0, 0.0562385986885, 0.0506801187398, -0.0309956318351, 0.00810087222001, 0.0190703330528, 0.0212328118226, 0.0339135482338, -0.0394933828741, -0.0295276227418, -0.0590671943082 +116.0, 0.00901559882527, 0.0506801187398, -0.00512814206193, -0.0641994123485, 0.0699805888062, 0.0838625041805, -0.0397192078479, 0.0712099797536, 0.039539878072, 0.0196328370737 +86.0, -0.0672677086461, -0.044641636507, -0.059018745756, 0.0322009670762, -0.0511032627155, -0.0495387405418, -0.0102661054152, -0.0394933828741, 0.00200784054982, 0.0237749439885 +122.0, 0.0271782910804, 0.0506801187398, 0.0250505960067, 0.0149866136075, 0.0259500973438, 0.0484767279983, -0.0397192078479, 0.0343088588777, 0.00783714230182, 0.0237749439885 +72.0, -0.0236772472339, -0.044641636507, -0.0460850008694, -0.0332135761048, 0.0328298616348, 0.0362639379885, 0.0375951860379, -0.00259226199818, -0.0332487872476, 0.011348623244 +129.0, 0.0489735217865, 0.0506801187398, 0.00349435452912, 0.0700725447073, -0.00844872411122, 0.0134041002779, -0.0544457590643, 0.0343088588777, 0.0133159679089, 0.036201264733 +142.0, -0.0527375548421, -0.044641636507, 0.0541515220015, -0.0263278347174, -0.0552311212901, -0.0338813174523, -0.0139477432193, -0.0394933828741, -0.0740888714915, -0.0590671943082 +90.0, 0.0417084448844, -0.044641636507, -0.0450071887955, 0.0344962143201, 0.0438374845004, -0.0157187066685, 0.0375951860379, -0.0144006206785, 0.0898986932777, 0.0072065163292 +158.0, 0.0562385986885, -0.044641636507, -0.0579409336821, -0.00796585769557, 0.0520932016496, 0.0491030249219, 0.0560033750583, -0.0214118336449, -0.028320242548, 0.0444854785627 +39.0, -0.034574862587, 0.0506801187398, -0.0557853095343, -0.0159992226361, -0.00982467696942, -0.0078899951238, 0.0375951860379, -0.0394933828741, -0.0529587932392, 0.0279170509034 +196.0, 0.0816663678457, 0.0506801187398, 0.00133873038136, 0.0356438377699, 0.126394655992, 0.0910649188017, 0.0191869970175, 0.0343088588777, 0.0844952822124, -0.0300724459043 +222.0, -0.00188201652779, 0.0506801187398, 0.0304396563761, 0.0528581912386, 0.0397096259258, 0.0566185880048, -0.0397192078479, 0.0712099797536, 0.0253931349154, 0.0279170509034 +277.0, 0.110726675454, 0.0506801187398, 0.00672779075076, 0.0287580963824, -0.027712064126, -0.00726369820022, -0.0470824834561, 0.0343088588777, 0.00200784054982, 0.0776223338814 +99.0, -0.0309423241359, -0.044641636507, 0.0466068374844, 0.0149866136075, -0.0167044412604, -0.0470335528475, 0.000778807997018, -0.00259226199818, 0.0634559213721, -0.0259303389895 +196.0, 0.00175052192323, 0.0506801187398, 0.0261284080806, -0.00911348124867, 0.0245741444856, 0.0384559772211, -0.0213110188275, 0.0343088588777, 0.00943640914608, 0.00306440941437 +202.0, 0.00901559882527, -0.044641636507, 0.0455290254105, 0.0287580963824, 0.0121905687618, -0.0138398158978, 0.0265502726256, -0.0394933828741, 0.0461323310394, 0.036201264733 +155.0, 0.0308108295314, -0.044641636507, 0.0401399650411, 0.0769582860947, 0.0176943801946, 0.0378296802975, -0.0286742944357, 0.0343088588777, -0.00149858682029, 0.11904340303 +77.0, 0.0380759064334, 0.0506801187398, -0.0180618869485, 0.0666296740135, -0.0511032627155, -0.0166581520539, 
-0.0765355858888, 0.0343088588777, -0.0119006848015, -0.013504018245 +191.0, 0.00901559882527, -0.044641636507, 0.0142724752679, 0.0149866136075, 0.054845107366, 0.0472241341512, 0.0707299262747, -0.0394933828741, -0.0332487872476, -0.0590671943082 +70.0, 0.0925639831987, -0.044641636507, 0.0369065288194, 0.021872354995, -0.0249601584096, -0.0166581520539, 0.000778807997018, -0.0394933828741, -0.0225121719297, -0.0217882320746 +73.0, 0.0671362140416, -0.044641636507, 0.00349435452912, 0.0356438377699, 0.0493412959332, 0.0312535625999, 0.0707299262747, -0.0394933828741, -0.000609254186102, 0.0196328370737 +49.0, 0.00175052192323, -0.044641636507, -0.0708746785687, -0.0228849640236, -0.00156895982021, -0.00100072896443, 0.0265502726256, -0.0394933828741, -0.0225121719297, 0.0072065163292 +65.0, 0.0308108295314, -0.044641636507, -0.0331512559828, -0.0228849640236, -0.0469754041408, -0.0811667351825, 0.103864666511, -0.07639450375, -0.0398095943643, -0.0549250873933 +263.0, 0.0271782910804, 0.0506801187398, 0.0940305687351, 0.0976155102572, -0.034591828417, -0.0320024266816, -0.043400845652, -0.00259226199818, 0.0366457977934, 0.106617082285 +248.0, 0.0126481372763, 0.0506801187398, 0.0358287167455, 0.0494153205448, 0.0534691545078, 0.0741549018651, -0.0691723102806, 0.145012221505, 0.0456008084141, 0.0486275854776 +296.0, 0.0744012909436, -0.044641636507, 0.03151746845, 0.101058380951, 0.0465893902168, 0.0368902349121, 0.0155053592134, -0.00259226199818, 0.0336568129024, 0.0444854785627 +214.0, -0.041839939489, -0.044641636507, -0.0654856181993, -0.0400993174923, -0.00569681839481, 0.0143435456633, -0.043400845652, 0.0343088588777, 0.00702686254915, -0.013504018245 +185.0, -0.0890629393523, -0.044641636507, -0.0417737525739, -0.0194420933299, -0.0662387441557, -0.0742774690232, 0.00814208360519, -0.0394933828741, 0.00114379737951, -0.0300724459043 +78.0, 0.0235457526293, 0.0506801187398, -0.0396181284261, -0.00567061055493, -0.048351356999, -0.0332550205288, 0.0118237214093, -0.0394933828741, -0.101643547946, -0.0673514081378 +93.0, -0.04547247794, -0.044641636507, -0.0385403163522, -0.0263278347174, -0.0153284884022, 0.000878161806308, -0.0323559322398, -0.00259226199818, 0.00114379737951, -0.038356659734 +252.0, -0.0236772472339, 0.0506801187398, -0.0256065714657, 0.0425295791574, -0.0538551684319, -0.0476598497711, -0.0213110188275, -0.0394933828741, 0.00114379737951, 0.0196328370737 +150.0, -0.0999605547053, -0.044641636507, -0.0234509473179, -0.0641994123485, -0.0579830270065, -0.0601857882427, 0.0118237214093, -0.0394933828741, -0.0181182673079, -0.0507829804785 +77.0, -0.0273097856849, -0.044641636507, -0.0665634302731, -0.112399602061, -0.0497273098573, -0.0413968805353, 0.000778807997018, -0.0394933828741, -0.0358167281015, -0.00936191133014 +208.0, 0.0308108295314, 0.0506801187398, 0.0325952805239, 0.0494153205448, -0.0400956398498, -0.0435889197678, -0.0691723102806, 0.0343088588777, 0.0630166151147, 0.00306440941437 +77.0, -0.103593093156, 0.0506801187398, -0.0460850008694, -0.0263278347174, -0.0249601584096, -0.0248000120604, 0.0302319104297, -0.0394933828741, -0.0398095943643, -0.0549250873933 +108.0, 0.0671362140416, 0.0506801187398, -0.0299178197612, 0.0574486853821, -0.00019300696201, -0.0157187066685, 0.0744115640788, -0.0505637191369, -0.0384591123014, 0.0072065163292 +160.0, -0.0527375548421, -0.044641636507, -0.0126728265791, -0.0607565416547, -0.00019300696201, 0.00808057642747, 0.0118237214093, -0.00259226199818, -0.0271286455543, -0.0507829804785 +53.0, 
-0.0273097856849, 0.0506801187398, -0.0159062628007, -0.0297707054111, 0.00393485161259, -0.00068758050264, 0.041276823842, -0.0394933828741, -0.0236445575721, 0.011348623244 +220.0, -0.038207401038, 0.0506801187398, 0.0713965151836, -0.057313670961, 0.153913713157, 0.155886650392, 0.000778807997018, 0.0719480021712, 0.05027649339, 0.0693381200517 +154.0, 0.00901559882527, -0.044641636507, -0.0309956318351, 0.021872354995, 0.0080627101872, 0.00870687335105, 0.00446044580111, -0.00259226199818, 0.00943640914608, 0.011348623244 +259.0, 0.0126481372763, 0.0506801187398, 0.000260918307477, -0.0114087283893, 0.0397096259258, 0.0572448849284, -0.0397192078479, 0.0560805201945, 0.0240525832269, 0.0320591578182 +90.0, 0.0671362140416, -0.044641636507, 0.0369065288194, -0.0504279295735, -0.0235842055514, -0.0345076143759, 0.0486400994501, -0.0394933828741, -0.0259524244352, -0.038356659734 +246.0, 0.0453409833355, -0.044641636507, 0.0390621529672, 0.0459724498511, 0.006686757329, -0.0241737151369, 0.00814208360519, -0.0125555646347, 0.0643282330237, 0.0569117993072 +124.0, 0.0671362140416, 0.0506801187398, -0.0148284507269, 0.0585963091762, -0.0593589798647, -0.0345076143759, -0.0618090346725, 0.0129062087697, -0.00514530798026, 0.0486275854776 +67.0, 0.0271782910804, -0.044641636507, 0.00672779075076, 0.0356438377699, 0.0796122588137, 0.0707102687854, 0.0155053592134, 0.0343088588777, 0.0406722637145, 0.011348623244 +72.0, 0.0562385986885, -0.044641636507, -0.0687190544209, -0.0687899065953, -0.00019300696201, -0.00100072896443, 0.0449584616461, -0.0376483268303, -0.0483617248029, -0.00107769750047 +257.0, 0.0344433679824, 0.0506801187398, -0.00943939035745, 0.0597439326261, -0.0359677812752, -0.00757684666201, -0.0765355858888, 0.0712099797536, 0.0110081010459, -0.0217882320746 +262.0, 0.0235457526293, -0.044641636507, 0.0196615356373, -0.0125563519424, 0.0837401173883, 0.0387691256828, 0.0633666506665, -0.00259226199818, 0.0660482061631, 0.0486275854776 +275.0, 0.0489735217865, 0.0506801187398, 0.0746299514053, 0.0666296740135, -0.00982467696942, -0.00225332281159, -0.043400845652, 0.0343088588777, 0.0336568129024, 0.0196328370737 +177.0, 0.0308108295314, 0.0506801187398, -0.00836157828357, 0.00465800152627, 0.0149424744782, 0.0274957810584, 0.00814208360519, -0.00812743012957, -0.0295276227418, 0.0569117993072 +71.0, -0.103593093156, 0.0506801187398, -0.0234509473179, -0.0228849640236, -0.0868780370287, -0.0677013513256, -0.0176293810234, -0.0394933828741, -0.0781409106691, -0.0714935150527 +47.0, 0.0162806757273, 0.0506801187398, -0.0460850008694, 0.0115437429137, -0.0332158755588, -0.0160318551303, -0.0102661054152, -0.00259226199818, -0.0439854025656, -0.0424987666488 +187.0, -0.0600026317441, 0.0506801187398, 0.0541515220015, -0.0194420933299, -0.0497273098573, -0.0489124436182, 0.0228686348215, -0.0394933828741, -0.0439854025656, -0.0052198044153 +125.0, -0.0273097856849, -0.044641636507, -0.0353068801306, -0.0297707054111, -0.0566070741483, -0.0586200459337, 0.0302319104297, -0.0394933828741, -0.0498684677352, -0.12948301186 +78.0, 0.0417084448844, -0.044641636507, -0.0320734439089, -0.0619041652078, 0.0796122588137, 0.0509819156926, 0.0560033750583, -0.00997248617336, 0.0450661683363, -0.0590671943082 +51.0, -0.0817978624502, -0.044641636507, -0.0816527993075, -0.0400993174923, 0.00255889875439, -0.0185370428246, 0.0707299262747, -0.0394933828741, -0.0109044358474, -0.0922040496268 +258.0, -0.041839939489, -0.044641636507, 0.0476846495582, 0.0597439326261, 0.127770608851, 0.128016437293, 
-0.0249926566316, 0.10811110063, 0.0638931206368, 0.0403433716479 +215.0, -0.0127796318808, -0.044641636507, 0.0606183944448, 0.0528581912386, 0.047965343075, 0.0293746718292, -0.0176293810234, 0.0343088588777, 0.0702112981933, 0.0072065163292 +303.0, 0.0671362140416, -0.044641636507, 0.0563071461493, 0.073515415401, -0.013952535544, -0.0392048413028, -0.0323559322398, -0.00259226199818, 0.0757375884575, 0.036201264733 +243.0, -0.0527375548421, 0.0506801187398, 0.0983418170306, 0.0872868981759, 0.0603489187988, 0.0487898764601, -0.0581273968684, 0.10811110063, 0.0844952822124, 0.0403433716479 +91.0, 0.00538306037425, -0.044641636507, 0.0595405823709, -0.0561660474079, 0.0245741444856, 0.0528608064634, -0.043400845652, 0.0509143632719, -0.00421985970695, -0.0300724459043 +150.0, 0.0816663678457, -0.044641636507, 0.0336730925978, 0.00810087222001, 0.0520932016496, 0.0566185880048, -0.0176293810234, 0.0343088588777, 0.0348641930962, 0.0693381200517 +310.0, 0.0308108295314, 0.0506801187398, 0.0563071461493, 0.0769582860947, 0.0493412959332, -0.0122740735889, -0.0360375700439, 0.0712099797536, 0.120053382002, 0.0900486546259 +153.0, 0.00175052192323, -0.044641636507, -0.0654856181993, -0.00567061055493, -0.00707277125302, -0.01947648821, 0.041276823842, -0.0394933828741, -0.00330371257868, 0.0072065163292 +346.0, -0.049105016391, -0.044641636507, 0.160854917316, -0.0469850588798, -0.0290880169842, -0.0197896366718, -0.0470824834561, 0.0343088588777, 0.0280165065233, 0.011348623244 +63.0, -0.0273097856849, 0.0506801187398, -0.0557853095343, 0.0253152256887, -0.00707277125302, -0.0235474182133, 0.0523217372542, -0.0394933828741, -0.00514530798026, -0.0507829804785 +89.0, 0.0780338293946, 0.0506801187398, -0.0245287593918, -0.0423945646329, 0.006686757329, 0.0528608064634, -0.0691723102806, 0.0808042711814, -0.0371283460105, 0.0569117993072 +50.0, 0.0126481372763, -0.044641636507, -0.0363846922045, 0.0425295791574, -0.013952535544, 0.0129343775852, -0.0268334755336, 0.00515697338576, -0.0439854025656, 0.0072065163292 +39.0, 0.0417084448844, -0.044641636507, -0.00836157828357, -0.057313670961, 0.0080627101872, -0.031376129758, 0.151725957965, -0.07639450375, -0.0802365402489, -0.0176461251598 +103.0, 0.0489735217865, -0.044641636507, -0.0417737525739, 0.104501251645, 0.0355817673512, -0.0257394574458, 0.177497422593, -0.07639450375, -0.0129079422542, 0.0154907301589 +308.0, -0.0164121703319, 0.0506801187398, 0.127442743025, 0.0976155102572, 0.0163184273364, 0.0174750302812, -0.0213110188275, 0.0343088588777, 0.0348641930962, 0.00306440941437 +116.0, -0.0745327855482, 0.0506801187398, -0.0773415510119, -0.0469850588798, -0.0469754041408, -0.0326287236052, 0.00446044580111, -0.0394933828741, -0.072128454602, -0.0176461251598 +145.0, 0.0344433679824, 0.0506801187398, 0.0282840322284, -0.0332135761048, -0.0455994512826, -0.00976888589454, -0.0507641212602, -0.00259226199818, -0.0594726974107, -0.0217882320746 +74.0, -0.034574862587, 0.0506801187398, -0.0256065714657, -0.0171468461892, 0.00118294589619, -0.00287961973517, 0.00814208360519, -0.0155076543048, 0.0148227108413, 0.0403433716479 +45.0, -0.0527375548421, 0.0506801187398, -0.0622521819776, 0.0115437429137, -0.00844872411122, -0.0366996536084, 0.122272855532, -0.07639450375, -0.0868289932163, 0.00306440941437 +115.0, 0.0598711371395, -0.044641636507, -0.000816893766404, -0.0848566365109, 0.0754844002391, 0.0794784257155, 0.00446044580111, 0.0343088588777, 0.0233748412798, 0.0279170509034 +264.0, 0.0635036755906, 0.0506801187398, 0.0886415083657, 
0.0700725447073, 0.020446285911, 0.0375165318357, -0.0507641212602, 0.0712099797536, 0.0293004132686, 0.0734802269666 +87.0, 0.00901559882527, -0.044641636507, -0.0320734439089, -0.0263278347174, 0.0424615316422, -0.0103951828181, 0.159089233573, -0.07639450375, -0.0119006848015, -0.038356659734 +202.0, 0.00538306037425, 0.0506801187398, 0.0304396563761, 0.0838440274822, -0.0373437341334, -0.0473467013093, 0.0155053592134, -0.0394933828741, 0.00864028293306, 0.0154907301589 +127.0, 0.0380759064334, 0.0506801187398, 0.00888341489852, 0.0425295791574, -0.0428475455662, -0.021042230519, -0.0397192078479, -0.00259226199818, -0.0181182673079, 0.0072065163292 +182.0, 0.0126481372763, -0.044641636507, 0.00672779075076, -0.0561660474079, -0.0758704141631, -0.0664487574784, -0.0213110188275, -0.0376483268303, -0.0181182673079, -0.0922040496268 +241.0, 0.0744012909436, 0.0506801187398, -0.0202175110963, 0.0459724498511, 0.0741084473809, 0.0328193049088, -0.0360375700439, 0.0712099797536, 0.106354276742, 0.036201264733 +66.0, 0.0162806757273, -0.044641636507, -0.0245287593918, 0.0356438377699, -0.00707277125302, -0.00319276819696, -0.0139477432193, -0.00259226199818, 0.0155668445407, 0.0154907301589 +94.0, -0.00551455497881, 0.0506801187398, -0.0115950145052, 0.0115437429137, -0.0222082526932, -0.0154055582067, -0.0213110188275, -0.00259226199818, 0.0110081010459, 0.0693381200517 +283.0, 0.0126481372763, -0.044641636507, 0.0261284080806, 0.0631868033198, 0.125018703134, 0.0916912157253, 0.0633666506665, -0.00259226199818, 0.0575728562024, -0.0217882320746 +64.0, -0.034574862587, -0.044641636507, -0.059018745756, 0.00121513083254, -0.0538551684319, -0.0780352505647, 0.0670482884706, -0.07639450375, -0.0213936809404, 0.0154907301589 +102.0, 0.0671362140416, 0.0506801187398, -0.0363846922045, -0.0848566365109, -0.00707277125302, 0.0196670695137, -0.0544457590643, 0.0343088588777, 0.00114379737951, 0.0320591578182 +200.0, 0.0380759064334, 0.0506801187398, -0.0245287593918, 0.00465800152627, -0.0263361112678, -0.0263657543694, 0.0155053592134, -0.0394933828741, -0.0159982677581, -0.0259303389895 +265.0, 0.00901559882527, 0.0506801187398, 0.0185837235635, 0.0390867084636, 0.0176943801946, 0.0105857641218, 0.0191869970175, -0.00259226199818, 0.0163049527999, -0.0176461251598 +94.0, -0.0926954778033, 0.0506801187398, -0.0902752958985, -0.057313670961, -0.0249601584096, -0.0304366843726, -0.00658446761116, -0.00259226199818, 0.0240525832269, 0.00306440941437 +230.0, 0.0707687524926, -0.044641636507, -0.00512814206193, -0.00567061055493, 0.0878679759629, 0.10296456035, 0.0118237214093, 0.0343088588777, -0.0089440189578, 0.0279170509034 +181.0, -0.0164121703319, -0.044641636507, -0.0525518733127, -0.0332135761048, -0.0442234984244, -0.0363865051466, 0.0191869970175, -0.0394933828741, -0.0683297436244, -0.0300724459043 +156.0, 0.0417084448844, 0.0506801187398, -0.022373135244, 0.0287580963824, -0.0662387441557, -0.0451546620768, -0.0618090346725, -0.00259226199818, 0.00286377051894, -0.0549250873933 +233.0, 0.0126481372763, -0.044641636507, -0.0202175110963, -0.0159992226361, 0.0121905687618, 0.0212328118226, -0.0765355858888, 0.10811110063, 0.0598807230655, -0.0217882320746 +60.0, -0.038207401038, -0.044641636507, -0.0547074974604, -0.0779708951234, -0.0332158755588, -0.086490259033, 0.140681044552, -0.07639450375, -0.0191970476139, -0.0052198044153 +219.0, 0.0453409833355, -0.044641636507, -0.00620595413581, -0.0159992226361, 0.125018703134, 0.125198101137, 0.0191869970175, 0.0343088588777, 0.0324332257796, 
-0.0052198044153 +80.0, 0.0707687524926, 0.0506801187398, -0.0169840748746, 0.021872354995, 0.0438374845004, 0.0563054395431, 0.0375951860379, -0.00259226199818, -0.0702093127287, -0.0176461251598 +68.0, -0.0745327855482, 0.0506801187398, 0.0552293340754, -0.0400993174923, 0.0534691545078, 0.0531739549252, -0.043400845652, 0.0712099797536, 0.0612379075197, -0.0342145528191 +332.0, 0.0598711371395, 0.0506801187398, 0.076785575553, 0.0253152256887, 0.00118294589619, 0.0168487333576, -0.0544457590643, 0.0343088588777, 0.0299356483965, 0.0444854785627 +248.0, 0.0744012909436, -0.044641636507, 0.0185837235635, 0.0631868033198, 0.061724871657, 0.0428400556861, 0.00814208360519, -0.00259226199818, 0.0580391276639, -0.0590671943082 +84.0, 0.00901559882527, -0.044641636507, -0.022373135244, -0.0320659525517, -0.0497273098573, -0.068640796711, 0.0780932018828, -0.0708593356186, -0.0629129499163, -0.038356659734 +200.0, -0.0709002470972, -0.044641636507, 0.0929527566612, 0.0126913664668, 0.020446285911, 0.0425269072243, 0.000778807997018, 0.00035982767189, -0.0545441527111, -0.00107769750047 +55.0, 0.0235457526293, 0.0506801187398, -0.0309956318351, -0.00567061055493, -0.0167044412604, 0.0177881787429, -0.0323559322398, -0.00259226199818, -0.0740888714915, -0.0342145528191 +85.0, -0.0527375548421, 0.0506801187398, 0.0390621529672, -0.0400993174923, -0.00569681839481, -0.0129003705124, 0.0118237214093, -0.0394933828741, 0.0163049527999, 0.00306440941437 +89.0, 0.0671362140416, -0.044641636507, -0.0611743699037, -0.0400993174923, -0.0263361112678, -0.0244868635986, 0.0339135482338, -0.0394933828741, -0.056157573095, -0.0590671943082 +31.0, 0.00175052192323, -0.044641636507, -0.00836157828357, -0.0641994123485, -0.0387196869916, -0.0244868635986, 0.00446044580111, -0.0394933828741, -0.0646830224645, -0.0549250873933 +129.0, 0.0235457526293, 0.0506801187398, -0.0374625042784, -0.0469850588798, -0.0910058956033, -0.0755300628703, -0.0323559322398, -0.0394933828741, -0.0307512098646, -0.013504018245 +83.0, 0.0380759064334, 0.0506801187398, -0.013750638653, -0.0159992226361, -0.0359677812752, -0.0219816759043, -0.0139477432193, -0.00259226199818, -0.0259524244352, -0.00107769750047 +275.0, 0.0162806757273, -0.044641636507, 0.0735521393314, -0.0412469410454, -0.00432086553661, -0.013526667436, -0.0139477432193, -0.00111621716315, 0.0428956878925, 0.0444854785627 +65.0, -0.00188201652779, 0.0506801187398, -0.0245287593918, 0.0528581912386, 0.027326050202, 0.0300009687527, 0.0302319104297, -0.00259226199818, -0.0213936809404, 0.036201264733 +198.0, 0.0126481372763, -0.044641636507, 0.0336730925978, 0.033348590526, 0.0300779559184, 0.0271826325966, -0.00290282980707, 0.00884708547335, 0.0311929907028, 0.0279170509034 +236.0, 0.0744012909436, -0.044641636507, 0.0347509046717, 0.0941726395634, 0.0575970130824, 0.0202933664373, 0.0228686348215, -0.00259226199818, 0.07380214692, -0.0217882320746 +253.0, 0.0417084448844, 0.0506801187398, -0.0385403163522, 0.0528581912386, 0.0768603530973, 0.116429944207, -0.0397192078479, 0.0712099797536, -0.0225121719297, -0.013504018245 +124.0, -0.00914709342983, 0.0506801187398, -0.0396181284261, -0.0400993174923, -0.00844872411122, 0.016222436434, -0.0654906724765, 0.0712099797536, 0.0177634778671, -0.0673514081378 +44.0, 0.00901559882527, 0.0506801187398, -0.00189470584028, 0.021872354995, -0.0387196869916, -0.0248000120604, -0.00658446761116, -0.0394933828741, -0.0398095943643, -0.013504018245 +172.0, 0.0671362140416, 0.0506801187398, -0.0309956318351, 0.00465800152627, 
0.0245741444856, 0.0356376410649, -0.0286742944357, 0.0343088588777, 0.0233748412798, 0.0817644407962 +114.0, 0.00175052192323, -0.044641636507, -0.0460850008694, -0.0332135761048, -0.0731185084467, -0.0814798836443, 0.0449584616461, -0.0693832907836, -0.0611765950943, -0.0797777288823 +142.0, -0.00914709342983, 0.0506801187398, 0.00133873038136, -0.0022277398612, 0.0796122588137, 0.0700839718618, 0.0339135482338, -0.00259226199818, 0.0267142576335, 0.0817644407962 +109.0, -0.00551455497881, -0.044641636507, 0.0649296427403, 0.0356438377699, -0.00156895982021, 0.0149698425868, -0.0139477432193, 0.000728838880649, -0.0181182673079, 0.0320591578182 +180.0, 0.0961965216497, -0.044641636507, 0.0401399650411, -0.057313670961, 0.0452134373586, 0.0606895180081, -0.0213110188275, 0.0361539149215, 0.0125531528134, 0.0237749439885 +144.0, -0.0745327855482, -0.044641636507, -0.0234509473179, -0.00567061055493, -0.020832299835, -0.0141529643596, 0.0155053592134, -0.0394933828741, -0.0384591123014, -0.0300724459043 +163.0, 0.0598711371395, 0.0506801187398, 0.0530737099276, 0.0528581912386, 0.0328298616348, 0.0196670695137, -0.0102661054152, 0.0343088588777, 0.0552050380896, -0.00107769750047 +147.0, -0.0236772472339, -0.044641636507, 0.0401399650411, -0.0125563519424, -0.00982467696942, -0.00100072896443, -0.00290282980707, -0.00259226199818, -0.0119006848015, -0.038356659734 +97.0, 0.00901559882527, -0.044641636507, -0.0202175110963, -0.0538708002672, 0.0314539087766, 0.020606514899, 0.0560033750583, -0.0394933828741, -0.0109044358474, -0.00107769750047 +220.0, 0.0162806757273, 0.0506801187398, 0.0142724752679, 0.00121513083254, 0.00118294589619, -0.0213553789807, -0.0323559322398, 0.0343088588777, 0.0749683360277, 0.0403433716479 +190.0, 0.0199132141783, -0.044641636507, -0.0342290680567, 0.0551534384825, 0.0672286830898, 0.0741549018651, -0.00658446761116, 0.0328328140427, 0.0247253233428, 0.0693381200517 +109.0, 0.0889314447477, -0.044641636507, 0.00672779075076, 0.0253152256887, 0.0300779559184, 0.00870687335105, 0.0633666506665, -0.0394933828741, 0.00943640914608, 0.0320591578182 +191.0, 0.0199132141783, -0.044641636507, 0.004572166603, 0.0459724498511, -0.0180803941186, -0.0545491159304, 0.0633666506665, -0.0394933828741, 0.0286607203138, 0.0610539062221 +122.0, -0.0236772472339, -0.044641636507, 0.0304396563761, -0.00567061055493, 0.0823641645301, 0.0920043641871, -0.0176293810234, 0.0712099797536, 0.0330470723549, 0.00306440941437 +230.0, 0.0961965216497, -0.044641636507, 0.0519958978538, 0.0792535333387, 0.054845107366, 0.0365770864503, -0.0765355858888, 0.141322109418, 0.0986463743049, 0.0610539062221 +242.0, 0.0235457526293, 0.0506801187398, 0.0616962065187, 0.06203917987, 0.0245741444856, -0.0360733566849, -0.0912621371052, 0.155344535351, 0.133395733837, 0.0817644407962 +248.0, 0.0707687524926, 0.0506801187398, -0.00728376620969, 0.0494153205448, 0.0603489187988, -0.00444536204411, -0.0544457590643, 0.10811110063, 0.1290194116, 0.0569117993072 +249.0, 0.0308108295314, -0.044641636507, 0.00564997867688, 0.0115437429137, 0.0782363059555, 0.0779126834065, -0.043400845652, 0.10811110063, 0.0660482061631, 0.0196328370737 +192.0, -0.00188201652779, -0.044641636507, 0.0541515220015, -0.0664946594891, 0.0727324945226, 0.0566185880048, -0.043400845652, 0.0848633944777, 0.0844952822124, 0.0486275854776 +131.0, 0.0453409833355, 0.0506801187398, -0.00836157828357, -0.0332135761048, -0.00707277125302, 0.0011913102681, -0.0397192078479, 0.0343088588777, 0.0299356483965, 0.0279170509034 +237.0, 
0.0744012909436, -0.044641636507, 0.114508998139, 0.0287580963824, 0.0245741444856, 0.0249905933641, 0.0191869970175, -0.00259226199818, -0.000609254186102, -0.0052198044153 +78.0, -0.038207401038, -0.044641636507, 0.0670852668881, -0.0607565416547, -0.0290880169842, -0.0232342697515, -0.0102661054152, -0.00259226199818, -0.00149858682029, 0.0196328370737 +135.0, -0.0127796318808, 0.0506801187398, -0.0557853095343, -0.0022277398612, -0.027712064126, -0.0291840905255, 0.0191869970175, -0.0394933828741, -0.0170521046047, 0.0444854785627 +244.0, 0.00901559882527, 0.0506801187398, 0.0304396563761, 0.0425295791574, -0.00294491267841, 0.0368902349121, -0.0654906724765, 0.0712099797536, -0.0236445575721, 0.0154907301589 +199.0, 0.0816663678457, 0.0506801187398, -0.0256065714657, -0.0366564467986, -0.0703666027303, -0.0464072559239, -0.0397192078479, -0.00259226199818, -0.041180385188, -0.0052198044153 +270.0, 0.0308108295314, -0.044641636507, 0.104808689474, 0.0769582860947, -0.0112006298276, -0.0113346282035, -0.0581273968684, 0.0343088588777, 0.0571041874478, 0.036201264733 +164.0, 0.0271782910804, 0.0506801187398, -0.00620595413581, 0.0287580963824, -0.0167044412604, -0.00162702588801, -0.0581273968684, 0.0343088588777, 0.0293004132686, 0.0320591578182 +72.0, -0.0600026317441, 0.0506801187398, -0.0471628129433, -0.0228849640236, -0.0717425555885, -0.0576806005483, -0.00658446761116, -0.0394933828741, -0.0629129499163, -0.0549250873933 +96.0, 0.00538306037425, -0.044641636507, -0.0482406250172, -0.0125563519424, 0.00118294589619, -0.00663740127664, 0.0633666506665, -0.0394933828741, -0.0514005352606, -0.0590671943082 +306.0, -0.0200447087829, -0.044641636507, 0.0854080721441, -0.0366564467986, 0.0919958345375, 0.0894991764927, -0.0618090346725, 0.145012221505, 0.0809479135113, 0.0527696923924 +91.0, 0.0199132141783, 0.0506801187398, -0.0126728265791, 0.0700725447073, -0.0112006298276, 0.0071411310421, -0.0397192078479, 0.0343088588777, 0.00538436996855, 0.00306440941437 +214.0, -0.0636351701951, -0.044641636507, -0.0331512559828, -0.0332135761048, 0.00118294589619, 0.0240511479787, -0.0249926566316, -0.00259226199818, -0.0225121719297, -0.0590671943082 +95.0, 0.0271782910804, -0.044641636507, -0.00728376620969, -0.0504279295735, 0.0754844002391, 0.0566185880048, 0.0339135482338, -0.00259226199818, 0.0434431722528, 0.0154907301589 +216.0, -0.0164121703319, -0.044641636507, -0.013750638653, 0.132044217195, -0.00982467696942, -0.00381906512053, 0.0191869970175, -0.0394933828741, -0.0358167281015, -0.0300724459043 +263.0, 0.0308108295314, 0.0506801187398, 0.0595405823709, 0.0563010619323, -0.0222082526932, 0.0011913102681, -0.0323559322398, -0.00259226199818, -0.0247911874325, -0.0176461251598 +178.0, 0.0562385986885, 0.0506801187398, 0.0218171597851, 0.0563010619323, -0.00707277125302, 0.0181013272047, -0.0323559322398, -0.00259226199818, -0.0236445575721, 0.0237749439885 +113.0, -0.0200447087829, -0.044641636507, 0.0185837235635, 0.0907297688697, 0.00393485161259, 0.00870687335105, 0.0375951860379, -0.0394933828741, -0.0578000656756, 0.0072065163292 +200.0, -0.107225631607, -0.044641636507, -0.0115950145052, -0.0400993174923, 0.0493412959332, 0.0644472995496, -0.0139477432193, 0.0343088588777, 0.00702686254915, -0.0300724459043 +139.0, 0.0816663678457, 0.0506801187398, -0.00297251791417, -0.0332135761048, 0.0424615316422, 0.057871181852, -0.0102661054152, 0.0343088588777, -0.000609254186102, -0.00107769750047 +139.0, 0.00538306037425, 0.0506801187398, 0.0175059114896, 0.0322009670762, 
0.127770608851, 0.127390140369, -0.0213110188275, 0.0712099797536, 0.0625751814581, 0.0154907301589 +88.0, 0.0380759064334, 0.0506801187398, -0.0299178197612, -0.0745280244297, -0.0125765826858, -0.0125872220506, 0.00446044580111, -0.00259226199818, 0.00371173823344, -0.0300724459043 +148.0, 0.0308108295314, -0.044641636507, -0.0202175110963, -0.00567061055493, -0.00432086553661, -0.0294972389873, 0.0780932018828, -0.0394933828741, -0.0109044358474, -0.00107769750047 +88.0, 0.00175052192323, 0.0506801187398, -0.0579409336821, -0.043542188186, -0.0965097070361, -0.0470335528475, -0.0986254127133, 0.0343088588777, -0.0611765950943, -0.0714935150527 +243.0, -0.0273097856849, 0.0506801187398, 0.0606183944448, 0.107944122338, 0.0121905687618, -0.0175975974393, -0.00290282980707, -0.00259226199818, 0.0702112981933, 0.135611830689 +71.0, -0.0854304009012, 0.0506801187398, -0.0406959405, -0.0332135761048, -0.0813742255959, -0.0695802420963, -0.00658446761116, -0.0394933828741, -0.0578000656756, -0.0424987666488 +77.0, 0.0126481372763, 0.0506801187398, -0.0719524906425, -0.0469850588798, -0.0511032627155, -0.0971373067338, 0.118591217728, -0.07639450375, -0.0202887477516, -0.038356659734 +109.0, -0.0527375548421, -0.044641636507, -0.0557853095343, -0.0366564467986, 0.0892439288211, -0.00319276819696, 0.00814208360519, 0.0343088588777, 0.132372649339, 0.00306440941437 +272.0, -0.0236772472339, 0.0506801187398, 0.0455290254105, 0.021872354995, 0.109883221694, 0.0888728795692, 0.000778807997018, 0.0343088588777, 0.07419253669, 0.0610539062221 +60.0, -0.0745327855482, 0.0506801187398, -0.00943939035745, 0.0149866136075, -0.0373437341334, -0.0216685274425, -0.0139477432193, -0.00259226199818, -0.0332487872476, 0.011348623244 +54.0, -0.00551455497881, 0.0506801187398, -0.0331512559828, -0.0159992226361, 0.0080627101872, 0.016222436434, 0.0155053592134, -0.00259226199818, -0.028320242548, -0.0756356219675 +221.0, -0.0600026317441, 0.0506801187398, 0.049840273706, 0.0184294843012, -0.0167044412604, -0.0301235359109, -0.0176293810234, -0.00259226199818, 0.0497686599207, -0.0590671943082 +90.0, -0.0200447087829, -0.044641636507, -0.0848862355291, -0.0263278347174, -0.0359677812752, -0.0341944659141, 0.041276823842, -0.0516707527631, -0.0823814832581, -0.0466408735636 +311.0, 0.0380759064334, 0.0506801187398, 0.00564997867688, 0.0322009670762, 0.006686757329, 0.0174750302812, -0.0249926566316, 0.0343088588777, 0.0148227108413, 0.0610539062221 +281.0, 0.0162806757273, -0.044641636507, 0.0207393477112, 0.021872354995, -0.013952535544, -0.0132135189742, -0.00658446761116, -0.00259226199818, 0.0133159679089, 0.0403433716479 +182.0, 0.0417084448844, -0.044641636507, -0.00728376620969, 0.0287580963824, -0.0428475455662, -0.0482861466946, 0.0523217372542, -0.07639450375, -0.072128454602, 0.0237749439885 +321.0, 0.0199132141783, 0.0506801187398, 0.104808689474, 0.0700725447073, -0.0359677812752, -0.0266789028312, -0.0249926566316, -0.00259226199818, 0.00371173823344, 0.0403433716479 +58.0, -0.049105016391, 0.0506801187398, -0.0245287593918, 6.75072794357e-05, -0.0469754041408, -0.0282446451401, -0.0654906724765, 0.0284046795376, 0.0191990330786, 0.011348623244 +262.0, 0.00175052192323, 0.0506801187398, -0.00620595413581, -0.0194420933299, -0.00982467696942, 0.00494909180957, -0.0397192078479, 0.0343088588777, 0.0148227108413, 0.0983328684556 +206.0, 0.0344433679824, -0.044641636507, -0.0385403163522, -0.0125563519424, 0.0094386630454, 0.00526224027136, -0.00658446761116, -0.00259226199818, 0.0311929907028, 
0.0983328684556 +233.0, -0.04547247794, 0.0506801187398, 0.13714305169, -0.0159992226361, 0.041085578784, 0.0318798595235, -0.043400845652, 0.0712099797536, 0.071021577946, 0.0486275854776 +242.0, -0.00914709342983, 0.0506801187398, 0.170555225981, 0.0149866136075, 0.0300779559184, 0.0337587502942, -0.0213110188275, 0.0343088588777, 0.0336568129024, 0.0320591578182 +123.0, -0.0164121703319, 0.0506801187398, 0.00241654245524, 0.0149866136075, 0.0218222387692, -0.0100820343563, -0.0249926566316, 0.0343088588777, 0.0855331211874, 0.0817644407962 +167.0, -0.00914709342983, -0.044641636507, 0.0379843408933, -0.0400993174923, -0.0249601584096, -0.00381906512053, -0.043400845652, 0.0158582984398, -0.00514530798026, 0.0279170509034 +63.0, 0.0199132141783, -0.044641636507, -0.0579409336821, -0.057313670961, -0.00156895982021, -0.0125872220506, 0.0744115640788, -0.0394933828741, -0.0611765950943, -0.0756356219675 +197.0, 0.0526060602375, 0.0506801187398, -0.00943939035745, 0.0494153205448, 0.0507172487914, -0.0191633397482, -0.0139477432193, 0.0343088588777, 0.119343994204, -0.0176461251598 +71.0, -0.0273097856849, 0.0506801187398, -0.0234509473179, -0.0159992226361, 0.01356652162, 0.0127778033543, 0.0265502726256, -0.00259226199818, -0.0109044358474, -0.0217882320746 +168.0, -0.0745327855482, -0.044641636507, -0.0105172024313, -0.00567061055493, -0.0662387441557, -0.0570543036248, -0.00290282980707, -0.0394933828741, -0.0425721049228, -0.00107769750047 +140.0, -0.107225631607, -0.044641636507, -0.0342290680567, -0.0676422830422, -0.0634868384393, -0.0705196874817, 0.00814208360519, -0.0394933828741, -0.000609254186102, -0.0797777288823 +217.0, 0.0453409833355, 0.0506801187398, -0.00297251791417, 0.107944122338, 0.0355817673512, 0.0224854056698, 0.0265502726256, -0.00259226199818, 0.0280165065233, 0.0196328370737 +121.0, -0.00188201652779, -0.044641636507, 0.068163078962, -0.00567061055493, 0.119514891701, 0.130208476525, -0.0249926566316, 0.0867084505215, 0.0461323310394, -0.00107769750047 +235.0, 0.0199132141783, 0.0506801187398, 0.00996122697241, 0.0184294843012, 0.0149424744782, 0.0447189464568, -0.0618090346725, 0.0712099797536, 0.00943640914608, -0.063209301223 +245.0, 0.0162806757273, 0.0506801187398, 0.00241654245524, -0.00567061055493, -0.00569681839481, 0.0108989125836, -0.0507641212602, 0.0343088588777, 0.0226920225667, -0.038356659734 +40.0, -0.00188201652779, -0.044641636507, -0.0385403163522, 0.021872354995, -0.10889328276, -0.115613065979, 0.0228686348215, -0.07639450375, -0.0468794828442, 0.0237749439885 +52.0, 0.0162806757273, -0.044641636507, 0.0261284080806, 0.0585963091762, -0.0607349327229, -0.0442152166914, -0.0139477432193, -0.0339582147427, -0.0514005352606, -0.0259303389895 +104.0, -0.0709002470972, 0.0506801187398, -0.0891974838246, -0.0745280244297, -0.0428475455662, -0.0257394574458, -0.0323559322398, -0.00259226199818, -0.0129079422542, -0.0549250873933 +132.0, 0.0489735217865, -0.044641636507, 0.0606183944448, -0.0228849640236, -0.0235842055514, -0.0727117267142, -0.043400845652, -0.00259226199818, 0.104137611359, 0.036201264733 +88.0, 0.00538306037425, 0.0506801187398, -0.0288400076873, -0.00911348124867, -0.0318399227006, -0.0288709420637, 0.00814208360519, -0.0394933828741, -0.0181182673079, 0.0072065163292 +69.0, 0.0344433679824, 0.0506801187398, -0.0299178197612, 0.00465800152627, 0.0933717873957, 0.0869939887984, 0.0339135482338, -0.00259226199818, 0.0240525832269, -0.038356659734 +219.0, 0.0235457526293, 0.0506801187398, -0.0191396990224, 0.0494153205448, 
-0.0634868384393, -0.061125233628, 0.00446044580111, -0.0394933828741, -0.0259524244352, -0.013504018245 +72.0, 0.0199132141783, -0.044641636507, -0.0406959405, -0.0159992226361, -0.00844872411122, -0.0175975974393, 0.0523217372542, -0.0394933828741, -0.0307512098646, 0.00306440941437 +201.0, -0.04547247794, -0.044641636507, 0.0153502873418, -0.0745280244297, -0.0497273098573, -0.0172844489775, -0.0286742944357, -0.00259226199818, -0.104364820832, -0.0756356219675 +110.0, 0.0526060602375, 0.0506801187398, -0.0245287593918, 0.0563010619323, -0.00707277125302, -0.00507165896769, -0.0213110188275, -0.00259226199818, 0.0267142576335, -0.038356659734 +51.0, -0.00551455497881, 0.0506801187398, 0.00133873038136, -0.0848566365109, -0.0112006298276, -0.0166581520539, 0.0486400994501, -0.0394933828741, -0.041180385188, -0.088061942712 +277.0, 0.00901559882527, 0.0506801187398, 0.0692408910359, 0.0597439326261, 0.0176943801946, -0.0232342697515, -0.0470824834561, 0.0343088588777, 0.103292264912, 0.0734802269666 +63.0, -0.0236772472339, -0.044641636507, -0.0697968664948, -0.0641994123485, -0.0593589798647, -0.0504781859272, 0.0191869970175, -0.0394933828741, -0.0891368600793, -0.0507829804785 +118.0, -0.041839939489, 0.0506801187398, -0.0299178197612, -0.0022277398612, 0.0218222387692, 0.0365770864503, 0.0118237214093, -0.00259226199818, -0.041180385188, 0.0651960131369 +69.0, -0.0745327855482, -0.044641636507, -0.0460850008694, -0.043542188186, -0.0290880169842, -0.0232342697515, 0.0155053592134, -0.0394933828741, -0.0398095943643, -0.0217882320746 +273.0, 0.0344433679824, -0.044641636507, 0.0185837235635, 0.0563010619323, 0.0121905687618, -0.0545491159304, -0.0691723102806, 0.0712099797536, 0.130080609522, 0.0072065163292 +258.0, -0.0600026317441, -0.044641636507, 0.00133873038136, -0.0297707054111, -0.00707277125302, -0.0216685274425, 0.0118237214093, -0.00259226199818, 0.0318152175008, -0.0549250873933 +43.0, -0.0854304009012, 0.0506801187398, -0.0309956318351, -0.0228849640236, -0.0634868384393, -0.0542359674686, 0.0191869970175, -0.0394933828741, -0.0964332228918, -0.0342145528191 +198.0, 0.0526060602375, -0.044641636507, -0.00405032998805, -0.0309183289642, -0.0469754041408, -0.0583068974719, -0.0139477432193, -0.02583996815, 0.0360557900898, 0.0237749439885 +242.0, 0.0126481372763, -0.044641636507, 0.0153502873418, -0.0332135761048, 0.041085578784, 0.0321930079853, -0.00290282980707, -0.00259226199818, 0.0450661683363, -0.0673514081378 +232.0, 0.0598711371395, 0.0506801187398, 0.022894971859, 0.0494153205448, 0.0163184273364, 0.0118383579689, -0.0139477432193, -0.00259226199818, 0.039539878072, 0.0196328370737 +175.0, -0.0236772472339, -0.044641636507, 0.0455290254105, 0.0907297688697, -0.0180803941186, -0.0354470597613, 0.0707299262747, -0.0394933828741, -0.0345237153303, -0.00936191133014 +93.0, 0.0162806757273, -0.044641636507, -0.0450071887955, -0.057313670961, -0.034591828417, -0.0539228190069, 0.0744115640788, -0.07639450375, -0.0425721049228, 0.0403433716479 +168.0, 0.110726675454, 0.0506801187398, -0.0331512559828, -0.0228849640236, -0.00432086553661, 0.0202933664373, -0.0618090346725, 0.0712099797536, 0.0155668445407, 0.0444854785627 +275.0, -0.0200447087829, -0.044641636507, 0.0972640049568, -0.00567061055493, -0.00569681839481, -0.0238605666751, -0.0213110188275, -0.00259226199818, 0.0616858488239, 0.0403433716479 +293.0, -0.0164121703319, -0.044641636507, 0.0541515220015, 0.0700725447073, -0.0332158755588, -0.0279314966783, 0.00814208360519, -0.0394933828741, -0.0271286455543, 
-0.00936191133014 +281.0, 0.0489735217865, 0.0506801187398, 0.12313149473, 0.0838440274822, -0.104765424185, -0.100895088275, -0.0691723102806, -0.00259226199818, 0.0366457977934, -0.0300724459043 +72.0, -0.0563700932931, -0.044641636507, -0.0805749872336, -0.0848566365109, -0.0373437341334, -0.0370128020702, 0.0339135482338, -0.0394933828741, -0.056157573095, -0.13776722569 +140.0, 0.0271782910804, -0.044641636507, 0.0929527566612, -0.0527231767141, 0.0080627101872, 0.0397085710682, -0.0286742944357, 0.0210244553624, -0.0483617248029, 0.0196328370737 +189.0, 0.0635036755906, -0.044641636507, -0.0503962491649, 0.107944122338, 0.0314539087766, 0.0193539210519, -0.0176293810234, 0.0236075338237, 0.0580391276639, 0.0403433716479 +181.0, -0.0527375548421, 0.0506801187398, -0.0115950145052, 0.0563010619323, 0.0562210602242, 0.0729023080179, -0.0397192078479, 0.0712099797536, 0.0305664873984, -0.0052198044153 +209.0, -0.00914709342983, 0.0506801187398, -0.0277621956134, 0.00810087222001, 0.047965343075, 0.0372033833739, -0.0286742944357, 0.0343088588777, 0.0660482061631, -0.0424987666488 +136.0, 0.00538306037425, -0.044641636507, 0.058462770297, -0.043542188186, -0.0731185084467, -0.0723985782524, 0.0191869970175, -0.07639450375, -0.0514005352606, -0.0259303389895 +261.0, 0.0744012909436, -0.044641636507, 0.0854080721441, 0.0631868033198, 0.0149424744782, 0.0130909518161, 0.0155053592134, -0.00259226199818, 0.00620931561651, 0.0859065477111 +113.0, -0.0527375548421, -0.044641636507, -0.000816893766404, -0.0263278347174, 0.0108146159036, 0.0071411310421, 0.0486400994501, -0.0394933828741, -0.0358167281015, 0.0196328370737 +131.0, 0.0816663678457, 0.0506801187398, 0.00672779075076, -0.00452298700183, 0.109883221694, 0.11705624113, -0.0323559322398, 0.0918746074441, 0.0547240033482, 0.0072065163292 +174.0, -0.00551455497881, -0.044641636507, 0.00888341489852, -0.0504279295735, 0.0259500973438, 0.0472241341512, -0.043400845652, 0.0712099797536, 0.0148227108413, 0.00306440941437 +257.0, -0.0273097856849, -0.044641636507, 0.0800190117747, 0.098763133707, -0.00294491267841, 0.0181013272047, -0.0176293810234, 0.00331191734196, -0.0295276227418, 0.036201264733 +55.0, -0.0527375548421, -0.044641636507, 0.0713965151836, -0.0745280244297, -0.0153284884022, -0.00131387742622, 0.00446044580111, -0.0214118336449, -0.0468794828442, 0.00306440941437 +84.0, 0.00901559882527, -0.044641636507, -0.0245287593918, -0.0263278347174, 0.0988755988285, 0.0941964034196, 0.0707299262747, -0.00259226199818, -0.0213936809404, 0.0072065163292 +42.0, -0.0200447087829, -0.044641636507, -0.0547074974604, -0.0538708002672, -0.0662387441557, -0.0573674520865, 0.0118237214093, -0.0394933828741, -0.0740888714915, -0.0052198044153 +146.0, 0.0235457526293, -0.044641636507, -0.0363846922045, 6.75072794357e-05, 0.00118294589619, 0.0346981956796, -0.043400845652, 0.0343088588777, -0.0332487872476, 0.0610539062221 +212.0, 0.0380759064334, 0.0506801187398, 0.0164280994157, 0.021872354995, 0.0397096259258, 0.0450320949186, -0.043400845652, 0.0712099797536, 0.0497686599207, 0.0154907301589 +233.0, -0.0781653239992, 0.0506801187398, 0.0778633876269, 0.0528581912386, 0.0782363059555, 0.0644472995496, 0.0265502726256, -0.00259226199818, 0.0406722637145, -0.00936191133014 +91.0, 0.00901559882527, 0.0506801187398, -0.0396181284261, 0.0287580963824, 0.0383336730676, 0.0735286049415, -0.0728539480847, 0.10811110063, 0.0155668445407, -0.0466408735636 +111.0, 0.00175052192323, 0.0506801187398, 0.0110390390463, -0.0194420933299, -0.0167044412604, 
-0.00381906512053, -0.0470824834561, 0.0343088588777, 0.0240525832269, 0.0237749439885 +152.0, -0.0781653239992, -0.044641636507, -0.0406959405, -0.0814137658171, -0.100637565611, -0.112794729823, 0.0228686348215, -0.07639450375, -0.0202887477516, -0.0507829804785 +120.0, 0.0308108295314, 0.0506801187398, -0.0342290680567, 0.0436772026072, 0.0575970130824, 0.0688313780146, -0.0323559322398, 0.0575565650295, 0.0354619386608, 0.0859065477111 +67.0, -0.034574862587, 0.0506801187398, 0.00564997867688, -0.00567061055493, -0.0731185084467, -0.062690975937, -0.00658446761116, -0.0394933828741, -0.045420957777, 0.0320591578182 +310.0, 0.0489735217865, 0.0506801187398, 0.0886415083657, 0.0872868981759, 0.0355817673512, 0.0215459602844, -0.0249926566316, 0.0343088588777, 0.0660482061631, 0.131469723774 +94.0, -0.041839939489, -0.044641636507, -0.0331512559828, -0.0228849640236, 0.0465893902168, 0.0415874618389, 0.0560033750583, -0.0247329345237, -0.0259524244352, -0.038356659734 +183.0, -0.00914709342983, -0.044641636507, -0.0568631216082, -0.0504279295735, 0.0218222387692, 0.0453452433804, -0.0286742944357, 0.0343088588777, -0.00991895736315, -0.0176461251598 +66.0, 0.0707687524926, 0.0506801187398, -0.0309956318351, 0.021872354995, -0.0373437341334, -0.0470335528475, 0.0339135482338, -0.0394933828741, -0.0149564750249, -0.00107769750047 +173.0, 0.00901559882527, -0.044641636507, 0.0552293340754, -0.00567061055493, 0.0575970130824, 0.0447189464568, -0.00290282980707, 0.023238522615, 0.0556835477027, 0.106617082285 +72.0, -0.0273097856849, -0.044641636507, -0.0600965578299, -0.0297707054111, 0.0465893902168, 0.0199802179755, 0.122272855532, -0.0394933828741, -0.0514005352606, -0.00936191133014 +49.0, 0.0162806757273, -0.044641636507, 0.00133873038136, 0.00810087222001, 0.00531080447079, 0.0108989125836, 0.0302319104297, -0.0394933828741, -0.045420957777, 0.0320591578182 +64.0, -0.0127796318808, -0.044641636507, -0.0234509473179, -0.0400993174923, -0.0167044412604, 0.00463594334778, -0.0176293810234, -0.00259226199818, -0.0384591123014, -0.038356659734 +48.0, -0.0563700932931, -0.044641636507, -0.0741081147903, -0.0504279295735, -0.0249601584096, -0.0470335528475, 0.0928197530992, -0.07639450375, -0.0611765950943, -0.0466408735636 +178.0, 0.0417084448844, 0.0506801187398, 0.0196615356373, 0.0597439326261, -0.00569681839481, -0.00256647127338, -0.0286742944357, -0.00259226199818, 0.0311929907028, 0.0072065163292 +104.0, -0.00551455497881, 0.0506801187398, -0.0159062628007, -0.0676422830422, 0.0493412959332, 0.0791652772537, -0.0286742944357, 0.0343088588777, -0.0181182673079, 0.0444854785627 +132.0, 0.0417084448844, 0.0506801187398, -0.0159062628007, 0.0172818607481, -0.0373437341334, -0.0138398158978, -0.0249926566316, -0.0110795197996, -0.0468794828442, 0.0154907301589 +220.0, -0.04547247794, -0.044641636507, 0.0390621529672, 0.00121513083254, 0.0163184273364, 0.0152829910486, -0.0286742944357, 0.0265596234938, 0.0445283740214, -0.0259303389895 +57.0, -0.04547247794, -0.044641636507, -0.0730303027164, -0.0814137658171, 0.0837401173883, 0.0278089295202, 0.173815784789, -0.0394933828741, -0.00421985970695, 0.00306440941437 \ No newline at end of file diff --git a/ignite-extensions/modules/ml-ext/ml/src/test/resources/manualrun/trees/columntrees.manualrun.properties b/ignite-extensions/modules/ml-ext/ml/src/test/resources/manualrun/trees/columntrees.manualrun.properties new file mode 100644 index 0000000000000..b7c9c6d078e43 --- /dev/null +++ 
b/ignite-extensions/modules/ml-ext/ml/src/test/resources/manualrun/trees/columntrees.manualrun.properties
@@ -0,0 +1,21 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Paths to mnistAsStream dataset parts.
+mnist.training.images=/path/to/mnist/train-images-idx3-ubyte
+mnist.training.labels=/path/to/mnist/train-labels-idx1-ubyte
+mnist.test.images=/path/to/mnist/t10k-images-idx3-ubyte
+mnist.test.labels=/path/to/mnist/t10k-labels-idx1-ubyte
diff --git a/ignite-extensions/modules/mongodb-relay/pom.xml b/ignite-extensions/modules/mongodb-relay/pom.xml
index 052dda3dbdd88..5334e13d617e8 100644
--- a/ignite-extensions/modules/mongodb-relay/pom.xml
+++ b/ignite-extensions/modules/mongodb-relay/pom.xml
@@ -6,11 +6,11 @@
     <modelVersion>4.0.0</modelVersion>
 
     <parent>
-        <groupId>org.apache.ignite</groupId>
-        <artifactId>ignite-parent-internal</artifactId>
-        <version>2.16.999-SNAPSHOT</version>
-        <relativePath>../../parent-internal/pom.xml</relativePath>
-    </parent>
+        <groupId>org.apache.ignite</groupId>
+        <artifactId>ignite-parent-ext-internal</artifactId>
+        <version>1</version>
+        <relativePath>../../parent-internal/pom.xml</relativePath>
+    </parent>
 
     <artifactId>ignite-mongodb-realy</artifactId>
@@ -77,7 +77,8 @@
         <dependency>
             <groupId>org.apache.ignite</groupId>
-            <artifactId>ignite-ml</artifactId>
+            <artifactId>ignite-ml-ext</artifactId>
+            <version>1.1.0-SNAPSHOT</version>
            <scope>provided</scope>
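The columntrees.manualrun.properties file added above only supplies file-system locations for the MNIST IDX files used by the manual-run tree tests; the /path/to/mnist placeholders must be replaced with real paths before running. Below is a minimal sketch of how such a properties file could be consumed with plain JDK classes only; the class name MnistPathsSketch and the hard-coded file name in it are hypothetical and not taken from the module.

// Minimal sketch (hypothetical class, plain JDK only): resolve the four
// mnist.* keys from columntrees.manualrun.properties and verify the files exist.
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Properties;

public class MnistPathsSketch {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();

        // Assumes the file is reachable from the working directory;
        // in the module it lives under src/test/resources/manualrun/trees/.
        try (InputStream in = new FileInputStream("columntrees.manualrun.properties")) {
            props.load(in);
        }

        // The four keys defined in the properties file above.
        String[] keys = {
            "mnist.training.images", "mnist.training.labels",
            "mnist.test.images", "mnist.test.labels"
        };

        for (String key : keys) {
            Path path = Paths.get(props.getProperty(key));
            System.out.printf("%s -> %s (exists: %b)%n", key, path, Files.exists(path));
        }
    }
}

Pointing the four values at a real MNIST download and running this main() is enough to confirm the paths before launching the manual-run tests.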