From c45b9913fbccbbfc8999b98c1fca4dee54cc562b Mon Sep 17 00:00:00 2001
From: Tanner Lewis
Date: Mon, 8 Jul 2024 16:03:26 -0400
Subject: [PATCH] Add Backfill basic integ tests / Refactor integ tests to use Console Library (#759)

--Add Backfill basic integ tests

Lays the groundwork for generic backfill integration tests (usable across any backfill migration) that operate by using the Console Library. These tests currently expect a single execution flow for a backfill migration: all data is preloaded onto the source cluster as needed by the different tests, the backfill migration is kicked off, and the test cases verify the result. This keeps the execution time reasonable for our common tests (containers are not started and stopped multiple times), with my expectation being that in the future we will have more targeted test cases which need to control this flow and can operate independently.

--Refactor all integ tests to use Console Library

This change involved moving our integ tests into a library on the Migration Console. Since we do plan on having the Migration Console execute these tests, this placement makes sense, but the larger goals it enables are to have these tests available from startup on the Migration Console (allowing us to remove some troublesome code that tried to pull them with git) and to let these tests easily make use of the Console Library for performing operations (a desperately needed addition to our tests).

---------

Signed-off-by: Tanner Lewis
---
 .github/workflows/CI.yml | 9 +-
 .../main/docker/migrationConsole/Dockerfile | 6 +-
 .../docker-compose-console-only.yml | 6 +-
 .../migrationConsole/lib/console_link/Pipfile | 1 +
 .../lib/console_link/Pipfile.lock | 1087 ++++++++++++++++-
 .../lib/console_link/console_link/cli.py | 11 +-
 .../console_link/console_link/environment.py | 3 +-
 .../console_link/logic/clusters.py | 11 +-
 .../console_link/console_link/logic/replay.py | 10 +-
 .../console_link/logic/snapshot.py | 9 +
 .../console_link/models/backfill_rfs.py | 6 +-
 .../console_link/models/cluster.py | 48 +-
 .../console_link/models/replayer_base.py | 5 +-
 .../console_link/models/replayer_docker.py | 28 +
 .../console_link/models/replayer_ecs.py | 15 +-
 .../console_link/models/snapshot.py | 19 +
 .../lib/console_link/services.yaml | 2 +
 .../migrationConsole/lib/integ_test}/Pipfile | 3 +-
 .../lib/integ_test}/Pipfile.lock | 112 +-
 .../migrationConsole/lib/integ_test/README.md | 44 +
 .../integ_test/integ_test/backfill_tests.py | 111 ++
 .../integ_test/common_operations.py | 216 ++++
 .../lib/integ_test/integ_test/conftest.py | 25 +
 .../integ_test/metric_operations.py | 80 ++
 .../integ_test/integ_test/replayer_tests.py | 247 ++++
 .../migrationConsole/setupIntegTests.sh | 48 -
 .../lib/lambda/msk-public-endpoint-handler.ts | 2 +-
 .../lib/migration-assistance-stack.ts | 36 +-
 .../lib/msk-utility-stack.ts | 2 +
 .../rfsBackfillE2EPipeline.groovy | 5 +-
 .../vars/defaultIntegPipeline.groovy | 7 +-
 test/README.md | 80 +-
 test/awsE2ESolutionSetup.sh | 28 +-
 test/awsRunIntegTests.sh | 68 +-
 test/conftest.py | 95 --
 test/operations.py | 102 --
 test/setup.py | 10 -
 test/tests.py | 436 -------
 38 files changed, 2089 insertions(+), 944 deletions(-)
 create mode 100644 TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/replayer_docker.py
 rename {test => TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test}/Pipfile (83%)
 rename {test =>
TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test}/Pipfile.lock (66%) create mode 100644 TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/README.md create mode 100644 TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/backfill_tests.py create mode 100644 TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/common_operations.py create mode 100644 TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/conftest.py create mode 100644 TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/metric_operations.py create mode 100644 TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/replayer_tests.py delete mode 100755 TrafficCapture/dockerSolution/src/main/docker/migrationConsole/setupIntegTests.sh delete mode 100644 test/conftest.py delete mode 100644 test/operations.py delete mode 100644 test/setup.py delete mode 100644 test/tests.py diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index b3f436675..3eb8ebe0f 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -124,15 +124,10 @@ jobs: run: ./gradlew -p TrafficCapture dockerSolution:ComposeUp -x test --scan --info --stacktrace env: OS_MIGRATIONS_GRADLE_SCAN_TOS_AGREE_AND_ENABLED: '' - - name: Install python dependencies - working-directory: test - run: | - python -m pip install --upgrade pipenv - pipenv install --deploy --dev - name: Run E2E test script - working-directory: test + working-directory: TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test run: | - pipenv run pytest tests.py --unique_id="testindex" + docker exec $(docker ps --filter "name=migration-console" -q) pytest /root/lib/integ_test/integ_test/replayer_tests.py --unique_id="testindex" -s - name: Clean up migrations docker images before caching run: | docker stop $(docker ps -q) && docker rm $(docker ps -aq) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile index 55bb3bff7..dd759d390 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile @@ -24,9 +24,6 @@ COPY kafkaCmdRef.md /root/kafka-tools COPY humanReadableLogs.py /root/ RUN chmod ug+x /root/humanReadableLogs.py -COPY setupIntegTests.sh /root/ -RUN chmod ug+x /root/setupIntegTests.sh - COPY showFetchMigrationCommand.sh /root/ RUN chmod ug+x /root/showFetchMigrationCommand.sh @@ -39,6 +36,9 @@ RUN chmod ug+x /root/loadServicesFromParameterStore.sh COPY lib /root/lib WORKDIR /root/lib/console_link RUN pipenv install --system --deploy --ignore-pipfile +WORKDIR /root/lib/integ_test +RUN pipenv install --system --deploy --ignore-pipfile + # Experimental console API, not started by default COPY console_api /root/console_api diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/docker-compose-console-only.yml b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/docker-compose-console-only.yml index 073c026b3..6e442594c 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/docker-compose-console-only.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/docker-compose-console-only.yml @@ -13,14 +13,14 @@ services: volumes: - 
./lib/console_link/services.yaml:/etc/migration_services.yaml # this is a convenience thing for testing -- it should be removed before this makes it to prod. - - ./lib/console_link:/root/lib/console_link + - ./lib:/root/lib + - ~/.aws:/root/.aws environment: # Copy local AWS env to Docker container - #- ~/.aws:/root/.aws - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} - AWS_SESSION_TOKEN=${AWS_SESSION_TOKEN} - - AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION} + - AWS_DEFAULT_REGION=us-east-1 - API_ALLOWED_HOSTS=localhost ports: - "8000:8000" diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/Pipfile b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/Pipfile index 7e0272860..7ea9950f2 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/Pipfile +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/Pipfile @@ -5,6 +5,7 @@ name = "pypi" [packages] requests = ">=2.32.3" +opensearch-benchmark = "*" boto3 = "*" pyyaml = "*" Click = "*" diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/Pipfile.lock b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/Pipfile.lock index 860e79065..e928c401c 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/Pipfile.lock +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "de97a4154f7a084ec567fdfdc2b5b6024a5e8fe6e4b04b39a9943fce7d1e6dbe" + "sha256": "db7ad0232b205cd63a9e0df7e3fb0971e394904388baa5cee2206866ce2fe3ed" }, "pipfile-spec": 6, "requires": { @@ -16,31 +16,144 @@ ] }, "default": { + "aiohttp": { + "hashes": [ + "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8", + "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c", + "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475", + "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed", + "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf", + "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372", + "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81", + "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f", + "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1", + "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd", + "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a", + "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb", + "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46", + "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de", + "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78", + "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c", + "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771", + "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb", + "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430", + "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233", + "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156", + "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9", + 
"sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59", + "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888", + "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c", + "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c", + "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da", + "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424", + "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2", + "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb", + "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8", + "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a", + "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10", + "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0", + "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09", + "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031", + "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4", + "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3", + "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa", + "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a", + "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe", + "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a", + "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2", + "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1", + "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323", + "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b", + "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b", + "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106", + "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac", + "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6", + "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832", + "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75", + "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6", + "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d", + "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72", + "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db", + "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a", + "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da", + "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678", + "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b", + "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24", + "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed", + "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f", + "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e", + "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58", + "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a", + "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342", + "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558", + 
"sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2", + "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551", + "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595", + "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee", + "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11", + "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d", + "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7", + "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f" + ], + "version": "==3.9.5" + }, + "aiosignal": { + "hashes": [ + "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc", + "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17" + ], + "markers": "python_version >= '3.7'", + "version": "==1.3.1" + }, + "async-timeout": { + "hashes": [ + "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f", + "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028" + ], + "markers": "python_version < '3.11'", + "version": "==4.0.3" + }, + "attrs": { + "hashes": [ + "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30", + "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1" + ], + "markers": "python_version >= '3.7'", + "version": "==23.2.0" + }, "awscli": { "hashes": [ - "sha256:bad4402c4a0ff825a59a43e76e0072152497f4daf7f62c9376fe6e0014fa2a9b", - "sha256:ec5846b00a045445446fa8287ae57be84122a26aee8d7d81e1e5dc1d2ad91c6b" + "sha256:745d7766587279ac380a1d03338fe93c680e8984883b77297d0e3075d6a3c0d2", + "sha256:a5ea9719fbb2317366b93bade5844aae579259f73f8c0484e852b763ac05aea8" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==1.33.13" + "version": "==1.33.14" }, "boto3": { "hashes": [ - "sha256:05e388cb937e82be70bfd7eb0c84cf8011ff35cf582a593873ac21675268683b", - "sha256:dab8f72a6c4e62b4fd70da09e08a6b2a65ea2115b27dd63737142005776ef216" + "sha256:3b2964060620f1bbe9574b5f8d3fb2a4e087faacfc6023c24154b184f1b16443", + "sha256:b5d1681a0d8bf255787c8b37f911d706672d5722c9ace5342cd283a3cdb04820" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==1.34.131" + "version": "==1.34.132" }, "botocore": { "hashes": [ - "sha256:13b011d7b206ce00727dcee26548fa3b550db9046d5a0e90ac25a6e6c8fde6ef", - "sha256:502ddafe1d627fcf1e4c007c86454e5dd011dba7c58bd8e8a5368a79f3e387dc" + "sha256:06ef8b4bd3b3cb5a9b9a4273a543b257be3304030978ba51516b576a65156c39", + "sha256:372a6cfce29e5de9bcf8c95af901d0bc3e27d8aa2295fadee295424f95f43f16" ], "markers": "python_version >= '3.8'", - "version": "==1.34.131" + "version": "==1.34.132" + }, + "cachetools": { + "hashes": [ + "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945", + "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105" + ], + "markers": "python_version >= '3.7'", + "version": "==5.3.3" }, "cerberus": { "hashes": [ @@ -179,6 +292,212 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.16" }, + "events": { + "hashes": [ + "sha256:a7286af378ba3e46640ac9825156c93bdba7502174dd696090fdfcd4d80a1abd" + ], + "version": "==0.5" + }, + "frozenlist": { + "hashes": [ + "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7", + "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98", + "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad", + 
"sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5", + "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae", + "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e", + "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a", + "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701", + "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d", + "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6", + "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6", + "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106", + "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75", + "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868", + "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a", + "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0", + "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1", + "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826", + "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec", + "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6", + "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950", + "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19", + "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0", + "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8", + "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a", + "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09", + "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86", + "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c", + "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5", + "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b", + "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b", + "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d", + "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0", + "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea", + "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776", + "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a", + "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897", + "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7", + "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09", + "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9", + "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe", + "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd", + "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742", + "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09", + "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0", + "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932", + "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1", + "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a", + "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49", + 
"sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d", + "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7", + "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480", + "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89", + "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e", + "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b", + "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82", + "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb", + "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068", + "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8", + "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b", + "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb", + "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2", + "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11", + "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b", + "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc", + "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0", + "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497", + "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17", + "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0", + "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2", + "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439", + "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5", + "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac", + "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825", + "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887", + "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced", + "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74" + ], + "markers": "python_version >= '3.8'", + "version": "==1.4.1" + }, + "google-auth": { + "hashes": [ + "sha256:8df7da660f62757388b8a7f249df13549b3373f24388cb5d2f1dd91cc18180b5", + "sha256:ab630a1320f6720909ad76a7dbdb6841cdf5c66b328d690027e4867bdfb16688" + ], + "markers": "python_version >= '3.7'", + "version": "==2.30.0" + }, + "google-crc32c": { + "hashes": [ + "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a", + "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876", + "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c", + "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289", + "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298", + "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02", + "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f", + "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2", + "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a", + "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb", + "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210", + "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5", + "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee", + 
"sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c", + "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a", + "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314", + "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd", + "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65", + "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37", + "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4", + "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13", + "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894", + "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31", + "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e", + "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709", + "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740", + "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc", + "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d", + "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c", + "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c", + "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d", + "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906", + "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61", + "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57", + "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c", + "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a", + "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438", + "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946", + "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7", + "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96", + "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091", + "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae", + "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d", + "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88", + "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2", + "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd", + "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541", + "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728", + "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178", + "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968", + "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346", + "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8", + "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93", + "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7", + "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273", + "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462", + "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94", + "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd", + "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e", + 
"sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57", + "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b", + "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9", + "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a", + "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100", + "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325", + "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183", + "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556", + "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4" + ], + "markers": "python_version >= '3.7'", + "version": "==1.5.0" + }, + "google-resumable-media": { + "hashes": [ + "sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c", + "sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33" + ], + "markers": "python_version >= '3.7'", + "version": "==2.7.1" + }, + "h5py": { + "hashes": [ + "sha256:083e0329ae534a264940d6513f47f5ada617da536d8dccbafc3026aefc33c90e", + "sha256:1625fd24ad6cfc9c1ccd44a66dac2396e7ee74940776792772819fc69f3a3731", + "sha256:21dbdc5343f53b2e25404673c4f00a3335aef25521bd5fa8c707ec3833934892", + "sha256:52c416f8eb0daae39dabe71415cb531f95dce2d81e1f61a74537a50c63b28ab3", + "sha256:55106b04e2c83dfb73dc8732e9abad69d83a436b5b82b773481d95d17b9685e1", + "sha256:67462d0669f8f5459529de179f7771bd697389fcb3faab54d63bf788599a48ea", + "sha256:6c4b760082626120031d7902cd983d8c1f424cdba2809f1067511ef283629d4b", + "sha256:731839240c59ba219d4cb3bc5880d438248533366f102402cfa0621b71796b62", + "sha256:754c0c2e373d13d6309f408325343b642eb0f40f1a6ad21779cfa9502209e150", + "sha256:75bd7b3d93fbeee40860fd70cdc88df4464e06b70a5ad9ce1446f5f32eb84007", + "sha256:77b19a40788e3e362b54af4dcf9e6fde59ca016db2c61360aa30b47c7b7cef00", + "sha256:7b7e8f78072a2edec87c9836f25f34203fd492a4475709a18b417a33cfb21fa9", + "sha256:8ec9df3dd2018904c4cc06331951e274f3f3fd091e6d6cc350aaa90fa9b42a76", + "sha256:a76cae64080210389a571c7d13c94a1a6cf8cb75153044fd1f822a962c97aeab", + "sha256:aa6ae84a14103e8dc19266ef4c3e5d7c00b68f21d07f2966f0ca7bdb6c2761fb", + "sha256:bbd732a08187a9e2a6ecf9e8af713f1d68256ee0f7c8b652a32795670fb481ba", + "sha256:c072655ad1d5fe9ef462445d3e77a8166cbfa5e599045f8aa3c19b75315f10e5", + "sha256:d9c944d364688f827dc889cf83f1fca311caf4fa50b19f009d1f2b525edd33a3", + "sha256:ef4e2f338fc763f50a8113890f455e1a70acd42a4d083370ceb80c463d803972", + "sha256:f3736fe21da2b7d8a13fe8fe415f1272d2a1ccdeff4849c1421d2fb30fd533bc", + "sha256:f4e025e852754ca833401777c25888acb96889ee2c27e7e629a19aee288833f0" + ], + "markers": "python_version >= '3.8'", + "version": "==3.11.0" + }, "idna": { "hashes": [ "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc", @@ -187,6 +506,113 @@ "markers": "python_version >= '3.5'", "version": "==3.7" }, + "ijson": { + "hashes": [ + "sha256:0015354011303175eae7e2ef5136414e91de2298e5a2e9580ed100b728c07e51", + "sha256:034642558afa57351a0ffe6de89e63907c4cf6849070cc10a3b2542dccda1afe", + "sha256:0420c24e50389bc251b43c8ed379ab3e3ba065ac8262d98beb6735ab14844460", + "sha256:04366e7e4a4078d410845e58a2987fd9c45e63df70773d7b6e87ceef771b51ee", + "sha256:0b003501ee0301dbf07d1597482009295e16d647bb177ce52076c2d5e64113e0", + "sha256:0ee57a28c6bf523d7cb0513096e4eb4dac16cd935695049de7608ec110c2b751", + "sha256:192e4b65495978b0bce0c78e859d14772e841724d3269fc1667dc6d2f53cc0ea", + 
"sha256:1efb521090dd6cefa7aafd120581947b29af1713c902ff54336b7c7130f04c47", + "sha256:25fd49031cdf5fd5f1fd21cb45259a64dad30b67e64f745cc8926af1c8c243d3", + "sha256:2636cb8c0f1023ef16173f4b9a233bcdb1df11c400c603d5f299fac143ca8d70", + "sha256:29ce02af5fbf9ba6abb70765e66930aedf73311c7d840478f1ccecac53fefbf3", + "sha256:2af323a8aec8a50fa9effa6d640691a30a9f8c4925bd5364a1ca97f1ac6b9b5c", + "sha256:30cfea40936afb33b57d24ceaf60d0a2e3d5c1f2335ba2623f21d560737cc730", + "sha256:33afc25057377a6a43c892de34d229a86f89ea6c4ca3dd3db0dcd17becae0dbb", + "sha256:36aa56d68ea8def26778eb21576ae13f27b4a47263a7a2581ab2ef58b8de4451", + "sha256:3917b2b3d0dbbe3296505da52b3cb0befbaf76119b2edaff30bd448af20b5400", + "sha256:3aba5c4f97f4e2ce854b5591a8b0711ca3b0c64d1b253b04ea7b004b0a197ef6", + "sha256:3c556f5553368dff690c11d0a1fb435d4ff1f84382d904ccc2dc53beb27ba62e", + "sha256:3dc1fb02c6ed0bae1b4bf96971258bf88aea72051b6e4cebae97cff7090c0607", + "sha256:3e8d8de44effe2dbd0d8f3eb9840344b2d5b4cc284a14eb8678aec31d1b6bea8", + "sha256:40ee3821ee90be0f0e95dcf9862d786a7439bd1113e370736bfdf197e9765bfb", + "sha256:44367090a5a876809eb24943f31e470ba372aaa0d7396b92b953dda953a95d14", + "sha256:45ff05de889f3dc3d37a59d02096948ce470699f2368b32113954818b21aa74a", + "sha256:4690e3af7b134298055993fcbea161598d23b6d3ede11b12dca6815d82d101d5", + "sha256:473f5d921fadc135d1ad698e2697025045cd8ed7e5e842258295012d8a3bc702", + "sha256:47c144117e5c0e2babb559bc8f3f76153863b8dd90b2d550c51dab5f4b84a87f", + "sha256:4ac6c3eeed25e3e2cb9b379b48196413e40ac4e2239d910bb33e4e7f6c137745", + "sha256:4b72178b1e565d06ab19319965022b36ef41bcea7ea153b32ec31194bec032a2", + "sha256:4e9ffe358d5fdd6b878a8a364e96e15ca7ca57b92a48f588378cef315a8b019e", + "sha256:501dce8eaa537e728aa35810656aa00460a2547dcb60937c8139f36ec344d7fc", + "sha256:5378d0baa59ae422905c5f182ea0fd74fe7e52a23e3821067a7d58c8306b2191", + "sha256:542c1e8fddf082159a5d759ee1412c73e944a9a2412077ed00b303ff796907dc", + "sha256:63afea5f2d50d931feb20dcc50954e23cef4127606cc0ecf7a27128ed9f9a9e6", + "sha256:658ba9cad0374d37b38c9893f4864f284cdcc7d32041f9808fba8c7bcaadf134", + "sha256:6b661a959226ad0d255e49b77dba1d13782f028589a42dc3172398dd3814c797", + "sha256:72e3488453754bdb45c878e31ce557ea87e1eb0f8b4fc610373da35e8074ce42", + "sha256:7914d0cf083471856e9bc2001102a20f08e82311dfc8cf1a91aa422f9414a0d6", + "sha256:7ab00721304af1ae1afa4313ecfa1bf16b07f55ef91e4a5b93aeaa3e2bd7917c", + "sha256:7d0b6b637d05dbdb29d0bfac2ed8425bb369e7af5271b0cc7cf8b801cb7360c2", + "sha256:7e2b3e9ca957153557d06c50a26abaf0d0d6c0ddf462271854c968277a6b5372", + "sha256:7f172e6ba1bee0d4c8f8ebd639577bfe429dee0f3f96775a067b8bae4492d8a0", + "sha256:7f7a5250599c366369fbf3bc4e176f5daa28eb6bc7d6130d02462ed335361675", + "sha256:844c0d1c04c40fd1b60f148dc829d3f69b2de789d0ba239c35136efe9a386529", + "sha256:8643c255a25824ddd0895c59f2319c019e13e949dc37162f876c41a283361527", + "sha256:8795e88adff5aa3c248c1edce932db003d37a623b5787669ccf205c422b91e4a", + "sha256:87c727691858fd3a1c085d9980d12395517fcbbf02c69fbb22dede8ee03422da", + "sha256:8851584fb931cffc0caa395f6980525fd5116eab8f73ece9d95e6f9c2c326c4c", + "sha256:891f95c036df1bc95309951940f8eea8537f102fa65715cdc5aae20b8523813b", + "sha256:8c85447569041939111b8c7dbf6f8fa7a0eb5b2c4aebb3c3bec0fb50d7025121", + "sha256:8e0ff16c224d9bfe4e9e6bd0395826096cda4a3ef51e6c301e1b61007ee2bd24", + "sha256:8f83f553f4cde6d3d4eaf58ec11c939c94a0ec545c5b287461cafb184f4b3a14", + "sha256:8f890d04ad33262d0c77ead53c85f13abfb82f2c8f078dfbf24b78f59534dfdd", + "sha256:8fdf3721a2aa7d96577970f5604bd81f426969c1822d467f07b3d844fa2fecc7", + 
"sha256:907f3a8674e489abdcb0206723e5560a5cb1fa42470dcc637942d7b10f28b695", + "sha256:92355f95a0e4da96d4c404aa3cff2ff033f9180a9515f813255e1526551298c1", + "sha256:97a9aea46e2a8371c4cf5386d881de833ed782901ac9f67ebcb63bb3b7d115af", + "sha256:988e959f2f3d59ebd9c2962ae71b97c0df58323910d0b368cc190ad07429d1bb", + "sha256:99f5c8ab048ee4233cc4f2b461b205cbe01194f6201018174ac269bf09995749", + "sha256:9cd5c03c63ae06d4f876b9844c5898d0044c7940ff7460db9f4cd984ac7862b5", + "sha256:a3b730ef664b2ef0e99dec01b6573b9b085c766400af363833e08ebc1e38eb2f", + "sha256:a716e05547a39b788deaf22725490855337fc36613288aa8ae1601dc8c525553", + "sha256:a7ec759c4a0fc820ad5dc6a58e9c391e7b16edcb618056baedbedbb9ea3b1524", + "sha256:aaa6bfc2180c31a45fac35d40e3312a3d09954638ce0b2e9424a88e24d262a13", + "sha256:ad04cf38164d983e85f9cba2804566c0160b47086dcca4cf059f7e26c5ace8ca", + "sha256:b2f73f0d0fce5300f23a1383d19b44d103bb113b57a69c36fd95b7c03099b181", + "sha256:b325f42e26659df1a0de66fdb5cde8dd48613da9c99c07d04e9fb9e254b7ee1c", + "sha256:b51bab2c4e545dde93cb6d6bb34bf63300b7cd06716f195dd92d9255df728331", + "sha256:b5c3e285e0735fd8c5a26d177eca8b52512cdd8687ca86ec77a0c66e9c510182", + "sha256:b73b493af9e947caed75d329676b1b801d673b17481962823a3e55fe529c8b8b", + "sha256:b9d85a02e77ee8ea6d9e3fd5d515bcc3d798d9c1ea54817e5feb97a9bc5d52fe", + "sha256:bdcfc88347fd981e53c33d832ce4d3e981a0d696b712fbcb45dcc1a43fe65c65", + "sha256:c594c0abe69d9d6099f4ece17763d53072f65ba60b372d8ba6de8695ce6ee39e", + "sha256:c8a9befb0c0369f0cf5c1b94178d0d78f66d9cebb9265b36be6e4f66236076b8", + "sha256:cd174b90db68c3bcca273e9391934a25d76929d727dc75224bf244446b28b03b", + "sha256:d5576415f3d76290b160aa093ff968f8bf6de7d681e16e463a0134106b506f49", + "sha256:d654d045adafdcc6c100e8e911508a2eedbd2a1b5f93f930ba13ea67d7704ee9", + "sha256:d92e339c69b585e7b1d857308ad3ca1636b899e4557897ccd91bb9e4a56c965b", + "sha256:da3b6987a0bc3e6d0f721b42c7a0198ef897ae50579547b0345f7f02486898f5", + "sha256:dd26b396bc3a1e85f4acebeadbf627fa6117b97f4c10b177d5779577c6607744", + "sha256:de7c1ddb80fa7a3ab045266dca169004b93f284756ad198306533b792774f10a", + "sha256:df3ab5e078cab19f7eaeef1d5f063103e1ebf8c26d059767b26a6a0ad8b250a3", + "sha256:e0155a8f079c688c2ccaea05de1ad69877995c547ba3d3612c1c336edc12a3a5", + "sha256:e10c14535abc7ddf3fd024aa36563cd8ab5d2bb6234a5d22c77c30e30fa4fb2b", + "sha256:e4396b55a364a03ff7e71a34828c3ed0c506814dd1f50e16ebed3fc447d5188e", + "sha256:e5589225c2da4bb732c9c370c5961c39a6db72cf69fb2a28868a5413ed7f39e6", + "sha256:e6576cdc36d5a09b0c1a3d81e13a45d41a6763188f9eaae2da2839e8a4240bce", + "sha256:e6850ae33529d1e43791b30575070670070d5fe007c37f5d06aebc1dd152ab3f", + "sha256:e9afd97339fc5a20f0542c971f90f3ca97e73d3050cdc488d540b63fae45329a", + "sha256:ead50635fb56577c07eff3e557dac39533e0fe603000684eea2af3ed1ad8f941", + "sha256:ed1336a2a6e5c427f419da0154e775834abcbc8ddd703004108121c6dd9eba9d", + "sha256:f0c819f83e4f7b7f7463b2dc10d626a8be0c85fbc7b3db0edc098c2b16ac968e", + "sha256:f64f01795119880023ba3ce43072283a393f0b90f52b66cc0ea1a89aa64a9ccb", + "sha256:f87a7e52f79059f9c58f6886c262061065eb6f7554a587be7ed3aa63e6b71b34", + "sha256:ff835906f84451e143f31c4ce8ad73d83ef4476b944c2a2da91aec8b649570e1" + ], + "version": "==3.3.0" + }, + "jinja2": { + "hashes": [ + "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", + "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d" + ], + "markers": "python_version >= '3.7'", + "version": "==3.1.4" + }, "jmespath": { "hashes": [ "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", @@ -195,6 
+621,285 @@ "markers": "python_version >= '3.7'", "version": "==1.0.1" }, + "jsonschema": { + "hashes": [ + "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7", + "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802" + ], + "markers": "python_version >= '3.8'", + "version": "==4.22.0" + }, + "jsonschema-specifications": { + "hashes": [ + "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc", + "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c" + ], + "markers": "python_version >= '3.8'", + "version": "==2023.12.1" + }, + "markupsafe": { + "hashes": [ + "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf", + "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff", + "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f", + "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3", + "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532", + "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f", + "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617", + "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df", + "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4", + "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906", + "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f", + "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4", + "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8", + "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371", + "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2", + "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465", + "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52", + "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6", + "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169", + "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad", + "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2", + "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0", + "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029", + "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f", + "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a", + "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced", + "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5", + "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c", + "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf", + "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9", + "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb", + "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad", + "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3", + "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1", + "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46", + "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc", + "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a", + 
"sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee", + "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900", + "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5", + "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea", + "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f", + "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5", + "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e", + "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a", + "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f", + "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50", + "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a", + "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b", + "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4", + "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff", + "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2", + "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46", + "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b", + "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf", + "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5", + "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5", + "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab", + "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd", + "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68" + ], + "markers": "python_version >= '3.7'", + "version": "==2.1.5" + }, + "multidict": { + "hashes": [ + "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556", + "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c", + "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29", + "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b", + "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8", + "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7", + "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd", + "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40", + "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6", + "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3", + "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c", + "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9", + "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5", + "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae", + "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442", + "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9", + "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc", + "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c", + "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea", + "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5", + "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50", + 
"sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182", + "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453", + "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e", + "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600", + "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733", + "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda", + "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241", + "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461", + "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e", + "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e", + "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b", + "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e", + "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7", + "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386", + "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd", + "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9", + "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf", + "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee", + "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5", + "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a", + "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271", + "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54", + "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4", + "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496", + "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb", + "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319", + "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3", + "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f", + "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527", + "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed", + "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604", + "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef", + "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8", + "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5", + "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5", + "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626", + "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c", + "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d", + "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c", + "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc", + "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc", + "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b", + "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38", + "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450", + "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1", + "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f", + 
"sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3", + "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755", + "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226", + "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a", + "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046", + "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf", + "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479", + "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e", + "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1", + "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a", + "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83", + "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929", + "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93", + "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a", + "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c", + "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44", + "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89", + "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba", + "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e", + "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da", + "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24", + "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423", + "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef" + ], + "markers": "python_version >= '3.7'", + "version": "==6.0.5" + }, + "numpy": { + "hashes": [ + "sha256:04494f6ec467ccb5369d1808570ae55f6ed9b5809d7f035059000a37b8d7e86f", + "sha256:0a43f0974d501842866cc83471bdb0116ba0dffdbaac33ec05e6afed5b615238", + "sha256:0e50842b2295ba8414c8c1d9d957083d5dfe9e16828b37de883f51fc53c4016f", + "sha256:0ec84b9ba0654f3b962802edc91424331f423dcf5d5f926676e0150789cb3d95", + "sha256:17067d097ed036636fa79f6a869ac26df7db1ba22039d962422506640314933a", + "sha256:1cde1753efe513705a0c6d28f5884e22bdc30438bf0085c5c486cdaff40cd67a", + "sha256:1e72728e7501a450288fc8e1f9ebc73d90cfd4671ebbd631f3e7857c39bd16f2", + "sha256:2635dbd200c2d6faf2ef9a0d04f0ecc6b13b3cad54f7c67c61155138835515d2", + "sha256:2ce46fd0b8a0c947ae047d222f7136fc4d55538741373107574271bc00e20e8f", + "sha256:34f003cb88b1ba38cb9a9a4a3161c1604973d7f9d5552c38bc2f04f829536609", + "sha256:354f373279768fa5a584bac997de6a6c9bc535c482592d7a813bb0c09be6c76f", + "sha256:38ecb5b0582cd125f67a629072fed6f83562d9dd04d7e03256c9829bdec027ad", + "sha256:3e8e01233d57639b2e30966c63d36fcea099d17c53bf424d77f088b0f4babd86", + "sha256:3f6bed7f840d44c08ebdb73b1825282b801799e325bcbdfa6bc5c370e5aecc65", + "sha256:4554eb96f0fd263041baf16cf0881b3f5dafae7a59b1049acb9540c4d57bc8cb", + "sha256:46e161722e0f619749d1cd892167039015b2c2817296104487cd03ed4a955995", + "sha256:49d9f7d256fbc804391a7f72d4a617302b1afac1112fac19b6c6cec63fe7fe8a", + "sha256:4d2f62e55a4cd9c58c1d9a1c9edaedcd857a73cb6fda875bf79093f9d9086f85", + "sha256:5f64641b42b2429f56ee08b4f427a4d2daf916ec59686061de751a55aafa22e4", + "sha256:63b92c512d9dbcc37f9d81b123dec99fdb318ba38c8059afc78086fe73820275", + "sha256:6d7696c615765091cc5093f76fd1fa069870304beaccfd58b5dcc69e55ef49c1", + 
"sha256:79e843d186c8fb1b102bef3e2bc35ef81160ffef3194646a7fdd6a73c6b97196", + "sha256:821eedb7165ead9eebdb569986968b541f9908979c2da8a4967ecac4439bae3d", + "sha256:84554fc53daa8f6abf8e8a66e076aff6ece62de68523d9f665f32d2fc50fd66e", + "sha256:8d83bb187fb647643bd56e1ae43f273c7f4dbcdf94550d7938cfc32566756514", + "sha256:903703372d46bce88b6920a0cd86c3ad82dae2dbef157b5fc01b70ea1cfc430f", + "sha256:9416a5c2e92ace094e9f0082c5fd473502c91651fb896bc17690d6fc475128d6", + "sha256:9a1712c015831da583b21c5bfe15e8684137097969c6d22e8316ba66b5baabe4", + "sha256:9c27f0946a3536403efb0e1c28def1ae6730a72cd0d5878db38824855e3afc44", + "sha256:a356364941fb0593bb899a1076b92dfa2029f6f5b8ba88a14fd0984aaf76d0df", + "sha256:a7039a136017eaa92c1848152827e1424701532ca8e8967fe480fe1569dae581", + "sha256:acd3a644e4807e73b4e1867b769fbf1ce8c5d80e7caaef0d90dcdc640dfc9787", + "sha256:ad0c86f3455fbd0de6c31a3056eb822fc939f81b1618f10ff3406971893b62a5", + "sha256:b4c76e3d4c56f145d41b7b6751255feefae92edbc9a61e1758a98204200f30fc", + "sha256:b6f6a8f45d0313db07d6d1d37bd0b112f887e1369758a5419c0370ba915b3871", + "sha256:c5a59996dc61835133b56a32ebe4ef3740ea5bc19b3983ac60cc32be5a665d54", + "sha256:c73aafd1afca80afecb22718f8700b40ac7cab927b8abab3c3e337d70e10e5a2", + "sha256:cee6cc0584f71adefe2c908856ccc98702baf95ff80092e4ca46061538a2ba98", + "sha256:cef04d068f5fb0518a77857953193b6bb94809a806bd0a14983a8f12ada060c9", + "sha256:cf5d1c9e6837f8af9f92b6bd3e86d513cdc11f60fd62185cc49ec7d1aba34864", + "sha256:e61155fae27570692ad1d327e81c6cf27d535a5d7ef97648a17d922224b216de", + "sha256:e7f387600d424f91576af20518334df3d97bc76a300a755f9a8d6e4f5cadd289", + "sha256:ed08d2703b5972ec736451b818c2eb9da80d66c3e84aed1deeb0c345fefe461b", + "sha256:fbd6acc766814ea6443628f4e6751d0da6593dae29c08c0b2606164db026970c", + "sha256:feff59f27338135776f6d4e2ec7aeeac5d5f7a08a83e80869121ef8164b74af9" + ], + "markers": "python_version >= '3.9'", + "version": "==2.0.0" + }, + "opensearch-benchmark": { + "hashes": [ + "sha256:9a06c38ea62a6a0e69c95457bcc9c5c4efd9dcdf6e52ba61b5fbc63a96e0dfa1", + "sha256:da152af68b212ec4640be9b4b50723f50b22c02efc39938e29ec7a23802085a7" + ], + "index": "pypi", + "markers": "python_version < '3.12' and python_version >= '3.8'", + "version": "==1.7.0" + }, + "opensearch-py": { + "extras": [ + "async" + ], + "hashes": [ + "sha256:0b7c27e8ed84c03c99558406927b6161f186a72502ca6d0325413d8e5523ba96", + "sha256:b6e78b685dd4e9c016d7a4299cf1de69e299c88322e3f81c716e6e23fe5683c1" + ], + "markers": "python_version >= '3.8' and python_version < '4'", + "version": "==2.6.0" + }, + "psutil": { + "hashes": [ + "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35", + "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0", + "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c", + "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1", + "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3", + "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c", + "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd", + "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3", + "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0", + "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2", + "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6", + "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d", + 
"sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c", + "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0", + "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132", + "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14", + "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==6.0.0" + }, + "py-cpuinfo": { + "hashes": [ + "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690", + "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5" + ], + "version": "==9.0.0" + }, "pyasn1": { "hashes": [ "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c", @@ -203,12 +908,20 @@ "markers": "python_version >= '3.8'", "version": "==0.6.0" }, + "pyasn1-modules": { + "hashes": [ + "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6", + "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b" + ], + "markers": "python_version >= '3.8'", + "version": "==0.4.0" + }, "python-dateutil": { "hashes": [ "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", "version": "==2.9.0.post0" }, "pyyaml": { @@ -269,6 +982,14 @@ "markers": "python_version >= '3.6'", "version": "==6.0.1" }, + "referencing": { + "hashes": [ + "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c", + "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de" + ], + "markers": "python_version >= '3.8'", + "version": "==0.35.1" + }, "requests": { "hashes": [ "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", @@ -278,6 +999,111 @@ "markers": "python_version >= '3.8'", "version": "==2.32.3" }, + "rpds-py": { + "hashes": [ + "sha256:05f3d615099bd9b13ecf2fc9cf2d839ad3f20239c678f461c753e93755d629ee", + "sha256:06d218939e1bf2ca50e6b0ec700ffe755e5216a8230ab3e87c059ebb4ea06afc", + "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc", + "sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944", + "sha256:0abeee75434e2ee2d142d650d1e54ac1f8b01e6e6abdde8ffd6eeac6e9c38e20", + "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7", + "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4", + "sha256:1805d5901779662d599d0e2e4159d8a82c0b05faa86ef9222bf974572286b2b6", + "sha256:19ba472b9606c36716062c023afa2484d1e4220548751bda14f725a7de17b4f6", + "sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93", + "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633", + "sha256:207c82978115baa1fd8d706d720b4a4d2b0913df1c78c85ba73fe6c5804505f0", + "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360", + "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8", + "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139", + "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7", + "sha256:2fc24a329a717f9e2448f8cd1f960f9dac4e45b6224d60734edeb67499bab03a", + "sha256:312fe69b4fe1ffbe76520a7676b1e5ac06ddf7826d764cc10265c3b53f96dbe9", + 
"sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26", + "sha256:338dee44b0cef8b70fd2ef54b4e09bb1b97fc6c3a58fea5db6cc083fd9fc2724", + "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72", + "sha256:35b2b771b13eee8729a5049c976197ff58a27a3829c018a04341bcf1ae409b2b", + "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09", + "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100", + "sha256:3dd3cd86e1db5aadd334e011eba4e29d37a104b403e8ca24dcd6703c68ca55b3", + "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261", + "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3", + "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9", + "sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b", + "sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3", + "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de", + "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d", + "sha256:5463c47c08630007dc0fe99fb480ea4f34a89712410592380425a9b4e1611d8e", + "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8", + "sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff", + "sha256:607345bd5912aacc0c5a63d45a1f73fef29e697884f7e861094e443187c02be5", + "sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c", + "sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e", + "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e", + "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4", + "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8", + "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922", + "sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338", + "sha256:6d1e42d2735d437e7e80bab4d78eb2e459af48c0a46e686ea35f690b93db792d", + "sha256:6e30ac5e329098903262dc5bdd7e2086e0256aa762cc8b744f9e7bf2a427d3f8", + "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2", + "sha256:720edcb916df872d80f80a1cc5ea9058300b97721efda8651efcd938a9c70a72", + "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80", + "sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644", + "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae", + "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163", + "sha256:7750569d9526199c5b97e5a9f8d96a13300950d910cf04a861d96f4273d5b104", + "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d", + "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60", + "sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a", + "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d", + "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07", + "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49", + "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10", + "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f", + "sha256:9437ca26784120a279f3137ee080b0e717012c42921eb07861b412340f85bae2", + "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8", + "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7", + "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88", + 
"sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65", + "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0", + "sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909", + "sha256:aa3679e751408d75a0b4d8d26d6647b6d9326f5e35c00a7ccd82b78ef64f65f8", + "sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c", + "sha256:b646bf655b135ccf4522ed43d6902af37d3f5dbcf0da66c769a2b3938b9d8184", + "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397", + "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a", + "sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346", + "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590", + "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333", + "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb", + "sha256:c827576e2fa017a081346dce87d532a5310241648eb3700af9a571a6e9fc7e74", + "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e", + "sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d", + "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa", + "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f", + "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53", + "sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1", + "sha256:d58ad6317d188c43750cb76e9deacf6051d0f884d87dc6518e0280438648a9ac", + "sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0", + "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd", + "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611", + "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f", + "sha256:e1735502458621921cee039c47318cb90b51d532c2766593be6207eec53e5c4c", + "sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5", + "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab", + "sha256:ed402d6153c5d519a0faf1bb69898e97fb31613b49da27a84a13935ea9164dfc", + "sha256:ee17cd26b97d537af8f33635ef38be873073d516fd425e80559f4585a7b90c43", + "sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da", + "sha256:f5bab211605d91db0e2995a17b5c6ee5edec1270e46223e513eaa20da20076ac", + "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843", + "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e", + "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89", + "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64" + ], + "markers": "python_version >= '3.8'", + "version": "==0.18.1" + }, "rsa": { "hashes": [ "sha256:78f9a9bf4e7be0c5ded4583326e7461e3a3c5aae24073648b4bdfa797d78c9d2", @@ -288,27 +1114,253 @@ }, "s3transfer": { "hashes": [ - "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19", - "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d" + "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6", + "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69" ], "markers": "python_version >= '3.8'", - "version": "==0.10.1" + "version": "==0.10.2" }, "six": { "hashes": [ "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" ], - "markers": "python_version >= '2.7' and python_version not in 
'3.0, 3.1, 3.2, 3.3'", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", "version": "==1.16.0" }, + "tabulate": { + "hashes": [ + "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", + "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f" + ], + "markers": "python_version >= '3.7'", + "version": "==0.9.0" + }, + "thespian": { + "hashes": [ + "sha256:c987a8042ba2303e22371f38a67354593dd81c4c11ba1eba7f6657409288d5ed" + ], + "version": "==3.10.6" + }, "urllib3": { "hashes": [ "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472", "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168" ], - "markers": "python_version >= '3.8'", + "markers": "python_version >= '3.10'", "version": "==2.2.2" + }, + "wheel": { + "hashes": [ + "sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85", + "sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81" + ], + "markers": "python_version >= '3.8'", + "version": "==0.43.0" + }, + "yappi": { + "hashes": [ + "sha256:084688828c0a8c181ebe59bbf15dcd5a5db2f689eada59d5c277b997c4dccf43", + "sha256:1ba7d12c18bc0d092463ad126a95a1b2b8c261c47b0e3bd4cb2fd7479469141c", + "sha256:1cd7453e99ebf56491254b0f4c28ae95c5e0ce55043eb17d1ab02c974cbd7416", + "sha256:1fd7d2da3e32f3d029220356e9b5b24754a7cd708d6e3830405e3dc04ec74153", + "sha256:237b1ac310ef364db6d2a1817de93a346d1ed98abfa3053810dbbdcab9ca9300", + "sha256:23c668e9ce87d70b126f73970cff997a2ab1964b947859ee50580af23964a096", + "sha256:2c7c2c9048b2f9fbd5da9cc65bdad73571023a30b5c34f62d97d9a7d47cbe9f5", + "sha256:30d294a88baffc3cb13a66fe408ecf8973c927fb3498f327df5af7cc657cdc80", + "sha256:31050972159a026876a06b5eec97f2dbaaaa291ebf3cf07a0d5506cce37ef339", + "sha256:36278de1ecf3a781322fb5f9511abc0b66bff327ca87a9e868dc2e376ad1b11a", + "sha256:474e9f3ba9394c19dd2f7dc257123e3918c178638597d507ee2094f19d938a39", + "sha256:4ae77cfe71682beec6f15ddd6dfb5912436c489b38eb2c3355f4481c07b9c4bf", + "sha256:638bd2458a6dfaa278e8977a8fdf44c8626003c12d8c94d82338ef8aa6ac245f", + "sha256:6678f046e6bffc68ef2d7781f9fc90b932ca6e90ea966371318ed904c4c38b8d", + "sha256:677d992c41b239441eee399ac39ea7601010ddb5acb92bf997de7589f9ee2cc1", + "sha256:69a4aced8c86bcc91fbecc3924ca9bd0a91ed715531c8a039199ef325ebb7046", + "sha256:6a16dd3fb61cfa7ffeb287312ddfe2a5c61a53693b009d3a7a3f1b8affb9568a", + "sha256:72aff63e74b87ffbf121211246922df9ac5dffc8deabdc6964f5b7f399799d0a", + "sha256:819c43539f55a9f40118ab8b3ce7cb743d66f3af63c7ce984c114533f750b263", + "sha256:830ffb677b7a9f9886bc7789e9cb75d5b3ad9af5f43d56d48e56431f92f04bcc", + "sha256:88dee431bba79866692f444110695133181efb2a6969ab63752f4424787f79c8", + "sha256:8a3c970f43f6d9bbc7159b535fbef492cb21576c094e77a673362ad827d9e00a", + "sha256:8ddbe1475964f145b028f8bf120a58903d8f6c7bdd1be0a16c1471ba2d8646ca", + "sha256:93352217bf560bea09b1cb43a17361bd2d41864698fa7ae46ce1066266c1af76", + "sha256:a1cb70d46827a137350fb84b8fddecd7acec0a11834c763209875788b738f873", + "sha256:a2cefe387bc747afcf0b26c9548e242113e17fac3de2674d900e97eb58a328f6", + "sha256:a6797f189b7b89154d6c7c53ac769a22f0adb7bd88ea5b8f6c65106a286afad6", + "sha256:a69ce9d7acc71419092f158ab4851d982e90ecbffbe6abf7d95516f3f741b57f", + "sha256:a9aaf72009d8c03067294151ee0470ac7a6dfa7b33baab40b198d6c1ef00430a", + "sha256:a9ea8dada24af41c11059f70741d8fce707aaf6862a9306f2ab63bde35f0ce9e", + "sha256:acfbf4c80b6ee0513ad35a6e4a1f633aa2f93357517f9701aed6ad8cd56544d4", + "sha256:b2f8f0877e3b85b6d2074d2fb541085cd519481f3df9c7e7011fc3867c364c7e", 
+ "sha256:b42f7934fe96fd330488f9def51dd8e2fecc5cc9a71dceab8a27a41406b31332", + "sha256:c12da5f310d81779056566259fef644a9c14ac1ec9a2b1b8a3fc62beb4ca6980", + "sha256:c7c22bb39d3f2f294766f4940848d11b8ad1c43f9cf0a594ed695b07016007fc", + "sha256:cdaa263ba667aac9bf7bdc0d96fd10e2761a287f01fe87dc136f064ab7696af3", + "sha256:d1a8be1f55875e2120d7cddcb7e98c77a79ed87715d6292874c782fcd7da2c50", + "sha256:d49f972a8901972b104a2b65953ae8cbe005d5c09e0974422195bb780b2c5001", + "sha256:d4c7c5de3ae439c53c6c6c98d30d4b063c6fc353428ba3d81b57d91f1c41f654", + "sha256:d58e60aac43041d109f0931917204ef02ac01004b9579fe173f2847fbc69655b", + "sha256:d9f6ac7cd8797850bb6fb4cef8364ed34051be6f47d7da74be3a9261eef4bbfb", + "sha256:e33c097402e101a51f9963654108e7625853ddc979b562e8381f761cce99ae13", + "sha256:e37817722a052632de21674ec8acf59645c08df786920362879a76493037c99e", + "sha256:e40a872d146ddddae337181f8808aa6c3b37ffa66bd38a18f009b9e2f2c08b99", + "sha256:e4959c1dcfb6da8441d05915bfbb9c697e9f11655568f65b87c341e543bd65d5", + "sha256:e6ec9eabc974f9df868faa67461a9d167e9612083f59338f755f4aa61a2552b7", + "sha256:e9bc33b8ec9bce8b2575a4c3878b3cd223d08eb728669924699e5ac937e7b515", + "sha256:ef61f5fed7c19dddad5b7565df5b7bdfa861d51c15b01a90d283c4d3c97c42e2", + "sha256:f0f82e4895d04d6fd7bed2bea0abcc84271bdd990371cb053132753b6f5afb73", + "sha256:fb5bbb4c6b996736554cb8f41e7fb6d5ee6096b7c4f54112cce8cf953a92c0a4" + ], + "markers": "python_version >= '3.6'", + "version": "==1.6.0" + }, + "yarl": { + "hashes": [ + "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51", + "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce", + "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559", + "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0", + "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81", + "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc", + "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4", + "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c", + "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130", + "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136", + "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e", + "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec", + "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7", + "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1", + "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455", + "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099", + "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129", + "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10", + "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142", + "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98", + "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa", + "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7", + "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525", + "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c", + "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9", + "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c", + 
"sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8", + "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b", + "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf", + "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23", + "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd", + "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27", + "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f", + "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece", + "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434", + "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec", + "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff", + "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78", + "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d", + "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863", + "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53", + "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31", + "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15", + "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5", + "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b", + "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57", + "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3", + "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1", + "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f", + "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad", + "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c", + "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7", + "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2", + "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b", + "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2", + "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b", + "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9", + "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be", + "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e", + "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984", + "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4", + "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074", + "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2", + "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392", + "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91", + "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541", + "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf", + "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572", + "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66", + "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575", + "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14", + "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5", + 
"sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1", + "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e", + "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551", + "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17", + "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead", + "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0", + "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe", + "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234", + "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0", + "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7", + "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34", + "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42", + "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385", + "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78", + "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be", + "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958", + "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749", + "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec" + ], + "markers": "python_version >= '3.7'", + "version": "==1.9.4" + }, + "zstandard": { + "hashes": [ + "sha256:11f0d1aab9516a497137b41e3d3ed4bbf7b2ee2abc79e5c8b010ad286d7464bd", + "sha256:1958100b8a1cc3f27fa21071a55cb2ed32e9e5df4c3c6e661c193437f171cba2", + "sha256:1a90ba9a4c9c884bb876a14be2b1d216609385efb180393df40e5172e7ecf356", + "sha256:1d43501f5f31e22baf822720d82b5547f8a08f5386a883b32584a185675c8fbf", + "sha256:23d2b3c2b8e7e5a6cb7922f7c27d73a9a615f0a5ab5d0e03dd533c477de23004", + "sha256:2612e9bb4977381184bb2463150336d0f7e014d6bb5d4a370f9a372d21916f69", + "sha256:275df437ab03f8c033b8a2c181e51716c32d831082d93ce48002a5227ec93019", + "sha256:2ac9957bc6d2403c4772c890916bf181b2653640da98f32e04b96e4d6fb3252a", + "sha256:2b11ea433db22e720758cba584c9d661077121fcf60ab43351950ded20283440", + "sha256:2fdd53b806786bd6112d97c1f1e7841e5e4daa06810ab4b284026a1a0e484c0b", + "sha256:33591d59f4956c9812f8063eff2e2c0065bc02050837f152574069f5f9f17775", + "sha256:36a47636c3de227cd765e25a21dc5dace00539b82ddd99ee36abae38178eff9e", + "sha256:39b2853efc9403927f9065cc48c9980649462acbdf81cd4f0cb773af2fd734bc", + "sha256:3db41c5e49ef73641d5111554e1d1d3af106410a6c1fb52cf68912ba7a343a0d", + "sha256:445b47bc32de69d990ad0f34da0e20f535914623d1e506e74d6bc5c9dc40bb09", + "sha256:466e6ad8caefb589ed281c076deb6f0cd330e8bc13c5035854ffb9c2014b118c", + "sha256:48f260e4c7294ef275744210a4010f116048e0c95857befb7462e033f09442fe", + "sha256:4ac59d5d6910b220141c1737b79d4a5aa9e57466e7469a012ed42ce2d3995e88", + "sha256:53866a9d8ab363271c9e80c7c2e9441814961d47f88c9bc3b248142c32141d94", + "sha256:589402548251056878d2e7c8859286eb91bd841af117dbe4ab000e6450987e08", + "sha256:68953dc84b244b053c0d5f137a21ae8287ecf51b20872eccf8eaac0302d3e3b0", + "sha256:6c25b8eb733d4e741246151d895dd0308137532737f337411160ff69ca24f93a", + "sha256:7034d381789f45576ec3f1fa0e15d741828146439228dc3f7c59856c5bcd3292", + "sha256:73a1d6bd01961e9fd447162e137ed949c01bdb830dfca487c4a14e9742dccc93", + "sha256:8226a33c542bcb54cd6bd0a366067b610b41713b64c9abec1bc4533d69f51e70", + "sha256:888196c9c8893a1e8ff5e89b8f894e7f4f0e64a5af4d8f3c410f0319128bb2f8", + 
"sha256:88c5b4b47a8a138338a07fc94e2ba3b1535f69247670abfe422de4e0b344aae2", + "sha256:8a1b2effa96a5f019e72874969394edd393e2fbd6414a8208fea363a22803b45", + "sha256:93e1856c8313bc688d5df069e106a4bc962eef3d13372020cc6e3ebf5e045202", + "sha256:9501f36fac6b875c124243a379267d879262480bf85b1dbda61f5ad4d01b75a3", + "sha256:959665072bd60f45c5b6b5d711f15bdefc9849dd5da9fb6c873e35f5d34d8cfb", + "sha256:a1d67d0d53d2a138f9e29d8acdabe11310c185e36f0a848efa104d4e40b808e4", + "sha256:a493d470183ee620a3df1e6e55b3e4de8143c0ba1b16f3ded83208ea8ddfd91d", + "sha256:a7ccf5825fd71d4542c8ab28d4d482aace885f5ebe4b40faaa290eed8e095a4c", + "sha256:a88b7df61a292603e7cd662d92565d915796b094ffb3d206579aaebac6b85d5f", + "sha256:a97079b955b00b732c6f280d5023e0eefe359045e8b83b08cf0333af9ec78f26", + "sha256:d22fdef58976457c65e2796e6730a3ea4a254f3ba83777ecfc8592ff8d77d303", + "sha256:d75f693bb4e92c335e0645e8845e553cd09dc91616412d1d4650da835b5449df", + "sha256:d8593f8464fb64d58e8cb0b905b272d40184eac9a18d83cf8c10749c3eafcd7e", + "sha256:d8fff0f0c1d8bc5d866762ae95bd99d53282337af1be9dc0d88506b340e74b73", + "sha256:de20a212ef3d00d609d0b22eb7cc798d5a69035e81839f549b538eff4105d01c", + "sha256:e9e9d4e2e336c529d4c435baad846a181e39a982f823f7e4495ec0b0ec8538d2", + "sha256:f058a77ef0ece4e210bb0450e68408d4223f728b109764676e1a13537d056bb0", + "sha256:f1a4b358947a65b94e2501ce3e078bbc929b039ede4679ddb0460829b12f7375", + "sha256:f9b2cde1cd1b2a10246dbc143ba49d942d14fb3d2b4bccf4618d475c65464912", + "sha256:fe3390c538f12437b859d815040763abc728955a52ca6ff9c5d4ac707c4ad98e" + ], + "markers": "python_version >= '3.8'", + "version": "==0.22.0" } }, "develop": { @@ -480,7 +1532,6 @@ "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad", "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16" ], - "index": "pypi", "markers": "python_version < '3.11'", "version": "==1.2.1" }, @@ -565,7 +1616,7 @@ "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472", "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168" ], - "markers": "python_version >= '3.8'", + "markers": "python_version >= '3.10'", "version": "==2.2.2" } } diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/cli.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/cli.py index 81ba09da9..62ea4e7d5 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/cli.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/cli.py @@ -57,27 +57,28 @@ def cluster_group(ctx): @cluster_group.command(name="cat-indices") +@click.option("--refresh", is_flag=True, default=False) @click.pass_obj -def cat_indices_cmd(ctx): +def cat_indices_cmd(ctx, refresh): """Simple program that calls `_cat/indices` on both a source and target cluster.""" if ctx.json: click.echo( json.dumps( { "source_cluster": logic_clusters.cat_indices( - ctx.env.source_cluster, as_json=True + ctx.env.source_cluster, as_json=True, refresh=refresh ), "target_cluster": logic_clusters.cat_indices( - ctx.env.target_cluster, as_json=True + ctx.env.target_cluster, as_json=True, refresh=refresh ), } ) ) return click.echo("SOURCE CLUSTER") - click.echo(logic_clusters.cat_indices(ctx.env.source_cluster)) + click.echo(logic_clusters.cat_indices(ctx.env.source_cluster, refresh=refresh)) click.echo("TARGET CLUSTER") - click.echo(logic_clusters.cat_indices(ctx.env.target_cluster)) + 
click.echo(logic_clusters.cat_indices(ctx.env.target_cluster, refresh=refresh)) @cluster_group.command(name="connection-check") diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/environment.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/environment.py index 45fbb2f60..c2185b1e5 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/environment.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/environment.py @@ -8,6 +8,7 @@ from console_link.models.snapshot import FileSystemSnapshot, Snapshot, S3Snapshot from console_link.models.replayer_base import Replayer from console_link.models.replayer_ecs import ECSReplayer +from console_link.models.replayer_docker import DockerReplayer from console_link.models.kafka import Kafka, MSK, StandardKafka import yaml from cerberus import Validator @@ -26,7 +27,7 @@ def get_snapshot(config: Dict, source_cluster: Cluster): def get_replayer(config: Dict): if 'ecs' in config: return ECSReplayer(config) - raise ValueError("Invalid replayer config") + return DockerReplayer(config) def get_kafka(config: Dict): diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/clusters.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/clusters.py index baa69e674..c72f5cc35 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/clusters.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/clusters.py @@ -12,7 +12,16 @@ class ConnectionResult: cluster_version: str -def cat_indices(cluster: Cluster, as_json=False): +def call_api(cluster: Cluster, path: str, method=HttpMethod.GET, data=None, headers=None, timeout=None, + session=None, raise_error=False): + r = cluster.call_api(path=path, method=method, data=data, headers=headers, timeout=timeout, session=session, + raise_error=raise_error) + return r + + +def cat_indices(cluster: Cluster, refresh=False, as_json=False): + if refresh: + cluster.call_api('/_refresh') as_json_suffix = "?format=json" if as_json else "?v" cat_indices_path = f"/_cat/indices/_all{as_json_suffix}" r = cluster.call_api(cat_indices_path) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/replay.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/replay.py index 714f3cdc9..7a337e379 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/replay.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/replay.py @@ -25,7 +25,7 @@ def start(replayer: Replayer, *args, **kwargs) -> Tuple[ExitCode, str]: except Exception as e: logger.error(f"Failed to start replayer: {e}") return ExitCode.FAILURE, f"Failure when starting replayer: {type(e).__name__} {e}" - + if result.success: return ExitCode.SUCCESS, "Replayer started successfully." + "\n" + result.display() return ExitCode.FAILURE, "Replayer start failed." 
+ "\n" + result.display() @@ -64,13 +64,13 @@ def scale(replayer: Replayer, units: int, *args, **kwargs) -> Tuple[ExitCode, st def status(replayer: Replayer, *args, **kwargs) -> Tuple[ExitCode, str]: logger.info("Getting replayer status") try: - status = replayer.get_status(*args, **kwargs) + result = replayer.get_status(*args, **kwargs) except NotImplementedError: logger.error(f"Status is not implemented for replayer {type(replayer).__name__}") return ExitCode.FAILURE, f"Status is not implemented for replayer: {type(replayer).__name__}" except Exception as e: logger.error(f"Failed to get status of replayer: {e}") return ExitCode.FAILURE, f"Failure when getting status of replayer: {type(e).__name__} {e}" - if status: - return ExitCode.SUCCESS, status.value - return ExitCode.FAILURE, "Replayer status retrieval failed." + "\n" + status + if result.success: + return ExitCode.SUCCESS, result.value[0].name + return ExitCode.FAILURE, "Replayer status retrieval failed." + "\n" + result.value[1] diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/snapshot.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/snapshot.py index ba4bbcefd..a9310a002 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/snapshot.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/snapshot.py @@ -17,3 +17,12 @@ def create(snapshot: Snapshot, *args, **kwargs) -> CommandResult: def status(snapshot: Snapshot, *args, **kwargs) -> CommandResult: logger.info("Getting snapshot status") return snapshot.status(*args, **kwargs) + + +def delete(snapshot: Snapshot, *args, **kwargs) -> CommandResult: + logger.info(f"Deleting snapshot with {args=} and {kwargs=}") + try: + return snapshot.delete(*args, **kwargs) + except Exception as e: + logger.error(f"Failure running delete snapshot: {e}") + return CommandResult(status=False, message=f"Failure running delete snapshot: {e}") diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_rfs.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_rfs.py index 02a813c20..3e2f64325 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_rfs.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_rfs.py @@ -106,13 +106,13 @@ def stop(self, *args, **kwargs) -> CommandResult: def scale(self, units: int, *args, **kwargs) -> CommandResult: logger.info(f"Scaling RFS backfill by setting desired count to {units} instances") return self.ecs_client.set_desired_count(units) - + def get_status(self, deep_check, *args, **kwargs) -> CommandResult: logger.info(f"Getting status of RFS backfill, with {deep_check=}") instance_statuses = self.ecs_client.get_instance_statuses() if not instance_statuses: return CommandResult(False, "Failed to get instance statuses") - + status_string = str(instance_statuses) if deep_check: try: @@ -185,7 +185,7 @@ def _get_detailed_status(self) -> Optional[str]: def parse_query_response(query: dict, cluster: Cluster, label: str) -> Optional[int]: try: - response = cluster.call_api("/.migrations_working_state/_search", json_body=query) + response = cluster.call_api("/.migrations_working_state/_search", data=query) 
except Exception as e: logger.error(f"Failed to execute query: {e}") return None diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/cluster.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/cluster.py index c3fa5a217..480b3d3cb 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/cluster.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/cluster.py @@ -76,7 +76,8 @@ def __init__(self, config: Dict) -> None: raise ValueError("Invalid config file for cluster", v.errors) self.endpoint = config["endpoint"] - self.allow_insecure = config.get("allow_insecure", False) + self.allow_insecure = config.get("allow_insecure", False) if self.endpoint.startswith( + "https") else config.get("allow_insecure", True) if 'no_auth' in config: self.auth_type = AuthMethod.NO_AUTH elif 'basic_auth' in config: @@ -85,11 +86,13 @@ def __init__(self, config: Dict) -> None: elif 'sigv4' in config: self.auth_type = AuthMethod.SIGV4 - def call_api(self, path, method: HttpMethod = HttpMethod.GET, timeout=None, - json_body=None, **kwargs) -> requests.Response: + def call_api(self, path, method: HttpMethod = HttpMethod.GET, data=None, headers=None, + timeout=None, session=None, raise_error=True, **kwargs) -> requests.Response: """ Calls an API on the cluster. """ + if session is None: + session = requests.Session() if self.auth_type == AuthMethod.BASIC_AUTH: assert self.auth_details is not None # for mypy's sake auth = HTTPBasicAuth( @@ -101,38 +104,34 @@ def call_api(self, path, method: HttpMethod = HttpMethod.GET, timeout=None, else: raise NotImplementedError(f"Auth type {self.auth_type} not implemented") - if json_body is not None: - data = json_body - else: - data = None - - logger.info(f"Making api call to {self.endpoint}{path}") - # Extract query parameters from kwargs params = kwargs.get('params', {}) - - r = requests.request( + + logger.info(f"Performing request: {method.name} {self.endpoint}{path}") + r = session.request( method.name, f"{self.endpoint}{path}", - params=params, verify=(not self.allow_insecure), + params=params, auth=auth, - timeout=timeout, - json=data + data=data, + headers=headers, + timeout=timeout ) - logger.debug(f"Cluster API call request: {r.request}") - r.raise_for_status() + logger.info(f"Received response: {r.status_code} {method.name} {self.endpoint}{path} - {r.text[:1000]}") + if raise_error: + r.raise_for_status() return r def execute_benchmark_workload(self, workload: str, workload_params='target_throughput:0.5,bulk_size:10,bulk_indexing_clients:1,' 'search_clients:1'): - client_options = "" + client_options = "verify_certs:false" if not self.allow_insecure: - client_options += "use_ssl:true,verify_certs:false" + client_options += ",use_ssl:true" if self.auth_type == AuthMethod.BASIC_AUTH: if self.auth_details['password'] is not None: - client_options += (f"basic_auth_user:{self.auth_details['username']}," + client_options += (f",basic_auth_user:{self.auth_details['username']}," f"basic_auth_password:{self.auth_details['password']}") else: raise NotImplementedError(f"Auth type {self.auth_type} with AWS Secret ARN is not currently support " @@ -142,7 +141,8 @@ def execute_benchmark_workload(self, workload: str, f"benchmark workloads") # Note -- we should censor the password when logging this command logger.info(f"Running opensearch-benchmark with 
'{workload}' workload") - subprocess.run(f"opensearch-benchmark execute-test --distribution-version=1.0.0 " - f"--target-host={self.endpoint} --workload={workload} --pipeline=benchmark-only --test-mode " - f"--kill-running-processes --workload-params={workload_params} " - f"--client-options={client_options}", shell=True) + command = (f"opensearch-benchmark execute-test --distribution-version=1.0.0 --target-host={self.endpoint} " + f"--workload={workload} --pipeline=benchmark-only --test-mode --kill-running-processes " + f"--workload-params={workload_params} --client-options={client_options}") + logger.info(f"Executing command: {command}") + subprocess.run(command, shell=True) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/replayer_base.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/replayer_base.py index 5b78ec6a5..d714e848d 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/replayer_base.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/replayer_base.py @@ -9,6 +9,7 @@ DOCKER_REPLAY_SCHEMA = { "type": "dict", + "nullable": True, "schema": { "socket": {"type": "string", "required": False} } @@ -36,7 +37,7 @@ } -ReplayStatus = Enum("ReplayStatus", ["NOT_STARTED", "RUNNING", "STOPPED", "FAILED"]) +ReplayStatus = Enum("ReplayStatus", ["NOT_STARTED", "STARTING", "RUNNING", "STOPPED", "FAILED"]) class Replayer(ABC): @@ -62,7 +63,7 @@ def stop(self, *args, **kwargs) -> CommandResult: pass @abstractmethod - def get_status(self, *args, **kwargs) -> ReplayStatus: + def get_status(self, *args, **kwargs) -> CommandResult: """Return a status""" pass diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/replayer_docker.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/replayer_docker.py new file mode 100644 index 000000000..54330bc19 --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/replayer_docker.py @@ -0,0 +1,28 @@ +from typing import Dict +from console_link.models.command_result import CommandResult +from console_link.models.replayer_base import Replayer, ReplayStatus + +import logging + +logger = logging.getLogger(__name__) + + +class DockerReplayer(Replayer): + def __init__(self, config: Dict) -> None: + super().__init__(config) + + def start(self, *args, **kwargs) -> CommandResult: + logger.warning("Start command is not implemented for Docker Replayer") + return CommandResult(success=True, value="No action performed, action is unimplemented") + + def stop(self, *args, **kwargs) -> CommandResult: + logger.warning("Stop command is not implemented for Docker Replayer") + return CommandResult(success=True, value="No action performed, action is unimplemented") + + def get_status(self, *args, **kwargs) -> CommandResult: + logger.warning("Get status command is not implemented for Docker Replayer and " + "always assumes service is running") + return CommandResult(True, (ReplayStatus.RUNNING, "Docker Replayer is assumed to be running")) + + def scale(self, units: int, *args, **kwargs) -> CommandResult: + raise NotImplementedError() diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/replayer_ecs.py 
b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/replayer_ecs.py index 2b5216abc..c4c7fca12 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/replayer_ecs.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/replayer_ecs.py @@ -23,8 +23,19 @@ def stop(self, *args, **kwargs) -> CommandResult: logger.info("Stopping ECS replayer by setting desired count to 0 instances") return self.ecs_client.set_desired_count(0) - def get_status(self, *args, **kwargs) -> ReplayStatus: - raise NotImplementedError() + def get_status(self, *args, **kwargs) -> CommandResult: + # Simple implementation that only checks ECS service status currently + instance_statuses = self.ecs_client.get_instance_statuses() + if not instance_statuses: + return CommandResult(False, "Failed to get instance statuses") + + status_string = str(instance_statuses) + + if instance_statuses.running > 0: + return CommandResult(True, (ReplayStatus.RUNNING, status_string)) + elif instance_statuses.pending > 0: + return CommandResult(True, (ReplayStatus.STARTING, status_string)) + return CommandResult(True, (ReplayStatus.STOPPED, status_string)) def scale(self, units: int, *args, **kwargs) -> CommandResult: logger.info(f"Scaling ECS replayer by setting desired count to {units} instances") diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/snapshot.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/snapshot.py index 484c3fbff..28d78ba38 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/snapshot.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/snapshot.py @@ -59,6 +59,11 @@ def status(self, *args, **kwargs) -> CommandResult: """Get the status of the snapshot.""" pass + @abstractmethod + def delete(self, *args, **kwargs) -> CommandResult: + """Delete a snapshot.""" + pass + S3_SNAPSHOT_SCHEMA = { 'snapshot_name': {'type': 'string', 'required': True}, @@ -112,6 +117,9 @@ def status(self, *args, deep_check=False, **kwargs) -> CommandResult: return get_snapshot_status_full(self.source_cluster, self.snapshot_name) return get_snapshot_status(self.source_cluster, self.snapshot_name) + def delete(self, *args, **kwargs) -> CommandResult: + return delete_snapshot(self.source_cluster, self.snapshot_name) + class FileSystemSnapshot(Snapshot): def __init__(self, config: Dict, source_cluster: Cluster) -> None: @@ -149,6 +157,9 @@ def create(self, *args, **kwargs) -> CommandResult: def status(self, *args, **kwargs) -> CommandResult: raise NotImplementedError("Status check for FileSystemSnapshot is not implemented yet.") + def delete(self, *args, **kwargs) -> CommandResult: + return delete_snapshot(self.source_cluster, self.snapshot_name) + def parse_args(): parser = argparse.ArgumentParser(description="Elasticsearch snapshot status checker.") @@ -282,3 +293,11 @@ def get_snapshot_status_full(cluster: Cluster, snapshot: str, return CommandResult(success=True, value=f"{state}\n{message}") except Exception as e: return CommandResult(success=False, value=f"Failed to get full snapshot status: {str(e)}") + + +def delete_snapshot(cluster: Cluster, snapshot_name: str, repository: str = 'migration_assistant_repo'): + repository = repository if repository != '*' 
else get_repository_for_snapshot(cluster, snapshot_name) + + path = f"/_snapshot/{repository}/{snapshot_name}" + response = cluster.call_api(path, HttpMethod.DELETE) + logging.debug(f"Raw delete snapshot status response: {response.text}") diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/services.yaml b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/services.yaml index 835c98ae2..7daa49aba 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/services.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/services.yaml @@ -16,6 +16,8 @@ metrics_source: backfill: reindex_from_snapshot: docker: +replay: + docker: snapshot: snapshot_name: "snapshot_2023_01_01" fs: diff --git a/test/Pipfile b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/Pipfile similarity index 83% rename from test/Pipfile rename to TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/Pipfile index 88df1e49e..21813952a 100644 --- a/test/Pipfile +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/Pipfile @@ -4,6 +4,7 @@ verify_ssl = true name = "pypi" [packages] +console-link = {file = "../console_link", editable = true} certifi = "==2024.7.4" charset-normalizer = "==3.1.0" idna = "==3.7" @@ -16,7 +17,7 @@ requests = ">=2.32.3" urllib3 = ">=2.0.7" requests-aws4auth = "*" boto3 = "*" - +pytest-sugar = "*" [dev-packages] [requires] diff --git a/test/Pipfile.lock b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/Pipfile.lock similarity index 66% rename from test/Pipfile.lock rename to TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/Pipfile.lock index 65b5899e8..6c4762256 100644 --- a/test/Pipfile.lock +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "e1a3d55892ace5dfd9e6deb4863fa576f432f1a3de368c3ba6a3718dbd92f2e0" + "sha256": "9c47ebdc5129d551502e8b3c517fc39e723d1ea3220b484206ad0254491fc42e" }, "pipfile-spec": 6, "requires": { @@ -18,20 +18,27 @@ "default": { "boto3": { "hashes": [ - "sha256:b781d267dd5e7583966e05697f6bd45e2f46c01dc619ba0860b042963ee69296", - "sha256:c163fb7135a94e7b8c8c478a44071c843f05e212fa4bec3105f8a437ecbf1bcb" + "sha256:23ca8d8f7a30c3bbd989808056b5fc5d68ff5121c02c722c6167b6b1bb7f8726", + "sha256:578bbd5e356005719b6b610d03edff7ea1b0824d078afe62d3fb8bea72f83a87" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==1.34.130" + "version": "==1.34.140" }, "botocore": { "hashes": [ - "sha256:a242b3b0a836b14f308a309565cd63e88654cec238f9b73abbbd3c0526db4c81", - "sha256:a3b36e9dac1ed31c4cb3a5c5e540a7d8a9b90ff1d17f87734e674154b41776d8" + "sha256:43940d3a67d946ba3301631ba4078476a75f1015d4fb0fb0272d0b754b2cf9de", + "sha256:86302b2226c743b9eec7915a4c6cfaffd338ae03989cd9ee181078ef39d1ab39" ], "markers": "python_version >= '3.8'", - "version": "==1.34.130" + "version": "==1.34.140" + }, + "cerberus": { + "hashes": [ + "sha256:7649a5815024d18eb7c6aa5e7a95355c649a53aacfc9b050e9d0bf6bfa2af372", + "sha256:81011e10266ef71b6ec6d50e60171258a5b134d69f8fb387d16e4936d0d47642" + ], + "version": "==1.3.5" }, "certifi": { "hashes": [ @@ -124,6 +131,18 @@ "markers": "python_full_version >= '3.7.0'", "version": "==3.1.0" }, + "click": { + "hashes": [ + 
"sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", + "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de" + ], + "markers": "python_version >= '3.7'", + "version": "==8.1.7" + }, + "console-link": { + "editable": true, + "file": "../console_link" + }, "exceptiongroup": { "hashes": [ "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad", @@ -193,6 +212,14 @@ "markers": "python_version >= '3.7'", "version": "==7.3.1" }, + "pytest-sugar": { + "hashes": [ + "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a", + "sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd" + ], + "index": "pypi", + "version": "==1.0.0" + }, "pytest-xdist": { "hashes": [ "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93", @@ -210,6 +237,63 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", "version": "==2.9.0.post0" }, + "pyyaml": { + "hashes": [ + "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5", + "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc", + "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df", + "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741", + "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206", + "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27", + "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595", + "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62", + "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98", + "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696", + "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290", + "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9", + "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d", + "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6", + "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867", + "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47", + "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486", + "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6", + "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3", + "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007", + "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938", + "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0", + "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c", + "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735", + "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d", + "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28", + "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4", + "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba", + "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8", + "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef", + "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5", + "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd", + "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3", + 
"sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0", + "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515", + "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c", + "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c", + "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924", + "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34", + "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43", + "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859", + "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673", + "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54", + "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a", + "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b", + "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab", + "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa", + "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c", + "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585", + "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d", + "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f" + ], + "markers": "python_version >= '3.6'", + "version": "==6.0.1" + }, "requests": { "hashes": [ "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", @@ -230,11 +314,11 @@ }, "s3transfer": { "hashes": [ - "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19", - "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d" + "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6", + "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69" ], "markers": "python_version >= '3.8'", - "version": "==0.10.1" + "version": "==0.10.2" }, "six": { "hashes": [ @@ -244,6 +328,14 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", "version": "==1.16.0" }, + "termcolor": { + "hashes": [ + "sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63", + "sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a" + ], + "markers": "python_version >= '3.8'", + "version": "==2.4.0" + }, "tomli": { "hashes": [ "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/README.md b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/README.md new file mode 100644 index 000000000..e6b2c8b37 --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/README.md @@ -0,0 +1,44 @@ +### E2E Integration Testing +Developers can run a test script which will verify the end-to-end Docker Solution. + +#### Compatibility +* Python >= 3.7 + +#### Pre-requisites + +* Have all containers from Docker solution running. 
+
+To run the test script, navigate to this directory, install the required
+packages, and then run the desired test module, for example:
+
+```
+pipenv install
+pipenv run pytest integ_test/backfill_tests.py
+```
+
+#### Notes
+
+##### Ports Setup
+By default, the test script uses the ports assigned to the containers in this
+[docker-compose file](../../../docker-compose.yml), so if the Docker solution started
+with no issues in its current setup, the test script will run as is. If the ports in
+that file have been changed, either provide the `proxy_endpoint`, `source_endpoint`,
+and `target_endpoint` parameters accordingly, or update their default values
+in [conftest.py](integ_test/conftest.py).
+
+
+#### Script Parameters
+
+This script accepts various parameters to customize its behavior. Below is a list of the available parameters along with their default values:
+
+- `--unique_id`: The unique identifier to apply to created indices/documents.
+  - Default: Generated uuid
+- `--config_file_path`: The services yaml config file path for the console library.
+  - Default: `/etc/migration_services.yaml`
+
+
+#### Clean Up
+The test script includes setup and teardown functions that run with each and every test,
+deleting the additions made to the endpoints so the tests *mostly* clean up after
+themselves. However, because all operations going through the proxy (which is capturing
+the traffic) are logged, those records are only deleted once the Docker solution is
+shut down.
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/backfill_tests.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/backfill_tests.py
new file mode 100644
index 000000000..87f6e8987
--- /dev/null
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/backfill_tests.py
@@ -0,0 +1,111 @@
+import logging
+import pytest
+import unittest
+from http import HTTPStatus
+from console_link.logic.clusters import run_test_benchmarks, connection_check, clear_indices, ConnectionResult
+from console_link.models.cluster import Cluster
+from console_link.models.backfill_base import Backfill
+from console_link.models.backfill_rfs import RFSBackfill
+from console_link.models.command_result import CommandResult
+from console_link.models.snapshot import Snapshot
+from console_link.models.metadata import Metadata
+from console_link.cli import Context
+from common_operations import (get_document, create_document, create_index, check_doc_counts_match,
+                               EXPECTED_BENCHMARK_DOCS)
+
+logger = logging.getLogger(__name__)
+
+
+def preload_data(source_cluster: Cluster, target_cluster: Cluster):
+    # Confirm source and target connection
+    source_con_result: ConnectionResult = connection_check(source_cluster)
+    assert source_con_result.connection_established is True
+    target_con_result: ConnectionResult = connection_check(target_cluster)
+    assert target_con_result.connection_established is True
+
+    # Clear any existing non-system indices
+    clear_indices(source_cluster)
+    clear_indices(target_cluster)
+
+    # Preload data that test cases will verify is migrated
+    # test_backfill_0001
+    index_name = f"test_backfill_0001_{pytest.unique_id}"
+    doc_id = "backfill_0001_doc"
+    create_index(cluster=source_cluster, index_name=index_name)
+    create_document(cluster=source_cluster, index_name=index_name, doc_id=doc_id,
+                    expected_status_code=HTTPStatus.CREATED)
+
+    # 
test_backfill_0002 + run_test_benchmarks(source_cluster) + + +@pytest.fixture(scope="class") +def setup_backfill(request): + config_path = request.config.getoption("--config_file_path") + unique_id = request.config.getoption("--unique_id") + pytest.console_env = Context(config_path).env + pytest.unique_id = unique_id + preload_data(source_cluster=pytest.console_env.source_cluster, + target_cluster=pytest.console_env.target_cluster) + backfill: Backfill = pytest.console_env.backfill + assert backfill is not None + metadata: Metadata = pytest.console_env.metadata + assert metadata is not None + backfill.create() + if isinstance(backfill, RFSBackfill): + snapshot: Snapshot = pytest.console_env.snapshot + status_result: CommandResult = snapshot.status() + if status_result.success: + snapshot.delete() + snapshot.create(wait=True) + metadata.migrate() + backfill.start() + backfill.scale(units=10) + else: + metadata.migrate() + backfill.start() + + +@pytest.fixture(scope="session", autouse=True) +def cleanup_after_tests(): + # Setup code + logger.info("Starting backfill tests...") + + yield + + # Teardown code + logger.info("Stopping backfill...") + backfill: Backfill = pytest.console_env.backfill + backfill.stop() + + +@pytest.mark.usefixtures("setup_backfill") +class BackfillTests(unittest.TestCase): + + def test_backfill_0001_single_document(self): + index_name = f"test_backfill_0001_{pytest.unique_id}" + doc_id = "backfill_0001_doc" + source_cluster: Cluster = pytest.console_env.source_cluster + target_cluster: Cluster = pytest.console_env.target_cluster + + # Assert preloaded document exists + get_document(cluster=source_cluster, index_name=index_name, doc_id=doc_id, test_case=self) + + # TODO Determine when backfill is completed + + get_document(cluster=target_cluster, index_name=index_name, doc_id=doc_id, max_attempts=30, delay=30.0, + test_case=self) + + def test_backfill_0002_sample_benchmarks(self): + source_cluster: Cluster = pytest.console_env.source_cluster + target_cluster: Cluster = pytest.console_env.target_cluster + + # Confirm documents on source + check_doc_counts_match(cluster=source_cluster, expected_index_details=EXPECTED_BENCHMARK_DOCS, + test_case=self) + + # TODO Determine when backfill is completed + + # Confirm documents on target after backfill + check_doc_counts_match(cluster=target_cluster, expected_index_details=EXPECTED_BENCHMARK_DOCS, + max_attempts=40, delay=30.0, test_case=self) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/common_operations.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/common_operations.py new file mode 100644 index 000000000..39131095e --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/common_operations.py @@ -0,0 +1,216 @@ +import datetime +import random +import string +import json +import time +import logging +from requests.exceptions import ConnectionError, SSLError +from typing import Dict, List +from unittest import TestCase +from console_link.logic.clusters import call_api +from console_link.models.cluster import HttpMethod, Cluster +from console_link.models.replayer_base import Replayer, ReplayStatus + +logger = logging.getLogger(__name__) + +DEFAULT_INDEX_IGNORE_LIST = ["test_", ".", "searchguard", "sg7", "security-auditlog"] + +EXPECTED_BENCHMARK_DOCS = { + "geonames": {"docs.count": "1000"}, + "logs-221998": {"docs.count": "1000"}, + "logs-211998": {"docs.count": "1000"}, + 
"logs-231998": {"docs.count": "1000"}, + "logs-241998": {"docs.count": "1000"}, + "logs-181998": {"docs.count": "1000"}, + "logs-201998": {"docs.count": "1000"}, + "logs-191998": {"docs.count": "1000"}, + "sonested": {"docs.count": "2977"}, + "reindexed-logs": {"docs.count": "0"}, + "nyc_taxis": {"docs.count": "1000"} +} + + +class ClusterAPIRequestError(Exception): + pass + + +class ReplayerNotActiveError(Exception): + pass + + +def execute_api_call(cluster: Cluster, path: str, method=HttpMethod.GET, data=None, headers=None, timeout=None, + session=None, expected_status_code: int = 200, max_attempts: int = 10, delay: float = 2.5, + test_case=None): + api_exception = None + last_received_status = None + last_response = None + for _ in range(1, max_attempts + 1): + try: + response = call_api(cluster=cluster, path=path, method=method, data=data, headers=headers, timeout=timeout, + session=session, raise_error=False) + last_response = response + if response.status_code == expected_status_code: + break + else: + # Ensure that our final captured exception is accurate + api_exception = None + last_received_status = response.status_code + logger.debug(f"Status code returned: {response.status_code} did not" + f" match the expected status code: {expected_status_code}." + f" Trying again in {delay} seconds.") + except (ConnectionError, SSLError) as e: + last_response = None + api_exception = e + logger.debug(f"Received exception: {e}. Unable to connect to server. Please check all containers are up" + f" and ports are setup properly. Trying again in {delay} seconds.") + time.sleep(delay) + + if api_exception: + error_message = f"Unable to connect to server. Underlying exception: {api_exception}" + raise ClusterAPIRequestError(error_message) + else: + error_message = (f"Failed to receive desired status code of {expected_status_code} and instead " + f"received {last_received_status} for request: {method.name} {path}") + if test_case is not None: + test_case.assertEqual(expected_status_code, last_response.status_code, error_message) + elif expected_status_code != last_response.status_code: + raise ClusterAPIRequestError(error_message) + return last_response + + +def create_index(index_name: str, cluster: Cluster, **kwargs): + return execute_api_call(cluster=cluster, method=HttpMethod.PUT, path=f"/{index_name}", + **kwargs) + + +def get_index(index_name: str, cluster: Cluster, **kwargs): + return execute_api_call(cluster=cluster, method=HttpMethod.GET, path=f"/{index_name}", + **kwargs) + + +def delete_index(index_name: str, cluster: Cluster, **kwargs): + return execute_api_call(cluster=cluster, method=HttpMethod.DELETE, path=f"/{index_name}", + **kwargs) + + +def create_document(index_name: str, doc_id: str, cluster: Cluster, data: dict = None, **kwargs): + if data is None: + data = { + 'title': 'Test Document', + 'content': 'This is a sample document for testing OpenSearch.' 
+ } + headers = {'Content-Type': 'application/json'} + return execute_api_call(cluster=cluster, method=HttpMethod.PUT, path=f"/{index_name}/_doc/{doc_id}", + data=json.dumps(data), headers=headers, **kwargs) + + +def get_document(index_name: str, doc_id: str, cluster: Cluster, **kwargs): + return execute_api_call(cluster=cluster, method=HttpMethod.GET, path=f"/{index_name}/_doc/{doc_id}", + **kwargs) + + +def delete_document(index_name: str, doc_id: str, cluster: Cluster, **kwargs): + return execute_api_call(cluster=cluster, method=HttpMethod.DELETE, path=f"/{index_name}/_doc/{doc_id}", + **kwargs) + + +def index_matches_ignored_index(index_name: str, index_prefix_ignore_list: List[str]): + for prefix in index_prefix_ignore_list: + if index_name.startswith(prefix): + return True + return False + + +def get_all_index_details(cluster: Cluster, index_prefix_ignore_list=None, **kwargs) -> Dict[str, Dict[str, str]]: + all_index_details = execute_api_call(cluster=cluster, path="/_cat/indices?format=json", **kwargs).json() + index_dict = {} + for index_details in all_index_details: + valid_index = not index_matches_ignored_index(index_name=index_details['index'], + index_prefix_ignore_list=index_prefix_ignore_list) + if index_prefix_ignore_list is None or valid_index: + index_dict[index_details['index']] = index_details + return index_dict + + +def check_doc_counts_match(cluster: Cluster, + expected_index_details: Dict[str, Dict[str, str]], + test_case: TestCase, + index_prefix_ignore_list=None, + max_attempts: int = 5, + delay: float = 2.5): + if index_prefix_ignore_list is None: + index_prefix_ignore_list = DEFAULT_INDEX_IGNORE_LIST + + error_message = "" + for attempt in range(1, max_attempts + 1): + # Refresh documents + execute_api_call(cluster=cluster, path="/_refresh") + actual_index_details = get_all_index_details(cluster=cluster, index_prefix_ignore_list=index_prefix_ignore_list) + logger.debug(f"Received actual indices: {actual_index_details}") + if actual_index_details.keys() != expected_index_details.keys(): + error_message = (f"Indices are different: \n Expected: {expected_index_details.keys()} \n " + f"Actual: {actual_index_details.keys()}") + logger.debug(f"Error on attempt {attempt}: {error_message}") + else: + for index_details in actual_index_details.values(): + index_name = index_details['index'] + actual_doc_count = index_details['docs.count'] + expected_doc_count = expected_index_details[index_name]['docs.count'] + if actual_doc_count != expected_doc_count: + error_message = (f"Index {index_name} has {actual_doc_count} documents but {expected_doc_count} " + f"were expected") + logger.debug(f"Error on attempt {attempt}: {error_message}") + break + if not error_message: + return True + if attempt != max_attempts: + error_message = "" + time.sleep(delay) + test_case.fail(error_message) + + +def check_doc_match(test_case: TestCase, index_name: str, doc_id: str, source_cluster: Cluster, + target_cluster: Cluster): + source_response = get_document(index_name=index_name, doc_id=doc_id, cluster=source_cluster) + target_response = get_document(index_name=index_name, doc_id=doc_id, cluster=target_cluster) + + source_document = source_response.json() + source_content = source_document['_source'] + target_document = target_response.json() + target_content = target_document['_source'] + test_case.assertEqual(source_content, target_content) + + +def generate_large_doc(size_mib): + # Calculate number of characters needed (1 char = 1 byte) + num_chars = size_mib * 1024 * 1024 + + # Generate 
random string of the desired length
+    large_string = ''.join(random.choices(string.ascii_letters + string.digits, k=num_chars))
+
+    return {
+        "timestamp": datetime.datetime.now().isoformat(),
+        "large_field": large_string
+    }
+
+
+def wait_for_running_replayer(replayer: Replayer,
+                              test_case: TestCase = None,
+                              max_attempts: int = 25,
+                              delay: float = 3.0):
+    error_message = ""
+    for attempt in range(1, max_attempts + 1):
+        cmd_result = replayer.get_status()
+        status = cmd_result.value[0]
+        logger.debug(f"Received status {status} for Replayer on attempt {attempt}")
+        if status == ReplayStatus.RUNNING:
+            return
+        error_message = (f"Received replayer status of {status} but expected {ReplayStatus.RUNNING} "
+                         f"after {max_attempts} attempts")
+        if attempt != max_attempts:
+            error_message = ""
+            time.sleep(delay)
+    if test_case:
+        test_case.fail(error_message)
+    else:
+        raise ReplayerNotActiveError(error_message)
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/conftest.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/conftest.py
new file mode 100644
index 000000000..3efb7fbe2
--- /dev/null
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/conftest.py
@@ -0,0 +1,25 @@
+# conftest.py
+import pytest
+import uuid
+import logging
+
+
+def pytest_configure(config):
+    # Configure logging
+    logging.basicConfig(level=logging.DEBUG,
+                        format='%(asctime)s - %(levelname)s - %(message)s',
+                        datefmt='%Y-%m-%d %H:%M:%S')
+
+    # This line ensures that log messages are displayed on the console during test runs
+    logging.getLogger().setLevel(logging.DEBUG)
+
+
+def pytest_addoption(parser):
+    parser.addoption("--unique_id", action="store", default=uuid.uuid4().hex)
+    parser.addoption("--config_file_path", action="store", default="/etc/migration_services.yaml",
+                     help="Path to config file for console library")
+
+
+@pytest.fixture
+def unique_id(pytestconfig):
+    return pytestconfig.getoption("unique_id")
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/metric_operations.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/metric_operations.py
new file mode 100644
index 000000000..1befd12ca
--- /dev/null
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/metric_operations.py
@@ -0,0 +1,82 @@
+import functools
+import logging
+import time
+import pytest
+from typing import List, Dict, Tuple
+from unittest import TestCase
+from console_link.models.metrics_source import MetricsSource, CloudwatchMetricsSource
+from console_link.logic.metrics import get_metric_data
+
+logger = logging.getLogger(__name__)
+
+
+# Note that the names of metrics are a bit different in a local vs cloud deployment.
+# The transformation is somewhat hardcoded here--the user should put in the local name, and if it's
+# a cloud deployment, everything after the first `_` will be discarded. This should generally cause
+# things to match, but it's possible there are edge cases that it doesn't account for.
+# Note as well that currently the only way of assuming data is correlated with a given test is via
+# the lookback time. Soon, we should implement a way to add a specific ID to metrics from a given run
+# and check for the presence of that ID.
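+# For example, a metric passed in by its local name 'kafkaCommitCount_total' would be queried as
+# 'kafkaCommitCount' against a cloud metrics source, since everything after the first `_` is dropped.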
+def assert_metric_has_data(component: str, metric: str, lookback_minutes: int, test_case: TestCase):
+    metrics_source: MetricsSource = pytest.console_env.metrics_source
+
+    metric_data: List[Tuple[str, float]] = get_metric_data(metrics_source=metrics_source,
+                                                           component=component,
+                                                           metric_name=metric,
+                                                           statistic="Sum",
+                                                           lookback=lookback_minutes)
+    logger.info(f"Received the following data from get_metric_data: {metric_data}")
+    test_case.assertNotEqual(
+        len(metric_data), 0,
+        f"Metric {metric} for component {component} does not exist or does "
+        f"not have data within the last {lookback_minutes} minutes"
+    )
+
+
+def assert_metrics(expected_metrics: Dict[str, List[str]], test_case: TestCase, deployment_type: str,
+                   lookback_minutes=2, wait_before_check_seconds=60):
+    """
+    This is the method invoked by the `@assert_metrics_present` decorator.
+    params:
+    expected_metrics: a dictionary of component->[metrics], for each metric that should be verified.
+    lookback_minutes: the number of minutes into the past to query for metrics
+    wait_before_check_seconds: the time in seconds to delay before checking for the presence of metrics
+    """
+    logger.debug(f"Waiting {wait_before_check_seconds} seconds before checking for metrics.")
+    time.sleep(wait_before_check_seconds)
+    for component, expected_comp_metrics in expected_metrics.items():
+        if component == "captureProxy" and deployment_type == "cloud":
+            # We currently do not emit captureProxy metrics from a non-standalone proxy, which is the scenario
+            # tested in our e2e tests. Therefore, we don't want to assert metrics exist in this situation. We
+            # should remove this clause as soon as we start testing the standalone proxy scenario.
+            logger.warning("Skipping metric verification for captureProxy metrics in a cloud deployment.")
+            continue
+        for expected_metric in expected_comp_metrics:
+            if deployment_type == 'cloud':
+                expected_metric = expected_metric.split('_', 1)[0]
+            assert_metric_has_data(component, expected_metric, lookback_minutes, test_case)
+
+
+def assert_metrics_present(*wrapper_args, **wrapper_kwargs):
+    def decorator(test_func):
+        @functools.wraps(test_func)
+        def wrapper(self, *args, **kwargs):
+            # Run the original test function. test_passed starts as False and is only set to True when
+            # the test completes without raising, so the finally block below can safely check it even
+            # when the test fails with an unexpected (non-assertion) error.
+            test_passed = False
+            try:
+                test_func(self, *args, **kwargs)
+                test_passed = True
+            finally:
+                if test_passed:
+                    metrics_source: MetricsSource = pytest.console_env.metrics_source
+                    deployment_type = "docker"
+                    if isinstance(metrics_source, CloudwatchMetricsSource):
+                        deployment_type = "cloud"
+                    # Only look for metrics if the test passed
+                    assert_metrics(*wrapper_args, test_case=self, deployment_type=deployment_type, **wrapper_kwargs)
+        return wrapper
+    return decorator
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/replayer_tests.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/replayer_tests.py
new file mode 100644
index 000000000..a78342dfa
--- /dev/null
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/integ_test/replayer_tests.py
@@ -0,0 +1,250 @@
+import pytest
+import unittest
+import requests
+import logging
+import secrets
+import string
+import time
+from http import HTTPStatus
+from requests import Session
+from requests.adapters import HTTPAdapter
+from console_link.models.replayer_base import Replayer
+from console_link.logic.kafka import delete_topic
+from console_link.models.kafka import Kafka
+from console_link.logic.clusters import connection_check, clear_indices, run_test_benchmarks, ConnectionResult
+from console_link.models.cluster import Cluster, AuthMethod
+from console_link.cli import Context
+
+from common_operations import (get_index, create_index, delete_index, get_document, create_document, delete_document,
+                               check_doc_match, check_doc_counts_match, generate_large_doc, execute_api_call,
+                               wait_for_running_replayer, EXPECTED_BENCHMARK_DOCS)
+from metric_operations import assert_metrics_present
+
+logger = logging.getLogger(__name__)
+
+
+@pytest.fixture(scope="class")
+def setup_replayer(request):
+    config_path = request.config.getoption("--config_file_path")
+    unique_id = request.config.getoption("--unique_id")
+    pytest.console_env = Context(config_path).env
+    pytest.unique_id = unique_id
+    source_cluster: Cluster = pytest.console_env.source_cluster
+    target_cluster: Cluster = pytest.console_env.target_cluster
+    kafka: Kafka = pytest.console_env.kafka
+    replayer: Replayer = pytest.console_env.replay
+    assert replayer is not None
+
+    # Confirm source and target connection
+    source_con_result: ConnectionResult = connection_check(source_cluster)
+    assert source_con_result.connection_established is True
+    target_con_result: ConnectionResult = connection_check(target_cluster)
+    assert target_con_result.connection_established is True
+
+    # Clear any existing non-system indices
+    clear_indices(source_cluster)
+    clear_indices(target_cluster)
+
+    # Delete existing Kafka topic to clear records
+    delete_topic(kafka=kafka, topic_name="logging-traffic-topic")
+
+    logger.info("Starting replayer...")
+    # TODO provide support for actually starting/stopping Replayer in Docker
+    replayer.start()
+    wait_for_running_replayer(replayer=replayer)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def cleanup_after_tests():
+    # Setup code
+    logger.info("Starting replayer tests...")
+
+    yield
+
+    # Teardown code
+    logger.info("Stopping replayer...")
+    replayer: Replayer = pytest.console_env.replay
+    assert replayer is not None
+    replayer.stop()
+
+
+@pytest.mark.usefixtures("setup_replayer")
+class ReplayerTests(unittest.TestCase):
+
+    @assert_metrics_present({
+        'captureProxy': ['kafkaCommitCount_total'],
+        'replayer': ['kafkaCommitCount_total']
+    })
+    def test_replayer_0001_empty_index(self):
+        # This test verifies that an index created (then deleted) on the source cluster through the proxy is
+        # also created (then deleted) on the target cluster. It confirms that the traffic is captured by the
+        # proxy, that the captured traffic reaches the source cluster, and that the replayer replays that
+        # traffic to the target cluster.
+
+        source_cluster: Cluster = pytest.console_env.source_cluster
+        target_cluster: Cluster = pytest.console_env.target_cluster
+        index_name = f"test_replayer_0001_{pytest.unique_id}"
+
+        create_index(cluster=source_cluster, index_name=index_name, test_case=self)
+        get_index(cluster=source_cluster, index_name=index_name, test_case=self)
+        get_index(cluster=target_cluster, index_name=index_name, test_case=self)
+        delete_index(cluster=source_cluster, index_name=index_name, test_case=self)
+        get_index(cluster=source_cluster, index_name=index_name, expected_status_code=HTTPStatus.NOT_FOUND,
+                  test_case=self)
+        get_index(cluster=target_cluster, index_name=index_name, expected_status_code=HTTPStatus.NOT_FOUND,
+                  test_case=self)
+
+    def test_replayer_0002_single_document(self):
+        # This test verifies that a document created (then deleted) on the source cluster through the proxy is
+        # also created (then deleted) on the target cluster. It confirms that the traffic is captured by the
+        # proxy, that the captured traffic reaches the source cluster, and that the replayer replays that
+        # traffic to the target cluster.
+
+        source_cluster: Cluster = pytest.console_env.source_cluster
+        target_cluster: Cluster = pytest.console_env.target_cluster
+        index_name = f"test_replayer_0002_{pytest.unique_id}"
+        doc_id = "replayer_0002_doc"
+
+        create_index(cluster=source_cluster, index_name=index_name, test_case=self)
+        get_index(cluster=source_cluster, index_name=index_name, test_case=self)
+        get_index(cluster=target_cluster, index_name=index_name, test_case=self)
+        create_document(cluster=source_cluster, index_name=index_name, doc_id=doc_id,
+                        expected_status_code=HTTPStatus.CREATED, test_case=self)
+        check_doc_match(source_cluster=source_cluster, target_cluster=target_cluster,
+                        index_name=index_name, doc_id=doc_id, test_case=self)
+        delete_document(cluster=source_cluster, index_name=index_name, doc_id=doc_id, test_case=self)
+        get_document(cluster=source_cluster, index_name=index_name, doc_id=doc_id,
+                     expected_status_code=HTTPStatus.NOT_FOUND, test_case=self)
+        get_document(cluster=target_cluster, index_name=index_name, doc_id=doc_id,
+                     expected_status_code=HTTPStatus.NOT_FOUND, test_case=self)
+        delete_index(cluster=source_cluster, index_name=index_name)
+        get_index(cluster=source_cluster, index_name=index_name, expected_status_code=HTTPStatus.NOT_FOUND,
+                  test_case=self)
+        get_index(cluster=target_cluster, index_name=index_name, expected_status_code=HTTPStatus.NOT_FOUND,
+                  test_case=self)
+
+    def test_replayer_0003_negativeAuth_invalidCreds(self):
+        # This test sends invalid credentials to the source cluster to validate that unauthorized access is prevented.
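+        # For each of 10 random credential pairs, three invalid combinations are attempted: a fully random
+        # username and password, the correct username with a random password, and a random username with the
+        # correct password. Every combination should be rejected with HTTP 401 (Unauthorized).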
+        source_cluster: Cluster = pytest.console_env.source_cluster
+        if source_cluster.auth_type != AuthMethod.BASIC_AUTH or source_cluster.auth_details['password'] is None:
+            self.skipTest("Test case is only valid for a basic auth source cluster with username and password")
+
+        alphabet = string.ascii_letters + string.digits
+        for _ in range(10):
+            username = ''.join(secrets.choice(alphabet) for _ in range(8))
+            password = ''.join(secrets.choice(alphabet) for _ in range(8))
+
+            credentials = [
+                (username, password),
+                (source_cluster.auth_details['username'], password),
+                (username, source_cluster.auth_details['password'])
+            ]
+
+            for user, pw in credentials:
+                response = requests.get(source_cluster.endpoint, auth=(user, pw),
+                                        verify=not source_cluster.allow_insecure)
+                self.assertEqual(response.status_code, HTTPStatus.UNAUTHORIZED)
+
+    def test_replayer_0004_negativeAuth_missingCreds(self):
+        # This test uses no credentials at all
+        source_cluster: Cluster = pytest.console_env.source_cluster
+        if source_cluster.auth_type != AuthMethod.BASIC_AUTH:
+            self.skipTest("Test case is only valid for a basic auth source cluster")
+
+        # With an empty authorization header
+        response_with_header = requests.get(source_cluster.endpoint, auth=('', ''),
+                                            verify=not source_cluster.allow_insecure)
+        self.assertEqual(response_with_header.status_code, HTTPStatus.UNAUTHORIZED)
+
+        # Without an authorization header
+        response_no_header = requests.get(source_cluster.endpoint, verify=not source_cluster.allow_insecure)
+        self.assertEqual(response_no_header.status_code, HTTPStatus.UNAUTHORIZED)
+
+    def test_replayer_0005_invalidIncorrectUri(self):
+        # Send an invalid URI
+        source_cluster: Cluster = pytest.console_env.source_cluster
+        invalid_uri = "/invalidURI"
+        execute_api_call(source_cluster, path=invalid_uri, expected_status_code=HTTPStatus.NOT_FOUND, test_case=self)
+
+        # Send an incorrect URI
+        incorrect_uri = "/_cluster/incorrectUri"
+        execute_api_call(source_cluster, path=incorrect_uri, expected_status_code=HTTPStatus.METHOD_NOT_ALLOWED,
+                         test_case=self)
+
+    def test_replayer_0006_OSB(self):
+        source_cluster: Cluster = pytest.console_env.source_cluster
+        target_cluster: Cluster = pytest.console_env.target_cluster
+
+        run_test_benchmarks(cluster=source_cluster)
+        # Confirm documents on source
+        check_doc_counts_match(cluster=source_cluster, expected_index_details=EXPECTED_BENCHMARK_DOCS,
+                               test_case=self)
+        # Confirm documents on target after replay
+        check_doc_counts_match(cluster=target_cluster, expected_index_details=EXPECTED_BENCHMARK_DOCS,
+                               test_case=self)
+
+    def test_replayer_0007_timeBetweenRequestsOnSameConnection(self):
+        # This test verifies that the replayer functions correctly when requests sent on the same
+        # connection through the proxy are separated by a one minute gap
+        source_cluster: Cluster = pytest.console_env.source_cluster
+        target_cluster: Cluster = pytest.console_env.target_cluster
+        seconds_between_requests = 60  # 1 minute
+
+        proxy_single_connection_session = Session()
+        adapter = HTTPAdapter(pool_connections=1, pool_maxsize=1, max_retries=1)
+        proxy_single_connection_session.mount(source_cluster.endpoint, adapter)
+
+        index_name = f"test_replayer_0007_{pytest.unique_id}"
+
+        number_of_docs = 3
+
+        for doc_id_int in range(number_of_docs):
+            doc_id = str(doc_id_int)
+            create_document(cluster=source_cluster, index_name=index_name, doc_id=doc_id,
+                            expected_status_code=HTTPStatus.CREATED, session=proxy_single_connection_session,
+                            test_case=self)
+            if doc_id_int + 1 < number_of_docs:
+                time.sleep(seconds_between_requests)
+
+        try:
+            for doc_id_int in range(number_of_docs):
+                doc_id = str(doc_id_int)
+                check_doc_match(source_cluster=source_cluster, target_cluster=target_cluster,
+                                index_name=index_name, doc_id=doc_id, test_case=self)
+        finally:
+            proxy_single_connection_session.close()
+
+    @unittest.skip("Flaky test needs resolution")
+    def test_replayer_0008_largeRequest(self):
+        source_cluster: Cluster = pytest.console_env.source_cluster
+        target_cluster: Cluster = pytest.console_env.target_cluster
+        index_name = f"test_replayer_0008_{pytest.unique_id}"
+        doc_id = "replayer_0008_doc"
+
+        # Create large document, 99MiB
+        # Default max 100MiB in ES/OS settings (http.max_content_length)
+        large_doc = generate_large_doc(size_mib=99)
+
+        # Measure the time taken by the create_document call
+        # Send large request to proxy and verify response
+        start_time = time.time()
+        create_document(cluster=source_cluster, index_name=index_name, doc_id=doc_id, data=large_doc,
+                        expected_status_code=HTTPStatus.CREATED, test_case=self)
+        end_time = time.time()
+        duration = end_time - start_time
+
+        # Set wait time to double the response time, with a minimum of 5 seconds
+        wait_time_seconds = max(round(duration, 3) * 2, 5)
+
+        # Wait before checking the target so the replayer has time to replay the large request
+        logger.debug(f"Waiting {wait_time_seconds} seconds for"
+                     f" replay of large doc creation")
+
+        time.sleep(wait_time_seconds)
+
+        # Verify document created on source and target
+        check_doc_match(source_cluster=source_cluster, target_cluster=target_cluster, index_name=index_name,
+                        doc_id=doc_id, test_case=self)
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/setupIntegTests.sh b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/setupIntegTests.sh
deleted file mode 100755
index db6f5b5fd..000000000
--- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/setupIntegTests.sh
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/bin/bash
-
-# Example usage: ./setupIntegTests.sh https://github.com/opensearch-project/opensearch-migrations.git main http://opense-clust-test.elb.us-east-1.amazonaws.com:19200 http://opense-clust-test.elb.us-east-1.amazonaws.com:9200 integ_min_1705067071260_127
-
-git_http_url=$1
-branch=$2
-source_endpoint=$3
-proxy_endpoint=$4
-unique_id=$5
-
-mkdir -p /root/integ-tests
-cd /root/integ-tests || exit
-git init
-remote_exists=$(git remote -v | grep origin)
-if [ -z "${remote_exists}" ]; then
-  echo "No remote detected, adding 'origin'"
-  git remote add -f origin "$git_http_url"
-else
-  echo "Existing 'origin' remote detected, updating to $git_http_url"
-  git remote set-url origin "$git_http_url"
-fi
-
-git config core.sparseCheckout true
-# Check file exists and contains sparse-checkout
-if test -f .git/info/sparse-checkout; then
-  sparse_entry=$(cat .git/info/sparse-checkout | grep "/test")
-  if [ -z "${sparse_entry}" ]; then
-    echo "No '/test' entry in '.git/info/sparse-checkout' file detected, will attempt to add"
-    git remote add -f origin "$git_http_url"
-  else
-    echo "Have detected '/test' entry in '.git/info/sparse-checkout' file, no changes needed"
-  fi
-else
-  echo "File '.git/info/sparse-checkout' not found, will attempt to create"
-  echo "/test" >> .git/info/sparse-checkout
-fi
-
-git pull origin "$branch"
-cd test || exit
-pip install pipenv
-
-pipenv install
-echo "Starting 'tests.py'"
-# TODO Add support to dynamically skip certain tests depending on setup. Currently, forcing negative auth tests not to run here as the source/target cluster has no auth
-set -o xtrace
-pipenv run pytest tests.py::E2ETests::test_0001_index tests.py::E2ETests::test_0002_document tests.py::E2ETests::test_0005_invalidIncorrectUri tests.py::E2ETests::test_0006_OSB --proxy_endpoint="${proxy_endpoint}" --source_endpoint="${source_endpoint}" --target_endpoint="${MIGRATION_DOMAIN_ENDPOINT}" --source_auth_type="none" --target_auth_type="none" --source_verify_ssl=False --target_verify_ssl=False --deployment_type="cloud" --unique_id="${unique_id}" --verbose --junitxml="./reports/${unique_id}.xml"
-set +o xtrace
-deactivate
\ No newline at end of file
diff --git a/deployment/cdk/opensearch-service-migration/lib/lambda/msk-public-endpoint-handler.ts b/deployment/cdk/opensearch-service-migration/lib/lambda/msk-public-endpoint-handler.ts
index 0830bfe86..8c6f053b8 100644
--- a/deployment/cdk/opensearch-service-migration/lib/lambda/msk-public-endpoint-handler.ts
+++ b/deployment/cdk/opensearch-service-migration/lib/lambda/msk-public-endpoint-handler.ts
@@ -198,4 +198,4 @@ export const handler = async (event: any, context: Context): Promise => {
         await delay(20000);
     }
     await invokeNextLambda(JSON.stringify(payloadData), context.functionName)
-};
\ No newline at end of file
+};
diff --git a/deployment/cdk/opensearch-service-migration/lib/migration-assistance-stack.ts b/deployment/cdk/opensearch-service-migration/lib/migration-assistance-stack.ts
index e1cf5f7a1..95cdf3422 100644
--- a/deployment/cdk/opensearch-service-migration/lib/migration-assistance-stack.ts
+++ b/deployment/cdk/opensearch-service-migration/lib/migration-assistance-stack.ts
@@ -1,5 +1,13 @@
 import {RemovalPolicy, Stack} from "aws-cdk-lib";
-import {IPeer, IVpc, Peer, Port, SecurityGroup, SubnetFilter, SubnetType} from "aws-cdk-lib/aws-ec2";
+import {
+    IPeer,
+    IVpc,
+    Peer,
+    Port,
+    SecurityGroup,
+    SubnetFilter,
+    SubnetType
+} from "aws-cdk-lib/aws-ec2";
 import {FileSystem} from 'aws-cdk-lib/aws-efs';
 import {Construct} from "constructs";
 import {CfnCluster, CfnConfiguration} from "aws-cdk-lib/aws-msk";
@@ -9,7 +17,7 @@ import {LogGroup, RetentionDays} from "aws-cdk-lib/aws-logs";
 import {NamespaceType} from "aws-cdk-lib/aws-servicediscovery";
 import {StreamingSourceType} from "./streaming-source-type";
 import {Bucket, BucketEncryption} from "aws-cdk-lib/aws-s3";
-import {MigrationSSMParameter, createMigrationStringParameter, parseRemovalPolicy} from "./common-utilities";
+import {createMigrationStringParameter, MigrationSSMParameter, parseRemovalPolicy} from "./common-utilities";
 
 export interface MigrationStackProps extends StackPropsExt {
     readonly vpc: IVpc,
@@ -44,6 +52,23 @@ export class MigrationAssistanceStack extends Stack {
         }
     }
 
+    // This function exists to overcome a limitation of the vpc.selectSubnets() call, which requires the subnet
+    // type to be provided (otherwise an empty list is returned when public subnets are given). To work around
+    // this, the function tries each subnet type in turn until it can select all of the provided subnetIds
+    selectSubnetsFromTypes(vpc: IVpc, subnetIds: string[]): string[] {
+        const subnetsTypeList = [SubnetType.PRIVATE_WITH_EGRESS, SubnetType.PUBLIC, SubnetType.PRIVATE_ISOLATED]
+        for (const subnetType of subnetsTypeList) {
+            const subnets = vpc.selectSubnets({
+                subnetType: subnetType,
+                subnetFilters: [SubnetFilter.byIds(subnetIds)]
+            })
+            if (subnets.subnetIds.length == subnetIds.length) {
+                return subnets.subnetIds
+            }
+        }
+        throw Error(`Unable to find subnet ids: ${subnetIds}
in VPC: ${vpc.vpcId}. Please ensure all subnet ids exist and are of the same subnet type`) + } + validateAndReturnVPCSubnetsForMSK(vpc: IVpc, brokerNodeCount: number, azCount: number, specifiedSubnetIds?: string[]): string[] { if (specifiedSubnetIds) { if (specifiedSubnetIds.length !== 2 && specifiedSubnetIds.length !== 3) { @@ -53,10 +78,7 @@ export class MigrationAssistanceStack extends Stack { throw new Error(`The MSK broker node count (${brokerNodeCount} nodes inferred) must be a multiple of the number of AZs (${specifiedSubnetIds.length} AZs inferred from provided 'mskSubnetIds'). The node count can be set with the 'mskBrokerNodeCount' context option.`) } - const selectSubnets = vpc.selectSubnets({ - subnetFilters: [SubnetFilter.byIds(specifiedSubnetIds)] - }) - return selectSubnets.subnetIds + return this.selectSubnetsFromTypes(vpc, specifiedSubnetIds) } if (azCount !== 2 && azCount !== 3) { throw new Error(`MSK requires subnets for 2 or 3 AZs, but have detected an AZ count of ${azCount} has been provided with 'mskAZCount'`) @@ -244,4 +266,4 @@ export class MigrationAssistanceStack extends Stack { parameter: MigrationSSMParameter.CLOUD_MAP_NAMESPACE_ID }); } -} \ No newline at end of file +} diff --git a/deployment/cdk/opensearch-service-migration/lib/msk-utility-stack.ts b/deployment/cdk/opensearch-service-migration/lib/msk-utility-stack.ts index 5eec0d8c6..d84d58af6 100644 --- a/deployment/cdk/opensearch-service-migration/lib/msk-utility-stack.ts +++ b/deployment/cdk/opensearch-service-migration/lib/msk-utility-stack.ts @@ -104,6 +104,8 @@ export class MSKUtilityStack extends Stack { handle: wcHandle.ref }) waitCondition.node.addDependency(customResource); + // CFN limitation prevents accessing a specific field like "BROKER_ENDPOINTS" in the response data, so this + // will actually be more data than just broker endpoints brokerEndpoints = waitCondition.attrData.toString() } // If public endpoints are not enabled we will launch a simple Lambda custom resource to retrieve the private broker endpoints diff --git a/jenkins/migrationIntegPipelines/rfsBackfillE2EPipeline.groovy b/jenkins/migrationIntegPipelines/rfsBackfillE2EPipeline.groovy index 224852dfc..32d715f51 100644 --- a/jenkins/migrationIntegPipelines/rfsBackfillE2EPipeline.groovy +++ b/jenkins/migrationIntegPipelines/rfsBackfillE2EPipeline.groovy @@ -59,8 +59,5 @@ defaultIntegPipeline( migrationContextId: migrationContextId, gitUrl: gitUrl, gitBranch: gitBranch, - stageId: stageId, - finishStep: { - echo 'Skipping step for RFS' - } + stageId: stageId ) diff --git a/jenkins/sharedLibrary/vars/defaultIntegPipeline.groovy b/jenkins/sharedLibrary/vars/defaultIntegPipeline.groovy index a1c641e11..91773b7c5 100644 --- a/jenkins/sharedLibrary/vars/defaultIntegPipeline.groovy +++ b/jenkins/sharedLibrary/vars/defaultIntegPipeline.groovy @@ -82,7 +82,10 @@ def call(Map config = [:]) { } else { def time = new Date().getTime() def uniqueId = "integ_min_${time}_${currentBuild.number}" - sh "sudo ./awsRunIntegTests.sh --stage ${stageId} --migrations-git-url ${gitUrl} --migrations-git-branch ${gitBranch} --unique-id ${uniqueId}" + def test_dir = "/root/lib/integ_test/integ_test" + def test_result_file = "${test_dir}/reports/${uniqueId}/report.xml" + def command = "pytest --log-file=${test_dir}/reports/${uniqueId}/pytest.log --junitxml=${test_result_file} ${test_dir}/replayer_tests.py --unique_id ${uniqueId} -s" + sh "sudo ./awsRunIntegTests.sh --command ${command} --test-result-file ${test_result_file} --stage ${stageId}" } } } @@ -99,7 
+102,7 @@ def call(Map config = [:]) { if (config.finishStep) { config.finishStep() } else { - sh "sudo ./awsE2ESolutionSetup.sh --stage ${stageId} --run-post-actions" + sh "echo 'Default post step performs no actions'" } } } diff --git a/test/README.md b/test/README.md index f7c523634..1865c29a7 100644 --- a/test/README.md +++ b/test/README.md @@ -78,81 +78,5 @@ Source Context substitutable values - -### Docker E2E Testing -Developers can run a test script which will verify the end-to-end Docker Solution. - -#### Compatibility -* Python >= 3.7 - -#### Pre-requisites - -* Have all containers from Docker solution running. - -To run the test script, users must navigate to this directory, -install the required packages and then run the script: - -``` -cd test -pipenv install -pipenv run pytest tests.py -``` - -#### Notes - -##### Ports Setup -The test script, by default, uses the ports assigned to the containers in this -[docker-compose file](../TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml), so if the Docker solution in -its current setup started with no issues, then the test script will run as is. If for any reason -the user changed the ports in that file, they must also either, provide the following parameters variables: -`proxy_endpoint`, `source_endpoint`, and `target_endpoint` respectively, or update the default value - for them in [conftest.py](conftest.py). - - -#### Script Parameters - -This script accepts various parameters to customize its behavior. Below is a list of available parameters along with their default values and acceptable choices: - -- `--proxy_endpoint`: The endpoint for the proxy endpoint. - - Default: `https://localhost:9200` - -- `--source_endpoint`: The endpoint for the source endpoint. - - Default: `https://localhost:19200` - -- `--target_endpoint`: The endpoint for the target endpoint. - - Default: `https://localhost:29200` - -- `--source_auth_type`: Specifies the authentication type for the source endpoint. - - Default: `basic` - - Choices: `none`, `basic`, `sigv4` - -- `--source_verify_ssl`: Determines whether to verify the SSL certificate for the source endpoint. - - Default: `False` - - Choices: `True`, `False` - -- `--target_auth_type`: Specifies the authentication type for the target endpoint. - - Default: `basic` - - Choices: `none`, `basic`, `sigv4` - -- `--target_verify_ssl`: Determines whether to verify the SSL certificate for the target endpoint. - - Default: `False` - - Choices: `True`, `False` - -- `--source_username`: Username for authentication with the source endpoint. - - Default: `admin` - -- `--source_password`: Password for authentication with the source endpoint. - - Default: `admin` - -- `--target_username`: Username for authentication with the target endpoint. - - Default: `admin` - -- `--target_password`: Password for authentication with the target endpoint. - - Default: `myStrongPassword123!` - - -#### Clean Up -The test script is implemented with a setup and teardown functions that are ran after -each and every test where additions made to the endpoints are deleted, *mostly* cleaning up after themselves, however, -as we log all operations going through the proxy (which is capturing the traffic), those are only being -deleted after the Docker solution is shut down. 
\ No newline at end of file +### Running Integration Tests +Details can be found in the integration testing README [here](../TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/integ_test/README.md) diff --git a/test/awsE2ESolutionSetup.sh b/test/awsE2ESolutionSetup.sh index cdcc24042..8de8ce9d4 100755 --- a/test/awsE2ESolutionSetup.sh +++ b/test/awsE2ESolutionSetup.sh @@ -71,21 +71,6 @@ validate_required_options () { fi } -restore_and_record () { - deploy_stage=$1 - source_lb_endpoint=$(aws cloudformation describe-stacks --stack-name "$SOURCE_INFRA_STACK_NAME" --query "Stacks[0].Outputs[?OutputKey==\`loadbalancerurl\`].OutputValue" --output text) - source_endpoint="http://${source_lb_endpoint}:19200" - kafka_brokers=$(aws ssm get-parameter --name "/migration/$deploy_stage/default/kafkaBrokers" --query 'Parameter.Value' --output text) - console_task_arn=$(aws ecs list-tasks --cluster migration-${deploy_stage}-ecs-cluster --family "migration-${deploy_stage}-migration-console" | jq --raw-output '.taskArns[0]') - - # Print final doc counts and Kafka topic status - unbuffer aws ecs execute-command --cluster "migration-${STAGE}-ecs-cluster" --task "${console_task_arn}" --container "migration-console" --interactive --command "./catIndices.sh --source-endpoint $source_endpoint --source-no-auth --target-no-auth" - unbuffer aws ecs execute-command --cluster "migration-${STAGE}-ecs-cluster" --task "${console_task_arn}" --container "migration-console" --interactive --command "./kafka-tools/kafka/bin/kafka-consumer-groups.sh --bootstrap-server ${kafka_brokers} --timeout 100000 --describe --group logging-group-default --command-config kafka-tools/aws/msk-iam-auth.properties" - - # Turn off Replayer - aws ecs update-service --cluster "migration-${deploy_stage}-ecs-cluster" --service "migration-${deploy_stage}-traffic-replayer-default" --desired-count 0 > /dev/null 2>&1 -} - # One-time required service-linked-role creation for AWS accounts which do not have these roles, will ignore/fail if # any of these roles already exist create_service_linked_roles () { @@ -139,7 +124,6 @@ usage() { echo " --migrations-git-url The Github http url used for building the capture proxy on setups with a dedicated source cluster, default is 'https://github.com/opensearch-project/opensearch-migrations.git'." echo " --migrations-git-branch The Github branch associated with the 'git-url' to pull from, default is 'main'." echo " --stage The stage name to use for naming/grouping of AWS deployment resources, default is 'aws-integ'." - echo " --run-post-actions Flag to enable only running post test actions for cleaning up and recording a test run." 
echo " --create-service-linked-roles Flag to create required service linked roles for the AWS account" echo " --bootstrap-region Flag to CDK bootstrap the region to allow CDK deployments" echo " --skip-capture-proxy Flag to skip setting up the Capture Proxy on source cluster nodes" @@ -151,7 +135,6 @@ usage() { } STAGE='aws-integ' -RUN_POST_ACTIONS=false CREATE_SLR=false BOOTSTRAP_REGION=false SKIP_CAPTURE_PROXY=false @@ -175,10 +158,6 @@ while [[ $# -gt 0 ]]; do BOOTSTRAP_REGION=true shift # past argument ;; - --run-post-actions) - RUN_POST_ACTIONS=true - shift # past argument - ;; --skip-capture-proxy) SKIP_CAPTURE_PROXY=true shift # past argument @@ -248,11 +227,6 @@ SOURCE_INFRA_STACK_NAME="opensearch-infra-stack-ec2-source-$STAGE" SOURCE_GEN_CONTEXT_FILE="$TMP_DIR_PATH/generatedSourceContext.json" MIGRATION_GEN_CONTEXT_FILE="$TMP_DIR_PATH/generatedMigrationContext.json" -if [ "$RUN_POST_ACTIONS" = true ] ; then - restore_and_record "$STAGE" - exit 0 -fi - if [ "$CREATE_SLR" = true ] ; then create_service_linked_roles fi @@ -320,4 +294,4 @@ if [ "$SKIP_CAPTURE_PROXY" = false ] ; then echo "Error: enabling capture proxy on source cluster, exiting." exit 1 fi -fi \ No newline at end of file +fi diff --git a/test/awsRunIntegTests.sh b/test/awsRunIntegTests.sh index 0245df5f4..6c2591352 100755 --- a/test/awsRunIntegTests.sh +++ b/test/awsRunIntegTests.sh @@ -5,42 +5,37 @@ usage() { echo "Script to run integrations tests on AWS Migration Console" echo "" echo "Usage: " - echo " ./awsRunIntegTests.sh [--unique-id] [--migrations-git-url] [--migrations-git-branch] [--stage]" + echo " ./awsRunIntegTests.sh [--command] [--test-result-file] [--stage]" echo "" echo "Options:" - echo " --unique-id Identifier for labeling integ test artifacts, e.g. 'full_run_123'." - echo " --migrations-git-url The Github http url used for pulling the integration tests onto the migration console, default is 'https://github.com/opensearch-project/opensearch-migrations.git'." - echo " --migrations-git-branch The Github branch associated with the 'git-url' to pull from, default is 'main'." - echo " --stage The stage used for CDK deployment, default is 'aws-integ'." 
+ echo " --command Provide test command to execute on the Migration Console" + echo " --test-result-file The test result file to check for success or failure" + echo " --stage Deployment stage name" echo "" exit 1 } epoch_seconds=$(date +%s) -UNIQUE_ID="test_${epoch_seconds}_1" -STAGE='aws-integ' -MIGRATIONS_GIT_URL='https://github.com/opensearch-project/opensearch-migrations.git' -MIGRATIONS_GIT_BRANCH='main' +unique_id="test_${epoch_seconds}_1" +test_dir="/root/lib/integ_test/integ_test" +STAGE="aws-integ" +TEST_RESULT_FILE="${test_dir}/reports/${unique_id}/report.xml" +COMMAND="pytest --log-file=${test_dir}/reports/${unique_id}/pytest.log --junitxml=${TEST_RESULT_FILE} ${test_dir}/replayer_tests.py --unique_id ${unique_id} -s" while [[ $# -gt 0 ]]; do case $1 in - --unique-id) - UNIQUE_ID="$2" + --command) + COMMAND="$2" shift # past argument shift # past value ;; - --stage) - STAGE="$2" - shift # past argument - shift # past value - ;; - --migrations-git-url) - MIGRATIONS_GIT_URL="$2" + --test-result-file) + TEST_RESULT_FILE="$2" shift # past argument shift # past value ;; - --migrations-git-branch) - MIGRATIONS_GIT_BRANCH="$2" + --stage) + STAGE="$2" shift # past argument shift # past value ;; @@ -57,39 +52,14 @@ while [[ $# -gt 0 ]]; do esac done -SOURCE_INFRA_STACK_NAME="opensearch-infra-stack-ec2-source-$STAGE" - task_arn=$(aws ecs list-tasks --cluster migration-${STAGE}-ecs-cluster --family "migration-${STAGE}-migration-console" | jq --raw-output '.taskArns[0]') -# Delete and re-create topic -kafka_brokers=$(aws ssm get-parameter --name "/migration/${STAGE}/default/kafkaBrokers" --query 'Parameter.Value' --output text) -unbuffer aws ecs execute-command --cluster "migration-${STAGE}-ecs-cluster" --task "${task_arn}" --container "migration-console" --interactive --command "./kafka-tools/kafka/bin/kafka-topics.sh --bootstrap-server ${kafka_brokers} --delete --topic logging-traffic-topic --command-config kafka-tools/aws/msk-iam-auth.properties" -echo "Done deleting 'logging-traffic-topic'" -unbuffer aws ecs execute-command --cluster "migration-${STAGE}-ecs-cluster" --task "${task_arn}" --container "migration-console" --interactive --command "./kafka-tools/kafka/bin/kafka-topics.sh --bootstrap-server ${kafka_brokers} --create --topic logging-traffic-topic --command-config kafka-tools/aws/msk-iam-auth.properties" -echo "Done creating 'logging-traffic-topic'" - -# Remove all non-system indices -source_lb_endpoint=$(aws cloudformation describe-stacks --stack-name "$SOURCE_INFRA_STACK_NAME" --query "Stacks[0].Outputs[?OutputKey==\`loadbalancerurl\`].OutputValue" --output text) -source_endpoint="http://${source_lb_endpoint}:19200" -proxy_endpoint="http://${source_lb_endpoint}:9200" -target_endpoint=$(aws ssm get-parameter --name "/migration/${STAGE}/default/osClusterEndpoint" --query 'Parameter.Value' --output text) -echo "Clearing non-system source indices" -unbuffer aws ecs execute-command --cluster "migration-${STAGE}-ecs-cluster" --task "${task_arn}" --container "migration-console" --interactive --command "curl -XDELETE '${source_endpoint}/*,-.*,-searchguard*,-sg7*?ignore_unavailable=true'" -echo "Clearing non-system target indices" -unbuffer aws ecs execute-command --cluster "migration-${STAGE}-ecs-cluster" --task "${task_arn}" --container "migration-console" --interactive --command "curl -XDELETE '${target_endpoint}/*,-.*,.migrations_working_state?ignore_unavailable=true'" -echo "Print initial source and target indices after clearing indices: " -unbuffer aws ecs execute-command 
--cluster "migration-${STAGE}-ecs-cluster" --task "${task_arn}" --container "migration-console" --interactive --command "./catIndices.sh --source-endpoint ${source_endpoint} --source-no-auth --target-no-auth" - -# Spin up Replayer container and wait for service to be stable -aws ecs update-service --cluster "migration-${STAGE}-ecs-cluster" --service "migration-${STAGE}-traffic-replayer-default" --desired-count 1 > /dev/null 2>&1 -echo "Waiting for Replayer to be stable..." -aws ecs wait services-stable --cluster "migration-${STAGE}-ecs-cluster" --service "migration-${STAGE}-traffic-replayer-default" # Kickoff integration tests set -o xtrace -unbuffer aws ecs execute-command --cluster "migration-${STAGE}-ecs-cluster" --task "${task_arn}" --container "migration-console" --interactive --command "./setupIntegTests.sh ${MIGRATIONS_GIT_URL} ${MIGRATIONS_GIT_BRANCH} ${source_endpoint} ${proxy_endpoint} ${UNIQUE_ID}" +unbuffer aws ecs execute-command --cluster "migration-${STAGE}-ecs-cluster" --task "${task_arn}" --container "migration-console" --interactive --command "${COMMAND}" +test_output=$(unbuffer aws ecs execute-command --cluster "migration-${STAGE}-ecs-cluster" --task "${task_arn}" --container "migration-console" --interactive --command "awk '/failures/ && /errors/' ${TEST_RESULT_FILE}") set +o xtrace -test_output=$(unbuffer aws ecs execute-command --cluster "migration-${STAGE}-ecs-cluster" --task "${task_arn}" --container "migration-console" --interactive --command "awk '/failures/ && /errors/' /root/integ-tests/test/reports/${UNIQUE_ID}.xml") -echo "Fetch integ test summary: " +echo "Integration test generated summary: " echo "$test_output" failure_output=$(echo "$test_output" | grep -o "failures=\"0\"") if [ -z "$failure_output" ]; then @@ -101,4 +71,4 @@ if [ -z "$errors_output" ]; then echo "Errored test detected in output, failing step" exit 1 fi -exit 0 \ No newline at end of file +exit 0 diff --git a/test/conftest.py b/test/conftest.py deleted file mode 100644 index 03b96d86f..000000000 --- a/test/conftest.py +++ /dev/null @@ -1,95 +0,0 @@ -# conftest.py -import pytest -import uuid -import logging - - -def pytest_configure(config): - # Configure logging - logging.basicConfig(level=logging.DEBUG, - format='%(asctime)s - %(levelname)s - %(message)s', - datefmt='%Y-%m-%d %H:%M:%S') - - # This line ensures that log messages are displayed on the console during test runs - logging.getLogger().setLevel(logging.DEBUG) - - -def pytest_addoption(parser): - parser.addoption("--proxy_endpoint", action="store", default="https://localhost:9200") - parser.addoption("--source_endpoint", action="store", default="https://localhost:19200") - parser.addoption("--target_endpoint", action="store", default="https://localhost:29200") - parser.addoption("--source_auth_type", action="store", default="basic", choices=["none", "basic", "sigv4"]) - parser.addoption("--source_verify_ssl", action="store", default="False", choices=["True", "False"]) - parser.addoption("--target_auth_type", action="store", default="basic", choices=["none", "basic", "sigv4"]) - parser.addoption("--target_verify_ssl", action="store", default="False", choices=["True", "False"]) - parser.addoption("--deployment_type", action="store", default="local", choices=["local", "cloud"]) - parser.addoption("--source_username", action="store", default="admin") - parser.addoption("--source_password", action="store", default="admin") - parser.addoption("--target_username", action="store", default="admin") - parser.addoption("--target_password", 
action="store", default="myStrongPassword123!") - parser.addoption("--unique_id", action="store", default=uuid.uuid4().hex) - - -@pytest.fixture -def proxy_endpoint(pytestconfig): - return pytestconfig.getoption("proxy_endpoint") - - -@pytest.fixture -def source_endpoint(pytestconfig): - return pytestconfig.getoption("source_endpoint") - - -@pytest.fixture -def target_endpoint(pytestconfig): - return pytestconfig.getoption("target_endpoint") - - -@pytest.fixture -def source_auth_type(pytestconfig): - return pytestconfig.getoption("source_auth_type") - - -@pytest.fixture -def source_username(pytestconfig): - return pytestconfig.getoption("source_username") - - -@pytest.fixture -def source_password(pytestconfig): - return pytestconfig.getoption("source_password") - - -@pytest.fixture -def target_auth_type(pytestconfig): - return pytestconfig.getoption("target_auth_type") - - -@pytest.fixture -def target_username(pytestconfig): - return pytestconfig.getoption("target_username") - - -@pytest.fixture -def target_password(pytestconfig): - return pytestconfig.getoption("target_password") - - -@pytest.fixture -def target_verify_ssl(pytestconfig): - return pytestconfig.getoption("target_verify_ssl") - - -@pytest.fixture -def source_verify_ssl(pytestconfig): - return pytestconfig.getoption("source_verify_ssl") - - -@pytest.fixture -def deployment_type(pytestconfig): - return pytestconfig.getoption("deployment_type") - - -@pytest.fixture -def unique_id(pytestconfig): - return pytestconfig.getoption("unique_id") diff --git a/test/operations.py b/test/operations.py deleted file mode 100644 index c72e62989..000000000 --- a/test/operations.py +++ /dev/null @@ -1,102 +0,0 @@ -import datetime -import random -import string -import json -from requests import Session -import shlex -import subprocess -import logging - -logger = logging.getLogger(__name__) - - -def create_index(endpoint: str, index_name: str, auth, verify_ssl: bool = False, session: Session = Session()): - response = session.put(f'{endpoint}/{index_name}', auth=auth, verify=verify_ssl) - - return response - - -def check_index(endpoint: str, index_name: str, auth, verify_ssl: bool = False, session: Session = Session()): - response = session.get(f'{endpoint}/{index_name}', auth=auth, verify=verify_ssl) - - return response - - -def delete_index(endpoint: str, index_name: str, auth, verify_ssl: bool = False, session: Session = Session()): - response = session.delete(f'{endpoint}/{index_name}', auth=auth, verify=verify_ssl) - - return response - - -def delete_document(endpoint: str, index_name: str, doc_id: str, auth, - verify_ssl: bool = False, session: Session = Session()): - response = session.delete(f'{endpoint}/{index_name}/_doc/{doc_id}', auth=auth, verify=verify_ssl) - - return response - - -def generate_large_doc(size_mib): - # Calculate number of characters needed (1 char = 1 byte) - num_chars = size_mib * 1024 * 1024 - - # Generate random string of the desired length - large_string = ''.join(random.choices(string.ascii_letters + string.digits, k=num_chars)) - - return { - "timestamp": datetime.datetime.now().isoformat(), - "large_field": large_string - } - - -def create_document(endpoint: str, index_name: str, doc_id: str, auth, - verify_ssl: bool = False, doc_body: dict = None, session: Session = Session()): - if doc_body is None: - document = { - 'title': 'Test Document', - 'content': 'This is a sample document for testing OpenSearch.' 
-        }
-    else:
-        document = doc_body
-
-    url = f'{endpoint}/{index_name}/_doc/{doc_id}'
-    headers = {'Content-Type': 'application/json'}
-    response = session.put(url, headers=headers, data=json.dumps(document), auth=auth, verify=verify_ssl)
-
-    return response
-
-
-def get_document(endpoint: str, index_name: str, doc_id: str, auth,
-                 verify_ssl: bool = False, session: Session = Session()):
-    url = f'{endpoint}/{index_name}/_doc/{doc_id}'
-    headers = {'Content-Type': 'application/json'}
-    response = session.get(url, headers=headers, auth=auth, verify=verify_ssl)
-
-    return response
-
-
-class ContainerNotFoundError(Exception):
-    def __init__(self, container_filter):
-        super().__init__(f"No containers matching the filter '{container_filter}' were found.")
-
-
-def run_migration_console_command(deployment_type: str, command: str):
-    if deployment_type == "local":
-        filter_criteria = 'name=\"migration-console\"'
-        cmd = f'docker ps --format=\"{{{{.ID}}}}\" --filter {filter_criteria}'
-
-        get_container_process = subprocess.run(shlex.split(cmd), stdout=subprocess.PIPE, text=True)
-        container_id = get_container_process.stdout.strip().replace('"', '')
-
-        if container_id:
-            cmd_exec = f"docker exec {container_id} bash -c '{command}'"
-            logger.warning(f"Running command: {cmd_exec} on container {container_id}")
-            process = subprocess.run(cmd_exec, shell=True, capture_output=True, text=True)
-            return process.returncode, process.stdout, process.stderr
-        else:
-            raise ContainerNotFoundError(filter_criteria)
-
-    else:
-        # In the cloud deployment case, the e2e tests run directly on the migration console, so this is
-        # just a local call. `text=True` keeps the return type (str, not bytes) consistent with the
-        # local branch above.
-        logger.warning(f"Running command: {command} locally")
-        process = subprocess.run(command, shell=True, capture_output=True, text=True)
-        return process.returncode, process.stdout, process.stderr
diff --git a/test/setup.py b/test/setup.py
deleted file mode 100644
index 76c9ad109..000000000
--- a/test/setup.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import setuptools
-
-setuptools.setup(
-    name='integ_test',
-    version='0.1',
-    description='End-to-end integration tests',
-    author='OpenSearch Migrations',
-    packages=setuptools.find_packages(),
-    python_requires=">=3.10"
-)
diff --git a/test/tests.py b/test/tests.py
deleted file mode 100644
index b18f7ef9b..000000000
--- a/test/tests.py
+++ /dev/null
@@ -1,436 +0,0 @@
-import boto3
-import functools
-import json
-import logging
-import pytest
-import requests
-import secrets
-import string
-import time
-import unittest
-from http import HTTPStatus
-from requests import Session
-from requests.adapters import HTTPAdapter
-from requests.exceptions import ConnectionError, SSLError
-from requests_aws4auth import AWS4Auth
-from typing import Tuple, Callable, List, Dict
-
-from operations import create_index, check_index, create_document, \
-    delete_document, delete_index, get_document, generate_large_doc, \
-    run_migration_console_command
-
-logger = logging.getLogger(__name__)
-
-
-def get_indices(endpoint, auth, verify):
-    response = requests.get(f'{endpoint}/_cat/indices', auth=auth, verify=verify)
-    indices = []
-    response_lines = response.text.strip().split('\n')
-    for line in response_lines:
-        parts = line.split()
-        index_name = parts[2]
-        indices.append(index_name)
-    return indices
-
-
-def get_doc_count(endpoint, index, auth, verify):
-    response = requests.get(f'{endpoint}/{index}/_count', auth=auth, verify=verify)
-    count = json.loads(response.text)['count']
-    return count
-
-
-def assert_metrics_present(*wrapper_args, **wrapper_kwargs):
-    def decorator(test_func):
-        @functools.wraps(test_func)
-        def wrapper(self, *args, **kwargs):
-            # Run the original test function. `test_passed` starts as False so that an
-            # unexpected (non-assertion) error also skips the metrics check in `finally`.
-            test_passed = False
-            try:
-                test_func(self, *args, **kwargs)
-                test_passed = True
-            finally:
-                if test_passed:
-                    # Only look for metrics if the test passed
-                    self.assert_metrics(*wrapper_args, **wrapper_kwargs)
-        return wrapper
-    return decorator
-
-
-# Retries the given request function up to "max_attempts" times, waiting "delay" seconds between
-# attempts, as long as the response's status code differs from the expected one. The arguments are
-# the request function itself, the positional arguments to call it with, and the expected status code.
-def retry_request(request: Callable, args: Tuple = (), max_attempts: int = 15, delay: float = 1.5,
-                  expected_status_code: HTTPStatus = None):
-    for attempt in range(1, max_attempts + 1):
-        try:
-            result = request(*args)
-            if result.status_code == expected_status_code:
-                return result
-            else:
-                logger.warning(f"Status code returned: {result.status_code} did not"
-                               f" match the expected status code: {expected_status_code}."
-                               f" Trying again in {delay} seconds.")
-                time.sleep(delay)
-        except (ConnectionError, SSLError) as e:
-            logger.error(f"Received exception: {e}. Unable to connect to server. Please check that all"
-                         f" containers are up and ports are set up properly.")
-            logger.warning(f"Trying again in {delay} seconds.")
-            time.sleep(delay)
-            continue
-    logger.error(f"Couldn't get the expected status code: {expected_status_code} while making the request:"
-                 f" {request.__name__} using the following arguments: {args}.")
-    raise Exception(f"All {max_attempts} retry attempts failed. Please check the logs for more information.")
-
-
-class E2ETests(unittest.TestCase):
-
-    @pytest.fixture(autouse=True)
-    def init_fixtures(self, proxy_endpoint, source_endpoint, target_endpoint, source_auth_type, source_username,
-                      source_password, target_auth_type, target_username, target_password, target_verify_ssl,
-                      source_verify_ssl, deployment_type, unique_id):
-        self.proxy_endpoint = proxy_endpoint
-        self.source_endpoint = source_endpoint
-        self.target_endpoint = target_endpoint
-        self.source_auth_type = source_auth_type
-        self.source_auth = self.setup_authentication(source_auth_type, source_username, source_password)
-        self.source_username = source_username
-        self.source_password = source_password
-        self.target_auth_type = target_auth_type
-        self.target_auth = self.setup_authentication(target_auth_type, target_username, target_password)
-        self.target_username = target_username
-        self.target_password = target_password
-        self.source_verify_ssl = source_verify_ssl.lower() == 'true'
-        self.target_verify_ssl = target_verify_ssl.lower() == 'true'
-        self.deployment_type = deployment_type
-        self.unique_id = unique_id
-
-    def setup_authentication(self, auth_type, username, password):
-        if auth_type == "basic":
-            return (username, password)
-        elif auth_type == "sigv4":
-            session = boto3.Session()
-            credentials = session.get_credentials()
-            aws_auth = AWS4Auth(credentials.access_key, credentials.secret_key, session.region_name, 'es',
-                                session_token=credentials.token)
-            return aws_auth
-        return None
-
-    def does_index_match_ignored_index(self, index_name: str):
-        for prefix in self.index_prefix_ignore_list:
-            if index_name.startswith(prefix):
-                return True
-        return False
-
-    def assert_source_target_doc_match(self, index_name, doc_id, doc_body: dict = None):
-        source_response = get_document(self.source_endpoint, index_name, doc_id, self.source_auth,
-                                       self.source_verify_ssl)
-        self.assertEqual(source_response.status_code, HTTPStatus.OK)
-
-        target_response = retry_request(get_document, args=(self.target_endpoint, index_name, doc_id,
-                                                            self.target_auth, self.target_verify_ssl),
-                                        expected_status_code=HTTPStatus.OK)
-        self.assertEqual(target_response.status_code, HTTPStatus.OK)
-
-        # Compare the document's content on both endpoints, asserting that they match.
-        source_document = source_response.json()
-        source_content = source_document['_source']
-        target_document = target_response.json()
-        target_content = target_document['_source']
-        self.assertEqual(source_content, target_content)
-        if doc_body is not None:
-            self.assertEqual(source_content, doc_body)
-
-    def set_common_values(self):
-        self.index_prefix_ignore_list = ["test_", ".", "searchguard", "sg7", "security-auditlog"]
-
-    def setUp(self):
-        self.set_common_values()
-
-    # Note that the names of metrics are a bit different in a local vs cloud deployment.
-    # The transformation is somewhat hardcoded here--the user should put in the local name, and if it's
-    # a cloud deployment, everything after the first `_` will be discarded. This should generally cause
-    # things to match, but it's possible there are edge cases that it doesn't account for.
-    # Note as well that currently the only way of assuming data is correlated with a given test is via
-    # the lookback time. Soon, we should implement a way to add a specific ID to metrics from a given run
-    # and check for the presence of that ID.
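# As a concrete illustration of the transformation described above, using a metric name from the
# expected-metrics dict below and the same `split('_', 1)[0]` logic as `assert_metrics`; this
# snippet is only a sketch, not part of the test code:
local_metric_name = "kafkaCommitCount_total"
cloud_metric_name = local_metric_name.split('_', 1)[0]  # -> "kafkaCommitCount"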
-    def assert_metric_has_data(self, component: str, metric: str, lookback_minutes: int):
-        command = f"console --json metrics get-data {component} {metric} --lookback {lookback_minutes}"
-        returncode, stdout, stderr = run_migration_console_command(
-            self.deployment_type,
-            command
-        )
-        self.assertEqual(returncode, 0, f"Return code from `{command}` was non-zero. Stderr output: {stderr}")
-        data = json.loads(stdout)
-        self.assertNotEqual(
-            len(data), 0,
-            f"Metric {metric} for component {component} does not exist or does "
-            f"not have data within the last {lookback_minutes} minutes"
-        )
-
-    def assert_metrics(self, expected_metrics: Dict[str, List[str]], lookback_minutes=2, wait_before_check_seconds=60):
-        """
-        This is the method invoked by the `@assert_metrics_present` decorator.
-        params:
-            expected_metrics: a dictionary of component->[metrics], for each metric that should be verified.
-            lookback_minutes: the number of minutes into the past to query for metrics
-            wait_before_check_seconds: the time in seconds to delay before checking for the presence of metrics
-        """
-        logger.debug(f"Waiting {wait_before_check_seconds} seconds before checking for metrics.")
-        time.sleep(wait_before_check_seconds)
-        for component, expected_comp_metrics in expected_metrics.items():
-            if component == "captureProxy" and self.deployment_type == "cloud":
-                # We currently do not emit captureProxy metrics from a non-standalone proxy, which is the scenario
-                # tested in our e2e tests. Therefore, we don't want to assert metrics exist in this situation. We
-                # should remove this clause as soon as we start testing the standalone proxy scenario.
-                logger.warning("Skipping metric verification for captureProxy metrics in a cloud deployment.")
-                continue
-            for expected_metric in expected_comp_metrics:
-                if self.deployment_type == 'cloud':
-                    expected_metric = expected_metric.split('_', 1)[0]
-                self.assert_metric_has_data(component, expected_metric, lookback_minutes)
-
-    @assert_metrics_present({
-        'captureProxy': ['kafkaCommitCount_total'],
-        'replayer': ['kafkaCommitCount_total']
-    })
-    def test_0001_index(self):
-        # This test verifies that an index created (then deleted) on the source cluster via the proxy
-        # is also created (then deleted) on the target cluster: the proxy captures the traffic, the
-        # traffic reaches the source cluster, and the replayer replays it to the target cluster.
-        index_name = f"test_0001_{self.unique_id}"
-
-        # Create an index, then assert that it exists on both clusters.
-        proxy_response = retry_request(create_index, args=(self.proxy_endpoint, index_name, self.source_auth,
-                                                           self.source_verify_ssl),
-                                       expected_status_code=HTTPStatus.OK)
-        self.assertEqual(proxy_response.status_code, HTTPStatus.OK)
-
-        target_response = retry_request(check_index, args=(self.target_endpoint, index_name, self.target_auth,
-                                                           self.target_verify_ssl),
-                                        expected_status_code=HTTPStatus.OK)
-        self.assertEqual(target_response.status_code, HTTPStatus.OK)
-        source_response = retry_request(check_index, args=(self.source_endpoint, index_name, self.source_auth,
-                                                           self.source_verify_ssl),
-                                        expected_status_code=HTTPStatus.OK)
-        self.assertEqual(source_response.status_code, HTTPStatus.OK)
-
-        proxy_response = retry_request(delete_index, args=(self.proxy_endpoint, index_name, self.source_auth,
-                                                           self.source_verify_ssl),
-                                       expected_status_code=HTTPStatus.OK)
-        self.assertEqual(proxy_response.status_code, HTTPStatus.OK)
-
-        target_response = retry_request(check_index, args=(self.target_endpoint, index_name, self.target_auth,
-                                                           self.target_verify_ssl),
-                                        expected_status_code=HTTPStatus.NOT_FOUND)
-        self.assertEqual(target_response.status_code, HTTPStatus.NOT_FOUND)
-        source_response = retry_request(check_index, args=(self.source_endpoint, index_name, self.source_auth,
-                                                           self.source_verify_ssl),
-                                        expected_status_code=HTTPStatus.NOT_FOUND)
-        self.assertEqual(source_response.status_code, HTTPStatus.NOT_FOUND)
-
-    def test_0002_document(self):
-        # This test verifies that a document created (then deleted) on the source cluster via the proxy
-        # is also created (then deleted) on the target cluster: the proxy captures the traffic, the
-        # traffic reaches the source cluster, and the replayer replays it to the target cluster.
-        index_name = f"test_0002_{self.unique_id}"
-        doc_id = "7"
-
-        # Create an index, then assert that it exists on both clusters.
-        proxy_response = retry_request(create_index, args=(self.proxy_endpoint, index_name, self.source_auth,
-                                                           self.source_verify_ssl),
-                                       expected_status_code=HTTPStatus.OK)
-        self.assertEqual(proxy_response.status_code, HTTPStatus.OK)
-
-        target_response = retry_request(check_index, args=(self.target_endpoint, index_name, self.target_auth,
-                                                           self.target_verify_ssl),
-                                        expected_status_code=HTTPStatus.OK)
-        self.assertEqual(target_response.status_code, HTTPStatus.OK)
-        source_response = retry_request(check_index, args=(self.source_endpoint, index_name, self.source_auth,
-                                                           self.source_verify_ssl),
-                                        expected_status_code=HTTPStatus.OK)
-        self.assertEqual(source_response.status_code, HTTPStatus.OK)
-
-        # Create a document, then assert that it exists on both clusters.
-        proxy_response = create_document(self.proxy_endpoint, index_name, doc_id, self.source_auth,
-                                         self.source_verify_ssl)
-        self.assertEqual(proxy_response.status_code, HTTPStatus.CREATED)
-
-        self.assert_source_target_doc_match(index_name, doc_id)
-
-        # Delete the document, then assert that it is gone from both clusters.
-        proxy_response = delete_document(self.proxy_endpoint, index_name, doc_id, self.source_auth,
-                                         self.source_verify_ssl)
-        self.assertEqual(proxy_response.status_code, HTTPStatus.OK)
-
-        target_response = retry_request(get_document, args=(self.target_endpoint, index_name, doc_id,
-                                                            self.target_auth, self.target_verify_ssl),
-                                        expected_status_code=HTTPStatus.NOT_FOUND)
-        self.assertEqual(target_response.status_code, HTTPStatus.NOT_FOUND)
-        source_response = retry_request(get_document, args=(self.source_endpoint, index_name, doc_id,
-                                                            self.source_auth, self.source_verify_ssl),
-                                        expected_status_code=HTTPStatus.NOT_FOUND)
-        self.assertEqual(source_response.status_code, HTTPStatus.NOT_FOUND)
-
-        # Delete the index, then assert that it is gone from both clusters.
-        proxy_response = delete_index(self.proxy_endpoint, index_name, self.source_auth, self.source_verify_ssl)
-        self.assertEqual(proxy_response.status_code, HTTPStatus.OK)
-
-        target_response = retry_request(check_index, args=(self.target_endpoint, index_name, self.target_auth,
-                                                           self.target_verify_ssl),
-                                        expected_status_code=HTTPStatus.NOT_FOUND)
-        self.assertEqual(target_response.status_code, HTTPStatus.NOT_FOUND)
-        source_response = retry_request(check_index, args=(self.source_endpoint, index_name, self.source_auth,
-                                                           self.source_verify_ssl),
-                                        expected_status_code=HTTPStatus.NOT_FOUND)
-        self.assertEqual(source_response.status_code, HTTPStatus.NOT_FOUND)
-
-    def test_0003_negativeAuth_invalidCreds(self):
-        # This test sends invalid credentials to the clusters to validate that unauthorized access is prevented.
-        alphabet = string.ascii_letters + string.digits
-        for _ in range(10):
-            username = ''.join(secrets.choice(alphabet) for _ in range(8))
-            password = ''.join(secrets.choice(alphabet) for _ in range(8))
-
-            credentials = [
-                (username, password),
-                (self.source_username, password),
-                (username, self.source_password)
-            ]
-
-            for user, pw in credentials:
-                response = requests.get(self.proxy_endpoint, auth=(user, pw), verify=self.source_verify_ssl)
-                self.assertEqual(response.status_code, HTTPStatus.UNAUTHORIZED)
-
-    def test_0004_negativeAuth_missingCreds(self):
-        # This test uses no credentials at all.
-        # With an empty authorization header:
-        response = requests.get(self.proxy_endpoint, auth=('', ''), verify=self.source_verify_ssl)
-        self.assertEqual(response.status_code, HTTPStatus.UNAUTHORIZED)
-
-        # Without an authorization header:
-        response = requests.get(self.proxy_endpoint, verify=self.source_verify_ssl)
-        self.assertEqual(response.status_code, HTTPStatus.UNAUTHORIZED)
-
-    def test_0005_invalidIncorrectUri(self):
-        # Send an invalid URI.
-        invalidUri = "/invalidURI"
-        response = requests.get(f'{self.proxy_endpoint}{invalidUri}', auth=self.source_auth,
-                                verify=self.source_verify_ssl)
-        self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
-
-        # Send an incorrect URI.
-        incorrectUri = "/_cluster/incorrectUri"
-        response = requests.get(f'{self.proxy_endpoint}{incorrectUri}', auth=self.source_auth,
-                                verify=self.source_verify_ssl)
-        self.assertEqual(response.status_code, HTTPStatus.METHOD_NOT_ALLOWED)
-
-    def test_0006_OSB(self):
-        cmd = "/root/runTestBenchmarks.sh"
-
-        if self.deployment_type == "cloud":
-            if self.source_auth_type == "none":
-                auth_string = " --no-auth"
-            elif self.source_auth_type == "basic":
-                auth_string = f" --auth-user {self.source_username} --auth-pass {self.source_password}"
-            else:
-                auth_string = ""
-
-            cmd += f" --endpoint {self.proxy_endpoint} {auth_string}"
-            sleep_time = 360
-        else:
-            sleep_time = 5
-
-        returncode, _, stderr = run_migration_console_command(self.deployment_type, cmd)
-        self.assertEqual(returncode, 0, f"Running command {cmd} failed with stderr output:\n{stderr}")
-        time.sleep(sleep_time)
-
-        source_indices = get_indices(self.source_endpoint, self.source_auth, self.source_verify_ssl)
-        valid_source_indices = set(index for index in source_indices
-                                   if not self.does_index_match_ignored_index(index))
-        target_indices = get_indices(self.target_endpoint, self.target_auth, self.target_verify_ssl)
-        valid_target_indices = set(index for index in target_indices
-                                   if not self.does_index_match_ignored_index(index))
-
-        self.assertTrue(valid_source_indices, "No valid indices found on source after running OpenSearch Benchmark")
-        self.assertEqual(valid_source_indices, valid_target_indices,
-                         f"Valid indices for source and target are not equal - Source = {valid_source_indices}, "
-                         f"Target = {valid_target_indices}")
-
-        for index in valid_source_indices:
-            source_count = get_doc_count(self.source_endpoint, index, self.source_auth, self.source_verify_ssl)
-            target_count = get_doc_count(self.target_endpoint, index, self.target_auth, self.target_verify_ssl)
-            self.assertEqual(source_count, target_count, f'{index}: doc counts do not match - '
-                                                         f'Source = {source_count}, Target = {target_count}')
-
-    def test_0007_timeBetweenRequestsOnSameConnection(self):
-        # This test verifies that the replayer functions correctly when requests on the same
-        # proxy connection are a minute apart.
-        seconds_between_requests = 60  # 1 minute
-
-        proxy_single_connection_session = Session()
-        adapter = HTTPAdapter(pool_connections=1, pool_maxsize=1, max_retries=1)
-        proxy_single_connection_session.mount(self.proxy_endpoint, adapter)
-
-        index_name = f"test_0007_{self.unique_id}"
-
-        number_of_docs = 3
-
-        for doc_id_int in range(number_of_docs):
-            doc_id = str(doc_id_int)
-            proxy_response = create_document(self.proxy_endpoint, index_name, doc_id, self.source_auth,
-                                             self.source_verify_ssl, session=proxy_single_connection_session)
-            self.assertEqual(proxy_response.status_code, HTTPStatus.CREATED)
-
-            if doc_id_int + 1 < number_of_docs:
-                time.sleep(seconds_between_requests)
-
-        try:
-            for doc_id_int in range(number_of_docs):
-                doc_id = str(doc_id_int)
-                self.assert_source_target_doc_match(index_name, doc_id)
-        finally:
-            proxy_single_connection_session.close()
-
-    @unittest.skip
-    def test_0008_largeRequest(self):
-        index_name = f"test_0008_{self.unique_id}"
-        doc_id = "1"
-
-        # Create a large document, 99 MiB. The default maximum in ES/OS settings
-        # (http.max_content_length) is 100 MiB.
-        large_doc = generate_large_doc(size_mib=99)
-
-        # Measure the time taken by the create_document call:
-        # send the large request to the proxy and verify the response.
-        start_time = time.time()
-        proxy_response = create_document(self.proxy_endpoint, index_name, doc_id, self.source_auth,
-                                         self.source_verify_ssl, doc_body=large_doc)
-        end_time = time.time()
-        duration = end_time - start_time
-
-        # Set the wait time to double the response time, capped at 5 seconds
-        wait_time_seconds = min(round(duration, 3) * 2, 5)
-
-        self.assertEqual(proxy_response.status_code, HTTPStatus.CREATED)
-
-        # Wait for the replay of the large doc creation
-        logger.debug(f"Waiting {wait_time_seconds} seconds for replay of large doc creation")
-        time.sleep(wait_time_seconds)
-
-        # Verify the document was created on both source and target
-        self.assert_source_target_doc_match(index_name, doc_id, doc_body=large_doc)
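
For reference, test_0008_largeRequest above depends on generate_large_doc from test/operations.py, whose body falls outside the hunks shown here. A minimal sketch of what such a helper might look like, assuming it simply pads one field of a JSON document out to the requested size (the field names and padding strategy are illustrative assumptions, not the repository's actual implementation):

    def generate_large_doc(size_mib: int) -> dict:
        # Pad a single field so the serialized document is roughly `size_mib` MiB.
        padding = 'x' * (size_mib * 1024 * 1024)
        return {
            'title': 'Large Test Document',
            'content': padding
        }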