diff --git a/.gitignore b/.gitignore index 54f5cbbf..e13b9a03 100644 --- a/.gitignore +++ b/.gitignore @@ -52,3 +52,7 @@ gradle-app.setting # .env files server/docker/.env /server/data/ + +# manual test files +server/src/test/resources/test_output/ + diff --git a/buildSrc/build.gradle.kts b/buildSrc/build.gradle.kts index 585e73e3..d9fff832 100644 --- a/buildSrc/build.gradle.kts +++ b/buildSrc/build.gradle.kts @@ -1,5 +1,5 @@ /* - * Copyright (C) 2023-2024 Hedera Hashgraph, LLC + * Copyright (C) 2024 Hedera Hashgraph, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,7 +20,7 @@ repositories { gradlePluginPortal() } dependencies { implementation("com.adarshr:gradle-test-logger-plugin:4.0.0") - implementation("com.autonomousapps:dependency-analysis-gradle-plugin:1.31.0") + implementation("com.autonomousapps:dependency-analysis-gradle-plugin:1.32.0") implementation("com.diffplug.spotless:spotless-plugin-gradle:6.25.0") implementation("com.google.protobuf:protobuf-gradle-plugin:0.9.4") implementation("com.gradle.publish:plugin-publish-plugin:1.2.1") @@ -28,7 +28,7 @@ dependencies { implementation("io.github.gradle-nexus:publish-plugin:1.3.0") implementation("me.champeau.jmh:jmh-gradle-plugin:0.7.2") implementation("net.swiftzer.semver:semver:1.3.0") - implementation("org.gradlex:java-module-dependencies:1.6.5") implementation("org.gradlex:extra-java-module-info:1.8") implementation("org.gradlex:jvm-dependency-conflict-resolution:2.0") + implementation("org.gradlex:java-module-dependencies:1.6.5") } diff --git a/buildSrc/src/main/kotlin/Utils.kt b/buildSrc/src/main/kotlin/Utils.kt index b19b892d..f6ce8f4b 100644 --- a/buildSrc/src/main/kotlin/Utils.kt +++ b/buildSrc/src/main/kotlin/Utils.kt @@ -31,7 +31,7 @@ class Utils { lines = gradlePropFile.readLines(Charsets.UTF_8) } - var versionStr = "version=${newVersion.toString()}" + var versionStr = "version=$newVersion" val finalLines: List diff --git a/buildSrc/src/main/kotlin/com.hedera.block.jpms-modules.gradle.kts b/buildSrc/src/main/kotlin/com.hedera.block.jpms-modules.gradle.kts index 2be03727..568b13af 100644 --- a/buildSrc/src/main/kotlin/com.hedera.block.jpms-modules.gradle.kts +++ b/buildSrc/src/main/kotlin/com.hedera.block.jpms-modules.gradle.kts @@ -31,16 +31,11 @@ jvmDependencyConflicts.patch { "com.google.code.findbugs:jsr305", "com.google.errorprone:error_prone_annotations", "com.google.guava:listenablefuture", - "com.google.j2objc:j2objc-annotations", "org.checkerframework:checker-compat-qual", "org.checkerframework:checker-qual", "org.codehaus.mojo:animal-sniffer-annotations" ) - module("com.google.guava:guava") { - annotationLibraries.forEach { removeDependency(it) } - removeDependency("com.google.guava:failureaccess") - } module("io.grpc:grpc-api") { annotationLibraries.forEach { removeDependency(it) } } module("io.grpc:grpc-core") { annotationLibraries.forEach { removeDependency(it) } } module("io.grpc:grpc-context") { annotationLibraries.forEach { removeDependency(it) } } @@ -50,61 +45,92 @@ jvmDependencyConflicts.patch { removeDependency(/* dependency = */ "com.google.protobuf:protobuf-javalite") addApiDependency("com.google.protobuf:protobuf-java") } + module("io.grpc:grpc-services") { annotationLibraries.forEach { removeDependency(it) } } module("io.grpc:grpc-stub") { annotationLibraries.forEach { removeDependency(it) } } + module("io.grpc:grpc-testing") { annotationLibraries.forEach { removeDependency(it) } } 
module("io.grpc:grpc-util") { annotationLibraries.forEach { removeDependency(it) } } - // Added for metrics and logging, but also several platform classes + module("com.google.guava:guava") { + (annotationLibraries - + "com.google.code.findbugs:jsr305" - + "com.google.errorprone:error_prone_annotations" - + "org.checkerframework:checker-qual") + .forEach { removeDependency(it) } + } + module("com.google.protobuf:protobuf-java-util") { + annotationLibraries.forEach { removeDependency(it) } + } module("io.prometheus:simpleclient") { removeDependency("io.prometheus:simpleclient_tracer_otel") removeDependency("io.prometheus:simpleclient_tracer_otel_agent") } + module("junit:junit") { removeDependency("org.hamcrest:hamcrest-core") } + module("org.hyperledger.besu:secp256k1") { addApiDependency("net.java.dev.jna:jna") } } // Fix or enhance the 'module-info.class' of third-party Modules. This is about the // 'module-info.class' inside the Jar files. In our full Java Modules setup every // Jar needs to have this file. If it is missing, it is added by what is configured here. extraJavaModuleInfo { - failOnAutomaticModules = false // Only allow Jars with 'module-info' on all module paths + failOnAutomaticModules = true // Only allow Jars with 'module-info' on all module paths - module("com.google.api.grpc:proto-google-common-protos", "com.google.api.grpc.common") - module("com.google.guava:guava", "com.google.common") { + module("io.grpc:grpc-api", "io.grpc") { exportAllPackages() requireAllDefinedDependencies() requires("java.logging") } - module("com.google.protobuf:protobuf-java", "com.google.protobuf") { + + module("io.grpc:grpc-core", "io.grpc.internal") + module("io.grpc:grpc-context", "io.grpc.context") + module("io.grpc:grpc-stub", "io.grpc.stub") { exportAllPackages() requireAllDefinedDependencies() requires("java.logging") } - module("io.grpc:grpc-api", "io.grpc") { + module("io.grpc:grpc-testing", "io.grpc.testing") + module("io.grpc:grpc-services", "io.grpc.services") + module("io.grpc:grpc-util", "io.grpc.util") + module("io.grpc:grpc-protobuf", "io.grpc.protobuf") + module("io.grpc:grpc-protobuf-lite", "io.grpc.protobuf.lite") + module("com.github.spotbugs:spotbugs-annotations", "com.github.spotbugs.annotations") + module("com.google.code.findbugs:jsr305", "java.annotation") { + exportAllPackages() + mergeJar("javax.annotation:javax.annotation-api") + } + module("com.google.errorprone:error_prone_annotations", "com.google.errorprone.annotations") + module("com.google.j2objc:j2objc-annotations", "com.google.j2objc.annotations") + module("com.google.protobuf:protobuf-java", "com.google.protobuf") { exportAllPackages() requireAllDefinedDependencies() requires("java.logging") } - module("io.grpc:grpc-core", "io.grpc.internal") - module("io.grpc:grpc-context", "io.grpc.context") - module("io.grpc:grpc-protobuf", "io.grpc.protobuf") - module("io.grpc:grpc-protobuf-lite", "io.grpc.protobuf.lite") - module("io.grpc:grpc-stub", "io.grpc.stub") { + module("com.google.guava:guava", "com.google.common") { exportAllPackages() requireAllDefinedDependencies() requires("java.logging") } - module("io.grpc:grpc-util", "io.grpc.util") + module("com.google.guava:failureaccess", "com.google.common.util.concurrent.internal") + module("com.google.api.grpc:proto-google-common-protos", "com.google.api.grpc.common") + module("io.perfmark:perfmark-api", "io.perfmark") + module("javax.inject:javax.inject", "javax.inject") - module("junit:junit", "junit") - module("org.mockito:mockito-core", "org.mockito") - 
module("org.mockito:mockito-junit-jupiter", "org.mockito.junit.jupiter") + module("commons-codec:commons-codec", "org.apache.commons.codec") + module("org.apache.commons:commons-math3", "org.apache.commons.math3") + module("org.apache.commons:commons-collections4", "org.apache.commons.collections4") + module("com.esaulpaugh:headlong", "headlong") - // spotbugs - module("com.github.spotbugs:spotbugs-annotations", "com.github.spotbugs.annotations") - module("com.google.code.findbugs:jsr305", "java.annotation") { exportAllPackages() } + module("org.checkerframework:checker-qual", "org.checkerframework.checker.qual") + module("net.i2p.crypto:eddsa", "net.i2p.crypto.eddsa") + module("org.antlr:antlr4-runtime", "org.antlr.antlr4.runtime") // needed for metrics and logging, but also several platform classes module("com.goterl:resource-loader", "resource.loader") module("com.goterl:lazysodium-java", "lazysodium.java") module("org.hyperledger.besu:secp256k1", "org.hyperledger.besu.nativelib.secp256k1") + module("net.java.dev.jna:jna", "com.sun.jna") { + exportAllPackages() + requires("java.logging") + } module("io.prometheus:simpleclient", "io.prometheus.simpleclient") module("io.prometheus:simpleclient_common", "io.prometheus.simpleclient_common") module("io.prometheus:simpleclient_httpserver", "io.prometheus.simpleclient.httpserver") { @@ -117,4 +143,20 @@ extraJavaModuleInfo { module("com.google.auto.service:auto-service-annotations", "com.google.auto.service") module("com.google.auto.service:auto-service", "com.google.auto.service.processor") module("com.google.auto:auto-common", "com.google.auto.common") + + // Test clients only + module("com.google.protobuf:protobuf-java-util", "com.google.protobuf.util") + module("junit:junit", "junit") + module("org.hamcrest:hamcrest", "org.hamcrest") + module("org.json:json", "org.json") + module("org.mockito:mockito-core", "org.mockito") + module("org.objenesis:objenesis", "org.objenesis") + module("org.rnorth.duct-tape:duct-tape", "org.rnorth.ducttape") + module("org.testcontainers:junit-jupiter", "org.testcontainers.junit.jupiter") + module("org.testcontainers:testcontainers", "org.testcontainers") + module("org.mockito:mockito-junit-jupiter", "org.mockito.junit.jupiter") } + +// Make 'javax.annotation:javax.annotation-api' discoverable for merging it into +// 'com.google.code.findbugs:jsr305' +dependencies { "javaModulesMergeJars"("javax.annotation:javax.annotation-api:1.3.2") } diff --git a/buildSrc/src/main/kotlin/com.hedera.block.protobuf.gradle.kts b/buildSrc/src/main/kotlin/com.hedera.block.protobuf.gradle.kts new file mode 100644 index 00000000..359e50c5 --- /dev/null +++ b/buildSrc/src/main/kotlin/com.hedera.block.protobuf.gradle.kts @@ -0,0 +1,62 @@ +/* + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+import com.google.protobuf.gradle.id
+import com.hedera.block.tasks.GitClone
+
+plugins {
+    id("java-library")
+    id("com.hedera.block.conventions")
+    id("com.google.protobuf")
+}
+
+tasks.register<GitClone>("cloneHederaProtobufs") {
+    url = "https://github.com/hashgraph/hedera-protobufs.git"
+    offline = gradle.startParameter.isOffline
+    localCloneDirectory = layout.buildDirectory.dir("hedera-protobufs")
+}
+
+// Configure Protobuf Plugin to download protoc executable rather than using local installed version
+protobuf {
+    val libs = the<VersionCatalogsExtension>().named("libs")
+    protoc { artifact = "com.google.protobuf:protoc:" + libs.findVersion("google.proto").get() }
+    plugins {
+        // Add GRPC plugin as we need to generate GRPC services
+        id("grpc") {
+            artifact =
+                "io.grpc:protoc-gen-grpc-java:" + libs.findVersion("grpc.protobuf.grpc").get()
+        }
+    }
+    generateProtoTasks { all().forEach { it.plugins { id("grpc") } } }
+}
+
+tasks.javadoc {
+    options {
+        this as StandardJavadocDocletOptions
+        // There are violations in the generated protobuf code
+        addStringOption("Xdoclint:-reference,-html", "-quiet")
+    }
+}
+
+// Give JUnit more ram and make it execute tests in parallel
+tasks.test {
+    // We are running a lot of tests 10s of thousands, so they need to run in parallel. Make each
+    // class run in parallel.
+    systemProperties["junit.jupiter.execution.parallel.enabled"] = true
+    systemProperties["junit.jupiter.execution.parallel.mode.default"] = "concurrent"
+    // limit amount of threads, so we do not use all CPU
+    systemProperties["junit.jupiter.execution.parallel.config.dynamic.factor"] = "0.9"
+}
diff --git a/buildSrc/src/main/kotlin/com.hedera.block.spotless-java-conventions.gradle.kts b/buildSrc/src/main/kotlin/com.hedera.block.spotless-java-conventions.gradle.kts
index f78f5017..54f05116 100644
--- a/buildSrc/src/main/kotlin/com.hedera.block.spotless-java-conventions.gradle.kts
+++ b/buildSrc/src/main/kotlin/com.hedera.block.spotless-java-conventions.gradle.kts
@@ -18,7 +18,7 @@ plugins { id("com.diffplug.spotless") }
 
 spotless {
     java {
-        targetExclude("build/generated/sources/**/*.java")
+        targetExclude("build/generated/**/*.java", "build/generated/**/*.proto")
         // enable toggle comment support
         toggleOffOn()
         // don't need to set target, it is inferred from java
diff --git a/buildSrc/src/main/kotlin/com/hedera/block/tasks/GitClone.kt b/buildSrc/src/main/kotlin/com/hedera/block/tasks/GitClone.kt
new file mode 100644
index 00000000..d47c11dc
--- /dev/null
+++ b/buildSrc/src/main/kotlin/com/hedera/block/tasks/GitClone.kt
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2022-2024 Hedera Hashgraph, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.hedera.block.tasks + +import javax.inject.Inject +import org.gradle.api.DefaultTask +import org.gradle.api.file.DirectoryProperty +import org.gradle.api.provider.Property +import org.gradle.api.tasks.Input +import org.gradle.api.tasks.Optional +import org.gradle.api.tasks.OutputDirectory +import org.gradle.api.tasks.TaskAction +import org.gradle.process.ExecOperations + +abstract class GitClone : DefaultTask() { + + @get:Input abstract val url: Property + + @get:Input @get:Optional abstract val tag: Property + + @get:Input @get:Optional abstract val branch: Property + + @get:Input abstract val offline: Property + + @get:OutputDirectory abstract val localCloneDirectory: DirectoryProperty + + @get:Inject protected abstract val exec: ExecOperations + + init { + // If a 'branch' is configured, the task is never up-to-date as it may change + outputs.upToDateWhen { !branch.isPresent } + } + + @TaskAction + fun cloneOrUpdate() { + if (!tag.isPresent && !branch.isPresent || tag.isPresent && branch.isPresent) { + throw RuntimeException("Define either 'tag' or 'branch'") + } + + val localClone = localCloneDirectory.get() + if (!offline.get()) { + exec.exec { + if (!localClone.dir(".git").asFile.exists()) { + workingDir = localClone.asFile.parentFile + commandLine( + "git", + "clone", + url.get(), + "-q" + ) + } else { + workingDir = localClone.asFile + commandLine("git", "fetch", "-q") + } + } + } + if (tag.isPresent) { + exec.exec { + workingDir = localClone.asFile + commandLine("git", "checkout", tag.get(), "-q") + } + exec.exec { + workingDir = localClone.asFile + commandLine("git", "reset", "--hard", tag.get(), "-q") + } + } else { + exec.exec { + workingDir = localClone.asFile + commandLine("git", "checkout", branch.get(), "-q") + } + exec.exec { + workingDir = localClone.asFile + commandLine("git", "reset", "--hard", "origin/${branch.get()}", "-q") + } + } + } +} diff --git a/codecov.yml b/codecov.yml index 2f913892..e21c9366 100644 --- a/codecov.yml +++ b/codecov.yml @@ -20,3 +20,4 @@ coverage: ignore: - "server/src/main/java/com/hedera/block/server/Server.java" + - "server/src/main/java/com/hedera/block/server/Translator.java" diff --git a/gradle/modules.properties b/gradle/modules.properties index cf419591..16df503d 100644 --- a/gradle/modules.properties +++ b/gradle/modules.properties @@ -12,5 +12,23 @@ com.lmax.disruptor=com.lmax:disruptor io.helidon.webserver=io.helidon.webserver:helidon-webserver io.helidon.webserver.grpc=io.helidon.webserver:helidon-webserver-grpc io.helidon.webserver.testing.junit5=io.helidon.webserver.testing.junit5:helidon-webserver-testing-junit5 -io.grpc=io.grpc:grpc-stub -grpc.protobuf=io.grpc:grpc-protobuf +google.proto=com.google.protobuf:protoc + +org.antlr.antlr4.runtime=org.antlr:antlr4-runtime + +com.google.common=com.google.guava:guava +io.grpc.protobuf=io.grpc:grpc-protobuf +io.grpc.stub=io.grpc:grpc-stub +io.grpc=io.grpc:grpc-api +com.hedera.pbj.runtime=com.hedera.pbj:pbj-runtime + +com.google.protobuf=com.google.protobuf:protobuf-java +com.google.protobuf.util=com.google.protobuf:protobuf-java-util +com.apache.commons.codec=commons-codec:commons-codec +org.apache.commons.collections4=org.apache.commons:commons-collections4 +org.apache.commons.io=commons-io:commons-io +org.apache.commons.lang3=org.apache.commons:commons-lang3 +org.apache.commons.compress=org.apache.commons:commons-compress + +java.annotation=javax.annotation:javax.annotation-api +org.apache.logging.log4j.slf4j2.impl=org.apache.logging.log4j:log4j-slf4j2-impl diff 
--git a/protos/build.gradle.kts b/protos/build.gradle.kts deleted file mode 100644 index fe8b866f..00000000 --- a/protos/build.gradle.kts +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (C) 2024 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -plugins { - id("java-library") - id("com.hedera.block.repositories") - id("com.hedera.block.protos") -} diff --git a/protos/src/main/java/module-info.java b/protos/src/main/java/module-info.java deleted file mode 100644 index a3ef40ae..00000000 --- a/protos/src/main/java/module-info.java +++ /dev/null @@ -1,5 +0,0 @@ -module com.hedera.block.protos { - exports com.hedera.block.protos; - - requires transitive com.google.protobuf; -} \ No newline at end of file diff --git a/protos/src/main/protobuf/blockstream.proto b/protos/src/main/protobuf/blockstream.proto deleted file mode 100644 index 5b9f4538..00000000 --- a/protos/src/main/protobuf/blockstream.proto +++ /dev/null @@ -1,166 +0,0 @@ -syntax = "proto3"; - -/*- - * Copyright (C) 2024 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -option java_package = "com.hedera.block.protos"; -option java_outer_classname = "BlockStreamService"; - -service BlockStreamGrpcService { - - rpc publishBlockStream (stream PublishStreamRequest) returns (stream PublishStreamResponse) {} - - rpc subscribeBlockStream (SubscribeStreamRequest) returns (stream SubscribeStreamResponse) {} - - rpc singleBlock(SingleBlockRequest) returns (SingleBlockResponse) {} -} - -message PublishStreamRequest { - BlockItem block_item = 1; -} - -message PublishStreamResponse { - oneof response { - /** - * A response sent for each item and for each block. - */ - ItemAcknowledgement acknowledgement = 1; - - /** - * A response sent when a stream ends. - */ - EndOfStream status = 2; - } - - message ItemAcknowledgement { - bytes item_ack = 1; - } - - message EndOfStream { - PublishStreamResponseCode status = 1; - } - - /** - * An enumeration indicating the status of this request. - * - * This enumeration describes the reason a block stream - * (sent via `writeBlockStream`) ended. - */ - enum PublishStreamResponseCode { - /** - * An "unset value" flag, this value SHALL NOT be used.
- * This status indicates the server software failed to set a
- * status, and SHALL be considered a software defect.
- */
- STREAM_ITEMS_UNKNOWN = 0;
-
- /**
- * The request succeeded.
- * No errors occurred and the source node orderly ended the stream.
- */
- STREAM_ITEMS_SUCCESS = 1;
-
- /**
- * The delay between items was too long.
- * The source MUST start a new stream before the failed block.
- */
- STREAM_ITEMS_TIMEOUT = 2;
-
- /**
- * An item was received out-of-order.
- * The source MUST start a new stream before the failed block.
- */
- STREAM_ITEMS_OUT_OF_ORDER = 3;
-
- /**
- * A block state proof item could not be validated.
- * The source MUST start a new stream before the failed block. - */ - STREAM_ITEMS_BAD_STATE_PROOF = 4; - } -} - -message SubscribeStreamRequest { - uint64 start_block_number = 1; -} - -message SubscribeStreamResponse { - oneof response { - SubscribeStreamResponseCode status = 1; - BlockItem block_item = 2; - } - - enum SubscribeStreamResponseCode { - READ_STREAM_UNKNOWN = 0; - READ_STREAM_INSUFFICIENT_BALANCE = 1; - READ_STREAM_SUCCESS = 2; - READ_STREAM_INVALID_START_BLOCK_NUMBER = 3; - READ_STREAM_INVALID_END_BLOCK_NUMBER = 4; - } -} - - - -message Block { - repeated BlockItem block_items = 1; -} - -/** - * A BlockItem is a simple message that contains an id and a value. - * This specification is a simple example meant to expedite development. - * It will be replaced with a PBJ implementation in the future. - */ -message BlockItem { - - oneof items { - BlockHeader header = 1; - EventMetadata start_event = 2; - BlockProof state_proof = 3; - } - - string value = 4; -} - -message BlockHeader { - uint64 block_number = 1; -} - -message EventMetadata { - uint64 creator_id = 1; -} - -message BlockProof { - uint64 block = 1; -} - -message SingleBlockRequest { - uint64 block_number = 1; -} - -message SingleBlockResponse { - oneof response { - SingleBlockResponseCode status = 1; - Block block = 2; - } - - enum SingleBlockResponseCode { - READ_BLOCK_UNKNOWN = 0; - READ_BLOCK_INSUFFICIENT_BALANCE = 1; - READ_BLOCK_SUCCESS = 2; - READ_BLOCK_NOT_FOUND = 3; - READ_BLOCK_NOT_AVAILABLE = 4; - } -} diff --git a/server/build.gradle.kts b/server/build.gradle.kts index 797efade..4e1f45d5 100644 --- a/server/build.gradle.kts +++ b/server/build.gradle.kts @@ -27,6 +27,7 @@ application { mainModuleInfo { annotationProcessor("com.google.auto.service.processor") runtimeOnly("com.swirlds.config.impl") + runtimeOnly("org.apache.logging.log4j.slf4j2.impl") } testModuleInfo { diff --git a/server/src/main/java/com/hedera/block/server/BlockStreamService.java b/server/src/main/java/com/hedera/block/server/BlockStreamService.java index 8b2a0a43..3b820b09 100644 --- a/server/src/main/java/com/hedera/block/server/BlockStreamService.java +++ b/server/src/main/java/com/hedera/block/server/BlockStreamService.java @@ -16,18 +16,34 @@ package com.hedera.block.server; -import static com.hedera.block.protos.BlockStreamService.*; -import static com.hedera.block.server.Constants.*; +import static com.hedera.block.server.Constants.CLIENT_STREAMING_METHOD_NAME; +import static com.hedera.block.server.Constants.SERVER_STREAMING_METHOD_NAME; +import static com.hedera.block.server.Constants.SERVICE_NAME; +import static com.hedera.block.server.Constants.SINGLE_BLOCK_METHOD_NAME; +import static com.hedera.block.server.Translator.fromPbj; +import static com.hedera.block.server.Translator.toPbj; +import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.DEBUG; +import static java.lang.System.Logger.Level.ERROR; import com.google.protobuf.Descriptors; +import com.google.protobuf.InvalidProtocolBufferException; import com.hedera.block.server.config.BlockNodeContext; import com.hedera.block.server.consumer.ConsumerStreamResponseObserver; import com.hedera.block.server.data.ObjectEvent; import com.hedera.block.server.mediator.StreamMediator; import com.hedera.block.server.metrics.MetricsService; import com.hedera.block.server.persistence.storage.read.BlockReader; -import com.hedera.block.server.producer.ItemAckBuilder; import com.hedera.block.server.producer.ProducerBlockItemObserver; +import 
com.hedera.hapi.block.SingleBlockRequest; +import com.hedera.hapi.block.SingleBlockResponse; +import com.hedera.hapi.block.SingleBlockResponseCode; +import com.hedera.hapi.block.SubscribeStreamResponse; +import com.hedera.hapi.block.SubscribeStreamResponseCode; +import com.hedera.hapi.block.protoc.BlockService; +import com.hedera.hapi.block.stream.Block; +import com.hedera.hapi.block.stream.BlockItem; +import com.hedera.pbj.runtime.ParseException; import edu.umd.cs.findbugs.annotations.NonNull; import io.grpc.stub.StreamObserver; import io.helidon.webserver.grpc.GrpcService; @@ -42,9 +58,8 @@ */ public class BlockStreamService implements GrpcService { - private final System.Logger LOGGER = System.getLogger(getClass().getName()); + private final Logger LOGGER = System.getLogger(getClass().getName()); - private final ItemAckBuilder itemAckBuilder; private final StreamMediator> streamMediator; private final ServiceStatus serviceStatus; private final BlockReader blockReader; @@ -54,7 +69,6 @@ public class BlockStreamService implements GrpcService { * Constructor for the BlockStreamService class. It initializes the BlockStreamService with the * given parameters. * - * @param itemAckBuilder the item acknowledgement builder to send responses back to the producer * @param streamMediator the stream mediator to proxy block items from the producer to the * subscribers and manage the subscription lifecycle for subscribers * @param blockReader the block reader to fetch blocks from storage for unary singleBlock @@ -63,14 +77,12 @@ public class BlockStreamService implements GrpcService { * stop the service and web server in the event of an unrecoverable exception */ BlockStreamService( - @NonNull final ItemAckBuilder itemAckBuilder, @NonNull final StreamMediator> streamMediator, @NonNull final BlockReader blockReader, @NonNull final ServiceStatus serviceStatus, @NonNull final BlockNodeContext blockNodeContext) { - this.itemAckBuilder = itemAckBuilder; this.streamMediator = streamMediator; this.blockReader = blockReader; this.serviceStatus = serviceStatus; @@ -86,7 +98,7 @@ public class BlockStreamService implements GrpcService { @NonNull @Override public Descriptors.FileDescriptor proto() { - return com.hedera.block.protos.BlockStreamService.getDescriptor(); + return BlockService.getDescriptor(); } /** @@ -110,32 +122,32 @@ public String serviceName() { */ @Override public void update(@NonNull final Routing routing) { - routing.bidi(CLIENT_STREAMING_METHOD_NAME, this::publishBlockStream); - routing.serverStream(SERVER_STREAMING_METHOD_NAME, this::subscribeBlockStream); - routing.unary(SINGLE_BLOCK_METHOD_NAME, this::singleBlock); + routing.bidi(CLIENT_STREAMING_METHOD_NAME, this::protocPublishBlockStream); + routing.serverStream(SERVER_STREAMING_METHOD_NAME, this::protocSubscribeBlockStream); + routing.unary(SINGLE_BLOCK_METHOD_NAME, this::protocSingleBlock); } - StreamObserver publishBlockStream( - @NonNull final StreamObserver publishStreamResponseObserver) { - LOGGER.log( - System.Logger.Level.DEBUG, - "Executing bidirectional publishBlockStream gRPC method"); + StreamObserver protocPublishBlockStream( + @NonNull + final StreamObserver + publishStreamResponseObserver) { + LOGGER.log(DEBUG, "Executing bidirectional publishBlockStream gRPC method"); return new ProducerBlockItemObserver( - streamMediator, publishStreamResponseObserver, itemAckBuilder, serviceStatus); + streamMediator, publishStreamResponseObserver, serviceStatus); } - void subscribeBlockStream( - @NonNull final SubscribeStreamRequest 
subscribeStreamRequest, + void protocSubscribeBlockStream( + @NonNull + final com.hedera.hapi.block.protoc.SubscribeStreamRequest + subscribeStreamRequest, @NonNull - final StreamObserver subscribeStreamResponseObserver) { - LOGGER.log( - System.Logger.Level.DEBUG, - "Executing Server Streaming subscribeBlockStream gRPC Service"); + final StreamObserver + subscribeStreamResponseObserver) { + LOGGER.log(DEBUG, "Executing Server Streaming subscribeBlockStream gRPC method"); // Return a custom StreamObserver to handle streaming blocks from the producer. if (serviceStatus.isRunning()) { - @NonNull final var streamObserver = new ConsumerStreamResponseObserver( blockNodeContext, @@ -146,48 +158,62 @@ void subscribeBlockStream( streamMediator.subscribe(streamObserver); } else { LOGGER.log( - System.Logger.Level.ERROR, + ERROR, "Server Streaming subscribeBlockStream gRPC Service is not currently running"); subscribeStreamResponseObserver.onNext(buildSubscribeStreamNotAvailableResponse()); } } - void singleBlock( + void protocSingleBlock( + @NonNull final com.hedera.hapi.block.protoc.SingleBlockRequest singleBlockRequest, + @NonNull + final StreamObserver + singleBlockResponseStreamObserver) { + LOGGER.log(DEBUG, "Executing Unary singleBlock gRPC method"); + + try { + final SingleBlockRequest pbjSingleBlockRequest = + toPbj(SingleBlockRequest.PROTOBUF, singleBlockRequest.toByteArray()); + singleBlock(pbjSingleBlockRequest, singleBlockResponseStreamObserver); + } catch (ParseException e) { + LOGGER.log(ERROR, "Error parsing protoc SingleBlockRequest: {0}", singleBlockRequest); + singleBlockResponseStreamObserver.onNext(buildSingleBlockNotAvailableResponse()); + } + } + + private void singleBlock( @NonNull final SingleBlockRequest singleBlockRequest, - @NonNull final StreamObserver singleBlockResponseStreamObserver) { + @NonNull + final StreamObserver + singleBlockResponseStreamObserver) { - LOGGER.log(System.Logger.Level.DEBUG, "Executing Unary singleBlock gRPC method"); + LOGGER.log(DEBUG, "Executing Unary singleBlock gRPC method"); if (serviceStatus.isRunning()) { - final long blockNumber = singleBlockRequest.getBlockNumber(); + final long blockNumber = singleBlockRequest.blockNumber(); try { - @NonNull final Optional blockOpt = blockReader.read(blockNumber); + final Optional blockOpt = blockReader.read(blockNumber); if (blockOpt.isPresent()) { - LOGGER.log( - System.Logger.Level.DEBUG, - "Successfully returning block number: {0}", - blockNumber); + LOGGER.log(DEBUG, "Successfully returning block number: {0}", blockNumber); singleBlockResponseStreamObserver.onNext( - buildSingleBlockResponse(blockOpt.get())); + fromPbjSingleBlockSuccessResponse(blockOpt.get())); - @NonNull final MetricsService metricsService = blockNodeContext.metricsService(); metricsService.singleBlocksRetrieved.increment(); } else { - LOGGER.log( - System.Logger.Level.DEBUG, "Block number {0} not found", blockNumber); + LOGGER.log(DEBUG, "Block number {0} not found", blockNumber); singleBlockResponseStreamObserver.onNext(buildSingleBlockNotFoundResponse()); } } catch (IOException e) { - LOGGER.log( - System.Logger.Level.ERROR, "Error reading block number: {0}", blockNumber); + LOGGER.log(ERROR, "Error reading block number: {0}", blockNumber); + singleBlockResponseStreamObserver.onNext(buildSingleBlockNotAvailableResponse()); + } catch (ParseException e) { + LOGGER.log(ERROR, "Error parsing block number: {0}", blockNumber); singleBlockResponseStreamObserver.onNext(buildSingleBlockNotAvailableResponse()); } } else { - LOGGER.log( 
- System.Logger.Level.ERROR, - "Unary singleBlock gRPC method is not currently running"); + LOGGER.log(ERROR, "Unary singleBlock gRPC method is not currently running"); singleBlockResponseStreamObserver.onNext(buildSingleBlockNotAvailableResponse()); } @@ -198,28 +224,46 @@ void singleBlock( // TODO: Fix this error type once it's been standardized in `hedera-protobufs` // this should not be success @NonNull - static SubscribeStreamResponse buildSubscribeStreamNotAvailableResponse() { - return SubscribeStreamResponse.newBuilder() - .setStatus(SubscribeStreamResponse.SubscribeStreamResponseCode.READ_STREAM_SUCCESS) - .build(); + static com.hedera.hapi.block.protoc.SubscribeStreamResponse + buildSubscribeStreamNotAvailableResponse() { + final SubscribeStreamResponse response = + SubscribeStreamResponse.newBuilder() + .status(SubscribeStreamResponseCode.READ_STREAM_SUCCESS) + .build(); + + return fromPbj(response); } @NonNull - static SingleBlockResponse buildSingleBlockNotAvailableResponse() { - return SingleBlockResponse.newBuilder() - .setStatus(SingleBlockResponse.SingleBlockResponseCode.READ_BLOCK_NOT_AVAILABLE) - .build(); + static com.hedera.hapi.block.protoc.SingleBlockResponse buildSingleBlockNotAvailableResponse() { + final SingleBlockResponse response = + SingleBlockResponse.newBuilder() + .status(SingleBlockResponseCode.READ_BLOCK_NOT_AVAILABLE) + .build(); + + return fromPbj(response); } @NonNull - static SingleBlockResponse buildSingleBlockNotFoundResponse() { - return SingleBlockResponse.newBuilder() - .setStatus(SingleBlockResponse.SingleBlockResponseCode.READ_BLOCK_NOT_FOUND) - .build(); + static com.hedera.hapi.block.protoc.SingleBlockResponse buildSingleBlockNotFoundResponse() + throws InvalidProtocolBufferException { + final SingleBlockResponse response = + SingleBlockResponse.newBuilder() + .status(SingleBlockResponseCode.READ_BLOCK_NOT_FOUND) + .build(); + + return fromPbj(response); } @NonNull - private static SingleBlockResponse buildSingleBlockResponse(@NonNull final Block block) { - return SingleBlockResponse.newBuilder().setBlock(block).build(); + static com.hedera.hapi.block.protoc.SingleBlockResponse fromPbjSingleBlockSuccessResponse( + @NonNull final Block block) { + final SingleBlockResponse singleBlockResponse = + SingleBlockResponse.newBuilder() + .status(SingleBlockResponseCode.READ_BLOCK_SUCCESS) + .block(block) + .build(); + + return fromPbj(singleBlockResponse); } } diff --git a/server/src/main/java/com/hedera/block/server/Constants.java b/server/src/main/java/com/hedera/block/server/Constants.java index d21f6285..5b9850af 100644 --- a/server/src/main/java/com/hedera/block/server/Constants.java +++ b/server/src/main/java/com/hedera/block/server/Constants.java @@ -23,7 +23,7 @@ public final class Constants { private Constants() {} /** Constant mapped to the name of the service in the .proto file */ - @NonNull public static final String SERVICE_NAME = "BlockStreamGrpcService"; + @NonNull public static final String SERVICE_NAME = "BlockStreamService"; /** Constant mapped to the publishBlockStream service method name in the .proto file */ @NonNull public static final String CLIENT_STREAMING_METHOD_NAME = "publishBlockStream"; diff --git a/server/src/main/java/com/hedera/block/server/Server.java b/server/src/main/java/com/hedera/block/server/Server.java index a454af26..7fc021bc 100644 --- a/server/src/main/java/com/hedera/block/server/Server.java +++ b/server/src/main/java/com/hedera/block/server/Server.java @@ -16,7 +16,8 @@ package com.hedera.block.server; 
-import static com.hedera.block.protos.BlockStreamService.*; +import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.INFO; import com.hedera.block.server.config.BlockNodeContext; import com.hedera.block.server.config.BlockNodeContextFactory; @@ -30,7 +31,9 @@ import com.hedera.block.server.persistence.storage.read.BlockReader; import com.hedera.block.server.persistence.storage.write.BlockAsDirWriterBuilder; import com.hedera.block.server.persistence.storage.write.BlockWriter; -import com.hedera.block.server.producer.ItemAckBuilder; +import com.hedera.hapi.block.SubscribeStreamResponse; +import com.hedera.hapi.block.stream.Block; +import com.hedera.hapi.block.stream.BlockItem; import edu.umd.cs.findbugs.annotations.NonNull; import io.helidon.webserver.WebServer; import io.helidon.webserver.grpc.GrpcRouting; @@ -40,7 +43,7 @@ /** Main class for the block node server */ public class Server { - private static final System.Logger LOGGER = System.getLogger(Server.class.getName()); + private static final Logger LOGGER = System.getLogger(Server.class.getName()); private Server() {} @@ -51,24 +54,21 @@ private Server() {} */ public static void main(final String[] args) { - LOGGER.log(System.Logger.Level.INFO, "Starting BlockNode Server"); + LOGGER.log(INFO, "Starting BlockNode Server"); try { // init context, metrics, and configuration. - @NonNull final BlockNodeContext blockNodeContext = BlockNodeContextFactory.create(); + final BlockNodeContext blockNodeContext = BlockNodeContextFactory.create(); + final ServiceStatus serviceStatus = new ServiceStatusImpl(); - @NonNull final ServiceStatus serviceStatus = new ServiceStatusImpl(); - - @NonNull final BlockWriter blockWriter = BlockAsDirWriterBuilder.newBuilder(blockNodeContext).build(); - @NonNull + final StreamMediator> streamMediator = LiveStreamMediatorBuilder.newBuilder( blockWriter, blockNodeContext, serviceStatus) .build(); - @NonNull final BlockReader blockReader = BlockAsDirReaderBuilder.newBuilder( blockNodeContext @@ -76,25 +76,21 @@ public static void main(final String[] args) { .getConfigData(PersistenceStorageConfig.class)) .build(); - @NonNull final BlockStreamService blockStreamService = buildBlockStreamService( streamMediator, blockReader, serviceStatus, blockNodeContext); - @NonNull final GrpcRouting.Builder grpcRouting = GrpcRouting.builder().service(blockStreamService); - @NonNull final HealthService healthService = new HealthServiceImpl(serviceStatus); + final HealthService healthService = new HealthServiceImpl(serviceStatus); - @NonNull final HttpRouting.Builder httpRouting = HttpRouting.builder() .register(healthService.getHealthRootPath(), healthService); // Build the web server // TODO: make port server a configurable value. 
- @NonNull final WebServer webServer = WebServer.builder() .port(8080) @@ -109,9 +105,7 @@ public static void main(final String[] args) { webServer.start(); // Log the server status - LOGGER.log( - System.Logger.Level.INFO, - "Block Node Server started at port: " + webServer.port()); + LOGGER.log(INFO, "Block Node Server started at port: " + webServer.port()); } catch (IOException e) { throw new RuntimeException(e); } @@ -126,7 +120,6 @@ private static BlockStreamService buildBlockStreamService( @NonNull final ServiceStatus serviceStatus, @NonNull final BlockNodeContext blockNodeContext) { - return new BlockStreamService( - new ItemAckBuilder(), streamMediator, blockReader, serviceStatus, blockNodeContext); + return new BlockStreamService(streamMediator, blockReader, serviceStatus, blockNodeContext); } } diff --git a/server/src/main/java/com/hedera/block/server/Translator.java b/server/src/main/java/com/hedera/block/server/Translator.java new file mode 100644 index 00000000..ec98baee --- /dev/null +++ b/server/src/main/java/com/hedera/block/server/Translator.java @@ -0,0 +1,190 @@ +/* + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.block.server; + +import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.ERROR; +import static java.util.Objects.requireNonNull; + +import com.google.protobuf.InvalidProtocolBufferException; +import com.hedera.hapi.block.PublishStreamRequest; +import com.hedera.hapi.block.PublishStreamResponse; +import com.hedera.hapi.block.SingleBlockResponse; +import com.hedera.hapi.block.SubscribeStreamRequest; +import com.hedera.hapi.block.SubscribeStreamResponse; +import com.hedera.pbj.runtime.Codec; +import com.hedera.pbj.runtime.ParseException; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import com.hedera.pbj.runtime.io.stream.WritableStreamingData; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.ByteArrayOutputStream; +import java.io.IOException; + +/** + * Translator class to convert between PBJ and google protoc objects. + * + *

TODO: Remove this class once the Helidon PBJ gRPC work is integrated. + */ +public final class Translator { + private static final Logger LOGGER = System.getLogger(Translator.class.getName()); + + private static final String INVALID_BUFFER_MESSAGE = + "Invalid protocol buffer converting %s from PBJ to protoc for %s"; + + private Translator() {} + + /** + * Converts a {@link SingleBlockResponse} to a {@link + * com.hedera.hapi.block.protoc.SingleBlockResponse}. + * + * @param singleBlockResponse the {@link SingleBlockResponse} to convert + * @return the converted {@link com.hedera.hapi.block.protoc.SingleBlockResponse} + */ + @NonNull + public static com.hedera.hapi.block.protoc.SingleBlockResponse fromPbj( + @NonNull final SingleBlockResponse singleBlockResponse) { + try { + final byte[] pbjBytes = asBytes(SingleBlockResponse.PROTOBUF, singleBlockResponse); + return com.hedera.hapi.block.protoc.SingleBlockResponse.parseFrom(pbjBytes); + } catch (InvalidProtocolBufferException e) { + final String message = + INVALID_BUFFER_MESSAGE.formatted("SingleBlockResponse", singleBlockResponse); + LOGGER.log(ERROR, message); + throw new RuntimeException(message, e); + } + } + + /** + * Converts a {@link com.hedera.hapi.block.PublishStreamResponse} to a {@link + * com.hedera.hapi.block.protoc.PublishStreamResponse}. + * + * @param publishStreamResponse the {@link com.hedera.hapi.block.PublishStreamResponse} to + * convert + * @return the converted {@link com.hedera.hapi.block.protoc.PublishStreamResponse} + */ + @NonNull + public static com.hedera.hapi.block.protoc.PublishStreamResponse fromPbj( + @NonNull final com.hedera.hapi.block.PublishStreamResponse publishStreamResponse) { + try { + final byte[] pbjBytes = asBytes(PublishStreamResponse.PROTOBUF, publishStreamResponse); + return com.hedera.hapi.block.protoc.PublishStreamResponse.parseFrom(pbjBytes); + } catch (InvalidProtocolBufferException e) { + final String message = + INVALID_BUFFER_MESSAGE.formatted( + "PublishStreamResponse", publishStreamResponse); + LOGGER.log(ERROR, message); + throw new RuntimeException(message, e); + } + } + + /** + * Converts a {@link com.hedera.hapi.block.PublishStreamRequest} to a {@link + * com.hedera.hapi.block.protoc.PublishStreamRequest}. + * + * @param publishStreamRequest the {@link com.hedera.hapi.block.PublishStreamRequest} to convert + * @return the converted {@link com.hedera.hapi.block.protoc.PublishStreamRequest} + */ + @NonNull + public static com.hedera.hapi.block.protoc.PublishStreamRequest fromPbj( + @NonNull final com.hedera.hapi.block.PublishStreamRequest publishStreamRequest) { + try { + final byte[] pbjBytes = asBytes(PublishStreamRequest.PROTOBUF, publishStreamRequest); + return com.hedera.hapi.block.protoc.PublishStreamRequest.parseFrom(pbjBytes); + } catch (InvalidProtocolBufferException e) { + final String message = + INVALID_BUFFER_MESSAGE.formatted("PublishStreamRequest", publishStreamRequest); + LOGGER.log(ERROR, message); + throw new RuntimeException(message, e); + } + } + + /** + * Converts a {@link com.hedera.hapi.block.SubscribeStreamResponse} to a {@link + * com.hedera.hapi.block.protoc.SubscribeStreamResponse}. 
+ * + * @param subscribeStreamResponse the {@link com.hedera.hapi.block.SubscribeStreamResponse} to + * convert + * @return the converted {@link com.hedera.hapi.block.protoc.SubscribeStreamResponse} + */ + @NonNull + public static com.hedera.hapi.block.protoc.SubscribeStreamResponse fromPbj( + @NonNull final com.hedera.hapi.block.SubscribeStreamResponse subscribeStreamResponse) { + try { + final byte[] pbjBytes = + asBytes(SubscribeStreamResponse.PROTOBUF, subscribeStreamResponse); + return com.hedera.hapi.block.protoc.SubscribeStreamResponse.parseFrom(pbjBytes); + } catch (InvalidProtocolBufferException e) { + final String message = + INVALID_BUFFER_MESSAGE.formatted( + "SubscribeStreamResponse", subscribeStreamResponse); + LOGGER.log(ERROR, message); + throw new RuntimeException(message, e); + } + } + + /** + * Converts a {@link com.hedera.hapi.block.SubscribeStreamRequest} to a {@link + * com.hedera.hapi.block.protoc.SubscribeStreamRequest}. + * + * @param subscribeStreamRequest the {@link com.hedera.hapi.block.SubscribeStreamRequest} to + * convert + * @return the converted {@link com.hedera.hapi.block.protoc.SubscribeStreamRequest} + */ + @NonNull + public static com.hedera.hapi.block.protoc.SubscribeStreamRequest fromPbj( + @NonNull final com.hedera.hapi.block.SubscribeStreamRequest subscribeStreamRequest) { + try { + final byte[] pbjBytes = + asBytes(SubscribeStreamRequest.PROTOBUF, subscribeStreamRequest); + return com.hedera.hapi.block.protoc.SubscribeStreamRequest.parseFrom(pbjBytes); + } catch (InvalidProtocolBufferException e) { + final String message = + INVALID_BUFFER_MESSAGE.formatted( + "SubscribeStreamRequest", subscribeStreamRequest); + LOGGER.log(ERROR, message); + throw new RuntimeException(message, e); + } + } + + /** + * Converts protoc bytes to a PBJ record of the same type. 
+ * + * @param the type of PBJ record to convert to + * @param codec the record codec to convert the bytes to a PBJ record + * @param bytes the protoc bytes to convert to a PBJ record + * @return the converted PBJ record + * @throws ParseException if the conversion between the protoc bytes and PBJ objects fails + */ + @NonNull + public static T toPbj( + @NonNull final Codec codec, @NonNull final byte[] bytes) throws ParseException { + return codec.parse(Bytes.wrap(bytes)); + } + + @NonNull + private static byte[] asBytes(@NonNull Codec codec, @NonNull T tx) { + requireNonNull(codec); + requireNonNull(tx); + try { + final var bytes = new ByteArrayOutputStream(); + codec.write(tx, new WritableStreamingData(bytes)); + return bytes.toByteArray(); + } catch (IOException e) { + throw new RuntimeException("Unable to convert from PBJ to bytes", e); + } + } +} diff --git a/server/src/main/java/com/hedera/block/server/consumer/ConsumerStreamResponseObserver.java b/server/src/main/java/com/hedera/block/server/consumer/ConsumerStreamResponseObserver.java index 91a9f6cc..3860bcde 100644 --- a/server/src/main/java/com/hedera/block/server/consumer/ConsumerStreamResponseObserver.java +++ b/server/src/main/java/com/hedera/block/server/consumer/ConsumerStreamResponseObserver.java @@ -16,12 +16,17 @@ package com.hedera.block.server.consumer; -import static com.hedera.block.protos.BlockStreamService.BlockItem; -import static com.hedera.block.protos.BlockStreamService.SubscribeStreamResponse; +import static com.hedera.block.server.Translator.fromPbj; +import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.DEBUG; +import static java.lang.System.Logger.Level.ERROR; import com.hedera.block.server.config.BlockNodeContext; import com.hedera.block.server.data.ObjectEvent; import com.hedera.block.server.mediator.SubscriptionHandler; +import com.hedera.hapi.block.SubscribeStreamResponse; +import com.hedera.hapi.block.stream.BlockItem; +import com.hedera.pbj.runtime.OneOf; import com.lmax.disruptor.EventHandler; import edu.umd.cs.findbugs.annotations.NonNull; import io.grpc.stub.ServerCallStreamObserver; @@ -38,17 +43,22 @@ public class ConsumerStreamResponseObserver implements EventHandler> { - private final System.Logger LOGGER = System.getLogger(getClass().getName()); + private final Logger LOGGER = System.getLogger(getClass().getName()); - private final StreamObserver subscribeStreamResponseObserver; + private final StreamObserver + subscribeStreamResponseObserver; private final SubscriptionHandler> subscriptionHandler; private final long timeoutThresholdMillis; private final InstantSource producerLivenessClock; private long producerLivenessMillis; - private boolean streamStarted; private final AtomicBoolean isResponsePermitted = new AtomicBoolean(true); + private final ResponseSender statusResponseSender = new StatusResponseSender(); + private final ResponseSender blockItemResponseSender = new BlockItemResponseSender(); + + private static final String PROTOCOL_VIOLATION_MESSAGE = + "Protocol Violation. %s is OneOf type %s but %s is null.\n%s"; /** * The onCancel handler to execute when the consumer cancels the stream. 
This handler is @@ -80,7 +90,8 @@ public ConsumerStreamResponseObserver( final SubscriptionHandler> subscriptionHandler, @NonNull - final StreamObserver subscribeStreamResponseObserver) { + final StreamObserver + subscribeStreamResponseObserver) { this.timeoutThresholdMillis = context.configuration() @@ -93,7 +104,7 @@ public ConsumerStreamResponseObserver( // unsubscribe this observer. if (subscribeStreamResponseObserver instanceof - ServerCallStreamObserver + ServerCallStreamObserver serverCallStreamObserver) { onCancel = @@ -102,9 +113,7 @@ public ConsumerStreamResponseObserver( // Do not allow additional responses to be sent. isResponsePermitted.set(false); subscriptionHandler.unsubscribe(this); - LOGGER.log( - System.Logger.Level.DEBUG, - "Consumer cancelled stream. Observer unsubscribed."); + LOGGER.log(DEBUG, "Consumer cancelled stream. Observer unsubscribed."); }; serverCallStreamObserver.setOnCancelHandler(onCancel); @@ -114,9 +123,7 @@ public ConsumerStreamResponseObserver( // Do not allow additional responses to be sent. isResponsePermitted.set(false); subscriptionHandler.unsubscribe(this); - LOGGER.log( - System.Logger.Level.DEBUG, - "Consumer completed stream. Observer unsubscribed."); + LOGGER.log(DEBUG, "Consumer completed stream. Observer unsubscribed."); }; serverCallStreamObserver.setOnCloseHandler(onClose); } @@ -150,28 +157,74 @@ public void onEvent( if (currentMillis - producerLivenessMillis > timeoutThresholdMillis) { subscriptionHandler.unsubscribe(this); LOGGER.log( - System.Logger.Level.DEBUG, - "Unsubscribed ConsumerBlockItemObserver due to producer timeout"); + DEBUG, + "Producer liveness timeout. Unsubscribed ConsumerBlockItemObserver."); } else { // Refresh the producer liveness and pass the BlockItem to the downstream observer. producerLivenessMillis = currentMillis; - // Only start sending BlockItems after we've reached - // the beginning of a block. - @NonNull final SubscribeStreamResponse subscribeStreamResponse = event.get(); - @NonNull final BlockItem blockItem = subscribeStreamResponse.getBlockItem(); - if (!streamStarted && blockItem.hasHeader()) { + final SubscribeStreamResponse subscribeStreamResponse = event.get(); + final ResponseSender responseSender = getResponseSender(subscribeStreamResponse); + responseSender.send(subscribeStreamResponse); + } + } + } + + @NonNull + private ResponseSender getResponseSender( + @NonNull final SubscribeStreamResponse subscribeStreamResponse) { + + final OneOf oneOfTypeOneOf = + subscribeStreamResponse.response(); + return switch (oneOfTypeOneOf.kind()) { + case STATUS -> statusResponseSender; + case BLOCK_ITEM -> blockItemResponseSender; + default -> throw new IllegalArgumentException( + "Unknown response type: " + oneOfTypeOneOf.kind()); + }; + } + + private interface ResponseSender { + void send(@NonNull final SubscribeStreamResponse subscribeStreamResponse); + } + + private final class BlockItemResponseSender implements ResponseSender { + private boolean streamStarted = false; + + public void send(@NonNull final SubscribeStreamResponse subscribeStreamResponse) { + + // Only start sending BlockItems after we've reached + // the beginning of a block. 
+ final BlockItem blockItem = subscribeStreamResponse.blockItem(); + if (blockItem == null) { + final String message = + PROTOCOL_VIOLATION_MESSAGE.formatted( + "SubscribeStreamResponse", + "BLOCK_ITEM", + "block_item", + subscribeStreamResponse); + LOGGER.log(ERROR, message); + throw new IllegalArgumentException(message); + } else { + if (!streamStarted && blockItem.hasBlockHeader()) { streamStarted = true; } if (streamStarted) { - LOGGER.log( - System.Logger.Level.DEBUG, - "Send BlockItem downstream: {0} ", - blockItem); - subscribeStreamResponseObserver.onNext(subscribeStreamResponse); + LOGGER.log(DEBUG, "Sending BlockItem downstream: {0}", blockItem); + subscribeStreamResponseObserver.onNext(fromPbj(subscribeStreamResponse)); } } } } + + private final class StatusResponseSender implements ResponseSender { + public void send(@NonNull final SubscribeStreamResponse subscribeStreamResponse) { + LOGGER.log( + DEBUG, + "Sending SubscribeStreamResponse downstream: {0} ", + subscribeStreamResponse); + subscribeStreamResponseObserver.onNext(fromPbj(subscribeStreamResponse)); + } + } } diff --git a/server/src/main/java/com/hedera/block/server/mediator/LiveStreamMediatorBuilder.java b/server/src/main/java/com/hedera/block/server/mediator/LiveStreamMediatorBuilder.java index 661c8f51..68ae63cc 100644 --- a/server/src/main/java/com/hedera/block/server/mediator/LiveStreamMediatorBuilder.java +++ b/server/src/main/java/com/hedera/block/server/mediator/LiveStreamMediatorBuilder.java @@ -16,13 +16,12 @@ package com.hedera.block.server.mediator; -import static com.hedera.block.protos.BlockStreamService.BlockItem; -import static com.hedera.block.protos.BlockStreamService.SubscribeStreamResponse; - import com.hedera.block.server.ServiceStatus; import com.hedera.block.server.config.BlockNodeContext; import com.hedera.block.server.data.ObjectEvent; import com.hedera.block.server.persistence.storage.write.BlockWriter; +import com.hedera.hapi.block.SubscribeStreamResponse; +import com.hedera.hapi.block.stream.BlockItem; import com.lmax.disruptor.BatchEventProcessor; import com.lmax.disruptor.EventHandler; import edu.umd.cs.findbugs.annotations.NonNull; diff --git a/server/src/main/java/com/hedera/block/server/mediator/LiveStreamMediatorImpl.java b/server/src/main/java/com/hedera/block/server/mediator/LiveStreamMediatorImpl.java index 015eb09e..587d2a7a 100644 --- a/server/src/main/java/com/hedera/block/server/mediator/LiveStreamMediatorImpl.java +++ b/server/src/main/java/com/hedera/block/server/mediator/LiveStreamMediatorImpl.java @@ -16,14 +16,18 @@ package com.hedera.block.server.mediator; -import static com.hedera.block.protos.BlockStreamService.BlockItem; -import static com.hedera.block.protos.BlockStreamService.SubscribeStreamResponse; +import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.DEBUG; +import static java.lang.System.Logger.Level.ERROR; import com.hedera.block.server.ServiceStatus; import com.hedera.block.server.config.BlockNodeContext; import com.hedera.block.server.data.ObjectEvent; import com.hedera.block.server.metrics.MetricsService; import com.hedera.block.server.persistence.storage.write.BlockWriter; +import com.hedera.hapi.block.SubscribeStreamResponse; +import com.hedera.hapi.block.SubscribeStreamResponseCode; +import com.hedera.hapi.block.stream.BlockItem; import com.lmax.disruptor.BatchEventProcessor; import com.lmax.disruptor.BatchEventProcessorBuilder; import com.lmax.disruptor.EventHandler; @@ -46,7 +50,7 @@ class LiveStreamMediatorImpl implements 
StreamMediator> { - private final System.Logger LOGGER = System.getLogger(getClass().getName()); + private final Logger LOGGER = System.getLogger(getClass().getName()); private final RingBuffer> ringBuffer; private final ExecutorService executor; @@ -59,6 +63,7 @@ class LiveStreamMediatorImpl private final BlockWriter blockWriter; private final ServiceStatus serviceStatus; private final BlockNodeContext blockNodeContext; + private final MetricsService metricsService; /** * Constructs a new LiveStreamMediatorImpl instance with the given subscribers, block writer, @@ -86,14 +91,14 @@ class LiveStreamMediatorImpl this.blockWriter = blockWriter; // Initialize and start the disruptor - @NonNull final Disruptor> disruptor = // TODO: replace ring buffer size with a configurable value, create a MediatorConfig - new Disruptor<>(ObjectEvent::new, 1024, DaemonThreadFactory.INSTANCE); + new Disruptor<>(ObjectEvent::new, 2048, DaemonThreadFactory.INSTANCE); this.ringBuffer = disruptor.start(); this.executor = Executors.newCachedThreadPool(DaemonThreadFactory.INSTANCE); this.serviceStatus = serviceStatus; this.blockNodeContext = blockNodeContext; + this.metricsService = blockNodeContext.metricsService(); } /** @@ -110,14 +115,12 @@ public void publish(@NonNull final BlockItem blockItem) throws IOException { if (serviceStatus.isRunning()) { // Publish the block for all subscribers to receive - LOGGER.log(System.Logger.Level.DEBUG, "Publishing BlockItem: {0}", blockItem); - @NonNull + LOGGER.log(DEBUG, "Publishing BlockItem: {0}", blockItem); final var subscribeStreamResponse = - SubscribeStreamResponse.newBuilder().setBlockItem(blockItem).build(); + SubscribeStreamResponse.newBuilder().blockItem(blockItem).build(); ringBuffer.publishEvent((event, sequence) -> event.set(subscribeStreamResponse)); // Increment the block item counter - @NonNull final MetricsService metricsService = blockNodeContext.metricsService(); metricsService.liveBlockItems.increment(); try { @@ -127,27 +130,27 @@ public void publish(@NonNull final BlockItem blockItem) throws IOException { // Disable BlockItem publication for upstream producers serviceStatus.setRunning(false); LOGGER.log( - System.Logger.Level.ERROR, + ERROR, "An exception occurred while attempting to persist the BlockItem: " + blockItem, e); - LOGGER.log(System.Logger.Level.DEBUG, "Send a response to end the stream"); + LOGGER.log(DEBUG, "Send a response to end the stream"); // Publish the block for all subscribers to receive - @NonNull final SubscribeStreamResponse endStreamResponse = buildEndStreamResponse(); + final SubscribeStreamResponse endStreamResponse = buildEndStreamResponse(); ringBuffer.publishEvent((event, sequence) -> event.set(endStreamResponse)); // Unsubscribe all downstream consumers - for (@NonNull final var subscriber : subscribers.keySet()) { - LOGGER.log(System.Logger.Level.DEBUG, "Unsubscribing: {0}", subscriber); + for (final var subscriber : subscribers.keySet()) { + LOGGER.log(DEBUG, "Unsubscribing: {0}", subscriber); unsubscribe(subscriber); } throw e; } } else { - LOGGER.log(System.Logger.Level.ERROR, "StreamMediator is not accepting BlockItems"); + LOGGER.log(ERROR, "StreamMediator is not accepting BlockItems"); } } @@ -156,7 +159,6 @@ public void subscribe( @NonNull final EventHandler> handler) { // Initialize the batch event processor and set it on the ring buffer - @NonNull final var batchEventProcessor = new BatchEventProcessorBuilder() .build(ringBuffer, ringBuffer.newBarrier(), handler); @@ -175,9 +177,9 @@ public void unsubscribe( 
@NonNull final EventHandler> handler) { // Remove the subscriber - @NonNull final var batchEventProcessor = subscribers.remove(handler); + final var batchEventProcessor = subscribers.remove(handler); if (batchEventProcessor == null) { - LOGGER.log(System.Logger.Level.ERROR, "Subscriber not found: {0}", handler); + LOGGER.log(ERROR, "Subscriber not found: {0}", handler); } else { @@ -203,7 +205,7 @@ private static SubscribeStreamResponse buildEndStreamResponse() { // SubscribeStreamResponseCode. // TODO: Replace READ_STREAM_SUCCESS (2) with a generic error code? return SubscribeStreamResponse.newBuilder() - .setStatus(SubscribeStreamResponse.SubscribeStreamResponseCode.READ_STREAM_SUCCESS) + .status(SubscribeStreamResponseCode.READ_STREAM_SUCCESS) .build(); } diff --git a/server/src/main/java/com/hedera/block/server/persistence/storage/FileUtils.java b/server/src/main/java/com/hedera/block/server/persistence/storage/FileUtils.java index 680d15fb..7e572745 100644 --- a/server/src/main/java/com/hedera/block/server/persistence/storage/FileUtils.java +++ b/server/src/main/java/com/hedera/block/server/persistence/storage/FileUtils.java @@ -16,6 +16,8 @@ package com.hedera.block.server.persistence.storage; +import static java.lang.System.Logger; + import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.nio.file.Files; @@ -28,7 +30,7 @@ /** FileUtils methods provide common functionality for the storage package. */ public final class FileUtils { - private static final System.Logger LOGGER = System.getLogger(FileUtils.class.getName()); + private static final Logger LOGGER = System.getLogger(FileUtils.class.getName()); private FileUtils() {} diff --git a/server/src/main/java/com/hedera/block/server/persistence/storage/PersistenceStorageConfig.java b/server/src/main/java/com/hedera/block/server/persistence/storage/PersistenceStorageConfig.java index eb91065f..f2f0d1fe 100644 --- a/server/src/main/java/com/hedera/block/server/persistence/storage/PersistenceStorageConfig.java +++ b/server/src/main/java/com/hedera/block/server/persistence/storage/PersistenceStorageConfig.java @@ -16,6 +16,8 @@ package com.hedera.block.server.persistence.storage; +import static java.lang.System.Logger.Level.ERROR; + import com.swirlds.config.api.ConfigData; import com.swirlds.config.api.ConfigProperty; import java.io.IOException; @@ -49,8 +51,7 @@ public record PersistenceStorageConfig(@ConfigProperty(defaultValue = "") String // Create Directory if it does not exist if (Files.notExists(path)) { try { - FileUtils.createPathIfNotExists( - path, System.Logger.Level.ERROR, FileUtils.defaultPerms); + FileUtils.createPathIfNotExists(path, ERROR, FileUtils.defaultPerms); } catch (IOException e) { throw new RuntimeException(e); } diff --git a/server/src/main/java/com/hedera/block/server/persistence/storage/read/BlockAsDirReader.java b/server/src/main/java/com/hedera/block/server/persistence/storage/read/BlockAsDirReader.java index de1fc4b0..4ff7484f 100644 --- a/server/src/main/java/com/hedera/block/server/persistence/storage/read/BlockAsDirReader.java +++ b/server/src/main/java/com/hedera/block/server/persistence/storage/read/BlockAsDirReader.java @@ -16,12 +16,17 @@ package com.hedera.block.server.persistence.storage.read; -import static com.hedera.block.protos.BlockStreamService.Block; -import static com.hedera.block.protos.BlockStreamService.Block.Builder; -import static com.hedera.block.protos.BlockStreamService.BlockItem; import static com.hedera.block.server.Constants.BLOCK_FILE_EXTENSION; 
+import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.DEBUG; +import static java.lang.System.Logger.Level.ERROR; +import static java.lang.System.Logger.Level.INFO; import com.hedera.block.server.persistence.storage.PersistenceStorageConfig; +import com.hedera.hapi.block.stream.Block; +import com.hedera.hapi.block.stream.BlockItem; +import com.hedera.pbj.runtime.ParseException; +import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.File; import java.io.FileInputStream; @@ -31,6 +36,8 @@ import java.nio.file.Path; import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.PosixFilePermission; +import java.util.ArrayList; +import java.util.List; import java.util.Optional; import java.util.Set; @@ -40,7 +47,7 @@ */ class BlockAsDirReader implements BlockReader { - private final System.Logger LOGGER = System.getLogger(getClass().getName()); + private final Logger LOGGER = System.getLogger(getClass().getName()); private final Path blockNodeRootPath; private final FileAttribute> filePerms; @@ -56,12 +63,12 @@ class BlockAsDirReader implements BlockReader { @NonNull final PersistenceStorageConfig config, @NonNull final FileAttribute> filePerms) { - LOGGER.log(System.Logger.Level.INFO, "Initializing FileSystemBlockReader"); + LOGGER.log(INFO, "Initializing FileSystemBlockReader"); - @NonNull final Path blockNodeRootPath = Path.of(config.rootPath()); + final Path blockNodeRootPath = Path.of(config.rootPath()); - LOGGER.log(System.Logger.Level.INFO, config.toString()); - LOGGER.log(System.Logger.Level.INFO, "Block Node Root Path: " + blockNodeRootPath); + LOGGER.log(INFO, config.toString()); + LOGGER.log(INFO, "Block Node Root Path: " + blockNodeRootPath); this.blockNodeRootPath = blockNodeRootPath; this.filePerms = filePerms; @@ -77,7 +84,7 @@ class BlockAsDirReader implements BlockReader { */ @NonNull @Override - public Optional read(final long blockNumber) throws IOException { + public Optional read(final long blockNumber) throws IOException, ParseException { // Verify path attributes of the block node root path if (isPathDisqualified(blockNodeRootPath)) { @@ -86,7 +93,7 @@ public Optional read(final long blockNumber) throws IOException { // Verify path attributes of the block directory within the // block node root path - @NonNull final Path blockPath = blockNodeRootPath.resolve(String.valueOf(blockNumber)); + final Path blockPath = blockNodeRootPath.resolve(String.valueOf(blockNumber)); if (isPathDisqualified(blockPath)) { return Optional.empty(); } @@ -102,34 +109,37 @@ public Optional read(final long blockNumber) throws IOException { // 10.blk), the loop will directly fetch the BlockItems in order based on // their file names. The loop will exit when it attempts to read a // BlockItem file that does not exist (e.g., 11.blk). 
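                // Illustrative layout this loop assumes, using a hypothetical root and block number 1:
                //   <blockNodeRootPath>/1/1.blk, <blockNodeRootPath>/1/2.blk, ... <blockNodeRootPath>/1/10.blk
                // Reading stops at the first index with no matching file (11.blk in this example).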
- @NonNull final Builder builder = Block.newBuilder(); + final Block.Builder builder = Block.newBuilder(); + final List blockItems = new ArrayList<>(); for (int i = 1; ; i++) { - @NonNull final Path blockItemPath = blockPath.resolve(i + BLOCK_FILE_EXTENSION); - @NonNull + final Path blockItemPath = blockPath.resolve(i + BLOCK_FILE_EXTENSION); final Optional blockItemOpt = readBlockItem(blockItemPath.toString()); if (blockItemOpt.isPresent()) { - builder.addBlockItems(blockItemOpt.get()); + blockItems.add(blockItemOpt.get()); continue; } break; } + builder.items(blockItems); + // Return the Block return Optional.of(builder.build()); } catch (IOException io) { - LOGGER.log(System.Logger.Level.ERROR, "Error reading block: " + blockPath, io); - + LOGGER.log(ERROR, "Error reading block: " + blockPath, io); throw io; } } @NonNull private Optional readBlockItem(@NonNull final String blockItemPath) - throws IOException { + throws IOException, ParseException { + + try (final FileInputStream fis = new FileInputStream(blockItemPath)) { - try (FileInputStream fis = new FileInputStream(blockItemPath)) { - return Optional.of(BlockItem.parseFrom(fis)); + BlockItem blockItem = BlockItem.PROTOBUF.parse(Bytes.wrap(fis.readAllBytes())); + return Optional.of(blockItem); } catch (FileNotFoundException io) { final File f = new File(blockItemPath); if (!f.exists()) { @@ -144,6 +154,9 @@ private Optional readBlockItem(@NonNull final String blockItemPath) // FileNotFound is also thrown when a file cannot be read. // So re-throw here to make a different decision upstream. throw io; + } catch (ParseException e) { + LOGGER.log(ERROR, "Error parsing block item: " + blockItemPath, e); + throw e; } } @@ -153,16 +166,13 @@ private boolean isPathDisqualified(@NonNull final Path path) { // This code path gets hit if a consumer // requests a block that does not exist. // Only log this as a debug message. 
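        // A disqualified path is not surfaced as an error to the caller:
        // read() simply answers with Optional.empty() whenever this method returns true.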
- LOGGER.log(System.Logger.Level.DEBUG, "Path not found: {0}", path); + LOGGER.log(DEBUG, "Path not found: {0}", path); return true; } if (!path.toFile().canRead()) { - LOGGER.log(System.Logger.Level.ERROR, "Path not readable: {0}", path); - LOGGER.log( - System.Logger.Level.ERROR, - "Attempting to repair the path permissions: {0}", - path); + LOGGER.log(ERROR, "Path not readable: {0}", path); + LOGGER.log(ERROR, "Attempting to repair the path permissions: {0}", path); try { // If resetting the permissions fails or @@ -172,14 +182,13 @@ private boolean isPathDisqualified(@NonNull final Path path) { return true; } } catch (IOException e) { - LOGGER.log( - System.Logger.Level.ERROR, "Error setting permissions on: {0}" + path, e); + LOGGER.log(ERROR, "Error setting permissions on: " + path, e); return true; } } if (!path.toFile().isDirectory()) { - LOGGER.log(System.Logger.Level.ERROR, "Path is not a directory: {0}", path); + LOGGER.log(ERROR, "Path is not a directory: {0}", path); return true; } diff --git a/server/src/main/java/com/hedera/block/server/persistence/storage/read/BlockAsDirReaderBuilder.java b/server/src/main/java/com/hedera/block/server/persistence/storage/read/BlockAsDirReaderBuilder.java index b61d9fe9..0ee7936d 100644 --- a/server/src/main/java/com/hedera/block/server/persistence/storage/read/BlockAsDirReaderBuilder.java +++ b/server/src/main/java/com/hedera/block/server/persistence/storage/read/BlockAsDirReaderBuilder.java @@ -16,10 +16,9 @@ package com.hedera.block.server.persistence.storage.read; -import static com.hedera.block.protos.BlockStreamService.Block; - import com.hedera.block.server.persistence.storage.FileUtils; import com.hedera.block.server.persistence.storage.PersistenceStorageConfig; +import com.hedera.hapi.block.stream.Block; import edu.umd.cs.findbugs.annotations.NonNull; import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.PosixFilePermission; diff --git a/server/src/main/java/com/hedera/block/server/persistence/storage/read/BlockReader.java b/server/src/main/java/com/hedera/block/server/persistence/storage/read/BlockReader.java index 9f3442b4..1a24bccd 100644 --- a/server/src/main/java/com/hedera/block/server/persistence/storage/read/BlockReader.java +++ b/server/src/main/java/com/hedera/block/server/persistence/storage/read/BlockReader.java @@ -16,6 +16,7 @@ package com.hedera.block.server.persistence.storage.read; +import com.hedera.pbj.runtime.ParseException; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.util.Optional; @@ -33,7 +34,10 @@ public interface BlockReader { * @param blockNumber the block number of the block to read * @return the block with the given block number * @throws IOException if an I/O error occurs fetching the block + * @throws ParseException if the PBJ codec cannot parse a stored BlockItem while fetching the + * block, for example because the underlying data is malformed or unreadable.
*/ @NonNull - Optional read(final long blockNumber) throws IOException; + Optional read(final long blockNumber) throws IOException, ParseException; } diff --git a/server/src/main/java/com/hedera/block/server/persistence/storage/remove/BlockAsDirRemover.java b/server/src/main/java/com/hedera/block/server/persistence/storage/remove/BlockAsDirRemover.java index 85d4c28f..3a108594 100644 --- a/server/src/main/java/com/hedera/block/server/persistence/storage/remove/BlockAsDirRemover.java +++ b/server/src/main/java/com/hedera/block/server/persistence/storage/remove/BlockAsDirRemover.java @@ -16,8 +16,10 @@ package com.hedera.block.server.persistence.storage.remove; +import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.ERROR; + import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; import java.io.File; import java.io.IOException; import java.nio.file.Files; @@ -32,7 +34,7 @@ */ public class BlockAsDirRemover implements BlockRemover { - private final System.Logger LOGGER = System.getLogger(getClass().getName()); + private final Logger LOGGER = System.getLogger(getClass().getName()); private final Path blockNodeRootPath; private final FileAttribute> filePerms; @@ -61,9 +63,9 @@ public void remove(final long id) throws IOException { // Calculate the block path and proactively set the permissions // for removal - @NonNull final Path blockPath = blockNodeRootPath.resolve(String.valueOf(id)); + final Path blockPath = blockNodeRootPath.resolve(String.valueOf(id)); if (Files.notExists(blockPath)) { - LOGGER.log(System.Logger.Level.ERROR, "Block does not exist: {0}", id); + LOGGER.log(ERROR, "Block does not exist: {0}", id); return; } @@ -71,7 +73,7 @@ public void remove(final long id) throws IOException { // Best effort to delete the block if (!delete(blockPath.toFile())) { - LOGGER.log(System.Logger.Level.ERROR, "Failed to delete block: {0}", id); + LOGGER.log(ERROR, "Failed to delete block: {0}", id); } } @@ -80,9 +82,9 @@ private static boolean delete(@NonNull final File file) { // Recursively delete the contents // of the directory if (file.isDirectory()) { - @Nullable final File[] files = file.listFiles(); + final File[] files = file.listFiles(); if (files != null) { - for (@NonNull final File f : files) { + for (final File f : files) { delete(f); } } diff --git a/server/src/main/java/com/hedera/block/server/persistence/storage/write/BlockAsDirWriter.java b/server/src/main/java/com/hedera/block/server/persistence/storage/write/BlockAsDirWriter.java index df59417e..fdb34642 100644 --- a/server/src/main/java/com/hedera/block/server/persistence/storage/write/BlockAsDirWriter.java +++ b/server/src/main/java/com/hedera/block/server/persistence/storage/write/BlockAsDirWriter.java @@ -16,14 +16,18 @@ package com.hedera.block.server.persistence.storage.write; -import static com.hedera.block.protos.BlockStreamService.BlockItem; import static com.hedera.block.server.Constants.BLOCK_FILE_EXTENSION; +import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.DEBUG; +import static java.lang.System.Logger.Level.ERROR; +import static java.lang.System.Logger.Level.INFO; import com.hedera.block.server.config.BlockNodeContext; import com.hedera.block.server.metrics.MetricsService; import com.hedera.block.server.persistence.storage.FileUtils; import com.hedera.block.server.persistence.storage.PersistenceStorageConfig; import com.hedera.block.server.persistence.storage.remove.BlockRemover; +import 
com.hedera.hapi.block.stream.BlockItem; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.FileOutputStream; import java.io.IOException; @@ -44,7 +48,7 @@ */ class BlockAsDirWriter implements BlockWriter { - private final System.Logger LOGGER = System.getLogger(getClass().getName()); + private final Logger LOGGER = System.getLogger(getClass().getName()); private final Path blockNodeRootPath; private long blockNodeFileNameIndex; @@ -67,20 +71,20 @@ class BlockAsDirWriter implements BlockWriter { @NonNull final BlockNodeContext blockNodeContext) throws IOException { - LOGGER.log(System.Logger.Level.INFO, "Initializing FileSystemBlockStorage"); + LOGGER.log(INFO, "Initializing FileSystemBlockStorage"); PersistenceStorageConfig config = blockNodeContext.configuration().getConfigData(PersistenceStorageConfig.class); final Path blockNodeRootPath = Path.of(config.rootPath()); - LOGGER.log(System.Logger.Level.INFO, "Block Node Root Path: " + blockNodeRootPath); + LOGGER.log(INFO, "Block Node Root Path: " + blockNodeRootPath); this.blockNodeRootPath = blockNodeRootPath; this.blockRemover = blockRemover; this.filePerms = filePerms; // Initialize the block node root directory if it does not exist - FileUtils.createPathIfNotExists(blockNodeRootPath, System.Logger.Level.INFO, filePerms); + FileUtils.createPathIfNotExists(blockNodeRootPath, INFO, filePerms); this.blockNodeContext = blockNodeContext; } @@ -94,21 +98,18 @@ class BlockAsDirWriter implements BlockWriter { @Override public void write(@NonNull final BlockItem blockItem) throws IOException { - if (blockItem.hasHeader()) { + if (blockItem.hasBlockHeader()) { resetState(blockItem); } - @NonNull final Path blockItemFilePath = calculateBlockItemPath(); + final Path blockItemFilePath = calculateBlockItemPath(); for (int retries = 0; ; retries++) { try { write(blockItemFilePath, blockItem); break; } catch (IOException e) { - LOGGER.log( - System.Logger.Level.ERROR, - "Error writing the BlockItem protobuf to a file: ", - e); + LOGGER.log(ERROR, "Error writing the BlockItem protobuf to a file: ", e); // Remove the block if repairing the permissions fails if (retries > 0) { @@ -120,9 +121,7 @@ public void write(@NonNull final BlockItem blockItem) throws IOException { // and the blockItem path repairPermissions(blockNodeRootPath); repairPermissions(calculateBlockPath()); - LOGGER.log( - System.Logger.Level.INFO, - "Retrying to write the BlockItem protobuf to a file"); + LOGGER.log(INFO, "Retrying to write the BlockItem protobuf to a file"); } } } @@ -137,18 +136,12 @@ public void write(@NonNull final BlockItem blockItem) throws IOException { */ protected void write(@NonNull final Path blockItemFilePath, @NonNull final BlockItem blockItem) throws IOException { - try (@NonNull - final FileOutputStream fos = new FileOutputStream(blockItemFilePath.toString())) { - blockItem.writeTo(fos); - LOGGER.log( - System.Logger.Level.DEBUG, - "Successfully wrote the block item file: {0}", - blockItemFilePath); + try (final FileOutputStream fos = new FileOutputStream(blockItemFilePath.toString())) { + + BlockItem.PROTOBUF.toBytes(blockItem).writeTo(fos); + LOGGER.log(DEBUG, "Successfully wrote the block item file: {0}", blockItemFilePath); } catch (IOException e) { - LOGGER.log( - System.Logger.Level.ERROR, - "Error writing the BlockItem protobuf to a file: ", - e); + LOGGER.log(ERROR, "Error writing the BlockItem protobuf to a file: ", e); throw e; } } @@ -156,20 +149,20 @@ protected void write(@NonNull final Path blockItemFilePath, @NonNull final Block 
private void resetState(@NonNull final BlockItem blockItem) throws IOException { // Here a "block" is represented as a directory of BlockItems. // Create the "block" directory based on the block_number - currentBlockDir = Path.of(String.valueOf(blockItem.getHeader().getBlockNumber())); + currentBlockDir = Path.of(String.valueOf(blockItem.blockHeader().number())); // Check the blockNodeRootPath permissions and // attempt to repair them if possible repairPermissions(blockNodeRootPath); // Construct the path to the block directory - FileUtils.createPathIfNotExists(calculateBlockPath(), System.Logger.Level.DEBUG, filePerms); + FileUtils.createPathIfNotExists(calculateBlockPath(), DEBUG, filePerms); // Reset blockNodeFileNameIndex = 0; // Increment the block counter - @NonNull final MetricsService metricsService = blockNodeContext.metricsService(); + final MetricsService metricsService = blockNodeContext.metricsService(); metricsService.blocksPersisted.increment(); } @@ -177,7 +170,7 @@ private void repairPermissions(@NonNull final Path path) throws IOException { final boolean isWritable = Files.isWritable(path); if (!isWritable) { LOGGER.log( - System.Logger.Level.ERROR, + ERROR, "Block node root directory is not writable. Attempting to change the" + " permissions."); @@ -185,10 +178,7 @@ private void repairPermissions(@NonNull final Path path) throws IOException { // Attempt to restore the permissions on the block node root directory Files.setPosixFilePermissions(path, filePerms.value()); } catch (IOException e) { - LOGGER.log( - System.Logger.Level.ERROR, - "Error setting permissions on the path: " + path, - e); + LOGGER.log(ERROR, "Error setting permissions on the path: " + path, e); throw e; } } @@ -197,7 +187,7 @@ private void repairPermissions(@NonNull final Path path) throws IOException { @NonNull private Path calculateBlockItemPath() { // Build the path to a .blk file - @NonNull final Path blockPath = calculateBlockPath(); + final Path blockPath = calculateBlockPath(); blockNodeFileNameIndex++; return blockPath.resolve(blockNodeFileNameIndex + BLOCK_FILE_EXTENSION); } diff --git a/server/src/main/java/com/hedera/block/server/persistence/storage/write/BlockAsDirWriterBuilder.java b/server/src/main/java/com/hedera/block/server/persistence/storage/write/BlockAsDirWriterBuilder.java index f0f1e8fe..f509f2a9 100644 --- a/server/src/main/java/com/hedera/block/server/persistence/storage/write/BlockAsDirWriterBuilder.java +++ b/server/src/main/java/com/hedera/block/server/persistence/storage/write/BlockAsDirWriterBuilder.java @@ -16,13 +16,12 @@ package com.hedera.block.server.persistence.storage.write; -import static com.hedera.block.protos.BlockStreamService.BlockItem; - import com.hedera.block.server.config.BlockNodeContext; import com.hedera.block.server.persistence.storage.FileUtils; import com.hedera.block.server.persistence.storage.PersistenceStorageConfig; import com.hedera.block.server.persistence.storage.remove.BlockAsDirRemover; import com.hedera.block.server.persistence.storage.remove.BlockRemover; +import com.hedera.hapi.block.stream.BlockItem; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.nio.file.Path; diff --git a/server/src/main/java/com/hedera/block/server/producer/ItemAckBuilder.java b/server/src/main/java/com/hedera/block/server/producer/ItemAckBuilder.java deleted file mode 100644 index 2e34dfd3..00000000 --- a/server/src/main/java/com/hedera/block/server/producer/ItemAckBuilder.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 
(C) 2024 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.block.server.producer; - -import static com.hedera.block.protos.BlockStreamService.BlockItem; -import static com.hedera.block.protos.BlockStreamService.PublishStreamResponse.ItemAcknowledgement; -import static com.hedera.block.server.producer.Util.getFakeHash; - -import com.google.protobuf.ByteString; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.io.IOException; -import java.security.NoSuchAlgorithmException; - -/** - * The ItemAckBuilder class defines a simple item acknowledgement builder used to create an - * acknowledgement type response. This is a placeholder and should be replaced with real hash - * functionality once the hedera-protobufs types are integrated. - */ -public class ItemAckBuilder { - - /** Constructor for the ItemAckBuilder class. */ - public ItemAckBuilder() {} - - /** - * Builds an item acknowledgement for the given block item. - * - * @param blockItem the block item to build the acknowledgement for - * @return the item acknowledgement for the given block item - * @throws IOException thrown if an I/O error occurs while building the acknowledgement - * @throws NoSuchAlgorithmException thrown if the SHA-384 algorithm is not available - */ - @NonNull - public ItemAcknowledgement buildAck(@NonNull final BlockItem blockItem) - throws IOException, NoSuchAlgorithmException { - // TODO: Use real hash and real hedera-protobufs types - return ItemAcknowledgement.newBuilder() - .setItemAck(ByteString.copyFrom(getFakeHash(blockItem))) - .build(); - } -} diff --git a/server/src/main/java/com/hedera/block/server/producer/ProducerBlockItemObserver.java b/server/src/main/java/com/hedera/block/server/producer/ProducerBlockItemObserver.java index 19911080..d8bb5fb3 100644 --- a/server/src/main/java/com/hedera/block/server/producer/ProducerBlockItemObserver.java +++ b/server/src/main/java/com/hedera/block/server/producer/ProducerBlockItemObserver.java @@ -16,11 +16,23 @@ package com.hedera.block.server.producer; -import static com.hedera.block.protos.BlockStreamService.*; -import static com.hedera.block.protos.BlockStreamService.PublishStreamResponse.*; +import static com.hedera.block.server.Translator.fromPbj; +import static com.hedera.block.server.Translator.toPbj; +import static com.hedera.block.server.producer.Util.getFakeHash; +import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.DEBUG; +import static java.lang.System.Logger.Level.ERROR; import com.hedera.block.server.ServiceStatus; import com.hedera.block.server.mediator.Publisher; +import com.hedera.hapi.block.Acknowledgement; +import com.hedera.hapi.block.EndOfStream; +import com.hedera.hapi.block.ItemAcknowledgement; +import com.hedera.hapi.block.PublishStreamResponse; +import com.hedera.hapi.block.PublishStreamResponseCode; +import com.hedera.hapi.block.stream.BlockItem; +import com.hedera.pbj.runtime.ParseException; +import com.hedera.pbj.runtime.io.buffer.Bytes; 
import edu.umd.cs.findbugs.annotations.NonNull; import io.grpc.stub.StreamObserver; import java.io.IOException; @@ -32,13 +44,14 @@ * connection to the upstream producer (e.g. block items streamed from the Consensus Node to the * server). */ -public class ProducerBlockItemObserver implements StreamObserver { +public class ProducerBlockItemObserver + implements StreamObserver { - private final System.Logger LOGGER = System.getLogger(getClass().getName()); + private final Logger LOGGER = System.getLogger(getClass().getName()); - private final StreamObserver publishStreamResponseObserver; + private final StreamObserver + publishStreamResponseObserver; private final Publisher publisher; - private final ItemAckBuilder itemAckBuilder; private final ServiceStatus serviceStatus; /** @@ -50,21 +63,19 @@ public class ProducerBlockItemObserver implements StreamObserver publisher, - @NonNull final StreamObserver publishStreamResponseObserver, - @NonNull final ItemAckBuilder itemAckBuilder, + @NonNull + final StreamObserver + publishStreamResponseObserver, @NonNull final ServiceStatus serviceStatus) { this.publisher = publisher; this.publishStreamResponseObserver = publishStreamResponseObserver; - this.itemAckBuilder = itemAckBuilder; this.serviceStatus = serviceStatus; } @@ -76,11 +87,14 @@ public ProducerBlockItemObserver( * @param publishStreamRequest the PublishStreamRequest received from the upstream producer */ @Override - public void onNext(@NonNull final PublishStreamRequest publishStreamRequest) { - - @NonNull final BlockItem blockItem = publishStreamRequest.getBlockItem(); + public void onNext( + @NonNull final com.hedera.hapi.block.protoc.PublishStreamRequest publishStreamRequest) { try { + + final BlockItem blockItem = + toPbj(BlockItem.PROTOBUF, publishStreamRequest.getBlockItem().toByteArray()); + // Publish the block to all the subscribers unless // there's an issue with the StreamMediator. 
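            // Note: blockItem is already the PBJ type here; it was translated from the protoc
            // request above via toPbj(), and every response built below is translated back with
            // fromPbj() before it reaches publishStreamResponseObserver.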
if (serviceStatus.isRunning()) { @@ -93,43 +107,69 @@ public void onNext(@NonNull final PublishStreamRequest publishStreamRequest) { publishStreamResponseObserver.onNext(buildSuccessStreamResponse(blockItem)); } catch (IOException | NoSuchAlgorithmException e) { - @NonNull final var errorResponse = buildErrorStreamResponse(); + final var errorResponse = buildErrorStreamResponse(); publishStreamResponseObserver.onNext(errorResponse); - LOGGER.log(System.Logger.Level.ERROR, "Error calculating hash: ", e); + LOGGER.log(ERROR, "Error calculating hash: ", e); } } else { // Close the upstream connection to the producer(s) - @NonNull final var errorResponse = buildErrorStreamResponse(); + final var errorResponse = buildErrorStreamResponse(); publishStreamResponseObserver.onNext(errorResponse); - LOGGER.log(System.Logger.Level.DEBUG, "StreamMediator is not accepting BlockItems"); + LOGGER.log(DEBUG, "StreamMediator is not accepting BlockItems"); } } catch (IOException io) { - @NonNull final var errorResponse = buildErrorStreamResponse(); + final var errorResponse = buildErrorStreamResponse(); publishStreamResponseObserver.onNext(errorResponse); - LOGGER.log(System.Logger.Level.ERROR, "Exception thrown publishing BlockItem: ", io); - - LOGGER.log(System.Logger.Level.ERROR, "Shutting down the web server"); + LOGGER.log(ERROR, "Exception thrown publishing BlockItem: ", io); + LOGGER.log(ERROR, "Shutting down the web server"); + serviceStatus.stopWebServer(); + } catch (ParseException e) { + final var errorResponse = buildErrorStreamResponse(); + publishStreamResponseObserver.onNext(errorResponse); + LOGGER.log( + ERROR, + "Error parsing inbound block item from a producer: " + + publishStreamRequest.getBlockItem(), + e); serviceStatus.stopWebServer(); } } @NonNull - private PublishStreamResponse buildSuccessStreamResponse(@NonNull final BlockItem blockItem) - throws IOException, NoSuchAlgorithmException { - @NonNull final ItemAcknowledgement itemAck = itemAckBuilder.buildAck(blockItem); - return PublishStreamResponse.newBuilder().setAcknowledgement(itemAck).build(); + private com.hedera.hapi.block.protoc.PublishStreamResponse buildSuccessStreamResponse( + @NonNull final BlockItem blockItem) throws IOException, NoSuchAlgorithmException { + final Acknowledgement ack = buildAck(blockItem); + return fromPbj(PublishStreamResponse.newBuilder().acknowledgement(ack).build()); } @NonNull - private static PublishStreamResponse buildErrorStreamResponse() { + private static com.hedera.hapi.block.protoc.PublishStreamResponse buildErrorStreamResponse() { // TODO: Replace this with a real error enum. - @NonNull final EndOfStream endOfStream = EndOfStream.newBuilder() - .setStatus(PublishStreamResponseCode.STREAM_ITEMS_UNKNOWN) + .status(PublishStreamResponseCode.STREAM_ITEMS_UNKNOWN) .build(); - return PublishStreamResponse.newBuilder().setStatus(endOfStream).build(); + return fromPbj(PublishStreamResponse.newBuilder().status(endOfStream).build()); + } + + /** + * Protected method meant for testing. Builds an Acknowledgement for the block item. 
+ * + * @param blockItem the block item to build the Acknowledgement for + * @return the Acknowledgement for the block item + * @throws NoSuchAlgorithmException if the hash algorithm is not supported + */ + @NonNull + protected Acknowledgement buildAck(@NonNull final BlockItem blockItem) + throws NoSuchAlgorithmException { + final ItemAcknowledgement itemAck = + ItemAcknowledgement.newBuilder() + // TODO: Replace this with a real hash generator + .itemHash(Bytes.wrap(getFakeHash(blockItem))) + .build(); + + return Acknowledgement.newBuilder().itemAck(itemAck).build(); } /** @@ -140,7 +180,7 @@ private static PublishStreamResponse buildErrorStreamResponse() { */ @Override public void onError(@NonNull final Throwable t) { - LOGGER.log(System.Logger.Level.ERROR, "onError method invoked with an exception: ", t); + LOGGER.log(ERROR, "onError method invoked with an exception: ", t); publishStreamResponseObserver.onError(t); } @@ -150,7 +190,7 @@ public void onError(@NonNull final Throwable t) { */ @Override public void onCompleted() { - LOGGER.log(System.Logger.Level.DEBUG, "ProducerBlockStreamObserver completed"); + LOGGER.log(DEBUG, "ProducerBlockStreamObserver completed"); publishStreamResponseObserver.onCompleted(); } } diff --git a/server/src/main/java/com/hedera/block/server/producer/Util.java b/server/src/main/java/com/hedera/block/server/producer/Util.java index 2ca68559..144ffa1e 100644 --- a/server/src/main/java/com/hedera/block/server/producer/Util.java +++ b/server/src/main/java/com/hedera/block/server/producer/Util.java @@ -16,11 +16,8 @@ package com.hedera.block.server.producer; -import static com.hedera.block.protos.BlockStreamService.BlockItem; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.ObjectOutputStream; +import com.hedera.hapi.block.stream.BlockItem; +import edu.umd.cs.findbugs.annotations.NonNull; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; @@ -34,23 +31,12 @@ private Util() {} * * @param blockItem the block item to get the fake hash for * @return the fake hash for the given block item - * @throws IOException thrown if an I/O error occurs while getting the fake hash * @throws NoSuchAlgorithmException thrown if the SHA-384 algorithm is not available */ - public static byte[] getFakeHash(BlockItem blockItem) - throws IOException, NoSuchAlgorithmException { - - try (final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); - final ObjectOutputStream objectOutputStream = - new ObjectOutputStream(byteArrayOutputStream)) { - objectOutputStream.writeObject(blockItem); - - // Get the serialized bytes - byte[] serializedObject = byteArrayOutputStream.toByteArray(); - - // Calculate the SHA-384 hash - MessageDigest digest = MessageDigest.getInstance("SHA-384"); - return digest.digest(serializedObject); - } + public static byte[] getFakeHash(@NonNull final BlockItem blockItem) + throws NoSuchAlgorithmException { + // Calculate the SHA-384 hash + MessageDigest digest = MessageDigest.getInstance("SHA-384"); + return digest.digest(BlockItem.PROTOBUF.toBytes(blockItem).toByteArray()); } } diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 379ccf9b..6fdbf821 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -2,11 +2,19 @@ /** Runtime module of the server. 
*/ module com.hedera.block.server { + exports com.hedera.block.server; exports com.hedera.block.server.consumer; exports com.hedera.block.server.persistence.storage; + exports com.hedera.block.server.persistence.storage.write; + exports com.hedera.block.server.persistence.storage.read; + exports com.hedera.block.server.persistence.storage.remove; + exports com.hedera.block.server.config; + exports com.hedera.block.server.mediator; + exports com.hedera.block.server.data; - requires com.hedera.block.protos; + requires com.hedera.block.stream; requires com.google.protobuf; + requires com.hedera.pbj.runtime; requires com.lmax.disruptor; requires com.swirlds.common; requires com.swirlds.config.api; diff --git a/server/src/test/java/com/hedera/block/server/BlockStreamServiceIT.java b/server/src/test/java/com/hedera/block/server/BlockStreamServiceIntegrationTest.java similarity index 66% rename from server/src/test/java/com/hedera/block/server/BlockStreamServiceIT.java rename to server/src/test/java/com/hedera/block/server/BlockStreamServiceIntegrationTest.java index 0fddbe65..afe27cd5 100644 --- a/server/src/test/java/com/hedera/block/server/BlockStreamServiceIT.java +++ b/server/src/test/java/com/hedera/block/server/BlockStreamServiceIntegrationTest.java @@ -16,11 +16,17 @@ package com.hedera.block.server; -import static com.hedera.block.protos.BlockStreamService.*; -import static com.hedera.block.protos.BlockStreamService.PublishStreamResponse.*; +import static com.hedera.block.server.Translator.fromPbj; +import static com.hedera.block.server.producer.Util.getFakeHash; import static com.hedera.block.server.util.PersistTestUtils.generateBlockItems; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.INFO; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import com.hedera.block.server.config.BlockNodeContext; import com.hedera.block.server.data.ObjectEvent; @@ -34,11 +40,26 @@ import com.hedera.block.server.persistence.storage.remove.BlockRemover; import com.hedera.block.server.persistence.storage.write.BlockAsDirWriterBuilder; import com.hedera.block.server.persistence.storage.write.BlockWriter; -import com.hedera.block.server.producer.ItemAckBuilder; import com.hedera.block.server.util.TestConfigUtil; import com.hedera.block.server.util.TestUtils; +import com.hedera.hapi.block.Acknowledgement; +import com.hedera.hapi.block.EndOfStream; +import com.hedera.hapi.block.ItemAcknowledgement; +import com.hedera.hapi.block.PublishStreamRequest; +import com.hedera.hapi.block.PublishStreamResponse; +import com.hedera.hapi.block.PublishStreamResponseCode; +import com.hedera.hapi.block.SingleBlockResponse; +import com.hedera.hapi.block.SingleBlockResponseCode; +import com.hedera.hapi.block.SubscribeStreamRequest; +import com.hedera.hapi.block.SubscribeStreamResponse; +import com.hedera.hapi.block.SubscribeStreamResponseCode; +import com.hedera.hapi.block.stream.Block; +import com.hedera.hapi.block.stream.BlockItem; +import com.hedera.pbj.runtime.ParseException; +import com.hedera.pbj.runtime.io.buffer.Bytes; import com.lmax.disruptor.BatchEventProcessor; import com.lmax.disruptor.EventHandler; +import 
edu.umd.cs.findbugs.annotations.NonNull; import io.grpc.stub.StreamObserver; import io.helidon.webserver.WebServer; import java.io.IOException; @@ -47,7 +68,11 @@ import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.PosixFilePermission; import java.security.NoSuchAlgorithmException; -import java.util.*; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; @@ -57,24 +82,45 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -public class BlockStreamServiceIT { +public class BlockStreamServiceIntegrationTest { - private final System.Logger LOGGER = System.getLogger(getClass().getName()); + private final Logger LOGGER = System.getLogger(getClass().getName()); @Mock private StreamMediator> streamMediator; - @Mock private StreamObserver publishStreamResponseObserver; - @Mock private StreamObserver singleBlockResponseStreamObserver; + @Mock + private StreamObserver + publishStreamResponseObserver; - @Mock private SubscribeStreamRequest subscribeStreamRequest; + @Mock + private StreamObserver + singleBlockResponseStreamObserver; - @Mock private StreamObserver subscribeStreamObserver1; - @Mock private StreamObserver subscribeStreamObserver2; - @Mock private StreamObserver subscribeStreamObserver3; + @Mock private com.hedera.hapi.block.protoc.SubscribeStreamRequest subscribeStreamRequest; - @Mock private StreamObserver subscribeStreamObserver4; - @Mock private StreamObserver subscribeStreamObserver5; - @Mock private StreamObserver subscribeStreamObserver6; + @Mock + private StreamObserver + subscribeStreamObserver1; + + @Mock + private StreamObserver + subscribeStreamObserver2; + + @Mock + private StreamObserver + subscribeStreamObserver3; + + @Mock + private StreamObserver + subscribeStreamObserver4; + + @Mock + private StreamObserver + subscribeStreamObserver5; + + @Mock + private StreamObserver + subscribeStreamObserver6; @Mock private WebServer webServer; @Mock private ServiceStatus serviceStatus; @@ -93,7 +139,7 @@ public class BlockStreamServiceIT { @BeforeEach public void setUp() throws IOException { testPath = Files.createTempDirectory(TEMP_DIR); - LOGGER.log(System.Logger.Level.INFO, "Created temp directory: " + testPath.toString()); + LOGGER.log(INFO, "Created temp directory: " + testPath.toString()); blockNodeContext = TestConfigUtil.getTestBlockNodeContext( @@ -112,28 +158,24 @@ public void testPublishBlockStreamRegistrationAndExecution() final BlockStreamService blockStreamService = new BlockStreamService( - new ItemAckBuilder(), - streamMediator, - blockReader, - serviceStatus, - blockNodeContext); + streamMediator, blockReader, serviceStatus, blockNodeContext); // Enable the serviceStatus when(serviceStatus.isRunning()).thenReturn(true); - final StreamObserver streamObserver = - blockStreamService.publishBlockStream(publishStreamResponseObserver); + final StreamObserver streamObserver = + blockStreamService.protocPublishBlockStream(publishStreamResponseObserver); final BlockItem blockItem = generateBlockItems(1).getFirst(); final PublishStreamRequest publishStreamRequest = - PublishStreamRequest.newBuilder().setBlockItem(blockItem).build(); + PublishStreamRequest.newBuilder().blockItem(blockItem).build(); // Calling onNext() as Helidon will - streamObserver.onNext(publishStreamRequest); + 
streamObserver.onNext(fromPbj(publishStreamRequest)); - final ItemAcknowledgement itemAck = new ItemAckBuilder().buildAck(blockItem); + final Acknowledgement itemAck = buildAck(blockItem); final PublishStreamResponse publishStreamResponse = - PublishStreamResponse.newBuilder().setAcknowledgement(itemAck).build(); + PublishStreamResponse.newBuilder().acknowledgement(itemAck).build(); // Verify the BlockItem message is sent to the mediator verify(streamMediator, timeout(testTimeout).times(1)).publish(blockItem); @@ -141,7 +183,7 @@ public void testPublishBlockStreamRegistrationAndExecution() // Verify our custom StreamObserver implementation builds and sends // a response back to the producer verify(publishStreamResponseObserver, timeout(testTimeout).times(1)) - .onNext(publishStreamResponse); + .onNext(fromPbj(publishStreamResponse)); // Close the stream as Helidon does streamObserver.onCompleted(); @@ -165,28 +207,27 @@ public void testSubscribeBlockStream() throws IOException { // Build the BlockStreamService final BlockStreamService blockStreamService = new BlockStreamService( - new ItemAckBuilder(), - streamMediator, - blockReader, - serviceStatus, - blockNodeContext); + streamMediator, blockReader, serviceStatus, blockNodeContext); // Subscribe the consumers - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver1); - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver2); - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver3); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver1); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver2); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver3); // Subscribe the producer - final StreamObserver streamObserver = - blockStreamService.publishBlockStream(publishStreamResponseObserver); + final StreamObserver streamObserver = + blockStreamService.protocPublishBlockStream(publishStreamResponseObserver); // Build the BlockItem final List blockItems = generateBlockItems(1); final PublishStreamRequest publishStreamRequest = - PublishStreamRequest.newBuilder().setBlockItem(blockItems.getFirst()).build(); + PublishStreamRequest.newBuilder().blockItem(blockItems.getFirst()).build(); // Calling onNext() with a BlockItem - streamObserver.onNext(publishStreamRequest); + streamObserver.onNext(fromPbj(publishStreamRequest)); // Verify the counter was incremented assertEquals(1, blockNodeContext.metricsService().liveBlockItems.get()); @@ -194,14 +235,14 @@ public void testSubscribeBlockStream() throws IOException { verify(blockWriter, timeout(testTimeout).times(1)).write(blockItems.getFirst()); final SubscribeStreamResponse subscribeStreamResponse = - SubscribeStreamResponse.newBuilder().setBlockItem(blockItems.getFirst()).build(); + SubscribeStreamResponse.newBuilder().blockItem(blockItems.getFirst()).build(); verify(subscribeStreamObserver1, timeout(testTimeout).times(1)) - .onNext(subscribeStreamResponse); + .onNext(fromPbj(subscribeStreamResponse)); verify(subscribeStreamObserver2, timeout(testTimeout).times(1)) - .onNext(subscribeStreamResponse); + .onNext(fromPbj(subscribeStreamResponse)); verify(subscribeStreamObserver3, timeout(testTimeout).times(1)) - .onNext(subscribeStreamResponse); + .onNext(fromPbj(subscribeStreamResponse)); } @Test @@ -214,19 +255,22 @@ public void testFullHappyPath() throws IOException 
{ when(serviceStatus.isRunning()).thenReturn(true); // Pass a StreamObserver to the producer as Helidon does - final StreamObserver streamObserver = - blockStreamService.publishBlockStream(publishStreamResponseObserver); + final StreamObserver streamObserver = + blockStreamService.protocPublishBlockStream(publishStreamResponseObserver); final List blockItems = generateBlockItems(numberOfBlocks); - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver1); - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver2); - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver3); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver1); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver2); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver3); for (BlockItem blockItem : blockItems) { final PublishStreamRequest publishStreamRequest = - PublishStreamRequest.newBuilder().setBlockItem(blockItem).build(); - streamObserver.onNext(publishStreamRequest); + PublishStreamRequest.newBuilder().blockItem(blockItem).build(); + streamObserver.onNext(fromPbj(publishStreamRequest)); } verifySubscribeStreamResponse( @@ -250,38 +294,41 @@ public void testFullWithSubscribersAddedDynamically() throws IOException { when(serviceStatus.isRunning()).thenReturn(true); // Pass a StreamObserver to the producer as Helidon does - final StreamObserver streamObserver = - blockStreamService.publishBlockStream(publishStreamResponseObserver); + final StreamObserver streamObserver = + blockStreamService.protocPublishBlockStream(publishStreamResponseObserver); final List blockItems = generateBlockItems(numberOfBlocks); // Subscribe the initial consumers - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver1); - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver2); - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver3); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver1); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver2); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver3); for (int i = 0; i < blockItems.size(); i++) { final PublishStreamRequest publishStreamRequest = - PublishStreamRequest.newBuilder().setBlockItem(blockItems.get(i)).build(); + PublishStreamRequest.newBuilder().blockItem(blockItems.get(i)).build(); // Add a new subscriber if (i == 51) { - blockStreamService.subscribeBlockStream( + blockStreamService.protocSubscribeBlockStream( subscribeStreamRequest, subscribeStreamObserver4); } // Transmit the BlockItem - streamObserver.onNext(publishStreamRequest); + streamObserver.onNext(fromPbj(publishStreamRequest)); // Add a new subscriber if (i == 76) { - blockStreamService.subscribeBlockStream( + blockStreamService.protocSubscribeBlockStream( subscribeStreamRequest, subscribeStreamObserver5); } // Add a new subscriber if (i == 88) { - blockStreamService.subscribeBlockStream( + blockStreamService.protocSubscribeBlockStream( subscribeStreamRequest, subscribeStreamObserver6); } } @@ -326,18 +373,21 @@ public void testSubAndUnsubWhileStreaming() throws IOException { when(serviceStatus.isRunning()).thenReturn(true); // Pass a StreamObserver to 
the producer as Helidon does - final StreamObserver streamObserver = - blockStreamService.publishBlockStream(publishStreamResponseObserver); + final StreamObserver streamObserver = + blockStreamService.protocPublishBlockStream(publishStreamResponseObserver); final List blockItems = generateBlockItems(numberOfBlocks); - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver1); - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver2); - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver3); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver1); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver2); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver3); for (int i = 0; i < blockItems.size(); i++) { final PublishStreamRequest publishStreamRequest = - PublishStreamRequest.newBuilder().setBlockItem(blockItems.get(i)).build(); + PublishStreamRequest.newBuilder().blockItem(blockItems.get(i)).build(); // Remove a subscriber if (i == 10) { @@ -352,12 +402,12 @@ public void testSubAndUnsubWhileStreaming() throws IOException { // Add a new subscriber if (i == 51) { - blockStreamService.subscribeBlockStream( + blockStreamService.protocSubscribeBlockStream( subscribeStreamRequest, subscribeStreamObserver4); } // Transmit the BlockItem - streamObserver.onNext(publishStreamRequest); + streamObserver.onNext(fromPbj(publishStreamRequest)); if (i == 70) { final var k = subscribers.firstEntry().getKey(); @@ -366,13 +416,13 @@ public void testSubAndUnsubWhileStreaming() throws IOException { // Add a new subscriber if (i == 76) { - blockStreamService.subscribeBlockStream( + blockStreamService.protocSubscribeBlockStream( subscribeStreamRequest, subscribeStreamObserver5); } // Add a new subscriber if (i == 88) { - blockStreamService.subscribeBlockStream( + blockStreamService.protocSubscribeBlockStream( subscribeStreamRequest, subscribeStreamObserver6); } } @@ -398,7 +448,8 @@ public void testSubAndUnsubWhileStreaming() throws IOException { } @Test - public void testMediatorExceptionHandlingWhenPersistenceFailure() throws IOException { + public void testMediatorExceptionHandlingWhenPersistenceFailure() + throws IOException, ParseException { final ConcurrentHashMap< EventHandler>, BatchEventProcessor>> @@ -416,13 +467,16 @@ public void testMediatorExceptionHandlingWhenPersistenceFailure() throws IOExcep buildBlockStreamService(streamMediator, blockReader, serviceStatus); // Subscribe the consumers - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver1); - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver2); - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver3); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver1); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver2); + blockStreamService.protocSubscribeBlockStream( + subscribeStreamRequest, subscribeStreamObserver3); // Initialize the producer - final StreamObserver streamObserver = - blockStreamService.publishBlockStream(publishStreamResponseObserver); + final StreamObserver streamObserver = + blockStreamService.protocPublishBlockStream(publishStreamResponseObserver); // Change the permissions on the file system to 
trigger an // IOException when the BlockPersistenceHandler tries to write @@ -432,49 +486,55 @@ public void testMediatorExceptionHandlingWhenPersistenceFailure() throws IOExcep // Transmit a BlockItem final List blockItems = generateBlockItems(1); final PublishStreamRequest publishStreamRequest = - PublishStreamRequest.newBuilder().setBlockItem(blockItems.getFirst()).build(); - streamObserver.onNext(publishStreamRequest); + PublishStreamRequest.newBuilder().blockItem(blockItems.getFirst()).build(); + streamObserver.onNext(fromPbj(publishStreamRequest)); // Simulate another producer attempting to connect to the Block Node after the exception. // Later, verify they received a response indicating the stream is closed. - final StreamObserver expectedNoOpStreamObserver = - blockStreamService.publishBlockStream(publishStreamResponseObserver); - expectedNoOpStreamObserver.onNext(publishStreamRequest); + final StreamObserver + expectedNoOpStreamObserver = + blockStreamService.protocPublishBlockStream(publishStreamResponseObserver); + expectedNoOpStreamObserver.onNext(fromPbj(publishStreamRequest)); // Build a request to invoke the singleBlock service - final SingleBlockRequest singleBlockRequest = - SingleBlockRequest.newBuilder().setBlockNumber(1).build(); + final com.hedera.hapi.block.protoc.SingleBlockRequest singleBlockRequest = + com.hedera.hapi.block.protoc.SingleBlockRequest.newBuilder() + .setBlockNumber(1) + .build(); + // Simulate a consumer attempting to connect to the Block Node after the exception. - blockStreamService.singleBlock(singleBlockRequest, singleBlockResponseStreamObserver); + blockStreamService.protocSingleBlock(singleBlockRequest, singleBlockResponseStreamObserver); // Build a request to invoke the subscribeBlockStream service final SubscribeStreamRequest subscribeStreamRequest = - SubscribeStreamRequest.newBuilder().setStartBlockNumber(1).build(); + SubscribeStreamRequest.newBuilder().startBlockNumber(1).build(); // Simulate a consumer attempting to connect to the Block Node after the exception. - blockStreamService.subscribeBlockStream(subscribeStreamRequest, subscribeStreamObserver4); + blockStreamService.protocSubscribeBlockStream( + fromPbj(subscribeStreamRequest), subscribeStreamObserver4); // The BlockItem passed through since it was published // before the IOException was thrown. 
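        // Each of the three originally subscribed observers should therefore see exactly one
        // onNext(...) with this item before the end-of-stream response is sent.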
final SubscribeStreamResponse subscribeStreamResponse = - SubscribeStreamResponse.newBuilder().setBlockItem(blockItems.getFirst()).build(); + SubscribeStreamResponse.newBuilder().blockItem(blockItems.getFirst()).build(); verify(subscribeStreamObserver1, timeout(testTimeout).times(1)) - .onNext(subscribeStreamResponse); + .onNext(fromPbj(subscribeStreamResponse)); verify(subscribeStreamObserver2, timeout(testTimeout).times(1)) - .onNext(subscribeStreamResponse); + .onNext(fromPbj(subscribeStreamResponse)); verify(subscribeStreamObserver3, timeout(testTimeout).times(1)) - .onNext(subscribeStreamResponse); + .onNext(fromPbj(subscribeStreamResponse)); // Verify all the consumers received the end of stream response // TODO: Fix the response code when it's available final SubscribeStreamResponse endStreamResponse = SubscribeStreamResponse.newBuilder() - .setStatus( - SubscribeStreamResponse.SubscribeStreamResponseCode - .READ_STREAM_SUCCESS) + .status(SubscribeStreamResponseCode.READ_STREAM_SUCCESS) .build(); - verify(subscribeStreamObserver1, timeout(testTimeout).times(1)).onNext(endStreamResponse); - verify(subscribeStreamObserver2, timeout(testTimeout).times(1)).onNext(endStreamResponse); - verify(subscribeStreamObserver3, timeout(testTimeout).times(1)).onNext(endStreamResponse); + verify(subscribeStreamObserver1, timeout(testTimeout).times(1)) + .onNext(fromPbj(endStreamResponse)); + verify(subscribeStreamObserver2, timeout(testTimeout).times(1)) + .onNext(fromPbj(endStreamResponse)); + verify(subscribeStreamObserver3, timeout(testTimeout).times(1)) + .onNext(fromPbj(endStreamResponse)); // Verify all the consumers were unsubscribed for (final var s : subscribers.keySet()) { @@ -485,12 +545,12 @@ public void testMediatorExceptionHandlingWhenPersistenceFailure() throws IOExcep // error code indicating the service is not available. final EndOfStream endOfStream = EndOfStream.newBuilder() - .setStatus(PublishStreamResponseCode.STREAM_ITEMS_UNKNOWN) + .status(PublishStreamResponseCode.STREAM_ITEMS_UNKNOWN) .build(); final var endOfStreamResponse = - PublishStreamResponse.newBuilder().setStatus(endOfStream).build(); + PublishStreamResponse.newBuilder().status(endOfStream).build(); verify(publishStreamResponseObserver, timeout(testTimeout).times(2)) - .onNext(endOfStreamResponse); + .onNext(fromPbj(endOfStreamResponse)); verify(webServer, timeout(testTimeout).times(1)).stop(); // Now verify the block was removed from the file system. @@ -503,22 +563,19 @@ public void testMediatorExceptionHandlingWhenPersistenceFailure() throws IOExcep // error code indicating the service is not available. 
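        // After the persistence failure shut the service down, the later protocSingleBlock call
        // is expected to be answered with READ_BLOCK_NOT_AVAILABLE rather than a block payload.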
final SingleBlockResponse expectedSingleBlockNotAvailable = SingleBlockResponse.newBuilder() - .setStatus( - SingleBlockResponse.SingleBlockResponseCode - .READ_BLOCK_NOT_AVAILABLE) + .status(SingleBlockResponseCode.READ_BLOCK_NOT_AVAILABLE) .build(); + verify(singleBlockResponseStreamObserver, timeout(testTimeout).times(1)) - .onNext(expectedSingleBlockNotAvailable); + .onNext(fromPbj(expectedSingleBlockNotAvailable)); // TODO: Fix the response code when it's available final SubscribeStreamResponse expectedSubscriberStreamNotAvailable = SubscribeStreamResponse.newBuilder() - .setStatus( - SubscribeStreamResponse.SubscribeStreamResponseCode - .READ_STREAM_SUCCESS) + .status(SubscribeStreamResponseCode.READ_STREAM_SUCCESS) .build(); verify(subscribeStreamObserver4, timeout(testTimeout).times(1)) - .onNext(expectedSubscriberStreamNotAvailable); + .onNext(fromPbj(expectedSubscriberStreamNotAvailable)); } private void removeRootPathWritePerms(final PersistenceStorageConfig config) @@ -531,7 +588,7 @@ private static void verifySubscribeStreamResponse( int numberOfBlocks, int blockItemsToWait, int blockItemsToSkip, - StreamObserver streamObserver, + StreamObserver streamObserver, List blockItems) { // Each block has 10 BlockItems. Verify all the BlockItems @@ -554,14 +611,17 @@ private static void verifySubscribeStreamResponse( final SubscribeStreamResponse stateProofStreamResponse = buildSubscribeStreamResponse(stateProofBlockItem); - verify(streamObserver, timeout(testTimeout).times(1)).onNext(headerSubStreamResponse); - verify(streamObserver, timeout(testTimeout).times(8)).onNext(bodySubStreamResponse); - verify(streamObserver, timeout(testTimeout).times(1)).onNext(stateProofStreamResponse); + verify(streamObserver, timeout(testTimeout).times(1)) + .onNext(fromPbj(headerSubStreamResponse)); + verify(streamObserver, timeout(testTimeout).times(8)) + .onNext(fromPbj(bodySubStreamResponse)); + verify(streamObserver, timeout(testTimeout).times(1)) + .onNext(fromPbj(stateProofStreamResponse)); } } private static SubscribeStreamResponse buildSubscribeStreamResponse(BlockItem blockItem) { - return SubscribeStreamResponse.newBuilder().setBlockItem(blockItem).build(); + return SubscribeStreamResponse.newBuilder().blockItem(blockItem).build(); } private BlockStreamService buildBlockStreamService() throws IOException { @@ -605,7 +665,16 @@ private BlockStreamService buildBlockStreamService( final BlockNodeContext blockNodeContext = TestConfigUtil.getTestBlockNodeContext(); - return new BlockStreamService( - new ItemAckBuilder(), streamMediator, blockReader, serviceStatus, blockNodeContext); + return new BlockStreamService(streamMediator, blockReader, serviceStatus, blockNodeContext); + } + + public static Acknowledgement buildAck(@NonNull final BlockItem blockItem) + throws NoSuchAlgorithmException { + ItemAcknowledgement itemAck = + ItemAcknowledgement.newBuilder() + .itemHash(Bytes.wrap(getFakeHash(blockItem))) + .build(); + + return Acknowledgement.newBuilder().itemAck(itemAck).build(); } } diff --git a/server/src/test/java/com/hedera/block/server/BlockStreamServiceTest.java b/server/src/test/java/com/hedera/block/server/BlockStreamServiceTest.java index ad4bf1ac..e5b71a1d 100644 --- a/server/src/test/java/com/hedera/block/server/BlockStreamServiceTest.java +++ b/server/src/test/java/com/hedera/block/server/BlockStreamServiceTest.java @@ -16,17 +16,31 @@ package com.hedera.block.server; -import static com.hedera.block.protos.BlockStreamService.*; import static 
com.hedera.block.server.BlockStreamService.buildSingleBlockNotAvailableResponse; import static com.hedera.block.server.BlockStreamService.buildSingleBlockNotFoundResponse; -import static com.hedera.block.server.Constants.*; +import static com.hedera.block.server.BlockStreamService.fromPbjSingleBlockSuccessResponse; +import static com.hedera.block.server.Constants.CLIENT_STREAMING_METHOD_NAME; +import static com.hedera.block.server.Constants.SERVER_STREAMING_METHOD_NAME; +import static com.hedera.block.server.Constants.SINGLE_BLOCK_METHOD_NAME; +import static com.hedera.block.server.Translator.fromPbj; import static com.hedera.block.server.util.PersistTestUtils.generateBlockItems; +import static com.hedera.block.server.util.PersistTestUtils.reverseByteArray; +import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.INFO; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import com.google.protobuf.Descriptors; +import com.google.protobuf.InvalidProtocolBufferException; import com.hedera.block.server.config.BlockNodeContext; import com.hedera.block.server.data.ObjectEvent; import com.hedera.block.server.mediator.StreamMediator; @@ -35,9 +49,14 @@ import com.hedera.block.server.persistence.storage.read.BlockReader; import com.hedera.block.server.persistence.storage.write.BlockAsDirWriterBuilder; import com.hedera.block.server.persistence.storage.write.BlockWriter; -import com.hedera.block.server.producer.ItemAckBuilder; import com.hedera.block.server.util.TestConfigUtil; import com.hedera.block.server.util.TestUtils; +import com.hedera.hapi.block.SingleBlockResponse; +import com.hedera.hapi.block.SingleBlockResponseCode; +import com.hedera.hapi.block.SubscribeStreamResponse; +import com.hedera.hapi.block.stream.Block; +import com.hedera.hapi.block.stream.BlockItem; +import com.hedera.pbj.runtime.ParseException; import io.grpc.stub.ServerCalls; import io.grpc.stub.StreamObserver; import io.helidon.webserver.grpc.GrpcService; @@ -58,9 +77,7 @@ @ExtendWith(MockitoExtension.class) public class BlockStreamServiceTest { - @Mock private StreamObserver responseObserver; - - @Mock private ItemAckBuilder itemAckBuilder; + @Mock private StreamObserver responseObserver; @Mock private StreamMediator> streamMediator; @@ -68,7 +85,7 @@ public class BlockStreamServiceTest { @Mock private ServiceStatus serviceStatus; - private final System.Logger LOGGER = System.getLogger(getClass().getName()); + private final Logger LOGGER = System.getLogger(getClass().getName()); private static final String TEMP_DIR = "block-node-unit-test-dir"; @@ -79,7 +96,7 @@ public class BlockStreamServiceTest { @BeforeEach public void setUp() throws IOException { testPath = Files.createTempDirectory(TEMP_DIR); - LOGGER.log(System.Logger.Level.INFO, "Created temp directory: " + testPath.toString()); + LOGGER.log(INFO, "Created temp directory: " + testPath.toString()); blockNodeContext = TestConfigUtil.getTestBlockNodeContext( @@ -97,17 +114,12 @@ public void testServiceName() throws IOException, NoSuchAlgorithmException { final 
BlockStreamService blockStreamService = new BlockStreamService( - itemAckBuilder, - streamMediator, - blockReader, - serviceStatus, - blockNodeContext); + streamMediator, blockReader, serviceStatus, blockNodeContext); // Verify the service name assertEquals(Constants.SERVICE_NAME, blockStreamService.serviceName()); // Verify other methods not invoked - verify(itemAckBuilder, never()).buildAck(any(BlockItem.class)); verify(streamMediator, never()).publish(any(BlockItem.class)); } @@ -115,32 +127,23 @@ public void testServiceName() throws IOException, NoSuchAlgorithmException { public void testProto() throws IOException, NoSuchAlgorithmException { final BlockStreamService blockStreamService = new BlockStreamService( - itemAckBuilder, - streamMediator, - blockReader, - serviceStatus, - blockNodeContext); + streamMediator, blockReader, serviceStatus, blockNodeContext); Descriptors.FileDescriptor fileDescriptor = blockStreamService.proto(); // Verify the current rpc methods - assertEquals(3, fileDescriptor.getServices().getFirst().getMethods().size()); + assertEquals(5, fileDescriptor.getServices().getFirst().getMethods().size()); // Verify other methods not invoked - verify(itemAckBuilder, never()).buildAck(any(BlockItem.class)); verify(streamMediator, never()).publish(any(BlockItem.class)); } @Test - void testSingleBlockHappyPath() throws IOException { + void testSingleBlockHappyPath() throws IOException, ParseException { final BlockReader blockReader = BlockAsDirReaderBuilder.newBuilder(config).build(); final BlockStreamService blockStreamService = new BlockStreamService( - itemAckBuilder, - streamMediator, - blockReader, - serviceStatus, - blockNodeContext); + streamMediator, blockReader, serviceStatus, blockNodeContext); // Enable the serviceStatus when(serviceStatus.isRunning()).thenReturn(true); @@ -161,90 +164,109 @@ void testSingleBlockHappyPath() throws IOException { } // Build a response to verify what's passed to the response observer - final SingleBlockResponse expectedSingleBlockResponse = - SingleBlockResponse.newBuilder().setBlock(blockOpt.get()).build(); + final com.hedera.hapi.block.protoc.SingleBlockResponse expectedSingleBlockResponse = + fromPbjSingleBlockSuccessResponse(blockOpt.get()); // Build a request to invoke the service - final SingleBlockRequest singleBlockRequest = - SingleBlockRequest.newBuilder().setBlockNumber(1).build(); + final com.hedera.hapi.block.protoc.SingleBlockRequest singleBlockRequest = + com.hedera.hapi.block.protoc.SingleBlockRequest.newBuilder() + .setBlockNumber(1) + .build(); // Call the service - blockStreamService.singleBlock(singleBlockRequest, responseObserver); + blockStreamService.protocSingleBlock(singleBlockRequest, responseObserver); verify(responseObserver, times(1)).onNext(expectedSingleBlockResponse); } @Test - void testSingleBlockNotFoundPath() throws IOException { + void testSingleBlockNotFoundPath() throws IOException, ParseException { // Get the block so we can verify the response payload when(blockReader.read(1)).thenReturn(Optional.empty()); // Build a response to verify what's passed to the response observer - final SingleBlockResponse expectedNotFound = buildSingleBlockNotFoundResponse(); + final com.hedera.hapi.block.protoc.SingleBlockResponse expectedNotFound = + buildSingleBlockNotFoundResponse(); // Build a request to invoke the service - final SingleBlockRequest singleBlockRequest = - SingleBlockRequest.newBuilder().setBlockNumber(1).build(); + final com.hedera.hapi.block.protoc.SingleBlockRequest singleBlockRequest = + 
com.hedera.hapi.block.protoc.SingleBlockRequest.newBuilder() + .setBlockNumber(1) + .build(); // Call the service final BlockStreamService blockStreamService = new BlockStreamService( - itemAckBuilder, - streamMediator, - blockReader, - serviceStatus, - blockNodeContext); + streamMediator, blockReader, serviceStatus, blockNodeContext); // Enable the serviceStatus when(serviceStatus.isRunning()).thenReturn(true); - blockStreamService.singleBlock(singleBlockRequest, responseObserver); + blockStreamService.protocSingleBlock(singleBlockRequest, responseObserver); verify(responseObserver, times(1)).onNext(expectedNotFound); } @Test - void testSingleBlockServiceNotAvailable() { + void testSingleBlockServiceNotAvailable() throws InvalidProtocolBufferException { final BlockStreamService blockStreamService = new BlockStreamService( - itemAckBuilder, - streamMediator, - blockReader, - serviceStatus, - blockNodeContext); + streamMediator, blockReader, serviceStatus, blockNodeContext); // Set the service status to not running when(serviceStatus.isRunning()).thenReturn(false); - final SingleBlockResponse expectedNotAvailable = buildSingleBlockNotAvailableResponse(); + final com.hedera.hapi.block.protoc.SingleBlockResponse expectedNotAvailable = + buildSingleBlockNotAvailableResponse(); // Build a request to invoke the service - final SingleBlockRequest singleBlockRequest = - SingleBlockRequest.newBuilder().setBlockNumber(1).build(); - blockStreamService.singleBlock(singleBlockRequest, responseObserver); + final com.hedera.hapi.block.protoc.SingleBlockRequest singleBlockRequest = + com.hedera.hapi.block.protoc.SingleBlockRequest.newBuilder() + .setBlockNumber(1) + .build(); + blockStreamService.protocSingleBlock(singleBlockRequest, responseObserver); verify(responseObserver, times(1)).onNext(expectedNotAvailable); } @Test - public void testSingleBlockIOExceptionPath() throws IOException { + public void testSingleBlockIOExceptionPath() throws IOException, ParseException { final BlockStreamService blockStreamService = new BlockStreamService( - itemAckBuilder, - streamMediator, - blockReader, - serviceStatus, - blockNodeContext); + streamMediator, blockReader, serviceStatus, blockNodeContext); - // Set the service status to not running when(serviceStatus.isRunning()).thenReturn(true); when(blockReader.read(1)).thenThrow(new IOException("Test exception")); - final SingleBlockResponse expectedNotAvailable = buildSingleBlockNotAvailableResponse(); + final com.hedera.hapi.block.protoc.SingleBlockResponse expectedNotAvailable = + buildSingleBlockNotAvailableResponse(); + + // Build a request to invoke the service + final com.hedera.hapi.block.protoc.SingleBlockRequest singleBlockRequest = + com.hedera.hapi.block.protoc.SingleBlockRequest.newBuilder() + .setBlockNumber(1) + .build(); + blockStreamService.protocSingleBlock(singleBlockRequest, responseObserver); + verify(responseObserver, times(1)).onNext(expectedNotAvailable); + } + + @Test + public void testSingleBlockParseExceptionPath() throws IOException, ParseException { + final BlockStreamService blockStreamService = + new BlockStreamService( + streamMediator, blockReader, serviceStatus, blockNodeContext); + + when(serviceStatus.isRunning()).thenReturn(true); + when(blockReader.read(1)).thenThrow(new ParseException("Test exception")); + + final com.hedera.hapi.block.protoc.SingleBlockResponse expectedNotAvailable = + buildSingleBlockNotAvailableResponse(); // Build a request to invoke the service - final SingleBlockRequest singleBlockRequest = - 
SingleBlockRequest.newBuilder().setBlockNumber(1).build(); - blockStreamService.singleBlock(singleBlockRequest, responseObserver); + final com.hedera.hapi.block.protoc.SingleBlockRequest singleBlockRequest = + com.hedera.hapi.block.protoc.SingleBlockRequest.newBuilder() + .setBlockNumber(1) + .build(); + blockStreamService.protocSingleBlock(singleBlockRequest, responseObserver); verify(responseObserver, times(1)).onNext(expectedNotAvailable); } @@ -253,11 +275,7 @@ public void testUpdateInvokesRoutingWithLambdas() { final BlockStreamService blockStreamService = new BlockStreamService( - itemAckBuilder, - streamMediator, - blockReader, - serviceStatus, - blockNodeContext); + streamMediator, blockReader, serviceStatus, blockNodeContext); GrpcService.Routing routing = mock(GrpcService.Routing.class); blockStreamService.update(routing); @@ -271,4 +289,32 @@ public void testUpdateInvokesRoutingWithLambdas() { verify(routing, timeout(50).times(1)) .unary(eq(SINGLE_BLOCK_METHOD_NAME), any(ServerCalls.UnaryMethod.class)); } + + @Test + public void testProtocParseExceptionHandling() { + // TODO: We might be able to remove this test once we can remove the Translator class + + final BlockStreamService blockStreamService = + new BlockStreamService( + streamMediator, blockReader, serviceStatus, blockNodeContext); + + // Build a request to invoke the service + final com.hedera.hapi.block.protoc.SingleBlockRequest singleBlockRequest = + spy( + com.hedera.hapi.block.protoc.SingleBlockRequest.newBuilder() + .setBlockNumber(1) + .build()); + + // Create a corrupted set of bytes to provoke a parse exception + byte[] okBytes = singleBlockRequest.toByteArray(); + when(singleBlockRequest.toByteArray()).thenReturn(reverseByteArray(okBytes)); + + final SingleBlockResponse expectedSingleBlockErrorResponse = + SingleBlockResponse.newBuilder() + .status(SingleBlockResponseCode.READ_BLOCK_NOT_AVAILABLE) + .build(); + // Call the service + blockStreamService.protocSingleBlock(singleBlockRequest, responseObserver); + verify(responseObserver, times(1)).onNext(fromPbj(expectedSingleBlockErrorResponse)); + } } diff --git a/server/src/test/java/com/hedera/block/server/consumer/ConsumerStreamResponseObserverTest.java b/server/src/test/java/com/hedera/block/server/consumer/ConsumerStreamResponseObserverTest.java index 5fd2c44b..79c10814 100644 --- a/server/src/test/java/com/hedera/block/server/consumer/ConsumerStreamResponseObserverTest.java +++ b/server/src/test/java/com/hedera/block/server/consumer/ConsumerStreamResponseObserverTest.java @@ -16,14 +16,26 @@ package com.hedera.block.server.consumer; -import static com.hedera.block.protos.BlockStreamService.*; +import static com.hedera.block.server.Translator.fromPbj; import static com.hedera.block.server.util.PersistTestUtils.generateBlockItems; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import com.hedera.block.server.config.BlockNodeContext; import com.hedera.block.server.data.ObjectEvent; import com.hedera.block.server.mediator.StreamMediator; import com.hedera.block.server.util.TestConfigUtil; +import com.hedera.hapi.block.SubscribeStreamResponse; +import com.hedera.hapi.block.stream.BlockItem; +import com.hedera.hapi.block.stream.BlockProof; +import 
com.hedera.hapi.block.stream.input.EventHeader; +import com.hedera.hapi.block.stream.output.BlockHeader; +import com.hedera.hapi.platform.event.EventCore; import io.grpc.stub.ServerCallStreamObserver; import io.grpc.stub.StreamObserver; import java.io.IOException; @@ -42,10 +54,17 @@ public class ConsumerStreamResponseObserverTest { private final long TEST_TIME = 1_719_427_664_950L; @Mock private StreamMediator> streamMediator; - @Mock private StreamObserver responseStreamObserver; + + @Mock + private StreamObserver + responseStreamObserver; + @Mock private ObjectEvent objectEvent; - @Mock private ServerCallStreamObserver serverCallStreamObserver; + @Mock + private ServerCallStreamObserver + serverCallStreamObserver; + @Mock private InstantSource testClock; final BlockNodeContext testContext; @@ -67,17 +86,17 @@ public void testProducerTimeoutWithinWindow() { new ConsumerStreamResponseObserver( testContext, testClock, streamMediator, responseStreamObserver); - final BlockHeader blockHeader = BlockHeader.newBuilder().setBlockNumber(1).build(); - final BlockItem blockItem = BlockItem.newBuilder().setHeader(blockHeader).build(); + final BlockHeader blockHeader = BlockHeader.newBuilder().number(1).build(); + final BlockItem blockItem = BlockItem.newBuilder().blockHeader(blockHeader).build(); final SubscribeStreamResponse subscribeStreamResponse = - SubscribeStreamResponse.newBuilder().setBlockItem(blockItem).build(); + SubscribeStreamResponse.newBuilder().blockItem(blockItem).build(); when(objectEvent.get()).thenReturn(subscribeStreamResponse); consumerBlockItemObserver.onEvent(objectEvent, 0, true); // verify the observer is called with the next BlockItem - verify(responseStreamObserver).onNext(subscribeStreamResponse); + verify(responseStreamObserver).onNext(fromPbj(subscribeStreamResponse)); // verify the mediator is NOT called to unsubscribe the observer verify(streamMediator, never()).unsubscribe(consumerBlockItemObserver); @@ -121,7 +140,7 @@ public void testResponseNotPermittedAfterCancel() { final List blockItems = generateBlockItems(1); final SubscribeStreamResponse subscribeStreamResponse = - SubscribeStreamResponse.newBuilder().setBlockItem(blockItems.getFirst()).build(); + SubscribeStreamResponse.newBuilder().blockItem(blockItems.getFirst()).build(); when(objectEvent.get()).thenReturn(subscribeStreamResponse); // Confirm that the observer is called with the first BlockItem @@ -134,7 +153,8 @@ public void testResponseNotPermittedAfterCancel() { consumerStreamResponseObserver.onEvent(objectEvent, 0, true); // Confirm that canceling the observer allowed only 1 response to be sent. - verify(serverCallStreamObserver, timeout(50).times(1)).onNext(subscribeStreamResponse); + verify(serverCallStreamObserver, timeout(50).times(1)) + .onNext(fromPbj(subscribeStreamResponse)); } @Test @@ -146,7 +166,7 @@ public void testResponseNotPermittedAfterClose() { final List blockItems = generateBlockItems(1); final SubscribeStreamResponse subscribeStreamResponse = - SubscribeStreamResponse.newBuilder().setBlockItem(blockItems.getFirst()).build(); + SubscribeStreamResponse.newBuilder().blockItem(blockItems.getFirst()).build(); when(objectEvent.get()).thenReturn(subscribeStreamResponse); // Confirm that the observer is called with the first BlockItem @@ -159,7 +179,8 @@ public void testResponseNotPermittedAfterClose() { consumerStreamResponseObserver.onEvent(objectEvent, 0, true); // Confirm that canceling the observer allowed only 1 response to be sent. 
- verify(serverCallStreamObserver, timeout(50).times(1)).onNext(subscribeStreamResponse); + verify(serverCallStreamObserver, timeout(50).times(1)) + .onNext(fromPbj(subscribeStreamResponse)); } @Test @@ -177,19 +198,17 @@ public void testConsumerNotToSendBeforeBlockHeader() { for (int i = 1; i <= 10; i++) { if (i % 2 == 0) { - final EventMetadata eventMetadata = - EventMetadata.newBuilder().setCreatorId(i).build(); - final BlockItem blockItem = - BlockItem.newBuilder().setStartEvent(eventMetadata).build(); + final EventHeader eventHeader = + EventHeader.newBuilder().eventCore(EventCore.newBuilder().build()).build(); + final BlockItem blockItem = BlockItem.newBuilder().eventHeader(eventHeader).build(); final SubscribeStreamResponse subscribeStreamResponse = - SubscribeStreamResponse.newBuilder().setBlockItem(blockItem).build(); + SubscribeStreamResponse.newBuilder().blockItem(blockItem).build(); when(objectEvent.get()).thenReturn(subscribeStreamResponse); } else { - final BlockProof blockProof = BlockProof.newBuilder().setBlock(i).build(); - final BlockItem blockItem = - BlockItem.newBuilder().setStateProof(blockProof).build(); + final BlockProof blockProof = BlockProof.newBuilder().block(i).build(); + final BlockItem blockItem = BlockItem.newBuilder().blockProof(blockProof).build(); final SubscribeStreamResponse subscribeStreamResponse = - SubscribeStreamResponse.newBuilder().setBlockItem(blockItem).build(); + SubscribeStreamResponse.newBuilder().blockItem(blockItem).build(); when(objectEvent.get()).thenReturn(subscribeStreamResponse); } @@ -198,11 +217,49 @@ final BlockItem blockItem = BlockItem.newBuilder().build(); final SubscribeStreamResponse subscribeStreamResponse = - SubscribeStreamResponse.newBuilder().setBlockItem(blockItem).build(); + SubscribeStreamResponse.newBuilder().blockItem(blockItem).build(); // Confirm that the observer was called with the next BlockItem // since we never send a BlockItem with a Header to start the stream. - verify(responseStreamObserver, timeout(50).times(0)).onNext(subscribeStreamResponse); + verify(responseStreamObserver, timeout(50).times(0)) + .onNext(fromPbj(subscribeStreamResponse)); + } + + @Test + public void testSubscriberStreamResponseIsBlockItemWhenBlockItemIsNull() { + + // The generated objects contain safeguards to prevent a SubscribeStreamResponse + // being created with a null BlockItem. Here, I had to use a spy() to even + manufacture this scenario. This should not happen in production.
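For context on the scenarios manufactured in the tests added here: a PBJ SubscribeStreamResponse carries its payload in a oneof, and the sketch below assumes the convenience accessor for an unset branch yields null (an assumption about the generated accessor, which is also why the test resorts to a spy to force the null-item case):

```java
import com.hedera.hapi.block.SubscribeStreamResponse;
import com.hedera.hapi.block.SubscribeStreamResponseCode;

final class OneOfAccessorSketch {
    public static void main(final String[] args) {
        // Build a response whose oneof holds only the status branch.
        final SubscribeStreamResponse statusOnly =
                SubscribeStreamResponse.newBuilder()
                        .status(SubscribeStreamResponseCode.READ_STREAM_SUCCESS)
                        .build();

        // Assumption: blockItem() reports null because the oneof holds the status
        // branch; a missing block item is the condition these tests probe.
        System.out.println("blockItem present: " + (statusOnly.blockItem() != null));
    }
}
```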
+ final BlockItem blockItem = BlockItem.newBuilder().build(); + final SubscribeStreamResponse subscribeStreamResponse = + spy(SubscribeStreamResponse.newBuilder().blockItem(blockItem).build()); + + when(subscribeStreamResponse.blockItem()).thenReturn(null); + when(objectEvent.get()).thenReturn(subscribeStreamResponse); + + final var consumerBlockItemObserver = + new ConsumerStreamResponseObserver( + testContext, testClock, streamMediator, responseStreamObserver); + assertThrows( + IllegalArgumentException.class, + () -> consumerBlockItemObserver.onEvent(objectEvent, 0, true)); + } + + @Test + public void testSubscribeStreamResponseTypeNotSupported() { + + final SubscribeStreamResponse subscribeStreamResponse = + SubscribeStreamResponse.newBuilder().build(); + when(objectEvent.get()).thenReturn(subscribeStreamResponse); + + final var consumerBlockItemObserver = + new ConsumerStreamResponseObserver( + testContext, testClock, streamMediator, responseStreamObserver); + + assertThrows( + IllegalArgumentException.class, + () -> consumerBlockItemObserver.onEvent(objectEvent, 0, true)); } private static class TestConsumerStreamResponseObserver extends ConsumerStreamResponseObserver { @@ -211,7 +268,8 @@ public TestConsumerStreamResponseObserver( BlockNodeContext context, InstantSource producerLivenessClock, StreamMediator> subscriptionHandler, - StreamObserver subscribeStreamResponseObserver) { + StreamObserver + subscribeStreamResponseObserver) { super( context, producerLivenessClock, diff --git a/server/src/test/java/com/hedera/block/server/mediator/LiveStreamMediatorImplTest.java b/server/src/test/java/com/hedera/block/server/mediator/LiveStreamMediatorImplTest.java index 1d8eb177..6fcffe6b 100644 --- a/server/src/test/java/com/hedera/block/server/mediator/LiveStreamMediatorImplTest.java +++ b/server/src/test/java/com/hedera/block/server/mediator/LiveStreamMediatorImplTest.java @@ -16,10 +16,17 @@ package com.hedera.block.server.mediator; -import static com.hedera.block.protos.BlockStreamService.*; +import static com.hedera.block.server.Translator.fromPbj; import static com.hedera.block.server.util.PersistTestUtils.generateBlockItems; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import com.hedera.block.server.ServiceStatusImpl; import com.hedera.block.server.config.BlockNodeContext; @@ -29,6 +36,9 @@ import com.hedera.block.server.data.ObjectEvent; import com.hedera.block.server.persistence.storage.write.BlockWriter; import com.hedera.block.server.util.TestConfigUtil; +import com.hedera.hapi.block.SubscribeStreamResponse; +import com.hedera.hapi.block.stream.BlockItem; +import com.hedera.hapi.block.stream.output.BlockHeader; import com.lmax.disruptor.EventHandler; import edu.umd.cs.findbugs.annotations.NonNull; import io.grpc.stub.ServerCallStreamObserver; @@ -51,11 +61,19 @@ public class LiveStreamMediatorImplTest { @Mock private BlockWriter blockWriter; - @Mock private StreamObserver streamObserver1; - @Mock private StreamObserver streamObserver2; - @Mock private StreamObserver streamObserver3; + @Mock + 
private StreamObserver streamObserver1; + + @Mock + private StreamObserver streamObserver2; + + @Mock + private StreamObserver streamObserver3; + + @Mock + private ServerCallStreamObserver + serverCallStreamObserver; - @Mock private ServerCallStreamObserver serverCallStreamObserver; @Mock private InstantSource testClock; private final long TIMEOUT_THRESHOLD_MILLIS = 100L; @@ -178,10 +196,10 @@ blockWriter, blockNodeContext, new ServiceStatusImpl()) streamMediator.isSubscribed(concreteObserver3), "Expected the mediator to have observer3 subscribed"); - final BlockHeader blockHeader = BlockHeader.newBuilder().setBlockNumber(1).build(); - final BlockItem blockItem = BlockItem.newBuilder().setHeader(blockHeader).build(); + final BlockHeader blockHeader = BlockHeader.newBuilder().number(1).build(); + final BlockItem blockItem = BlockItem.newBuilder().blockHeader(blockHeader).build(); final SubscribeStreamResponse subscribeStreamResponse = - SubscribeStreamResponse.newBuilder().setBlockItem(blockItem).build(); + SubscribeStreamResponse.newBuilder().blockItem(blockItem).build(); // Acting as a producer, notify the mediator of a new block streamMediator.publish(blockItem); @@ -189,9 +207,12 @@ blockWriter, blockNodeContext, new ServiceStatusImpl()) assertEquals(1, blockNodeContext.metricsService().liveBlockItems.get()); // Confirm each subscriber was notified of the new block - verify(streamObserver1, timeout(testTimeout).times(1)).onNext(subscribeStreamResponse); - verify(streamObserver2, timeout(testTimeout).times(1)).onNext(subscribeStreamResponse); - verify(streamObserver3, timeout(testTimeout).times(1)).onNext(subscribeStreamResponse); + verify(streamObserver1, timeout(testTimeout).times(1)) + .onNext(fromPbj(subscribeStreamResponse)); + verify(streamObserver2, timeout(testTimeout).times(1)) + .onNext(fromPbj(subscribeStreamResponse)); + verify(streamObserver3, timeout(testTimeout).times(1)) + .onNext(fromPbj(subscribeStreamResponse)); // Confirm the BlockStorage write method was called verify(blockWriter).write(blockItem); @@ -366,7 +387,8 @@ public TestConsumerStreamResponseObserver( final InstantSource producerLivenessClock, final StreamMediator> streamMediator, - final StreamObserver responseStreamObserver) { + final StreamObserver + responseStreamObserver) { super(context, producerLivenessClock, streamMediator, responseStreamObserver); } diff --git a/server/src/test/java/com/hedera/block/server/persistence/storage/read/BlockAsDirReaderTest.java b/server/src/test/java/com/hedera/block/server/persistence/storage/read/BlockAsDirReaderTest.java index a7d8deaa..e3b7cac8 100644 --- a/server/src/test/java/com/hedera/block/server/persistence/storage/read/BlockAsDirReaderTest.java +++ b/server/src/test/java/com/hedera/block/server/persistence/storage/read/BlockAsDirReaderTest.java @@ -16,9 +16,12 @@ package com.hedera.block.server.persistence.storage.read; -import static com.hedera.block.protos.BlockStreamService.Block; -import static com.hedera.block.protos.BlockStreamService.BlockItem; import static com.hedera.block.server.Constants.BLOCK_FILE_EXTENSION; +import static com.hedera.block.server.util.PersistTestUtils.generateBlockItems; +import static com.hedera.block.server.util.PersistTestUtils.reverseByteArray; +import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.ERROR; +import static java.lang.System.Logger.Level.INFO; import static org.junit.jupiter.api.Assertions.*; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doThrow; @@ 
-32,7 +35,11 @@ import com.hedera.block.server.util.PersistTestUtils; import com.hedera.block.server.util.TestConfigUtil; import com.hedera.block.server.util.TestUtils; +import com.hedera.hapi.block.stream.Block; +import com.hedera.hapi.block.stream.BlockItem; +import com.hedera.pbj.runtime.ParseException; import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.nio.file.Files; @@ -48,7 +55,7 @@ public class BlockAsDirReaderTest { - private final System.Logger LOGGER = System.getLogger(getClass().getName()); + private final Logger LOGGER = System.getLogger(getClass().getName()); private static final String TEMP_DIR = "block-node-unit-test-dir"; @@ -60,7 +67,7 @@ public class BlockAsDirReaderTest { @BeforeEach public void setUp() throws IOException { testPath = Files.createTempDirectory(TEMP_DIR); - LOGGER.log(System.Logger.Level.INFO, "Created temp directory: " + testPath.toString()); + LOGGER.log(INFO, "Created temp directory: " + testPath.toString()); blockNodeContext = TestConfigUtil.getTestBlockNodeContext( @@ -71,22 +78,20 @@ public void setUp() throws IOException { @AfterEach public void tearDown() { if (!TestUtils.deleteDirectory(testPath.toFile())) { - LOGGER.log( - System.Logger.Level.ERROR, - "Failed to delete temp directory: " + testPath.toString()); + LOGGER.log(ERROR, "Failed to delete temp directory: " + testPath.toString()); } } @Test - public void testReadBlockDoesNotExist() throws IOException { + public void testReadBlockDoesNotExist() throws IOException, ParseException { final BlockReader blockReader = BlockAsDirReaderBuilder.newBuilder(config).build(); final Optional blockOpt = blockReader.read(10000); assertTrue(blockOpt.isEmpty()); } @Test - public void testReadPermsRepairSucceeded() throws IOException { - final List blockItems = PersistTestUtils.generateBlockItems(1); + public void testReadPermsRepairSucceeded() throws IOException, ParseException { + final List blockItems = generateBlockItems(1); final BlockWriter blockWriter = BlockAsDirWriterBuilder.newBuilder(blockNodeContext).build(); @@ -101,12 +106,12 @@ public void testReadPermsRepairSucceeded() throws IOException { final BlockReader blockReader = BlockAsDirReaderBuilder.newBuilder(config).build(); final Optional blockOpt = blockReader.read(1); assertFalse(blockOpt.isEmpty()); - assertEquals(10, blockOpt.get().getBlockItemsList().size()); + assertEquals(10, blockOpt.get().items().size()); } @Test - public void testRemoveBlockReadPermsRepairFailed() throws IOException { - final List blockItems = PersistTestUtils.generateBlockItems(1); + public void testRemoveBlockReadPermsRepairFailed() throws IOException, ParseException { + final List blockItems = generateBlockItems(1); final BlockWriter blockWriter = BlockAsDirWriterBuilder.newBuilder(blockNodeContext).build(); @@ -129,7 +134,7 @@ public void testRemoveBlockReadPermsRepairFailed() throws IOException { @Test public void testRemoveBlockItemReadPerms() throws IOException { - final List blockItems = PersistTestUtils.generateBlockItems(1); + final List blockItems = generateBlockItems(1); final BlockWriter blockWriter = BlockAsDirWriterBuilder.newBuilder(blockNodeContext).build(); @@ -144,12 +149,13 @@ public void testRemoveBlockItemReadPerms() throws IOException { } @Test - public void testPathIsNotDirectory() throws IOException { - final List blockItems = PersistTestUtils.generateBlockItems(1); + public void testPathIsNotDirectory() throws IOException, ParseException { 
+ final List blockItems = generateBlockItems(1); final Path blockNodeRootPath = Path.of(config.rootPath()); // Write a file named "1" where a directory should be - writeFileToPath(blockNodeRootPath.resolve(Path.of("1")), blockItems.getFirst()); + PersistTestUtils.writeBlockItemToPath( + blockNodeRootPath.resolve(Path.of("1")), blockItems.getFirst()); // Should return empty because the path is not a directory final BlockReader blockReader = BlockAsDirReaderBuilder.newBuilder(config).build(); @@ -158,9 +164,9 @@ public void testPathIsNotDirectory() throws IOException { } @Test - public void testRepairReadPermsFails() throws IOException { + public void testRepairReadPermsFails() throws IOException, ParseException { - final List blockItems = PersistTestUtils.generateBlockItems(1); + final List blockItems = generateBlockItems(1); final BlockWriter blockWriter = BlockAsDirWriterBuilder.newBuilder(blockNodeContext).build(); @@ -181,7 +187,7 @@ public void testRepairReadPermsFails() throws IOException { } @Test - public void testBlockNodePathReadFails() throws IOException { + public void testBlockNodePathReadFails() throws IOException, ParseException { // Remove read perm on the root path removePathReadPerms(Path.of(config.rootPath())); @@ -196,12 +202,42 @@ public void testBlockNodePathReadFails() throws IOException { assertTrue(blockOpt.isEmpty()); } - private void writeFileToPath(final Path path, final BlockItem blockItem) throws IOException { - try (FileOutputStream fos = new FileOutputStream(path.toString())) { - blockItem.writeTo(fos); - LOGGER.log( - System.Logger.Level.INFO, "Successfully wrote the block item file: {0}", path); + @Test + public void testParseExceptionHandling() throws IOException, ParseException { + final List blockItems = generateBlockItems(1); + + final BlockWriter blockWriter = + BlockAsDirWriterBuilder.newBuilder(blockNodeContext).build(); + for (final BlockItem blockItem : blockItems) { + blockWriter.write(blockItem); + } + + // Read the block back and confirm it's read successfully + final BlockReader blockReader = BlockAsDirReaderBuilder.newBuilder(config).build(); + final Optional blockOpt = blockReader.read(1); + assertFalse(blockOpt.isEmpty()); + + final PersistenceStorageConfig persistenceStorageConfig = + blockNodeContext.configuration().getConfigData(PersistenceStorageConfig.class); + final Path blockNodeRootPath = Path.of(persistenceStorageConfig.rootPath()); + Path blockPath = blockNodeRootPath.resolve(String.valueOf(1)); + + byte[] bytes; + try (final FileInputStream fis = + new FileInputStream(blockPath.resolve("1" + BLOCK_FILE_EXTENSION).toFile())) { + bytes = fis.readAllBytes(); + } + + // Corrupt the block item file by reversing the bytes + try (final FileOutputStream fos = + new FileOutputStream(blockPath.resolve("1" + BLOCK_FILE_EXTENSION).toFile())) { + byte[] reversedBytes = reverseByteArray(bytes); + fos.write(reversedBytes); } + + // Read the block. 
The block item file is corrupted, so the read should fail with a + // ParseException + assertThrows(ParseException.class, () -> blockReader.read(1)); } public static void removeBlockReadPerms(int blockNumber, final PersistenceStorageConfig config) diff --git a/server/src/test/java/com/hedera/block/server/persistence/storage/remove/BlockAsDirRemoverTest.java b/server/src/test/java/com/hedera/block/server/persistence/storage/remove/BlockAsDirRemoverTest.java index ec9ab0b8..bbe824d9 100644 --- a/server/src/test/java/com/hedera/block/server/persistence/storage/remove/BlockAsDirRemoverTest.java +++ b/server/src/test/java/com/hedera/block/server/persistence/storage/remove/BlockAsDirRemoverTest.java @@ -16,10 +16,10 @@ package com.hedera.block.server.persistence.storage.remove; +import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.INFO; import static org.junit.jupiter.api.Assertions.assertEquals; -import com.hedera.block.protos.BlockStreamService.Block; -import com.hedera.block.protos.BlockStreamService.BlockItem; import com.hedera.block.server.config.BlockNodeContext; import com.hedera.block.server.persistence.storage.FileUtils; import com.hedera.block.server.persistence.storage.PersistenceStorageConfig; @@ -30,6 +30,9 @@ import com.hedera.block.server.util.PersistTestUtils; import com.hedera.block.server.util.TestConfigUtil; import com.hedera.block.server.util.TestUtils; +import com.hedera.hapi.block.stream.Block; +import com.hedera.hapi.block.stream.BlockItem; +import com.hedera.pbj.runtime.ParseException; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -41,7 +44,7 @@ public class BlockAsDirRemoverTest { - private final System.Logger LOGGER = System.getLogger(getClass().getName()); + private final Logger LOGGER = System.getLogger(getClass().getName()); private static final String TEMP_DIR = "block-node-unit-test-dir"; @@ -52,7 +55,7 @@ public class BlockAsDirRemoverTest { @BeforeEach public void setUp() throws IOException { testPath = Files.createTempDirectory(TEMP_DIR); - LOGGER.log(System.Logger.Level.INFO, "Created temp directory: " + testPath.toString()); + LOGGER.log(INFO, "Created temp directory: " + testPath.toString()); blockNodeContext = TestConfigUtil.getTestBlockNodeContext( @@ -61,10 +64,10 @@ public void setUp() throws IOException { } @Test - public void testRemoveNonExistentBlock() throws IOException { + public void testRemoveNonExistentBlock() throws IOException, ParseException { // Write a block - final List blockItems = PersistTestUtils.generateBlockItems(1); + final var blockItems = PersistTestUtils.generateBlockItems(1); final BlockWriter blockWriter = BlockAsDirWriterBuilder.newBuilder(blockNodeContext).build(); @@ -82,7 +85,8 @@ public void testRemoveNonExistentBlock() throws IOException { Optional blockOpt = blockReader.read(1); assert (blockOpt.isPresent()); assertEquals( - blockItems.getFirst().getHeader(), blockOpt.get().getBlockItems(0).getHeader()); + blockItems.getFirst().blockHeader(), + blockOpt.get().items().getFirst().blockHeader()); // Now remove the block blockRemover.remove(1); @@ -93,7 +97,7 @@ public void testRemoveNonExistentBlock() throws IOException { } @Test - public void testRemoveBlockWithPermException() throws IOException { + public void testRemoveBlockWithPermException() throws IOException, ParseException { // Write a block final List blockItems = PersistTestUtils.generateBlockItems(1); @@ -114,7 +118,8 @@ public void testRemoveBlockWithPermException() throws IOException { 
Optional blockOpt = blockReader.read(1); assert (blockOpt.isPresent()); assertEquals( - blockItems.getFirst().getHeader(), blockOpt.get().getBlockItems(0).getHeader()); + blockItems.getFirst().blockHeader(), + blockOpt.get().items().getFirst().blockHeader()); // Now remove the block blockRemover = new BlockAsDirRemover(testPath, FileUtils.defaultPerms); diff --git a/server/src/test/java/com/hedera/block/server/persistence/storage/write/BlockAsDirWriterTest.java b/server/src/test/java/com/hedera/block/server/persistence/storage/write/BlockAsDirWriterTest.java index 4d01af24..d1196503 100644 --- a/server/src/test/java/com/hedera/block/server/persistence/storage/write/BlockAsDirWriterTest.java +++ b/server/src/test/java/com/hedera/block/server/persistence/storage/write/BlockAsDirWriterTest.java @@ -16,9 +16,10 @@ package com.hedera.block.server.persistence.storage.write; -import static com.hedera.block.protos.BlockStreamService.Block; -import static com.hedera.block.protos.BlockStreamService.BlockItem; import static com.hedera.block.server.persistence.storage.read.BlockAsDirReaderTest.removeBlockReadPerms; +import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.ERROR; +import static java.lang.System.Logger.Level.INFO; import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; @@ -32,6 +33,9 @@ import com.hedera.block.server.util.PersistTestUtils; import com.hedera.block.server.util.TestConfigUtil; import com.hedera.block.server.util.TestUtils; +import com.hedera.hapi.block.stream.Block; +import com.hedera.hapi.block.stream.BlockItem; +import com.hedera.pbj.runtime.ParseException; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.nio.file.Files; @@ -48,7 +52,7 @@ public class BlockAsDirWriterTest { - private final System.Logger LOGGER = System.getLogger(getClass().getName()); + private final Logger LOGGER = System.getLogger(getClass().getName()); private static final String TEMP_DIR = "block-node-unit-test-dir"; private static final String PERSISTENCE_STORAGE_ROOT_PATH_KEY = "persistence.storage.rootPath"; @@ -60,7 +64,7 @@ public class BlockAsDirWriterTest { @BeforeEach public void setUp() throws IOException { testPath = Files.createTempDirectory(TEMP_DIR); - LOGGER.log(System.Logger.Level.INFO, "Created temp directory: " + testPath.toString()); + LOGGER.log(INFO, "Created temp directory: " + testPath.toString()); blockNodeContext = TestConfigUtil.getTestBlockNodeContext( @@ -71,14 +75,12 @@ public void setUp() throws IOException { @AfterEach public void tearDown() { if (!TestUtils.deleteDirectory(testPath.toFile())) { - LOGGER.log( - System.Logger.Level.ERROR, - "Failed to delete temp directory: " + testPath.toString()); + LOGGER.log(ERROR, "Failed to delete temp directory: " + testPath.toString()); } } @Test - public void testWriterAndReaderHappyPath() throws IOException { + public void testWriterAndReaderHappyPath() throws IOException, ParseException { // Write a block final List blockItems = PersistTestUtils.generateBlockItems(1); @@ -99,12 +101,12 @@ public void testWriterAndReaderHappyPath() throws IOException { boolean hasStartEvent = false; Block block = blockOpt.get(); - for (BlockItem blockItem : block.getBlockItemsList()) { - if (blockItem.hasHeader()) { + for (BlockItem blockItem : block.items()) { + if (blockItem.hasBlockHeader()) { hasHeader = true; - } else if (blockItem.hasStateProof()) { + } else if (blockItem.hasBlockProof()) { hasBlockProof = true; - } else if 
(blockItem.hasStartEvent()) { + } else if (blockItem.hasEventHeader()) { hasStartEvent = true; } } @@ -115,7 +117,7 @@ public void testWriterAndReaderHappyPath() throws IOException { } @Test - public void testRemoveBlockWritePerms() throws IOException { + public void testRemoveBlockWritePerms() throws IOException, ParseException { final List blockItems = PersistTestUtils.generateBlockItems(1); @@ -133,8 +135,8 @@ public void testRemoveBlockWritePerms() throws IOException { BlockReader blockReader = BlockAsDirReaderBuilder.newBuilder(testConfig).build(); Optional blockOpt = blockReader.read(1); assertFalse(blockOpt.isEmpty()); - assertEquals(1, blockOpt.get().getBlockItemsList().size()); - assertTrue(blockOpt.get().getBlockItems(0).hasHeader()); + assertEquals(1, blockOpt.get().items().size()); + assertTrue(blockOpt.get().items().get(0).hasBlockHeader()); // Remove all permissions on the block directory and // attempt to write the next block item @@ -144,8 +146,8 @@ public void testRemoveBlockWritePerms() throws IOException { // There should now be 2 blockItems in the block blockOpt = blockReader.read(1); assertFalse(blockOpt.isEmpty()); - assertEquals(2, blockOpt.get().getBlockItemsList().size()); - assertFalse(blockOpt.get().getBlockItems(1).hasHeader()); + assertEquals(2, blockOpt.get().items().size()); + assertFalse(blockOpt.get().items().get(1).hasBlockHeader()); // Remove read permission on the block directory removeBlockReadPerms(1, testConfig); @@ -154,8 +156,8 @@ public void testRemoveBlockWritePerms() throws IOException { // There should now be 3 blockItems in the block blockOpt = blockReader.read(1); assertFalse(blockOpt.isEmpty()); - assertEquals(3, blockOpt.get().getBlockItemsList().size()); - assertFalse(blockOpt.get().getBlockItems(1).hasHeader()); + assertEquals(3, blockOpt.get().items().size()); + assertFalse(blockOpt.get().items().get(1).hasBlockHeader()); } @Test @@ -176,7 +178,7 @@ public void testUnrecoverableIOExceptionOnWrite() throws IOException { } @Test - public void testRemoveRootDirReadPerm() throws IOException { + public void testRemoveRootDirReadPerm() throws IOException, ParseException { final List blockItems = PersistTestUtils.generateBlockItems(1); final BlockWriter blockWriter = @@ -200,11 +202,11 @@ public void testRemoveRootDirReadPerm() throws IOException { BlockReader blockReader = BlockAsDirReaderBuilder.newBuilder(testConfig).build(); Optional blockOpt = blockReader.read(1); assertFalse(blockOpt.isEmpty()); - assertEquals(10, blockOpt.get().getBlockItemsList().size()); + assertEquals(10, blockOpt.get().items().size()); } @Test - public void testPartialBlockRemoval() throws IOException { + public void testPartialBlockRemoval() throws IOException, ParseException { final List blockItems = PersistTestUtils.generateBlockItems(3); final BlockRemover blockRemover = new BlockAsDirRemover(Path.of(testConfig.rootPath()), FileUtils.defaultPerms); @@ -245,13 +247,13 @@ public void testPartialBlockRemoval() throws IOException { // Confirm blocks 1 and 2 still exist blockOpt = blockReader.read(1); assertFalse(blockOpt.isEmpty()); - assertEquals(10, blockOpt.get().getBlockItemsList().size()); - assertEquals(1, blockOpt.get().getBlockItems(0).getHeader().getBlockNumber()); + assertEquals(10, blockOpt.get().items().size()); + assertEquals(1, blockOpt.get().items().getFirst().blockHeader().number()); blockOpt = blockReader.read(2); assertFalse(blockOpt.isEmpty()); - assertEquals(10, blockOpt.get().getBlockItemsList().size()); - assertEquals(2, 
blockOpt.get().getBlockItems(0).getHeader().getBlockNumber()); + assertEquals(10, blockOpt.get().items().size()); + assertEquals(2, blockOpt.get().items().getFirst().blockHeader().number()); } private void removeRootWritePerms(final PersistenceStorageConfig config) throws IOException { diff --git a/server/src/test/java/com/hedera/block/server/producer/ProducerBlockItemObserverTest.java b/server/src/test/java/com/hedera/block/server/producer/ProducerBlockItemObserverTest.java index b8fb861f..774a3c6b 100644 --- a/server/src/test/java/com/hedera/block/server/producer/ProducerBlockItemObserverTest.java +++ b/server/src/test/java/com/hedera/block/server/producer/ProducerBlockItemObserverTest.java @@ -16,17 +16,19 @@ package com.hedera.block.server.producer; -import static com.hedera.block.protos.BlockStreamService.*; -import static com.hedera.block.protos.BlockStreamService.PublishStreamResponse.ItemAcknowledgement; +import static com.hedera.block.server.Translator.fromPbj; import static com.hedera.block.server.producer.Util.getFakeHash; import static com.hedera.block.server.util.PersistTestUtils.generateBlockItems; +import static com.hedera.block.server.util.PersistTestUtils.reverseByteArray; import static com.hedera.block.server.util.TestConfigUtil.CONSUMER_TIMEOUT_THRESHOLD_KEY; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; -import com.google.protobuf.ByteString; -import com.hedera.block.protos.BlockStreamService; +import com.google.protobuf.InvalidProtocolBufferException; import com.hedera.block.server.ServiceStatus; import com.hedera.block.server.ServiceStatusImpl; import com.hedera.block.server.config.BlockNodeContext; @@ -38,6 +40,17 @@ import com.hedera.block.server.mediator.StreamMediator; import com.hedera.block.server.persistence.storage.write.BlockWriter; import com.hedera.block.server.util.TestConfigUtil; +import com.hedera.hapi.block.Acknowledgement; +import com.hedera.hapi.block.EndOfStream; +import com.hedera.hapi.block.ItemAcknowledgement; +import com.hedera.hapi.block.PublishStreamRequest; +import com.hedera.hapi.block.PublishStreamResponse; +import com.hedera.hapi.block.PublishStreamResponseCode; +import com.hedera.hapi.block.SubscribeStreamResponse; +import com.hedera.hapi.block.stream.BlockItem; +import com.hedera.hapi.block.stream.output.BlockHeader; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; import io.grpc.stub.StreamObserver; import java.io.IOException; import java.security.NoSuchAlgorithmException; @@ -52,15 +65,22 @@ @ExtendWith(MockitoExtension.class) public class ProducerBlockItemObserverTest { - @Mock private ItemAckBuilder itemAckBuilder; @Mock private StreamMediator> streamMediator; - @Mock private StreamObserver publishStreamResponseObserver; + + @Mock + private StreamObserver + publishStreamResponseObserver; @Mock private BlockWriter blockWriter; - @Mock private StreamObserver streamObserver1; - @Mock private StreamObserver streamObserver2; - @Mock private StreamObserver streamObserver3; + @Mock + private StreamObserver streamObserver1; + + @Mock + private StreamObserver streamObserver2; + + @Mock + private StreamObserver streamObserver3; @Mock private ServiceStatus serviceStatus; @Mock private InstantSource testClock; @@ -71,29 
+91,23 @@ public void testProducerOnNext() throws IOException, NoSuchAlgorithmException { final List blockItems = generateBlockItems(1); final ProducerBlockItemObserver producerBlockItemObserver = new ProducerBlockItemObserver( - streamMediator, - publishStreamResponseObserver, - new ItemAckBuilder(), - serviceStatus); + streamMediator, publishStreamResponseObserver, serviceStatus); when(serviceStatus.isRunning()).thenReturn(true); final BlockItem blockHeader = blockItems.getFirst(); final PublishStreamRequest publishStreamRequest = - PublishStreamRequest.newBuilder().setBlockItem(blockHeader).build(); - producerBlockItemObserver.onNext(publishStreamRequest); + PublishStreamRequest.newBuilder().blockItem(blockHeader).build(); + producerBlockItemObserver.onNext(fromPbj(publishStreamRequest)); verify(streamMediator, timeout(50).times(1)).publish(blockHeader); - final ItemAcknowledgement itemAck = - ItemAcknowledgement.newBuilder() - .setItemAck(ByteString.copyFrom(getFakeHash(blockHeader))) - .build(); - final BlockStreamService.PublishStreamResponse publishStreamResponse = - BlockStreamService.PublishStreamResponse.newBuilder() - .setAcknowledgement(itemAck) - .build(); - verify(publishStreamResponseObserver, timeout(50).times(1)).onNext(publishStreamResponse); + final Acknowledgement ack = buildAck(blockHeader); + final PublishStreamResponse publishStreamResponse = + PublishStreamResponse.newBuilder().acknowledgement(ack).build(); + + verify(publishStreamResponseObserver, timeout(50).times(1)) + .onNext(fromPbj(publishStreamResponse)); // Helidon will call onCompleted after onNext producerBlockItemObserver.onCompleted(); @@ -149,31 +163,28 @@ blockWriter, blockNodeContext, new ServiceStatusImpl()) streamMediator.isSubscribed(concreteObserver3), "Expected the mediator to have observer3 subscribed"); - final BlockHeader blockHeader = BlockHeader.newBuilder().setBlockNumber(1).build(); - final BlockItem blockItem = BlockItem.newBuilder().setHeader(blockHeader).build(); + final BlockHeader blockHeader = BlockHeader.newBuilder().number(1).build(); + final BlockItem blockItem = BlockItem.newBuilder().blockHeader(blockHeader).build(); final SubscribeStreamResponse subscribeStreamResponse = - SubscribeStreamResponse.newBuilder().setBlockItem(blockItem).build(); + SubscribeStreamResponse.newBuilder().blockItem(blockItem).build(); when(serviceStatus.isRunning()).thenReturn(true); final ProducerBlockItemObserver producerBlockItemObserver = new ProducerBlockItemObserver( - streamMediator, - publishStreamResponseObserver, - new ItemAckBuilder(), - serviceStatus); + streamMediator, publishStreamResponseObserver, serviceStatus); final PublishStreamRequest publishStreamRequest = - PublishStreamRequest.newBuilder().setBlockItem(blockItem).build(); - producerBlockItemObserver.onNext(publishStreamRequest); + PublishStreamRequest.newBuilder().blockItem(blockItem).build(); + producerBlockItemObserver.onNext(fromPbj(publishStreamRequest)); // Confirm the block item counter was incremented assertEquals(1, blockNodeContext.metricsService().liveBlockItems.get()); // Confirm each subscriber was notified of the new block - verify(streamObserver1, timeout(50).times(1)).onNext(subscribeStreamResponse); - verify(streamObserver2, timeout(50).times(1)).onNext(subscribeStreamResponse); - verify(streamObserver3, timeout(50).times(1)).onNext(subscribeStreamResponse); + verify(streamObserver1, timeout(50).times(1)).onNext(fromPbj(subscribeStreamResponse)); + verify(streamObserver2, 
timeout(50).times(1)).onNext(fromPbj(subscribeStreamResponse)); + verify(streamObserver3, timeout(50).times(1)).onNext(fromPbj(subscribeStreamResponse)); // Confirm the BlockStorage write method was // called despite the absence of subscribers @@ -184,10 +195,7 @@ blockWriter, blockNodeContext, new ServiceStatusImpl()) public void testOnError() { final ProducerBlockItemObserver producerBlockItemObserver = new ProducerBlockItemObserver( - streamMediator, - publishStreamResponseObserver, - new ItemAckBuilder(), - serviceStatus); + streamMediator, publishStreamResponseObserver, serviceStatus); final Throwable t = new Throwable("Test error"); producerBlockItemObserver.onError(t); @@ -195,33 +203,97 @@ public void testOnError() { } @Test - public void testItemAckBuilderExceptionTest() throws IOException, NoSuchAlgorithmException { - - final ProducerBlockItemObserver producerBlockItemObserver = - new ProducerBlockItemObserver( - streamMediator, - publishStreamResponseObserver, - itemAckBuilder, - serviceStatus); + public void testItemAckBuilderExceptionTest() { when(serviceStatus.isRunning()).thenReturn(true); - when(itemAckBuilder.buildAck(any())) - .thenThrow(new NoSuchAlgorithmException("Test exception")); + + final ProducerBlockItemObserver testProducerBlockItemObserver = + new TestProducerBlockItemObserver( + streamMediator, publishStreamResponseObserver, serviceStatus); final List blockItems = generateBlockItems(1); final BlockItem blockHeader = blockItems.getFirst(); final PublishStreamRequest publishStreamRequest = - PublishStreamRequest.newBuilder().setBlockItem(blockHeader).build(); - producerBlockItemObserver.onNext(publishStreamRequest); - - final PublishStreamResponse.EndOfStream endOfStream = - PublishStreamResponse.EndOfStream.newBuilder() - .setStatus( - PublishStreamResponse.PublishStreamResponseCode - .STREAM_ITEMS_UNKNOWN) + PublishStreamRequest.newBuilder().blockItem(blockHeader).build(); + testProducerBlockItemObserver.onNext(fromPbj(publishStreamRequest)); + + final EndOfStream endOfStream = + EndOfStream.newBuilder() + .status(PublishStreamResponseCode.STREAM_ITEMS_UNKNOWN) .build(); final PublishStreamResponse errorResponse = - PublishStreamResponse.newBuilder().setStatus(endOfStream).build(); - verify(publishStreamResponseObserver, timeout(50).times(1)).onNext(errorResponse); + PublishStreamResponse.newBuilder().status(endOfStream).build(); + verify(publishStreamResponseObserver, timeout(50).times(1)).onNext(fromPbj(errorResponse)); + } + + @Test + public void testBlockItemThrowsParseException() throws InvalidProtocolBufferException { + final ProducerBlockItemObserver producerBlockItemObserver = + new ProducerBlockItemObserver( + streamMediator, publishStreamResponseObserver, serviceStatus); + + // Create a pbj block item + final List blockItems = generateBlockItems(1); + final BlockItem blockHeader = blockItems.getFirst(); + + // Convert the block item to a protoc and add a spy to reverse the bytes to + // provoke a ParseException + final byte[] pbjBytes = BlockItem.PROTOBUF.toBytes(blockHeader).toByteArray(); + final com.hedera.hapi.block.stream.protoc.BlockItem protocBlockItem = + spy(com.hedera.hapi.block.stream.protoc.BlockItem.parseFrom(pbjBytes)); + + // set up the spy to pass the reversed bytes when called + final byte[] reversedBytes = reverseByteArray(protocBlockItem.toByteArray()); + when(protocBlockItem.toByteArray()).thenReturn(reversedBytes); + + // create the PublishStreamRequest with the spy block item + final 
com.hedera.hapi.block.protoc.PublishStreamRequest protocPublishStreamRequest = + com.hedera.hapi.block.protoc.PublishStreamRequest.newBuilder() + .setBlockItem(protocBlockItem) + .build(); + + // call the producerBlockItemObserver + producerBlockItemObserver.onNext(protocPublishStreamRequest); + + // TODO: Replace this with a real error enum. + final EndOfStream endOfStream = + EndOfStream.newBuilder() + .status(PublishStreamResponseCode.STREAM_ITEMS_UNKNOWN) + .build(); + fromPbj(PublishStreamResponse.newBuilder().status(endOfStream).build()); + + // verify the ProducerBlockItemObserver has sent an error response + verify(publishStreamResponseObserver, timeout(50).times(1)) + .onNext(fromPbj(PublishStreamResponse.newBuilder().status(endOfStream).build())); + + verify(serviceStatus, timeout(50).times(1)).stopWebServer(); + } + + private static class TestProducerBlockItemObserver extends ProducerBlockItemObserver { + public TestProducerBlockItemObserver( + StreamMediator> streamMediator, + StreamObserver + publishStreamResponseObserver, + ServiceStatus serviceStatus) { + super(streamMediator, publishStreamResponseObserver, serviceStatus); + } + + @NonNull + @Override + protected Acknowledgement buildAck(@NonNull final BlockItem blockItem) + throws NoSuchAlgorithmException { + throw new NoSuchAlgorithmException("test no such algorithm exception"); + } + } + + @NonNull + private static Acknowledgement buildAck(@NonNull final BlockItem blockItem) + throws NoSuchAlgorithmException { + ItemAcknowledgement itemAck = + ItemAcknowledgement.newBuilder() + .itemHash(Bytes.wrap(getFakeHash(blockItem))) + .build(); + + return Acknowledgement.newBuilder().itemAck(itemAck).build(); } } diff --git a/server/src/test/java/com/hedera/block/server/util/PersistTestUtils.java b/server/src/test/java/com/hedera/block/server/util/PersistTestUtils.java index a8a3dc44..219d52ae 100644 --- a/server/src/test/java/com/hedera/block/server/util/PersistTestUtils.java +++ b/server/src/test/java/com/hedera/block/server/util/PersistTestUtils.java @@ -16,18 +16,42 @@ package com.hedera.block.server.util; -import static com.hedera.block.protos.BlockStreamService.BlockItem; -import static com.hedera.block.protos.BlockStreamService.BlockProof; -import static com.hedera.block.protos.BlockStreamService.EventMetadata; +import static java.lang.System.Logger; +import static java.lang.System.Logger.Level.INFO; -import com.hedera.block.protos.BlockStreamService; +import com.hedera.hapi.block.stream.BlockItem; +import com.hedera.hapi.block.stream.BlockProof; +import com.hedera.hapi.block.stream.input.EventHeader; +import com.hedera.hapi.block.stream.output.BlockHeader; +import com.hedera.hapi.node.base.SemanticVersion; +import com.hedera.hapi.platform.event.EventCore; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.file.Path; import java.util.ArrayList; import java.util.List; public final class PersistTestUtils { + private static final Logger LOGGER = System.getLogger(PersistTestUtils.class.getName()); + private PersistTestUtils() {} + public static void writeBlockItemToPath(final Path path, final BlockItem blockItem) + throws IOException { + + Bytes bytes = BlockItem.PROTOBUF.toBytes(blockItem); + writeBytesToPath(path, bytes.toByteArray()); + } + + public static void writeBytesToPath(final Path path, final byte[] bytes) throws IOException { + try (FileOutputStream fos = new FileOutputStream(path.toString())) { + fos.write(bytes); + LOGGER.log(INFO, 
"Successfully wrote the bytes to file: {0}", path); + } + } + public static List generateBlockItems(int numOfBlocks) { List blockItems = new ArrayList<>(); @@ -38,26 +62,35 @@ public static List generateBlockItems(int numOfBlocks) { // First block is always the header blockItems.add( BlockItem.newBuilder() - .setHeader( - BlockStreamService.BlockHeader.newBuilder() - .setBlockNumber(i) + .blockHeader( + BlockHeader.newBuilder() + .number(i) + .softwareVersion( + SemanticVersion.newBuilder() + .major(1) + .minor(0) + .build()) .build()) - .setValue("block-item-" + (j)) .build()); break; case 10: // Last block is always the state proof blockItems.add( BlockItem.newBuilder() - .setStateProof(BlockProof.newBuilder().setBlock(i).build()) + .blockProof(BlockProof.newBuilder().block(i).build()) .build()); break; default: // Middle blocks are events blockItems.add( BlockItem.newBuilder() - .setStartEvent( - EventMetadata.newBuilder().setCreatorId(i).build()) + .eventHeader( + EventHeader.newBuilder() + .eventCore( + EventCore.newBuilder() + .creatorNodeId(i) + .build()) + .build()) .build()); break; } @@ -66,4 +99,17 @@ public static List generateBlockItems(int numOfBlocks) { return blockItems; } + + public static byte[] reverseByteArray(byte[] input) { + if (input == null || input.length == 0) { + return input; + } + + byte[] reversed = new byte[input.length]; + for (int i = 0; i < input.length; i++) { + reversed[i] = input[input.length - 1 - i]; + } + + return reversed; + } } diff --git a/server/src/test/resources/batch_consumer.sh b/server/src/test/resources/batch_consumer.sh new file mode 100755 index 00000000..74c18b62 --- /dev/null +++ b/server/src/test/resources/batch_consumer.sh @@ -0,0 +1,50 @@ +#!/bin/bash + +# Function to handle SIGINT +cleanup() { + echo "Caught SIGINT signal! Terminating all background processes..." + for pid in "${pids[@]}"; do + kill "$pid" + echo "Terminated process with PID: $pid" + done + exit 1 +} + +# Trap SIGINT (Ctrl+C) and call cleanup function +trap cleanup SIGINT + +# Check if the script received an integer argument +if [ -z "$1" ] || ! [[ "$1" =~ ^[0-9]+$ ]]; then + echo "Usage: $0 " + exit 1 +fi + +# Number of times to invoke the consumer script +num_iterations=$1 +pids=() +dir_name="test_output" + +# Check if the directory exists +if [ -d "./$dir_name" ]; then + echo "Directory '$dir_name' exists. Removing files inside it..." + # Remove files inside the directory + rm -f "./$dir_name"/* +else + echo "Directory '$dir_name' does not exist. Creating it..." + # Create the directory + mkdir "./$dir_name" +fi + + +# Loop to invoke consumer.sh and store PIDs +for ((i = 0; i < num_iterations; i++)); do + ./consumer.sh 1 > "./$dir_name/bg_pid_$i.txt" 2>&1 & + bg_pid=$! 
+ pids+=("$bg_pid") + echo "Started consumer.sh with PID: $bg_pid" +done + +# Wait for all background processes to complete +for pid in "${pids[@]}"; do + wait "$pid" +done diff --git a/server/src/test/resources/block_service.proto b/server/src/test/resources/block_service.proto new file mode 100644 index 00000000..460f63a3 --- /dev/null +++ b/server/src/test/resources/block_service.proto @@ -0,0 +1,209 @@ +syntax = "proto3"; + +package com.hedera.hapi.block; + +option java_package = "com.hedera.hapi.block.protoc"; +option java_multiple_files = true; + +//import "basic_types.proto"; +//import "stream/block.proto"; +//import "stream/block_item.proto"; + +message PublishStreamRequest { + BlockItem block_item = 1; +} + +message PublishStreamResponse { + oneof response { + Acknowledgement acknowledgement = 1; + EndOfStream status = 2; + } + + message Acknowledgement { + oneof acknowledgements { + BlockAcknowledgement block_ack = 1; + ItemAcknowledgement item_ack = 2; + } + } + + message ItemAcknowledgement { + bytes item_hash = 1; + } + + message BlockAcknowledgement { + uint64 block_number = 1; + bytes block_root_hash = 2; + bool block_already_exists = 3; + } + + message EndOfStream { + PublishStreamResponseCode status = 1; + uint64 block_number = 2; + } +} + +enum PublishStreamResponseCode { + STREAM_ITEMS_UNKNOWN = 0; + STREAM_ITEMS_SUCCESS = 1; + STREAM_ITEMS_TIMEOUT = 2; + STREAM_ITEMS_OUT_OF_ORDER = 3; + STREAM_ITEMS_BAD_STATE_PROOF = 4; + STREAM_ITEMS_BEHIND = 5; +} + +message SingleBlockRequest { + uint64 block_number = 1; + bool allow_unverified = 2; + bool retrieve_latest = 3; +} + +message SingleBlockResponse { + SingleBlockResponseCode status = 1; + Block block = 2; +} + +enum SingleBlockResponseCode { + READ_BLOCK_UNKNOWN = 0; + READ_BLOCK_INSUFFICIENT_BALANCE = 1; + READ_BLOCK_SUCCESS = 2; + READ_BLOCK_NOT_FOUND = 3; + READ_BLOCK_NOT_AVAILABLE = 4; + ALLOW_UNVERIFIED_REQUIRED = 5; + VERIFIED_BLOCK_UNAVAILABLE = 6; +} + +message SubscribeStreamRequest { + uint64 start_block_number = 1; + uint64 end_block_number = 2; + bool allow_unverified = 3; +} + +message SubscribeStreamResponse { + oneof response { + SubscribeStreamResponseCode status = 1; + BlockItem block_item = 2; + } +} + +enum SubscribeStreamResponseCode { + READ_STREAM_UNKNOWN = 0; + READ_STREAM_INSUFFICIENT_BALANCE = 1; + READ_STREAM_SUCCESS = 2; + READ_STREAM_INVALID_START_BLOCK_NUMBER = 3; + READ_STREAM_INVALID_END_BLOCK_NUMBER = 4; +} + +//message StateSnapshotRequest { +// uint64 last_block_number = 2; +// bool retrieve_latest = 3; +//} +// +//message StateSnapshotResponse { +// StateSnapshotResponseCode status = 1; +// uint64 last_block_number = 2; +// string snapshot_reference = 3; +//} +// +//enum StateSnapshotResponseCode { +// STATE_SNAPSHOT_UNKNOWN = 0; +// STATE_SNAPSHOT_INSUFFICIENT_BALANCE = 1; +// STATE_SNAPSHOT_SUCCESS = 2; +//} +// +//message ServerStatusRequest {} +//message ServerStatusResponse { +// uint64 first_available_block = 1; +// uint64 last_available_block = 2; +// bool only_latest_state = 3; +// BlockNodeVersions version_information = 4; +//} +// +//message BlockNodeVersions { +// proto.SemanticVersion address_book_version = 1; +// proto.SemanticVersion stream_proto_version = 2; +// proto.SemanticVersion software_version = 3; +//} + +service BlockStreamService { +// rpc serverStatus(ServerStatusRequest) returns (ServerStatusResponse); + rpc singleBlock(SingleBlockRequest) returns (SingleBlockResponse); +// rpc stateSnapshot(StateSnapshotRequest) returns (StateSnapshotResponse); + rpc 
publishBlockStream (stream PublishStreamRequest) returns (stream PublishStreamResponse); + rpc subscribeBlockStream(SubscribeStreamRequest) returns (stream SubscribeStreamResponse); +} + +// block.proto + +message Block { + repeated BlockItem items = 1; +} + +// block_item.proto + +message BlockItem { + oneof item { + BlockHeader block_header = 1; + EventHeader event_header = 2; +// RoundHeader round_header = 3; +// EventTransaction event_transaction = 4; +// TransactionResult transaction_result = 5; +// TransactionOutput transaction_output = 6; +// StateChanges state_changes = 7; +// FilteredItemHash filtered_item_hash = 8; +// BlockProof block_proof = 9; + BlockProof block_proof = 3; +// RecordFileItem record_file = 10; + } +} + +//message FilteredItemHash { +// bytes item_hash = 1; +// uint64 filtered_path = 3; +//} + +// output/block_header.proto + +message BlockHeader { + SemanticVersion hapi_proto_version = 1; + SemanticVersion software_version = 2; + uint64 number = 3; +// bytes previous_block_hash = 4; +// proto.Timestamp first_transaction_consensus_time = 5; +// proto.BlockHashAlgorithm hash_algorithm = 6; +// proto.SemanticVersion address_book_version = 7; +} + +message SemanticVersion { + int32 major = 1; + int32 minor = 2; + int32 patch = 3; + string pre = 4; + string build = 5; +} + +// input/event_metadata.proto + +message EventHeader { + EventCore event_core = 1; +// bytes signature = 2; +} + +// platform/event/event_core.proto + +message EventCore { + int64 creator_node_id = 1; +// int64 birth_round = 2; +// proto.Timestamp time_created = 3; +// repeated EventDescriptor parents = 4; +// proto.SemanticVersion version = 17; // This field is temporary until birth_round migration is complete. Field number 17 chosen to avoid polluting cheaper 1 byte field numbers 1-16 +} + +// block_proof.proto + +message BlockProof { + uint64 block = 1; +// bytes previous_block_root_hash = 2; +// bytes start_of_block_state_root_hash = 3; +// bytes block_signature = 4; +// repeated MerkleSiblingHash sibling_hashes = 5; +} diff --git a/server/src/test/resources/consumer.sh b/server/src/test/resources/consumer.sh index 153146b3..1d3fbdb2 100755 --- a/server/src/test/resources/consumer.sh +++ b/server/src/test/resources/consumer.sh @@ -5,21 +5,34 @@ usage_error() { exit 1 } +cleanup() { + echo "Caught SIGINT signal! Terminating the background process..." + kill "$grp_pid" + exit 0 +} + # An integer is expected as the first parameter if [ "$#" -lt 1 ] || ! [[ "$1" =~ ^[0-9]+$ ]]; then usage_error fi +trap cleanup SIGINT +trap cleanup SIGTERM + # If the script reaches here, the parameters are valid echo "Param is: $1" # Use environment variables or default values GRPC_SERVER=${GRPC_SERVER:-"localhost:8080"} -GRPC_METHOD=${GRPC_METHOD:-"BlockStreamGrpcService/subscribeBlockStream"} -PATH_TO_PROTO=${PATH_TO_PROTO:-"../../../../protos/src/main/protobuf/blockstream.proto"} -PROTO_IMPORT_PATH=${PROTO_IMPORT_PATH:-"../../../../protos/src/main/protobuf"} +GRPC_METHOD=${GRPC_METHOD:-"com.hedera.hapi.block.BlockStreamService/subscribeBlockStream"} +PATH_TO_PROTO="./block_service.proto" echo "Starting consumer..." # Response block messages from the gRPC server are printed to stdout. -echo "{\"start_block_number\": $1}" | grpcurl -plaintext -import-path $PROTO_IMPORT_PATH -proto $PATH_TO_PROTO -d @ $GRPC_SERVER $GRPC_METHOD +echo "{\"start_block_number\": $1}" | grpcurl -plaintext -proto $PATH_TO_PROTO -d @ $GRPC_SERVER $GRPC_METHOD & +grp_pid=$! 
+echo "Started consumer with PID: $grp_pid" + +# Wait for the background process to complete +wait "$grp_pid" diff --git a/server/src/test/resources/producer.sh b/server/src/test/resources/producer.sh index 3f2d2ce5..e35a9a38 100755 --- a/server/src/test/resources/producer.sh +++ b/server/src/test/resources/producer.sh @@ -21,10 +21,39 @@ if [ "$#" -eq 2 ]; then echo "The optional positive integer is: $2" fi +generate_header() { + local block_header_number=$1 + + # Interpolate the integer parameter into the JSON template + local result + result=$(echo "$header_template" | jq --argjson number "$block_header_number" ".block_item.block_header.number = $block_header_number") + + echo "$result" +} + +generate_event() { + local creator_node_id=$1 + + # Interpolate the integer parameter into the JSON template + local result + result=$(echo "$event_template" | jq --argjson creator_id "$creator_node_id" ".block_item.event_header.event_core.creator_node_id = $creator_node_id") + + echo "$result" +} + +generate_block_proof() { + local block_number=$1 + + # Interpolate the integer parameter into the JSON template + local result + result=$(echo "$block_proof_template" | jq --argjson block "$block_number" ".block_item.block_proof.block = $block_number") + + echo "$result" +} GRPC_SERVER="localhost:8080" -GRPC_METHOD="BlockStreamGrpcService/publishBlockStream" -PATH_TO_PROTO="../../../../protos/src/main/protobuf/blockstream.proto" +GRPC_METHOD="com.hedera.hapi.block.BlockStreamService/publishBlockStream" +PATH_TO_PROTO="./block_service.proto" echo "Starting producer..." @@ -38,6 +67,15 @@ function cleanup { # Trap SIGINT trap cleanup SIGINT +# Read the JSON template from the file +header_template=$(cat "templates/header_template.json") + +# Read the JSON template from the file +block_proof_template=$(cat "templates/block_proof_template.json") + +# Read the JSON template from the file +event_template=$(cat "templates/event_template.json") + # Generate and push messages to the gRPC server as a producer. # Response messages from the gRPC server are printed to stdout. ( @@ -48,13 +86,15 @@ trap cleanup SIGINT # Generate 10 BlockItems per Block for ((i=1; i<=$block_items; i++)) do - if [[ $i -eq 1 ]]; then - echo "{\"block_item\": {\"header\": {\"block_number\": $iter},\"value\": \"Payload[...]\"}}" + result=$(generate_header $iter) + echo "$result" elif [[ $i -eq $block_items ]]; then - echo "{\"block_item\": {\"state_proof\": {\"block\": $iter},\"value\": \"Payload[...]\"}}" + result=$(generate_block_proof $iter) + echo "$result" else - echo "{\"block_item\": {\"start_event\": {\"creator_id\": $i},\"value\": \"Payload[...]\"}}" + result=$(generate_event $i) + echo "$result" fi sleep 0.01 @@ -66,7 +106,7 @@ trap cleanup SIGINT ((iter++)) done -) | grpcurl -vv -plaintext -proto $PATH_TO_PROTO -d @ $GRPC_SERVER $GRPC_METHOD & +) | grpcurl -plaintext -proto $PATH_TO_PROTO -d @ $GRPC_SERVER $GRPC_METHOD & GRPC_PID=$! 
diff --git a/server/src/test/resources/templates/block_proof_template.json b/server/src/test/resources/templates/block_proof_template.json new file mode 100644 index 00000000..ca1f17e1 --- /dev/null +++ b/server/src/test/resources/templates/block_proof_template.json @@ -0,0 +1,7 @@ +{ + "block_item": { + "block_proof": { + "block": -1 + } + } +} diff --git a/server/src/test/resources/templates/event_template.json b/server/src/test/resources/templates/event_template.json new file mode 100644 index 00000000..2efbcc74 --- /dev/null +++ b/server/src/test/resources/templates/event_template.json @@ -0,0 +1,12 @@ +{ + "block_item": + { + "event_header": + { + "event_core": + { + "creator_node_id": 0 + } + } + } +} diff --git a/server/src/test/resources/templates/header_template.json b/server/src/test/resources/templates/header_template.json new file mode 100644 index 00000000..2484e0f8 --- /dev/null +++ b/server/src/test/resources/templates/header_template.json @@ -0,0 +1,21 @@ +{ + "block_item": { + "block_header": { + "hapi_proto_version": { + "build": "dolor occaecat", + "major": 1, + "minor": 0, + "patch": 0, + "pre": "qui ut quis adipisicing" + }, + "number": -1, + "software_version": { + "build": "sunt sint dolor", + "major": 2, + "minor": 0, + "patch": 0, + "pre": "est" + } + } + } +} diff --git a/settings.gradle.kts b/settings.gradle.kts index 04488289..28546250 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -18,7 +18,8 @@ plugins { id("com.gradle.enterprise").version("3.15.1") } -include(":protos") +// Include the subprojects +include(":stream") include(":server") includeBuild(".") // https://github.com/gradle/gradle/issues/21490#issuecomment-1458887481 @@ -30,6 +31,9 @@ gradleEnterprise { } } +// The HAPI API version to use for Protobuf sources. +val hapiProtoVersion = "0.53.0" + dependencyResolutionManagement { versionCatalogs { create("libs") { @@ -52,15 +56,44 @@ dependencyResolutionManagement { version("com.google.auto.service.processor", "1.1.1") version("com.google.auto.service", "1.1.1") version("org.hyperledger.besu.nativelib.secp256k1", "0.8.2") + + // PBJ dependencies + version("io.grpc", "1.64.0") + version("io.grpc.protobuf", "1.64.0") + version("io.grpc.stub", "1.64.0") + + plugin("pbj", "com.hedera.pbj.pbj-compiler").version("0.9.2") + version("com.hedera.pbj.runtime", "0.9.2") + version("org.antlr.antlr4.runtime", "4.13.1") + + version("java.annotation", "1.3.2") + version("javax.inject", "1") + version("com.google.protobuf", "3.21.7") + version("com.google.protobuf.util", "3.21.7") + + version("hapi-proto", hapiProtoVersion) + version("com.google.common", "33.0.0-jre") + + version("org.apache.commons.codec", "1.15") + version("org.apache.commons.collections4", "4.4") + version("org.apache.commons.io", "2.15.1") + version("org.apache.commons.lang3", "3.14.0") + version("org.apache.commons.compress", "1.26.0") + version("org.apache.logging.log4j.slf4j2.impl", "2.21.1") + // Testing only versions version("org.assertj.core", "3.23.1") version("org.junit.jupiter.api", "5.10.2") version("org.mockito", "5.8.0") version("org.mockito.junit.jupiter", "5.8.0") + version("google.proto", "3.21.10") + version("grpc.protobuf.grpc", "1.45.1") + } } } + // Build cache configuration val isCiServer = System.getenv().containsKey("CI") val gradleCacheUsername: String? 
= System.getenv("GRADLE_CACHE_USERNAME") diff --git a/stream/build.gradle.kts b/stream/build.gradle.kts new file mode 100644 index 00000000..19f27872 --- /dev/null +++ b/stream/build.gradle.kts @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2022-2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id("com.hedera.block.protobuf") + alias(libs.plugins.pbj) +} + +group = "com.hedera.block" + +description = "Hedera API" + +// Remove the following line to enable all 'javac' lint checks that we have turned on by default +// and then fix the reported issues. +tasks.withType().configureEach { + options.compilerArgs.add("-Xlint:-exports,-deprecation,-removal") +} + +// Add downloaded HAPI repo protobuf files into build directory and add to sources to build them +tasks.cloneHederaProtobufs { + // uncomment below to use a specific tag + // tag = "v0.53.0" or a specific commit like "0047255" + tag = "0047255" + + // uncomment below to use a specific branch + // branch = "main" +} + +sourceSets { + main { + pbj { + srcDir(tasks.cloneHederaProtobufs.flatMap { it.localCloneDirectory.dir("services") }) + srcDir(tasks.cloneHederaProtobufs.flatMap { it.localCloneDirectory.dir("block") }) + srcDir(tasks.cloneHederaProtobufs.flatMap { it.localCloneDirectory.dir("platform") }) + srcDir(tasks.cloneHederaProtobufs.flatMap { it.localCloneDirectory.dir("streams") }) + } + proto { + srcDir(tasks.cloneHederaProtobufs.flatMap { it.localCloneDirectory.dir("services") }) + srcDir(tasks.cloneHederaProtobufs.flatMap { it.localCloneDirectory.dir("block") }) + srcDir(tasks.cloneHederaProtobufs.flatMap { it.localCloneDirectory.dir("platform") }) + srcDir(tasks.cloneHederaProtobufs.flatMap { it.localCloneDirectory.dir("streams") }) + } + } +} + +testModuleInfo { + // we depend on the protoc compiled hapi during test as we test our pbj generated code + // against it to make sure it is compatible + requires("com.google.protobuf.util") + requires("org.junit.jupiter.api") + requires("org.junit.jupiter.params") +} diff --git a/stream/src/main/java/module-info.java b/stream/src/main/java/module-info.java new file mode 100644 index 00000000..f557cc45 --- /dev/null +++ b/stream/src/main/java/module-info.java @@ -0,0 +1,18 @@ +module com.hedera.block.stream { + exports com.hedera.hapi.block; + exports com.hedera.hapi.block.protoc; + exports com.hedera.hapi.block.stream.protoc; + exports com.hedera.hapi.block.stream; + exports com.hedera.hapi.block.stream.input; + exports com.hedera.hapi.block.stream.output; + + requires transitive com.google.common; + requires transitive com.google.protobuf; + requires transitive com.hedera.pbj.runtime; + requires transitive io.grpc.stub; + requires transitive io.grpc; + requires io.grpc.protobuf; + requires org.antlr.antlr4.runtime; + requires static com.github.spotbugs.annotations; + requires static java.annotation; +}
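As a rough end-to-end check of the renamed service, the endpoints can also be exercised directly with grpcurl, mirroring what the updated consumer.sh does. The sketch below is not part of this change: it assumes a block node is already listening on localhost:8080 and that it is run from server/src/test/resources so the block_service.proto added above resolves.

#!/bin/bash
GRPC_SERVER="localhost:8080"
PATH_TO_PROTO="./block_service.proto"

# Stream block items starting from block 1, the same request shape consumer.sh sends.
echo '{"start_block_number": 1}' | \
    grpcurl -plaintext -proto "$PATH_TO_PROTO" -d @ "$GRPC_SERVER" \
    com.hedera.hapi.block.BlockStreamService/subscribeBlockStream

# The unary singleBlock endpoint is also declared in block_service.proto; whether the
# running server answers it depends on the server build.
echo '{"block_number": 1}' | \
    grpcurl -plaintext -proto "$PATH_TO_PROTO" -d @ "$GRPC_SERVER" \
    com.hedera.hapi.block.BlockStreamService/singleBlock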