diff --git a/.github/workflows/zxc-compile-pbj-code.yaml b/.github/workflows/zxc-compile-pbj-code.yaml index 22aa154c..49addb44 100644 --- a/.github/workflows/zxc-compile-pbj-code.yaml +++ b/.github/workflows/zxc-compile-pbj-code.yaml @@ -103,15 +103,14 @@ jobs: build-root-directory: ${{ env.PBJ_CORE }} arguments: assemble -# Removed for now until pbj-core can be made compatible with spotless -# - name: Gradle Check (PBJ Core) -# uses: gradle/actions/setup-gradle@0bdd871935719febd78681f197cd39af5b6e16a6 # v4.2.2 -# if: ${{ inputs.enable-unit-tests && steps.gradle-build.conclusion == 'success' && !cancelled() }} -# with: -# gradle-version: ${{ inputs.gradle-version }} -# build-root-directory: ${{ env.PBJ_CORE }} -# arguments: check -# + - name: Gradle Check (PBJ Core) + uses: gradle/actions/setup-gradle@dbbdc275be76ac10734476cc723d82dfe7ec6eda # v3.4.2 + if: ${{ inputs.enable-unit-tests && steps.gradle-build.conclusion == 'success' && !cancelled() }} + with: + gradle-version: ${{ inputs.gradle-version }} + build-root-directory: ${{ env.PBJ_CORE }} + arguments: check + - name: Publish JUnit Test Report (PBJ Core) uses: step-security/publish-unit-test-result-action@4519d7c9f71dd765f8bbb98626268780f23bab28 # v2.17.0 if: ${{ inputs.enable-unit-tests && steps.gradle-build.conclusion == 'success' && !cancelled() && always() }} diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/DefaultPbjSourceDirectorySet.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/DefaultPbjSourceDirectorySet.java index 79c236e3..9c331896 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/DefaultPbjSourceDirectorySet.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/DefaultPbjSourceDirectorySet.java @@ -7,12 +7,10 @@ import org.gradle.api.internal.tasks.TaskDependencyFactory; /** Source directory set for PBJ, a directory full of .proto source files */ -public abstract class DefaultPbjSourceDirectorySet extends DefaultSourceDirectorySet - implements PbjSourceDirectorySet { +public abstract class DefaultPbjSourceDirectorySet extends DefaultSourceDirectorySet implements PbjSourceDirectorySet { @Inject - public DefaultPbjSourceDirectorySet( - SourceDirectorySet sourceSet, TaskDependencyFactory taskDependencyFactory) { + public DefaultPbjSourceDirectorySet(SourceDirectorySet sourceSet, TaskDependencyFactory taskDependencyFactory) { super(sourceSet, taskDependencyFactory); } } diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompiler.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompiler.java index 3ffadb62..22b7cdb0 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompiler.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompiler.java @@ -1,19 +1,4 @@ -/* - * Copyright (C) 2023-2025 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - +// SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.compiler; import com.hedera.pbj.compiler.impl.ContextualLookupHelper; diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerPlugin.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerPlugin.java index d7466202..079fa908 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerPlugin.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerPlugin.java @@ -43,11 +43,8 @@ public void apply(Project project) { // for the 'main' source set we: // 1) Add a new 'pbj' virtual directory mapping PbjSourceDirectorySet pbjSourceSet = - createPbjSourceDirectorySet( - ((DefaultSourceSet) mainSrcSet).getDisplayName(), getObjectFactory()); - mainSrcSet - .getExtensions() - .add(PbjSourceDirectorySet.class, PbjSourceDirectorySet.NAME, pbjSourceSet); + createPbjSourceDirectorySet(((DefaultSourceSet) mainSrcSet).getDisplayName(), getObjectFactory()); + mainSrcSet.getExtensions().add(PbjSourceDirectorySet.class, PbjSourceDirectorySet.NAME, pbjSourceSet); pbjSourceSet.getFilter().include("**/*.proto"); pbjSourceSet.srcDir("src/" + mainSrcSet.getName() + "/proto"); mainSrcSet.getAllSource().source(pbjSourceSet); @@ -56,31 +53,20 @@ public void apply(Project project) { // naming conventions via call to sourceSet.getTaskName() final String taskName = mainSrcSet.getTaskName("generate", "PbjSource"); - TaskProvider pbjCompiler = - project.getTasks() - .register( - taskName, - PbjCompilerTask.class, - pbjTask -> { - pbjTask.setDescription( - "Processes the " - + mainSrcSet.getName() - + " Pbj grammars."); - // 4) set up convention mapping for default sources (allows user - // to not have to specify) - pbjTask.setSource(pbjSourceSet); - pbjTask.getJavaMainOutputDirectory().set(outputDirectoryMain); - pbjTask.getJavaTestOutputDirectory().set(outputDirectoryTest); - }); + TaskProvider pbjCompiler = project.getTasks() + .register(taskName, PbjCompilerTask.class, pbjTask -> { + pbjTask.setDescription("Processes the " + mainSrcSet.getName() + " Pbj grammars."); + // 4) set up convention mapping for default sources (allows user + // to not have to specify) + pbjTask.setSource(pbjSourceSet); + pbjTask.getJavaMainOutputDirectory().set(outputDirectoryMain); + pbjTask.getJavaTestOutputDirectory().set(outputDirectoryTest); + }); // 5) register fact that pbj should be run before compiling by informing the 'java' part // of the source set that it contains code produced by the pbj compiler - mainSrcSet - .getJava() - .srcDir(pbjCompiler.flatMap(PbjCompilerTask::getJavaMainOutputDirectory)); - testSrcSet - .getJava() - .srcDir(pbjCompiler.flatMap(PbjCompilerTask::getJavaTestOutputDirectory)); + mainSrcSet.getJava().srcDir(pbjCompiler.flatMap(PbjCompilerTask::getJavaMainOutputDirectory)); + testSrcSet.getJava().srcDir(pbjCompiler.flatMap(PbjCompilerTask::getJavaTestOutputDirectory)); } /** @@ -94,10 +80,8 @@ private static PbjSourceDirectorySet createPbjSourceDirectorySet( String parentDisplayName, ObjectFactory objectFactory) { String name = parentDisplayName + ".pbj"; String displayName = parentDisplayName + " Pbj source"; - PbjSourceDirectorySet pbjSourceSet = - objectFactory.newInstance( - DefaultPbjSourceDirectorySet.class, - objectFactory.sourceDirectorySet(name, displayName)); + PbjSourceDirectorySet pbjSourceSet = objectFactory.newInstance( + DefaultPbjSourceDirectorySet.class, objectFactory.sourceDirectorySet(name, displayName)); 
pbjSourceSet.getFilter().include("**/*.proto"); return pbjSourceSet; } diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Common.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Common.java index ae2ee7fa..bcb8df1f 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Common.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Common.java @@ -3,7 +3,6 @@ import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser; import edu.umd.cs.findbugs.annotations.NonNull; - import java.io.BufferedReader; import java.io.File; import java.io.FileReader; @@ -33,6 +32,7 @@ public final class Common { public static final int TYPE_FIXED32 = 5; /** Number of bits used to represent the tag type */ static final int TAG_TYPE_BITS = 3; + private static final Pattern COMPARABLE_PATTERN = Pattern.compile("[)] implements Comparable<\\w+> [{]"); /** @@ -77,8 +77,9 @@ public static String capitalizeFirstLetter(String name) { */ @NonNull public static String snakeToCamel(@NonNull String name, boolean firstUpper) { - final String out = Arrays.stream(name.split("_")).map(Common::capitalizeFirstLetter).collect( - Collectors.joining("")); + final String out = Arrays.stream(name.split("_")) + .map(Common::capitalizeFirstLetter) + .collect(Collectors.joining("")); return (firstUpper ? Character.toUpperCase(out.charAt(0)) : Character.toLowerCase(out.charAt(0))) + out.substring(1); } @@ -136,8 +137,8 @@ public static String buildCleanFieldJavaDoc(int fieldNumber, Protobuf3Parser.Doc * * @return clean comment */ - public static String buildCleanFieldJavaDoc(List fieldNumbers, - Protobuf3Parser.DocCommentContext docContext) { + public static String buildCleanFieldJavaDoc( + List fieldNumbers, Protobuf3Parser.DocCommentContext docContext) { final String cleanedComment = docContext == null ? "" : cleanJavaDocComment(docContext.getText()); final String fieldNumComment = "(" + fieldNumbers.stream().map(Objects::toString).collect(Collectors.joining(", ")) + ") "; @@ -152,13 +153,14 @@ public static String buildCleanFieldJavaDoc(List fieldNumbers, * @return clean multi-line content of the comment */ public static String cleanJavaDocComment(String fieldComment) { - return cleanDocStr(fieldComment - .replaceAll("/\\*\\*[\n\r\s\t]*\\*[\t\s]*|[\n\r\s\t]*\\*/", "") // remove java doc - .replaceAll("\n\s+\\*\s+", "\n") // remove indenting and * - .replaceAll("\n\s+\\*\s*\n", "\n\n") // remove indenting and * - .replaceAll("/\\*\\*", "") // remove indenting and /** at beginning of comment. - .trim() // Remove leading and trailing spaces. - ); + return cleanDocStr( + fieldComment + .replaceAll("/\\*\\*[\n\r\s\t]*\\*[\t\s]*|[\n\r\s\t]*\\*/", "") // remove java doc + .replaceAll("\n\s+\\*\s+", "\n") // remove indenting and * + .replaceAll("\n\s+\\*\s*\n", "\n\n") // remove indenting and * + .replaceAll("/\\*\\*", "") // remove indenting and /** at beginning of comment. + .trim() // Remove leading and trailing spaces. + ); } /** @@ -170,18 +172,19 @@ public static String cleanJavaDocComment(String fieldComment) { */ @SuppressWarnings("RegExpSingleCharAlternation") public static String cleanDocStr(String docStr) { - return docStr - .replaceAll("<(/?)tt>", "<$1code>") // tt tags are not supported in javadoc + return docStr.replaceAll("<(/?)tt>", "<$1code>") // tt tags are not supported in javadoc .replaceAll(" < ", " < ") // escape loose less than .replaceAll(" > ", " > ") // escape loose less than .replaceAll(" & ", " & ") // .replaceAll("
<p>([^<]*?)</p>", "%%%%%$1%%%%") // replace closed paragraphs temporarily .replaceAll("<p>((\\s|\\n)*?)($|<[^>]+>)", "$1$2$3") // remove <p> at end of paragraph - .replaceAll("<p>((.|\\n)*?)([\\s\\n]*)(%%%%%|<p>|\\n@\\w+ |$|<[^>]+>)", + .replaceAll( + "<p>((.|\\n)*?)([\\s\\n]*)(%%%%%|<p>|\\n@\\w+ |$|<[^>]+>)", "<p>$1</p>$3$4") // clean up loose paragraphs // Do second pass as we can miss some <p> that were caught as closers in first pass .replaceAll("<p>([^<]*?)</p>", "%%%%%$1%%%%") // replace closed paragraphs temporarily - .replaceAll("<p>((.|\\n)*?)([\\s\\n]*)(%%%%%|<p>|\\n@\\w+ |$|<[^>]+>)", + .replaceAll( + "<p>((.|\\n)*?)([\\s\\n]*)(%%%%%|<p>|\\n@\\w+ |$|<[^>]+>)", "<p>$1</p>$3$4") // clean up loose paragraphs // restore completed paragraphs .replaceAll("%%%%%", "<p>
") // replace back to paragraphs @@ -224,75 +227,83 @@ public static String getFieldsHashCode(final List fields, String generate } else if (f.repeated()) { generatedCodeSoFar = getRepeatedHashCodeGeneration(generatedCodeSoFar, f); } else { - if (f.type() == Field.FieldType.FIXED32 || - f.type() == Field.FieldType.INT32 || - f.type() == Field.FieldType.SFIXED32 || - f.type() == Field.FieldType.SINT32 || - f.type() == Field.FieldType.UINT32) { - generatedCodeSoFar += ( - """ + if (f.type() == Field.FieldType.FIXED32 + || f.type() == Field.FieldType.INT32 + || f.type() == Field.FieldType.SFIXED32 + || f.type() == Field.FieldType.SINT32 + || f.type() == Field.FieldType.UINT32) { + generatedCodeSoFar += + (""" if ($fieldName != DEFAULT.$fieldName) { result = 31 * result + Integer.hashCode($fieldName); } - """).replace("$fieldName", f.nameCamelFirstLower()); - } else if (f.type() == Field.FieldType.FIXED64 || - f.type() == Field.FieldType.INT64 || - f.type() == Field.FieldType.SFIXED64 || - f.type() == Field.FieldType.SINT64 || - f.type() == Field.FieldType.UINT64) { - generatedCodeSoFar += ( - """ + """) + .replace("$fieldName", f.nameCamelFirstLower()); + } else if (f.type() == Field.FieldType.FIXED64 + || f.type() == Field.FieldType.INT64 + || f.type() == Field.FieldType.SFIXED64 + || f.type() == Field.FieldType.SINT64 + || f.type() == Field.FieldType.UINT64) { + generatedCodeSoFar += + (""" if ($fieldName != DEFAULT.$fieldName) { result = 31 * result + Long.hashCode($fieldName); } - """).replace("$fieldName", f.nameCamelFirstLower()); + """) + .replace("$fieldName", f.nameCamelFirstLower()); } else if (f.type() == Field.FieldType.BOOL) { - generatedCodeSoFar += ( - """ + generatedCodeSoFar += + (""" if ($fieldName != DEFAULT.$fieldName) { result = 31 * result + Boolean.hashCode($fieldName); } - """).replace("$fieldName", f.nameCamelFirstLower()); + """) + .replace("$fieldName", f.nameCamelFirstLower()); } else if (f.type() == Field.FieldType.FLOAT) { - generatedCodeSoFar += ( - """ + generatedCodeSoFar += + (""" if ($fieldName != DEFAULT.$fieldName) { result = 31 * result + Float.hashCode($fieldName); } - """).replace("$fieldName", f.nameCamelFirstLower()); + """) + .replace("$fieldName", f.nameCamelFirstLower()); } else if (f.type() == Field.FieldType.DOUBLE) { - generatedCodeSoFar += ( - """ + generatedCodeSoFar += + (""" if ($fieldName != DEFAULT.$fieldName) { result = 31 * result + Double.hashCode($fieldName); } - """).replace("$fieldName", f.nameCamelFirstLower()); + """) + .replace("$fieldName", f.nameCamelFirstLower()); } else if (f.type() == Field.FieldType.BYTES) { - generatedCodeSoFar += ( - """ + generatedCodeSoFar += + (""" if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) { result = 31 * result + $fieldName.hashCode(); } - """).replace("$fieldName", f.nameCamelFirstLower()); + """) + .replace("$fieldName", f.nameCamelFirstLower()); } else if (f.type() == Field.FieldType.ENUM) { - generatedCodeSoFar += ( - """ + generatedCodeSoFar += + (""" if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) { result = 31 * result + Integer.hashCode($fieldName.protoOrdinal()); } - """).replace("$fieldName", f.nameCamelFirstLower()); + """) + .replace("$fieldName", f.nameCamelFirstLower()); } else if (f.type() == Field.FieldType.MAP) { generatedCodeSoFar += getMapHashCodeGeneration(generatedCodeSoFar, f); - } else if (f.type() == Field.FieldType.STRING || - f.parent() == null) { // process sub message - generatedCodeSoFar += ( - """ + } else if (f.type() == 
Field.FieldType.STRING || f.parent() == null) { // process sub message + generatedCodeSoFar += + (""" if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) { result = 31 * result + $fieldName.hashCode(); } - """).replace("$fieldName", f.nameCamelFirstLower()); + """) + .replace("$fieldName", f.nameCamelFirstLower()); } else { - throw new RuntimeException("Unexpected field type for getting HashCode - " + f.type().toString()); + throw new RuntimeException("Unexpected field type for getting HashCode - " + + f.type().toString()); } } } @@ -310,48 +321,55 @@ public static String getFieldsHashCode(final List fields, String generate @NonNull private static String getPrimitiveWrapperHashCodeGeneration(String generatedCodeSoFar, Field f) { switch (f.messageType()) { - case "StringValue" -> generatedCodeSoFar += ( - """ + case "StringValue" -> generatedCodeSoFar += + (""" if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) { result = 31 * result + $fieldName.hashCode(); } - """).replace("$fieldName", f.nameCamelFirstLower()); - case "BoolValue" -> generatedCodeSoFar += ( - """ + """) + .replace("$fieldName", f.nameCamelFirstLower()); + case "BoolValue" -> generatedCodeSoFar += + (""" if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) { result = 31 * result + Boolean.hashCode($fieldName); } - """).replace("$fieldName", f.nameCamelFirstLower()); - case "Int32Value", "UInt32Value" -> generatedCodeSoFar += ( - """ + """) + .replace("$fieldName", f.nameCamelFirstLower()); + case "Int32Value", "UInt32Value" -> generatedCodeSoFar += + (""" if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) { result = 31 * result + Integer.hashCode($fieldName); } - """).replace("$fieldName", f.nameCamelFirstLower()); - case "Int64Value", "UInt64Value" -> generatedCodeSoFar += ( - """ + """) + .replace("$fieldName", f.nameCamelFirstLower()); + case "Int64Value", "UInt64Value" -> generatedCodeSoFar += + (""" if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) { result = 31 * result + Long.hashCode($fieldName); } - """).replace("$fieldName", f.nameCamelFirstLower()); - case "FloatValue" -> generatedCodeSoFar += ( - """ + """) + .replace("$fieldName", f.nameCamelFirstLower()); + case "FloatValue" -> generatedCodeSoFar += + (""" if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) { result = 31 * result + Float.hashCode($fieldName); } - """).replace("$fieldName", f.nameCamelFirstLower()); - case "DoubleValue" -> generatedCodeSoFar += ( - """ + """) + .replace("$fieldName", f.nameCamelFirstLower()); + case "DoubleValue" -> generatedCodeSoFar += + (""" if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) { result = 31 * result + Double.hashCode($fieldName); } - """).replace("$fieldName", f.nameCamelFirstLower()); - case "BytesValue" -> generatedCodeSoFar += ( - """ + """) + .replace("$fieldName", f.nameCamelFirstLower()); + case "BytesValue" -> generatedCodeSoFar += + (""" if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) { result = 31 * result + ($fieldName == null ? 
0 : $fieldName.hashCode()); } - """).replace("$fieldName", f.nameCamelFirstLower()); + """) + .replace("$fieldName", f.nameCamelFirstLower()); default -> throw new UnsupportedOperationException("Unhandled optional message type:" + f.messageType()); } return generatedCodeSoFar; @@ -367,8 +385,8 @@ private static String getPrimitiveWrapperHashCodeGeneration(String generatedCode */ @NonNull private static String getRepeatedHashCodeGeneration(String generatedCodeSoFar, Field f) { - generatedCodeSoFar += ( - """ + generatedCodeSoFar += + (""" java.util.List list$$fieldName = $fieldName; if (list$$fieldName != null) { for (Object o : list$$fieldName) { @@ -379,7 +397,8 @@ private static String getRepeatedHashCodeGeneration(String generatedCodeSoFar, F } } } - """).replace("$fieldName", f.nameCamelFirstLower()); + """) + .replace("$fieldName", f.nameCamelFirstLower()); return generatedCodeSoFar; } @@ -393,8 +412,8 @@ private static String getRepeatedHashCodeGeneration(String generatedCodeSoFar, F */ @NonNull private static String getMapHashCodeGeneration(String generatedCodeSoFar, final Field f) { - generatedCodeSoFar += ( - """ + generatedCodeSoFar += + (""" for (Object k : ((PbjMap) $fieldName).getSortedKeys()) { if (k != null) { result = 31 * result + k.hashCode(); @@ -408,7 +427,8 @@ private static String getMapHashCodeGeneration(String generatedCodeSoFar, final result = 31 * result; } } - """).replace("$fieldName", f.nameCamelFirstLower()); + """) + .replace("$fieldName", f.nameCamelFirstLower()); return generatedCodeSoFar; } @@ -433,66 +453,72 @@ public static String getFieldsEqualsStatements(final List fields, String generatedCodeSoFar = getRepeatedEqualsGeneration(generatedCodeSoFar, f); } else { f.nameCamelFirstLower(); - if (f.type() == Field.FieldType.FIXED32 || - f.type() == Field.FieldType.INT32 || - f.type() == Field.FieldType.SFIXED32 || - f.type() == Field.FieldType.SINT32 || - f.type() == Field.FieldType.UINT32) { + if (f.type() == Field.FieldType.FIXED32 + || f.type() == Field.FieldType.INT32 + || f.type() == Field.FieldType.SFIXED32 + || f.type() == Field.FieldType.SINT32 + || f.type() == Field.FieldType.UINT32) { generatedCodeSoFar += """ if ($fieldName != thatObj.$fieldName) { return false; } - """.replace("$fieldName", f.nameCamelFirstLower()); - } else if (f.type() == Field.FieldType.FIXED64 || - f.type() == Field.FieldType.INT64 || - f.type() == Field.FieldType.SFIXED64 || - f.type() == Field.FieldType.SINT64 || - f.type() == Field.FieldType.UINT64) { + """ + .replace("$fieldName", f.nameCamelFirstLower()); + } else if (f.type() == Field.FieldType.FIXED64 + || f.type() == Field.FieldType.INT64 + || f.type() == Field.FieldType.SFIXED64 + || f.type() == Field.FieldType.SINT64 + || f.type() == Field.FieldType.UINT64) { generatedCodeSoFar += """ if ($fieldName != thatObj.$fieldName) { return false; } - """.replace("$fieldName", f.nameCamelFirstLower()); + """ + .replace("$fieldName", f.nameCamelFirstLower()); } else if (f.type() == Field.FieldType.BOOL) { generatedCodeSoFar += """ if ($fieldName != thatObj.$fieldName) { return false; } - """.replace("$fieldName", f.nameCamelFirstLower()); + """ + .replace("$fieldName", f.nameCamelFirstLower()); } else if (f.type() == Field.FieldType.FLOAT) { generatedCodeSoFar += """ if ($fieldName != thatObj.$fieldName) { return false; } - """.replace("$fieldName", f.nameCamelFirstLower()); + """ + .replace("$fieldName", f.nameCamelFirstLower()); } else if (f.type() == Field.FieldType.DOUBLE) { generatedCodeSoFar += """ if ($fieldName != 
thatObj.$fieldName) { return false; } - """.replace("$fieldName", f.nameCamelFirstLower()); - } else if (f.type() == Field.FieldType.STRING || - f.type() == Field.FieldType.BYTES || - f.type() == Field.FieldType.ENUM || - f.type() == Field.FieldType.MAP || - f.parent() == null /* Process a sub-message */) { - generatedCodeSoFar += ( """ + .replace("$fieldName", f.nameCamelFirstLower()); + } else if (f.type() == Field.FieldType.STRING + || f.type() == Field.FieldType.BYTES + || f.type() == Field.FieldType.ENUM + || f.type() == Field.FieldType.MAP + || f.parent() == null /* Process a sub-message */) { + generatedCodeSoFar += + (""" if ($fieldName == null && thatObj.$fieldName != null) { return false; } if ($fieldName != null && !$fieldName.equals(thatObj.$fieldName)) { return false; } - """).replace("$fieldName", f.nameCamelFirstLower()); + """) + .replace("$fieldName", f.nameCamelFirstLower()); } else { - throw new IllegalArgumentException( - "Unexpected field type for getting Equals - " + f.type().toString()); + throw new IllegalArgumentException("Unexpected field type for getting Equals - " + + f.type().toString()); } } } @@ -510,16 +536,24 @@ public static String getFieldsEqualsStatements(final List fields, String @NonNull private static String getPrimitiveWrapperEqualsGeneration(String generatedCodeSoFar, Field f) { switch (f.messageType()) { - case "StringValue", "BoolValue", "Int32Value", "UInt32Value", "Int64Value", "UInt64Value", "FloatValue", - "DoubleValue", "BytesValue" -> generatedCodeSoFar += ( - """ + case "StringValue", + "BoolValue", + "Int32Value", + "UInt32Value", + "Int64Value", + "UInt64Value", + "FloatValue", + "DoubleValue", + "BytesValue" -> generatedCodeSoFar += + (""" if (this.$fieldName == null && thatObj.$fieldName != null) { return false; } if (this.$fieldName != null && !$fieldName.equals(thatObj.$fieldName)) { return false; } - """).replace("$fieldName", f.nameCamelFirstLower()); + """) + .replace("$fieldName", f.nameCamelFirstLower()); default -> throw new UnsupportedOperationException("Unhandled optional message type:" + f.messageType()); } return generatedCodeSoFar; @@ -535,16 +569,17 @@ private static String getPrimitiveWrapperEqualsGeneration(String generatedCodeSo */ @NonNull private static String getRepeatedEqualsGeneration(String generatedCodeSoFar, Field f) { - generatedCodeSoFar += ( - """ + generatedCodeSoFar += + (""" if (this.$fieldName == null && thatObj.$fieldName != null) { return false; } - + if (this.$fieldName != null && !$fieldName.equals(thatObj.$fieldName)) { return false; } - """).replace("$fieldName", f.nameCamelFirstLower()); + """) + .replace("$fieldName", f.nameCamelFirstLower()); return generatedCodeSoFar; } @@ -557,25 +592,26 @@ private static String getRepeatedEqualsGeneration(String generatedCodeSoFar, Fie * * @return The generated code for compareTo method body */ - public static String getFieldsCompareToStatements(final List fields, String generatedCodeSoFar, - File destinationSrcDir) { + public static String getFieldsCompareToStatements( + final List fields, String generatedCodeSoFar, File destinationSrcDir) { for (Field f : fields) { if (f.optionalValueType()) { generatedCodeSoFar += getPrimitiveWrapperCompareToGeneration(f); } else if (f.repeated()) { throw new UnsupportedOperationException("Repeated fields are not supported in compareTo method"); } else { - if (f.type() == Field.FieldType.FIXED32 || - f.type() == Field.FieldType.INT32 || - f.type() == Field.FieldType.SFIXED32 || - f.type() == Field.FieldType.SINT32) { + if 
(f.type() == Field.FieldType.FIXED32 + || f.type() == Field.FieldType.INT32 + || f.type() == Field.FieldType.SFIXED32 + || f.type() == Field.FieldType.SINT32) { generatedCodeSoFar += """ result = Integer.compare($fieldName, thatObj.$fieldName); if (result != 0) { return result; } - """.replace("$fieldName", f.nameCamelFirstLower()); + """ + .replace("$fieldName", f.nameCamelFirstLower()); } else if (f.type() == Field.FieldType.UINT32) { generatedCodeSoFar += """ @@ -583,19 +619,21 @@ public static String getFieldsCompareToStatements(final List fields, Stri if (result != 0) { return result; } - """.replace("$fieldName", f.nameCamelFirstLower()); + """ + .replace("$fieldName", f.nameCamelFirstLower()); - } else if (f.type() == Field.FieldType.FIXED64 || - f.type() == Field.FieldType.INT64 || - f.type() == Field.FieldType.SFIXED64 || - f.type() == Field.FieldType.SINT64) { + } else if (f.type() == Field.FieldType.FIXED64 + || f.type() == Field.FieldType.INT64 + || f.type() == Field.FieldType.SFIXED64 + || f.type() == Field.FieldType.SINT64) { generatedCodeSoFar += """ result = Long.compare($fieldName, thatObj.$fieldName); if (result != 0) { return result; } - """.replace("$fieldName", f.nameCamelFirstLower()); + """ + .replace("$fieldName", f.nameCamelFirstLower()); } else if (f.type() == Field.FieldType.UINT64) { generatedCodeSoFar += """ @@ -603,7 +641,8 @@ public static String getFieldsCompareToStatements(final List fields, Stri if (result != 0) { return result; } - """.replace("$fieldName", f.nameCamelFirstLower()); + """ + .replace("$fieldName", f.nameCamelFirstLower()); } else if (f.type() == Field.FieldType.BOOL) { generatedCodeSoFar += """ @@ -611,7 +650,8 @@ public static String getFieldsCompareToStatements(final List fields, Stri if (result != 0) { return result; } - """.replace("$fieldName", f.nameCamelFirstLower()); + """ + .replace("$fieldName", f.nameCamelFirstLower()); } else if (f.type() == Field.FieldType.FLOAT) { generatedCodeSoFar += """ @@ -619,7 +659,8 @@ public static String getFieldsCompareToStatements(final List fields, Stri if (result != 0) { return result; } - """.replace("$fieldName", f.nameCamelFirstLower()); + """ + .replace("$fieldName", f.nameCamelFirstLower()); } else if (f.type() == Field.FieldType.DOUBLE) { generatedCodeSoFar += """ @@ -627,17 +668,18 @@ public static String getFieldsCompareToStatements(final List fields, Stri if (result != 0) { return result; } - """.replace("$fieldName", f.nameCamelFirstLower()); - } else if (f.type() == Field.FieldType.STRING || - f.type() == Field.FieldType.BYTES || - f.type() == Field.FieldType.ENUM) { + """ + .replace("$fieldName", f.nameCamelFirstLower()); + } else if (f.type() == Field.FieldType.STRING + || f.type() == Field.FieldType.BYTES + || f.type() == Field.FieldType.ENUM) { generatedCodeSoFar += generateCompareToForObject(f); } else if (f.type() == Field.FieldType.MESSAGE || f.type() == Field.FieldType.ONE_OF) { verifyComparable(f, destinationSrcDir); generatedCodeSoFar += generateCompareToForObject(f); } else { - throw new IllegalArgumentException( - "Unexpected field type for getting CompareTo - " + f.type().toString()); + throw new IllegalArgumentException("Unexpected field type for getting CompareTo - " + + f.type().toString()); } } } @@ -659,7 +701,8 @@ private static String generateCompareToForObject(Field f) { if (result != 0) { return result; } - """.replace("$fieldName", f.nameCamelFirstLower()); + """ + .replace("$fieldName", f.nameCamelFirstLower()); } /** @@ -684,8 +727,9 @@ private static void 
verifyComparable(final Field field, File destinationSrcDir) return; } } - throw new IllegalArgumentException(("Field %s.%s specified in `pbj.comparable` option must implement " + - "`Comparable` interface but it doesn't.").formatted(className, field.nameCamelFirstLower())); + throw new IllegalArgumentException(("Field %s.%s specified in `pbj.comparable` option must implement " + + "`Comparable` interface but it doesn't.") + .formatted(className, field.nameCamelFirstLower())); } catch (IOException e) { throw new RuntimeException(e); } @@ -719,21 +763,21 @@ private static String getPrimitiveWrapperCompareToGeneration(Field f) { } """; - final String compareStatement = switch (f.messageType()) { - case "StringValue", "BytesValue" -> "$fieldName.compareTo(thatObj.$fieldName)"; - case "BoolValue" -> "java.lang.Boolean.compare($fieldName, thatObj.$fieldName)"; - case "Int32Value" -> "java.lang.Integer.compare($fieldName, thatObj.$fieldName)"; - case "UInt32Value" -> "java.lang.Integer.compareUnsigned($fieldName, thatObj.$fieldName)"; - case "Int64Value" -> "java.lang.Long.compare($fieldName, thatObj.$fieldName)"; - case "UInt64Value" -> "java.lang.Long.compareUnsigned($fieldName, thatObj.$fieldName)"; - case "FloatValue" -> "java.lang.Float.compare($fieldName, thatObj.$fieldName)"; - case "DoubleValue" -> "java.lang.Double.compare($fieldName, thatObj.$fieldName)"; - default -> throw new UnsupportedOperationException("Unhandled optional message type:" + f.messageType()); - }; + final String compareStatement = + switch (f.messageType()) { + case "StringValue", "BytesValue" -> "$fieldName.compareTo(thatObj.$fieldName)"; + case "BoolValue" -> "java.lang.Boolean.compare($fieldName, thatObj.$fieldName)"; + case "Int32Value" -> "java.lang.Integer.compare($fieldName, thatObj.$fieldName)"; + case "UInt32Value" -> "java.lang.Integer.compareUnsigned($fieldName, thatObj.$fieldName)"; + case "Int64Value" -> "java.lang.Long.compare($fieldName, thatObj.$fieldName)"; + case "UInt64Value" -> "java.lang.Long.compareUnsigned($fieldName, thatObj.$fieldName)"; + case "FloatValue" -> "java.lang.Float.compare($fieldName, thatObj.$fieldName)"; + case "DoubleValue" -> "java.lang.Double.compare($fieldName, thatObj.$fieldName)"; + default -> throw new UnsupportedOperationException( + "Unhandled optional message type:" + f.messageType()); + }; - return template - .replace("$compareStatement", compareStatement) - .replace("$fieldName", f.nameCamelFirstLower()); + return template.replace("$compareStatement", compareStatement).replace("$fieldName", f.nameCamelFirstLower()); } /** @@ -761,8 +805,8 @@ public static String removingLeadingDot(String text) { * @return File object for java file */ public static File getJavaFile(File srcDir, String javaPackage, String className) { - File packagePath = new File( - srcDir.getPath() + File.separatorChar + javaPackage.replaceAll("\\.", "\\" + File.separator)); + File packagePath = + new File(srcDir.getPath() + File.separatorChar + javaPackage.replaceAll("\\.", "\\" + File.separator)); //noinspection ResultOfMethodCallIgnored packagePath.mkdirs(); return new File(packagePath, className + ".java"); diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/ContextualLookupHelper.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/ContextualLookupHelper.java index 8dcc11b6..6491339e 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/ContextualLookupHelper.java +++ 
b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/ContextualLookupHelper.java @@ -1,14 +1,14 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.compiler.impl; -import java.io.File; -import java.util.List; import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.EnumDefContext; +import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.FieldContext; import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.MessageDefContext; import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.MessageTypeContext; -import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.FieldContext; -import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.Type_Context; import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.OneofFieldContext; +import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.Type_Context; +import java.io.File; +import java.util.List; /** * Wrapper around LookupHelper adding the context of which protobuf source file the lookup is happening within. This @@ -93,7 +93,8 @@ public String getPackageForEnum(FileType fileType, EnumDefContext enumDef) { * @return java package to put model class in */ public String getPackageFieldMessageType(final FileType fileType, final FieldContext fieldContext) { - return lookupHelper.getPackage(srcProtoFileContext, fileType, fieldContext.type_().messageType()); + return lookupHelper.getPackage( + srcProtoFileContext, fileType, fieldContext.type_().messageType()); } /** @@ -115,7 +116,8 @@ public String getPackageFieldMessageType(final FileType fileType, final Type_Con * @return java package to put model class in */ public String getPackageOneofFieldMessageType(final FileType fileType, final OneofFieldContext fieldContext) { - return lookupHelper.getPackage(srcProtoFileContext, fileType, fieldContext.type_().messageType()); + return lookupHelper.getPackage( + srcProtoFileContext, fileType, fieldContext.type_().messageType()); } /** diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Field.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Field.java index b3596c57..d062ebec 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Field.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Field.java @@ -9,7 +9,6 @@ import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser; import edu.umd.cs.findbugs.annotations.NonNull; - import java.util.Set; /** @@ -56,7 +55,7 @@ default long maxSize() { * @return this fields name converted */ default String nameCamelFirstUpper() { - return snakeToCamel(name(),true); + return snakeToCamel(name(), true); } /** @@ -66,7 +65,7 @@ default String nameCamelFirstUpper() { */ @NonNull default String nameCamelFirstLower() { - return snakeToCamel(name(),false); + return snakeToCamel(name(), false); } /** @@ -113,8 +112,8 @@ default String nameCamelFirstLower() { * @param codecImports if imports for this field's generated codec classes should be added * @param testImports if imports for this field's generated test classes should be added */ - void addAllNeededImports(Set imports, boolean modelImports, - boolean codecImports, final boolean testImports); + void addAllNeededImports( + Set imports, boolean modelImports, boolean codecImports, final boolean testImports); /** * Get the java code to parse the value for this field from input @@ -199,7 +198,9 @@ default com.hedera.pbj.compiler.impl.OneOfField parent() { * or null if the type is not a message. 
*/ static String extractMessageTypeName(final Protobuf3Parser.Type_Context typeContext) { - return typeContext.messageType() == null ? null : typeContext.messageType().messageName().getText(); + return typeContext.messageType() == null + ? null + : typeContext.messageType().messageName().getText(); } /** @@ -210,8 +211,10 @@ static String extractMessageTypePackage( final Protobuf3Parser.Type_Context typeContext, final com.hedera.pbj.compiler.impl.FileType fileType, final com.hedera.pbj.compiler.impl.ContextualLookupHelper lookupHelper) { - return typeContext.messageType() == null || typeContext.messageType().messageName().getText() == null ? null : - lookupHelper.getPackageFieldMessageType(fileType, typeContext); + return typeContext.messageType() == null + || typeContext.messageType().messageName().getText() == null + ? null + : lookupHelper.getPackageFieldMessageType(fileType, typeContext); } /** @@ -253,9 +256,9 @@ enum FieldType { /** Protobuf bytes field type */ BYTES("Bytes", "Bytes", "Bytes.EMPTY", TYPE_LENGTH_DELIMITED), /** Protobuf oneof field type, this is not a true field type in protobuf. Needed here for a few edge cases */ - ONE_OF("OneOf", "OneOf", "null", 0 ),// BAD TYPE + ONE_OF("OneOf", "OneOf", "null", 0), // BAD TYPE // On the wire, a map is a repeated Message {key, value}, sorted in the natural order of keys for determinism. - MAP("Map", "Map", "Collections.EMPTY_MAP", TYPE_LENGTH_DELIMITED ); + MAP("Map", "Map", "Collections.EMPTY_MAP", TYPE_LENGTH_DELIMITED); /** The type of field type in Java code */ public final String javaType; @@ -328,7 +331,9 @@ public String javaType(boolean repeated) { * @param lookupHelper Lookup helper with global context * @return The field type enum for parser context */ - static FieldType of(Protobuf3Parser.Type_Context typeContext, final com.hedera.pbj.compiler.impl.ContextualLookupHelper lookupHelper) { + static FieldType of( + Protobuf3Parser.Type_Context typeContext, + final com.hedera.pbj.compiler.impl.ContextualLookupHelper lookupHelper) { if (typeContext.enumType() != null) { return FieldType.ENUM; } else if (typeContext.messageType() != null) { @@ -365,7 +370,7 @@ static FieldType of(Protobuf3Parser.Type_Context typeContext, final com.hedera. } else if (typeContext.BYTES() != null) { return FieldType.BYTES; } else { - throw new IllegalArgumentException("Unknown field type: "+typeContext); + throw new IllegalArgumentException("Unknown field type: " + typeContext); } } @@ -376,7 +381,9 @@ static FieldType of(Protobuf3Parser.Type_Context typeContext, final com.hedera. 
* @param lookupHelper Lookup helper with global context * @return The field type enum for parser context */ - static FieldType of(Protobuf3Parser.KeyTypeContext typeContext, final com.hedera.pbj.compiler.impl.ContextualLookupHelper lookupHelper) { + static FieldType of( + Protobuf3Parser.KeyTypeContext typeContext, + final com.hedera.pbj.compiler.impl.ContextualLookupHelper lookupHelper) { if (typeContext.INT32() != null) { return FieldType.INT32; } else if (typeContext.UINT32() != null) { diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/LookupHelper.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/LookupHelper.java index 72e0588e..00c1c550 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/LookupHelper.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/LookupHelper.java @@ -13,10 +13,6 @@ import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.TopLevelDefContext; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; -import org.antlr.v4.runtime.CharStreams; -import org.antlr.v4.runtime.CommonTokenStream; -import org.antlr.v4.runtime.ParserRuleContext; - import java.io.File; import java.io.FileInputStream; import java.io.IOException; @@ -31,6 +27,9 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.StreamSupport; +import org.antlr.v4.runtime.CharStreams; +import org.antlr.v4.runtime.CommonTokenStream; +import org.antlr.v4.runtime.ParserRuleContext; /** * Class that manages packages and enum names that are used more than one place in code generation @@ -38,8 +37,7 @@ @SuppressWarnings({"unused", "DuplicatedCode"}) public final class LookupHelper { /** REGEX pattern to match options in special option comments */ - private static final Pattern OPTION_COMMENT = - Pattern.compile("//\\s+<<<\\s*([\\w.]+)\\s*=\\s*\"([^\"]+)\"\\s*>>>"); + private static final Pattern OPTION_COMMENT = Pattern.compile("//\\s+<<<\\s*([\\w.]+)\\s*=\\s*\"([^\"]+)\"\\s*>>>"); /** The option name for PBJ package at file level */ private static final String PBJ_PACKAGE_OPTION_NAME = "pbj.java_package"; /** The option name for PBJ package at msgDef level */ @@ -56,8 +54,7 @@ public final class LookupHelper { public static final String PROTO_EXTENSIION = ".proto"; // Exception message templates - private static final String METHOD_WRONG_CONTEXT_MESSAGE = - "%s only supports MessageDefContext or EnumDefContext"; + private static final String METHOD_WRONG_CONTEXT_MESSAGE = "%s only supports MessageDefContext or EnumDefContext"; private static final String FAILED_TO_FIND_LOCAL_MSG_MAP_MESSAGE = "Failed to find messageMapLocal for proto file [%s]"; private static final String FAILED_TO_FIND_MSG_TYPE_MESSAGE = @@ -66,9 +63,12 @@ public final class LookupHelper { "Could not find %s package for message or enum [%s] in file [%s]"; private static final String LIMITED_CONTEXT_OPTIONS_SUPPORT_MESSAGE = "%s only supports MessageDefContext, EnumDefContext or MessageTypeContext not [%s]"; - private static final String FILE_MISSING_PACKAGE_OPTION_MESSAGE = "%sProto file [%s] does not contain \"%s\" or \"%s\" options.%n"; - private static final String IMPORT_MATCHED_MULTIPLE_MESSAGE = "Import \"%s\" in proto file \"%s\" matched more than 1 file in src files [%s]"; - private static final String IMPORT_NOT_FOUND_MESSAGE = "Import \"%s\" in proto file \"%s\" can not be found in src files."; + private static final String 
FILE_MISSING_PACKAGE_OPTION_MESSAGE = + "%sProto file [%s] does not contain \"%s\" or \"%s\" options.%n"; + private static final String IMPORT_MATCHED_MULTIPLE_MESSAGE = + "Import \"%s\" in proto file \"%s\" matched more than 1 file in src files [%s]"; + private static final String IMPORT_NOT_FOUND_MESSAGE = + "Import \"%s\" in proto file \"%s\" can not be found in src files."; /** * Map from fully qualified msgDef name to fully qualified pbj java package, not including java @@ -126,9 +126,7 @@ public String getUnqualifiedProtoName(final ParserRuleContext context) { return enumDef.enumName().getText(); } else if (context instanceof final MessageTypeContext msgTypeContext) { final String messageType = msgTypeContext.getText(); - return messageType.contains(".") - ? messageType.substring(messageType.lastIndexOf('.') + 1) - : messageType; + return messageType.contains(".") ? messageType.substring(messageType.lastIndexOf('.') + 1) : messageType; } else { throw new UnsupportedOperationException(METHOD_WRONG_CONTEXT_MESSAGE.formatted("getUnqualifiedProtoName")); } @@ -143,8 +141,7 @@ public String getUnqualifiedProtoName(final ParserRuleContext context) { * @param context The parser context for a message, enum or a message type. * @return fully qualified proto name */ - public String getFullyQualifiedProtoName( - final File protoSrcFile, final ParserRuleContext context) { + public String getFullyQualifiedProtoName(final File protoSrcFile, final ParserRuleContext context) { if (context instanceof final MessageTypeContext msgTypeContext) { final String messageType = msgTypeContext.getText(); // check if fully qualified @@ -154,8 +151,7 @@ public String getFullyQualifiedProtoName( // check local file message types final var messageMapLocal = msgAndEnumByFile.get(protoSrcFile.getAbsolutePath()); if (messageMapLocal == null) { - throw new PbjCompilerException( - FAILED_TO_FIND_LOCAL_MSG_MAP_MESSAGE.formatted(protoSrcFile)); + throw new PbjCompilerException(FAILED_TO_FIND_LOCAL_MSG_MAP_MESSAGE.formatted(protoSrcFile)); } final String nameFoundInLocalFile = messageMapLocal.get(messageType); if (nameFoundInLocalFile != null) { @@ -174,9 +170,11 @@ public String getFullyQualifiedProtoName( } } // we failed to find - final Object[] importsArray = protoFileImports.get(protoSrcFile.getAbsolutePath()).toArray(); + final Object[] importsArray = + protoFileImports.get(protoSrcFile.getAbsolutePath()).toArray(); final String importsString = Arrays.toString(importsArray); - throw new PbjCompilerException(FAILED_TO_FIND_MSG_TYPE_MESSAGE.formatted(messageType, protoSrcFile, importsString)); + throw new PbjCompilerException( + FAILED_TO_FIND_MSG_TYPE_MESSAGE.formatted(messageType, protoSrcFile, importsString)); } else if (context instanceof MessageDefContext || context instanceof EnumDefContext) { final Map fileMap = msgAndEnumByFile.get(protoSrcFile.getAbsolutePath()); if (fileMap == null) { @@ -184,7 +182,8 @@ public String getFullyQualifiedProtoName( } return fileMap.get(getUnqualifiedProtoName(context)); } else { - throw new UnsupportedOperationException(METHOD_WRONG_CONTEXT_MESSAGE.formatted("getFullyQualifiedProtoName")); + throw new UnsupportedOperationException( + METHOD_WRONG_CONTEXT_MESSAGE.formatted("getFullyQualifiedProtoName")); } } @@ -196,8 +195,7 @@ public String getFullyQualifiedProtoName( * @param context Parser Context, a message or enum * @return java package to put model class in */ - String getUnqualifiedClass( - final File protoSrcFile, final FileType fileType, final ParserRuleContext context) 
{ + String getUnqualifiedClass(final File protoSrcFile, final FileType fileType, final ParserRuleContext context) { final String name; final boolean isEnum; if (context instanceof final MessageTypeContext msgType) { @@ -236,8 +234,7 @@ String getUnqualifiedClass( * @return java package to put model class in */ @Nullable - String getPackage( - final File protoSrcFile, final FileType fileType, final ParserRuleContext context) { + String getPackage(final File protoSrcFile, final FileType fileType, final ParserRuleContext context) { if (context instanceof MessageDefContext || context instanceof EnumDefContext || context instanceof MessageTypeContext) { @@ -247,27 +244,28 @@ String getPackage( } else if (fileType == FileType.PROTOC) { final String protocPackage = protocPackageMap.get(qualifiedProtoName); if (protocPackage == null) { - throw new PbjCompilerException(PACKAGE_NOT_FOUND_MESSAGE.formatted("protoc", qualifiedProtoName, protoSrcFile)); + throw new PbjCompilerException( + PACKAGE_NOT_FOUND_MESSAGE.formatted("protoc", qualifiedProtoName, protoSrcFile)); } return protocPackage; } else { final String basePackage = pbjPackageMap.get(qualifiedProtoName); if (basePackage == null) { - throw new PbjCompilerException(PACKAGE_NOT_FOUND_MESSAGE.formatted("pbj", qualifiedProtoName, protoSrcFile)); + throw new PbjCompilerException( + PACKAGE_NOT_FOUND_MESSAGE.formatted("pbj", qualifiedProtoName, protoSrcFile)); } return switch (fileType) { //noinspection ConstantConditions case MODEL, PROTOC -> basePackage; case SCHEMA -> basePackage + '.' + FileAndPackageNamesConfig.SCHEMAS_SUBPACKAGE; - case CODEC, JSON_CODEC -> basePackage - + '.' - + FileAndPackageNamesConfig.CODECS_SUBPACKAGE; + case CODEC, JSON_CODEC -> basePackage + '.' + FileAndPackageNamesConfig.CODECS_SUBPACKAGE; case TEST -> basePackage + '.' 
+ FileAndPackageNamesConfig.TESTS_SUBPACKAGE; }; } } else { - throw new UnsupportedOperationException(LIMITED_CONTEXT_OPTIONS_SUPPORT_MESSAGE.formatted("getPackageForMsgOrEnum", context.getClass().getName())); + throw new UnsupportedOperationException(LIMITED_CONTEXT_OPTIONS_SUPPORT_MESSAGE.formatted( + "getPackageForMsgOrEnum", context.getClass().getName())); } } @@ -279,8 +277,7 @@ String getPackage( * @param context Parser Context, a message or enum * @return fully qualified Java class name */ - String getFullyQualifiedClass( - final File protoSrcFile, final FileType fileType, final ParserRuleContext context) { + String getFullyQualifiedClass(final File protoSrcFile, final FileType fileType, final ParserRuleContext context) { if (context instanceof MessageDefContext || context instanceof EnumDefContext || context instanceof MessageTypeContext) { @@ -288,8 +285,7 @@ String getFullyQualifiedClass( final String messageName = getUnqualifiedClass(protoSrcFile, fileType, context); // protoc supports nested classes so need parent classes/messages final String parentClasses; - if (fileType == FileType.PROTOC - && context.getParent() instanceof MessageElementContext) { + if (fileType == FileType.PROTOC && context.getParent() instanceof MessageElementContext) { final StringBuilder sb = new StringBuilder(); ParserRuleContext parent = context.getParent(); while (!(parent instanceof TopLevelDefContext)) { @@ -378,19 +374,18 @@ private void build(final Iterable allSrcFiles) { // ignore pbj_custom_options.proto file continue; } else if (pbjJavaPackage == null && protocJavaPackage == null) { - throw new PbjCompilerException( - FILE_MISSING_PACKAGE_OPTION_MESSAGE.formatted( - "", file.getAbsolutePath(), PBJ_PACKAGE_OPTION_NAME, - PROTOC_JAVA_PACKAGE_OPTION_NAME)); + throw new PbjCompilerException(FILE_MISSING_PACKAGE_OPTION_MESSAGE.formatted( + "", file.getAbsolutePath(), PBJ_PACKAGE_OPTION_NAME, PROTOC_JAVA_PACKAGE_OPTION_NAME)); } else if (pbjJavaPackage == null) { System.err.printf(FILE_MISSING_PACKAGE_OPTION_MESSAGE.formatted( - "WARNING, ", file.getAbsolutePath(), PBJ_PACKAGE_OPTION_NAME, - PROTOC_JAVA_PACKAGE_OPTION_NAME)); + "WARNING, ", + file.getAbsolutePath(), + PBJ_PACKAGE_OPTION_NAME, + PROTOC_JAVA_PACKAGE_OPTION_NAME)); } // process imports final Set fileImports = - protoFileImports.computeIfAbsent( - fullQualifiedFile, key -> new HashSet<>()); + protoFileImports.computeIfAbsent(fullQualifiedFile, key -> new HashSet<>()); for (final var importStatement : parsedDoc.importStatement()) { final String importedFileName = normalizeFileName(importStatement.strLit().getText()); @@ -401,45 +396,29 @@ private void build(final Iterable allSrcFiles) { } // now scan all src files to find import as there can be many src // directories - final List matchingSrcFiles = - StreamSupport.stream(allSrcFiles.spliterator(), false) - .filter( - srcFile -> - srcFile.getAbsolutePath() - .endsWith( - FileSystems.getDefault() - .getSeparator() - + importedFileName)) - .toList(); + final List matchingSrcFiles = StreamSupport.stream(allSrcFiles.spliterator(), false) + .filter(srcFile -> srcFile.getAbsolutePath() + .endsWith(FileSystems.getDefault().getSeparator() + importedFileName)) + .toList(); if (matchingSrcFiles.size() == 1) { fileImports.add(matchingSrcFiles.get(0).getAbsolutePath()); } else if (matchingSrcFiles.size() > 1) { - throw new PbjCompilerException( - IMPORT_MATCHED_MULTIPLE_MESSAGE.formatted( - importedFileName, file.getAbsolutePath(), - Arrays.toString(matchingSrcFiles.toArray()))); + throw new 
PbjCompilerException(IMPORT_MATCHED_MULTIPLE_MESSAGE.formatted( + importedFileName, + file.getAbsolutePath(), + Arrays.toString(matchingSrcFiles.toArray()))); } else { throw new PbjCompilerException( - IMPORT_NOT_FOUND_MESSAGE.formatted( - importedFileName, file.getAbsolutePath())); + IMPORT_NOT_FOUND_MESSAGE.formatted(importedFileName, file.getAbsolutePath())); } } // process message and enum defs - final String fileLevelJavaPackage = - (pbjJavaPackage != null) ? pbjJavaPackage : protocJavaPackage; + final String fileLevelJavaPackage = (pbjJavaPackage != null) ? pbjJavaPackage : protocJavaPackage; for (final var item : parsedDoc.topLevelDef()) { if (item.messageDef() != null) - buildMessage( - fullQualifiedFile, - fileLevelJavaPackage, - protocJavaPackage, - item.messageDef()); + buildMessage(fullQualifiedFile, fileLevelJavaPackage, protocJavaPackage, item.messageDef()); if (item.enumDef() != null) - buildEnum( - fullQualifiedFile, - fileLevelJavaPackage, - protocJavaPackage, - item.enumDef()); + buildEnum(fullQualifiedFile, fileLevelJavaPackage, protocJavaPackage, item.enumDef()); } } catch (final IOException e) { throw new RuntimeException(e); @@ -527,22 +506,17 @@ private void buildMessage( // insert into maps pbjPackageMap.put(fullyQualifiedMessage, messagePbjPackage); protocPackageMap.put(fullyQualifiedMessage, fileLevelProtocJavaPackage); - msgAndEnumByFile.computeIfAbsent(fullQualifiedFile, fqf -> new HashMap<>()) + msgAndEnumByFile + .computeIfAbsent(fullQualifiedFile, fqf -> new HashMap<>()) .put(msgName, fullyQualifiedMessage); // handle child messages and enums for (final var item : msgDef.messageBody().messageElement()) { if (item.messageDef() != null) { - buildMessage(fullQualifiedFile, - messagePbjPackage, - fileLevelProtocJavaPackage, - item.messageDef()); + buildMessage(fullQualifiedFile, messagePbjPackage, fileLevelProtocJavaPackage, item.messageDef()); } if (item.enumDef() != null) { - buildEnum(fullQualifiedFile, - messagePbjPackage, - fileLevelProtocJavaPackage, - item.enumDef()); + buildEnum(fullQualifiedFile, messagePbjPackage, fileLevelProtocJavaPackage, item.enumDef()); } } } @@ -565,14 +539,15 @@ static List extractComparableFields(final MessageDefContext msgDef) { final Set regularFieldNames = msgDef.messageBody().messageElement().stream() .filter(v -> v.field() != null) .filter(v -> { - if(v.field().REPEATED() != null){ + if (v.field().REPEATED() != null) { repeatedFields.add(v.field().fieldName().getText()); return false; } else { return true; } }) - .map(v -> v.field().fieldName().getText()).collect(Collectors.toSet()); + .map(v -> v.field().fieldName().getText()) + .collect(Collectors.toSet()); final Set oneOfFieldNames = msgDef.messageBody().messageElement().stream() .filter(v -> v.oneof() != null) .map(v -> v.oneof().oneofName().getText()) @@ -583,15 +558,16 @@ static List extractComparableFields(final MessageDefContext msgDef) { return Arrays.stream(optionValue.split(",")) .map(String::trim) .peek(v -> { - if(repeatedFields.contains(v)){ - throw new IllegalArgumentException("Field `%s` specified in `%s` option is repeated. Repeated fields are not supported by this option." - .formatted(v, PBJ_COMPARABLE_OPTION_NAME)); + if (repeatedFields.contains(v)) { + throw new IllegalArgumentException( + "Field `%s` specified in `%s` option is repeated. Repeated fields are not supported by this option." 
+ .formatted(v, PBJ_COMPARABLE_OPTION_NAME)); } if (!allFieldNames.contains(v)) { - throw new IllegalArgumentException( - "Field '%s' specified in %s option is not found.".formatted(v, PBJ_COMPARABLE_OPTION_NAME)); + throw new IllegalArgumentException("Field '%s' specified in %s option is not found." + .formatted(v, PBJ_COMPARABLE_OPTION_NAME)); } - }) + }) .collect(Collectors.toList()); } } @@ -642,7 +618,8 @@ private void buildEnum( pbjPackageMap.put(fullQualifiedEnumName, enumPbjPackage); protocPackageMap.put(fullQualifiedEnumName, fileLevelProtocJavaPackage); enumNames.add(fullQualifiedEnumName); - msgAndEnumByFile.computeIfAbsent(fullQualifiedFile, fqf -> new HashMap<>()) + msgAndEnumByFile + .computeIfAbsent(fullQualifiedFile, fqf -> new HashMap<>()) .put(enumName, fullQualifiedEnumName); } @@ -655,22 +632,23 @@ private void buildEnum( * MessageDefContext or EnumDefContext * @return part of fully qualified protobuf name */ - private static String getFullyQualifiedProtoNameForMsgOrEnum( - final ParserRuleContext ruleContext) { + private static String getFullyQualifiedProtoNameForMsgOrEnum(final ParserRuleContext ruleContext) { String thisName = ""; if (ruleContext instanceof final Protobuf3Parser.ProtoContext parsedDoc) { // get proto package final var packageStatement = parsedDoc.packageStatement().stream().findFirst(); - thisName = packageStatement.isEmpty() ? "" : packageStatement.get().fullIdent().getText(); + thisName = packageStatement.isEmpty() + ? "" + : packageStatement.get().fullIdent().getText(); } else if (ruleContext instanceof final EnumDefContext enumDef) { final String parentPart = getFullyQualifiedProtoNameForMsgOrEnum(enumDef.getParent()); thisName = getFullyQualifiedProtoNameForMsgOrEnum(enumDef.getParent()) - + "." - + enumDef.enumName().getText(); + + "." + + enumDef.enumName().getText(); } else if (ruleContext instanceof final MessageDefContext msgDef) { thisName = getFullyQualifiedProtoNameForMsgOrEnum(msgDef.getParent()) - + "." - + msgDef.messageName().getText(); + + "." + + msgDef.messageName().getText(); } else if (ruleContext.getParent() != null) { thisName = getFullyQualifiedProtoNameForMsgOrEnum(ruleContext.getParent()); } diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/MapField.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/MapField.java index d3729956..34629d7e 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/MapField.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/MapField.java @@ -1,12 +1,12 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.compiler.impl; -import java.util.Set; -import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser; - import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT; import static com.hedera.pbj.compiler.impl.SingleField.getDeprecatedOption; +import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser; +import java.util.Set; + /** * A field of type map. *

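// A self-contained sketch of the pbj "comparable" option validation performed in the
// extractComparableFields() hunk above. The class, method and parameter names here are
// illustrative only; the real code works on the ANTLR MessageDefContext and interpolates
// PBJ_COMPARABLE_OPTION_NAME into its error messages.
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

final class ComparableOptionSketch {
    static List<String> validate(String optionValue, Set<String> allFieldNames, Set<String> repeatedFields) {
        return Arrays.stream(optionValue.split(","))
                .map(String::trim)
                .peek(name -> {
                    // repeated fields cannot participate in the generated compareTo() and are rejected
                    if (repeatedFields.contains(name)) {
                        throw new IllegalArgumentException(
                                "Field `%s` specified in the comparable option is repeated".formatted(name));
                    }
                    // every listed field must exist as a regular field or a oneof on the message
                    if (!allFieldNames.contains(name)) {
                        throw new IllegalArgumentException(
                                "Field '%s' specified in the comparable option is not found".formatted(name));
                    }
                })
                .collect(Collectors.toList());
    }
    // e.g. validate("shard, realm", Set.of("shard", "realm", "memo"), Set.of("keys")) -> [shard, realm]
}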
@@ -36,8 +36,8 @@ public record MapField( String javaDefault, String parserFieldsSetMethodCase, String comment, - boolean deprecated -) implements Field { + boolean deprecated) + implements Field { /** * Construct a MapField instance out of a MapFieldContext and a lookup helper. @@ -53,7 +53,8 @@ public MapField(Protobuf3Parser.MapFieldContext mapContext, final ContextualLook null, null, null, - "An internal, private map entry key for %s".formatted(mapContext.mapName().getText()), + "An internal, private map entry key for %s" + .formatted(mapContext.mapName().getText()), false, null), new SingleField( @@ -65,7 +66,8 @@ public MapField(Protobuf3Parser.MapFieldContext mapContext, final ContextualLook Field.extractMessageTypePackage(mapContext.type_(), FileType.MODEL, lookupHelper), Field.extractMessageTypePackage(mapContext.type_(), FileType.CODEC, lookupHelper), Field.extractMessageTypePackage(mapContext.type_(), FileType.TEST, lookupHelper), - "An internal, private map entry value for %s".formatted(mapContext.mapName().getText()), + "An internal, private map entry value for %s" + .formatted(mapContext.mapName().getText()), false, null), false, // maps cannot be repeated @@ -78,16 +80,17 @@ public MapField(Protobuf3Parser.MapFieldContext mapContext, final ContextualLook null, "PbjMap.EMPTY", "", - Common.buildCleanFieldJavaDoc(Integer.parseInt(mapContext.fieldNumber().getText()), mapContext.docComment()), - getDeprecatedOption(mapContext.fieldOptions()) - ); + Common.buildCleanFieldJavaDoc( + Integer.parseInt(mapContext.fieldNumber().getText()), mapContext.docComment()), + getDeprecatedOption(mapContext.fieldOptions())); } /** * Composes the Java generic type of the map field, e.g. "<Integer, String>" for a Map<Integer, String>. */ public String javaGenericType() { - final String fieldTypeName = valueField().type() == FieldType.MESSAGE ? ((SingleField)valueField()).messageType() + final String fieldTypeName = valueField().type() == FieldType.MESSAGE + ? 
((SingleField) valueField()).messageType() : valueField().type().boxedType; return "<%s, %s>".formatted(keyField.type().boxedType, fieldTypeName); } diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/OneOfField.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/OneOfField.java index 0b8c83a6..3e204868 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/OneOfField.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/OneOfField.java @@ -2,7 +2,6 @@ package com.hedera.pbj.compiler.impl; import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser; - import java.util.ArrayList; import java.util.List; import java.util.Set; @@ -18,8 +17,8 @@ public record OneOfField( List fields, boolean repeated, boolean deprecated, - boolean comparable -) implements Field { + boolean comparable) + implements Field { /** * Create a OneOf field from parser context * @@ -27,23 +26,30 @@ public record OneOfField( * @param parentMessageName the name of the parent message * @param lookupHelper helper for accessing global context */ - public OneOfField(final Protobuf3Parser.OneofContext oneOfContext, final String parentMessageName, final ContextualLookupHelper lookupHelper) { - this(parentMessageName, - oneOfContext.oneofName().getText(), - Common.buildCleanFieldJavaDoc( - oneOfContext.oneofField().stream().map(field -> Integer.parseInt(field.fieldNumber().getText())).toList(), - oneOfContext.docComment()), - new ArrayList<>(oneOfContext.oneofField().size()), - false, - getDeprecatedOption(oneOfContext.optionStatement()), - isComparable(oneOfContext, lookupHelper) - ); - for (var field: oneOfContext.oneofField()) { + public OneOfField( + final Protobuf3Parser.OneofContext oneOfContext, + final String parentMessageName, + final ContextualLookupHelper lookupHelper) { + this( + parentMessageName, + oneOfContext.oneofName().getText(), + Common.buildCleanFieldJavaDoc( + oneOfContext.oneofField().stream() + .map(field -> + Integer.parseInt(field.fieldNumber().getText())) + .toList(), + oneOfContext.docComment()), + new ArrayList<>(oneOfContext.oneofField().size()), + false, + getDeprecatedOption(oneOfContext.optionStatement()), + isComparable(oneOfContext, lookupHelper)); + for (var field : oneOfContext.oneofField()) { fields.add(new SingleField(field, this, lookupHelper)); } } - private static boolean isComparable(Protobuf3Parser.OneofContext oneOfContext, ContextualLookupHelper lookupHelper) { + private static boolean isComparable( + Protobuf3Parser.OneofContext oneOfContext, ContextualLookupHelper lookupHelper) { final boolean comparable; final List comparableFields = lookupHelper.getComparableFields(((Protobuf3Parser.MessageDefContext) oneOfContext.getParent().getParent().getParent())); @@ -97,17 +103,17 @@ public String javaFieldTypeBase() { */ @Override public String methodNameType() { - throw new UnsupportedOperationException("mapToWriteMethod can not handle "+type()); + throw new UnsupportedOperationException("mapToWriteMethod can not handle " + type()); } /** * {@inheritDoc} */ @Override - public void addAllNeededImports(final Set imports, boolean modelImports, - boolean codecImports, final boolean testImports) { + public void addAllNeededImports( + final Set imports, boolean modelImports, boolean codecImports, final boolean testImports) { imports.add("com.hedera.pbj.runtime"); - for (var field:fields) { + for (var field : fields) { field.addAllNeededImports(imports, modelImports, codecImports, testImports); } } @@ 
-125,7 +131,7 @@ public String parseCode() { */ @Override public String javaDefault() { - return Common.camelToUpperSnake(name)+"_UNSET"; + return Common.camelToUpperSnake(name) + "_UNSET"; } /** @@ -187,7 +193,9 @@ private static boolean getDeprecatedOption(List messageType; - case ENUM -> Common.snakeToCamel(messageType, true); - default -> type.javaType; - }; + String fieldType = + switch (type) { + case MESSAGE -> messageType; + case ENUM -> Common.snakeToCamel(messageType, true); + default -> type.javaType; + }; fieldType = switch (fieldType) { case "StringValue" -> "String"; case "Int32Value", "UInt32Value" -> "Integer"; @@ -121,8 +141,7 @@ private String javaFieldType(boolean considerRepeated) { case "DoubleValue" -> "Double"; case "BoolValue" -> "Boolean"; case "BytesValue" -> "Bytes"; - default -> fieldType; - }; + default -> fieldType;}; if (considerRepeated && repeated) { fieldType = switch (fieldType) { case "int" -> "List"; @@ -130,14 +149,13 @@ private String javaFieldType(boolean considerRepeated) { case "float" -> "List"; case "double" -> "List"; case "boolean" -> "List"; - default -> "List<%s>".formatted(fieldType); - }; + default -> "List<%s>".formatted(fieldType);}; } return fieldType; } public String javaFieldTypeForTest() { - return switch(type) { + return switch (type) { case MESSAGE -> messageType; case ENUM -> Common.snakeToCamel(messageType, true); default -> type.javaType; @@ -149,7 +167,7 @@ public String javaFieldTypeForTest() { */ @Override public String methodNameType() { - return switch(type()) { + return switch (type()) { case BOOL -> "Boolean"; case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> "Integer"; case INT64, SINT64, UINT64, FIXED64, SFIXED64 -> "Long"; @@ -167,7 +185,8 @@ public String methodNameType() { * {@inheritDoc} */ @Override - public void addAllNeededImports(Set imports, boolean modelImports, boolean codecImports, final boolean testImports) { + public void addAllNeededImports( + Set imports, boolean modelImports, boolean codecImports, final boolean testImports) { if (repeated || optionalValueType()) imports.add("java.util"); if (type == FieldType.BYTES) imports.add("com.hedera.pbj.runtime.io.buffer"); if (messageTypeModelPackage != null && modelImports) imports.add(messageTypeModelPackage); @@ -197,7 +216,7 @@ public String javaDefault() { } else if (repeated) { return "Collections.emptyList()"; } else if (type == FieldType.ENUM) { - return messageType+".fromProtobufOrdinal(0)"; + return messageType + ".fromProtobufOrdinal(0)"; } else { return type.javaDefault; } @@ -218,19 +237,20 @@ public String schemaFieldsDef() { // spotless:on boolean isPartOfOneOf = parent != null; if (optionalValueType()) { - final String optionalBaseFieldType = switch (messageType) { - case "StringValue" -> "STRING"; - case "Int32Value" -> "INT32"; - case "UInt32Value" -> "UINT32"; - case "Int64Value" -> "INT64"; - case "UInt64Value" -> "UINT64"; - case "FloatValue" -> "FLOAT"; - case "DoubleValue" -> "DOUBLE"; - case "BoolValue" -> "BOOL"; - case "BytesValue" -> "BYTES"; - default -> throw new UnsupportedOperationException( - "Unsupported optional field type found: %s in %s".formatted(type.javaType, this)); - }; + final String optionalBaseFieldType = + switch (messageType) { + case "StringValue" -> "STRING"; + case "Int32Value" -> "INT32"; + case "UInt32Value" -> "UINT32"; + case "Int64Value" -> "INT64"; + case "UInt64Value" -> "UINT64"; + case "FloatValue" -> "FLOAT"; + case "DoubleValue" -> "DOUBLE"; + case "BoolValue" -> "BOOL"; + case "BytesValue" -> "BYTES"; + 
default -> throw new UnsupportedOperationException( + "Unsupported optional field type found: %s in %s".formatted(type.javaType, this)); + }; // spotless:off return ("%s public static final FieldDefinition %s =" + " new FieldDefinition(\"%s\", FieldType.%s, %s, %s, %s, %d);%n") @@ -263,16 +283,24 @@ public String parserFieldsSetMethodCase() { if (optionalValueType()) { if (parent != null) { // one of return "case %d -> this.%s = new %s<>(%s.%sOneOfType.%s, input);" - .formatted(fieldNumber, fieldNameToSet, parent.className(), parent.parentMessageName(), - Common.snakeToCamel(parent.name(), true), Common.camelToUpperSnake(name)); + .formatted( + fieldNumber, + fieldNameToSet, + parent.className(), + parent.parentMessageName(), + Common.snakeToCamel(parent.name(), true), + Common.camelToUpperSnake(name)); } else { return "case %d -> this.%s = input;".formatted(fieldNumber, fieldNameToSet); } } else if (type == FieldType.MESSAGE) { - final String valueToSet = parent != null ? - "new %s<>(%s.%3$sOneOfType.%3$s, %%modelClass.PROTOBUF.parse(input))" - .formatted(parent.className(), parent.parentMessageName(), Common.snakeToCamel(parent.name(), true)) - : parseCode(); + final String valueToSet = parent != null + ? "new %s<>(%s.%3$sOneOfType.%3$s, %%modelClass.PROTOBUF.parse(input))" + .formatted( + parent.className(), + parent.parentMessageName(), + Common.snakeToCamel(parent.name(), true)) + : parseCode(); if (repeated) { // spotless:off return @@ -287,21 +315,25 @@ public String parserFieldsSetMethodCase() { .formatted(fieldNumber, fieldNameToSet, fieldNameToSet, fieldNameToSet, valueToSet); // spotless:on } else { - return "case %d -> this.%s = %s;".formatted(fieldNumber, fieldNameToSet,valueToSet); + return "case %d -> this.%s = %s;".formatted(fieldNumber, fieldNameToSet, valueToSet); } } else if (type == FieldType.ENUM) { if (repeated) { - return "case %d -> this.%s = input.stream().map(%s::fromProtobufOrdinal).toList();".formatted(fieldNumber, fieldNameToSet, - Common.snakeToCamel(messageType, true)); + return "case %d -> this.%s = input.stream().map(%s::fromProtobufOrdinal).toList();" + .formatted(fieldNumber, fieldNameToSet, Common.snakeToCamel(messageType, true)); } else { - return "case %d -> this.%s = %s.fromProtobufOrdinal(input);".formatted(fieldNumber, fieldNameToSet, - Common.snakeToCamel(messageType, true)); + return "case %d -> this.%s = %s.fromProtobufOrdinal(input);" + .formatted(fieldNumber, fieldNameToSet, Common.snakeToCamel(messageType, true)); } } else if (repeated && (type == FieldType.STRING || type == FieldType.BYTES)) { - final String valueToSet = parent != null ? - "new %s<>(%s.%sOneOfType.%s,input)".formatted(parent.className(), parent.parentMessageName(), - Common.snakeToCamel(parent.name(), true), Common.camelToUpperSnake(name)) : - "input"; + final String valueToSet = parent != null + ? "new %s<>(%s.%sOneOfType.%s,input)" + .formatted( + parent.className(), + parent.parentMessageName(), + Common.snakeToCamel(parent.name(), true), + Common.camelToUpperSnake(name)) + : "input"; // spotless:off return """ @@ -315,11 +347,15 @@ public String parserFieldsSetMethodCase() { .formatted(fieldNumber, fieldNameToSet, fieldNameToSet, fieldNameToSet, valueToSet); // spotless:on } else { - final String valueToSet = parent != null ? 
- "new %s<>(%s.%sOneOfType.%s,input)".formatted(parent.className(), parent.parentMessageName(), - Common.snakeToCamel(parent.name(), true), Common.camelToUpperSnake(name)) : - "input"; - return "case %d -> this.%s = %s;".formatted(fieldNumber, fieldNameToSet,valueToSet); + final String valueToSet = parent != null + ? "new %s<>(%s.%sOneOfType.%s,input)" + .formatted( + parent.className(), + parent.parentMessageName(), + Common.snakeToCamel(parent.name(), true), + Common.camelToUpperSnake(name)) + : "input"; + return "case %d -> this.%s = %s;".formatted(fieldNumber, fieldNameToSet, valueToSet); } } diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/EnumGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/EnumGenerator.java index c041fe59..6faef37f 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/EnumGenerator.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/EnumGenerator.java @@ -1,10 +1,12 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.compiler.impl.generators; +import static com.hedera.pbj.compiler.impl.Common.*; +import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT; + import com.hedera.pbj.compiler.impl.ContextualLookupHelper; import com.hedera.pbj.compiler.impl.FileType; import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser; - import java.io.File; import java.io.FileWriter; import java.io.IOException; @@ -14,9 +16,6 @@ import java.util.Map; import java.util.stream.Collectors; -import static com.hedera.pbj.compiler.impl.Common.*; -import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT; - /** * Code for generating enum code */ @@ -34,34 +33,44 @@ record EnumValue(String name, boolean deprecated, String javaDoc) {} * @param lookupHelper Lookup helper for package information * @throws IOException if there was a problem writing generated code */ - public static void generateEnumFile(Protobuf3Parser.EnumDefContext enumDef, File destinationSrcDir, - final ContextualLookupHelper lookupHelper) throws IOException { + public static void generateEnumFile( + Protobuf3Parser.EnumDefContext enumDef, File destinationSrcDir, final ContextualLookupHelper lookupHelper) + throws IOException { final String enumName = enumDef.enumName().getText(); final String modelPackage = lookupHelper.getPackageForEnum(FileType.MODEL, enumDef); - final String javaDocComment = (enumDef.docComment()== null) ? "" : - cleanDocStr(enumDef.docComment().getText().replaceAll("\n \\*\s*\n","\n *
\n")); + final String javaDocComment = (enumDef.docComment() == null) + ? "" + : cleanDocStr(enumDef.docComment().getText().replaceAll("\n \\*\s*\n", "\n *
\n")); String deprecated = ""; final Map enumValues = new HashMap<>(); int maxIndex = 0; - for (var item: enumDef.enumBody().enumElement()) { + for (var item : enumDef.enumBody().enumElement()) { if (item.enumField() != null && item.enumField().ident() != null) { final var enumValueName = item.enumField().ident().getText(); - final var enumNumber = Integer.parseInt(item.enumField().intLit().getText()); + final var enumNumber = + Integer.parseInt(item.enumField().intLit().getText()); final String enumValueJavaDoc = cleanDocStr( - (item.enumField().docComment() == null || item.enumField().docComment().getText().isBlank()) ? - enumValueName : - item.enumField().docComment().getText() - .replaceAll("[\t ]*/\\*\\*([\n\t ]+\\*\s+)?","") // remove doc start indenting - .replaceAll("/\\*\\*","") // remove doc start - .replaceAll("[\n\t ]+\\*/","") // remove doc end - .replaceAll("\n[\t\s]+\\*\\*?","\n") // remove doc indenting - .replaceAll("/n\s*/n","/n") // remove empty lines - ); + (item.enumField().docComment() == null + || item.enumField() + .docComment() + .getText() + .isBlank()) + ? enumValueName + : item.enumField() + .docComment() + .getText() + .replaceAll("[\t ]*/\\*\\*([\n\t ]+\\*\s+)?", "") // remove doc start indenting + .replaceAll("/\\*\\*", "") // remove doc start + .replaceAll("[\n\t ]+\\*/", "") // remove doc end + .replaceAll("\n[\t\s]+\\*\\*?", "\n") // remove doc indenting + .replaceAll("/n\s*/n", "/n") // remove empty lines + ); maxIndex = Math.max(maxIndex, enumNumber); // extract if the enum is marks as deprecated boolean deprecatedEnumValue = false; - if(item.enumField().enumValueOptions() != null && item.enumField().enumValueOptions().enumValueOption() != null) { - for(var option:item.enumField().enumValueOptions().enumValueOption()) { + if (item.enumField().enumValueOptions() != null + && item.enumField().enumValueOptions().enumValueOption() != null) { + for (var option : item.enumField().enumValueOptions().enumValueOption()) { if ("deprecated".equals(option.optionName().getText())) { deprecatedEnumValue = true; } else { @@ -69,22 +78,23 @@ public static void generateEnumFile(Protobuf3Parser.EnumDefContext enumDef, File } } } - enumValues.put(enumNumber, new EnumValue(enumValueName, deprecatedEnumValue,enumValueJavaDoc)); - } else if (item.optionStatement() != null){ + enumValues.put(enumNumber, new EnumValue(enumValueName, deprecatedEnumValue, enumValueJavaDoc)); + } else if (item.optionStatement() != null) { if ("deprecated".equals(item.optionStatement().optionName().getText())) { deprecated = "@Deprecated "; } else { - System.err.printf("Unhandled Option: %s%n", item.optionStatement().getText()); + System.err.printf( + "Unhandled Option: %s%n", item.optionStatement().getText()); } } else { System.err.printf("EnumGenerator Warning - Unknown element: %s -- %s%n", item, item.getText()); } } try (FileWriter javaWriter = new FileWriter(getJavaFile(destinationSrcDir, modelPackage, enumName))) { - javaWriter.write( - "package %s;\n\n%s".formatted(modelPackage, createEnum(javaDocComment, deprecated, enumName, - maxIndex, enumValues, false)) - ); + javaWriter.write("package %s;\n\n%s" + .formatted( + modelPackage, + createEnum(javaDocComment, deprecated, enumName, maxIndex, enumValues, false))); } } @@ -99,8 +109,13 @@ public static void generateEnumFile(Protobuf3Parser.EnumDefContext enumDef, File * @param addUnknown when true we add an enum value for one of * @return string code for enum */ - static String createEnum(String javaDocComment, String deprecated, String 
enumName, - int maxIndex, Map enumValues, boolean addUnknown) { + static String createEnum( + String javaDocComment, + String deprecated, + String enumName, + int maxIndex, + Map enumValues, + boolean addUnknown) { final List enumValuesCode = new ArrayList<>(maxIndex); if (addUnknown) { // spotless:off diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/Generator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/Generator.java index e8ef58e0..d7f4e49f 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/Generator.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/Generator.java @@ -5,7 +5,6 @@ import com.hedera.pbj.compiler.impl.generators.json.JsonCodecGenerator; import com.hedera.pbj.compiler.impl.generators.protobuf.CodecGenerator; import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser; - import java.io.File; import java.io.IOException; import java.util.List; @@ -23,8 +22,7 @@ public interface Generator { SchemaGenerator.class, CodecGenerator.class, JsonCodecGenerator.class, - TestGenerator.class - ); + TestGenerator.class); /** * Generate a code from protobuf message type @@ -35,7 +33,10 @@ public interface Generator { * @param lookupHelper Lookup helper for global context lookups * @throws IOException if there was a problem writing generated code */ - void generate(final Protobuf3Parser.MessageDefContext msgDef, final File destinationSrcDir, - File destinationTestSrcDir, final ContextualLookupHelper lookupHelper) throws IOException; - + void generate( + final Protobuf3Parser.MessageDefContext msgDef, + final File destinationSrcDir, + File destinationTestSrcDir, + final ContextualLookupHelper lookupHelper) + throws IOException; } diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/ModelGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/ModelGenerator.java index 397317a4..12b632f2 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/ModelGenerator.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/ModelGenerator.java @@ -55,7 +55,8 @@ public final class ModelGenerator implements Generator { hashCode += hashCode << 10; hashCode ^= hashCode >>> 24; hashCode += hashCode << 30; - """.indent(DEFAULT_INDENT); + """ + .indent(DEFAULT_INDENT); /** * Generating method that assembles all the previously generated pieces together @@ -71,7 +72,8 @@ public final class ModelGenerator implements Generator { * @return the generated code */ @NonNull - private static String generateClass(final String modelPackage, + private static String generateClass( + final String modelPackage, final Set imports, final String javaDocComment, final String deprecated, @@ -143,9 +145,8 @@ private static String getFieldAnnotations(final Field field) { * @return the filtered fields */ @NonNull - private static List filterComparableFields(final MessageDefContext msgDef, - final ContextualLookupHelper lookupHelper, - final List fields) { + private static List filterComparableFields( + final MessageDefContext msgDef, final ContextualLookupHelper lookupHelper, final List fields) { final Map fieldByName = fields.stream().collect(toMap(Field::name, f -> f)); final List comparableFields = lookupHelper.getComparableFields(msgDef); return comparableFields.stream().map(fieldByName::get).collect(Collectors.toList()); @@ -161,8 +162,8 @@ 
private static List filterComparableFields(final MessageDefContext msgDef * @return the generated code */ @NonNull - private static String generateCompareTo(final List fields, final String javaRecordName, - final File destinationSrcDir) { + private static String generateCompareTo( + final List fields, final String javaRecordName, final File destinationSrcDir) { // spotless:off String bodyContent = """ @@ -370,8 +371,8 @@ private static String generateConstructorCodeForField(final Field f) { * @return the generated code */ @NonNull - private static String generateCodecFields(final MessageDefContext msgDef, final ContextualLookupHelper lookupHelper, - final String javaRecordName) { + private static String generateCodecFields( + final MessageDefContext msgDef, final ContextualLookupHelper lookupHelper, final String javaRecordName) { // spotless:off return """ /** Protobuf codec for reading and writing in protobuf format */ @@ -398,7 +399,8 @@ private static String generateCodecFields(final MessageDefContext msgDef, final * @param imports the imports to use for the code generation * @param hasMethods the has methods to use for the code generation */ - private static void generateCodeForField(final ContextualLookupHelper lookupHelper, + private static void generateCodeForField( + final ContextualLookupHelper lookupHelper, final Protobuf3Parser.MessageElementContext item, final List fields, final Set imports, @@ -474,7 +476,8 @@ private static void generateCodeForField(final ContextualLookupHelper lookupHelp * * @return the generated code */ - private static List generateCodeForOneOf(final ContextualLookupHelper lookupHelper, + private static List generateCodeForOneOf( + final ContextualLookupHelper lookupHelper, final Protobuf3Parser.MessageElementContext item, final String javaRecordName, final Set imports, @@ -553,7 +556,8 @@ private static List generateCodeForOneOf(final ContextualLookupHelper lo """ /** * Enum for the type of "%s" oneof value - */""".formatted(oneOfField.name()); + */""" + .formatted(oneOfField.name()); final String enumString = createEnum(enumComment, "", enumName, maxIndex, enumValues, true) .indent(DEFAULT_INDENT * 2); oneofEnums.add(enumString); @@ -602,8 +606,8 @@ private static void generateBuilderMethods( final OneOfField parentOneOfField = field.parent(); final String fieldName = field.nameCamelFirstLower(); if (parentOneOfField != null) { - final String oneOfEnumValue = "%s.%s" - .formatted(parentOneOfField.getEnumClassRef(), camelToUpperSnake(field.name())); + final String oneOfEnumValue = + "%s.%s".formatted(parentOneOfField.getEnumClassRef(), camelToUpperSnake(field.name())); prefix = "%s%s,".formatted(" new %s<>(".formatted(parentOneOfField.className()), oneOfEnumValue); postfix = ")"; fieldToSet = parentOneOfField.nameCamelFirstLower(); @@ -721,13 +725,13 @@ private static void generateBuilderMethods( * * @return the generated code */ - private static String generateBuilder(final MessageDefContext msgDef, final List fields, - final ContextualLookupHelper lookupHelper) { + private static String generateBuilder( + final MessageDefContext msgDef, final List fields, final ContextualLookupHelper lookupHelper) { final String javaRecordName = msgDef.messageName().getText(); final List builderMethods = new ArrayList<>(); for (final Field field : fields) { if (field.type() == Field.FieldType.ONE_OF) { - final OneOfField oneOfField = (OneOfField)field; + final OneOfField oneOfField = (OneOfField) field; for (final Field subField : oneOfField.fields()) { 
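// A minimal sketch of the builder setter shape that generateBuilderMethods() emits for a
// field nested inside a oneof. All names here (ExampleModel-style Builder, "value", "memo")
// and the local OneOf record are hypothetical stand-ins, not the generator's real output or
// the com.hedera.pbj.runtime API.
final class OneOfBuilderSketch {
    // stand-in for the runtime OneOf wrapper type
    record OneOf<E extends Enum<E>>(E kind, Object value) {}

    enum ValueOneOfType { UNSET, MEMO }

    static final class Builder {
        private OneOf<ValueOneOfType> value = new OneOf<>(ValueOneOfType.UNSET, null);

        // the emitted setter wraps the argument with prefix "new OneOf<>(<enum value>," and
        // postfix ")", and assigns it to the oneof's own builder field (fieldToSet above)
        Builder memo(String memo) {
            this.value = new OneOf<>(ValueOneOfType.MEMO, memo);
            return this;
        }
    }
}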
generateBuilderMethods(builderMethods, msgDef, subField, lookupHelper); } @@ -782,8 +786,8 @@ public Builder() {} * * @return the generated code */ - private static String getDefaultValue(final Field field, final MessageDefContext msgDef, - final ContextualLookupHelper lookupHelper) { + private static String getDefaultValue( + final Field field, final MessageDefContext msgDef, final ContextualLookupHelper lookupHelper) { if (field.type() == Field.FieldType.ONE_OF) { return lookupHelper.getFullyQualifiedMessageClassname(FileType.CODEC, msgDef) + "." + field.javaDefault(); } else { @@ -796,9 +800,12 @@ private static String getDefaultValue(final Field field, final MessageDefContext * *

Generates a new model object, as a Java Record type. */ - public void generate(final MessageDefContext msgDef, + public void generate( + final MessageDefContext msgDef, final File destinationSrcDir, - final File destinationTestSrcDir, final ContextualLookupHelper lookupHelper) throws IOException { + final File destinationTestSrcDir, + final ContextualLookupHelper lookupHelper) + throws IOException { // The javaRecordName will be something like "AccountID". final var javaRecordName = lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef); @@ -841,7 +848,8 @@ public void generate(final MessageDefContext msgDef, if ("deprecated".equals(item.optionStatement().optionName().getText())) { deprecated = "@Deprecated "; } else { - System.err.printf("Unhandled Option: %s%n", item.optionStatement().getText()); + System.err.printf( + "Unhandled Option: %s%n", item.optionStatement().getText()); } } else if (item.reserved() == null) { // ignore reserved and warn about anything else System.err.printf("ModelGenerator Warning - Unknown element: %s -- %s%n", item, item.getText()); @@ -852,9 +860,10 @@ public void generate(final MessageDefContext msgDef, // The javadoc comment to use for the model class, which comes **directly** from the protobuf schema, // but is cleaned up and formatted for use in JavaDoc. - final String docComment = (msgDef.docComment() == null || msgDef.docComment().getText().isBlank()) - ? javaRecordName : - cleanJavaDocComment(msgDef.docComment().getText()); + final String docComment = + (msgDef.docComment() == null || msgDef.docComment().getText().isBlank()) + ? javaRecordName + : cleanJavaDocComment(msgDef.docComment().getText()); String javaDocComment = "/**\n * " + docComment.replaceAll("\n", "\n * "); if (fields.isEmpty()) { javaDocComment += "\n */"; @@ -916,11 +925,15 @@ public void generate(final MessageDefContext msgDef, // === Build file try (final FileWriter javaWriter = new FileWriter(javaFile)) { - javaWriter.write( - generateClass(modelPackage, imports, javaDocComment, deprecated, javaRecordName, fields, - bodyContent, hasComparableFields) - ); + javaWriter.write(generateClass( + modelPackage, + imports, + javaDocComment, + deprecated, + javaRecordName, + fields, + bodyContent, + hasComparableFields)); } } - } diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/SchemaGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/SchemaGenerator.java index 3a2ca913..50b44885 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/SchemaGenerator.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/SchemaGenerator.java @@ -9,7 +9,6 @@ import com.hedera.pbj.compiler.impl.OneOfField; import com.hedera.pbj.compiler.impl.SingleField; import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser; - import java.io.File; import java.io.FileWriter; import java.io.IOException; @@ -28,8 +27,12 @@ public final class SchemaGenerator implements Generator { /** * {@inheritDoc} */ - public void generate(final Protobuf3Parser.MessageDefContext msgDef, final File destinationSrcDir, - File destinationTestSrcDir, final ContextualLookupHelper lookupHelper) throws IOException { + public void generate( + final Protobuf3Parser.MessageDefContext msgDef, + final File destinationSrcDir, + File destinationTestSrcDir, + final ContextualLookupHelper lookupHelper) + throws IOException { final String modelClassName = 
lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef); final String schemaClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.SCHEMA, msgDef); final String schemaPackage = lookupHelper.getPackageForMessage(FileType.SCHEMA, msgDef); @@ -57,8 +60,8 @@ public void generate(final Protobuf3Parser.MessageDefContext msgDef, final File } final List flattenedFields = fields.stream() - .flatMap(field -> field instanceof OneOfField ? ((OneOfField)field).fields().stream() : - Stream.of(field)) + .flatMap(field -> + field instanceof OneOfField ? ((OneOfField) field).fields().stream() : Stream.of(field)) .collect(Collectors.toList()); // spotless:off try (FileWriter javaWriter = new FileWriter(javaFile)) { diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/TestGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/TestGenerator.java index 3c5b7a08..80a99de9 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/TestGenerator.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/TestGenerator.java @@ -12,7 +12,6 @@ import com.hedera.pbj.compiler.impl.OneOfField; import com.hedera.pbj.compiler.impl.SingleField; import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser; - import java.io.File; import java.io.FileWriter; import java.io.IOException; @@ -31,25 +30,30 @@ public final class TestGenerator implements Generator { /** * {@inheritDoc} */ - public void generate(Protobuf3Parser.MessageDefContext msgDef, File destinationSrcDir, - File destinationTestSrcDir, final ContextualLookupHelper lookupHelper) throws IOException { + public void generate( + Protobuf3Parser.MessageDefContext msgDef, + File destinationSrcDir, + File destinationTestSrcDir, + final ContextualLookupHelper lookupHelper) + throws IOException { final var modelClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef); final var testClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.TEST, msgDef); final String testPackage = lookupHelper.getPackageForMessage(FileType.TEST, msgDef); - final String protoCJavaFullQualifiedClass = lookupHelper.getFullyQualifiedMessageClassname(FileType.PROTOC,msgDef); + final String protoCJavaFullQualifiedClass = + lookupHelper.getFullyQualifiedMessageClassname(FileType.PROTOC, msgDef); final File javaFile = Common.getJavaFile(destinationTestSrcDir, testPackage, testClassName); final List fields = new ArrayList<>(); final Set imports = new TreeSet<>(); imports.add("com.hedera.pbj.runtime.io.buffer"); imports.add(lookupHelper.getPackageForMessage(FileType.MODEL, msgDef)); - for (final var item: msgDef.messageBody().messageElement()) { + for (final var item : msgDef.messageBody().messageElement()) { if (item.messageDef() != null) { // process sub messages generate(item.messageDef(), destinationSrcDir, destinationTestSrcDir, lookupHelper); } else if (item.oneof() != null) { // process one ofs final var field = new OneOfField(item.oneof(), modelClassName, lookupHelper); fields.add(field); field.addAllNeededImports(imports, true, false, true); - for(var subField : field.fields()) { + for (var subField : field.fields()) { subField.addAllNeededImports(imports, true, false, true); } } else if (item.mapField() != null) { // process map fields @@ -215,16 +219,19 @@ private static String generateTestData(String modelClassName, Field field, boole // spotless:on } else if (field instanceof final MapField mapField) { // e.g. 
INTEGER_TESTS_LIST - final String keyOptions = getOptionsForFieldType(mapField.keyField().type(), mapField.keyField().javaFieldType()); + final String keyOptions = getOptionsForFieldType( + mapField.keyField().type(), mapField.keyField().javaFieldType()); // e.g. STRING_TESTS_LIST, or, say, CustomMessageTest.ARGUMENTS - final String valueOptions = getOptionsForFieldType(mapField.valueField().type(), mapField.valueField().javaFieldType()); + final String valueOptions = getOptionsForFieldType( + mapField.valueField().type(), mapField.valueField().javaFieldType()); // A cartesian product is nice to use, but it doesn't seem reasonable from the performance perspective. // Instead, we want to test three cases: // 1. Empty map // 2. Map with a single entry // 3. Map with multiple (e.g. two) entries - // Note that keys and value options lists may be pretty small. E.g. Boolean would only have 2 elements. So we use mod. + // Note that keys and value options lists may be pretty small. E.g. Boolean would only have 2 elements. So + // we use mod. // Also note that we assume there's at least one element in each list. // spotless:off return """ @@ -241,7 +248,7 @@ private static String generateTestData(String modelClassName, Field field, boole .replace("$valueOptions", valueOptions); // spotless:on } else { - return getOptionsForFieldType(field.type(), ((SingleField)field).javaFieldTypeForTest()); + return getOptionsForFieldType(field.type(), ((SingleField) field).javaFieldTypeForTest()); } } @@ -272,9 +279,11 @@ private static String getOptionsForFieldType(Field.FieldType fieldType, String j case STRING -> "STRING_TESTS_LIST"; case BYTES -> "BYTES_TESTS_LIST"; case ENUM -> "Arrays.asList(%s.values())".formatted(javaFieldType); - case ONE_OF -> throw new RuntimeException("Should never happen, should have been caught in generateTestData()"); + case ONE_OF -> throw new RuntimeException( + "Should never happen, should have been caught in generateTestData()"); case MESSAGE -> "%s%s.ARGUMENTS".formatted(javaFieldType, FileAndPackageNamesConfig.TEST_JAVA_FILE_SUFFIX); - case MAP -> throw new RuntimeException("Should never happen, should have been caught in generateTestData()"); + case MAP -> throw new RuntimeException( + "Should never happen, should have been caught in generateTestData()"); }; } diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecGenerator.java index 1daf7476..99eefa87 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecGenerator.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecGenerator.java @@ -10,7 +10,6 @@ import com.hedera.pbj.compiler.impl.SingleField; import com.hedera.pbj.compiler.impl.generators.Generator; import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser; - import java.io.File; import java.io.FileWriter; import java.io.IOException; @@ -29,8 +28,12 @@ public final class JsonCodecGenerator implements Generator { /** * {@inheritDoc} */ - public void generate(Protobuf3Parser.MessageDefContext msgDef, final File destinationSrcDir, - File destinationTestSrcDir, final ContextualLookupHelper lookupHelper) throws IOException { + public void generate( + Protobuf3Parser.MessageDefContext msgDef, + final File destinationSrcDir, + File destinationTestSrcDir, + final ContextualLookupHelper lookupHelper) + throws IOException { final 
String modelClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef); final String codecClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.JSON_CODEC, msgDef); final String codecPackage = lookupHelper.getPackageForMessage(FileType.JSON_CODEC, msgDef); @@ -41,7 +44,7 @@ public void generate(Protobuf3Parser.MessageDefContext msgDef, final File destin imports.add(lookupHelper.getPackageForMessage(FileType.MODEL, msgDef)); imports.add(lookupHelper.getPackageForMessage(FileType.SCHEMA, msgDef)); - for(var item: msgDef.messageBody().messageElement()) { + for (var item : msgDef.messageBody().messageElement()) { if (item.messageDef() != null) { // process sub messages generate(item.messageDef(), destinationSrcDir, destinationTestSrcDir, lookupHelper); } else if (item.oneof() != null) { // process one ofs diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecParseMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecParseMethodGenerator.java index f28afe99..a545a1db 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecParseMethodGenerator.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecParseMethodGenerator.java @@ -1,17 +1,16 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.compiler.impl.generators.json; +import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT; +import static com.hedera.pbj.compiler.impl.generators.json.JsonCodecGenerator.toJsonFieldName; + import com.hedera.pbj.compiler.impl.Common; import com.hedera.pbj.compiler.impl.Field; import com.hedera.pbj.compiler.impl.MapField; import com.hedera.pbj.compiler.impl.OneOfField; - import java.util.List; import java.util.stream.Collectors; -import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT; -import static com.hedera.pbj.compiler.impl.generators.json.JsonCodecGenerator.toJsonFieldName; - /** * Code to generate the parse method for Codec classes. */ @@ -26,27 +25,28 @@ class JsonCodecParseMethodGenerator { * @return code for constants */ static String generateUnsetOneOfConstants(final List fields) { - return "\n" + fields.stream() - .filter(f -> f instanceof OneOfField) - .map(f -> { - final OneOfField field = (OneOfField)f; - return """ + return "\n" + + fields.stream() + .filter(f -> f instanceof OneOfField) + .map(f -> { + final OneOfField field = (OneOfField) f; + return """ /** Constant for an unset oneof for $fieldName */ public static final $className<$enum> $unsetFieldName = new $className<>($enum.UNSET,null); """ - .replace("$className", field.className()) - .replace("$enum", field.getEnumClassRef()) - .replace("$fieldName", field.name()) - .replace("$unsetFieldName", Common.camelToUpperSnake(field.name())+"_UNSET") - .replace("$unsetFieldName", field.getEnumClassRef()); - }) - .collect(Collectors.joining("\n")); + .replace("$className", field.className()) + .replace("$enum", field.getEnumClassRef()) + .replace("$fieldName", field.name()) + .replace("$unsetFieldName", Common.camelToUpperSnake(field.name()) + "_UNSET") + .replace("$unsetFieldName", field.getEnumClassRef()); + }) + .collect(Collectors.joining("\n")); } static String generateParseObjectMethod(final String modelClassName, final List fields) { return """ /** - * Parses a HashObject object from JSON parse tree for object JSONParser.ObjContext. 
+ * Parses a HashObject object from JSON parse tree for object JSONParser.ObjContext. * Throws an UnknownFieldException wrapped in a ParseException if in strict mode ONLY. * * @param root The JSON parsed object tree to parse data from @@ -84,12 +84,18 @@ static String generateParseObjectMethod(final String modelClassName, final List< } } """ - .replace("$modelClassName",modelClassName) - .replace("$fieldDefs",fields.stream().map(field -> " %s temp_%s = %s;".formatted(field.javaFieldType(), - field.name(), field.javaDefault())).collect(Collectors.joining("\n"))) - .replace("$fieldsList",fields.stream().map(field -> "temp_"+field.name()).collect(Collectors.joining(", "))) - .replace("$caseStatements",generateCaseStatements(fields)) - .indent(DEFAULT_INDENT); + .replace("$modelClassName", modelClassName) + .replace( + "$fieldDefs", + fields.stream() + .map(field -> " %s temp_%s = %s;" + .formatted(field.javaFieldType(), field.name(), field.javaDefault())) + .collect(Collectors.joining("\n"))) + .replace( + "$fieldsList", + fields.stream().map(field -> "temp_" + field.name()).collect(Collectors.joining(", "))) + .replace("$caseStatements", generateCaseStatements(fields)) + .indent(DEFAULT_INDENT); } /** @@ -101,19 +107,20 @@ static String generateParseObjectMethod(final String modelClassName, final List< */ private static String generateCaseStatements(final List fields) { StringBuilder sb = new StringBuilder(); - for(Field field: fields) { + for (Field field : fields) { if (field instanceof final OneOfField oneOfField) { - for(final Field subField: oneOfField.fields()) { - sb.append("case \"" + toJsonFieldName(subField.name()) +"\" /* [" + subField.fieldNumber() + "] */ " + - ": temp_" + oneOfField.name() + " = new %s<>(\n".formatted(oneOfField.className()) + - oneOfField.getEnumClassRef().indent(DEFAULT_INDENT) +"."+Common.camelToUpperSnake(subField.name())+ - ", \n".indent(DEFAULT_INDENT)); + for (final Field subField : oneOfField.fields()) { + sb.append("case \"" + toJsonFieldName(subField.name()) + "\" /* [" + subField.fieldNumber() + + "] */ " + ": temp_" + + oneOfField.name() + " = new %s<>(\n".formatted(oneOfField.className()) + + oneOfField.getEnumClassRef().indent(DEFAULT_INDENT) + + "." + Common.camelToUpperSnake(subField.name()) + ", \n".indent(DEFAULT_INDENT)); generateFieldCaseStatement(sb, subField, "kvPair.value()"); sb.append("); break;\n"); } } else { - sb.append("case \"" + toJsonFieldName(field.name()) +"\" /* [" + field.fieldNumber() + "] */ " + - ": temp_" + field.name()+" = "); + sb.append("case \"" + toJsonFieldName(field.name()) + "\" /* [" + field.fieldNumber() + "] */ " + + ": temp_" + field.name() + " = "); generateFieldCaseStatement(sb, field, "kvPair.value()"); sb.append("; break;\n"); } @@ -128,11 +135,12 @@ private static String generateCaseStatements(final List fields) { * @param origSB StringBuilder to append code to * @param valueGetter normally a "kvPair.value()", but may be different e.g. 
for maps parsing */ - private static void generateFieldCaseStatement(final StringBuilder origSB, final Field field, final String valueGetter) { + private static void generateFieldCaseStatement( + final StringBuilder origSB, final Field field, final String valueGetter) { final StringBuilder sb = new StringBuilder(); if (field.repeated()) { if (field.type() == Field.FieldType.MESSAGE) { - sb.append("parseObjArray($valueGetter.arr(), "+field.messageType()+".JSON, maxDepth - 1)"); + sb.append("parseObjArray($valueGetter.arr(), " + field.messageType() + ".JSON, maxDepth - 1)"); } else { sb.append("$valueGetter.arr().value().stream().map(v -> "); switch (field.type()) { @@ -168,18 +176,19 @@ private static void generateFieldCaseStatement(final StringBuilder origSB, final generateFieldCaseStatement(keySB, mapField.keyField(), "mapKV"); generateFieldCaseStatement(valueSB, mapField.valueField(), "mapKV.value()"); - sb.append(""" + sb.append( + """ $valueGetter.getChild(JSONParser.ObjContext.class, 0).pair().stream() .collect(Collectors.toMap( mapKV -> $mapEntryKey, new UncheckedThrowingFunction<>(mapKV -> $mapEntryValue) ))""" - .replace("$mapEntryKey", keySB.toString()) - .replace("$mapEntryValue", valueSB.toString()) - ); + .replace("$mapEntryKey", keySB.toString()) + .replace("$mapEntryValue", valueSB.toString())); } else { switch (field.type()) { - case MESSAGE -> sb.append(field.javaFieldType() + ".JSON.parse($valueGetter.getChild(JSONParser.ObjContext.class, 0), false, maxDepth - 1)"); + case MESSAGE -> sb.append(field.javaFieldType() + + ".JSON.parse($valueGetter.getChild(JSONParser.ObjContext.class, 0), false, maxDepth - 1)"); case ENUM -> sb.append(field.javaFieldType() + ".fromString($valueGetter.STRING().getText())"); case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> sb.append("parseInteger($valueGetter)"); case INT64, UINT64, SINT64, FIXED64, SFIXED64 -> sb.append("parseLong($valueGetter)"); @@ -188,7 +197,7 @@ private static void generateFieldCaseStatement(final StringBuilder origSB, final case STRING -> sb.append("unescape($valueGetter.STRING().getText())"); case BOOL -> sb.append("parseBoolean($valueGetter)"); case BYTES -> sb.append("Bytes.fromBase64($valueGetter.STRING().getText())"); - default -> throw new RuntimeException("Unknown field type ["+field.type()+"]"); + default -> throw new RuntimeException("Unknown field type [" + field.type() + "]"); } } origSB.append(sb.toString().replace("$valueGetter", valueGetter)); diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecWriteMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecWriteMethodGenerator.java index 22a9eb8a..b3916ce4 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecWriteMethodGenerator.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecWriteMethodGenerator.java @@ -1,21 +1,20 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.compiler.impl.generators.json; +import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT; +import static com.hedera.pbj.compiler.impl.generators.json.JsonCodecGenerator.toJsonFieldName; + import com.hedera.pbj.compiler.impl.Common; import com.hedera.pbj.compiler.impl.Field; import com.hedera.pbj.compiler.impl.MapField; import com.hedera.pbj.compiler.impl.OneOfField; import com.hedera.pbj.compiler.impl.SingleField; import edu.umd.cs.findbugs.annotations.NonNull; - 
import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; -import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT; -import static com.hedera.pbj.compiler.impl.generators.json.JsonCodecGenerator.toJsonFieldName; - /** * Code to generate the write method for Codec classes. */ @@ -24,14 +23,18 @@ final class JsonCodecWriteMethodGenerator { static String generateWriteMethod(final String modelClassName, final List fields) { final List fieldsToWrite = fields.stream() - .flatMap(field -> field.type() == Field.FieldType.ONE_OF ? ((OneOfField)field).fields().stream() : Stream.of(field)) + .flatMap(field -> field.type() == Field.FieldType.ONE_OF + ? ((OneOfField) field).fields().stream() + : Stream.of(field)) .sorted(Comparator.comparingInt(Field::fieldNumber)) .toList(); final String fieldWriteLines = fieldsToWrite.stream() - .map(field -> generateFieldWriteLines(field, modelClassName, "data.%s()".formatted(field.nameCamelFirstLower()))) - .collect(Collectors.joining("\n")).indent(DEFAULT_INDENT); + .map(field -> generateFieldWriteLines( + field, modelClassName, "data.%s()".formatted(field.nameCamelFirstLower()))) + .collect(Collectors.joining("\n")) + .indent(DEFAULT_INDENT); - return """ + return """ /** * Returns JSON string representing an item. * @@ -60,12 +63,11 @@ public String toJSON(@NonNull $modelClass data, String indent, boolean inline) { return sb.toString(); } """ - .replace("$modelClass", modelClassName) - .replace("$fieldWriteLines", fieldWriteLines) - .indent(DEFAULT_INDENT); + .replace("$modelClass", modelClassName) + .replace("$fieldWriteLines", fieldWriteLines) + .indent(DEFAULT_INDENT); } - /** * Generate lines of code for writing field * @@ -78,42 +80,49 @@ private static String generateFieldWriteLines(final Field field, final String mo final String fieldDef = Common.camelToUpperSnake(field.name()); final String fieldName = '\"' + toJsonFieldName(field.name()) + '\"'; final String basicFieldCode = generateBasicFieldLines(field, getValueCode, fieldDef, fieldName, "childIndent"); - String prefix = "// ["+field.fieldNumber()+"] - "+field.name() + "\n"; + String prefix = "// [" + field.fieldNumber() + "] - " + field.name() + "\n"; if (field.parent() != null) { final OneOfField oneOfField = field.parent(); - final String oneOfType = modelClassName+"."+oneOfField.nameCamelFirstUpper()+"OneOfType"; - prefix += "if (data."+oneOfField.nameCamelFirstLower()+"().kind() == "+ oneOfType +"."+ - Common.camelToUpperSnake(field.name())+")"; + final String oneOfType = modelClassName + "." + oneOfField.nameCamelFirstUpper() + "OneOfType"; + prefix += "if (data." + oneOfField.nameCamelFirstLower() + "().kind() == " + oneOfType + "." + + Common.camelToUpperSnake(field.name()) + ")"; prefix += "\n"; return prefix + "fieldLines.add(" + basicFieldCode + ");"; } else { if (field.repeated()) { - return prefix + "if (!data." + field.nameCamelFirstLower() + "().isEmpty()) fieldLines.add(" + basicFieldCode + ");"; + return prefix + "if (!data." + field.nameCamelFirstLower() + "().isEmpty()) fieldLines.add(" + + basicFieldCode + ");"; } else if (field.type() == Field.FieldType.BYTES) { - return prefix + "if (data." + field.nameCamelFirstLower() + "() != " + field.javaDefault() + - " && data." + field.nameCamelFirstLower() + "() != null" + - " && data." + field.nameCamelFirstLower() + "().length() > 0) fieldLines.add(" + basicFieldCode + ");"; + return prefix + "if (data." 
+ field.nameCamelFirstLower() + "() != " + field.javaDefault() + " && data." + + field.nameCamelFirstLower() + "() != null" + " && data." + + field.nameCamelFirstLower() + "().length() > 0) fieldLines.add(" + basicFieldCode + ");"; } else if (field.type() == Field.FieldType.MAP) { - return prefix + "if (data." + field.nameCamelFirstLower() + "() != " + field.javaDefault() + - " && !data." + field.nameCamelFirstLower() + "().isEmpty()) fieldLines.add(" + basicFieldCode + ");"; + return prefix + "if (data." + field.nameCamelFirstLower() + "() != " + field.javaDefault() + + " && !data." + field.nameCamelFirstLower() + "().isEmpty()) fieldLines.add(" + basicFieldCode + + ");"; } else { - return prefix + "if (data." + field.nameCamelFirstLower() + "() != " + field.javaDefault() + ") fieldLines.add(" + basicFieldCode + ");"; + return prefix + "if (data." + field.nameCamelFirstLower() + "() != " + field.javaDefault() + + ") fieldLines.add(" + basicFieldCode + ");"; } } } @NonNull - private static String generateBasicFieldLines(Field field, String getValueCode, String fieldDef, String fieldName, String childIndent) { + private static String generateBasicFieldLines( + Field field, String getValueCode, String fieldDef, String fieldName, String childIndent) { if (field.optionalValueType()) { return switch (field.messageType()) { - case "StringValue", "BoolValue", "Int32Value", - "UInt32Value", "FloatValue", - "DoubleValue", "BytesValue" -> "field(%s, %s)" - .formatted(fieldName, getValueCode); - case "Int64Value", "UInt64Value" -> "field(%s, %s, true)" - .formatted(fieldName, getValueCode); - default -> throw new UnsupportedOperationException("Unhandled optional message type:" + field.messageType()); + case "StringValue", + "BoolValue", + "Int32Value", + "UInt32Value", + "FloatValue", + "DoubleValue", + "BytesValue" -> "field(%s, %s)".formatted(fieldName, getValueCode); + case "Int64Value", "UInt64Value" -> "field(%s, %s, true)".formatted(fieldName, getValueCode); + default -> throw new UnsupportedOperationException( + "Unhandled optional message type:" + field.messageType()); }; } else if (field.repeated()) { return switch (field.type()) { @@ -121,8 +130,10 @@ private static String generateBasicFieldLines(Field field, String getValueCode, .replace("$fieldName", fieldName) .replace("$fieldDef", fieldDef) .replace("$valueCode", getValueCode) - .replace("$codec", ((SingleField) field).messageTypeModelPackage() + "." + - Common.capitalizeFirstLetter(field.messageType()) + ".JSON"); + .replace( + "$codec", + ((SingleField) field).messageTypeModelPackage() + "." + + Common.capitalizeFirstLetter(field.messageType()) + ".JSON"); default -> "arrayField($fieldName, $fieldDef, $valueCode)" .replace("$fieldName", fieldName) .replace("$fieldDef", fieldDef) @@ -135,11 +146,11 @@ private static String generateBasicFieldLines(Field field, String getValueCode, "v", Common.camelToUpperSnake(mapField.valueField().name()), "n", - "indent" - ); + "indent"); return "field(%s, %s, $kEncoder, $vComposer)" .formatted(fieldName, getValueCode) - // Maps in protobuf can only have simple scalar and not floating keys, so toString should do a good job. + // Maps in protobuf can only have simple scalar and not floating keys, so toString should do a good + // job. 
// Also see https://protobuf.dev/programming-guides/proto3/#json .replace("$kEncoder", "k -> escape(k.toString())") .replace("$vComposer", "(n, v) -> " + vComposerMethod); @@ -154,10 +165,11 @@ private static String generateBasicFieldLines(Field field, String getValueCode, .replace("$fieldName", fieldName) .replace("$fieldDef", fieldDef) .replace("$valueCode", getValueCode) - .replace("$codec", ((SingleField) field).messageTypeModelPackage() + "." + - Common.capitalizeFirstLetter(field.messageType()) + ".JSON"); - default -> "field(%s, %s)" - .formatted(fieldName, getValueCode); + .replace( + "$codec", + ((SingleField) field).messageTypeModelPackage() + "." + + Common.capitalizeFirstLetter(field.messageType()) + ".JSON"); + default -> "field(%s, %s)".formatted(fieldName, getValueCode); }; } } diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecFastEqualsMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecFastEqualsMethodGenerator.java index 2c4312d3..60c579f0 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecFastEqualsMethodGenerator.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecFastEqualsMethodGenerator.java @@ -4,7 +4,6 @@ import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT; import com.hedera.pbj.compiler.impl.Field; - import java.util.List; /** diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecGenerator.java index 9ffc7175..d236571d 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecGenerator.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecGenerator.java @@ -10,7 +10,6 @@ import com.hedera.pbj.compiler.impl.SingleField; import com.hedera.pbj.compiler.impl.generators.Generator; import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser; - import java.io.File; import java.io.FileWriter; import java.io.IOException; @@ -29,8 +28,12 @@ public final class CodecGenerator implements Generator { /** * {@inheritDoc} */ - public void generate(Protobuf3Parser.MessageDefContext msgDef, final File destinationSrcDir, - File destinationTestSrcDir, final ContextualLookupHelper lookupHelper) throws IOException { + public void generate( + Protobuf3Parser.MessageDefContext msgDef, + final File destinationSrcDir, + File destinationTestSrcDir, + final ContextualLookupHelper lookupHelper) + throws IOException { final String modelClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef); final String codecClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.CODEC, msgDef); final String codecPackage = lookupHelper.getPackageForMessage(FileType.CODEC, msgDef); @@ -41,7 +44,7 @@ public void generate(Protobuf3Parser.MessageDefContext msgDef, final File destin imports.add(lookupHelper.getPackageForMessage(FileType.MODEL, msgDef)); imports.add(lookupHelper.getPackageForMessage(FileType.SCHEMA, msgDef)); - for(var item: msgDef.messageBody().messageElement()) { + for (var item : msgDef.messageBody().messageElement()) { if (item.messageDef() != null) { // process sub messages generate(item.messageDef(), destinationSrcDir, destinationTestSrcDir, lookupHelper); } else if (item.oneof() != null) { // 
process one ofs diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureDataMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureDataMethodGenerator.java index 011a05f1..fb355ce5 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureDataMethodGenerator.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureDataMethodGenerator.java @@ -4,7 +4,6 @@ import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT; import com.hedera.pbj.compiler.impl.Field; - import java.util.List; /** diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureRecordMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureRecordMethodGenerator.java index 0cac8636..56de44ca 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureRecordMethodGenerator.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureRecordMethodGenerator.java @@ -8,7 +8,6 @@ import com.hedera.pbj.compiler.impl.MapField; import com.hedera.pbj.compiler.impl.OneOfField; import com.hedera.pbj.compiler.impl.SingleField; - import java.util.Comparator; import java.util.List; import java.util.function.Function; @@ -23,10 +22,7 @@ class CodecMeasureRecordMethodGenerator { static String generateMeasureMethod(final String modelClassName, final List fields) { final String fieldSizeOfLines = buildFieldSizeOfLines( - modelClassName, - fields, - field -> "data.%s()".formatted(field.nameCamelFirstLower()), - true); + modelClassName, fields, field -> "data.%s()".formatted(field.nameCamelFirstLower()), true); // spotless:off return """ /** @@ -53,10 +49,14 @@ static String buildFieldSizeOfLines( final Function getValueBuilder, boolean skipDefault) { return fields.stream() - .flatMap(field -> field.type() == Field.FieldType.ONE_OF ? ((OneOfField)field).fields().stream() : Stream.of(field)) + .flatMap(field -> field.type() == Field.FieldType.ONE_OF + ? 
((OneOfField) field).fields().stream() + : Stream.of(field)) .sorted(Comparator.comparingInt(Field::fieldNumber)) - .map(field -> generateFieldSizeOfLines(field, modelClassName, getValueBuilder.apply(field), skipDefault)) - .collect(Collectors.joining("\n")).indent(DEFAULT_INDENT); + .map(field -> + generateFieldSizeOfLines(field, modelClassName, getValueBuilder.apply(field), skipDefault)) + .collect(Collectors.joining("\n")) + .indent(DEFAULT_INDENT); } /** @@ -68,61 +68,57 @@ static String buildFieldSizeOfLines( * @param skipDefault true if default value of the field should result in size zero * @return java code for adding fields size to "size" variable */ - private static String generateFieldSizeOfLines(final Field field, final String modelClassName, String getValueCode, boolean skipDefault) { + private static String generateFieldSizeOfLines( + final Field field, final String modelClassName, String getValueCode, boolean skipDefault) { final String fieldDef = Common.camelToUpperSnake(field.name()); - String prefix = "// ["+field.fieldNumber()+"] - "+field.name(); + String prefix = "// [" + field.fieldNumber() + "] - " + field.name(); prefix += "\n"; if (field.parent() != null) { final OneOfField oneOfField = field.parent(); - final String oneOfType = modelClassName+"."+oneOfField.nameCamelFirstUpper()+"OneOfType"; - getValueCode = "data."+oneOfField.nameCamelFirstLower()+"().as()"; - prefix += "if (data."+oneOfField.nameCamelFirstLower()+"().kind() == "+ oneOfType +"."+ - Common.camelToUpperSnake(field.name())+")"; + final String oneOfType = modelClassName + "." + oneOfField.nameCamelFirstUpper() + "OneOfType"; + getValueCode = "data." + oneOfField.nameCamelFirstLower() + "().as()"; + prefix += "if (data." + oneOfField.nameCamelFirstLower() + "().kind() == " + oneOfType + "." 
+ + Common.camelToUpperSnake(field.name()) + ")"; prefix += "\n"; } final String writeMethodName = field.methodNameType(); if (field.optionalValueType()) { - return prefix + switch (field.messageType()) { - case "StringValue" -> "size += sizeOfOptionalString(%s, %s);" - .formatted(fieldDef,getValueCode); - case "BoolValue" -> "size += sizeOfOptionalBoolean(%s, %s);" - .formatted(fieldDef, getValueCode); - case "Int32Value","UInt32Value" -> "size += sizeOfOptionalInteger(%s, %s);" - .formatted(fieldDef, getValueCode); - case "Int64Value","UInt64Value" -> "size += sizeOfOptionalLong(%s, %s);" - .formatted(fieldDef, getValueCode); - case "FloatValue" -> "size += sizeOfOptionalFloat(%s, %s);" - .formatted(fieldDef, getValueCode); - case "DoubleValue" -> "size += sizeOfOptionalDouble(%s, %s);" - .formatted(fieldDef, getValueCode); - case "BytesValue" -> "size += sizeOfOptionalBytes(%s, %s);" - .formatted(fieldDef, getValueCode); - default -> throw new UnsupportedOperationException("Unhandled optional message type:"+field.messageType()); - }; + return prefix + + switch (field.messageType()) { + case "StringValue" -> "size += sizeOfOptionalString(%s, %s);".formatted(fieldDef, getValueCode); + case "BoolValue" -> "size += sizeOfOptionalBoolean(%s, %s);".formatted(fieldDef, getValueCode); + case "Int32Value", "UInt32Value" -> "size += sizeOfOptionalInteger(%s, %s);" + .formatted(fieldDef, getValueCode); + case "Int64Value", "UInt64Value" -> "size += sizeOfOptionalLong(%s, %s);" + .formatted(fieldDef, getValueCode); + case "FloatValue" -> "size += sizeOfOptionalFloat(%s, %s);".formatted(fieldDef, getValueCode); + case "DoubleValue" -> "size += sizeOfOptionalDouble(%s, %s);".formatted(fieldDef, getValueCode); + case "BytesValue" -> "size += sizeOfOptionalBytes(%s, %s);".formatted(fieldDef, getValueCode); + default -> throw new UnsupportedOperationException( + "Unhandled optional message type:" + field.messageType()); + }; } else if (field.repeated()) { - return prefix + switch (field.type()) { - case ENUM -> "size += sizeOfEnumList(%s, %s);" - .formatted(fieldDef, getValueCode); - case MESSAGE -> "size += sizeOfMessageList($fieldDef, $valueCode, $codec);" - .replace("$fieldDef", fieldDef) - .replace("$valueCode", getValueCode) - .replace("$codec", ((SingleField) field).messageTypeModelPackage() + "." + - Common.capitalizeFirstLetter(field.messageType()) + ".PROTOBUF"); - default -> "size += sizeOf%sList(%s, %s);" - .formatted(writeMethodName, fieldDef, getValueCode); - }; + return prefix + + switch (field.type()) { + case ENUM -> "size += sizeOfEnumList(%s, %s);".formatted(fieldDef, getValueCode); + case MESSAGE -> "size += sizeOfMessageList($fieldDef, $valueCode, $codec);" + .replace("$fieldDef", fieldDef) + .replace("$valueCode", getValueCode) + .replace( + "$codec", + ((SingleField) field).messageTypeModelPackage() + "." + + Common.capitalizeFirstLetter(field.messageType()) + ".PROTOBUF"); + default -> "size += sizeOf%sList(%s, %s);".formatted(writeMethodName, fieldDef, getValueCode); + }; } else if (field.type() == Field.FieldType.MAP) { final MapField mapField = (MapField) field; final List mapEntryFields = List.of(mapField.keyField(), mapField.valueField()); final Function getValueBuilder = mapEntryField -> mapEntryField == mapField.keyField() ? "k" : (mapEntryField == mapField.valueField() ? 
"v" : null); final String fieldSizeOfLines = CodecMeasureRecordMethodGenerator.buildFieldSizeOfLines( - field.name(), - mapEntryFields, - getValueBuilder, - false); + field.name(), mapEntryFields, getValueBuilder, false); // spotless:off return prefix + """ if (!$map.isEmpty()) { @@ -146,23 +142,34 @@ private static String generateFieldSizeOfLines(final Field field, final String m .replace("$fieldSizeOfLines", fieldSizeOfLines.indent(DEFAULT_INDENT)); // spotless:on } else { - return prefix + switch(field.type()) { - case ENUM -> "size += sizeOfEnum(%s, %s);" - .formatted(fieldDef, getValueCode); - case STRING -> "size += sizeOfString(%s, %s, %s);" - .formatted(fieldDef, getValueCode, skipDefault); - case MESSAGE -> "size += sizeOfMessage($fieldDef, $valueCode, $codec);" - .replace("$fieldDef", fieldDef) - .replace("$valueCode", getValueCode) - .replace("$codec", ((SingleField)field).messageTypeModelPackage() + "." + - Common.capitalizeFirstLetter(field.messageType())+ ".PROTOBUF"); - case BOOL -> "size += sizeOfBoolean(%s, %s, %s);" - .formatted(fieldDef, getValueCode, skipDefault); - case INT32, UINT32, SINT32, FIXED32, SFIXED32, INT64, SINT64, UINT64, FIXED64, SFIXED64, BYTES -> "size += sizeOf%s(%s, %s, %s);" - .formatted(writeMethodName, fieldDef, getValueCode, skipDefault); - default -> "size += sizeOf%s(%s, %s);" - .formatted(writeMethodName, fieldDef, getValueCode); - }; + return prefix + + switch (field.type()) { + case ENUM -> "size += sizeOfEnum(%s, %s);".formatted(fieldDef, getValueCode); + case STRING -> "size += sizeOfString(%s, %s, %s);" + .formatted(fieldDef, getValueCode, skipDefault); + case MESSAGE -> "size += sizeOfMessage($fieldDef, $valueCode, $codec);" + .replace("$fieldDef", fieldDef) + .replace("$valueCode", getValueCode) + .replace( + "$codec", + ((SingleField) field).messageTypeModelPackage() + "." 
+ + Common.capitalizeFirstLetter(field.messageType()) + ".PROTOBUF"); + case BOOL -> "size += sizeOfBoolean(%s, %s, %s);" + .formatted(fieldDef, getValueCode, skipDefault); + case INT32, + UINT32, + SINT32, + FIXED32, + SFIXED32, + INT64, + SINT64, + UINT64, + FIXED64, + SFIXED64, + BYTES -> "size += sizeOf%s(%s, %s, %s);" + .formatted(writeMethodName, fieldDef, getValueCode, skipDefault); + default -> "size += sizeOf%s(%s, %s);".formatted(writeMethodName, fieldDef, getValueCode); + }; } } } diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecParseMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecParseMethodGenerator.java index 83b476f5..4e1bd1a1 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecParseMethodGenerator.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecParseMethodGenerator.java @@ -9,7 +9,6 @@ import com.hedera.pbj.compiler.impl.OneOfField; import com.hedera.pbj.compiler.impl.PbjCompilerException; import edu.umd.cs.findbugs.annotations.NonNull; - import java.util.List; import java.util.stream.Collectors; @@ -172,10 +171,10 @@ static String generateParseLoop(final String caseStatements, @NonNull final Stri */ private static String generateCaseStatements(final List fields) { StringBuilder sb = new StringBuilder(); - for (Field field: fields) { + for (Field field : fields) { if (field instanceof final OneOfField oneOfField) { - for (final Field subField: oneOfField.fields()) { - generateFieldCaseStatement(sb,subField); + for (final Field subField : oneOfField.fields()) { + generateFieldCaseStatement(sb, subField); } } else if (field.repeated() && field.type().wireType() != Common.TYPE_LENGTH_DELIMITED) { // for repeated fields that are not length encoded there are 2 forms they can be stored in file. @@ -239,7 +238,9 @@ private static void generateFieldCaseStatementPacked(final StringBuilder sb, fin * @param sb StringBuilder to append code to */ private static void generateFieldCaseStatement(final StringBuilder sb, final Field field) { - final int wireType = field.optionalValueType() ? Common.TYPE_LENGTH_DELIMITED : field.type().wireType(); + final int wireType = field.optionalValueType() + ? 
Common.TYPE_LENGTH_DELIMITED + : field.type().wireType(); final int fieldNum = field.fieldNumber(); final int tag = Common.getTag(wireType, fieldNum); // spotless:off diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecWriteMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecWriteMethodGenerator.java index 842cb143..510a6afc 100644 --- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecWriteMethodGenerator.java +++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecWriteMethodGenerator.java @@ -8,7 +8,6 @@ import com.hedera.pbj.compiler.impl.MapField; import com.hedera.pbj.compiler.impl.OneOfField; import com.hedera.pbj.compiler.impl.SingleField; - import java.util.Comparator; import java.util.List; import java.util.function.Function; @@ -22,10 +21,7 @@ final class CodecWriteMethodGenerator { static String generateWriteMethod(final String modelClassName, final List fields) { final String fieldWriteLines = buildFieldWriteLines( - modelClassName, - fields, - field -> "data.%s()".formatted(field.nameCamelFirstLower()), - true); + modelClassName, fields, field -> "data.%s()".formatted(field.nameCamelFirstLower()), true); // spotless:off return """ @@ -52,7 +48,9 @@ private static String buildFieldWriteLines( final Function getValueBuilder, final boolean skipDefault) { return fields.stream() - .flatMap(field -> field.type() == Field.FieldType.ONE_OF ? ((OneOfField)field).fields().stream() : Stream.of(field)) + .flatMap(field -> field.type() == Field.FieldType.ONE_OF + ? ((OneOfField) field).fields().stream() + : Stream.of(field)) .sorted(Comparator.comparingInt(Field::fieldNumber)) .map(field -> generateFieldWriteLines(field, modelClassName, getValueBuilder.apply(field), skipDefault)) .collect(Collectors.joining("\n")) @@ -68,7 +66,8 @@ private static String buildFieldWriteLines( * @param skipDefault skip writing the field if it has default value (for non-oneOf only) * @return java code to write field to output */ - private static String generateFieldWriteLines(final Field field, final String modelClassName, String getValueCode, boolean skipDefault) { + private static String generateFieldWriteLines( + final Field field, final String modelClassName, String getValueCode, boolean skipDefault) { final String fieldDef = Common.camelToUpperSnake(field.name()); String prefix = "// [%d] - %s%n".formatted(field.fieldNumber(), field.name()); diff --git a/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/CommonTest.java b/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/CommonTest.java index 372458c5..a414906c 100644 --- a/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/CommonTest.java +++ b/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/CommonTest.java @@ -1,11 +1,11 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.compiler.impl; +import static org.junit.jupiter.api.Assertions.*; + import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; - /** * Test the common utility methods. 
*/ @@ -24,29 +24,34 @@ void doubleAsterisk() { @Test @DisplayName("Test comment with params and return") void commentWithParamsAndReturn() { - String str = "/**\n* Clean up a java doc style comment removing all the \"*\" etc.\n*\n* @param fieldComment raw Java doc style comment\n* @return clean multi-line content of the comment\n*/\n"; + String str = + "/**\n* Clean up a java doc style comment removing all the \"*\" etc.\n*\n* @param fieldComment raw Java doc style comment\n* @return clean multi-line content of the comment\n*/\n"; String result = Common.cleanJavaDocComment(str); - String expected = "Clean up a java doc style comment removing all the \"*\" etc.\n*\n* @param fieldComment raw Java doc style comment\n* @return clean multi-line content of the comment"; + String expected = + "Clean up a java doc style comment removing all the \"*\" etc.\n*\n* @param fieldComment raw Java doc style comment\n* @return clean multi-line content of the comment"; assertEquals(expected, result); } @Test @DisplayName("Test one line comment on lultiple lines") void oneLineOnMultipleLines() { - String str = """ + String str = + """ /** * The capacity of this sequence will be the difference between the initial position and the length of the delegate */ """; String result = Common.cleanJavaDocComment(str); - String expected = "The capacity of this sequence will be the difference between the initial position and the length of the delegate"; + String expected = + "The capacity of this sequence will be the difference between the initial position and the length of the delegate"; assertEquals(expected, result); } @Test @DisplayName("Test params, throws and returns") void oneParamsThrowsAndReturns() { - String str = """ + String str = + """ /** * Reads the signed byte at current {@link #position()}, and then increments the {@link #position()} by 1. * @@ -55,9 +60,10 @@ void oneParamsThrowsAndReturns() { * @throws DataAccessException If an I/O error occurs */"""; String result = Common.cleanJavaDocComment(str); - String expected = """ + String expected = + """ Reads the signed byte at current {@link #position()}, and then increments the {@link #position()} by 1. - + @return The signed byte at the current {@link #position()} @throws BufferUnderflowException If there are no bytes remaining in this sequence @throws DataAccessException If an I/O error occurs"""; @@ -67,7 +73,8 @@ void oneParamsThrowsAndReturns() { @Test @DisplayName("Test params, throws and returns") void oneParamsThrowsAndReturnsWithMore() { - String str = """ + String str = + """ /** * Read bytes starting at current {@link #position()} into the {@code dst} array, up to the size of the {@code dst} * array. If {@code dst} is larger than the remaining bytes in the sequence, only the remaining bytes are read. @@ -105,7 +112,8 @@ void oneParamsThrowsAndReturnsWithMore() { */ """; String result = Common.cleanJavaDocComment(str); - String expected = """ + String expected = + """ Read bytes starting at current {@link #position()} into the {@code dst} array, up to the size of the {@code dst} array. If {@code dst} is larger than the remaining bytes in the sequence, only the remaining bytes are read. The total number of bytes actually read are returned. The bytes will be placed starting at index 0 of the array. @@ -113,29 +121,29 @@ void oneParamsThrowsAndReturnsWithMore() { sequence, then 0 is returned.

Non-closed P between two paragraphs.

-
+

P at beginning of paragraph.With lots of text. Lipsum dolor sit amet, consectetur adipiscing elit. Nulla nec purus nec.

-
+

P at beginning of paragraph 2.With lots of text. Lipsum dolor sit amet, consectetur adipiscing elit. Nulla nec purus nec.

-
-
+
+
  • Item 1 - with loose P before another tag
  • Item 2
-
+

Simple closed paragraph.

-
+

New line closed paragraph.

-
+

Bytes are read from the sequence one at a time. If there are not {@code length} bytes remaining in this sequence, then a {@link BufferUnderflowException} will be thrown. The {@link #position()} will be incremented by the number of bytes read prior to the exception.

- + @param dst The destination array. Cannot be null. @throws NullPointerException if {@code dst} is null @throws DataAccessException If an I/O error occurs @@ -146,8 +154,9 @@ void oneParamsThrowsAndReturnsWithMore() { @Test @DisplayName("Test params, throws and returns more") void oneParamsThrowsAndReturnsWithMore2() { - String str = """ - + String str = + """ + /** * Read bytes starting at the current {@link #position()} into the {@code dst} array, up to {@code maxLength} * number of bytes. If {@code maxLength} is larger than the remaining bytes in the sequence, only the remaining @@ -175,19 +184,20 @@ void oneParamsThrowsAndReturnsWithMore2() { */ """; String result = Common.cleanJavaDocComment(str); - String expected = """ + String expected = + """ Read bytes starting at the current {@link #position()} into the {@code dst} array, up to {@code maxLength} number of bytes. If {@code maxLength} is larger than the remaining bytes in the sequence, only the remaining bytes are read. The total number of bytes actually read are returned. The bytes will be placed starting at index {@code offset} of the array. The {@link #position()} will be incremented by the number of bytes read. If no bytes are available in the sequence, then 0 is returned. - +

The {@code dst} array may be partially written to at the time that any of the declared exceptions are thrown.

-
+

Bytes are read from the sequence one at a time. If there are not {@code length} bytes remaining in this sequence, then a {@link BufferUnderflowException} will be thrown. The {@link #position()} will be incremented by the number of bytes read prior to the exception.

- + @param dst The array into which bytes are to be written @param offset The offset within the {@code dst} array of the first byte to be written; must be non-negative and no larger than {@code dst.length - maxLength}. @@ -205,7 +215,8 @@ void oneParamsThrowsAndReturnsWithMore2() { @Test @DisplayName("Test params, throws and returns more 2") void oneParamsThrowsAndReturnsWithMore3() { - String str = """ + String str = + """ /** * Reads the next four bytes at the current {@link #position()}, composing them into an int value according to * specified byte order, and then increments the {@link #position()} by four. @@ -217,10 +228,11 @@ void oneParamsThrowsAndReturnsWithMore3() { */\ """; String result = Common.cleanJavaDocComment(str); - String expected = """ + String expected = + """ Reads the next four bytes at the current {@link #position()}, composing them into an int value according to specified byte order, and then increments the {@link #position()} by four. - + @param byteOrder the byte order, aka endian to use. Should never be null. If it is null, BIG_ENDIAN is used. @return The int value at the current {@link #position()} @throws BufferUnderflowException If there are fewer than four bytes remaining diff --git a/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/LookupHelperTest.java b/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/LookupHelperTest.java index 9004d1f1..76d7ff35 100644 --- a/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/LookupHelperTest.java +++ b/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/LookupHelperTest.java @@ -13,18 +13,19 @@ import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.MessageBodyContext; import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.MessageDefContext; import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.MessageElementContext; +import java.util.List; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.util.List; - class LookupHelperTest { @Mock MessageDefContext defContext; + @Mock Protobuf3Parser.OptionCommentContext optionComment; + @BeforeEach void setUp() { MockitoAnnotations.openMocks(this); @@ -45,6 +46,7 @@ void testNormalizeFileName_alreadyNormalized() { String fileName = "common.proto"; assertEquals(fileName, normalizeFileName(fileName)); } + private static void normalizeAndVerify(String fileName) { if (System.getProperty("os.name").toLowerCase().contains("windows")) { String expected = "state\\common.proto"; @@ -84,17 +86,18 @@ void testExtractComparableFields_notApplicableComment() { @Test void testExtractComparableFields_commentWithUnkownField() { - when(optionComment.getText()).thenReturn("// <<>>"); + when(optionComment.getText()) + .thenReturn("// <<>>"); when(defContext.optionComment()).thenReturn(optionComment); final var messageBody = mock(MessageBodyContext.class); final var int32Number = createMessageElement("int32Number"); final var int64Number = createMessageElement("int64Number"); final var text = createMessageElement("text"); - when(messageBody.messageElement()).thenReturn(asList( - int32Number, int64Number, text - )); + when(messageBody.messageElement()).thenReturn(asList(int32Number, int64Number, text)); when(defContext.messageBody()).thenReturn(messageBody); - assertThrows(IllegalArgumentException.class, () -> extractComparableFields(defContext), + assertThrows( + IllegalArgumentException.class, + () -> 
extractComparableFields(defContext), "Should throw IllegalArgumentException"); } @@ -106,9 +109,7 @@ void testExtractComparableFields_validComment() { final var int32Number = createMessageElement("int32Number"); final var int64Number = createMessageElement("int64Number"); final var text = createMessageElement("text"); - when(messageBody.messageElement()).thenReturn(asList( - int32Number, int64Number, text - )); + when(messageBody.messageElement()).thenReturn(asList(int32Number, int64Number, text)); when(defContext.messageBody()).thenReturn(messageBody); List comparableFields = extractComparableFields(defContext); assertEquals(3, comparableFields.size(), "Should return 3 fields"); @@ -127,6 +128,4 @@ private static MessageElementContext createMessageElement(final String fieldName return messageElement; } - - } diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/GrpcHeaders.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/GrpcHeaders.java index 2e10573b..ae927fe2 100644 --- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/GrpcHeaders.java +++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/GrpcHeaders.java @@ -24,28 +24,20 @@ final class GrpcHeaders { static final Header OK = createCached(GRPC_STATUS, GrpcStatus.OK.ordinal()); static final Header CANCELLED = createCached(GRPC_STATUS, GrpcStatus.CANCELLED.ordinal()); static final Header UNKNOWN = createCached(GRPC_STATUS, GrpcStatus.UNKNOWN.ordinal()); - static final Header INVALID_ARGUMENT = - createCached(GRPC_STATUS, GrpcStatus.INVALID_ARGUMENT.ordinal()); - static final Header DEADLINE_EXCEEDED = - createCached(GRPC_STATUS, GrpcStatus.DEADLINE_EXCEEDED.ordinal()); + static final Header INVALID_ARGUMENT = createCached(GRPC_STATUS, GrpcStatus.INVALID_ARGUMENT.ordinal()); + static final Header DEADLINE_EXCEEDED = createCached(GRPC_STATUS, GrpcStatus.DEADLINE_EXCEEDED.ordinal()); static final Header NOT_FOUND = createCached(GRPC_STATUS, GrpcStatus.NOT_FOUND.ordinal()); - static final Header ALREADY_EXISTS = - createCached(GRPC_STATUS, GrpcStatus.ALREADY_EXISTS.ordinal()); - static final Header PERMISSION_DENIED = - createCached(GRPC_STATUS, GrpcStatus.PERMISSION_DENIED.ordinal()); - static final Header RESOURCE_EXHAUSTED = - createCached(GRPC_STATUS, GrpcStatus.RESOURCE_EXHAUSTED.ordinal()); - static final Header FAILED_PRECONDITION = - createCached(GRPC_STATUS, GrpcStatus.FAILED_PRECONDITION.ordinal()); + static final Header ALREADY_EXISTS = createCached(GRPC_STATUS, GrpcStatus.ALREADY_EXISTS.ordinal()); + static final Header PERMISSION_DENIED = createCached(GRPC_STATUS, GrpcStatus.PERMISSION_DENIED.ordinal()); + static final Header RESOURCE_EXHAUSTED = createCached(GRPC_STATUS, GrpcStatus.RESOURCE_EXHAUSTED.ordinal()); + static final Header FAILED_PRECONDITION = createCached(GRPC_STATUS, GrpcStatus.FAILED_PRECONDITION.ordinal()); static final Header ABORTED = createCached(GRPC_STATUS, GrpcStatus.ABORTED.ordinal()); static final Header OUT_OF_RANGE = createCached(GRPC_STATUS, GrpcStatus.OUT_OF_RANGE.ordinal()); - static final Header UNIMPLEMENTED = - createCached(GRPC_STATUS, GrpcStatus.UNIMPLEMENTED.ordinal()); + static final Header UNIMPLEMENTED = createCached(GRPC_STATUS, GrpcStatus.UNIMPLEMENTED.ordinal()); static final Header INTERNAL = createCached(GRPC_STATUS, GrpcStatus.INTERNAL.ordinal()); static final Header UNAVAILABLE = createCached(GRPC_STATUS, GrpcStatus.UNAVAILABLE.ordinal()); static final Header DATA_LOSS = 
createCached(GRPC_STATUS, GrpcStatus.DATA_LOSS.ordinal()); - static final Header UNAUTHENTICATED = - createCached(GRPC_STATUS, GrpcStatus.UNAUTHENTICATED.ordinal()); + static final Header UNAUTHENTICATED = createCached(GRPC_STATUS, GrpcStatus.UNAUTHENTICATED.ordinal()); private GrpcHeaders() { // prevent instantiation diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjMethodRoute.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjMethodRoute.java index 53620d61..0fa55956 100644 --- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjMethodRoute.java +++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjMethodRoute.java @@ -19,22 +19,42 @@ */ final class PbjMethodRoute extends PbjRoute { private static final String SEP = "/"; - @NonNull private final ServiceInterface service; - @NonNull private final ServiceInterface.Method method; - @NonNull private final String fullPath; - @NonNull private final PathMatcher pathMatcher; + + @NonNull + private final ServiceInterface service; + + @NonNull + private final ServiceInterface.Method method; + + @NonNull + private final String fullPath; + + @NonNull + private final PathMatcher pathMatcher; // Metrics related fields. These can safely be reused across threads and invocations private static final String SCOPE = "vendor"; private static final String SERVICE_TAG = "service"; private static final String METHOD_TAG = "method"; private static final String FAILURE_TAG = "failure"; - @NonNull private final Counter requestCounter; - @NonNull private final Counter failedGrpcRequestCounter; - @NonNull private final Counter failedHttpRequestCounter; - @NonNull private final Counter failedUnknownRequestCounter; - @NonNull private final Counter failedResponseCounter; - @NonNull private final Counter deadlineExceededCounter; + + @NonNull + private final Counter requestCounter; + + @NonNull + private final Counter failedGrpcRequestCounter; + + @NonNull + private final Counter failedHttpRequestCounter; + + @NonNull + private final Counter failedUnknownRequestCounter; + + @NonNull + private final Counter failedResponseCounter; + + @NonNull + private final Counter deadlineExceededCounter; /** * Constructor @@ -42,9 +62,7 @@ final class PbjMethodRoute extends PbjRoute { * @param service The service that the method belongs to * @param method The method that this route represents */ - PbjMethodRoute( - @NonNull final ServiceInterface service, - @NonNull final ServiceInterface.Method method) { + PbjMethodRoute(@NonNull final ServiceInterface service, @NonNull final ServiceInterface.Method method) { this.service = requireNonNull(service); this.method = requireNonNull(method); @@ -54,54 +72,40 @@ final class PbjMethodRoute extends PbjRoute { this.pathMatcher = PathMatchers.exact(fullPath); final var metricRegistry = Metrics.globalRegistry(); - this.requestCounter = - metricRegistry.getOrCreate( - Counter.builder("pbj.grpc.requests") - .scope(SCOPE) - .addTag(Tag.create(SERVICE_TAG, serviceName)) - .addTag(Tag.create(METHOD_TAG, methodName)) - .description("The number of gRPC requests")); - this.failedGrpcRequestCounter = - metricRegistry.getOrCreate( - Counter.builder("pbj.grpc.failed.requests") - .scope(SCOPE) - .addTag(Tag.create(SERVICE_TAG, serviceName)) - .addTag(Tag.create(METHOD_TAG, methodName)) - .addTag(Tag.create(FAILURE_TAG, "grpc-exception")) - .description("The number of failed gRPC requests")); - this.failedHttpRequestCounter = - 
metricRegistry.getOrCreate( - Counter.builder("pbj.grpc.failed.requests") - .scope(SCOPE) - .addTag(Tag.create(SERVICE_TAG, serviceName)) - .addTag(Tag.create(METHOD_TAG, methodName)) - .addTag(Tag.create(FAILURE_TAG, "http-exception")) - .description("The number of failed HTTP requests")); - this.failedUnknownRequestCounter = - metricRegistry.getOrCreate( - Counter.builder("pbj.grpc.failed.requests") - .scope(SCOPE) - .addTag(Tag.create(SERVICE_TAG, serviceName)) - .addTag(Tag.create(METHOD_TAG, methodName)) - .addTag(Tag.create(FAILURE_TAG, "unknown-exception")) - .description("The number of failed unknown requests")); - this.failedResponseCounter = - metricRegistry.getOrCreate( - Counter.builder("pbj.grpc.failed.responses") - .scope(SCOPE) - .addTag(Tag.create(SERVICE_TAG, serviceName)) - .addTag(Tag.create(METHOD_TAG, methodName)) - .addTag(Tag.create(FAILURE_TAG, "response")) - .description("The number of failed responses")); - this.deadlineExceededCounter = - metricRegistry.getOrCreate( - Counter.builder("pbj.grpc.deadline.exceeded") - .scope(SCOPE) - .addTag(Tag.create(SERVICE_TAG, serviceName)) - .addTag(Tag.create(METHOD_TAG, methodName)) - .description( - "The number of gRPC requests that exceeded their" - + " deadline")); + this.requestCounter = metricRegistry.getOrCreate(Counter.builder("pbj.grpc.requests") + .scope(SCOPE) + .addTag(Tag.create(SERVICE_TAG, serviceName)) + .addTag(Tag.create(METHOD_TAG, methodName)) + .description("The number of gRPC requests")); + this.failedGrpcRequestCounter = metricRegistry.getOrCreate(Counter.builder("pbj.grpc.failed.requests") + .scope(SCOPE) + .addTag(Tag.create(SERVICE_TAG, serviceName)) + .addTag(Tag.create(METHOD_TAG, methodName)) + .addTag(Tag.create(FAILURE_TAG, "grpc-exception")) + .description("The number of failed gRPC requests")); + this.failedHttpRequestCounter = metricRegistry.getOrCreate(Counter.builder("pbj.grpc.failed.requests") + .scope(SCOPE) + .addTag(Tag.create(SERVICE_TAG, serviceName)) + .addTag(Tag.create(METHOD_TAG, methodName)) + .addTag(Tag.create(FAILURE_TAG, "http-exception")) + .description("The number of failed HTTP requests")); + this.failedUnknownRequestCounter = metricRegistry.getOrCreate(Counter.builder("pbj.grpc.failed.requests") + .scope(SCOPE) + .addTag(Tag.create(SERVICE_TAG, serviceName)) + .addTag(Tag.create(METHOD_TAG, methodName)) + .addTag(Tag.create(FAILURE_TAG, "unknown-exception")) + .description("The number of failed unknown requests")); + this.failedResponseCounter = metricRegistry.getOrCreate(Counter.builder("pbj.grpc.failed.responses") + .scope(SCOPE) + .addTag(Tag.create(SERVICE_TAG, serviceName)) + .addTag(Tag.create(METHOD_TAG, methodName)) + .addTag(Tag.create(FAILURE_TAG, "response")) + .description("The number of failed responses")); + this.deadlineExceededCounter = metricRegistry.getOrCreate(Counter.builder("pbj.grpc.deadline.exceeded") + .scope(SCOPE) + .addTag(Tag.create(SERVICE_TAG, serviceName)) + .addTag(Tag.create(METHOD_TAG, methodName)) + .description("The number of gRPC requests that exceeded their" + " deadline")); } @Override diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandler.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandler.java index 61aed6e5..afd89352 100644 --- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandler.java +++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandler.java @@ -59,15 +59,13 @@ * created 
for each new connection, and each connection is made to a specific method endpoint. */ final class PbjProtocolHandler implements Http2SubProtocolSelector.SubProtocolHandler { - private static final System.Logger LOGGER = - System.getLogger(PbjProtocolHandler.class.getName()); + private static final System.Logger LOGGER = System.getLogger(PbjProtocolHandler.class.getName()); /** The only grpc-encoding supported by this implementation. */ private static final String IDENTITY = "identity"; /** A pre-created and cached *response* header for "grpc-encoding: identity". */ - private static final Header GRPC_ENCODING_IDENTITY = - HeaderValues.createCached("grpc-encoding", IDENTITY); + private static final Header GRPC_ENCODING_IDENTITY = HeaderValues.createCached("grpc-encoding", IDENTITY); /** The regular expression used to parse the grpc-timeout header. */ private static final String GRPC_TIMEOUT_REGEX = "(\\d{1,8})([HMSmun])"; @@ -194,8 +192,7 @@ public void init() { // See https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md // In addition, "application/grpc" is interpreted as "application/grpc+proto". final var requestHeaders = headers.httpHeaders(); - final var requestContentType = - requestHeaders.contentType().orElse(null); + final var requestContentType = requestHeaders.contentType().orElse(null); final var ct = requestContentType == null ? "" : requestContentType.text(); final var contentType = switch (ct) { @@ -260,8 +257,7 @@ public void init() { // If the grpc-timeout header is present, determine when that timeout would occur, or // default to a future that is so far in the future it will never happen. final var timeout = requestHeaders.value(GRPC_TIMEOUT); - deadlineFuture = - timeout.map(this::scheduleDeadline).orElse(new NoopScheduledFuture<>()); + deadlineFuture = timeout.map(this::scheduleDeadline).orElse(new NoopScheduledFuture<>()); // At this point, the request itself is valid. Maybe it will still fail to be handled by // the service interface, but as far as the protocol is concerned, this was a valid @@ -279,12 +275,11 @@ public void init() { // Create the "options" to make available to the ServiceInterface. These options are // used to decide on the best way to parse or handle the request. - final var options = - new Options( - Optional.ofNullable(headers.authority()), // the client (see http2 spec) - contentType.equals(APPLICATION_GRPC_PROTO), - contentType.equals(APPLICATION_GRPC_JSON), - contentType); + final var options = new Options( + Optional.ofNullable(headers.authority()), // the client (see http2 spec) + contentType.equals(APPLICATION_GRPC_PROTO), + contentType.equals(APPLICATION_GRPC_JSON), + contentType); // Setup the subscribers. The "outgoing" subscriber will send messages to the client. // This is given to the "open" method on the service to allow it to send messages to @@ -347,82 +342,72 @@ public void data(@NonNull final Http2FrameHeader header, @NonNull final BufferDa // bytes before the stream is closed. while (data.available() > 0) { switch (currentReadState) { - case START: - { - // Read whether this message is compressed. We do not currently support - // compression. - final var isCompressed = (data.read() == 1); - if (isCompressed) { - // The error will eventually result in the stream being closed - throw new GrpcException( - GrpcStatus.UNIMPLEMENTED, "Compression is not supported"); - } - currentReadState = ReadState.READ_LENGTH; - numOfPartReadBytes = 0; - break; + case START: { + // Read whether this message is compressed. 
We do not currently support + // compression. + final var isCompressed = (data.read() == 1); + if (isCompressed) { + // The error will eventually result in the stream being closed + throw new GrpcException(GrpcStatus.UNIMPLEMENTED, "Compression is not supported"); } - case READ_LENGTH: - { - // if I have not read a full int yet then read more from available bytes - if (numOfPartReadBytes < Integer.BYTES) { - // we do not have enough bytes yet to read a 4 byte int - // read the bytes we do have and store them for next time - final int bytesToRead = - Math.min( - data.available(), - Integer.BYTES - numOfPartReadBytes); - data.read(partReadLengthBytes, numOfPartReadBytes, bytesToRead); - numOfPartReadBytes += bytesToRead; - } - // check if we have read all the 4 bytes of the length int32 - if (numOfPartReadBytes == Integer.BYTES) { - final long length = - ((long) partReadLengthBytes[0] & 0xFF) << 24 - | ((long) partReadLengthBytes[1] & 0xFF) << 16 - | ((long) partReadLengthBytes[2] & 0xFF) << 8 - | ((long) partReadLengthBytes[3] & 0xFF); - if (length > config.maxMessageSizeBytes()) { - throw new GrpcException( - GrpcStatus.INVALID_ARGUMENT, - "Message size exceeds maximum allowed size"); - } - // Create a buffer to hold the message. We sadly cannot reuse this buffer - // because once we have filled it and wrapped it in Bytes and sent it to the - // handler, some user code may grab and hold that Bytes object for an arbitrary - // amount of time, and if we were to scribble into the same byte array, we - // would break the application. So we need a new buffer each time :-( - entityBytes = new byte[(int) length]; - entityBytesIndex = 0; - // done with length now, so move on to next state - currentReadState = ReadState.READ_ENTITY_BYTES; - } - break; + currentReadState = ReadState.READ_LENGTH; + numOfPartReadBytes = 0; + break; + } + case READ_LENGTH: { + // if I have not read a full int yet then read more from available bytes + if (numOfPartReadBytes < Integer.BYTES) { + // we do not have enough bytes yet to read a 4 byte int + // read the bytes we do have and store them for next time + final int bytesToRead = Math.min(data.available(), Integer.BYTES - numOfPartReadBytes); + data.read(partReadLengthBytes, numOfPartReadBytes, bytesToRead); + numOfPartReadBytes += bytesToRead; } - case READ_ENTITY_BYTES: - { - // By the time we get here, entityBytes is no longer null. It may be empty, or it - // may already have been partially populated from a previous iteration. It may be - // that the number of bytes available to be read is larger than just this one - // message. So we need to be careful to read, from what is available, only up to - // the message length, and to leave the rest for the next iteration. - final int available = data.available(); - final int numBytesToRead = - Math.min(entityBytes.length - entityBytesIndex, available); - data.read(entityBytes, entityBytesIndex, numBytesToRead); - entityBytesIndex += numBytesToRead; - - // If we have completed reading the message, then we can proceed. 
- if (entityBytesIndex == entityBytes.length) { - currentReadState = ReadState.START; - // Grab and wrap the bytes and reset to being reading the next - // message - final var bytes = Bytes.wrap(entityBytes); - pipeline.onNext(bytes); - entityBytesIndex = 0; - entityBytes = null; + // check if we have read all the 4 bytes of the length int32 + if (numOfPartReadBytes == Integer.BYTES) { + final long length = ((long) partReadLengthBytes[0] & 0xFF) << 24 + | ((long) partReadLengthBytes[1] & 0xFF) << 16 + | ((long) partReadLengthBytes[2] & 0xFF) << 8 + | ((long) partReadLengthBytes[3] & 0xFF); + if (length > config.maxMessageSizeBytes()) { + throw new GrpcException( + GrpcStatus.INVALID_ARGUMENT, "Message size exceeds maximum allowed size"); } - break; + // Create a buffer to hold the message. We sadly cannot reuse this buffer + // because once we have filled it and wrapped it in Bytes and sent it to the + // handler, some user code may grab and hold that Bytes object for an arbitrary + // amount of time, and if we were to scribble into the same byte array, we + // would break the application. So we need a new buffer each time :-( + entityBytes = new byte[(int) length]; + entityBytesIndex = 0; + // done with length now, so move on to next state + currentReadState = ReadState.READ_ENTITY_BYTES; + } + break; + } + case READ_ENTITY_BYTES: { + // By the time we get here, entityBytes is no longer null. It may be empty, or it + // may already have been partially populated from a previous iteration. It may be + // that the number of bytes available to be read is larger than just this one + // message. So we need to be careful to read, from what is available, only up to + // the message length, and to leave the rest for the next iteration. + final int available = data.available(); + final int numBytesToRead = Math.min(entityBytes.length - entityBytesIndex, available); + data.read(entityBytes, entityBytesIndex, numBytesToRead); + entityBytesIndex += numBytesToRead; + + // If we have completed reading the message, then we can proceed. + if (entityBytesIndex == entityBytes.length) { + currentReadState = ReadState.START; + // Grab and wrap the bytes and reset to being reading the next + // message + final var bytes = Bytes.wrap(entityBytes); + pipeline.onNext(bytes); + entityBytesIndex = 0; + entityBytes = null; } + break; + } } } @@ -488,28 +473,24 @@ private ScheduledFuture scheduleDeadline(@NonNull final String timeout) { if (matcher.matches()) { final var num = Integer.parseInt(matcher.group(1)); final var unit = matcher.group(2); - final var deadline = - System.nanoTime() - * TimeUnit.NANOSECONDS.convert( - num, - switch (unit) { - case "H" -> TimeUnit.HOURS; - case "M" -> TimeUnit.MINUTES; - case "S" -> TimeUnit.SECONDS; - case "m" -> TimeUnit.MILLISECONDS; - case "u" -> TimeUnit.MICROSECONDS; - case "n" -> TimeUnit.NANOSECONDS; - // This should NEVER be reachable, because the matcher - // would not have matched. 
- default -> throw new GrpcException( - GrpcStatus.INTERNAL, "Invalid unit: " + unit); - }); - return deadlineDetector.scheduleDeadline( - deadline, - () -> { - route.deadlineExceededCounter().increment(); - pipeline.onError(new GrpcException(GrpcStatus.DEADLINE_EXCEEDED)); - }); + final var deadline = System.nanoTime() + * TimeUnit.NANOSECONDS.convert( + num, + switch (unit) { + case "H" -> TimeUnit.HOURS; + case "M" -> TimeUnit.MINUTES; + case "S" -> TimeUnit.SECONDS; + case "m" -> TimeUnit.MILLISECONDS; + case "u" -> TimeUnit.MICROSECONDS; + case "n" -> TimeUnit.NANOSECONDS; + // This should NEVER be reachable, because the matcher + // would not have matched. + default -> throw new GrpcException(GrpcStatus.INTERNAL, "Invalid unit: " + unit); + }); + return deadlineDetector.scheduleDeadline(deadline, () -> { + route.deadlineExceededCounter().increment(); + pipeline.onError(new GrpcException(GrpcStatus.DEADLINE_EXCEEDED)); + }); } return new NoopScheduledFuture<>(); @@ -549,10 +530,7 @@ private void sendResponseHeaders( final var http2Headers = Http2Headers.create(grpcHeaders); streamWriter.writeHeaders( - http2Headers, - streamId, - Http2Flag.HeaderFlags.create(Http2Flag.END_OF_HEADERS), - flowControl); + http2Headers, streamId, Http2Flag.HeaderFlags.create(Http2Flag.END_OF_HEADERS), flowControl); } /** @@ -568,9 +546,14 @@ private void sendResponseHeaders( * */ private class TrailerBuilder { - @NonNull private GrpcStatus grpcStatus = GrpcStatus.OK; - @Nullable private String statusMessage; - @NonNull private final List
customMetadata = emptyList(); // Never set + @NonNull + private GrpcStatus grpcStatus = GrpcStatus.OK; + + @Nullable + private String statusMessage; + + @NonNull + private final List
customMetadata = emptyList(); // Never set /** * Sets the gRPC status to return. Normally, the HTTP status will always be 200, while the @@ -600,9 +583,7 @@ public final void send() { * Actually sends the headers. This method exists so that "trailers-only" can call it to * send the normal headers. */ - protected void send( - @NonNull final WritableHeaders httpHeaders, - @NonNull final Http2Headers http2Headers) { + protected void send(@NonNull final WritableHeaders httpHeaders, @NonNull final Http2Headers http2Headers) { httpHeaders.set(requireNonNull(GrpcHeaders.header(requireNonNull(grpcStatus)))); httpHeaders.set(GRPC_ACCEPT_ENCODING, IDENTITY); customMetadata.forEach(httpHeaders::set); @@ -614,8 +595,7 @@ protected void send( streamWriter.writeHeaders( http2Headers, streamId, - Http2Flag.HeaderFlags.create( - Http2Flag.END_OF_HEADERS | Http2Flag.END_OF_STREAM), + Http2Flag.HeaderFlags.create(Http2Flag.END_OF_HEADERS | Http2Flag.END_OF_STREAM), flowControl); } } @@ -651,9 +631,7 @@ public TrailerOnlyBuilder httpStatus(@Nullable final Status httpStatus) { * @param http2Headers The HTTP2 pseudo-headers */ @Override - protected void send( - @NonNull final WritableHeaders httpHeaders, - @NonNull final Http2Headers http2Headers) { + protected void send(@NonNull final WritableHeaders httpHeaders, @NonNull final Http2Headers http2Headers) { http2Headers.status(httpStatus); httpHeaders.contentType(requireNonNull(contentType)); super.send(httpHeaders, http2Headers); @@ -679,12 +657,8 @@ public void onNext(@NonNull final Bytes response) { bufferData.write(0); // 0 means no compression bufferData.writeUnsignedInt32(length); bufferData.write(response.toByteArray()); - final var header = - Http2FrameHeader.create( - bufferData.available(), - Http2FrameTypes.DATA, - Http2Flag.DataFlags.create(0), - streamId); + final var header = Http2FrameHeader.create( + bufferData.available(), Http2FrameTypes.DATA, Http2Flag.DataFlags.create(0), streamId); // This method may throw an UncheckedIOException. If this happens, the connection with the client // has been violently terminated, and we should raise the error, and we should throw an exception @@ -722,19 +696,17 @@ public void onComplete() { deadlineFuture.cancel(false); - currentStreamState.getAndUpdate( - currentValue -> { - if (requireNonNull(currentValue) == Http2StreamState.OPEN) { - return Http2StreamState.HALF_CLOSED_LOCAL; - } - return Http2StreamState.CLOSED; - }); + currentStreamState.getAndUpdate(currentValue -> { + if (requireNonNull(currentValue) == Http2StreamState.OPEN) { + return Http2StreamState.HALF_CLOSED_LOCAL; + } + return Http2StreamState.CLOSED; + }); } } /** Simple implementation of the {@link ServiceInterface.RequestOptions} interface. 
*/ - private record Options( - Optional authority, boolean isProtobuf, boolean isJson, String contentType) + private record Options(Optional authority, boolean isProtobuf, boolean isJson, String contentType) implements ServiceInterface.RequestOptions {} /** diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolProvider.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolProvider.java index 7d5df767..40753d20 100644 --- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolProvider.java +++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolProvider.java @@ -36,8 +36,7 @@ public Class protocolConfigType() { @Override @NonNull - public Http2SubProtocolSelector create( - @NonNull final PbjConfig config, @NonNull final ProtocolConfigs configs) { + public Http2SubProtocolSelector create(@NonNull final PbjConfig config, @NonNull final ProtocolConfigs configs) { return new PbjProtocolSelector(config); } } diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolSelector.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolSelector.java index 338b5f3b..772bd7fd 100644 --- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolSelector.java +++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolSelector.java @@ -5,20 +5,12 @@ import com.hedera.pbj.grpc.helidon.config.PbjConfig; import edu.umd.cs.findbugs.annotations.NonNull; -import io.helidon.common.buffers.BufferData; import io.helidon.http.HttpPrologue; import io.helidon.http.Method; -import io.helidon.http.Status; -import io.helidon.http.WritableHeaders; -import io.helidon.http.http2.FlowControl; -import io.helidon.http.http2.Http2Flag; -import io.helidon.http.http2.Http2FrameHeader; import io.helidon.http.http2.Http2Headers; -import io.helidon.http.http2.Http2RstStream; import io.helidon.http.http2.Http2Settings; import io.helidon.http.http2.Http2StreamState; import io.helidon.http.http2.Http2StreamWriter; -import io.helidon.http.http2.Http2WindowUpdate; import io.helidon.http.http2.StreamFlowControl; import io.helidon.metrics.api.Counter; import io.helidon.metrics.api.Metrics; @@ -42,8 +34,7 @@ class PbjProtocolSelector implements Http2SubProtocolSelector { private final PbjConfig config; private final DeadlineDetector deadlineDetector; - private final ScheduledExecutorService deadlineExecutorService = - Executors.newSingleThreadScheduledExecutor(); + private final ScheduledExecutorService deadlineExecutorService = Executors.newSingleThreadScheduledExecutor(); private final Counter requestCounter; private final Counter failedRequestCounter; @@ -53,22 +44,15 @@ class PbjProtocolSelector implements Http2SubProtocolSelector { */ PbjProtocolSelector(@NonNull final PbjConfig config) { this.config = requireNonNull(config); - this.deadlineDetector = - (deadline, onDeadlineExceeded) -> - deadlineExecutorService.schedule( - onDeadlineExceeded, deadline, TimeUnit.NANOSECONDS); + this.deadlineDetector = (deadline, onDeadlineExceeded) -> + deadlineExecutorService.schedule(onDeadlineExceeded, deadline, TimeUnit.NANOSECONDS); final var metricRegistry = Metrics.globalRegistry(); - this.requestCounter = - metricRegistry.getOrCreate( - Counter.builder("pbj.grpc.requests") - .scope("vendor") - .description("The number of gRPC requests")); - this.failedRequestCounter = - metricRegistry.getOrCreate( - 
Counter.builder("pbj.grpc.request.failures") - .scope("vendor") - .description("The number of failed gRPC requests")); + this.requestCounter = metricRegistry.getOrCreate( + Counter.builder("pbj.grpc.requests").scope("vendor").description("The number of gRPC requests")); + this.failedRequestCounter = metricRegistry.getOrCreate(Counter.builder("pbj.grpc.request.failures") + .scope("vendor") + .description("The number of failed gRPC requests")); } /** @@ -114,8 +98,7 @@ public SubProtocolResult subProtocol( final var route = routing.findRoute(prologue); if (route == null) { this.failedRequestCounter.increment(); - return new SubProtocolResult( - true, new RouteNotFoundHandler(streamWriter, streamId, currentStreamState)); + return new SubProtocolResult(true, new RouteNotFoundHandler(streamWriter, streamId, currentStreamState)); } // This is a valid call! diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjRouting.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjRouting.java index 4b22c294..ddb905c8 100644 --- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjRouting.java +++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjRouting.java @@ -23,7 +23,8 @@ */ public class PbjRouting implements Routing { /** The list of routes. */ - @NonNull private final List routes; + @NonNull + private final List routes; /** * Create a new instance. This is private, so it can only be created using the builder method. diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjServiceRoute.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjServiceRoute.java index 6e618e12..04aa64e3 100644 --- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjServiceRoute.java +++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjServiceRoute.java @@ -24,10 +24,9 @@ class PbjServiceRoute extends PbjRoute { */ PbjServiceRoute(@NonNull final ServiceInterface service) { this.serviceName = requireNonNull(service).serviceName(); - this.routes = - service.methods().stream() - .map(method -> new PbjMethodRoute(service, method)) - .toList(); + this.routes = service.methods().stream() + .map(method -> new PbjMethodRoute(service, method)) + .toList(); } @Override @@ -40,11 +39,7 @@ PbjMethodRoute toPbjMethodRoute(@NonNull final HttpPrologue prologue) { } } throw new IllegalStateException( - "PbjServiceRoute(" - + serviceName - + ") accepted prologue, " - + "but cannot provide route: " - + prologue); + "PbjServiceRoute(" + serviceName + ") accepted prologue, " + "but cannot provide route: " + prologue); } @Override diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/RouteNotFoundHandler.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/RouteNotFoundHandler.java index d3075e58..139b1135 100644 --- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/RouteNotFoundHandler.java +++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/RouteNotFoundHandler.java @@ -14,14 +14,12 @@ import io.helidon.http.http2.Http2StreamWriter; import io.helidon.http.http2.Http2WindowUpdate; import io.helidon.webserver.http2.spi.Http2SubProtocolSelector; - import java.util.Objects; /** * A handler for the case where the path is not found. 
*/ -final class RouteNotFoundHandler - implements Http2SubProtocolSelector.SubProtocolHandler { +final class RouteNotFoundHandler implements Http2SubProtocolSelector.SubProtocolHandler { private final Http2StreamWriter streamWriter; private final int streamId; private Http2StreamState currentStreamState; @@ -50,8 +48,7 @@ public void init() { streamWriter.writeHeaders( http2Headers, streamId, - Http2Flag.HeaderFlags.create( - Http2Flag.END_OF_HEADERS | Http2Flag.END_OF_STREAM), + Http2Flag.HeaderFlags.create(Http2Flag.END_OF_HEADERS | Http2Flag.END_OF_STREAM), FlowControl.Outbound.NOOP); currentStreamState = Http2StreamState.HALF_CLOSED_LOCAL; } diff --git a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterService.java b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterService.java index fcfd640c..31c301eb 100644 --- a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterService.java +++ b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterService.java @@ -100,8 +100,7 @@ default Pipeline open( } @NonNull - private HelloRequest parseRequest( - @NonNull final Bytes message, @NonNull final RequestOptions options) + private HelloRequest parseRequest(@NonNull final Bytes message, @NonNull final RequestOptions options) throws InvalidProtocolBufferException { Objects.requireNonNull(message); @@ -119,8 +118,7 @@ private HelloRequest parseRequest( } @NonNull - private Bytes createReply( - @NonNull final HelloReply reply, @NonNull final RequestOptions options) + private Bytes createReply(@NonNull final HelloReply reply, @NonNull final RequestOptions options) throws InvalidProtocolBufferException { Objects.requireNonNull(reply); diff --git a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterServiceImpl.java b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterServiceImpl.java index a1b653ee..e070bd55 100644 --- a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterServiceImpl.java +++ b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterServiceImpl.java @@ -23,8 +23,7 @@ public HelloReply sayHello(HelloRequest request) { // Streams of stuff coming from the client, with a single response. @Override - public Pipeline sayHelloStreamRequest( - Pipeline replies) { + public Pipeline sayHelloStreamRequest(Pipeline replies) { final var names = new ArrayList(); return new Pipeline<>() { @Override @@ -49,10 +48,9 @@ public void onError(Throwable throwable) { @Override public void onComplete() { - final var reply = - HelloReply.newBuilder() - .setMessage("Hello " + String.join(", ", names)) - .build(); + final var reply = HelloReply.newBuilder() + .setMessage("Hello " + String.join(", ", names)) + .build(); replies.onNext(reply); replies.onComplete(); } @@ -60,8 +58,7 @@ public void onComplete() { } @Override - public void sayHelloStreamReply( - HelloRequest request, Pipeline replies) { + public void sayHelloStreamReply(HelloRequest request, Pipeline replies) { for (int i = 0; i < 10; i++) { replies.onNext(HelloReply.newBuilder().setMessage("Hello!").build()); } @@ -70,8 +67,7 @@ public void sayHelloStreamReply( } @Override - public Pipeline sayHelloStreamBidi( - Pipeline replies) { + public Pipeline sayHelloStreamBidi(Pipeline replies) { // Here we receive info from the client. In this case, it is a stream of requests with // names. We will respond with a stream of replies. 
return new Pipeline<>() { @@ -87,8 +83,9 @@ public void onSubscribe(Flow.Subscription subscription) { @Override public void onNext(HelloRequest item) { - replies.onNext( - HelloReply.newBuilder().setMessage("Hello " + item.getName()).build()); + replies.onNext(HelloReply.newBuilder() + .setMessage("Hello " + item.getName()) + .build()); } @Override diff --git a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandlerTest.java b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandlerTest.java index 7f24e03a..b1dffda5 100644 --- a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandlerTest.java +++ b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandlerTest.java @@ -59,8 +59,7 @@ class PbjProtocolHandlerTest { @BeforeEach void setUp() { - headers = Http2Headers.create(WritableHeaders.create() - .add(HeaderNames.CONTENT_TYPE, "application/grpc+proto")); + headers = Http2Headers.create(WritableHeaders.create().add(HeaderNames.CONTENT_TYPE, "application/grpc+proto")); streamWriter = new StreamWriterStub(); streamId = 1; flowControl = new OutboundFlowControlStub(); @@ -112,13 +111,16 @@ void unsupportedContentType(String contentType) { // Content-Type: application/grpc // Grpc-Message: invalid gRPC request content-type "" // Grpc-Status: 3 - final var responseHeaders = responseHeaderFrame.httpHeaders().stream() - .collect(Collectors.toMap(Header::name, Header::values)); - assertThat(responseHeaders).contains( - entry("grpc-status", "" + GrpcStatus.INVALID_ARGUMENT.ordinal()), - entry("grpc-message", UriEncoding.encodeUri("invalid gRPC request content-type \"" + contentType + "\"")), - entry("Content-Type", "application/grpc"), - entry("grpc-accept-encoding", "identity")); + final var responseHeaders = + responseHeaderFrame.httpHeaders().stream().collect(Collectors.toMap(Header::name, Header::values)); + assertThat(responseHeaders) + .contains( + entry("grpc-status", "" + GrpcStatus.INVALID_ARGUMENT.ordinal()), + entry( + "grpc-message", + UriEncoding.encodeUri("invalid gRPC request content-type \"" + contentType + "\"")), + entry("Content-Type", "application/grpc"), + entry("grpc-accept-encoding", "identity")); // The stream should be closed assertThat(handler.streamState()).isEqualTo(Http2StreamState.CLOSED); @@ -189,13 +191,17 @@ void unsupportedGrpcEncodings(String encoding) { // Content-Type: application/grpc // Grpc-Message: grpc: Decompressor is not installed for grpc-encoding "[bad encoding here]" // Grpc-Status: 12 - final var responseHeaders = responseHeaderFrame.httpHeaders().stream() - .collect(Collectors.toMap(Header::name, Header::values)); - assertThat(responseHeaders).contains( - entry("grpc-status", "" + GrpcStatus.UNIMPLEMENTED.ordinal()), - entry("grpc-message", UriEncoding.encodeUri("Decompressor is not installed for grpc-encoding \"" + encoding + "\"")), - entry("Content-Type", "application/grpc"), - entry("grpc-accept-encoding", "identity")); + final var responseHeaders = + responseHeaderFrame.httpHeaders().stream().collect(Collectors.toMap(Header::name, Header::values)); + assertThat(responseHeaders) + .contains( + entry("grpc-status", "" + GrpcStatus.UNIMPLEMENTED.ordinal()), + entry( + "grpc-message", + UriEncoding.encodeUri( + "Decompressor is not installed for grpc-encoding \"" + encoding + "\"")), + entry("Content-Type", "application/grpc"), + entry("grpc-accept-encoding", "identity")); // The stream should be closed 
assertThat(handler.streamState()).isEqualTo(Http2StreamState.CLOSED); @@ -209,16 +215,21 @@ void unsupportedGrpcEncodings(String encoding) { * * @param encoding */ - @ValueSource(strings = { - // Simple identity strings with qualifiers - "identity", "identity;q=0.5", "identity;", "identity;nonsense", - // an identity with and without a qualifier in a list of encodings - "gzip, deflate;q=0.5, identity;q=0.1", - "gzip, deflate;q=0.5, identity", - "gzip, identity;q=0.1, deflate;q=0.5", - "gzip, identity, deflate;q=0.5", - "identity;q=.9, deflate;q=0.5, gzip;q=0.1, br;q=0.1", - "identity, deflate;q=0.5, gzip;q=0.1, br;q=0.1"}) + @ValueSource( + strings = { + // Simple identity strings with qualifiers + "identity", + "identity;q=0.5", + "identity;", + "identity;nonsense", + // an identity with and without a qualifier in a list of encodings + "gzip, deflate;q=0.5, identity;q=0.1", + "gzip, deflate;q=0.5, identity", + "gzip, identity;q=0.1, deflate;q=0.5", + "gzip, identity, deflate;q=0.5", + "identity;q=.9, deflate;q=0.5, gzip;q=0.1, br;q=0.1", + "identity, deflate;q=0.5, gzip;q=0.1, br;q=0.1" + }) @ParameterizedTest void supportedComplexEncodingsWithIdentity(String encoding) { final var h = WritableHeaders.create(); @@ -247,11 +258,10 @@ void supportedComplexEncodingsWithIdentity(String encoding) { final var responseHeaderFrame = streamWriter.writtenHeaders.getFirst(); assertThat(responseHeaderFrame.status()).isEqualTo(Status.OK_200); - final var responseHeaders = responseHeaderFrame.httpHeaders().stream() - .collect(Collectors.toMap(Header::name, Header::values)); - assertThat(responseHeaders).contains( - entry("Content-Type", "application/grpc+proto"), - entry("grpc-accept-encoding", "identity")); + final var responseHeaders = + responseHeaderFrame.httpHeaders().stream().collect(Collectors.toMap(Header::name, Header::values)); + assertThat(responseHeaders) + .contains(entry("Content-Type", "application/grpc+proto"), entry("grpc-accept-encoding", "identity")); // The stream should be closed assertThat(handler.streamState()).isEqualTo(Http2StreamState.HALF_CLOSED_REMOTE); @@ -274,14 +284,17 @@ public void writeData(Http2FrameData frame, FlowControl.Outbound flowControl) { // Within this test, the replyRef will be set once when the setup is complete, and then // will be available for the test code to use to call onNext, onError, etc. as required. 
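The grpc-accept-encoding values exercised above are comma-separated lists whose entries may carry ";q=..." qualifiers; for these tests the interesting question is only whether "identity" appears among the tokens. A simplified, hypothetical check mirroring those header shapes (the production parser may well differ):

    import java.util.Arrays;

    static boolean listsIdentity(String grpcAcceptEncoding) {
        // "gzip, deflate;q=0.5, identity;q=0.1" -> tokens gzip, deflate, identity
        return Arrays.stream(grpcAcceptEncoding.split(","))
                .map(entry -> entry.trim().split(";", 2)[0].trim())
                .anyMatch("identity"::equalsIgnoreCase);
    }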
final var replyRef = new AtomicReference>(); - route = new PbjMethodRoute(new GreeterServiceImpl() { - @Override - public void sayHelloStreamReply(HelloRequest request, Pipeline replies) { - replyRef.set(replies); - } - }, GreeterService.GreeterMethod.sayHelloStreamReply); + route = new PbjMethodRoute( + new GreeterServiceImpl() { + @Override + public void sayHelloStreamReply(HelloRequest request, Pipeline replies) { + replyRef.set(replies); + } + }, + GreeterService.GreeterMethod.sayHelloStreamReply); - final var handler = new PbjProtocolHandler(headers, streamWriter, streamId, flowControl, currentStreamState, config, route, deadlineDetector); + final var handler = new PbjProtocolHandler( + headers, streamWriter, streamId, flowControl, currentStreamState, config, route, deadlineDetector); handler.init(); sendAllData(handler, createRequestData("Alice")); @@ -292,8 +305,7 @@ public void sayHelloStreamReply(HelloRequest request, Pipeline replies.onNext(failingReply)) - .isInstanceOf(Exception.class); + assertThatThrownBy(() -> replies.onNext(failingReply)).isInstanceOf(Exception.class); assertThat(route.requestCounter().count()).isEqualTo(1); assertThat(route.failedGrpcRequestCounter().count()).isEqualTo(0); @@ -330,7 +342,8 @@ private BufferData createDataFrameBytes(Bytes data) { } private Http2FrameHeader createDataFrameHeader(int length) { - return Http2FrameHeader.create(length + 5, Http2FrameTypes.DATA, Http2Flag.DataFlags.create(Http2Flags.END_STREAM), streamId); + return Http2FrameHeader.create( + length + 5, Http2FrameTypes.DATA, Http2Flag.DataFlags.create(Http2Flags.END_STREAM), streamId); } private static final class OutboundFlowControlStub implements FlowControl.Outbound { @@ -346,9 +359,7 @@ public Http2FrameData[] cut(Http2FrameData frame) { } @Override - public void blockTillUpdate() { - - } + public void blockTillUpdate() {} @Override public int maxFrameSize() { @@ -356,14 +367,10 @@ public int maxFrameSize() { } @Override - public void decrementWindowSize(int decrement) { - - } + public void decrementWindowSize(int decrement) {} @Override - public void resetStreamWindowSize(int size) { - - } + public void resetStreamWindowSize(int size) {} @Override public int getRemainingWindowSize() { @@ -375,7 +382,6 @@ private static class StreamWriterStub implements Http2StreamWriter { private final List writtenDataFrames = new ArrayList<>(); private final List writtenHeaders = new ArrayList<>(); - @Override public void write(Http2FrameData frame) { writtenDataFrames.add(frame); @@ -387,13 +393,19 @@ public void writeData(Http2FrameData frame, FlowControl.Outbound flowControl) { } @Override - public int writeHeaders(Http2Headers headers, int streamId, Http2Flag.HeaderFlags flags, FlowControl.Outbound flowControl) { + public int writeHeaders( + Http2Headers headers, int streamId, Http2Flag.HeaderFlags flags, FlowControl.Outbound flowControl) { writtenHeaders.add(headers); return 0; } @Override - public int writeHeaders(Http2Headers headers, int streamId, Http2Flag.HeaderFlags flags, Http2FrameData dataFrame, FlowControl.Outbound flowControl) { + public int writeHeaders( + Http2Headers headers, + int streamId, + Http2Flag.HeaderFlags flags, + Http2FrameData dataFrame, + FlowControl.Outbound flowControl) { writtenHeaders.add(headers); writtenDataFrames.add(dataFrame); return 0; @@ -486,9 +498,8 @@ public List methods() { @NonNull @Override public Pipeline open( - @NonNull Method method, - @NonNull RequestOptions opts, - @NonNull Pipeline responses) throws GrpcException { + @NonNull Method 
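The "length + 5" in createDataFrameHeader above comes from gRPC's message framing: every message carried on a DATA frame is prefixed by one compressed-flag byte and a four-byte big-endian length. A minimal framing helper showing that layout (illustrative; this is not the test's createRequestData helper):

    import java.nio.ByteBuffer;

    static byte[] frameGrpcMessage(byte[] protobufPayload) {
        ByteBuffer buf = ByteBuffer.allocate(5 + protobufPayload.length);
        buf.put((byte) 0);                  // compressed flag: 0 = uncompressed
        buf.putInt(protobufPayload.length); // message length, big-endian (ByteBuffer default)
        buf.put(protobufPayload);
        return buf.array();
    }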
method, @NonNull RequestOptions opts, @NonNull Pipeline responses) + throws GrpcException { this.calledMethod = method; this.opts = opts; return new Pipeline<>() { diff --git a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjTest.java b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjTest.java index c6eea83d..9dd48df6 100644 --- a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjTest.java +++ b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjTest.java @@ -57,12 +57,9 @@ import org.junit.jupiter.params.provider.ValueSource; class PbjTest { - private static final MediaType APPLICATION_GRPC_PROTO = - HttpMediaType.create("application/grpc+proto"); - private static final MediaType APPLICATION_GRPC_JSON = - HttpMediaType.create("application/grpc+json"); - private static final MediaType APPLICATION_GRPC_STRING = - HttpMediaType.create("application/grpc+string"); + private static final MediaType APPLICATION_GRPC_PROTO = HttpMediaType.create("application/grpc+proto"); + private static final MediaType APPLICATION_GRPC_JSON = HttpMediaType.create("application/grpc+json"); + private static final MediaType APPLICATION_GRPC_STRING = HttpMediaType.create("application/grpc+string"); private static final MediaType APPLICATION_RANDOM = HttpMediaType.create("application/random"); private static final String SAY_HELLO_PATH = "/greeter.Greeter/sayHello"; @@ -83,16 +80,17 @@ static void setup() { PROXY = new GreeterProxy(); // Set up the server - SERVER = - WebServer.builder() - .port(8080) - .addRouting(PbjRouting.builder().service(PROXY)) - .build() - .start(); + SERVER = WebServer.builder() + .port(8080) + .addRouting(PbjRouting.builder().service(PROXY)) + .build() + .start(); CLIENT = Http2Client.builder().baseUri("http://localhost:8080").build(); - CHANNEL = ManagedChannelBuilder.forAddress("localhost", 8080).usePlaintext().build(); + CHANNEL = ManagedChannelBuilder.forAddress("localhost", 8080) + .usePlaintext() + .build(); } @AfterAll @@ -128,11 +126,10 @@ class Http2Tests { */ @Test void badCaseOnPathIsNotFound() { - try (var response = - CLIENT.post() - .contentType(APPLICATION_GRPC_PROTO) - .path(SAY_HELLO_PATH.toUpperCase()) - .submit(messageBytes(SIMPLE_REQUEST))) { + try (var response = CLIENT.post() + .contentType(APPLICATION_GRPC_PROTO) + .path(SAY_HELLO_PATH.toUpperCase()) + .submit(messageBytes(SIMPLE_REQUEST))) { assertThat(response.status().code()).isEqualTo(200); assertThat(grpcStatus(response)).isEqualTo(GrpcStatus.NOT_FOUND); } @@ -150,11 +147,10 @@ void badCaseOnPathIsNotFound() { @ParameterizedTest @ValueSource(strings = {"GET", "PUT", "DELETE", "PATCH", "OPTIONS", "HEAD", "TRACE"}) void mustUsePost(final String methodName) { - try (var response = - CLIENT.method(Method.create(methodName)) - .contentType(APPLICATION_GRPC_PROTO) - .path(SAY_HELLO_PATH) - .request()) { + try (var response = CLIENT.method(Method.create(methodName)) + .contentType(APPLICATION_GRPC_PROTO) + .path(SAY_HELLO_PATH) + .request()) { // This is consistent with existing behavior on Helidon, but I would have expected // the response code @@ -180,8 +176,7 @@ class ContentTypeTests { */ @Test void contentTypeMustBeSet() { - try (var response = - CLIENT.post().path(SAY_HELLO_PATH).submit(messageBytes(SIMPLE_REQUEST))) { + try (var response = CLIENT.post().path(SAY_HELLO_PATH).submit(messageBytes(SIMPLE_REQUEST))) { assertThat(response.status().code()).isEqualTo(415); } @@ -193,11 +188,10 @@ void contentTypeMustBeSet() { */ @Test 
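Pulled out of the test fixture above, the end-to-end wiring is compact: PbjRouting adapts a ServiceInterface implementation onto a Helidon WebServer, and a stock grpc-java stub can then call it. A standalone sketch using the same classes and port as the tests:

    WebServer server = WebServer.builder()
            .port(8080)
            .addRouting(PbjRouting.builder().service(new GreeterServiceImpl()))
            .build()
            .start();

    ManagedChannel channel = ManagedChannelBuilder.forAddress("localhost", 8080)
            .usePlaintext()
            .build();
    HelloReply reply = GreeterGrpc.newBlockingStub(channel)
            .sayHello(HelloRequest.newBuilder().setName("Alice").build());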
void contentTypeMustStartWithApplicationGrpc() { - try (var response = - CLIENT.post() - .path(SAY_HELLO_PATH) - .contentType(APPLICATION_RANDOM) - .submit(messageBytes(SIMPLE_REQUEST))) { + try (var response = CLIENT.post() + .path(SAY_HELLO_PATH) + .contentType(APPLICATION_RANDOM) + .submit(messageBytes(SIMPLE_REQUEST))) { assertThat(response.status().code()).isEqualTo(415); } @@ -206,18 +200,16 @@ void contentTypeMustStartWithApplicationGrpc() { /** Verify that "application/grpc+json" requests are accepted */ @Test void contentTypeCanBeJSON() { - try (var response = - CLIENT.post() - .path(SAY_HELLO_PATH) - .contentType(APPLICATION_GRPC_JSON) - .submit(messageBytesJson(SIMPLE_REQUEST))) { + try (var response = CLIENT.post() + .path(SAY_HELLO_PATH) + .contentType(APPLICATION_GRPC_JSON) + .submit(messageBytesJson(SIMPLE_REQUEST))) { assertThat(response.status().code()).isEqualTo(200); assertThat(response.headers().contentType().orElseThrow().text()) .isEqualTo("application/grpc+json"); - final var reply = - decodeJsonReply(new ReadableStreamingData(response.inputStream())); + final var reply = decodeJsonReply(new ReadableStreamingData(response.inputStream())); assertThat(reply).isEqualTo(SIMPLE_REPLY); } } @@ -229,11 +221,10 @@ void contentTypeCanBeJSON() { @ParameterizedTest @ValueSource(strings = {"application/grpc+proto", "application/grpc"}) void contentTypeCanBeProtobuf(final String contentType) { - try (var response = - CLIENT.post() - .path(SAY_HELLO_PATH) - .contentType(MediaTypes.create(contentType)) - .submit(messageBytes(SIMPLE_REQUEST))) { + try (var response = CLIENT.post() + .path(SAY_HELLO_PATH) + .contentType(MediaTypes.create(contentType)) + .submit(messageBytes(SIMPLE_REQUEST))) { assertThat(response.status().code()).isEqualTo(200); assertThat(response.headers().contentType().orElseThrow().text()) @@ -247,11 +238,10 @@ void contentTypeCanBeProtobuf(final String contentType) { /** Verify that a custom suffix of the content type is supported */ @Test void contentTypeCanBeCustom() throws IOException { - try (var response = - CLIENT.post() - .path(SAY_HELLO_PATH) - .contentType(APPLICATION_GRPC_STRING) - .submit(messageBytes("dude".getBytes(StandardCharsets.UTF_8)))) { + try (var response = CLIENT.post() + .path(SAY_HELLO_PATH) + .contentType(APPLICATION_GRPC_STRING) + .submit(messageBytes("dude".getBytes(StandardCharsets.UTF_8)))) { assertThat(response.status().code()).isEqualTo(200); assertThat(response.headers().contentType().orElseThrow().text()) @@ -273,19 +263,22 @@ class GrpcEncodingTests { */ @Test void acceptEncodingExcludesAllSupportedEncodings() { - try (var response = - CLIENT.post() - .contentType(APPLICATION_GRPC_PROTO) - .path(SAY_HELLO_PATH) - .header(HeaderNames.create("grpc-accept-encoding"), "gzip, deflate") - .submit(messageBytes(SIMPLE_REQUEST))) { + try (var response = CLIENT.post() + .contentType(APPLICATION_GRPC_PROTO) + .path(SAY_HELLO_PATH) + .header(HeaderNames.create("grpc-accept-encoding"), "gzip, deflate") + .submit(messageBytes(SIMPLE_REQUEST))) { assertThat(response.status().code()).isEqualTo(200); response.entity().consume(); assertThat(grpcStatus(response)).isEqualTo(GrpcStatus.OK); - assertThat(response.headers().get(HeaderNames.create("grpc-encoding")).get()) + assertThat(response.headers() + .get(HeaderNames.create("grpc-encoding")) + .get()) .isEqualTo("identity"); - assertThat(response.headers().get(HeaderNames.create("grpc-accept-encoding")).get()) + assertThat(response.headers() + .get(HeaderNames.create("grpc-accept-encoding")) + 
.get()) .isEqualTo("identity"); } } @@ -348,16 +341,17 @@ class CompressionTests { @ParameterizedTest @ValueSource(strings = {"gzip", "deflate", "random"}) void compressionNotSupported(final String grpcEncoding) { - try (var response = - CLIENT.post() - .contentType(APPLICATION_GRPC_PROTO) - .path(SAY_HELLO_PATH) - .header(HeaderNames.create("grpc-encoding"), grpcEncoding) - .submit(messageBytes(SIMPLE_REQUEST))) { + try (var response = CLIENT.post() + .contentType(APPLICATION_GRPC_PROTO) + .path(SAY_HELLO_PATH) + .header(HeaderNames.create("grpc-encoding"), grpcEncoding) + .submit(messageBytes(SIMPLE_REQUEST))) { assertThat(response.status().code()).isEqualTo(200); assertThat(grpcStatus(response)).isEqualTo(GrpcStatus.UNIMPLEMENTED); - assertThat(response.headers().get(HeaderNames.create("grpc-accept-encoding")).get()) + assertThat(response.headers() + .get(HeaderNames.create("grpc-accept-encoding")) + .get()) .isEqualTo("identity"); } } @@ -368,16 +362,17 @@ void compressionNotSupported(final String grpcEncoding) { */ @Test void identityIfNotSpecified() { - try (var response = - CLIENT.post() - .contentType(APPLICATION_GRPC_PROTO) - .path(SAY_HELLO_PATH) - .submit(messageBytes(SIMPLE_REQUEST))) { + try (var response = CLIENT.post() + .contentType(APPLICATION_GRPC_PROTO) + .path(SAY_HELLO_PATH) + .submit(messageBytes(SIMPLE_REQUEST))) { assertThat(response.status().code()).isEqualTo(200); response.entity().consume(); assertThat(grpcStatus(response)).isEqualTo(GrpcStatus.OK); - assertThat(response.headers().get(HeaderNames.create("grpc-accept-encoding")).get()) + assertThat(response.headers() + .get(HeaderNames.create("grpc-accept-encoding")) + .get()) .isEqualTo("identity"); } } @@ -388,17 +383,18 @@ void identityIfNotSpecified() { */ @Test void identityIfSpecified() { - try (var response = - CLIENT.post() - .contentType(APPLICATION_GRPC_PROTO) - .path(SAY_HELLO_PATH) - .header(HeaderNames.create("grpc-encoding"), "identity") - .submit(messageBytes(SIMPLE_REQUEST))) { + try (var response = CLIENT.post() + .contentType(APPLICATION_GRPC_PROTO) + .path(SAY_HELLO_PATH) + .header(HeaderNames.create("grpc-encoding"), "identity") + .submit(messageBytes(SIMPLE_REQUEST))) { assertThat(response.status().code()).isEqualTo(200); response.entity().consume(); assertThat(grpcStatus(response)).isEqualTo(GrpcStatus.OK); - assertThat(response.headers().get(HeaderNames.create("grpc-accept-encoding")).get()) + assertThat(response.headers() + .get(HeaderNames.create("grpc-accept-encoding")) + .get()) .isEqualTo("identity"); } } @@ -408,26 +404,23 @@ void identityIfSpecified() { class DeadlineTests { @Test void deadlineExceeded() { - PROXY.svc = - new GreeterAdapter() { - @Override - public HelloReply sayHello(HelloRequest request) { - try { - // This should be plenty of time. Shouldn't be flaky... - Thread.sleep(Duration.ofSeconds(1)); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } - - return HelloReply.newBuilder() - .setMessage("Hello " + request.getName()) - .build(); - } - }; + PROXY.svc = new GreeterAdapter() { + @Override + public HelloReply sayHello(HelloRequest request) { + try { + // This should be plenty of time. Shouldn't be flaky... 
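The deadlineExceeded test beginning above (and continuing below) pairs a deliberately slow handler with a client deadline that expires before the handler returns. The client-side pattern in isolation, with an illustrative timeout value (stock grpc-java surfaces the expiry as a StatusRuntimeException, typically DEADLINE_EXCEEDED):

    var stub = GreeterGrpc.newBlockingStub(CHANNEL) // CHANNEL as created in the test setup
            .withDeadline(Deadline.after(50, TimeUnit.MILLISECONDS));
    try {
        stub.sayHello(HelloRequest.newBuilder().setName("Alice").build());
    } catch (StatusRuntimeException e) {
        // expected when the handler outlives the deadline:
        // e.getStatus().getCode() is usually DEADLINE_EXCEEDED
    }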
+ Thread.sleep(Duration.ofSeconds(1)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } - final var stub = - GreeterGrpc.newBlockingStub(CHANNEL) - .withDeadline(Deadline.after(1, TimeUnit.NANOSECONDS)); + return HelloReply.newBuilder() + .setMessage("Hello " + request.getName()) + .build(); + } + }; + + final var stub = GreeterGrpc.newBlockingStub(CHANNEL).withDeadline(Deadline.after(1, TimeUnit.NANOSECONDS)); try { //noinspection ResultOfMethodCallIgnored @@ -462,13 +455,12 @@ void unaryCall() { @ParameterizedTest @ValueSource(ints = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}) void exceptionThrownDuringHandling(final int grpcStatusCode) { - PROXY.svc = - new GreeterAdapter() { - @Override - public HelloReply sayHello(HelloRequest request) { - throw new GrpcException(GrpcStatus.values()[grpcStatusCode]); - } - }; + PROXY.svc = new GreeterAdapter() { + @Override + public HelloReply sayHello(HelloRequest request) { + throw new GrpcException(GrpcStatus.values()[grpcStatusCode]); + } + }; final var stub = GreeterGrpc.newBlockingStub(CHANNEL); @@ -485,22 +477,19 @@ public HelloReply sayHello(HelloRequest request) { void exceptionThrownWhileOpening() { // Try this list of exceptions final var exceptions = - List.of( - new GrpcException(GrpcStatus.UNKNOWN), - new RuntimeException("Error opening")); + List.of(new GrpcException(GrpcStatus.UNKNOWN), new RuntimeException("Error opening")); for (final var ex : exceptions) { - PROXY.svc = - new GreeterAdapter() { - @Override - @NonNull - public Pipeline open( - @NonNull Method method, - @NonNull RequestOptions options, - @NonNull Pipeline replies) { - throw ex; - } - }; + PROXY.svc = new GreeterAdapter() { + @Override + @NonNull + public Pipeline open( + @NonNull Method method, + @NonNull RequestOptions options, + @NonNull Pipeline replies) { + throw ex; + } + }; final var stub = GreeterGrpc.newBlockingStub(CHANNEL); @@ -509,8 +498,7 @@ public Pipeline open( stub.sayHello(SIMPLE_REQUEST); fail("An exception should have been thrown"); } catch (StatusRuntimeException e) { - assertThat(e.getStatus().getCode().value()) - .isEqualTo(GrpcStatus.UNKNOWN.ordinal()); + assertThat(e.getStatus().getCode().value()).isEqualTo(GrpcStatus.INTERNAL.ordinal()); } } } @@ -524,7 +512,8 @@ void streamingServer() { final var replies = stub.sayHelloStreamReply(SIMPLE_REQUEST); final var messages = new ArrayList(); replies.forEachRemaining(messages::add); - assertThat(messages).hasSize(10).allMatch(reply -> reply.getMessage().equals("Hello!")); + assertThat(messages).hasSize(10).allMatch(reply -> reply.getMessage() + .equals("Hello!")); } } @@ -534,26 +523,23 @@ class StreamingClientTests { void streamingClient() throws InterruptedException { final var latch = new CountDownLatch(1); final var response = new AtomicReference(); - final var requestObserver = - GreeterGrpc.newStub(CHANNEL) - .sayHelloStreamRequest( - new StreamObserver<>() { - @Override - public void onNext(HelloReply helloReply) { - response.set(helloReply); - } - - @Override - public void onError(Throwable throwable) { - // FUTURE: Test this failure condition - System.err.println("Error: " + throwable.getMessage()); - } - - @Override - public void onCompleted() { - latch.countDown(); - } - }); + final var requestObserver = GreeterGrpc.newStub(CHANNEL).sayHelloStreamRequest(new StreamObserver<>() { + @Override + public void onNext(HelloReply helloReply) { + response.set(helloReply); + } + + @Override + public void onError(Throwable throwable) { + // FUTURE: Test this 
failure condition + System.err.println("Error: " + throwable.getMessage()); + } + + @Override + public void onCompleted() { + latch.countDown(); + } + }); requestObserver.onNext(HelloRequest.newBuilder().setName("Alice").build()); requestObserver.onNext(HelloRequest.newBuilder().setName("Bob").build()); @@ -563,8 +549,9 @@ public void onCompleted() { assertThat(latch.await(1, TimeUnit.MINUTES)).isTrue(); assertThat(response.get()) - .isEqualTo( - HelloReply.newBuilder().setMessage("Hello Alice, Bob, Carol").build()); + .isEqualTo(HelloReply.newBuilder() + .setMessage("Hello Alice, Bob, Carol") + .build()); } } @@ -574,26 +561,23 @@ class BidiStreamingTests { void streamingBidi() throws InterruptedException { final var latch = new CountDownLatch(1); final var response = new ArrayList(); - final var requestObserver = - GreeterGrpc.newStub(CHANNEL) - .sayHelloStreamBidi( - new StreamObserver<>() { - @Override - public void onNext(HelloReply helloReply) { - response.add(helloReply); - } - - @Override - public void onError(Throwable throwable) { - latch.countDown(); - fail("Encountered unexpected exception", throwable); - } - - @Override - public void onCompleted() { - latch.countDown(); - } - }); + final var requestObserver = GreeterGrpc.newStub(CHANNEL).sayHelloStreamBidi(new StreamObserver<>() { + @Override + public void onNext(HelloReply helloReply) { + response.add(helloReply); + } + + @Override + public void onError(Throwable throwable) { + latch.countDown(); + fail("Encountered unexpected exception", throwable); + } + + @Override + public void onCompleted() { + latch.countDown(); + } + }); requestObserver.onNext(HelloRequest.newBuilder().setName("Alice").build()); requestObserver.onNext(HelloRequest.newBuilder().setName("Bob").build()); @@ -603,9 +587,7 @@ public void onCompleted() { //noinspection ResultOfMethodCallIgnored latch.await(1, TimeUnit.MINUTES); - assertThat(response) - .hasSize(3) - .allMatch(reply -> reply.getMessage().startsWith("Hello")); + assertThat(response).hasSize(3).allMatch(reply -> reply.getMessage().startsWith("Hello")); } } @@ -613,8 +595,7 @@ public void onCompleted() { class ConcurrencyTests { private static final int NUM_CONCURRENT = 10; private static final int NUM_REQUESTS = 100_000; - private final ConcurrentLinkedQueue failures = - new ConcurrentLinkedQueue<>(); + private final ConcurrentLinkedQueue failures = new ConcurrentLinkedQueue<>(); private final CountDownLatch latch = new CountDownLatch(NUM_REQUESTS); private final ExecutorService executor = Executors.newVirtualThreadPerTaskExecutor(); private final AtomicInteger nextClientId = new AtomicInteger(0); @@ -626,8 +607,9 @@ void setup() { // there is a practical limit to the number of concurrent channels. If the deque is // empty, there are no available channels. 
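The setup comment above describes the channel queue as a small pool that caps concurrency at NUM_CONCURRENT. The borrow/return pattern it implies looks like this (a sketch reusing the test's fields; error handling omitted):

    final ManagedChannel channel = channels.poll(); // null when every pooled channel is in use
    if (channel != null) {
        try {
            GreeterGrpc.newBlockingStub(channel)
                    .sayHello(HelloRequest.newBuilder()
                            .setName("client-" + nextClientId.getAndIncrement())
                            .build());
        } finally {
            channels.offer(channel); // hand it back for the next virtual client
        }
    }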
for (int i = 0; i < NUM_CONCURRENT; i++) { - final var channel = - ManagedChannelBuilder.forAddress("localhost", 8080).usePlaintext().build(); + final var channel = ManagedChannelBuilder.forAddress("localhost", 8080) + .usePlaintext() + .build(); channels.offer(channel); } @@ -636,22 +618,20 @@ void setup() { @AfterEach void teardown() { channels.forEach(ManagedChannel::shutdownNow); - channels.forEach( - c -> { - try { - c.awaitTermination(1, TimeUnit.SECONDS); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); + channels.forEach(c -> { + try { + c.awaitTermination(1, TimeUnit.SECONDS); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); } // FUTURE Try to test a bad client that sends multiple messages for a unary call @Test - @Disabled( - "This test passes locally but fails in CI. More work is needed to see why. It is" - + " timing dependent.") + @Disabled("This test passes locally but fails in CI. More work is needed to see why. It is" + + " timing dependent.") void manyConcurrentUnaryCalls() throws InterruptedException { // For each virtual client, execute the query and get the reply. Put the reply here in // this map. The key @@ -782,18 +762,15 @@ default HelloReply sayHello(HelloRequest request) { } @Override - default Pipeline sayHelloStreamRequest( - Pipeline replies) { + default Pipeline sayHelloStreamRequest(Pipeline replies) { return null; } @Override - default void sayHelloStreamReply( - HelloRequest request, Pipeline replies) {} + default void sayHelloStreamReply(HelloRequest request, Pipeline replies) {} @Override - default Pipeline sayHelloStreamBidi( - Pipeline replies) { + default Pipeline sayHelloStreamBidi(Pipeline replies) { return null; } } @@ -809,30 +786,25 @@ public HelloReply sayHello(HelloRequest request) { @Override @NonNull - public Pipeline sayHelloStreamRequest( - Pipeline replies) { + public Pipeline sayHelloStreamRequest(Pipeline replies) { return svc.sayHelloStreamRequest(replies); } @Override - public void sayHelloStreamReply( - HelloRequest request, Pipeline replies) { + public void sayHelloStreamReply(HelloRequest request, Pipeline replies) { svc.sayHelloStreamReply(request, replies); } @Override @NonNull - public Pipeline sayHelloStreamBidi( - Pipeline replies) { + public Pipeline sayHelloStreamBidi(Pipeline replies) { return svc.sayHelloStreamBidi(replies); } @Override @NonNull public Pipeline open( - @NonNull Method method, - @NonNull RequestOptions options, - @NonNull Pipeline replies) { + @NonNull Method method, @NonNull RequestOptions options, @NonNull Pipeline replies) { return svc.open(method, options, replies); } } diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BufferedDataGetBytes.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BufferedDataGetBytes.java index 11a98271..cd6250f7 100644 --- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BufferedDataGetBytes.java +++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BufferedDataGetBytes.java @@ -105,5 +105,4 @@ public void directToBytes(final Blackhole blackhole) { blackhole.consume(bytes); } } - } diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetByte.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetByte.java index aa9b22b3..a5e315f7 100644 --- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetByte.java +++ 
b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetByte.java @@ -79,5 +79,4 @@ public void directUnsafeGet(final Blackhole blackhole) { blackhole.consume(UnsafeUtils.getDirectBufferByte(directBuffer, i)); } } - } diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BytesGetLong.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BytesGetLong.java index 6c8ebe39..83d4aa16 100644 --- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BytesGetLong.java +++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BytesGetLong.java @@ -44,5 +44,4 @@ public void testUnsafeGetLong(final Blackhole blackhole) { blackhole.consume(UnsafeUtils.getLong(array, i)); } } - } diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WritableStreamingDataBench.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WritableStreamingDataBench.java index 8080952c..9731dda9 100644 --- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WritableStreamingDataBench.java +++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WritableStreamingDataBench.java @@ -63,5 +63,4 @@ public void writeRandomAccessData() { assert out.position() == SIZE; assert bout.toByteArray().length == SIZE; } - } diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBufferedDataBench.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBufferedDataBench.java index 55cc0a6d..d0f1e4dc 100644 --- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBufferedDataBench.java +++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBufferedDataBench.java @@ -8,6 +8,14 @@ import com.hedera.pbj.runtime.io.buffer.BufferedData; import com.hedera.pbj.runtime.io.stream.ReadableStreamingData; import com.hedera.pbj.runtime.io.stream.WritableStreamingData; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Random; +import java.util.concurrent.TimeUnit; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; @@ -22,15 +30,6 @@ import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.io.UncheckedIOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Random; -import java.util.concurrent.TimeUnit; - @SuppressWarnings("unused") @State(Scope.Benchmark) @Fork(1) @@ -40,73 +39,74 @@ @BenchmarkMode(Mode.Throughput) public class WriteBufferedDataBench { - public static final FieldDefinition BYTES_FIELD = new FieldDefinition("bytesField", FieldType.BYTES, false, false, false, 17); - final static BufferedData sampleData; - final static byte[] sampleWrittenData; + public static final FieldDefinition BYTES_FIELD = + new FieldDefinition("bytesField", FieldType.BYTES, false, false, false, 17); + static final BufferedData sampleData; + static final byte[] sampleWrittenData; - static { - final Random random = new Random(6262266); - byte[] data = new byte[1024*16]; - random.nextBytes(data); - sampleData = BufferedData.wrap(data); + static { + final Random random = new Random(6262266); + byte[] data = new byte[1024 * 16]; + random.nextBytes(data); + sampleData = 
BufferedData.wrap(data); - ByteArrayOutputStream bout = new ByteArrayOutputStream(); - try (WritableStreamingData out = new WritableStreamingData(bout)) { - for (int i = 0; i < 100; i++) { - random.nextBytes(data); - ProtoWriterTools.writeBytes(out, BYTES_FIELD, sampleData); - } - } catch (IOException e) { - e.printStackTrace(); - } - sampleWrittenData = bout.toByteArray(); - } + ByteArrayOutputStream bout = new ByteArrayOutputStream(); + try (WritableStreamingData out = new WritableStreamingData(bout)) { + for (int i = 0; i < 100; i++) { + random.nextBytes(data); + ProtoWriterTools.writeBytes(out, BYTES_FIELD, sampleData); + } + } catch (IOException e) { + e.printStackTrace(); + } + sampleWrittenData = bout.toByteArray(); + } - Path tempFileWriting; - Path tempFileReading; - OutputStream fout; - WritableStreamingData dataOut; + Path tempFileWriting; + Path tempFileReading; + OutputStream fout; + WritableStreamingData dataOut; - @Setup - public void prepare() { - try { - tempFileWriting = Files.createTempFile("WriteBytesBench", "dat"); - tempFileWriting.toFile().deleteOnExit(); - fout = Files.newOutputStream(tempFileWriting); - dataOut = new WritableStreamingData(fout); - tempFileReading = Files.createTempFile("WriteBytesBench", "dat"); - tempFileReading.toFile().deleteOnExit(); - Files.write(tempFileReading, sampleWrittenData); - } catch (IOException e) { - e.printStackTrace(); - throw new UncheckedIOException(e); - } - } + @Setup + public void prepare() { + try { + tempFileWriting = Files.createTempFile("WriteBytesBench", "dat"); + tempFileWriting.toFile().deleteOnExit(); + fout = Files.newOutputStream(tempFileWriting); + dataOut = new WritableStreamingData(fout); + tempFileReading = Files.createTempFile("WriteBytesBench", "dat"); + tempFileReading.toFile().deleteOnExit(); + Files.write(tempFileReading, sampleWrittenData); + } catch (IOException e) { + e.printStackTrace(); + throw new UncheckedIOException(e); + } + } - @TearDown - public void cleanUp() { - try { - dataOut.close(); - fout.close(); - } catch (IOException e){ - e.printStackTrace(); - throw new UncheckedIOException(e); - } - } + @TearDown + public void cleanUp() { + try { + dataOut.close(); + fout.close(); + } catch (IOException e) { + e.printStackTrace(); + throw new UncheckedIOException(e); + } + } - @Benchmark - public void writeBytes(Blackhole blackhole) throws IOException { - ProtoWriterTools.writeBytes(dataOut, BYTES_FIELD, sampleData); - } + @Benchmark + public void writeBytes(Blackhole blackhole) throws IOException { + ProtoWriterTools.writeBytes(dataOut, BYTES_FIELD, sampleData); + } - @Benchmark - @OperationsPerInvocation(100) - public void readBytes(Blackhole blackhole) throws IOException { - try (ReadableStreamingData in = new ReadableStreamingData(Files.newInputStream(tempFileReading)) ) { - for (int i = 0; i < 100; i++) { - blackhole.consume(in.readVarInt(false)); - blackhole.consume(ProtoParserTools.readBytes(in)); - } - } - } + @Benchmark + @OperationsPerInvocation(100) + public void readBytes(Blackhole blackhole) throws IOException { + try (ReadableStreamingData in = new ReadableStreamingData(Files.newInputStream(tempFileReading))) { + for (int i = 0; i < 100; i++) { + blackhole.consume(in.readVarInt(false)); + blackhole.consume(ProtoParserTools.readBytes(in)); + } + } + } } diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBytesBench.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBytesBench.java index c8779bb9..0c2ad767 100644 --- 
a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBytesBench.java +++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBytesBench.java @@ -39,73 +39,74 @@ @BenchmarkMode(Mode.Throughput) public class WriteBytesBench { - public static final FieldDefinition BYTES_FIELD = new FieldDefinition("bytesField", FieldType.BYTES, false, false, false, 17); - final static Bytes sampleData; - final static byte[] sampleWrittenData; + public static final FieldDefinition BYTES_FIELD = + new FieldDefinition("bytesField", FieldType.BYTES, false, false, false, 17); + static final Bytes sampleData; + static final byte[] sampleWrittenData; - static { - final Random random = new Random(6262266); - byte[] data = new byte[1024*16]; - random.nextBytes(data); - sampleData = Bytes.wrap(data); + static { + final Random random = new Random(6262266); + byte[] data = new byte[1024 * 16]; + random.nextBytes(data); + sampleData = Bytes.wrap(data); - ByteArrayOutputStream bout = new ByteArrayOutputStream(); - try (WritableStreamingData out = new WritableStreamingData(bout)) { - for (int i = 0; i < 100; i++) { - random.nextBytes(data); - ProtoWriterTools.writeBytes(out, BYTES_FIELD, sampleData); - } - } catch (IOException e) { - e.printStackTrace(); - } - sampleWrittenData = bout.toByteArray(); - } + ByteArrayOutputStream bout = new ByteArrayOutputStream(); + try (WritableStreamingData out = new WritableStreamingData(bout)) { + for (int i = 0; i < 100; i++) { + random.nextBytes(data); + ProtoWriterTools.writeBytes(out, BYTES_FIELD, sampleData); + } + } catch (IOException e) { + e.printStackTrace(); + } + sampleWrittenData = bout.toByteArray(); + } - Path tempFileWriting; - Path tempFileReading; - OutputStream fout; - WritableStreamingData dataOut; + Path tempFileWriting; + Path tempFileReading; + OutputStream fout; + WritableStreamingData dataOut; - @Setup - public void prepare() { - try { - tempFileWriting = Files.createTempFile("WriteBytesBench", "dat"); - tempFileWriting.toFile().deleteOnExit(); - fout = Files.newOutputStream(tempFileWriting); - dataOut = new WritableStreamingData(fout); - tempFileReading = Files.createTempFile("WriteBytesBench", "dat"); - tempFileReading.toFile().deleteOnExit(); - Files.write(tempFileReading, sampleWrittenData); - } catch (IOException e) { - e.printStackTrace(); - throw new UncheckedIOException(e); - } - } + @Setup + public void prepare() { + try { + tempFileWriting = Files.createTempFile("WriteBytesBench", "dat"); + tempFileWriting.toFile().deleteOnExit(); + fout = Files.newOutputStream(tempFileWriting); + dataOut = new WritableStreamingData(fout); + tempFileReading = Files.createTempFile("WriteBytesBench", "dat"); + tempFileReading.toFile().deleteOnExit(); + Files.write(tempFileReading, sampleWrittenData); + } catch (IOException e) { + e.printStackTrace(); + throw new UncheckedIOException(e); + } + } - @TearDown - public void cleanUp() { - try { - dataOut.close(); - fout.close(); - } catch (IOException e){ - e.printStackTrace(); - throw new UncheckedIOException(e); - } - } + @TearDown + public void cleanUp() { + try { + dataOut.close(); + fout.close(); + } catch (IOException e) { + e.printStackTrace(); + throw new UncheckedIOException(e); + } + } - @Benchmark - public void writeBytes(Blackhole blackhole) throws IOException { - ProtoWriterTools.writeBytes(dataOut, BYTES_FIELD, sampleData); - } + @Benchmark + public void writeBytes(Blackhole blackhole) throws IOException { + ProtoWriterTools.writeBytes(dataOut, BYTES_FIELD, sampleData); + } - @Benchmark 
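For reference, what writeBytes above emits per call, and what the readBytes loop just below consumes, is a length-delimited protobuf field: a varint tag ((fieldNumber << 3) | wireType 2), a varint byte count, then the payload. For BYTES_FIELD (field number 17) the tag works out as follows, which is why the read loop first calls in.readVarInt(false) before ProtoParserTools.readBytes(in):

    static int lengthDelimitedTag(int fieldNumber) {
        final int WIRE_TYPE_LENGTH_DELIMITED = 2;
        return (fieldNumber << 3) | WIRE_TYPE_LENGTH_DELIMITED; // 17 -> 138, varint bytes 0x8A 0x01
    }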
- @OperationsPerInvocation(100) - public void readBytes(Blackhole blackhole) throws IOException { - try (ReadableStreamingData in = new ReadableStreamingData(Files.newInputStream(tempFileReading)) ) { - for (int i = 0; i < 100; i++) { - blackhole.consume(in.readVarInt(false)); - blackhole.consume(ProtoParserTools.readBytes(in)); - } - } - } + @Benchmark + @OperationsPerInvocation(100) + public void readBytes(Blackhole blackhole) throws IOException { + try (ReadableStreamingData in = new ReadableStreamingData(Files.newInputStream(tempFileReading))) { + for (int i = 0; i < 100; i++) { + blackhole.consume(in.readVarInt(false)); + blackhole.consume(ProtoParserTools.readBytes(in)); + } + } + } } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Codec.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Codec.java index 8e66506c..b3717714 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Codec.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Codec.java @@ -39,7 +39,8 @@ public interface Codec { * @return The parsed object. It must not return null. * @throws ParseException If parsing fails */ - @NonNull T parse(@NonNull ReadableSequentialData input, final boolean strictMode, final int maxDepth) throws ParseException; + @NonNull + T parse(@NonNull ReadableSequentialData input, final boolean strictMode, final int maxDepth) throws ParseException; /** * Parses an object from the {@link Bytes} and returns it. @@ -59,7 +60,8 @@ public interface Codec { * @return The parsed object. It must not return null. * @throws ParseException If parsing fails */ - @NonNull default T parse(@NonNull Bytes bytes, final boolean strictMode, final int maxDepth) throws ParseException { + @NonNull + default T parse(@NonNull Bytes bytes, final boolean strictMode, final int maxDepth) throws ParseException { return parse(bytes.toReadableSequentialData(), strictMode, maxDepth); } @@ -70,7 +72,8 @@ public interface Codec { * @return The parsed object. It must not return null. * @throws ParseException If parsing fails */ - @NonNull default T parse(@NonNull ReadableSequentialData input) throws ParseException { + @NonNull + default T parse(@NonNull ReadableSequentialData input) throws ParseException { return parse(input, false, Integer.MAX_VALUE); } @@ -81,7 +84,8 @@ public interface Codec { * @return The parsed object. It must not return null. * @throws ParseException If parsing fails */ - @NonNull default T parse(@NonNull Bytes bytes) throws ParseException { + @NonNull + default T parse(@NonNull Bytes bytes) throws ParseException { return parse(bytes.toReadableSequentialData()); } @@ -96,7 +100,8 @@ public interface Codec { * @return The parsed object. It must not return null. * @throws ParseException If parsing fails */ - @NonNull default T parseStrict(@NonNull ReadableSequentialData input) throws ParseException { + @NonNull + default T parseStrict(@NonNull ReadableSequentialData input) throws ParseException { return parse(input, true, Integer.MAX_VALUE); } @@ -111,7 +116,8 @@ public interface Codec { * @return The parsed object. It must not return null. 
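A hedged usage sketch of the parse/parseStrict entry points shown in the Codec hunks around this point. Foo and its PROTOBUF codec handle are stand-ins for a PBJ-generated model; the static codec field and toBytes call are assumptions for illustration, only the parse methods themselves appear in this diff:

    Codec<Foo> codec = Foo.PROTOBUF;             // assumed generated codec handle
    Bytes wire = codec.toBytes(someFoo);         // someFoo: an existing model instance (placeholder)
    Foo lenient = codec.parse(wire);             // strictMode = false, maxDepth = Integer.MAX_VALUE
    Foo strict = codec.parseStrict(wire);        // strictMode = true
    Foo bounded = codec.parse(wire, false, 16);  // cap nesting depth explicitly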
* @throws ParseException If parsing fails */ - @NonNull default T parseStrict(@NonNull Bytes bytes) throws ParseException { + @NonNull + default T parseStrict(@NonNull Bytes bytes) throws ParseException { return parseStrict(bytes.toReadableSequentialData()); } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ComparableOneOf.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ComparableOneOf.java index 9c0326a5..608c8c88 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ComparableOneOf.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ComparableOneOf.java @@ -46,7 +46,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return (31 + Integer.hashCode(((EnumWithProtoMetadata)kind).protoOrdinal())) * 31 + return (31 + Integer.hashCode(((EnumWithProtoMetadata) kind).protoOrdinal())) * 31 + (value == null ? 0 : value.hashCode()); } @@ -63,4 +63,3 @@ public int compareTo(ComparableOneOf thatObj) { return value.compareTo(thatObj.value); } } - diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldDefinition.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldDefinition.java index 4683bfa7..ef6e87f8 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldDefinition.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldDefinition.java @@ -24,7 +24,8 @@ * @param oneOf Whether this is a field is part of a oneOf * @param number The field number. Must be >= 0. */ -public record FieldDefinition(String name, FieldType type, boolean repeated, boolean optional, boolean oneOf, int number) { +public record FieldDefinition( + String name, FieldType type, boolean repeated, boolean optional, boolean oneOf, int number) { /** * Construct new FieldDefinition, standard record all args constructor with extra checks * diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldType.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldType.java index 14b703a9..dc4e2bdf 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldType.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldType.java @@ -5,53 +5,53 @@ * Enumeration of possible types of fields. 
*/ public enum FieldType { - /** Protobuf 64bit Double Type */ - DOUBLE, - /** Protobuf 32bit Float Type */ - FLOAT, - /** Protobuf 32bit Signed Integer Type */ - INT32, - /** Protobuf 64bit Signed Long Type */ - INT64, - /** Protobuf 32bit Unsigned Long Type */ - UINT32, - /** Protobuf 64bit Unsigned Long Type */ - UINT64, - /** Protobuf 32bit Signed Integer Type, ZigZag encoded */ - SINT32, - /** Protobuf 64bit Signed Long Type, ZigZag encoded */ - SINT64, - /** Protobuf 32bit Unsigned Integer Type, not varint encoded, just little endian */ - FIXED32, - /** Protobuf 64bit Unsigned Long Type, not varint encoded, just little endian */ - FIXED64, - /** Protobuf 32bit Signed Integer Type, not varint encoded, just little endian */ - SFIXED32, - /** Protobuf 64bit Signed Long Type, not varint encoded, just little endian */ - SFIXED64, - /** Protobuf 1 byte boolean type */ - BOOL, - /** Protobuf UTF8 String type */ - STRING, - /** Protobuf bytes type */ - BYTES, - /** Protobuf enum type */ - ENUM, - /** Protobuf sub-message type */ - MESSAGE, - /** Protobuf map type */ - MAP; + /** Protobuf 64bit Double Type */ + DOUBLE, + /** Protobuf 32bit Float Type */ + FLOAT, + /** Protobuf 32bit Signed Integer Type */ + INT32, + /** Protobuf 64bit Signed Long Type */ + INT64, + /** Protobuf 32bit Unsigned Long Type */ + UINT32, + /** Protobuf 64bit Unsigned Long Type */ + UINT64, + /** Protobuf 32bit Signed Integer Type, ZigZag encoded */ + SINT32, + /** Protobuf 64bit Signed Long Type, ZigZag encoded */ + SINT64, + /** Protobuf 32bit Unsigned Integer Type, not varint encoded, just little endian */ + FIXED32, + /** Protobuf 64bit Unsigned Long Type, not varint encoded, just little endian */ + FIXED64, + /** Protobuf 32bit Signed Integer Type, not varint encoded, just little endian */ + SFIXED32, + /** Protobuf 64bit Signed Long Type, not varint encoded, just little endian */ + SFIXED64, + /** Protobuf 1 byte boolean type */ + BOOL, + /** Protobuf UTF8 String type */ + STRING, + /** Protobuf bytes type */ + BYTES, + /** Protobuf enum type */ + ENUM, + /** Protobuf sub-message type */ + MESSAGE, + /** Protobuf map type */ + MAP; - /** - * Optional values have an inner field, with a standard definition for every FieldType. We create singleton - * instances here for them to avoid them having to be created on every use. Placing them on the enum avoid a switch. - */ - final FieldDefinition optionalFieldDefinition; + /** + * Optional values have an inner field, with a standard definition for every FieldType. We create singleton + * instances here for them to avoid them having to be created on every use. Placing them on the enum avoid a switch. 
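The singleton described above (and declared just below) mirrors protobuf's wrapper messages: google.protobuf.Int32Value, StringValue and friends each declare exactly one field named "value" with field number 1, so the definition can be built once per FieldType instead of on every optional read or write. Conceptually, since the field itself is package-private this is illustrative only:

    // equivalent to what FieldType.STRING pre-builds as its optionalFieldDefinition
    FieldDefinition stringWrapperValue = new FieldDefinition("value", FieldType.STRING, false, 1);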
+ */ + final FieldDefinition optionalFieldDefinition; - /** - * Constructor, creates optionalFieldDefinition automatically - */ - FieldType() { - optionalFieldDefinition = new FieldDefinition("value",this,false,1); - } + /** + * Constructor, creates optionalFieldDefinition automatically + */ + FieldType() { + optionalFieldDefinition = new FieldDefinition("value", this, false, 1); + } } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonCodec.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonCodec.java index 242ea055..b2f7f034 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonCodec.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonCodec.java @@ -7,7 +7,6 @@ import com.hedera.pbj.runtime.jsonparser.JSONParser; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Objects; @@ -22,9 +21,9 @@ public interface JsonCodec extends Codec { // then we should strongly enforce Codec works with Records. This will reduce bugs // where people try to use a mutable object. - /** {@inheritDoc} */ - default @NonNull T parse(@NonNull ReadableSequentialData input, final boolean strictMode, final int maxDepth) throws ParseException { + default @NonNull T parse(@NonNull ReadableSequentialData input, final boolean strictMode, final int maxDepth) + throws ParseException { try { return parse(JsonTools.parseJson(input), strictMode, maxDepth); } catch (IOException ex) { @@ -39,10 +38,9 @@ public interface JsonCodec extends Codec { * @return Parsed HashObject model object or null if data input was null or empty * @throws ParseException If parsing fails */ - @NonNull T parse( - @Nullable final JSONParser.ObjContext root, - final boolean strictMode, - final int maxDepth) throws ParseException; + @NonNull + T parse(@Nullable final JSONParser.ObjContext root, final boolean strictMode, final int maxDepth) + throws ParseException; /** * Writes an item to the given {@link WritableSequentialData}. 
@@ -88,7 +86,7 @@ default String toJSON(@NonNull T item) { default int measure(@NonNull ReadableSequentialData input) throws ParseException { final long startPosition = input.position(); parse(input); - return (int)(input.position() - startPosition); + return (int) (input.position() - startPosition); } /** diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonTools.java index eefc6bb2..80590353 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonTools.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonTools.java @@ -7,13 +7,7 @@ import com.hedera.pbj.runtime.jsonparser.JSONParser; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; -import org.antlr.v4.runtime.CharStreams; -import org.antlr.v4.runtime.CodePointBuffer; -import org.antlr.v4.runtime.CodePointCharStream; -import org.antlr.v4.runtime.CommonTokenStream; - import java.io.IOException; -import java.io.UncheckedIOException; import java.nio.CharBuffer; import java.util.Base64; import java.util.List; @@ -21,6 +15,10 @@ import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; +import org.antlr.v4.runtime.CharStreams; +import org.antlr.v4.runtime.CodePointBuffer; +import org.antlr.v4.runtime.CodePointCharStream; +import org.antlr.v4.runtime.CommonTokenStream; /** * Class of static utility methods for working with JSON. All generated JSON is designed to be @@ -83,7 +81,7 @@ public static String unescape(@Nullable String string) { */ public static String escape(@Nullable String string) { if (string == null) return null; - return string.replaceAll("\n","\\\\n").replaceAll("\r","\\\\r"); + return string.replaceAll("\n", "\\\\n").replaceAll("\r", "\\\\r"); } // ==================================================================================================== @@ -101,7 +99,7 @@ public static JSONParser.ObjContext parseJson(@NonNull final ReadableSequentialD final JSONParser parser = new JSONParser(new CommonTokenStream(lexer)); final JSONParser.JsonContext jsonContext = parser.json(); final JSONParser.ValueContext valueContext = jsonContext.value(); - return valueContext.obj(); + return valueContext.obj(); } /** @@ -113,11 +111,12 @@ public static JSONParser.ObjContext parseJson(@NonNull final ReadableSequentialD public static JSONParser.ObjContext parseJson(@NonNull final CharBuffer input) { CodePointBuffer.Builder codePointBufferBuilder = CodePointBuffer.builder(input.remaining()); codePointBufferBuilder.append(input); - final JSONLexer lexer = new JSONLexer(CodePointCharStream.fromBuffer(codePointBufferBuilder.build(), "CharBuffer")); + final JSONLexer lexer = + new JSONLexer(CodePointCharStream.fromBuffer(codePointBufferBuilder.build(), "CharBuffer")); final JSONParser parser = new JSONParser(new CommonTokenStream(lexer)); final JSONParser.JsonContext jsonContext = parser.json(); final JSONParser.ValueContext valueContext = jsonContext.value(); - return valueContext.obj(); + return valueContext.obj(); } /** @@ -136,7 +135,8 @@ public static List parseObjArray(JSONParser.ArrContext arrContext, JsonCo } catch (ParseException e) { throw new UncheckedParseException(e); } - }).toList(); + }) + .toList(); } /** @@ -146,7 +146,10 @@ public static List parseObjArray(JSONParser.ArrContext arrContext, JsonCo * @return the parsed integer */ public static int parseInteger(JSONParser.ValueContext valueContext) { - 
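One way to read the numeric helpers around this point (parseInteger, parseLong, parseFloat, parseDouble) accepting either a STRING or a NUMBER token, and the arrayField switch further down quoting 64-bit values: proto3's JSON mapping renders int64/uint64/fixed64 as decimal strings, because JSON numbers lose precision past 2^53, while 32-bit integers stay plain numbers. For example, with illustrative field names:

    // {"int32Field": 42, "int64Field": "9007199254740993"}
    int small = Integer.parseInt("42");
    long big = Long.parseLong("9007199254740993"); // exact; as a JSON number this would round in JavaScript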
return Integer.parseInt(valueContext.STRING() != null ? valueContext.STRING().getText() : valueContext.NUMBER().getText()); + return Integer.parseInt( + valueContext.STRING() != null + ? valueContext.STRING().getText() + : valueContext.NUMBER().getText()); } /** @@ -156,7 +159,10 @@ public static int parseInteger(JSONParser.ValueContext valueContext) { * @return the parsed long */ public static long parseLong(JSONParser.ValueContext valueContext) { - return Long.parseLong(valueContext.STRING() != null ? valueContext.STRING().getText() : valueContext.NUMBER().getText()); + return Long.parseLong( + valueContext.STRING() != null + ? valueContext.STRING().getText() + : valueContext.NUMBER().getText()); } /** @@ -166,7 +172,10 @@ public static long parseLong(JSONParser.ValueContext valueContext) { * @return the parsed float */ public static float parseFloat(JSONParser.ValueContext valueContext) { - return Float.parseFloat(valueContext.STRING() != null ? valueContext.STRING().getText() : valueContext.NUMBER().getText()); + return Float.parseFloat( + valueContext.STRING() != null + ? valueContext.STRING().getText() + : valueContext.NUMBER().getText()); } /** @@ -176,7 +185,10 @@ public static float parseFloat(JSONParser.ValueContext valueContext) { * @return the parsed double */ public static double parseDouble(JSONParser.ValueContext valueContext) { - return Double.parseDouble(valueContext.STRING() != null ? valueContext.STRING().getText() : valueContext.NUMBER().getText()); + return Double.parseDouble( + valueContext.STRING() != null + ? valueContext.STRING().getText() + : valueContext.NUMBER().getText()); } /** @@ -260,11 +272,9 @@ private static String rawFieldCode(String fieldName, String rawValue) { * @param value the value of the field * @return the JSON string */ - public static String field(String indent, String fieldName, - JsonCodec codec, @Nullable final T value) { + public static String field(String indent, String fieldName, JsonCodec codec, @Nullable final T value) { if (value != null) { - return '"' + toJsonFieldName(fieldName) + '"' + ": " + - codec.toJSON(value, indent, true); + return '"' + toJsonFieldName(fieldName) + '"' + ": " + codec.toJSON(value, indent, true); } else { return '"' + toJsonFieldName(fieldName) + '"' + ": null"; } @@ -312,7 +322,8 @@ public static String field(String fieldName, byte[] value) { * @param vComposer a composer of a "key":value strings - basically, a JsonTools::field method for the value type * @return the JSON string */ - public static String field(String fieldName, Map value, Function kEncoder, BiFunction vComposer) { + public static String field( + String fieldName, Map value, Function kEncoder, BiFunction vComposer) { assert !value.isEmpty(); StringBuilder sb = new StringBuilder(); PbjMap pbjMap = (PbjMap) value; @@ -373,9 +384,9 @@ public static String field(String fieldName, float value) { if (Float.isNaN(value)) { return rawFieldCode(fieldName, "\"NaN\""); } else if (Float.isInfinite(value)) { - return rawFieldCode(fieldName, "\""+(value < 0 ? "-Infinity" : "Infinity")+"\""); + return rawFieldCode(fieldName, "\"" + (value < 0 ? "-Infinity" : "Infinity") + "\""); } else { - return rawFieldCode(fieldName, Float.toString(value) ); + return rawFieldCode(fieldName, Float.toString(value)); } } @@ -390,7 +401,7 @@ public static String field(String fieldName, double value) { if (Double.isNaN(value)) { return rawFieldCode(fieldName, "\"NaN\""); } else if (Double.isInfinite(value)) { - return rawFieldCode(fieldName, "\""+(value < 0 ? 
"-Infinity" : "Infinity")+"\""); + return rawFieldCode(fieldName, "\"" + (value < 0 ? "-Infinity" : "Infinity") + "\""); } else { return rawFieldCode(fieldName, Double.toString(value)); } @@ -481,8 +492,7 @@ public static String field(String fieldName, Double value) { * @return the JSON string * @param the type of the items in the array */ - public static String arrayField(String fieldName, - FieldDefinition fieldDefinition, List items) { + public static String arrayField(String fieldName, FieldDefinition fieldDefinition, List items) { if (items != null) { if (items.isEmpty()) { return rawFieldCode(fieldName, "[]"); @@ -496,18 +506,21 @@ public static String arrayField(String fieldName, case STRING -> '"' + escape((String) item) + '"'; case BYTES -> '"' + ((Bytes) item).toBase64() + '"'; case INT32, SINT32, UINT32, FIXED32, SFIXED32 -> Integer.toString((Integer) item); - case INT64, SINT64, UINT64, FIXED64, SFIXED64 -> '"' + Long.toString((Long) item) + '"'; + case INT64, SINT64, UINT64, FIXED64, SFIXED64 -> '"' + + Long.toString((Long) item) + + '"'; case FLOAT -> Float.toString((Float) item); case DOUBLE -> Double.toString((Double) item); case BOOL -> Boolean.toString((Boolean) item); - case ENUM -> '"' + ((EnumWithProtoMetadata)item).protoName() + '"'; - case MESSAGE -> throw new UnsupportedOperationException("No expected here should have called other arrayField() method"); + case ENUM -> '"' + ((EnumWithProtoMetadata) item).protoName() + '"'; + case MESSAGE -> throw new UnsupportedOperationException( + "No expected here should have called other arrayField() method"); case MAP -> throw new UnsupportedOperationException("Arrays of maps not supported"); }; } }) .collect(Collectors.joining(", ")); - return rawFieldCode(fieldName, "["+values+"]"); + return rawFieldCode(fieldName, "[" + values + "]"); } } return null; @@ -523,15 +536,14 @@ public static String arrayField(String fieldName, * @return the JSON string * @param the type of the items in the array */ - public static String arrayField(String indent, String fieldName, - JsonCodec codec, List items) { + public static String arrayField(String indent, String fieldName, JsonCodec codec, List items) { if (items != null) { if (items.isEmpty()) { return rawFieldCode(fieldName, "[]"); } else { StringBuilder code = new StringBuilder('"' + fieldName + '"' + ": ["); for (int i = 0; i < items.size(); i++) { - var item = items.get(i); + var item = items.get(i); code.append(codec.toJSON(item, indent, true)); if (i < items.size() - 1) { code.append(", "); @@ -543,5 +555,4 @@ public static String arrayField(String indent, String fieldName, } return null; } - } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/MalformedProtobufException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/MalformedProtobufException.java index 8f91de88..1fbc5958 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/MalformedProtobufException.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/MalformedProtobufException.java @@ -8,16 +8,16 @@ */ public class MalformedProtobufException extends IOException { - /** - * Construct new MalformedProtobufException - * - * @param message error message - */ - public MalformedProtobufException(final String message) { - super(message); - } + /** + * Construct new MalformedProtobufException + * + * @param message error message + */ + public MalformedProtobufException(final String message) { + super(message); + } - public MalformedProtobufException(final String 
message, final Throwable cause) { - super(message, cause); - } + public MalformedProtobufException(final String message, final Throwable cause) { + super(message, cause); + } } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/OneOf.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/OneOf.java index 1efdc720..7d42e03b 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/OneOf.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/OneOf.java @@ -49,9 +49,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return (31 + Integer.hashCode(((EnumWithProtoMetadata)kind).protoOrdinal())) * 31 + return (31 + Integer.hashCode(((EnumWithProtoMetadata) kind).protoOrdinal())) * 31 + (value == null ? 0 : value.hashCode()); } - } - diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/PbjMap.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/PbjMap.java index 457f7b36..1ec4b5a2 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/PbjMap.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/PbjMap.java @@ -26,7 +26,8 @@ public class PbjMap implements Map { private PbjMap(final Map map) { this.map = Collections.unmodifiableMap(map); - this.sortedKeys = Collections.unmodifiableList(map.keySet().stream().sorted().toList()); + this.sortedKeys = + Collections.unmodifiableList(map.keySet().stream().sorted().toList()); } /** diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoParserTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoParserTools.java index 76c2c3be..5fbfd2ad 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoParserTools.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoParserTools.java @@ -1,9 +1,8 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.runtime; -import com.hedera.pbj.runtime.io.buffer.Bytes; import com.hedera.pbj.runtime.io.ReadableSequentialData; - +import com.hedera.pbj.runtime.io.buffer.Bytes; import java.io.IOException; import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; @@ -76,7 +75,7 @@ public static Map addToMap(Map map, final K key, final V valu * @param input The input data to read from * @return the read int */ - public static int readInt32(final ReadableSequentialData input) { + public static int readInt32(final ReadableSequentialData input) { return input.readVarInt(false); } @@ -255,7 +254,8 @@ public static String readString(final ReadableSequentialData input, final long m try { // Shouldn't use `new String()` because we want to error out on malformed UTF-8 bytes. 
- return StandardCharsets.UTF_8.newDecoder() + return StandardCharsets.UTF_8 + .newDecoder() .onMalformedInput(CodingErrorAction.REPORT) .onUnmappableCharacter(CodingErrorAction.REPORT) .decode(bb) @@ -334,9 +334,9 @@ public static void skipField(final ReadableSequentialData input, final ProtoCons switch (wireType) { case WIRE_TYPE_FIXED_64_BIT -> input.skip(8); case WIRE_TYPE_FIXED_32_BIT -> input.skip(4); - // The value for "zigZag" when calling varint doesn't matter because we are just reading past - // the varint, we don't care how to interpret it (zigzag is only used for interpretation of - // the bytes, not how many of them there are) + // The value for "zigZag" when calling varint doesn't matter because we are just reading past + // the varint, we don't care how to interpret it (zigzag is only used for interpretation of + // the bytes, not how many of them there are) case WIRE_TYPE_VARINT_OR_ZIGZAG -> input.readVarLong(false); case WIRE_TYPE_DELIMITED -> { final int length = input.readVarInt(false); diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoTestTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoTestTools.java index 0dfb397f..aec8fc59 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoTestTools.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoTestTools.java @@ -3,13 +3,11 @@ import com.hedera.pbj.runtime.io.buffer.BufferedData; import com.hedera.pbj.runtime.io.buffer.Bytes; - import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.Map; /** * Static tools and test cases used by generated test classes. @@ -21,10 +19,10 @@ public final class ProtoTestTools { /** Size for reusable test buffers */ - private static final int BUFFER_SIZE = 1024*1024; + private static final int BUFFER_SIZE = 1024 * 1024; /** Size for reusable test char buffers */ - private static final int CHAR_BUFFER_SIZE = 2*1024*1024; + private static final int CHAR_BUFFER_SIZE = 2 * 1024 * 1024; /** Instance should never be created */ private ProtoTestTools() {} @@ -129,12 +127,12 @@ public static List addNull(final List list) { * @param the type for lists */ public static List> generateListArguments(final List list) { - ArrayList> outputList = new ArrayList<>((list.size()/5)+1); + ArrayList> outputList = new ArrayList<>((list.size() / 5) + 1); outputList.add(Collections.emptyList()); int i = 0; while (i < list.size()) { - final int itemsToUse = Math.min(5, list.size()-i); - outputList.add(list.subList(i, i+itemsToUse)); + final int itemsToUse = Math.min(5, list.size() - i); + outputList.add(list.subList(i, i + itemsToUse)); i += itemsToUse; } return outputList; @@ -144,7 +142,8 @@ public static List> generateListArguments(final List list) { // Standard lists of values to test with /** integer type test cases */ - public static final List INTEGER_TESTS_LIST = List.of(Integer.MIN_VALUE, -42, -21, 0, 21, 42, Integer.MAX_VALUE); + public static final List INTEGER_TESTS_LIST = + List.of(Integer.MIN_VALUE, -42, -21, 0, 21, 42, Integer.MAX_VALUE); /** unsigned integer type test cases */ public static final List UNSIGNED_INTEGER_TESTS_LIST = List.of(0, 1, 2, Integer.MAX_VALUE); /** long type test cases */ @@ -152,17 +151,18 @@ public static List> generateListArguments(final List list) { /** unsigned long type test cases */ public static final List UNSIGNED_LONG_TESTS_LIST = List.of(0L, 21L, 42L, Long.MAX_VALUE); /** bytes 
float test cases */ - public static final List FLOAT_TESTS_LIST = List.of(Float.MIN_NORMAL, -102.7f, -5f, 1.7f, 0f, 3f, 5.2f, 42.1f, Float.MAX_VALUE); + public static final List FLOAT_TESTS_LIST = + List.of(Float.MIN_NORMAL, -102.7f, -5f, 1.7f, 0f, 3f, 5.2f, 42.1f, Float.MAX_VALUE); /** double type test cases */ - public static final List DOUBLE_TESTS_LIST = List.of(Double.MIN_NORMAL, -102.7d, -5d, 1.7d, 0d, 3d, 5.2d, 42.1d, Double.MAX_VALUE); + public static final List DOUBLE_TESTS_LIST = + List.of(Double.MIN_NORMAL, -102.7d, -5d, 1.7d, 0d, 3d, 5.2d, 42.1d, Double.MAX_VALUE); /** boolean type test cases */ public static final List BOOLEAN_TESTS_LIST = List.of(true, false); /** bytes type test cases */ - public static final List BYTES_TESTS_LIST = List.of( - Bytes.wrap(new byte[0]), - Bytes.wrap(new byte[]{0b001}), - Bytes.wrap(new byte[]{0b001, 0b010, 0b011, (byte)0xFF, Byte.MIN_VALUE, Byte.MAX_VALUE}) - ); + public static final List BYTES_TESTS_LIST = + List.of(Bytes.wrap(new byte[0]), Bytes.wrap(new byte[] {0b001}), Bytes.wrap(new byte[] { + 0b001, 0b010, 0b011, (byte) 0xFF, Byte.MIN_VALUE, Byte.MAX_VALUE + })); /** string type test cases, small as possible to make tests fast, there is a separate integration test with extra tests */ public static final List STRING_TESTS_LIST = List.of( @@ -170,6 +170,5 @@ public static List> generateListArguments(final List list) { """ This a small to speed tests Couple extended chars ©« あめ بِها - """ - ); + """); } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriter.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriter.java index dd8db3dd..b1bc93b4 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriter.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriter.java @@ -2,7 +2,6 @@ package com.hedera.pbj.runtime; import com.hedera.pbj.runtime.io.WritableSequentialData; - import java.io.IOException; /** diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriterTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriterTools.java index 0f01d9ed..8a0152dd 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriterTools.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriterTools.java @@ -1,12 +1,13 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.runtime; +import static com.hedera.pbj.runtime.ProtoConstants.*; + import com.hedera.pbj.runtime.io.WritableSequentialData; import com.hedera.pbj.runtime.io.buffer.Bytes; import com.hedera.pbj.runtime.io.buffer.RandomAccessData; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; - import java.io.IOException; import java.nio.ByteOrder; import java.nio.charset.StandardCharsets; @@ -14,8 +15,6 @@ import java.util.function.Consumer; import java.util.function.ToIntFunction; -import static com.hedera.pbj.runtime.ProtoConstants.*; - /** * Static helper methods for Writers */ @@ -67,7 +66,8 @@ public static void writeTag(final WritableSequentialData out, final FieldDefinit * @param field The field to include in tag * @param wireType The field wire type to include in tag */ - public static void writeTag(final WritableSequentialData out, final FieldDefinition field, final ProtoConstants wireType) { + public static void writeTag( + final WritableSequentialData out, final FieldDefinition field, final ProtoConstants wireType) { out.writeVarInt((field.number() << 
TAG_TYPE_BITS) | wireType.ordinal(), false); } @@ -76,7 +76,6 @@ private static RuntimeException unsupported() { return new RuntimeException("Unsupported field type. Bug in ProtoOutputStream, shouldn't happen."); } - // ================================================================================================================ // STANDARD WRITE METHODS @@ -100,10 +99,11 @@ public static void writeInteger(WritableSequentialData out, FieldDefinition fiel * @param skipDefault default value results in no-op for non-oneOf */ public static void writeInteger(WritableSequentialData out, FieldDefinition field, int value, boolean skipDefault) { - assert switch(field.type()) { - case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> true; - default -> false; - } : "Not an integer type " + field; + assert switch (field.type()) { + case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> true; + default -> false; + } + : "Not an integer type " + field; assert !field.repeated() : "Use writeIntegerList with repeated types"; if (skipDefault && !field.oneOf() && value == 0) { @@ -152,10 +152,11 @@ public static void writeLong(WritableSequentialData out, FieldDefinition field, * @param skipDefault default value results in no-op for non-oneOf */ public static void writeLong(WritableSequentialData out, FieldDefinition field, long value, boolean skipDefault) { - assert switch(field.type()) { - case INT64, UINT64, SINT64, FIXED64, SFIXED64 -> true; - default -> false; - } : "Not a long type " + field; + assert switch (field.type()) { + case INT64, UINT64, SINT64, FIXED64, SFIXED64 -> true; + default -> false; + } + : "Not a long type " + field; assert !field.repeated() : "Use writeLongList with repeated types"; if (skipDefault && !field.oneOf() && value == 0) { return; @@ -234,13 +235,14 @@ public static void writeBoolean(WritableSequentialData out, FieldDefinition fiel * @param value the boolean value to write * @param skipDefault default value results in no-op for non-oneOf */ - public static void writeBoolean(WritableSequentialData out, FieldDefinition field, boolean value, boolean skipDefault) { + public static void writeBoolean( + WritableSequentialData out, FieldDefinition field, boolean value, boolean skipDefault) { assert field.type() == FieldType.BOOL : "Not a boolean type " + field; assert !field.repeated() : "Use writeBooleanList with repeated types"; // In the case of oneOf we write the value even if it is default value of false if (value || field.oneOf() || !skipDefault) { writeTag(out, field, WIRE_TYPE_VARINT_OR_ZIGZAG); - out.writeByte(value ? (byte)1 : 0); + out.writeByte(value ? 
(byte) 1 : 0); } } @@ -270,8 +272,8 @@ public static void writeEnum(WritableSequentialData out, FieldDefinition field, * @param value the string value to write * @throws IOException If a I/O error occurs */ - public static void writeString(final WritableSequentialData out, final FieldDefinition field, - final String value) throws IOException { + public static void writeString(final WritableSequentialData out, final FieldDefinition field, final String value) + throws IOException { writeString(out, field, value, true); } @@ -284,8 +286,9 @@ public static void writeString(final WritableSequentialData out, final FieldDefi * @param skipDefault default value results in no-op for non-oneOf * @throws IOException If a I/O error occurs */ - public static void writeString(final WritableSequentialData out, final FieldDefinition field, - final String value, boolean skipDefault) throws IOException { + public static void writeString( + final WritableSequentialData out, final FieldDefinition field, final String value, boolean skipDefault) + throws IOException { assert field.type() == FieldType.STRING : "Not a string type " + field; assert !field.repeated() : "Use writeStringList with repeated types"; writeStringNoChecks(out, field, value, skipDefault); @@ -301,8 +304,8 @@ public static void writeString(final WritableSequentialData out, final FieldDefi * @param value the string value to write * @throws IOException If a I/O error occurs */ - public static void writeOneRepeatedString(final WritableSequentialData out, final FieldDefinition field, - final String value) throws IOException { + public static void writeOneRepeatedString( + final WritableSequentialData out, final FieldDefinition field, final String value) throws IOException { assert field.type() == FieldType.STRING : "Not a string type " + field; assert field.repeated() : "writeOneRepeatedString can only be used with repeated fields"; writeStringNoChecks(out, field, value); @@ -316,8 +319,8 @@ public static void writeOneRepeatedString(final WritableSequentialData out, fina * @param value the string value to write * @throws IOException If a I/O error occurs */ - private static void writeStringNoChecks(final WritableSequentialData out, final FieldDefinition field, - final String value) throws IOException { + private static void writeStringNoChecks( + final WritableSequentialData out, final FieldDefinition field, final String value) throws IOException { writeStringNoChecks(out, field, value, true); } @@ -330,8 +333,9 @@ private static void writeStringNoChecks(final WritableSequentialData out, final * @param skipDefault default value results in no-op for non-oneOf * @throws IOException If a I/O error occurs */ - private static void writeStringNoChecks(final WritableSequentialData out, final FieldDefinition field, - final String value, boolean skipDefault) throws IOException { + private static void writeStringNoChecks( + final WritableSequentialData out, final FieldDefinition field, final String value, boolean skipDefault) + throws IOException { // When not a oneOf don't write default value if (skipDefault && !field.oneOf() && (value == null || value.isEmpty())) { return; @@ -350,8 +354,9 @@ private static void writeStringNoChecks(final WritableSequentialData out, final * @param value the bytes value to write * @throws IOException If a I/O error occurs */ - public static void writeBytes(final WritableSequentialData out, final FieldDefinition field, - final RandomAccessData value) throws IOException { + public static void writeBytes( + final 
WritableSequentialData out, final FieldDefinition field, final RandomAccessData value) + throws IOException { writeBytes(out, field, value, true); } @@ -365,8 +370,12 @@ public static void writeBytes(final WritableSequentialData out, final FieldDefin * @param skipDefault default value results in no-op for non-oneOf * @throws IOException If a I/O error occurs */ - public static void writeBytes(final WritableSequentialData out, final FieldDefinition field, - final RandomAccessData value, boolean skipDefault) throws IOException { + public static void writeBytes( + final WritableSequentialData out, + final FieldDefinition field, + final RandomAccessData value, + boolean skipDefault) + throws IOException { assert field.type() == FieldType.BYTES : "Not a byte[] type " + field; assert !field.repeated() : "Use writeBytesList with repeated types"; writeBytesNoChecks(out, field, value, skipDefault); @@ -383,8 +392,9 @@ public static void writeBytes(final WritableSequentialData out, final FieldDefin * @param value the bytes value to write * @throws IOException If a I/O error occurs */ - public static void writeOneRepeatedBytes(final WritableSequentialData out, final FieldDefinition field, - final RandomAccessData value) throws IOException { + public static void writeOneRepeatedBytes( + final WritableSequentialData out, final FieldDefinition field, final RandomAccessData value) + throws IOException { assert field.type() == FieldType.BYTES : "Not a byte[] type " + field; assert field.repeated() : "writeOneRepeatedBytes can only be used with repeated fields"; writeBytesNoChecks(out, field, value, true); @@ -399,8 +409,12 @@ public static void writeOneRepeatedBytes(final WritableSequentialData out, final * @param skipZeroLength this is true for normal single bytes and false for repeated lists * @throws IOException If a I/O error occurs */ - private static void writeBytesNoChecks(final WritableSequentialData out, final FieldDefinition field, - final RandomAccessData value, final boolean skipZeroLength) throws IOException { + private static void writeBytesNoChecks( + final WritableSequentialData out, + final FieldDefinition field, + final RandomAccessData value, + final boolean skipZeroLength) + throws IOException { // When not a oneOf don't write default value if (!field.oneOf() && (skipZeroLength && (value.length() == 0))) { return; @@ -425,8 +439,9 @@ private static void writeBytesNoChecks(final WritableSequentialData out, final F * @throws IOException If a I/O error occurs * @param type of message */ - public static void writeMessage(final WritableSequentialData out, final FieldDefinition field, - final T message, final Codec codec) throws IOException { + public static void writeMessage( + final WritableSequentialData out, final FieldDefinition field, final T message, final Codec codec) + throws IOException { assert field.type() == FieldType.MESSAGE : "Not a message type " + field; assert !field.repeated() : "Use writeMessageList with repeated types"; writeMessageNoChecks(out, field, message, codec); @@ -445,8 +460,9 @@ public static void writeMessage(final WritableSequentialData out, final Fiel * @throws IOException If a I/O error occurs * @param type of message */ - public static void writeOneRepeatedMessage(final WritableSequentialData out, final FieldDefinition field, - final T message, final Codec codec) throws IOException { + public static void writeOneRepeatedMessage( + final WritableSequentialData out, final FieldDefinition field, final T message, final Codec codec) + throws IOException { 
assert field.type() == FieldType.MESSAGE : "Not a message type " + field; assert field.repeated() : "writeOneRepeatedMessage can only be used with repeated fields"; writeMessageNoChecks(out, field, message, codec); @@ -462,8 +478,9 @@ public static void writeOneRepeatedMessage(final WritableSequentialData out, * @throws IOException If a I/O error occurs * @param type of message */ - private static void writeMessageNoChecks(final WritableSequentialData out, final FieldDefinition field, - final T message, final Codec codec) throws IOException { + private static void writeMessageNoChecks( + final WritableSequentialData out, final FieldDefinition field, final T message, final Codec codec) + throws IOException { // When not a oneOf don't write default value if (field.oneOf() && message == null) { writeTag(out, field, WIRE_TYPE_DELIMITED); @@ -485,8 +502,8 @@ public static void writeMap( final ProtoWriter kWriter, final ProtoWriter vWriter, final ToIntFunction sizeOfK, - final ToIntFunction sizeOfV - ) throws IOException { + final ToIntFunction sizeOfV) + throws IOException { // https://protobuf.dev/programming-guides/proto3/#maps // On the wire, a map is equivalent to: // message MapFieldEntry { @@ -520,7 +537,8 @@ public static void writeMap( * @param field the descriptor for the field we are writing * @param value the optional integer value to write */ - public static void writeOptionalInteger(WritableSequentialData out, FieldDefinition field, @Nullable Integer value) { + public static void writeOptionalInteger( + WritableSequentialData out, FieldDefinition field, @Nullable Integer value) { if (value != null) { writeTag(out, field, WIRE_TYPE_DELIMITED); final var newField = field.type().optionalFieldDefinition; @@ -557,7 +575,7 @@ public static void writeOptionalFloat(WritableSequentialData out, FieldDefinitio writeTag(out, field, WIRE_TYPE_DELIMITED); final var newField = field.type().optionalFieldDefinition; out.writeVarInt(sizeOfFloat(newField, value), false); - writeFloat(out,newField,value); + writeFloat(out, newField, value); } } @@ -573,7 +591,7 @@ public static void writeOptionalDouble(WritableSequentialData out, FieldDefiniti writeTag(out, field, WIRE_TYPE_DELIMITED); final var newField = field.type().optionalFieldDefinition; out.writeVarInt(sizeOfDouble(newField, value), false); - writeDouble(out,newField,value); + writeDouble(out, newField, value); } } @@ -584,7 +602,8 @@ public static void writeOptionalDouble(WritableSequentialData out, FieldDefiniti * @param field the descriptor for the field we are writing * @param value the optional boolean value to write */ - public static void writeOptionalBoolean(WritableSequentialData out, FieldDefinition field, @Nullable Boolean value) { + public static void writeOptionalBoolean( + WritableSequentialData out, FieldDefinition field, @Nullable Boolean value) { if (value != null) { writeTag(out, field, WIRE_TYPE_DELIMITED); final var newField = field.type().optionalFieldDefinition; @@ -601,7 +620,8 @@ public static void writeOptionalBoolean(WritableSequentialData out, FieldDefinit * @param value the optional string value to write * @throws IOException If a I/O error occurs */ - public static void writeOptionalString(WritableSequentialData out, FieldDefinition field, @Nullable String value) throws IOException { + public static void writeOptionalString(WritableSequentialData out, FieldDefinition field, @Nullable String value) + throws IOException { if (value != null) { writeTag(out, field, WIRE_TYPE_DELIMITED); final var newField = 
field.type().optionalFieldDefinition; @@ -618,14 +638,15 @@ public static void writeOptionalString(WritableSequentialData out, FieldDefiniti * @param value the optional bytes value to write * @throws IOException If a I/O error occurs */ - public static void writeOptionalBytes(WritableSequentialData out, FieldDefinition field, @Nullable Bytes value) throws IOException { + public static void writeOptionalBytes(WritableSequentialData out, FieldDefinition field, @Nullable Bytes value) + throws IOException { if (value != null) { writeTag(out, field, WIRE_TYPE_DELIMITED); final var newField = field.type().optionalFieldDefinition; final int size = sizeOfBytes(newField, value); out.writeVarInt(size, false); if (size > 0) { - writeBytes(out,newField, value); + writeBytes(out, newField, value); } } } @@ -633,7 +654,6 @@ public static void writeOptionalBytes(WritableSequentialData out, FieldDefinitio // ================================================================================================================ // LIST VERSIONS OF WRITE METHODS - /** * Write a list of integers to data output * @@ -642,10 +662,11 @@ public static void writeOptionalBytes(WritableSequentialData out, FieldDefinitio * @param list the list of integers value to write */ public static void writeIntegerList(WritableSequentialData out, FieldDefinition field, List list) { - assert switch(field.type()) { - case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> true; - default -> false; - } : "Not an integer type " + field; + assert switch (field.type()) { + case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> true; + default -> false; + } + : "Not an integer type " + field; assert field.repeated() : "Use writeInteger with non-repeated types"; // When not a oneOf don't write default value @@ -685,7 +706,7 @@ assert switch(field.type()) { int size = 0; for (int i = 0; i < listSize; i++) { final int val = list.get(i); - size += sizeOfUnsignedVarInt64(((long)val << 1) ^ ((long)val >> 63)); + size += sizeOfUnsignedVarInt64(((long) val << 1) ^ ((long) val >> 63)); } writeTag(out, field, WIRE_TYPE_DELIMITED); out.writeVarInt(size, false); @@ -698,7 +719,7 @@ assert switch(field.type()) { // The bytes in protobuf are in little-endian order -- backwards for Java. // Smallest byte first. writeTag(out, field, WIRE_TYPE_DELIMITED); - out.writeVarLong((long)list.size() * FIXED32_SIZE, false); + out.writeVarLong((long) list.size() * FIXED32_SIZE, false); for (int i = 0; i < listSize; i++) { final int val = list.get(i); out.writeInt(val, ByteOrder.LITTLE_ENDIAN); @@ -716,10 +737,11 @@ assert switch(field.type()) { * @param list the list of longs value to write */ public static void writeLongList(WritableSequentialData out, FieldDefinition field, List list) { - assert switch(field.type()) { - case INT64, UINT64, SINT64, FIXED64, SFIXED64 -> true; - default -> false; - } : "Not a long type " + field; + assert switch (field.type()) { + case INT64, UINT64, SINT64, FIXED64, SFIXED64 -> true; + default -> false; + } + : "Not a long type " + field; assert field.repeated() : "Use writeLong with non-repeated types"; // When not a oneOf don't write default value @@ -759,7 +781,7 @@ assert switch(field.type()) { // The bytes in protobuf are in little-endian order -- backwards for Java. // Smallest byte first. 
writeTag(out, field, WIRE_TYPE_DELIMITED); - out.writeVarLong((long)list.size() * FIXED64_SIZE, false); + out.writeVarLong((long) list.size() * FIXED64_SIZE, false); for (int i = 0; i < listSize; i++) { final long val = list.get(i); out.writeLong(val, ByteOrder.LITTLE_ENDIAN); @@ -846,7 +868,8 @@ public static void writeBooleanList(WritableSequentialData out, FieldDefinition * @param field the descriptor for the field we are writing * @param list the list of enums value to write */ - public static void writeEnumList(WritableSequentialData out, FieldDefinition field, List list) { + public static void writeEnumList( + WritableSequentialData out, FieldDefinition field, List list) { assert field.type() == FieldType.ENUM : "Not an enum type " + field; assert field.repeated() : "Use writeEnum with non-repeated types"; // When not a oneOf don't write default value @@ -873,7 +896,8 @@ public static void writeEnumList(WritableSequentialData out, FieldDefinition fie * @param list the list of strings value to write * @throws IOException If a I/O error occurs */ - public static void writeStringList(WritableSequentialData out, FieldDefinition field, List list) throws IOException { + public static void writeStringList(WritableSequentialData out, FieldDefinition field, List list) + throws IOException { assert field.type() == FieldType.STRING : "Not a string type " + field; assert field.repeated() : "Use writeString with non-repeated types"; // When not a oneOf don't write default value @@ -885,7 +909,7 @@ public static void writeStringList(WritableSequentialData out, FieldDefinition f final String value = list.get(i); writeTag(out, field, WIRE_TYPE_DELIMITED); out.writeVarInt(sizeOfStringNoTag(value), false); - Utf8Tools.encodeUtf8(value,out); + Utf8Tools.encodeUtf8(value, out); } } @@ -899,7 +923,8 @@ public static void writeStringList(WritableSequentialData out, FieldDefinition f * @throws IOException If a I/O error occurs * @param type of message */ - public static void writeMessageList(WritableSequentialData out, FieldDefinition field, List list, Codec codec) throws IOException { + public static void writeMessageList( + WritableSequentialData out, FieldDefinition field, List list, Codec codec) throws IOException { assert field.type() == FieldType.MESSAGE : "Not a message type " + field; assert field.repeated() : "Use writeMessage with non-repeated types"; // When not a oneOf don't write default value @@ -920,7 +945,9 @@ public static void writeMessageList(WritableSequentialData out, FieldDefinit * @param list the list of bytes objects value to write * @throws IOException If a I/O error occurs */ - public static void writeBytesList(WritableSequentialData out, FieldDefinition field, List list) throws IOException { + public static void writeBytesList( + WritableSequentialData out, FieldDefinition field, List list) + throws IOException { assert field.type() == FieldType.BYTES : "Not a message type " + field; assert field.repeated() : "Use writeBytes with non-repeated types"; // When not a oneOf don't write default value @@ -943,10 +970,7 @@ public static void writeBytesList(WritableSequentialData out, FieldDefinition fi * @param the type of the data output that extends WritableSequentialData */ public static void writeDelimited( - final T out, - final FieldDefinition field, - final int size, - final Consumer writer) { + final T out, final FieldDefinition field, final int size, final Consumer writer) { writeTag(out, field); out.writeVarInt(size, false); writer.accept(out); @@ -1082,7 +1106,7 @@ public 
static int sizeOfOptionalInteger(FieldDefinition field, @Nullable Integer public static int sizeOfOptionalLong(FieldDefinition field, @Nullable Long value) { if (value != null) { final long longValue = value; - final int size = sizeOfLong(field.type().optionalFieldDefinition, longValue); + final int size = sizeOfLong(field.type().optionalFieldDefinition, longValue); return sizeOfTag(field, WIRE_TYPE_DELIMITED) + sizeOfUnsignedVarInt32(size) + size; } return 0; @@ -1187,7 +1211,8 @@ public static int sizeOfInteger(FieldDefinition field, int value, boolean skipDe return switch (field.type()) { case INT32 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + sizeOfVarInt32(value); case UINT32 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + sizeOfUnsignedVarInt32(value); - case SINT32 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + sizeOfUnsignedVarInt64(((long)value << 1) ^ ((long)value >> 63)); + case SINT32 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + + sizeOfUnsignedVarInt64(((long) value << 1) ^ ((long) value >> 63)); case SFIXED32, FIXED32 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + FIXED32_SIZE; default -> throw unsupported(); }; @@ -1216,7 +1241,8 @@ public static int sizeOfLong(FieldDefinition field, long value, boolean skipDefa if (skipDefault && !field.oneOf() && value == 0) return 0; return switch (field.type()) { case INT64, UINT64 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + sizeOfUnsignedVarInt64(value); - case SINT64 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + sizeOfUnsignedVarInt64((value << 1) ^ (value >> 63)); + case SINT64 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + + sizeOfUnsignedVarInt64((value << 1) ^ (value >> 63)); case SFIXED64, FIXED64 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + FIXED64_SIZE; default -> throw unsupported(); }; @@ -1269,7 +1295,6 @@ public static int sizeOfBoolean(FieldDefinition field, boolean value, boolean sk return (value || field.oneOf() || !skipDefault) ? sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + 1 : 0; } - /** * Get number of bytes that would be needed to encode an enum field * diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcMethodDefinition.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcMethodDefinition.java index 2a44ab2d..ba398c49 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcMethodDefinition.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcMethodDefinition.java @@ -11,9 +11,7 @@ * @param The type of the response message */ public record RpcMethodDefinition( - String path, - Class requestType, - Class responseType) { + String path, Class requestType, Class responseType) { /** * Create a new builder for a {@link RpcMethodDefinition}. diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcServiceDefinition.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcServiceDefinition.java index e47b2cc5..fdbff74d 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcServiceDefinition.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcServiceDefinition.java @@ -15,7 +15,8 @@ public interface RpcServiceDefinition { * * @return The base path of the service */ - @NonNull String basePath(); + @NonNull + String basePath(); /** * The set of methods that are defined for this service. 
@@ -23,5 +24,6 @@ public interface RpcServiceDefinition { * @return The set of methods */ @SuppressWarnings("java:S1452") - @NonNull Set<RpcMethodDefinition<?, ?>> methods(); + @NonNull + Set<RpcMethodDefinition<?, ?>> methods(); } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Schema.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Schema.java index 357e96f8..9d18074e 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Schema.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Schema.java @@ -10,5 +10,4 @@ *
 *   <li>public static FieldDefinition getField(final int fieldNumber) {...}</li>
  • * */ -public interface Schema { -} +public interface Schema {} diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Utf8Tools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Utf8Tools.java index 1ba66f94..5461c0b2 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Utf8Tools.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Utf8Tools.java @@ -46,8 +46,7 @@ static int encodedLength(final CharSequence sequence) throws IOException { if (utf8Length < utf16Length) { // Necessary and sufficient condition for overflow because of maximum 3x expansion - throw new IllegalArgumentException( - "UTF-8 length does not fit in int: " + (utf8Length + (1L << 32))); + throw new IllegalArgumentException("UTF-8 length does not fit in int: " + (utf8Length + (1L << 32))); } return utf8Length; } @@ -97,9 +96,7 @@ static void encodeUtf8(final CharSequence in, final WritableSequentialData out) // Benchmarks show put performs better than putShort here (for HotSpot). out.writeByte3( - (byte) (0xE0 | (c >>> 12)), - (byte) (0x80 | (0x3F & (c >>> 6))), - (byte) (0x80 | (0x3F & c))); + (byte) (0xE0 | (c >>> 12)), (byte) (0x80 | (0x3F & (c >>> 6))), (byte) (0x80 | (0x3F & c))); } else { // Four bytes (1111 xxxx 10xx xxxx 10xx xxxx 10xx xxxx) // Minimum code point represented by a surrogate pair is 0x10000, 17 bits, four UTF-8 bytes diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipeline.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipeline.java index 7a01d63d..54e8605f 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipeline.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipeline.java @@ -12,7 +12,7 @@ public interface Pipeline extends Flow.Subscriber { /** * Called when an END_STREAM frame is received from the client. */ - default void clientEndStreamReceived() { } + default void clientEndStreamReceived() {} /** * {@inheritDoc} diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipelines.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipelines.java index e3564686..29d35188 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipelines.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipelines.java @@ -386,8 +386,7 @@ public interface ServerStreamingMethod { * @param The type of the request message. * @param The type of the response message. */ - public interface BidiStreamingMethod - extends ExceptionalFunction, Pipeline> {} + public interface BidiStreamingMethod extends ExceptionalFunction, Pipeline> {} /** * A convenient base class for the different builders. 
All builders have to hold state for request and @@ -523,7 +522,7 @@ public void onNext(@NonNull final Bytes message) { final var replyBytes = responseMapper.apply(reply); replies.onNext(replyBytes); onComplete(); - } catch (RuntimeException e) { + } catch (RuntimeException e) { replies.onError(e); throw e; } catch (Exception e) { @@ -551,32 +550,28 @@ private static final class BidiStreamingBuilderImpl extends PipelineBuilde @Override @NonNull - public BidiStreamingBuilderImpl mapRequest( - @NonNull final ExceptionalFunction mapper) { + public BidiStreamingBuilderImpl mapRequest(@NonNull final ExceptionalFunction mapper) { this.requestMapper = mapper; return this; } @Override @NonNull - public BidiStreamingBuilderImpl method( - @NonNull final BidiStreamingMethod method) { + public BidiStreamingBuilderImpl method(@NonNull final BidiStreamingMethod method) { this.method = method; return this; } @Override @NonNull - public BidiStreamingBuilderImpl mapResponse( - @NonNull final ExceptionalFunction mapper) { + public BidiStreamingBuilderImpl mapResponse(@NonNull final ExceptionalFunction mapper) { this.responseMapper = mapper; return this; } @Override @NonNull - public BidiStreamingBuilderImpl respondTo( - @NonNull final Pipeline replies) { + public BidiStreamingBuilderImpl respondTo(@NonNull final Pipeline replies) { this.replies = replies; return this; } @@ -614,7 +609,7 @@ public void onNext(@NonNull final Bytes message) { try { final var request = requestMapper.apply(message); incoming.onNext(request); - } catch (RuntimeException e) { + } catch (RuntimeException e) { replies.onError(e); throw e; } catch (Exception e) { @@ -649,32 +644,28 @@ private static final class ClientStreamingBuilderImpl extends PipelineBuil @Override @NonNull - public ClientStreamingBuilderImpl mapRequest( - @NonNull final ExceptionalFunction mapper) { + public ClientStreamingBuilderImpl mapRequest(@NonNull final ExceptionalFunction mapper) { this.requestMapper = mapper; return this; } @Override @NonNull - public ClientStreamingBuilderImpl method( - @NonNull final ClientStreamingMethod method) { + public ClientStreamingBuilderImpl method(@NonNull final ClientStreamingMethod method) { this.method = method; return this; } @Override @NonNull - public ClientStreamingBuilderImpl mapResponse( - @NonNull final ExceptionalFunction mapper) { + public ClientStreamingBuilderImpl mapResponse(@NonNull final ExceptionalFunction mapper) { this.responseMapper = mapper; return this; } @Override @NonNull - public ClientStreamingBuilderImpl respondTo( - @NonNull final Pipeline replies) { + public ClientStreamingBuilderImpl respondTo(@NonNull final Pipeline replies) { this.replies = replies; return this; } @@ -687,8 +678,7 @@ public Pipeline build() { throw new IllegalStateException("The method must be specified."); } replies.onSubscribe(this); - final var responseConverter = - new MapSubscriber(replies, item -> responseMapper.apply(item)); + final var responseConverter = new MapSubscriber(replies, item -> responseMapper.apply(item)); try { incoming = method.apply(responseConverter); @@ -708,7 +698,7 @@ public void onNext(@NonNull final Bytes message) { try { final var request = requestMapper.apply(message); incoming.onNext(request); - } catch (RuntimeException e) { + } catch (RuntimeException e) { replies.onError(e); throw e; } catch (Exception e) { @@ -742,32 +732,28 @@ private static final class ServerStreamingBuilderImpl extends PipelineBuil @Override @NonNull - public ServerStreamingBuilderImpl mapRequest( - @NonNull final 
ExceptionalFunction mapper) { + public ServerStreamingBuilderImpl mapRequest(@NonNull final ExceptionalFunction mapper) { this.requestMapper = mapper; return this; } @Override @NonNull - public ServerStreamingBuilderImpl method( - @NonNull final ServerStreamingMethod method) { + public ServerStreamingBuilderImpl method(@NonNull final ServerStreamingMethod method) { this.method = method; return this; } @Override @NonNull - public ServerStreamingBuilderImpl mapResponse( - @NonNull final ExceptionalFunction mapper) { + public ServerStreamingBuilderImpl mapResponse(@NonNull final ExceptionalFunction mapper) { this.responseMapper = mapper; return this; } @Override @NonNull - public ServerStreamingBuilderImpl respondTo( - @NonNull final Pipeline replies) { + public ServerStreamingBuilderImpl respondTo(@NonNull final Pipeline replies) { this.replies = replies; return this; } @@ -796,7 +782,7 @@ public void onNext(@NonNull final Bytes message) { try { final var request = requestMapper.apply(message); method.apply(request, responseConverter); - } catch (RuntimeException e) { + } catch (RuntimeException e) { replies.onError(e); throw e; } catch (Exception e) { @@ -848,7 +834,7 @@ public void onNext(T item) { try { final var r = mapper.apply(item); next.onNext(r); - } catch (RuntimeException e) { + } catch (RuntimeException e) { next.onError(e); throw e; } catch (Throwable t) { diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/ServiceInterface.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/ServiceInterface.java index 6d9cc0df..94660c4e 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/ServiceInterface.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/ServiceInterface.java @@ -121,8 +121,6 @@ interface RequestOptions { */ @NonNull Pipeline open( - @NonNull Method method, - @NonNull RequestOptions opts, - @NonNull Pipeline responses) + @NonNull Method method, @NonNull RequestOptions opts, @NonNull Pipeline responses) throws GrpcException; } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/ReadableSequentialData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/ReadableSequentialData.java index 751274b1..da4353ab 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/ReadableSequentialData.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/ReadableSequentialData.java @@ -6,7 +6,6 @@ import com.hedera.pbj.runtime.io.buffer.RandomAccessData; import com.hedera.pbj.runtime.io.stream.EOFException; import edu.umd.cs.findbugs.annotations.NonNull; - import java.io.InputStream; import java.io.UncheckedIOException; import java.nio.BufferUnderflowException; @@ -352,14 +351,14 @@ default long readLong() throws BufferUnderflowException, UncheckedIOException { final byte b6 = readByte(); final byte b7 = readByte(); final byte b8 = readByte(); - return (((long)b1 << 56) + - ((long)(b2 & 255) << 48) + - ((long)(b3 & 255) << 40) + - ((long)(b4 & 255) << 32) + - ((long)(b5 & 255) << 24) + - ((b6 & 255) << 16) + - ((b7 & 255) << 8) + - (b8 & 255)); + return (((long) b1 << 56) + + ((long) (b2 & 255) << 48) + + ((long) (b3 & 255) << 40) + + ((long) (b4 & 255) << 32) + + ((long) (b5 & 255) << 24) + + ((b6 & 255) << 16) + + ((b7 & 255) << 8) + + (b8 & 255)); } /** @@ -386,14 +385,14 @@ default long readLong(@NonNull final ByteOrder byteOrder) throws BufferUnderflow final byte b3 = readByte(); final byte b2 = readByte(); final byte b1 = readByte(); 
- return (((long) b1 << 56) + - ((long) (b2 & 255) << 48) + - ((long) (b3 & 255) << 40) + - ((long) (b4 & 255) << 32) + - ((long) (b5 & 255) << 24) + - ((b6 & 255) << 16) + - ((b7 & 255) << 8) + - (b8 & 255)); + return (((long) b1 << 56) + + ((long) (b2 & 255) << 48) + + ((long) (b3 & 255) << 40) + + ((long) (b4 & 255) << 32) + + ((long) (b5 & 255) << 24) + + ((b6 & 255) << 16) + + ((b7 & 255) << 8) + + (b8 & 255)); } else { return readLong(); } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/UnsafeUtils.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/UnsafeUtils.java index 48f554a5..aa7a4bc2 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/UnsafeUtils.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/UnsafeUtils.java @@ -46,8 +46,7 @@ public class UnsafeUtils { } } - private UnsafeUtils() { - } + private UnsafeUtils() {} /** * Get byte array element at a given offset. Identical to arr[offset]. @@ -150,8 +149,8 @@ public static long getLong(final byte[] arr, final int offset) { */ public static void getHeapBufferToArray( final ByteBuffer buffer, final long offset, final byte[] dst, final int dstOffset, final int length) { - UNSAFE.copyMemory(buffer.array(), BYTE_ARRAY_BASE_OFFSET + offset, - dst, BYTE_ARRAY_BASE_OFFSET + dstOffset, length); + UNSAFE.copyMemory( + buffer.array(), BYTE_ARRAY_BASE_OFFSET + offset, dst, BYTE_ARRAY_BASE_OFFSET + dstOffset, length); } /** @@ -161,8 +160,7 @@ public static void getHeapBufferToArray( public static void getDirectBufferToArray( final ByteBuffer buffer, final long offset, final byte[] dst, final int dstOffset, final int length) { final long address = UNSAFE.getLong(buffer, DIRECT_BYTEBUFFER_ADDRESS_OFFSET); - UNSAFE.copyMemory(null, address + offset, - dst, BYTE_ARRAY_BASE_OFFSET + dstOffset, length); + UNSAFE.copyMemory(null, address + offset, dst, BYTE_ARRAY_BASE_OFFSET + dstOffset, length); } /** diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/WritableSequentialData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/WritableSequentialData.java index 311b73b9..f5ef0fe7 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/WritableSequentialData.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/WritableSequentialData.java @@ -68,7 +68,7 @@ default void writeByte4(byte b1, byte b2, byte b3, byte b4) { * @throws UncheckedIOException if an I/O error occurs */ default void writeUnsignedByte(final int b) throws BufferOverflowException, UncheckedIOException { - writeByte((byte)b); + writeByte((byte) b); } /** @@ -414,8 +414,7 @@ default void writeVarInt(final int value, final boolean zigZag) * @throws BufferOverflowException If there are fewer than eight bytes remaining before {@link #limit()} * @throws UncheckedIOException if an I/O error occurs */ - default void writeVarLong(long value, final boolean zigZag) - throws BufferOverflowException, UncheckedIOException { + default void writeVarLong(long value, final boolean zigZag) throws BufferOverflowException, UncheckedIOException { if (zigZag) { value = (value << 1) ^ (value >> 63); } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedData.java index af9c8fb3..43929d5b 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedData.java +++ 
b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedData.java @@ -8,8 +8,8 @@ import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.io.InputStream; -import java.io.UncheckedIOException; import java.io.OutputStream; +import java.io.UncheckedIOException; import java.nio.BufferOverflowException; import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/Bytes.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/Bytes.java index 8a5ccbeb..b6bf933f 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/Bytes.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/Bytes.java @@ -1,6 +1,8 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.runtime.io.buffer; +import static java.util.Objects.requireNonNull; + import com.hedera.pbj.runtime.io.DataEncodingException; import com.hedera.pbj.runtime.io.ReadableSequentialData; import com.hedera.pbj.runtime.io.UnsafeUtils; @@ -15,16 +17,13 @@ import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; import java.security.Signature; import java.security.SignatureException; import java.util.Arrays; import java.util.Base64; -import java.util.HexFormat; - -import java.security.MessageDigest; import java.util.Comparator; - -import static java.util.Objects.requireNonNull; +import java.util.HexFormat; /** * An immutable representation of a byte array. This class is designed to be efficient and usable across threads. @@ -36,17 +35,17 @@ public final class Bytes implements RandomAccessData, Comparable { public static final Bytes EMPTY = new Bytes(new byte[0]); /** Sorts {@link Bytes} according to their length, shorter first. */ - public static final Comparator SORT_BY_LENGTH = (Bytes o1, Bytes o2) -> - Comparator.comparingLong(Bytes::length).compare(o1, o2); + public static final Comparator SORT_BY_LENGTH = + (Bytes o1, Bytes o2) -> Comparator.comparingLong(Bytes::length).compare(o1, o2); /** Sorts {@link Bytes} according to their byte values, lower valued bytes first. - * Bytes are compared on a signed basis. - */ + * Bytes are compared on a signed basis. + */ public static final Comparator SORT_BY_SIGNED_VALUE = valueSorter(Byte::compare); /** Sorts {@link Bytes} according to their byte values, lower valued bytes first. 
- * Bytes are compared on an unsigned basis - */ + * Bytes are compared on an unsigned basis + */ public static final Comparator SORT_BY_UNSIGNED_VALUE = valueSorter(Byte::compareUnsigned); /** byte[] used as backing buffer */ @@ -88,8 +87,8 @@ private Bytes(@NonNull final byte[] data, final int offset, final int length) { this.length = length; if (offset < 0 || offset > data.length) { - throw new IndexOutOfBoundsException("Offset " + offset + " is out of bounds for buffer of length " - + data.length); + throw new IndexOutOfBoundsException( + "Offset " + offset + " is out of bounds for buffer of length " + data.length); } if (length < 0) { @@ -97,8 +96,8 @@ private Bytes(@NonNull final byte[] data, final int offset, final int length) { } if (offset + length > data.length) { - throw new IllegalArgumentException("Length " + length + " is too large buffer of length " - + data.length + " starting at offset " + offset); + throw new IllegalArgumentException("Length " + length + " is too large buffer of length " + data.length + + " starting at offset " + offset); } } @@ -376,6 +375,7 @@ public ReadableSequentialData toReadableSequentialData() { public InputStream toInputStream() { return new InputStream() { private long pos = 0; + @Override public int read() throws IOException { if (length - pos <= 0) { @@ -431,7 +431,7 @@ public String toBase64() { return Base64.getEncoder().encodeToString(buffer); } else { byte[] bytes = new byte[length]; - getBytes(0,bytes); + getBytes(0, bytes); return Base64.getEncoder().encodeToString(bytes); } } @@ -442,7 +442,7 @@ public String toBase64() { * @return Hex encoded string of the bytes in this object. */ public String toHex() { - return HexFormat.of().formatHex(buffer,start,start+length); + return HexFormat.of().formatHex(buffer, start, start + length); } /** @@ -585,7 +585,8 @@ public boolean contains(final long offset, @NonNull final byte[] bytes) { return false; } validateOffset(offset); - return Arrays.equals(buffer, Math.toIntExact(start + offset), Math.toIntExact(start + offset + len), bytes, 0, len); + return Arrays.equals( + buffer, Math.toIntExact(start + offset), Math.toIntExact(start + offset + len), bytes, 0, len); } /** {@inheritDoc} */ @@ -639,8 +640,7 @@ private void validateOffsetLength(final long suppliedOffset, final long supplied if (suppliedOffset + suppliedLength > length) { throw new IndexOutOfBoundsException( "The offset(%d) and length(%d) provided are out of bounds for this Bytes object, which has a length of %d" - .formatted(suppliedOffset, suppliedLength, length) - ); + .formatted(suppliedOffset, suppliedLength, length)); } } @@ -655,8 +655,8 @@ private int calculateOffset(final long suppliedOffset) { } /** Sorts {@link Bytes} according to their byte values, lower valued bytes first. - * Bytes are compared using the passed in Byte Comparator - */ + * Bytes are compared using the passed in Byte Comparator + */ private static Comparator valueSorter(@NonNull final Comparator byteComparator) { return (Bytes o1, Bytes o2) -> { final var val = Math.min(o1.length(), o2.length()); @@ -689,9 +689,9 @@ public Bytes append(@NonNull final Bytes bytes) { // The length field of Bytes is int. The length() returns always an int, // so safe to cast. 
long length = this.length(); - byte[] newBytes = new byte[(int)(length + (int)bytes.length())]; + byte[] newBytes = new byte[(int) (length + (int) bytes.length())]; this.getBytes(0, newBytes, 0, (int) length); - bytes.getBytes(0, newBytes, (int) length, (int)bytes.length()); + bytes.getBytes(0, newBytes, (int) length, (int) bytes.length()); return Bytes.wrap(newBytes); } @@ -706,10 +706,10 @@ public Bytes append(@NonNull final Bytes bytes) { public Bytes append(@NonNull final RandomAccessData data) { // The length field of Bytes is int. The length(0 returns always an int, // so safe to cast. - byte[] newBytes = new byte[(int)(this.length() + (int)data.length())]; + byte[] newBytes = new byte[(int) (this.length() + (int) data.length())]; int length1 = (int) this.length(); this.getBytes(0, newBytes, 0, length1); - data.getBytes(0, newBytes, length1, (int)data.length()); + data.getBytes(0, newBytes, length1, (int) data.length()); return Bytes.wrap(newBytes); } @@ -751,5 +751,4 @@ private long getVar(int offset, final boolean zigZag) { } throw new DataEncodingException("Malformed var int"); } - } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessData.java index ae95948f..ae127ed2 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessData.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessData.java @@ -4,9 +4,8 @@ import com.hedera.pbj.runtime.io.DataEncodingException; import com.hedera.pbj.runtime.io.SequentialData; import edu.umd.cs.findbugs.annotations.NonNull; - -import java.nio.BufferOverflowException; import java.io.OutputStream; +import java.nio.BufferOverflowException; import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; import java.nio.ByteOrder; @@ -303,14 +302,14 @@ default long getLong(final long offset) { final byte b6 = getByte(offset + 5); final byte b7 = getByte(offset + 6); final byte b8 = getByte(offset + 7); - return (((long)b1 << 56) + - ((long)(b2 & 255) << 48) + - ((long)(b3 & 255) << 40) + - ((long)(b4 & 255) << 32) + - ((long)(b5 & 255) << 24) + - ((b6 & 255) << 16) + - ((b7 & 255) << 8) + - (b8 & 255)); + return (((long) b1 << 56) + + ((long) (b2 & 255) << 48) + + ((long) (b3 & 255) << 40) + + ((long) (b4 & 255) << 32) + + ((long) (b5 & 255) << 24) + + ((b6 & 255) << 16) + + ((b7 & 255) << 8) + + (b8 & 255)); } /** @@ -339,14 +338,14 @@ default long getLong(final long offset, @NonNull final ByteOrder byteOrder) { final byte b3 = getByte(offset + 5); final byte b2 = getByte(offset + 6); final byte b1 = getByte(offset + 7); - return (((long) b1 << 56) + - ((long) (b2 & 255) << 48) + - ((long) (b3 & 255) << 40) + - ((long) (b4 & 255) << 32) + - ((long) (b5 & 255) << 24) + - ((b6 & 255) << 16) + - ((b7 & 255) << 8) + - (b8 & 255)); + return (((long) b1 << 56) + + ((long) (b2 & 255) << 48) + + ((long) (b3 & 255) << 40) + + ((long) (b4 & 255) << 32) + + ((long) (b5 & 255) << 24) + + ((b6 & 255) << 16) + + ((b7 & 255) << 8) + + (b8 & 255)); } else { return getLong(offset); } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessSequenceAdapter.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessSequenceAdapter.java index 179690c3..a8e71429 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessSequenceAdapter.java +++ 
b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessSequenceAdapter.java @@ -51,7 +51,8 @@ final class RandomAccessSequenceAdapter implements ReadableSequentialData { this.limit = this.capacity; if (this.start > delegate.length()) { - throw new IllegalArgumentException("Start " + start + " is greater than the delegate length " + delegate.length()); + throw new IllegalArgumentException( + "Start " + start + " is greater than the delegate length " + delegate.length()); } } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/EOFException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/EOFException.java index 84bb308b..4341f04e 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/EOFException.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/EOFException.java @@ -4,6 +4,6 @@ import java.nio.BufferUnderflowException; /** This class is used as an exception to signal that the end of stream is reached when reading. */ -public class EOFException extends BufferUnderflowException { +public class EOFException extends BufferUnderflowException { private static final long serialVersionUID = 1799983599892333203L; } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingData.java index 30c37070..a36615dd 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingData.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingData.java @@ -1,6 +1,8 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.runtime.io.stream; +import static java.util.Objects.requireNonNull; + import com.hedera.pbj.runtime.io.DataEncodingException; import com.hedera.pbj.runtime.io.ReadableSequentialData; import com.hedera.pbj.runtime.io.buffer.BufferedData; @@ -16,8 +18,6 @@ import java.nio.file.Path; import java.nio.file.StandardOpenOption; -import static java.util.Objects.requireNonNull; - /** *
    A {@code ReadableSequentialData} backed by an input stream. If the instance is closed, * the underlying {@link InputStream} is closed too. @@ -239,7 +239,6 @@ public long readBytes(@NonNull final BufferedData dst) { return bytesRead; } - @Override public long readVarLong(final boolean zigZag) { if (!hasRemaining()) { @@ -268,5 +267,4 @@ public long readVarLong(final boolean zigZag) { throw new UncheckedIOException(e); } } - } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/WritableStreamingData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/WritableStreamingData.java index 3f4824df..13dc2925 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/WritableStreamingData.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/WritableStreamingData.java @@ -11,7 +11,6 @@ import java.io.OutputStream; import java.io.UncheckedIOException; import java.nio.BufferOverflowException; -import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; import java.util.Objects; @@ -121,7 +120,7 @@ public void skip(final long count) { // Each byte skipped is a "zero" byte written to the output stream. To make this faster, we will support // writing in chunks instead of a single byte at a time. We will keep writing chunks until we're done. final byte[] zeros = new byte[1024]; - for (int i = 0; i < count;) { + for (int i = 0; i < count; ) { final var toWrite = (int) Math.min(zeros.length, count - i); out.write(zeros, 0, toWrite); i += toWrite; diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/CharBufferToWritableSequentialData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/CharBufferToWritableSequentialData.java index b1749d24..758e6421 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/CharBufferToWritableSequentialData.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/CharBufferToWritableSequentialData.java @@ -4,7 +4,6 @@ import com.hedera.pbj.runtime.io.ReadableSequentialData; import com.hedera.pbj.runtime.io.WritableSequentialData; import edu.umd.cs.findbugs.annotations.NonNull; - import java.io.UncheckedIOException; import java.nio.BufferUnderflowException; import java.nio.CharBuffer; @@ -70,6 +69,4 @@ public void writeUTF8(@NonNull String value) { public byte readByte() { throw new UnsupportedOperationException(); } - - } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/UncheckedThrowingFunction.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/UncheckedThrowingFunction.java index 1c5365c8..481528de 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/UncheckedThrowingFunction.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/UncheckedThrowingFunction.java @@ -10,9 +10,7 @@ * @param function argument type * @param function return type */ -public final record UncheckedThrowingFunction( - ThrowingFunction function -) implements Function { +public final record UncheckedThrowingFunction(ThrowingFunction function) implements Function { /** A function that can throw checked exceptions. 
*/ public static interface ThrowingFunction { diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/CodecWrapper.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/CodecWrapper.java index 45692e7a..658eb92e 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/CodecWrapper.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/CodecWrapper.java @@ -1,3 +1,4 @@ +// SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.runtime; import com.hedera.pbj.runtime.io.ReadableSequentialData; @@ -23,8 +24,7 @@ class CodecWrapper implements Codec { @NonNull @Override - public T parse(@NonNull ReadableSequentialData input, boolean strictMode, int maxDepth) - throws ParseException { + public T parse(@NonNull ReadableSequentialData input, boolean strictMode, int maxDepth) throws ParseException { throw new UnsupportedOperationException(); } diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoParserToolsTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoParserToolsTest.java index b05e6c0a..2d4c9fd1 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoParserToolsTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoParserToolsTest.java @@ -19,7 +19,6 @@ import static com.hedera.pbj.runtime.ProtoWriterTools.writeString; import static com.hedera.pbj.runtime.ProtoWriterToolsTest.createFieldDefinition; import static com.hedera.pbj.runtime.ProtoWriterToolsTest.randomVarSizeString; -import static java.lang.Integer.MAX_VALUE; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -27,13 +26,6 @@ import com.hedera.pbj.runtime.io.buffer.Bytes; import com.hedera.pbj.runtime.io.stream.ReadableStreamingData; import com.hedera.pbj.runtime.test.UncheckedThrowingFunction; -import net.bytebuddy.utility.RandomString; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.EnumSource; -import org.junit.jupiter.params.provider.ValueSource; -import test.proto.Apple; - import java.io.ByteArrayInputStream; import java.io.IOException; import java.nio.BufferUnderflowException; @@ -42,6 +34,12 @@ import java.util.function.Function; import java.util.function.Supplier; import java.util.random.RandomGenerator; +import net.bytebuddy.utility.RandomString; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; +import org.junit.jupiter.params.provider.ValueSource; +import test.proto.Apple; class ProtoParserToolsTest { @@ -49,7 +47,8 @@ class ProtoParserToolsTest { @Test void testReadInt32() { - testRead(rng::nextInt, + testRead( + rng::nextInt, (d, v) -> d.writeVarInt(v, false), ProtoParserTools::readInt32, // in this case the size may up to 10 bytes in case of negative numbers, @@ -59,7 +58,8 @@ void testReadInt32() { @Test void testReadInt64() { - testRead(rng::nextLong, + testRead( + rng::nextLong, (d, v) -> d.writeVarLong(v, false), ProtoParserTools::readInt64, // in this case the size may be 10 bytes, because we don't use zigzag encoding @@ -68,8 +68,8 @@ void testReadInt64() { @Test void testReadUint32() { - testRead(() -> - rng.nextInt(0, Integer.MAX_VALUE), + testRead( + () -> rng.nextInt(0, Integer.MAX_VALUE), (d, v) -> d.writeVarInt(v, false), ProtoParserTools::readUint32, // the size may vary from 1 to 5 bytes @@ -78,7 +78,8 @@ void 
testReadUint32() { @Test void testReadUint64() { - testRead(rng::nextLong, + testRead( + rng::nextLong, (d, v) -> d.writeVarLong(v, false), ProtoParserTools::readUint64, // the size may vary from 1 to 10 bytes @@ -88,13 +89,17 @@ void testReadUint64() { @ParameterizedTest @ValueSource(ints = {0, 1}) void testReadBool(final int value) { - testRead(() -> value != 0, (d, v) -> d.writeVarInt(value, false), input -> { - try { - return ProtoParserTools.readBool(input); - } catch (IOException e) { - throw new RuntimeException(e); - } - }, 1); + testRead( + () -> value != 0, + (d, v) -> d.writeVarInt(value, false), + input -> { + try { + return ProtoParserTools.readBool(input); + } catch (IOException e) { + throw new RuntimeException(e); + } + }, + 1); } @ParameterizedTest @@ -103,26 +108,20 @@ void testReadEnum(int value) { testRead(() -> value, (d, v) -> d.writeVarInt(value, false), ProtoParserTools::readEnum, 1); } - @Test void testReadSignedInt32() { - testRead(rng::nextInt, - (d, v) -> d.writeVarInt(v, true), - ProtoParserTools::readSignedInt32, - Integer.BYTES + 1); + testRead(rng::nextInt, (d, v) -> d.writeVarInt(v, true), ProtoParserTools::readSignedInt32, Integer.BYTES + 1); } @Test void testReadSignedInt64() { - testRead(rng::nextLong, - (d, v) -> d.writeVarLong(v, true), - ProtoParserTools::readSignedInt64, - Long.BYTES + 2); + testRead(rng::nextLong, (d, v) -> d.writeVarLong(v, true), ProtoParserTools::readSignedInt64, Long.BYTES + 2); } @Test void testReadSignedFixedInt32() { - testRead(rng::nextInt, + testRead( + rng::nextInt, (d, v) -> d.writeInt(v, ByteOrder.LITTLE_ENDIAN), ProtoParserTools::readSignedFixed32, Integer.BYTES); @@ -130,7 +129,8 @@ void testReadSignedFixedInt32() { @Test void testReadFixedInt32() { - testRead(rng::nextInt, + testRead( + rng::nextInt, (d, v) -> d.writeInt(v, ByteOrder.LITTLE_ENDIAN), ProtoParserTools::readFixed32, Integer.BYTES); @@ -138,7 +138,8 @@ void testReadFixedInt32() { @Test void testReadSginedFixed64() { - testRead(rng::nextLong, + testRead( + rng::nextLong, (d, v) -> d.writeLong(v, ByteOrder.LITTLE_ENDIAN), ProtoParserTools::readSignedFixed64, Long.BYTES); @@ -146,7 +147,8 @@ void testReadSginedFixed64() { @Test void testReadFixed64() { - testRead(rng::nextLong, + testRead( + rng::nextLong, (d, v) -> d.writeLong(v, ByteOrder.LITTLE_ENDIAN), ProtoParserTools::readFixed64, Long.BYTES); @@ -154,7 +156,8 @@ void testReadFixed64() { @Test void testReadFloat() { - testRead(rng::nextFloat, + testRead( + rng::nextFloat, (d, v) -> d.writeFloat(v, ByteOrder.LITTLE_ENDIAN), ProtoParserTools::readFloat, Long.BYTES); @@ -162,7 +165,8 @@ void testReadFloat() { @Test void testReadDouble() { - testRead(rng::nextDouble, + testRead( + rng::nextDouble, (d, v) -> d.writeDouble(v, ByteOrder.LITTLE_ENDIAN), ProtoParserTools::readDouble, Long.BYTES); @@ -173,7 +177,8 @@ void testReadString() { final int length = rng.nextInt(0, 100); final RandomString randomString = new RandomString(length); - testRead(randomString::nextString, + testRead( + randomString::nextString, (d, v) -> { d.writeVarInt(length, false); // write the size first d.writeUTF8(v); @@ -208,7 +213,6 @@ void testReadString_incomplete() throws IOException { System.arraycopy(bytes, 0, incompleteCopy, 0, bytes.length - 1); final ReadableStreamingData streamingData = new ReadableStreamingData(new ByteArrayInputStream(incompleteCopy)); assertThrows(BufferUnderflowException.class, () -> ProtoParserTools.readString(streamingData)); - } @Test @@ -218,7 +222,8 @@ void testReadBytes() { 
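The signed-integer tests above write with zigzag enabled (writeVarInt/writeVarLong with true) and size their buffers as Integer.BYTES + 1 and Long.BYTES + 2 for the varint worst case, while the earlier int64 comment notes that a plain (non-zigzag) varint of a negative value always takes 10 bytes. A standalone sketch of the standard protobuf zigzag mapping (plain JDK, not PBJ's internal encoder) shows why zigzag keeps small-magnitude negatives short while plain varints of negatives do not:

public final class ZigZagSketch {
    // Interleaves signed values onto unsigned ones: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...
    static long encode(final long n) {
        return (n << 1) ^ (n >> 63);
    }

    static long decode(final long z) {
        return (z >>> 1) ^ -(z & 1);
    }

    // Unsigned varint length: 7 payload bits per byte, so a negative long (top bit set)
    // always costs 10 bytes unless it is zigzag-mapped first.
    static int unsignedVarLongSize(long value) {
        int bytes = 1;
        while ((value & ~0x7FL) != 0) {
            value >>>= 7;
            bytes++;
        }
        return bytes;
    }

    public static void main(String[] args) {
        System.out.println(unsignedVarLongSize(-1L));          // 10 without zigzag
        System.out.println(unsignedVarLongSize(encode(-1L)));  // 1 with zigzag
        System.out.println(decode(encode(-123456789L)));       // -123456789 round-trips
    }
}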
rng.nextBytes(byteArray); final Bytes bytes = Bytes.wrap(byteArray); - testRead(() -> bytes, + testRead( + () -> bytes, (d, v) -> { d.writeVarInt(length, false); // write the size first d.writeBytes(v); @@ -251,13 +256,14 @@ void testReadBytes_incomplete() throws IOException { final byte[] bytes = data.toInputStream().readAllBytes(); final byte[] incompleteCopy = new byte[bytes.length - 1]; System.arraycopy(bytes, 0, incompleteCopy, 0, bytes.length - 1); - assertThrows(BufferUnderflowException.class, + assertThrows( + BufferUnderflowException.class, () -> ProtoParserTools.readString(new ReadableStreamingData(new ByteArrayInputStream(incompleteCopy)))); - assertThrows(BufferUnderflowException.class, + assertThrows( + BufferUnderflowException.class, () -> ProtoParserTools.readBytes(new ReadableStreamingData(new ByteArrayInputStream(incompleteCopy)))); } - @Test void testReadNextFieldNumber() throws IOException { BufferedData bufferedData = BufferedData.allocate(100); @@ -265,14 +271,16 @@ void testReadNextFieldNumber() throws IOException { final String appleStr = randomVarSizeString(); final Apple apple = Apple.newBuilder().setVariety(appleStr).build(); - writeMessage(bufferedData, definition, apple, + writeMessage( + bufferedData, + definition, + apple, new CodecWrapper<>((data, out) -> out.writeBytes(data.toByteArray()), Apple::getSerializedSize)); bufferedData.flip(); assertEquals(definition.number(), readNextFieldNumber(bufferedData)); } - @Test void testSkipField() throws IOException { final String valToRead = randomVarSizeString(); @@ -308,7 +316,8 @@ void testSkipField_maxSize() throws IOException { data.writeVarInt(maxSize + 1, false); // write the size first data.writeBytes(byteArray); final ReadableStreamingData streamingData = new ReadableStreamingData(data.toInputStream()); - assertThrows(ParseException.class, () -> ProtoParserTools.skipField(streamingData, WIRE_TYPE_DELIMITED, maxSize)); + assertThrows( + ParseException.class, () -> ProtoParserTools.skipField(streamingData, WIRE_TYPE_DELIMITED, maxSize)); } @ParameterizedTest @@ -322,15 +331,15 @@ private static void skipTag(BufferedData data) { data.readVarInt(false); } - private static void testRead(final Supplier valueSupplier, - final BiConsumer valueWriter, - final Function reader, - final int size) { + private static void testRead( + final Supplier valueSupplier, + final BiConsumer valueWriter, + final Function reader, + final int size) { final T value = valueSupplier.get(); final BufferedData data = BufferedData.allocate(size); valueWriter.accept(data, value); data.flip(); assertEquals(value, reader.apply(data)); } - } diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoWriterToolsTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoWriterToolsTest.java index 45770f84..0a372fe9 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoWriterToolsTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoWriterToolsTest.java @@ -69,15 +69,6 @@ import com.hedera.pbj.runtime.io.buffer.Bytes; import com.hedera.pbj.runtime.io.buffer.RandomAccessData; import com.hedera.pbj.runtime.test.Sneaky; -import net.bytebuddy.utility.RandomString; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.EnumSource; -import org.junit.jupiter.params.provider.MethodSource; -import 
test.proto.Apple; - import java.io.IOException; import java.nio.BufferOverflowException; import java.nio.ByteOrder; @@ -88,6 +79,14 @@ import java.util.random.RandomGenerator; import java.util.stream.Collectors; import java.util.stream.Stream; +import net.bytebuddy.utility.RandomString; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.EnumSource; +import org.junit.jupiter.params.provider.MethodSource; +import test.proto.Apple; class ProtoWriterToolsTest { @@ -123,7 +122,6 @@ void testWireType() { assertEquals(WIRE_TYPE_VARINT_OR_ZIGZAG, wireType(createFieldDefinition(UINT32))); assertEquals(WIRE_TYPE_VARINT_OR_ZIGZAG, wireType(createFieldDefinition(UINT64))); - assertEquals(WIRE_TYPE_FIXED_32_BIT, wireType(createFieldDefinition(FIXED32))); assertEquals(WIRE_TYPE_FIXED_32_BIT, wireType(createFieldDefinition(SFIXED32))); assertEquals(WIRE_TYPE_FIXED_64_BIT, wireType(createFieldDefinition(FIXED64))); @@ -150,7 +148,9 @@ void testWriteTagSpecialWireType() { FieldDefinition definition = createFieldDefinition(DOUBLE); writeTag(bufferedData, definition, WIRE_TYPE_FIXED_64_BIT); bufferedData.flip(); - assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(), bufferedData.readVarInt(false)); + assertEquals( + (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(), + bufferedData.readVarInt(false)); } @Test @@ -165,7 +165,9 @@ void testWriteInteger_zero() { private static int nextNonZeroRandomInt() { int ret; - do { ret = RNG.nextInt(); } while (ret == 0); + do { + ret = RNG.nextInt(); + } while (ret == 0); return ret; } @@ -200,7 +202,9 @@ void testWriteInteger_sint32() { } @ParameterizedTest - @EnumSource(value = FieldType.class, names = {"SFIXED32", "FIXED32"}) + @EnumSource( + value = FieldType.class, + names = {"SFIXED32", "FIXED32"}) void testWriteInteger_fixed32(FieldType type) { FieldDefinition definition = createFieldDefinition(type); final int valToWrite = nextNonZeroRandomInt(); @@ -211,10 +215,22 @@ void testWriteInteger_fixed32(FieldType type) { } @ParameterizedTest - @EnumSource(value = FieldType.class, names = { - "DOUBLE", "FLOAT", "INT64", "UINT64", "SINT64", - "FIXED64", "SFIXED64", "BOOL", - "STRING", "BYTES", "ENUM", "MESSAGE"}) + @EnumSource( + value = FieldType.class, + names = { + "DOUBLE", + "FLOAT", + "INT64", + "UINT64", + "SINT64", + "FIXED64", + "SFIXED64", + "BOOL", + "STRING", + "BYTES", + "ENUM", + "MESSAGE" + }) void testWriteInteger_unsupported(FieldType type) { FieldDefinition definition = createFieldDefinition(type); assertThrows(RuntimeException.class, () -> writeInteger(bufferedData, definition, RNG.nextInt())); @@ -232,7 +248,9 @@ void testWriteLong_zero() { private static long nextNonZeroRandomLong() { long ret; - do { ret = RNG.nextLong(); } while (ret == 0L); + do { + ret = RNG.nextLong(); + } while (ret == 0L); return ret; } @@ -267,21 +285,37 @@ void testWriteLong_sint64() { } @ParameterizedTest - @EnumSource(value = FieldType.class, names = {"SFIXED64", "FIXED64"}) + @EnumSource( + value = FieldType.class, + names = {"SFIXED64", "FIXED64"}) void testWriteLong_fixed64(FieldType type) { FieldDefinition definition = createFieldDefinition(type); final long valToWrite = nextNonZeroRandomLong(); writeLong(bufferedData, definition, valToWrite); bufferedData.flip(); - assertEquals((definition.number() << TAG_TYPE_BITS) | 
WIRE_TYPE_FIXED_64_BIT.ordinal(), bufferedData.readVarInt(false)); + assertEquals( + (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(), + bufferedData.readVarInt(false)); assertEquals(valToWrite, bufferedData.readLong(ByteOrder.LITTLE_ENDIAN)); } @ParameterizedTest - @EnumSource(value = FieldType.class, names = { - "DOUBLE", "FLOAT", "INT32", "UINT32", "SINT32", - "FIXED32", "SFIXED32", "BOOL", - "STRING", "BYTES", "ENUM", "MESSAGE"}) + @EnumSource( + value = FieldType.class, + names = { + "DOUBLE", + "FLOAT", + "INT32", + "UINT32", + "SINT32", + "FIXED32", + "SFIXED32", + "BOOL", + "STRING", + "BYTES", + "ENUM", + "MESSAGE" + }) void testWriteLong_unsupported(FieldType type) { FieldDefinition definition = createFieldDefinition(type); assertThrows(RuntimeException.class, () -> writeLong(bufferedData, definition, RNG.nextInt())); @@ -289,7 +323,9 @@ void testWriteLong_unsupported(FieldType type) { private static float nextNonZeroRandomFloat() { float ret; - do { ret = RNG.nextFloat(); } while (ret == 0); + do { + ret = RNG.nextFloat(); + } while (ret == 0); return ret; } @@ -305,7 +341,9 @@ void testWriteFloat() { private static double nextNonZeroRandomDouble() { double ret; - do { ret = RNG.nextDouble(); } while (ret == 0); + do { + ret = RNG.nextDouble(); + } while (ret == 0); return ret; } @@ -315,7 +353,9 @@ void testWriteDouble() { final double valToWrite = nextNonZeroRandomDouble(); ProtoWriterTools.writeDouble(bufferedData, definition, valToWrite); bufferedData.flip(); - assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(), bufferedData.readVarInt(false)); + assertEquals( + (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(), + bufferedData.readVarInt(false)); assertEquals(valToWrite, bufferedData.readDouble(ByteOrder.LITTLE_ENDIAN)); } @@ -380,7 +420,8 @@ void testWriteString() throws IOException { String valToWrite = RANDOM_STRING.nextString(); writeString(bufferedData, definition, valToWrite); bufferedData.flip(); - assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), bufferedData.readVarInt(false)); + assertEquals( + (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), bufferedData.readVarInt(false)); int length = bufferedData.readVarInt(false); assertEquals(valToWrite, new String(bufferedData.readBytes(length).toByteArray())); } @@ -424,7 +465,8 @@ void testWriteBytes() throws IOException { Bytes valToWrite = Bytes.wrap(RANDOM_STRING.nextString()); writeBytes(bufferedData, definition, valToWrite); bufferedData.flip(); - assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), bufferedData.readVarInt(false)); + assertEquals( + (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), bufferedData.readVarInt(false)); int length = bufferedData.readVarInt(false); assertEquals(valToWrite, bufferedData.readBytes(length)); } @@ -449,11 +491,18 @@ void testWriteMessage() throws IOException { FieldDefinition definition = createFieldDefinition(MESSAGE); String appleStr = RANDOM_STRING.nextString(); Apple apple = Apple.newBuilder().setVariety(appleStr).build(); - writeMessage(bufferedData, definition, apple, new CodecWrapper<>((data, out) -> out.writeBytes(data.toByteArray()), Apple::getSerializedSize)); + writeMessage( + bufferedData, + definition, + apple, + new CodecWrapper<>((data, out) -> out.writeBytes(data.toByteArray()), Apple::getSerializedSize)); bufferedData.flip(); - assertEquals((definition.number() << 
TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), bufferedData.readVarInt(false)); + assertEquals( + (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), bufferedData.readVarInt(false)); int length = bufferedData.readVarInt(false); - assertEquals(appleStr, Apple.parseFrom(bufferedData.readBytes(length).toByteArray()).getVariety()); + assertEquals( + appleStr, + Apple.parseFrom(bufferedData.readBytes(length).toByteArray()).getVariety()); } @Test @@ -465,11 +514,14 @@ void testWriteOneRepeatedMessage() throws IOException { final Apple apple2 = Apple.newBuilder().setVariety(appleStr2).build(); final BufferedData buf1 = BufferedData.allocate(256); final ProtoWriter writer = (data, out) -> out.writeBytes(data.toByteArray()); - ProtoWriterTools.writeMessageList(buf1, definition, List.of(apple1, apple2), new CodecWrapper<>(writer, Apple::getSerializedSize)); + ProtoWriterTools.writeMessageList( + buf1, definition, List.of(apple1, apple2), new CodecWrapper<>(writer, Apple::getSerializedSize)); final Bytes writtenBytes1 = buf1.getBytes(0, buf1.position()); final BufferedData buf2 = BufferedData.allocate(256); - ProtoWriterTools.writeOneRepeatedMessage(buf2, definition, apple1, new CodecWrapper<>(writer, Apple::getSerializedSize)); - ProtoWriterTools.writeOneRepeatedMessage(buf2, definition, apple2, new CodecWrapper<>(writer, Apple::getSerializedSize)); + ProtoWriterTools.writeOneRepeatedMessage( + buf2, definition, apple1, new CodecWrapper<>(writer, Apple::getSerializedSize)); + ProtoWriterTools.writeOneRepeatedMessage( + buf2, definition, apple2, new CodecWrapper<>(writer, Apple::getSerializedSize)); final Bytes writtenBytes2 = buf2.getBytes(0, buf2.position()); assertEquals(writtenBytes1, writtenBytes2); } @@ -477,9 +529,14 @@ void testWriteOneRepeatedMessage() throws IOException { @Test void testWriteOneOfMessage() throws IOException { FieldDefinition definition = createOneOfFieldDefinition(MESSAGE); - writeMessage(bufferedData, definition, null, new CodecWrapper<>((data, out) -> out.writeBytes(data.toByteArray()), Apple::getSerializedSize)); + writeMessage( + bufferedData, + definition, + null, + new CodecWrapper<>((data, out) -> out.writeBytes(data.toByteArray()), Apple::getSerializedSize)); bufferedData.flip(); - assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), bufferedData.readVarInt(false)); + assertEquals( + (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), bufferedData.readVarInt(false)); int length = bufferedData.readVarInt(false); assertEquals(0, length); } @@ -621,7 +678,9 @@ void testWriteOptionalString() throws IOException { assertEquals(valToWrite.length() + TAG_SIZE + MIN_LENGTH_VAR_SIZE, bufferedData.readVarInt(false)); assertTypeDelimitedTag(definition.type().optionalFieldDefinition); assertEquals(valToWrite.length(), bufferedData.readVarInt(false)); - assertEquals(valToWrite, new String(bufferedData.readBytes(valToWrite.length()).toByteArray())); + assertEquals( + valToWrite, + new String(bufferedData.readBytes(valToWrite.length()).toByteArray())); } @Test @@ -683,35 +742,47 @@ void testSizeOfVarInt32() { @Test void testSizeOfLong_int32() { FieldDefinition definition = createFieldDefinition(INT32); - assertEquals(TAG_SIZE + MAX_VAR_INT_SIZE, - sizeOfInteger(definition, randomLargeInt())); + assertEquals(TAG_SIZE + MAX_VAR_INT_SIZE, sizeOfInteger(definition, randomLargeInt())); } @Test void testSizeOfLong_uint32() { FieldDefinition definition = createFieldDefinition(UINT32); - assertEquals(TAG_SIZE + 
MAX_VAR_INT_SIZE, - sizeOfInteger(definition, randomLargeInt())); + assertEquals(TAG_SIZE + MAX_VAR_INT_SIZE, sizeOfInteger(definition, randomLargeInt())); } @Test void testSizeOfLong_sint32() { FieldDefinition definition = createFieldDefinition(SINT32); - assertEquals(TAG_SIZE + MAX_VAR_INT_SIZE, - sizeOfInteger(definition, randomLargeNegativeInt())); + assertEquals(TAG_SIZE + MAX_VAR_INT_SIZE, sizeOfInteger(definition, randomLargeNegativeInt())); } @ParameterizedTest - @EnumSource(value = FieldType.class, names = {"SFIXED32", "FIXED32"}) + @EnumSource( + value = FieldType.class, + names = {"SFIXED32", "FIXED32"}) void testSizeOfLong_fixed32(FieldType type) { FieldDefinition definition = createFieldDefinition(type); - assertEquals(TAG_SIZE + Integer.BYTES, - sizeOfInteger(definition, randomLargeNegativeInt())); + assertEquals(TAG_SIZE + Integer.BYTES, sizeOfInteger(definition, randomLargeNegativeInt())); } @ParameterizedTest - @EnumSource(value = FieldType.class, names = {"DOUBLE", "FLOAT", "INT64", "UINT64", "SINT64", - "FIXED64", "SFIXED64", "BOOL", "STRING", "BYTES", "ENUM", "MESSAGE"}) + @EnumSource( + value = FieldType.class, + names = { + "DOUBLE", + "FLOAT", + "INT64", + "UINT64", + "SINT64", + "FIXED64", + "SFIXED64", + "BOOL", + "STRING", + "BYTES", + "ENUM", + "MESSAGE" + }) void testSizeOfInteger_notSupported(FieldType type) { FieldDefinition definition = createFieldDefinition(type); assertThrows(RuntimeException.class, () -> sizeOfInteger(definition, RNG.nextInt())); @@ -720,36 +791,48 @@ void testSizeOfInteger_notSupported(FieldType type) { @Test void testSizeOfLong_int64() { FieldDefinition definition = createFieldDefinition(INT64); - assertEquals(TAG_SIZE + MAX_VAR_LONG_SIZE, - sizeOfLong(definition, randomLargeLong())); + assertEquals(TAG_SIZE + MAX_VAR_LONG_SIZE, sizeOfLong(definition, randomLargeLong())); } @Test void testSizeOfLong_uint64() { FieldDefinition definition = createFieldDefinition(UINT64); - assertEquals(TAG_SIZE + MAX_VAR_LONG_SIZE, - sizeOfLong(definition, randomLargeLong())); + assertEquals(TAG_SIZE + MAX_VAR_LONG_SIZE, sizeOfLong(definition, randomLargeLong())); } @Test void testSizeOfLong_sint64() { FieldDefinition definition = createFieldDefinition(SINT64); long value = randomLargeNegativeLong(); - assertEquals(TAG_SIZE + MAX_VAR_LONG_SIZE + 1 /* zigzag encoding */, - sizeOfLong(definition, value)); + assertEquals(TAG_SIZE + MAX_VAR_LONG_SIZE + 1 /* zigzag encoding */, sizeOfLong(definition, value)); } @ParameterizedTest - @EnumSource(value = FieldType.class, names = {"SFIXED64", "FIXED64"}) + @EnumSource( + value = FieldType.class, + names = {"SFIXED64", "FIXED64"}) void testSizeOfLong_fixed64(FieldType type) { FieldDefinition definition = createFieldDefinition(type); - assertEquals(TAG_SIZE + Long.BYTES, - sizeOfLong(definition, randomLargeNegativeInt())); + assertEquals(TAG_SIZE + Long.BYTES, sizeOfLong(definition, randomLargeNegativeInt())); } @ParameterizedTest - @EnumSource(value = FieldType.class, names = {"DOUBLE", "FLOAT", "INT32", "UINT32", "SINT32", - "FIXED32", "SFIXED32", "BOOL", "STRING", "BYTES", "ENUM", "MESSAGE"}) + @EnumSource( + value = FieldType.class, + names = { + "DOUBLE", + "FLOAT", + "INT32", + "UINT32", + "SINT32", + "FIXED32", + "SFIXED32", + "BOOL", + "STRING", + "BYTES", + "ENUM", + "MESSAGE" + }) void testSizeOfLong_notSupported(FieldType type) { FieldDefinition definition = createFieldDefinition(type); assertThrows(RuntimeException.class, () -> sizeOfLong(definition, RNG.nextLong())); @@ -798,35 +881,59 @@ void 
testSizeOfVarInt64() { assertEquals(10, sizeOfVarInt64(Long.MIN_VALUE)); } - @ParameterizedTest - @EnumSource(value = FieldType.class, names = {"INT32", "UINT32"}) + @EnumSource( + value = FieldType.class, + names = {"INT32", "UINT32"}) void testSizeOfIntegerList_int32(FieldType type) { FieldDefinition definition = createFieldDefinition(type); - assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + 1 * 2 /* size of two unsigned var longs in the range [0, 128) */, + assertEquals( + TAG_SIZE + MIN_LENGTH_VAR_SIZE + 1 * 2 /* size of two unsigned var longs in the range [0, 128) */, sizeOfIntegerList(definition, asList(RNG.nextInt(0, 127), RNG.nextInt(0, 128)))); } @Test void testSizeOfIntegerList_sint32() { FieldDefinition definition = createFieldDefinition(SINT32); - assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + 1 * 2 /* size of two unsigned var longs in the range (-64, 64) */, + assertEquals( + TAG_SIZE + MIN_LENGTH_VAR_SIZE + 1 * 2 /* size of two unsigned var longs in the range (-64, 64) */, sizeOfIntegerList(definition, asList(RNG.nextInt(-63, 0), RNG.nextInt(0, 64)))); } @ParameterizedTest - @EnumSource(value = FieldType.class, names = {"SFIXED32", "FIXED32"}) + @EnumSource( + value = FieldType.class, + names = {"SFIXED32", "FIXED32"}) void testSizeOfIntegerList_fixed(FieldType type) { FieldDefinition definition = createFieldDefinition(type); - assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + Integer.BYTES * 2 /* size of two unsigned var longs in the range [0, 128) */, + assertEquals( + TAG_SIZE + + MIN_LENGTH_VAR_SIZE + + Integer.BYTES * 2 /* size of two unsigned var longs in the range [0, 128) */, sizeOfIntegerList(definition, asList(RNG.nextInt(), RNG.nextInt()))); } @ParameterizedTest - @EnumSource(value = FieldType.class, names = {"DOUBLE", "FLOAT", "INT64", "UINT64", "SINT64", - "FIXED64", "SFIXED64", "BOOL", "STRING", "BYTES", "ENUM", "MESSAGE"}) + @EnumSource( + value = FieldType.class, + names = { + "DOUBLE", + "FLOAT", + "INT64", + "UINT64", + "SINT64", + "FIXED64", + "SFIXED64", + "BOOL", + "STRING", + "BYTES", + "ENUM", + "MESSAGE" + }) void testSizeOfIntegerList_notSupported(FieldType type) { - assertThrows(RuntimeException.class, () -> sizeOfIntegerList(createFieldDefinition(type), asList(RNG.nextInt(), RNG.nextInt()))); + assertThrows( + RuntimeException.class, + () -> sizeOfIntegerList(createFieldDefinition(type), asList(RNG.nextInt(), RNG.nextInt()))); } @Test @@ -836,38 +943,62 @@ void testSizeOfIntegerList_empty() { @Test void testSizeOfOneOfIntegerList_empty() { - assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE, - sizeOfLongList(createOneOfFieldDefinition(INT64), emptyList())); + assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE, sizeOfLongList(createOneOfFieldDefinition(INT64), emptyList())); } @ParameterizedTest - @EnumSource(value = FieldType.class, names = {"INT64", "UINT64"}) + @EnumSource( + value = FieldType.class, + names = {"INT64", "UINT64"}) void testSizeOfLongList_int64(FieldType type) { FieldDefinition definition = createFieldDefinition(type); - assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + 1 * 2 /* size of two unsigned var longs in the range [0, 128) */, + assertEquals( + TAG_SIZE + MIN_LENGTH_VAR_SIZE + 1 * 2 /* size of two unsigned var longs in the range [0, 128) */, sizeOfLongList(definition, asList(RNG.nextLong(0, 127), RNG.nextLong(0, 128)))); } @Test void testSizeOfLongList_sint64() { FieldDefinition definition = createFieldDefinition(SINT64); - assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + 1 * 2 /* size of two unsigned var longs in the range (-64, 64) */, + 
assertEquals( + TAG_SIZE + MIN_LENGTH_VAR_SIZE + 1 * 2 /* size of two unsigned var longs in the range (-64, 64) */, sizeOfLongList(definition, asList(RNG.nextLong(-63, 0), RNG.nextLong(0, 64)))); } @ParameterizedTest - @EnumSource(value = FieldType.class, names = {"SFIXED64", "FIXED64"}) + @EnumSource( + value = FieldType.class, + names = {"SFIXED64", "FIXED64"}) void testSizeOfLongList_fixed(FieldType type) { FieldDefinition definition = createFieldDefinition(type); - assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + Long.BYTES * 2 /* size of two unsigned var longs in the range [0, 128) */, + assertEquals( + TAG_SIZE + + MIN_LENGTH_VAR_SIZE + + Long.BYTES * 2 /* size of two unsigned var longs in the range [0, 128) */, sizeOfLongList(definition, asList(RNG.nextLong(), RNG.nextLong()))); } @ParameterizedTest - @EnumSource(value = FieldType.class, names = {"DOUBLE", "FLOAT", "INT32", "UINT32", "SINT32", - "FIXED32", "SFIXED32", "BOOL", "STRING", "BYTES", "ENUM", "MESSAGE"}) + @EnumSource( + value = FieldType.class, + names = { + "DOUBLE", + "FLOAT", + "INT32", + "UINT32", + "SINT32", + "FIXED32", + "SFIXED32", + "BOOL", + "STRING", + "BYTES", + "ENUM", + "MESSAGE" + }) void testSizeOfLongList_notSupported(FieldType type) { - assertThrows(RuntimeException.class, () -> sizeOfLongList(createFieldDefinition(type), asList(RNG.nextLong(), RNG.nextLong()))); + assertThrows( + RuntimeException.class, + () -> sizeOfLongList(createFieldDefinition(type), asList(RNG.nextLong(), RNG.nextLong()))); } @Test @@ -877,14 +1008,14 @@ void testSizeOfLongList_empty() { @Test void testSizeOfOneOfLongList_empty() { - assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE, - sizeOfLongList(createOneOfFieldDefinition(INT64), emptyList())); + assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE, sizeOfLongList(createOneOfFieldDefinition(INT64), emptyList())); } @Test void testSizeOfFloatList() { FieldDefinition definition = createFieldDefinition(FLOAT); - assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + 2 * Float.BYTES, + assertEquals( + TAG_SIZE + MIN_LENGTH_VAR_SIZE + 2 * Float.BYTES, sizeOfFloatList(definition, asList(RNG.nextFloat(), RNG.nextFloat()))); } @@ -895,14 +1026,14 @@ void testSizeOfFloatList_empty() { @Test void testSizeOfOneOfFloatList_empty() { - assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE, - sizeOfFloatList(createOneOfFieldDefinition(FLOAT), emptyList())); + assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE, sizeOfFloatList(createOneOfFieldDefinition(FLOAT), emptyList())); } @Test void testSizeOfDoubleList() { FieldDefinition definition = createFieldDefinition(DOUBLE); - assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + 2 * Double.BYTES, + assertEquals( + TAG_SIZE + MIN_LENGTH_VAR_SIZE + 2 * Double.BYTES, sizeOfDoubleList(definition, asList(RNG.nextDouble(), RNG.nextDouble()))); } @@ -913,19 +1044,17 @@ void testSizeOfDoubleList_empty() { @Test void testSizeOfOneOfDoubleList_empty() { - assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE, - sizeOfDoubleList(createOneOfFieldDefinition(DOUBLE), emptyList())); + assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE, sizeOfDoubleList(createOneOfFieldDefinition(DOUBLE), emptyList())); } - @Test - void testSizeOfBooleanList(){ + void testSizeOfBooleanList() { FieldDefinition definition = createFieldDefinition(BOOL); assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + 2, sizeOfBooleanList(definition, Arrays.asList(true, false))); } @Test - void testSizeOfBooleanList_empty(){ + void testSizeOfBooleanList_empty() { assertEquals(0, sizeOfBooleanList(createFieldDefinition(BOOL), Collections.emptyList())); } @@ -964,7 
+1093,8 @@ void testSizeOfStringList() { String str1 = randomVarSizeString(); String str2 = randomVarSizeString(); - assertEquals(MIN_LENGTH_VAR_SIZE * 2 + TAG_SIZE * 2 + str1.length() + str2.length(), + assertEquals( + MIN_LENGTH_VAR_SIZE * 2 + TAG_SIZE * 2 + str1.length() + str2.length(), sizeOfStringList(definition, asList(str1, str2))); } @@ -972,14 +1102,12 @@ void testSizeOfStringList() { void testSizeOfStringList_nullAndEmpty() { FieldDefinition definition = createFieldDefinition(STRING); - assertEquals(MIN_LENGTH_VAR_SIZE * 2 + TAG_SIZE * 2, - sizeOfStringList(definition, asList(null, ""))); + assertEquals(MIN_LENGTH_VAR_SIZE * 2 + TAG_SIZE * 2, sizeOfStringList(definition, asList(null, ""))); } @Test void testSizeOfStringList_empty() { - assertEquals(0, - sizeOfStringList(createOneOfFieldDefinition(STRING), emptyList())); + assertEquals(0, sizeOfStringList(createOneOfFieldDefinition(STRING), emptyList())); } @Test @@ -992,14 +1120,17 @@ void testSizeOfMessageList() { assertEquals( MIN_LENGTH_VAR_SIZE * 2 + TAG_SIZE * 2 + appleStr1.length() + appleStr2.length(), - sizeOfMessageList(definition, Arrays.asList(apple1, apple2), new CodecWrapper<>(null, v -> v.getVariety().length()))); + sizeOfMessageList( + definition, Arrays.asList(apple1, apple2), new CodecWrapper<>(null, v -> v.getVariety() + .length()))); } @Test void testSizeOfMessageList_empty() { assertEquals( 0, - sizeOfMessageList(createFieldDefinition(MESSAGE), emptyList(), new CodecWrapper<>(null, v -> RNG.nextInt()))); + sizeOfMessageList( + createFieldDefinition(MESSAGE), emptyList(), new CodecWrapper<>(null, v -> RNG.nextInt()))); } @Test @@ -1009,8 +1140,7 @@ void testSizeOfBytesList() { Bytes bytes2 = Bytes.wrap(randomVarSizeString()); assertEquals( - MIN_LENGTH_VAR_SIZE * 2 + TAG_SIZE * 2 - + bytes1.length() + bytes2.length(), + MIN_LENGTH_VAR_SIZE * 2 + TAG_SIZE * 2 + bytes1.length() + bytes2.length(), sizeOfBytesList(definition, asList(bytes1, bytes2))); } @@ -1074,7 +1204,7 @@ void testSizeOfString_oneOf() { } @Test - void testSizeOfString_oneOf_null(){ + void testSizeOfString_oneOf_null() { final FieldDefinition definition = createOneOfFieldDefinition(STRING); assertEquals(MIN_LENGTH_VAR_SIZE + TAG_SIZE, sizeOfString(definition, null)); } @@ -1096,7 +1226,7 @@ void testSizeOfBytes_empty() { } @Test - void testSizeOfBytes_oneOf(){ + void testSizeOfBytes_oneOf() { final FieldDefinition definition = createOneOfFieldDefinition(BYTES); final Bytes bytes = Bytes.wrap(randomVarSizeString()); @@ -1104,25 +1234,29 @@ void testSizeOfBytes_oneOf(){ } @Test - void testSizeOfMessage(){ + void testSizeOfMessage() { final FieldDefinition definition = createFieldDefinition(MESSAGE); final String appleStr = randomVarSizeString(); final Apple apple = Apple.newBuilder().setVariety(appleStr).build(); - assertEquals(MIN_LENGTH_VAR_SIZE + TAG_SIZE + appleStr.length(), sizeOfMessage(definition, apple, - new CodecWrapper<>(null, v -> v.getVariety().length()))); + assertEquals( + MIN_LENGTH_VAR_SIZE + TAG_SIZE + appleStr.length(), + sizeOfMessage(definition, apple, new CodecWrapper<>(null, v -> v.getVariety() + .length()))); } @Test void testSizeOfMessage_oneOf_null() { final FieldDefinition definition = createOneOfFieldDefinition(MESSAGE); - assertEquals(MIN_LENGTH_VAR_SIZE + TAG_SIZE, sizeOfMessage(definition, null, new CodecWrapper<>(null, v -> RNG.nextInt()))); + assertEquals( + MIN_LENGTH_VAR_SIZE + TAG_SIZE, + sizeOfMessage(definition, null, new CodecWrapper<>(null, v -> RNG.nextInt()))); } @Test void testSizeOfMessage_null() { final 
FieldDefinition definition = createFieldDefinition(MESSAGE); - assertEquals(0, sizeOfMessage(definition, null, new CodecWrapper<>(null, v -> RNG.nextInt()))); + assertEquals(0, sizeOfMessage(definition, null, new CodecWrapper<>(null, v -> RNG.nextInt()))); } @Test @@ -1139,8 +1273,7 @@ private static Stream provideWriteIntegerListArguments() { Arguments.of(UINT32, 24, false), Arguments.of(SINT32, 21, true), Arguments.of(FIXED32, 32, false), - Arguments.of(SFIXED32, 32, false) - ); + Arguments.of(SFIXED32, 32, false)); } @ParameterizedTest @@ -1149,16 +1282,8 @@ void testWriteIntegerList(final FieldType type, final int expectedSize, final bo final FieldDefinition definition = createRepeatedFieldDefinition(type); final long start = bufferedData.position(); - ProtoWriterTools.writeIntegerList(bufferedData, definition, List.of( - 0x0f, - 0xff, - 0xfff, - 0xffff, - 0xfffff, - 0xffffff, - 0xfffffff, - 0xffffffff - )); + ProtoWriterTools.writeIntegerList( + bufferedData, definition, List.of(0x0f, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0xfffffff, 0xffffffff)); final long finish = bufferedData.position(); int tag = bufferedData.getVarInt(start, false); @@ -1181,8 +1306,7 @@ private static Stream provideWriteLongListArguments() { Arguments.of(UINT64, 85, false), Arguments.of(SINT64, 78, true), Arguments.of(FIXED64, 128, false), - Arguments.of(SFIXED64, 128, false) - ); + Arguments.of(SFIXED64, 128, false)); } @ParameterizedTest @@ -1191,24 +1315,26 @@ void testWriteLongList(final FieldType type, final int expectedSize, final boole final FieldDefinition definition = createRepeatedFieldDefinition(type); final long start = bufferedData.position(); - ProtoWriterTools.writeLongList(bufferedData, definition, List.of( - 0x0fL, - 0xffL, - 0xfffL, - 0xffffL, - 0xfffffL, - 0xffffffL, - 0xfffffffL, - 0xffffffffL, - 0xfffffffffL, - 0xffffffffffL, - 0xfffffffffffL, - 0xffffffffffffL, - 0xfffffffffffffL, - 0xffffffffffffffL, - 0xfffffffffffffffL, - 0xffffffffffffffffL - )); + ProtoWriterTools.writeLongList( + bufferedData, + definition, + List.of( + 0x0fL, + 0xffL, + 0xfffL, + 0xffffL, + 0xfffffL, + 0xffffffL, + 0xfffffffL, + 0xffffffffL, + 0xfffffffffL, + 0xffffffffffL, + 0xfffffffffffL, + 0xffffffffffffL, + 0xfffffffffffffL, + 0xffffffffffffffL, + 0xfffffffffffffffL, + 0xffffffffffffffffL)); final long finish = bufferedData.position(); int tag = bufferedData.getVarInt(start, false); @@ -1233,11 +1359,7 @@ private static interface ReaderMethod { T read(BufferedData bd, long pos); } - private static final List testEnumList = List.of( - mockEnum(0), - mockEnum(2), - mockEnum(1) - ); + private static final List testEnumList = List.of(mockEnum(0), mockEnum(2), mockEnum(1)); // https://clement-jean.github.io/packed_vs_unpacked_repeated_fields/ private static Stream provideWritePackedListArguments() { @@ -1247,22 +1369,19 @@ private static Stream provideWritePackedListArguments() { (WriterMethod) ProtoWriterTools::writeFloatList, List.of(.1f, .5f, 100.f), 12, - (ReaderMethod) (BufferedData bd, long pos) -> bd.getFloat(pos) - ), + (ReaderMethod) (BufferedData bd, long pos) -> bd.getFloat(pos)), Arguments.of( DOUBLE, (WriterMethod) ProtoWriterTools::writeDoubleList, List.of(.1, .5, 100., 1.7653472635472654e240), 32, - (ReaderMethod) (BufferedData bd, long pos) -> bd.getDouble(pos) - ), + (ReaderMethod) (BufferedData bd, long pos) -> bd.getDouble(pos)), Arguments.of( BOOL, (WriterMethod) ProtoWriterTools::writeBooleanList, List.of(true, false, false, true, true, true), 6, - (ReaderMethod) (BufferedData bd, long pos) -> 
(bd.getInt(pos) != 0 ? true : false) - ), + (ReaderMethod) (BufferedData bd, long pos) -> (bd.getInt(pos) != 0 ? true : false)), Arguments.of( ENUM, (WriterMethod) ProtoWriterTools::writeEnumList, @@ -1276,12 +1395,9 @@ private static Stream provideWritePackedListArguments() { throw new RuntimeException("Unexpected ordinal " + ordinal + " for test enum list " + testEnumList.stream() - .map(e -> "" + e.protoOrdinal() + ": " + e.protoName()) - .collect(Collectors.joining(",", "{", "}")) - ); - } - ) - ); + .map(e -> "" + e.protoOrdinal() + ": " + e.protoName()) + .collect(Collectors.joining(",", "{", "}"))); + })); } @ParameterizedTest @@ -1308,14 +1424,11 @@ void testWritePackedList( assertEquals(finish - start - sizeOfTag - sizeOfSize, size); - T value = readerMethod.read(bufferedData,start + sizeOfTag + sizeOfSize); + T value = readerMethod.read(bufferedData, start + sizeOfTag + sizeOfSize); assertEquals(list.get(0), value); } - private static record UnpackedField( - T value, - int size - ) {} + private static record UnpackedField(T value, int size) {} // https://clement-jean.github.io/packed_vs_unpacked_repeated_fields/ private static Stream provideWriteUnpackedListArguments() { @@ -1335,13 +1448,9 @@ private static Stream provideWriteUnpackedListArguments() { int sizeOfSize = ProtoWriterTools.sizeOfVarInt32(size); return new UnpackedField<>( new String( - bd.getBytes(pos + sizeOfSize, size).toByteArray(), - StandardCharsets.UTF_8 - ), - sizeOfSize + size - ); - } - ), + bd.getBytes(pos + sizeOfSize, size).toByteArray(), StandardCharsets.UTF_8), + sizeOfSize + size); + }), Arguments.of( BYTES, (WriterMethod) (out, field, list) -> { @@ -1353,19 +1462,13 @@ private static Stream provideWriteUnpackedListArguments() { }, List.of( Bytes.wrap(new byte[] {1, 2, 3}), - Bytes.wrap(new byte[] {(byte)255, 127, 15}), - Bytes.wrap(new byte[] {66, (byte) 218, 7, 55, 11, (byte) 255}) - ), + Bytes.wrap(new byte[] {(byte) 255, 127, 15}), + Bytes.wrap(new byte[] {66, (byte) 218, 7, 55, 11, (byte) 255})), (ReaderMethod>) (BufferedData bd, long pos) -> { int size = bd.getVarInt(pos, false); int sizeOfSize = ProtoWriterTools.sizeOfVarInt32(size); - return new UnpackedField<>( - bd.getBytes(pos + sizeOfSize, size), - sizeOfSize + size - ); - } - ) - ); + return new UnpackedField<>(bd.getBytes(pos + sizeOfSize, size), sizeOfSize + size); + })); } @ParameterizedTest @@ -1397,19 +1500,26 @@ void testWriteUnpackedList( } private void assertVarIntTag(FieldDefinition definition) { - assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_VARINT_OR_ZIGZAG.ordinal(), bufferedData.readVarInt(false)); + assertEquals( + (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_VARINT_OR_ZIGZAG.ordinal(), + bufferedData.readVarInt(false)); } private void assertFixed32Tag(FieldDefinition definition) { - assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_32_BIT.ordinal(), bufferedData.readVarInt(false)); + assertEquals( + (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_32_BIT.ordinal(), + bufferedData.readVarInt(false)); } private void assertFixed64Tag(FieldDefinition definition) { - assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(), bufferedData.readVarInt(false)); + assertEquals( + (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(), + bufferedData.readVarInt(false)); } private void assertTypeDelimitedTag(FieldDefinition definition) { - assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), 
bufferedData.readVarInt(false)); + assertEquals( + (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), bufferedData.readVarInt(false)); } static String randomVarSizeString() { @@ -1450,5 +1560,4 @@ static FieldDefinition createOneOfFieldDefinition(FieldType fieldType) { static FieldDefinition createRepeatedFieldDefinition(FieldType fieldType) { return new FieldDefinition(RANDOM_STRING.nextString(), fieldType, true, RNG.nextInt(1, 16)); } - } diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/Utf8ToolsTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/Utf8ToolsTest.java index 4ee92f2e..27b5fb67 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/Utf8ToolsTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/Utf8ToolsTest.java @@ -1,19 +1,18 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.runtime; -import com.hedera.pbj.runtime.io.buffer.BufferedData; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertEquals; +import com.hedera.pbj.runtime.io.buffer.BufferedData; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.nio.charset.StandardCharsets; import java.util.HexFormat; import java.util.stream.Stream; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; public class Utf8ToolsTest { private static Stream provideStringsAndLengths() throws UnsupportedEncodingException { @@ -25,16 +24,20 @@ private static Stream provideStringsAndLengths() throws UnsupportedEn Arguments.of("not blank", 9), Arguments.of("\u076c test", 7), Arguments.of("\u076c \uea84 test", 11), - Arguments.of(new String(new byte[] { - (byte) 0b11110001, (byte) 0b10000011, (byte) 0b10000111, (byte) 0b10001111 - }, "UTF-8"), 4) - ); + Arguments.of( + new String( + new byte[] {(byte) 0b11110001, (byte) 0b10000011, (byte) 0b10000111, (byte) 0b10001111}, + "UTF-8"), + 4)); } + @ParameterizedTest @MethodSource("provideStringsAndLengths") void encodedLength(String testStr, int expectedLength) { assertEquals(expectedLength, assertDoesNotThrow(() -> Utf8Tools.encodedLength(testStr))); - assertEquals(testStr.getBytes(StandardCharsets.UTF_8).length, assertDoesNotThrow(() -> Utf8Tools.encodedLength(testStr))); + assertEquals( + testStr.getBytes(StandardCharsets.UTF_8).length, + assertDoesNotThrow(() -> Utf8Tools.encodedLength(testStr))); } @ParameterizedTest @@ -47,9 +50,11 @@ void encodeUtf8(String testStr, int expectedLength) { throw new RuntimeException(e); } bufferedData.flip(); - byte[] bytes = new byte[(int)bufferedData.length()]; + byte[] bytes = new byte[(int) bufferedData.length()]; bufferedData.getBytes(0, bytes); - assertEquals(HexFormat.of().formatHex(testStr.getBytes(StandardCharsets.UTF_8)), HexFormat.of().formatHex(bytes)); + assertEquals( + HexFormat.of().formatHex(testStr.getBytes(StandardCharsets.UTF_8)), + HexFormat.of().formatHex(bytes)); assertEquals(expectedLength, bytes.length); } } diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcExceptionTest.java 
b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcExceptionTest.java index a4f4d68e..88bc6d4c 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcExceptionTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcExceptionTest.java @@ -25,8 +25,8 @@ void testOkStatusThrows() { // If the status is OK, then the constructor should throw an IllegalArgumentException. //noinspection ThrowableNotThrown assertThatThrownBy(() -> new GrpcException(GrpcStatus.OK)) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage("status cannot be OK"); + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("status cannot be OK"); } @Test diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/PipelinesTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/PipelinesTest.java index 00fd6b96..00b2609f 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/PipelinesTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/PipelinesTest.java @@ -28,14 +28,14 @@ class PipelinesTest { @Nested @ExtendWith(MockitoExtension.class) class NoopTest { - @Mock Flow.Subscription subscription; + @Mock + Flow.Subscription subscription; @Test void noopWithNullSubscriptionThrowsNPE() { final var noop = Pipelines.noop(); assertThat(noop).isNotNull(); - assertThatThrownBy(() -> noop.onSubscribe(null)) - .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> noop.onSubscribe(null)).isInstanceOf(NullPointerException.class); } @Test @@ -71,8 +71,11 @@ void noopOnCompleteDoesNothing() { @Nested @ExtendWith(MockitoExtension.class) class UnaryTest { - @Mock Pipeline replies; - @Mock Flow.Subscription subscription; + @Mock + Pipeline replies; + + @Mock + Flow.Subscription subscription; @Test void requestMapperIsRequired() { @@ -131,8 +134,7 @@ void nullSubscriptionThrowsNPE() { .respondTo(replies) .build(); - assertThatThrownBy(() -> pipeline.onSubscribe(null)) - .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> pipeline.onSubscribe(null)).isInstanceOf(NullPointerException.class); } @Test @@ -185,9 +187,14 @@ void positive() { @Nested @ExtendWith(MockitoExtension.class) class BidiTest { - @Mock Pipeline client; - @Mock Pipeline replies; - @Mock Flow.Subscription subscription; + @Mock + Pipeline client; + + @Mock + Pipeline replies; + + @Mock + Flow.Subscription subscription; @Test void requestMapperIsRequired() { @@ -246,8 +253,7 @@ void nullSubscriptionThrowsNPE() { .respondTo(replies) .build(); - assertThatThrownBy(() -> pipeline.onSubscribe(null)) - .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> pipeline.onSubscribe(null)).isInstanceOf(NullPointerException.class); } @Test @@ -255,11 +261,14 @@ void onCompleteNextThrowsISE() { final var pipeline = Pipelines.bidiStreaming() .mapRequest(Bytes::asUtf8String) .method(sink -> { - lenient().doAnswer(invocation -> { - final var msg = invocation.getArgument(0, String.class); - sink.onNext(msg.toUpperCase()); - return null; - }).when(client).onNext(any()); + lenient() + .doAnswer(invocation -> { + final var msg = invocation.getArgument(0, String.class); + sink.onNext(msg.toUpperCase()); + return null; + }) + .when(client) + .onNext(any()); return client; }) .mapResponse(Bytes::wrap) @@ -305,7 +314,9 @@ void exceptionDuring_responseConverter_IsHandled() { final var ex = new RuntimeException("Some exception"); Pipelines.bidiStreaming() .mapRequest(Bytes::asUtf8String) - 
.method(sink -> { throw ex; }) + .method(sink -> { + throw ex; + }) .mapResponse(Bytes::wrap) .respondTo(replies) .build(); @@ -319,10 +330,12 @@ void positive() { .mapRequest(Bytes::asUtf8String) .method(sink -> { doAnswer(invocation -> { - final var msg = invocation.getArgument(0, String.class); - sink.onNext(msg.toUpperCase()); - return null; - }).when(client).onNext(any()); + final var msg = invocation.getArgument(0, String.class); + sink.onNext(msg.toUpperCase()); + return null; + }) + .when(client) + .onNext(any()); return client; }) .mapResponse(Bytes::wrap) @@ -334,17 +347,18 @@ void positive() { pipeline.onNext(Bytes.wrap("hello")); pipeline.onNext(Bytes.wrap("world")); verify(replies, times(2)).onNext(argCaptor.capture()); - assertThat(argCaptor.getAllValues()).containsExactly( - Bytes.wrap("HELLO"), - Bytes.wrap("WORLD")); + assertThat(argCaptor.getAllValues()).containsExactly(Bytes.wrap("HELLO"), Bytes.wrap("WORLD")); } } @Nested @ExtendWith(MockitoExtension.class) class ServerStreamingTest { - @Mock Pipeline replies; - @Mock Flow.Subscription subscription; + @Mock + Pipeline replies; + + @Mock + Flow.Subscription subscription; @Test void requestMapperIsRequired() { @@ -403,8 +417,7 @@ void nullSubscriptionThrowsNPE() { .respondTo(replies) .build(); - assertThatThrownBy(() -> pipeline.onSubscribe(null)) - .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> pipeline.onSubscribe(null)).isInstanceOf(NullPointerException.class); } @Test @@ -428,7 +441,9 @@ void onCompleteNextThrowsISE() { void badRequestMapperCallsOnError() { final var ex = new RuntimeException("Bad bad bad"); final var pipeline = Pipelines.serverStreaming() - .mapRequest(bytes -> { throw ex; }) + .mapRequest(bytes -> { + throw ex; + }) .method((msg, sink) -> sink.onNext(msg.toUpperCase())) .mapResponse(Bytes::wrap) .respondTo(replies) @@ -445,7 +460,9 @@ void badMethodCallsOnError() { final var ex = new RuntimeException("Bad bad bad"); final var pipeline = Pipelines.serverStreaming() .mapRequest(Bytes::asUtf8String) - .method((msg, sink) -> { throw ex; }) + .method((msg, sink) -> { + throw ex; + }) .mapResponse(Bytes::wrap) .respondTo(replies) .build(); @@ -474,8 +491,11 @@ void positive() { @Nested @ExtendWith(MockitoExtension.class) class ClientStreamingTest { - @Mock Pipeline replies; - @Mock Flow.Subscription subscription; + @Mock + Pipeline replies; + + @Mock + Flow.Subscription subscription; @Test void requestMapperIsRequired() { @@ -534,8 +554,7 @@ void nullSubscriptionThrowsNPE() { .respondTo(replies) .build(); - assertThatThrownBy(() -> pipeline.onSubscribe(null)) - .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> pipeline.onSubscribe(null)).isInstanceOf(NullPointerException.class); } @Test @@ -559,7 +578,9 @@ void onCompleteNextThrowsISE() { void badRequestMapperCallsOnError() { final var ex = new RuntimeException("Bad bad bad"); final var pipeline = Pipelines.clientStreaming() - .mapRequest(bytes -> { throw ex; }) + .mapRequest(bytes -> { + throw ex; + }) .method(ConcatenatingHandler::new) .mapResponse(Bytes::wrap) .respondTo(replies) @@ -576,7 +597,9 @@ void badMethodCallsOnError() { final var ex = new RuntimeException("Bad bad bad"); Pipelines.clientStreaming() .mapRequest(Bytes::asUtf8String) - .method(sink -> { throw ex; }) + .method(sink -> { + throw ex; + }) .mapResponse(Bytes::wrap) .respondTo(replies) .build(); diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/DataTest.java 
b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/DataTest.java index 80d3dba6..945dcaa2 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/DataTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/DataTest.java @@ -1,204 +1,207 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.runtime.io; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.google.protobuf.CodedInputStream; import com.google.protobuf.CodedOutputStream; import com.hedera.pbj.runtime.io.buffer.BufferedData; import com.hedera.pbj.runtime.io.buffer.Bytes; import com.hedera.pbj.runtime.io.stream.ReadableStreamingData; import com.hedera.pbj.runtime.io.stream.WritableStreamingData; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.MethodSource; - import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.ByteOrder; import java.util.Arrays; import java.util.stream.Stream; - -import static org.junit.jupiter.api.Assertions.assertArrayEquals; -import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; final class DataTest { static Stream bytesTestCases() { return Stream.of( - Byte.MIN_VALUE, - Byte.MIN_VALUE + 1, - -100, - -66, - -7, - -1, - 0, - 1, - 9, - 51, - 101, - Byte.MAX_VALUE - 1, - Byte.MAX_VALUE).map(Number::byteValue); + Byte.MIN_VALUE, + Byte.MIN_VALUE + 1, + -100, + -66, + -7, + -1, + 0, + 1, + 9, + 51, + 101, + Byte.MAX_VALUE - 1, + Byte.MAX_VALUE) + .map(Number::byteValue); } @ParameterizedTest @MethodSource("bytesTestCases") void byteTest(Byte value) throws IOException { - doTest(value, + doTest( + value, WritableStreamingData::writeByte, - (dout, v) -> dout.writeByte((int)v), + (dout, v) -> dout.writeByte((int) v), BufferedData::writeByte, ReadableStreamingData::readByte, java.io.DataInputStream::readByte, - BufferedData::readByte - ); + BufferedData::readByte); } static Stream unsignedBytesTestCases() { - return Stream.of(0,1,9,51,101,127,128,255).map(Number::intValue); + return Stream.of(0, 1, 9, 51, 101, 127, 128, 255).map(Number::intValue); } @ParameterizedTest @MethodSource("unsignedBytesTestCases") void unsignedByteTest(Integer value) throws IOException { - doTest(value, + doTest( + value, WritableStreamingData::writeUnsignedByte, java.io.DataOutputStream::writeByte, BufferedData::writeUnsignedByte, ReadableStreamingData::readUnsignedByte, java.io.DataInputStream::readUnsignedByte, - ReadableSequentialData::readUnsignedByte - ); + ReadableSequentialData::readUnsignedByte); } static Stream intsTestCases() { return Stream.of( - Integer.MIN_VALUE, - Integer.MIN_VALUE + 1, - -536870912, - -4194304, - -32768, - -100, - -66, - -7, - -1, - 0, - 1, - 9, - 51, - 101, - 32768, - 4194304, - 536870912, - Integer.MAX_VALUE - 1, - Integer.MAX_VALUE).map(Number::intValue); + Integer.MIN_VALUE, + Integer.MIN_VALUE + 1, + -536870912, + -4194304, + -32768, + -100, + -66, + -7, + -1, + 0, + 1, + 9, + 51, + 101, + 32768, + 4194304, + 536870912, + Integer.MAX_VALUE - 1, + Integer.MAX_VALUE) + .map(Number::intValue); } @ParameterizedTest @MethodSource("intsTestCases") void intTest(Integer value) throws IOException { - doTest(value, + doTest( + value, WritableStreamingData::writeInt, java.io.DataOutputStream::writeInt, BufferedData::writeInt, 
ReadableStreamingData::readInt, java.io.DataInputStream::readInt, - BufferedData::readInt - ); - doTest(value, + BufferedData::readInt); + doTest( + value, (d, v) -> d.writeInt(v, ByteOrder.LITTLE_ENDIAN), (d, v) -> d.writeInt(Integer.reverseBytes(v)), (d, v) -> d.writeInt(v, ByteOrder.LITTLE_ENDIAN), d -> d.readInt(ByteOrder.LITTLE_ENDIAN), d -> Integer.reverseBytes(d.readInt()), - d -> d.readInt(ByteOrder.LITTLE_ENDIAN) - ); + d -> d.readInt(ByteOrder.LITTLE_ENDIAN)); } static Stream unsignedIntsTestCases() { - return Stream.of(0,1,9,51,127,Integer.MAX_VALUE*2L).map(Number::longValue); + return Stream.of(0, 1, 9, 51, 127, Integer.MAX_VALUE * 2L).map(Number::longValue); } @ParameterizedTest @MethodSource("unsignedIntsTestCases") void unsignedIntTest(Long value) throws IOException { - doTest(value, + doTest( + value, WritableStreamingData::writeUnsignedInt, (dout, v) -> dout.writeInt(v.intValue()), BufferedData::writeUnsignedInt, ReadableStreamingData::readUnsignedInt, (dout) -> Integer.toUnsignedLong(dout.readInt()), - BufferedData::readUnsignedInt - ); - doTest(value, + BufferedData::readUnsignedInt); + doTest( + value, (d, v) -> d.writeUnsignedInt(v, ByteOrder.LITTLE_ENDIAN), (d, v) -> d.writeInt(Integer.reverseBytes(v.intValue())), (d, v) -> d.writeUnsignedInt(v, ByteOrder.LITTLE_ENDIAN), d -> d.readUnsignedInt(ByteOrder.LITTLE_ENDIAN), d -> Integer.toUnsignedLong(Integer.reverseBytes(d.readInt())), - d -> d.readUnsignedInt(ByteOrder.LITTLE_ENDIAN) - ); + d -> d.readUnsignedInt(ByteOrder.LITTLE_ENDIAN)); } static Stream longsTestCases() { return Stream.of( - Long.MIN_VALUE, - Long.MIN_VALUE + 1, - Integer.MIN_VALUE - 1L, - Integer.MIN_VALUE, - Integer.MIN_VALUE + 1, - -9007199254740992L, - -35184372088832L, - -137438953472L, - -536870912, - -4194304, - -65536, - -65535, - -65534, - -32768, - -100, - -66, - -7, - -1, - 0, - 1, - 9, - 51, - 101, - 1023, - 1024, - 1025, - 32768, - 4194304, - 536870912, - 137438953472L, - 35184372088832L, - 9007199254740992L, - Integer.MAX_VALUE - 1L, - Integer.MAX_VALUE, - Integer.MAX_VALUE + 1L, - Long.MAX_VALUE - 1L, - Long.MAX_VALUE).map(Number::longValue); -} + Long.MIN_VALUE, + Long.MIN_VALUE + 1, + Integer.MIN_VALUE - 1L, + Integer.MIN_VALUE, + Integer.MIN_VALUE + 1, + -9007199254740992L, + -35184372088832L, + -137438953472L, + -536870912, + -4194304, + -65536, + -65535, + -65534, + -32768, + -100, + -66, + -7, + -1, + 0, + 1, + 9, + 51, + 101, + 1023, + 1024, + 1025, + 32768, + 4194304, + 536870912, + 137438953472L, + 35184372088832L, + 9007199254740992L, + Integer.MAX_VALUE - 1L, + Integer.MAX_VALUE, + Integer.MAX_VALUE + 1L, + Long.MAX_VALUE - 1L, + Long.MAX_VALUE) + .map(Number::longValue); + } @ParameterizedTest @MethodSource("longsTestCases") void longTest(Long value) throws IOException { - doTest(value, + doTest( + value, WritableStreamingData::writeLong, java.io.DataOutputStream::writeLong, BufferedData::writeLong, ReadableStreamingData::readLong, java.io.DataInputStream::readLong, - BufferedData::readLong - ); - doTest(value, + BufferedData::readLong); + doTest( + value, (d, v) -> d.writeLong(v, ByteOrder.LITTLE_ENDIAN), (d, v) -> d.writeLong(Long.reverseBytes(v)), (d, v) -> d.writeLong(v, ByteOrder.LITTLE_ENDIAN), d -> d.readLong(ByteOrder.LITTLE_ENDIAN), d -> Long.reverseBytes(d.readLong()), - d -> d.readLong(ByteOrder.LITTLE_ENDIAN) - ); + d -> d.readLong(ByteOrder.LITTLE_ENDIAN)); } + @ParameterizedTest @MethodSource("intsTestCases") void bytesVarIntTest(int value) throws IOException { @@ -246,50 +249,81 @@ void bytesVarLongTest(long 
value) throws IOException { } static Stream floatsTestCases() { - return Stream.of(Float.MIN_VALUE, Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Float.MAX_VALUE).map(Number::floatValue); + return Stream.of( + Float.MIN_VALUE, + Integer.MIN_VALUE - 1L, + -100, + -66, + -7, + -1, + 0, + 1, + 9, + 51, + 101, + Integer.MAX_VALUE + 1L, + Float.MAX_VALUE) + .map(Number::floatValue); } + @ParameterizedTest @MethodSource("floatsTestCases") void floatTest(Float value) throws IOException { - doTest(value, + doTest( + value, WritableStreamingData::writeFloat, java.io.DataOutputStream::writeFloat, BufferedData::writeFloat, ReadableStreamingData::readFloat, java.io.DataInputStream::readFloat, - BufferedData::readFloat - ); - doTest(value, + BufferedData::readFloat); + doTest( + value, (d, v) -> d.writeFloat(v, ByteOrder.LITTLE_ENDIAN), - (d, v) -> d.writeInt( Integer.reverseBytes(Float.floatToIntBits(v))), + (d, v) -> d.writeInt(Integer.reverseBytes(Float.floatToIntBits(v))), (d, v) -> d.writeFloat(v, ByteOrder.LITTLE_ENDIAN), d -> d.readFloat(ByteOrder.LITTLE_ENDIAN), d -> Float.intBitsToFloat(Integer.reverseBytes(d.readInt())), - d -> d.readFloat(ByteOrder.LITTLE_ENDIAN) - ); + d -> d.readFloat(ByteOrder.LITTLE_ENDIAN)); } + static Stream doublesTestCases() { - return Stream.of(Double.MIN_VALUE, Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Double.MAX_VALUE).map(Number::doubleValue); + return Stream.of( + Double.MIN_VALUE, + Integer.MIN_VALUE - 1L, + -100, + -66, + -7, + -1, + 0, + 1, + 9, + 51, + 101, + Integer.MAX_VALUE + 1L, + Double.MAX_VALUE) + .map(Number::doubleValue); } + @ParameterizedTest @MethodSource("doublesTestCases") void doubleTest(Double value) throws IOException { - doTest(value, + doTest( + value, WritableStreamingData::writeDouble, java.io.DataOutputStream::writeDouble, BufferedData::writeDouble, ReadableStreamingData::readDouble, java.io.DataInputStream::readDouble, - BufferedData::readDouble - ); - doTest(value, + BufferedData::readDouble); + doTest( + value, (d, v) -> d.writeDouble(v, ByteOrder.LITTLE_ENDIAN), - (d, v) -> d.writeLong( Long.reverseBytes(Double.doubleToLongBits(v))), + (d, v) -> d.writeLong(Long.reverseBytes(Double.doubleToLongBits(v))), (d, v) -> d.writeDouble(v, ByteOrder.LITTLE_ENDIAN), d -> d.readDouble(ByteOrder.LITTLE_ENDIAN), d -> Double.longBitsToDouble(Long.reverseBytes(d.readLong())), - d -> d.readDouble(ByteOrder.LITTLE_ENDIAN) - ); + d -> d.readDouble(ByteOrder.LITTLE_ENDIAN)); } @ParameterizedTest @@ -434,14 +468,15 @@ void compatInt32Int64(final long num) { // ============================================================================================================== // Generic test case used by all tests :-) - static void doTest(T value, - IoWrite dataOutputWriteMethod, - IoWrite javaDataOutputWriteMethod, - IoWrite dataBufferWriteMethod, - IoRead dataInputReadMethod, - IoRead javaDataInputReadMethod, - IoRead dataBufferReadMethod - ) throws IOException { + static void doTest( + T value, + IoWrite dataOutputWriteMethod, + IoWrite javaDataOutputWriteMethod, + IoWrite dataBufferWriteMethod, + IoRead dataInputReadMethod, + IoRead javaDataInputReadMethod, + IoRead dataBufferReadMethod) + throws IOException { try { // write to byte array with DataIO DataOutputStream ByteArrayOutputStream bout = new ByteArrayOutputStream(); @@ -503,6 +538,7 @@ static void doTest(T value, public interface IoWrite { void write(T t, U u) throws IOException; } + public interface IoRead { U read(T t) throws IOException; } 
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialDataTest.java index 2ec6eee6..13cca295 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialDataTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialDataTest.java @@ -1,17 +1,16 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.runtime.io; +import static org.assertj.core.api.Assertions.assertThat; + import com.hedera.pbj.runtime.io.stream.EOFException; import edu.umd.cs.findbugs.annotations.NonNull; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.io.InputStream; import java.nio.BufferUnderflowException; import java.util.function.Supplier; - -import static org.assertj.core.api.Assertions.assertThat; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; final class ReadableSequentialDataTest extends ReadableSequentialTestBase { @@ -23,7 +22,7 @@ protected ReadableSequentialData emptySequence() { @NonNull private ReadableSequentialData throwingSequence() { - return new StubbedSequence(new byte[] { 1 }, () -> new EOFException()); + return new StubbedSequence(new byte[] {1}, () -> new EOFException()); } @Test @@ -42,7 +41,7 @@ void throwingSequenceTest() { @Test @DisplayName("Verify asInputStream()") void testAsInputStream() throws IOException { - ReadableSequentialData sequence = sequence(new byte[]{1, 2, 3, (byte) 254, (byte) 255}); + ReadableSequentialData sequence = sequence(new byte[] {1, 2, 3, (byte) 254, (byte) 255}); InputStream inputStream = sequence.asInputStream(); assertThat(inputStream.read()).isEqualTo(1); @@ -61,7 +60,7 @@ void testAsInputStream() throws IOException { @NonNull @Override protected ReadableSequentialData fullyUsedSequence() { - final var seq = new StubbedSequence(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); + final var seq = new StubbedSequence(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); seq.skip(10); return seq; } @@ -91,7 +90,6 @@ private StubbedSequence(@NonNull final byte[] bytes) { this(bytes, null); } - @Override public long capacity() { return bytes.length; @@ -134,6 +132,5 @@ public byte readByte() { return bytes[(int) position++]; } - } } diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialTestBase.java index 2ddba1d2..be718288 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialTestBase.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialTestBase.java @@ -7,7 +7,6 @@ import com.hedera.pbj.runtime.io.buffer.BufferedData; import edu.umd.cs.findbugs.annotations.NonNull; - import java.nio.BufferUnderflowException; import java.nio.charset.StandardCharsets; import org.junit.jupiter.api.DisplayName; @@ -25,7 +24,7 @@ public abstract class ReadableSequentialTestBase extends ReadableTestBase { @Override @NonNull - protected abstract ReadableSequentialData sequence(@NonNull byte [] arr); + protected abstract ReadableSequentialData sequence(@NonNull byte[] arr); @Test @DisplayName("Stream with no data") @@ -160,5 +159,4 @@ void skipMoreThanAvailable() { final var stream = sequence("0123456789".getBytes(StandardCharsets.UTF_8)); assertThrows(BufferUnderflowException.class, () -> 
stream.skip(20)); } - } diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableTestBase.java index 84e3a5f4..16b7206d 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableTestBase.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableTestBase.java @@ -1,11 +1,21 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.runtime.io; +import static java.nio.ByteOrder.BIG_ENDIAN; +import static java.nio.ByteOrder.LITTLE_ENDIAN; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + import com.hedera.pbj.runtime.io.buffer.BufferedData; import com.hedera.pbj.runtime.io.buffer.Bytes; import com.hedera.pbj.runtime.io.buffer.RandomAccessData; import com.hedera.pbj.runtime.io.stream.ReadableStreamingData; import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.BufferOverflowException; +import java.nio.BufferUnderflowException; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; @@ -14,16 +24,6 @@ import org.junit.jupiter.params.provider.CsvSource; import org.junit.jupiter.params.provider.ValueSource; -import java.nio.BufferOverflowException; -import java.nio.BufferUnderflowException; -import java.nio.ByteBuffer; -import java.nio.charset.StandardCharsets; -import java.util.Arrays; -import static java.nio.ByteOrder.BIG_ENDIAN; -import static java.nio.ByteOrder.LITTLE_ENDIAN; -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; - /** * Base test class for testing {@link ReadableSequentialData} and {@link RandomAccessData}. 
Both classes * have basically the same methods, where one has an implicit cursor position and the other has an explicit @@ -144,7 +144,7 @@ void readPastLimit() { @DisplayName("Reading an unsigned byte") void read() { // Given a sequence of bytes (with a single byte that could be interpreted as negative if signed) - final var seq = sequence(new byte[] { (byte) 0b1110_0011 }); + final var seq = sequence(new byte[] {(byte) 0b1110_0011}); // When we read the byte, then we get the expected byte and move the position forward by a single byte final var pos = seq.position(); assertThat(seq.readUnsignedByte()).isEqualTo(0b1110_0011); @@ -181,7 +181,8 @@ void readNullDstThrows() { void negativeOffsetThrows() { // Given a sequence of bytes final var seq = sequence(TEST_BYTES); - // When we try to read bytes using a byte array with a negative offset, then we get an IndexOutOfBoundsException + // When we try to read bytes using a byte array with a negative offset, then we get an + // IndexOutOfBoundsException assertThatThrownBy(() -> seq.readBytes(new byte[10], -1, 10)).isInstanceOf(IndexOutOfBoundsException.class); } @@ -192,8 +193,7 @@ void tooLargeOffsetThrows() { final var seq = sequence(TEST_BYTES); // When we try to read bytes using a byte array with an offset that is too large, // then we get an IndexOutOfBoundsException - assertThatThrownBy(() -> seq.readBytes(new byte[10], 11, 10)) - .isInstanceOf(IndexOutOfBoundsException.class); + assertThatThrownBy(() -> seq.readBytes(new byte[10], 11, 10)).isInstanceOf(IndexOutOfBoundsException.class); // When we try to read bytes using a byte array with an offset + maxLength that is too large, // then we get an IndexOutOfBoundsException assertThatThrownBy(() -> seq.readBytes(new byte[10], 9, 2)) @@ -205,7 +205,8 @@ void tooLargeOffsetThrows() { void negativeLengthThrows() { // Given a sequence of bytes final var seq = sequence(TEST_BYTES); - // When we try to read bytes using a byte array with a negative length, then we get an IllegalArgumentException + // When we try to read bytes using a byte array with a negative length, then we get an + // IllegalArgumentException assertThatThrownBy(() -> seq.readBytes(new byte[10], 0, -1)).isInstanceOf(IllegalArgumentException.class); assertThatThrownBy(() -> seq.readBytes(-1)).isInstanceOf(IllegalArgumentException.class); } @@ -317,7 +318,7 @@ void readZeroDstByteArrayWithOffset() { final var pos = seq.position(); // When we try to read bytes into the dst but with a 0 length, then the position does not change, // and the destination array is empty - assertThat(seq.readBytes(dst, 5,0)).isZero(); + assertThat(seq.readBytes(dst, 5, 0)).isZero(); assertThat(seq.position()).isEqualTo(pos); assertThat(dst).containsExactly(0, 0, 0, 0, 0, 0, 0, 0, 0, 0); } @@ -498,7 +499,8 @@ void readDstBufferedData() { } @Test - @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is the same length as the sequence") + @DisplayName( + "Reading bytes into a dst BufferedData with offset where the dst is the same length as the sequence") void readDstBufferedDataWithOffset() { final var seq = sequence(TEST_BYTES); final var dst = BufferedData.allocate(TEST_BYTES.length + 10); @@ -511,7 +513,7 @@ void readDstBufferedDataWithOffset() { } @ParameterizedTest - @ValueSource(ints = { 1, 5, 26 }) + @ValueSource(ints = {1, 5, 26}) @DisplayName("Reading a number of bytes into Bytes where the length is > 0 and <= remaining") void readBytes(final int length) { final var seq = sequence(TEST_BYTES); @@ -546,7 +548,7 @@ void 
readLargerDstByteArrayWithOffset() { // Then the sequence is exhausted and the array is filled starting at index 5 assertThat(seq.remaining()).isZero(); assertThat(seq.hasRemaining()).isFalse(); - assertThat(Arrays.copyOfRange(arr, 5, TEST_BYTES.length + 5 )).containsExactly(TEST_BYTES); + assertThat(Arrays.copyOfRange(arr, 5, TEST_BYTES.length + 5)).containsExactly(TEST_BYTES); } @Test @@ -576,7 +578,8 @@ void readLargerDstByteBufferWithOffset() { // Then the sequence is exhausted and the buffer is filled starting at index 5 assertThat(seq.remaining()).isZero(); assertThat(seq.hasRemaining()).isFalse(); - assertThat(Arrays.copyOfRange(buffer.array(), 5, TEST_BYTES.length + 5 )).containsExactly(TEST_BYTES); + assertThat(Arrays.copyOfRange(buffer.array(), 5, TEST_BYTES.length + 5)) + .containsExactly(TEST_BYTES); } @Test @@ -611,9 +614,9 @@ void readLargerDstBufferedDataWithOffset() { @ParameterizedTest(name = "offset={0}, length={1}") @CsvSource({ - "-1, 1", // Negative offset - "100, 10", // Offset larger than the dst array size - "5, 10", // Offset+Length larger than the dst array size + "-1, 1", // Negative offset + "100, 10", // Offset larger than the dst array size + "5, 10", // Offset+Length larger than the dst array size }) @DisplayName("Reading bytes where the dst offset and length are bad") void badOffsetLength(int offset, int length) { @@ -1011,7 +1014,19 @@ void readInsufficientDataThrows() { } @ParameterizedTest(name = "value={0}") - @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY}) + @ValueSource( + floats = { + Float.NaN, + Float.NEGATIVE_INFINITY, + Float.MIN_VALUE, + -8.2f, + -1.3f, + 0, + 1.4f, + 8.5f, + Float.MAX_VALUE, + Float.POSITIVE_INFINITY + }) @DisplayName("Reading a float") void read(float value) { final var seq = sequence(asBytes(c -> c.putFloat(value))); @@ -1026,7 +1041,19 @@ void read(float value) { } @ParameterizedTest(name = "value={0}") - @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY}) + @ValueSource( + floats = { + Float.NaN, + Float.NEGATIVE_INFINITY, + Float.MIN_VALUE, + -8.2f, + -1.3f, + 0, + 1.4f, + 8.5f, + Float.MAX_VALUE, + Float.POSITIVE_INFINITY + }) @DisplayName("Reading a float in Little Endian") void readLittleEndian(float value) { final var seq = sequence(asBytes(c -> c.putFloat(value), LITTLE_ENDIAN)); @@ -1041,7 +1068,19 @@ void readLittleEndian(float value) { } @ParameterizedTest(name = "value={0}") - @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY}) + @ValueSource( + floats = { + Float.NaN, + Float.NEGATIVE_INFINITY, + Float.MIN_VALUE, + -8.2f, + -1.3f, + 0, + 1.4f, + 8.5f, + Float.MAX_VALUE, + Float.POSITIVE_INFINITY + }) @DisplayName("Reading a float in Big Endian") void readBigEndian(float value) { final var seq = sequence(asBytes(c -> c.putFloat(value), BIG_ENDIAN)); @@ -1114,7 +1153,19 @@ void readInsufficientDataThrows() { } @ParameterizedTest(name = "value={0}") - @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY}) + @ValueSource( + doubles = { + Double.NaN, + Double.NEGATIVE_INFINITY, + Double.MIN_VALUE, + -8.2f, + -1.3f, + 0, + 1.4f, + 8.5f, + Double.MAX_VALUE, + Double.POSITIVE_INFINITY + }) @DisplayName("Reading a double") void read(double 
value) { final var seq = sequence(asBytes(c -> c.putDouble(value))); @@ -1129,7 +1180,19 @@ void read(double value) { } @ParameterizedTest(name = "value={0}") - @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY}) + @ValueSource( + doubles = { + Double.NaN, + Double.NEGATIVE_INFINITY, + Double.MIN_VALUE, + -8.2f, + -1.3f, + 0, + 1.4f, + 8.5f, + Double.MAX_VALUE, + Double.POSITIVE_INFINITY + }) @DisplayName("Reading a double in Little Endian") void readLittleEndian(double value) { final var seq = sequence(asBytes(c -> c.putDouble(value), LITTLE_ENDIAN)); @@ -1144,7 +1207,19 @@ void readLittleEndian(double value) { } @ParameterizedTest(name = "value={0}") - @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY}) + @ValueSource( + doubles = { + Double.NaN, + Double.NEGATIVE_INFINITY, + Double.MIN_VALUE, + -8.2f, + -1.3f, + 0, + 1.4f, + 8.5f, + Double.MAX_VALUE, + Double.POSITIVE_INFINITY + }) @DisplayName("Reading a double in Big Endian") void readBigEndian(double value) { final var seq = sequence(asBytes(c -> c.putDouble(value), BIG_ENDIAN)); @@ -1212,14 +1287,14 @@ void readPastLimit() { @ValueSource(booleans = {false, true}) @DisplayName("Reading a varint when less than 4 bytes are available throws BufferUnderflowException") void readInsufficientDataThrows(final boolean zigZag) { - final var seq = sequence(new byte[] { (byte) 0b10101100 }); + final var seq = sequence(new byte[] {(byte) 0b10101100}); assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class); } @Test @DisplayName("Read a varint") void read() { - final var seq = sequence(new byte[] { (byte) 0b10101100, 0b00000010 }); + final var seq = sequence(new byte[] {(byte) 0b10101100, 0b00000010}); final var pos = seq.position(); final var value = seq.readVarInt(false); assertThat(value).isEqualTo(300); @@ -1229,7 +1304,7 @@ void read() { @Test @DisplayName("Read a 3 bytes varint") void read3Bytes() { - final var seq = sequence(new byte[] { (byte) 0b10101100, (byte) 0b10101100, 0b00000010 }); + final var seq = sequence(new byte[] {(byte) 0b10101100, (byte) 0b10101100, 0b00000010}); final var pos = seq.position(); final var value = seq.readVarInt(false); assertThat(value).isEqualTo(38444); @@ -1239,12 +1314,7 @@ void read3Bytes() { @Test @DisplayName("Read a 4 bytes varint") void read4Bytes() { - final var seq = sequence(new byte[] { - (byte) 0b10101100, - (byte) 0b10101100, - (byte) 0b10101100, - 0b00000010 - }); + final var seq = sequence(new byte[] {(byte) 0b10101100, (byte) 0b10101100, (byte) 0b10101100, 0b00000010}); final var pos = seq.position(); final var value = seq.readVarInt(false); assertThat(value).isEqualTo(4920876); @@ -1254,13 +1324,9 @@ void read4Bytes() { @Test @DisplayName("Read a 5 bytes varint") void read5Bytes() { - final var seq = sequence(new byte[] { - (byte) 0b10101100, - (byte) 0b10101100, - (byte) 0b10101100, - (byte) 0b10101100, - 0b00000010 - }); + final var seq = sequence( + new byte[] {(byte) 0b10101100, (byte) 0b10101100, (byte) 0b10101100, (byte) 0b10101100, 0b00000010 + }); final var pos = seq.position(); final var value = seq.readVarInt(false); assertThat(value).isEqualTo(629872172); @@ -1270,7 +1336,7 @@ void read5Bytes() { @Test @DisplayName("Read a varint with zig zag encoding") void readZigZag() { - final var seq = sequence(new byte[] { (byte) 0b10101101, 0b00000010 
}); + final var seq = sequence(new byte[] {(byte) 0b10101101, 0b00000010}); final var pos = seq.position(); final var value = seq.readVarInt(true); assertThat(value).isEqualTo(-151); @@ -1313,14 +1379,14 @@ void readPastLimit() { @ValueSource(booleans = {false, true}) @DisplayName("Reading a varlong when less than 4 bytes are available throws BufferUnderflowException") void readInsufficientDataThrows(final boolean zigZag) { - final var seq = sequence(new byte[] { (byte) 0b10101100 }); + final var seq = sequence(new byte[] {(byte) 0b10101100}); assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class); } @Test @DisplayName("Read a varlong") void read() { - final var seq = sequence(new byte[] { (byte) 0b10101100, 0b00000010 }); + final var seq = sequence(new byte[] {(byte) 0b10101100, 0b00000010}); final var pos = seq.position(); final var value = seq.readVarLong(false); assertThat(value).isEqualTo(300); @@ -1330,7 +1396,7 @@ void read() { @Test @DisplayName("Read a varlong with zig zag encoding") void readZigZag() { - final var seq = sequence(new byte[] { (byte) 0b10101101, 0b00000010 }); + final var seq = sequence(new byte[] {(byte) 0b10101101, 0b00000010}); final var pos = seq.position(); final var value = seq.readVarLong(true); assertThat(value).isEqualTo(-151); @@ -1342,16 +1408,16 @@ void readZigZag() { void readInvalidVarInt() { // Given a very long sequence of bytes all with the "continuation" bit set final var seq = sequence(new byte[] { - (byte) 0b10101101, - (byte) 0b10000010, - (byte) 0b10000010, - (byte) 0b10000010, - (byte) 0b10000010, - (byte) 0b10000010, - (byte) 0b10000010, - (byte) 0b10000010, - (byte) 0b10000010, - (byte) 0b10000010 + (byte) 0b10101101, + (byte) 0b10000010, + (byte) 0b10000010, + (byte) 0b10000010, + (byte) 0b10000010, + (byte) 0b10000010, + (byte) 0b10000010, + (byte) 0b10000010, + (byte) 0b10000010, + (byte) 0b10000010 }); // When we try to decode an int, the lack of a "terminator" bit causes a DataEncodingException assertThatThrownBy(() -> seq.readVarInt(false)).isInstanceOf(DataEncodingException.class); @@ -1362,16 +1428,16 @@ void readInvalidVarInt() { void readInvalidVarLong() { // Given a very long sequence of bytes all with the "continuation" bit set final var seq = sequence(new byte[] { - (byte) 0b10101101, - (byte) 0b10000010, - (byte) 0b10000010, - (byte) 0b10000010, - (byte) 0b10000010, - (byte) 0b10000010, - (byte) 0b10000010, - (byte) 0b10000010, - (byte) 0b10000010, - (byte) 0b10000010 + (byte) 0b10101101, + (byte) 0b10000010, + (byte) 0b10000010, + (byte) 0b10000010, + (byte) 0b10000010, + (byte) 0b10000010, + (byte) 0b10000010, + (byte) 0b10000010, + (byte) 0b10000010, + (byte) 0b10000010 }); // When we try to decode a long, the lack of a "terminator" bit causes a DataEncodingException assertThatThrownBy(() -> seq.readVarLong(false)).isInstanceOf(DataEncodingException.class); diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialDataTest.java index d4eb7b91..3539a4ee 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialDataTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialDataTest.java @@ -1,11 +1,12 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.runtime.io; +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.stream.Stream; import org.junit.jupiter.params.ParameterizedTest; 
import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; -import java.util.stream.Stream; -import static org.assertj.core.api.Assertions.assertThat; /** * Test for default methods on {@link SequentialData}. @@ -20,7 +21,7 @@ private static Stream provideArgumentsForRemaining() { Arguments.of(1, 2, 1), // One byte remaining Arguments.of(1, 3, 2), // Two bytes remaining Arguments.of(-1, -1, 0) // Negatives? (error that we handle) - ); + ); } @ParameterizedTest(name = "position={0}, limit={1}") diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialTestBase.java index 913f3288..c0102321 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialTestBase.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialTestBase.java @@ -1,24 +1,22 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.runtime.io; -import com.hedera.pbj.runtime.io.buffer.BufferedData; -import com.hedera.pbj.runtime.io.stream.WritableStreamingData; -import edu.umd.cs.findbugs.annotations.NonNull; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; +import static java.nio.ByteOrder.BIG_ENDIAN; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Assumptions.assumeTrue; +import edu.umd.cs.findbugs.annotations.NonNull; import java.nio.BufferOverflowException; import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.function.Consumer; -import static java.nio.ByteOrder.BIG_ENDIAN; -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.junit.jupiter.api.Assumptions.assumeTrue; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; /** * Base test class for testing all types of {@link SequentialData} classes. 
@@ -114,10 +112,11 @@ void clampToCapacity() { final class SkipTest { @ParameterizedTest @CsvSource({ - "-1, 0", // skip -1 bytes, limit is 5, so clamp to 0 - "0, 0", // skip 0 bytes, limit is 5, so clamp to 0 - "3, 3", // skip 3 bytes, limit is 5, so clamp to 3 - "5, 5"}) // skip 5 bytes, limit is 5, so clamp to 5 + "-1, 0", // skip -1 bytes, limit is 5, so clamp to 0 + "0, 0", // skip 0 bytes, limit is 5, so clamp to 0 + "3, 3", // skip 3 bytes, limit is 5, so clamp to 3 + "5, 5" + }) // skip 5 bytes, limit is 5, so clamp to 5 @DisplayName("Skipping relative to the limit will clamp at limit") void skipping(long skip, long expected) { // Given a sequence, and some number of bytes to skip @@ -133,1223 +132,1236 @@ void skipping(long skip, long expected) { } @ParameterizedTest - @CsvSource({ - "7"}) // skip 7 bytes, limit is 5, so throw on skip() + @CsvSource({"7"}) // skip 7 bytes, limit is 5, so throw on skip() @DisplayName("Skipping beyond the limit will throw") void skippingAndThrowing(long skip) { // Given a sequence, and some number of bytes to skip final var seq = sequence(); // When we set the limit to be between the position and capacity, and we skip those bytes seq.limit(5); - assertThatThrownBy(() -> seq.skip(skip)).isInstanceOfAny( - BufferUnderflowException.class, - BufferOverflowException.class - ); + assertThatThrownBy(() -> seq.skip(skip)) + .isInstanceOfAny(BufferUnderflowException.class, BufferOverflowException.class); } } @Nested @DisplayName("writeByte()") final class WriteByteTest { -// @Test -// @DisplayName("Writing a byte to an empty sequence throws BufferOverflowException") -// void writeToEmptyDataThrows() { -// // Given an empty sequence -// final var seq = emptySequence(); -// // When we try to read a byte, then we get a BufferOverflowException -// assertThatThrownBy(() -> seq.writeByte((byte) 1)).isInstanceOf(BufferOverflowException.class); -// } -// -// @Test -// @DisplayName("Reading a byte from a full read sequence throws BufferUnderflowException") -// void readFromFullyReadDataThrows() { -// // Given a fully read sequence -// final var seq = fullyUsedSequence(); -// // When we try to read a byte, then we get a BufferUnderflowException -// assertThatThrownBy(seq::readByte).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading a byte past the limit throws BufferUnderflowException") -// void readPastLimit() { -// // Given a sequence of bytes with a limit where position == limit -// final var seq = sequence(TEST_BYTES); -// seq.limit(5); -// seq.skip(5); -// // When we try to read a byte, then we get a BufferUnderflowException -// assertThatThrownBy(seq::readByte).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading bytes from beginning to end") -// void read() { -// // Given a sequence of bytes -// final var seq = sequence(TEST_BYTES); -// // When we read each byte, then we get the expected byte -// for (byte testByte : TEST_BYTES) { -// final var pos = seq.position(); -// assertThat(seq.hasRemaining()).isTrue(); -// assertThat(seq.readByte()).isEqualTo(testByte); -// assertThat(seq.position()).isEqualTo(pos + 1); -// } -// // And when we get to the end, there is no longer anything to be read -// assertThat(seq.hasRemaining()).isFalse(); -// assertThat(seq.remaining()).isZero(); -// } + // @Test + // @DisplayName("Writing a byte to an empty sequence throws BufferOverflowException") + // void writeToEmptyDataThrows() { + // // Given an empty sequence + // final var seq = 
emptySequence(); + // // When we try to read a byte, then we get a BufferOverflowException + // assertThatThrownBy(() -> seq.writeByte((byte) 1)).isInstanceOf(BufferOverflowException.class); + // } + // + // @Test + // @DisplayName("Reading a byte from a full read sequence throws BufferUnderflowException") + // void readFromFullyReadDataThrows() { + // // Given a fully read sequence + // final var seq = fullyUsedSequence(); + // // When we try to read a byte, then we get a BufferUnderflowException + // assertThatThrownBy(seq::readByte).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading a byte past the limit throws BufferUnderflowException") + // void readPastLimit() { + // // Given a sequence of bytes with a limit where position == limit + // final var seq = sequence(TEST_BYTES); + // seq.limit(5); + // seq.skip(5); + // // When we try to read a byte, then we get a BufferUnderflowException + // assertThatThrownBy(seq::readByte).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading bytes from beginning to end") + // void read() { + // // Given a sequence of bytes + // final var seq = sequence(TEST_BYTES); + // // When we read each byte, then we get the expected byte + // for (byte testByte : TEST_BYTES) { + // final var pos = seq.position(); + // assertThat(seq.hasRemaining()).isTrue(); + // assertThat(seq.readByte()).isEqualTo(testByte); + // assertThat(seq.position()).isEqualTo(pos + 1); + // } + // // And when we get to the end, there is no longer anything to be read + // assertThat(seq.hasRemaining()).isFalse(); + // assertThat(seq.remaining()).isZero(); + // } } -// @Nested -// @DisplayName("readUnsignedByte()") -// final class ReadUnsignedByteTest { -// @Test -// @DisplayName("Reading an unsigned byte from an empty sequence throws BufferUnderflowException") -// void readFromEmptyDataThrows() { -// // Given an empty sequence -// final var seq = emptySequence(); -// // When we try to read an unsigned byte, then we get a BufferUnderflowException -// assertThatThrownBy(seq::readUnsignedByte).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading an unsigned byte from a full read sequence throws BufferUnderflowException") -// void readFromFullyReadDataThrows() { -// // Given a fully read sequence -// final var seq = fullyUsedSequence(); -// // When we try to read an unsigned byte, then we get a BufferUnderflowException -// assertThatThrownBy(seq::readUnsignedByte).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading an unsigned byte past the limit throws BufferUnderflowException") -// void readPastLimit() { -// // Given a sequence of bytes with a limit where position == limit -// final var seq = sequence(TEST_BYTES); -// seq.limit(5); -// seq.skip(5); -// // When we try to read an unsigned byte, then we get a BufferUnderflowException -// assertThatThrownBy(seq::readUnsignedByte).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading an unsigned byte") -// void read() { -// // Given a sequence of bytes (with a single byte that could be interpreted as negative if signed) -// final var seq = sequence(new byte[] { (byte) 0b1110_0011 }); -// // When we read the byte, then we get the expected byte and move the position forward by a single byte -// final var pos = seq.position(); -// assertThat(seq.readUnsignedByte()).isEqualTo(0b1110_0011); -// assertThat(seq.position()).isEqualTo(pos + 1); -// } 
-// } -// -// @Nested -// @DisplayName("readBytes()") -// final class ReadBytesTest { -// @Test -// @DisplayName("Reading bytes with a null dst throws NullPointerException") -// void readNullDstThrows() { -// // Given a sequence of bytes -// final var seq = sequence(TEST_BYTES); -// -// // When we try to read bytes using a null byte array, then we get a NullPointerException -// //noinspection DataFlowIssue -// assertThatThrownBy(() -> seq.readBytes((byte[]) null)).isInstanceOf(NullPointerException.class); -// //noinspection DataFlowIssue -// assertThatThrownBy(() -> seq.readBytes(null, 0, 10)).isInstanceOf(NullPointerException.class); -// -// // When we try to read bytes using a null ByteBuffer, then we get a NullPointerException -// //noinspection DataFlowIssue -// assertThatThrownBy(() -> seq.readBytes((ByteBuffer) null)).isInstanceOf(NullPointerException.class); -// -// // When we try to read bytes using a null BufferedData, then we get a NullPointerException -// //noinspection DataFlowIssue -// assertThatThrownBy(() -> seq.readBytes((BufferedData) null)).isInstanceOf(NullPointerException.class); -// } -// -// @Test -// @DisplayName("Reading bytes with a negative offset throws IndexOutOfBoundsException") -// void negativeOffsetThrows() { -// // Given a sequence of bytes -// final var seq = sequence(TEST_BYTES); -// // When we try to read bytes using a byte array with a negative offset, then we get an IndexOutOfBoundsException -// assertThatThrownBy(() -> seq.readBytes(new byte[10], -1, 10)).isInstanceOf(IndexOutOfBoundsException.class); -// } -// -// @Test -// @DisplayName("Reading bytes with an offset that is too large throws IndexOutOfBoundsException") -// void tooLargeOffsetThrows() { -// // Given a sequence of bytes -// final var seq = sequence(TEST_BYTES); -// // When we try to read bytes using a byte array with an offset that is too large, -// // then we get an IndexOutOfBoundsException -// assertThatThrownBy(() -> seq.readBytes(new byte[10], 11, 10)) -// .isInstanceOf(IndexOutOfBoundsException.class); -// // When we try to read bytes using a byte array with an offset + maxLength that is too large, -// // then we get an IndexOutOfBoundsException -// assertThatThrownBy(() -> seq.readBytes(new byte[10], 9, 2)) -// .isInstanceOf(IndexOutOfBoundsException.class); -// } -// -// @Test -// @DisplayName("Reading bytes with a negative length throws IllegalArgumentException") -// void negativeLengthThrows() { -// // Given a sequence of bytes -// final var seq = sequence(TEST_BYTES); -// // When we try to read bytes using a byte array with a negative length, then we get an IllegalArgumentException -// assertThatThrownBy(() -> seq.readBytes(new byte[10], 0, -1)).isInstanceOf(IllegalArgumentException.class); -// assertThatThrownBy(() -> seq.readBytes(-1)).isInstanceOf(IllegalArgumentException.class); -// } -// -// @Test -// @DisplayName("Reading bytes from an empty sequence is a no-op") -// void readFromEmptyDataIsNoOp() { -// // Given an empty sequence -// final var seq = emptySequence(); -// -// // When we try to read bytes using a byte array, then we get nothing read -// assertThat(seq.readBytes(new byte[10])).isZero(); -// assertThat(seq.readBytes(new byte[10], 0, 2)).isZero(); -// -// // When we try to read bytes using a ByteBuffer, then we get nothing read -// final var byteBuffer = ByteBuffer.allocate(10); -// assertThat(seq.readBytes(byteBuffer)).isZero(); -// -// // When we try to read bytes using a BufferedData, then we get nothing read -// final var bufferedData = 
BufferedData.allocate(10); -// assertThat(seq.readBytes(bufferedData)).isZero(); -// } -// -// @Test -// @DisplayName("Reading bytes from a fully read sequence is a no-op") -// void readFromFullyReadDataIsNoOp() { -// // Given a fully read sequence -// final var seq = fullyUsedSequence(); -// -// // When we try to read bytes using a byte array, then we get nothing read -// assertThat(seq.readBytes(new byte[10])).isZero(); -// assertThat(seq.readBytes(new byte[10], 0, 2)).isZero(); -// -// // When we try to read bytes using a ByteBuffer, then we get nothing read -// final var byteBuffer = ByteBuffer.allocate(10); -// assertThat(seq.readBytes(byteBuffer)).isZero(); -// -// // When we try to read bytes using a BufferedData, then we get nothing read -// final var bufferedData = BufferedData.allocate(10); -// assertThat(seq.readBytes(bufferedData)).isZero(); -// } -// -// @Test -// @DisplayName("Reading bytes where there is nothing remaining because we are at the limit is a no-op") -// void readPastLimit() { -// // Given a sequence of bytes with a limit where position == limit -// final var seq = sequence(TEST_BYTES); -// seq.limit(5); -// seq.skip(5); -// -// // When we try to read bytes using a byte array, then we get nothing read -// assertThat(seq.readBytes(new byte[10])).isZero(); -// assertThat(seq.readBytes(new byte[10], 0, 2)).isZero(); -// -// // When we try to read bytes using a ByteBuffer, then we get nothing read -// final var byteBuffer = ByteBuffer.allocate(10); -// assertThat(seq.readBytes(byteBuffer)).isZero(); -// -// // When we try to read bytes using a BufferedData, then we get nothing read -// final var bufferedData = BufferedData.allocate(10); -// assertThat(seq.readBytes(bufferedData)).isZero(); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst byte array where the dst has length of 0") -// void readZeroDstByteArray() { -// // Given a sequence of bytes and an empty destination byte array -// final var seq = sequence(TEST_BYTES); -// final var dst = new byte[0]; -// final var pos = seq.position(); -// // When we try to read bytes into the dst, then the position does not change, -// // and the destination array is empty -// assertThat(seq.readBytes(dst)).isZero(); -// assertThat(seq.position()).isEqualTo(pos); -// assertThat(dst).isEmpty(); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst byte array with offset and length where the dst has length of 0") -// void readZeroDstByteArrayWithOffset() { -// // Given a sequence of bytes and a destination byte array -// final var seq = sequence(TEST_BYTES); -// final var dst = new byte[10]; -// final var pos = seq.position(); -// // When we try to read bytes into the dst but with a 0 length, then the position does not change, -// // and the destination array is empty -// assertThat(seq.readBytes(dst, 5,0)).isZero(); -// assertThat(seq.position()).isEqualTo(pos); -// assertThat(dst).containsExactly(0, 0, 0, 0, 0, 0, 0, 0, 0, 0); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst ByteBuffer where the dst has length of 0") -// void readZeroDstByteBuffer() { -// // Given a sequence of bytes and an empty destination ByteBuffer -// final var seq = sequence(TEST_BYTES); -// final var dst = ByteBuffer.allocate(0); -// final var pos = seq.position(); -// // When we try to read bytes into the dst, then the position does not change, -// // and the destination buffer is empty -// assertThat(seq.readBytes(dst)).isZero(); -// assertThat(seq.position()).isEqualTo(pos); -// assertThat(dst.position()).isZero(); 
-// } -// -// @Test -// @DisplayName("Reading bytes into a dst BufferedData where the dst has length of 0") -// void readZeroDstBufferedData() { -// // Given a sequence of bytes and an empty destination BufferedData -// final var seq = sequence(TEST_BYTES); -// final var dst = BufferedData.allocate(0); -// final var pos = seq.position(); -// // When we try to read bytes into the dst, then the position does not change, -// // and the destination buffer is empty -// assertThat(seq.readBytes(dst)).isZero(); -// assertThat(seq.position()).isEqualTo(pos); -// assertThat(dst.position()).isZero(); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst byte array where the dst is smaller than the sequence") -// void readSmallerDstByteArray() { -// // Given a sequence of bytes and a destination byte array -// final var seq = sequence(TEST_BYTES); -// // When we try reading into the dst (twice, once from the beginning and once in the middle) -// for (int i = 0; i < 2; i++) { -// final var dst = new byte[5]; -// final var pos = seq.position(); -// final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5); -// assertThat(seq.readBytes(dst)).isEqualTo(5); -// // Then the dst is filled with the bytes from the sequence, and the position is updated -// assertThat(dst).isEqualTo(subset); -// assertThat(seq.position()).isEqualTo(pos + 5); -// } -// } -// -// @Test -// @DisplayName("Reading bytes into a dst byte array with offset where the dst is smaller than the sequence") -// void readSmallerDstByteArrayWithOffset() { -// final var seq = sequence(TEST_BYTES); -// // Do twice, so we read once from sequence at the beginning and once in the middle -// for (int i = 0; i < 2; i++) { -// final var dst = new byte[10]; -// final var pos = seq.position(); -// final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5); -// assertThat(seq.readBytes(dst, 3, 5)).isEqualTo(5); -// assertThat(Arrays.copyOfRange(dst, 3, 8)).isEqualTo(subset); -// assertThat(seq.position()).isEqualTo(pos + 5); -// } -// } -// -// @Test -// @DisplayName("Reading bytes into a dst ByteBuffer where the dst is smaller than the sequence") -// void readSmallerDstByteBuffer() { -// final var seq = sequence(TEST_BYTES); -// for (int i = 0; i < 2; i++) { -// final var dst = ByteBuffer.allocate(5); -// final var pos = seq.position(); -// final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5); -// assertThat(seq.readBytes(dst)).isEqualTo(5); -// assertThat(dst.array()).isEqualTo(subset); -// assertThat(seq.position()).isEqualTo(pos + 5); -// } -// } -// -// @Test -// @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is smaller than the sequence") -// void readSmallerDstByteBufferWithOffset() { -// final var seq = sequence(TEST_BYTES); -// for (int i = 0; i < 2; i++) { -// final var dst = ByteBuffer.allocate(10); -// dst.position(5); -// final var pos = seq.position(); -// final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5); -// assertThat(seq.readBytes(dst)).isEqualTo(5); -// assertThat(dst.slice(5, 5)).isEqualTo(ByteBuffer.wrap(subset)); -// assertThat(seq.position()).isEqualTo(pos + 5); -// } -// } -// -// @Test -// @DisplayName("Reading bytes into a dst BufferedData where the dst is smaller than the sequence") -// void readSmallerDstBufferedData() { -// final var seq = sequence(TEST_BYTES); -// for (int i = 0; i < 2; i++) { -// final var dst = BufferedData.allocate(5); -// final var pos = seq.position(); -// final var subset = 
Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5); -// assertThat(seq.readBytes(dst)).isEqualTo(5); -// assertThat(dst).isEqualTo(BufferedData.wrap(subset)); -// assertThat(seq.position()).isEqualTo(pos + 5); -// } -// } -// -// @Test -// @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is smaller than the sequence") -// void readSmallerDstBufferedDataWithOffset() { -// final var seq = sequence(TEST_BYTES); -// for (int i = 0; i < 2; i++) { -// final var dst = BufferedData.allocate(10); -// dst.position(5); -// final var pos = seq.position(); -// final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5); -// assertThat(seq.readBytes(dst)).isEqualTo(5); -// assertThat(dst.slice(5, 5)).isEqualTo(BufferedData.wrap(subset)); -// assertThat(seq.position()).isEqualTo(pos + 5); -// } -// } -// -// @Test -// @DisplayName("Reading bytes into a dst byte array where the dst is the same length as the sequence") -// void readDstByteArray() { -// final var seq = sequence(TEST_BYTES); -// final var dst = new byte[TEST_BYTES.length]; -// final var pos = seq.position(); -// assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length); -// assertThat(dst).isEqualTo(TEST_BYTES); -// assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst byte array with offset where the dst is the same length as the sequence") -// void readDstByteArrayWithOffset() { -// final var seq = sequence(TEST_BYTES); -// final var dst = new byte[TEST_BYTES.length + 10]; -// final var pos = seq.position(); -// assertThat(seq.readBytes(dst, 5, TEST_BYTES.length)).isEqualTo(TEST_BYTES.length); -// assertThat(Arrays.copyOfRange(dst, 5, 5 + TEST_BYTES.length)).isEqualTo(TEST_BYTES); -// assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst ByteBuffer where the dst is the same length as the sequence") -// void readDstByteBuffer() { -// final var seq = sequence(TEST_BYTES); -// final var dst = ByteBuffer.allocate(TEST_BYTES.length); -// final var pos = seq.position(); -// assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length); -// assertThat(dst.array()).isEqualTo(TEST_BYTES); -// assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is the same length as the sequence") -// void readDstByteBufferWithOffset() { -// final var seq = sequence(TEST_BYTES); -// final var dst = ByteBuffer.allocate(TEST_BYTES.length + 10); -// final var pos = seq.position(); -// dst.position(5); -// dst.limit(TEST_BYTES.length + 5); -// assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length); -// assertThat(dst.slice(5, TEST_BYTES.length)).isEqualTo(ByteBuffer.wrap(TEST_BYTES)); -// assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst BufferedData where the dst is the same length as the sequence") -// void readDstBufferedData() { -// final var seq = sequence(TEST_BYTES); -// final var dst = BufferedData.allocate(TEST_BYTES.length); -// final var pos = seq.position(); -// assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length); -// assertThat(dst).isEqualTo(BufferedData.wrap(TEST_BYTES)); -// assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is the same length as the 
sequence") -// void readDstBufferedDataWithOffset() { -// final var seq = sequence(TEST_BYTES); -// final var dst = BufferedData.allocate(TEST_BYTES.length + 10); -// final var pos = seq.position(); -// dst.position(5); -// dst.limit(TEST_BYTES.length + 5); -// assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length); -// assertThat(dst.slice(5, TEST_BYTES.length)).isEqualTo(BufferedData.wrap(TEST_BYTES)); -// assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst byte array where the dst is larger than the sequence") -// void readLargerDstByteArray() { -// // Given a sequence of bytes -// final var seq = sequence(TEST_BYTES); -// // When we read the bytes into a larger byte array -// final var arr = new byte[TEST_BYTES.length + 1]; -// assertThat(seq.readBytes(arr)).isEqualTo(TEST_BYTES.length); -// // Then the sequence is exhausted and the array is filled starting at index 0 -// assertThat(seq.remaining()).isZero(); -// assertThat(seq.hasRemaining()).isFalse(); -// assertThat(arr).startsWith(TEST_BYTES); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst byte array with offset where the dst is larger than the sequence") -// void readLargerDstByteArrayWithOffset() { -// // Given a sequence of bytes -// final var seq = sequence(TEST_BYTES); -// // When we read the bytes into a larger byte array with an offset -// final var arr = new byte[TEST_BYTES.length + 10]; -// assertThat(seq.readBytes(arr, 5, TEST_BYTES.length + 1)).isEqualTo(TEST_BYTES.length); -// // Then the sequence is exhausted and the array is filled starting at index 5 -// assertThat(seq.remaining()).isZero(); -// assertThat(seq.hasRemaining()).isFalse(); -// assertThat(Arrays.copyOfRange(arr, 5, TEST_BYTES.length + 5 )).containsExactly(TEST_BYTES); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst ByteBuffer where the dst is larger than the sequence") -// void readLargerDstByteBuffer() { -// // Given a sequence of bytes -// final var seq = sequence(TEST_BYTES); -// // When we read the bytes into a larger buffer -// final var buffer = ByteBuffer.allocate(TEST_BYTES.length + 1); -// assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length); -// // Then the sequence is exhausted and the buffer is filled starting at index 0 -// assertThat(seq.remaining()).isZero(); -// assertThat(seq.hasRemaining()).isFalse(); -// assertThat(buffer.array()).startsWith(TEST_BYTES); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is larger than the sequence") -// void readLargerDstByteBufferWithOffset() { -// // Given a sequence of bytes -// final var seq = sequence(TEST_BYTES); -// // When we read the bytes into a larger buffer with an offset -// final var buffer = ByteBuffer.allocate(TEST_BYTES.length + 10); -// buffer.position(5); -// buffer.limit(5 + TEST_BYTES.length + 1); -// assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length); -// // Then the sequence is exhausted and the buffer is filled starting at index 5 -// assertThat(seq.remaining()).isZero(); -// assertThat(seq.hasRemaining()).isFalse(); -// assertThat(Arrays.copyOfRange(buffer.array(), 5, TEST_BYTES.length + 5 )).containsExactly(TEST_BYTES); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst BufferedData where the dst is larger than the sequence") -// void readLargerDstBufferedData() { -// // Given a sequence of bytes -// final var seq = sequence(TEST_BYTES); -// // When we read the bytes into a larger 
buffer -// final var buffer = BufferedData.allocate(TEST_BYTES.length + 1); -// assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length); -// // Then the sequence is exhausted and the buffer is filled starting at index 0 -// assertThat(seq.remaining()).isZero(); -// assertThat(seq.hasRemaining()).isFalse(); -// assertThat(buffer.slice(0, TEST_BYTES.length)).isEqualTo(BufferedData.wrap(TEST_BYTES)); -// } -// -// @Test -// @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is larger than the sequence") -// void readLargerDstBufferedDataWithOffset() { -// // Given a sequence of bytes -// final var seq = sequence(TEST_BYTES); -// // When we read the bytes into a larger buffer with an offset -// final var buffer = BufferedData.allocate(TEST_BYTES.length + 10); -// buffer.position(5); -// buffer.limit(5 + TEST_BYTES.length + 1); -// assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length); -// // Then the sequence is exhausted and the buffer is filled starting at index 5 -// assertThat(seq.remaining()).isZero(); -// assertThat(seq.hasRemaining()).isFalse(); -// assertThat(buffer.slice(5, TEST_BYTES.length)).isEqualTo(BufferedData.wrap(TEST_BYTES)); -// } -// -// @ParameterizedTest(name = "offset={0}, length={1}") -// @CsvSource({ -// "-1, 1", // Negative offset -// "100, 10", // Offset larger than the dst array size -// "5, 10", // Offset+Length larger than the dst array size -// }) -// @DisplayName("Reading bytes where the dst offset and length are bad") -// void badOffsetLength(int offset, int length) { -// final var seq = sequence(TEST_BYTES); -// assertThatThrownBy(() -> seq.readBytes(new byte[10], offset, length)) -// .isInstanceOf(IndexOutOfBoundsException.class); -// } -// } -// -// @Nested -// @DisplayName("view()") -// final class ViewTest { -// @Test -// @DisplayName("Negative length throws IllegalArgumentException") -// void negativeLength() { -// final var seq = sequence(TEST_BYTES); -// assertThatThrownBy(() -> seq.view(-1)).isInstanceOf(IllegalArgumentException.class); -// } -// -// @Test -// @DisplayName("Length that is greater than remaining throws BufferUnderflowException") -// @Disabled("This has to be tested on the buffer level only, because for a Stream, the limit is too big") -// void lengthGreaterThanRemaining() { -// // TODO Move to buffer tests -// final var seq = sequence(TEST_BYTES); -// seq.skip(1); -// assertThatThrownBy(() -> seq.view(TEST_BYTES.length)).isInstanceOf(BufferUnderflowException.class); -// assertThatThrownBy(() -> seq.view(Integer.MAX_VALUE)).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Creating a view past the limit throws BufferUnderflowException") -// void readPastLimit() { -// // Given a sequence of bytes with a limit where position == limit -// final var seq = sequence(TEST_BYTES); -// seq.limit(5); -// seq.skip(5); -// // When we try to create a view with a length past the limit, then we get a BufferUnderflowException -// assertThatThrownBy(() -> seq.view(6)).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Length is zero (OK, empty sequence)") -// void lengthIsZero() { -// final var seq = sequence(TEST_BYTES); -// assertThat(seq.view(0).remaining()).isZero(); -// } -// -// @Test -// @DisplayName("Length + Position is less than limit (OK)") -// void lengthPlusPositionIsLessThanLimit() { -// final var seq = sequence(TEST_BYTES); -// seq.skip(5); -// final var view = seq.view(10); -// -// assertThat(view.remaining()).isEqualTo(10); -// 
assertThat(view.readBytes(10)).isEqualTo(Bytes.wrap(TEST_BYTES).slice(5, 10)); -// } -// -// @Test -// @DisplayName("Length + Position is the limit (OK)") -// void lengthPlusPositionIsTheLimit() { -// // Given a sequence of bytes where the position is 10 bytes from the end -// final var seq = sequence(TEST_BYTES); -// final var startIndex = TEST_BYTES.length - 10; -// assertThat(seq.skip(startIndex)).isEqualTo(16); -// assertThat(seq.position()).isEqualTo(16); -// // When we create a view with a length of 10 bytes -// final var view = seq.view(10); -// // Then we get the last 10 bytes of the sequence, AND it advances the position by that many bytes. -// assertThat(seq.position()).isEqualTo(26); -// // The view, when read, will have all 10 of its bytes -// assertThat(view.remaining()).isEqualTo(10); -// final var bytes = view.readBytes(10); -// assertThat(view.position()).isEqualTo(10); -// // And those bytes will be the last 10 bytes of the sequence -// assertThat(bytes).isEqualTo(Bytes.wrap(TEST_BYTES).slice(startIndex, 10)); -// } -// -// @Test -// @DisplayName("Get sub-sequence of a sub-sequence") -// void subSequenceOfSubSequence() { -// final var seq = sequence(TEST_BYTES); -// final var subSeq = seq.view(10); -// final var subSubSeq = subSeq.view(5); -// assertThat(subSubSeq.remaining()).isEqualTo(5); -// assertThat(subSubSeq.readBytes(5)).isEqualTo(Bytes.wrap(TEST_BYTES).slice(0, 5)); -// } -// } -// -// @Nested -// @DisplayName("readInt()") -// final class ReadIntTest { -// @Test -// @DisplayName("Reading an int from an empty sequence throws BufferUnderflowException") -// void readFromEmptyDataThrows() { -// final var seq = emptySequence(); -// assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading an int from a full read sequence throws BufferUnderflowException") -// void readFromFullyReadDataThrows() { -// final var seq = fullyUsedSequence(); -// assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading an int past the limit throws BufferUnderflowException") -// void readPastLimit() { -// // Given a sequence of bytes with a limit where position == limit -// final var seq = sequence(TEST_BYTES); -// seq.limit(5); -// // When we try to read an int, then we get a BufferUnderflowException -// seq.skip(4); // Only 1 byte left, not enough -// assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class); -// seq.skip(1); // No bytes left, not enough -// assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading an int when less than 4 bytes are available throws BufferUnderflowException") -// void readInsufficientDataThrows() { -// for (int i = 0; i < 3; i++) { -// final var seq = sequence(new byte[i]); -// assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class); -// } -// } -// -// @ParameterizedTest(name = "value={0}") -// @ValueSource(ints = {Integer.MIN_VALUE, -8, -1, 0, 1, 8, Integer.MAX_VALUE}) -// @DisplayName("Reading an int") -// void read(int value) { -// // Given a sequence with exactly 1 integer of data -// final var seq = sequence(asBytes(c -> c.putInt(value))); -// final var pos = seq.position(); -// // When we read an int, then it is the same as the one we wrote, and the position has moved forward -// // by 4 bytes -// assertThat(seq.readInt()).isEqualTo(value); -// assertThat(seq.position()).isEqualTo(pos + 4); -// } -// -// 
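
The readInt() cases above, together with the little-/big-endian variants that follow, all come down to interpreting the same four bytes under two byte orders. A minimal standalone sketch of that behaviour using plain java.nio.ByteBuffer (illustration only; the class name is invented and this is not the PBJ sequence API):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

class IntEndiannessSketch {
    public static void main(String[] args) {
        byte[] bytes = {0x01, 0x02, 0x03, 0x04};
        // Big endian: most significant byte first -> 0x01020304
        int big = ByteBuffer.wrap(bytes).order(ByteOrder.BIG_ENDIAN).getInt();
        // Little endian: least significant byte first -> 0x04030201
        int little = ByteBuffer.wrap(bytes).order(ByteOrder.LITTLE_ENDIAN).getInt();
        System.out.printf("big=0x%08X little=0x%08X%n", big, little);
    }
}

Reading the same payload twice with different orders, as the mixed-endian test does, simply switches which end of the 4-byte group is treated as most significant.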
@ParameterizedTest(name = "value={0}") -// @ValueSource(ints = {Integer.MIN_VALUE, -8, -1, 0, 1, 8, Integer.MAX_VALUE}) -// @DisplayName("Reading an int in Little Endian") -// void readLittleEndian(int value) { -// final var seq = sequence(asBytes(c -> c.putInt(value), LITTLE_ENDIAN)); -// final var pos = seq.position(); -// assertThat(seq.readInt(LITTLE_ENDIAN)).isEqualTo(value); -// assertThat(seq.position()).isEqualTo(pos + 4); -// } -// -// @ParameterizedTest(name = "value={0}") -// @ValueSource(ints = {Integer.MIN_VALUE, -8, -1, 0, 1, 8, Integer.MAX_VALUE}) -// @DisplayName("Reading an int in Big Endian") -// void readBigEndian(int value) { -// final var seq = sequence(asBytes(c -> c.putInt(value), BIG_ENDIAN)); -// final var pos = seq.position(); -// assertThat(seq.readInt(BIG_ENDIAN)).isEqualTo(value); -// assertThat(seq.position()).isEqualTo(pos + 4); -// } -// -// @Test -// @DisplayName("Read a mixture of big and little endian data") -// void readMixedEndian() { -// final var seq = sequence(asBytes(c -> { -// c.order(BIG_ENDIAN); -// c.putInt(0x01020304); -// c.order(LITTLE_ENDIAN); -// c.putInt(0x05060708); -// c.order(BIG_ENDIAN); -// c.putInt(0x090A0B0C); -// c.order(LITTLE_ENDIAN); -// c.putInt(0x0D0E0F10); -// })); -// assertThat(seq.readInt()).isEqualTo(0x01020304); -// assertThat(seq.readInt(LITTLE_ENDIAN)).isEqualTo(0x05060708); -// assertThat(seq.readInt()).isEqualTo(0x090A0B0C); -// assertThat(seq.readInt(LITTLE_ENDIAN)).isEqualTo(0x0D0E0F10); -// } -// } -// -// @Nested -// @DisplayName("readUnsignedInt()") -// final class ReadUnsignedIntTest { -// @Test -// @DisplayName("Reading an unsigned int from an empty sequence throws BufferUnderflowException") -// void readFromEmptyDataThrows() { -// final var seq = emptySequence(); -// assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading an unsigned int from a full read sequence throws BufferUnderflowException") -// void readFromFullyReadDataThrows() { -// final var seq = fullyUsedSequence(); -// assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading an unsigned int past the limit throws BufferUnderflowException") -// void readPastLimit() { -// // Given a sequence of bytes with a limit where position == limit -// final var seq = sequence(TEST_BYTES); -// seq.limit(5); -// // When we try to read an unsigned int, then we get a BufferUnderflowException -// seq.skip(4); // Only 1 byte left, not enough -// assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class); -// seq.skip(1); // No bytes left, not enough -// assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading an unsigned int when less than 4 bytes are available throws BufferUnderflowException") -// void readInsufficientDataThrows() { -// for (int i = 0; i < 3; i++) { -// final var seq = sequence(new byte[i]); -// assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class); -// } -// } -// -// @ParameterizedTest(name = "value={0}") -// @ValueSource(longs = {0x00FFFFFFFFL, 0, 1, 8, 0x007FFFFFFFL}) -// @DisplayName("Reading an unsigned int") -// void read(long value) { -// final var seq = sequence(asBytes(c -> c.putInt((int) value))); -// final var pos = seq.position(); -// assertThat(seq.readUnsignedInt()).isEqualTo(value); -// assertThat(seq.position()).isEqualTo(pos + 4); -// } -// -// 
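
readUnsignedInt() is expected to return the four bytes as a non-negative long, which is why the mixed-endian case compares against long literals such as 0x91020304L. A small sketch of the underlying widening, assuming the usual mask or Integer.toUnsignedLong conversion (illustrative, not the PBJ implementation):

class UnsignedIntSketch {
    public static void main(String[] args) {
        int raw = 0x91020304;                      // negative when treated as a signed int
        long viaMask = raw & 0xFFFFFFFFL;          // 0x91020304L (2432828164)
        long viaJdk = Integer.toUnsignedLong(raw); // same value
        System.out.println(viaMask + " " + viaJdk);
    }
}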
@ParameterizedTest(name = "value={0}") -// @ValueSource(longs = {0x00FFFFFFFFL, 0, 1, 8, 0x007FFFFFFFL}) -// @DisplayName("Reading an unsigned int in Little Endian") -// void readLittleEndian(long value) { -// final var seq = sequence(asBytes(c -> c.putInt((int) value), LITTLE_ENDIAN)); -// final var pos = seq.position(); -// assertThat(seq.readUnsignedInt(LITTLE_ENDIAN)).isEqualTo(value); -// assertThat(seq.position()).isEqualTo(pos + 4); -// } -// -// @ParameterizedTest(name = "value={0}") -// @ValueSource(longs = {0x00FFFFFFFFL, 0, 1, 8, 0x007FFFFFFFL}) -// @DisplayName("Reading an unsigned int in Big Endian") -// void readBigEndian(long value) { -// final var seq = sequence(asBytes(c -> c.putInt((int) value), BIG_ENDIAN)); -// final var pos = seq.position(); -// assertThat(seq.readUnsignedInt(BIG_ENDIAN)).isEqualTo(value); -// assertThat(seq.position()).isEqualTo(pos + 4); -// } -// -// @Test -// @DisplayName("Read a mixture of big and little endian data") -// void readMixedEndian() { -// final var seq = sequence(asBytes(c -> { -// c.order(BIG_ENDIAN); -// c.putInt(0x91020304); -// c.order(LITTLE_ENDIAN); -// c.putInt(0x95060708); -// c.order(BIG_ENDIAN); -// c.putInt(0x990A0B0C); -// c.order(LITTLE_ENDIAN); -// c.putInt(0x9D0E0F10); -// })); -// assertThat(seq.readUnsignedInt()).isEqualTo(0x91020304L); -// assertThat(seq.readUnsignedInt(LITTLE_ENDIAN)).isEqualTo(0x95060708L); -// assertThat(seq.readUnsignedInt()).isEqualTo(0x990A0B0CL); -// assertThat(seq.readUnsignedInt(LITTLE_ENDIAN)).isEqualTo(0x9D0E0F10L); -// } -// } -// -// @Nested -// @DisplayName("readLong()") -// final class ReadLongTest { -// @Test -// @DisplayName("Reading a long from an empty sequence throws BufferUnderflowException") -// void readFromEmptyDataThrows() { -// final var seq = emptySequence(); -// assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading a long from a full read sequence throws BufferUnderflowException") -// void readFromFullyReadDataThrows() { -// final var seq = fullyUsedSequence(); -// assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading a long past the limit throws BufferUnderflowException") -// void readPastLimit() { -// // Given a sequence of bytes with a limit where position == limit -// final var seq = sequence(TEST_BYTES); -// seq.limit(5); -// // When we try to read a long, then we get a BufferUnderflowException -// seq.skip(4); // Only 1 byte left, not enough -// assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class); -// seq.skip(1); // No bytes left, not enough -// assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading a long when less than 4 bytes are available throws BufferUnderflowException") -// void readInsufficientDataThrows() { -// for (int i = 0; i < 7; i++) { -// final var seq = sequence(new byte[i]); -// assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class); -// } -// } -// -// @ParameterizedTest(name = "value={0}") -// @ValueSource(longs = {Long.MIN_VALUE, -8, -1, 0, 1, 8, Long.MAX_VALUE}) -// @DisplayName("Reading a long") -// void read(long value) { -// final var seq = sequence(asBytes(c -> c.putLong(value))); -// final var pos = seq.position(); -// assertThat(seq.readLong()).isEqualTo(value); -// assertThat(seq.position()).isEqualTo(pos + 8); -// } -// -// @ParameterizedTest(name = "value={0}") -// 
@ValueSource(longs = {Long.MIN_VALUE, -8, -1, 0, 1, 8, Long.MAX_VALUE}) -// @DisplayName("Reading a long in Little Endian") -// void readLittleEndian(long value) { -// final var seq = sequence(asBytes(c -> c.putLong(value), LITTLE_ENDIAN)); -// final var pos = seq.position(); -// assertThat(seq.readLong(LITTLE_ENDIAN)).isEqualTo(value); -// assertThat(seq.position()).isEqualTo(pos + 8); -// } -// -// @ParameterizedTest(name = "value={0}") -// @ValueSource(longs = {Long.MIN_VALUE, -8, -1, 0, 1, 8, Long.MAX_VALUE}) -// @DisplayName("Reading a long in Big Endian") -// void readBigEndian(long value) { -// final var seq = sequence(asBytes(c -> c.putLong(value), BIG_ENDIAN)); -// final var pos = seq.position(); -// assertThat(seq.readLong(BIG_ENDIAN)).isEqualTo(value); -// assertThat(seq.position()).isEqualTo(pos + 8); -// } -// -// @Test -// @DisplayName("Read a mixture of big and little endian data") -// void readMixedEndian() { -// final var seq = sequence(asBytes(c -> { -// c.order(BIG_ENDIAN); -// c.putLong(0x0102030405060708L); -// c.order(LITTLE_ENDIAN); -// c.putLong(0x05060708090A0B0CL); -// c.order(BIG_ENDIAN); -// c.putLong(0x990A0B0C0D0E0F10L); -// c.order(LITTLE_ENDIAN); -// c.putLong(0x9D0E0F1011121314L); -// })); -// assertThat(seq.readLong()).isEqualTo(0x0102030405060708L); -// assertThat(seq.readLong(LITTLE_ENDIAN)).isEqualTo(0x05060708090A0B0CL); -// assertThat(seq.readLong()).isEqualTo(0x990A0B0C0D0E0F10L); -// assertThat(seq.readLong(LITTLE_ENDIAN)).isEqualTo(0x9D0E0F1011121314L); -// } -// } -// -// @Nested -// @DisplayName("readFloat()") -// final class ReadFloatTest { -// @Test -// @DisplayName("Reading a float from an empty sequence throws BufferUnderflowException") -// void readFromEmptyDataThrows() { -// final var seq = emptySequence(); -// assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading a float from a full read sequence throws BufferUnderflowException") -// void readFromFullyReadDataThrows() { -// final var seq = fullyUsedSequence(); -// assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading a float past the limit throws BufferUnderflowException") -// void readPastLimit() { -// // Given a sequence of bytes with a limit where position == limit -// final var seq = sequence(TEST_BYTES); -// seq.limit(5); -// // When we try to read a float, then we get a BufferUnderflowException -// seq.skip(4); // Only 1 byte left, not enough -// assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class); -// seq.skip(1); // No bytes left, not enough -// assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading a float when less than 4 bytes are available throws BufferUnderflowException") -// void readInsufficientDataThrows() { -// for (int i = 0; i < 3; i++) { -// final var seq = sequence(new byte[i]); -// assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class); -// } -// } -// -// @ParameterizedTest(name = "value={0}") -// @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY}) -// @DisplayName("Reading a float") -// void read(float value) { -// final var seq = sequence(asBytes(c -> c.putFloat(value))); -// final var pos = seq.position(); -// final var readFloat = seq.readFloat(); -// if (Float.isNaN(value)) { -// 
assertThat(readFloat).isNaN(); -// } else { -// assertThat(readFloat).isEqualTo(value); -// } -// assertThat(seq.position()).isEqualTo(pos + 4); -// } -// -// @ParameterizedTest(name = "value={0}") -// @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY}) -// @DisplayName("Reading a float in Little Endian") -// void readLittleEndian(float value) { -// final var seq = sequence(asBytes(c -> c.putFloat(value), LITTLE_ENDIAN)); -// final var pos = seq.position(); -// final var readFloat = seq.readFloat(LITTLE_ENDIAN); -// if (Float.isNaN(value)) { -// assertThat(readFloat).isNaN(); -// } else { -// assertThat(readFloat).isEqualTo(value); -// } -// assertThat(seq.position()).isEqualTo(pos + 4); -// } -// -// @ParameterizedTest(name = "value={0}") -// @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY}) -// @DisplayName("Reading a float in Big Endian") -// void readBigEndian(float value) { -// final var seq = sequence(asBytes(c -> c.putFloat(value), BIG_ENDIAN)); -// final var pos = seq.position(); -// final var readFloat = seq.readFloat(BIG_ENDIAN); -// if (Float.isNaN(value)) { -// assertThat(readFloat).isNaN(); -// } else { -// assertThat(readFloat).isEqualTo(value); -// } -// assertThat(seq.position()).isEqualTo(pos + 4); -// } -// -// @Test -// @DisplayName("Read a mixture of big and little endian data") -// void readMixedEndian() { -// final var seq = sequence(asBytes(c -> { -// c.putFloat(0x01020304); -// c.order(LITTLE_ENDIAN); -// c.putFloat(0x05060708); -// c.order(BIG_ENDIAN); -// c.putFloat(0x990A0B0C); -// c.order(LITTLE_ENDIAN); -// c.putFloat(0x9D0E0F10); -// })); -// assertThat(seq.readFloat()).isEqualTo(0x01020304); -// assertThat(seq.readFloat(LITTLE_ENDIAN)).isEqualTo(0x05060708); -// assertThat(seq.readFloat()).isEqualTo(0x990A0B0C); -// assertThat(seq.readFloat(LITTLE_ENDIAN)).isEqualTo(0x9D0E0F10); -// } -// } -// -// @Nested -// @DisplayName("readDouble()") -// final class ReadDoubleTest { -// @Test -// @DisplayName("Reading a double from an empty sequence throws BufferUnderflowException") -// void readFromEmptyDataThrows() { -// final var seq = emptySequence(); -// assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading a double from a full read sequence throws BufferUnderflowException") -// void readFromFullyReadDataThrows() { -// final var seq = fullyUsedSequence(); -// assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading a double past the limit throws BufferUnderflowException") -// void readPastLimit() { -// // Given a sequence of bytes with a limit where position == limit -// final var seq = sequence(TEST_BYTES); -// seq.limit(5); -// // When we try to read a double, then we get a BufferUnderflowException -// seq.skip(4); // Only 1 byte left, not enough -// assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class); -// seq.skip(1); // No bytes left, not enough -// assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading a double when less than 4 bytes are available throws BufferUnderflowException") -// void readInsufficientDataThrows() { -// for (int i = 0; i < 7; i++) { -// final var seq = sequence(new byte[i]); -// 
assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class); -// } -// } -// -// @ParameterizedTest(name = "value={0}") -// @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY}) -// @DisplayName("Reading a double") -// void read(double value) { -// final var seq = sequence(asBytes(c -> c.putDouble(value))); -// final var pos = seq.position(); -// final var readDouble = seq.readDouble(); -// if (Double.isNaN(value)) { -// assertThat(readDouble).isNaN(); -// } else { -// assertThat(readDouble).isEqualTo(value); -// } -// assertThat(seq.position()).isEqualTo(pos + 8); -// } -// -// @ParameterizedTest(name = "value={0}") -// @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY}) -// @DisplayName("Reading a double in Little Endian") -// void readLittleEndian(double value) { -// final var seq = sequence(asBytes(c -> c.putDouble(value), LITTLE_ENDIAN)); -// final var pos = seq.position(); -// final var readDouble = seq.readDouble(LITTLE_ENDIAN); -// if (Double.isNaN(value)) { -// assertThat(readDouble).isNaN(); -// } else { -// assertThat(readDouble).isEqualTo(value); -// } -// assertThat(seq.position()).isEqualTo(pos + 8); -// } -// -// @ParameterizedTest(name = "value={0}") -// @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY}) -// @DisplayName("Reading a double in Big Endian") -// void readBigEndian(double value) { -// final var seq = sequence(asBytes(c -> c.putDouble(value), BIG_ENDIAN)); -// final var pos = seq.position(); -// final var readDouble = seq.readDouble(BIG_ENDIAN); -// if (Double.isNaN(value)) { -// assertThat(readDouble).isNaN(); -// } else { -// assertThat(readDouble).isEqualTo(value); -// } -// assertThat(seq.position()).isEqualTo(pos + 8); -// } -// -// @Test -// @DisplayName("Read a mixture of big and little endian data") -// void readMixedEndian() { -// final var seq = sequence(asBytes(c -> { -// c.putDouble(0x9102030405060708L); -// c.order(LITTLE_ENDIAN); -// c.putDouble(0x990A0B0C0D0E0F10L); -// c.order(BIG_ENDIAN); -// c.putDouble(0x1112131415161718L); -// c.order(LITTLE_ENDIAN); -// c.putDouble(0x191A1B1C1D1E1F20L); -// })); -// assertThat(seq.readDouble()).isEqualTo(0x9102030405060708L); -// assertThat(seq.readDouble(LITTLE_ENDIAN)).isEqualTo(0x990A0B0C0D0E0F10L); -// assertThat(seq.readDouble()).isEqualTo(0x1112131415161718L); -// assertThat(seq.readDouble(LITTLE_ENDIAN)).isEqualTo(0x191A1B1C1D1E1F20L); -// } -// } -// @Nested -// @DisplayName("readVarInt()") -// final class ReadVarIntTest { -// @ParameterizedTest -// @ValueSource(booleans = {false, true}) -// @DisplayName("Reading a varint from an empty sequence throws BufferUnderflowException") -// void readFromEmptyDataThrows(final boolean zigZag) { -// final var seq = emptySequence(); -// assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class); -// } -// -// @ParameterizedTest -// @ValueSource(booleans = {false, true}) -// @DisplayName("Reading a varint from a full read sequence throws BufferUnderflowException") -// void readFromFullyReadDataThrows(final boolean zigZag) { -// final var seq = fullyUsedSequence(); -// assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading a 
varint past the limit throws BufferUnderflowException") -// void readPastLimit() { -// // Given a sequence of bytes with a limit where position == limit -// final var seq = sequence(TEST_BYTES); -// seq.limit(5); -// seq.skip(5); -// // When we try to read a varint, then we get a BufferUnderflowException -// assertThatThrownBy(() -> seq.readVarInt(false)).isInstanceOf(BufferUnderflowException.class); -// assertThatThrownBy(() -> seq.readVarInt(true)).isInstanceOf(BufferUnderflowException.class); -// } -// -// @ParameterizedTest -// @ValueSource(booleans = {false, true}) -// @DisplayName("Reading a varint when less than 4 bytes are available throws BufferUnderflowException") -// void readInsufficientDataThrows(final boolean zigZag) { -// final var seq = sequence(new byte[] { (byte) 0b10101100 }); -// assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Read a varint") -// void read() { -// final var seq = sequence(new byte[] { (byte) 0b10101100, 0b00000010 }); -// final var pos = seq.position(); -// final var value = seq.readVarInt(false); -// assertThat(value).isEqualTo(300); -// assertThat(seq.position()).isEqualTo(pos + 2); -// } -// -// @Test -// @DisplayName("Read a varint with zig zag encoding") -// void readZigZag() { -// final var seq = sequence(new byte[] { (byte) 0b10101101, 0b00000010 }); -// final var pos = seq.position(); -// final var value = seq.readVarInt(true); -// assertThat(value).isEqualTo(-151); -// assertThat(seq.position()).isEqualTo(pos + 2); -// } -// } -// -// @Nested -// @DisplayName("readVarLong()") -// final class ReadVarLongTest { -// @ParameterizedTest -// @ValueSource(booleans = {false, true}) -// @DisplayName("Reading a varlong from an empty sequence throws BufferUnderflowException") -// void readFromEmptyDataThrows(final boolean zigZag) { -// final var seq = emptySequence(); -// assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class); -// } -// -// @ParameterizedTest -// @ValueSource(booleans = {false, true}) -// @DisplayName("Reading a varlong from a full read sequence throws BufferUnderflowException") -// void readFromFullyReadDataThrows(final boolean zigZag) { -// final var seq = fullyUsedSequence(); -// assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Reading a varlong past the limit throws BufferUnderflowException") -// void readPastLimit() { -// // Given a sequence of bytes with a limit where position == limit -// final var seq = sequence(TEST_BYTES); -// seq.limit(5); -// seq.skip(5); -// // When we try to read a varlong, then we get a BufferUnderflowException -// assertThatThrownBy(() -> seq.readVarLong(false)).isInstanceOf(BufferUnderflowException.class); -// assertThatThrownBy(() -> seq.readVarLong(true)).isInstanceOf(BufferUnderflowException.class); -// } -// -// @ParameterizedTest -// @ValueSource(booleans = {false, true}) -// @DisplayName("Reading a varlong when less than 4 bytes are available throws BufferUnderflowException") -// void readInsufficientDataThrows(final boolean zigZag) { -// final var seq = sequence(new byte[] { (byte) 0b10101100 }); -// assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class); -// } -// -// @Test -// @DisplayName("Read a varlong") -// void read() { -// final var seq = sequence(new byte[] { (byte) 0b10101100, 0b00000010 }); -// final var pos = seq.position(); -// final var 
value = seq.readVarLong(false); -// assertThat(value).isEqualTo(300); -// assertThat(seq.position()).isEqualTo(pos + 2); -// } -// -// @Test -// @DisplayName("Read a varlong with zig zag encoding") -// void readZigZag() { -// final var seq = sequence(new byte[] { (byte) 0b10101101, 0b00000010 }); -// final var pos = seq.position(); -// final var value = seq.readVarLong(true); -// assertThat(value).isEqualTo(-151); -// assertThat(seq.position()).isEqualTo(pos + 2); -// } -// } + // @Nested + // @DisplayName("readUnsignedByte()") + // final class ReadUnsignedByteTest { + // @Test + // @DisplayName("Reading an unsigned byte from an empty sequence throws BufferUnderflowException") + // void readFromEmptyDataThrows() { + // // Given an empty sequence + // final var seq = emptySequence(); + // // When we try to read an unsigned byte, then we get a BufferUnderflowException + // assertThatThrownBy(seq::readUnsignedByte).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading an unsigned byte from a full read sequence throws BufferUnderflowException") + // void readFromFullyReadDataThrows() { + // // Given a fully read sequence + // final var seq = fullyUsedSequence(); + // // When we try to read an unsigned byte, then we get a BufferUnderflowException + // assertThatThrownBy(seq::readUnsignedByte).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading an unsigned byte past the limit throws BufferUnderflowException") + // void readPastLimit() { + // // Given a sequence of bytes with a limit where position == limit + // final var seq = sequence(TEST_BYTES); + // seq.limit(5); + // seq.skip(5); + // // When we try to read an unsigned byte, then we get a BufferUnderflowException + // assertThatThrownBy(seq::readUnsignedByte).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading an unsigned byte") + // void read() { + // // Given a sequence of bytes (with a single byte that could be interpreted as negative if signed) + // final var seq = sequence(new byte[] { (byte) 0b1110_0011 }); + // // When we read the byte, then we get the expected byte and move the position forward by a single byte + // final var pos = seq.position(); + // assertThat(seq.readUnsignedByte()).isEqualTo(0b1110_0011); + // assertThat(seq.position()).isEqualTo(pos + 1); + // } + // } + // + // @Nested + // @DisplayName("readBytes()") + // final class ReadBytesTest { + // @Test + // @DisplayName("Reading bytes with a null dst throws NullPointerException") + // void readNullDstThrows() { + // // Given a sequence of bytes + // final var seq = sequence(TEST_BYTES); + // + // // When we try to read bytes using a null byte array, then we get a NullPointerException + // //noinspection DataFlowIssue + // assertThatThrownBy(() -> seq.readBytes((byte[]) null)).isInstanceOf(NullPointerException.class); + // //noinspection DataFlowIssue + // assertThatThrownBy(() -> seq.readBytes(null, 0, 10)).isInstanceOf(NullPointerException.class); + // + // // When we try to read bytes using a null ByteBuffer, then we get a NullPointerException + // //noinspection DataFlowIssue + // assertThatThrownBy(() -> seq.readBytes((ByteBuffer) null)).isInstanceOf(NullPointerException.class); + // + // // When we try to read bytes using a null BufferedData, then we get a NullPointerException + // //noinspection DataFlowIssue + // assertThatThrownBy(() -> seq.readBytes((BufferedData) null)).isInstanceOf(NullPointerException.class); + // } + // + // 
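
The readVarInt()/readVarLong() assertions in the removed block above (0b10101100 0b00000010 decoding to 300, and 0b10101101 0b00000010 decoding to -151 with zig-zag) follow the standard protobuf base-128 varint and ZigZag encodings. A self-contained decoder sketch for exactly those two cases (illustrative only; class and method names are invented, not the PBJ API):

class VarintSketch {
    /** Decodes a base-128 varint: 7 payload bits per byte, 0x80 continuation bit, least-significant group first. */
    static long decodeVarint(byte... bytes) {
        long result = 0;
        int shift = 0;
        for (byte b : bytes) {
            result |= (long) (b & 0x7F) << shift;
            if ((b & 0x80) == 0) {
                return result;
            }
            shift += 7;
        }
        throw new IllegalArgumentException("truncated varint");
    }

    /** ZigZag decoding: interleaved non-negative/negative values are mapped back to signed ones. */
    static long zigZagDecode(long n) {
        return (n >>> 1) ^ -(n & 1);
    }

    public static void main(String[] args) {
        System.out.println(decodeVarint((byte) 0b1010_1100, (byte) 0b0000_0010));               // 300
        System.out.println(zigZagDecode(decodeVarint((byte) 0b1010_1101, (byte) 0b0000_0010))); // 301 -> -151
    }
}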
@Test + // @DisplayName("Reading bytes with a negative offset throws IndexOutOfBoundsException") + // void negativeOffsetThrows() { + // // Given a sequence of bytes + // final var seq = sequence(TEST_BYTES); + // // When we try to read bytes using a byte array with a negative offset, then we get an + // IndexOutOfBoundsException + // assertThatThrownBy(() -> seq.readBytes(new byte[10], -1, + // 10)).isInstanceOf(IndexOutOfBoundsException.class); + // } + // + // @Test + // @DisplayName("Reading bytes with an offset that is too large throws IndexOutOfBoundsException") + // void tooLargeOffsetThrows() { + // // Given a sequence of bytes + // final var seq = sequence(TEST_BYTES); + // // When we try to read bytes using a byte array with an offset that is too large, + // // then we get an IndexOutOfBoundsException + // assertThatThrownBy(() -> seq.readBytes(new byte[10], 11, 10)) + // .isInstanceOf(IndexOutOfBoundsException.class); + // // When we try to read bytes using a byte array with an offset + maxLength that is too large, + // // then we get an IndexOutOfBoundsException + // assertThatThrownBy(() -> seq.readBytes(new byte[10], 9, 2)) + // .isInstanceOf(IndexOutOfBoundsException.class); + // } + // + // @Test + // @DisplayName("Reading bytes with a negative length throws IllegalArgumentException") + // void negativeLengthThrows() { + // // Given a sequence of bytes + // final var seq = sequence(TEST_BYTES); + // // When we try to read bytes using a byte array with a negative length, then we get an + // IllegalArgumentException + // assertThatThrownBy(() -> seq.readBytes(new byte[10], 0, + // -1)).isInstanceOf(IllegalArgumentException.class); + // assertThatThrownBy(() -> seq.readBytes(-1)).isInstanceOf(IllegalArgumentException.class); + // } + // + // @Test + // @DisplayName("Reading bytes from an empty sequence is a no-op") + // void readFromEmptyDataIsNoOp() { + // // Given an empty sequence + // final var seq = emptySequence(); + // + // // When we try to read bytes using a byte array, then we get nothing read + // assertThat(seq.readBytes(new byte[10])).isZero(); + // assertThat(seq.readBytes(new byte[10], 0, 2)).isZero(); + // + // // When we try to read bytes using a ByteBuffer, then we get nothing read + // final var byteBuffer = ByteBuffer.allocate(10); + // assertThat(seq.readBytes(byteBuffer)).isZero(); + // + // // When we try to read bytes using a BufferedData, then we get nothing read + // final var bufferedData = BufferedData.allocate(10); + // assertThat(seq.readBytes(bufferedData)).isZero(); + // } + // + // @Test + // @DisplayName("Reading bytes from a fully read sequence is a no-op") + // void readFromFullyReadDataIsNoOp() { + // // Given a fully read sequence + // final var seq = fullyUsedSequence(); + // + // // When we try to read bytes using a byte array, then we get nothing read + // assertThat(seq.readBytes(new byte[10])).isZero(); + // assertThat(seq.readBytes(new byte[10], 0, 2)).isZero(); + // + // // When we try to read bytes using a ByteBuffer, then we get nothing read + // final var byteBuffer = ByteBuffer.allocate(10); + // assertThat(seq.readBytes(byteBuffer)).isZero(); + // + // // When we try to read bytes using a BufferedData, then we get nothing read + // final var bufferedData = BufferedData.allocate(10); + // assertThat(seq.readBytes(bufferedData)).isZero(); + // } + // + // @Test + // @DisplayName("Reading bytes where there is nothing remaining because we are at the limit is a no-op") + // void readPastLimit() { + // // Given a sequence of bytes 
with a limit where position == limit + // final var seq = sequence(TEST_BYTES); + // seq.limit(5); + // seq.skip(5); + // + // // When we try to read bytes using a byte array, then we get nothing read + // assertThat(seq.readBytes(new byte[10])).isZero(); + // assertThat(seq.readBytes(new byte[10], 0, 2)).isZero(); + // + // // When we try to read bytes using a ByteBuffer, then we get nothing read + // final var byteBuffer = ByteBuffer.allocate(10); + // assertThat(seq.readBytes(byteBuffer)).isZero(); + // + // // When we try to read bytes using a BufferedData, then we get nothing read + // final var bufferedData = BufferedData.allocate(10); + // assertThat(seq.readBytes(bufferedData)).isZero(); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst byte array where the dst has length of 0") + // void readZeroDstByteArray() { + // // Given a sequence of bytes and an empty destination byte array + // final var seq = sequence(TEST_BYTES); + // final var dst = new byte[0]; + // final var pos = seq.position(); + // // When we try to read bytes into the dst, then the position does not change, + // // and the destination array is empty + // assertThat(seq.readBytes(dst)).isZero(); + // assertThat(seq.position()).isEqualTo(pos); + // assertThat(dst).isEmpty(); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst byte array with offset and length where the dst has length of 0") + // void readZeroDstByteArrayWithOffset() { + // // Given a sequence of bytes and a destination byte array + // final var seq = sequence(TEST_BYTES); + // final var dst = new byte[10]; + // final var pos = seq.position(); + // // When we try to read bytes into the dst but with a 0 length, then the position does not change, + // // and the destination array is empty + // assertThat(seq.readBytes(dst, 5,0)).isZero(); + // assertThat(seq.position()).isEqualTo(pos); + // assertThat(dst).containsExactly(0, 0, 0, 0, 0, 0, 0, 0, 0, 0); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst ByteBuffer where the dst has length of 0") + // void readZeroDstByteBuffer() { + // // Given a sequence of bytes and an empty destination ByteBuffer + // final var seq = sequence(TEST_BYTES); + // final var dst = ByteBuffer.allocate(0); + // final var pos = seq.position(); + // // When we try to read bytes into the dst, then the position does not change, + // // and the destination buffer is empty + // assertThat(seq.readBytes(dst)).isZero(); + // assertThat(seq.position()).isEqualTo(pos); + // assertThat(dst.position()).isZero(); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst BufferedData where the dst has length of 0") + // void readZeroDstBufferedData() { + // // Given a sequence of bytes and an empty destination BufferedData + // final var seq = sequence(TEST_BYTES); + // final var dst = BufferedData.allocate(0); + // final var pos = seq.position(); + // // When we try to read bytes into the dst, then the position does not change, + // // and the destination buffer is empty + // assertThat(seq.readBytes(dst)).isZero(); + // assertThat(seq.position()).isEqualTo(pos); + // assertThat(dst.position()).isZero(); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst byte array where the dst is smaller than the sequence") + // void readSmallerDstByteArray() { + // // Given a sequence of bytes and a destination byte array + // final var seq = sequence(TEST_BYTES); + // // When we try reading into the dst (twice, once from the beginning and once in the middle) + // for (int i = 
0; i < 2; i++) { + // final var dst = new byte[5]; + // final var pos = seq.position(); + // final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5); + // assertThat(seq.readBytes(dst)).isEqualTo(5); + // // Then the dst is filled with the bytes from the sequence, and the position is updated + // assertThat(dst).isEqualTo(subset); + // assertThat(seq.position()).isEqualTo(pos + 5); + // } + // } + // + // @Test + // @DisplayName("Reading bytes into a dst byte array with offset where the dst is smaller than the sequence") + // void readSmallerDstByteArrayWithOffset() { + // final var seq = sequence(TEST_BYTES); + // // Do twice, so we read once from sequence at the beginning and once in the middle + // for (int i = 0; i < 2; i++) { + // final var dst = new byte[10]; + // final var pos = seq.position(); + // final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5); + // assertThat(seq.readBytes(dst, 3, 5)).isEqualTo(5); + // assertThat(Arrays.copyOfRange(dst, 3, 8)).isEqualTo(subset); + // assertThat(seq.position()).isEqualTo(pos + 5); + // } + // } + // + // @Test + // @DisplayName("Reading bytes into a dst ByteBuffer where the dst is smaller than the sequence") + // void readSmallerDstByteBuffer() { + // final var seq = sequence(TEST_BYTES); + // for (int i = 0; i < 2; i++) { + // final var dst = ByteBuffer.allocate(5); + // final var pos = seq.position(); + // final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5); + // assertThat(seq.readBytes(dst)).isEqualTo(5); + // assertThat(dst.array()).isEqualTo(subset); + // assertThat(seq.position()).isEqualTo(pos + 5); + // } + // } + // + // @Test + // @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is smaller than the sequence") + // void readSmallerDstByteBufferWithOffset() { + // final var seq = sequence(TEST_BYTES); + // for (int i = 0; i < 2; i++) { + // final var dst = ByteBuffer.allocate(10); + // dst.position(5); + // final var pos = seq.position(); + // final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5); + // assertThat(seq.readBytes(dst)).isEqualTo(5); + // assertThat(dst.slice(5, 5)).isEqualTo(ByteBuffer.wrap(subset)); + // assertThat(seq.position()).isEqualTo(pos + 5); + // } + // } + // + // @Test + // @DisplayName("Reading bytes into a dst BufferedData where the dst is smaller than the sequence") + // void readSmallerDstBufferedData() { + // final var seq = sequence(TEST_BYTES); + // for (int i = 0; i < 2; i++) { + // final var dst = BufferedData.allocate(5); + // final var pos = seq.position(); + // final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5); + // assertThat(seq.readBytes(dst)).isEqualTo(5); + // assertThat(dst).isEqualTo(BufferedData.wrap(subset)); + // assertThat(seq.position()).isEqualTo(pos + 5); + // } + // } + // + // @Test + // @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is smaller than the + // sequence") + // void readSmallerDstBufferedDataWithOffset() { + // final var seq = sequence(TEST_BYTES); + // for (int i = 0; i < 2; i++) { + // final var dst = BufferedData.allocate(10); + // dst.position(5); + // final var pos = seq.position(); + // final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5); + // assertThat(seq.readBytes(dst)).isEqualTo(5); + // assertThat(dst.slice(5, 5)).isEqualTo(BufferedData.wrap(subset)); + // assertThat(seq.position()).isEqualTo(pos + 5); + // } + // } + // + // @Test + // @DisplayName("Reading 
bytes into a dst byte array where the dst is the same length as the sequence") + // void readDstByteArray() { + // final var seq = sequence(TEST_BYTES); + // final var dst = new byte[TEST_BYTES.length]; + // final var pos = seq.position(); + // assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length); + // assertThat(dst).isEqualTo(TEST_BYTES); + // assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst byte array with offset where the dst is the same length as the + // sequence") + // void readDstByteArrayWithOffset() { + // final var seq = sequence(TEST_BYTES); + // final var dst = new byte[TEST_BYTES.length + 10]; + // final var pos = seq.position(); + // assertThat(seq.readBytes(dst, 5, TEST_BYTES.length)).isEqualTo(TEST_BYTES.length); + // assertThat(Arrays.copyOfRange(dst, 5, 5 + TEST_BYTES.length)).isEqualTo(TEST_BYTES); + // assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst ByteBuffer where the dst is the same length as the sequence") + // void readDstByteBuffer() { + // final var seq = sequence(TEST_BYTES); + // final var dst = ByteBuffer.allocate(TEST_BYTES.length); + // final var pos = seq.position(); + // assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length); + // assertThat(dst.array()).isEqualTo(TEST_BYTES); + // assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is the same length as the + // sequence") + // void readDstByteBufferWithOffset() { + // final var seq = sequence(TEST_BYTES); + // final var dst = ByteBuffer.allocate(TEST_BYTES.length + 10); + // final var pos = seq.position(); + // dst.position(5); + // dst.limit(TEST_BYTES.length + 5); + // assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length); + // assertThat(dst.slice(5, TEST_BYTES.length)).isEqualTo(ByteBuffer.wrap(TEST_BYTES)); + // assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst BufferedData where the dst is the same length as the sequence") + // void readDstBufferedData() { + // final var seq = sequence(TEST_BYTES); + // final var dst = BufferedData.allocate(TEST_BYTES.length); + // final var pos = seq.position(); + // assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length); + // assertThat(dst).isEqualTo(BufferedData.wrap(TEST_BYTES)); + // assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is the same length as the + // sequence") + // void readDstBufferedDataWithOffset() { + // final var seq = sequence(TEST_BYTES); + // final var dst = BufferedData.allocate(TEST_BYTES.length + 10); + // final var pos = seq.position(); + // dst.position(5); + // dst.limit(TEST_BYTES.length + 5); + // assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length); + // assertThat(dst.slice(5, TEST_BYTES.length)).isEqualTo(BufferedData.wrap(TEST_BYTES)); + // assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst byte array where the dst is larger than the sequence") + // void readLargerDstByteArray() { + // // Given a sequence of bytes + // final var seq = sequence(TEST_BYTES); + // // When we read the bytes into a larger byte array + // final var arr = new 
byte[TEST_BYTES.length + 1]; + // assertThat(seq.readBytes(arr)).isEqualTo(TEST_BYTES.length); + // // Then the sequence is exhausted and the array is filled starting at index 0 + // assertThat(seq.remaining()).isZero(); + // assertThat(seq.hasRemaining()).isFalse(); + // assertThat(arr).startsWith(TEST_BYTES); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst byte array with offset where the dst is larger than the sequence") + // void readLargerDstByteArrayWithOffset() { + // // Given a sequence of bytes + // final var seq = sequence(TEST_BYTES); + // // When we read the bytes into a larger byte array with an offset + // final var arr = new byte[TEST_BYTES.length + 10]; + // assertThat(seq.readBytes(arr, 5, TEST_BYTES.length + 1)).isEqualTo(TEST_BYTES.length); + // // Then the sequence is exhausted and the array is filled starting at index 5 + // assertThat(seq.remaining()).isZero(); + // assertThat(seq.hasRemaining()).isFalse(); + // assertThat(Arrays.copyOfRange(arr, 5, TEST_BYTES.length + 5 )).containsExactly(TEST_BYTES); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst ByteBuffer where the dst is larger than the sequence") + // void readLargerDstByteBuffer() { + // // Given a sequence of bytes + // final var seq = sequence(TEST_BYTES); + // // When we read the bytes into a larger buffer + // final var buffer = ByteBuffer.allocate(TEST_BYTES.length + 1); + // assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length); + // // Then the sequence is exhausted and the buffer is filled starting at index 0 + // assertThat(seq.remaining()).isZero(); + // assertThat(seq.hasRemaining()).isFalse(); + // assertThat(buffer.array()).startsWith(TEST_BYTES); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is larger than the sequence") + // void readLargerDstByteBufferWithOffset() { + // // Given a sequence of bytes + // final var seq = sequence(TEST_BYTES); + // // When we read the bytes into a larger buffer with an offset + // final var buffer = ByteBuffer.allocate(TEST_BYTES.length + 10); + // buffer.position(5); + // buffer.limit(5 + TEST_BYTES.length + 1); + // assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length); + // // Then the sequence is exhausted and the buffer is filled starting at index 5 + // assertThat(seq.remaining()).isZero(); + // assertThat(seq.hasRemaining()).isFalse(); + // assertThat(Arrays.copyOfRange(buffer.array(), 5, TEST_BYTES.length + 5 )).containsExactly(TEST_BYTES); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst BufferedData where the dst is larger than the sequence") + // void readLargerDstBufferedData() { + // // Given a sequence of bytes + // final var seq = sequence(TEST_BYTES); + // // When we read the bytes into a larger buffer + // final var buffer = BufferedData.allocate(TEST_BYTES.length + 1); + // assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length); + // // Then the sequence is exhausted and the buffer is filled starting at index 0 + // assertThat(seq.remaining()).isZero(); + // assertThat(seq.hasRemaining()).isFalse(); + // assertThat(buffer.slice(0, TEST_BYTES.length)).isEqualTo(BufferedData.wrap(TEST_BYTES)); + // } + // + // @Test + // @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is larger than the + // sequence") + // void readLargerDstBufferedDataWithOffset() { + // // Given a sequence of bytes + // final var seq = sequence(TEST_BYTES); + // // When we read the bytes into a larger buffer 
with an offset + // final var buffer = BufferedData.allocate(TEST_BYTES.length + 10); + // buffer.position(5); + // buffer.limit(5 + TEST_BYTES.length + 1); + // assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length); + // // Then the sequence is exhausted and the buffer is filled starting at index 5 + // assertThat(seq.remaining()).isZero(); + // assertThat(seq.hasRemaining()).isFalse(); + // assertThat(buffer.slice(5, TEST_BYTES.length)).isEqualTo(BufferedData.wrap(TEST_BYTES)); + // } + // + // @ParameterizedTest(name = "offset={0}, length={1}") + // @CsvSource({ + // "-1, 1", // Negative offset + // "100, 10", // Offset larger than the dst array size + // "5, 10", // Offset+Length larger than the dst array size + // }) + // @DisplayName("Reading bytes where the dst offset and length are bad") + // void badOffsetLength(int offset, int length) { + // final var seq = sequence(TEST_BYTES); + // assertThatThrownBy(() -> seq.readBytes(new byte[10], offset, length)) + // .isInstanceOf(IndexOutOfBoundsException.class); + // } + // } + // + // @Nested + // @DisplayName("view()") + // final class ViewTest { + // @Test + // @DisplayName("Negative length throws IllegalArgumentException") + // void negativeLength() { + // final var seq = sequence(TEST_BYTES); + // assertThatThrownBy(() -> seq.view(-1)).isInstanceOf(IllegalArgumentException.class); + // } + // + // @Test + // @DisplayName("Length that is greater than remaining throws BufferUnderflowException") + // @Disabled("This has to be tested on the buffer level only, because for a Stream, the limit is too big") + // void lengthGreaterThanRemaining() { + // // TODO Move to buffer tests + // final var seq = sequence(TEST_BYTES); + // seq.skip(1); + // assertThatThrownBy(() -> seq.view(TEST_BYTES.length)).isInstanceOf(BufferUnderflowException.class); + // assertThatThrownBy(() -> seq.view(Integer.MAX_VALUE)).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Creating a view past the limit throws BufferUnderflowException") + // void readPastLimit() { + // // Given a sequence of bytes with a limit where position == limit + // final var seq = sequence(TEST_BYTES); + // seq.limit(5); + // seq.skip(5); + // // When we try to create a view with a length past the limit, then we get a BufferUnderflowException + // assertThatThrownBy(() -> seq.view(6)).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Length is zero (OK, empty sequence)") + // void lengthIsZero() { + // final var seq = sequence(TEST_BYTES); + // assertThat(seq.view(0).remaining()).isZero(); + // } + // + // @Test + // @DisplayName("Length + Position is less than limit (OK)") + // void lengthPlusPositionIsLessThanLimit() { + // final var seq = sequence(TEST_BYTES); + // seq.skip(5); + // final var view = seq.view(10); + // + // assertThat(view.remaining()).isEqualTo(10); + // assertThat(view.readBytes(10)).isEqualTo(Bytes.wrap(TEST_BYTES).slice(5, 10)); + // } + // + // @Test + // @DisplayName("Length + Position is the limit (OK)") + // void lengthPlusPositionIsTheLimit() { + // // Given a sequence of bytes where the position is 10 bytes from the end + // final var seq = sequence(TEST_BYTES); + // final var startIndex = TEST_BYTES.length - 10; + // assertThat(seq.skip(startIndex)).isEqualTo(16); + // assertThat(seq.position()).isEqualTo(16); + // // When we create a view with a length of 10 bytes + // final var view = seq.view(10); + // // Then we get the last 10 bytes of the sequence, AND it advances the 
position by that many bytes. + // assertThat(seq.position()).isEqualTo(26); + // // The view, when read, will have all 10 of its bytes + // assertThat(view.remaining()).isEqualTo(10); + // final var bytes = view.readBytes(10); + // assertThat(view.position()).isEqualTo(10); + // // And those bytes will be the last 10 bytes of the sequence + // assertThat(bytes).isEqualTo(Bytes.wrap(TEST_BYTES).slice(startIndex, 10)); + // } + // + // @Test + // @DisplayName("Get sub-sequence of a sub-sequence") + // void subSequenceOfSubSequence() { + // final var seq = sequence(TEST_BYTES); + // final var subSeq = seq.view(10); + // final var subSubSeq = subSeq.view(5); + // assertThat(subSubSeq.remaining()).isEqualTo(5); + // assertThat(subSubSeq.readBytes(5)).isEqualTo(Bytes.wrap(TEST_BYTES).slice(0, 5)); + // } + // } + // + // @Nested + // @DisplayName("readInt()") + // final class ReadIntTest { + // @Test + // @DisplayName("Reading an int from an empty sequence throws BufferUnderflowException") + // void readFromEmptyDataThrows() { + // final var seq = emptySequence(); + // assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading an int from a full read sequence throws BufferUnderflowException") + // void readFromFullyReadDataThrows() { + // final var seq = fullyUsedSequence(); + // assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading an int past the limit throws BufferUnderflowException") + // void readPastLimit() { + // // Given a sequence of bytes with a limit where position == limit + // final var seq = sequence(TEST_BYTES); + // seq.limit(5); + // // When we try to read an int, then we get a BufferUnderflowException + // seq.skip(4); // Only 1 byte left, not enough + // assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class); + // seq.skip(1); // No bytes left, not enough + // assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading an int when less than 4 bytes are available throws BufferUnderflowException") + // void readInsufficientDataThrows() { + // for (int i = 0; i < 3; i++) { + // final var seq = sequence(new byte[i]); + // assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class); + // } + // } + // + // @ParameterizedTest(name = "value={0}") + // @ValueSource(ints = {Integer.MIN_VALUE, -8, -1, 0, 1, 8, Integer.MAX_VALUE}) + // @DisplayName("Reading an int") + // void read(int value) { + // // Given a sequence with exactly 1 integer of data + // final var seq = sequence(asBytes(c -> c.putInt(value))); + // final var pos = seq.position(); + // // When we read an int, then it is the same as the one we wrote, and the position has moved forward + // // by 4 bytes + // assertThat(seq.readInt()).isEqualTo(value); + // assertThat(seq.position()).isEqualTo(pos + 4); + // } + // + // @ParameterizedTest(name = "value={0}") + // @ValueSource(ints = {Integer.MIN_VALUE, -8, -1, 0, 1, 8, Integer.MAX_VALUE}) + // @DisplayName("Reading an int in Little Endian") + // void readLittleEndian(int value) { + // final var seq = sequence(asBytes(c -> c.putInt(value), LITTLE_ENDIAN)); + // final var pos = seq.position(); + // assertThat(seq.readInt(LITTLE_ENDIAN)).isEqualTo(value); + // assertThat(seq.position()).isEqualTo(pos + 4); + // } + // + // @ParameterizedTest(name = "value={0}") + // @ValueSource(ints = {Integer.MIN_VALUE, -8, -1, 0, 1, 8, 
Integer.MAX_VALUE}) + // @DisplayName("Reading an int in Big Endian") + // void readBigEndian(int value) { + // final var seq = sequence(asBytes(c -> c.putInt(value), BIG_ENDIAN)); + // final var pos = seq.position(); + // assertThat(seq.readInt(BIG_ENDIAN)).isEqualTo(value); + // assertThat(seq.position()).isEqualTo(pos + 4); + // } + // + // @Test + // @DisplayName("Read a mixture of big and little endian data") + // void readMixedEndian() { + // final var seq = sequence(asBytes(c -> { + // c.order(BIG_ENDIAN); + // c.putInt(0x01020304); + // c.order(LITTLE_ENDIAN); + // c.putInt(0x05060708); + // c.order(BIG_ENDIAN); + // c.putInt(0x090A0B0C); + // c.order(LITTLE_ENDIAN); + // c.putInt(0x0D0E0F10); + // })); + // assertThat(seq.readInt()).isEqualTo(0x01020304); + // assertThat(seq.readInt(LITTLE_ENDIAN)).isEqualTo(0x05060708); + // assertThat(seq.readInt()).isEqualTo(0x090A0B0C); + // assertThat(seq.readInt(LITTLE_ENDIAN)).isEqualTo(0x0D0E0F10); + // } + // } + // + // @Nested + // @DisplayName("readUnsignedInt()") + // final class ReadUnsignedIntTest { + // @Test + // @DisplayName("Reading an unsigned int from an empty sequence throws BufferUnderflowException") + // void readFromEmptyDataThrows() { + // final var seq = emptySequence(); + // assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading an unsigned int from a full read sequence throws BufferUnderflowException") + // void readFromFullyReadDataThrows() { + // final var seq = fullyUsedSequence(); + // assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading an unsigned int past the limit throws BufferUnderflowException") + // void readPastLimit() { + // // Given a sequence of bytes with a limit where position == limit + // final var seq = sequence(TEST_BYTES); + // seq.limit(5); + // // When we try to read an unsigned int, then we get a BufferUnderflowException + // seq.skip(4); // Only 1 byte left, not enough + // assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class); + // seq.skip(1); // No bytes left, not enough + // assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading an unsigned int when less than 4 bytes are available throws + // BufferUnderflowException") + // void readInsufficientDataThrows() { + // for (int i = 0; i < 3; i++) { + // final var seq = sequence(new byte[i]); + // assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class); + // } + // } + // + // @ParameterizedTest(name = "value={0}") + // @ValueSource(longs = {0x00FFFFFFFFL, 0, 1, 8, 0x007FFFFFFFL}) + // @DisplayName("Reading an unsigned int") + // void read(long value) { + // final var seq = sequence(asBytes(c -> c.putInt((int) value))); + // final var pos = seq.position(); + // assertThat(seq.readUnsignedInt()).isEqualTo(value); + // assertThat(seq.position()).isEqualTo(pos + 4); + // } + // + // @ParameterizedTest(name = "value={0}") + // @ValueSource(longs = {0x00FFFFFFFFL, 0, 1, 8, 0x007FFFFFFFL}) + // @DisplayName("Reading an unsigned int in Little Endian") + // void readLittleEndian(long value) { + // final var seq = sequence(asBytes(c -> c.putInt((int) value), LITTLE_ENDIAN)); + // final var pos = seq.position(); + // assertThat(seq.readUnsignedInt(LITTLE_ENDIAN)).isEqualTo(value); + // assertThat(seq.position()).isEqualTo(pos + 4); + // } + // + // 
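
The badOffsetLength and offset-based readBytes() cases earlier in this re-added block expect an IndexOutOfBoundsException whenever offset < 0, offset > dst.length, or offset + maxLength > dst.length, and an IllegalArgumentException for a negative length. That matches the JDK's standard sub-range check; a small sketch under that assumption (not the PBJ implementation):

import java.util.Objects;

class BoundsCheckSketch {
    /** Mirrors the (offset, maxLength) validation the readBytes(byte[], int, int) tests expect. */
    static void checkDstRange(byte[] dst, int offset, int maxLength) {
        if (maxLength < 0) {
            throw new IllegalArgumentException("maxLength must be non-negative");
        }
        // Throws IndexOutOfBoundsException for offset=-1, offset=100 (dst.length=10), or offset=5 with maxLength=10
        Objects.checkFromIndexSize(offset, maxLength, dst.length);
    }

    public static void main(String[] args) {
        checkDstRange(new byte[10], 3, 5);  // in range, no exception
        checkDstRange(new byte[10], 5, 10); // throws IndexOutOfBoundsException
    }
}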
@ParameterizedTest(name = "value={0}")
+ // @ValueSource(longs = {0x00FFFFFFFFL, 0, 1, 8, 0x007FFFFFFFL})
+ // @DisplayName("Reading an unsigned int in Big Endian")
+ // void readBigEndian(long value) {
+ // final var seq = sequence(asBytes(c -> c.putInt((int) value), BIG_ENDIAN));
+ // final var pos = seq.position();
+ // assertThat(seq.readUnsignedInt(BIG_ENDIAN)).isEqualTo(value);
+ // assertThat(seq.position()).isEqualTo(pos + 4);
+ // }
+ //
+ // @Test
+ // @DisplayName("Read a mixture of big and little endian data")
+ // void readMixedEndian() {
+ // final var seq = sequence(asBytes(c -> {
+ // c.order(BIG_ENDIAN);
+ // c.putInt(0x91020304);
+ // c.order(LITTLE_ENDIAN);
+ // c.putInt(0x95060708);
+ // c.order(BIG_ENDIAN);
+ // c.putInt(0x990A0B0C);
+ // c.order(LITTLE_ENDIAN);
+ // c.putInt(0x9D0E0F10);
+ // }));
+ // assertThat(seq.readUnsignedInt()).isEqualTo(0x91020304L);
+ // assertThat(seq.readUnsignedInt(LITTLE_ENDIAN)).isEqualTo(0x95060708L);
+ // assertThat(seq.readUnsignedInt()).isEqualTo(0x990A0B0CL);
+ // assertThat(seq.readUnsignedInt(LITTLE_ENDIAN)).isEqualTo(0x9D0E0F10L);
+ // }
+ // }
+ //
+ // @Nested
+ // @DisplayName("readLong()")
+ // final class ReadLongTest {
+ // @Test
+ // @DisplayName("Reading a long from an empty sequence throws BufferUnderflowException")
+ // void readFromEmptyDataThrows() {
+ // final var seq = emptySequence();
+ // assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
+ // }
+ //
+ // @Test
+ // @DisplayName("Reading a long from a full read sequence throws BufferUnderflowException")
+ // void readFromFullyReadDataThrows() {
+ // final var seq = fullyUsedSequence();
+ // assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
+ // }
+ //
+ // @Test
+ // @DisplayName("Reading a long past the limit throws BufferUnderflowException")
+ // void readPastLimit() {
+ // // Given a sequence of bytes with a limit where position == limit
+ // final var seq = sequence(TEST_BYTES);
+ // seq.limit(5);
+ // // When we try to read a long, then we get a BufferUnderflowException
+ // seq.skip(4); // Only 1 byte left, not enough
+ // assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
+ // seq.skip(1); // No bytes left, not enough
+ // assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
+ // }
+ //
+ // @Test
+ // @DisplayName("Reading a long when less than 8 bytes are available throws BufferUnderflowException")
+ // void readInsufficientDataThrows() {
+ // for (int i = 0; i < 7; i++) {
+ // final var seq = sequence(new byte[i]);
+ // assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
+ // }
+ // }
+ //
+ // @ParameterizedTest(name = "value={0}")
+ // @ValueSource(longs = {Long.MIN_VALUE, -8, -1, 0, 1, 8, Long.MAX_VALUE})
+ // @DisplayName("Reading a long")
+ // void read(long value) {
+ // final var seq = sequence(asBytes(c -> c.putLong(value)));
+ // final var pos = seq.position();
+ // assertThat(seq.readLong()).isEqualTo(value);
+ // assertThat(seq.position()).isEqualTo(pos + 8);
+ // }
+ //
+ // @ParameterizedTest(name = "value={0}")
+ // @ValueSource(longs = {Long.MIN_VALUE, -8, -1, 0, 1, 8, Long.MAX_VALUE})
+ // @DisplayName("Reading a long in Little Endian")
+ // void readLittleEndian(long value) {
+ // final var seq = sequence(asBytes(c -> c.putLong(value), LITTLE_ENDIAN));
+ // final var pos = seq.position();
+ // assertThat(seq.readLong(LITTLE_ENDIAN)).isEqualTo(value);
+ // assertThat(seq.position()).isEqualTo(pos
+ 8); + // } + // + // @ParameterizedTest(name = "value={0}") + // @ValueSource(longs = {Long.MIN_VALUE, -8, -1, 0, 1, 8, Long.MAX_VALUE}) + // @DisplayName("Reading a long in Big Endian") + // void readBigEndian(long value) { + // final var seq = sequence(asBytes(c -> c.putLong(value), BIG_ENDIAN)); + // final var pos = seq.position(); + // assertThat(seq.readLong(BIG_ENDIAN)).isEqualTo(value); + // assertThat(seq.position()).isEqualTo(pos + 8); + // } + // + // @Test + // @DisplayName("Read a mixture of big and little endian data") + // void readMixedEndian() { + // final var seq = sequence(asBytes(c -> { + // c.order(BIG_ENDIAN); + // c.putLong(0x0102030405060708L); + // c.order(LITTLE_ENDIAN); + // c.putLong(0x05060708090A0B0CL); + // c.order(BIG_ENDIAN); + // c.putLong(0x990A0B0C0D0E0F10L); + // c.order(LITTLE_ENDIAN); + // c.putLong(0x9D0E0F1011121314L); + // })); + // assertThat(seq.readLong()).isEqualTo(0x0102030405060708L); + // assertThat(seq.readLong(LITTLE_ENDIAN)).isEqualTo(0x05060708090A0B0CL); + // assertThat(seq.readLong()).isEqualTo(0x990A0B0C0D0E0F10L); + // assertThat(seq.readLong(LITTLE_ENDIAN)).isEqualTo(0x9D0E0F1011121314L); + // } + // } + // + // @Nested + // @DisplayName("readFloat()") + // final class ReadFloatTest { + // @Test + // @DisplayName("Reading a float from an empty sequence throws BufferUnderflowException") + // void readFromEmptyDataThrows() { + // final var seq = emptySequence(); + // assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading a float from a full read sequence throws BufferUnderflowException") + // void readFromFullyReadDataThrows() { + // final var seq = fullyUsedSequence(); + // assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading a float past the limit throws BufferUnderflowException") + // void readPastLimit() { + // // Given a sequence of bytes with a limit where position == limit + // final var seq = sequence(TEST_BYTES); + // seq.limit(5); + // // When we try to read a float, then we get a BufferUnderflowException + // seq.skip(4); // Only 1 byte left, not enough + // assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class); + // seq.skip(1); // No bytes left, not enough + // assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading a float when less than 4 bytes are available throws BufferUnderflowException") + // void readInsufficientDataThrows() { + // for (int i = 0; i < 3; i++) { + // final var seq = sequence(new byte[i]); + // assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class); + // } + // } + // + // @ParameterizedTest(name = "value={0}") + // @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, + // Float.MAX_VALUE, Float.POSITIVE_INFINITY}) + // @DisplayName("Reading a float") + // void read(float value) { + // final var seq = sequence(asBytes(c -> c.putFloat(value))); + // final var pos = seq.position(); + // final var readFloat = seq.readFloat(); + // if (Float.isNaN(value)) { + // assertThat(readFloat).isNaN(); + // } else { + // assertThat(readFloat).isEqualTo(value); + // } + // assertThat(seq.position()).isEqualTo(pos + 4); + // } + // + // @ParameterizedTest(name = "value={0}") + // @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, + // 
Float.MAX_VALUE, Float.POSITIVE_INFINITY})
+ // @DisplayName("Reading a float in Little Endian")
+ // void readLittleEndian(float value) {
+ // final var seq = sequence(asBytes(c -> c.putFloat(value), LITTLE_ENDIAN));
+ // final var pos = seq.position();
+ // final var readFloat = seq.readFloat(LITTLE_ENDIAN);
+ // if (Float.isNaN(value)) {
+ // assertThat(readFloat).isNaN();
+ // } else {
+ // assertThat(readFloat).isEqualTo(value);
+ // }
+ // assertThat(seq.position()).isEqualTo(pos + 4);
+ // }
+ //
+ // @ParameterizedTest(name = "value={0}")
+ // @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f,
+ // Float.MAX_VALUE, Float.POSITIVE_INFINITY})
+ // @DisplayName("Reading a float in Big Endian")
+ // void readBigEndian(float value) {
+ // final var seq = sequence(asBytes(c -> c.putFloat(value), BIG_ENDIAN));
+ // final var pos = seq.position();
+ // final var readFloat = seq.readFloat(BIG_ENDIAN);
+ // if (Float.isNaN(value)) {
+ // assertThat(readFloat).isNaN();
+ // } else {
+ // assertThat(readFloat).isEqualTo(value);
+ // }
+ // assertThat(seq.position()).isEqualTo(pos + 4);
+ // }
+ //
+ // @Test
+ // @DisplayName("Read a mixture of big and little endian data")
+ // void readMixedEndian() {
+ // final var seq = sequence(asBytes(c -> {
+ // c.putFloat(0x01020304);
+ // c.order(LITTLE_ENDIAN);
+ // c.putFloat(0x05060708);
+ // c.order(BIG_ENDIAN);
+ // c.putFloat(0x990A0B0C);
+ // c.order(LITTLE_ENDIAN);
+ // c.putFloat(0x9D0E0F10);
+ // }));
+ // assertThat(seq.readFloat()).isEqualTo(0x01020304);
+ // assertThat(seq.readFloat(LITTLE_ENDIAN)).isEqualTo(0x05060708);
+ // assertThat(seq.readFloat()).isEqualTo(0x990A0B0C);
+ // assertThat(seq.readFloat(LITTLE_ENDIAN)).isEqualTo(0x9D0E0F10);
+ // }
+ // }
+ //
+ // @Nested
+ // @DisplayName("readDouble()")
+ // final class ReadDoubleTest {
+ // @Test
+ // @DisplayName("Reading a double from an empty sequence throws BufferUnderflowException")
+ // void readFromEmptyDataThrows() {
+ // final var seq = emptySequence();
+ // assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
+ // }
+ //
+ // @Test
+ // @DisplayName("Reading a double from a full read sequence throws BufferUnderflowException")
+ // void readFromFullyReadDataThrows() {
+ // final var seq = fullyUsedSequence();
+ // assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
+ // }
+ //
+ // @Test
+ // @DisplayName("Reading a double past the limit throws BufferUnderflowException")
+ // void readPastLimit() {
+ // // Given a sequence of bytes with a limit where position == limit
+ // final var seq = sequence(TEST_BYTES);
+ // seq.limit(5);
+ // // When we try to read a double, then we get a BufferUnderflowException
+ // seq.skip(4); // Only 1 byte left, not enough
+ // assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
+ // seq.skip(1); // No bytes left, not enough
+ // assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
+ // }
+ //
+ // @Test
+ // @DisplayName("Reading a double when less than 8 bytes are available throws BufferUnderflowException")
+ // void readInsufficientDataThrows() {
+ // for (int i = 0; i < 7; i++) {
+ // final var seq = sequence(new byte[i]);
+ // assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
+ // }
+ // }
+ //
+ // @ParameterizedTest(name = "value={0}")
+ // @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0,
1.4f, + // 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY}) + // @DisplayName("Reading a double") + // void read(double value) { + // final var seq = sequence(asBytes(c -> c.putDouble(value))); + // final var pos = seq.position(); + // final var readDouble = seq.readDouble(); + // if (Double.isNaN(value)) { + // assertThat(readDouble).isNaN(); + // } else { + // assertThat(readDouble).isEqualTo(value); + // } + // assertThat(seq.position()).isEqualTo(pos + 8); + // } + // + // @ParameterizedTest(name = "value={0}") + // @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, + // 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY}) + // @DisplayName("Reading a double in Little Endian") + // void readLittleEndian(double value) { + // final var seq = sequence(asBytes(c -> c.putDouble(value), LITTLE_ENDIAN)); + // final var pos = seq.position(); + // final var readDouble = seq.readDouble(LITTLE_ENDIAN); + // if (Double.isNaN(value)) { + // assertThat(readDouble).isNaN(); + // } else { + // assertThat(readDouble).isEqualTo(value); + // } + // assertThat(seq.position()).isEqualTo(pos + 8); + // } + // + // @ParameterizedTest(name = "value={0}") + // @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, + // 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY}) + // @DisplayName("Reading a double in Big Endian") + // void readBigEndian(double value) { + // final var seq = sequence(asBytes(c -> c.putDouble(value), BIG_ENDIAN)); + // final var pos = seq.position(); + // final var readDouble = seq.readDouble(BIG_ENDIAN); + // if (Double.isNaN(value)) { + // assertThat(readDouble).isNaN(); + // } else { + // assertThat(readDouble).isEqualTo(value); + // } + // assertThat(seq.position()).isEqualTo(pos + 8); + // } + // + // @Test + // @DisplayName("Read a mixture of big and little endian data") + // void readMixedEndian() { + // final var seq = sequence(asBytes(c -> { + // c.putDouble(0x9102030405060708L); + // c.order(LITTLE_ENDIAN); + // c.putDouble(0x990A0B0C0D0E0F10L); + // c.order(BIG_ENDIAN); + // c.putDouble(0x1112131415161718L); + // c.order(LITTLE_ENDIAN); + // c.putDouble(0x191A1B1C1D1E1F20L); + // })); + // assertThat(seq.readDouble()).isEqualTo(0x9102030405060708L); + // assertThat(seq.readDouble(LITTLE_ENDIAN)).isEqualTo(0x990A0B0C0D0E0F10L); + // assertThat(seq.readDouble()).isEqualTo(0x1112131415161718L); + // assertThat(seq.readDouble(LITTLE_ENDIAN)).isEqualTo(0x191A1B1C1D1E1F20L); + // } + // } + // @Nested + // @DisplayName("readVarInt()") + // final class ReadVarIntTest { + // @ParameterizedTest + // @ValueSource(booleans = {false, true}) + // @DisplayName("Reading a varint from an empty sequence throws BufferUnderflowException") + // void readFromEmptyDataThrows(final boolean zigZag) { + // final var seq = emptySequence(); + // assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class); + // } + // + // @ParameterizedTest + // @ValueSource(booleans = {false, true}) + // @DisplayName("Reading a varint from a full read sequence throws BufferUnderflowException") + // void readFromFullyReadDataThrows(final boolean zigZag) { + // final var seq = fullyUsedSequence(); + // assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class); + // } + // + // @Test + // @DisplayName("Reading a varint past the limit throws BufferUnderflowException") + // void readPastLimit() { + // // Given a sequence of bytes with a limit where position 
== limit
+ // final var seq = sequence(TEST_BYTES);
+ // seq.limit(5);
+ // seq.skip(5);
+ // // When we try to read a varint, then we get a BufferUnderflowException
+ // assertThatThrownBy(() -> seq.readVarInt(false)).isInstanceOf(BufferUnderflowException.class);
+ // assertThatThrownBy(() -> seq.readVarInt(true)).isInstanceOf(BufferUnderflowException.class);
+ // }
+ //
+ // @ParameterizedTest
+ // @ValueSource(booleans = {false, true})
+ // @DisplayName("Reading a varint when less than 4 bytes are available throws BufferUnderflowException")
+ // void readInsufficientDataThrows(final boolean zigZag) {
+ // final var seq = sequence(new byte[] { (byte) 0b10101100 });
+ // assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class);
+ // }
+ //
+ // @Test
+ // @DisplayName("Read a varint")
+ // void read() {
+ // final var seq = sequence(new byte[] { (byte) 0b10101100, 0b00000010 });
+ // final var pos = seq.position();
+ // final var value = seq.readVarInt(false);
+ // assertThat(value).isEqualTo(300);
+ // assertThat(seq.position()).isEqualTo(pos + 2);
+ // }
+ //
+ // @Test
+ // @DisplayName("Read a varint with zig zag encoding")
+ // void readZigZag() {
+ // final var seq = sequence(new byte[] { (byte) 0b10101101, 0b00000010 });
+ // final var pos = seq.position();
+ // final var value = seq.readVarInt(true);
+ // assertThat(value).isEqualTo(-151);
+ // assertThat(seq.position()).isEqualTo(pos + 2);
+ // }
+ // }
+ //
+ // @Nested
+ // @DisplayName("readVarLong()")
+ // final class ReadVarLongTest {
+ // @ParameterizedTest
+ // @ValueSource(booleans = {false, true})
+ // @DisplayName("Reading a varlong from an empty sequence throws BufferUnderflowException")
+ // void readFromEmptyDataThrows(final boolean zigZag) {
+ // final var seq = emptySequence();
+ // assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class);
+ // }
+ //
+ // @ParameterizedTest
+ // @ValueSource(booleans = {false, true})
+ // @DisplayName("Reading a varlong from a full read sequence throws BufferUnderflowException")
+ // void readFromFullyReadDataThrows(final boolean zigZag) {
+ // final var seq = fullyUsedSequence();
+ // assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class);
+ // }
+ //
+ // @Test
+ // @DisplayName("Reading a varlong past the limit throws BufferUnderflowException")
+ // void readPastLimit() {
+ // // Given a sequence of bytes with a limit where position == limit
+ // final var seq = sequence(TEST_BYTES);
+ // seq.limit(5);
+ // seq.skip(5);
+ // // When we try to read a varlong, then we get a BufferUnderflowException
+ // assertThatThrownBy(() -> seq.readVarLong(false)).isInstanceOf(BufferUnderflowException.class);
+ // assertThatThrownBy(() -> seq.readVarLong(true)).isInstanceOf(BufferUnderflowException.class);
+ // }
+ //
+ // @ParameterizedTest
+ // @ValueSource(booleans = {false, true})
+ // @DisplayName("Reading a varlong when less than 4 bytes are available throws BufferUnderflowException")
+ // void readInsufficientDataThrows(final boolean zigZag) {
+ // final var seq = sequence(new byte[] { (byte) 0b10101100 });
+ // assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class);
+ // }
+ //
+ // @Test
+ // @DisplayName("Read a varlong")
+ // void read() {
+ // final var seq = sequence(new byte[] { (byte) 0b10101100, 0b00000010 });
+ // final var pos = seq.position();
+ // final var value = seq.readVarLong(false);
+ //
assertThat(value).isEqualTo(300); + // assertThat(seq.position()).isEqualTo(pos + 2); + // } + // + // @Test + // @DisplayName("Read a varlong with zig zag encoding") + // void readZigZag() { + // final var seq = sequence(new byte[] { (byte) 0b10101101, 0b00000010 }); + // final var pos = seq.position(); + // final var value = seq.readVarLong(true); + // assertThat(value).isEqualTo(-151); + // assertThat(seq.position()).isEqualTo(pos + 2); + // } + // } } diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/UnsafeUtilsTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/UnsafeUtilsTest.java index 50affdcb..08e2dd43 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/UnsafeUtilsTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/UnsafeUtilsTest.java @@ -26,14 +26,14 @@ private static long getLong(final byte[] arr, final int offset) { final byte b6 = arr[offset + 5]; final byte b7 = arr[offset + 6]; final byte b8 = arr[offset + 7]; - return (((long)b1 << 56) + - ((long)(b2 & 255) << 48) + - ((long)(b3 & 255) << 40) + - ((long)(b4 & 255) << 32) + - ((long)(b5 & 255) << 24) + - ((b6 & 255) << 16) + - ((b7 & 255) << 8) + - (b8 & 255)); + return (((long) b1 << 56) + + ((long) (b2 & 255) << 48) + + ((long) (b3 & 255) << 40) + + ((long) (b4 & 255) << 32) + + ((long) (b5 & 255) << 24) + + ((b6 & 255) << 16) + + ((b7 & 255) << 8) + + (b8 & 255)); } // Tests that UnsafeUtils.getInt() and RandomAccessData.getInt() produce the same results @@ -61,5 +61,4 @@ void getLongTest() { assertEquals(getLong(src, i), UnsafeUtils.getLong(src, i)); } } - } diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableSequentialDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableSequentialDataTest.java index e349ec24..bba3476e 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableSequentialDataTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableSequentialDataTest.java @@ -2,7 +2,6 @@ package com.hedera.pbj.runtime.io; import edu.umd.cs.findbugs.annotations.NonNull; - import java.io.UncheckedIOException; import java.nio.BufferOverflowException; import java.nio.BufferUnderflowException; @@ -39,7 +38,6 @@ private StubbedWritableSequentialData(@NonNull final byte[] bytes) { this.limit = this.bytes.length; } - @Override public long capacity() { return bytes.length; diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableTestBase.java index e67c106d..c5c859dd 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableTestBase.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableTestBase.java @@ -1,17 +1,20 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.runtime.io; +import static java.nio.ByteOrder.BIG_ENDIAN; +import static java.nio.ByteOrder.LITTLE_ENDIAN; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + import com.hedera.pbj.runtime.io.buffer.BufferedData; import com.hedera.pbj.runtime.io.buffer.Bytes; import com.hedera.pbj.runtime.io.buffer.RandomAccessData; import 
com.hedera.pbj.runtime.io.stream.WritableStreamingData; import edu.umd.cs.findbugs.annotations.NonNull; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; -import org.junit.jupiter.params.provider.ValueSource; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -21,14 +24,12 @@ import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.Arrays; -import static java.nio.ByteOrder.BIG_ENDIAN; -import static java.nio.ByteOrder.LITTLE_ENDIAN; -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; +import org.junit.jupiter.params.provider.ValueSource; /** * Base test class for testing {@link WritableSequentialData}. @@ -124,7 +125,7 @@ void write() { // then the position forward by a single byte assertThat(seq.position()).isEqualTo(pos + 1); // and the byte was written unmodified - final var expected = new byte[] { (byte) 0b1110_0011 }; + final var expected = new byte[] {(byte) 0b1110_0011}; assertThat(extractWrittenBytes(seq)).isEqualTo(expected); } } @@ -166,8 +167,10 @@ void readNullSrcThrows() { void negativeOffsetThrows() { // Given a sequence final var seq = sequence(); - // When we try to write bytes using a byte array with a negative offset, then we get an IndexOutOfBoundsException - assertThatThrownBy(() -> seq.writeBytes(new byte[10], -1, 10)).isInstanceOf(IndexOutOfBoundsException.class); + // When we try to write bytes using a byte array with a negative offset, then we get an + // IndexOutOfBoundsException + assertThatThrownBy(() -> seq.writeBytes(new byte[10], -1, 10)) + .isInstanceOf(IndexOutOfBoundsException.class); } @Test @@ -190,11 +193,13 @@ void tooLargeOffsetThrows() { void negativeLengthThrows() { // Given a sequence final var seq = sequence(); - // When we try to write bytes using a byte array with a negative length, then we get an IllegalArgumentException + // When we try to write bytes using a byte array with a negative length, then we get an + // IllegalArgumentException assertThatThrownBy(() -> seq.writeBytes(new byte[10], 0, -1)).isInstanceOf(IllegalArgumentException.class); - // When we try to write bytes using an input stream with a negative length, then we get an IllegalArgumentException + // When we try to write bytes using an input stream with a negative length, then we get an + // IllegalArgumentException final var stream = new ByteArrayInputStream(new byte[10]); - assertThatThrownBy(() -> seq.writeBytes(stream, -1)).isInstanceOf(IllegalArgumentException.class); + assertThatThrownBy(() -> seq.writeBytes(stream, -1)).isInstanceOf(IllegalArgumentException.class); } @Test @@ -245,13 +250,13 @@ void writePastLimit() { assertThatThrownBy(() -> seq.writeBytes(bytes)).isInstanceOf(BufferOverflowException.class); } - @Test - @DisplayName("Writing bytes from an InputStream with less data than the maxLength returns number of bytes written") + @DisplayName( + 
"Writing bytes from an InputStream with less data than the maxLength returns number of bytes written") void writingFromInputStreamWithInsufficientData() { // Given a sequence and an input stream with some data final var seq = sequence(); - final var bytes = new byte[] { 1, 2, 3, 4, 5 }; + final var bytes = new byte[] {1, 2, 3, 4, 5}; final var stream = new ByteArrayInputStream(bytes); // When we write the stream data to the sequence, and the max length is larger than the number // of bytes we have to write, @@ -266,7 +271,7 @@ void writingFromInputStreamWithInsufficientData() { void writingFromInputStreamWithNoData() { // Given a sequence and an input stream with no data final var seq = sequence(); - final var bytes = new byte[] { }; + final var bytes = new byte[] {}; final var stream = new ByteArrayInputStream(bytes); // When we write the stream data to the sequence final var numBytesWritten = seq.writeBytes(stream, 10); @@ -279,7 +284,7 @@ void writingFromInputStreamWithNoData() { void writingFromInputStreamWithLotsOfData() { // Given a sequence and an input stream with lots of data final var seq = sequence(); - final var bytes = new byte[1024*1024]; + final var bytes = new byte[1024 * 1024]; for (int i = 0; i < bytes.length; i++) { bytes[i] = (byte) i; } @@ -334,7 +339,7 @@ void writeZeroSrcByteBuffer() { assertThat(extractWrittenBytes(seq)).isEmpty(); } - @Test + @Test @DisplayName("Writing bytes from a src BufferedData where the src has length of 0") void writeZeroSrcBufferedData() { // Given a sequence and an empty src BufferedData @@ -355,7 +360,7 @@ void writeSmallerSrcByteArray() { final var seq = sequence(); seq.limit(10); // When we try writing bytes from the src - final var src = new byte[] { 1, 2, 3, 4, 5 }; + final var src = new byte[] {1, 2, 3, 4, 5}; final var pos = seq.position(); seq.writeBytes(src); // Then the sequence received those bytes and the position is updated @@ -369,12 +374,12 @@ void writeSmallerSrcByteArrayWithOffset() { // Given a sequence with a src byte array who's size is less than the limit final var seq = sequence(); seq.limit(10); - final var src = new byte[] { 1, 2, 3, 4, 5 }; + final var src = new byte[] {1, 2, 3, 4, 5}; // When we try writing bytes from the src final var pos = seq.position(); seq.writeBytes(src, 2, 2); // Then the sequence received those bytes and the position is updated - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 3, 4 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {3, 4}); assertThat(seq.position()).isEqualTo(pos + 2); } @@ -385,7 +390,7 @@ void writeSmallerSrcByteBuffer() { final var seq = sequence(); seq.limit(10); // When we try writing bytes from the src - final var src = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5 }); + final var src = ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5}); final var pos = seq.position(); seq.writeBytes(src); // Then the sequence received those bytes and the position is updated @@ -399,13 +404,13 @@ void writeSmallerSrcByteBufferWithOffset() { // Given a sequence with a src ByteBuffer who's size is less than the limit final var seq = sequence(); seq.limit(10); - final var src = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5 }); + final var src = ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5}); src.position(2); // When we try writing bytes from the src final var pos = seq.position(); seq.writeBytes(src); // Then the sequence received those bytes and the position is updated - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 3, 4, 5 }); + 
assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {3, 4, 5}); assertThat(seq.position()).isEqualTo(pos + 3); } @@ -416,29 +421,31 @@ void writeSmallerSrcBufferedData() { final var seq = sequence(); seq.limit(10); // When we try writing bytes from the src - final var src = BufferedData.wrap(new byte[] { 1, 2, 3, 4, 5 }); + final var src = BufferedData.wrap(new byte[] {1, 2, 3, 4, 5}); final var pos = seq.position(); seq.writeBytes(src); // Then the sequence received those bytes and the position is updated - final var writtenBytes = new byte[1024]; // make large enough to hold extra bytes should they have been written + final var writtenBytes = + new byte[1024]; // make large enough to hold extra bytes should they have been written assertThat(src.getBytes(0, writtenBytes)).isEqualTo(5); assertThat(extractWrittenBytes(seq)).isEqualTo(Arrays.copyOfRange(writtenBytes, 0, 5)); assertThat(seq.position()).isEqualTo(pos + 5); } @Test - @DisplayName("Writing bytes from a src BufferedData with offset where the src is smaller than the sequence limit") + @DisplayName( + "Writing bytes from a src BufferedData with offset where the src is smaller than the sequence limit") void writeSmallerSrcBufferedDataWithOffset() { // Given a sequence with a src ByteBuffer who's size is less than the limit final var seq = sequence(); seq.limit(10); - final var src = BufferedData.wrap(new byte[] { 1, 2, 3, 4, 5 }); + final var src = BufferedData.wrap(new byte[] {1, 2, 3, 4, 5}); src.position(2); // When we try writing bytes from the src final var pos = seq.position(); seq.writeBytes(src); // Then the sequence received those bytes and the position is updated - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 3, 4, 5 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {3, 4, 5}); assertThat(seq.position()).isEqualTo(pos + 3); } @@ -449,11 +456,12 @@ void writeSmallerSrcRandomAccessData() { final var seq = sequence(); seq.limit(10); // When we try writing bytes from the src - final var src = Bytes.wrap(new byte[] { 1, 2, 3, 4, 5 }); + final var src = Bytes.wrap(new byte[] {1, 2, 3, 4, 5}); final var pos = seq.position(); seq.writeBytes(src); // Then the sequence received those bytes and the position is updated - final var writtenBytes = new byte[1024]; // make large enough to hold extra bytes should they have been written + final var writtenBytes = + new byte[1024]; // make large enough to hold extra bytes should they have been written assertThat(src.getBytes(0, writtenBytes)).isEqualTo(5); assertThat(extractWrittenBytes(seq)).isEqualTo(Arrays.copyOfRange(writtenBytes, 0, 5)); assertThat(seq.position()).isEqualTo(pos + 5); @@ -465,13 +473,13 @@ void writeSmallerSrcInputStream() { // Given a sequence with a src InputStream with lots of items final var seq = sequence(); seq.limit(10); - final var srcBytes = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20 }; + final var srcBytes = new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20}; final var stream = new ByteArrayInputStream(srcBytes); // When we try writing bytes from the src with a maxLength less than the limit final var pos = seq.position(); seq.writeBytes(stream, 5); // Then the sequence received those fewer bytes and the position is updated - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 1, 2, 3, 4, 5 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {1, 2, 3, 4, 5}); assertThat(seq.position()).isEqualTo(pos + 5); } @@ -480,7 +488,7 @@ void 
writeSmallerSrcInputStream() { void writeSrcByteArray() { final var seq = sequence(); seq.limit(10); - final var src = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }; + final var src = new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}; final var pos = seq.position(); seq.writeBytes(src); assertThat(extractWrittenBytes(seq)).isEqualTo(src); @@ -488,14 +496,15 @@ void writeSrcByteArray() { } @Test - @DisplayName("Writing bytes from a src byte array with offset where the src is the same length as the sequence limit") + @DisplayName( + "Writing bytes from a src byte array with offset where the src is the same length as the sequence limit") void writeSrcByteArrayWithOffset() { final var seq = sequence(); seq.limit(5); - final var src = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }; + final var src = new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}; final var pos = seq.position(); seq.writeBytes(src, 5, 5); - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 6, 7, 8, 9, 10 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {6, 7, 8, 9, 10}); assertThat(seq.position()).isEqualTo(pos + 5); } @@ -504,7 +513,7 @@ void writeSrcByteArrayWithOffset() { void writeSrcByteBuffer() { final var seq = sequence(); seq.limit(10); - final var src = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); + final var src = ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); final var pos = seq.position(); seq.writeBytes(src); assertThat(extractWrittenBytes(seq)).isEqualTo(src.array()); @@ -512,16 +521,17 @@ void writeSrcByteBuffer() { } @Test - @DisplayName("Writing bytes from a src ByteBuffer with offset where the src is the same length as the sequence limit") + @DisplayName( + "Writing bytes from a src ByteBuffer with offset where the src is the same length as the sequence limit") void writeSrcByteBufferWithOffset() { final var seq = sequence(); seq.limit(5); - final var src = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); + final var src = ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); src.position(2); src.limit(7); final var pos = seq.position(); seq.writeBytes(src); - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 3, 4, 5, 6, 7 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {3, 4, 5, 6, 7}); assertThat(seq.position()).isEqualTo(pos + 5); } @@ -532,11 +542,11 @@ void writeSrcDirectByteBufferWithOffset() { final int LEN = 10; seq.limit(LEN); final var src = ByteBuffer.allocateDirect(LEN); - src.put(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); + src.put(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); src.flip(); final var pos = seq.position(); seq.writeBytes(src); - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); assertThat(seq.position()).isEqualTo(pos + 10); } @@ -545,24 +555,25 @@ void writeSrcDirectByteBufferWithOffset() { void writeSrcBufferedData() { final var seq = sequence(); seq.limit(10); - final var src = BufferedData.wrap(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); + final var src = BufferedData.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); final var pos = seq.position(); seq.writeBytes(src); - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); assertThat(seq.position()).isEqualTo(pos + 10); } @Test - @DisplayName("Writing bytes from a src 
BufferedData with offset where the src is the same length as the sequence limit") + @DisplayName( + "Writing bytes from a src BufferedData with offset where the src is the same length as the sequence limit") void writeSrcBufferedDataWithOffset() { final var seq = sequence(); seq.limit(5); - final var src = BufferedData.wrap(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); + final var src = BufferedData.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); src.position(2); src.limit(7); final var pos = seq.position(); seq.writeBytes(src); - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 3, 4, 5, 6, 7 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {3, 4, 5, 6, 7}); assertThat(seq.position()).isEqualTo(pos + 5); } @@ -571,26 +582,27 @@ void writeSrcBufferedDataWithOffset() { void writeSrcRandomAccessData() { final var seq = sequence(); seq.limit(10); - final var src = Bytes.wrap(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); + final var src = Bytes.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); final var pos = seq.position(); seq.writeBytes(src); - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); assertThat(seq.position()).isEqualTo(pos + 10); } @Test - @DisplayName("Writing bytes from a src InputStream where the maxLength is the same length as the sequence limit") + @DisplayName( + "Writing bytes from a src InputStream where the maxLength is the same length as the sequence limit") void writeSrcInputStream() { // Given a sequence with a src InputStream with the same number of items as the limit final var seq = sequence(); seq.limit(10); - final var srcBytes = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }; + final var srcBytes = new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}; final var stream = new ByteArrayInputStream(srcBytes); // When we try writing bytes from the src with a maxLength equal to limit final var pos = seq.position(); seq.writeBytes(stream, 10); // Then the sequence received those bytes and the position is updated - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); assertThat(seq.position()).isEqualTo(pos + 10); } @@ -600,13 +612,13 @@ void writeSrcInputStreamLargerThanLimit() { // Given a sequence with a src InputStream with more items than the limit final var seq = sequence(); seq.limit(10); - final var srcBytes = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20 }; + final var srcBytes = new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20}; final var stream = new ByteArrayInputStream(srcBytes); // When we try writing bytes from the src with a maxLength greater than the limit final var pos = seq.position(); seq.writeBytes(stream, 15); // Then the sequence received only up to `limit` bytes and the position is updated - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); assertThat(seq.position()).isEqualTo(pos + 10); } @@ -615,7 +627,7 @@ void writeSrcInputStreamLargerThanLimit() { void writeSrcInputStreamWithTooSmallMaxLength() { // Given a sequence with a src input stream final var seq = sequence(); - final var arr = new byte[] { 1, 2, 3, 4, 5 }; + final var arr = new byte[] {1, 2, 3, 
4, 5}; final var src = new ByteArrayInputStream(arr); // When we try writing bytes from the src with a maxLength that is == 0 final var pos = seq.position(); @@ -631,7 +643,7 @@ void writeSrcInputStreamWithNothingRemaining() { // Given a sequence with a src input stream and a seq with nothing remaining final var seq = sequence(); seq.limit(0); - final var arr = new byte[] { 1, 2, 3, 4, 5 }; + final var arr = new byte[] {1, 2, 3, 4, 5}; final var src = new ByteArrayInputStream(arr); // When we try writing bytes from the src with a maxLength that is > 0 final var pos = seq.position(); @@ -654,9 +666,9 @@ void closed() throws IOException { @ParameterizedTest(name = "offset={0}, length={1}") @CsvSource({ - "-1, 1", // Negative offset - "100, 10", // Offset larger than the src array size - "5, 10", // Offset+Length larger than the src array size + "-1, 1", // Negative offset + "100, 10", // Offset larger than the src array size + "5, 10", // Offset+Length larger than the src array size }) @DisplayName("Writing bytes where the src offset and length are bad") void badOffsetLength(int offset, int length) { @@ -777,7 +789,8 @@ void writePastLimit() { // When we try to write an unsigned int, then we get a BufferOverflowException seq.skip(4); // Only 1 byte left, not enough assertThatThrownBy(() -> seq.writeUnsignedInt(1)).isInstanceOf(BufferOverflowException.class); - assertThatThrownBy(() -> seq.writeUnsignedInt(1234, LITTLE_ENDIAN)).isInstanceOf(BufferOverflowException.class); + assertThatThrownBy(() -> seq.writeUnsignedInt(1234, LITTLE_ENDIAN)) + .isInstanceOf(BufferOverflowException.class); } @Test @@ -974,7 +987,19 @@ void writeInsufficientDataThrows() { } @ParameterizedTest(name = "value={0}") - @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY}) + @ValueSource( + floats = { + Float.NaN, + Float.NEGATIVE_INFINITY, + Float.MIN_VALUE, + -8.2f, + -1.3f, + 0, + 1.4f, + 8.5f, + Float.MAX_VALUE, + Float.POSITIVE_INFINITY + }) @DisplayName("Writing a float") void write(float value) { final var seq = sequence(); @@ -985,7 +1010,19 @@ void write(float value) { } @ParameterizedTest(name = "value={0}") - @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY}) + @ValueSource( + floats = { + Float.NaN, + Float.NEGATIVE_INFINITY, + Float.MIN_VALUE, + -8.2f, + -1.3f, + 0, + 1.4f, + 8.5f, + Float.MAX_VALUE, + Float.POSITIVE_INFINITY + }) @DisplayName("Writing a float in Little Endian") void writeLittleEndian(float value) { final var seq = sequence(); @@ -996,7 +1033,19 @@ void writeLittleEndian(float value) { } @ParameterizedTest(name = "value={0}") - @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY}) + @ValueSource( + floats = { + Float.NaN, + Float.NEGATIVE_INFINITY, + Float.MIN_VALUE, + -8.2f, + -1.3f, + 0, + 1.4f, + 8.5f, + Float.MAX_VALUE, + Float.POSITIVE_INFINITY + }) @DisplayName("Writing a float in Big Endian") void writeBigEndian(float value) { final var seq = sequence(); @@ -1064,7 +1113,19 @@ void writeInsufficientDataThrows() { } @ParameterizedTest(name = "value={0}") - @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY}) + @ValueSource( + doubles = { + Double.NaN, + Double.NEGATIVE_INFINITY, + Double.MIN_VALUE, + 
-8.2f, + -1.3f, + 0, + 1.4f, + 8.5f, + Double.MAX_VALUE, + Double.POSITIVE_INFINITY + }) @DisplayName("Writing a double") void write(double value) { final var seq = sequence(); @@ -1075,7 +1136,19 @@ void write(double value) { } @ParameterizedTest(name = "value={0}") - @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY}) + @ValueSource( + doubles = { + Double.NaN, + Double.NEGATIVE_INFINITY, + Double.MIN_VALUE, + -8.2f, + -1.3f, + 0, + 1.4f, + 8.5f, + Double.MAX_VALUE, + Double.POSITIVE_INFINITY + }) @DisplayName("Writing a double in Little Endian") void writeLittleEndian(double value) { final var seq = sequence(); @@ -1086,7 +1159,19 @@ void writeLittleEndian(double value) { } @ParameterizedTest(name = "value={0}") - @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY}) + @ValueSource( + doubles = { + Double.NaN, + Double.NEGATIVE_INFINITY, + Double.MIN_VALUE, + -8.2f, + -1.3f, + 0, + 1.4f, + 8.5f, + Double.MAX_VALUE, + Double.POSITIVE_INFINITY + }) @DisplayName("Writing a double in Big Endian") void writeBigEndian(double value) { final var seq = sequence(); @@ -1115,7 +1200,7 @@ void writeMixedEndian() { })); } } - + @Nested @DisplayName("writeVarInt()") final class WriteVarIntTest { @@ -1151,10 +1236,8 @@ void writeInsufficientDataThrows(final boolean zigZag) { seq.skip(pos); assertThatThrownBy(() -> seq.writeVarInt(1234, zigZag)).isInstanceOf(BufferOverflowException.class); // A subsequent skip() will also throw an exception now that we hit the end of buffer - assertThatThrownBy(() -> seq.skip(1)).isInstanceOfAny( - BufferUnderflowException.class, - BufferOverflowException.class - ); + assertThatThrownBy(() -> seq.skip(1)) + .isInstanceOfAny(BufferUnderflowException.class, BufferOverflowException.class); } @Test @@ -1163,7 +1246,7 @@ void write() { final var seq = sequence(); final var pos = seq.position(); seq.writeVarInt(300, false); - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { (byte) 0b10101100, 0b00000010 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {(byte) 0b10101100, 0b00000010}); assertThat(seq.position()).isEqualTo(pos + 2); } @@ -1173,13 +1256,36 @@ void writeZigZag() { final var seq = sequence(); final var pos = seq.position(); seq.writeVarInt(-151, true); - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { (byte) 0b10101101, 0b00000010 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {(byte) 0b10101101, 0b00000010}); assertThat(seq.position()).isEqualTo(pos + 2); } @ParameterizedTest - @ValueSource(ints = {0, 1, 2, 3, 7, 8, 9, 1023, 1024, 1025, 65535, 65536, 0x7FFFFFFF, - -1, -2, -7, -1023, -1024, -65535, -65536, -0x7FFFFFFF, -0x80000000}) + @ValueSource( + ints = { + 0, + 1, + 2, + 3, + 7, + 8, + 9, + 1023, + 1024, + 1025, + 65535, + 65536, + 0x7FFFFFFF, + -1, + -2, + -7, + -1023, + -1024, + -65535, + -65536, + -0x7FFFFFFF, + -0x80000000 + }) @DisplayName("Varints must be encoded with less than 5 bytes") void checkVarIntLen(final int num) { final var seq = sequence(); @@ -1233,10 +1339,8 @@ void writeInsufficientDataThrows(final boolean zigZag) { seq.skip(pos); assertThatThrownBy(() -> seq.writeVarLong(3882918382L, zigZag)).isInstanceOf(BufferOverflowException.class); // A subsequent skip() will also throw an exception now that we hit the end of buffer - assertThatThrownBy(() -> seq.skip(1)).isInstanceOfAny( - 
BufferUnderflowException.class, - BufferOverflowException.class - ); + assertThatThrownBy(() -> seq.skip(1)) + .isInstanceOfAny(BufferUnderflowException.class, BufferOverflowException.class); } @Test @@ -1245,7 +1349,7 @@ void write() { final var seq = sequence(); final var pos = seq.position(); seq.writeVarLong(300, false); - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { (byte) 0b10101100, 0b00000010 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {(byte) 0b10101100, 0b00000010}); assertThat(seq.position()).isEqualTo(pos + 2); } @@ -1255,7 +1359,7 @@ void writeZigZag() { final var seq = sequence(); final var pos = seq.position(); seq.writeVarLong(-151, true); - assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { (byte) 0b10101101, 0b00000010 }); + assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {(byte) 0b10101101, 0b00000010}); assertThat(seq.position()).isEqualTo(pos + 2); } } diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTest.java index 9bf3c503..00d66591 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTest.java @@ -14,7 +14,7 @@ protected BufferedData allocate(final int size) { @NonNull @Override - protected BufferedData wrap(final byte[] arr) { + protected BufferedData wrap(final byte[] arr) { return new BufferedData(ByteBuffer.wrap(arr)); } diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTestBase.java index f9c5d170..6e375c1f 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTestBase.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTestBase.java @@ -90,9 +90,47 @@ void toStringWithSlice() { } @ParameterizedTest - @ValueSource(ints = {0, 1, 2, 4, 7, 8, 15, 16, 31, 32, 33, 127, 128, 512, 1000, 1024, 4000, - 16384, 65535, 65536, 65537, 0xFFFFFF, 0x1000000, 0x1000001, 0x7FFFFFFF, - -1, -7, -8, -9, -127, -128, -129, -65535, -65536, -0xFFFFFF, -0x1000000, -0x1000001, -0x80000000}) + @ValueSource( + ints = { + 0, + 1, + 2, + 4, + 7, + 8, + 15, + 16, + 31, + 32, + 33, + 127, + 128, + 512, + 1000, + 1024, + 4000, + 16384, + 65535, + 65536, + 65537, + 0xFFFFFF, + 0x1000000, + 0x1000001, + 0x7FFFFFFF, + -1, + -7, + -8, + -9, + -127, + -128, + -129, + -65535, + -65536, + -0xFFFFFF, + -0x1000000, + -0x1000001, + -0x80000000 + }) @DisplayName("readVarInt() works with views") void sliceThenReadVarInt(final int num) { final var buf = allocate(100); @@ -108,9 +146,47 @@ void sliceThenReadVarInt(final int num) { } @ParameterizedTest - @ValueSource(ints = {0, 1, 2, 4, 7, 8, 15, 16, 31, 32, 33, 127, 128, 512, 1000, 1024, 4000, - 16384, 65535, 65536, 65537, 0xFFFFFF, 0x1000000, 0x1000001, 0x7FFFFFFF, - -1, -7, -8, -9, -127, -128, -129, -65535, -65536, -0xFFFFFF, -0x1000000, -0x1000001, -0x80000000}) + @ValueSource( + ints = { + 0, + 1, + 2, + 4, + 7, + 8, + 15, + 16, + 31, + 32, + 33, + 127, + 128, + 512, + 1000, + 1024, + 4000, + 16384, + 65535, + 65536, + 65537, + 0xFFFFFF, + 0x1000000, + 0x1000001, + 0x7FFFFFFF, + -1, + -7, + -8, + -9, + -127, + -128, + -129, + -65535, + -65536, + -0xFFFFFF, + -0x1000000, + -0x1000001, + -0x80000000 + }) 
@DisplayName("readVar() won't read beyond 10 bytes") void readVarFromLargeBuffer(final int num) { final var buf = allocate(100); @@ -124,9 +200,42 @@ void readVarFromLargeBuffer(final int num) { } @ParameterizedTest - @ValueSource(longs = {0, 1, 7, 8, 9, 127, 128, 129, 1023, 1024, 1025, 65534, 65535, 65536, - 0xFFFFFFFFL, 0x100000000L, 0x100000001L, 0xFFFFFFFFFFFFL, 0x1000000000000L, 0x1000000000001L, - -1, -7, -8, -9, -127, -128, -129, -65534, -65535, -65536, -0xFFFFFFFFL, -0x100000000L, -0x100000001L}) + @ValueSource( + longs = { + 0, + 1, + 7, + 8, + 9, + 127, + 128, + 129, + 1023, + 1024, + 1025, + 65534, + 65535, + 65536, + 0xFFFFFFFFL, + 0x100000000L, + 0x100000001L, + 0xFFFFFFFFFFFFL, + 0x1000000000000L, + 0x1000000000001L, + -1, + -7, + -8, + -9, + -127, + -128, + -129, + -65534, + -65535, + -65536, + -0xFFFFFFFFL, + -0x100000000L, + -0x100000001L + }) @DisplayName("readVarLong() works with views") void sliceThenReadVarLong(final long num) { final var buf = allocate(256); diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedDataTest.java index 2e1c28ee..472d36ed 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedDataTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedDataTest.java @@ -14,7 +14,7 @@ protected BufferedData allocate(final int size) { @NonNull @Override - protected BufferedData wrap(final byte[] arr) { + protected BufferedData wrap(final byte[] arr) { return new ByteArrayBufferedData(ByteBuffer.wrap(arr)); } diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BytesTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BytesTest.java index 6625b1c2..920a3c69 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BytesTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BytesTest.java @@ -79,7 +79,7 @@ void toStringWorks1() { @Test void testReplicate() { - byte[] arr = new byte[] { 0, 1, 2 }; + byte[] arr = new byte[] {0, 1, 2}; // Only wrap the last two elements: Bytes bytes = Bytes.wrap(arr, 1, 2); @@ -112,11 +112,9 @@ void testReplicate() { @DisplayName("Tests wrapping of byte arrays") final class ByteWrappingTest { static Stream byteArraysTestCases() { - return Stream.of( - new byte[0], - new byte[] { 0 }, - new byte[] { Byte.MIN_VALUE, -100, -66, -7, -1, 0, 1, 9, 12, 51, 101, Byte.MAX_VALUE } - ); + return Stream.of(new byte[0], new byte[] {0}, new byte[] { + Byte.MIN_VALUE, -100, -66, -7, -1, 0, 1, 9, 12, 51, 101, Byte.MAX_VALUE + }); } @Test @@ -130,7 +128,7 @@ void nullArrayThrows() { @DisplayName("Getting a byte with a negative offset throws") void getByteWithNegativeOffsetThrows() { // Given a Bytes instance - final RandomAccessData bytes = Bytes.wrap(new byte[] { 1, 2, 3, 4 }); + final RandomAccessData bytes = Bytes.wrap(new byte[] {1, 2, 3, 4}); // When getting a byte with a negative offset // Then an IndexOutOfBoundsException is thrown assertThrows(IndexOutOfBoundsException.class, () -> bytes.getByte(-1)); @@ -168,17 +166,17 @@ void toByteArrayNon0() { assertNotEquals(byteArray, bytes.toByteArray(0, 5)); } -// @Test -// @DisplayName("Getting a byte with to large of an offset throws") -// void getByteWithLargeOffsetThrows() { -// // Given a Bytes instance -// final RandomAccessData bytes = Bytes.wrap(new byte[] { 
1, 2, 3, 4 }); -// // When getting a byte from an offset that is too large -// // Then an IndexOutOfBoundsException is thrown -// assertThrows(IndexOutOfBoundsException.class, () -> bytes.getByte(4)); -// assertThrows(IndexOutOfBoundsException.class, () -> bytes.getByte(5)); -// assertThrows(IndexOutOfBoundsException.class, () -> bytes.getByte(Integer.MAX_VALUE)); -// } + // @Test + // @DisplayName("Getting a byte with to large of an offset throws") + // void getByteWithLargeOffsetThrows() { + // // Given a Bytes instance + // final RandomAccessData bytes = Bytes.wrap(new byte[] { 1, 2, 3, 4 }); + // // When getting a byte from an offset that is too large + // // Then an IndexOutOfBoundsException is thrown + // assertThrows(IndexOutOfBoundsException.class, () -> bytes.getByte(4)); + // assertThrows(IndexOutOfBoundsException.class, () -> bytes.getByte(5)); + // assertThrows(IndexOutOfBoundsException.class, () -> bytes.getByte(Integer.MAX_VALUE)); + // } @ParameterizedTest @MethodSource("byteArraysTestCases") @@ -219,7 +217,7 @@ void equality(final byte[] value) { void notEqual(final byte[] value) { // Given two byte arrays with different bytes, when wrapped final RandomAccessData bytes1 = Bytes.wrap(value); - final RandomAccessData bytes2 = Bytes.wrap(new byte[]{ 1, 39, 28, 92 }); + final RandomAccessData bytes2 = Bytes.wrap(new byte[] {1, 39, 28, 92}); // Then they have different lengths assertNotEquals(bytes1.length(), bytes2.length()); // And they are not equal @@ -237,10 +235,7 @@ void notEqual(final byte[] value) { final class StringWrappingTest { static Stream stringTestCases() { return Stream.of( - "", - "This is a test of the emergency broadcast system", - "Some crazy unicode characters here 🤪" - ); + "", "This is a test of the emergency broadcast system", "Some crazy unicode characters here 🤪"); } @Test @@ -299,12 +294,14 @@ void notEqual(final String value) { assertNotEquals(bytes1.hashCode(), bytes2.hashCode()); } } + @Test @DisplayName("Get Unsigned Bytes") void getUnsignedBytes() { // Given a Bytes instance with bytes that are within the range of signed bytes and some that are // outside the range of signed bytes but within the range of unsigned bytes - final RandomAccessData bytes = Bytes.wrap(new byte[]{0b0000_0000, 0b0000_0001, (byte) 0b1000_0000, (byte) 0b1111_1111}); + final RandomAccessData bytes = + Bytes.wrap(new byte[] {0b0000_0000, 0b0000_0001, (byte) 0b1000_0000, (byte) 0b1111_1111}); // Then reading them as unsigned bytes returns the expected values assertEquals(0, bytes.getUnsignedByte(0)); assertEquals(1, bytes.getUnsignedByte(1)); @@ -352,6 +349,7 @@ void writeToOutputStreamNo0OffsPartial() throws IOException { byte[] comp = {0, 1, 2, 3, 4}; assertArrayEquals(comp, res); } + @Test @DisplayName("Write to OutputStream") void writeToWritableSequentialData() throws IOException { @@ -557,7 +555,7 @@ void verifySignatureBoundsChecks() throws InvalidKeyException { @Test @DisplayName("Tests the signature verification without a mock") void realSignatureTest() throws NoSuchAlgorithmException, InvalidKeyException, SignatureException { - final Bytes bytes = Bytes.wrap(new byte[]{1, 2, 3, 4, 5}); + final Bytes bytes = Bytes.wrap(new byte[] {1, 2, 3, 4, 5}); final KeyPair keyPair = KeyPairGenerator.getInstance("RSA").generateKeyPair(); // sign the data final Signature signer = Signature.getInstance("SHA256withRSA"); @@ -572,7 +570,7 @@ void realSignatureTest() throws NoSuchAlgorithmException, InvalidKeyException, S // test a bad signature final Signature verifier2 = 
Signature.getInstance("SHA256withRSA"); verifier2.initVerify(keyPair.getPublic()); - Bytes.wrap(new byte[]{123, 1, 2, 3}).updateSignature(verifier2); + Bytes.wrap(new byte[] {123, 1, 2, 3}).updateSignature(verifier2); assertFalse(signature.verifySignature(verifier2)); } @@ -581,148 +579,151 @@ void realSignatureTest() throws NoSuchAlgorithmException, InvalidKeyException, S // matches prefix.... -// -// -// -// -// static Stream bytesTestCases() { -// return Stream.of(Byte.MIN_VALUE,-100,-66,-7,-1,0,1,9,51,101,Byte.MAX_VALUE).map(Number::byteValue); -// } -// -// @ParameterizedTest -// @MethodSource("bytesTestCases") -// void byteTest(Byte value) { -// final int length = Byte.BYTES; -// DataBuffer db = DataBuffer.allocate(length, false); -// db.writeByte(value); -// db.reset(); -// final Bytes bytes = db.readBytes(length); -// assertEquals(value, bytes.getByte(0)); -// } -// -// static Stream unsignedBytesTestCases() { -// return Stream.of(0,1,9,51,101,127,128,255).map(Number::intValue); -// } -// -// @ParameterizedTest -// @MethodSource("unsignedBytesTestCases") -// void unsignedByteTest(Integer value) { -// final int length = Byte.BYTES; -// DataBuffer db = DataBuffer.allocate(length, false); -// db.writeUnsignedByte(value); -// db.reset(); -// final Bytes bytes = db.readBytes(length); -// assertEquals(value, bytes.getUnsignedByte(0)); -// } -// -// static Stream intsTestCases() { -// return Stream.of(Integer.MIN_VALUE,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE).map(Number::intValue); -// } -// -// @ParameterizedTest -// @MethodSource("intsTestCases") -// void intTest(Integer value) { -// final int length = Integer.BYTES*2; -// DataBuffer db = DataBuffer.allocate(length, false); -// db.writeInt(value); -// db.writeInt(value, ByteOrder.LITTLE_ENDIAN); -// db.reset(); -// final Bytes bytes = db.readBytes(length); -// assertEquals(value, bytes.getInt(0)); -// assertEquals(value, bytes.getInt(Integer.BYTES, ByteOrder.LITTLE_ENDIAN)); -// } -// -// @ParameterizedTest -// @MethodSource("intsTestCases") -// void varIntTest(Integer value) { -// DataBuffer db = DataBuffer.allocate(20, false); -// db.writeVarInt(value, false); -// final int varInt1Size = (int)db.position(); -// db.writeVarInt(value, true); -// db.flip(); -// final Bytes bytes = db.readBytes((int)db.remaining()); -// assertEquals(value, bytes.getVarInt(0, false)); -// assertEquals(value, bytes.getVarInt(varInt1Size, true)); -// } -// -// static Stream unsignedIntsTestCases() { -// return Stream.of(0,1,9,51,127,Integer.MAX_VALUE*2L).map(Number::longValue); -// } -// -// @ParameterizedTest -// @MethodSource("unsignedIntsTestCases") -// void unsignedIntTest(Long value) { -// final int length = Integer.BYTES*2; -// DataBuffer db = DataBuffer.allocate(length, false); -// db.writeUnsignedInt(value); -// db.writeUnsignedInt(value, ByteOrder.LITTLE_ENDIAN); -// db.reset(); -// final Bytes bytes = db.readBytes(length); -// assertEquals(value, bytes.getUnsignedInt(0)); -// assertEquals(value, bytes.getUnsignedInt(Integer.BYTES, ByteOrder.LITTLE_ENDIAN)); -// } -// -// static Stream longsTestCases() { -// return Stream.of(Long.MIN_VALUE, Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Long.MAX_VALUE).map(Number::longValue); -// } -// @ParameterizedTest -// @MethodSource("longsTestCases") -// void longTest(Long value) { -// final int length = Long.BYTES*2; -// DataBuffer db = DataBuffer.allocate(length, false); -// db.writeLong(value); -// db.writeLong(value, ByteOrder.LITTLE_ENDIAN); -// db.reset(); -// final Bytes 
bytes = db.readBytes(length); -// assertEquals(value, bytes.getLong(0)); -// assertEquals(value, bytes.getLong(Long.BYTES, ByteOrder.LITTLE_ENDIAN)); -// } -// -// @ParameterizedTest -// @MethodSource("longsTestCases") -// void varLongTest(Long value) { -// DataBuffer db = DataBuffer.allocate(20, false); -// db.writeVarLong(value, false); -// final int varInt1Size = (int)db.position(); -// db.writeVarLong(value, true); -// db.flip(); -// final Bytes bytes = db.readBytes((int)db.remaining()); -// assertEquals(value, bytes.getVarLong(0, false)); -// assertEquals(value, bytes.getVarLong(varInt1Size, true)); -// } -// -// static Stream floatsTestCases() { -// return Stream.of(Float.MIN_VALUE, Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Float.MAX_VALUE).map(Number::floatValue); -// } -// @ParameterizedTest -// @MethodSource("floatsTestCases") -// void floatTest(Float value) { -// final int length = Float.BYTES*2; -// DataBuffer db = DataBuffer.allocate(length, false); -// db.writeFloat(value); -// db.writeFloat(value, ByteOrder.LITTLE_ENDIAN); -// db.reset(); -// final Bytes bytes = db.readBytes(length); -// assertEquals(value, bytes.getFloat(0)); -// assertEquals(value, bytes.getFloat(Float.BYTES, ByteOrder.LITTLE_ENDIAN)); -// } -// -// static Stream doublesTestCases() { -// return Stream.of(Double.MIN_VALUE, Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Double.MAX_VALUE).map(Number::doubleValue); -// } -// -// @ParameterizedTest -// @MethodSource("doublesTestCases") -// void doubleTest(Double value) { -// final int length = Double.BYTES * 2; -// DataBuffer db = DataBuffer.allocate(length, false); -// db.writeDouble(value); -// db.writeDouble(value, ByteOrder.LITTLE_ENDIAN); -// db.reset(); -// final Bytes bytes = db.readBytes(length); -// assertEquals(value, bytes.getDouble(0)); -// assertEquals(value, bytes.getDouble(Double.BYTES, ByteOrder.LITTLE_ENDIAN)); -// } + // + // + // + // + // static Stream bytesTestCases() { + // return Stream.of(Byte.MIN_VALUE,-100,-66,-7,-1,0,1,9,51,101,Byte.MAX_VALUE).map(Number::byteValue); + // } + // + // @ParameterizedTest + // @MethodSource("bytesTestCases") + // void byteTest(Byte value) { + // final int length = Byte.BYTES; + // DataBuffer db = DataBuffer.allocate(length, false); + // db.writeByte(value); + // db.reset(); + // final Bytes bytes = db.readBytes(length); + // assertEquals(value, bytes.getByte(0)); + // } + // + // static Stream unsignedBytesTestCases() { + // return Stream.of(0,1,9,51,101,127,128,255).map(Number::intValue); + // } + // + // @ParameterizedTest + // @MethodSource("unsignedBytesTestCases") + // void unsignedByteTest(Integer value) { + // final int length = Byte.BYTES; + // DataBuffer db = DataBuffer.allocate(length, false); + // db.writeUnsignedByte(value); + // db.reset(); + // final Bytes bytes = db.readBytes(length); + // assertEquals(value, bytes.getUnsignedByte(0)); + // } + // + // static Stream intsTestCases() { + // return Stream.of(Integer.MIN_VALUE,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE).map(Number::intValue); + // } + // + // @ParameterizedTest + // @MethodSource("intsTestCases") + // void intTest(Integer value) { + // final int length = Integer.BYTES*2; + // DataBuffer db = DataBuffer.allocate(length, false); + // db.writeInt(value); + // db.writeInt(value, ByteOrder.LITTLE_ENDIAN); + // db.reset(); + // final Bytes bytes = db.readBytes(length); + // assertEquals(value, bytes.getInt(0)); + // assertEquals(value, bytes.getInt(Integer.BYTES, 
ByteOrder.LITTLE_ENDIAN)); + // } + // + // @ParameterizedTest + // @MethodSource("intsTestCases") + // void varIntTest(Integer value) { + // DataBuffer db = DataBuffer.allocate(20, false); + // db.writeVarInt(value, false); + // final int varInt1Size = (int)db.position(); + // db.writeVarInt(value, true); + // db.flip(); + // final Bytes bytes = db.readBytes((int)db.remaining()); + // assertEquals(value, bytes.getVarInt(0, false)); + // assertEquals(value, bytes.getVarInt(varInt1Size, true)); + // } + // + // static Stream unsignedIntsTestCases() { + // return Stream.of(0,1,9,51,127,Integer.MAX_VALUE*2L).map(Number::longValue); + // } + // + // @ParameterizedTest + // @MethodSource("unsignedIntsTestCases") + // void unsignedIntTest(Long value) { + // final int length = Integer.BYTES*2; + // DataBuffer db = DataBuffer.allocate(length, false); + // db.writeUnsignedInt(value); + // db.writeUnsignedInt(value, ByteOrder.LITTLE_ENDIAN); + // db.reset(); + // final Bytes bytes = db.readBytes(length); + // assertEquals(value, bytes.getUnsignedInt(0)); + // assertEquals(value, bytes.getUnsignedInt(Integer.BYTES, ByteOrder.LITTLE_ENDIAN)); + // } + // + // static Stream longsTestCases() { + // return Stream.of(Long.MIN_VALUE, + // Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Long.MAX_VALUE).map(Number::longValue); + // } + // @ParameterizedTest + // @MethodSource("longsTestCases") + // void longTest(Long value) { + // final int length = Long.BYTES*2; + // DataBuffer db = DataBuffer.allocate(length, false); + // db.writeLong(value); + // db.writeLong(value, ByteOrder.LITTLE_ENDIAN); + // db.reset(); + // final Bytes bytes = db.readBytes(length); + // assertEquals(value, bytes.getLong(0)); + // assertEquals(value, bytes.getLong(Long.BYTES, ByteOrder.LITTLE_ENDIAN)); + // } + // + // @ParameterizedTest + // @MethodSource("longsTestCases") + // void varLongTest(Long value) { + // DataBuffer db = DataBuffer.allocate(20, false); + // db.writeVarLong(value, false); + // final int varInt1Size = (int)db.position(); + // db.writeVarLong(value, true); + // db.flip(); + // final Bytes bytes = db.readBytes((int)db.remaining()); + // assertEquals(value, bytes.getVarLong(0, false)); + // assertEquals(value, bytes.getVarLong(varInt1Size, true)); + // } + // + // static Stream floatsTestCases() { + // return Stream.of(Float.MIN_VALUE, + // Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Float.MAX_VALUE).map(Number::floatValue); + // } + // @ParameterizedTest + // @MethodSource("floatsTestCases") + // void floatTest(Float value) { + // final int length = Float.BYTES*2; + // DataBuffer db = DataBuffer.allocate(length, false); + // db.writeFloat(value); + // db.writeFloat(value, ByteOrder.LITTLE_ENDIAN); + // db.reset(); + // final Bytes bytes = db.readBytes(length); + // assertEquals(value, bytes.getFloat(0)); + // assertEquals(value, bytes.getFloat(Float.BYTES, ByteOrder.LITTLE_ENDIAN)); + // } + // + // static Stream doublesTestCases() { + // return Stream.of(Double.MIN_VALUE, + // Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Double.MAX_VALUE).map(Number::doubleValue); + // } + // + // @ParameterizedTest + // @MethodSource("doublesTestCases") + // void doubleTest(Double value) { + // final int length = Double.BYTES * 2; + // DataBuffer db = DataBuffer.allocate(length, false); + // db.writeDouble(value); + // db.writeDouble(value, ByteOrder.LITTLE_ENDIAN); + // db.reset(); + // final Bytes bytes = db.readBytes(length); + // assertEquals(value, 
bytes.getDouble(0)); + // assertEquals(value, bytes.getDouble(Double.BYTES, ByteOrder.LITTLE_ENDIAN)); + // } @Test void malformedVarTest() { @@ -761,7 +762,9 @@ protected RandomAccessData randomAccessData(@NonNull byte[] bytes) { } @ParameterizedTest - @CsvSource(textBlock = """ + @CsvSource( + textBlock = + """ "", "", 0 "a", "", 1 "", "a", -1 @@ -790,18 +793,17 @@ void compareByUnsignedBytes(byte[] arr1, byte[] arr2, int expected) { static Stream compareByUnsignedBytes() { return Stream.of( Arguments.of(new byte[0], new byte[0], 0), - Arguments.of(new byte[0], new byte[]{1}, -1), - Arguments.of(new byte[]{1}, new byte[0], 1), - Arguments.of(new byte[]{1}, new byte[]{2}, -1), - Arguments.of(new byte[]{2}, new byte[]{1}, 1), - Arguments.of(new byte[]{-1}, new byte[]{2}, 253), - Arguments.of(new byte[]{2}, new byte[]{-1}, -253), - Arguments.of(new byte[]{-1}, new byte[]{-2}, 1), - Arguments.of(new byte[]{-2}, new byte[]{-1}, -1), - Arguments.of(new byte[]{-2, -1}, new byte[]{-2, -1}, 0), - Arguments.of(new byte[]{-2}, new byte[]{-2, -1}, -1), - Arguments.of(new byte[]{-2, -1}, new byte[]{-1, -2}, -1) - ); + Arguments.of(new byte[0], new byte[] {1}, -1), + Arguments.of(new byte[] {1}, new byte[0], 1), + Arguments.of(new byte[] {1}, new byte[] {2}, -1), + Arguments.of(new byte[] {2}, new byte[] {1}, 1), + Arguments.of(new byte[] {-1}, new byte[] {2}, 253), + Arguments.of(new byte[] {2}, new byte[] {-1}, -253), + Arguments.of(new byte[] {-1}, new byte[] {-2}, 1), + Arguments.of(new byte[] {-2}, new byte[] {-1}, -1), + Arguments.of(new byte[] {-2, -1}, new byte[] {-2, -1}, 0), + Arguments.of(new byte[] {-2}, new byte[] {-2, -1}, -1), + Arguments.of(new byte[] {-2, -1}, new byte[] {-1, -2}, -1)); } @ParameterizedTest @@ -816,68 +818,67 @@ void compareBySignedBytes(byte[] arr1, byte[] arr2, int expected) { static Stream compareBySignedBytes() { return Stream.of( Arguments.of(new byte[0], new byte[0], 0), - Arguments.of(new byte[0], new byte[]{1}, -1), - Arguments.of(new byte[]{1}, new byte[0], 1), - Arguments.of(new byte[]{1}, new byte[]{2}, -1), - Arguments.of(new byte[]{2}, new byte[]{1}, 1), - Arguments.of(new byte[]{-1}, new byte[]{2}, -3), - Arguments.of(new byte[]{2}, new byte[]{-1}, 3), - Arguments.of(new byte[]{-1}, new byte[]{-2}, 1), - Arguments.of(new byte[]{-2}, new byte[]{-1}, -1), - Arguments.of(new byte[]{-2, -1}, new byte[]{-2, -1}, 0), - Arguments.of(new byte[]{-2}, new byte[]{-2, -1}, -1), - Arguments.of(new byte[]{-2, -1}, new byte[]{-1, -2}, -1) - ); + Arguments.of(new byte[0], new byte[] {1}, -1), + Arguments.of(new byte[] {1}, new byte[0], 1), + Arguments.of(new byte[] {1}, new byte[] {2}, -1), + Arguments.of(new byte[] {2}, new byte[] {1}, 1), + Arguments.of(new byte[] {-1}, new byte[] {2}, -3), + Arguments.of(new byte[] {2}, new byte[] {-1}, 3), + Arguments.of(new byte[] {-1}, new byte[] {-2}, 1), + Arguments.of(new byte[] {-2}, new byte[] {-1}, -1), + Arguments.of(new byte[] {-2, -1}, new byte[] {-2, -1}, 0), + Arguments.of(new byte[] {-2}, new byte[] {-2, -1}, -1), + Arguments.of(new byte[] {-2, -1}, new byte[] {-1, -2}, -1)); } @Test @DisplayName("Appends two Bytes objects") void appendBytes() { - Bytes b1 = Bytes.wrap(new byte[]{0, 1, 2, 3}); - Bytes b2 = Bytes.wrap(new byte[]{4, 5, 6}); + Bytes b1 = Bytes.wrap(new byte[] {0, 1, 2, 3}); + Bytes b2 = Bytes.wrap(new byte[] {4, 5, 6}); Bytes appended = b1.append(b2); byte[] res = new byte[7]; appended.getBytes(0, res); - assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6}, res); + assertArrayEquals(new byte[] {0, 1, 2, 3, 
4, 5, 6}, res); } @Test @DisplayName("Appends two Bytes objects, one empty") void appendEmptyBytes() { - Bytes b1 = Bytes.wrap(new byte[]{0, 1, 2, 3}); + Bytes b1 = Bytes.wrap(new byte[] {0, 1, 2, 3}); Bytes appended = b1.append(Bytes.EMPTY); byte[] res = new byte[4]; appended.getBytes(0, res); - assertArrayEquals(new byte[]{0, 1, 2, 3}, res); + assertArrayEquals(new byte[] {0, 1, 2, 3}, res); } @Test @DisplayName("Appends RandomAccessData") void appendRandomAccessData() { - Bytes b1 = Bytes.wrap(new byte[]{0, 1, 2, 3}); - RandomAccessData rad = BufferedData.wrap(new byte[]{4, 5, 6}); + Bytes b1 = Bytes.wrap(new byte[] {0, 1, 2, 3}); + RandomAccessData rad = BufferedData.wrap(new byte[] {4, 5, 6}); Bytes appended = b1.append(rad); byte[] res = new byte[7]; appended.getBytes(0, res); - assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6}, res); + assertArrayEquals(new byte[] {0, 1, 2, 3, 4, 5, 6}, res); } @Test @DisplayName("Changed toString") void changedToString() { - Bytes b1 = Bytes.wrap(new byte[]{0, 0, (byte)0xFF}); + Bytes b1 = Bytes.wrap(new byte[] {0, 0, (byte) 0xFF}); assertEquals("0000ff", b1.toString()); } @Test @DisplayName("Changed toString2") void changedToString2() { - Bytes b1 = Bytes.wrap(new byte[]{(byte)0x0f, 0, (byte)0x0a}); + Bytes b1 = Bytes.wrap(new byte[] {(byte) 0x0f, 0, (byte) 0x0a}); assertEquals("0f000a", b1.toString()); } @ParameterizedTest - @ValueSource(strings = { "", "a", "ab", "abc", "abc123", "✅" }) + @ValueSource(strings = {"", "a", "ab", "abc", "abc123", "✅"}) @DisplayName("Overridden asUtf8String") void asUtf8StringTest(final String value) { final Bytes bytes = Bytes.wrap(value.getBytes(StandardCharsets.UTF_8)); @@ -905,11 +906,7 @@ private void testWriteToFromOffset( void writeToByteBufferTest() { final ByteBuffer bb = ByteBuffer.allocate(1); - testWriteToFromOffset( - bb, - (b, d) -> b.writeTo(d, 1, 1), - ByteBuffer::position, - d -> d.get(0)); + testWriteToFromOffset(bb, (b, d) -> b.writeTo(d, 1, 1), ByteBuffer::position, d -> d.get(0)); } @Test @@ -922,11 +919,8 @@ public void write(int b) throws IOException { } }; - testWriteToFromOffset( - os, - (b, d) -> b.writeTo(d, 1, 1), - d -> data.size(), - d -> data.get(0).byteValue()); + testWriteToFromOffset(os, (b, d) -> b.writeTo(d, 1, 1), d -> data.size(), d -> data.get(0) + .byteValue()); } @Test @@ -952,7 +946,6 @@ void writeToMessageDigestDataTest() throws NoSuchAlgorithmException { ai.set(md.digest()[0]); }, d -> ai.get() == 0 ? 
0 : 1, - d -> (byte) (ai.get() + 121) - ); + d -> (byte) (ai.get() + 121)); } } diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/RandomAccessTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/RandomAccessTestBase.java index 05331ae8..2fe7d509 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/RandomAccessTestBase.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/RandomAccessTestBase.java @@ -28,17 +28,53 @@ public abstract class RandomAccessTestBase extends ReadableTestBase { protected abstract RandomAccessData randomAccessData(@NonNull final byte[] bytes); static IntStream testIntegers() { - return IntStream.of(Integer.MIN_VALUE, Integer.MIN_VALUE + 1, - -65536, -65535, -101, -9, -1, 0, 1, 4, 59, 255, 1023, 1024, 1025, 10000, - Integer.MAX_VALUE - 1, Integer.MAX_VALUE); + return IntStream.of( + Integer.MIN_VALUE, + Integer.MIN_VALUE + 1, + -65536, + -65535, + -101, + -9, + -1, + 0, + 1, + 4, + 59, + 255, + 1023, + 1024, + 1025, + 10000, + Integer.MAX_VALUE - 1, + Integer.MAX_VALUE); } static LongStream testLongs() { - return LongStream.of(Long.MIN_VALUE, Long.MIN_VALUE + 1, - (long) Integer.MIN_VALUE - 1, Integer.MIN_VALUE, Integer.MIN_VALUE + 1, - -65536, -65535, -101, -9, -1, 0, 1, 4, 59, 255, 1023, 1024, 1025, 10000, - Integer.MAX_VALUE - 1, Integer.MAX_VALUE, (long) Integer.MAX_VALUE + 1, - Long.MAX_VALUE - 1, Long.MAX_VALUE); + return LongStream.of( + Long.MIN_VALUE, + Long.MIN_VALUE + 1, + (long) Integer.MIN_VALUE - 1, + Integer.MIN_VALUE, + Integer.MIN_VALUE + 1, + -65536, + -65535, + -101, + -9, + -1, + 0, + 1, + 4, + 59, + 255, + 1023, + 1024, + 1025, + 10000, + Integer.MAX_VALUE - 1, + Integer.MAX_VALUE, + (long) Integer.MAX_VALUE + 1, + Long.MAX_VALUE - 1, + Long.MAX_VALUE); } @Test @@ -48,7 +84,7 @@ void sliceLength() { } @ParameterizedTest - @ValueSource(strings = { "", "a", "ab", "abc", "✅" }) + @ValueSource(strings = {"", "a", "ab", "abc", "✅"}) void utf8Strings(final String s) { final var buf = randomAccessData(s.getBytes(StandardCharsets.UTF_8)); assertThat(buf.asUtf8String()).isEqualTo(s); @@ -83,29 +119,36 @@ void getBytesExtraDstLength() { @Test void matchesPrefixByteArray() { - final RandomAccessData data = randomAccessData(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09}); - - assertTrue(data.matchesPrefix(new byte[]{0x01})); - assertTrue(data.matchesPrefix(new byte[]{0x01,0x02})); - assertTrue(data.matchesPrefix(new byte[]{0x01,0x02,0x03,0x04,})); - assertTrue(data.matchesPrefix(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09})); - - assertFalse(data.matchesPrefix(new byte[]{0x02})); - assertFalse(data.matchesPrefix(new byte[]{0x01,0x02,0x03,0x02})); - assertFalse(data.matchesPrefix(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x00})); + final RandomAccessData data = + randomAccessData(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09}); + + assertTrue(data.matchesPrefix(new byte[] {0x01})); + assertTrue(data.matchesPrefix(new byte[] {0x01, 0x02})); + assertTrue(data.matchesPrefix(new byte[] { + 0x01, 0x02, 0x03, 0x04, + })); + assertTrue(data.matchesPrefix(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09})); + + assertFalse(data.matchesPrefix(new byte[] {0x02})); + assertFalse(data.matchesPrefix(new byte[] {0x01, 0x02, 0x03, 0x02})); + assertFalse(data.matchesPrefix(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x00})); } @Test void matchesPrefixBytes() { - final 
RandomAccessData data = randomAccessData(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09}); - assertTrue(data.matchesPrefix(Bytes.wrap(new byte[]{0x01}))); - assertTrue(data.matchesPrefix(Bytes.wrap(new byte[]{0x01,0x02}))); - assertTrue(data.matchesPrefix(Bytes.wrap(new byte[]{0x01,0x02,0x03,0x04,}))); - assertTrue(data.matchesPrefix(Bytes.wrap(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09}))); - - assertFalse(data.matchesPrefix(Bytes.wrap(new byte[]{0x02}))); - assertFalse(data.matchesPrefix(Bytes.wrap(new byte[]{0x01,0x02,0x03,0x02}))); - assertFalse(data.matchesPrefix(Bytes.wrap(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x00}))); + final RandomAccessData data = + randomAccessData(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09}); + assertTrue(data.matchesPrefix(Bytes.wrap(new byte[] {0x01}))); + assertTrue(data.matchesPrefix(Bytes.wrap(new byte[] {0x01, 0x02}))); + assertTrue(data.matchesPrefix(Bytes.wrap(new byte[] { + 0x01, 0x02, 0x03, 0x04, + }))); + assertTrue(data.matchesPrefix(Bytes.wrap(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09}))); + + assertFalse(data.matchesPrefix(Bytes.wrap(new byte[] {0x02}))); + assertFalse(data.matchesPrefix(Bytes.wrap(new byte[] {0x01, 0x02, 0x03, 0x02}))); + assertFalse(data.matchesPrefix( + Bytes.wrap(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x00}))); } @Test @@ -117,45 +160,45 @@ void matchesPrefixEmpty_issue37() { @Test void containsZeroOffset() { - final RandomAccessData data = randomAccessData(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06}); - assertTrue(data.contains(0, new byte[]{0x01})); - assertTrue(data.contains(0, new byte[]{0x01,0x02})); - assertTrue(data.contains(0, new byte[]{0x01,0x02,0x03,0x04,0x05,0x06})); - assertFalse(data.contains(0, new byte[]{0x01,0x02,0x02})); - assertFalse(data.contains(0, new byte[]{0x02,0x02})); - assertFalse(data.contains(0, new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07})); + final RandomAccessData data = randomAccessData(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06}); + assertTrue(data.contains(0, new byte[] {0x01})); + assertTrue(data.contains(0, new byte[] {0x01, 0x02})); + assertTrue(data.contains(0, new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06})); + assertFalse(data.contains(0, new byte[] {0x01, 0x02, 0x02})); + assertFalse(data.contains(0, new byte[] {0x02, 0x02})); + assertFalse(data.contains(0, new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07})); final RandomAccessData slice = data.slice(1, 4); - assertTrue(slice.contains(0, new byte[]{0x02})); - assertTrue(slice.contains(0, new byte[]{0x02,0x03})); - assertTrue(slice.contains(0, new byte[]{0x02,0x03,0x04,0x05})); - assertFalse(slice.contains(0, new byte[]{0x01})); - assertFalse(slice.contains(0, new byte[]{0x02,0x02})); - assertFalse(slice.contains(0, new byte[]{0x02,0x03,0x04,0x05,0x06})); + assertTrue(slice.contains(0, new byte[] {0x02})); + assertTrue(slice.contains(0, new byte[] {0x02, 0x03})); + assertTrue(slice.contains(0, new byte[] {0x02, 0x03, 0x04, 0x05})); + assertFalse(slice.contains(0, new byte[] {0x01})); + assertFalse(slice.contains(0, new byte[] {0x02, 0x02})); + assertFalse(slice.contains(0, new byte[] {0x02, 0x03, 0x04, 0x05, 0x06})); } @Test void containsNonZeroOffset() { - final RandomAccessData data = randomAccessData(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06}); - assertTrue(data.contains(1, new byte[]{0x02})); - assertTrue(data.contains(1, new byte[]{0x02,0x03})); - assertTrue(data.contains(1, new byte[]{0x02,0x03,0x04,0x05,0x06})); 
- assertFalse(data.contains(1, new byte[]{0x02,0x03,0x03})); - assertFalse(data.contains(1, new byte[]{0x03,0x03})); - assertFalse(data.contains(1, new byte[]{0x02,0x03,0x04,0x05,0x06,0x07})); + final RandomAccessData data = randomAccessData(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06}); + assertTrue(data.contains(1, new byte[] {0x02})); + assertTrue(data.contains(1, new byte[] {0x02, 0x03})); + assertTrue(data.contains(1, new byte[] {0x02, 0x03, 0x04, 0x05, 0x06})); + assertFalse(data.contains(1, new byte[] {0x02, 0x03, 0x03})); + assertFalse(data.contains(1, new byte[] {0x03, 0x03})); + assertFalse(data.contains(1, new byte[] {0x02, 0x03, 0x04, 0x05, 0x06, 0x07})); final RandomAccessData slice = data.slice(1, 4); - assertTrue(slice.contains(1, new byte[]{0x03})); - assertTrue(slice.contains(1, new byte[]{0x03,0x04})); - assertTrue(slice.contains(1, new byte[]{0x03,0x04,0x05})); - assertFalse(slice.contains(1, new byte[]{0x02})); - assertFalse(slice.contains(1, new byte[]{0x03,0x03})); - assertFalse(slice.contains(1, new byte[]{0x03,0x04,0x05,0x06})); + assertTrue(slice.contains(1, new byte[] {0x03})); + assertTrue(slice.contains(1, new byte[] {0x03, 0x04})); + assertTrue(slice.contains(1, new byte[] {0x03, 0x04, 0x05})); + assertFalse(slice.contains(1, new byte[] {0x02})); + assertFalse(slice.contains(1, new byte[] {0x03, 0x03})); + assertFalse(slice.contains(1, new byte[] {0x03, 0x04, 0x05, 0x06})); } @Test void getInt() { - final RandomAccessData data = randomAccessData(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06}); + final RandomAccessData data = randomAccessData(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06}); assertEquals(0x01020304, data.getInt(0)); assertEquals(0x02030405, data.getInt(1)); @@ -166,7 +209,8 @@ void getInt() { @Test void getLong() { - final RandomAccessData data = randomAccessData(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0A}); + final RandomAccessData data = + randomAccessData(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A}); assertEquals(0x0102030405060708L, data.getLong(0)); assertEquals(0x0203040506070809L, data.getLong(1)); @@ -238,5 +282,4 @@ void getVarLongZigZag(final long num) throws IOException { data = randomAccessData(writtenBytes); assertEquals(num, data.getVarLong(0, true)); } - } diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/StubbedRandomAccessDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/StubbedRandomAccessDataTest.java index 0923ec7c..937ef213 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/StubbedRandomAccessDataTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/StubbedRandomAccessDataTest.java @@ -7,8 +7,6 @@ import java.io.OutputStream; import java.io.UncheckedIOException; -import static org.assertj.core.api.Assertions.assertThat; - public class StubbedRandomAccessDataTest extends RandomAccessTestBase { @NonNull @@ -20,7 +18,8 @@ protected ReadableSequentialData emptySequence() { @NonNull @Override protected ReadableSequentialData fullyUsedSequence() { - final var buf = new RandomAccessSequenceAdapter(new StubbedRandomAccessData(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 })); + final var buf = new RandomAccessSequenceAdapter( + new StubbedRandomAccessData(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10})); buf.skip(10); return buf; } diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingDataTest.java 
b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingDataTest.java index 02557276..1f6a5b05 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingDataTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingDataTest.java @@ -1,34 +1,32 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.runtime.io.stream; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + import com.hedera.pbj.runtime.io.ReadableSequentialData; import com.hedera.pbj.runtime.io.ReadableSequentialTestBase; import com.hedera.pbj.runtime.io.buffer.BufferedData; import edu.umd.cs.findbugs.annotations.NonNull; - import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; +import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; +import java.util.concurrent.atomic.AtomicBoolean; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; -import java.nio.BufferUnderflowException; -import java.nio.charset.StandardCharsets; -import java.util.concurrent.atomic.AtomicBoolean; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.junit.jupiter.api.Assertions.assertArrayEquals; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - final class ReadableStreamingDataTest extends ReadableSequentialTestBase { @NonNull @@ -53,20 +51,29 @@ public int read() throws IOException { private ReadableSequentialData oneByteSequence() { return new ReadableStreamingData(new InputStream() { private int pos = 0; + @Override public int read() throws IOException { switch (pos) { - case 0: pos++; return 7; - case 1: pos++; return -1; - default: throw new IOException("EOF"); + case 0: + pos++; + return 7; + case 1: + pos++; + return -1; + default: + throw new IOException("EOF"); } } @Override public int readNBytes(byte[] b, int off, int len) throws IOException { switch (pos) { - case 0: b[off] = (byte) read(); return 1; - default: return super.readNBytes(b, off, len); + case 0: + b[off] = (byte) read(); + return 1; + default: + return super.readNBytes(b, off, len); } } }); @@ -125,7 +132,7 @@ protected ReadableStreamingData fullyUsedSequence() { @Override @NonNull - protected ReadableStreamingData sequence(@NonNull byte [] arr) { + protected ReadableStreamingData sequence(@NonNull byte[] arr) { final var stream = new ReadableStreamingData(arr); stream.limit(arr.length); return stream; @@ -147,8 +154,7 @@ void closedStreamHasNoBytesRemaining() { void closedStreamCannotBeRead() { try (var stream = sequence("0123456789".getBytes(StandardCharsets.UTF_8))) { stream.close(); - assertThatThrownBy(stream::readByte) - .isInstanceOf(BufferUnderflowException.class); + assertThatThrownBy(stream::readByte).isInstanceOf(BufferUnderflowException.class); } } @@ -158,15 +164,14 @@ void closeTwice() { try (var stream = 
sequence("0123456789".getBytes(StandardCharsets.UTF_8))) { stream.close(); stream.close(); - assertThatThrownBy(stream::readByte) - .isInstanceOf(BufferUnderflowException.class); + assertThatThrownBy(stream::readByte).isInstanceOf(BufferUnderflowException.class); } } @Test @DisplayName("Bad InputStream will fail on skip") void inputStreamFailsDuringSkip() { - final var byteStream = new ByteArrayInputStream(new byte[] { 1, 2, 3, 4, 5, 6, 7 }); + final var byteStream = new ByteArrayInputStream(new byte[] {1, 2, 3, 4, 5, 6, 7}); final var inputStream = new BufferedInputStream(byteStream) { @Override public synchronized long skip(long n) throws IOException { @@ -175,15 +180,14 @@ public synchronized long skip(long n) throws IOException { }; final var stream = new ReadableStreamingData(inputStream); - assertThatThrownBy(() -> stream.skip(5)) - .isInstanceOf(UncheckedIOException.class); + assertThatThrownBy(() -> stream.skip(5)).isInstanceOf(UncheckedIOException.class); } @Test @DisplayName("Bad InputStream will fail on read") void inputStreamFailsDuringRead() { final var throwNow = new AtomicBoolean(false); - final var byteStream = new ByteArrayInputStream(new byte[] { 1, 2, 3, 4, 5, 6, 7 }); + final var byteStream = new ByteArrayInputStream(new byte[] {1, 2, 3, 4, 5, 6, 7}); final var inputStream = new BufferedInputStream(byteStream) { @Override public int read() throws IOException { @@ -199,8 +203,7 @@ public int read() throws IOException { stream.skip(5); throwNow.set(true); - assertThatThrownBy(stream::readByte) - .isInstanceOf(UncheckedIOException.class); + assertThatThrownBy(stream::readByte).isInstanceOf(UncheckedIOException.class); } @Test @@ -239,8 +242,7 @@ public void close() throws IOException { @Test @DisplayName("Bad InputStream empty when read") void inputStreamEmptyReadVarLong() { - final var inputStream = new ByteArrayInputStream(new byte[] { - (byte) 128, (byte) 129, (byte) 130, (byte) 131}); + final var inputStream = new ByteArrayInputStream(new byte[] {(byte) 128, (byte) 129, (byte) 130, (byte) 131}); final var stream = new ReadableStreamingData(inputStream); @@ -249,8 +251,7 @@ void inputStreamEmptyReadVarLong() { @Test void incompleteStreamToByteBuffer() { - final var inputStream = new ByteArrayInputStream(new byte[] { - (byte) 128, (byte) 129, (byte) 130, (byte) 131}); + final var inputStream = new ByteArrayInputStream(new byte[] {(byte) 128, (byte) 129, (byte) 130, (byte) 131}); final var stream = new TestReadeableSequentialData(new ReadableStreamingData(inputStream)); ByteBuffer buffer = ByteBuffer.allocate(8); @@ -260,8 +261,7 @@ void incompleteStreamToByteBuffer() { @Test void incompleteStreamToBufferedData() { - final var inputStream = new ByteArrayInputStream(new byte[] { - (byte) 128, (byte) 129, (byte) 130, (byte) 131}); + final var inputStream = new ByteArrayInputStream(new byte[] {(byte) 128, (byte) 129, (byte) 130, (byte) 131}); final var stream = new TestReadeableSequentialData(new ReadableStreamingData(inputStream)); stream.limit(8); @@ -273,7 +273,7 @@ void incompleteStreamToBufferedData() { @Test @DisplayName("Reusing an input stream on two ReadableStreamingData does not lose any data") void reuseStream() { - final var byteStream = new ByteArrayInputStream(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); + final var byteStream = new ByteArrayInputStream(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); final var bytes1 = new byte[5]; final var stream1 = new ReadableStreamingData(byteStream); diff --git 
a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/WritableStreamingDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/WritableStreamingDataTest.java index a9714f74..f7a39fad 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/WritableStreamingDataTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/WritableStreamingDataTest.java @@ -1,20 +1,6 @@ // SPDX-License-Identifier: Apache-2.0 package com.hedera.pbj.runtime.io.stream; -import com.hedera.pbj.runtime.io.WritableSequentialData; -import com.hedera.pbj.runtime.io.WritableTestBase; -import com.hedera.pbj.runtime.io.buffer.RandomAccessData; -import edu.umd.cs.findbugs.annotations.NonNull; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.io.UncheckedIOException; -import java.nio.charset.StandardCharsets; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -28,6 +14,21 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; +import com.hedera.pbj.runtime.io.WritableSequentialData; +import com.hedera.pbj.runtime.io.WritableTestBase; +import com.hedera.pbj.runtime.io.buffer.RandomAccessData; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.io.UncheckedIOException; +import java.nio.charset.StandardCharsets; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + public class WritableStreamingDataTest extends WritableTestBase { private ByteArrayOutputStream out; @@ -73,7 +74,7 @@ void specifiedCapacity() throws IOException { } @ParameterizedTest - @ValueSource(ints = { -1, 0, 2, 1024, 1025, 2048, 3000 }) + @ValueSource(ints = {-1, 0, 2, 1024, 1025, 2048, 3000}) @DisplayName("Skip inserts empty bytes into the output stream") void skip(final int numBytesToSkip) { // Given a sequence @@ -141,5 +142,4 @@ void testWriteBytesFastPath() { assertEquals(10L, seq.position()); } - } diff --git a/pbj-core/pbj-runtime/src/test/java/tests/ComparableOneOfTest.java b/pbj-core/pbj-runtime/src/test/java/tests/ComparableOneOfTest.java index b15516d2..ba406b8b 100644 --- a/pbj-core/pbj-runtime/src/test/java/tests/ComparableOneOfTest.java +++ b/pbj-core/pbj-runtime/src/test/java/tests/ComparableOneOfTest.java @@ -1,14 +1,14 @@ // SPDX-License-Identifier: Apache-2.0 package tests; -import com.hedera.pbj.runtime.ComparableOneOf; -import com.hedera.pbj.runtime.EnumWithProtoMetadata; -import org.junit.jupiter.api.Test; - import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; +import com.hedera.pbj.runtime.ComparableOneOf; +import com.hedera.pbj.runtime.EnumWithProtoMetadata; +import org.junit.jupiter.api.Test; + class ComparableOneOfTest { @Test void nullNameIsOK() { @@ -30,8 +30,8 @@ void 
asReturnsValue() { @Test void hashCodeReturnsHashCode() { final var oneOf = new ComparableOneOf<>(TestEnum.KIND1, "Value"); - assertEquals((31 + Integer.hashCode(TestEnum.KIND1.protoOrdinal())) * 31 - + "Value".hashCode(), oneOf.hashCode()); + assertEquals( + (31 + Integer.hashCode(TestEnum.KIND1.protoOrdinal())) * 31 + "Value".hashCode(), oneOf.hashCode()); } @Test @@ -63,5 +63,4 @@ public String protoName() { return name(); } } - } diff --git a/pbj-core/pbj-runtime/src/test/java/tests/FieldDefinitionTest.java b/pbj-core/pbj-runtime/src/test/java/tests/FieldDefinitionTest.java index 25069987..31e4b87d 100644 --- a/pbj-core/pbj-runtime/src/test/java/tests/FieldDefinitionTest.java +++ b/pbj-core/pbj-runtime/src/test/java/tests/FieldDefinitionTest.java @@ -1,28 +1,25 @@ // SPDX-License-Identifier: Apache-2.0 package tests; +import static org.junit.jupiter.api.Assertions.assertThrows; + import com.hedera.pbj.runtime.FieldDefinition; import com.hedera.pbj.runtime.FieldType; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.assertThrows; - class FieldDefinitionTest { @Test void nullNameThrows() { - assertThrows(NullPointerException.class, () -> - new FieldDefinition(null, FieldType.STRING, false, 1)); + assertThrows(NullPointerException.class, () -> new FieldDefinition(null, FieldType.STRING, false, 1)); } @Test void nullTypeThrows() { - assertThrows(NullPointerException.class, () -> - new FieldDefinition("Name", null, false, 1)); + assertThrows(NullPointerException.class, () -> new FieldDefinition("Name", null, false, 1)); } @Test void negativeNumberThrows() { - assertThrows(IllegalArgumentException.class, () -> - new FieldDefinition("Name", FieldType.STRING, false, -1)); + assertThrows(IllegalArgumentException.class, () -> new FieldDefinition("Name", FieldType.STRING, false, -1)); } } diff --git a/pbj-core/pbj-runtime/src/test/java/tests/FuzzTest.java b/pbj-core/pbj-runtime/src/test/java/tests/FuzzTest.java index e200715f..425a9893 100644 --- a/pbj-core/pbj-runtime/src/test/java/tests/FuzzTest.java +++ b/pbj-core/pbj-runtime/src/test/java/tests/FuzzTest.java @@ -2,6 +2,6 @@ package tests; public class FuzzTest { - // Need a test where we take an arbitrary array of bytes from length 1 to 10,000 - // with arbitrary values and send it to a parser. It should always throw an exception. + // Need a test where we take an arbitrary array of bytes from length 1 to 10,000 + // with arbitrary values and send it to a parser. It should always throw an exception. 
} diff --git a/pbj-core/pbj-runtime/src/test/java/tests/OneOfTest.java b/pbj-core/pbj-runtime/src/test/java/tests/OneOfTest.java index 5e3055c7..a0dac254 100644 --- a/pbj-core/pbj-runtime/src/test/java/tests/OneOfTest.java +++ b/pbj-core/pbj-runtime/src/test/java/tests/OneOfTest.java @@ -1,16 +1,14 @@ // SPDX-License-Identifier: Apache-2.0 package tests; -import com.hedera.pbj.runtime.EnumWithProtoMetadata; -import com.hedera.pbj.runtime.OneOf; -import org.junit.jupiter.api.Test; - -import java.util.Objects; - import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; +import com.hedera.pbj.runtime.EnumWithProtoMetadata; +import com.hedera.pbj.runtime.OneOf; +import org.junit.jupiter.api.Test; + class OneOfTest { @Test void nullNameIsOK() { @@ -32,7 +30,8 @@ void asReturnsValue() { @Test void hashCodeReturnsHashCode() { final var oneOf = new OneOf<>(TestEnum.KIND1, "Value"); - assertEquals((31 + Integer.hashCode(TestEnum.KIND1.protoOrdinal())) * 31 + "Value".hashCode(), oneOf.hashCode()); + assertEquals( + (31 + Integer.hashCode(TestEnum.KIND1.protoOrdinal())) * 31 + "Value".hashCode(), oneOf.hashCode()); } @Test @@ -64,5 +63,4 @@ public String protoName() { return name(); } } - }
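Editor's note: the TODO in FuzzTest.java above describes the intended fuzz test but leaves it unimplemented. The sketch below shows one way such a harness could look, under stated assumptions: Codec and BufferedData are taken to be the existing pbj-runtime types with their usual parse/wrap signatures, while the codec instance itself is a placeholder that a real test would obtain from PBJ-generated model code. The blanket Exception assertion simply mirrors the TODO's wording that arbitrary input should always throw; this is a sketch, not the project's implementation.

// A minimal sketch of the fuzz harness described in the FuzzTest TODO above.
// Assumptions: com.hedera.pbj.runtime.Codec exposes parse(ReadableSequentialData),
// and BufferedData.wrap(byte[]) provides a readable view of the array. The codec
// passed in is a placeholder for whichever PBJ-generated codec the real test targets.
package tests;

import static org.junit.jupiter.api.Assertions.assertThrows;

import com.hedera.pbj.runtime.Codec;
import com.hedera.pbj.runtime.io.buffer.BufferedData;
import java.util.Random;

final class FuzzHarnessSketch {

    private FuzzHarnessSketch() {}

    /**
     * Feeds 1,000 arbitrary byte arrays, each of length 1 to 10,000, to the given codec
     * and asserts that every one is rejected with an exception, as the TODO expects.
     */
    static void assertArbitraryBytesRejected(final Codec<?> codec) {
        final Random random = new Random(1234L); // fixed seed keeps any failing input reproducible
        for (int i = 0; i < 1_000; i++) {
            final byte[] garbage = new byte[1 + random.nextInt(10_000)];
            random.nextBytes(garbage);
            assertThrows(Exception.class, () -> codec.parse(BufferedData.wrap(garbage)));
        }
    }
}

A concrete FuzzTest would call this helper once per generated codec. Note that the TODO's claim that every random array must fail to parse is itself an assumption: some byte sequences are coincidentally valid protobuf (for example, well-formed unknown fields), so a real test may need to constrain or curate its inputs rather than rely on purely random data.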