From 9e78226a72369a79b513ce98fd37aa9546608bb5 Mon Sep 17 00:00:00 2001 From: jasperpotts Date: Fri, 13 Jan 2023 14:49:32 -0800 Subject: [PATCH 1/5] Performance Work half done Signed-off-by: jasperpotts --- ...a.pbj.spotless-java-conventions.gradle.kts | 2 +- pbj-core/pbj-runtime/build.gradle.kts | 2 + .../pbj/runtime/ProtoParserTools.java | 10 +- .../hashgraph/pbj/runtime/ProtoTestTools.java | 4 +- .../pbj/runtime/ProtoWriterTools.java | 107 ++- .../pbj/runtime/io/ByteOverByteBuffer.java | 16 + .../hashgraph/pbj/runtime/io/DataBuffer.java | 28 +- .../hashgraph/pbj/runtime/io/DataInput.java | 13 + .../pbj/runtime/io/DataInputStream.java | 13 + .../pbj/runtime/io/OffHeapDataBuffer.java | 830 +++++++++++++++++- .../src/main/java/module-info.java | 5 +- .../hashgraph/pbj/runtime/io/DataTest.java | 4 +- pbj-integration-tests/build.gradle.kts | 5 +- .../pbj/intergration/jmh/EverythingBench.java | 11 +- .../pbj/intergration/jmh/TimestampBench.java | 132 ++- .../integration/EverythingWriterPerfTest.java | 38 +- 16 files changed, 1140 insertions(+), 80 deletions(-) diff --git a/pbj-core/buildSrc/src/main/kotlin/com.hedera.pbj.spotless-java-conventions.gradle.kts b/pbj-core/buildSrc/src/main/kotlin/com.hedera.pbj.spotless-java-conventions.gradle.kts index 33bf85e3..f9c44b11 100644 --- a/pbj-core/buildSrc/src/main/kotlin/com.hedera.pbj.spotless-java-conventions.gradle.kts +++ b/pbj-core/buildSrc/src/main/kotlin/com.hedera.pbj.spotless-java-conventions.gradle.kts @@ -25,7 +25,7 @@ spotless { toggleOffOn() // don't need to set target, it is inferred from java // apply a specific flavor of google-java-format - googleJavaFormat("1.15.0").aosp().reflowLongStrings() + palantirJavaFormat() // make sure every file has the following copyright header. // optionally, Spotless can set copyright years by digging // through git history (see "license" section below). 
diff --git a/pbj-core/pbj-runtime/build.gradle.kts b/pbj-core/pbj-runtime/build.gradle.kts index 58bddcc9..ce1c5cd1 100644 --- a/pbj-core/pbj-runtime/build.gradle.kts +++ b/pbj-core/pbj-runtime/build.gradle.kts @@ -23,6 +23,8 @@ plugins { } dependencies { + implementation("org.eclipse.collections:eclipse-collections-api:11.1.0") + implementation("org.eclipse.collections:eclipse-collections:11.1.0") testImplementation(testLibs.bundles.protobuf) testImplementation(testLibs.bundles.junit) testRuntimeOnly(testLibs.junit.jupiter.engine) diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/ProtoParserTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/ProtoParserTools.java index f2c6880f..1c7e8a2d 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/ProtoParserTools.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/ProtoParserTools.java @@ -2,6 +2,8 @@ import com.hedera.hashgraph.pbj.runtime.io.Bytes; import com.hedera.hashgraph.pbj.runtime.io.DataInput; +import org.eclipse.collections.impl.list.mutable.FastList; +import org.eclipse.collections.impl.list.mutable.primitive.IntArrayList; import java.io.IOException; import java.nio.ByteOrder; @@ -44,7 +46,11 @@ private ProtoParserTools() {} */ public static List addToList(List list, T newItem) { if (list == Collections.EMPTY_LIST) { - list = new ArrayList<>(); +// if (newItem instanceof Integer) { +// list = new FastList(); +// } +// list = new ArrayList<>(20); + list = new FastList<>(20); } list.add(newItem); return list; @@ -217,9 +223,11 @@ public static double readDouble(final DataInput input) throws IOException { */ public static String readString(final DataInput input) throws IOException { final int length = input.readVarInt(false); + if (length == 0) return ""; byte[] bytes = new byte[length]; input.readBytes(bytes); return new String(bytes,StandardCharsets.UTF_8); +// return input.readUtf8String(length); 
} /** diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/ProtoTestTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/ProtoTestTools.java index 86e3bd4b..02517eca 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/ProtoTestTools.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/ProtoTestTools.java @@ -25,11 +25,11 @@ public final class ProtoTestTools { private ProtoTestTools() {} /** Thread local set of reusable buffers */ private static final ThreadLocal THREAD_LOCAL_BUFFERS = - ThreadLocal.withInitial(() -> DataBuffer.allocate(BUFFER_SIZE, false)); + ThreadLocal.withInitial(() -> DataBuffer.allocate(BUFFER_SIZE, true)); /** Thread local set of reusable buffers, second buffer for each thread */ private static final ThreadLocal THREAD_LOCAL_BUFFERS_2 = - ThreadLocal.withInitial(() -> DataBuffer.allocate(BUFFER_SIZE, false)); + ThreadLocal.withInitial(() -> DataBuffer.allocate(BUFFER_SIZE, true)); /** * Get the thread local instance of DataBuffer, reset and ready to use. 
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/ProtoWriterTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/ProtoWriterTools.java index 3cfa8694..7ad98052 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/ProtoWriterTools.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/ProtoWriterTools.java @@ -17,7 +17,7 @@ /** * Static helper methods for Writers */ -@SuppressWarnings({"DuplicatedCode", "OptionalUsedAsFieldOrParameterType"}) +@SuppressWarnings({"DuplicatedCode", "OptionalUsedAsFieldOrParameterType", "ForLoopReplaceableByForEach"}) public final class ProtoWriterTools { /** The number of leading bits of the tag that are used to store field type, the rest is field number */ @@ -226,7 +226,7 @@ public static void writeString(DataOutput out, FieldDefinition field, String val */ private static void writeStringNoChecks(DataOutput out, FieldDefinition field, String value) throws IOException { // When not a oneOf don't write default value - if (!field.oneOf() && (value == null || value.isBlank())) { + if (!field.oneOf() && (value == null || value.isEmpty())) { return; } writeTag(out, field, WIRE_TYPE_DELIMITED); @@ -464,38 +464,45 @@ assert switch(field.type()) { return; } + final int listSize = list.size(); switch (field.type()) { case INT32 -> { int size = 0; - for (final int i : list) { - size += sizeOfVarInt32(i); + for (int i = 0; i < listSize; i++) { + final int value = list.get(i); + size += sizeOfVarInt32(value); } writeTag(out, field, WIRE_TYPE_DELIMITED); out.writeVarInt(size, false); - for (final int i : list) { - out.writeVarInt(i, false); + for (int i = 0; i < list.size(); i++) { + final int value = list.get(i); + out.writeVarInt(value, false); } } case UINT32 -> { int size = 0; - for (final int i : list) { - size += sizeOfUnsignedVarInt64(Integer.toUnsignedLong(i)); + for (int i = 0; i < listSize; i++) { + final int value = 
list.get(i); + size += sizeOfUnsignedVarInt64(Integer.toUnsignedLong(value)); } writeTag(out, field, WIRE_TYPE_DELIMITED); out.writeVarInt(size, false); - for (final int i : list) { - out.writeVarLong(Integer.toUnsignedLong(i), false); + for (int i = 0; i < listSize; i++) { + final int value = list.get(i); + out.writeVarLong(Integer.toUnsignedLong(value), false); } } case SINT32 -> { int size = 0; - for (final int i : list) { - size += sizeOfUnsignedVarInt64(((long)i << 1) ^ ((long)i >> 63)); + for (int i = 0; i < listSize; i++) { + final int value = list.get(i); + size += sizeOfUnsignedVarInt64(((long)value << 1) ^ ((long)value >> 63)); } writeTag(out, field, WIRE_TYPE_DELIMITED); out.writeVarInt(size, false); - for (final int i : list) { - out.writeVarInt(i, true); + for (int i = 0; i < listSize; i++) { + final int value = list.get(i); + out.writeVarInt(value, true); } } case SFIXED32, FIXED32 -> { @@ -503,8 +510,9 @@ assert switch(field.type()) { // Smallest byte first. writeTag(out, field, WIRE_TYPE_DELIMITED); out.writeVarLong((long)list.size() * FIXED32_SIZE, false); - for (final int i : list) { - out.writeInt(i, ByteOrder.LITTLE_ENDIAN); + for (int i = 0; i < listSize; i++) { + final int value = list.get(i); + out.writeInt(value, ByteOrder.LITTLE_ENDIAN); } } default -> throw unsupported(); @@ -531,27 +539,32 @@ assert switch(field.type()) { return; } + final int listSize = list.size(); switch (field.type()) { case INT64, UINT64 -> { int size = 0; - for (final long i : list) { - size += sizeOfUnsignedVarInt64(i); + for (int i = 0; i < listSize; i++) { + final long value = list.get(i); + size += sizeOfUnsignedVarInt64(value); } writeTag(out, field, WIRE_TYPE_DELIMITED); out.writeVarInt(size, false); - for (final long i : list) { - out.writeVarLong(i, false); + for (int i = 0; i < listSize; i++) { + final long value = list.get(i); + out.writeVarLong(value, false); } } case SINT64 -> { int size = 0; - for (final long i : list) { - size += 
sizeOfUnsignedVarInt64((i << 1) ^ (i >> 63)); + for (int i = 0; i < listSize; i++) { + final long value = list.get(i); + size += sizeOfUnsignedVarInt64((value << 1) ^ (value >> 63)); } writeTag(out, field, WIRE_TYPE_DELIMITED); out.writeVarInt(size, false); - for (final long i : list) { - out.writeVarLong(i, true); + for (int i = 0; i < listSize; i++) { + final long value = list.get(i); + out.writeVarLong(value, true); } } case SFIXED64, FIXED64 -> { @@ -559,8 +572,9 @@ assert switch(field.type()) { // Smallest byte first. writeTag(out, field, WIRE_TYPE_DELIMITED); out.writeVarLong((long)list.size() * FIXED64_SIZE, false); - for (final long i : list) { - out.writeLong(i, ByteOrder.LITTLE_ENDIAN); + for (int i = 0; i < listSize; i++) { + final long value = list.get(i); + out.writeLong(value, ByteOrder.LITTLE_ENDIAN); } } default -> throw unsupported(); @@ -585,8 +599,9 @@ public static void writeFloatList(DataOutput out, FieldDefinition field, List void writeMessageList(DataOutput out, FieldDefinition field, L if (!field.oneOf() && list.isEmpty()) { return; } - for (final T value : list) { - writeMessageNoChecks(out, field, value, writer, sizeOf); + final int listSize = list.size(); + for (int i = 0; i < listSize; i++) { + writeMessageNoChecks(out, field, list.get(i), writer, sizeOf); } } @@ -722,8 +744,9 @@ public static void writeBytesList(DataOutput out, FieldDefinition field, List>> 1) ^ -(x & 1) : x; } + /** + * {@inheritDoc} + */ + @Override + public String readUtf8String(int lengthInBytes) throws IOException { +// if (buffer.remaining() < lengthInBytes) throw new IOException("Not enough bytes to remaining [" + +// buffer.remaining() + "] to read string of [" + lengthInBytes + "] bytes"); +// int oldLimit = buffer.limit(); +// buffer.limit(buffer.position()+lengthInBytes); +// final String readStr = StandardCharsets.UTF_8.decode(buffer).toString(); +// buffer.limit(oldLimit); +// return readStr; + final String readStr = new String(buffer.array(), 
buffer.position(), lengthInBytes, StandardCharsets.UTF_8); + buffer.position(buffer.position() + lengthInBytes); + return readStr; + } + // ================================================================================================================ // DataOutput Write Methods @@ -508,8 +534,6 @@ public void writeBytes(byte[] src) { @Override public void writeBytes(DataBuffer src) throws IOException { if ((getLimit() - getPosition()) < src.getRemaining()) { - System.err.println("Trying to write [" + src.getRemaining() + "] bytes but only [" + - (getLimit() - getPosition()) + "] remaining of [" + getCapacity() + "]"); throw new BufferUnderflowException(); } buffer.put(src.buffer); diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/io/DataInput.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/io/DataInput.java index 44dba68d..6bed139f 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/io/DataInput.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/io/DataInput.java @@ -4,6 +4,7 @@ import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; import java.nio.ByteOrder; +import java.nio.charset.StandardCharsets; /** *

A high level interface to represent a way to read data as tokens, each method assumes there are enough bytes @@ -356,4 +357,16 @@ default long readVarLong(boolean zigZag) throws IOException { } throw new IOException("Malformed Varint"); } + + /** + * Read {@code lengthInBytes} bytes and return as UTF-8 decoded String. + * + * @param lengthInBytes The number of UTF-8 bytes to read + * @return UTF-8 decoded String from read bytes + */ + default String readUtf8String(int lengthInBytes) throws IOException { + byte[] bytes = new byte[lengthInBytes]; + readBytes(bytes); + return new String(bytes, StandardCharsets.UTF_8); + } } diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/io/DataInputStream.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/io/DataInputStream.java index a0de2ce8..a7f8e674 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/io/DataInputStream.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/io/DataInputStream.java @@ -38,6 +38,19 @@ public DataInputStream(InputStream in) { } } + /** + * Set a new InputStream, allows this object to be reused. 
+ * + * @param newIn the new input stream to use + * @throws IOException if there was a problem reading first byte + */ + public void setInputStream(InputStream newIn) throws IOException { + in = newIn; + position = 0; + limit = Long.MAX_VALUE; + readNextByte(); + } + private void readNextByte() throws IOException{ // Have to read as int, so we can detect difference between byte -1 int for EOF and 255 valid byte final int nextByteAsInt = in.read(); diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/io/OffHeapDataBuffer.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/io/OffHeapDataBuffer.java index f5c9e807..a4e9806e 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/io/OffHeapDataBuffer.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/hashgraph/pbj/runtime/io/OffHeapDataBuffer.java @@ -1,16 +1,844 @@ package com.hedera.hashgraph.pbj.runtime.io; +import sun.misc.Unsafe; + +import java.io.IOException; +import java.lang.reflect.Field; +import java.nio.Buffer; +import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; + +import static java.nio.ByteOrder.BIG_ENDIAN; /** * A Buffer backed by a ByteBuffer that implements {@code DataInput} and {@code DataOutput}. */ +@SuppressWarnings("DuplicatedCode") public final class OffHeapDataBuffer extends DataBuffer { + + /** Offset of the {@code java.nio.Buffer#address} field. */ + private static final long BYTE_BUFFER_ADDRESS_FIELD_OFFSET; + + /** + * Offset of the {@code java.nio.ByteBuffer#hb} field. 
+ */ + public static final long BYTE_BUFFER_HB_FIELD_OFFSET; + /** Offset of first data item in a byte array */ + private static final long BYTE_ARRAY_FIRST_DATA_OFFSET; + /** Access to sun.misc.Unsafe required for atomic compareAndSwapLong on off-heap memory */ + private static final Unsafe UNSAFE; + private static final int MAX_VARINT_SIZE = 10; + + private static final ByteOrder NATIVE_BYTE_ORDER = ByteOrder.nativeOrder(); + + static { + try { + Field f = Unsafe.class.getDeclaredField("theUnsafe"); + f.setAccessible(true); + UNSAFE = (Unsafe) f.get(null); + BYTE_BUFFER_ADDRESS_FIELD_OFFSET = UNSAFE.objectFieldOffset(Buffer.class.getDeclaredField("address")); + BYTE_BUFFER_HB_FIELD_OFFSET = UNSAFE.objectFieldOffset(ByteBuffer.class.getDeclaredField("hb")); + BYTE_ARRAY_FIRST_DATA_OFFSET = UNSAFE.arrayBaseOffset(byte[].class); + } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) { + throw new InternalError(e); + } + } + + private int position = 0; + private int limit = 0; + private final int capacity; + + /** Pointer to beginning of off-heap bytes in buffer */ + private final long startOfBytesPointer; + OffHeapDataBuffer(ByteBuffer buffer) { super(buffer); + limit = capacity = buffer.capacity(); + if (!buffer.isDirect()) { + throw new IllegalArgumentException("buffer.isDirect() must be true"); + } + if(ByteOrder.nativeOrder() != ByteOrder.LITTLE_ENDIAN) { + throw new IllegalStateException("OffHeapDataBuffer assumes you are running on a little endian machine."+ + " Which is both Intel and ARM so covers most cases."); + } + startOfBytesPointer = UNSAFE.getLong(buffer, BYTE_BUFFER_ADDRESS_FIELD_OFFSET); } OffHeapDataBuffer(int size) { - super(ByteBuffer.allocateDirect(size)); + this(ByteBuffer.allocateDirect(size)); + } + + // ================================================================================================================ + // DataOutput Methods + + /** + * Set the limit to current position and 
position to origin. This is useful when you have just finished writing + * into a buffer and want to flip it ready to read back from. + */ + public void flip() { + limit = position; + position = 0; + } + + /** + * Reset position to origin and limit to capacity, allowing this buffer to be read or written again + */ + public void reset() { + position = 0; + limit = capacity; + } + + /** + * Reset position to origin and leave limit alone, allowing this buffer to be read again with existing limit + */ + public void resetPosition() { + position = 0; + } + + /** + * Get the capacity in bytes that can be stored in this buffer + * + * @return capacity in bytes + */ + public int getCapacity() { + return capacity; + } + + /** + * toString that outputs data in buffer in bytes. + * + * @return nice debug output of buffer contents + */ + @Override + public String toString() { + // build string + StringBuilder sb = new StringBuilder(); + sb.append("DataBuffer["); + for (int i = 0; i < limit; i++) { + int v = buffer.get(i) & 0xFF; + sb.append(v); + if (i < (buffer.limit()-1)) sb.append(','); + } + sb.append(']'); + return sb.toString(); + } + + /** + * Equals that compares DataBuffer contents + * + * @param o another object or DataBuffer to compare to + * @return if {@code o} is an instance of {@code DataBuffer} and they contain the same bytes + */ + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DataBuffer that = (DataBuffer) o; + if (capacity != that.getCapacity()) return false; + if (limit != that.getLimit()) return false; + for (int i = 0; i < limit; i++) { + if (buffer.get(i) != that.buffer.get(i)) return false; + } + return true; + } + + /** + * Get hash based on contents of this buffer + * + * @return hash code + */ + @Override + public int hashCode() { + return buffer.hashCode(); + } + + // 
================================================================================================================ + // DataOutput Position Methods + + /** + * {@inheritDoc} + */ + @Override + public long skip(long count) { + count = Math.max(count, buffer.remaining()); + position += count; + return count; + } + + /** + * {@inheritDoc} + */ + @Override + public long getPosition() { + return position; + } + + /** + * {@inheritDoc} + */ + @Override + public long getLimit() { + return limit; + } + + /** + * {@inheritDoc} + */ + @Override + public void setLimit(long limit) { + if (limit < 0 || limit > capacity) throw new IndexOutOfBoundsException(); + this.limit =(int)limit; + } + + /** + * {@inheritDoc} + */ + @Override + public boolean hasRemaining() { + return (limit - position) > 0; + } + + /** + * {@inheritDoc} + */ + @Override + public long getRemaining() { + return Math.max(0,limit - position); + } + + // ================================================================================================================ + // DataInput Read Methods + + /** + * {@inheritDoc} + */ + @Override + public byte readByte() { + if (position < 0 || position > limit) throw new IndexOutOfBoundsException(); + return UNSAFE.getByte(startOfBytesPointer + position++); + } + + /** + * {@inheritDoc} + */ + @Override + public int readUnsignedByte() { + if (position < 0 || position > limit) throw new IndexOutOfBoundsException(); + return Byte.toUnsignedInt(UNSAFE.getByte(startOfBytesPointer + position++)); + } + + /** + * {@inheritDoc} + */ + @Override + public void readBytes(byte[] dst, int offset, int length) { + if (position < 0 || position > limit - length || (offset+length) >= dst.length ) throw new IndexOutOfBoundsException(); + buffer.get(dst, offset, length); + UNSAFE.copyMemory(null, startOfBytesPointer + position, + dst, BYTE_ARRAY_FIRST_DATA_OFFSET+ offset, length); + position += length; + } + + /** + * {@inheritDoc} + */ + @Override + public void readBytes(ByteBuffer dst) 
throws IOException { + final int length = dst.remaining(); + if (position < 0 || position > limit - length) throw new IndexOutOfBoundsException(); + if (dst.isDirect()) { + final long dstPointer = UNSAFE.getLong(dst, BYTE_BUFFER_ADDRESS_FIELD_OFFSET); + UNSAFE.copyMemory(startOfBytesPointer + position, dstPointer, length); + } else { + byte[] internalByteArray = (byte[])UNSAFE.getObject(dst, BYTE_BUFFER_HB_FIELD_OFFSET); + UNSAFE.copyMemory(null, startOfBytesPointer + position, + internalByteArray, BYTE_ARRAY_FIRST_DATA_OFFSET, length); + } + position += length; + } + + /** + * {@inheritDoc} + */ + @Override + public void readBytes(ByteBuffer dst, int offset, int length) throws IOException { + if (position < 0 || position > limit - length || (offset+length) > dst.limit()) throw new IndexOutOfBoundsException(); + if (dst.isDirect()) { + final long dstPointer = UNSAFE.getLong(dst, BYTE_BUFFER_ADDRESS_FIELD_OFFSET) + offset; + UNSAFE.copyMemory(startOfBytesPointer + position, dstPointer, length); + } else { + byte[] internalByteArray = (byte[])UNSAFE.getObject(dst, BYTE_BUFFER_HB_FIELD_OFFSET); + UNSAFE.copyMemory(null, startOfBytesPointer + position, + internalByteArray, BYTE_ARRAY_FIRST_DATA_OFFSET + offset, length); + } + position += length; + dst.position(offset+length); + } + + /** + * {@inheritDoc} + */ + @Override + public void readBytes(byte[] dst) { + final int remaining = limit - position; + if (position < 0 || remaining <= 0) throw new IndexOutOfBoundsException(); + final int length = Math.min(remaining, dst.length); + UNSAFE.copyMemory(null, startOfBytesPointer + position, + dst, BYTE_ARRAY_FIRST_DATA_OFFSET, length); + position += dst.length; + } + + /** + * Create a new Bytes over a subsection of this buffer. Data is shared and not copied, so any changes to + * the contents of this buffer will be reflected in the Bytes. This position is incremented by + * {@code length}. 
+ * + * @param length The length in bytes of this buffer starting at current position to be in sub buffer + * @return new read only data buffer representing a subsection of this buffers data + * @throws BufferUnderflowException If length is more than remaining bytes + */ + @Override + public Bytes readBytes(int length) { + if (position < 0 || position > limit - length) throw new IndexOutOfBoundsException(); + // move on position + final Bytes bytes = new ByteOverByteBuffer(buffer ,position , length); + position += length; + return bytes; + } + + /** + * {@inheritDoc} + */ + @Override + public int readInt() { + return readInt(BIG_ENDIAN); + } + + /** + * {@inheritDoc} + */ + @Override + public int readInt(ByteOrder byteOrder) { + if (position < 0 || position > limit - Integer.BYTES) throw new IndexOutOfBoundsException(); + final int value; + if (byteOrder == NATIVE_BYTE_ORDER) { + value = UNSAFE.getInt(startOfBytesPointer + position); + } else { + value = Integer.reverseBytes(UNSAFE.getInt(startOfBytesPointer + position)); + } + position += Integer.BYTES; + return value; + } + + /** + * {@inheritDoc} + */ + @Override + public long readUnsignedInt() { + return readUnsignedInt(BIG_ENDIAN); + } + + /** + * {@inheritDoc} + */ + @Override + public long readUnsignedInt(ByteOrder byteOrder) { + if (position < 0 || position > limit - Integer.BYTES) throw new IndexOutOfBoundsException(); + final int value; + if (byteOrder == NATIVE_BYTE_ORDER) { + value = UNSAFE.getInt(startOfBytesPointer + position); + } else { + value = Integer.reverseBytes(UNSAFE.getInt(startOfBytesPointer + position)); + } + position += Integer.BYTES; + return Integer.toUnsignedLong(value); + } + + /** + * {@inheritDoc} + */ + @Override + public long readLong() { + return readLong(BIG_ENDIAN); + } + + /** + * {@inheritDoc} + */ + @Override + public long readLong(ByteOrder byteOrder) { + if (position < 0 || position > limit - Long.BYTES) throw new IndexOutOfBoundsException(); + final long value; + if 
(byteOrder == NATIVE_BYTE_ORDER) { + value = UNSAFE.getLong(startOfBytesPointer + position); + } else { + value = Long.reverseBytes(UNSAFE.getLong(startOfBytesPointer + position)); + } + position += Long.BYTES; + return value; + } + + /** + * {@inheritDoc} + */ + @Override + public float readFloat() { + return readFloat(BIG_ENDIAN); + } + + /** + * {@inheritDoc} + */ + @Override + public float readFloat(ByteOrder byteOrder) { + if (position < 0 || position > limit - Float.BYTES) throw new IndexOutOfBoundsException(); + final float value; + if (byteOrder == NATIVE_BYTE_ORDER) { + value = UNSAFE.getFloat(startOfBytesPointer + position); + } else { + value = Float.intBitsToFloat(Integer.reverseBytes(UNSAFE.getInt(startOfBytesPointer + position))); + } + position += Float.BYTES; + return value; + } + + /** + * {@inheritDoc} + */ + @Override + public double readDouble() { + return readDouble(BIG_ENDIAN); + } + + /** + * {@inheritDoc} + */ + @Override + public double readDouble(ByteOrder byteOrder) { + if (position < 0 || position > limit - Double.BYTES) throw new IndexOutOfBoundsException(); + final double value; + if (byteOrder == NATIVE_BYTE_ORDER) { + value = UNSAFE.getDouble(startOfBytesPointer + position); + } else { + value = Double.longBitsToDouble(Long.reverseBytes(UNSAFE.getLong(startOfBytesPointer + position))); + } + position += Double.BYTES; + return value; + } + + /** + * {@inheritDoc} + */ + @Override + public int readVarInt(boolean zigZag) throws IOException { + if (!hasRemaining()) throw new IOException("Tried to rad var int from 0 bytes remaining"); + int tempPos = position; + int x; + if ((x = UNSAFE.getByte(startOfBytesPointer + tempPos++)) >= 0) { + position ++; + return zigZag ? 
(x >>> 1) ^ -(x & 1) : x; + } else if ((limit - position) < 10) { + return (int)readVarLongSlow(zigZag); + } else if ((x ^= (UNSAFE.getByte(startOfBytesPointer + tempPos++) << 7)) < 0) { + x ^= (~0 << 7); + } else if ((x ^= (UNSAFE.getByte(startOfBytesPointer + tempPos++) << 14)) >= 0) { + x ^= (~0 << 7) ^ (~0 << 14); + } else if ((x ^= (UNSAFE.getByte(startOfBytesPointer + tempPos++) << 21)) < 0) { + x ^= (~0 << 7) ^ (~0 << 14) ^ (~0 << 21); + } else { + int y = UNSAFE.getByte(startOfBytesPointer + tempPos++); + x ^= y << 28; + x ^= (~0 << 7) ^ (~0 << 14) ^ (~0 << 21) ^ (~0 << 28); + if (y < 0 + && UNSAFE.getByte(startOfBytesPointer + tempPos++) < 0 + && UNSAFE.getByte(startOfBytesPointer + tempPos++) < 0 + && UNSAFE.getByte(startOfBytesPointer + tempPos++) < 0 + && UNSAFE.getByte(startOfBytesPointer + tempPos++) < 0 + && UNSAFE.getByte(startOfBytesPointer + tempPos++) < 0) { + throw new IOException("Malformed Varint"); + } + } + position = tempPos; + return zigZag ? (x >>> 1) ^ -(x & 1) : x; + } + + /** + * {@inheritDoc} + */ + @Override + public long readVarLong(boolean zigZag) throws IOException { + if (!hasRemaining()) throw new IOException("Tried to rad var int from 0 bytes remaining"); + int tempPos = position; + long x; + int y; + if ((y = UNSAFE.getByte(startOfBytesPointer + tempPos++)) >= 0) { + position ++; + return zigZag ? 
(y >>> 1) ^ -(y & 1) : y; + } else if (buffer.remaining() < 10) { + return readVarLongSlow(zigZag); + } else if ((y ^= (UNSAFE.getByte(startOfBytesPointer + tempPos++) << 7)) < 0) { + x = y ^ (~0 << 7); + } else if ((y ^= (UNSAFE.getByte(startOfBytesPointer + tempPos++) << 14)) >= 0) { + x = y ^ ((~0 << 7) ^ (~0 << 14)); + } else if ((y ^= (UNSAFE.getByte(startOfBytesPointer + tempPos++) << 21)) < 0) { + x = y ^ ((~0 << 7) ^ (~0 << 14) ^ (~0 << 21)); + } else if ((x = y ^ ((long) UNSAFE.getByte(startOfBytesPointer + tempPos++) << 28)) >= 0L) { + x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28); + } else if ((x ^= ((long) UNSAFE.getByte(startOfBytesPointer + tempPos++) << 35)) < 0L) { + x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35); + } else if ((x ^= ((long) UNSAFE.getByte(startOfBytesPointer + tempPos++) << 42)) >= 0L) { + x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35) ^ (~0L << 42); + } else if ((x ^= ((long) UNSAFE.getByte(startOfBytesPointer + tempPos++) << 49)) < 0L) { + x ^= + (~0L << 7) + ^ (~0L << 14) + ^ (~0L << 21) + ^ (~0L << 28) + ^ (~0L << 35) + ^ (~0L << 42) + ^ (~0L << 49); + } else { + x ^= ((long) UNSAFE.getByte(startOfBytesPointer + tempPos++) << 56); + x ^= + (~0L << 7) + ^ (~0L << 14) + ^ (~0L << 21) + ^ (~0L << 28) + ^ (~0L << 35) + ^ (~0L << 42) + ^ (~0L << 49) + ^ (~0L << 56); + if (x < 0L) { + if (UNSAFE.getByte(startOfBytesPointer + tempPos++) < 0L) { + throw new IOException("Malformed Varint"); + } + } + } + position = tempPos; + return zigZag ? (x >>> 1) ^ -(x & 1) : x; + } + + /** + * Read a 64bit protobuf varint at current position. A long var int can be 1 to 10 bytes. 
+ * + * @return long read in var int format + * @param zigZag use protobuf zigZag varint encoding, optimized for negative numbers + * @throws IOException if an I/O error occurs + */ + private long readVarLongSlow(boolean zigZag) throws IOException { + long result = 0; + for (int shift = 0; shift < 64; shift += 7) { + final byte b = readByte(); + result |= (long) (b & 0x7F) << shift; + if ((b & 0x80) == 0) { + return zigZag ? ((result >>> 1) ^ -(result & 1)) : result; + } + } + throw new IOException("Malformed Varint"); + } + + /** + * {@inheritDoc} + */ + @Override + public String readUtf8String(int lengthInBytes) throws IOException { +// if (buffer.remaining() < lengthInBytes) throw new IOException("Not enough bytes to remaining [" + +// buffer.remaining() + "] to read string of [" + lengthInBytes + "] bytes"); +// int oldLimit = buffer.limit(); +// buffer.limit(buffer.position()+lengthInBytes); +// final String readStr = StandardCharsets.UTF_8.decode(buffer).toString(); +// buffer.limit(oldLimit); +// return readStr; + final String readStr = new String(buffer.array(), buffer.position(), lengthInBytes, StandardCharsets.UTF_8); + buffer.position(buffer.position() + lengthInBytes); + return readStr; + +// java.nio.charset.CharsetDecoder + } + + // ================================================================================================================ + // DataOutput Write Methods + + /** + * {@inheritDoc} + */ + @Override + public void writeByte(byte b) { + if (position < 0 || position > limit) throw new IndexOutOfBoundsException(); + UNSAFE.putByte(startOfBytesPointer + position++, b); + } + + /** + * {@inheritDoc} + */ + public void writeUnsignedByte(int b) { + if (position < 0 || position > limit) throw new IndexOutOfBoundsException(); + UNSAFE.putByte(startOfBytesPointer + position++, (byte)b); + } + + /** + * {@inheritDoc} + */ + @Override + public void writeBytes(byte[] src, int offset, int length) { + if (position < 0 || position > limit - length || 
(offset+length) < src.length ) throw new IndexOutOfBoundsException(); + UNSAFE.copyMemory(src, BYTE_ARRAY_FIRST_DATA_OFFSET + offset,null, startOfBytesPointer + position, length); + position += length; + } + + /** + * {@inheritDoc} + */ + @Override + public void writeBytes(byte[] src) { + if (position < 0 || position > limit - src.length) throw new IndexOutOfBoundsException(); + UNSAFE.copyMemory(src, BYTE_ARRAY_FIRST_DATA_OFFSET,null, startOfBytesPointer + position, src.length); + position += src.length; + } + + /** + * {@inheritDoc} + */ + @Override + public void writeBytes(DataBuffer src) throws IOException { + final int length = (int)src.getRemaining(); + if (position < 0 || position > limit - length) throw new IndexOutOfBoundsException(); + final ByteBuffer srcBuffer = src.buffer; + if (srcBuffer.isDirect()) { + final long srcPointer = UNSAFE.getLong(srcBuffer, BYTE_BUFFER_ADDRESS_FIELD_OFFSET) + src.getPosition(); + UNSAFE.copyMemory(srcPointer, startOfBytesPointer + position, length); + } else { + byte[] internalByteArray = (byte[])UNSAFE.getObject(srcBuffer, BYTE_BUFFER_HB_FIELD_OFFSET); + UNSAFE.copyMemory(internalByteArray, BYTE_ARRAY_FIRST_DATA_OFFSET + src.getPosition(), + null, startOfBytesPointer + position, length); + } + position += length; + } + + /** + * {@inheritDoc} + */ + @Override + public void writeBytes(ByteBuffer src) throws IOException { + + final int length = src.remaining(); + if (position < 0 || position > limit - length) throw new IndexOutOfBoundsException(); + if (src.isDirect()) { + final long srcPointer = UNSAFE.getLong(src, BYTE_BUFFER_ADDRESS_FIELD_OFFSET) + src.position(); + UNSAFE.copyMemory(srcPointer, startOfBytesPointer + position, length); + } else { + byte[] internalByteArray = (byte[])UNSAFE.getObject(src, BYTE_BUFFER_HB_FIELD_OFFSET); + UNSAFE.copyMemory(internalByteArray, BYTE_ARRAY_FIRST_DATA_OFFSET + src.position(), + null, startOfBytesPointer + position, length); + } + position += length; + } + + /** + * {@inheritDoc} 
+ */ + @Override + public void writeBytes(Bytes src) throws IOException { + final int length = src.getLength(); + if (position < 0 || position > limit - length) throw new IndexOutOfBoundsException(); + if (src instanceof final ByteOverByteBuffer byteOverByteBuffer) { + final ByteBuffer buffer = byteOverByteBuffer.getBuffer(); + if (buffer.isDirect()) { + final long srcPointer = UNSAFE.getLong(buffer, BYTE_BUFFER_ADDRESS_FIELD_OFFSET) + byteOverByteBuffer.getStart(); + UNSAFE.copyMemory(srcPointer, startOfBytesPointer + position, length); + } else { + byte[] internalByteArray = (byte[])UNSAFE.getObject(buffer, BYTE_BUFFER_HB_FIELD_OFFSET); + UNSAFE.copyMemory(internalByteArray, BYTE_ARRAY_FIRST_DATA_OFFSET + byteOverByteBuffer.getStart(), + null, startOfBytesPointer + position, length); + } + position += length; + } else { + super.writeBytes(src); + } + } + + /** + * {@inheritDoc} + */ + @Override + public void writeInt(int value) { + writeInt(value, BIG_ENDIAN); + } + + /** + * {@inheritDoc} + */ + @Override + public void writeInt(int value, ByteOrder byteOrder) { + if (position < 0 || position > limit - Integer.BYTES) throw new IndexOutOfBoundsException(); + if (byteOrder == NATIVE_BYTE_ORDER) { + UNSAFE.putInt(startOfBytesPointer + position, value); + } else { + UNSAFE.putInt(startOfBytesPointer + position, Integer.reverseBytes(value)); + } + position += Integer.BYTES; + } + + /** + * {@inheritDoc} + */ + @Override + public void writeUnsignedInt(long value) { + writeUnsignedInt(value, BIG_ENDIAN); + } + + /** + * {@inheritDoc} + */ + @Override + public void writeUnsignedInt(long value, ByteOrder byteOrder) { + if (position < 0 || position > limit - Integer.BYTES) throw new IndexOutOfBoundsException(); + if (byteOrder == NATIVE_BYTE_ORDER) { + UNSAFE.putInt(startOfBytesPointer + position, (int)value); + } else { + UNSAFE.putInt(startOfBytesPointer + position, Integer.reverseBytes((int)value)); + } + position += Integer.BYTES; + } + + /** + * {@inheritDoc} + */ + 
@Override + public void writeLong(long value) { + writeLong(value, BIG_ENDIAN); + } + + /** + * {@inheritDoc} + */ + @Override + public void writeLong(long value, ByteOrder byteOrder) { + if (position < 0 || position > limit - Long.BYTES) throw new IndexOutOfBoundsException(); + if (byteOrder == NATIVE_BYTE_ORDER) { + UNSAFE.putLong(startOfBytesPointer + position, value); + } else { + UNSAFE.putLong(startOfBytesPointer + position, Long.reverseBytes(value)); + } + position += Long.BYTES; + } + + /** + * {@inheritDoc} + */ + @Override + public void writeFloat(float value) { + writeFloat(value, BIG_ENDIAN); + } + + /** + * {@inheritDoc} + */ + @Override + public void writeFloat(float value, ByteOrder byteOrder) { + if (position < 0 || position > limit - Float.BYTES) throw new IndexOutOfBoundsException(); + if (byteOrder == NATIVE_BYTE_ORDER) { + UNSAFE.putFloat(startOfBytesPointer + position, value); + } else { + UNSAFE.putInt(startOfBytesPointer + position, Integer.reverseBytes(Float.floatToIntBits(value))); + } + position += Float.BYTES; + } + + /** + * {@inheritDoc} + */ + @Override + public void writeDouble(double value) { + writeDouble(value, BIG_ENDIAN); + } + + /** + * {@inheritDoc} + */ + @Override + public void writeDouble(double value, ByteOrder byteOrder) { + if (position < 0 || position > limit - Double.BYTES) throw new IndexOutOfBoundsException(); + if (byteOrder == NATIVE_BYTE_ORDER) { + UNSAFE.putDouble(startOfBytesPointer + position, value); + } else { + UNSAFE.putLong(startOfBytesPointer + position, Long.reverseBytes(Double.doubleToLongBits(value))); + } + position += Double.BYTES; + } + + /** + * {@inheritDoc} + */ + @Override + public void writeVarInt(int value, boolean zigZag) { + long longValue = value; + if (zigZag) { + longValue = (longValue << 1) ^ (longValue >> 63); + } + if (position < 0) { + throw new IndexOutOfBoundsException(); + } else if (position < limit - MAX_VARINT_SIZE) { // check if enough room for any size varint, fast path + while 
(true) { + if ((longValue & ~0x7F) == 0) { + UNSAFE.putByte(startOfBytesPointer + position++, (byte) longValue); + break; + } else { + UNSAFE.putByte(startOfBytesPointer + position++, (byte) ((longValue & 0x7F) | 0x80)); + longValue >>>= 7; + } + } + } else { + final int limitMinusOne = limit - 1; + while (position < limitMinusOne) { + if ((longValue & ~0x7F) == 0) { + UNSAFE.putByte(startOfBytesPointer + position++, (byte) longValue); + return; + } else { + UNSAFE.putByte(startOfBytesPointer + position++, (byte) ((longValue & 0x7F) | 0x80)); + longValue >>>= 7; + } + } + if (position > limitMinusOne) throw new IndexOutOfBoundsException(); + } + } + + /** + * {@inheritDoc} + */ + @Override + public void writeVarLong(long value, final boolean zigZag) { + if (zigZag) { + value = (value << 1) ^ (value >> 63); + } + if (position < 0) { + throw new IndexOutOfBoundsException(); + } else if (position < limit - MAX_VARINT_SIZE) { // check if enough room for any size varint, fast path + while (true) { + if ((value & ~0x7FL) == 0) { + UNSAFE.putByte(startOfBytesPointer + position++, (byte) value); + break; + } else { + UNSAFE.putByte(startOfBytesPointer + position++, (byte) (((int) value & 0x7F) | 0x80)); + value >>>= 7; + } + if (position > limit - 1) throw new IndexOutOfBoundsException(); + } + } else { + final int limitMinusOne = limit - 1; + while (position < limitMinusOne) { + if ((value & ~0x7FL) == 0) { + UNSAFE.putByte(startOfBytesPointer + position++, (byte) value); + return; + } else { + UNSAFE.putByte(startOfBytesPointer + position++, (byte) (((int) value & 0x7F) | 0x80)); + value >>>= 7; + } + } + if (position > limitMinusOne) throw new IndexOutOfBoundsException(); + } } } diff --git a/pbj-core/pbj-runtime/src/main/java/module-info.java b/pbj-core/pbj-runtime/src/main/java/module-info.java index b458607e..b5f1af12 100644 --- a/pbj-core/pbj-runtime/src/main/java/module-info.java +++ b/pbj-core/pbj-runtime/src/main/java/module-info.java @@ -2,7 +2,10 @@ * Runtime 
module of code needed by PBJ generated code at runtime. */ module com.hedera.hashgraph.pbj.runtime { - exports com.hedera.hashgraph.pbj.runtime; + requires jdk.unsupported; + requires org.eclipse.collections.api; + requires org.eclipse.collections.impl; + exports com.hedera.hashgraph.pbj.runtime; exports com.hedera.hashgraph.pbj.runtime.test; exports com.hedera.hashgraph.pbj.runtime.io; } \ No newline at end of file diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/hashgraph/pbj/runtime/io/DataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/hashgraph/pbj/runtime/io/DataTest.java index 719df7cf..c7277a44 100644 --- a/pbj-core/pbj-runtime/src/test/java/com/hedera/hashgraph/pbj/runtime/io/DataTest.java +++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/hashgraph/pbj/runtime/io/DataTest.java @@ -9,6 +9,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.ByteOrder; +import java.util.Arrays; import java.util.stream.Stream; import static org.junit.jupiter.api.Assertions.assertArrayEquals; @@ -312,12 +313,13 @@ static void doTest(T value, T readValue2 = javaDataInputReadMethod.read(din2); assertEquals(value, readValue2); // write with DataBuffer - DataBuffer db = new DataBuffer(writtenData.length); + DataBuffer db = DataBuffer.allocate(writtenData.length, true); dataBufferWriteMethod.write(db, value); db.reset(); // check bytes in buffer byte[] writtenData3 = new byte[writtenData.length]; db.readBytes(writtenData3); + assertEquals(Arrays.toString(writtenData), Arrays.toString(writtenData3),"value = "+value); assertArrayEquals(writtenData, writtenData3); // read with DataBuffer db.reset(); diff --git a/pbj-integration-tests/build.gradle.kts b/pbj-integration-tests/build.gradle.kts index b75f3523..83f16201 100644 --- a/pbj-integration-tests/build.gradle.kts +++ b/pbj-integration-tests/build.gradle.kts @@ -72,7 +72,10 @@ tasks.withType { } jmh { -// includes.add("AccountDetailsBench") +// 
includes.add("AccountDetailsBench.parsePbjByteBufferDirect") +// includes.add("AccountDetailsBench.writePbjByteDirect") +// includes.add("EverythingBench") + includes.add("EverythingBench.parsePbjByteBufferDirect") jmhVersion.set("1.35") includeTests.set(true) // jvmArgsAppend.add("-XX:MaxInlineSize=100 -XX:MaxInlineLevel=20") diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/hashgraph/pbj/intergration/jmh/EverythingBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/hashgraph/pbj/intergration/jmh/EverythingBench.java index 426d5c27..c371bd7e 100644 --- a/pbj-integration-tests/src/jmh/java/com/hedera/hashgraph/pbj/intergration/jmh/EverythingBench.java +++ b/pbj-integration-tests/src/jmh/java/com/hedera/hashgraph/pbj/intergration/jmh/EverythingBench.java @@ -17,6 +17,7 @@ import org.openjdk.jmh.annotations.*; import org.openjdk.jmh.infra.Blackhole; +import java.io.BufferedInputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.util.List; @@ -31,7 +32,7 @@ @State(Scope.Benchmark) @Fork(1) @Warmup(iterations = 1, time = 2) -@Measurement(iterations = 5, time = 3) +@Measurement(iterations = 30, time = 3) @OutputTimeUnit(TimeUnit.NANOSECONDS) @BenchmarkMode(Mode.AverageTime) public class EverythingBench { @@ -46,6 +47,7 @@ public class EverythingBench { private final ByteBuffer protobufByteBufferDirect; private final DataBuffer protobufDataBufferDirect; private final NonSynchronizedByteArrayInputStream bin; + private final DataInputStream din; // output buffers private final NonSynchronizedByteArrayOutputStream bout; @@ -72,10 +74,9 @@ public EverythingBench() { protobufDataBuffer = DataBuffer.wrap(protobuf); protobufByteBufferDirect = ByteBuffer.allocateDirect(protobuf.length); protobufByteBufferDirect.put(protobuf); - System.out.println("protobufByteBufferDirect = " + protobufByteBufferDirect); protobufDataBufferDirect = DataBuffer.wrap(protobufByteBufferDirect); bin = new NonSynchronizedByteArrayInputStream(protobuf); - 
DataInputStream din = new DataInputStream(bin); + din = new DataInputStream(bin); // output buffers bout = new NonSynchronizedByteArrayOutputStream(); DataOutputStream dout = new DataOutputStream(bout); @@ -109,8 +110,8 @@ public void parsePbjByteBufferDirect(Blackhole blackhole) throws IOException { public void parsePbjInputStream(Blackhole blackhole) throws IOException { for (int i = 0; i < 1000; i++) { bin.resetPosition(); -// blackhole.consume(EverythingProtoParser.parse(din)); - blackhole.consume(EverythingProtoParser.parse(new DataInputStream(bin))); + din.setInputStream(bin); + blackhole.consume(EverythingProtoParser.parse(din)); } } diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/hashgraph/pbj/intergration/jmh/TimestampBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/hashgraph/pbj/intergration/jmh/TimestampBench.java index fa54576c..36fc5a8f 100644 --- a/pbj-integration-tests/src/jmh/java/com/hedera/hashgraph/pbj/intergration/jmh/TimestampBench.java +++ b/pbj-integration-tests/src/jmh/java/com/hedera/hashgraph/pbj/intergration/jmh/TimestampBench.java @@ -1,11 +1,15 @@ package com.hedera.hashgraph.pbj.intergration.jmh; +import com.google.protobuf.CodedOutputStream; import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.Timestamp; import com.hedera.hapi.node.base.parser.TimestampProtoParser; import com.hedera.hapi.node.base.writer.TimestampWriter; +import com.hedera.hashgraph.pbj.integration.NonSynchronizedByteArrayInputStream; import com.hedera.hashgraph.pbj.integration.NonSynchronizedByteArrayOutputStream; import com.hedera.hashgraph.pbj.runtime.io.DataBuffer; +import com.hedera.hashgraph.pbj.runtime.io.DataInputStream; +import com.hedera.hashgraph.pbj.runtime.io.DataOutputStream; import org.openjdk.jmh.annotations.*; import org.openjdk.jmh.infra.Blackhole; @@ -21,49 +25,135 @@ @OutputTimeUnit(TimeUnit.NANOSECONDS) @BenchmarkMode(Mode.AverageTime) public class TimestampBench { - static { - 
System.out.println("TimestampBench.static initializer 2"); - } - private final byte[] protobuf = Timestamp.newBuilder().setNanos(1234).setSeconds(5678L).build().toByteArray(); + // input objects + private final Timestamp timestamp = Timestamp.newBuilder().setNanos(1234).setSeconds(5678L).build(); + private final com.hedera.hapi.node.base.Timestamp timestampPbj = new com.hedera.hapi.node.base.Timestamp(5678L, 1234); + // input bytes + private final byte[] protobuf = timestamp.toByteArray(); private final ByteBuffer protobufByteBuffer = ByteBuffer.wrap(protobuf); private final DataBuffer protobufDataBuffer = DataBuffer.wrap(protobuf); private final ByteBuffer protobufByteBufferDirect = ByteBuffer .allocateDirect(protobuf.length) .put(protobuf); - private final TimestampProtoParser parser = new TimestampProtoParser(); + private final DataBuffer protobufDataBufferDirect = DataBuffer.wrap(protobufByteBufferDirect); + + private NonSynchronizedByteArrayInputStream bin = new NonSynchronizedByteArrayInputStream(protobuf); + private DataInputStream din = new DataInputStream(bin); + + // output buffers private final NonSynchronizedByteArrayOutputStream bout = new NonSynchronizedByteArrayOutputStream(); + private final DataOutputStream dout = new DataOutputStream(bout); private final DataBuffer outDataBuffer = DataBuffer.allocate(protobuf.length, false); + private final DataBuffer outDataBufferDirect = DataBuffer.allocate(protobuf.length, true); + private final ByteBuffer bbout = ByteBuffer.allocate(protobuf.length); + private final ByteBuffer bboutDirect = ByteBuffer.allocateDirect(protobuf.length); @Benchmark public void parsePbjByteBuffer(Blackhole blackhole) throws IOException { - protobufDataBuffer.resetPosition(); - blackhole.consume(parser.parse(protobufDataBuffer)); + for (int i = 0; i < 1000; i++) { + protobufDataBuffer.resetPosition(); + blackhole.consume(TimestampProtoParser.parse(protobufDataBuffer)); + } } -// @Benchmark -// public void 
parsePbjByteBufferDirect(Blackhole blackhole) throws MalformedProtobufException { -// blackhole.consume(parser.parse(protobufByteBufferDirect.clear())); -// } @Benchmark - public void parseProtoCByteBuffer(Blackhole blackhole) throws InvalidProtocolBufferException { - blackhole.consume(Timestamp.parseFrom(protobufByteBuffer)); + public void parsePbjByteBufferDirect(Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + protobufDataBufferDirect.resetPosition(); + blackhole.consume(TimestampProtoParser.parse(protobufDataBufferDirect)); + } + } + @Benchmark + public void parsePbjInputStream(Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + bin.resetPosition(); +// blackhole.consume(TimestampProtoParser.parse(din)); + blackhole.consume(TimestampProtoParser.parse(new DataInputStream(bin))); + } + } + + @Benchmark + public void parseProtoCByteArray(Blackhole blackhole) throws InvalidProtocolBufferException { + for (int i = 0; i < 1000; i++) { + blackhole.consume(Timestamp.parseFrom(protobuf)); + } } @Benchmark public void parseProtoCByteBufferDirect(Blackhole blackhole) throws InvalidProtocolBufferException { - blackhole.consume(Timestamp.parseFrom(protobufByteBufferDirect)); + for (int i = 0; i < 1000; i++) { + protobufByteBufferDirect.position(0); + blackhole.consume(Timestamp.parseFrom(protobufByteBufferDirect)); + } + } + @Benchmark + public void parseProtoCByteBuffer(Blackhole blackhole) throws InvalidProtocolBufferException { + for (int i = 0; i < 1000; i++) { + blackhole.consume(Timestamp.parseFrom(protobufByteBuffer)); + } + } + @Benchmark + public void parseProtoCInputStream(Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + bin.resetPosition(); + blackhole.consume(Timestamp.parseFrom(bin)); + } + } + + @Benchmark + public void writePbjByteBuffer(Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + outDataBuffer.reset(); + 
TimestampWriter.write(timestampPbj, outDataBuffer); + blackhole.consume(outDataBuffer); + } + } + @Benchmark + public void writePbjByteDirect(Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + outDataBufferDirect.reset(); + TimestampWriter.write(timestampPbj, outDataBufferDirect); + blackhole.consume(outDataBufferDirect); + } + } + @Benchmark + public void writePbjOutputStream(Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + bout.reset(); + TimestampWriter.write(timestampPbj, dout); + blackhole.consume(bout.toByteArray()); + } } @Benchmark - public void writePbj(Blackhole blackhole) throws IOException { - outDataBuffer.reset(); - TimestampWriter.write( - new com.hedera.hapi.node.base.Timestamp(5678L, 1234), outDataBuffer); - blackhole.consume(outDataBuffer); + public void writeProtoCByteArray(Blackhole blackhole) { + for (int i = 0; i < 1000; i++) { + blackhole.consume(timestamp.toByteArray()); + } + } + @Benchmark + public void writeProtoCByteBuffer(Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + CodedOutputStream cout = CodedOutputStream.newInstance(bbout); + timestamp.writeTo(cout); + blackhole.consume(bbout); + } } @Benchmark - public void writeProtoC(Blackhole blackhole) { - blackhole.consume(Timestamp.newBuilder().setNanos(1234).setSeconds(5678L).build().toByteArray()); + public void writeProtoCByteBufferDirect(Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + CodedOutputStream cout = CodedOutputStream.newInstance(bboutDirect); + timestamp.writeTo(cout); + blackhole.consume(bbout); + } } + @Benchmark + public void writeProtoCOutputStream(Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + bout.reset(); + timestamp.writeTo(bout); + blackhole.consume(bout.toByteArray()); + } + } } diff --git a/pbj-integration-tests/src/main/java/com/hedera/hashgraph/pbj/integration/EverythingWriterPerfTest.java 
b/pbj-integration-tests/src/main/java/com/hedera/hashgraph/pbj/integration/EverythingWriterPerfTest.java index 9963e3ac..3f239a8e 100644 --- a/pbj-integration-tests/src/main/java/com/hedera/hashgraph/pbj/integration/EverythingWriterPerfTest.java +++ b/pbj-integration-tests/src/main/java/com/hedera/hashgraph/pbj/integration/EverythingWriterPerfTest.java @@ -2,8 +2,11 @@ import com.hedera.hashgraph.pbj.runtime.io.DataBuffer; import com.hederahashgraph.api.proto.java.GetAccountDetailsResponse; +import com.hederahashgraph.api.proto.pbj.test.parser.EverythingProtoParser; import com.hederahashgraph.api.proto.pbj.test.writer.EverythingWriter; +import java.nio.ByteBuffer; + import static com.hedera.hashgraph.pbj.integration.AccountDetailsPbj.ACCOUNT_DETAILS; import static com.hedera.hashgraph.pbj.integration.EverythingTestData.EVERYTHING; @@ -11,8 +14,13 @@ public class EverythingWriterPerfTest { public static void main(String[] args) throws Exception { - final DataBuffer outDataBuffer = DataBuffer.allocate(1024*1024, true); + parse(); +// parseProtoC(); +// write(); + } + private static void write() throws Exception { + final DataBuffer outDataBuffer = DataBuffer.allocate(1024*1024, true); for (int i = 0; i < 10_000_000; i++) { outDataBuffer.reset(); EverythingWriter.write(EVERYTHING, outDataBuffer); @@ -21,7 +29,33 @@ public static void main(String[] args) throws Exception { } } } - public static void main2(String[] args) throws Exception { + + private static void parse() throws Exception { + final DataBuffer inDataBuffer = DataBuffer.allocate(1024*1024, true); + EverythingWriter.write(EVERYTHING, inDataBuffer); + inDataBuffer.flip(); + + for (int i = 0; i < 10_000_000; i++) { + inDataBuffer.resetPosition(); + var e = EverythingProtoParser.parse(inDataBuffer); +// if (!e.booleanField()) { +// System.out.println("outDataBuffer = " + inDataBuffer); +// } + } + } + private static void parseProtoC() throws Exception { + final ByteBuffer inBuffer = 
ByteBuffer.allocateDirect(1024*1024); + final DataBuffer inDataBuffer = DataBuffer.wrap(inBuffer); + EverythingWriter.write(EVERYTHING, inDataBuffer); + inDataBuffer.flip(); + inBuffer.limit((int)inDataBuffer.getLimit()); + + for (int i = 0; i < 10_000_000; i++) { + inBuffer.position(0); + com.hederahashgraph.api.proto.java.test.Everything.parseFrom(inBuffer); + } + } + public static void write3() throws Exception { // write to temp data buffer and then read into byte array DataBuffer tempDataBuffer = DataBuffer.allocate(5 * 1024 * 1024, false); EverythingWriter.write(EVERYTHING, tempDataBuffer); From eda58d503918aa7fac87f3218c59a90feee598b8 Mon Sep 17 00:00:00 2001 From: jasperpotts Date: Fri, 10 Feb 2023 10:32:12 -0800 Subject: [PATCH 2/5] Merge and cleanup Signed-off-by: jasperpotts --- .../hedera/pbj/integration/EverythingWriterPerfTest.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java index b03ea718..749f168f 100644 --- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java +++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java @@ -7,8 +7,7 @@ import java.nio.ByteBuffer; -import static com.hedera.hashgraph.pbj.integration.AccountDetailsPbj.ACCOUNT_DETAILS; -import static com.hedera.hashgraph.pbj.integration.EverythingTestData.EVERYTHING; +import static com.hedera.pbj.integration.EverythingTestData.*; @SuppressWarnings("DuplicatedCode") public class EverythingWriterPerfTest { @@ -23,7 +22,7 @@ private static void write() throws Exception { final DataBuffer outDataBuffer = DataBuffer.allocate(1024*1024, true); for (int i = 0; i < 10_000_000; i++) { outDataBuffer.reset(); - EverythingWriter.write(EverythingTestData.EVERYTHING, outDataBuffer); + 
EverythingWriter.write(EVERYTHING, outDataBuffer); if (outDataBuffer.getPosition() <= 0) { System.out.println("outDataBuffer = " + outDataBuffer); } @@ -58,7 +57,7 @@ private static void parseProtoC() throws Exception { public static void write3() throws Exception { // write to temp data buffer and then read into byte array DataBuffer tempDataBuffer = DataBuffer.allocate(5 * 1024 * 1024, false); - EverythingWriter.write(EverythingTestData.EVERYTHING, tempDataBuffer); + EverythingWriter.write(EVERYTHING, tempDataBuffer); tempDataBuffer.flip(); final byte[] protobuf = new byte[(int) tempDataBuffer.getRemaining()]; tempDataBuffer.readBytes(protobuf); From 7a3a612c4b9871140b296ce892433f7f74fed5fa Mon Sep 17 00:00:00 2001 From: jasperpotts Date: Fri, 10 Feb 2023 10:44:45 -0800 Subject: [PATCH 3/5] Finish merge and make everything build again Signed-off-by: jasperpotts --- .../pbj/integration/EverythingWriterPerfTest.java | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java index 1ed2e151..47d25a58 100644 --- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java +++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java @@ -3,13 +3,14 @@ import com.hedera.pbj.runtime.io.DataBuffer; import com.hedera.pbj.test.proto.pbj.Everything; import com.hederahashgraph.api.proto.java.GetAccountDetailsResponse; -import com.hederahashgraph.api.proto.pbj.test.parser.EverythingProtoParser; -import com.hederahashgraph.api.proto.pbj.test.writer.EverythingWriter; import java.nio.ByteBuffer; import static com.hedera.pbj.integration.EverythingTestData.*; +/** + * Test class for hacking on code and profiling + */ @SuppressWarnings("DuplicatedCode") public class EverythingWriterPerfTest { @@ -32,12 
+33,12 @@ private static void write() throws Exception { private static void parse() throws Exception { final DataBuffer inDataBuffer = DataBuffer.allocate(1024*1024, true); - EverythingWriter.write(EVERYTHING, inDataBuffer); + Everything.PROTOBUF.write(EVERYTHING, inDataBuffer); inDataBuffer.flip(); for (int i = 0; i < 10_000_000; i++) { inDataBuffer.resetPosition(); - var e = EverythingProtoParser.parse(inDataBuffer); + var e = Everything.PROTOBUF.parse(inDataBuffer); // if (!e.booleanField()) { // System.out.println("outDataBuffer = " + inDataBuffer); // } @@ -46,13 +47,13 @@ private static void parse() throws Exception { private static void parseProtoC() throws Exception { final ByteBuffer inBuffer = ByteBuffer.allocateDirect(1024*1024); final DataBuffer inDataBuffer = DataBuffer.wrap(inBuffer); - EverythingWriter.write(EVERYTHING, inDataBuffer); + Everything.PROTOBUF.write(EVERYTHING, inDataBuffer); inDataBuffer.flip(); inBuffer.limit((int)inDataBuffer.getLimit()); for (int i = 0; i < 10_000_000; i++) { inBuffer.position(0); - com.hederahashgraph.api.proto.java.test.Everything.parseFrom(inBuffer); + com.hedera.pbj.test.proto.java.Everything.parseFrom(inBuffer); } } public static void write3() throws Exception { From 1fa95da0ba7d9eaa4d02429c38513ea03f1432ed Mon Sep 17 00:00:00 2001 From: jasperpotts Date: Fri, 10 Feb 2023 18:09:14 -0800 Subject: [PATCH 4/5] Cleanup of benchmarks Signed-off-by: jasperpotts --- pbj-integration-tests/build.gradle.kts | 4 +- .../intergration/jmh/AccountDetailsBench.java | 193 ------------ .../pbj/intergration/jmh/EverythingBench.java | 196 ------------ .../intergration/jmh/ProtobufObjectBench.java | 285 ++++++++++++++++++ .../pbj/intergration/jmh/TimestampBench.java | 159 ---------- .../pbj/intergration/jmh/VarIntBench.java | 2 +- 6 files changed, 288 insertions(+), 551 deletions(-) delete mode 100644 pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/AccountDetailsBench.java delete mode 100644 
pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/EverythingBench.java create mode 100644 pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/ProtobufObjectBench.java delete mode 100644 pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/TimestampBench.java diff --git a/pbj-integration-tests/build.gradle.kts b/pbj-integration-tests/build.gradle.kts index fadfbf16..cf04289a 100644 --- a/pbj-integration-tests/build.gradle.kts +++ b/pbj-integration-tests/build.gradle.kts @@ -73,10 +73,10 @@ tasks.withType { } jmh { -// includes.add("AccountDetailsBench.parsePbjByteBufferDirect") + includes.add("ProtobufObjectBench") // includes.add("AccountDetailsBench.writePbjByteDirect") // includes.add("EverythingBench") - includes.add("EverythingBench.parsePbjByteBufferDirect") +// includes.add("EverythingBench.parsePbjByteBufferDirect") jmhVersion.set("1.35") includeTests.set(true) // jvmArgsAppend.add("-XX:MaxInlineSize=100 -XX:MaxInlineLevel=20") diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/AccountDetailsBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/AccountDetailsBench.java deleted file mode 100644 index 784c09ab..00000000 --- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/AccountDetailsBench.java +++ /dev/null @@ -1,193 +0,0 @@ -package com.hedera.pbj.intergration.jmh; - -import com.google.protobuf.CodedOutputStream; -import com.google.protobuf.InvalidProtocolBufferException; -import com.hedera.hapi.node.token.parser.AccountDetailsProtoParser; -import com.hedera.hapi.node.token.writer.AccountDetailsWriter; -import com.hedera.pbj.integration.AccountDetailsPbj; -import com.hedera.pbj.integration.NonSynchronizedByteArrayInputStream; -import com.hedera.pbj.integration.NonSynchronizedByteArrayOutputStream; -import com.hedera.pbj.runtime.io.DataBuffer; -import com.hedera.pbj.runtime.io.DataInputStream; -import 
com.hedera.pbj.runtime.io.DataOutputStream; -import com.hederahashgraph.api.proto.java.GetAccountDetailsResponse; -import org.openjdk.jmh.annotations.*; -import org.openjdk.jmh.infra.Blackhole; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.concurrent.TimeUnit; - -@SuppressWarnings("unused") -@State(Scope.Benchmark) -@Fork(1) -@Warmup(iterations = 1, time = 2) -@Measurement(iterations = 5, time = 3) -@OutputTimeUnit(TimeUnit.NANOSECONDS) -@BenchmarkMode(Mode.AverageTime) -public class AccountDetailsBench { - // input objects - private final com.hedera.hapi.node.token.AccountDetails accountDetailsPbj; - private final GetAccountDetailsResponse.AccountDetails accountDetailsProtoC; - - // input bytes - private final byte[] protobuf; - private final ByteBuffer protobufByteBuffer; - private final DataBuffer protobufDataBuffer; - private final ByteBuffer protobufByteBufferDirect; - private final DataBuffer protobufDataBufferDirect; - private final NonSynchronizedByteArrayInputStream bin; - - // output buffers - private final NonSynchronizedByteArrayOutputStream bout; - private final DataBuffer outDataBuffer; - private final DataBuffer outDataBufferDirect; - private final ByteBuffer bbout; - private final ByteBuffer bboutDirect; - - public AccountDetailsBench() { - try { - accountDetailsPbj = AccountDetailsPbj.ACCOUNT_DETAILS; - // write to temp data buffer and then read into byte array - DataBuffer tempDataBuffer = DataBuffer.allocate(5 * 1024 * 1024, false); - AccountDetailsWriter.write(accountDetailsPbj, tempDataBuffer); - tempDataBuffer.flip(); - protobuf = new byte[(int) tempDataBuffer.getRemaining()]; - System.out.println("protobuf.length = " + protobuf.length); - tempDataBuffer.readBytes(protobuf); - // start by parsing using protoc - accountDetailsProtoC = GetAccountDetailsResponse.AccountDetails.parseFrom(protobuf); - - // input buffers - protobufByteBuffer = ByteBuffer.wrap(protobuf); - protobufDataBuffer = 
DataBuffer.wrap(protobuf); - protobufByteBufferDirect = ByteBuffer.allocateDirect(protobuf.length); - protobufByteBufferDirect.put(protobuf); - System.out.println("protobufByteBufferDirect = " + protobufByteBufferDirect); - protobufDataBufferDirect = DataBuffer.wrap(protobufByteBufferDirect); - bin = new NonSynchronizedByteArrayInputStream(protobuf); - DataInputStream din = new DataInputStream(bin); - // output buffers - bout = new NonSynchronizedByteArrayOutputStream(); - DataOutputStream dout = new DataOutputStream(bout); - outDataBuffer = DataBuffer.allocate(protobuf.length, false); - outDataBufferDirect = DataBuffer.allocate(protobuf.length, true); - bbout = ByteBuffer.allocate(protobuf.length); - bboutDirect = ByteBuffer.allocateDirect(protobuf.length); - } catch (IOException e) { - e.getStackTrace(); - System.err.flush(); - throw new RuntimeException(e); - } - } - - @Benchmark - public void parsePbjByteBuffer(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - protobufDataBuffer.resetPosition(); - blackhole.consume(AccountDetailsProtoParser.parse(protobufDataBuffer)); - } - } - - @Benchmark - public void parsePbjByteBufferDirect(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - protobufDataBufferDirect.resetPosition(); - blackhole.consume(AccountDetailsProtoParser.parse(protobufDataBufferDirect)); - } - } - @Benchmark - public void parsePbjInputStream(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - bin.resetPosition(); -// blackhole.consume(AccountDetailsProtoParser.parse(din)); - blackhole.consume(AccountDetailsProtoParser.parse(new DataInputStream(bin))); - } - } - - @Benchmark - public void parseProtoCByteArray(Blackhole blackhole) throws InvalidProtocolBufferException { - for (int i = 0; i < 1000; i++) { - blackhole.consume(GetAccountDetailsResponse.AccountDetails.parseFrom(protobuf)); - } - } - @Benchmark - public void parseProtoCByteBufferDirect(Blackhole blackhole) 
throws InvalidProtocolBufferException { - for (int i = 0; i < 1000; i++) { - protobufByteBufferDirect.position(0); - blackhole.consume(GetAccountDetailsResponse.AccountDetails.parseFrom(protobufByteBufferDirect)); - } - } - @Benchmark - public void parseProtoCByteBuffer(Blackhole blackhole) throws InvalidProtocolBufferException { - for (int i = 0; i < 1000; i++) { - blackhole.consume(GetAccountDetailsResponse.AccountDetails.parseFrom(protobufByteBuffer)); - } - } - @Benchmark - public void parseProtoCInputStream(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - bin.resetPosition(); - blackhole.consume(GetAccountDetailsResponse.AccountDetails.parseFrom(bin)); - } - } - - @Benchmark - public void writePbjByteBuffer(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - outDataBuffer.reset(); - AccountDetailsWriter.write(accountDetailsPbj, outDataBuffer); - blackhole.consume(outDataBuffer); - } - } - @Benchmark - public void writePbjByteDirect(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - outDataBufferDirect.reset(); - AccountDetailsWriter.write(accountDetailsPbj, outDataBufferDirect); - blackhole.consume(outDataBufferDirect); - } - } - @Benchmark - public void writePbjOutputStream(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - bout.reset(); -// AccountDetailsWriter.write(accountDetailsPbj, dout); - AccountDetailsWriter.write(accountDetailsPbj, new DataOutputStream(bout)); - blackhole.consume(bout.toByteArray()); - } - } - - @Benchmark - public void writeProtoCByteArray(Blackhole blackhole) { - for (int i = 0; i < 1000; i++) { - blackhole.consume(accountDetailsProtoC.toByteArray()); - } - } - @Benchmark - public void writeProtoCByteBuffer(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - CodedOutputStream cout = CodedOutputStream.newInstance(bbout); - accountDetailsProtoC.writeTo(cout); - blackhole.consume(bbout); - } - } - 
- @Benchmark - public void writeProtoCByteBufferDirect(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - CodedOutputStream cout = CodedOutputStream.newInstance(bboutDirect); - accountDetailsProtoC.writeTo(cout); - blackhole.consume(bbout); - } - } - - @Benchmark - public void writeProtoCOutputStream(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - bout.reset(); - accountDetailsProtoC.writeTo(bout); - blackhole.consume(bout.toByteArray()); - } - } -} diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/EverythingBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/EverythingBench.java deleted file mode 100644 index 36902242..00000000 --- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/EverythingBench.java +++ /dev/null @@ -1,196 +0,0 @@ -package com.hedera.pbj.intergration.jmh; - -import com.google.protobuf.CodedOutputStream; -import com.google.protobuf.InvalidProtocolBufferException; -import com.hedera.pbj.integration.NonSynchronizedByteArrayInputStream; -import com.hedera.pbj.integration.NonSynchronizedByteArrayOutputStream; -import com.hedera.pbj.runtime.io.DataBuffer; -import com.hedera.pbj.runtime.io.DataInputStream; -import com.hedera.pbj.runtime.io.DataOutputStream; -import com.hederahashgraph.api.proto.pbj.test.Everything; -import com.hederahashgraph.api.proto.pbj.test.parser.EverythingProtoParser; -import com.hederahashgraph.api.proto.pbj.test.writer.EverythingWriter; -import org.openjdk.jmh.annotations.*; -import org.openjdk.jmh.infra.Blackhole; - -import java.io.BufferedInputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.concurrent.TimeUnit; - -import static com.hedera.pbj.integration.EverythingTestData.EVERYTHING; - -@SuppressWarnings("unused") -@State(Scope.Benchmark) -@Fork(1) -@Warmup(iterations = 1, time = 2) -@Measurement(iterations = 30, time = 3) 
-@OutputTimeUnit(TimeUnit.NANOSECONDS) -@BenchmarkMode(Mode.AverageTime) -public class EverythingBench { - // input objects - private final Everything everythingPbj; - private final com.hederahashgraph.api.proto.java.test.Everything everythingProtoC; - - // input bytes - private final byte[] protobuf; - private final ByteBuffer protobufByteBuffer; - private final DataBuffer protobufDataBuffer; - private final ByteBuffer protobufByteBufferDirect; - private final DataBuffer protobufDataBufferDirect; - private final NonSynchronizedByteArrayInputStream bin; - private final DataInputStream din; - - // output buffers - private final NonSynchronizedByteArrayOutputStream bout; - private final DataBuffer outDataBuffer; - private final DataBuffer outDataBufferDirect; - private final ByteBuffer bbout; - private final ByteBuffer bboutDirect; - - public EverythingBench() { - try { - everythingPbj = EVERYTHING; - // write to temp data buffer and then read into byte array - DataBuffer tempDataBuffer = DataBuffer.allocate(5 * 1024 * 1024, false); - EverythingWriter.write(everythingPbj, tempDataBuffer); - tempDataBuffer.flip(); - protobuf = new byte[(int) tempDataBuffer.getRemaining()]; - System.out.println("protobuf.length = " + protobuf.length); - tempDataBuffer.readBytes(protobuf); - // start by parsing using protoc - everythingProtoC = com.hederahashgraph.api.proto.java.test.Everything.parseFrom(protobuf); - - // input buffers - protobufByteBuffer = ByteBuffer.wrap(protobuf); - protobufDataBuffer = DataBuffer.wrap(protobuf); - protobufByteBufferDirect = ByteBuffer.allocateDirect(protobuf.length); - protobufByteBufferDirect.put(protobuf); - protobufDataBufferDirect = DataBuffer.wrap(protobufByteBufferDirect); - bin = new NonSynchronizedByteArrayInputStream(protobuf); - din = new DataInputStream(bin); - // output buffers - bout = new NonSynchronizedByteArrayOutputStream(); - DataOutputStream dout = new DataOutputStream(bout); - outDataBuffer = DataBuffer.allocate(protobuf.length, 
false); - outDataBufferDirect = DataBuffer.allocate(protobuf.length, true); - bbout = ByteBuffer.allocate(protobuf.length); - bboutDirect = ByteBuffer.allocateDirect(protobuf.length); - } catch (IOException e) { - e.getStackTrace(); - System.err.flush(); - throw new RuntimeException(e); - } - } - - @Benchmark - public void parsePbjByteBuffer(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - protobufDataBuffer.resetPosition(); - blackhole.consume(EverythingProtoParser.parse(protobufDataBuffer)); - } - } - - @Benchmark - public void parsePbjByteBufferDirect(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - protobufDataBufferDirect.resetPosition(); - blackhole.consume(EverythingProtoParser.parse(protobufDataBufferDirect)); - } - } - @Benchmark - public void parsePbjInputStream(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - bin.resetPosition(); - din.setInputStream(bin); - blackhole.consume(EverythingProtoParser.parse(din)); - } - } - - @Benchmark - public void parseProtoCByteArray(Blackhole blackhole) throws InvalidProtocolBufferException { - for (int i = 0; i < 1000; i++) { - blackhole.consume(com.hederahashgraph.api.proto.java.test.Everything.parseFrom(protobuf)); - } - } - @Benchmark - public void parseProtoCByteBufferDirect(Blackhole blackhole) throws InvalidProtocolBufferException { - for (int i = 0; i < 1000; i++) { - protobufByteBufferDirect.position(0); - blackhole.consume(com.hederahashgraph.api.proto.java.test.Everything.parseFrom(protobufByteBufferDirect)); - } - } - @Benchmark - public void parseProtoCByteBuffer(Blackhole blackhole) throws InvalidProtocolBufferException { - for (int i = 0; i < 1000; i++) { - blackhole.consume(com.hederahashgraph.api.proto.java.test.Everything.parseFrom(protobufByteBuffer)); - } - } - @Benchmark - public void parseProtoCInputStream(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - bin.resetPosition(); - 
blackhole.consume(com.hederahashgraph.api.proto.java.test.Everything.parseFrom(bin)); - } - } - - @Benchmark - public void writePbjByteBuffer(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - outDataBuffer.reset(); - EverythingWriter.write(everythingPbj, outDataBuffer); - blackhole.consume(outDataBuffer); - } - } - @Benchmark - public void writePbjByteDirect(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - outDataBufferDirect.reset(); - EverythingWriter.write(everythingPbj, outDataBufferDirect); - blackhole.consume(outDataBufferDirect); - } - } - @Benchmark - public void writePbjOutputStream(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - bout.reset(); -// EverythingWriter.write(everythingPbj, dout); - EverythingWriter.write(everythingPbj, new DataOutputStream(bout)); - blackhole.consume(bout.toByteArray()); - } - } - - @Benchmark - public void writeProtoCByteArray(Blackhole blackhole) { - for (int i = 0; i < 1000; i++) { - blackhole.consume(everythingProtoC.toByteArray()); - } - } - @Benchmark - public void writeProtoCByteBuffer(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - CodedOutputStream cout = CodedOutputStream.newInstance(bbout); - everythingProtoC.writeTo(cout); - blackhole.consume(bbout); - } - } - - @Benchmark - public void writeProtoCByteBufferDirect(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - CodedOutputStream cout = CodedOutputStream.newInstance(bboutDirect); - everythingProtoC.writeTo(cout); - blackhole.consume(bbout); - } - } - - @Benchmark - public void writeProtoCOutputStream(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - bout.reset(); - everythingProtoC.writeTo(bout); - blackhole.consume(bout.toByteArray()); - } - } - -} \ No newline at end of file diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/ProtobufObjectBench.java 
b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/ProtobufObjectBench.java new file mode 100644 index 00000000..891ce615 --- /dev/null +++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/ProtobufObjectBench.java @@ -0,0 +1,285 @@ +package com.hedera.pbj.intergration.jmh; + +import com.google.protobuf.CodedOutputStream; +import com.google.protobuf.GeneratedMessageV3; +import com.hedera.hapi.node.base.Timestamp; +import com.hedera.hapi.node.token.AccountDetails; +import com.hedera.pbj.integration.AccountDetailsPbj; +import com.hedera.pbj.integration.EverythingTestData; +import com.hedera.pbj.integration.NonSynchronizedByteArrayInputStream; +import com.hedera.pbj.integration.NonSynchronizedByteArrayOutputStream; +import com.hedera.pbj.runtime.Codec; +import com.hedera.pbj.runtime.io.DataBuffer; +import com.hedera.pbj.runtime.io.DataInputStream; +import com.hedera.pbj.runtime.io.DataOutputStream; +import com.hedera.pbj.test.proto.pbj.Everything; +import com.hederahashgraph.api.proto.java.GetAccountDetailsResponse; +import org.openjdk.jmh.annotations.*; +import org.openjdk.jmh.infra.Blackhole; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.util.concurrent.TimeUnit; + +@SuppressWarnings("unused") +@Fork(1) +@Warmup(iterations = 2, time = 2) +@Measurement(iterations = 5, time = 2) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@BenchmarkMode(Mode.AverageTime) +public abstract class ProtobufObjectBench
<P, G extends GeneratedMessageV3>
{ + /** we repeat all operations 1000 times so that measured times are big enough */ + private static final int OPERATION_COUNT = 1000; + + @State(Scope.Benchmark) + public static class BenchmarkState
<P, G extends GeneratedMessageV3>
{ + private Codec
<P>
pbjCodec; + private ProtobufParseFunction googleByteArrayParseMethod; + private ProtobufParseFunction googleByteBufferParseMethod; + private ProtobufParseFunction googleInputStreamParseMethod; + // input objects + private P pbjModelObject; + private G googleModelObject; + + // input bytes + private byte[] protobuf; + private ByteBuffer protobufByteBuffer; + private DataBuffer protobufDataBuffer; + private ByteBuffer protobufByteBufferDirect; + private DataBuffer protobufDataBufferDirect; + private NonSynchronizedByteArrayInputStream bin; + + // output buffers + private NonSynchronizedByteArrayOutputStream bout; + private DataBuffer outDataBuffer; + private DataBuffer outDataBufferDirect; + private ByteBuffer bbout; + private ByteBuffer bboutDirect; + public void configure(P pbjModelObject, Codec
<P>
pbjCodec, + ProtobufParseFunction googleByteArrayParseMethod, + ProtobufParseFunction googleByteBufferParseMethod, + ProtobufParseFunction googleInputStreamParseMethod) { + try { + this.pbjModelObject = pbjModelObject; + this.pbjCodec = pbjCodec; + this.googleByteArrayParseMethod = googleByteArrayParseMethod; + this.googleByteBufferParseMethod = googleByteBufferParseMethod; + this.googleInputStreamParseMethod = googleInputStreamParseMethod; + // write to temp data buffer and then read into byte array + DataBuffer tempDataBuffer = DataBuffer.allocate(5 * 1024 * 1024, false); + pbjCodec.write(pbjModelObject, tempDataBuffer); + tempDataBuffer.flip(); + this.protobuf = new byte[(int) tempDataBuffer.getRemaining()]; + tempDataBuffer.readBytes(this.protobuf); + // start by parsing using protoc + this.googleModelObject = googleByteArrayParseMethod.parse(this.protobuf); + + // input buffers + this.protobufByteBuffer = ByteBuffer.wrap(this.protobuf); + this.protobufDataBuffer = DataBuffer.wrap(this.protobuf); + this.protobufByteBufferDirect = ByteBuffer.allocateDirect(this.protobuf.length); + this.protobufByteBufferDirect.put(this.protobuf); + this.protobufDataBufferDirect = DataBuffer.wrap(this.protobufByteBufferDirect); + this.bin = new NonSynchronizedByteArrayInputStream(this.protobuf); + DataInputStream din = new DataInputStream(this.bin); + // output buffers + this.bout = new NonSynchronizedByteArrayOutputStream(); + DataOutputStream dout = new DataOutputStream(this.bout); + this.outDataBuffer = DataBuffer.allocate(this.protobuf.length, false); + this.outDataBufferDirect = DataBuffer.allocate(this.protobuf.length, true); + this.bbout = ByteBuffer.allocate(this.protobuf.length); + this.bboutDirect = ByteBuffer.allocateDirect(this.protobuf.length); + } catch (IOException e) { + e.getStackTrace(); + System.err.flush(); + throw new RuntimeException(e); + } + } + } + + /** Same as parsePbjByteBuffer because DataBuffer.wrap(byte[]) uses ByteBuffer today, added this because 
makes result plotting easier */ + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void parsePbjByteArray(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + benchmarkState.protobufDataBuffer.resetPosition(); + blackhole.consume(benchmarkState.pbjCodec.parse(benchmarkState.protobufDataBuffer)); + } + } + + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void parsePbjByteBuffer(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + benchmarkState.protobufDataBuffer.resetPosition(); + blackhole.consume(benchmarkState.pbjCodec.parse(benchmarkState.protobufDataBuffer)); + } + } + + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void parsePbjByteBufferDirect(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + benchmarkState.protobufDataBufferDirect.resetPosition(); + blackhole.consume(benchmarkState.pbjCodec.parse(benchmarkState.protobufDataBufferDirect)); + } + } + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void parsePbjInputStream(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + benchmarkState.bin.resetPosition(); + blackhole.consume(benchmarkState.pbjCodec.parse(new DataInputStream(benchmarkState.bin))); + } + } + + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void parseProtoCByteArray(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + blackhole.consume(benchmarkState.googleByteArrayParseMethod.parse(benchmarkState.protobuf)); + } + } + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void parseProtoCByteBufferDirect(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + benchmarkState.protobufByteBufferDirect.position(0); + 
blackhole.consume(benchmarkState.googleByteBufferParseMethod.parse(benchmarkState.protobufByteBufferDirect)); + } + } + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void parseProtoCByteBuffer(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + blackhole.consume(benchmarkState.googleByteBufferParseMethod.parse(benchmarkState.protobufByteBuffer)); + } + } + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void parseProtoCInputStream(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + benchmarkState.bin.resetPosition(); + blackhole.consume(benchmarkState.googleInputStreamParseMethod.parse(benchmarkState.bin)); + } + } + + /** Same as writePbjByteBuffer because DataBuffer.wrap(byte[]) uses ByteBuffer today, added this because makes result plotting easier */ + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void writePbjByteArray(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + benchmarkState.outDataBuffer.reset(); + benchmarkState.pbjCodec.write(benchmarkState.pbjModelObject, benchmarkState.outDataBuffer); + blackhole.consume(benchmarkState.outDataBuffer); + } + } + + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void writePbjByteBuffer(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + benchmarkState.outDataBuffer.reset(); + benchmarkState.pbjCodec.write(benchmarkState.pbjModelObject, benchmarkState.outDataBuffer); + blackhole.consume(benchmarkState.outDataBuffer); + } + } + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void writePbjByteDirect(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + benchmarkState.outDataBufferDirect.reset(); + 
benchmarkState.pbjCodec.write(benchmarkState.pbjModelObject, benchmarkState.outDataBufferDirect); + blackhole.consume(benchmarkState.outDataBufferDirect); + } + } + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void writePbjOutputStream(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + benchmarkState.bout.reset(); + benchmarkState.pbjCodec.write(benchmarkState.pbjModelObject, new DataOutputStream(benchmarkState.bout)); + blackhole.consume(benchmarkState.bout.toByteArray()); + } + } + + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void writeProtoCByteArray(BenchmarkState benchmarkState, Blackhole blackhole) { + for (int i = 0; i < 1000; i++) { + blackhole.consume(benchmarkState.googleModelObject.toByteArray()); + } + } + + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void writeProtoCByteBuffer(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + CodedOutputStream cout = CodedOutputStream.newInstance(benchmarkState.bbout); + benchmarkState.googleModelObject.writeTo(cout); + blackhole.consume(benchmarkState.bbout); + } + } + + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void writeProtoCByteBufferDirect(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + CodedOutputStream cout = CodedOutputStream.newInstance(benchmarkState.bboutDirect); + benchmarkState.googleModelObject.writeTo(cout); + blackhole.consume(benchmarkState.bbout); + } + } + + @Benchmark + @OperationsPerInvocation(OPERATION_COUNT) + public void writeProtoCOutputStream(BenchmarkState benchmarkState, Blackhole blackhole) throws IOException { + for (int i = 0; i < 1000; i++) { + benchmarkState.bout.reset(); + benchmarkState.googleModelObject.writeTo(benchmarkState.bout); + blackhole.consume(benchmarkState.bout.toByteArray()); + } + } + + /** Custom interface for 
method references as java.util.Function does not throw IOException */ + public interface ProtobufParseFunction { + G parse(D data) throws IOException; + } + + @State(Scope.Benchmark) + public static class EverythingBench extends ProtobufObjectBench { + @Setup + public void setup(BenchmarkState benchmarkState) { + benchmarkState.configure(EverythingTestData.EVERYTHING, + Everything.PROTOBUF, + com.hedera.pbj.test.proto.java.Everything::parseFrom, + com.hedera.pbj.test.proto.java.Everything::parseFrom, + com.hedera.pbj.test.proto.java.Everything::parseFrom); + } + } + + @State(Scope.Benchmark) + public static class TimeStampBench extends ProtobufObjectBench { + @Setup + public void setup(BenchmarkState benchmarkState) { + benchmarkState.configure(new Timestamp(5678L, 1234), + Timestamp.PROTOBUF, + com.hederahashgraph.api.proto.java.Timestamp::parseFrom, + com.hederahashgraph.api.proto.java.Timestamp::parseFrom, + com.hederahashgraph.api.proto.java.Timestamp::parseFrom); + } + } + + @State(Scope.Benchmark) + public static class AccountDetailsBench extends ProtobufObjectBench { + @Setup + public void setup(BenchmarkState benchmarkState) { + benchmarkState.configure(AccountDetailsPbj.ACCOUNT_DETAILS, + AccountDetails.PROTOBUF, + GetAccountDetailsResponse.AccountDetails::parseFrom, + GetAccountDetailsResponse.AccountDetails::parseFrom, + GetAccountDetailsResponse.AccountDetails::parseFrom); + } + } +} diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/TimestampBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/TimestampBench.java deleted file mode 100644 index 7fc05c91..00000000 --- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/TimestampBench.java +++ /dev/null @@ -1,159 +0,0 @@ -package com.hedera.pbj.intergration.jmh; - -import com.google.protobuf.CodedOutputStream; -import com.google.protobuf.InvalidProtocolBufferException; -import com.google.protobuf.Timestamp; -import 
com.hedera.hapi.node.base.parser.TimestampProtoParser; -import com.hedera.hapi.node.base.writer.TimestampWriter; -import com.hedera.pbj.integration.NonSynchronizedByteArrayInputStream; -import com.hedera.pbj.integration.NonSynchronizedByteArrayOutputStream; -import com.hedera.pbj.runtime.io.DataBuffer; -import com.hedera.pbj.runtime.io.DataInputStream; -import com.hedera.pbj.runtime.io.DataOutputStream; -import org.openjdk.jmh.annotations.*; -import org.openjdk.jmh.infra.Blackhole; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.concurrent.TimeUnit; - -@SuppressWarnings("unused") -@State(Scope.Benchmark) -@Fork(1) -@Warmup(iterations = 1, time = 2) -@Measurement(iterations = 1, time = 10) -@OutputTimeUnit(TimeUnit.NANOSECONDS) -@BenchmarkMode(Mode.AverageTime) -public class TimestampBench { - // input objects - private final Timestamp timestamp = Timestamp.newBuilder().setNanos(1234).setSeconds(5678L).build(); - private final com.hedera.hapi.node.base.Timestamp timestampPbj = new com.hedera.hapi.node.base.Timestamp(5678L, 1234); - // input bytes - private final byte[] protobuf = timestamp.toByteArray(); - private final ByteBuffer protobufByteBuffer = ByteBuffer.wrap(protobuf); - private final DataBuffer protobufDataBuffer = DataBuffer.wrap(protobuf); - private final ByteBuffer protobufByteBufferDirect = ByteBuffer - .allocateDirect(protobuf.length) - .put(protobuf); - private final DataBuffer protobufDataBufferDirect = DataBuffer.wrap(protobufByteBufferDirect); - - private NonSynchronizedByteArrayInputStream bin = new NonSynchronizedByteArrayInputStream(protobuf); - private DataInputStream din = new DataInputStream(bin); - - // output buffers - private final NonSynchronizedByteArrayOutputStream bout = new NonSynchronizedByteArrayOutputStream(); - private final DataOutputStream dout = new DataOutputStream(bout); - private final DataBuffer outDataBuffer = DataBuffer.allocate(protobuf.length, false); - private final DataBuffer 
outDataBufferDirect = DataBuffer.allocate(protobuf.length, true); - private final ByteBuffer bbout = ByteBuffer.allocate(protobuf.length); - private final ByteBuffer bboutDirect = ByteBuffer.allocateDirect(protobuf.length); - - @Benchmark - public void parsePbjByteBuffer(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - protobufDataBuffer.resetPosition(); - blackhole.consume(TimestampProtoParser.parse(protobufDataBuffer)); - } - } - - @Benchmark - public void parsePbjByteBufferDirect(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - protobufDataBufferDirect.resetPosition(); - blackhole.consume(TimestampProtoParser.parse(protobufDataBufferDirect)); - } - } - @Benchmark - public void parsePbjInputStream(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - bin.resetPosition(); -// blackhole.consume(TimestampProtoParser.parse(din)); - blackhole.consume(TimestampProtoParser.parse(new DataInputStream(bin))); - } - } - - @Benchmark - public void parseProtoCByteArray(Blackhole blackhole) throws InvalidProtocolBufferException { - for (int i = 0; i < 1000; i++) { - blackhole.consume(Timestamp.parseFrom(protobuf)); - } - } - @Benchmark - public void parseProtoCByteBufferDirect(Blackhole blackhole) throws InvalidProtocolBufferException { - for (int i = 0; i < 1000; i++) { - protobufByteBufferDirect.position(0); - blackhole.consume(Timestamp.parseFrom(protobufByteBufferDirect)); - } - } - @Benchmark - public void parseProtoCByteBuffer(Blackhole blackhole) throws InvalidProtocolBufferException { - for (int i = 0; i < 1000; i++) { - blackhole.consume(Timestamp.parseFrom(protobufByteBuffer)); - } - } - @Benchmark - public void parseProtoCInputStream(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - bin.resetPosition(); - blackhole.consume(Timestamp.parseFrom(bin)); - } - } - - @Benchmark - public void writePbjByteBuffer(Blackhole blackhole) throws IOException { - for 
(int i = 0; i < 1000; i++) { - outDataBuffer.reset(); - TimestampWriter.write(timestampPbj, outDataBuffer); - blackhole.consume(outDataBuffer); - } - } - @Benchmark - public void writePbjByteDirect(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - outDataBufferDirect.reset(); - TimestampWriter.write(timestampPbj, outDataBufferDirect); - blackhole.consume(outDataBufferDirect); - } - } - @Benchmark - public void writePbjOutputStream(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - bout.reset(); - TimestampWriter.write(timestampPbj, dout); - blackhole.consume(bout.toByteArray()); - } - } - - @Benchmark - public void writeProtoCByteArray(Blackhole blackhole) { - for (int i = 0; i < 1000; i++) { - blackhole.consume(timestamp.toByteArray()); - } - } - @Benchmark - public void writeProtoCByteBuffer(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - CodedOutputStream cout = CodedOutputStream.newInstance(bbout); - timestamp.writeTo(cout); - blackhole.consume(bbout); - } - } - - @Benchmark - public void writeProtoCByteBufferDirect(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - CodedOutputStream cout = CodedOutputStream.newInstance(bboutDirect); - timestamp.writeTo(cout); - blackhole.consume(bbout); - } - } - - @Benchmark - public void writeProtoCOutputStream(Blackhole blackhole) throws IOException { - for (int i = 0; i < 1000; i++) { - bout.reset(); - timestamp.writeTo(bout); - blackhole.consume(bout.toByteArray()); - } - } -} diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/VarIntBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/VarIntBench.java index abb37d4a..a745b93d 100644 --- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/VarIntBench.java +++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/VarIntBench.java @@ -14,7 +14,7 @@ 
@SuppressWarnings("unused") @State(Scope.Benchmark) @Fork(1) -@Warmup(iterations = 4, time = 2) +@Warmup(iterations = 2, time = 2) @Measurement(iterations = 5, time = 2) @OutputTimeUnit(TimeUnit.NANOSECONDS) @BenchmarkMode(Mode.AverageTime) From b06ad0b5d3de18712a649817a106042d473f2d39 Mon Sep 17 00:00:00 2001 From: jasperpotts Date: Mon, 27 Mar 2023 15:50:53 -0700 Subject: [PATCH 5/5] Refined benchmarks Signed-off-by: jasperpotts --- .../pbj/runtime/io/OffHeapDataBuffer.java | 4 +- .../pbj/intergration/jmh/VarIntBench.java | 50 ++++++++++++++----- 2 files changed, 39 insertions(+), 15 deletions(-) diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/OffHeapDataBuffer.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/OffHeapDataBuffer.java index 602d3159..cdfcc666 100644 --- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/OffHeapDataBuffer.java +++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/OffHeapDataBuffer.java @@ -440,7 +440,7 @@ public int readVarInt(boolean zigZag) throws IOException { if ((x = UNSAFE.getByte(startOfBytesPointer + tempPos++)) >= 0) { position ++; return zigZag ? (x >>> 1) ^ -(x & 1) : x; - } else if ((limit - position) < 10) { + } else if ((limit - position) < 10) { // Why is this if needed, it is much faster without? return (int)readVarLongSlow(zigZag); } else if ((x ^= (UNSAFE.getByte(startOfBytesPointer + tempPos++) << 7)) < 0) { x ^= (~0 << 7); @@ -477,7 +477,7 @@ public long readVarLong(boolean zigZag) throws IOException { if ((y = UNSAFE.getByte(startOfBytesPointer + tempPos++)) >= 0) { position ++; return zigZag ? (y >>> 1) ^ -(y & 1) : y; - } else if (buffer.remaining() < 10) { + } else if (buffer.remaining() < 10) { // Why is this if needed, it is much faster without? 
return readVarLongSlow(zigZag); } else if ((y ^= (UNSAFE.getByte(startOfBytesPointer + tempPos++) << 7)) < 0) { x = y ^ (~0 << 7); diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/VarIntBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/VarIntBench.java index a745b93d..7f92fa26 100644 --- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/VarIntBench.java +++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/VarIntBench.java @@ -3,6 +3,7 @@ import com.google.protobuf.CodedInputStream; import com.google.protobuf.CodedOutputStream; import com.hedera.pbj.runtime.MalformedProtobufException; +import com.hedera.pbj.runtime.io.DataBuffer; import org.openjdk.jmh.annotations.*; import org.openjdk.jmh.infra.Blackhole; @@ -22,6 +23,8 @@ public class VarIntBench { final ByteBuffer buffer = ByteBuffer.allocate(256*1024); final ByteBuffer bufferDirect = ByteBuffer.allocateDirect(256*1024); + final DataBuffer dataBuffer = DataBuffer.wrap(buffer); + final DataBuffer dataBufferDirect = DataBuffer.wrap(bufferDirect); public VarIntBench() { try { @@ -48,35 +51,56 @@ public VarIntBench() { } } @Benchmark + public void dataBuffer(Blackhole blackhole) throws IOException { + dataBuffer.resetPosition(); +// for (int i = 0; i < 400; i++) { + blackhole.consume(dataBuffer.readVarInt(false)); +// } + } + @Benchmark + public void dataBufferDirect(Blackhole blackhole) throws IOException { + dataBufferDirect.resetPosition(); +// for (int i = 0; i < 400; i++) { + blackhole.consume(dataBufferDirect.readVarInt(false)); +// } + } + @Benchmark public void richard(Blackhole blackhole) throws MalformedProtobufException { buffer.clear(); - for (int i = 0; i < 400; i++) { +// for (int i = 0; i < 400; i++) { blackhole.consume(readVarintRichard(buffer)); - } +// } } @Benchmark public void google(Blackhole blackhole) throws IOException { buffer.clear(); final CodedInputStream codedInputStream = 
CodedInputStream.newInstance(buffer); - for (int i = 0; i < 400; i++) { +// for (int i = 0; i < 400; i++) { blackhole.consume(codedInputStream.readRawVarint64()); - } +// } + } + @Benchmark + public void googleDirect(Blackhole blackhole) throws IOException { + bufferDirect.clear(); + final CodedInputStream codedInputStream = CodedInputStream.newInstance(bufferDirect); +// for (int i = 0; i < 400; i++) { + blackhole.consume(codedInputStream.readRawVarint64()); +// } } @Benchmark public void googleSlowPath(Blackhole blackhole) throws MalformedProtobufException { buffer.clear(); - for (int i = 0; i < 400; i++) { - blackhole.consume(readRawVarint64SlowPath(buffer)); - } +// for (int i = 0; i < 400; i++) { + blackhole.consume(readRawVarint64SlowPath(buffer)); +// } } @Benchmark - public void googleOffHeap(Blackhole blackhole) throws IOException { - buffer.clear(); - final CodedInputStream codedInputStream = CodedInputStream.newInstance(bufferDirect); - for (int i = 0; i < 400; i++) { - blackhole.consume(codedInputStream.readRawVarint64()); - } + public void googleSlowPathDirect(Blackhole blackhole) throws MalformedProtobufException { + bufferDirect.clear(); +// for (int i = 0; i < 400; i++) { + blackhole.consume(readRawVarint64SlowPath(bufferDirect)); +// } } private static long readRawVarint64SlowPath(ByteBuffer buf) throws MalformedProtobufException {