diff --git a/data/arrow.go b/data/arrow.go index e7d678cab..0062bb837 100644 --- a/data/arrow.go +++ b/data/arrow.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "io" + "sync" "time" "github.com/apache/arrow-go/v18/arrow" @@ -22,6 +23,19 @@ const metadataKeyLabels = "labels" // labels serialized as JSON const metadataKeyTSType = "tstype" // typescript type const metadataKeyRefID = "refId" // added to the table metadata +// Object pools for frequently allocated objects to reduce GC pressure +var ( + // arrowAllocatorPool pools Arrow memory allocators to avoid repeated allocation overhead + arrowAllocatorPool = sync.Pool{ + New: func() interface{} { return memory.NewGoAllocator() }, + } + + // fileBufferPool pools filebuffer.Buffer instances for Arrow marshaling/unmarshaling + fileBufferPool = sync.Pool{ + New: func() interface{} { return filebuffer.New(nil) }, + } +) + // MarshalArrow converts the Frame to an arrow table and returns a byte // representation of that table. // All fields of a Frame must be of the same length or an error is returned. @@ -35,10 +49,10 @@ func (f *Frame) MarshalArrow() ([]byte, error) { tableReader := array.NewTableReader(table, -1) defer tableReader.Release() - // Arrow tables with the Go API are written to files, so we create a fake - // file buffer that the FileWriter can write to. In the future, and with - // streaming, I think will likely be using the Arrow message type some how. - fb := filebuffer.New(nil) + // Get filebuffer from pool to reduce allocations + fb := fileBufferPool.Get().(*filebuffer.Buffer) + fb.Buff.Reset() // Reset buffer for reuse + defer fileBufferPool.Put(fb) fw, err := ipc.NewFileWriter(fb, ipc.WithSchema(tableReader.Schema())) if err != nil { @@ -59,7 +73,10 @@ func (f *Frame) MarshalArrow() ([]byte, error) { return nil, err } - return fb.Buff.Bytes(), nil + // Copy bytes before returning buffer to pool + result := make([]byte, fb.Buff.Len()) + copy(result, fb.Buff.Bytes()) + return result, nil } // FrameToArrowTable creates a new arrow.Table from a data frame @@ -135,86 +152,80 @@ func buildArrowFields(f *Frame) ([]arrow.Field, error) { // buildArrowColumns builds Arrow columns from a Frame. 
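Note on the pooling change above: because the pooled `filebuffer.Buffer` is returned to the pool in a `defer`, `MarshalArrow` has to copy the serialized bytes into a fresh slice before returning — once the buffer is back in the pool, another caller may `Reset` it and overwrite its backing array. A minimal sketch of the same get/reset/copy/put pattern, using only the standard library's `bytes.Buffer` rather than `filebuffer` (names here are illustrative, not SDK code):

```go
package main

import (
	"bytes"
	"fmt"
	"sync"
)

// bufPool reuses bytes.Buffer values across calls to cut per-call allocations.
var bufPool = sync.Pool{
	New: func() interface{} { return new(bytes.Buffer) },
}

// encode writes a payload through a pooled buffer and returns an owned copy.
// The copy is required: after Put, another goroutine may Reset the buffer and
// overwrite its backing array.
func encode(payload []byte) []byte {
	buf := bufPool.Get().(*bytes.Buffer)
	buf.Reset() // clear whatever the previous user left behind
	defer bufPool.Put(buf)

	buf.Write(payload) // stand-in for the IPC file writer producing the Arrow bytes

	out := make([]byte, buf.Len())
	copy(out, buf.Bytes())
	return out
}

func main() {
	fmt.Println(string(encode([]byte("arrow bytes"))))
}
```

`sync.Pool` may discard pooled objects at any GC, so the pool is purely an optimization; correctness never depends on getting a previously used buffer back.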
// nolint:gocyclo func buildArrowColumns(f *Frame, arrowFields []arrow.Field) ([]arrow.Column, error) { - pool := memory.NewGoAllocator() + // Get allocator from pool to reduce allocation overhead + pool := arrowAllocatorPool.Get().(memory.Allocator) + defer arrowAllocatorPool.Put(pool) + columns := make([]arrow.Column, len(f.Fields)) for fieldIdx, field := range f.Fields { switch v := field.vector.(type) { - case *int8Vector: + // Time, JSON, and Enum types + case *genericVector[time.Time]: + columns[fieldIdx] = *buildTimeColumnGeneric(pool, arrowFields[fieldIdx], v) + case *nullableGenericVector[time.Time]: + columns[fieldIdx] = *buildNullableTimeColumnGeneric(pool, arrowFields[fieldIdx], v) + + case *genericVector[json.RawMessage]: + columns[fieldIdx] = *buildJSONColumnGeneric(pool, arrowFields[fieldIdx], v) + case *nullableGenericVector[json.RawMessage]: + columns[fieldIdx] = *buildNullableJSONColumnGeneric(pool, arrowFields[fieldIdx], v) + + case *genericVector[EnumItemIndex]: + columns[fieldIdx] = *buildEnumColumnGeneric(pool, arrowFields[fieldIdx], v) + case *nullableGenericVector[EnumItemIndex]: + columns[fieldIdx] = *buildNullableEnumColumnGeneric(pool, arrowFields[fieldIdx], v) + + // Generic vectors - use directly without conversion + case *genericVector[int8]: columns[fieldIdx] = *buildInt8Column(pool, arrowFields[fieldIdx], v) - case *nullableInt8Vector: + case *nullableGenericVector[int8]: columns[fieldIdx] = *buildNullableInt8Column(pool, arrowFields[fieldIdx], v) - - case *int16Vector: + case *genericVector[int16]: columns[fieldIdx] = *buildInt16Column(pool, arrowFields[fieldIdx], v) - case *nullableInt16Vector: + case *nullableGenericVector[int16]: columns[fieldIdx] = *buildNullableInt16Column(pool, arrowFields[fieldIdx], v) - - case *int32Vector: + case *genericVector[int32]: columns[fieldIdx] = *buildInt32Column(pool, arrowFields[fieldIdx], v) - case *nullableInt32Vector: + case *nullableGenericVector[int32]: columns[fieldIdx] = *buildNullableInt32Column(pool, arrowFields[fieldIdx], v) - - case *int64Vector: + case *genericVector[int64]: columns[fieldIdx] = *buildInt64Column(pool, arrowFields[fieldIdx], v) - case *nullableInt64Vector: + case *nullableGenericVector[int64]: columns[fieldIdx] = *buildNullableInt64Column(pool, arrowFields[fieldIdx], v) - - case *uint8Vector: + case *genericVector[uint8]: columns[fieldIdx] = *buildUInt8Column(pool, arrowFields[fieldIdx], v) - case *nullableUint8Vector: + case *nullableGenericVector[uint8]: columns[fieldIdx] = *buildNullableUInt8Column(pool, arrowFields[fieldIdx], v) - - case *uint16Vector: + case *genericVector[uint16]: columns[fieldIdx] = *buildUInt16Column(pool, arrowFields[fieldIdx], v) - case *nullableUint16Vector: + case *nullableGenericVector[uint16]: columns[fieldIdx] = *buildNullableUInt16Column(pool, arrowFields[fieldIdx], v) - - case *uint32Vector: + case *genericVector[uint32]: columns[fieldIdx] = *buildUInt32Column(pool, arrowFields[fieldIdx], v) - case *nullableUint32Vector: + case *nullableGenericVector[uint32]: columns[fieldIdx] = *buildNullableUInt32Column(pool, arrowFields[fieldIdx], v) - - case *uint64Vector: + case *genericVector[uint64]: columns[fieldIdx] = *buildUInt64Column(pool, arrowFields[fieldIdx], v) - case *nullableUint64Vector: + case *nullableGenericVector[uint64]: columns[fieldIdx] = *buildNullableUInt64Column(pool, arrowFields[fieldIdx], v) - - case *stringVector: - columns[fieldIdx] = *buildStringColumn(pool, arrowFields[fieldIdx], v) - case *nullableStringVector: - columns[fieldIdx] = 
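The rewritten switch above collapses the old per-type vectors (`int8Vector`, `nullableInt8Vector`, …) into two generic types. Their definitions are not part of this diff; the sketch below only assumes the shape the column builders rely on — a slice-backed vector with `Append` plus the `Slice()` accessor:

```go
package main

import "fmt"

// genericVector and nullableGenericVector here are illustrative only; the
// SDK's real definitions live outside this diff and may differ in detail.
type genericVector[T any] struct{ data []T }

func newGenericVector[T any](capacity int) *genericVector[T] {
	return &genericVector[T]{data: make([]T, 0, capacity)}
}

func (v *genericVector[T]) Append(val T) { v.data = append(v.data, val) }
func (v *genericVector[T]) Slice() []T   { return v.data }

// nullableGenericVector stores pointers so a nil element can represent null.
type nullableGenericVector[T any] struct{ data []*T }

func (v *nullableGenericVector[T]) Append(val *T) { v.data = append(v.data, val) }
func (v *nullableGenericVector[T]) Slice() []*T   { return v.data }

func main() {
	ints := newGenericVector[int64](2)
	ints.Append(1)
	ints.Append(2)

	var names nullableGenericVector[string]
	s := "a"
	names.Append(&s)
	names.Append(nil)

	fmt.Println(ints.Slice(), len(names.Slice())) // [1 2] 2
}
```

Even with the storage collapsed to two generic types, a Go type switch still has to name every instantiation (`*genericVector[int8]`, `*genericVector[int16]`, …) explicitly, which is why the case list above remains long.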
*buildNullableStringColumn(pool, arrowFields[fieldIdx], v) - - case *float32Vector: + case *genericVector[float32]: columns[fieldIdx] = *buildFloat32Column(pool, arrowFields[fieldIdx], v) - case *nullableFloat32Vector: + case *nullableGenericVector[float32]: columns[fieldIdx] = *buildNullableFloat32Column(pool, arrowFields[fieldIdx], v) - - case *float64Vector: + case *genericVector[float64]: columns[fieldIdx] = *buildFloat64Column(pool, arrowFields[fieldIdx], v) - case *nullableFloat64Vector: + case *nullableGenericVector[float64]: columns[fieldIdx] = *buildNullableFloat64Column(pool, arrowFields[fieldIdx], v) - - case *boolVector: + case *genericVector[string]: + columns[fieldIdx] = *buildStringColumn(pool, arrowFields[fieldIdx], v) + case *nullableGenericVector[string]: + columns[fieldIdx] = *buildNullableStringColumn(pool, arrowFields[fieldIdx], v) + case *genericVector[bool]: columns[fieldIdx] = *buildBoolColumn(pool, arrowFields[fieldIdx], v) - case *nullableBoolVector: + case *nullableGenericVector[bool]: columns[fieldIdx] = *buildNullableBoolColumn(pool, arrowFields[fieldIdx], v) - case *timeTimeVector: - columns[fieldIdx] = *buildTimeColumn(pool, arrowFields[fieldIdx], v) - case *nullableTimeTimeVector: - columns[fieldIdx] = *buildNullableTimeColumn(pool, arrowFields[fieldIdx], v) - - case *jsonRawMessageVector: - columns[fieldIdx] = *buildJSONColumn(pool, arrowFields[fieldIdx], v) - case *nullableJsonRawMessageVector: - columns[fieldIdx] = *buildNullableJSONColumn(pool, arrowFields[fieldIdx], v) - - case *enumVector: - columns[fieldIdx] = *buildEnumColumn(pool, arrowFields[fieldIdx], v) - case *nullableEnumVector: - columns[fieldIdx] = *buildNullableEnumColumn(pool, arrowFields[fieldIdx], v) - default: return nil, fmt.Errorf("unsupported field vector type for conversion to arrow: %T", v) } @@ -245,78 +256,72 @@ func buildArrowSchema(f *Frame, fs []arrow.Field) (*arrow.Schema, error) { // nolint:gocyclo func fieldToArrow(f *Field) (arrow.DataType, bool, error) { switch f.vector.(type) { - case *stringVector: + // Time, JSON, and Enum types + case *genericVector[time.Time]: + return &arrow.TimestampType{Unit: arrow.Nanosecond}, false, nil + case *nullableGenericVector[time.Time]: + return &arrow.TimestampType{Unit: arrow.Nanosecond}, true, nil + + case *genericVector[json.RawMessage]: + return &arrow.BinaryType{}, false, nil + case *nullableGenericVector[json.RawMessage]: + return &arrow.BinaryType{}, true, nil + + case *genericVector[EnumItemIndex]: + return &arrow.Uint16Type{}, false, nil + case *nullableGenericVector[EnumItemIndex]: + return &arrow.Uint16Type{}, true, nil + + // Generic vectors + case *genericVector[string]: return &arrow.StringType{}, false, nil - case *nullableStringVector: + case *nullableGenericVector[string]: return &arrow.StringType{}, true, nil - - // Ints - case *int8Vector: + case *genericVector[int8]: return &arrow.Int8Type{}, false, nil - case *nullableInt8Vector: + case *nullableGenericVector[int8]: return &arrow.Int8Type{}, true, nil - - case *int16Vector: + case *genericVector[int16]: return &arrow.Int16Type{}, false, nil - case *nullableInt16Vector: + case *nullableGenericVector[int16]: return &arrow.Int16Type{}, true, nil - - case *int32Vector: + case *genericVector[int32]: return &arrow.Int32Type{}, false, nil - case *nullableInt32Vector: + case *nullableGenericVector[int32]: return &arrow.Int32Type{}, true, nil - - case *int64Vector: + case *genericVector[int64]: return &arrow.Int64Type{}, false, nil - case *nullableInt64Vector: + case 
*nullableGenericVector[int64]: return &arrow.Int64Type{}, true, nil - - // Uints - case *uint8Vector: + case *genericVector[uint8]: return &arrow.Uint8Type{}, false, nil - case *nullableUint8Vector: + case *nullableGenericVector[uint8]: return &arrow.Uint8Type{}, true, nil - - case *uint16Vector, *enumVector: + case *genericVector[uint16]: return &arrow.Uint16Type{}, false, nil - case *nullableUint16Vector, *nullableEnumVector: + case *nullableGenericVector[uint16]: return &arrow.Uint16Type{}, true, nil - - case *uint32Vector: + case *genericVector[uint32]: return &arrow.Uint32Type{}, false, nil - case *nullableUint32Vector: + case *nullableGenericVector[uint32]: return &arrow.Uint32Type{}, true, nil - - case *uint64Vector: + case *genericVector[uint64]: return &arrow.Uint64Type{}, false, nil - case *nullableUint64Vector: + case *nullableGenericVector[uint64]: return &arrow.Uint64Type{}, true, nil - - case *float32Vector: + case *genericVector[float32]: return &arrow.Float32Type{}, false, nil - case *nullableFloat32Vector: + case *nullableGenericVector[float32]: return &arrow.Float32Type{}, true, nil - - case *float64Vector: + case *genericVector[float64]: return &arrow.Float64Type{}, false, nil - case *nullableFloat64Vector: + case *nullableGenericVector[float64]: return &arrow.Float64Type{}, true, nil - - case *boolVector: + case *genericVector[bool]: return &arrow.BooleanType{}, false, nil - case *nullableBoolVector: + case *nullableGenericVector[bool]: return &arrow.BooleanType{}, true, nil - case *timeTimeVector: - return &arrow.TimestampType{Unit: arrow.Nanosecond}, false, nil - case *nullableTimeTimeVector: - return &arrow.TimestampType{Unit: arrow.Nanosecond}, true, nil - - case *jsonRawMessageVector: - return &arrow.BinaryType{}, false, nil - case *nullableJsonRawMessageVector: - return &arrow.BinaryType{}, true, nil - default: return nil, false, fmt.Errorf("unsupported type for conversion to arrow: %T", f.vector) } @@ -330,7 +335,7 @@ func getMDKey(key string, metaData arrow.Metadata) (string, bool) { return metaData.Values()[idx], true } -func initializeFrameFields(schema *arrow.Schema, frame *Frame) ([]bool, error) { +func initializeFrameFields(schema *arrow.Schema, frame *Frame, capacity int) ([]bool, error) { nullable := make([]bool, len(schema.Fields())) for idx, field := range schema.Fields() { sdkField := Field{ @@ -351,7 +356,7 @@ func initializeFrameFields(schema *arrow.Schema, frame *Frame) ([]bool, error) { } } nullable[idx] = field.Nullable - if err := initializeFrameField(field, idx, nullable, &sdkField); err != nil { + if err := initializeFrameField(field, idx, nullable, &sdkField, capacity); err != nil { return nil, err } @@ -361,107 +366,107 @@ func initializeFrameFields(schema *arrow.Schema, frame *Frame) ([]bool, error) { } // nolint:gocyclo -func initializeFrameField(field arrow.Field, idx int, nullable []bool, sdkField *Field) error { +func initializeFrameField(field arrow.Field, idx int, nullable []bool, sdkField *Field, capacity int) error { switch field.Type.ID() { case arrow.STRING: if nullable[idx] { - sdkField.vector = newNullableStringVector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[string](capacity) break } - sdkField.vector = newStringVector(0) + sdkField.vector = newGenericVectorWithCapacity[string](capacity) case arrow.STRING_VIEW: if nullable[idx] { - sdkField.vector = newNullableStringVector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[string](capacity) break } - sdkField.vector = newStringVector(0) + 
sdkField.vector = newGenericVectorWithCapacity[string](capacity) case arrow.INT8: if nullable[idx] { - sdkField.vector = newNullableInt8Vector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[int8](capacity) break } - sdkField.vector = newInt8Vector(0) + sdkField.vector = newGenericVectorWithCapacity[int8](capacity) case arrow.INT16: if nullable[idx] { - sdkField.vector = newNullableInt16Vector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[int16](capacity) break } - sdkField.vector = newInt16Vector(0) + sdkField.vector = newGenericVectorWithCapacity[int16](capacity) case arrow.INT32: if nullable[idx] { - sdkField.vector = newNullableInt32Vector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[int32](capacity) break } - sdkField.vector = newInt32Vector(0) + sdkField.vector = newGenericVectorWithCapacity[int32](capacity) case arrow.INT64: if nullable[idx] { - sdkField.vector = newNullableInt64Vector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[int64](capacity) break } - sdkField.vector = newInt64Vector(0) + sdkField.vector = newGenericVectorWithCapacity[int64](capacity) case arrow.UINT8: if nullable[idx] { - sdkField.vector = newNullableUint8Vector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[uint8](capacity) break } - sdkField.vector = newUint8Vector(0) + sdkField.vector = newGenericVectorWithCapacity[uint8](capacity) case arrow.UINT16: tstype, ok := getMDKey(metadataKeyTSType, field.Metadata) if ok && tstype == simpleTypeEnum { if nullable[idx] { - sdkField.vector = newNullableEnumVector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[EnumItemIndex](capacity) } else { - sdkField.vector = newEnumVector(0) + sdkField.vector = newGenericVectorWithCapacity[EnumItemIndex](capacity) } break } if nullable[idx] { - sdkField.vector = newNullableUint16Vector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[uint16](capacity) break } - sdkField.vector = newUint16Vector(0) + sdkField.vector = newGenericVectorWithCapacity[uint16](capacity) case arrow.UINT32: if nullable[idx] { - sdkField.vector = newNullableUint32Vector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[uint32](capacity) break } - sdkField.vector = newUint32Vector(0) + sdkField.vector = newGenericVectorWithCapacity[uint32](capacity) case arrow.UINT64: if nullable[idx] { - sdkField.vector = newNullableUint64Vector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[uint64](capacity) break } - sdkField.vector = newUint64Vector(0) + sdkField.vector = newGenericVectorWithCapacity[uint64](capacity) case arrow.FLOAT32: if nullable[idx] { - sdkField.vector = newNullableFloat32Vector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[float32](capacity) break } - sdkField.vector = newFloat32Vector(0) + sdkField.vector = newGenericVectorWithCapacity[float32](capacity) case arrow.FLOAT64: if nullable[idx] { - sdkField.vector = newNullableFloat64Vector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[float64](capacity) break } - sdkField.vector = newFloat64Vector(0) + sdkField.vector = newGenericVectorWithCapacity[float64](capacity) case arrow.BOOL: if nullable[idx] { - sdkField.vector = newNullableBoolVector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[bool](capacity) break } - sdkField.vector = newBoolVector(0) + sdkField.vector = newGenericVectorWithCapacity[bool](capacity) case arrow.TIMESTAMP: if nullable[idx] { - sdkField.vector = newNullableTimeTimeVector(0) + sdkField.vector = 
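Each branch above now threads the `capacity` hint into the vector constructor, so the appends performed later in `parseColumn` never trigger slice growth. The `...WithCapacity` constructors are defined elsewhere in the PR; assuming they do roughly `make([]T, 0, capacity)`, the effect of pre-sizing looks like this:

```go
package main

import "fmt"

// withCapacity pre-sizes the backing array; append stays within it until
// `rows` elements have been added, so no reallocation or copying happens.
func withCapacity(rows int) []float64 {
	v := make([]float64, 0, rows)
	for i := 0; i < rows; i++ {
		v = append(v, float64(i))
	}
	return v
}

// withoutCapacity starts at zero capacity; append reallocates and copies the
// backing array repeatedly as the slice grows.
func withoutCapacity(rows int) []float64 {
	var v []float64
	for i := 0; i < rows; i++ {
		v = append(v, float64(i))
	}
	return v
}

func main() {
	a := withCapacity(100000)
	b := withoutCapacity(100000)
	fmt.Println(len(a), cap(a)) // 100000 100000: one allocation
	fmt.Println(len(b), cap(b)) // length 100000, capacity grown in steps to >= 100000
}
```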
newNullableGenericVectorWithCapacity[time.Time](capacity) break } - sdkField.vector = newTimeTimeVector(0) + sdkField.vector = newGenericVectorWithCapacity[time.Time](capacity) case arrow.BINARY: if nullable[idx] { - sdkField.vector = newNullableJsonRawMessageVector(0) + sdkField.vector = newNullableGenericVectorWithCapacity[json.RawMessage](capacity) break } - sdkField.vector = newJsonRawMessageVector(0) + sdkField.vector = newGenericVectorWithCapacity[json.RawMessage](capacity) default: return fmt.Errorf("unsupported conversion from arrow to sdk type for arrow type %v", field.Type.ID().String()) } @@ -501,197 +506,339 @@ func parseColumn(col arrow.Array, i int, nullable []bool, frame *Frame) error { switch col.DataType().ID() { case arrow.STRING: v := array.NewStringData(col.Data()) + // Note: True zero-copy isn't possible for strings in Go due to immutability + // Arrow's Value() method already optimizes string conversion internally for rIdx := 0; rIdx < col.Len(); rIdx++ { if nullable[i] { if v.IsNull(rIdx) { var ns *string - frame.Fields[i].vector.Append(ns) + appendTypedToVector(frame.Fields[i].vector, ns) continue } rv := v.Value(rIdx) - frame.Fields[i].vector.Append(&rv) + appendTypedToVector(frame.Fields[i].vector, &rv) continue } - frame.Fields[i].vector.Append(v.Value(rIdx)) + appendTypedToVector(frame.Fields[i].vector, v.Value(rIdx)) } case arrow.STRING_VIEW: v := array.NewStringViewData(col.Data()) + // STRING_VIEW is already optimized in Arrow for avoiding copies + // Our vectors still need to materialize strings (Go immutability requirement) for rIdx := 0; rIdx < col.Len(); rIdx++ { if nullable[i] { if v.IsNull(rIdx) { var ns *string - frame.Fields[i].vector.Append(ns) + appendTypedToVector(frame.Fields[i].vector, ns) continue } rv := v.Value(rIdx) - frame.Fields[i].vector.Append(&rv) + appendTypedToVector(frame.Fields[i].vector, &rv) continue } - frame.Fields[i].vector.Append(v.Value(rIdx)) + appendTypedToVector(frame.Fields[i].vector, v.Value(rIdx)) } case arrow.INT8: v := array.NewInt8Data(col.Data()) - for rIdx := 0; rIdx < col.Len(); rIdx++ { - if nullable[i] { - if v.IsNull(rIdx) { - var ns *int8 - frame.Fields[i].vector.Append(ns) - continue + values := v.Int8Values() + if nullable[i] { + if nvec, ok := frame.Fields[i].vector.(*nullableGenericVector[int8]); ok { + nvec.AppendManyWithNulls(values, v.IsNull) + } else { + for rIdx := range values { + if v.IsNull(rIdx) { + var ns *int8 + appendTypedToVector(frame.Fields[i].vector, ns) + continue + } + rv := values[rIdx] + appendTypedToVector(frame.Fields[i].vector, &rv) + } + } + } else { + if gvec, ok := frame.Fields[i].vector.(*genericVector[int8]); ok { + gvec.AppendManyTyped(values) + } else { + for _, val := range values { + appendTypedToVector(frame.Fields[i].vector, val) } - rv := v.Value(rIdx) - frame.Fields[i].vector.Append(&rv) - continue } - frame.Fields[i].vector.Append(v.Value(rIdx)) } case arrow.INT16: v := array.NewInt16Data(col.Data()) - for rIdx := 0; rIdx < col.Len(); rIdx++ { - if nullable[i] { - if v.IsNull(rIdx) { - var ns *int16 - frame.Fields[i].vector.Append(ns) - continue + values := v.Int16Values() + if nullable[i] { + if nvec, ok := frame.Fields[i].vector.(*nullableGenericVector[int16]); ok { + nvec.AppendManyWithNulls(values, v.IsNull) + } else { + for rIdx := range values { + if v.IsNull(rIdx) { + var ns *int16 + appendTypedToVector(frame.Fields[i].vector, ns) + continue + } + rv := values[rIdx] + appendTypedToVector(frame.Fields[i].vector, &rv) + } + } + } else { + if gvec, ok := 
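The `parseColumn` branches above switch from per-row `Append` calls to the typed `*Values()` slice plus one bulk append. `AppendManyTyped` and `AppendManyWithNulls` are not shown in this diff; a plausible shape, assuming the nullable vector stores `*T` elements and `isNull` is the `v.IsNull` method value passed in, is:

```go
package main

import "fmt"

// nullableVec stands in for nullableGenericVector[T]; the SDK's real type is
// defined elsewhere in this PR and may differ.
type nullableVec[T any] struct{ data []*T }

// AppendManyWithNulls bulk-appends a typed slice, consulting isNull per row.
// Non-null values are copied to a local before their address is taken, so the
// stored pointers never alias the Arrow buffer backing `values`.
func (v *nullableVec[T]) AppendManyWithNulls(values []T, isNull func(int) bool) {
	for i := range values {
		if isNull(i) {
			v.data = append(v.data, nil)
			continue
		}
		val := values[i]
		v.data = append(v.data, &val)
	}
}

// appendManyTyped is the non-nullable counterpart: one append of the whole slice.
func appendManyTyped[T any](dst []T, values []T) []T {
	return append(dst, values...)
}

func main() {
	var v nullableVec[int64]
	v.AppendManyWithNulls([]int64{10, 20, 30}, func(i int) bool { return i == 1 })
	fmt.Println(v.data[1] == nil, *v.data[2]) // true 30

	fmt.Println(appendManyTyped([]int64{1}, []int64{2, 3})) // [1 2 3]
}
```

Copying `values[i]` into a local before taking its address matters because the slice returned by `Int64Values()` and friends is a view into the Arrow buffer, which may be recycled once the record is released (depending on the allocator).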
frame.Fields[i].vector.(*genericVector[int16]); ok { + gvec.AppendManyTyped(values) + } else { + for _, val := range values { + appendTypedToVector(frame.Fields[i].vector, val) } - rv := v.Value(rIdx) - frame.Fields[i].vector.Append(&rv) - continue } - frame.Fields[i].vector.Append(v.Value(rIdx)) } case arrow.INT32: v := array.NewInt32Data(col.Data()) - for rIdx := 0; rIdx < col.Len(); rIdx++ { - if nullable[i] { - if v.IsNull(rIdx) { - var ns *int32 - frame.Fields[i].vector.Append(ns) - continue + values := v.Int32Values() + if nullable[i] { + if nvec, ok := frame.Fields[i].vector.(*nullableGenericVector[int32]); ok { + nvec.AppendManyWithNulls(values, v.IsNull) + } else { + for rIdx := range values { + if v.IsNull(rIdx) { + var ns *int32 + appendTypedToVector(frame.Fields[i].vector, ns) + continue + } + rv := values[rIdx] + appendTypedToVector(frame.Fields[i].vector, &rv) + } + } + } else { + if gvec, ok := frame.Fields[i].vector.(*genericVector[int32]); ok { + gvec.AppendManyTyped(values) + } else { + for _, val := range values { + appendTypedToVector(frame.Fields[i].vector, val) } - rv := v.Value(rIdx) - frame.Fields[i].vector.Append(&rv) - continue } - frame.Fields[i].vector.Append(v.Value(rIdx)) } case arrow.INT64: v := array.NewInt64Data(col.Data()) - for rIdx := 0; rIdx < col.Len(); rIdx++ { - if nullable[i] { - if v.IsNull(rIdx) { - var ns *int64 - frame.Fields[i].vector.Append(ns) - continue + // Use zero-copy API to get direct slice access + values := v.Int64Values() + if nullable[i] { + // Batch append with null handling + if nvec, ok := frame.Fields[i].vector.(*nullableGenericVector[int64]); ok { + nvec.AppendManyWithNulls(values, v.IsNull) + } else { + // Fallback for unexpected vector type + for rIdx := range values { + if v.IsNull(rIdx) { + var ns *int64 + appendTypedToVector(frame.Fields[i].vector, ns) + continue + } + rv := values[rIdx] + appendTypedToVector(frame.Fields[i].vector, &rv) + } + } + } else { + // Non-nullable: direct batch append + if gvec, ok := frame.Fields[i].vector.(*genericVector[int64]); ok { + gvec.AppendManyTyped(values) + } else { + // Fallback + for _, val := range values { + appendTypedToVector(frame.Fields[i].vector, val) } - rv := v.Value(rIdx) - frame.Fields[i].vector.Append(&rv) - continue } - frame.Fields[i].vector.Append(v.Value(rIdx)) } case arrow.UINT8: v := array.NewUint8Data(col.Data()) - for rIdx := 0; rIdx < col.Len(); rIdx++ { - if nullable[i] { - if v.IsNull(rIdx) { - var ns *uint8 - frame.Fields[i].vector.Append(ns) - continue + values := v.Uint8Values() + if nullable[i] { + if nvec, ok := frame.Fields[i].vector.(*nullableGenericVector[uint8]); ok { + nvec.AppendManyWithNulls(values, v.IsNull) + } else { + for rIdx := range values { + if v.IsNull(rIdx) { + var ns *uint8 + appendTypedToVector(frame.Fields[i].vector, ns) + continue + } + rv := values[rIdx] + appendTypedToVector(frame.Fields[i].vector, &rv) + } + } + } else { + if gvec, ok := frame.Fields[i].vector.(*genericVector[uint8]); ok { + gvec.AppendManyTyped(values) + } else { + for _, val := range values { + appendTypedToVector(frame.Fields[i].vector, val) } - rv := v.Value(rIdx) - frame.Fields[i].vector.Append(&rv) - continue } - frame.Fields[i].vector.Append(v.Value(rIdx)) } case arrow.UINT32: v := array.NewUint32Data(col.Data()) - for rIdx := 0; rIdx < col.Len(); rIdx++ { - if nullable[i] { - if v.IsNull(rIdx) { - var ns *uint32 - frame.Fields[i].vector.Append(ns) - continue + values := v.Uint32Values() + if nullable[i] { + if nvec, ok := 
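Every branch also keeps a fallback loop behind the type assertion: when the field's vector is the expected generic type the bulk path runs, otherwise the code degrades to element-wise appends through the vector interface. The shape of that pattern, reduced to a toy example with illustrative names:

```go
package main

import "fmt"

// appender is the slow, element-wise interface path.
type appender interface{ Append(any) }

// fastInts offers an additional typed bulk method on its concrete type.
type fastInts struct{ data []int64 }

func (f *fastInts) Append(v any)              { f.data = append(f.data, v.(int64)) }
func (f *fastInts) AppendManyTyped(v []int64) { f.data = append(f.data, v...) }

// copyValues prefers the typed bulk path when the concrete type matches and
// falls back to element-wise appends otherwise — the same shape as the
// gvec/nvec "ok" branches in parseColumn.
func copyValues(dst appender, values []int64) {
	if fast, ok := dst.(*fastInts); ok {
		fast.AppendManyTyped(values)
		return
	}
	for _, v := range values {
		dst.Append(v)
	}
}

func main() {
	f := &fastInts{}
	copyValues(f, []int64{1, 2, 3})
	fmt.Println(f.data) // [1 2 3]
}
```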
frame.Fields[i].vector.(*nullableGenericVector[uint32]); ok { + nvec.AppendManyWithNulls(values, v.IsNull) + } else { + for rIdx := range values { + if v.IsNull(rIdx) { + var ns *uint32 + appendTypedToVector(frame.Fields[i].vector, ns) + continue + } + rv := values[rIdx] + appendTypedToVector(frame.Fields[i].vector, &rv) + } + } + } else { + if gvec, ok := frame.Fields[i].vector.(*genericVector[uint32]); ok { + gvec.AppendManyTyped(values) + } else { + for _, val := range values { + appendTypedToVector(frame.Fields[i].vector, val) } - rv := v.Value(rIdx) - frame.Fields[i].vector.Append(&rv) - continue } - frame.Fields[i].vector.Append(v.Value(rIdx)) } case arrow.UINT64: v := array.NewUint64Data(col.Data()) - for rIdx := 0; rIdx < col.Len(); rIdx++ { - if nullable[i] { - if v.IsNull(rIdx) { - var ns *uint64 - frame.Fields[i].vector.Append(ns) - continue + values := v.Uint64Values() + if nullable[i] { + if nvec, ok := frame.Fields[i].vector.(*nullableGenericVector[uint64]); ok { + nvec.AppendManyWithNulls(values, v.IsNull) + } else { + for rIdx := range values { + if v.IsNull(rIdx) { + var ns *uint64 + appendTypedToVector(frame.Fields[i].vector, ns) + continue + } + rv := values[rIdx] + appendTypedToVector(frame.Fields[i].vector, &rv) + } + } + } else { + if gvec, ok := frame.Fields[i].vector.(*genericVector[uint64]); ok { + gvec.AppendManyTyped(values) + } else { + for _, val := range values { + appendTypedToVector(frame.Fields[i].vector, val) } - rv := v.Value(rIdx) - frame.Fields[i].vector.Append(&rv) - continue } - frame.Fields[i].vector.Append(v.Value(rIdx)) } case arrow.UINT16: v := array.NewUint16Data(col.Data()) - for rIdx := 0; rIdx < col.Len(); rIdx++ { - if frame.Fields[i].Type().NullableType() == FieldTypeNullableEnum { - if nullable[i] { - if v.IsNull(rIdx) { - var ns *EnumItemIndex - frame.Fields[i].vector.Append(ns) - continue + values := v.Uint16Values() + if frame.Fields[i].Type().NullableType() == FieldTypeNullableEnum { + // Handle Enum type + if nullable[i] { + if nvec, ok := frame.Fields[i].vector.(*nullableGenericVector[EnumItemIndex]); ok { + // Convert []uint16 to []EnumItemIndex efficiently + enumValues := make([]EnumItemIndex, len(values)) + for idx, val := range values { + enumValues[idx] = EnumItemIndex(val) + } + nvec.AppendManyWithNulls(enumValues, v.IsNull) + } else { + for rIdx := range values { + if v.IsNull(rIdx) { + var ns *EnumItemIndex + appendTypedToVector(frame.Fields[i].vector, ns) + continue + } + rv := EnumItemIndex(values[rIdx]) + appendTypedToVector(frame.Fields[i].vector, &rv) } - rv := EnumItemIndex(v.Value(rIdx)) - frame.Fields[i].vector.Append(&rv) - continue } - frame.Fields[i].vector.Append(EnumItemIndex(v.Value(rIdx))) } else { - if nullable[i] { - if v.IsNull(rIdx) { - var ns *uint16 - frame.Fields[i].vector.Append(ns) - continue + if gvec, ok := frame.Fields[i].vector.(*genericVector[EnumItemIndex]); ok { + enumValues := make([]EnumItemIndex, len(values)) + for idx, val := range values { + enumValues[idx] = EnumItemIndex(val) + } + gvec.AppendManyTyped(enumValues) + } else { + for _, val := range values { + appendTypedToVector(frame.Fields[i].vector, EnumItemIndex(val)) + } + } + } + } else { + // Handle regular uint16 + if nullable[i] { + if nvec, ok := frame.Fields[i].vector.(*nullableGenericVector[uint16]); ok { + nvec.AppendManyWithNulls(values, v.IsNull) + } else { + for rIdx := range values { + if v.IsNull(rIdx) { + var ns *uint16 + appendTypedToVector(frame.Fields[i].vector, ns) + continue + } + rv := values[rIdx] + 
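`EnumItemIndex` is a named type whose underlying type is `uint16` (it maps onto Arrow's `Uint16` arrays above), and Go does not allow converting a `[]uint16` to a `[]EnumItemIndex` in a single conversion, so the element-wise copy in the enum branch is the idiomatic way to re-type the slice without `unsafe`:

```go
package main

import "fmt"

// EnumItemIndex mirrors the SDK's enum index type (underlying type uint16).
type EnumItemIndex uint16

// toEnumIndexes re-types a []uint16 into []EnumItemIndex. The slice types are
// not convertible even though their element types are, so a copy loop is
// required.
func toEnumIndexes(values []uint16) []EnumItemIndex {
	out := make([]EnumItemIndex, len(values))
	for i, v := range values {
		out[i] = EnumItemIndex(v)
	}
	return out
}

func main() {
	fmt.Println(toEnumIndexes([]uint16{0, 1, 2})) // [0 1 2]
}
```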
appendTypedToVector(frame.Fields[i].vector, &rv) + } + } + } else { + if gvec, ok := frame.Fields[i].vector.(*genericVector[uint16]); ok { + gvec.AppendManyTyped(values) + } else { + for _, val := range values { + appendTypedToVector(frame.Fields[i].vector, val) } - rv := v.Value(rIdx) - frame.Fields[i].vector.Append(&rv) - continue } - frame.Fields[i].vector.Append(v.Value(rIdx)) } } case arrow.FLOAT32: v := array.NewFloat32Data(col.Data()) - for vIdx, f := range v.Float32Values() { - if nullable[i] { - if v.IsNull(vIdx) { - var nf *float32 - frame.Fields[i].vector.Append(nf) - continue + values := v.Float32Values() + if nullable[i] { + if nvec, ok := frame.Fields[i].vector.(*nullableGenericVector[float32]); ok { + nvec.AppendManyWithNulls(values, v.IsNull) + } else { + for vIdx, f := range values { + if v.IsNull(vIdx) { + var nf *float32 + appendTypedToVector(frame.Fields[i].vector, nf) + continue + } + vF := f + appendTypedToVector(frame.Fields[i].vector, &vF) + } + } + } else { + if gvec, ok := frame.Fields[i].vector.(*genericVector[float32]); ok { + gvec.AppendManyTyped(values) + } else { + for _, f := range values { + appendTypedToVector(frame.Fields[i].vector, f) } - vF := f - frame.Fields[i].vector.Append(&vF) - continue } - frame.Fields[i].vector.Append(f) } case arrow.FLOAT64: v := array.NewFloat64Data(col.Data()) - for vIdx, f := range v.Float64Values() { - if nullable[i] { - if v.IsNull(vIdx) { - var nf *float64 - frame.Fields[i].vector.Append(nf) - continue + values := v.Float64Values() + if nullable[i] { + if nvec, ok := frame.Fields[i].vector.(*nullableGenericVector[float64]); ok { + nvec.AppendManyWithNulls(values, v.IsNull) + } else { + for vIdx, f := range values { + if v.IsNull(vIdx) { + var nf *float64 + appendTypedToVector(frame.Fields[i].vector, nf) + continue + } + vF := f + appendTypedToVector(frame.Fields[i].vector, &vF) + } + } + } else { + if gvec, ok := frame.Fields[i].vector.(*genericVector[float64]); ok { + gvec.AppendManyTyped(values) + } else { + for _, f := range values { + appendTypedToVector(frame.Fields[i].vector, f) } - vF := f - frame.Fields[i].vector.Append(&vF) - continue } - frame.Fields[i].vector.Append(f) } case arrow.BOOL: v := array.NewBooleanData(col.Data()) @@ -699,29 +846,45 @@ func parseColumn(col arrow.Array, i int, nullable []bool, frame *Frame) error { if nullable[i] { if v.IsNull(sIdx) { var ns *bool - frame.Fields[i].vector.Append(ns) + appendTypedToVector(frame.Fields[i].vector, ns) continue } vB := v.Value(sIdx) - frame.Fields[i].vector.Append(&vB) + appendTypedToVector(frame.Fields[i].vector, &vB) continue } - frame.Fields[i].vector.Append(v.Value(sIdx)) + appendTypedToVector(frame.Fields[i].vector, v.Value(sIdx)) } case arrow.TIMESTAMP: v := array.NewTimestampData(col.Data()) - for vIdx, ts := range v.TimestampValues() { - t := time.Unix(0, int64(ts)) // nanosecond assumption - if nullable[i] { - if v.IsNull(vIdx) { - var nt *time.Time - frame.Fields[i].vector.Append(nt) - continue + timestamps := v.TimestampValues() + // Convert Arrow timestamps to time.Time (nanosecond assumption) + times := make([]time.Time, len(timestamps)) + for idx, ts := range timestamps { + times[idx] = time.Unix(0, int64(ts)) + } + + if nullable[i] { + if nvec, ok := frame.Fields[i].vector.(*nullableGenericVector[time.Time]); ok { + nvec.AppendManyWithNulls(times, v.IsNull) + } else { + for vIdx, t := range times { + if v.IsNull(vIdx) { + var nt *time.Time + appendTypedToVector(frame.Fields[i].vector, nt) + continue + } + 
appendTypedToVector(frame.Fields[i].vector, &t) + } + } + } else { + if gvec, ok := frame.Fields[i].vector.(*genericVector[time.Time]); ok { + gvec.AppendManyTyped(times) + } else { + for _, t := range times { + appendTypedToVector(frame.Fields[i].vector, t) } - frame.Fields[i].vector.Append(&t) - continue } - frame.Fields[i].vector.Append(t) } case arrow.BINARY: v := array.NewBinaryData(col.Data()) @@ -729,15 +892,15 @@ func parseColumn(col arrow.Array, i int, nullable []bool, frame *Frame) error { if nullable[i] { if v.IsNull(sIdx) { var nb *json.RawMessage - frame.Fields[i].vector.Append(nb) + appendTypedToVector(frame.Fields[i].vector, nb) continue } r := json.RawMessage(v.Value(sIdx)) - frame.Fields[i].vector.Append(&r) + appendTypedToVector(frame.Fields[i].vector, &r) continue } r := json.RawMessage(v.Value(sIdx)) - frame.Fields[i].vector.Append(r) + appendTypedToVector(frame.Fields[i].vector, r) } default: return fmt.Errorf("unsupported arrow type %s for conversion", col.DataType().ID()) @@ -767,7 +930,9 @@ func FromArrowRecord(record arrow.Record) (*Frame, error) { return nil, err } - nullable, err := initializeFrameFields(schema, frame) + // Pre-allocate vectors with the known row count for better performance + capacity := int(record.NumRows()) + nullable, err := initializeFrameFields(schema, frame, capacity) if err != nil { return nil, err } @@ -780,7 +945,12 @@ func FromArrowRecord(record arrow.Record) (*Frame, error) { // UnmarshalArrowFrame converts a byte representation of an arrow table to a Frame. func UnmarshalArrowFrame(b []byte) (*Frame, error) { - fB := filebuffer.New(b) + // Get filebuffer from pool to reduce allocations + fB := fileBufferPool.Get().(*filebuffer.Buffer) + fB.Buff.Reset() + fB.Buff.Write(b) + defer fileBufferPool.Put(fB) + fR, err := ipc.NewFileReader(fB) if err != nil { return nil, err @@ -793,7 +963,19 @@ func UnmarshalArrowFrame(b []byte) (*Frame, error) { return nil, err } - nullable, err := initializeFrameFields(schema, frame) + // Calculate total capacity by reading all record batch sizes + // This pre-allocates vectors to avoid repeated reallocations + capacity := 0 + for i := 0; i < fR.NumRecords(); i++ { + rec, err := fR.Record(i) + if err != nil { + return nil, err + } + capacity += int(rec.NumRows()) + rec.Release() + } + + nullable, err := initializeFrameFields(schema, frame, capacity) if err != nil { return nil, err } diff --git a/data/arrow_bench_test.go b/data/arrow_bench_test.go new file mode 100644 index 000000000..918fe16dc --- /dev/null +++ b/data/arrow_bench_test.go @@ -0,0 +1,794 @@ +package data_test + +import ( + "encoding/json" + "math" + "testing" + "time" + + "github.com/apache/arrow-go/v18/arrow/array" + "github.com/grafana/grafana-plugin-sdk-go/data" +) + +// Benchmark helpers to create frames of different sizes and complexities + +func createSmallFrame() *data.Frame { + return data.NewFrame("small", + data.NewField("time", nil, []time.Time{ + time.Unix(1000, 0), time.Unix(2000, 0), time.Unix(3000, 0), + }), + data.NewField("value", nil, []float64{1.0, 2.0, 3.0}), + ) +} + +func createMediumFrame(rows int) *data.Frame { + times := make([]time.Time, rows) + values := make([]float64, rows) + strings := make([]string, rows) + ints := make([]int64, rows) + + for i := 0; i < rows; i++ { + times[i] = time.Unix(int64(i*1000), 0) + values[i] = float64(i) * 1.5 + strings[i] = "value_" + string(rune(i%26+97)) + ints[i] = int64(i) + } + + return data.NewFrame("medium", + data.NewField("time", nil, times), + data.NewField("value", nil, 
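Stepping back to `UnmarshalArrowFrame` above: the added capacity loop walks every record batch once, before any values are parsed, purely to sum row counts — an extra pass over the file in exchange for exact pre-sizing of every vector. The same loop pulled out as a standalone helper (identical arrow-go calls; the package and function name are illustrative):

```go
package example

import (
	"github.com/apache/arrow-go/v18/arrow/ipc"
)

// totalRows mirrors the capacity loop added to UnmarshalArrowFrame: it sums
// the row counts of every record batch in the IPC file so each field vector
// can be allocated once at its final size.
func totalRows(r *ipc.FileReader) (int, error) {
	total := 0
	for i := 0; i < r.NumRecords(); i++ {
		rec, err := r.Record(i)
		if err != nil {
			return 0, err
		}
		total += int(rec.NumRows())
		rec.Release() // only the row count is needed in this pass
	}
	return total, nil
}
```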
values), + data.NewField("string", nil, strings), + data.NewField("int", nil, ints), + ) +} + +func createLargeComplexFrame(rows int) *data.Frame { + times := make([]*time.Time, rows) + float64s := make([]*float64, rows) + float32s := make([]*float32, rows) + strings := make([]*string, rows) + int64s := make([]*int64, rows) + int32s := make([]*int32, rows) + int16s := make([]*int16, rows) + int8s := make([]*int8, rows) + uint64s := make([]*uint64, rows) + uint32s := make([]*uint32, rows) + uint16s := make([]*uint16, rows) + uint8s := make([]*uint8, rows) + bools := make([]*bool, rows) + jsons := make([]*json.RawMessage, rows) + + for i := 0; i < rows; i++ { + if i%10 != 0 { // 10% nulls + t := time.Unix(int64(i*1000), 0) + times[i] = &t + + f64 := float64(i) * 1.5 + float64s[i] = &f64 + + f32 := float32(i) * 0.5 + float32s[i] = &f32 + + s := "value_" + string(rune(i%26+97)) + strings[i] = &s + + i64 := int64(i) + int64s[i] = &i64 + + i32 := int32(i) + int32s[i] = &i32 + + i16 := int16(i % 32767) + int16s[i] = &i16 + + i8 := int8(i % 127) + int8s[i] = &i8 + + u64 := uint64(i) + uint64s[i] = &u64 + + u32 := uint32(i) + uint32s[i] = &u32 + + u16 := uint16(i % 65535) + uint16s[i] = &u16 + + u8 := uint8(i % 255) + uint8s[i] = &u8 + + b := i%2 == 0 + bools[i] = &b + + j := json.RawMessage(`{"id":` + string(rune(i%10+48)) + `}`) + jsons[i] = &j + } + } + + frame := data.NewFrame("large_complex", + data.NewField("time", data.Labels{"source": "benchmark"}, times), + data.NewField("float64", nil, float64s), + data.NewField("float32", nil, float32s), + data.NewField("string", data.Labels{"type": "text"}, strings), + data.NewField("int64", nil, int64s), + data.NewField("int32", nil, int32s), + data.NewField("int16", nil, int16s), + data.NewField("int8", nil, int8s), + data.NewField("uint64", nil, uint64s), + data.NewField("uint32", nil, uint32s), + data.NewField("uint16", nil, uint16s), + data.NewField("uint8", nil, uint8s), + data.NewField("bool", nil, bools), + data.NewField("json", nil, jsons), + ) + + frame.SetMeta(&data.FrameMeta{ + ExecutedQueryString: "SELECT * FROM benchmarks", + Custom: map[string]interface{}{"benchmark": true}, + }) + + // Add field configs to some fields + frame.Fields[1].SetConfig((&data.FieldConfig{ + DisplayName: "Float64 Value", + Unit: "percent", + }).SetMin(0.0).SetMax(float64(rows))) + + return frame +} + +func createWideFrame(rows, cols int) *data.Frame { + fields := make([]*data.Field, cols) + for c := 0; c < cols; c++ { + values := make([]float64, rows) + for r := 0; r < rows; r++ { + values[r] = float64(r*c) * 0.1 + } + fields[c] = data.NewField("col_"+string(rune(c%26+97)), nil, values) + } + return data.NewFrame("wide", fields...) 
+} + +func createNumericOnlyFrame(rows int) *data.Frame { + int64s := make([]int64, rows) + float64s := make([]float64, rows) + uint64s := make([]uint64, rows) + int32s := make([]int32, rows) + + for i := 0; i < rows; i++ { + int64s[i] = int64(i) + float64s[i] = float64(i) * math.Pi + uint64s[i] = uint64(i * 2) + int32s[i] = int32(i % math.MaxInt32) + } + + return data.NewFrame("numeric", + data.NewField("int64", nil, int64s), + data.NewField("float64", nil, float64s), + data.NewField("uint64", nil, uint64s), + data.NewField("int32", nil, int32s), + ) +} + +func createTimeSeriesFrame(rows int) *data.Frame { + times := make([]time.Time, rows) + values := make([]float64, rows) + start := time.Now() + + for i := 0; i < rows; i++ { + times[i] = start.Add(time.Duration(i) * time.Second) + values[i] = math.Sin(float64(i) * 0.1) + } + + return data.NewFrame("timeseries", + data.NewField("time", nil, times), + data.NewField("value", nil, values), + ).SetMeta(&data.FrameMeta{ + Type: data.FrameTypeTimeSeriesMany, + }) +} + +// Benchmarks for MarshalArrow + +func BenchmarkMarshalArrow_Small(b *testing.B) { + frame := createSmallFrame() + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkMarshalArrow_Medium_100Rows(b *testing.B) { + frame := createMediumFrame(100) + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkMarshalArrow_Medium_1000Rows(b *testing.B) { + frame := createMediumFrame(1000) + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkMarshalArrow_Large_10000Rows(b *testing.B) { + frame := createLargeComplexFrame(10000) + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkMarshalArrow_Large_100000Rows(b *testing.B) { + frame := createLargeComplexFrame(100000) + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkMarshalArrow_Wide_100x100(b *testing.B) { + frame := createWideFrame(100, 100) + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkMarshalArrow_NumericOnly_10000Rows(b *testing.B) { + frame := createNumericOnlyFrame(10000) + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkMarshalArrow_TimeSeries_10000Rows(b *testing.B) { + frame := createTimeSeriesFrame(10000) + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + } +} + +// Benchmarks for UnmarshalArrowFrame + +func BenchmarkUnmarshalArrowFrame_Small(b *testing.B) { + frame := createSmallFrame() + encoded, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := data.UnmarshalArrowFrame(encoded) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkUnmarshalArrowFrame_Medium_100Rows(b *testing.B) { + frame := createMediumFrame(100) + encoded, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + 
+ b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := data.UnmarshalArrowFrame(encoded) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkUnmarshalArrowFrame_Medium_1000Rows(b *testing.B) { + frame := createMediumFrame(1000) + encoded, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := data.UnmarshalArrowFrame(encoded) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkUnmarshalArrowFrame_Large_10000Rows(b *testing.B) { + frame := createLargeComplexFrame(10000) + encoded, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := data.UnmarshalArrowFrame(encoded) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkUnmarshalArrowFrame_Large_100000Rows(b *testing.B) { + frame := createLargeComplexFrame(100000) + encoded, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := data.UnmarshalArrowFrame(encoded) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkUnmarshalArrowFrame_Wide_100x100(b *testing.B) { + frame := createWideFrame(100, 100) + encoded, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := data.UnmarshalArrowFrame(encoded) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkUnmarshalArrowFrame_NumericOnly_10000Rows(b *testing.B) { + frame := createNumericOnlyFrame(10000) + encoded, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := data.UnmarshalArrowFrame(encoded) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkUnmarshalArrowFrame_TimeSeries_10000Rows(b *testing.B) { + frame := createTimeSeriesFrame(10000) + encoded, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := data.UnmarshalArrowFrame(encoded) + if err != nil { + b.Fatal(err) + } + } +} + +// Benchmarks for round-trip (Marshal + Unmarshal) + +func BenchmarkArrowRoundTrip_Small(b *testing.B) { + frame := createSmallFrame() + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + encoded, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + _, err = data.UnmarshalArrowFrame(encoded) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkArrowRoundTrip_Medium_1000Rows(b *testing.B) { + frame := createMediumFrame(1000) + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + encoded, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + _, err = data.UnmarshalArrowFrame(encoded) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkArrowRoundTrip_Large_10000Rows(b *testing.B) { + frame := createLargeComplexFrame(10000) + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + encoded, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + _, err = data.UnmarshalArrowFrame(encoded) + if err != nil { + b.Fatal(err) + } + } +} + +// Benchmarks for FrameToArrowTable + +func BenchmarkFrameToArrowTable_Small(b *testing.B) { + frame := createSmallFrame() + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + table, err := data.FrameToArrowTable(frame) + if err != nil { + b.Fatal(err) + } + 
table.Release() + } +} + +func BenchmarkFrameToArrowTable_Medium_1000Rows(b *testing.B) { + frame := createMediumFrame(1000) + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + table, err := data.FrameToArrowTable(frame) + if err != nil { + b.Fatal(err) + } + table.Release() + } +} + +func BenchmarkFrameToArrowTable_Large_10000Rows(b *testing.B) { + frame := createLargeComplexFrame(10000) + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + table, err := data.FrameToArrowTable(frame) + if err != nil { + b.Fatal(err) + } + table.Release() + } +} + +// Benchmarks for FromArrowRecord + +func BenchmarkFromArrowRecord_Small(b *testing.B) { + frame := createSmallFrame() + encoded, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + + // Get a record by decoding the arrow data + decodedFrame, err := data.UnmarshalArrowFrame(encoded) + if err != nil { + b.Fatal(err) + } + + // Re-encode to get the arrow record for benchmarking + table, err := data.FrameToArrowTable(decodedFrame) + if err != nil { + b.Fatal(err) + } + defer table.Release() + + // Create a record from the table + tr := array.NewTableReader(table, -1) + defer tr.Release() + if !tr.Next() { + b.Fatal("no records in table") + } + record := tr.Record() + defer record.Release() + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := data.FromArrowRecord(record) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkFromArrowRecord_Medium_1000Rows(b *testing.B) { + frame := createMediumFrame(1000) + table, err := data.FrameToArrowTable(frame) + if err != nil { + b.Fatal(err) + } + defer table.Release() + + tr := array.NewTableReader(table, -1) + defer tr.Release() + if !tr.Next() { + b.Fatal("no records in table") + } + record := tr.Record() + defer record.Release() + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := data.FromArrowRecord(record) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkFromArrowRecord_Large_10000Rows(b *testing.B) { + frame := createLargeComplexFrame(10000) + table, err := data.FrameToArrowTable(frame) + if err != nil { + b.Fatal(err) + } + defer table.Release() + + tr := array.NewTableReader(table, -1) + defer tr.Release() + if !tr.Next() { + b.Fatal("no records in table") + } + record := tr.Record() + defer record.Release() + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := data.FromArrowRecord(record) + if err != nil { + b.Fatal(err) + } + } +} + +// Benchmarks for multiple frames (Frames.MarshalArrow and UnmarshalArrowFrames) + +func BenchmarkFramesMarshalArrow_5Frames_1000Rows(b *testing.B) { + frames := make(data.Frames, 5) + for i := 0; i < 5; i++ { + frames[i] = createMediumFrame(1000) + } + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := frames.MarshalArrow() + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkFramesMarshalArrow_10Frames_100Rows(b *testing.B) { + frames := make(data.Frames, 10) + for i := 0; i < 10; i++ { + frames[i] = createMediumFrame(100) + } + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := frames.MarshalArrow() + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkUnmarshalArrowFrames_5Frames_1000Rows(b *testing.B) { + frames := make(data.Frames, 5) + for i := 0; i < 5; i++ { + frames[i] = createMediumFrame(1000) + } + encoded, err := frames.MarshalArrow() + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { 
+ _, err := data.UnmarshalArrowFrames(encoded) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkUnmarshalArrowFrames_10Frames_100Rows(b *testing.B) { + frames := make(data.Frames, 10) + for i := 0; i < 10; i++ { + frames[i] = createMediumFrame(100) + } + encoded, err := frames.MarshalArrow() + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := data.UnmarshalArrowFrames(encoded) + if err != nil { + b.Fatal(err) + } + } +} + +// Benchmarks for different data types + +func BenchmarkMarshalArrow_StringHeavy_1000Rows(b *testing.B) { + strings := make([]string, 1000) + for i := 0; i < 1000; i++ { + strings[i] = "this is a longer string value for benchmarking purposes" + } + frame := data.NewFrame("strings", + data.NewField("value", nil, strings), + ) + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkUnmarshalArrowFrame_StringHeavy_1000Rows(b *testing.B) { + strings := make([]string, 1000) + for i := 0; i < 1000; i++ { + strings[i] = "this is a longer string value for benchmarking purposes" + } + frame := data.NewFrame("strings", + data.NewField("value", nil, strings), + ) + encoded, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := data.UnmarshalArrowFrame(encoded) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkMarshalArrow_WithMetadataAndLabels_1000Rows(b *testing.B) { + frame := createMediumFrame(1000) + frame.SetMeta(&data.FrameMeta{ + ExecutedQueryString: "SELECT * FROM large_table WHERE condition = true", + Custom: map[string]interface{}{ + "key1": "value1", + "key2": 12345, + "key3": true, + }, + Stats: []data.QueryStat{ + {Value: 123.45, FieldConfig: data.FieldConfig{DisplayName: "stat1"}}, + {Value: 678.90, FieldConfig: data.FieldConfig{DisplayName: "stat2"}}, + }, + }) + + for _, field := range frame.Fields { + field.Labels = data.Labels{ + "source": "benchmark", + "environment": "test", + "version": "1.0.0", + } + field.SetConfig((&data.FieldConfig{ + DisplayName: "Display " + field.Name, + Unit: "units", + }).SetMin(0.0).SetMax(1000.0)) + } + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + _, err := frame.MarshalArrow() + if err != nil { + b.Fatal(err) + } + } +} diff --git a/data/arrow_column_builders.go b/data/arrow_column_builders.go index 45cb37249..cd81de0fd 100644 --- a/data/arrow_column_builders.go +++ b/data/arrow_column_builders.go @@ -1,16 +1,19 @@ package data import ( + "encoding/json" + "time" + "github.com/apache/arrow-go/v18/arrow" "github.com/apache/arrow-go/v18/arrow/array" "github.com/apache/arrow-go/v18/arrow/memory" ) -func buildStringColumn(pool memory.Allocator, field arrow.Field, vec *stringVector) *arrow.Column { +func buildStringColumn(pool memory.Allocator, field arrow.Field, vec *genericVector[string]) *arrow.Column { builder := array.NewStringBuilder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { builder.Append(v) } @@ -20,11 +23,11 @@ func buildStringColumn(pool memory.Allocator, field arrow.Field, vec *stringVect return arrow.NewColumn(field, chunked) } -func buildNullableStringColumn(pool memory.Allocator, field arrow.Field, vec *nullableStringVector) *arrow.Column { +func buildNullableStringColumn(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[string]) *arrow.Column { 
builder := array.NewStringBuilder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { if v == nil { builder.AppendNull() continue @@ -38,11 +41,11 @@ func buildNullableStringColumn(pool memory.Allocator, field arrow.Field, vec *nu return arrow.NewColumn(field, chunked) } -func buildInt8Column(pool memory.Allocator, field arrow.Field, vec *int8Vector) *arrow.Column { +func buildInt8Column(pool memory.Allocator, field arrow.Field, vec *genericVector[int8]) *arrow.Column { builder := array.NewInt8Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { builder.Append(v) } @@ -52,11 +55,11 @@ func buildInt8Column(pool memory.Allocator, field arrow.Field, vec *int8Vector) return arrow.NewColumn(field, chunked) } -func buildNullableInt8Column(pool memory.Allocator, field arrow.Field, vec *nullableInt8Vector) *arrow.Column { +func buildNullableInt8Column(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[int8]) *arrow.Column { builder := array.NewInt8Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { if v == nil { builder.AppendNull() continue @@ -70,11 +73,11 @@ func buildNullableInt8Column(pool memory.Allocator, field arrow.Field, vec *null return arrow.NewColumn(field, chunked) } -func buildInt16Column(pool memory.Allocator, field arrow.Field, vec *int16Vector) *arrow.Column { +func buildInt16Column(pool memory.Allocator, field arrow.Field, vec *genericVector[int16]) *arrow.Column { builder := array.NewInt16Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { builder.Append(v) } @@ -84,11 +87,11 @@ func buildInt16Column(pool memory.Allocator, field arrow.Field, vec *int16Vector return arrow.NewColumn(field, chunked) } -func buildNullableInt16Column(pool memory.Allocator, field arrow.Field, vec *nullableInt16Vector) *arrow.Column { +func buildNullableInt16Column(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[int16]) *arrow.Column { builder := array.NewInt16Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { if v == nil { builder.AppendNull() continue @@ -102,11 +105,11 @@ func buildNullableInt16Column(pool memory.Allocator, field arrow.Field, vec *nul return arrow.NewColumn(field, chunked) } -func buildInt32Column(pool memory.Allocator, field arrow.Field, vec *int32Vector) *arrow.Column { +func buildInt32Column(pool memory.Allocator, field arrow.Field, vec *genericVector[int32]) *arrow.Column { builder := array.NewInt32Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { builder.Append(v) } @@ -116,11 +119,11 @@ func buildInt32Column(pool memory.Allocator, field arrow.Field, vec *int32Vector return arrow.NewColumn(field, chunked) } -func buildNullableInt32Column(pool memory.Allocator, field arrow.Field, vec *nullableInt32Vector) *arrow.Column { +func buildNullableInt32Column(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[int32]) *arrow.Column { builder := array.NewInt32Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { if v == nil { builder.AppendNull() continue @@ -134,11 +137,11 @@ func buildNullableInt32Column(pool memory.Allocator, field arrow.Field, vec *nul return arrow.NewColumn(field, chunked) } -func buildInt64Column(pool memory.Allocator, field arrow.Field, vec *int64Vector) *arrow.Column { +func 
buildInt64Column(pool memory.Allocator, field arrow.Field, vec *genericVector[int64]) *arrow.Column { builder := array.NewInt64Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { builder.Append(v) } @@ -148,11 +151,11 @@ func buildInt64Column(pool memory.Allocator, field arrow.Field, vec *int64Vector return arrow.NewColumn(field, chunked) } -func buildNullableInt64Column(pool memory.Allocator, field arrow.Field, vec *nullableInt64Vector) *arrow.Column { +func buildNullableInt64Column(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[int64]) *arrow.Column { builder := array.NewInt64Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { if v == nil { builder.AppendNull() continue @@ -166,11 +169,11 @@ func buildNullableInt64Column(pool memory.Allocator, field arrow.Field, vec *nul return arrow.NewColumn(field, chunked) } -func buildUInt8Column(pool memory.Allocator, field arrow.Field, vec *uint8Vector) *arrow.Column { +func buildUInt8Column(pool memory.Allocator, field arrow.Field, vec *genericVector[uint8]) *arrow.Column { builder := array.NewUint8Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { builder.Append(v) } @@ -180,11 +183,11 @@ func buildUInt8Column(pool memory.Allocator, field arrow.Field, vec *uint8Vector return arrow.NewColumn(field, chunked) } -func buildNullableUInt8Column(pool memory.Allocator, field arrow.Field, vec *nullableUint8Vector) *arrow.Column { +func buildNullableUInt8Column(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[uint8]) *arrow.Column { builder := array.NewUint8Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { if v == nil { builder.AppendNull() continue @@ -198,11 +201,11 @@ func buildNullableUInt8Column(pool memory.Allocator, field arrow.Field, vec *nul return arrow.NewColumn(field, chunked) } -func buildUInt16Column(pool memory.Allocator, field arrow.Field, vec *uint16Vector) *arrow.Column { +func buildUInt16Column(pool memory.Allocator, field arrow.Field, vec *genericVector[uint16]) *arrow.Column { builder := array.NewUint16Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { builder.Append(v) } @@ -212,11 +215,11 @@ func buildUInt16Column(pool memory.Allocator, field arrow.Field, vec *uint16Vect return arrow.NewColumn(field, chunked) } -func buildNullableUInt16Column(pool memory.Allocator, field arrow.Field, vec *nullableUint16Vector) *arrow.Column { +func buildNullableUInt16Column(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[uint16]) *arrow.Column { builder := array.NewUint16Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { if v == nil { builder.AppendNull() continue @@ -230,11 +233,11 @@ func buildNullableUInt16Column(pool memory.Allocator, field arrow.Field, vec *nu return arrow.NewColumn(field, chunked) } -func buildUInt32Column(pool memory.Allocator, field arrow.Field, vec *uint32Vector) *arrow.Column { +func buildUInt32Column(pool memory.Allocator, field arrow.Field, vec *genericVector[uint32]) *arrow.Column { builder := array.NewUint32Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { builder.Append(v) } @@ -244,11 +247,11 @@ func buildUInt32Column(pool memory.Allocator, field arrow.Field, vec *uint32Vect return arrow.NewColumn(field, chunked) } -func 
buildNullableUInt32Column(pool memory.Allocator, field arrow.Field, vec *nullableUint32Vector) *arrow.Column { +func buildNullableUInt32Column(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[uint32]) *arrow.Column { builder := array.NewUint32Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { if v == nil { builder.AppendNull() continue @@ -262,11 +265,11 @@ func buildNullableUInt32Column(pool memory.Allocator, field arrow.Field, vec *nu return arrow.NewColumn(field, chunked) } -func buildUInt64Column(pool memory.Allocator, field arrow.Field, vec *uint64Vector) *arrow.Column { +func buildUInt64Column(pool memory.Allocator, field arrow.Field, vec *genericVector[uint64]) *arrow.Column { builder := array.NewUint64Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { builder.Append(v) } @@ -276,11 +279,11 @@ func buildUInt64Column(pool memory.Allocator, field arrow.Field, vec *uint64Vect return arrow.NewColumn(field, chunked) } -func buildNullableUInt64Column(pool memory.Allocator, field arrow.Field, vec *nullableUint64Vector) *arrow.Column { +func buildNullableUInt64Column(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[uint64]) *arrow.Column { builder := array.NewUint64Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { if v == nil { builder.AppendNull() continue @@ -294,11 +297,11 @@ func buildNullableUInt64Column(pool memory.Allocator, field arrow.Field, vec *nu return arrow.NewColumn(field, chunked) } -func buildFloat32Column(pool memory.Allocator, field arrow.Field, vec *float32Vector) *arrow.Column { +func buildFloat32Column(pool memory.Allocator, field arrow.Field, vec *genericVector[float32]) *arrow.Column { builder := array.NewFloat32Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { builder.Append(v) } @@ -308,11 +311,11 @@ func buildFloat32Column(pool memory.Allocator, field arrow.Field, vec *float32Ve return arrow.NewColumn(field, chunked) } -func buildNullableFloat32Column(pool memory.Allocator, field arrow.Field, vec *nullableFloat32Vector) *arrow.Column { +func buildNullableFloat32Column(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[float32]) *arrow.Column { builder := array.NewFloat32Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { if v == nil { builder.AppendNull() continue @@ -326,11 +329,11 @@ func buildNullableFloat32Column(pool memory.Allocator, field arrow.Field, vec *n return arrow.NewColumn(field, chunked) } -func buildFloat64Column(pool memory.Allocator, field arrow.Field, vec *float64Vector) *arrow.Column { +func buildFloat64Column(pool memory.Allocator, field arrow.Field, vec *genericVector[float64]) *arrow.Column { builder := array.NewFloat64Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { builder.Append(v) } @@ -340,11 +343,11 @@ func buildFloat64Column(pool memory.Allocator, field arrow.Field, vec *float64Ve return arrow.NewColumn(field, chunked) } -func buildNullableFloat64Column(pool memory.Allocator, field arrow.Field, vec *nullableFloat64Vector) *arrow.Column { +func buildNullableFloat64Column(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[float64]) *arrow.Column { builder := array.NewFloat64Builder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { if v 
== nil { builder.AppendNull() continue @@ -358,11 +361,11 @@ func buildNullableFloat64Column(pool memory.Allocator, field arrow.Field, vec *n return arrow.NewColumn(field, chunked) } -func buildBoolColumn(pool memory.Allocator, field arrow.Field, vec *boolVector) *arrow.Column { +func buildBoolColumn(pool memory.Allocator, field arrow.Field, vec *genericVector[bool]) *arrow.Column { builder := array.NewBooleanBuilder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { builder.Append(v) } @@ -372,11 +375,11 @@ func buildBoolColumn(pool memory.Allocator, field arrow.Field, vec *boolVector) return arrow.NewColumn(field, chunked) } -func buildNullableBoolColumn(pool memory.Allocator, field arrow.Field, vec *nullableBoolVector) *arrow.Column { +func buildNullableBoolColumn(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[bool]) *arrow.Column { builder := array.NewBooleanBuilder(pool) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { if v == nil { builder.AppendNull() continue @@ -390,13 +393,13 @@ func buildNullableBoolColumn(pool memory.Allocator, field arrow.Field, vec *null return arrow.NewColumn(field, chunked) } -func buildTimeColumn(pool memory.Allocator, field arrow.Field, vec *timeTimeVector) *arrow.Column { +func buildTimeColumnGeneric(pool memory.Allocator, field arrow.Field, vec *genericVector[time.Time]) *arrow.Column { builder := array.NewTimestampBuilder(pool, &arrow.TimestampType{ Unit: arrow.Nanosecond, }) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { builder.Append(arrow.Timestamp((v).UnixNano())) } @@ -406,13 +409,13 @@ func buildTimeColumn(pool memory.Allocator, field arrow.Field, vec *timeTimeVect return arrow.NewColumn(field, chunked) } -func buildNullableTimeColumn(pool memory.Allocator, field arrow.Field, vec *nullableTimeTimeVector) *arrow.Column { +func buildNullableTimeColumnGeneric(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[time.Time]) *arrow.Column { builder := array.NewTimestampBuilder(pool, &arrow.TimestampType{ Unit: arrow.Nanosecond, }) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { if v == nil { builder.AppendNull() continue @@ -426,11 +429,11 @@ func buildNullableTimeColumn(pool memory.Allocator, field arrow.Field, vec *null return arrow.NewColumn(field, chunked) } -func buildJSONColumn(pool memory.Allocator, field arrow.Field, vec *jsonRawMessageVector) *arrow.Column { +func buildJSONColumnGeneric(pool memory.Allocator, field arrow.Field, vec *genericVector[json.RawMessage]) *arrow.Column { builder := array.NewBinaryBuilder(pool, &arrow.BinaryType{}) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { builder.Append(v) } @@ -440,11 +443,11 @@ func buildJSONColumn(pool memory.Allocator, field arrow.Field, vec *jsonRawMessa return arrow.NewColumn(field, chunked) } -func buildNullableJSONColumn(pool memory.Allocator, field arrow.Field, vec *nullableJsonRawMessageVector) *arrow.Column { +func buildNullableJSONColumnGeneric(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[json.RawMessage]) *arrow.Column { builder := array.NewBinaryBuilder(pool, &arrow.BinaryType{}) defer builder.Release() - for _, v := range *vec { + for _, v := range vec.Slice() { if v == nil { builder.AppendNull() continue @@ -458,16 +461,12 @@ func buildNullableJSONColumn(pool memory.Allocator, field arrow.Field, vec *null return 
arrow.NewColumn(field, chunked) } -func buildNullableEnumColumn(pool memory.Allocator, field arrow.Field, vec *nullableEnumVector) *arrow.Column { +func buildEnumColumnGeneric(pool memory.Allocator, field arrow.Field, vec *genericVector[EnumItemIndex]) *arrow.Column { builder := array.NewUint16Builder(pool) defer builder.Release() - for _, v := range *vec { - if v == nil { - builder.AppendNull() - continue - } - builder.Append((uint16)(*v)) + for _, v := range vec.Slice() { + builder.Append(uint16(v)) } chunked := arrow.NewChunked(field.Type, []arrow.Array{builder.NewArray()}) @@ -476,12 +475,16 @@ func buildNullableEnumColumn(pool memory.Allocator, field arrow.Field, vec *null return arrow.NewColumn(field, chunked) } -func buildEnumColumn(pool memory.Allocator, field arrow.Field, vec *enumVector) *arrow.Column { +func buildNullableEnumColumnGeneric(pool memory.Allocator, field arrow.Field, vec *nullableGenericVector[EnumItemIndex]) *arrow.Column { builder := array.NewUint16Builder(pool) defer builder.Release() - for _, v := range *vec { - builder.Append(uint16(v)) + for _, v := range vec.Slice() { + if v == nil { + builder.AppendNull() + continue + } + builder.Append(uint16(*v)) } chunked := arrow.NewChunked(field.Type, []arrow.Array{builder.NewArray()}) diff --git a/data/benchmark-compare.sh b/data/benchmark-compare.sh new file mode 100755 index 000000000..ad74a1224 --- /dev/null +++ b/data/benchmark-compare.sh @@ -0,0 +1,90 @@ +#!/bin/bash +set -e + +# Script to compare benchmark performance between branches using best practices +# Usage: ./benchmark-compare.sh [base-branch] [feature-branch] [count] +# +# Best practices implemented: +# - Uses -count=10 for statistical significance (can override with 3rd arg) +# - Clears build cache between runs +# - Stabilizes CPU frequency where possible +# - Uses -benchtime for longer runs to reduce noise +# +# If no arguments provided: +# - Runs benchmarks on current branch and saves to new.txt +# - Switches to main, runs benchmarks and saves to old.txt +# - Switches back and compares with benchstat +# +# If arguments provided: +# - Uses specified branches for comparison + +CURRENT_BRANCH=$(git branch --show-current) +BASE_BRANCH=${1:-main} +FEATURE_BRANCH=${2:-$CURRENT_BRANCH} +COUNT=${3:-10} # Default to 10 runs for better statistical significance + +echo "======================================================================" +echo "Benchmark Comparison with Best Practices" +echo "======================================================================" +echo " Base branch: $BASE_BRANCH" +echo " Feature branch: $FEATURE_BRANCH" +echo " Iterations: $COUNT (minimum 6 recommended for confidence intervals)" +echo "" + +# Check if benchstat is installed +if ! command -v benchstat &> /dev/null; then + echo "benchstat is not installed. Installing..." + go install golang.org/x/perf/cmd/benchstat@latest + echo "" +fi + +# Warn about CPU frequency scaling +echo "NOTE: For most accurate results:" +echo " - Close other applications" +echo " - Disable CPU frequency scaling if possible" +echo " - Run on AC power (laptops)" +echo " - Consider: sudo cpupower frequency-set --governor performance (Linux)" +echo "" + +# Save current state +echo "Saving current work..." +git stash push -u -m "benchmark comparison stash" 2>/dev/null || true + +# Function to run benchmarks with best practices +run_benchmarks() { + local branch=$1 + local output=$2 + + echo "======================================================================" + echo "Running benchmarks on $branch..." 
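Aside, not part of the diff (the run_benchmarks function continues immediately below): the script drives ordinary Go benchmarks under ./data. A minimal sketch of the kind of benchmark it compares across branches; the benchmark name and slice size are illustrative, only data.NewField is taken from the package:

package data_test

import (
	"testing"

	"github.com/grafana/grafana-plugin-sdk-go/data"
)

// BenchmarkNewFieldFloat64 is a hypothetical benchmark of the sort that
// benchmark-compare.sh runs on both branches and feeds to benchstat.
func BenchmarkNewFieldFloat64(b *testing.B) {
	vals := make([]float64, 1024)
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_ = data.NewField("value", nil, vals)
	}
}

With benchmarks like this in place, the default invocation is ./benchmark-compare.sh (current branch against main, 10 runs), or explicitly ./benchmark-compare.sh main my-feature 10; benchstat then reports the per-benchmark deltas.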
+ echo "======================================================================" + + # Clear build cache to ensure clean build + echo "Clearing build cache..." + go clean -cache -testcache + + # Run benchmarks with: + # - count=$COUNT: Multiple runs for statistical significance + # - benchmem: Include memory allocation stats + # - benchtime=1s: Run each benchmark for at least 1 second (reduces timing noise) + # - run=^$: Don't run any tests, only benchmarks + echo "Running $COUNT iterations (this may take several minutes)..." + go test -bench=. -benchmem -count=$COUNT -benchtime=1s -cpu=1 -run=^$ ./data 2>&1 | tee "$output" + + echo "" + echo "Results saved to $output" +} + +# Run benchmarks on base branch +git checkout "$BASE_BRANCH" 2>&1 | grep -v "^M\s" || true +run_benchmarks "$BASE_BRANCH" "old.txt" + +# Run benchmarks on feature branch +git checkout "$FEATURE_BRANCH" 2>&1 | grep -v "^M\s" || true +run_benchmarks "$FEATURE_BRANCH" "new.txt" + +echo "" +echo "======================================================================" +echo "Benchmark Comparison Results" +echo "======================================================================" +benchstat -alpha=0.05 old.txt new.txt diff --git a/data/field.go b/data/field.go index 0f74ebff6..216ee21d2 100644 --- a/data/field.go +++ b/data/field.go @@ -67,66 +67,67 @@ type Fields []*Field func NewField(name string, labels Labels, values interface{}) *Field { var vec vector switch v := values.(type) { + // Use generic vectors for basic types (performance optimized) case []int8: - vec = newInt8VectorWithValues(v) + vec = newGenericVectorWithValues(v) case []*int8: - vec = newNullableInt8VectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) case []int16: - vec = newInt16VectorWithValues(v) + vec = newGenericVectorWithValues(v) case []*int16: - vec = newNullableInt16VectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) case []int32: - vec = newInt32VectorWithValues(v) + vec = newGenericVectorWithValues(v) case []*int32: - vec = newNullableInt32VectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) case []int64: - vec = newInt64VectorWithValues(v) + vec = newGenericVectorWithValues(v) case []*int64: - vec = newNullableInt64VectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) case []uint8: - vec = newUint8VectorWithValues(v) + vec = newGenericVectorWithValues(v) case []*uint8: - vec = newNullableUint8VectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) case []uint16: - vec = newUint16VectorWithValues(v) + vec = newGenericVectorWithValues(v) case []*uint16: - vec = newNullableUint16VectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) case []uint32: - vec = newUint32VectorWithValues(v) + vec = newGenericVectorWithValues(v) case []*uint32: - vec = newNullableUint32VectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) case []uint64: - vec = newUint64VectorWithValues(v) + vec = newGenericVectorWithValues(v) case []*uint64: - vec = newNullableUint64VectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) case []float32: - vec = newFloat32VectorWithValues(v) + vec = newGenericVectorWithValues(v) case []*float32: - vec = newNullableFloat32VectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) case []float64: - vec = newFloat64VectorWithValues(v) + vec = newGenericVectorWithValues(v) case []*float64: - vec = newNullableFloat64VectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) case []string: - vec = newStringVectorWithValues(v) + vec = 
newGenericVectorWithValues(v) case []*string: - vec = newNullableStringVectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) case []bool: - vec = newBoolVectorWithValues(v) + vec = newGenericVectorWithValues(v) case []*bool: - vec = newNullableBoolVectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) case []time.Time: - vec = newTimeTimeVectorWithValues(v) + vec = newGenericVectorWithValues(v) case []*time.Time: - vec = newNullableTimeTimeVectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) case []json.RawMessage: - vec = newJsonRawMessageVectorWithValues(v) + vec = newGenericVectorWithValues(v) case []*json.RawMessage: - vec = newNullableJsonRawMessageVectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) case []EnumItemIndex: - vec = newEnumVectorWithValues(v) + vec = newGenericVectorWithValues(v) case []*EnumItemIndex: - vec = newNullableEnumVectorWithValues(v) + vec = newNullableGenericVectorWithValues(v) default: panic(fmt.Errorf("field '%s' specified with unsupported type %T", name, v)) } @@ -257,10 +258,20 @@ func (f *Field) SetConfig(conf *FieldConfig) *Field { // an error if ParseFloat errors. If the value is nil, NaN is returned. // nolint:gocyclo func (f *Field) FloatAt(idx int) (float64, error) { + // Fast path: Use typed accessors for generic vectors (zero allocation) switch f.Type() { case FieldTypeInt8: + if gv, ok := f.vector.(*genericVector[int8]); ok { + return float64(gv.AtTyped(idx)), nil + } return float64(f.At(idx).(int8)), nil case FieldTypeNullableInt8: + if gv, ok := f.vector.(*nullableGenericVector[int8]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + return float64(val), nil + } + return math.NaN(), nil + } iv := f.At(idx).(*int8) if iv == nil { return math.NaN(), nil @@ -268,8 +279,17 @@ func (f *Field) FloatAt(idx int) (float64, error) { return float64(*iv), nil case FieldTypeInt16: + if gv, ok := f.vector.(*genericVector[int16]); ok { + return float64(gv.AtTyped(idx)), nil + } return float64(f.At(idx).(int16)), nil case FieldTypeNullableInt16: + if gv, ok := f.vector.(*nullableGenericVector[int16]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + return float64(val), nil + } + return math.NaN(), nil + } iv := f.At(idx).(*int16) if iv == nil { return math.NaN(), nil @@ -277,8 +297,17 @@ func (f *Field) FloatAt(idx int) (float64, error) { return float64(*iv), nil case FieldTypeInt32: + if gv, ok := f.vector.(*genericVector[int32]); ok { + return float64(gv.AtTyped(idx)), nil + } return float64(f.At(idx).(int32)), nil case FieldTypeNullableInt32: + if gv, ok := f.vector.(*nullableGenericVector[int32]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + return float64(val), nil + } + return math.NaN(), nil + } iv := f.At(idx).(*int32) if iv == nil { return math.NaN(), nil @@ -286,8 +315,17 @@ func (f *Field) FloatAt(idx int) (float64, error) { return float64(*iv), nil case FieldTypeInt64: + if gv, ok := f.vector.(*genericVector[int64]); ok { + return float64(gv.AtTyped(idx)), nil + } return float64(f.At(idx).(int64)), nil case FieldTypeNullableInt64: + if gv, ok := f.vector.(*nullableGenericVector[int64]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + return float64(val), nil + } + return math.NaN(), nil + } iv := f.At(idx).(*int64) if iv == nil { return math.NaN(), nil @@ -295,8 +333,17 @@ func (f *Field) FloatAt(idx int) (float64, error) { return float64(*iv), nil case FieldTypeUint8: + if gv, ok := f.vector.(*genericVector[uint8]); ok { + return float64(gv.AtTyped(idx)), nil + } return 
float64(f.At(idx).(uint8)), nil case FieldTypeNullableUint8: + if gv, ok := f.vector.(*nullableGenericVector[uint8]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + return float64(val), nil + } + return math.NaN(), nil + } uiv := f.At(idx).(*uint8) if uiv == nil { return math.NaN(), nil @@ -304,8 +351,17 @@ func (f *Field) FloatAt(idx int) (float64, error) { return float64(*uiv), nil case FieldTypeUint16: + if gv, ok := f.vector.(*genericVector[uint16]); ok { + return float64(gv.AtTyped(idx)), nil + } return float64(f.At(idx).(uint16)), nil case FieldTypeNullableUint16: + if gv, ok := f.vector.(*nullableGenericVector[uint16]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + return float64(val), nil + } + return math.NaN(), nil + } uiv := f.At(idx).(*uint16) if uiv == nil { return math.NaN(), nil @@ -313,8 +369,17 @@ func (f *Field) FloatAt(idx int) (float64, error) { return float64(*uiv), nil case FieldTypeUint32: + if gv, ok := f.vector.(*genericVector[uint32]); ok { + return float64(gv.AtTyped(idx)), nil + } return float64(f.At(idx).(uint32)), nil case FieldTypeNullableUint32: + if gv, ok := f.vector.(*nullableGenericVector[uint32]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + return float64(val), nil + } + return math.NaN(), nil + } uiv := f.At(idx).(*uint32) if uiv == nil { return math.NaN(), nil @@ -324,8 +389,17 @@ func (f *Field) FloatAt(idx int) (float64, error) { // TODO: third param for loss of precision? // Maybe something in math/big can help with this (also see https://github.com/golang/go/issues/29463). case FieldTypeUint64: + if gv, ok := f.vector.(*genericVector[uint64]); ok { + return float64(gv.AtTyped(idx)), nil + } return float64(f.At(idx).(uint64)), nil case FieldTypeNullableUint64: + if gv, ok := f.vector.(*nullableGenericVector[uint64]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + return float64(val), nil + } + return math.NaN(), nil + } uiv := f.At(idx).(*uint64) if uiv == nil { return math.NaN(), nil @@ -333,8 +407,17 @@ func (f *Field) FloatAt(idx int) (float64, error) { return float64(*uiv), nil case FieldTypeFloat32: + if gv, ok := f.vector.(*genericVector[float32]); ok { + return float64(gv.AtTyped(idx)), nil + } return float64(f.At(idx).(float32)), nil case FieldTypeNullableFloat32: + if gv, ok := f.vector.(*nullableGenericVector[float32]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + return float64(val), nil + } + return math.NaN(), nil + } fv := f.At(idx).(*float32) if fv == nil { return math.NaN(), nil @@ -342,8 +425,17 @@ func (f *Field) FloatAt(idx int) (float64, error) { return float64(*fv), nil case FieldTypeFloat64: + if gv, ok := f.vector.(*genericVector[float64]); ok { + return gv.AtTyped(idx), nil + } return f.At(idx).(float64), nil case FieldTypeNullableFloat64: + if gv, ok := f.vector.(*nullableGenericVector[float64]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + return val, nil + } + return math.NaN(), nil + } fv := f.At(idx).(*float64) if fv == nil { return math.NaN(), nil @@ -351,6 +443,13 @@ func (f *Field) FloatAt(idx int) (float64, error) { return *fv, nil case FieldTypeString: + if gv, ok := f.vector.(*genericVector[string]); ok { + ft, err := strconv.ParseFloat(gv.AtTyped(idx), 64) + if err != nil { + return 0, err + } + return ft, nil + } s := f.At(idx).(string) ft, err := strconv.ParseFloat(s, 64) if err != nil { @@ -358,6 +457,16 @@ func (f *Field) FloatAt(idx int) (float64, error) { } return ft, nil case FieldTypeNullableString: + if gv, ok := f.vector.(*nullableGenericVector[string]); ok { 
+ if val, ok := gv.ConcreteAtTyped(idx); ok { + ft, err := strconv.ParseFloat(val, 64) + if err != nil { + return 0, err + } + return ft, nil + } + return math.NaN(), nil + } s := f.At(idx).(*string) if s == nil { return math.NaN(), nil @@ -369,12 +478,24 @@ func (f *Field) FloatAt(idx int) (float64, error) { return ft, nil case FieldTypeBool: + if gv, ok := f.vector.(*genericVector[bool]); ok { + if gv.AtTyped(idx) { + return 1, nil + } + return 0, nil + } if f.At(idx).(bool) { return 1, nil } return 0, nil case FieldTypeNullableBool: + if gv, ok := f.vector.(*nullableGenericVector[bool]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok && val { + return 1, nil + } + return 0, nil + } b := f.At(idx).(*bool) if b == nil || !*b { return 0, nil @@ -382,8 +503,17 @@ func (f *Field) FloatAt(idx int) (float64, error) { return 1, nil case FieldTypeTime: + if gv, ok := f.vector.(*genericVector[time.Time]); ok { + return float64(gv.AtTyped(idx).UnixNano() / int64(time.Millisecond)), nil + } return float64(f.At(idx).(time.Time).UnixNano() / int64(time.Millisecond)), nil case FieldTypeNullableTime: + if gv, ok := f.vector.(*nullableGenericVector[time.Time]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + return float64(val.UnixNano() / int64(time.Millisecond)), nil + } + return math.NaN(), nil + } t := f.At(idx).(*time.Time) if t == nil { return math.NaN(), nil @@ -405,72 +535,136 @@ func (f *Field) NullableFloatAt(idx int) (*float64, error) { return &fv, nil } + // Fast path: Use typed accessors for generic vectors (reduces allocation) switch f.Type() { case FieldTypeNullableInt8: + if gv, ok := f.vector.(*nullableGenericVector[int8]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + fv := float64(val) + return &fv, nil + } + return nil, nil + } iv := f.At(idx).(*int8) if iv == nil { return nil, nil } - f := float64(*iv) - return &f, nil + fv := float64(*iv) + return &fv, nil case FieldTypeNullableInt16: + if gv, ok := f.vector.(*nullableGenericVector[int16]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + fv := float64(val) + return &fv, nil + } + return nil, nil + } iv := f.At(idx).(*int16) if iv == nil { return nil, nil } - f := float64(*iv) - return &f, nil + fv := float64(*iv) + return &fv, nil case FieldTypeNullableInt32: + if gv, ok := f.vector.(*nullableGenericVector[int32]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + fv := float64(val) + return &fv, nil + } + return nil, nil + } iv := f.At(idx).(*int32) if iv == nil { return nil, nil } - f := float64(*iv) - return &f, nil + fv := float64(*iv) + return &fv, nil case FieldTypeNullableInt64: + if gv, ok := f.vector.(*nullableGenericVector[int64]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + fv := float64(val) + return &fv, nil + } + return nil, nil + } iv := f.At(idx).(*int64) if iv == nil { return nil, nil } - f := float64(*iv) - return &f, nil + fv := float64(*iv) + return &fv, nil case FieldTypeNullableUint8: + if gv, ok := f.vector.(*nullableGenericVector[uint8]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + fv := float64(val) + return &fv, nil + } + return nil, nil + } uiv := f.At(idx).(*uint8) if uiv == nil { return nil, nil } - f := float64(*uiv) - return &f, nil + fv := float64(*uiv) + return &fv, nil case FieldTypeNullableUint16: + if gv, ok := f.vector.(*nullableGenericVector[uint16]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + fv := float64(val) + return &fv, nil + } + return nil, nil + } uiv := f.At(idx).(*uint16) if uiv == nil { return nil, nil } - f := float64(*uiv) - return 
&f, nil + fv := float64(*uiv) + return &fv, nil case FieldTypeNullableUint32: + if gv, ok := f.vector.(*nullableGenericVector[uint32]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + fv := float64(val) + return &fv, nil + } + return nil, nil + } uiv := f.At(idx).(*uint32) if uiv == nil { return nil, nil } - f := float64(*uiv) - return &f, nil + fv := float64(*uiv) + return &fv, nil case FieldTypeNullableUint64: + if gv, ok := f.vector.(*nullableGenericVector[uint64]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + fv := float64(val) + return &fv, nil + } + return nil, nil + } uiv := f.At(idx).(*uint64) if uiv == nil { return nil, nil } - f := float64(*uiv) - return &f, nil + fv := float64(*uiv) + return &fv, nil case FieldTypeNullableFloat32: + if gv, ok := f.vector.(*nullableGenericVector[float32]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + fv := float64(val) + return &fv, nil + } + return nil, nil + } fv := f.At(idx).(*float32) if fv == nil { return nil, nil @@ -479,6 +673,9 @@ func (f *Field) NullableFloatAt(idx int) (*float64, error) { return &f, nil case FieldTypeNullableFloat64: + if gv, ok := f.vector.(*nullableGenericVector[float64]); ok { + return gv.AtTyped(idx), nil + } fv := f.At(idx).(*float64) if fv == nil { return nil, nil @@ -486,6 +683,16 @@ func (f *Field) NullableFloatAt(idx int) (*float64, error) { return fv, nil case FieldTypeNullableString: + if gv, ok := f.vector.(*nullableGenericVector[string]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + ft, err := strconv.ParseFloat(val, 64) + if err != nil { + return nil, err + } + return &ft, nil + } + return nil, nil + } s := f.At(idx).(*string) if s == nil { return nil, nil @@ -497,23 +704,40 @@ func (f *Field) NullableFloatAt(idx int) (*float64, error) { return &ft, nil case FieldTypeNullableBool: + if gv, ok := f.vector.(*nullableGenericVector[bool]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + fv := 0.0 + if val { + fv = 1.0 + } + return &fv, nil + } + return nil, nil + } b := f.At(idx).(*bool) if b == nil { return nil, nil } - f := 0.0 + fv := 0.0 if *b { - f = 1.0 + fv = 1.0 } - return &f, nil + return &fv, nil case FieldTypeNullableTime: + if gv, ok := f.vector.(*nullableGenericVector[time.Time]); ok { + if val, ok := gv.ConcreteAtTyped(idx); ok { + fv := float64(val.UnixNano() / int64(time.Millisecond)) + return &fv, nil + } + return nil, nil + } t := f.At(idx).(*time.Time) if t == nil { return nil, nil } - f := float64(t.UnixNano() / int64(time.Millisecond)) - return &f, nil + fv := float64(t.UnixNano() / int64(time.Millisecond)) + return &fv, nil } return nil, fmt.Errorf("unsupported field type %T", f.Type()) } diff --git a/data/field_generic.go b/data/field_generic.go new file mode 100644 index 000000000..95542a837 --- /dev/null +++ b/data/field_generic.go @@ -0,0 +1,110 @@ +package data + +// NewFieldGeneric creates a new Field using generic vectors for better performance. +// This eliminates interface{} boxing overhead for typed operations. +func NewFieldGeneric[T any](name string, labels Labels, values []T) *Field { + vec := newGenericVectorWithValues(values) + return &Field{ + Name: name, + vector: vec, + Labels: labels, + } +} + +// NewFieldGenericNullable creates a new Field using nullable generic vectors. 
+func NewFieldGenericNullable[T any](name string, labels Labels, values []*T) *Field {
+	vec := newNullableGenericVectorWithValues(values)
+	return &Field{
+		Name:   name,
+		vector: vec,
+		Labels: labels,
+	}
+}
+
+// AtTyped returns the value at index idx with zero allocation.
+// It panics if the Field's underlying vector is not a genericVector[T].
+// For optimal performance, use this with fields created via NewFieldGeneric.
+func AtTyped[T any](f *Field, idx int) T {
+	if gv, ok := f.vector.(*genericVector[T]); ok {
+		return gv.AtTyped(idx)
+	}
+	panic("Field is not backed by genericVector[T]")
+}
+
+// SetTyped sets the value at index idx with zero allocation.
+// It panics if the Field's underlying vector is not a genericVector[T].
+func SetTyped[T any](f *Field, idx int, val T) {
+	if gv, ok := f.vector.(*genericVector[T]); ok {
+		gv.SetTyped(idx, val)
+		return
+	}
+	panic("Field is not backed by genericVector[T]")
+}
+
+// AppendTyped appends a value with zero allocation.
+// It panics if the Field's underlying vector is not a genericVector[T].
+func AppendTyped[T any](f *Field, val T) {
+	if gv, ok := f.vector.(*genericVector[T]); ok {
+		gv.AppendTyped(val)
+		return
+	}
+	panic("Field is not backed by genericVector[T]")
+}
+
+// AtTypedNullable returns the pointer value at index idx with zero allocation.
+// It panics if the Field's underlying vector is not a nullableGenericVector[T].
+func AtTypedNullable[T any](f *Field, idx int) *T {
+	if gv, ok := f.vector.(*nullableGenericVector[T]); ok {
+		return gv.AtTyped(idx)
+	}
+	panic("Field is not backed by nullableGenericVector[T]")
+}
+
+// SetTypedNullable sets the pointer value at index idx with zero allocation.
+// It panics if the Field's underlying vector is not a nullableGenericVector[T].
+func SetTypedNullable[T any](f *Field, idx int, val *T) {
+	if gv, ok := f.vector.(*nullableGenericVector[T]); ok {
+		gv.SetTyped(idx, val)
+		return
+	}
+	panic("Field is not backed by nullableGenericVector[T]")
+}
+
+// ConcreteAtTyped returns the dereferenced value for nullable fields with minimal allocation.
+// The second return value indicates if the value was non-nil.
+func ConcreteAtTyped[T any](f *Field, idx int) (T, bool) {
+	switch vec := f.vector.(type) {
+	case *nullableGenericVector[T]:
+		return vec.ConcreteAtTyped(idx)
+	case *genericVector[T]:
+		return vec.AtTyped(idx), true
+	default:
+		var zero T
+		return zero, false
+	}
+}
+
+// IsgenericVector returns true if the field is backed by a genericVector.
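Before the IsgenericVector helper continues below, an illustrative aside (not part of the diff): how the typed helpers added in data/field_generic.go are meant to be called. The import path is the SDK's data package; the field names and values are made up, and NewFieldGeneric, AppendTyped, AtTyped, NewFieldGenericNullable and ConcreteAtTyped exist only with this change applied:

package main

import (
	"fmt"

	"github.com/grafana/grafana-plugin-sdk-go/data"
)

func main() {
	// Typed construction and access, avoiding interface{} boxing.
	f := data.NewFieldGeneric("value", nil, []float64{1.5, 2.5, 3.5})
	data.AppendTyped(f, 4.5)
	fmt.Println(data.AtTyped[float64](f, 0)) // 1.5

	// The existing Field API keeps working and now takes the typed
	// fast path added to FloatAt earlier in this diff.
	v, _ := f.FloatAt(1)
	fmt.Println(v) // 2.5

	// Nullable variant: ConcreteAtTyped reports whether the value was non-nil.
	one := 1.0
	nf := data.NewFieldGenericNullable("maybe", nil, []*float64{nil, &one})
	if x, ok := data.ConcreteAtTyped[float64](nf, 1); ok {
		fmt.Println(x) // 1
	}
}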
+func (f *Field) IsgenericVector() bool { + switch f.vector.(type) { + case *genericVector[int8], *genericVector[int16], *genericVector[int32], *genericVector[int64]: + return true + case *genericVector[uint8], *genericVector[uint16], *genericVector[uint32], *genericVector[uint64]: + return true + case *genericVector[float32], *genericVector[float64]: + return true + case *genericVector[string], *genericVector[bool]: + return true + case *nullableGenericVector[int8], *nullableGenericVector[int16], *nullableGenericVector[int32], *nullableGenericVector[int64]: + return true + case *nullableGenericVector[uint8], *nullableGenericVector[uint16], *nullableGenericVector[uint32], *nullableGenericVector[uint64]: + return true + case *nullableGenericVector[float32], *nullableGenericVector[float64]: + return true + case *nullableGenericVector[string], *nullableGenericVector[bool]: + return true + case *genericVector[EnumItemIndex], *nullableGenericVector[EnumItemIndex]: + return true + } + return false +} diff --git a/data/field_type_enum.go b/data/field_type_enum.go index e9ee6db55..8a0873de5 100644 --- a/data/field_type_enum.go +++ b/data/field_type_enum.go @@ -1,176 +1,58 @@ package data -// this supports the enum type -// it is different than the rest since it is backed by -// a uint16 but maps to the EnumItemIndex type, and has special semantics and interacts with the metadata -// Unlike the other fields it can not be easily generated - -type enumVector []EnumItemIndex +import ( + jsoniter "github.com/json-iterator/go" +) +// EnumItemIndex is used to represent enum values as uint16 indices type EnumItemIndex uint16 -func newEnumVector(n int) *enumVector { - v := enumVector(make([]EnumItemIndex, n)) - return &v -} - -func newEnumVectorWithValues(s []EnumItemIndex) *enumVector { - v := make([]EnumItemIndex, len(s)) - copy(v, s) - return (*enumVector)(&v) -} - -func (v *enumVector) Set(idx int, i interface{}) { - (*v)[idx] = i.(EnumItemIndex) -} - -func (v *enumVector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *enumVector) Append(i interface{}) { - *v = append(*v, i.(EnumItemIndex)) -} - -func (v *enumVector) At(i int) interface{} { - return (*v)[i] -} - -func (v *enumVector) NilAt(_ int) bool { - return false -} - -func (v *enumVector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *enumVector) Len() int { - return len(*v) -} - -func (v *enumVector) CopyAt(i int) interface{} { - return (*v)[i] -} - -func (v *enumVector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *enumVector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *enumVector) Extend(i int) { - *v = append(*v, make([]EnumItemIndex, i)...) -} - -func (v *enumVector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *enumVector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) 
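One more aside before the removal continues below (not part of the diff): with the hand-written enum vector gone, []EnumItemIndex values flow through the same generic machinery as every other element type. A hypothetical spot check, again assuming the SDK's data import path:

package main

import (
	"fmt"

	"github.com/grafana/grafana-plugin-sdk-go/data"
)

func main() {
	// Enum fields are constructed exactly as before…
	f := data.NewField("state", nil, []data.EnumItemIndex{0, 1, 2})
	// …and are now backed by genericVector[EnumItemIndex], which the
	// helper shown just above reports:
	fmt.Println(f.IsgenericVector()) // true with this change applied
}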
-} - -type nullableEnumVector []*EnumItemIndex - -func newNullableEnumVector(n int) *nullableEnumVector { - v := nullableEnumVector(make([]*EnumItemIndex, n)) - return &v -} - -func newNullableEnumVectorWithValues(s []*EnumItemIndex) *nullableEnumVector { - v := make([]*EnumItemIndex, len(s)) - copy(v, s) - return (*nullableEnumVector)(&v) -} - -func (v *nullableEnumVector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return +// JSON helpers for enum vectors +func readEnumVectorJSON(iter *jsoniter.Iterator, size int) (*genericVector[EnumItemIndex], error) { + arr := newGenericVector[EnumItemIndex](size) + for i := 0; i < size; i++ { + if !iter.ReadArray() { + iter.ReportError("readEnumVectorJSON", "expected array") + return nil, iter.Error + } + + t := iter.WhatIsNext() + if t == jsoniter.NilValue { + iter.ReadNil() + } else { + v := iter.ReadUint16() + arr.SetTyped(i, EnumItemIndex(v)) + } } - (*v)[idx] = i.(*EnumItemIndex) -} - -func (v *nullableEnumVector) SetConcrete(idx int, i interface{}) { - val := i.(EnumItemIndex) - (*v)[idx] = &val -} - -func (v *nullableEnumVector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*EnumItemIndex)) -} - -func (v *nullableEnumVector) At(i int) interface{} { - return (*v)[i] -} -func (v *nullableEnumVector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableEnumVector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *EnumItemIndex - return g + if iter.ReadArray() { + iter.ReportError("read", "expected close array") + return nil, iter.Error } - g := *(*v)[i] - return &g -} - -func (v *nullableEnumVector) ConcreteAt(i int) (interface{}, bool) { - var g EnumItemIndex - val := (*v)[i] - if val == nil { - return g, false + return arr, nil +} + +func readNullableEnumVectorJSON(iter *jsoniter.Iterator, size int) (*nullableGenericVector[EnumItemIndex], error) { + arr := newNullableGenericVector[EnumItemIndex](size) + for i := 0; i < size; i++ { + if !iter.ReadArray() { + iter.ReportError("readNullableEnumVectorJSON", "expected array") + return nil, iter.Error + } + t := iter.WhatIsNext() + if t == jsoniter.NilValue { + iter.ReadNil() + arr.SetTyped(i, nil) + } else { + v := iter.ReadUint16() + eII := EnumItemIndex(v) + arr.SetTyped(i, &eII) + } } - g = *val - return g, true -} - -func (v *nullableEnumVector) PointerAt(i int) interface{} { - return &(*v)[i] -} -func (v *nullableEnumVector) Len() int { - return len(*v) -} - -func (v *nullableEnumVector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableEnumVector) Extend(i int) { - *v = append(*v, make([]*EnumItemIndex, i)...) -} - -func (v *nullableEnumVector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") + if iter.ReadArray() { + iter.ReportError("readNullableEnumVectorJSON", "expected close array") + return nil, iter.Error } -} - -func (v *nullableEnumVector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) + return arr, nil } diff --git a/data/frame.go b/data/frame.go index 4d7284a3d..6d89c06c0 100644 --- a/data/frame.go +++ b/data/frame.go @@ -49,12 +49,14 @@ type Frame struct { // UnmarshalJSON allows unmarshalling Frame from JSON. 
func (f *Frame) UnmarshalJSON(b []byte) error { + ensureJSONIterInit() // Lazy initialization of JSON codecs iter := jsoniter.ParseBytes(jsoniter.ConfigDefault, b) return readDataFrameJSON(f, iter) } // MarshalJSON marshals Frame to JSON. func (f *Frame) MarshalJSON() ([]byte, error) { + ensureJSONIterInit() // Lazy initialization of JSON codecs cfg := jsoniter.ConfigCompatibleWithStandardLibrary stream := cfg.BorrowStream(nil) defer cfg.ReturnStream(stream) @@ -75,6 +77,7 @@ func (f *Frame) MarshalJSON() ([]byte, error) { type Frames []*Frame func (frames *Frames) MarshalJSON() ([]byte, error) { + ensureJSONIterInit() // Lazy initialization of JSON codecs cfg := jsoniter.ConfigCompatibleWithStandardLibrary stream := cfg.BorrowStream(nil) defer cfg.ReturnStream(stream) @@ -89,6 +92,7 @@ func (frames *Frames) MarshalJSON() ([]byte, error) { // UnmarshalJSON allows unmarshalling Frame from JSON. func (frames *Frames) UnmarshalJSON(b []byte) error { + ensureJSONIterInit() // Lazy initialization of JSON codecs iter := jsoniter.ParseBytes(jsoniter.ConfigDefault, b) return readDataFramesJSON(frames, iter) } @@ -100,7 +104,7 @@ func (frames *Frames) UnmarshalJSON(b []byte) error { // to the Field type or AppendRow will panic. func (f *Frame) AppendRow(vals ...interface{}) { for i, v := range vals { - f.Fields[i].vector.Append(v) + appendTypedToVector(f.Fields[i].vector, v) } } @@ -112,7 +116,7 @@ func (f *Frame) AppendRow(vals ...interface{}) { // If rowIdx exceeds the Field length, this method will panic. func (f *Frame) InsertRow(rowIdx int, vals ...interface{}) { for i, v := range vals { - f.Fields[i].vector.Insert(rowIdx, v) + insertTypedInVector(f.Fields[i].vector, rowIdx, v) } } @@ -129,7 +133,7 @@ func (f *Frame) DeleteRow(rowIdx int) { // SetRow calls each field's Set which sets the Field's value at index idx to val. func (f *Frame) SetRow(rowIdx int, vals ...interface{}) { for i, v := range vals { - f.Fields[i].vector.Set(rowIdx, v) + setTypedInVector(f.Fields[i].vector, rowIdx, v) } } @@ -261,7 +265,7 @@ func (f *Frame) CopyAt(fieldIdx int, rowIdx int) interface{} { // It will panic if either fieldIdx or rowIdx are out of range or // if the underlying type of val does not match the element type of the Field. func (f *Frame) Set(fieldIdx int, rowIdx int, val interface{}) { - f.Fields[fieldIdx].vector.Set(rowIdx, val) + setTypedInVector(f.Fields[fieldIdx].vector, rowIdx, val) } // SetConcrete sets the val at the specified fieldIdx and rowIdx. @@ -270,7 +274,7 @@ func (f *Frame) Set(fieldIdx int, rowIdx int, val interface{}) { // is not nullable this method behaves the same as the Set method. // It will panic if the underlying type of val does not match the element concrete type of the Field. func (f *Frame) SetConcrete(fieldIdx int, rowIdx int, val interface{}) { - f.Fields[fieldIdx].vector.SetConcrete(rowIdx, val) + setConcreteTypedInVector(f.Fields[fieldIdx].vector, rowIdx, val) } // Extend extends all the Fields by length by i. 
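Aside (not part of the diff): the ensureJSONIterInit() calls added above lean on a sync.Once guard that this diff defines further down in data/frame_json.go. The pattern in isolation, with hypothetical names (codecOnce, registerCodecs, ensureCodecs):

package lazyinit

import "sync"

// codecOnce plays the role of jsoniterOnce in the diff.
var codecOnce sync.Once

// registerCodecs stands in for the one-time, process-wide registration work.
func registerCodecs() {
	// e.g. register type encoders/decoders with a JSON library
}

// ensureCodecs is cheap after the first call and safe from any goroutine;
// sync.Once guarantees registerCodecs runs exactly once.
func ensureCodecs() {
	codecOnce.Do(registerCodecs)
}

Compared with the package-level init() this replaces, the registration cost is only paid by programs that actually marshal or unmarshal frames.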
@@ -426,7 +430,40 @@ func FrameTestCompareOptions() []cmp.Option { }) unexportedField := cmp.AllowUnexported(Field{}) - return []cmp.Option{f32s, f32Ptrs, f64s, f64Ptrs, confFloats, metas, rawjs, unexportedField, cmpopts.EquateEmpty()} + + // Custom comparer for generic vectors - compares via exported interface methods + vectorComparer := cmp.Comparer(func(x, y vector) bool { + if x == nil && y == nil { + return true + } + if x == nil || y == nil { + return false + } + if x.Len() != y.Len() { + return false + } + if x.Type() != y.Type() { + return false + } + // Compare each element through the interface + for i := 0; i < x.Len(); i++ { + xVal := x.At(i) + yVal := y.At(i) + if x.NilAt(i) != y.NilAt(i) { + return false + } + if x.NilAt(i) { + continue // both nil + } + // Use reflect.DeepEqual for interface{} comparison + if !cmp.Equal(xVal, yVal, f32s, f32Ptrs, f64s, f64Ptrs, rawjs) { + return false + } + } + return true + }) + + return []cmp.Option{f32s, f32Ptrs, f64s, f64Ptrs, confFloats, metas, rawjs, unexportedField, vectorComparer, cmpopts.EquateEmpty()} } const maxLengthExceededStr = "..." diff --git a/data/frame_json.gen.go b/data/frame_json.gen.go deleted file mode 100644 index 68f62b006..000000000 --- a/data/frame_json.gen.go +++ /dev/null @@ -1,925 +0,0 @@ -package data - -import ( - "github.com/apache/arrow-go/v18/arrow" - "github.com/apache/arrow-go/v18/arrow/array" - jsoniter "github.com/json-iterator/go" -) - -func writeArrowDataBinary(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.NewBinaryData(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } - stream.WriteRaw(string(v.Value(i))) - } - stream.WriteArrayEnd() - return entities -} - -// ------------------------------------------------------------- -// The rest of this file is generated from frame_json_test.go -// ------------------------------------------------------------- - -func writeArrowDataUint8(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.NewUint8Data(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } - stream.WriteUint8(v.Value(i)) - } - stream.WriteArrayEnd() - return entities -} - -func readUint8VectorJSON(iter *jsoniter.Iterator, size int) (*uint8Vector, error) { - arr := newUint8Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readUint8VectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadUint8() - arr.Set(i, v) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func readNullableUint8VectorJSON(iter *jsoniter.Iterator, size int) (*nullableUint8Vector, error) { - arr := newNullableUint8Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readNullableUint8VectorJSON", "expected array") - return nil, iter.Error - } - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadUint8() - arr.Set(i, &v) - } - } - - if iter.ReadArray() { - iter.ReportError("readNullableUint8VectorJSON", "expected close array") - return nil, 
iter.Error - } - return arr, nil -} - -func writeArrowDataUint16(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.NewUint16Data(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } - stream.WriteUint16(v.Value(i)) - } - stream.WriteArrayEnd() - return entities -} - -func readUint16VectorJSON(iter *jsoniter.Iterator, size int) (*uint16Vector, error) { - arr := newUint16Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readUint16VectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadUint16() - arr.Set(i, v) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func readNullableUint16VectorJSON(iter *jsoniter.Iterator, size int) (*nullableUint16Vector, error) { - arr := newNullableUint16Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readNullableUint16VectorJSON", "expected array") - return nil, iter.Error - } - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadUint16() - arr.Set(i, &v) - } - } - - if iter.ReadArray() { - iter.ReportError("readNullableUint16VectorJSON", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func writeArrowDataUint32(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.NewUint32Data(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } - stream.WriteUint32(v.Value(i)) - } - stream.WriteArrayEnd() - return entities -} - -func readUint32VectorJSON(iter *jsoniter.Iterator, size int) (*uint32Vector, error) { - arr := newUint32Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readUint32VectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadUint32() - arr.Set(i, v) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func readNullableUint32VectorJSON(iter *jsoniter.Iterator, size int) (*nullableUint32Vector, error) { - arr := newNullableUint32Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readNullableUint32VectorJSON", "expected array") - return nil, iter.Error - } - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadUint32() - arr.Set(i, &v) - } - } - - if iter.ReadArray() { - iter.ReportError("readNullableUint32VectorJSON", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func writeArrowDataUint64(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.NewUint64Data(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } - stream.WriteUint64(v.Value(i)) - } - stream.WriteArrayEnd() - return entities -} - -func 
readUint64VectorJSON(iter *jsoniter.Iterator, size int) (*uint64Vector, error) { - arr := newUint64Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readUint64VectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadUint64() - arr.Set(i, v) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func readNullableUint64VectorJSON(iter *jsoniter.Iterator, size int) (*nullableUint64Vector, error) { - arr := newNullableUint64Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readNullableUint64VectorJSON", "expected array") - return nil, iter.Error - } - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadUint64() - arr.Set(i, &v) - } - } - - if iter.ReadArray() { - iter.ReportError("readNullableUint64VectorJSON", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func writeArrowDataInt8(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.NewInt8Data(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } - stream.WriteInt8(v.Value(i)) - } - stream.WriteArrayEnd() - return entities -} - -func readInt8VectorJSON(iter *jsoniter.Iterator, size int) (*int8Vector, error) { - arr := newInt8Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readInt8VectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadInt8() - arr.Set(i, v) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func readNullableInt8VectorJSON(iter *jsoniter.Iterator, size int) (*nullableInt8Vector, error) { - arr := newNullableInt8Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readNullableInt8VectorJSON", "expected array") - return nil, iter.Error - } - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadInt8() - arr.Set(i, &v) - } - } - - if iter.ReadArray() { - iter.ReportError("readNullableInt8VectorJSON", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func writeArrowDataInt16(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.NewInt16Data(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } - stream.WriteInt16(v.Value(i)) - } - stream.WriteArrayEnd() - return entities -} - -func readInt16VectorJSON(iter *jsoniter.Iterator, size int) (*int16Vector, error) { - arr := newInt16Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readInt16VectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadInt16() - arr.Set(i, v) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func 
readNullableInt16VectorJSON(iter *jsoniter.Iterator, size int) (*nullableInt16Vector, error) { - arr := newNullableInt16Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readNullableInt16VectorJSON", "expected array") - return nil, iter.Error - } - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadInt16() - arr.Set(i, &v) - } - } - - if iter.ReadArray() { - iter.ReportError("readNullableInt16VectorJSON", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func writeArrowDataInt32(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.NewInt32Data(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } - stream.WriteInt32(v.Value(i)) - } - stream.WriteArrayEnd() - return entities -} - -func readInt32VectorJSON(iter *jsoniter.Iterator, size int) (*int32Vector, error) { - arr := newInt32Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readInt32VectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadInt32() - arr.Set(i, v) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func readNullableInt32VectorJSON(iter *jsoniter.Iterator, size int) (*nullableInt32Vector, error) { - arr := newNullableInt32Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readNullableInt32VectorJSON", "expected array") - return nil, iter.Error - } - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadInt32() - arr.Set(i, &v) - } - } - - if iter.ReadArray() { - iter.ReportError("readNullableInt32VectorJSON", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func writeArrowDataInt64(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.NewInt64Data(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } - stream.WriteInt64(v.Value(i)) - } - stream.WriteArrayEnd() - return entities -} - -func readInt64VectorJSON(iter *jsoniter.Iterator, size int) (*int64Vector, error) { - arr := newInt64Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readInt64VectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadInt64() - arr.Set(i, v) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func readNullableInt64VectorJSON(iter *jsoniter.Iterator, size int) (*nullableInt64Vector, error) { - arr := newNullableInt64Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readNullableInt64VectorJSON", "expected array") - return nil, iter.Error - } - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadInt64() - arr.Set(i, &v) - } - } - - if iter.ReadArray() { - iter.ReportError("readNullableInt64VectorJSON", "expected close array") - 
return nil, iter.Error - } - return arr, nil -} - -func writeArrowDataFloat32(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.NewFloat32Data(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } - val := v.Value(i) - f64 := float64(val) - if entityType, found := isSpecialEntity(f64); found { - if entities == nil { - entities = &fieldEntityLookup{} - } - entities.add(entityType, i) - stream.WriteNil() - } else { - stream.WriteFloat32(val) - } - - } - stream.WriteArrayEnd() - return entities -} - -func readFloat32VectorJSON(iter *jsoniter.Iterator, size int) (*float32Vector, error) { - arr := newFloat32Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readFloat32VectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadFloat32() - arr.Set(i, v) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func readNullableFloat32VectorJSON(iter *jsoniter.Iterator, size int) (*nullableFloat32Vector, error) { - arr := newNullableFloat32Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readNullableFloat32VectorJSON", "expected array") - return nil, iter.Error - } - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadFloat32() - arr.Set(i, &v) - } - } - - if iter.ReadArray() { - iter.ReportError("readNullableFloat32VectorJSON", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func writeArrowDataFloat64(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.NewFloat64Data(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } - val := v.Value(i) - f64 := float64(val) - if entityType, found := isSpecialEntity(f64); found { - if entities == nil { - entities = &fieldEntityLookup{} - } - entities.add(entityType, i) - stream.WriteNil() - } else { - stream.WriteFloat64(val) - } - - } - stream.WriteArrayEnd() - return entities -} - -func readFloat64VectorJSON(iter *jsoniter.Iterator, size int) (*float64Vector, error) { - arr := newFloat64Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readFloat64VectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadFloat64() - arr.Set(i, v) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func readNullableFloat64VectorJSON(iter *jsoniter.Iterator, size int) (*nullableFloat64Vector, error) { - arr := newNullableFloat64Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readNullableFloat64VectorJSON", "expected array") - return nil, iter.Error - } - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadFloat64() - arr.Set(i, &v) - } - } - - if iter.ReadArray() { - iter.ReportError("readNullableFloat64VectorJSON", "expected close array") - return nil, iter.Error - 
} - return arr, nil -} - -func writeArrowDataString(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.NewStringData(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } - stream.WriteString(v.Value(i)) - } - stream.WriteArrayEnd() - return entities -} - -func readStringVectorJSON(iter *jsoniter.Iterator, size int) (*stringVector, error) { - arr := newStringVector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readStringVectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadString() - arr.Set(i, v) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func readNullableStringVectorJSON(iter *jsoniter.Iterator, size int) (*nullableStringVector, error) { - arr := newNullableStringVector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readNullableStringVectorJSON", "expected array") - return nil, iter.Error - } - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadString() - arr.Set(i, &v) - } - } - - if iter.ReadArray() { - iter.ReportError("readNullableStringVectorJSON", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func writeArrowDataBool(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.NewBooleanData(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } - stream.WriteBool(v.Value(i)) - } - stream.WriteArrayEnd() - return entities -} - -func readBoolVectorJSON(iter *jsoniter.Iterator, size int) (*boolVector, error) { - arr := newBoolVector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readBoolVectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadBool() - arr.Set(i, v) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func readNullableBoolVectorJSON(iter *jsoniter.Iterator, size int) (*nullableBoolVector, error) { - arr := newNullableBoolVector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readNullableBoolVectorJSON", "expected array") - return nil, iter.Error - } - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadBool() - arr.Set(i, &v) - } - } - - if iter.ReadArray() { - iter.ReportError("readNullableBoolVectorJSON", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func writeArrowDataEnum(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.NewUint16Data(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } - stream.WriteUint16(v.Value(i)) - } - stream.WriteArrayEnd() - return entities -} - -func readEnumVectorJSON(iter *jsoniter.Iterator, size int) 
(*enumVector, error) { - arr := newEnumVector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readEnumVectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadUint16() - arr.Set(i, EnumItemIndex(v)) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func readNullableEnumVectorJSON(iter *jsoniter.Iterator, size int) (*nullableEnumVector, error) { - arr := newNullableEnumVector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readNullableEnumVectorJSON", "expected array") - return nil, iter.Error - } - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.ReadUint16() - eII := EnumItemIndex(v) - arr.Set(i, &eII) - } - } - - if iter.ReadArray() { - iter.ReportError("readNullableEnumVectorJSON", "expected close array") - return nil, iter.Error - } - return arr, nil -} diff --git a/data/frame_json.go b/data/frame_json.go index 6fe74c1b1..c60178d0b 100644 --- a/data/frame_json.go +++ b/data/frame_json.go @@ -7,7 +7,9 @@ import ( "fmt" "io" "math" + "sort" "strconv" + "sync" "time" "unsafe" @@ -30,9 +32,15 @@ const simpleTypeOther = "other" const jsonKeySchema = "schema" const jsonKeyData = "data" -func init() { //nolint:gochecknoinits - jsoniter.RegisterTypeEncoder("data.Frame", &dataFrameCodec{}) - jsoniter.RegisterTypeDecoder("data.Frame", &dataFrameCodec{}) +// jsoniterOnce ensures JSON codecs are registered exactly once, lazily on first use +var jsoniterOnce sync.Once + +// ensureJSONIterInit lazily initializes jsoniter codecs only when JSON operations are used +func ensureJSONIterInit() { + jsoniterOnce.Do(func() { + jsoniter.RegisterTypeEncoder("data.Frame", &dataFrameCodec{}) + jsoniter.RegisterTypeDecoder("data.Frame", &dataFrameCodec{}) + }) } type dataFrameCodec struct{} @@ -84,20 +92,40 @@ type FrameJSONCache struct { // that was not serialized on creation will return an empty value func (f *FrameJSONCache) Bytes(args FrameInclude) []byte { if f.schema != nil && (args == IncludeAll || args == IncludeSchemaOnly) { - out := append([]byte(`{"`+jsonKeySchema+`":`), f.schema...) - - if f.data != nil && (args == IncludeAll || args == IncludeDataOnly) { - out = append(out, `,"`+jsonKeyData+`":`...) + // Pre-calculate total size to avoid multiple allocations + size := 1 + len(jsonKeySchema) + 3 + len(f.schema) // {" + schema + ": + includeData := f.data != nil && (args == IncludeAll || args == IncludeDataOnly) + if includeData { + size += 1 + len(jsonKeyData) + 3 + len(f.data) // ," + data + ": + } + size++ // closing } + + out := make([]byte, 0, size) + out = append(out, '{', '"') + out = append(out, jsonKeySchema...) + out = append(out, '"', ':') + out = append(out, f.schema...) + + if includeData { + out = append(out, ',', '"') + out = append(out, jsonKeyData...) + out = append(out, '"', ':') out = append(out, f.data...) } - return append(out, "}"...) + out = append(out, '}') + return out } // only data if f.data != nil && (args == IncludeAll || args == IncludeDataOnly) { - out := []byte(`{"` + jsonKeyData + `":`) + size := 1 + len(jsonKeyData) + 3 + len(f.data) + 1 + out := make([]byte, 0, size) + out = append(out, '{', '"') + out = append(out, jsonKeyData...) + out = append(out, '"', ':') out = append(out, f.data...) - return append(out, []byte("}")...) 
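// Illustrative sketch of the capacity arithmetic used in FrameJSONCache.Bytes
// above: `{"` is 2 bytes and `":` after the key is 2 bytes (hence 1+len(key)+3),
// plus the payload and one closing `}`. The helper name exampleWrapJSONKey is
// hypothetical:
func exampleWrapJSONKey(key string, payload []byte) []byte {
	out := make([]byte, 0, 1+len(key)+3+len(payload)+1)
	out = append(out, '{', '"')
	out = append(out, key...)
	out = append(out, '"', ':')
	out = append(out, payload...)
	return append(out, '}')
}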
+ out = append(out, '}') + return out } return []byte("{}") @@ -158,6 +186,7 @@ func (f *FrameJSONCache) MarshalJSON() ([]byte, error) { // // NOTE: the format should be considered experimental until grafana 8 is released. func FrameToJSON(frame *Frame, include FrameInclude) ([]byte, error) { + ensureJSONIterInit() // Lazy initialization of JSON codecs cfg := jsoniter.ConfigCompatibleWithStandardLibrary stream := cfg.BorrowStream(nil) defer cfg.ReturnStream(stream) @@ -288,12 +317,31 @@ func readFrameData(iter *jsoniter.Iterator, frame *Frame) error { addNanos := func() { if readNanos { if nanos[fieldIndex] != nil { - for i := 0; i < size; i++ { - t, ok := field.ConcreteAt(i) - if !ok { - continue + // Use typed access for time fields to avoid boxing + if tv, ok := field.vector.(*genericVector[time.Time]); ok { + for i := 0; i < size; i++ { + t := tv.AtTyped(i) + tv.SetTyped(i, t.Add(time.Nanosecond*time.Duration(nanos[fieldIndex][i]))) + } + } else if tv, ok := field.vector.(*nullableGenericVector[time.Time]); ok { + for i := 0; i < size; i++ { + pt := tv.AtTyped(i) + if pt == nil { + continue + } + t := *pt + tWithNS := t.Add(time.Nanosecond * time.Duration(nanos[fieldIndex][i])) + tv.SetTyped(i, &tWithNS) + } + } else { + // Fallback for other types + for i := 0; i < size; i++ { + t, ok := field.ConcreteAt(i) + if !ok { + continue + } + field.Set(i, t.(time.Time).Add(time.Nanosecond*time.Duration(nanos[fieldIndex][i]))) } - field.Set(i, t.(time.Time).Add(time.Nanosecond*time.Duration(nanos[fieldIndex][i]))) } } } @@ -324,7 +372,7 @@ func readFrameData(iter *jsoniter.Iterator, frame *Frame) error { replace := getReplacementValue(l3Field, field.Type()) for iter.ReadArray() { idx := iter.ReadInt() - field.vector.SetConcrete(idx, replace) + setConcreteTypedInVector(field.vector, idx, replace) } } } else { @@ -348,7 +396,7 @@ func readFrameData(iter *jsoniter.Iterator, frame *Frame) error { continue } tWithNS := t.(time.Time).Add(time.Nanosecond * time.Duration(ns)) - field.vector.SetConcrete(idx, tWithNS) + setConcreteTypedInVector(field.vector, idx, tWithNS) continue } if idx == 0 { @@ -436,7 +484,7 @@ func jsonValuesToVector(iter *jsoniter.Iterator, ft FieldType) (vector, error) { if err != nil { return nil, err } - return newUint64VectorWithValues(u), nil + return newGenericVectorWithValues(u), nil case FieldTypeNullableUint64: parseUint64 := func(s string) (*uint64, error) { @@ -450,10 +498,10 @@ func jsonValuesToVector(iter *jsoniter.Iterator, ft FieldType) (vector, error) { if err != nil { return nil, err } - return newNullableUint64VectorWithValues(u), nil + return newNullableGenericVectorWithValues(u), nil case FieldTypeInt64: - vals := newInt64Vector(0) + vals := newGenericVector[int64](0) for iter.ReadArray() { v := iter.ReadInt64() vals.Append(v) @@ -461,7 +509,7 @@ func jsonValuesToVector(iter *jsoniter.Iterator, ft FieldType) (vector, error) { return vals, nil case FieldTypeNullableInt64: - vals := newNullableInt64Vector(0) + vals := newNullableGenericVector[int64](0) for iter.ReadArray() { t := iter.WhatIsNext() if t == sdkjsoniter.NilValue { @@ -475,7 +523,7 @@ func jsonValuesToVector(iter *jsoniter.Iterator, ft FieldType) (vector, error) { return vals, nil case FieldTypeJSON, FieldTypeNullableJSON: - vals := newJsonRawMessageVector(0) + vals := newGenericVector[json.RawMessage](0) for iter.ReadArray() { var v json.RawMessage t := iter.WhatIsNext() @@ -490,143 +538,482 @@ func jsonValuesToVector(iter *jsoniter.Iterator, ft FieldType) (vector, error) { // Convert this to the 
pointer flavor if ft == FieldTypeNullableJSON { size := vals.Len() - nullable := newNullableJsonRawMessageVector(size) + nullable := newNullableGenericVector[json.RawMessage](size) for i := 0; i < size; i++ { - v := vals.At(i).(json.RawMessage) - nullable.Set(i, &v) + v := vals.AtTyped(i) // Use typed access to avoid boxing + nullable.SetTyped(i, &v) } return nullable, nil } return vals, nil - } - // if it's not uint64 field, handle the array the old way - convert := func(v interface{}) (interface{}, error) { - return v, nil - } + case FieldTypeFloat64: + vals := newGenericVector[float64](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(float64(0)) + } else { + v := iter.ReadFloat64() + vals.Append(v) + } + } + return vals, nil - switch ft.NonNullableType() { - case FieldTypeTime: - convert = func(v interface{}) (interface{}, error) { - fV, ok := v.(float64) - if !ok { - return nil, fmt.Errorf("error reading time") + case FieldTypeNullableFloat64: + vals := newNullableGenericVector[float64](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(nil) + } else { + v := iter.ReadFloat64() + vals.Append(&v) } - return time.Unix(0, int64(fV)*int64(time.Millisecond)).UTC(), nil } + return vals, nil - case FieldTypeUint8: - convert = func(v interface{}) (interface{}, error) { - iV, err := int64FromJSON(v) - return uint8(iV), err + case FieldTypeFloat32: + vals := newGenericVector[float32](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(float32(0)) + } else { + v := iter.ReadFloat32() + vals.Append(v) + } } + return vals, nil - case FieldTypeUint16: // enums and uint16 share the same backings - convert = func(v interface{}) (interface{}, error) { - iV, err := int64FromJSON(v) - return uint16(iV), err + case FieldTypeNullableFloat32: + vals := newNullableGenericVector[float32](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(nil) + } else { + v := iter.ReadFloat32() + vals.Append(&v) + } } + return vals, nil - case FieldTypeEnum: // enums and uint16 share the same backings - convert = func(v interface{}) (interface{}, error) { - iV, err := int64FromJSON(v) - return EnumItemIndex(iV), err + case FieldTypeString: + vals := newGenericVector[string](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append("") + } else { + v := iter.ReadString() + vals.Append(v) + } } + return vals, nil - case FieldTypeUint32: - convert = func(v interface{}) (interface{}, error) { - iV, err := int64FromJSON(v) - return uint32(iV), err + case FieldTypeNullableString: + vals := newNullableGenericVector[string](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(nil) + } else { + v := iter.ReadString() + vals.Append(&v) + } } - case FieldTypeInt8: - convert = func(v interface{}) (interface{}, error) { - iV, err := int64FromJSON(v) - return int8(iV), err + return vals, nil + + case FieldTypeBool: + vals := newGenericVector[bool](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(false) + } else { + v := iter.ReadBool() + vals.Append(v) + } } + return vals, nil - case FieldTypeInt16: - convert = func(v interface{}) (interface{}, error) { - iV, err := int64FromJSON(v) - return 
int16(iV), err + case FieldTypeNullableBool: + vals := newNullableGenericVector[bool](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(nil) + } else { + v := iter.ReadBool() + vals.Append(&v) + } } + return vals, nil - case FieldTypeInt32: - convert = func(v interface{}) (interface{}, error) { - iV, err := int64FromJSON(v) - return int32(iV), err + case FieldTypeTime: + vals := newGenericVector[time.Time](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(time.Time{}) + } else { + ms := iter.ReadInt64() + tv := time.Unix(ms/int64(1e+3), (ms%int64(1e+3))*int64(1e+6)).UTC() + vals.Append(tv) + } } + return vals, nil - case FieldTypeFloat32: - convert = func(v interface{}) (interface{}, error) { - fV, err := float64FromJSON(v) - return float32(fV), err + case FieldTypeNullableTime: + vals := newNullableGenericVector[time.Time](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(nil) + } else { + ms := iter.ReadInt64() + tv := time.Unix(ms/int64(1e+3), (ms%int64(1e+3))*int64(1e+6)).UTC() + vals.Append(&tv) + } } + return vals, nil - case FieldTypeFloat64: - convert = func(v interface{}) (interface{}, error) { - return float64FromJSON(v) + case FieldTypeInt8: + vals := newGenericVector[int8](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(int8(0)) + } else { + v := iter.ReadInt8() + vals.Append(v) + } } + return vals, nil - case FieldTypeString: - convert = func(v interface{}) (interface{}, error) { - str, ok := v.(string) - if ok { - return str, nil + case FieldTypeNullableInt8: + vals := newNullableGenericVector[int8](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(nil) + } else { + v := iter.ReadInt8() + vals.Append(&v) } - return fmt.Sprintf("%v", v), nil } + return vals, nil - case FieldTypeBool: - convert = func(v interface{}) (interface{}, error) { - val := v.(bool) - return val, nil + case FieldTypeInt16: + vals := newGenericVector[int16](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(int16(0)) + } else { + v := iter.ReadInt16() + vals.Append(v) + } } + return vals, nil - case FieldTypeJSON: - convert = func(v interface{}) (interface{}, error) { - r, ok := v.(json.RawMessage) - if ok { - return r, nil + case FieldTypeNullableInt16: + vals := newNullableGenericVector[int16](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(nil) + } else { + v := iter.ReadInt16() + vals.Append(&v) } - return nil, fmt.Errorf("unable to convert to json.RawMessage") } - } + return vals, nil - arr := make([]interface{}, 0) - err := itere.ReadVal(&arr) - if err != nil { - return nil, err - } - f := NewFieldFromFieldType(ft, len(arr)) - for i, v := range arr { - if v != nil { - norm, err := convert(v) - if err != nil { - return nil, err + case FieldTypeInt32: + vals := newGenericVector[int32](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(int32(0)) + } else { + v := iter.ReadInt32() + vals.Append(v) } - f.vector.SetConcrete(i, norm) // will be pointer for nullable types } - } - return f.vector, nil -} + return vals, nil -func readArrayOfNumbers[T any](iter 
*sdkjsoniter.Iterator, parse func(string) (T, error), reader func() (T, error)) ([]T, error) { - var def T - var result []T - for { - next, err := iter.ReadArray() - if err != nil { - return nil, err + case FieldTypeNullableInt32: + vals := newNullableGenericVector[int32](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(nil) + } else { + v := iter.ReadInt32() + vals.Append(&v) + } } - if !next { - break + return vals, nil + + case FieldTypeUint8: + vals := newGenericVector[uint8](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(uint8(0)) + } else { + v := iter.ReadUint8() + vals.Append(v) + } } - nextType, err := iter.WhatIsNext() - if err != nil { - return nil, err + return vals, nil + + case FieldTypeNullableUint8: + vals := newNullableGenericVector[uint8](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(nil) + } else { + v := iter.ReadUint8() + vals.Append(&v) + } } - switch nextType { + return vals, nil + + case FieldTypeUint16: + vals := newGenericVector[uint16](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(uint16(0)) + } else { + v := iter.ReadUint16() + vals.Append(v) + } + } + return vals, nil + + case FieldTypeNullableUint16: + vals := newNullableGenericVector[uint16](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(nil) + } else { + v := iter.ReadUint16() + vals.Append(&v) + } + } + return vals, nil + + case FieldTypeUint32: + vals := newGenericVector[uint32](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(uint32(0)) + } else { + v := iter.ReadUint32() + vals.Append(v) + } + } + return vals, nil + + case FieldTypeNullableUint32: + vals := newNullableGenericVector[uint32](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(nil) + } else { + v := iter.ReadUint32() + vals.Append(&v) + } + } + return vals, nil + + case FieldTypeEnum: + vals := newGenericVector[EnumItemIndex](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(EnumItemIndex(0)) + } else { + v := iter.ReadUint16() + vals.Append(EnumItemIndex(v)) + } + } + return vals, nil + + case FieldTypeNullableEnum: + vals := newNullableGenericVector[EnumItemIndex](0) + for iter.ReadArray() { + t := iter.WhatIsNext() + if t == sdkjsoniter.NilValue { + iter.ReadNil() + vals.Append(nil) + } else { + v := iter.ReadUint16() + e := EnumItemIndex(v) + vals.Append(&e) + } + } + return vals, nil + } + + // if it's not one of the above types with fast paths, handle the array the old way + convert := func(v interface{}) (interface{}, error) { + return v, nil + } + + switch ft.NonNullableType() { + case FieldTypeTime: + convert = func(v interface{}) (interface{}, error) { + fV, ok := v.(float64) + if !ok { + return nil, fmt.Errorf("error reading time") + } + return time.Unix(0, int64(fV)*int64(time.Millisecond)).UTC(), nil + } + + case FieldTypeUint8: + convert = func(v interface{}) (interface{}, error) { + iV, err := int64FromJSON(v) + return uint8(iV), err + } + + case FieldTypeUint16: // enums and uint16 share the same backings + convert = func(v interface{}) (interface{}, error) { + iV, err := 
int64FromJSON(v) + return uint16(iV), err + } + + case FieldTypeEnum: // enums and uint16 share the same backings + convert = func(v interface{}) (interface{}, error) { + iV, err := int64FromJSON(v) + return EnumItemIndex(iV), err + } + + case FieldTypeUint32: + convert = func(v interface{}) (interface{}, error) { + iV, err := int64FromJSON(v) + return uint32(iV), err + } + case FieldTypeInt8: + convert = func(v interface{}) (interface{}, error) { + iV, err := int64FromJSON(v) + return int8(iV), err + } + + case FieldTypeInt16: + convert = func(v interface{}) (interface{}, error) { + iV, err := int64FromJSON(v) + return int16(iV), err + } + + case FieldTypeInt32: + convert = func(v interface{}) (interface{}, error) { + iV, err := int64FromJSON(v) + return int32(iV), err + } + + case FieldTypeFloat32: + convert = func(v interface{}) (interface{}, error) { + fV, err := float64FromJSON(v) + return float32(fV), err + } + + case FieldTypeFloat64: + convert = func(v interface{}) (interface{}, error) { + return float64FromJSON(v) + } + + case FieldTypeString: + convert = func(v interface{}) (interface{}, error) { + str, ok := v.(string) + if ok { + return str, nil + } + return fmt.Sprintf("%v", v), nil + } + + case FieldTypeBool: + convert = func(v interface{}) (interface{}, error) { + val := v.(bool) + return val, nil + } + + case FieldTypeJSON: + convert = func(v interface{}) (interface{}, error) { + r, ok := v.(json.RawMessage) + if ok { + return r, nil + } + return nil, fmt.Errorf("unable to convert to json.RawMessage") + } + } + + arr := make([]interface{}, 0) + err := itere.ReadVal(&arr) + if err != nil { + return nil, err + } + f := NewFieldFromFieldType(ft, len(arr)) + for i, v := range arr { + if v != nil { + norm, err := convert(v) + if err != nil { + return nil, err + } + setConcreteTypedInVector(f.vector, i, norm) // will be pointer for nullable types + } + } + return f.vector, nil +} + +func readArrayOfNumbers[T any](iter *sdkjsoniter.Iterator, parse func(string) (T, error), reader func() (T, error)) ([]T, error) { + var def T + var result []T + for { + next, err := iter.ReadArray() + if err != nil { + return nil, err + } + if !next { + break + } + nextType, err := iter.WhatIsNext() + if err != nil { + return nil, err + } + switch nextType { case sdkjsoniter.StringValue: str, err := iter.ReadString() if err != nil { @@ -659,88 +1046,203 @@ func readArrayOfNumbers[T any](iter *sdkjsoniter.Iterator, parse func(string) (T // nolint:gocyclo func readVector(iter *jsoniter.Iterator, ft FieldType, size int) (vector, error) { switch ft { - // Manual + // Time, JSON, and Enum types with custom parsing logic case FieldTypeTime: - return readTimeVectorJSON(iter, false, size) + // generic time vector + vec := newGenericVector[time.Time](size) + for i := 0; i < size; i++ { + if !iter.ReadArray() { + return nil, fmt.Errorf("expected array element %d", i) + } + t := iter.WhatIsNext() + if t == jsoniter.NilValue { + iter.ReadNil() + } else { + ms := iter.ReadInt64() + tv := time.Unix(ms/int64(1e+3), (ms%int64(1e+3))*int64(1e+6)).UTC() + vec.SetTyped(i, tv) + } + } + if iter.ReadArray() { + return nil, fmt.Errorf("array size mismatch: expected %d elements", size) + } + return vec, iter.Error case FieldTypeNullableTime: - return readTimeVectorJSON(iter, true, size) + vec := newNullableGenericVector[time.Time](size) + for i := 0; i < size; i++ { + if !iter.ReadArray() { + return nil, fmt.Errorf("expected array element %d", i) + } + t := iter.WhatIsNext() + if t == jsoniter.NilValue { + iter.ReadNil() + 
vec.SetTyped(i, nil) + } else { + ms := iter.ReadInt64() + tv := time.Unix(ms/int64(1e+3), (ms%int64(1e+3))*int64(1e+6)).UTC() + vec.SetConcreteTyped(i, tv) + } + } + if iter.ReadArray() { + return nil, fmt.Errorf("array size mismatch: expected %d elements", size) + } + return vec, iter.Error case FieldTypeJSON: - return readJSONVectorJSON(iter, false, size) + vec := newGenericVector[json.RawMessage](size) + for i := 0; i < size; i++ { + if !iter.ReadArray() { + return nil, fmt.Errorf("expected array element %d", i) + } + t := iter.WhatIsNext() + if t == jsoniter.NilValue { + iter.ReadNil() + } else { + var v json.RawMessage + iter.ReadVal(&v) + vec.SetTyped(i, v) + } + } + if iter.ReadArray() { + return nil, fmt.Errorf("array size mismatch: expected %d elements", size) + } + return vec, iter.Error case FieldTypeNullableJSON: - return readJSONVectorJSON(iter, true, size) + vec := newNullableGenericVector[json.RawMessage](size) + for i := 0; i < size; i++ { + if !iter.ReadArray() { + return nil, fmt.Errorf("expected array element %d", i) + } + t := iter.WhatIsNext() + if t == jsoniter.NilValue { + iter.ReadNil() + vec.SetTyped(i, nil) + } else { + var v json.RawMessage + iter.ReadVal(&v) + vec.SetTyped(i, &v) + } + } + if iter.ReadArray() { + return nil, fmt.Errorf("array size mismatch: expected %d elements", size) + } + return vec, iter.Error + case FieldTypeEnum: + return readEnumVectorJSON(iter, size) + case FieldTypeNullableEnum: + return readNullableEnumVectorJSON(iter, size) - // Generated + // Generic vectors - inline implementations case FieldTypeUint8: - return readUint8VectorJSON(iter, size) + return readgenericVectorJSON[uint8](iter, size, iter.ReadUint8) case FieldTypeNullableUint8: - return readNullableUint8VectorJSON(iter, size) + return readnullableGenericVectorJSON[uint8](iter, size, iter.ReadUint8) case FieldTypeUint16: - return readUint16VectorJSON(iter, size) + return readgenericVectorJSON[uint16](iter, size, iter.ReadUint16) case FieldTypeNullableUint16: - return readNullableUint16VectorJSON(iter, size) + return readnullableGenericVectorJSON[uint16](iter, size, iter.ReadUint16) case FieldTypeUint32: - return readUint32VectorJSON(iter, size) + return readgenericVectorJSON[uint32](iter, size, iter.ReadUint32) case FieldTypeNullableUint32: - return readNullableUint32VectorJSON(iter, size) + return readnullableGenericVectorJSON[uint32](iter, size, iter.ReadUint32) case FieldTypeUint64: - return readUint64VectorJSON(iter, size) + return readgenericVectorJSON[uint64](iter, size, iter.ReadUint64) case FieldTypeNullableUint64: - return readNullableUint64VectorJSON(iter, size) + return readnullableGenericVectorJSON[uint64](iter, size, iter.ReadUint64) case FieldTypeInt8: - return readInt8VectorJSON(iter, size) + return readgenericVectorJSON[int8](iter, size, iter.ReadInt8) case FieldTypeNullableInt8: - return readNullableInt8VectorJSON(iter, size) + return readnullableGenericVectorJSON[int8](iter, size, iter.ReadInt8) case FieldTypeInt16: - return readInt16VectorJSON(iter, size) + return readgenericVectorJSON[int16](iter, size, iter.ReadInt16) case FieldTypeNullableInt16: - return readNullableInt16VectorJSON(iter, size) + return readnullableGenericVectorJSON[int16](iter, size, iter.ReadInt16) case FieldTypeInt32: - return readInt32VectorJSON(iter, size) + return readgenericVectorJSON[int32](iter, size, iter.ReadInt32) case FieldTypeNullableInt32: - return readNullableInt32VectorJSON(iter, size) + return readnullableGenericVectorJSON[int32](iter, size, iter.ReadInt32) case 
FieldTypeInt64: - return readInt64VectorJSON(iter, size) + return readgenericVectorJSON[int64](iter, size, iter.ReadInt64) case FieldTypeNullableInt64: - return readNullableInt64VectorJSON(iter, size) + return readnullableGenericVectorJSON[int64](iter, size, iter.ReadInt64) case FieldTypeFloat32: - return readFloat32VectorJSON(iter, size) + return readgenericVectorJSON[float32](iter, size, iter.ReadFloat32) case FieldTypeNullableFloat32: - return readNullableFloat32VectorJSON(iter, size) + return readnullableGenericVectorJSON[float32](iter, size, iter.ReadFloat32) case FieldTypeFloat64: - return readFloat64VectorJSON(iter, size) + return readgenericVectorJSON[float64](iter, size, iter.ReadFloat64) case FieldTypeNullableFloat64: - return readNullableFloat64VectorJSON(iter, size) + return readnullableGenericVectorJSON[float64](iter, size, iter.ReadFloat64) case FieldTypeString: - return readStringVectorJSON(iter, size) + return readgenericVectorJSON[string](iter, size, iter.ReadString) case FieldTypeNullableString: - return readNullableStringVectorJSON(iter, size) + return readnullableGenericVectorJSON[string](iter, size, iter.ReadString) case FieldTypeBool: - return readBoolVectorJSON(iter, size) + return readgenericVectorJSON[bool](iter, size, iter.ReadBool) case FieldTypeNullableBool: - return readNullableBoolVectorJSON(iter, size) - case FieldTypeEnum: - return readEnumVectorJSON(iter, size) - case FieldTypeNullableEnum: - return readNullableEnumVectorJSON(iter, size) + return readnullableGenericVectorJSON[bool](iter, size, iter.ReadBool) } return nil, fmt.Errorf("unsuppoted type: %s", ft.ItemTypeString()) } -// This returns the type name that is used in javascript -func getTypeScriptTypeString(t FieldType) (string, bool) { - if t.Time() { - return simpleTypeTime, true +// Generic helper for reading non-nullable vectors from JSON +func readgenericVectorJSON[T any](iter *jsoniter.Iterator, size int, readFunc func() T) (*genericVector[T], error) { + vec := newGenericVector[T](size) + for i := 0; i < size; i++ { + if !iter.ReadArray() { + return nil, fmt.Errorf("expected array element %d", i) + } + + t := iter.WhatIsNext() + if t == jsoniter.NilValue { + iter.ReadNil() + } else { + v := readFunc() + vec.SetTyped(i, v) + } } - if t.Numeric() { - return simpleTypeNumber, true + if iter.ReadArray() { + return nil, fmt.Errorf("array size mismatch: expected %d elements", size) } - switch t { - case FieldTypeBool, FieldTypeNullableBool: - return simpleTypeBool, true - case FieldTypeString, FieldTypeNullableString: - return simpleTypeString, true - case FieldTypeEnum, FieldTypeNullableEnum: - return simpleTypeEnum, true + return vec, iter.Error +} + +// Generic helper for reading nullable vectors from JSON +func readnullableGenericVectorJSON[T any](iter *jsoniter.Iterator, size int, readFunc func() T) (*nullableGenericVector[T], error) { + vec := newNullableGenericVector[T](size) + for i := 0; i < size; i++ { + if !iter.ReadArray() { + return nil, fmt.Errorf("expected array element %d", i) + } + t := iter.WhatIsNext() + if t == jsoniter.NilValue { + iter.ReadNil() + vec.SetTyped(i, nil) + } else { + v := readFunc() + vec.SetTyped(i, &v) + } + } + if iter.ReadArray() { + return nil, fmt.Errorf("array size mismatch: expected %d elements", size) + } + return vec, iter.Error +} + +// This returns the type name that is used in javascript +func getTypeScriptTypeString(t FieldType) (string, bool) { + if t.Time() { + return simpleTypeTime, true + } + if t.Numeric() { + return simpleTypeNumber, true + } + 
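// A note on the generic readers defined above (illustrative example values, not
// a behavioural change): readgenericVectorJSON leaves the zero value of T in
// place when it hits a JSON null, while readnullableGenericVectorJSON stores
// nil, so a column [1.5,null,2] of declared size 3 decodes to
//
//	[]float64{1.5, 0, 2}      // non-nullable: null becomes the zero value
//	[]*float64{&v1, nil, &v2} // nullable: null is preserved as nil
//
// and the trailing iter.ReadArray() check is what rejects payloads longer than
// the declared size.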
switch t { + case FieldTypeBool, FieldTypeNullableBool: + return simpleTypeBool, true + case FieldTypeString, FieldTypeNullableString: + return simpleTypeString, true + case FieldTypeEnum, FieldTypeNullableEnum: + return simpleTypeEnum, true case FieldTypeJSON, FieldTypeNullableJSON: return simpleTypeOther, true } @@ -803,242 +1305,1029 @@ const ( entityNegativeInf = "-Inf" ) -func (f *fieldEntityLookup) add(str string, idx int) { - switch str { - case entityPositiveInf: - f.Inf = append(f.Inf, idx) - case entityNegativeInf: - f.NegInf = append(f.NegInf, idx) - case entityNaN: - f.NaN = append(f.NaN, idx) +// Pre-allocate a small capacity to avoid initial allocations +const entitySliceInitialCap = 8 + +// Pool for reusing fieldEntityLookup objects +var entityLookupPool = sync.Pool{ + New: func() interface{} { + return &fieldEntityLookup{} + }, +} + +// Pool for reusing string slices when sorting map keys +var stringSlicePool = sync.Pool{ + New: func() interface{} { + s := make([]string, 0, 16) // Pre-allocate for typical label count + return &s + }, +} + +// getEntityLookup gets a fieldEntityLookup from the pool +func getEntityLookup() *fieldEntityLookup { + return entityLookupPool.Get().(*fieldEntityLookup) +} + +// putEntityLookup returns a fieldEntityLookup to the pool after resetting it +func putEntityLookup(f *fieldEntityLookup) { + if f == nil { + return + } + // Reset slices but keep capacity + f.NaN = f.NaN[:0] + f.Inf = f.Inf[:0] + f.NegInf = f.NegInf[:0] + entityLookupPool.Put(f) +} + +func (f *fieldEntityLookup) add(str string, idx int) { + switch str { + case entityPositiveInf: + if f.Inf == nil { + f.Inf = make([]int, 0, entitySliceInitialCap) + } + f.Inf = append(f.Inf, idx) + case entityNegativeInf: + if f.NegInf == nil { + f.NegInf = make([]int, 0, entitySliceInitialCap) + } + f.NegInf = append(f.NegInf, idx) + case entityNaN: + if f.NaN == nil { + f.NaN = make([]int, 0, entitySliceInitialCap) + } + f.NaN = append(f.NaN, idx) + } +} + +func isSpecialEntity(v float64) (string, bool) { + switch { + case math.IsNaN(v): + return entityNaN, true + case math.IsInf(v, 1): + return entityPositiveInf, true + case math.IsInf(v, -1): + return entityNegativeInf, true + default: + return "", false + } +} + +func writeDataFrame(frame *Frame, stream *jsoniter.Stream, includeSchema bool, includeData bool) { + stream.WriteObjectStart() + if includeSchema { + stream.WriteObjectField(jsonKeySchema) + writeDataFrameSchema(frame, stream) + } + + if includeData { + if includeSchema { + stream.WriteMore() + } + + stream.WriteObjectField(jsonKeyData) + writeDataFrameData(frame, stream) + } + stream.WriteObjectEnd() +} + +func writeDataFrameSchema(frame *Frame, stream *jsoniter.Stream) { + started := false + stream.WriteObjectStart() + + if len(frame.Name) > 0 { + stream.WriteObjectField("name") + stream.WriteString(frame.Name) + started = true + } + + if len(frame.RefID) > 0 { + if started { + stream.WriteMore() + } + stream.WriteObjectField("refId") + stream.WriteString(frame.RefID) + started = true + } + + if frame.Meta != nil { + if started { + stream.WriteMore() + } + stream.WriteObjectField("meta") + stream.WriteVal(frame.Meta) + started = true + } + + if started { + stream.WriteMore() + } + stream.WriteObjectField("fields") + stream.WriteArrayStart() + for i, f := range frame.Fields { + if i > 0 { + stream.WriteMore() + } + started = false + stream.WriteObjectStart() + if len(f.Name) > 0 { + stream.WriteObjectField("name") + stream.WriteString(f.Name) + started = true + } + + t, ok := 
getTypeScriptTypeString(f.Type()) + if ok { + if started { + stream.WriteMore() + } + stream.WriteObjectField("type") + stream.WriteString(t) + started = true + } + + ft := f.Type() + nnt := ft.NonNullableType() + if started { + stream.WriteMore() + } + stream.WriteObjectField("typeInfo") + stream.WriteObjectStart() + stream.WriteObjectField("frame") + stream.WriteString(nnt.ItemTypeString()) + if ft.Nullable() { + stream.WriteMore() + stream.WriteObjectField("nullable") + stream.WriteBool(true) + } + stream.WriteObjectEnd() + started = true + + if f.Labels != nil { + if started { + stream.WriteMore() + } + stream.WriteObjectField("labels") + writeLabelsMap(stream, f.Labels) + started = true + } + + if f.Config != nil { + if started { + stream.WriteMore() + } + stream.WriteObjectField("config") + writeFieldConfig(stream, f.Config) + } + + stream.WriteObjectEnd() + } + stream.WriteArrayEnd() + + stream.WriteObjectEnd() +} + +// fieldWriteResult contains the results of writing a field +type fieldWriteResult struct { + entities *fieldEntityLookup + nanos []int64 + hasNSTime bool + usedFallback bool +} + +// writeTimeField writes time field data to the stream +func writeTimeField(f *Field, rowCount int, stream *jsoniter.Stream) fieldWriteResult { + var nsTime []int64 + var hasNSTime bool + + if tv, ok := f.vector.(*genericVector[time.Time]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + t := tv.AtTyped(i) + ms := t.UnixMilli() + stream.WriteInt64(ms) + msRes := t.Truncate(time.Millisecond) + ns := t.Sub(msRes).Nanoseconds() + if ns != 0 { + if !hasNSTime { + nsTime = make([]int64, rowCount) + hasNSTime = true + } + nsTime[i] = ns + } + } + return fieldWriteResult{nanos: nsTime, hasNSTime: hasNSTime} + } + + // Fallback + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if v, ok := f.ConcreteAt(i); ok { + t := v.(time.Time) + stream.WriteInt64(t.UnixMilli()) + msRes := t.Truncate(time.Millisecond) + ns := t.Sub(msRes).Nanoseconds() + if ns != 0 { + if !hasNSTime { + nsTime = make([]int64, rowCount) + hasNSTime = true + } + nsTime[i] = ns + } + } else { + stream.WriteNil() + } + } + return fieldWriteResult{nanos: nsTime, hasNSTime: hasNSTime, usedFallback: true} +} + +// writeNullableTimeField writes nullable time field data to the stream +func writeNullableTimeField(f *Field, rowCount int, stream *jsoniter.Stream) fieldWriteResult { + var nsTime []int64 + var hasNSTime bool + + if tv, ok := f.vector.(*nullableGenericVector[time.Time]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + pt := tv.AtTyped(i) + if pt == nil { + stream.WriteNil() + continue + } + t := *pt + ms := t.UnixMilli() + stream.WriteInt64(ms) + msRes := t.Truncate(time.Millisecond) + ns := t.Sub(msRes).Nanoseconds() + if ns != 0 { + if !hasNSTime { + nsTime = make([]int64, rowCount) + hasNSTime = true + } + nsTime[i] = ns + } + } + return fieldWriteResult{nanos: nsTime, hasNSTime: hasNSTime} + } + + // Fallback + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if v, ok := f.ConcreteAt(i); ok { + t := v.(time.Time) + stream.WriteInt64(t.UnixMilli()) + msRes := t.Truncate(time.Millisecond) + ns := t.Sub(msRes).Nanoseconds() + if ns != 0 { + if !hasNSTime { + nsTime = make([]int64, rowCount) + hasNSTime = true + } + nsTime[i] = ns + } + } else { + stream.WriteNil() + } + } + return fieldWriteResult{nanos: nsTime, hasNSTime: hasNSTime, usedFallback: true} +} + +// writeFloatField writes float field data to the 
stream +func writeFloatField(f *Field, rowCount int, stream *jsoniter.Stream) fieldWriteResult { + var entities *fieldEntityLookup + + switch f.Type() { + case FieldTypeFloat64: + if gv, ok := f.vector.(*genericVector[float64]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + v := gv.AtTyped(i) + if entityType, found := isSpecialEntity(v); found { + if entities == nil { + entities = getEntityLookup() + } + entities.add(entityType, i) + stream.WriteNil() + } else { + stream.WriteFloat64(v) + } + } + return fieldWriteResult{entities: entities} + } + case FieldTypeNullableFloat64: + if gv, ok := f.vector.(*nullableGenericVector[float64]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + pv := gv.AtTyped(i) + if pv == nil { + stream.WriteNil() + continue + } + v := *pv + if entityType, found := isSpecialEntity(v); found { + if entities == nil { + entities = getEntityLookup() + } + entities.add(entityType, i) + stream.WriteNil() + } else { + stream.WriteFloat64(v) + } + } + return fieldWriteResult{entities: entities} + } + case FieldTypeFloat32: + if gv, ok := f.vector.(*genericVector[float32]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + v := gv.AtTyped(i) + if entityType, found := isSpecialEntity(float64(v)); found { + if entities == nil { + entities = getEntityLookup() + } + entities.add(entityType, i) + stream.WriteNil() + } else { + stream.WriteFloat32(v) + } + } + return fieldWriteResult{entities: entities} + } + case FieldTypeNullableFloat32: + if gv, ok := f.vector.(*nullableGenericVector[float32]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + pv := gv.AtTyped(i) + if pv == nil { + stream.WriteNil() + continue + } + v := *pv + if entityType, found := isSpecialEntity(float64(v)); found { + if entities == nil { + entities = getEntityLookup() + } + entities.add(entityType, i) + stream.WriteNil() + } else { + stream.WriteFloat32(v) + } + } + return fieldWriteResult{entities: entities} + } + } + + return fieldWriteResult{usedFallback: true} +} + +// writeSignedIntField writes signed integer field data to the stream using generics +func writeSignedIntField[T int8 | int16 | int32 | int64](f *Field, rowCount int, stream *jsoniter.Stream) fieldWriteResult { + if gv, ok := f.vector.(*genericVector[T]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + stream.WriteInt64(int64(gv.AtTyped(i))) + } + return fieldWriteResult{} + } + if gv, ok := f.vector.(*nullableGenericVector[T]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + pv := gv.AtTyped(i) + if pv == nil { + stream.WriteNil() + continue + } + stream.WriteInt64(int64(*pv)) + } + return fieldWriteResult{} + } + return fieldWriteResult{usedFallback: true} +} + +// writeIntField writes signed integer field data to the stream +func writeIntField(f *Field, rowCount int, stream *jsoniter.Stream) fieldWriteResult { + switch f.Type() { + case FieldTypeInt8, FieldTypeNullableInt8: + return writeSignedIntField[int8](f, rowCount, stream) + case FieldTypeInt16, FieldTypeNullableInt16: + return writeSignedIntField[int16](f, rowCount, stream) + case FieldTypeInt32, FieldTypeNullableInt32: + return writeSignedIntField[int32](f, rowCount, stream) + case FieldTypeInt64, FieldTypeNullableInt64: + return writeSignedIntField[int64](f, rowCount, stream) + } + return fieldWriteResult{usedFallback: true} +} + +// writeUnsignedIntField writes unsigned integer field 
data to the stream using generics +func writeUnsignedIntField[T uint8 | uint16 | uint32 | uint64](f *Field, rowCount int, stream *jsoniter.Stream) fieldWriteResult { + if gv, ok := f.vector.(*genericVector[T]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + stream.WriteUint64(uint64(gv.AtTyped(i))) + } + return fieldWriteResult{} + } + if gv, ok := f.vector.(*nullableGenericVector[T]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + pv := gv.AtTyped(i) + if pv == nil { + stream.WriteNil() + continue + } + stream.WriteUint64(uint64(*pv)) + } + return fieldWriteResult{} + } + return fieldWriteResult{usedFallback: true} +} + +// writeUintField writes unsigned integer field data to the stream +func writeUintField(f *Field, rowCount int, stream *jsoniter.Stream) fieldWriteResult { + switch f.Type() { + case FieldTypeUint8, FieldTypeNullableUint8: + return writeUnsignedIntField[uint8](f, rowCount, stream) + case FieldTypeUint16, FieldTypeNullableUint16: + return writeUnsignedIntField[uint16](f, rowCount, stream) + case FieldTypeUint32, FieldTypeNullableUint32: + return writeUnsignedIntField[uint32](f, rowCount, stream) + case FieldTypeUint64, FieldTypeNullableUint64: + return writeUnsignedIntField[uint64](f, rowCount, stream) + } + return fieldWriteResult{usedFallback: true} +} + +// writeStringField writes string field data to the stream +func writeStringField(f *Field, rowCount int, stream *jsoniter.Stream) fieldWriteResult { + switch f.Type() { + case FieldTypeString: + if gv, ok := f.vector.(*genericVector[string]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + stream.WriteString(gv.AtTyped(i)) + } + return fieldWriteResult{} + } + case FieldTypeNullableString: + if gv, ok := f.vector.(*nullableGenericVector[string]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + pv := gv.AtTyped(i) + if pv == nil { + stream.WriteNil() + continue + } + stream.WriteString(*pv) + } + return fieldWriteResult{} + } + } + + return fieldWriteResult{usedFallback: true} +} + +// writeBoolField writes bool field data to the stream +func writeBoolField(f *Field, rowCount int, stream *jsoniter.Stream) fieldWriteResult { + switch f.Type() { + case FieldTypeBool: + if gv, ok := f.vector.(*genericVector[bool]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + stream.WriteBool(gv.AtTyped(i)) + } + return fieldWriteResult{} + } + case FieldTypeNullableBool: + if gv, ok := f.vector.(*nullableGenericVector[bool]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + pv := gv.AtTyped(i) + if pv == nil { + stream.WriteNil() + continue + } + stream.WriteBool(*pv) + } + return fieldWriteResult{} + } + } + + return fieldWriteResult{usedFallback: true} +} + +// writeJSONField writes JSON field data to the stream +func writeJSONField(f *Field, rowCount int, stream *jsoniter.Stream) fieldWriteResult { + switch f.Type() { + case FieldTypeJSON: + if gv, ok := f.vector.(*genericVector[json.RawMessage]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + msg := gv.AtTyped(i) + if len(msg) == 0 || string(msg) == "null" { + stream.WriteNil() + } else { + stream.WriteRaw(string(msg)) + } + } + return fieldWriteResult{} + } + case FieldTypeNullableJSON: + if gv, ok := f.vector.(*nullableGenericVector[json.RawMessage]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + pv := gv.AtTyped(i) 
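// Illustrative sketch of the type-set constraints used by writeSignedIntField
// and writeUnsignedIntField above: one generic body covers every integer width
// because the value is widened exactly once before streaming, e.g. (the helper
// name sumUnsigned is hypothetical)
//
//	func sumUnsigned[T uint8 | uint16 | uint32 | uint64](vals []T) (s uint64) {
//		for _, v := range vals {
//			s += uint64(v) // widen once, mirroring WriteUint64(uint64(gv.AtTyped(i)))
//		}
//		return s
//	}
//
// so the stream only ever sees WriteInt64/WriteUint64 calls regardless of the
// column's storage width.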
+ if pv == nil || len(*pv) == 0 || string(*pv) == "null" { + stream.WriteNil() + } else { + stream.WriteRaw(string(*pv)) + } + } + return fieldWriteResult{} + } + } + + return fieldWriteResult{usedFallback: true} +} + +// writeEnumField writes enum field data to the stream +func writeEnumField(f *Field, rowCount int, stream *jsoniter.Stream) fieldWriteResult { + switch f.Type() { + case FieldTypeEnum: + if gv, ok := f.vector.(*genericVector[EnumItemIndex]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + stream.WriteUint16(uint16(gv.AtTyped(i))) + } + return fieldWriteResult{} + } + case FieldTypeNullableEnum: + if gv, ok := f.vector.(*nullableGenericVector[EnumItemIndex]); ok { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + pv := gv.AtTyped(i) + if pv == nil { + stream.WriteNil() + continue + } + stream.WriteUint16(uint16(*pv)) + } + return fieldWriteResult{} + } + } + + return fieldWriteResult{usedFallback: true} +} + +func writeDataFrameData(frame *Frame, stream *jsoniter.Stream) { + rowCount, err := frame.RowLen() + if err != nil { + stream.Error = err + return + } + + stream.WriteObjectStart() + + entities := make([]*fieldEntityLookup, len(frame.Fields)) + entityCount := 0 + + nanos := make([][]int64, len(frame.Fields)) + nsOffSetCount := 0 + + stream.WriteObjectField("values") + stream.WriteArrayStart() + for fidx, f := range frame.Fields { + if fidx > 0 { + stream.WriteMore() + } + + stream.WriteArrayStart() + + var result fieldWriteResult + + switch f.Type() { + case FieldTypeTime: + result = writeTimeField(f, rowCount, stream) + case FieldTypeNullableTime: + result = writeNullableTimeField(f, rowCount, stream) + case FieldTypeFloat64, FieldTypeNullableFloat64, FieldTypeFloat32, FieldTypeNullableFloat32: + result = writeFloatField(f, rowCount, stream) + case FieldTypeInt8, FieldTypeNullableInt8, FieldTypeInt16, FieldTypeNullableInt16, + FieldTypeInt32, FieldTypeNullableInt32, FieldTypeInt64, FieldTypeNullableInt64: + result = writeIntField(f, rowCount, stream) + case FieldTypeUint8, FieldTypeNullableUint8, FieldTypeUint16, FieldTypeNullableUint16, + FieldTypeUint32, FieldTypeNullableUint32, FieldTypeUint64, FieldTypeNullableUint64: + result = writeUintField(f, rowCount, stream) + case FieldTypeString, FieldTypeNullableString: + result = writeStringField(f, rowCount, stream) + case FieldTypeBool, FieldTypeNullableBool: + result = writeBoolField(f, rowCount, stream) + case FieldTypeJSON, FieldTypeNullableJSON: + result = writeJSONField(f, rowCount, stream) + case FieldTypeEnum, FieldTypeNullableEnum: + result = writeEnumField(f, rowCount, stream) + default: + result = fieldWriteResult{usedFallback: true} + } + + // Handle fallback path + if result.usedFallback { + for i := 0; i < rowCount; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if v, ok := f.ConcreteAt(i); ok { + stream.WriteVal(v) + } else { + stream.WriteNil() + } + } + } + + stream.WriteArrayEnd() + + // Handle entities + if result.entities != nil { + entities[fidx] = result.entities + entityCount++ + } + + // Handle nanosecond time offsets + if result.hasNSTime { + nanos[fidx] = result.nanos + nsOffSetCount++ + } + } + stream.WriteArrayEnd() + + if entityCount > 0 { + stream.WriteMore() + stream.WriteObjectField("entities") + writeEntitiesArray(stream, entities) + // Return entities to pool after serialization + for _, ent := range entities { + putEntityLookup(ent) + } + } + + if nsOffSetCount > 0 { + stream.WriteMore() + stream.WriteObjectField("nanos") + 
writeNanosArray(stream, nanos) } + + stream.WriteObjectEnd() } -func isSpecialEntity(v float64) (string, bool) { - switch { - case math.IsNaN(v): - return entityNaN, true - case math.IsInf(v, 1): - return entityPositiveInf, true - case math.IsInf(v, -1): - return entityNegativeInf, true - default: - return "", false +// writeLabelsMap writes a map[string]string without reflection +// This is significantly faster than WriteVal which uses reflection + sorting +func writeLabelsMap(stream *jsoniter.Stream, labels map[string]string) { + if len(labels) == 0 { + stream.WriteObjectStart() + stream.WriteObjectEnd() + return } -} -func writeDataFrame(frame *Frame, stream *jsoniter.Stream, includeSchema bool, includeData bool) { - stream.WriteObjectStart() - if includeSchema { - stream.WriteObjectField(jsonKeySchema) - writeDataFrameSchema(frame, stream) + // Option 1: Fast path - no sorting (non-deterministic) + // Use this if deterministic output is not required + // Saves ~200-300 MB allocations + /* + stream.WriteObjectStart() + first := true + for k, v := range labels { + if !first { + stream.WriteMore() + } + stream.WriteObjectField(k) + stream.WriteString(v) + first = false + } + stream.WriteObjectEnd() + */ + + // Option 2: Deterministic path - with sorting + // Required for consistent JSON output / tests + // Uses pooled slice to avoid allocation + keysPtr := stringSlicePool.Get().(*[]string) + keys := (*keysPtr)[:0] // Reset length but keep capacity + + for k := range labels { + keys = append(keys, k) } - if includeData { - if includeSchema { + // Sort for deterministic output + // Most label maps are small (< 10 keys), so this is fast + sort.Strings(keys) + + stream.WriteObjectStart() + for i, k := range keys { + if i > 0 { stream.WriteMore() } - - stream.WriteObjectField(jsonKeyData) - writeDataFrameData(frame, stream) + stream.WriteObjectField(k) + stream.WriteString(labels[k]) } stream.WriteObjectEnd() + + // Return keys slice to pool + *keysPtr = keys + stringSlicePool.Put(keysPtr) } -func writeDataFrameSchema(frame *Frame, stream *jsoniter.Stream) { - started := false +// writeFieldConfig writes FieldConfig without full reflection +// This manually serializes common simple fields and uses WriteVal for complex nested structures +// nolint:gocyclo +func writeFieldConfig(stream *jsoniter.Stream, config *FieldConfig) { stream.WriteObjectStart() + needsComma := false - if len(frame.Name) > 0 { - stream.WriteObjectField("name") - stream.WriteString(frame.Name) - started = true + // Simple string fields + if config.DisplayName != "" { + stream.WriteObjectField("displayName") + stream.WriteString(config.DisplayName) + needsComma = true } - if len(frame.RefID) > 0 { - if started { + if config.DisplayNameFromDS != "" { + if needsComma { stream.WriteMore() } - stream.WriteObjectField("refId") - stream.WriteString(frame.RefID) - started = true + stream.WriteObjectField("displayNameFromDS") + stream.WriteString(config.DisplayNameFromDS) + needsComma = true } - if frame.Meta != nil { - if started { + if config.Path != "" { + if needsComma { stream.WriteMore() } - stream.WriteObjectField("meta") - stream.WriteVal(frame.Meta) - started = true + stream.WriteObjectField("path") + stream.WriteString(config.Path) + needsComma = true } - if started { - stream.WriteMore() + if config.Description != "" { + if needsComma { + stream.WriteMore() + } + stream.WriteObjectField("description") + stream.WriteString(config.Description) + needsComma = true } - stream.WriteObjectField("fields") - 
stream.WriteArrayStart() - for i, f := range frame.Fields { - if i > 0 { + + // Pointer bool fields + if config.Filterable != nil { + if needsComma { stream.WriteMore() } - started = false - stream.WriteObjectStart() - if len(f.Name) > 0 { - stream.WriteObjectField("name") - stream.WriteString(f.Name) - started = true + stream.WriteObjectField("filterable") + stream.WriteBool(*config.Filterable) + needsComma = true + } + + if config.Writeable != nil { + if needsComma { + stream.WriteMore() } + stream.WriteObjectField("writeable") + stream.WriteBool(*config.Writeable) + needsComma = true + } - t, ok := getTypeScriptTypeString(f.Type()) - if ok { - if started { - stream.WriteMore() - } - stream.WriteObjectField("type") - stream.WriteString(t) - started = true + // Numeric fields + if config.Unit != "" { + if needsComma { + stream.WriteMore() } + stream.WriteObjectField("unit") + stream.WriteString(config.Unit) + needsComma = true + } - ft := f.Type() - nnt := ft.NonNullableType() - if started { + if config.Decimals != nil { + if needsComma { stream.WriteMore() } - stream.WriteObjectField("typeInfo") - stream.WriteObjectStart() - stream.WriteObjectField("frame") - stream.WriteString(nnt.ItemTypeString()) - if ft.Nullable() { + stream.WriteObjectField("decimals") + stream.WriteUint16(*config.Decimals) + needsComma = true + } + + if config.Min != nil { + if needsComma { stream.WriteMore() - stream.WriteObjectField("nullable") - stream.WriteBool(true) } - stream.WriteObjectEnd() - started = true + stream.WriteObjectField("min") + stream.WriteVal(config.Min) // ConfFloat64 has custom MarshalJSON + needsComma = true + } - if f.Labels != nil { - if started { - stream.WriteMore() - } - stream.WriteObjectField("labels") - stream.WriteVal(f.Labels) - started = true + if config.Max != nil { + if needsComma { + stream.WriteMore() } + stream.WriteObjectField("max") + stream.WriteVal(config.Max) // ConfFloat64 has custom MarshalJSON + needsComma = true + } - if f.Config != nil { - if started { - stream.WriteMore() - } - stream.WriteObjectField("config") - stream.WriteVal(f.Config) + if config.Interval != 0 { + if needsComma { + stream.WriteMore() + } + stream.WriteObjectField("interval") + stream.WriteFloat64(config.Interval) + needsComma = true + } + + // Complex fields - use WriteVal for these as they're less common + // and would require hundreds of lines to serialize manually + if config.Mappings != nil { + if needsComma { + stream.WriteMore() } + stream.WriteObjectField("mappings") + stream.WriteVal(config.Mappings) + needsComma = true + } - stream.WriteObjectEnd() + if config.Thresholds != nil { + if needsComma { + stream.WriteMore() + } + stream.WriteObjectField("thresholds") + stream.WriteVal(config.Thresholds) + needsComma = true } - stream.WriteArrayEnd() - stream.WriteObjectEnd() -} + if config.Color != nil { + if needsComma { + stream.WriteMore() + } + stream.WriteObjectField("color") + stream.WriteVal(config.Color) + needsComma = true + } -func writeDataFrameData(frame *Frame, stream *jsoniter.Stream) { - rowCount, err := frame.RowLen() - if err != nil { - stream.Error = err - return + if config.Links != nil { + if needsComma { + stream.WriteMore() + } + stream.WriteObjectField("links") + stream.WriteVal(config.Links) + needsComma = true } - stream.WriteObjectStart() + if config.NoValue != "" { + if needsComma { + stream.WriteMore() + } + stream.WriteObjectField("noValue") + stream.WriteString(config.NoValue) + needsComma = true + } - entities := make([]*fieldEntityLookup, len(frame.Fields)) 
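// Illustrative sketch of the needsComma pattern used by writeFieldConfig (and
// writeDataFrameSchema) in this change: optional fields are emitted by hand and
// WriteMore() is inserted only between fields that were actually written, which
// keeps the output valid JSON without reflection. The function and its
// parameters are hypothetical:
func exampleWriteOptionalFields(stream *jsoniter.Stream, name, unit string) {
	stream.WriteObjectStart()
	needsComma := false
	if name != "" {
		stream.WriteObjectField("name")
		stream.WriteString(name)
		needsComma = true
	}
	if unit != "" {
		if needsComma {
			stream.WriteMore()
		}
		stream.WriteObjectField("unit")
		stream.WriteString(unit)
	}
	stream.WriteObjectEnd()
}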
- entityCount := 0 + if config.TypeConfig != nil { + if needsComma { + stream.WriteMore() + } + stream.WriteObjectField("type") + stream.WriteVal(config.TypeConfig) + needsComma = true + } - nanos := make([][]int64, len(frame.Fields)) - nsOffSetCount := 0 + if config.Custom != nil { + if needsComma { + stream.WriteMore() + } + stream.WriteObjectField("custom") + stream.WriteVal(config.Custom) + // needsComma = true last comma is not used + } - stream.WriteObjectField("values") + stream.WriteObjectEnd() +} + +// writeEntitiesArray writes entities array without reflection +func writeEntitiesArray(stream *jsoniter.Stream, entities []*fieldEntityLookup) { stream.WriteArrayStart() - for fidx, f := range frame.Fields { - if fidx > 0 { + for i, ent := range entities { + if i > 0 { stream.WriteMore() } - isTime := f.Type().Time() - nsTime := make([]int64, rowCount) - var hasNSTime bool - isFloat := f.Type() == FieldTypeFloat64 || f.Type() == FieldTypeNullableFloat64 || - f.Type() == FieldTypeFloat32 || f.Type() == FieldTypeNullableFloat32 - - stream.WriteArrayStart() - for i := 0; i < rowCount; i++ { - if i > 0 { - stream.WriteRaw(",") + if ent == nil { + stream.WriteNil() + continue + } + stream.WriteObjectStart() + hasField := false + if len(ent.NaN) > 0 { + stream.WriteObjectField("NaN") + stream.WriteArrayStart() + for j, idx := range ent.NaN { + if j > 0 { + stream.WriteMore() + } + stream.WriteInt(idx) } - if v, ok := f.ConcreteAt(i); ok { - switch { - case isTime: - t := v.(time.Time) - stream.WriteVal(t.UnixMilli()) - msRes := t.Truncate(time.Millisecond) - ns := t.Sub(msRes).Nanoseconds() - if ns != 0 { - hasNSTime = true - nsTime[i] = ns - } - case isFloat: - // For float and nullable float we check whether a value is a special - // entity (NaN, -Inf, +Inf) not supported by JSON spec, we then encode this - // information into a separate field to restore on a consumer side (setting - // null to the entity position in data). Since we are using f.ConcreteAt - // above the value is always float64 or float32 types, and never a *float64 - // or *float32. 
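// Illustrative sketch of the fieldEntityLookup pool lifecycle in this change,
// assuming the getEntityLookup/putEntityLookup helpers defined earlier: writers
// borrow a lookup only when a NaN/Inf value is actually seen, record the row
// index, and writeDataFrameData returns it to the pool once the "entities" key
// has been serialized. The function name exampleRecordSpecialValues is
// hypothetical:
func exampleRecordSpecialValues(vals []float64) *fieldEntityLookup {
	var ent *fieldEntityLookup
	for i, v := range vals {
		if entityType, found := isSpecialEntity(v); found {
			if ent == nil {
				ent = getEntityLookup() // borrowed from the sync.Pool, not freshly allocated
			}
			ent.add(entityType, i)
		}
	}
	return ent // caller is expected to call putEntityLookup(ent) after serializing
}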
- var f64 float64 - switch vt := v.(type) { - case float64: - f64 = vt - case float32: - f64 = float64(vt) - default: - stream.Error = fmt.Errorf("unsupported float type: %T", v) - return - } - if entityType, found := isSpecialEntity(f64); found { - if entities[fidx] == nil { - entities[fidx] = &fieldEntityLookup{} - } - entities[fidx].add(entityType, i) - entityCount++ - stream.WriteNil() - } else { - stream.WriteVal(v) - } - default: - stream.WriteVal(v) + stream.WriteArrayEnd() + hasField = true + } + if len(ent.Inf) > 0 { + if hasField { + stream.WriteMore() + } + stream.WriteObjectField("Inf") + stream.WriteArrayStart() + for j, idx := range ent.Inf { + if j > 0 { + stream.WriteMore() } - } else { - stream.WriteNil() + stream.WriteInt(idx) } + stream.WriteArrayEnd() + hasField = true } - stream.WriteArrayEnd() - if hasNSTime { - nanos[fidx] = nsTime - nsOffSetCount++ + if len(ent.NegInf) > 0 { + if hasField { + stream.WriteMore() + } + stream.WriteObjectField("NegInf") + stream.WriteArrayStart() + for j, idx := range ent.NegInf { + if j > 0 { + stream.WriteMore() + } + stream.WriteInt(idx) + } + stream.WriteArrayEnd() } + stream.WriteObjectEnd() } stream.WriteArrayEnd() +} - if entityCount > 0 { - stream.WriteMore() - stream.WriteObjectField("entities") - stream.WriteVal(entities) - } - - if nsOffSetCount > 0 { - stream.WriteMore() - stream.WriteObjectField("nanos") - stream.WriteVal(nanos) +// writeNanosArray writes nanos array without reflection +func writeNanosArray(stream *jsoniter.Stream, nanos [][]int64) { + stream.WriteArrayStart() + for i, nano := range nanos { + if i > 0 { + stream.WriteMore() + } + if nano == nil { + stream.WriteNil() + continue + } + stream.WriteArrayStart() + for j, ns := range nano { + if j > 0 { + stream.WriteMore() + } + stream.WriteInt64(ns) + } + stream.WriteArrayEnd() } - - stream.WriteObjectEnd() + stream.WriteArrayEnd() } func writeDataFrames(frames *Frames, stream *jsoniter.Stream) { @@ -1230,31 +2519,31 @@ func writeArrowData(stream *jsoniter.Stream, record arrow.Record) error { } case arrow.UINT8: - ent = writeArrowDataUint8(stream, col) + writeArrowDataUint8(stream, col) case arrow.UINT16: - ent = writeArrowDataUint16(stream, col) + writeArrowDataUint16(stream, col) case arrow.UINT32: - ent = writeArrowDataUint32(stream, col) + writeArrowDataUint32(stream, col) case arrow.UINT64: - ent = writeArrowDataUint64(stream, col) + writeArrowDataUint64(stream, col) case arrow.INT8: - ent = writeArrowDataInt8(stream, col) + writeArrowDataInt8(stream, col) case arrow.INT16: - ent = writeArrowDataInt16(stream, col) + writeArrowDataInt16(stream, col) case arrow.INT32: - ent = writeArrowDataInt32(stream, col) + writeArrowDataInt32(stream, col) case arrow.INT64: - ent = writeArrowDataInt64(stream, col) + writeArrowDataInt64(stream, col) case arrow.FLOAT32: ent = writeArrowDataFloat32(stream, col) case arrow.FLOAT64: ent = writeArrowDataFloat64(stream, col) case arrow.STRING: - ent = writeArrowDataString(stream, col) + writeArrowDataString(stream, col) case arrow.BOOL: - ent = writeArrowDataBool(stream, col) + writeArrowDataBool(stream, col) case arrow.BINARY: - ent = writeArrowDataBinary(stream, col) + writeArrowDataBinary(stream, col) default: return fmt.Errorf("unsupported arrow type %s for JSON", col.DataType().ID()) } @@ -1269,13 +2558,17 @@ func writeArrowData(stream *jsoniter.Stream, record arrow.Record) error { if entityCount > 0 { stream.WriteMore() stream.WriteObjectField("entities") - stream.WriteVal(entities) + writeEntitiesArray(stream, entities) 
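The new writeEntitiesArray and writeNanosArray helpers above trade stream.WriteVal, which goes through reflection, for explicit WriteArrayStart/WriteMore/Write* calls. The following self-contained sketch (not SDK code) shows the two styles side by side on a plain slice; both produce the same JSON, the manual form just avoids the reflective path.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// writeInt64sManually walks the slice itself instead of handing it to WriteVal.
func writeInt64sManually(stream *jsoniter.Stream, vals []int64) {
	stream.WriteArrayStart()
	for i, v := range vals {
		if i > 0 {
			stream.WriteMore()
		}
		stream.WriteInt64(v)
	}
	stream.WriteArrayEnd()
}

func main() {
	api := jsoniter.ConfigCompatibleWithStandardLibrary
	vals := []int64{1, 2, 3}

	manual := api.BorrowStream(nil)
	writeInt64sManually(manual, vals)
	fmt.Println(string(manual.Buffer())) // [1,2,3]
	api.ReturnStream(manual)

	reflected := api.BorrowStream(nil)
	reflected.WriteVal(vals) // same output, but encoded via reflection
	fmt.Println(string(reflected.Buffer()))
	api.ReturnStream(reflected)
}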
+ // Return entities to pool after serialization + for _, ent := range entities { + putEntityLookup(ent) + } } if hasNano { stream.WriteMore() stream.WriteObjectField("nanos") - stream.WriteVal(nanos) + writeNanosArray(stream, nanos) } stream.WriteObjectEnd() @@ -1286,7 +2579,7 @@ func writeArrowData(stream *jsoniter.Stream, record arrow.Record) error { func writeArrowDataTIMESTAMP(stream *jsoniter.Stream, col arrow.Array) []int64 { count := col.Len() var hasNSTime bool - nsTime := make([]int64, count) + var nsTime []int64 v := array.NewTimestampData(col.Data()) stream.WriteArrayStart() for i := 0; i < count; i++ { @@ -1303,7 +2596,10 @@ func writeArrowDataTIMESTAMP(stream *jsoniter.Stream, col arrow.Array) []int64 { nsOffSet := int64(ns) - ms*int64(1e6) if nsOffSet != 0 { - hasNSTime = true + if !hasNSTime { + nsTime = make([]int64, count) + hasNSTime = true + } nsTime[i] = nsOffSet } @@ -1318,66 +2614,3 @@ func writeArrowDataTIMESTAMP(stream *jsoniter.Stream, col arrow.Array) []int64 { } return nil } - -func readTimeVectorJSON(iter *jsoniter.Iterator, nullable bool, size int) (vector, error) { - var arr vector - if nullable { - arr = newNullableTimeTimeVector(size) - } else { - arr = newTimeTimeVector(size) - } - - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readUint8VectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == sdkjsoniter.NilValue { - iter.ReadNil() - } else { - ms := iter.ReadInt64() - - tv := time.Unix(ms/int64(1e+3), (ms%int64(1e+3))*int64(1e+6)).UTC() - arr.SetConcrete(i, tv) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -func readJSONVectorJSON(iter *jsoniter.Iterator, nullable bool, size int) (vector, error) { - var arr vector - if nullable { - arr = newNullableJsonRawMessageVector(size) - } else { - arr = newJsonRawMessageVector(size) - } - - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readJSONVectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == sdkjsoniter.NilValue { - iter.ReadNil() - } else { - var v json.RawMessage - iter.ReadVal(&v) - arr.SetConcrete(i, v) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} diff --git a/data/frame_json_arrow_writers.go b/data/frame_json_arrow_writers.go new file mode 100644 index 000000000..4deb327d4 --- /dev/null +++ b/data/frame_json_arrow_writers.go @@ -0,0 +1,266 @@ +// This file contains writeArrowData functions for basic types + +package data + +import ( + "github.com/apache/arrow-go/v18/arrow" + "github.com/apache/arrow-go/v18/arrow/array" + jsoniter "github.com/json-iterator/go" +) + +func writeArrowDataBinary(stream *jsoniter.Stream, col arrow.Array) { + count := col.Len() + + v := array.NewBinaryData(col.Data()) + stream.WriteArrayStart() + for i := 0; i < count; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if col.IsNull(i) { + stream.WriteNil() + continue + } + stream.WriteRaw(string(v.Value(i))) + } + stream.WriteArrayEnd() +} + +func writeArrowDataUint8(stream *jsoniter.Stream, col arrow.Array) { + count := col.Len() + + v := array.NewUint8Data(col.Data()) + stream.WriteArrayStart() + for i := 0; i < count; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if col.IsNull(i) { + stream.WriteNil() + continue + } + stream.WriteUint8(v.Value(i)) + } + stream.WriteArrayEnd() +} + +func 
writeArrowDataUint16(stream *jsoniter.Stream, col arrow.Array) { + count := col.Len() + + v := array.NewUint16Data(col.Data()) + stream.WriteArrayStart() + for i := 0; i < count; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if col.IsNull(i) { + stream.WriteNil() + continue + } + stream.WriteUint16(v.Value(i)) + } + stream.WriteArrayEnd() +} + +func writeArrowDataUint32(stream *jsoniter.Stream, col arrow.Array) { + count := col.Len() + + v := array.NewUint32Data(col.Data()) + stream.WriteArrayStart() + for i := 0; i < count; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if col.IsNull(i) { + stream.WriteNil() + continue + } + stream.WriteUint32(v.Value(i)) + } + stream.WriteArrayEnd() +} + +func writeArrowDataUint64(stream *jsoniter.Stream, col arrow.Array) { + count := col.Len() + + v := array.NewUint64Data(col.Data()) + stream.WriteArrayStart() + for i := 0; i < count; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if col.IsNull(i) { + stream.WriteNil() + continue + } + stream.WriteUint64(v.Value(i)) + } + stream.WriteArrayEnd() +} + +func writeArrowDataInt8(stream *jsoniter.Stream, col arrow.Array) { + count := col.Len() + + v := array.NewInt8Data(col.Data()) + stream.WriteArrayStart() + for i := 0; i < count; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if col.IsNull(i) { + stream.WriteNil() + continue + } + stream.WriteInt8(v.Value(i)) + } + stream.WriteArrayEnd() +} + +func writeArrowDataInt16(stream *jsoniter.Stream, col arrow.Array) { + count := col.Len() + + v := array.NewInt16Data(col.Data()) + stream.WriteArrayStart() + for i := 0; i < count; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if col.IsNull(i) { + stream.WriteNil() + continue + } + stream.WriteInt16(v.Value(i)) + } + stream.WriteArrayEnd() +} + +func writeArrowDataInt32(stream *jsoniter.Stream, col arrow.Array) { + count := col.Len() + + v := array.NewInt32Data(col.Data()) + stream.WriteArrayStart() + for i := 0; i < count; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if col.IsNull(i) { + stream.WriteNil() + continue + } + stream.WriteInt32(v.Value(i)) + } + stream.WriteArrayEnd() +} + +func writeArrowDataInt64(stream *jsoniter.Stream, col arrow.Array) { + count := col.Len() + + v := array.NewInt64Data(col.Data()) + stream.WriteArrayStart() + for i := 0; i < count; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if col.IsNull(i) { + stream.WriteNil() + continue + } + stream.WriteInt64(v.Value(i)) + } + stream.WriteArrayEnd() +} + +func writeArrowDataFloat32(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { + var entities *fieldEntityLookup + count := col.Len() + + v := array.NewFloat32Data(col.Data()) + stream.WriteArrayStart() + for i := 0; i < count; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if col.IsNull(i) { + stream.WriteNil() + continue + } + val := v.Value(i) + f64 := float64(val) + if entityType, found := isSpecialEntity(f64); found { + if entities == nil { + entities = getEntityLookup() + } + entities.add(entityType, i) + stream.WriteNil() + } else { + stream.WriteFloat32(val) + } + } + stream.WriteArrayEnd() + return entities +} + +func writeArrowDataFloat64(stream *jsoniter.Stream, col arrow.Array) *fieldEntityLookup { + var entities *fieldEntityLookup + count := col.Len() + + v := array.NewFloat64Data(col.Data()) + stream.WriteArrayStart() + for i := 0; i < count; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if col.IsNull(i) { + stream.WriteNil() + continue + } + val := v.Value(i) + if entityType, found := isSpecialEntity(val); found { + if entities == nil { + entities = 
getEntityLookup() + } + entities.add(entityType, i) + stream.WriteNil() + } else { + stream.WriteFloat64(val) + } + } + stream.WriteArrayEnd() + return entities +} + +func writeArrowDataString(stream *jsoniter.Stream, col arrow.Array) { + count := col.Len() + + v := array.NewStringData(col.Data()) + stream.WriteArrayStart() + for i := 0; i < count; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if col.IsNull(i) { + stream.WriteNil() + continue + } + stream.WriteString(v.Value(i)) + } + stream.WriteArrayEnd() +} + +func writeArrowDataBool(stream *jsoniter.Stream, col arrow.Array) { + count := col.Len() + + v := array.NewBooleanData(col.Data()) + stream.WriteArrayStart() + for i := 0; i < count; i++ { + if i > 0 { + stream.WriteRaw(",") + } + if col.IsNull(i) { + stream.WriteNil() + continue + } + stream.WriteBool(v.Value(i)) + } + stream.WriteArrayEnd() +} diff --git a/data/frame_json_bench_test.go b/data/frame_json_bench_test.go new file mode 100644 index 000000000..f8360585c --- /dev/null +++ b/data/frame_json_bench_test.go @@ -0,0 +1,752 @@ +package data_test + +import ( + "encoding/json" + "fmt" + "math" + "testing" + "time" + + "github.com/grafana/grafana-plugin-sdk-go/data" +) + +// BenchmarkFrameToJSON benchmarks the main public API function with different include options +func BenchmarkFrameToJSON_IncludeAll(b *testing.B) { + f := goldenDF() + warm, err := data.FrameToJSON(f, data.IncludeAll) + if err != nil { + b.Fatal(err) + } + b.SetBytes(int64(len(warm))) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := data.FrameToJSON(f, data.IncludeAll) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkFrameToJSON_SchemaOnly(b *testing.B) { + f := goldenDF() + warm, err := data.FrameToJSON(f, data.IncludeSchemaOnly) + if err != nil { + b.Fatal(err) + } + b.SetBytes(int64(len(warm))) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := data.FrameToJSON(f, data.IncludeSchemaOnly) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkFrameToJSON_DataOnly(b *testing.B) { + f := goldenDF() + warm, err := data.FrameToJSON(f, data.IncludeDataOnly) + if err != nil { + b.Fatal(err) + } + b.SetBytes(int64(len(warm))) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := data.FrameToJSON(f, data.IncludeDataOnly) + if err != nil { + b.Fatal(err) + } + } +} + +// BenchmarkFrameJSONCache_Create benchmarks the cache creation (existing benchmark was testing this) +func BenchmarkFrameJSONCache_Create(b *testing.B) { + f := goldenDF() + warm, err := data.FrameToJSONCache(f) + if err != nil { + b.Fatal(err) + } + b.SetBytes(int64(len(warm.Bytes(data.IncludeAll)))) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := data.FrameToJSONCache(f) + if err != nil { + b.Fatal(err) + } + } +} + +// BenchmarkFrameJSONCache_Bytes benchmarks the actual intended usage pattern +func BenchmarkFrameJSONCache_Bytes(b *testing.B) { + f := goldenDF() + cache, err := data.FrameToJSONCache(f) + if err != nil { + b.Fatal(err) + } + + b.Run("IncludeAll", func(b *testing.B) { + result := cache.Bytes(data.IncludeAll) + b.SetBytes(int64(len(result))) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _ = cache.Bytes(data.IncludeAll) + } + }) + + b.Run("SchemaOnly", func(b *testing.B) { + result := cache.Bytes(data.IncludeSchemaOnly) + b.SetBytes(int64(len(result))) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _ = cache.Bytes(data.IncludeSchemaOnly) + } + }) + + 
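The FrameJSONCache benchmarks in this file exercise what the comment calls the intended usage pattern: serialize a frame once with FrameToJSONCache and then hand out the cached bytes repeatedly. A minimal usage sketch of that pattern might look like the following; the HTTP wiring is illustrative and not part of this change.

package main

import (
	"log"
	"net/http"

	"github.com/grafana/grafana-plugin-sdk-go/data"
)

func main() {
	frame := data.NewFrame("example",
		data.NewField("value", nil, []float64{1, 2, 3}),
	)

	// Pay the serialization cost once up front.
	cache, err := data.FrameToJSONCache(frame)
	if err != nil {
		log.Fatal(err)
	}

	// Then serve the cached bytes on every request without re-serializing the frame.
	http.HandleFunc("/frame", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		_, _ = w.Write(cache.Bytes(data.IncludeAll))
	})
	log.Fatal(http.ListenAndServe(":8080", nil))
}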
b.Run("DataOnly", func(b *testing.B) { + result := cache.Bytes(data.IncludeDataOnly) + b.SetBytes(int64(len(result))) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _ = cache.Bytes(data.IncludeDataOnly) + } + }) +} + +// BenchmarkFrameUnmarshalJSON benchmarks deserialization - CRITICAL MISSING BENCHMARK +func BenchmarkFrameUnmarshalJSON(b *testing.B) { + f := goldenDF() + jsonData, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + b.SetBytes(int64(len(jsonData))) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + var frame data.Frame + err := json.Unmarshal(jsonData, &frame) + if err != nil { + b.Fatal(err) + } + } +} + +// BenchmarkFrameUnmarshalJSON_FromFrameToJSON benchmarks unmarshal from FrameToJSON output +func BenchmarkFrameUnmarshalJSON_FromFrameToJSON(b *testing.B) { + f := goldenDF() + jsonData, err := data.FrameToJSON(f, data.IncludeAll) + if err != nil { + b.Fatal(err) + } + b.SetBytes(int64(len(jsonData))) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + var frame data.Frame + err := json.Unmarshal(jsonData, &frame) + if err != nil { + b.Fatal(err) + } + } +} + +// BenchmarkFrameMarshalJSON_Sizes benchmarks different frame sizes to see scaling behavior +func BenchmarkFrameMarshalJSON_Sizes(b *testing.B) { + sizes := []int{10, 100, 1000, 10000} + + for _, size := range sizes { + b.Run(fmt.Sprintf("Rows_%d", size), func(b *testing.B) { + f := data.NewFrame("test", + data.NewField("time", nil, makeTimeSlice(size)), + data.NewField("value", nil, makeFloat64Slice(size)), + data.NewField("name", nil, makeStringSlice(size)), + ) + + warm, _ := json.Marshal(f) + b.SetBytes(int64(len(warm))) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + } +} + +// BenchmarkFrameUnmarshalJSON_Sizes benchmarks deserialization at different scales +func BenchmarkFrameUnmarshalJSON_Sizes(b *testing.B) { + sizes := []int{10, 100, 1000, 10000} + + for _, size := range sizes { + b.Run(fmt.Sprintf("Rows_%d", size), func(b *testing.B) { + f := data.NewFrame("test", + data.NewField("time", nil, makeTimeSlice(size)), + data.NewField("value", nil, makeFloat64Slice(size)), + data.NewField("name", nil, makeStringSlice(size)), + ) + + jsonData, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + b.SetBytes(int64(len(jsonData))) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + var frame data.Frame + err := json.Unmarshal(jsonData, &frame) + if err != nil { + b.Fatal(err) + } + } + }) + } +} + +// BenchmarkFrameMarshalJSON_FieldTypes benchmarks specific field types to isolate optimized paths +// nolint:gocyclo +func BenchmarkFrameMarshalJSON_FieldTypes(b *testing.B) { + size := 1000 + + b.Run("TimeNoNanos", func(b *testing.B) { + times := make([]time.Time, size) + for i := range times { + times[i] = time.Unix(int64(i), 0) // No nanosecond precision + } + f := data.NewFrame("test", data.NewField("time", nil, times)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("TimeWithNanos", func(b *testing.B) { + times := make([]time.Time, size) + for i := range times { + times[i] = time.Unix(int64(i), int64((i%1000)*1000)) // Has nanosecond precision + } + f := data.NewFrame("test", data.NewField("time", nil, times)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + 
b.Fatal(err) + } + } + }) + + b.Run("NullableTime", func(b *testing.B) { + times := make([]*time.Time, size) + for i := range times { + if i%10 != 0 { // 10% null values + t := time.Unix(int64(i), 0) + times[i] = &t + } + } + f := data.NewFrame("test", data.NewField("time", nil, times)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("Float64Clean", func(b *testing.B) { + values := make([]float64, size) + for i := range values { + values[i] = float64(i) * 1.5 + } + f := data.NewFrame("test", data.NewField("value", nil, values)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("Float64WithSpecials", func(b *testing.B) { + values := make([]float64, size) + for i := range values { + switch i % 10 { + case 0: + values[i] = math.NaN() + case 1: + values[i] = math.Inf(1) + case 2: + values[i] = math.Inf(-1) + default: + values[i] = float64(i) * 1.5 + } + } + f := data.NewFrame("test", data.NewField("value", nil, values)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("Float32WithSpecials", func(b *testing.B) { + values := make([]float32, size) + for i := range values { + switch i % 10 { + case 0: + values[i] = float32(math.NaN()) + case 1: + values[i] = float32(math.Inf(1)) + case 2: + values[i] = float32(math.Inf(-1)) + default: + values[i] = float32(i) * 1.5 + } + } + f := data.NewFrame("test", data.NewField("value", nil, values)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("Int64", func(b *testing.B) { + values := make([]int64, size) + for i := range values { + values[i] = int64(i) + } + f := data.NewFrame("test", data.NewField("value", nil, values)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("NullableInt64", func(b *testing.B) { + values := make([]*int64, size) + for i := range values { + if i%10 != 0 { // 10% null values + v := int64(i) + values[i] = &v + } + } + f := data.NewFrame("test", data.NewField("value", nil, values)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("Uint64", func(b *testing.B) { + values := make([]uint64, size) + for i := range values { + values[i] = uint64(i) + } + f := data.NewFrame("test", data.NewField("value", nil, values)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("NullableUint64", func(b *testing.B) { + values := make([]*uint64, size) + for i := range values { + if i%10 != 0 { // 10% null values + v := uint64(i) + values[i] = &v + } + } + f := data.NewFrame("test", data.NewField("value", nil, values)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("String", func(b *testing.B) { + values := make([]string, size) + for i := range values { + values[i] = fmt.Sprintf("value_%d", i) + } + f := data.NewFrame("test", data.NewField("value", nil, values)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := 
json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("NullableString", func(b *testing.B) { + values := make([]*string, size) + for i := range values { + if i%10 != 0 { // 10% null values + v := fmt.Sprintf("value_%d", i) + values[i] = &v + } + } + f := data.NewFrame("test", data.NewField("value", nil, values)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("Bool", func(b *testing.B) { + values := make([]bool, size) + for i := range values { + values[i] = i%2 == 0 + } + f := data.NewFrame("test", data.NewField("value", nil, values)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("JSON", func(b *testing.B) { + values := make([]json.RawMessage, size) + for i := range values { + values[i] = json.RawMessage(fmt.Sprintf(`{"index":%d}`, i)) + } + f := data.NewFrame("test", data.NewField("value", nil, values)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("Enum", func(b *testing.B) { + values := make([]data.EnumItemIndex, size) + for i := range values { + values[i] = data.EnumItemIndex(i % 5) + } + f := data.NewFrame("test", data.NewField("value", nil, values)) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) +} + +// BenchmarkFrameMarshalJSON_Parallel tests concurrent marshaling (pool contention) +func BenchmarkFrameMarshalJSON_Parallel(b *testing.B) { + f := goldenDF() + b.ReportAllocs() + b.ResetTimer() + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) +} + +// BenchmarkFrameMarshalJSON_ParallelLarge tests concurrent marshaling with larger frames +func BenchmarkFrameMarshalJSON_ParallelLarge(b *testing.B) { + f := data.NewFrame("test", + data.NewField("time", nil, makeTimeSlice(1000)), + data.NewField("value", nil, makeFloat64Slice(1000)), + data.NewField("name", nil, makeStringSlice(1000)), + ) + b.ReportAllocs() + b.ResetTimer() + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) +} + +// BenchmarkFrameUnmarshalJSON_Parallel tests concurrent unmarshaling +func BenchmarkFrameUnmarshalJSON_Parallel(b *testing.B) { + f := goldenDF() + jsonData, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + b.ReportAllocs() + b.ResetTimer() + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + var frame data.Frame + err := json.Unmarshal(jsonData, &frame) + if err != nil { + b.Fatal(err) + } + } + }) +} + +// BenchmarkFrameMarshalJSON_WithLabels tests frames with labels (map sorting) +func BenchmarkFrameMarshalJSON_WithLabels(b *testing.B) { + size := 1000 + + b.Run("NoLabels", func(b *testing.B) { + f := data.NewFrame("test", + data.NewField("time", nil, makeTimeSlice(size)), + data.NewField("value", nil, makeFloat64Slice(size)), + ) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("SmallLabels", func(b *testing.B) { + labels := data.Labels{"job": "api", "instance": "server1"} + f := data.NewFrame("test", + data.NewField("time", labels, makeTimeSlice(size)), + data.NewField("value", labels, 
makeFloat64Slice(size)), + ) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("ManyLabels", func(b *testing.B) { + labels := data.Labels{ + "job": "api", + "instance": "server1", + "region": "us-west", + "datacenter": "dc1", + "cluster": "prod", + "namespace": "default", + "pod": "pod-123", + "container": "main", + } + f := data.NewFrame("test", + data.NewField("time", labels, makeTimeSlice(size)), + data.NewField("value", labels, makeFloat64Slice(size)), + ) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) +} + +// BenchmarkFrameMarshalJSON_WithMeta tests frames with metadata +func BenchmarkFrameMarshalJSON_WithMeta(b *testing.B) { + size := 1000 + + b.Run("NoMeta", func(b *testing.B) { + f := data.NewFrame("test", + data.NewField("time", nil, makeTimeSlice(size)), + data.NewField("value", nil, makeFloat64Slice(size)), + ) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("WithMeta", func(b *testing.B) { + f := data.NewFrame("test", + data.NewField("time", nil, makeTimeSlice(size)), + data.NewField("value", nil, makeFloat64Slice(size)), + ) + f.Meta = &data.FrameMeta{ + ExecutedQueryString: "SELECT * FROM table WHERE time > now() - 1h", + Custom: map[string]interface{}{ + "key1": "value1", + "key2": 123, + "key3": true, + }, + } + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + } + }) +} + +// BenchmarkArrowToJSON benchmarks Arrow to JSON conversion +func BenchmarkArrowToJSON(b *testing.B) { + f := goldenDF() + arrowData, err := f.MarshalArrow() + if err != nil { + b.Fatal(err) + } + + warm, err := data.ArrowBufferToJSON(arrowData, data.IncludeAll) + if err != nil { + b.Fatal(err) + } + b.SetBytes(int64(len(warm))) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := data.ArrowBufferToJSON(arrowData, data.IncludeAll) + if err != nil { + b.Fatal(err) + } + } +} + +// BenchmarkFrameRoundtrip benchmarks complete marshal->unmarshal cycle +func BenchmarkFrameRoundtrip(b *testing.B) { + f := goldenDF() + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + jsonData, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + var frame data.Frame + err = json.Unmarshal(jsonData, &frame) + if err != nil { + b.Fatal(err) + } + } +} + +// Helper functions for creating test data + +func makeTimeSlice(n int) []time.Time { + result := make([]time.Time, n) + base := time.Unix(1600000000, 0) + for i := range result { + result[i] = base.Add(time.Duration(i) * time.Second) + } + return result +} + +func makeFloat64Slice(n int) []float64 { + result := make([]float64, n) + for i := range result { + result[i] = float64(i) * 1.5 + } + return result +} + +func makeStringSlice(n int) []string { + result := make([]string, n) + for i := range result { + result[i] = fmt.Sprintf("value_%d", i) + } + return result +} diff --git a/data/frame_json_test.go b/data/frame_json_test.go index ce14e6468..0ac42b542 100644 --- a/data/frame_json_test.go +++ b/data/frame_json_test.go @@ -6,15 +6,11 @@ import ( "math" "os" "path/filepath" - "strings" "sync" "testing" - "text/template" "time" jsoniter "github.com/json-iterator/go" - "golang.org/x/text/cases" - "golang.org/x/text/language" 
"github.com/google/go-cmp/cmp" "github.com/stretchr/testify/assert" @@ -291,6 +287,11 @@ func TestJSONFrames(t *testing.T) { func BenchmarkFrameToJSON(b *testing.B) { f := goldenDF() b.ReportAllocs() + warm, err := data.FrameToJSONCache(f) + if err != nil { + b.Fatal(err) + } + b.SetBytes(int64(len(warm.Bytes(data.IncludeAll)))) b.ResetTimer() for i := 0; i < b.N; i++ { _, err := data.FrameToJSONCache(f) @@ -303,6 +304,11 @@ func BenchmarkFrameToJSON(b *testing.B) { func BenchmarkFrameMarshalJSONStd(b *testing.B) { f := goldenDF() b.ReportAllocs() + warm, err := json.Marshal(f) + if err != nil { + b.Fatal(err) + } + b.SetBytes(int64(len(warm))) b.ResetTimer() for i := 0; i < b.N; i++ { _, err := json.Marshal(f) @@ -315,6 +321,11 @@ func BenchmarkFrameMarshalJSONStd(b *testing.B) { func BenchmarkFrameMarshalJSONIter(b *testing.B) { f := goldenDF() b.ReportAllocs() + warm, err := jsoniter.Marshal(f) + if err != nil { + b.Fatal(err) + } + b.SetBytes(int64(len(warm))) b.ResetTimer() for i := 0; i < b.N; i++ { _, err := jsoniter.Marshal(f) @@ -406,153 +417,3 @@ func TestFrame_UnmarshallUint64(t *testing.T) { } require.EqualValues(t, []uint64{math.MaxUint64, math.MaxUint64, 0, 1, 2, 3, 4, 5}, values) } - -// This function will write code to the console that should be copy/pasted into frame_json.gen.go -// when changes are required. Typically this function will always be skipped. -func TestGenerateGenericArrowCode(t *testing.T) { - t.Skip() - - types := []string{ - "uint8", "uint16", "uint32", "uint64", - "int8", "int16", "int32", "int64", - "float32", "float64", "string", "bool", - "enum", // Maps to uint16 - } - - code := ` -func writeArrowData{{.Type}}(stream *jsoniter.Stream, col array.Interface) *fieldEntityLookup { - var entities *fieldEntityLookup - count := col.Len() - - v := array.New{{.Typex}}Data(col.Data()) - stream.WriteArrayStart() - for i := 0; i < count; i++ { - if i > 0 { - stream.WriteRaw(",") - } - if col.IsNull(i) { - stream.WriteNil() - continue - } -{{- if .HasSpecialEntities }} - val := v.Value(i) - f64 := float64(val) - if entityType, found := isSpecialEntity(f64); found { - if entities == nil { - entities = &fieldEntityLookup{} - } - entities.add(entityType, i) - stream.WriteNil() - } else { - stream.Write{{.IterType}}(val) - } -{{ else }} - stream.Write{{.IterType}}(v.Value(i)){{ end }} - } - stream.WriteArrayEnd() - return entities -} - -func read{{.Type}}VectorJSON(iter *jsoniter.Iterator, size int) (*{{.Typen}}Vector, error) { - arr := new{{.Type}}Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("read{{.Type}}VectorJSON", "expected array") - return nil, iter.Error - } - - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.Read{{.IterType}}() - arr.Set(i, v) - } - } - - if iter.ReadArray() { - iter.ReportError("read", "expected close array") - return nil, iter.Error - } - return arr, nil -} - - -func readNullable{{.Type}}VectorJSON(iter *jsoniter.Iterator, size int) (*nullable{{.Type}}Vector, error) { - arr := newNullable{{.Type}}Vector(size) - for i := 0; i < size; i++ { - if !iter.ReadArray() { - iter.ReportError("readNullable{{.Type}}VectorJSON", "expected array") - return nil, iter.Error - } - t := iter.WhatIsNext() - if t == jsoniter.NilValue { - iter.ReadNil() - } else { - v := iter.Read{{.IterType}}() - arr.Set(i, &v) - } - } - - if iter.ReadArray() { - iter.ReportError("readNullable{{.Type}}VectorJSON", "expected close array") - return nil, iter.Error - } - return arr, nil -} - -` - 
caser := cases.Title(language.English, cases.NoLower) - - // switch col.DataType().ID() { - // // case arrow.STRING: - // // ent := writeArrowSTRING(stream, col) - for _, tstr := range types { - tname := caser.String(tstr) - tuppr := strings.ToUpper(tstr) - - fmt.Printf(" case arrow.%s:\n\t\tent = writeArrowData%s(stream, col)\n", tuppr, tname) - } - - for _, tstr := range types { - itertype := caser.String(tstr) - typex := tstr - switch tstr { - case "bool": - typex = "Boolean" - case "enum": - typex = "uint16" - itertype = caser.String(typex) - case "timeOffset": - typex = "int64" - itertype = caser.String(typex) - } - hasSpecialEntities := tstr == "float32" || tstr == "float64" - tmplData := struct { - Type string - Typex string - Typen string - IterType string - HasSpecialEntities bool - }{ - Type: caser.String(tstr), - Typex: caser.String(typex), - Typen: tstr, - IterType: itertype, - HasSpecialEntities: hasSpecialEntities, - } - tmpl, err := template.New("").Parse(code) - require.NoError(t, err) - err = tmpl.Execute(os.Stdout, tmplData) - require.NoError(t, err) - fmt.Printf("\n") - } - - for _, tstr := range types { - tname := caser.String(tstr) - fmt.Printf(" case FieldType%s: return read%sVectorJSON(iter, size)\n", tname, tname) - fmt.Printf(" case FieldTypeNullable%s: return readNullable%sVectorJSON(iter, size)\n", tname, tname) - } - - assert.FailNow(t, "fail so we see the output") -} diff --git a/data/gen.go b/data/gen.go deleted file mode 100644 index c9c29655d..000000000 --- a/data/gen.go +++ /dev/null @@ -1,5 +0,0 @@ -package data - -//go:generate genny -in=generic_nullable_vector.go -out=nullable_vector.gen.go gen "gen=uint8,uint16,uint32,uint64,int8,int16,int32,int64,float32,float64,string,bool,time.Time,json.RawMessage" - -//go:generate genny -in=generic_vector.go -out=vector.gen.go gen "gen=uint8,uint16,uint32,uint64,int8,int16,int32,int64,float32,float64,string,bool,time.Time,json.RawMessage" diff --git a/data/generic_nullable_vector.go b/data/generic_nullable_vector.go deleted file mode 100644 index 366206b02..000000000 --- a/data/generic_nullable_vector.go +++ /dev/null @@ -1,96 +0,0 @@ -package data - -type nullablegenVector []*gen - -func newNullablegenVector(n int) *nullablegenVector { - v := nullablegenVector(make([]*gen, n)) - return &v -} - -func newNullablegenVectorWithValues(s []*gen) *nullablegenVector { - v := make([]*gen, len(s)) - copy(v, s) - return (*nullablegenVector)(&v) -} - -func (v *nullablegenVector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*gen) -} - -func (v *nullablegenVector) SetConcrete(idx int, i interface{}) { - val := i.(gen) - (*v)[idx] = &val -} - -func (v *nullablegenVector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*gen)) -} - -func (v *nullablegenVector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullablegenVector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullablegenVector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *gen - return g - } - var g gen - g = *(*v)[i] - return &g -} - -func (v *nullablegenVector) ConcreteAt(i int) (interface{}, bool) { - var g gen - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullablegenVector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullablegenVector) Len() int { - return len(*v) -} - -func (v *nullablegenVector) Type() FieldType { - return vectorFieldType(v) -} - -func (v 
*nullablegenVector) Extend(i int) { - *v = append(*v, make([]*gen, i)...) -} - -func (v *nullablegenVector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullablegenVector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} diff --git a/data/generic_vector.go b/data/generic_vector.go deleted file mode 100644 index 1b5fbec63..000000000 --- a/data/generic_vector.go +++ /dev/null @@ -1,83 +0,0 @@ -package data - -import ( - "github.com/cheekybits/genny/generic" -) - -type gen generic.Type - -type genVector []gen - -func newgenVector(n int) *genVector { - v := genVector(make([]gen, n)) - return &v -} - -func newgenVectorWithValues(s []gen) *genVector { - v := make([]gen, len(s)) - copy(v, s) - return (*genVector)(&v) -} - -func (v *genVector) Set(idx int, i interface{}) { - (*v)[idx] = i.(gen) -} - -func (v *genVector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *genVector) Append(i interface{}) { - *v = append(*v, i.(gen)) -} - -func (v *genVector) NilAt(i int) bool { - return false -} - -func (v *genVector) At(i int) interface{} { - return (*v)[i] -} - -func (v *genVector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *genVector) Len() int { - return len(*v) -} - -func (v *genVector) CopyAt(i int) interface{} { - var g gen - g = (*v)[i] - return g -} - -func (v *genVector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *genVector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *genVector) Extend(i int) { - *v = append(*v, make([]gen, i)...) -} - -func (v *genVector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *genVector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} diff --git a/data/nullable_vector.gen.go b/data/nullable_vector.gen.go deleted file mode 100644 index 1bf5de6f1..000000000 --- a/data/nullable_vector.gen.go +++ /dev/null @@ -1,1340 +0,0 @@ -// This file was automatically generated by genny. -// Any changes will be lost if this file is regenerated. 
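The nullable variants being deleted here follow the same per-type pattern with pointer elements, where nil stands for a missing value. A generics-based counterpart to the sketch above could look like the following; again this is an illustration, not the SDK's actual nullable vector type.

package main

import "fmt"

// nullableVecSketch stores *T elements so that nil marks a missing value,
// mirroring what the generated nullable*Vector types do with *uint8, *float64, etc.
type nullableVecSketch[T any] []*T

func newNullableVecSketch[T any](n int) *nullableVecSketch[T] {
	v := make(nullableVecSketch[T], n)
	return &v
}

func (v *nullableVecSketch[T]) SetConcrete(idx int, val T) { (*v)[idx] = &val }
func (v *nullableVecSketch[T]) SetNil(idx int)             { (*v)[idx] = nil }
func (v *nullableVecSketch[T]) NilAt(idx int) bool         { return (*v)[idx] == nil }

// ConcreteAt returns the dereferenced value and whether it was present.
func (v *nullableVecSketch[T]) ConcreteAt(idx int) (T, bool) {
	if (*v)[idx] == nil {
		var zero T
		return zero, false
	}
	return *(*v)[idx], true
}

func main() {
	v := newNullableVecSketch[float64](3)
	v.SetConcrete(0, 1.5)
	v.SetConcrete(2, 2.5)
	for i := 0; i < 3; i++ {
		val, ok := v.ConcreteAt(i)
		fmt.Println(i, val, ok, v.NilAt(i))
	}
}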
-// see https://github.com/cheekybits/genny - -package data - -import ( - "encoding/json" - "time" -) - -type nullableUint8Vector []*uint8 - -func newNullableUint8Vector(n int) *nullableUint8Vector { - v := nullableUint8Vector(make([]*uint8, n)) - return &v -} - -func newNullableUint8VectorWithValues(s []*uint8) *nullableUint8Vector { - v := make([]*uint8, len(s)) - copy(v, s) - return (*nullableUint8Vector)(&v) -} - -func (v *nullableUint8Vector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*uint8) -} - -func (v *nullableUint8Vector) SetConcrete(idx int, i interface{}) { - val := i.(uint8) - (*v)[idx] = &val -} - -func (v *nullableUint8Vector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*uint8)) -} - -func (v *nullableUint8Vector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableUint8Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullableUint8Vector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *uint8 - return g - } - var g uint8 - g = *(*v)[i] - return &g -} - -func (v *nullableUint8Vector) ConcreteAt(i int) (interface{}, bool) { - var g uint8 - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullableUint8Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullableUint8Vector) Len() int { - return len(*v) -} - -func (v *nullableUint8Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableUint8Vector) Extend(i int) { - *v = append(*v, make([]*uint8, i)...) -} - -func (v *nullableUint8Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullableUint8Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) 
-} - -type nullableUint16Vector []*uint16 - -func newNullableUint16Vector(n int) *nullableUint16Vector { - v := nullableUint16Vector(make([]*uint16, n)) - return &v -} - -func newNullableUint16VectorWithValues(s []*uint16) *nullableUint16Vector { - v := make([]*uint16, len(s)) - copy(v, s) - return (*nullableUint16Vector)(&v) -} - -func (v *nullableUint16Vector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*uint16) -} - -func (v *nullableUint16Vector) SetConcrete(idx int, i interface{}) { - val := i.(uint16) - (*v)[idx] = &val -} - -func (v *nullableUint16Vector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*uint16)) -} - -func (v *nullableUint16Vector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableUint16Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullableUint16Vector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *uint16 - return g - } - var g uint16 - g = *(*v)[i] - return &g -} - -func (v *nullableUint16Vector) ConcreteAt(i int) (interface{}, bool) { - var g uint16 - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullableUint16Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullableUint16Vector) Len() int { - return len(*v) -} - -func (v *nullableUint16Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableUint16Vector) Extend(i int) { - *v = append(*v, make([]*uint16, i)...) -} - -func (v *nullableUint16Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullableUint16Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type nullableUint32Vector []*uint32 - -func newNullableUint32Vector(n int) *nullableUint32Vector { - v := nullableUint32Vector(make([]*uint32, n)) - return &v -} - -func newNullableUint32VectorWithValues(s []*uint32) *nullableUint32Vector { - v := make([]*uint32, len(s)) - copy(v, s) - return (*nullableUint32Vector)(&v) -} - -func (v *nullableUint32Vector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*uint32) -} - -func (v *nullableUint32Vector) SetConcrete(idx int, i interface{}) { - val := i.(uint32) - (*v)[idx] = &val -} - -func (v *nullableUint32Vector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*uint32)) -} - -func (v *nullableUint32Vector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableUint32Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullableUint32Vector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *uint32 - return g - } - var g uint32 - g = *(*v)[i] - return &g -} - -func (v *nullableUint32Vector) ConcreteAt(i int) (interface{}, bool) { - var g uint32 - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullableUint32Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullableUint32Vector) Len() int { - return len(*v) -} - -func (v *nullableUint32Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableUint32Vector) Extend(i int) { - *v = append(*v, make([]*uint32, i)...) 
-} - -func (v *nullableUint32Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullableUint32Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type nullableUint64Vector []*uint64 - -func newNullableUint64Vector(n int) *nullableUint64Vector { - v := nullableUint64Vector(make([]*uint64, n)) - return &v -} - -func newNullableUint64VectorWithValues(s []*uint64) *nullableUint64Vector { - v := make([]*uint64, len(s)) - copy(v, s) - return (*nullableUint64Vector)(&v) -} - -func (v *nullableUint64Vector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*uint64) -} - -func (v *nullableUint64Vector) SetConcrete(idx int, i interface{}) { - val := i.(uint64) - (*v)[idx] = &val -} - -func (v *nullableUint64Vector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*uint64)) -} - -func (v *nullableUint64Vector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableUint64Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullableUint64Vector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *uint64 - return g - } - var g uint64 - g = *(*v)[i] - return &g -} - -func (v *nullableUint64Vector) ConcreteAt(i int) (interface{}, bool) { - var g uint64 - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullableUint64Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullableUint64Vector) Len() int { - return len(*v) -} - -func (v *nullableUint64Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableUint64Vector) Extend(i int) { - *v = append(*v, make([]*uint64, i)...) -} - -func (v *nullableUint64Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullableUint64Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) 
-} - -type nullableInt8Vector []*int8 - -func newNullableInt8Vector(n int) *nullableInt8Vector { - v := nullableInt8Vector(make([]*int8, n)) - return &v -} - -func newNullableInt8VectorWithValues(s []*int8) *nullableInt8Vector { - v := make([]*int8, len(s)) - copy(v, s) - return (*nullableInt8Vector)(&v) -} - -func (v *nullableInt8Vector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*int8) -} - -func (v *nullableInt8Vector) SetConcrete(idx int, i interface{}) { - val := i.(int8) - (*v)[idx] = &val -} - -func (v *nullableInt8Vector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*int8)) -} - -func (v *nullableInt8Vector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableInt8Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullableInt8Vector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *int8 - return g - } - var g int8 - g = *(*v)[i] - return &g -} - -func (v *nullableInt8Vector) ConcreteAt(i int) (interface{}, bool) { - var g int8 - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullableInt8Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullableInt8Vector) Len() int { - return len(*v) -} - -func (v *nullableInt8Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableInt8Vector) Extend(i int) { - *v = append(*v, make([]*int8, i)...) -} - -func (v *nullableInt8Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullableInt8Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type nullableInt16Vector []*int16 - -func newNullableInt16Vector(n int) *nullableInt16Vector { - v := nullableInt16Vector(make([]*int16, n)) - return &v -} - -func newNullableInt16VectorWithValues(s []*int16) *nullableInt16Vector { - v := make([]*int16, len(s)) - copy(v, s) - return (*nullableInt16Vector)(&v) -} - -func (v *nullableInt16Vector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*int16) -} - -func (v *nullableInt16Vector) SetConcrete(idx int, i interface{}) { - val := i.(int16) - (*v)[idx] = &val -} - -func (v *nullableInt16Vector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*int16)) -} - -func (v *nullableInt16Vector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableInt16Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullableInt16Vector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *int16 - return g - } - var g int16 - g = *(*v)[i] - return &g -} - -func (v *nullableInt16Vector) ConcreteAt(i int) (interface{}, bool) { - var g int16 - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullableInt16Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullableInt16Vector) Len() int { - return len(*v) -} - -func (v *nullableInt16Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableInt16Vector) Extend(i int) { - *v = append(*v, make([]*int16, i)...) 
-} - -func (v *nullableInt16Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullableInt16Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type nullableInt32Vector []*int32 - -func newNullableInt32Vector(n int) *nullableInt32Vector { - v := nullableInt32Vector(make([]*int32, n)) - return &v -} - -func newNullableInt32VectorWithValues(s []*int32) *nullableInt32Vector { - v := make([]*int32, len(s)) - copy(v, s) - return (*nullableInt32Vector)(&v) -} - -func (v *nullableInt32Vector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*int32) -} - -func (v *nullableInt32Vector) SetConcrete(idx int, i interface{}) { - val := i.(int32) - (*v)[idx] = &val -} - -func (v *nullableInt32Vector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*int32)) -} - -func (v *nullableInt32Vector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableInt32Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullableInt32Vector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *int32 - return g - } - var g int32 - g = *(*v)[i] - return &g -} - -func (v *nullableInt32Vector) ConcreteAt(i int) (interface{}, bool) { - var g int32 - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullableInt32Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullableInt32Vector) Len() int { - return len(*v) -} - -func (v *nullableInt32Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableInt32Vector) Extend(i int) { - *v = append(*v, make([]*int32, i)...) -} - -func (v *nullableInt32Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullableInt32Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) 
-} - -type nullableInt64Vector []*int64 - -func newNullableInt64Vector(n int) *nullableInt64Vector { - v := nullableInt64Vector(make([]*int64, n)) - return &v -} - -func newNullableInt64VectorWithValues(s []*int64) *nullableInt64Vector { - v := make([]*int64, len(s)) - copy(v, s) - return (*nullableInt64Vector)(&v) -} - -func (v *nullableInt64Vector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*int64) -} - -func (v *nullableInt64Vector) SetConcrete(idx int, i interface{}) { - val := i.(int64) - (*v)[idx] = &val -} - -func (v *nullableInt64Vector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*int64)) -} - -func (v *nullableInt64Vector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableInt64Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullableInt64Vector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *int64 - return g - } - var g int64 - g = *(*v)[i] - return &g -} - -func (v *nullableInt64Vector) ConcreteAt(i int) (interface{}, bool) { - var g int64 - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullableInt64Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullableInt64Vector) Len() int { - return len(*v) -} - -func (v *nullableInt64Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableInt64Vector) Extend(i int) { - *v = append(*v, make([]*int64, i)...) -} - -func (v *nullableInt64Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullableInt64Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type nullableFloat32Vector []*float32 - -func newNullableFloat32Vector(n int) *nullableFloat32Vector { - v := nullableFloat32Vector(make([]*float32, n)) - return &v -} - -func newNullableFloat32VectorWithValues(s []*float32) *nullableFloat32Vector { - v := make([]*float32, len(s)) - copy(v, s) - return (*nullableFloat32Vector)(&v) -} - -func (v *nullableFloat32Vector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*float32) -} - -func (v *nullableFloat32Vector) SetConcrete(idx int, i interface{}) { - val := i.(float32) - (*v)[idx] = &val -} - -func (v *nullableFloat32Vector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*float32)) -} - -func (v *nullableFloat32Vector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableFloat32Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullableFloat32Vector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *float32 - return g - } - var g float32 - g = *(*v)[i] - return &g -} - -func (v *nullableFloat32Vector) ConcreteAt(i int) (interface{}, bool) { - var g float32 - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullableFloat32Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullableFloat32Vector) Len() int { - return len(*v) -} - -func (v *nullableFloat32Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableFloat32Vector) Extend(i int) { - *v = append(*v, make([]*float32, i)...) 
-} - -func (v *nullableFloat32Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullableFloat32Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type nullableFloat64Vector []*float64 - -func newNullableFloat64Vector(n int) *nullableFloat64Vector { - v := nullableFloat64Vector(make([]*float64, n)) - return &v -} - -func newNullableFloat64VectorWithValues(s []*float64) *nullableFloat64Vector { - v := make([]*float64, len(s)) - copy(v, s) - return (*nullableFloat64Vector)(&v) -} - -func (v *nullableFloat64Vector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*float64) -} - -func (v *nullableFloat64Vector) SetConcrete(idx int, i interface{}) { - val := i.(float64) - (*v)[idx] = &val -} - -func (v *nullableFloat64Vector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*float64)) -} - -func (v *nullableFloat64Vector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableFloat64Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullableFloat64Vector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *float64 - return g - } - var g float64 - g = *(*v)[i] - return &g -} - -func (v *nullableFloat64Vector) ConcreteAt(i int) (interface{}, bool) { - var g float64 - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullableFloat64Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullableFloat64Vector) Len() int { - return len(*v) -} - -func (v *nullableFloat64Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableFloat64Vector) Extend(i int) { - *v = append(*v, make([]*float64, i)...) -} - -func (v *nullableFloat64Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullableFloat64Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) 
-} - -type nullableStringVector []*string - -func newNullableStringVector(n int) *nullableStringVector { - v := nullableStringVector(make([]*string, n)) - return &v -} - -func newNullableStringVectorWithValues(s []*string) *nullableStringVector { - v := make([]*string, len(s)) - copy(v, s) - return (*nullableStringVector)(&v) -} - -func (v *nullableStringVector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*string) -} - -func (v *nullableStringVector) SetConcrete(idx int, i interface{}) { - val := i.(string) - (*v)[idx] = &val -} - -func (v *nullableStringVector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*string)) -} - -func (v *nullableStringVector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableStringVector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullableStringVector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *string - return g - } - var g string - g = *(*v)[i] - return &g -} - -func (v *nullableStringVector) ConcreteAt(i int) (interface{}, bool) { - var g string - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullableStringVector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullableStringVector) Len() int { - return len(*v) -} - -func (v *nullableStringVector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableStringVector) Extend(i int) { - *v = append(*v, make([]*string, i)...) -} - -func (v *nullableStringVector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullableStringVector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type nullableBoolVector []*bool - -func newNullableBoolVector(n int) *nullableBoolVector { - v := nullableBoolVector(make([]*bool, n)) - return &v -} - -func newNullableBoolVectorWithValues(s []*bool) *nullableBoolVector { - v := make([]*bool, len(s)) - copy(v, s) - return (*nullableBoolVector)(&v) -} - -func (v *nullableBoolVector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*bool) -} - -func (v *nullableBoolVector) SetConcrete(idx int, i interface{}) { - val := i.(bool) - (*v)[idx] = &val -} - -func (v *nullableBoolVector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*bool)) -} - -func (v *nullableBoolVector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableBoolVector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullableBoolVector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *bool - return g - } - var g bool - g = *(*v)[i] - return &g -} - -func (v *nullableBoolVector) ConcreteAt(i int) (interface{}, bool) { - var g bool - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullableBoolVector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullableBoolVector) Len() int { - return len(*v) -} - -func (v *nullableBoolVector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableBoolVector) Extend(i int) { - *v = append(*v, make([]*bool, i)...) 
-} - -func (v *nullableBoolVector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullableBoolVector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type nullableTimeTimeVector []*time.Time - -func newNullableTimeTimeVector(n int) *nullableTimeTimeVector { - v := nullableTimeTimeVector(make([]*time.Time, n)) - return &v -} - -func newNullableTimeTimeVectorWithValues(s []*time.Time) *nullableTimeTimeVector { - v := make([]*time.Time, len(s)) - copy(v, s) - return (*nullableTimeTimeVector)(&v) -} - -func (v *nullableTimeTimeVector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*time.Time) -} - -func (v *nullableTimeTimeVector) SetConcrete(idx int, i interface{}) { - val := i.(time.Time) - (*v)[idx] = &val -} - -func (v *nullableTimeTimeVector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*time.Time)) -} - -func (v *nullableTimeTimeVector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableTimeTimeVector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullableTimeTimeVector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *time.Time - return g - } - var g time.Time - g = *(*v)[i] - return &g -} - -func (v *nullableTimeTimeVector) ConcreteAt(i int) (interface{}, bool) { - var g time.Time - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullableTimeTimeVector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullableTimeTimeVector) Len() int { - return len(*v) -} - -func (v *nullableTimeTimeVector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableTimeTimeVector) Extend(i int) { - *v = append(*v, make([]*time.Time, i)...) -} - -func (v *nullableTimeTimeVector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullableTimeTimeVector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) 
-} - -type nullableJsonRawMessageVector []*json.RawMessage - -func newNullableJsonRawMessageVector(n int) *nullableJsonRawMessageVector { - v := nullableJsonRawMessageVector(make([]*json.RawMessage, n)) - return &v -} - -func newNullableJsonRawMessageVectorWithValues(s []*json.RawMessage) *nullableJsonRawMessageVector { - v := make([]*json.RawMessage, len(s)) - copy(v, s) - return (*nullableJsonRawMessageVector)(&v) -} - -func (v *nullableJsonRawMessageVector) Set(idx int, i interface{}) { - if i == nil { - (*v)[idx] = nil - return - } - (*v)[idx] = i.(*json.RawMessage) -} - -func (v *nullableJsonRawMessageVector) SetConcrete(idx int, i interface{}) { - val := i.(json.RawMessage) - (*v)[idx] = &val -} - -func (v *nullableJsonRawMessageVector) Append(i interface{}) { - if i == nil { - *v = append(*v, nil) - return - } - *v = append(*v, i.(*json.RawMessage)) -} - -func (v *nullableJsonRawMessageVector) NilAt(i int) bool { - return (*v)[i] == nil -} - -func (v *nullableJsonRawMessageVector) At(i int) interface{} { - return (*v)[i] -} - -func (v *nullableJsonRawMessageVector) CopyAt(i int) interface{} { - if (*v)[i] == nil { - var g *json.RawMessage - return g - } - var g json.RawMessage - g = *(*v)[i] - return &g -} - -func (v *nullableJsonRawMessageVector) ConcreteAt(i int) (interface{}, bool) { - var g json.RawMessage - val := (*v)[i] - if val == nil { - return g, false - } - g = *val - return g, true -} - -func (v *nullableJsonRawMessageVector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *nullableJsonRawMessageVector) Len() int { - return len(*v) -} - -func (v *nullableJsonRawMessageVector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *nullableJsonRawMessageVector) Extend(i int) { - *v = append(*v, make([]*json.RawMessage, i)...) -} - -func (v *nullableJsonRawMessageVector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *nullableJsonRawMessageVector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} diff --git a/data/vector.gen.go b/data/vector.gen.go deleted file mode 100644 index 29f7729c9..000000000 --- a/data/vector.gen.go +++ /dev/null @@ -1,1074 +0,0 @@ -// This file was automatically generated by genny. -// Any changes will be lost if this file is regenerated. 
-// see https://github.com/cheekybits/genny - -package data - -import ( - "encoding/json" - "time" -) - -type uint8Vector []uint8 - -func newUint8Vector(n int) *uint8Vector { - v := uint8Vector(make([]uint8, n)) - return &v -} - -func newUint8VectorWithValues(s []uint8) *uint8Vector { - v := make([]uint8, len(s)) - copy(v, s) - return (*uint8Vector)(&v) -} - -func (v *uint8Vector) Set(idx int, i interface{}) { - (*v)[idx] = i.(uint8) -} - -func (v *uint8Vector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *uint8Vector) Append(i interface{}) { - *v = append(*v, i.(uint8)) -} - -func (v *uint8Vector) NilAt(i int) bool { - return false -} - -func (v *uint8Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *uint8Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *uint8Vector) Len() int { - return len(*v) -} - -func (v *uint8Vector) CopyAt(i int) interface{} { - var g uint8 - g = (*v)[i] - return g -} - -func (v *uint8Vector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *uint8Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *uint8Vector) Extend(i int) { - *v = append(*v, make([]uint8, i)...) -} - -func (v *uint8Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *uint8Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type uint16Vector []uint16 - -func newUint16Vector(n int) *uint16Vector { - v := uint16Vector(make([]uint16, n)) - return &v -} - -func newUint16VectorWithValues(s []uint16) *uint16Vector { - v := make([]uint16, len(s)) - copy(v, s) - return (*uint16Vector)(&v) -} - -func (v *uint16Vector) Set(idx int, i interface{}) { - (*v)[idx] = i.(uint16) -} - -func (v *uint16Vector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *uint16Vector) Append(i interface{}) { - *v = append(*v, i.(uint16)) -} - -func (v *uint16Vector) NilAt(i int) bool { - return false -} - -func (v *uint16Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *uint16Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *uint16Vector) Len() int { - return len(*v) -} - -func (v *uint16Vector) CopyAt(i int) interface{} { - var g uint16 - g = (*v)[i] - return g -} - -func (v *uint16Vector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *uint16Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *uint16Vector) Extend(i int) { - *v = append(*v, make([]uint16, i)...) -} - -func (v *uint16Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *uint16Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) 
-} - -type uint32Vector []uint32 - -func newUint32Vector(n int) *uint32Vector { - v := uint32Vector(make([]uint32, n)) - return &v -} - -func newUint32VectorWithValues(s []uint32) *uint32Vector { - v := make([]uint32, len(s)) - copy(v, s) - return (*uint32Vector)(&v) -} - -func (v *uint32Vector) Set(idx int, i interface{}) { - (*v)[idx] = i.(uint32) -} - -func (v *uint32Vector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *uint32Vector) Append(i interface{}) { - *v = append(*v, i.(uint32)) -} - -func (v *uint32Vector) NilAt(i int) bool { - return false -} - -func (v *uint32Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *uint32Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *uint32Vector) Len() int { - return len(*v) -} - -func (v *uint32Vector) CopyAt(i int) interface{} { - var g uint32 - g = (*v)[i] - return g -} - -func (v *uint32Vector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *uint32Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *uint32Vector) Extend(i int) { - *v = append(*v, make([]uint32, i)...) -} - -func (v *uint32Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *uint32Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type uint64Vector []uint64 - -func newUint64Vector(n int) *uint64Vector { - v := uint64Vector(make([]uint64, n)) - return &v -} - -func newUint64VectorWithValues(s []uint64) *uint64Vector { - v := make([]uint64, len(s)) - copy(v, s) - return (*uint64Vector)(&v) -} - -func (v *uint64Vector) Set(idx int, i interface{}) { - (*v)[idx] = i.(uint64) -} - -func (v *uint64Vector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *uint64Vector) Append(i interface{}) { - *v = append(*v, i.(uint64)) -} - -func (v *uint64Vector) NilAt(i int) bool { - return false -} - -func (v *uint64Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *uint64Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *uint64Vector) Len() int { - return len(*v) -} - -func (v *uint64Vector) CopyAt(i int) interface{} { - var g uint64 - g = (*v)[i] - return g -} - -func (v *uint64Vector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *uint64Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *uint64Vector) Extend(i int) { - *v = append(*v, make([]uint64, i)...) -} - -func (v *uint64Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *uint64Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) 
-} - -type int8Vector []int8 - -func newInt8Vector(n int) *int8Vector { - v := int8Vector(make([]int8, n)) - return &v -} - -func newInt8VectorWithValues(s []int8) *int8Vector { - v := make([]int8, len(s)) - copy(v, s) - return (*int8Vector)(&v) -} - -func (v *int8Vector) Set(idx int, i interface{}) { - (*v)[idx] = i.(int8) -} - -func (v *int8Vector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *int8Vector) Append(i interface{}) { - *v = append(*v, i.(int8)) -} - -func (v *int8Vector) NilAt(i int) bool { - return false -} - -func (v *int8Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *int8Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *int8Vector) Len() int { - return len(*v) -} - -func (v *int8Vector) CopyAt(i int) interface{} { - var g int8 - g = (*v)[i] - return g -} - -func (v *int8Vector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *int8Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *int8Vector) Extend(i int) { - *v = append(*v, make([]int8, i)...) -} - -func (v *int8Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *int8Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type int16Vector []int16 - -func newInt16Vector(n int) *int16Vector { - v := int16Vector(make([]int16, n)) - return &v -} - -func newInt16VectorWithValues(s []int16) *int16Vector { - v := make([]int16, len(s)) - copy(v, s) - return (*int16Vector)(&v) -} - -func (v *int16Vector) Set(idx int, i interface{}) { - (*v)[idx] = i.(int16) -} - -func (v *int16Vector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *int16Vector) Append(i interface{}) { - *v = append(*v, i.(int16)) -} - -func (v *int16Vector) NilAt(i int) bool { - return false -} - -func (v *int16Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *int16Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *int16Vector) Len() int { - return len(*v) -} - -func (v *int16Vector) CopyAt(i int) interface{} { - var g int16 - g = (*v)[i] - return g -} - -func (v *int16Vector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *int16Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *int16Vector) Extend(i int) { - *v = append(*v, make([]int16, i)...) -} - -func (v *int16Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *int16Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) 
-} - -type int32Vector []int32 - -func newInt32Vector(n int) *int32Vector { - v := int32Vector(make([]int32, n)) - return &v -} - -func newInt32VectorWithValues(s []int32) *int32Vector { - v := make([]int32, len(s)) - copy(v, s) - return (*int32Vector)(&v) -} - -func (v *int32Vector) Set(idx int, i interface{}) { - (*v)[idx] = i.(int32) -} - -func (v *int32Vector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *int32Vector) Append(i interface{}) { - *v = append(*v, i.(int32)) -} - -func (v *int32Vector) NilAt(i int) bool { - return false -} - -func (v *int32Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *int32Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *int32Vector) Len() int { - return len(*v) -} - -func (v *int32Vector) CopyAt(i int) interface{} { - var g int32 - g = (*v)[i] - return g -} - -func (v *int32Vector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *int32Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *int32Vector) Extend(i int) { - *v = append(*v, make([]int32, i)...) -} - -func (v *int32Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *int32Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type int64Vector []int64 - -func newInt64Vector(n int) *int64Vector { - v := int64Vector(make([]int64, n)) - return &v -} - -func newInt64VectorWithValues(s []int64) *int64Vector { - v := make([]int64, len(s)) - copy(v, s) - return (*int64Vector)(&v) -} - -func (v *int64Vector) Set(idx int, i interface{}) { - (*v)[idx] = i.(int64) -} - -func (v *int64Vector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *int64Vector) Append(i interface{}) { - *v = append(*v, i.(int64)) -} - -func (v *int64Vector) NilAt(i int) bool { - return false -} - -func (v *int64Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *int64Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *int64Vector) Len() int { - return len(*v) -} - -func (v *int64Vector) CopyAt(i int) interface{} { - var g int64 - g = (*v)[i] - return g -} - -func (v *int64Vector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *int64Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *int64Vector) Extend(i int) { - *v = append(*v, make([]int64, i)...) -} - -func (v *int64Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *int64Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) 
-} - -type float32Vector []float32 - -func newFloat32Vector(n int) *float32Vector { - v := float32Vector(make([]float32, n)) - return &v -} - -func newFloat32VectorWithValues(s []float32) *float32Vector { - v := make([]float32, len(s)) - copy(v, s) - return (*float32Vector)(&v) -} - -func (v *float32Vector) Set(idx int, i interface{}) { - (*v)[idx] = i.(float32) -} - -func (v *float32Vector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *float32Vector) Append(i interface{}) { - *v = append(*v, i.(float32)) -} - -func (v *float32Vector) NilAt(i int) bool { - return false -} - -func (v *float32Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *float32Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *float32Vector) Len() int { - return len(*v) -} - -func (v *float32Vector) CopyAt(i int) interface{} { - var g float32 - g = (*v)[i] - return g -} - -func (v *float32Vector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *float32Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *float32Vector) Extend(i int) { - *v = append(*v, make([]float32, i)...) -} - -func (v *float32Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *float32Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type float64Vector []float64 - -func newFloat64Vector(n int) *float64Vector { - v := float64Vector(make([]float64, n)) - return &v -} - -func newFloat64VectorWithValues(s []float64) *float64Vector { - v := make([]float64, len(s)) - copy(v, s) - return (*float64Vector)(&v) -} - -func (v *float64Vector) Set(idx int, i interface{}) { - (*v)[idx] = i.(float64) -} - -func (v *float64Vector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *float64Vector) Append(i interface{}) { - *v = append(*v, i.(float64)) -} - -func (v *float64Vector) NilAt(i int) bool { - return false -} - -func (v *float64Vector) At(i int) interface{} { - return (*v)[i] -} - -func (v *float64Vector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *float64Vector) Len() int { - return len(*v) -} - -func (v *float64Vector) CopyAt(i int) interface{} { - var g float64 - g = (*v)[i] - return g -} - -func (v *float64Vector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *float64Vector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *float64Vector) Extend(i int) { - *v = append(*v, make([]float64, i)...) -} - -func (v *float64Vector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *float64Vector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) 
-} - -type stringVector []string - -func newStringVector(n int) *stringVector { - v := stringVector(make([]string, n)) - return &v -} - -func newStringVectorWithValues(s []string) *stringVector { - v := make([]string, len(s)) - copy(v, s) - return (*stringVector)(&v) -} - -func (v *stringVector) Set(idx int, i interface{}) { - (*v)[idx] = i.(string) -} - -func (v *stringVector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *stringVector) Append(i interface{}) { - *v = append(*v, i.(string)) -} - -func (v *stringVector) NilAt(i int) bool { - return false -} - -func (v *stringVector) At(i int) interface{} { - return (*v)[i] -} - -func (v *stringVector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *stringVector) Len() int { - return len(*v) -} - -func (v *stringVector) CopyAt(i int) interface{} { - var g string - g = (*v)[i] - return g -} - -func (v *stringVector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *stringVector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *stringVector) Extend(i int) { - *v = append(*v, make([]string, i)...) -} - -func (v *stringVector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *stringVector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type boolVector []bool - -func newBoolVector(n int) *boolVector { - v := boolVector(make([]bool, n)) - return &v -} - -func newBoolVectorWithValues(s []bool) *boolVector { - v := make([]bool, len(s)) - copy(v, s) - return (*boolVector)(&v) -} - -func (v *boolVector) Set(idx int, i interface{}) { - (*v)[idx] = i.(bool) -} - -func (v *boolVector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *boolVector) Append(i interface{}) { - *v = append(*v, i.(bool)) -} - -func (v *boolVector) NilAt(i int) bool { - return false -} - -func (v *boolVector) At(i int) interface{} { - return (*v)[i] -} - -func (v *boolVector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *boolVector) Len() int { - return len(*v) -} - -func (v *boolVector) CopyAt(i int) interface{} { - var g bool - g = (*v)[i] - return g -} - -func (v *boolVector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *boolVector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *boolVector) Extend(i int) { - *v = append(*v, make([]bool, i)...) -} - -func (v *boolVector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *boolVector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) 
-} - -type timeTimeVector []time.Time - -func newTimeTimeVector(n int) *timeTimeVector { - v := timeTimeVector(make([]time.Time, n)) - return &v -} - -func newTimeTimeVectorWithValues(s []time.Time) *timeTimeVector { - v := make([]time.Time, len(s)) - copy(v, s) - return (*timeTimeVector)(&v) -} - -func (v *timeTimeVector) Set(idx int, i interface{}) { - (*v)[idx] = i.(time.Time) -} - -func (v *timeTimeVector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *timeTimeVector) Append(i interface{}) { - *v = append(*v, i.(time.Time)) -} - -func (v *timeTimeVector) NilAt(i int) bool { - return false -} - -func (v *timeTimeVector) At(i int) interface{} { - return (*v)[i] -} - -func (v *timeTimeVector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *timeTimeVector) Len() int { - return len(*v) -} - -func (v *timeTimeVector) CopyAt(i int) interface{} { - var g time.Time - g = (*v)[i] - return g -} - -func (v *timeTimeVector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *timeTimeVector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *timeTimeVector) Extend(i int) { - *v = append(*v, make([]time.Time, i)...) -} - -func (v *timeTimeVector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *timeTimeVector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) -} - -type jsonRawMessageVector []json.RawMessage - -func newJsonRawMessageVector(n int) *jsonRawMessageVector { - v := jsonRawMessageVector(make([]json.RawMessage, n)) - return &v -} - -func newJsonRawMessageVectorWithValues(s []json.RawMessage) *jsonRawMessageVector { - v := make([]json.RawMessage, len(s)) - copy(v, s) - return (*jsonRawMessageVector)(&v) -} - -func (v *jsonRawMessageVector) Set(idx int, i interface{}) { - (*v)[idx] = i.(json.RawMessage) -} - -func (v *jsonRawMessageVector) SetConcrete(idx int, i interface{}) { - v.Set(idx, i) -} - -func (v *jsonRawMessageVector) Append(i interface{}) { - *v = append(*v, i.(json.RawMessage)) -} - -func (v *jsonRawMessageVector) NilAt(i int) bool { - return false -} - -func (v *jsonRawMessageVector) At(i int) interface{} { - return (*v)[i] -} - -func (v *jsonRawMessageVector) PointerAt(i int) interface{} { - return &(*v)[i] -} - -func (v *jsonRawMessageVector) Len() int { - return len(*v) -} - -func (v *jsonRawMessageVector) CopyAt(i int) interface{} { - var g json.RawMessage - g = (*v)[i] - return g -} - -func (v *jsonRawMessageVector) ConcreteAt(i int) (interface{}, bool) { - return v.At(i), true -} - -func (v *jsonRawMessageVector) Type() FieldType { - return vectorFieldType(v) -} - -func (v *jsonRawMessageVector) Extend(i int) { - *v = append(*v, make([]json.RawMessage, i)...) -} - -func (v *jsonRawMessageVector) Insert(i int, val interface{}) { - switch { - case i < v.Len(): - v.Extend(1) - copy((*v)[i+1:], (*v)[i:]) - v.Set(i, val) - case i == v.Len(): - v.Append(val) - case i > v.Len(): - panic("Invalid index; vector length should be greater or equal to that index") - } -} - -func (v *jsonRawMessageVector) Delete(i int) { - *v = append((*v)[:i], (*v)[i+1:]...) 
-} diff --git a/data/vector.go b/data/vector.go index 0d712b129..6a41521f2 100644 --- a/data/vector.go +++ b/data/vector.go @@ -1,7 +1,9 @@ package data import ( + "encoding/json" "fmt" + "time" ) // vector represents a Field's collection of Elements. @@ -24,78 +26,72 @@ type vector interface { // nolint:gocyclo func vectorFieldType(v vector) FieldType { switch v.(type) { - case *int8Vector: + // time.Time + case *genericVector[time.Time]: + return FieldTypeTime + case *nullableGenericVector[time.Time]: + return FieldTypeNullableTime + + // json.RawMessage + case *genericVector[json.RawMessage]: + return FieldTypeJSON + case *nullableGenericVector[json.RawMessage]: + return FieldTypeNullableJSON + + // EnumItemIndex + case *genericVector[EnumItemIndex]: + return FieldTypeEnum + case *nullableGenericVector[EnumItemIndex]: + return FieldTypeNullableEnum + + case *genericVector[int8]: return FieldTypeInt8 - case *nullableInt8Vector: + case *nullableGenericVector[int8]: return FieldTypeNullableInt8 - - case *int16Vector: + case *genericVector[int16]: return FieldTypeInt16 - case *nullableInt16Vector: + case *nullableGenericVector[int16]: return FieldTypeNullableInt16 - - case *int32Vector: + case *genericVector[int32]: return FieldTypeInt32 - case *nullableInt32Vector: + case *nullableGenericVector[int32]: return FieldTypeNullableInt32 - - case *int64Vector: + case *genericVector[int64]: return FieldTypeInt64 - case *nullableInt64Vector: + case *nullableGenericVector[int64]: return FieldTypeNullableInt64 - - case *uint8Vector: + case *genericVector[uint8]: return FieldTypeUint8 - case *nullableUint8Vector: + case *nullableGenericVector[uint8]: return FieldTypeNullableUint8 - - case *uint16Vector: + case *genericVector[uint16]: return FieldTypeUint16 - case *nullableUint16Vector: + case *nullableGenericVector[uint16]: return FieldTypeNullableUint16 - - case *uint32Vector: + case *genericVector[uint32]: return FieldTypeUint32 - case *nullableUint32Vector: + case *nullableGenericVector[uint32]: return FieldTypeNullableUint32 - - case *uint64Vector: + case *genericVector[uint64]: return FieldTypeUint64 - case *nullableUint64Vector: + case *nullableGenericVector[uint64]: return FieldTypeNullableUint64 - - case *float32Vector: + case *genericVector[float32]: return FieldTypeFloat32 - case *nullableFloat32Vector: + case *nullableGenericVector[float32]: return FieldTypeNullableFloat32 - - case *float64Vector: + case *genericVector[float64]: return FieldTypeFloat64 - case *nullableFloat64Vector: + case *nullableGenericVector[float64]: return FieldTypeNullableFloat64 - - case *stringVector: + case *genericVector[string]: return FieldTypeString - case *nullableStringVector: + case *nullableGenericVector[string]: return FieldTypeNullableString - - case *boolVector: + case *genericVector[bool]: return FieldTypeBool - case *nullableBoolVector: + case *nullableGenericVector[bool]: return FieldTypeNullableBool - - case *timeTimeVector: - return FieldTypeTime - case *nullableTimeTimeVector: - return FieldTypeNullableTime - case *jsonRawMessageVector: - return FieldTypeJSON - case *nullableJsonRawMessageVector: - return FieldTypeNullableJSON - case *enumVector: - return FieldTypeEnum - case *nullableEnumVector: - return FieldTypeNullableEnum } return FieldTypeUnknown @@ -113,84 +109,85 @@ func (p FieldType) String() string { func NewFieldFromFieldType(p FieldType, n int) *Field { f := &Field{} switch p { - // ints + // ints (use generic vectors for performance) case FieldTypeInt8: - f.vector = newInt8Vector(n) 
+ f.vector = newGenericVector[int8](n) case FieldTypeNullableInt8: - f.vector = newNullableInt8Vector(n) + f.vector = newNullableGenericVector[int8](n) case FieldTypeInt16: - f.vector = newInt16Vector(n) + f.vector = newGenericVector[int16](n) case FieldTypeNullableInt16: - f.vector = newNullableInt16Vector(n) + f.vector = newNullableGenericVector[int16](n) case FieldTypeInt32: - f.vector = newInt32Vector(n) + f.vector = newGenericVector[int32](n) case FieldTypeNullableInt32: - f.vector = newNullableInt32Vector(n) + f.vector = newNullableGenericVector[int32](n) case FieldTypeInt64: - f.vector = newInt64Vector(n) + f.vector = newGenericVector[int64](n) case FieldTypeNullableInt64: - f.vector = newNullableInt64Vector(n) + f.vector = newNullableGenericVector[int64](n) - // uints + // uints (use generic vectors for performance) case FieldTypeUint8: - f.vector = newUint8Vector(n) + f.vector = newGenericVector[uint8](n) case FieldTypeNullableUint8: - f.vector = newNullableUint8Vector(n) + f.vector = newNullableGenericVector[uint8](n) case FieldTypeUint16: - f.vector = newUint16Vector(n) + f.vector = newGenericVector[uint16](n) case FieldTypeNullableUint16: - f.vector = newNullableUint16Vector(n) + f.vector = newNullableGenericVector[uint16](n) case FieldTypeUint32: - f.vector = newUint32Vector(n) + f.vector = newGenericVector[uint32](n) case FieldTypeNullableUint32: - f.vector = newNullableUint32Vector(n) + f.vector = newNullableGenericVector[uint32](n) case FieldTypeUint64: - f.vector = newUint64Vector(n) + f.vector = newGenericVector[uint64](n) case FieldTypeNullableUint64: - f.vector = newNullableUint64Vector(n) + f.vector = newNullableGenericVector[uint64](n) - // floats + // floats (use generic vectors for performance) case FieldTypeFloat32: - f.vector = newFloat32Vector(n) + f.vector = newGenericVector[float32](n) case FieldTypeNullableFloat32: - f.vector = newNullableFloat32Vector(n) + f.vector = newNullableGenericVector[float32](n) case FieldTypeFloat64: - f.vector = newFloat64Vector(n) + f.vector = newGenericVector[float64](n) case FieldTypeNullableFloat64: - f.vector = newNullableFloat64Vector(n) + f.vector = newNullableGenericVector[float64](n) - // other + // other basic types (use generic vectors for performance) case FieldTypeString: - f.vector = newStringVector(n) + f.vector = newGenericVector[string](n) case FieldTypeNullableString: - f.vector = newNullableStringVector(n) + f.vector = newNullableGenericVector[string](n) case FieldTypeBool: - f.vector = newBoolVector(n) + f.vector = newGenericVector[bool](n) case FieldTypeNullableBool: - f.vector = newNullableBoolVector(n) + f.vector = newNullableGenericVector[bool](n) + // complex types (now using generic vectors) case FieldTypeTime: - f.vector = newTimeTimeVector(n) + f.vector = newGenericVector[time.Time](n) case FieldTypeNullableTime: - f.vector = newNullableTimeTimeVector(n) + f.vector = newNullableGenericVector[time.Time](n) case FieldTypeJSON: - f.vector = newJsonRawMessageVector(n) + f.vector = newGenericVector[json.RawMessage](n) case FieldTypeNullableJSON: - f.vector = newNullableJsonRawMessageVector(n) + f.vector = newNullableGenericVector[json.RawMessage](n) case FieldTypeEnum: - f.vector = newEnumVector(n) + f.vector = newGenericVector[EnumItemIndex](n) case FieldTypeNullableEnum: - f.vector = newNullableEnumVector(n) + f.vector = newNullableGenericVector[EnumItemIndex](n) default: panic("unsupported FieldType") } diff --git a/data/vector_generic.go b/data/vector_generic.go new file mode 100644 index 
000000000..b5ed19a00 --- /dev/null +++ b/data/vector_generic.go @@ -0,0 +1,165 @@ +package data + +var _ vector = (*genericVector[int8])(nil) + +// genericVector is a type-safe vector implementation using Go generics. +// It eliminates interface{} boxing overhead for better performance. +type genericVector[T any] struct { + data []T +} + +// newGenericVector creates a new generic vector with the specified size. +func newGenericVector[T any](n int) *genericVector[T] { + return &genericVector[T]{ + data: make([]T, n), + } +} + +// newGenericVectorWithCapacity creates a new generic vector with length 0 but pre-allocated capacity. +// This is useful for avoiding reallocations when the final size is known in advance. +func newGenericVectorWithCapacity[T any](capacity int) *genericVector[T] { + return &genericVector[T]{ + data: make([]T, 0, capacity), + } +} + +// newGenericVectorWithValues creates a new generic vector from an existing slice. +// It copies the data to prevent external modifications. +func newGenericVectorWithValues[T any](values []T) *genericVector[T] { + data := make([]T, len(values)) + copy(data, values) + return &genericVector[T]{data: data} +} + +// AtTyped returns the value at index i without boxing. +// This is the zero-allocation accessor method. +func (v *genericVector[T]) AtTyped(i int) T { + if i < 0 || i >= v.Len() { + panic("Invalid index; vector length should be greater or equal to that index") + } + return v.data[i] +} + +// SetTyped sets the value at index i without boxing. +// This is the zero-allocation setter method. +func (v *genericVector[T]) SetTyped(i int, val T) { + if i < 0 || i >= v.Len() { + panic("Invalid index; vector length should be greater or equal to that index") + } + v.data[i] = val +} + +// AppendTyped adds a value to the end without boxing. +func (v *genericVector[T]) AppendTyped(val T) { + v.data = append(v.data, val) +} + +// AppendManyTyped appends multiple values at once from a slice. +// This is more efficient than calling AppendTyped repeatedly as it +// reduces slice growth operations and bounds checking overhead. +func (v *genericVector[T]) AppendManyTyped(vals []T) { + v.data = append(v.data, vals...) +} + +// Len returns the length of the vector. +func (v *genericVector[T]) Len() int { + return len(v.data) +} + +// Extend extends the vector by n elements with zero values. +func (v *genericVector[T]) Extend(n int) { + v.data = append(v.data, make([]T, n)...) +} + +// InsertTyped inserts a value at index i. +func (v *genericVector[T]) InsertTyped(i int, val T) { + switch { + case i < v.Len(): + v.Extend(1) + copy(v.data[i+1:], v.data[i:]) + v.SetTyped(i, val) + case i == v.Len(): + v.AppendTyped(val) + default: + panic("Invalid index; vector length should be greater or equal to that index") + } +} + +// DeleteTyped removes the element at index i. +func (v *genericVector[T]) DeleteTyped(i int) { + if i < 0 || i >= v.Len() { + panic("Invalid index; vector length should be greater or equal to that index") + } + v.data = append(v.data[:i], v.data[i+1:]...) +} + +// CopyAtTyped returns a copy of the value at index i. +// For value types, this is the same as AtTyped. +func (v *genericVector[T]) CopyAtTyped(i int) T { + if i < 0 || i >= v.Len() { + panic("Invalid index; vector length should be greater or equal to that index") + } + return v.data[i] +} + +// Slice returns the underlying slice (read-only access recommended). 
+func (v *genericVector[T]) Slice() []T { + return v.data +} + +// Set sets the value at index idx to i (requires type assertion). +func (v *genericVector[T]) Set(idx int, i interface{}) { + v.data[idx] = i.(T) +} + +// SetConcrete sets the value at index idx (same as Set for non-nullable). +func (v *genericVector[T]) SetConcrete(idx int, i interface{}) { + v.Set(idx, i) +} + +// Append adds a value to the end (requires type assertion). +func (v *genericVector[T]) Append(i interface{}) { + v.data = append(v.data, i.(T)) +} + +// At returns the value at index i as interface{}. +// Note: This boxes the value and causes allocation for value types. +func (v *genericVector[T]) At(i int) interface{} { + return v.data[i] +} + +// NilAt returns false for non-nullable vectors. +func (v *genericVector[T]) NilAt(_ int) bool { + return false +} + +// PointerAt returns a pointer to the element at index i. +func (v *genericVector[T]) PointerAt(i int) interface{} { + return &v.data[i] +} + +// CopyAt returns a copy of the value as interface{}. +func (v *genericVector[T]) CopyAt(i int) interface{} { + val := v.data[i] + return val +} + +// ConcreteAt returns the value at index i as interface{}. +func (v *genericVector[T]) ConcreteAt(i int) (interface{}, bool) { + return v.data[i], true +} + +// Type returns the FieldType for this vector. +func (v *genericVector[T]) Type() FieldType { + return vectorFieldType(v) +} + +// Insert inserts a value at index i (requires type assertion). +func (v *genericVector[T]) Insert(i int, val interface{}) { + v.InsertTyped(i, val.(T)) +} + +// Delete removes the element at index i. +func (v *genericVector[T]) Delete(i int) { + v.DeleteTyped(i) +} diff --git a/data/vector_generic_nullable.go b/data/vector_generic_nullable.go new file mode 100644 index 000000000..3ded01d72 --- /dev/null +++ b/data/vector_generic_nullable.go @@ -0,0 +1,227 @@ +package data + +// nullableGenericVector is a nullable vector implementation using Go generics. +// It stores pointers to T, allowing nil values. +type nullableGenericVector[T any] struct { + data []*T +} + +// newNullableGenericVector creates a new nullable generic vector with the specified size. +func newNullableGenericVector[T any](n int) *nullableGenericVector[T] { + return &nullableGenericVector[T]{ + data: make([]*T, n), + } +} + +// newNullableGenericVectorWithCapacity creates a new nullable generic vector with length 0 but pre-allocated capacity. +// This is useful for avoiding reallocations when the final size is known in advance. +func newNullableGenericVectorWithCapacity[T any](capacity int) *nullableGenericVector[T] { + return &nullableGenericVector[T]{ + data: make([]*T, 0, capacity), + } +} + +// newNullableGenericVectorWithValues creates a new nullable generic vector from an existing slice. +func newNullableGenericVectorWithValues[T any](values []*T) *nullableGenericVector[T] { + data := make([]*T, len(values)) + copy(data, values) + return &nullableGenericVector[T]{data: data} +} + +// AtTyped returns the pointer at index i without boxing. +// Returns nil if the value is null. +func (v *nullableGenericVector[T]) AtTyped(i int) *T { + if i < 0 || i >= v.Len() { + panic("Invalid index; vector length should be greater or equal to that index") + } + return v.data[i] +} + +// SetTyped sets the pointer at index i without boxing. 
+func (v *nullableGenericVector[T]) SetTyped(i int, val *T) { + if i < 0 || i >= v.Len() { + panic("Invalid index; vector length should be greater or equal to that index") + } + v.data[i] = val +} + +// AppendTyped adds a pointer to the end without boxing. +func (v *nullableGenericVector[T]) AppendTyped(val *T) { + v.data = append(v.data, val) +} + +// AppendManyTyped appends multiple pointer values at once from a slice. +// This is more efficient than calling AppendTyped repeatedly. +func (v *nullableGenericVector[T]) AppendManyTyped(vals []*T) { + v.data = append(v.data, vals...) +} + +// AppendManyWithNulls appends values from a slice, creating pointers for non-null values. +// The isNull function should return true if the value at index i is null. +// This is optimized for batch operations from Arrow arrays. +func (v *nullableGenericVector[T]) AppendManyWithNulls(vals []T, isNull func(int) bool) { + startIdx := len(v.data) + // Pre-allocate space + v.data = append(v.data, make([]*T, len(vals))...) + + // Fill in the values + for i, val := range vals { + if !isNull(i) { + // Create a new variable to get a stable pointer + valCopy := val + v.data[startIdx+i] = &valCopy + } + // else: already nil from make() + } +} + +// ConcreteAtTyped returns the dereferenced value if not nil. +// The second return value indicates if the value was non-nil. +func (v *nullableGenericVector[T]) ConcreteAtTyped(i int) (T, bool) { + if i < 0 || i >= v.Len() { + panic("Invalid index; vector length should be greater or equal to that index") + } + if v.data[i] == nil { + var zero T + return zero, false + } + return *v.data[i], true +} + +// SetConcreteTyped sets the value by creating a pointer to val. +func (v *nullableGenericVector[T]) SetConcreteTyped(i int, val T) { + if i < 0 || i >= v.Len() { + panic("Invalid index; vector length should be greater or equal to that index") + } + v.data[i] = &val +} + +// Len returns the length of the vector. +func (v *nullableGenericVector[T]) Len() int { + return len(v.data) +} + +// Extend extends the vector by n elements with nil values. +func (v *nullableGenericVector[T]) Extend(n int) { + v.data = append(v.data, make([]*T, n)...) +} + +// InsertTyped inserts a value at index i. +func (v *nullableGenericVector[T]) InsertTyped(i int, val *T) { + switch { + case i < v.Len(): + v.Extend(1) + copy(v.data[i+1:], v.data[i:]) + v.SetTyped(i, val) + case i == v.Len(): + v.AppendTyped(val) + default: + panic("Invalid index; vector length should be greater or equal to that index") + } +} + +// DeleteTyped removes the element at index i. +func (v *nullableGenericVector[T]) DeleteTyped(i int) { + if i < 0 || i >= v.Len() { + panic("Invalid index; vector length should be greater or equal to that index") + } + v.data = append(v.data[:i], v.data[i+1:]...) +} + +// CopyAtTyped returns a copy of the pointer value at index i. +// If the value is nil, returns nil. Otherwise returns a new pointer to a copy. +func (v *nullableGenericVector[T]) CopyAtTyped(i int) *T { + if i < 0 || i >= v.Len() { + panic("Invalid index; vector length should be greater or equal to that index") + } + if v.data[i] == nil { + return nil + } + val := *v.data[i] + return &val +} + +// Slice returns the underlying slice (read-only access recommended). +func (v *nullableGenericVector[T]) Slice() []*T { + return v.data +} + +// --- Backward compatibility interface{} methods --- + +// Set sets the value at index idx. 
+func (v *nullableGenericVector[T]) Set(idx int, i interface{}) { + if i == nil { + v.data[idx] = nil + return + } + v.data[idx] = i.(*T) +} + +// SetConcrete sets the value by converting from concrete type. +func (v *nullableGenericVector[T]) SetConcrete(idx int, i interface{}) { + val := i.(T) + v.data[idx] = &val +} + +// Append adds a value to the end. +func (v *nullableGenericVector[T]) Append(i interface{}) { + if i == nil { + v.data = append(v.data, nil) + return + } + v.data = append(v.data, i.(*T)) +} + +// NilAt returns true if the value at index i is nil. +func (v *nullableGenericVector[T]) NilAt(i int) bool { + return v.data[i] == nil +} + +// At returns the pointer at index i as interface{}. +func (v *nullableGenericVector[T]) At(i int) interface{} { + return v.data[i] +} + +// CopyAt returns a copy of the value as interface{}. +func (v *nullableGenericVector[T]) CopyAt(i int) interface{} { + if v.data[i] == nil { + var g *T + return g + } + val := *v.data[i] + return &val +} + +// ConcreteAt returns the dereferenced value as interface{}. +func (v *nullableGenericVector[T]) ConcreteAt(i int) (interface{}, bool) { + var zero T + val := v.data[i] + if val == nil { + return zero, false + } + return *val, true +} + +// PointerAt returns a pointer to the pointer at index i. +func (v *nullableGenericVector[T]) PointerAt(i int) interface{} { + return &v.data[i] +} + +// Type returns the FieldType for this vector. +func (v *nullableGenericVector[T]) Type() FieldType { + return vectorFieldType(v) +} + +// Insert inserts a value at index i. +func (v *nullableGenericVector[T]) Insert(i int, val interface{}) { + if val == nil { + v.InsertTyped(i, nil) + return + } + v.InsertTyped(i, val.(*T)) +} + +// Delete removes the element at index i. +func (v *nullableGenericVector[T]) Delete(i int) { + v.DeleteTyped(i) +} diff --git a/data/vector_helpers.go b/data/vector_helpers.go new file mode 100644 index 000000000..f5d7635e2 --- /dev/null +++ b/data/vector_helpers.go @@ -0,0 +1,435 @@ +package data + +import ( + "encoding/json" + "fmt" + "time" +) + +// appendTypedToVector is an internal helper that appends a value to a vector +// using typed methods instead of interface{} boxing. This eliminates allocation +// overhead for internal operations. 
+// nolint:gocyclo +func appendTypedToVector(vec vector, val interface{}) { + if val == nil { + vec.Append(nil) + return + } + + switch v := vec.(type) { + // int types + case *genericVector[int8]: + v.AppendTyped(val.(int8)) + case *nullableGenericVector[int8]: + v.AppendTyped(val.(*int8)) + case *genericVector[int16]: + v.AppendTyped(val.(int16)) + case *nullableGenericVector[int16]: + v.AppendTyped(val.(*int16)) + case *genericVector[int32]: + v.AppendTyped(val.(int32)) + case *nullableGenericVector[int32]: + v.AppendTyped(val.(*int32)) + case *genericVector[int64]: + v.AppendTyped(val.(int64)) + case *nullableGenericVector[int64]: + v.AppendTyped(val.(*int64)) + + // uint types + case *genericVector[uint8]: + v.AppendTyped(val.(uint8)) + case *nullableGenericVector[uint8]: + v.AppendTyped(val.(*uint8)) + case *genericVector[uint16]: + v.AppendTyped(val.(uint16)) + case *nullableGenericVector[uint16]: + v.AppendTyped(val.(*uint16)) + case *genericVector[uint32]: + v.AppendTyped(val.(uint32)) + case *nullableGenericVector[uint32]: + v.AppendTyped(val.(*uint32)) + case *genericVector[uint64]: + v.AppendTyped(val.(uint64)) + case *nullableGenericVector[uint64]: + v.AppendTyped(val.(*uint64)) + + // float types + case *genericVector[float32]: + v.AppendTyped(val.(float32)) + case *nullableGenericVector[float32]: + v.AppendTyped(val.(*float32)) + case *genericVector[float64]: + v.AppendTyped(val.(float64)) + case *nullableGenericVector[float64]: + v.AppendTyped(val.(*float64)) + + // string, bool + case *genericVector[string]: + v.AppendTyped(val.(string)) + case *nullableGenericVector[string]: + v.AppendTyped(val.(*string)) + case *genericVector[bool]: + v.AppendTyped(val.(bool)) + case *nullableGenericVector[bool]: + v.AppendTyped(val.(*bool)) + + // time, json, enum + case *genericVector[time.Time]: + v.AppendTyped(val.(time.Time)) + case *nullableGenericVector[time.Time]: + v.AppendTyped(val.(*time.Time)) + case *genericVector[json.RawMessage]: + v.AppendTyped(val.(json.RawMessage)) + case *nullableGenericVector[json.RawMessage]: + v.AppendTyped(val.(*json.RawMessage)) + case *genericVector[EnumItemIndex]: + enumVal, ok := enumValueFromInterface(val) + if !ok { + enumVal = 0 + } + v.AppendTyped(enumVal) + case *nullableGenericVector[EnumItemIndex]: + enumPtr, ok := enumPointerFromInterface(val) + if !ok { + v.AppendTyped(nil) + return + } + v.AppendTyped(enumPtr) + + default: + panic(fmt.Sprintf("unsupported vector type: %T", vec)) + } +} + +// setTypedInVector is an internal helper that sets a value in a vector +// using typed methods instead of interface{} boxing. 
+// nolint:gocyclo +func setTypedInVector(vec vector, idx int, val interface{}) { + if val == nil { + vec.Set(idx, nil) + return + } + + switch v := vec.(type) { + // int types + case *genericVector[int8]: + v.SetTyped(idx, val.(int8)) + case *nullableGenericVector[int8]: + v.SetTyped(idx, val.(*int8)) + case *genericVector[int16]: + v.SetTyped(idx, val.(int16)) + case *nullableGenericVector[int16]: + v.SetTyped(idx, val.(*int16)) + case *genericVector[int32]: + v.SetTyped(idx, val.(int32)) + case *nullableGenericVector[int32]: + v.SetTyped(idx, val.(*int32)) + case *genericVector[int64]: + v.SetTyped(idx, val.(int64)) + case *nullableGenericVector[int64]: + v.SetTyped(idx, val.(*int64)) + + // uint types + case *genericVector[uint8]: + v.SetTyped(idx, val.(uint8)) + case *nullableGenericVector[uint8]: + v.SetTyped(idx, val.(*uint8)) + case *genericVector[uint16]: + v.SetTyped(idx, val.(uint16)) + case *nullableGenericVector[uint16]: + v.SetTyped(idx, val.(*uint16)) + case *genericVector[uint32]: + v.SetTyped(idx, val.(uint32)) + case *nullableGenericVector[uint32]: + v.SetTyped(idx, val.(*uint32)) + case *genericVector[uint64]: + v.SetTyped(idx, val.(uint64)) + case *nullableGenericVector[uint64]: + v.SetTyped(idx, val.(*uint64)) + + // float types + case *genericVector[float32]: + v.SetTyped(idx, val.(float32)) + case *nullableGenericVector[float32]: + v.SetTyped(idx, val.(*float32)) + case *genericVector[float64]: + v.SetTyped(idx, val.(float64)) + case *nullableGenericVector[float64]: + v.SetTyped(idx, val.(*float64)) + + // string, bool + case *genericVector[string]: + v.SetTyped(idx, val.(string)) + case *nullableGenericVector[string]: + v.SetTyped(idx, val.(*string)) + case *genericVector[bool]: + v.SetTyped(idx, val.(bool)) + case *nullableGenericVector[bool]: + v.SetTyped(idx, val.(*bool)) + + // time, json, enum + case *genericVector[time.Time]: + v.SetTyped(idx, val.(time.Time)) + case *nullableGenericVector[time.Time]: + v.SetTyped(idx, val.(*time.Time)) + case *genericVector[json.RawMessage]: + v.SetTyped(idx, val.(json.RawMessage)) + case *nullableGenericVector[json.RawMessage]: + v.SetTyped(idx, val.(*json.RawMessage)) + case *genericVector[EnumItemIndex]: + enumVal, ok := enumValueFromInterface(val) + if !ok { + enumVal = 0 + } + v.SetTyped(idx, enumVal) + case *nullableGenericVector[EnumItemIndex]: + enumPtr, ok := enumPointerFromInterface(val) + if !ok { + v.Set(idx, nil) + return + } + v.SetTyped(idx, enumPtr) + + default: + panic(fmt.Sprintf("unsupported vector type: %T", vec)) + } +} + +// setConcreteTypedInVector is an internal helper that sets a concrete (non-pointer) value +// in a vector using typed methods. For nullable vectors, it converts the value to a pointer. 
+// nolint:gocyclo +func setConcreteTypedInVector(vec vector, idx int, val interface{}) { + switch v := vec.(type) { + // int types + case *genericVector[int8]: + v.SetTyped(idx, val.(int8)) + case *nullableGenericVector[int8]: + concrete := val.(int8) + v.SetConcreteTyped(idx, concrete) + case *genericVector[int16]: + v.SetTyped(idx, val.(int16)) + case *nullableGenericVector[int16]: + concrete := val.(int16) + v.SetConcreteTyped(idx, concrete) + case *genericVector[int32]: + v.SetTyped(idx, val.(int32)) + case *nullableGenericVector[int32]: + concrete := val.(int32) + v.SetConcreteTyped(idx, concrete) + case *genericVector[int64]: + v.SetTyped(idx, val.(int64)) + case *nullableGenericVector[int64]: + concrete := val.(int64) + v.SetConcreteTyped(idx, concrete) + + // uint types + case *genericVector[uint8]: + v.SetTyped(idx, val.(uint8)) + case *nullableGenericVector[uint8]: + concrete := val.(uint8) + v.SetConcreteTyped(idx, concrete) + case *genericVector[uint16]: + v.SetTyped(idx, val.(uint16)) + case *nullableGenericVector[uint16]: + concrete := val.(uint16) + v.SetConcreteTyped(idx, concrete) + case *genericVector[uint32]: + v.SetTyped(idx, val.(uint32)) + case *nullableGenericVector[uint32]: + concrete := val.(uint32) + v.SetConcreteTyped(idx, concrete) + case *genericVector[uint64]: + v.SetTyped(idx, val.(uint64)) + case *nullableGenericVector[uint64]: + concrete := val.(uint64) + v.SetConcreteTyped(idx, concrete) + + // float types + case *genericVector[float32]: + v.SetTyped(idx, val.(float32)) + case *nullableGenericVector[float32]: + concrete := val.(float32) + v.SetConcreteTyped(idx, concrete) + case *genericVector[float64]: + v.SetTyped(idx, val.(float64)) + case *nullableGenericVector[float64]: + concrete := val.(float64) + v.SetConcreteTyped(idx, concrete) + + // string, bool + case *genericVector[string]: + v.SetTyped(idx, val.(string)) + case *nullableGenericVector[string]: + concrete := val.(string) + v.SetConcreteTyped(idx, concrete) + case *genericVector[bool]: + v.SetTyped(idx, val.(bool)) + case *nullableGenericVector[bool]: + concrete := val.(bool) + v.SetConcreteTyped(idx, concrete) + + // time, json, enum + case *genericVector[time.Time]: + v.SetTyped(idx, val.(time.Time)) + case *nullableGenericVector[time.Time]: + concrete := val.(time.Time) + v.SetConcreteTyped(idx, concrete) + case *genericVector[json.RawMessage]: + v.SetTyped(idx, val.(json.RawMessage)) + case *nullableGenericVector[json.RawMessage]: + concrete := val.(json.RawMessage) + v.SetConcreteTyped(idx, concrete) + case *genericVector[EnumItemIndex]: + enumVal, ok := enumValueFromInterface(val) + if !ok { + enumVal = 0 + } + v.SetTyped(idx, enumVal) + case *nullableGenericVector[EnumItemIndex]: + enumVal, ok := enumValueFromInterface(val) + if !ok { + v.SetTyped(idx, nil) + return + } + v.SetConcreteTyped(idx, enumVal) + + default: + panic(fmt.Sprintf("unsupported vector type: %T", vec)) + } +} + +// insertTypedInVector is an internal helper that inserts a value in a vector +// using typed methods instead of interface{} boxing. 
+// nolint:gocyclo +func insertTypedInVector(vec vector, idx int, val interface{}) { + if val == nil { + vec.Insert(idx, nil) + return + } + + switch v := vec.(type) { + // int types + case *genericVector[int8]: + v.InsertTyped(idx, val.(int8)) + case *nullableGenericVector[int8]: + v.InsertTyped(idx, val.(*int8)) + case *genericVector[int16]: + v.InsertTyped(idx, val.(int16)) + case *nullableGenericVector[int16]: + v.InsertTyped(idx, val.(*int16)) + case *genericVector[int32]: + v.InsertTyped(idx, val.(int32)) + case *nullableGenericVector[int32]: + v.InsertTyped(idx, val.(*int32)) + case *genericVector[int64]: + v.InsertTyped(idx, val.(int64)) + case *nullableGenericVector[int64]: + v.InsertTyped(idx, val.(*int64)) + + // uint types + case *genericVector[uint8]: + v.InsertTyped(idx, val.(uint8)) + case *nullableGenericVector[uint8]: + v.InsertTyped(idx, val.(*uint8)) + case *genericVector[uint16]: + v.InsertTyped(idx, val.(uint16)) + case *nullableGenericVector[uint16]: + v.InsertTyped(idx, val.(*uint16)) + case *genericVector[uint32]: + v.InsertTyped(idx, val.(uint32)) + case *nullableGenericVector[uint32]: + v.InsertTyped(idx, val.(*uint32)) + case *genericVector[uint64]: + v.InsertTyped(idx, val.(uint64)) + case *nullableGenericVector[uint64]: + v.InsertTyped(idx, val.(*uint64)) + + // float types + case *genericVector[float32]: + v.InsertTyped(idx, val.(float32)) + case *nullableGenericVector[float32]: + v.InsertTyped(idx, val.(*float32)) + case *genericVector[float64]: + v.InsertTyped(idx, val.(float64)) + case *nullableGenericVector[float64]: + v.InsertTyped(idx, val.(*float64)) + + // string, bool + case *genericVector[string]: + v.InsertTyped(idx, val.(string)) + case *nullableGenericVector[string]: + v.InsertTyped(idx, val.(*string)) + case *genericVector[bool]: + v.InsertTyped(idx, val.(bool)) + case *nullableGenericVector[bool]: + v.InsertTyped(idx, val.(*bool)) + + // time, json, enum + case *genericVector[time.Time]: + v.InsertTyped(idx, val.(time.Time)) + case *nullableGenericVector[time.Time]: + v.InsertTyped(idx, val.(*time.Time)) + case *genericVector[json.RawMessage]: + v.InsertTyped(idx, val.(json.RawMessage)) + case *nullableGenericVector[json.RawMessage]: + v.InsertTyped(idx, val.(*json.RawMessage)) + case *genericVector[EnumItemIndex]: + enumVal, ok := enumValueFromInterface(val) + if !ok { + enumVal = 0 + } + v.InsertTyped(idx, enumVal) + case *nullableGenericVector[EnumItemIndex]: + enumPtr, ok := enumPointerFromInterface(val) + if !ok { + v.Insert(idx, nil) + return + } + v.InsertTyped(idx, enumPtr) + + default: + panic(fmt.Sprintf("unsupported vector type: %T", vec)) + } +} + +func enumValueFromInterface(val interface{}) (EnumItemIndex, bool) { + switch v := val.(type) { + case EnumItemIndex: + return v, true + case uint16: + return EnumItemIndex(v), true + case *EnumItemIndex: + if v == nil { + return 0, false + } + return *v, true + case *uint16: + if v == nil { + return 0, false + } + return EnumItemIndex(*v), true + default: + return 0, false + } +} + +func enumPointerFromInterface(val interface{}) (*EnumItemIndex, bool) { + switch v := val.(type) { + case *EnumItemIndex: + return v, true + case EnumItemIndex: + cp := v + return &cp, true + case uint16: + cp := EnumItemIndex(v) + return &cp, true + case *uint16: + if v == nil { + return nil, false + } + cp := EnumItemIndex(*v) + return &cp, true + default: + return nil, false + } +}
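The sketches below are not part of the diff; they are minimal, illustrative examples of how the new generic vectors could be exercised from inside package data (test names, function names, and values are assumptions, not code from this change). First, the typed accessors on genericVector, which avoid interface{} boxing while the legacy vector interface methods keep working:

package data

import "testing"

// TestGenericVectorTypedAccess is an illustrative sketch, not part of the diff.
func TestGenericVectorTypedAccess(t *testing.T) {
	v := newGenericVector[int64](0)

	// Typed path: values stay as int64, no interface{} boxing.
	v.AppendTyped(1)
	v.AppendManyTyped([]int64{2, 3, 4})
	v.SetTyped(0, 10)

	if got := v.AtTyped(0); got != 10 {
		t.Fatalf("AtTyped(0) = %d, want 10", got)
	}

	// Boxed path still satisfies the vector interface for existing callers.
	v.Append(int64(5))
	if v.Type() != FieldTypeInt64 {
		t.Fatalf("Type() = %v, want FieldTypeInt64", v.Type())
	}
	if v.Len() != 5 {
		t.Fatalf("Len() = %d, want 5", v.Len())
	}
}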
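A similar sketch for nullableGenericVector; the nulls slice below is a hypothetical stand-in for the isNull callback that an Arrow column reader might supply to AppendManyWithNulls:

package data

import "fmt"

// nullableGenericVectorSketch is illustrative only; the nulls slice is a
// stand-in for e.g. a per-row null check on an Arrow array.
func nullableGenericVectorSketch() {
	v := newNullableGenericVector[float64](0)

	vals := []float64{1.5, 0, 2.5}
	nulls := []bool{false, true, false} // row 1 is null

	v.AppendManyWithNulls(vals, func(i int) bool { return nulls[i] })

	for i := 0; i < v.Len(); i++ {
		if val, ok := v.ConcreteAtTyped(i); ok {
			fmt.Println(i, val)
		} else {
			fmt.Println(i, "null")
		}
	}
	// Prints: 0 1.5 / 1 null / 2 2.5

	// SetConcreteTyped stores a pointer to a copy of the concrete value.
	v.SetConcreteTyped(1, 3.5)
	fmt.Println(v.NilAt(1)) // false
}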
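A sketch of the FieldType round trip through NewFieldFromFieldType and vectorFieldType, using only identifiers that appear in this diff (the test itself is an assumption):

package data

import (
	"testing"
	"time"
)

// TestFieldTypeRoundTrip is an illustrative sketch of the FieldType round trip.
func TestFieldTypeRoundTrip(t *testing.T) {
	f := NewFieldFromFieldType(FieldTypeNullableTime, 2)

	now := time.Now()
	f.vector.SetConcrete(0, now) // index 1 stays nil

	if f.vector.Type() != FieldTypeNullableTime {
		t.Fatalf("Type() = %v, want FieldTypeNullableTime", f.vector.Type())
	}
	if !f.vector.NilAt(1) {
		t.Fatal("expected index 1 to be nil")
	}
	if got, ok := f.vector.ConcreteAt(0); !ok || !got.(time.Time).Equal(now) {
		t.Fatalf("ConcreteAt(0) = %v, %v", got, ok)
	}
}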
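A sketch of how the internal helpers dispatch on the concrete vector type; the calls below only use functions introduced in this diff, while the wrapper function itself is illustrative:

package data

// appendHelperSketch is illustrative only.
func appendHelperSketch() {
	var vec vector = newNullableGenericVector[string](0)

	s := "hello"
	appendTypedToVector(vec, &s)  // nullable vectors take *T
	appendTypedToVector(vec, nil) // nil is appended as a null entry

	// Converts the concrete value to a pointer before storing it.
	setConcreteTypedInVector(vec, 1, "world")
}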
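A sketch of the input forms the enum conversion helpers accept and reject (values are illustrative); both EnumItemIndex and its underlying uint16 form are handled, in value and pointer flavors:

package data

import "fmt"

// enumConversionSketch is illustrative only.
func enumConversionSketch() {
	if v, ok := enumValueFromInterface(uint16(3)); ok {
		fmt.Println(v) // 3
	}
	if v, ok := enumValueFromInterface(EnumItemIndex(7)); ok {
		fmt.Println(v) // 7
	}
	if _, ok := enumValueFromInterface("not an enum"); !ok {
		fmt.Println("unsupported inputs report ok=false")
	}

	var raw *uint16
	if _, ok := enumPointerFromInterface(raw); !ok {
		fmt.Println("nil *uint16 reports ok=false")
	}
}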
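Finally, a hypothetical benchmark sketch (names and sizes are assumptions) of the boxing overhead the typed append path is meant to avoid; the boxed Append allocates per value, while AppendTyped with a pre-sized vector does not:

package data

import "testing"

func BenchmarkAppendBoxed(b *testing.B) {
	for i := 0; i < b.N; i++ {
		v := newGenericVector[float64](0)
		for j := 0; j < 1000; j++ {
			v.Append(float64(j)) // interface{} boxing allocates per value
		}
	}
}

func BenchmarkAppendTyped(b *testing.B) {
	for i := 0; i < b.N; i++ {
		v := newGenericVectorWithCapacity[float64](1000)
		for j := 0; j < 1000; j++ {
			v.AppendTyped(float64(j)) // no boxing, capacity pre-allocated
		}
	}
}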