Skip to content

Commit 6a68b11

Browse files
author
zhangli20
committed
update to datafusion-v42/arrow-v53/arrow-java-v16
1 parent ed846af commit 6a68b11

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

56 files changed

+1296
-806
lines changed

Cargo.lock

Lines changed: 368 additions & 240 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 27 additions & 27 deletions
Original file line number | Diff line number | Diff line change
@@ -51,42 +51,42 @@ datafusion-ext-exprs = { path = "./native-engine/datafusion-ext-exprs" }
5151
datafusion-ext-functions = { path = "./native-engine/datafusion-ext-functions" }
5252
datafusion-ext-plans = { path = "./native-engine/datafusion-ext-plans" }
5353

54-
# datafusion: branch=v36-blaze
55-
datafusion = { version = "36.0.0" }
54+
# datafusion: branch=v42-blaze
55+
datafusion = { version = "42.0.0" }
5656

5757
orc-rust = { version = "0.3.1" }
5858

59-
# arrow: branch=v50-blaze
60-
arrow = { version = "50.0.0", features = ["ffi"]}
61-
arrow-schema = { version = "50.0.0", features = ["serde"] }
62-
parquet = { version = "50.0.0" }
59+
# arrow: branch=v53-blaze
60+
arrow = { version = "53.0.0", features = ["ffi"]}
61+
arrow-schema = { version = "53.0.0", features = ["serde"] }
62+
parquet = { version = "53.0.0" }
6363

6464
# serde_json: branch=v1.0.96-blaze
6565
serde_json = { version = "1.0.96" }
6666

6767
[patch.crates-io]
68-
# datafusion: branch=v36-blaze
69-
datafusion = { git = "https://github.com/harveyyue/datafusion.git", rev = "d33877f8fbc7c57de946dc6081b2b357eedd0df9"}
70-
datafusion-common = { git = "https://github.com/harveyyue/datafusion.git", rev = "d33877f8fbc7c57de946dc6081b2b357eedd0df9"}
71-
datafusion-expr = { git = "https://github.com/harveyyue/datafusion.git", rev = "d33877f8fbc7c57de946dc6081b2b357eedd0df9"}
72-
datafusion-execution = { git = "https://github.com/harveyyue/datafusion.git", rev = "d33877f8fbc7c57de946dc6081b2b357eedd0df9"}
73-
datafusion-optimizer = { git = "https://github.com/harveyyue/datafusion.git", rev = "d33877f8fbc7c57de946dc6081b2b357eedd0df9"}
74-
datafusion-physical-expr = { git = "https://github.com/harveyyue/datafusion.git", rev = "d33877f8fbc7c57de946dc6081b2b357eedd0df9"}
75-
orc-rust = { git = "https://github.com/harveyyue/datafusion-orc.git", rev = "f0ff4bcffa762b62e8c57ed4c2f6e1a9547b4abb"}
68+
# datafusion: branch=v42-blaze
69+
datafusion = { git = "https://github.com/blaze-init/arrow-datafusion.git", rev = "dc799de77"}
70+
datafusion-common = { git = "https://github.com/blaze-init/arrow-datafusion.git", rev = "dc799de77"}
71+
datafusion-expr = { git = "https://github.com/blaze-init/arrow-datafusion.git", rev = "dc799de77"}
72+
datafusion-execution = { git = "https://github.com/blaze-init/arrow-datafusion.git", rev = "dc799de77"}
73+
datafusion-optimizer = { git = "https://github.com/blaze-init/arrow-datafusion.git", rev = "dc799de77"}
74+
datafusion-physical-expr = { git = "https://github.com/blaze-init/arrow-datafusion.git", rev = "dc799de77"}
75+
orc-rust = { git = "https://github.com/blaze-init/datafusion-orc.git", rev = "c54bfb5"}
7676

77-
# arrow: branch=v50-blaze
78-
arrow = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "7471d70f7ae6edd5d4da82b7d966a8ede720e499"}
79-
arrow-arith = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "7471d70f7ae6edd5d4da82b7d966a8ede720e499"}
80-
arrow-array = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "7471d70f7ae6edd5d4da82b7d966a8ede720e499"}
81-
arrow-buffer = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "7471d70f7ae6edd5d4da82b7d966a8ede720e499"}
82-
arrow-cast = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "7471d70f7ae6edd5d4da82b7d966a8ede720e499"}
83-
arrow-data = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "7471d70f7ae6edd5d4da82b7d966a8ede720e499"}
84-
arrow-ord = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "7471d70f7ae6edd5d4da82b7d966a8ede720e499"}
85-
arrow-row = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "7471d70f7ae6edd5d4da82b7d966a8ede720e499"}
86-
arrow-schema = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "7471d70f7ae6edd5d4da82b7d966a8ede720e499"}
87-
arrow-select = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "7471d70f7ae6edd5d4da82b7d966a8ede720e499"}
88-
arrow-string = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "7471d70f7ae6edd5d4da82b7d966a8ede720e499"}
89-
parquet = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "7471d70f7ae6edd5d4da82b7d966a8ede720e499"}
77+
# arrow: branch=v53-blaze
78+
arrow = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "9dbfd9018e"}
79+
arrow-arith = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "9dbfd9018e"}
80+
arrow-array = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "9dbfd9018e"}
81+
arrow-buffer = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "9dbfd9018e"}
82+
arrow-cast = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "9dbfd9018e"}
83+
arrow-data = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "9dbfd9018e"}
84+
arrow-ord = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "9dbfd9018e"}
85+
arrow-row = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "9dbfd9018e"}
86+
arrow-schema = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "9dbfd9018e"}
87+
arrow-select = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "9dbfd9018e"}
88+
arrow-string = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "9dbfd9018e"}
89+
parquet = { git = "https://github.com/blaze-init/arrow-rs.git", rev = "9dbfd9018e"}
9090

9191
# serde_json: branch=v1.0.96-blaze
9292
serde_json = { git = "https://github.com/blaze-init/json", branch = "v1.0.96-blaze" }

native-engine/blaze-serde/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -15,7 +15,7 @@ datafusion-ext-exprs = { workspace = true }
1515
datafusion-ext-functions = { workspace = true }
1616
datafusion-ext-plans = { workspace = true }
1717
log = "0.4.22"
18-
object_store = "0.9.0"
18+
object_store = "0.11.0"
1919
prost = "0.13.3"
2020

2121
[build-dependencies]

native-engine/blaze-serde/build.rs

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -17,7 +17,6 @@ fn main() -> Result<(), String> {
1717
println!("cargo:rerun-if-env-changed=FORCE_REBUILD");
1818

1919
println!("cargo:rerun-if-changed=proto/blaze.proto");
20-
tonic_build::configure()
21-
.compile(&["proto/blaze.proto"], &["proto"])
20+
tonic_build::compile_protos("proto/blaze.proto")
2221
.map_err(|e| format!("protobuf compilation failed: {}", e))
2322
}

native-engine/blaze-serde/proto/blaze.proto

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -215,7 +215,7 @@ enum ScalarFunction {
215215
Sqrt=17;
216216
Tan=18;
217217
Trunc=19;
218-
Array=20;
218+
NullIf=20;
219219
RegexpMatch=21;
220220
BitLength=22;
221221
Btrim=23;

native-engine/blaze-serde/src/from_proto.rs

Lines changed: 87 additions & 79 deletions
Original file line number | Diff line number | Diff line change
@@ -33,11 +33,10 @@ use datafusion::{
3333
physical_plan::FileScanConfig,
3434
},
3535
error::DataFusionError,
36-
execution::context::ExecutionProps,
37-
logical_expr::{BuiltinScalarFunction, ColumnarValue, Operator},
36+
logical_expr::{ColumnarValue, Operator, ScalarUDF, Volatility},
3837
physical_expr::{
3938
expressions::{in_list, LikeExpr, SCAndExpr, SCOrExpr},
40-
functions, ScalarFunctionExpr,
39+
ScalarFunctionExpr,
4140
},
4241
physical_plan::{
4342
expressions as phys_expr,
@@ -48,6 +47,7 @@ use datafusion::{
4847
union::UnionExec,
4948
ColumnStatistics, ExecutionPlan, Partitioning, PhysicalExpr, Statistics,
5049
},
50+
prelude::create_udf,
5151
};
5252
use datafusion_ext_commons::downcast_any;
5353
use datafusion_ext_exprs::{
@@ -116,7 +116,7 @@ fn bind(
116116
let new_children = expr_in
117117
.children()
118118
.iter()
119-
.map(|child_expr| bind(child_expr.clone(), input_schema))
119+
.map(|&child_expr| bind(child_expr.clone(), input_schema))
120120
.collect::<Result<Vec<_>, DataFusionError>>()?;
121121
Ok(expr_in.with_new_children(new_children)?)
122122
}
@@ -804,74 +804,75 @@ impl From<&protobuf::BoundReference> for Column {
804804
}
805805
}
806806

807-
impl From<&protobuf::ScalarFunction> for BuiltinScalarFunction {
808-
fn from(f: &protobuf::ScalarFunction) -> BuiltinScalarFunction {
807+
impl From<protobuf::ScalarFunction> for Arc<ScalarUDF> {
808+
fn from(f: protobuf::ScalarFunction) -> Self {
809+
use datafusion::functions as f;
809810
use protobuf::ScalarFunction;
811+
810812
match f {
811-
ScalarFunction::Sqrt => Self::Sqrt,
812-
ScalarFunction::Sin => Self::Sin,
813-
ScalarFunction::Cos => Self::Cos,
814-
ScalarFunction::Tan => Self::Tan,
815-
ScalarFunction::Asin => Self::Asin,
816-
ScalarFunction::Acos => Self::Acos,
817-
ScalarFunction::Atan => Self::Atan,
818-
ScalarFunction::Exp => Self::Exp,
819-
ScalarFunction::Log => Self::Log,
820-
ScalarFunction::Ln => Self::Ln,
821-
ScalarFunction::Log10 => Self::Log10,
822-
ScalarFunction::Floor => Self::Floor,
823-
ScalarFunction::Ceil => Self::Ceil,
824-
ScalarFunction::Round => Self::Round,
825-
ScalarFunction::Trunc => Self::Trunc,
826-
ScalarFunction::Abs => Self::Abs,
827-
ScalarFunction::OctetLength => Self::OctetLength,
828-
ScalarFunction::Concat => Self::Concat,
829-
ScalarFunction::Lower => Self::Lower,
830-
ScalarFunction::Upper => Self::Upper,
831-
ScalarFunction::Trim => Self::Trim,
832-
ScalarFunction::Ltrim => Self::Ltrim,
833-
ScalarFunction::Rtrim => Self::Rtrim,
834-
ScalarFunction::ToTimestamp => Self::ToTimestamp,
835-
ScalarFunction::Array => Self::MakeArray,
836-
// ScalarFunction::NullIf => todo!(),
837-
ScalarFunction::DatePart => Self::DatePart,
838-
ScalarFunction::DateTrunc => Self::DateTrunc,
839-
ScalarFunction::Md5 => Self::MD5,
840-
ScalarFunction::Sha224 => Self::SHA224,
841-
ScalarFunction::Sha256 => Self::SHA256,
842-
ScalarFunction::Sha384 => Self::SHA384,
843-
ScalarFunction::Sha512 => Self::SHA512,
844-
ScalarFunction::Digest => Self::Digest,
845-
ScalarFunction::ToTimestampMillis => Self::ToTimestampMillis,
846-
ScalarFunction::Log2 => Self::Log2,
847-
ScalarFunction::Signum => Self::Signum,
848-
ScalarFunction::Ascii => Self::Ascii,
849-
ScalarFunction::BitLength => Self::BitLength,
850-
ScalarFunction::Btrim => Self::Btrim,
851-
ScalarFunction::CharacterLength => Self::CharacterLength,
852-
ScalarFunction::Chr => Self::Chr,
853-
ScalarFunction::ConcatWithSeparator => Self::ConcatWithSeparator,
854-
ScalarFunction::InitCap => Self::InitCap,
855-
ScalarFunction::Left => Self::Left,
856-
ScalarFunction::Lpad => Self::Lpad,
857-
ScalarFunction::Random => Self::Random,
858-
ScalarFunction::RegexpReplace => Self::RegexpReplace,
859-
ScalarFunction::Repeat => Self::Repeat,
860-
ScalarFunction::Replace => Self::Replace,
861-
ScalarFunction::Reverse => Self::Reverse,
862-
ScalarFunction::Right => Self::Right,
863-
ScalarFunction::Rpad => Self::Rpad,
864-
ScalarFunction::SplitPart => Self::SplitPart,
865-
ScalarFunction::StartsWith => Self::StartsWith,
866-
ScalarFunction::Strpos => Self::Strpos,
867-
ScalarFunction::Substr => Self::Substr,
868-
ScalarFunction::ToHex => Self::ToHex,
869-
ScalarFunction::ToTimestampMicros => Self::ToTimestampMicros,
870-
ScalarFunction::ToTimestampSeconds => Self::ToTimestampSeconds,
871-
ScalarFunction::Now => Self::Now,
872-
ScalarFunction::Translate => Self::Translate,
873-
ScalarFunction::RegexpMatch => Self::RegexpMatch,
874-
ScalarFunction::Coalesce => Self::Coalesce,
813+
ScalarFunction::Sqrt => f::math::sqrt(),
814+
ScalarFunction::Sin => f::math::sin(),
815+
ScalarFunction::Cos => f::math::cos(),
816+
ScalarFunction::Tan => f::math::tan(),
817+
ScalarFunction::Asin => f::math::asin(),
818+
ScalarFunction::Acos => f::math::acos(),
819+
ScalarFunction::Atan => f::math::atan(),
820+
ScalarFunction::Exp => f::math::exp(),
821+
ScalarFunction::Log => f::math::log(),
822+
ScalarFunction::Ln => f::math::ln(),
823+
ScalarFunction::Log10 => f::math::log10(),
824+
ScalarFunction::Floor => f::math::floor(),
825+
ScalarFunction::Ceil => f::math::ceil(),
826+
ScalarFunction::Round => f::math::round(),
827+
ScalarFunction::Trunc => f::math::trunc(),
828+
ScalarFunction::Abs => f::math::abs(),
829+
ScalarFunction::OctetLength => f::string::octet_length(),
830+
ScalarFunction::Concat => f::string::concat(),
831+
ScalarFunction::Lower => f::string::lower(),
832+
ScalarFunction::Upper => f::string::upper(),
833+
ScalarFunction::Trim => f::string::btrim(),
834+
ScalarFunction::Ltrim => f::string::ltrim(),
835+
ScalarFunction::Rtrim => f::string::rtrim(),
836+
ScalarFunction::ToTimestamp => f::datetime::to_timestamp(),
837+
ScalarFunction::NullIf => f::core::nullif(),
838+
ScalarFunction::DatePart => f::datetime::date_part(),
839+
ScalarFunction::DateTrunc => f::datetime::date_trunc(),
840+
ScalarFunction::Md5 => f::crypto::md5(),
841+
ScalarFunction::Sha224 => f::crypto::sha224(),
842+
ScalarFunction::Sha256 => f::crypto::sha256(),
843+
ScalarFunction::Sha384 => f::crypto::sha384(),
844+
ScalarFunction::Sha512 => f::crypto::sha512(),
845+
ScalarFunction::Digest => f::crypto::digest(),
846+
ScalarFunction::ToTimestampMillis => f::datetime::to_timestamp_millis(),
847+
ScalarFunction::Log2 => f::math::log2(),
848+
ScalarFunction::Signum => f::math::signum(),
849+
ScalarFunction::Ascii => f::string::ascii(),
850+
ScalarFunction::BitLength => f::string::bit_length(),
851+
ScalarFunction::Btrim => f::string::btrim(),
852+
ScalarFunction::CharacterLength => f::unicode::character_length(),
853+
ScalarFunction::Chr => f::string::chr(),
854+
ScalarFunction::ConcatWithSeparator => f::string::concat_ws(),
855+
ScalarFunction::InitCap => f::string::initcap(),
856+
ScalarFunction::Left => f::unicode::left(),
857+
ScalarFunction::Lpad => f::unicode::lpad(),
858+
ScalarFunction::Random => f::math::random(),
859+
ScalarFunction::RegexpReplace => f::regex::regexp_replace(),
860+
ScalarFunction::Repeat => f::string::repeat(),
861+
ScalarFunction::Replace => f::string::replace(),
862+
ScalarFunction::Reverse => f::unicode::reverse(),
863+
ScalarFunction::Right => f::unicode::right(),
864+
ScalarFunction::Rpad => f::unicode::rpad(),
865+
ScalarFunction::SplitPart => f::string::split_part(),
866+
ScalarFunction::StartsWith => f::string::starts_with(),
867+
ScalarFunction::Strpos => f::unicode::strpos(),
868+
ScalarFunction::Substr => f::unicode::substr(),
869+
ScalarFunction::ToHex => f::string::to_hex(),
870+
ScalarFunction::ToTimestampMicros => f::datetime::to_timestamp_micros(),
871+
ScalarFunction::ToTimestampSeconds => f::datetime::to_timestamp_seconds(),
872+
ScalarFunction::Now => f::datetime::now(),
873+
ScalarFunction::Translate => f::unicode::translate(),
874+
ScalarFunction::RegexpMatch => f::regex::regexp_match(),
875+
ScalarFunction::Coalesce => f::core::coalesce(),
875876
ScalarFunction::SparkExtFunctions => {
876877
unreachable!()
877878
}
@@ -998,20 +999,26 @@ fn try_parse_physical_expr(
998999
.map(|x| try_parse_physical_expr(x, input_schema))
9991000
.collect::<Result<Vec<_>, _>>()?;
10001001

1001-
let execution_props = ExecutionProps::new();
1002-
let fun_expr = if scalar_function == protobuf::ScalarFunction::SparkExtFunctions {
1003-
datafusion_ext_functions::create_spark_ext_function(&e.name)?
1002+
let scalar_udf = if scalar_function == protobuf::ScalarFunction::SparkExtFunctions {
1003+
let fun = datafusion_ext_functions::create_spark_ext_function(&e.name)?;
1004+
Arc::new(create_udf(
1005+
"spark_ext_function",
1006+
args.iter()
1007+
.map(|e| e.data_type(input_schema))
1008+
.collect::<Result<Vec<_>, _>>()?,
1009+
Arc::new(convert_required!(e.return_type)?),
1010+
Volatility::Volatile,
1011+
fun,
1012+
))
10041013
} else {
1005-
functions::create_physical_fun(&(&scalar_function).into(), &execution_props)?
1014+
let scalar_udf: Arc<ScalarUDF> = scalar_function.into();
1015+
scalar_udf
10061016
};
1007-
10081017
Arc::new(ScalarFunctionExpr::new(
1009-
&e.name,
1010-
fun_expr,
1018+
scalar_udf.name(),
1019+
scalar_udf.clone(),
10111020
args,
10121021
convert_required!(e.return_type)?,
1013-
None,
1014-
false,
10151022
))
10161023
}
10171024
ExprType::SparkUdfWrapperExpr(e) => Arc::new(SparkUDFWrapperExpr::try_new(
@@ -1153,6 +1160,7 @@ impl TryFrom<&protobuf::PartitionedFile> for PartitionedFile {
11531160
.map(|v| v.try_into())
11541161
.collect::<Result<Vec<_>, _>>()?,
11551162
range: val.range.as_ref().map(|v| v.try_into()).transpose()?,
1163+
statistics: None,
11561164
extensions: None,
11571165
})
11581166
}

native-engine/blaze-serde/src/lib.rs

Lines changed: 6 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -498,7 +498,11 @@ impl TryInto<datafusion::scalar::ScalarValue> for &protobuf::ScalarValue {
498498
.map(|val| val.try_into())
499499
.collect::<Result<Vec<_>, _>>()?;
500500
let scalar_type: DataType = pb_scalar_type.try_into()?;
501-
ScalarValue::List(ScalarValue::new_list(&typechecked_values, &scalar_type))
501+
ScalarValue::List(ScalarValue::new_list(
502+
&typechecked_values,
503+
&scalar_type,
504+
true,
505+
))
502506
}
503507
protobuf::scalar_value::Value::NullValue(v) => {
504508
match v.datatype.as_ref().expect("missing scalar data type") {
@@ -633,6 +637,7 @@ impl TryInto<ScalarValue> for &protobuf::ScalarListValue {
633637
Ok(ScalarValue::List(ScalarValue::new_list(
634638
&values,
635639
&element_scalar_type,
640+
true,
636641
)))
637642
}
638643
}

native-engine/blaze/src/metrics.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -39,7 +39,7 @@ pub fn update_spark_metric_node(
3939
)?;
4040

4141
// update children nodes
42-
for (i, child_plan) in execution_plan.children().iter().enumerate() {
42+
for (i, &child_plan) in execution_plan.children().iter().enumerate() {
4343
let child_metric_node = jni_call!(
4444
SparkMetricNode(metric_node).getChild(i as i32) -> JObject
4545
)?;

native-engine/datafusion-ext-commons/src/io/batch_serde.rs

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -162,14 +162,13 @@ fn write_bits_buffer<W: Write>(
162162

163163
fn read_bits_buffer<R: Read>(input: &mut R, bits_len: usize) -> Result<Buffer> {
164164
let buf = read_bytes_slice(input, (bits_len + 7) / 8)?;
165-
Ok(Buffer::from(buf))
165+
Ok(Buffer::from_vec(buf.into()))
166166
}
167167

168168
fn write_primitive_array<W: Write, PT: ArrowPrimitiveType>(
169169
array: &PrimitiveArray<PT>,
170170
output: &mut W,
171171
) -> Result<()> {
172-
let _item_size = PT::get_byte_width();
173172
let offset = array.offset();
174173
let len = array.len();
175174
let array_data = array.to_data();
@@ -510,7 +509,7 @@ fn read_bytes_array<R: Read>(
510509
let offsets_buffer: Buffer = offsets_buffer.into();
511510

512511
let data_len = cur_offset as usize;
513-
let data_buffer = Buffer::from(read_bytes_slice(input, data_len)?);
512+
let data_buffer = Buffer::from_vec(read_bytes_slice(input, data_len)?.into());
514513
let array_data = ArrayData::try_new(
515514
data_type,
516515
num_rows,

native-engine/datafusion-ext-commons/src/io/scalar_serde.rs

Lines changed: 5 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -90,8 +90,8 @@ pub fn write_scalar<W: Write>(value: &ScalarValue, nullable: bool, output: &mut
9090
write_array(col, output)?;
9191
}
9292
}
93-
ScalarValue::Map(value, _bool) => {
94-
write_scalar(value, nullable, output)?;
93+
ScalarValue::Map(v) => {
94+
write_array(v.as_ref(), output)?;
9595
}
9696
other => df_unimplemented_err!("unsupported scalarValue type: {other}")?,
9797
}
@@ -186,9 +186,9 @@ pub fn read_scalar<R: Read>(
186186
.collect::<Result<Vec<_>>>()?;
187187
ScalarValue::Struct(Arc::new(StructArray::new(fields.clone(), columns, None)))
188188
}
189-
DataType::Map(field, bool) => {
190-
let map_value = read_scalar(input, field.data_type(), field.is_nullable())?;
191-
ScalarValue::Map(Box::new(map_value), *bool)
189+
DataType::Map(field, _bool) => {
190+
let map = read_array(input, field.data_type(), 1)?.as_map().clone();
191+
ScalarValue::Map(Arc::new(map))
192192
}
193193
other => df_unimplemented_err!("unsupported data type: {other}")?,
194194
})

0 commit comments

Comments (0)