Commit 758cfbc

Fix failing ITs

1 parent 6d27465 commit 758cfbc

2 files changed: +10 −6 lines changed

hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieDataBlock.java

Lines changed: 7 additions & 3 deletions
@@ -25,6 +25,7 @@
 import org.apache.hudi.common.schema.HoodieSchema;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.collection.ClosableIterator;
+import org.apache.hudi.exception.HoodieAvroSchemaException;
 import org.apache.hudi.exception.HoodieIOException;
 import org.apache.hudi.io.SeekableDataInputStream;
 import org.apache.hudi.storage.HoodieStorage;
@@ -380,10 +381,13 @@ protected HoodieSchema getSchemaFromHeader() {
         (schemaString) -> {
           try {
             return HoodieSchema.parse(schemaStr);
-          } catch (AvroTypeException e) {
+          } catch (HoodieAvroSchemaException e) {
             // Archived commits from earlier hudi versions fail the schema check
-            // So we retry in this one specific instance.
-            return HoodieSchema.parse(schemaStr, false);
+            // So we retry in this one specific instance with validation disabled
+            if (e.getCause() instanceof AvroTypeException) {
+              return HoodieSchema.parse(schemaStr, false);
+            }
+            throw e;
           }
         });
     return SCHEMA_MAP.get(schemaStr);
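For context, the patched catch block no longer retries on every parse failure: it inspects the cause of the wrapping exception and falls back to lenient parsing only when Avro's type check was the culprit; anything else is rethrown. A minimal standalone sketch of that pattern follows, using a hypothetical WrappedSchemaException stand-in for HoodieAvroSchemaException and plain Avro Schema.Parser calls in place of HoodieSchema.parse:

import org.apache.avro.AvroTypeException;
import org.apache.avro.Schema;

public class SchemaParseFallback {

  // Hypothetical stand-in for HoodieAvroSchemaException: a runtime wrapper
  // whose cause may (or may not) be Avro's AvroTypeException.
  static class WrappedSchemaException extends RuntimeException {
    WrappedSchemaException(String message, Throwable cause) {
      super(message, cause);
    }
  }

  // Hypothetical strict parser: validates field defaults and wraps failures,
  // roughly what the strict HoodieSchema.parse(schemaStr) path is assumed to do.
  static Schema strictParse(String schemaStr) {
    try {
      return new Schema.Parser().setValidateDefaults(true).parse(schemaStr);
    } catch (AvroTypeException e) {
      throw new WrappedSchemaException("Schema failed strict validation", e);
    }
  }

  // Mirrors the patched catch block: retry leniently only when the wrapper's
  // cause is an AvroTypeException (e.g. schemas from older archived commits),
  // otherwise rethrow the original failure.
  static Schema parseWithFallback(String schemaStr) {
    try {
      return strictParse(schemaStr);
    } catch (WrappedSchemaException e) {
      if (e.getCause() instanceof AvroTypeException) {
        // Lenient parse with default-value validation disabled.
        return new Schema.Parser().setValidateDefaults(false).parse(schemaStr);
      }
      throw e;
    }
  }
}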

hudi-hadoop-common/src/test/java/org/apache/hudi/common/functional/TestHoodieLogFormat.java

Lines changed: 3 additions & 3 deletions
@@ -580,7 +580,7 @@ public void testBasicAppendAndRead(HoodieLogBlockType dataBlockType) throws IOEx
         "Read records size should be equal to the written records size");
     assertEquals(convertAvroToSerializableIndexedRecords(copyOfRecords1), recordsRead1,
         "Both records lists should be the same. (ordering guaranteed)");
-    assertEquals(dataBlockRead.getSchema(), getSimpleSchema());
+    assertEquals(dataBlockRead.getSchema().toAvroSchema(), getSimpleSchema());

     reader.hasNext();
     nextBlock = reader.next();
@@ -661,7 +661,7 @@ public void testCDCBlock() throws IOException, InterruptedException {
     List<IndexedRecord> recordsRead = getRecords(dataBlockRead);
     assertEquals(3, recordsRead.size(),
         "Read records size should be equal to the written records size");
-    assertEquals(dataBlockRead.getSchema(), cdcSchema);
+    assertEquals(dataBlockRead.getSchema().toAvroSchema(), cdcSchema);

     GenericRecord insert = (GenericRecord) recordsRead.stream()
         .filter(record -> record.get(0).toString().equals("i")).findFirst().get();
@@ -2796,7 +2796,7 @@ public void testDataBlockFormatAppendAndReadWithProjectedSchema(
         "Read records size should be equal to the written records size");
     assertEquals(expectedRecords, recordsRead,
         "Both records lists should be the same. (ordering guaranteed)");
-    assertEquals(dataBlockRead.getSchema(), projectedSchema);
+    assertEquals(dataBlockRead.getSchema().toAvroSchema(), projectedSchema);

     int bytesRead = (int) BenchmarkCounter.getBytesRead();
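The three assertion updates all follow from getSchema() returning a HoodieSchema wrapper rather than a raw Avro Schema, so the test has to unwrap it before comparing against the schema it built itself. A small sketch of that shape, using a hypothetical SchemaWrapper in place of the real HoodieSchema:

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

public class SchemaAssertionSketch {

  // Hypothetical stand-in for HoodieSchema: wraps an Avro Schema and exposes
  // it via toAvroSchema(), which is what the updated assertions rely on.
  static class SchemaWrapper {
    private final Schema avroSchema;

    SchemaWrapper(Schema avroSchema) {
      this.avroSchema = avroSchema;
    }

    Schema toAvroSchema() {
      return avroSchema;
    }
  }

  public static void main(String[] args) {
    Schema expected = SchemaBuilder.record("SimpleRecord").fields()
        .requiredString("name")
        .requiredLong("ts")
        .endRecord();

    // Pretend this came from dataBlockRead.getSchema().
    SchemaWrapper actual = new SchemaWrapper(expected);

    // Comparing the wrapper object to the raw Avro Schema would fail;
    // comparing the unwrapped schema matches the updated assertions.
    assertEquals(expected, actual.toAvroSchema());
    System.out.println("Schemas match");
  }
}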
