Skip to content
Permalink

Comparing changes

Choose two branches to see what’s changed or to start a new pull request. If you need to, you can also compare across forks or learn more about diff comparisons.

Open a pull request

Create a new pull request by comparing changes across two branches. If you need to, you can also compare across forks. Learn more about diff comparisons here.
base repository: Netflix/hollow
Failed to load repositories. Confirm that selected base ref is valid, then try again.
Loading
base: v7.6.3-rc.4
Choose a base ref
...
head repository: Netflix/hollow
Failed to load repositories. Confirm that selected head ref is valid, then try again.
Loading
compare: pr
Choose a head ref
  • 1 commit
  • 1 file changed
  • 1 contributor

Commits on Aug 17, 2023

  1. Merge change from steven

    Sunjeet committed Aug 17, 2023
    Copy the full SHA
    d964378 View commit details
Showing with 22 additions and 25 deletions.
  1. +22 −25 hollow/src/main/java/com/netflix/hollow/tools/history/keyindex/HollowOrdinalMapper.java
Original file line number Diff line number Diff line change
@@ -45,7 +45,7 @@ public class HollowOrdinalMapper {
* */
private int[] hashToAssignedOrdinal;
private int[][] fieldHashToObjectOrdinal;
private int[][] fieldHashToAssignedOrdinal;
private IntList[][] fieldHashToAssignedOrdinal;
private int[] assignedOrdinalToIndex;

private final PrimaryKey primaryKey;
@@ -58,13 +58,12 @@ public class HollowOrdinalMapper {
public HollowOrdinalMapper(PrimaryKey primaryKey, boolean[] keyFieldIsIndexed, int[][] keyFieldIndices, FieldType[] keyFieldTypes) {
this.hashToAssignedOrdinal = new int[STARTING_SIZE];
this.fieldHashToObjectOrdinal = new int[primaryKey.numFields()][STARTING_SIZE];
this.fieldHashToAssignedOrdinal = new int[primaryKey.numFields()][STARTING_SIZE];
this.fieldHashToAssignedOrdinal = new IntList[primaryKey.numFields()][STARTING_SIZE];
this.assignedOrdinalToIndex = new int[STARTING_SIZE];

Arrays.fill(this.hashToAssignedOrdinal, ORDINAL_NONE);
for(int field=0;field<primaryKey.numFields();field++) {
Arrays.fill(this.fieldHashToObjectOrdinal[field], ORDINAL_NONE);
Arrays.fill(this.fieldHashToAssignedOrdinal[field], ORDINAL_NONE);
}
Arrays.fill(this.assignedOrdinalToIndex, ORDINAL_NONE);

@@ -77,33 +76,32 @@ public HollowOrdinalMapper(PrimaryKey primaryKey, boolean[] keyFieldIsIndexed, i
}

public void addMatches(int hashCode, Object objectToMatch, int field, FieldType type, IntList results) {
int[] fieldHashes = fieldHashToAssignedOrdinal[field];
IntList[] fieldHashes = fieldHashToAssignedOrdinal[field];
int scanIndex = indexFromHash(hashCode, fieldHashes.length);

while(fieldHashes[scanIndex] != ORDINAL_NONE) {
int ordinal = fieldHashes[scanIndex];
Object matchingObject = getFieldObject(ordinal, field, type);
if(objectToMatch.equals(matchingObject)) {
results.add(ordinal);
}
scanIndex = (scanIndex + 1) % fieldHashes.length;
if (fieldHashes[scanIndex] == null)
return;
for(int i=0;i<fieldHashes[scanIndex].size();i++) {
int assignedOrdinal = fieldHashes[scanIndex].get(i);
Object object = getFieldObject(assignedOrdinal, field, type);
if(object.equals(objectToMatch))
results.add(assignedOrdinal);
}
}

public void writeKeyFieldHash(Object fieldObject, int assignedOrdinal, int fieldIdx) {
if (!keyFieldIsIndexed[fieldIdx])
return;

int[] fieldHashes = fieldHashToAssignedOrdinal[fieldIdx];
IntList[] fieldHashes = fieldHashToAssignedOrdinal[fieldIdx];

int fieldHash = hashObject(fieldObject);
int newIndex = indexFromHash(fieldHash, fieldHashes.length);

while (fieldHashes[newIndex] != ORDINAL_NONE) {
newIndex = (newIndex + 1) % fieldHashes.length;
if(fieldHashes[newIndex]==null) {
fieldHashes[newIndex] = new IntList();
}

fieldHashes[newIndex] = assignedOrdinal;
fieldHashes[newIndex].add(assignedOrdinal);
}

public void prepareForRead() {
@@ -210,25 +208,24 @@ private void expandAndRehashTable() {
Arrays.fill(newTable, ORDINAL_NONE);

int[][] newFieldMappings = new int[primaryKey.numFields()][hashToAssignedOrdinal.length*2];
int[][] newFieldHashToOrdinal = new int[primaryKey.numFields()][hashToAssignedOrdinal.length*2];
for(int i=0;i<primaryKey.numFields();i++) {
Arrays.fill(newFieldHashToOrdinal[i], ORDINAL_NONE);
}
IntList[][] newFieldHashToOrdinal = new IntList[primaryKey.numFields()][hashToAssignedOrdinal.length*2];
assignedOrdinalToIndex = Arrays.copyOf(assignedOrdinalToIndex, hashToAssignedOrdinal.length*2);

for(int fieldIdx=0;fieldIdx<primaryKey.numFields();fieldIdx++) {
int[] hashToOrdinal = fieldHashToAssignedOrdinal[fieldIdx];
IntList[] hashToOrdinal = fieldHashToAssignedOrdinal[fieldIdx];

for (int assignedOrdinal : hashToOrdinal) {
if (assignedOrdinal == ORDINAL_NONE)
for (IntList ordinalList : hashToOrdinal) {
if(ordinalList==null || ordinalList.size()==0)
continue;

Object originalFieldObject = getFieldObject(assignedOrdinal, fieldIdx, keyFieldTypes[fieldIdx]);
// Recompute original hash, based on the fact that all objects in this IntList have the same hash

Object originalFieldObject = getFieldObject(ordinalList.get(0), fieldIdx, keyFieldTypes[fieldIdx]);

int originalHash = hashObject(originalFieldObject);
int newIndex = indexFromHash(originalHash, newTable.length);

newFieldHashToOrdinal[fieldIdx][newIndex] = assignedOrdinal;
newFieldHashToOrdinal[fieldIdx][newIndex]=ordinalList;
}
}