Prevent load / publish failures when missing types are defined in hash keys (#679)
eduardoramirez authored Apr 22, 2024
1 parent d32be3c commit cd3a32d
Showing 6 changed files with 568 additions and 22 deletions.
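
For context on the commit message: the guarded failure occurs when a schema's hash key refers to a field of a type that is absent from a particular state. The sketch below (hypothetical Movie / CastMember classes, not part of this commit) shows such a data model declared with the object-mapper @HollowHashKey annotation; if the referenced element type is missing from a state, the hash-key field path cannot be bound, which previously aborted the publish or the consumer load and is now downgraded to a warning.

import com.netflix.hollow.core.write.objectmapper.HollowHashKey;
import java.util.Set;

// Hypothetical producer data model, for illustration only.
class Movie {
    long id;

    // The hash key binds to a field of the referenced element type. If CastMember
    // is absent from a given state, that binding fails with a NOT_BINDABLE
    // FieldPathException, which this commit catches and logs instead of rethrowing.
    @HollowHashKey(fields = "actorId")
    Set<CastMember> cast;
}

class CastMember {
    long actorId;
    String name;
}

The net effect is availability over strict validation: an unbindable hash key no longer fails the whole cycle; the affected type simply operates without a key value deriver (read side) or primary key hasher (write side).
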
@@ -232,8 +232,8 @@ static FieldPath<FieldSegment> createFieldPath(
/**
* An exception contain structured information when a field path cannot be bound.
*/
static final class FieldPathException extends IllegalArgumentException {
enum ErrorKind {
public static final class FieldPathException extends IllegalArgumentException {
public enum ErrorKind {
NOT_BINDABLE,
NOT_FOUND,
NOT_FULL,
@@ -242,7 +242,7 @@ enum ErrorKind {
;
}

final ErrorKind error;
public final ErrorKind error;
final String rootType;
final String[] segments;
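
Because FieldPathException and its ErrorKind are now public, code outside com.netflix.hollow.core.index can make the same distinction that the read and write states below rely on: treat NOT_BINDABLE as a tolerable, degraded condition and rethrow everything else. A minimal sketch of that guard (the helper class and method name are hypothetical; the HollowPrimaryKeyValueDeriver constructor arguments follow the usage shown in this diff):

import com.netflix.hollow.core.index.FieldPaths;
import com.netflix.hollow.core.index.key.HollowPrimaryKeyValueDeriver;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;

// Hypothetical helper mirroring the guard adopted by the read states in this commit.
final class HashKeySupport {

    /** Returns a value deriver for the hash key, or null when it cannot be bound in this state. */
    static HollowPrimaryKeyValueDeriver deriverOrNull(PrimaryKey hashKey, HollowReadStateEngine readEngine) {
        try {
            return new HollowPrimaryKeyValueDeriver(hashKey, readEngine);
        } catch (FieldPaths.FieldPathException e) {
            if (e.error == FieldPaths.FieldPathException.ErrorKind.NOT_BINDABLE) {
                return null; // a hash-key field could not be bound to a type in this state
            }
            throw e;         // other error kinds (NOT_FOUND, NOT_FULL, ...) still propagate
        }
    }
}
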

@@ -16,12 +16,11 @@
*/
package com.netflix.hollow.core.read.engine.map;

import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;

import com.netflix.hollow.api.sampling.DisabledSamplingDirector;
import com.netflix.hollow.api.sampling.HollowMapSampler;
import com.netflix.hollow.api.sampling.HollowSampler;
import com.netflix.hollow.api.sampling.HollowSamplingDirector;
import com.netflix.hollow.core.index.FieldPaths;
import com.netflix.hollow.core.index.key.HollowPrimaryKeyValueDeriver;
import com.netflix.hollow.core.memory.MemoryMode;
import com.netflix.hollow.core.memory.encoding.GapEncodedVariableLengthIntegerReader;
@@ -43,11 +42,17 @@
import com.netflix.hollow.tools.checksum.HollowChecksum;
import java.io.IOException;
import java.util.BitSet;
import java.util.logging.Level;
import java.util.logging.Logger;

import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;
import static com.netflix.hollow.core.index.FieldPaths.FieldPathException.ErrorKind.NOT_BINDABLE;

/**
* A {@link HollowTypeReadState} for MAP type records.
*/
public class HollowMapTypeReadState extends HollowTypeReadState implements HollowMapTypeDataAccess {
private static final Logger LOG = Logger.getLogger(HollowMapTypeReadState.class.getName());

private final HollowMapSampler sampler;

@@ -332,9 +337,19 @@ public HollowPrimaryKeyValueDeriver getKeyDeriver() {
}

public void buildKeyDeriver() {
if(getSchema().getHashKey() != null)
this.keyDeriver = new HollowPrimaryKeyValueDeriver(getSchema().getHashKey(), getStateEngine());

if(getSchema().getHashKey() != null) {
try {
this.keyDeriver = new HollowPrimaryKeyValueDeriver(getSchema().getHashKey(), getStateEngine());
} catch (FieldPaths.FieldPathException e) {
if (e.error == NOT_BINDABLE) {
LOG.log(Level.WARNING, "Failed to create a key value deriver for " + getSchema().getHashKey() +
" because a field could not be bound to a type in the state");
} else {
throw e;
}
}
}

for(int i=0; i<shards.length; i++)
shards[i].setKeyDeriver(keyDeriver);
}
@@ -16,12 +16,11 @@
*/
package com.netflix.hollow.core.read.engine.set;

import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;

import com.netflix.hollow.api.sampling.DisabledSamplingDirector;
import com.netflix.hollow.api.sampling.HollowSampler;
import com.netflix.hollow.api.sampling.HollowSamplingDirector;
import com.netflix.hollow.api.sampling.HollowSetSampler;
import com.netflix.hollow.core.index.FieldPaths;
import com.netflix.hollow.core.index.key.HollowPrimaryKeyValueDeriver;
import com.netflix.hollow.core.memory.MemoryMode;
import com.netflix.hollow.core.memory.encoding.GapEncodedVariableLengthIntegerReader;
@@ -44,11 +43,17 @@
import com.netflix.hollow.tools.checksum.HollowChecksum;
import java.io.IOException;
import java.util.BitSet;
import java.util.logging.Level;
import java.util.logging.Logger;

import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;
import static com.netflix.hollow.core.index.FieldPaths.FieldPathException.ErrorKind.NOT_BINDABLE;

/**
* A {@link HollowTypeReadState} for OBJECT type records.
*/
public class HollowSetTypeReadState extends HollowCollectionTypeReadState implements HollowSetTypeDataAccess {
private static final Logger LOG = Logger.getLogger(HollowSetTypeReadState.class.getName());

private final HollowSetSampler sampler;

@@ -310,9 +315,19 @@ public HollowPrimaryKeyValueDeriver getKeyDeriver() {
}

public void buildKeyDeriver() {
if(getSchema().getHashKey() != null)
this.keyDeriver = new HollowPrimaryKeyValueDeriver(getSchema().getHashKey(), getStateEngine());

if(getSchema().getHashKey() != null) {
try {
this.keyDeriver = new HollowPrimaryKeyValueDeriver(getSchema().getHashKey(), getStateEngine());
} catch (FieldPaths.FieldPathException e) {
if (e.error == NOT_BINDABLE) {
LOG.log(Level.WARNING, "Failed to create a key value deriver for " + getSchema().getHashKey() +
" because a field could not be bound to a type in the state");
} else {
throw e;
}
}
}

for(int i=0;i<shards.length;i++)
shards[i].setKeyDeriver(keyDeriver);
}
@@ -16,6 +16,7 @@
*/
package com.netflix.hollow.core.write;

import com.netflix.hollow.core.index.FieldPaths;
import com.netflix.hollow.core.memory.ByteData;
import com.netflix.hollow.core.memory.ByteDataArray;
import com.netflix.hollow.core.memory.ThreadSafeBitSet;
@@ -26,8 +27,13 @@
import com.netflix.hollow.core.schema.HollowMapSchema;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;

import static com.netflix.hollow.core.index.FieldPaths.FieldPathException.ErrorKind.NOT_BINDABLE;

public class HollowMapTypeWriteState extends HollowTypeWriteState {
private static final Logger LOG = Logger.getLogger(HollowMapTypeWriteState.class.getName());

/// statistics required for writing fixed length set data
private int bitsPerMapPointer;
@@ -212,9 +218,19 @@ public void calculateSnapshot() {

HollowWriteStateEnginePrimaryKeyHasher primaryKeyHasher = null;

if(getSchema().getHashKey() != null)
primaryKeyHasher = new HollowWriteStateEnginePrimaryKeyHasher(getSchema().getHashKey(), getStateEngine());

if(getSchema().getHashKey() != null) {
try {
primaryKeyHasher = new HollowWriteStateEnginePrimaryKeyHasher(getSchema().getHashKey(), getStateEngine());
} catch (FieldPaths.FieldPathException e) {
if (e.error == NOT_BINDABLE) {
LOG.log(Level.WARNING, "Failed to create a key hasher for " + getSchema().getHashKey() +
" because a field could not be bound to a type in the state");
} else {
throw e;
}
}
}

for(int ordinal=0;ordinal<=maxOrdinal;ordinal++) {
int shardNumber = ordinal & shardMask;
int shardOrdinal = ordinal / numShards;
@@ -378,8 +394,18 @@ private void calculateDelta(ThreadSafeBitSet fromCyclePopulated, ThreadSafeBitSe

HollowWriteStateEnginePrimaryKeyHasher primaryKeyHasher = null;

if(getSchema().getHashKey() != null)
primaryKeyHasher = new HollowWriteStateEnginePrimaryKeyHasher(getSchema().getHashKey(), getStateEngine());
if(getSchema().getHashKey() != null) {
try {
primaryKeyHasher = new HollowWriteStateEnginePrimaryKeyHasher(getSchema().getHashKey(), getStateEngine());
} catch (FieldPaths.FieldPathException e) {
if (e.error == NOT_BINDABLE) {
LOG.log(Level.WARNING, "Failed to create a key hasher for " + getSchema().getHashKey() +
" because a field could not be bound to a type in the state");
} else {
throw e;
}
}
}

for(int ordinal=0;ordinal<=maxOrdinal;ordinal++) {
int shardNumber = ordinal & shardMask;
@@ -16,6 +16,7 @@
*/
package com.netflix.hollow.core.write;

import com.netflix.hollow.core.index.FieldPaths;
import com.netflix.hollow.core.memory.ByteData;
import com.netflix.hollow.core.memory.ByteDataArray;
import com.netflix.hollow.core.memory.ThreadSafeBitSet;
@@ -26,8 +27,13 @@
import com.netflix.hollow.core.schema.HollowSetSchema;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;

import static com.netflix.hollow.core.index.FieldPaths.FieldPathException.ErrorKind.NOT_BINDABLE;

public class HollowSetTypeWriteState extends HollowTypeWriteState {
private static final Logger LOG = Logger.getLogger(HollowSetTypeWriteState.class.getName());

/// statistics required for writing fixed length set data
private int bitsPerSetPointer;
@@ -191,8 +197,18 @@ public void calculateSnapshot() {

HollowWriteStateEnginePrimaryKeyHasher primaryKeyHasher = null;

if(getSchema().getHashKey() != null)
primaryKeyHasher = new HollowWriteStateEnginePrimaryKeyHasher(getSchema().getHashKey(), getStateEngine());
if(getSchema().getHashKey() != null) {
try {
primaryKeyHasher = new HollowWriteStateEnginePrimaryKeyHasher(getSchema().getHashKey(), getStateEngine());
} catch (FieldPaths.FieldPathException e) {
if (e.error == NOT_BINDABLE) {
LOG.log(Level.WARNING, "Failed to create a key hasher for " + getSchema().getHashKey() +
" because a field could not be bound to a type in the state");
} else {
throw e;
}
}
}

for(int ordinal=0;ordinal<=maxOrdinal;ordinal++) {
int shardNumber = ordinal & shardMask;
@@ -350,8 +366,18 @@ public void calculateDelta(ThreadSafeBitSet fromCyclePopulated, ThreadSafeBitSet

HollowWriteStateEnginePrimaryKeyHasher primaryKeyHasher = null;

if(getSchema().getHashKey() != null)
primaryKeyHasher = new HollowWriteStateEnginePrimaryKeyHasher(getSchema().getHashKey(), getStateEngine());
if(getSchema().getHashKey() != null) {
try {
primaryKeyHasher = new HollowWriteStateEnginePrimaryKeyHasher(getSchema().getHashKey(), getStateEngine());
} catch (FieldPaths.FieldPathException e) {
if (e.error == NOT_BINDABLE) {
LOG.log(Level.WARNING, "Failed to create a key hasher for " + getSchema().getHashKey() +
" because a field could not be bound to a type in the state");
} else {
throw e;
}
}
}

for(int ordinal=0;ordinal<=maxOrdinal;ordinal++) {
int shardNumber = ordinal & shardMask;
