diff --git a/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/internal/Murmur3Hash.java b/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/internal/Murmur3Hash.java
index d218d3a..b1ca11b 100644
--- a/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/internal/Murmur3Hash.java
+++ b/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/internal/Murmur3Hash.java
@@ -21,20 +21,18 @@ import static com.google.common.base.Strings.lenientFormat;
import static java.nio.charset.StandardCharsets.UTF_8;
/**
- * Murmur3Hash for x64 (Little Endian).
- *
- * Reference: https://en.wikipedia.org/wiki/MurmurHash
+ * Murmur3Hash for x86_64 (little endian).
+ *
+ * @see <a href="https://en.wikipedia.org/wiki/MurmurHash">MurmurHash (Wikipedia)</a>
*
- * This implementation provides span-based access for hashing content not available in a
- * {@link T:byte[]}
- *
*/
@SuppressWarnings("UnstableApiUsage")
@Immutable
public final class Murmur3Hash {
- private static final ByteBufAllocator allocator = ByteBufAllocator.DEFAULT;
private static final ByteBuf FALSE = Constant.add(false);
private static final ByteBuf TRUE = Constant.add(true);
+ private static final ByteBufAllocator allocator = ByteBufAllocator.DEFAULT;
private static final ByteBuf EMPTY_STRING = Constant.add("");
/**
@@ -47,8 +45,8 @@ public final class Murmur3Hash {
@SuppressWarnings("ConstantConditions")
public static HashCode128 Hash128(@Nonnull final String item, @Nonnull final HashCode128 seed) {
- checkNotNull(item, "value: null, seed: %s", seed);
- checkNotNull(seed, "value: %s, seed: null", item);
+ checkNotNull(item, "expected non-null item");
+ checkNotNull(seed, "expected non-null seed");
if (item.isEmpty()) {
return Hash128(EMPTY_STRING, seed);
@@ -70,10 +68,25 @@ public final class Murmur3Hash {
* @param seed The seed with which to initialize.
* @return The 128-bit hash represented as two 64-bit words encapsulated by a {@link HashCode128} instance.
*/
- public static HashCode128 Hash128(boolean item, HashCode128 seed) {
+ public static HashCode128 Hash128(final boolean item, final HashCode128 seed) {
return Murmur3Hash.Hash128(item ? TRUE : FALSE, seed);
}
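+
+ /**
+ * Computes a 128-bit Murmur3Hash value for a {@code short} data item, hashed as a two-byte little-endian value.
+ * <p>
+ * Illustrative usage (a sketch only; assumes a {@link HashCode128} seed is already available to the caller):
+ * <pre>{@code
+ * HashCode128 hash = Murmur3Hash.Hash128((short) 7, seed);
+ * }</pre>
+ *
+ * @param item The data item to hash.
+ * @param seed The seed with which to initialize.
+ * @return The 128-bit hash represented as two 64-bit words encapsulated by a {@link HashCode128} instance.
+ */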
+ public static HashCode128 Hash128(final short item, final HashCode128 seed) {
+ ByteBuf buffer = Unpooled.buffer(Short.BYTES).writeShortLE(item);
+ return Murmur3Hash.Hash128(buffer, seed);
+ }
+
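+ /**
+ * Computes a 128-bit Murmur3Hash value for a {@code byte} data item, hashed as a single byte.
+ *
+ * @param item The data item to hash.
+ * @param seed The seed with which to initialize.
+ * @return The 128-bit hash represented as two 64-bit words encapsulated by a {@link HashCode128} instance.
+ */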
+ public static HashCode128 Hash128(final byte item, final HashCode128 seed) {
+ ByteBuf buffer = Unpooled.buffer(Byte.BYTES).writeByte(item);
+ return Murmur3Hash.Hash128(buffer, seed);
+ }
+
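+ /**
+ * Computes a 128-bit Murmur3Hash value for an {@code int} data item, hashed as a four-byte little-endian value.
+ *
+ * @param item The data item to hash.
+ * @param seed The seed with which to initialize.
+ * @return The 128-bit hash represented as two 64-bit words encapsulated by a {@link HashCode128} instance.
+ */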
+ public static HashCode128 Hash128(final int item, final HashCode128 seed) {
+ ByteBuf buffer = Unpooled.buffer(Integer.BYTES).writeIntLE(item);
+ return Murmur3Hash.Hash128(buffer, seed);
+ }
+
/**
* Computes a 128-bit Murmur3Hash 128-bit value for a {@link ByteBuf} data item.
*
diff --git a/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/Namespace.java b/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/Namespace.java
index ff57ff7..ea0c14f 100644
--- a/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/Namespace.java
+++ b/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/Namespace.java
@@ -72,7 +72,7 @@ public class Namespace {
*/
public static Optional<Namespace> parse(String value) {
Optional<Namespace> ns = Json.parse(value);
- ns.ifPresent(SchemaValidator::Validate);
+ ns.ifPresent(SchemaValidator::validate);
return ns;
}
}
diff --git a/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/SchemaHash.java b/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/SchemaHash.java
index 7bf2d31..9cc75d3 100644
--- a/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/SchemaHash.java
+++ b/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/SchemaHash.java
@@ -7,6 +7,9 @@ import com.azure.data.cosmos.serialization.hybridrow.HashCode128;
import com.azure.data.cosmos.serialization.hybridrow.SchemaId;
import com.azure.data.cosmos.serialization.hybridrow.internal.Murmur3Hash;
+import java.util.Optional;
+import java.util.stream.Stream;
+
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.lenientFormat;
@@ -20,41 +23,41 @@ public final class SchemaHash {
* @param seed The seed to initialized the hash function.
* @return The logical 128-bit hash as a two-tuple (low, high).
*/
- public static HashCode128 ComputeHash(Namespace namespace, Schema schema, HashCode128 seed) {
+ public static HashCode128 computeHash(Namespace namespace, Schema schema, HashCode128 seed) {
HashCode128 hash = seed;
hash = Murmur3Hash.Hash128(schema.schemaId().value(), hash);
hash = Murmur3Hash.Hash128(schema.type().value(), hash);
- hash = SchemaHash.ComputeHash(namespace, schema.options(), hash);
+ hash = SchemaHash.computeHash(namespace, schema.options(), hash);
if (schema.partitionKeys() != null) {
for (PartitionKey partitionKey : schema.partitionKeys()) {
- hash = SchemaHash.ComputeHash(namespace, partitionKey, hash);
+ hash = SchemaHash.computeHash(namespace, partitionKey, hash);
}
}
if (schema.primarySortKeys() != null) {
for (PrimarySortKey p : schema.primarySortKeys()) {
- hash = SchemaHash.ComputeHash(namespace, p, hash);
+ hash = SchemaHash.computeHash(namespace, p, hash);
}
}
if (schema.staticKeys() != null) {
for (StaticKey p : schema.staticKeys()) {
- hash = SchemaHash.ComputeHash(namespace, p, hash);
+ hash = SchemaHash.computeHash(namespace, p, hash);
}
}
if (schema.properties() != null) {
for (Property p : schema.properties()) {
- hash = SchemaHash.ComputeHash(namespace, p, hash);
+ hash = SchemaHash.computeHash(namespace, p, hash);
}
}
return hash;
}
- private static HashCode128 ComputeHash(Namespace namespace, SchemaOptions options, HashCode128 seed) {
+ private static HashCode128 computeHash(Namespace namespace, SchemaOptions options, HashCode128 seed) {
HashCode128 hash = seed;
@@ -65,21 +68,21 @@ public final class SchemaHash {
return hash;
}
- private static HashCode128 ComputeHash(Namespace ns, Property p, HashCode128 seed) {
+ private static HashCode128 computeHash(Namespace ns, Property p, HashCode128 seed) {
HashCode128 hash = seed;
hash = Murmur3Hash.Hash128(p.path(), hash);
- hash = SchemaHash.ComputeHash(ns, p.propertyType(), hash);
+ hash = SchemaHash.computeHash(ns, p.propertyType(), hash);
return hash;
}
- private static HashCode128 ComputeHash(Namespace namespace, PropertyType p, HashCode128 seed) {
+ private static HashCode128 computeHash(Namespace namespace, PropertyType p, HashCode128 seed) {
HashCode128 hash = seed;
- hash = Murmur3Hash.Hash128(p.type(), hash);
+ hash = Murmur3Hash.Hash128(p.type().value(), hash);
hash = Murmur3Hash.Hash128(p.nullable(), hash);
if (p.apiType() != null) {
@@ -87,9 +90,12 @@ public final class SchemaHash {
}
if (p instanceof PrimitivePropertyType) {
+
PrimitivePropertyType pp = (PrimitivePropertyType) p;
- hash = Murmur3Hash.Hash128(pp.storage(), hash);
+
+ hash = Murmur3Hash.Hash128(pp.storage().value(), hash);
hash = Murmur3Hash.Hash128(pp.length(), hash);
+
return hash;
}
@@ -100,7 +106,7 @@ public final class SchemaHash {
if (p instanceof ArrayPropertyType) {
ArrayPropertyType spp = (ArrayPropertyType) p;
if (spp.items() != null) {
- hash = SchemaHash.ComputeHash(namespace, spp.items(), hash);
+ hash = SchemaHash.computeHash(namespace, spp.items(), hash);
}
return hash;
}
@@ -109,7 +115,7 @@ public final class SchemaHash {
ObjectPropertyType spp = (ObjectPropertyType) p;
if (spp.properties() != null) {
for (Property opp : spp.properties()) {
- hash = SchemaHash.ComputeHash(namespace, opp, hash);
+ hash = SchemaHash.computeHash(namespace, opp, hash);
}
}
return hash;
@@ -120,11 +126,11 @@ public final class SchemaHash {
MapPropertyType spp = (MapPropertyType) p;
if (spp.keys() != null) {
- hash = SchemaHash.ComputeHash(namespace, spp.keys(), hash);
+ hash = SchemaHash.computeHash(namespace, spp.keys(), hash);
}
if (spp.values() != null) {
- hash = SchemaHash.ComputeHash(namespace, spp.values(), hash);
+ hash = SchemaHash.computeHash(namespace, spp.values(), hash);
}
return hash;
@@ -135,7 +141,7 @@ public final class SchemaHash {
SetPropertyType spp = (SetPropertyType) p;
if (spp.items() != null) {
- hash = SchemaHash.ComputeHash(namespace, spp.items(), hash);
+ hash = SchemaHash.computeHash(namespace, spp.items(), hash);
}
return hash;
@@ -147,7 +153,7 @@ public final class SchemaHash {
if (spp.items() != null) {
for (PropertyType pt : spp.items()) {
- hash = SchemaHash.ComputeHash(namespace, pt, hash);
+ hash = SchemaHash.computeHash(namespace, pt, hash);
}
}
@@ -160,7 +166,7 @@ public final class SchemaHash {
if (spp.items() != null) {
for (PropertyType pt : spp.items()) {
- hash = SchemaHash.ComputeHash(namespace, pt, hash);
+ hash = SchemaHash.computeHash(namespace, pt, hash);
}
}
@@ -169,58 +175,39 @@ public final class SchemaHash {
if (p instanceof UdtPropertyType) {
+ Stream<Schema> schemaStream = namespace.schemas().stream();
UdtPropertyType spp = (UdtPropertyType) p;
- Schema udtSchema;
+ Optional<Schema> udtSchema;
if (spp.schemaId() == SchemaId.INVALID) {
- udtSchema = namespace.schemas().Find(s = > s.name() == spp.name());
+ udtSchema = schemaStream.filter(schema -> schema.name().equals(spp.name())).findFirst();
} else {
- udtSchema = namespace.schemas().Find(s = > s.schemaId() == spp.schemaId());
- checkState(udtSchema.name().equals(spp.name()), "Ambiguous schema reference: '%s:%s'", spp.name(), spp.schemaId());
+ udtSchema = schemaStream.filter(schema -> schema.schemaId().equals(spp.schemaId())).findFirst();
+ udtSchema.ifPresent(schema -> checkState(schema.name().equals(spp.name()),
+ "Ambiguous schema reference: '%s:%s'", spp.name(), spp.schemaId()));
}
- checkState(udtSchema != null, "Cannot resolve schema reference '{0}:{1}'", spp.name(), spp.schemaId());
- return SchemaHash.ComputeHash(namespace, udtSchema, hash);
+ checkState(udtSchema.isPresent(), "Cannot resolve schema reference '%s:%s'", spp.name(), spp.schemaId());
+ return SchemaHash.computeHash(namespace, udtSchema.get(), hash);
}
throw new IllegalStateException(lenientFormat("unrecognized property type: %s", p.getClass()));
}
- // TODO: C# TO JAVA CONVERTER: Methods returning tuples are not converted by C# to Java Converter:
- // private static(ulong low, ulong high) ComputeHash(Namespace ns, PartitionKey key, (ulong low, ulong high) seed
- // = default)
- // {
- // (ulong low, ulong high) hash = seed;
- // if (key != null)
- // {
- // hash = Murmur3Hash.Hash128(key.Path, hash);
- // }
- //
- // return hash;
- // }
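+ /**
+ * Folds a {@link PartitionKey} into a schema hash by hashing its path.
+ *
+ * @param namespace The {@link Namespace} within which the key is defined.
+ * @param key The partition key, which may be {@code null}.
+ * @param seed The seed with which to initialize.
+ * @return The updated 128-bit hash.
+ */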
+ private static HashCode128 computeHash(Namespace namespace, PartitionKey key, HashCode128 seed) {
+ return key == null ? seed : Murmur3Hash.Hash128(key.path(), seed);
+ }
- // TODO: C# TO JAVA CONVERTER: Methods returning tuples are not converted by C# to Java Converter:
- // private static(ulong low, ulong high) ComputeHash(Namespace ns, PrimarySortKey key, (ulong low, ulong high) seed = default)
- // {
- // (ulong low, ulong high) hash = seed;
- // if (key != null)
- // {
- // hash = Murmur3Hash.Hash128(key.Path, hash);
- // hash = Murmur3Hash.Hash128(key.Direction, hash);
- // }
- //
- // return hash;
- // }
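+ /**
+ * Folds a {@link PrimarySortKey} into a schema hash by hashing its path and sort direction.
+ *
+ * @param namespace The {@link Namespace} within which the key is defined.
+ * @param key The primary sort key, which may be {@code null}.
+ * @param seed The seed with which to initialize.
+ * @return The updated 128-bit hash.
+ */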
+ private static HashCode128 computeHash(Namespace namespace, PrimarySortKey key, HashCode128 seed) {
+ HashCode128 hash = seed;
+ if (key != null) {
+ hash = Murmur3Hash.Hash128(key.path(), hash);
+ hash = Murmur3Hash.Hash128(key.direction().value(), hash);
+ }
+ return hash;
+ }
- // TODO: C# TO JAVA CONVERTER: Methods returning tuples are not converted by C# to Java Converter:
- // private static(ulong low, ulong high) ComputeHash(Namespace ns, StaticKey key, (ulong low, ulong high) seed = default)
- // {
- // (ulong low, ulong high) hash = seed;
- // if (key != null)
- // {
- // hash = Murmur3Hash.Hash128(key.Path, hash);
- // }
- //
- // return hash;
- // }
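+ /**
+ * Folds a {@link StaticKey} into a schema hash by hashing its path.
+ *
+ * @param ns The {@link Namespace} within which the key is defined.
+ * @param key The static key, which may be {@code null}.
+ * @param seed The seed with which to initialize.
+ * @return The updated 128-bit hash.
+ */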
+ private static HashCode128 computeHash(Namespace ns, StaticKey key, HashCode128 seed) {
+ return key == null ? seed : Murmur3Hash.Hash128(key.path(), seed);
+ }
}
\ No newline at end of file
diff --git a/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/SchemaValidator.java b/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/SchemaValidator.java
index 3ef1979..2bdd4d9 100644
--- a/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/SchemaValidator.java
+++ b/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/SchemaValidator.java
@@ -18,6 +18,259 @@ import static com.google.common.base.Strings.lenientFormat;
public final class SchemaValidator {
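+ /**
+ * Validates the {@link Schema schemas} of a {@link Namespace} and the references between them.
+ * <p>
+ * Typical usage (an illustrative sketch; {@link Namespace#parse} applies this validation to each
+ * successfully parsed namespace):
+ * <pre>{@code
+ * Namespace.parse(json).ifPresent(SchemaValidator::validate);
+ * }</pre>
+ *
+ * @param namespace The {@link Namespace} to validate.
+ * @throws SchemaException if a schema constraint is violated.
+ */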
+ public static void validate(@NonNull final Namespace namespace) {
+
+ checkNotNull(namespace, "expected non-null namespace");
+
+ final int initialCapacity = namespace.schemas().size();
+
+ final Map<SchemaIdentification, Schema> nameDupCheck = new HashMap<>(initialCapacity);
+ final Map<String, Integer> nameVersioningCheck = new HashMap<>(initialCapacity);
+ final Map<SchemaId, Schema> idDupCheck = new HashMap<>(initialCapacity);
+
+ for (Schema schema : namespace.schemas()) {
+
+ SchemaIdentification identification = SchemaIdentification.of(schema.name(), schema.schemaId());
+
+ Assert.isValidIdentifier(identification.name(), "Schema name");
+ Assert.isValidSchemaId(identification.id(), "Schema id");
+ Assert.duplicateCheck(identification.id(), schema, idDupCheck, "Schema id", "Namespace");
+ Assert.duplicateCheck(identification, schema, nameDupCheck, "Schema reference", "Namespace");
+
+ // Count the versions of each schema by name.
+
+ final int count = nameVersioningCheck.getOrDefault(schema.name(), 0);
+ nameVersioningCheck.put(schema.name(), count + 1);
+ }
+
+ // Enable id-less Schema references for all types with a unique version in the namespace
+
+ for (Schema schema : namespace.schemas()) {
+ if (nameVersioningCheck.get(schema.name()) == 1) {
+ Assert.duplicateCheck(SchemaIdentification.of(schema.name(), SchemaId.INVALID), schema, nameDupCheck,
+ "Schema reference", "Namespace");
+ }
+ }
+
+ SchemaValidator.visit(namespace, nameDupCheck, idDupCheck);
+ }
+
+ /**
+ * Visit an entire namespace and validate its constraints.
+ *
+ * @param namespace The {@link Namespace} to validate.
+ * @param schemas A map from schema names within the namespace to their schemas.
+ * @param ids A map from schema ids within the namespace to their schemas.
+ */
+ private static void visit(
+ Namespace namespace, Map<SchemaIdentification, Schema> schemas, Map<SchemaId, Schema> ids) {
+ for (Schema schema : namespace.schemas()) {
+ SchemaValidator.visit(schema, schemas, ids);
+ }
+ }
+
+ /**
+ * Visit a single schema and validate its constraints.
+ *
+ * @param schema The {@link Schema} to validate.
+ * @param schemas A map from schema names within the namespace to their schemas.
+ * @param ids A map from schema ids within the namespace to their schemas.
+ */
+ private static void visit(Schema schema, Map<SchemaIdentification, Schema> schemas, Map<SchemaId, Schema> ids) {
+
+ Assert.areEqual(
+ schema.type(), TypeKind.SCHEMA, lenientFormat("The type of a schema MUST be %s: %s", TypeKind.SCHEMA,
+ schema.type())
+ );
+
+ HashMap<String, Property> pathDupCheck = new HashMap<>(schema.properties().size());
+
+ for (Property p : schema.properties()) {
+ Assert.duplicateCheck(p.path(), p, pathDupCheck, "Property path", "Schema");
+ }
+
+ for (PartitionKey pk : schema.partitionKeys()) {
+ Assert.exists(pk.path(), pathDupCheck, "Partition key column", "Schema");
+ }
+
+ for (PrimarySortKey ps : schema.primarySortKeys()) {
+ Assert.exists(ps.path(), pathDupCheck, "Primary sort key column", "Schema");
+ }
+
+ for (StaticKey sk : schema.staticKeys()) {
+ Assert.exists(sk.path(), pathDupCheck, "Static key column", "Schema");
+ }
+
+ for (Property p : schema.properties()) {
+ SchemaValidator.visit(p, schema, schemas, ids);
+ }
+ }
+
+ private static void visit(
+ Property p, Schema s, Map<SchemaIdentification, Schema> schemas, Map<SchemaId, Schema> ids) {
+
+ Assert.isValidIdentifier(p.path(), "Property path");
+ SchemaValidator.visit(p.propertyType(), null, schemas, ids);
+ }
+
+ private static void visit(
+ PropertyType p,
+ PropertyType parent,
+ Map<SchemaIdentification, Schema> schemas,
+ Map<SchemaId, Schema> ids) {
+
+ if (p instanceof PrimitivePropertyType) {
+ PrimitivePropertyType pp = (PrimitivePropertyType) p;
+ Assert.isTrue(pp.length() >= 0, "Length MUST be non-negative");
+ if (parent != null) {
+ Assert.areEqual(pp.storage(), StorageKind.SPARSE, "Nested fields MUST have storage kind SPARSE");
+ }
+ return;
+ }
+ if (p instanceof ArrayPropertyType) {
+ ArrayPropertyType ap = (ArrayPropertyType) p;
+ if (ap.items() != null) {
+ SchemaValidator.visit(ap.items(), p, schemas, ids);
+ }
+ return;
+ }
+ if (p instanceof MapPropertyType) {
+ MapPropertyType mp = (MapPropertyType) p;
+ SchemaValidator.visit(mp.keys(), p, schemas, ids);
+ SchemaValidator.visit(mp.values(), p, schemas, ids);
+ return;
+ }
+ if (p instanceof SetPropertyType) {
+ SetPropertyType sp = (SetPropertyType) p;
+ SchemaValidator.visit(sp.items(), p, schemas, ids);
+ return;
+ }
+ if (p instanceof TaggedPropertyType) {
+ TaggedPropertyType gp = (TaggedPropertyType) p;
+ for (PropertyType item : gp.items()) {
+ SchemaValidator.visit(item, p, schemas, ids);
+ }
+ return;
+ }
+ if (p instanceof TuplePropertyType) {
+ TuplePropertyType tp = (TuplePropertyType) p;
+ for (PropertyType item : tp.items()) {
+ SchemaValidator.visit(item, p, schemas, ids);
+ }
+ return;
+ }
+ if (p instanceof ObjectPropertyType) {
+ ObjectPropertyType op = (ObjectPropertyType) p;
+ Map<String, Property> pathDupCheck = new HashMap<>(op.properties().size());
+ for (Property nested : op.properties()) {
+ Assert.duplicateCheck(nested.path(), nested, pathDupCheck, "Property path", "Object");
+ SchemaValidator.visit(nested.propertyType(), p, schemas, ids);
+ }
+ return;
+ }
+ if (p instanceof UdtPropertyType) {
+ UdtPropertyType up = (UdtPropertyType) p;
+ Assert.exists(SchemaIdentification.of(up.name(), up.schemaId()), schemas, "Schema reference", "Namespace");
+ if (up.schemaId() != SchemaId.INVALID) {
+ Schema s = Assert.exists(up.schemaId(), ids, "Schema id", "Namespace");
+ Assert.areEqual(up.name(), s.name(), lenientFormat("Schema name '%s' does not match the name of " +
+ "schema with id '%s': %s", up.name(), up.schemaId(), s.name()));
+ }
+ return;
+ }
+ throw new IllegalStateException(lenientFormat("Unknown property type: %s", p.getClass()));
+ }
+
+ private static class Assert {
+
+ /**
+ * Validate two values are equal.
+ *
+ * @param <T> Type of the values to compare.
+ * @param left The left value to compare.
+ * @param right The right value to compare.
+ * @param message Diagnostic message if the comparison fails.
+ */
+ static <T> void areEqual(T left, T right, String message) {
+ if (!left.equals(right)) {
+ throw new SchemaException(message);
+ }
+ }
+
+ /**
+ * Validate {@code key} does not already appear within the given scope.
+ *
+ * @param <TKey> The type of the keys within the scope.
+ * @param <TValue> The type of the values within the scope.
+ * @param key The key to check.
+ * @param value The value to add to the scope if there is no duplicate.
+ * @param scope The set of existing values within the scope.
+ * @param label Diagnostic label describing {@code key}.
+ * @param scopeLabel Diagnostic label describing {@code scope}.
+ */
+ static <TKey, TValue> void duplicateCheck(
+ TKey key, TValue value, Map<TKey, TValue> scope, String label, String scopeLabel) {
+ if (scope.containsKey(key)) {
+ throw new SchemaException(lenientFormat("%s must be unique within a %s: %s", label, scopeLabel, key));
+ }
+ scope.put(key, value);
+ }
+
+ /**
+ * Validate {@code key} does appear within the given scope.
+ *
+ * @param <TKey> The type of the keys within the scope.
+ * @param <TValue> The type of the values within the scope.
+ * @param key The key to check.
+ * @param scope The set of existing values within the scope.
+ * @param label Diagnostic label describing {@code key}.
+ * @param scopeLabel Diagnostic label describing {@code scope}.
+ */
+ static <TKey, TValue> TValue exists(TKey key, Map<TKey, TValue> scope, String label, String scopeLabel) {
+ TValue value = scope.get(key);
+ if (value == null) {
+ throw new SchemaException(lenientFormat("%s must exist within a %s: %s", label, scopeLabel, key));
+ }
+ return value;
+ }
+
+ /**
+ * Validate a predicate is true.
+ *
+ * @param predicate The predicate to check.
+ * @param message Diagnostic message if the comparison fails.
+ */
+ static void isTrue(boolean predicate, String message) {
+ if (!predicate) {
+ throw new SchemaException(message);
+ }
+ }
+
+ /**
+ * Validate {@code identifier} contains only characters valid in a schema identifier.
+ *
+ * @param identifier The identifier to check.
+ * @param label Diagnostic label describing {@code identifier}.
+ */
+ static void isValidIdentifier(String identifier, String label) {
+ if (Strings.isNullOrEmpty(identifier)) {
+ throw new SchemaException(lenientFormat("%s must be a valid identifier: %s", label, identifier));
+ }
+ }
+
+ /**
+ * Validate a {@link SchemaId}.
+ *
+ * @param id The id to check.
+ * @param label Diagnostic label describing {@code id}.
+ */
+ static void isValidSchemaId(SchemaId id, String label) {
+ if (id == SchemaId.INVALID) {
+ throw new SchemaException(lenientFormat("%s cannot be 0", label));
+ }
+ }
+ }
+
private static class SchemaIdentification implements Comparable<SchemaIdentification> {
private final SchemaId id;
@@ -62,244 +315,13 @@ public final class SchemaValidator {
return this.name;
}
+ public static SchemaIdentification of(@NonNull String name, @NonNull SchemaId id) {
+ return new SchemaIdentification(name, id);
+ }
+
@Override
public String toString() {
return Json.toString(this);
}
-
- public static SchemaIdentification of(@NonNull String name, @NonNull SchemaId id) {
- return new SchemaIdentification(name, id);
- }
- }
-
- public static void Validate(@NonNull final Namespace namespace) {
-
- checkNotNull(namespace, "expected non-null namespace");
-
- final int initialCapacity = namespace.schemas().size();
-
- final Map nameDupCheck = new HashMap<>(initialCapacity);
- final Map nameVersioningCheck = new HashMap<>(initialCapacity);
- final Map idDupCheck = new HashMap<>(initialCapacity);
-
- for (Schema schema : namespace.schemas()) {
-
- SchemaIdentification identification = SchemaIdentification.of(schema.name(), schema.schemaId());
-
- Assert.isValidIdentifier(identification.name(), "Schema name");
- Assert.isValidSchemaId(identification.id(), "Schema id");
- Assert.duplicateCheck(identification.id(), schema, idDupCheck, "Schema id", "Namespace");
- Assert.duplicateCheck(identification, schema, nameDupCheck, "Schema reference", "Namespace");
-
- // Count the versions of each schema by name.
- nameVersioningCheck.TryGetValue(schema.name(), out int count);
- nameVersioningCheck.put(schema.name(), count + 1);
- }
-
- // Enable id-less Schema references for all types with a unique version in the namespace
-
- for (Schema schema : namespace.schemas()) {
- if (nameVersioningCheck.get(schema.name()) == 1) {
- Assert.duplicateCheck(SchemaIdentification.of(schema.name(), SchemaId.INVALID), schema, nameDupCheck, "Schema reference", "Namespace");
- }
- }
-
- SchemaValidator.visit(namespace, nameDupCheck, idDupCheck);
- }
-
- /// Visit an entire namespace and validate its constraints.
- /// The to validate.
- /// A map from schema names within the namespace to their schemas.
- /// A map from schema ids within the namespace to their schemas.
- private static void visit(Namespace ns, Map schemas, Map ids) {
- for (Schema schema : ns.schemas()) {
- SchemaValidator.visit(schema, schemas, ids);
- }
- }
-
- /// Visit a single schema and validate its constraints.
- /// The to validate.
- /// A map from schema names within the namespace to their schemas.
- /// A map from schema ids within the namespace to their schemas.
- private static void visit(Schema schema, Map schemas, Map ids) {
-
- Assert.areEqual(
- schema.type(), TypeKind.SCHEMA, lenientFormat("The type of a schema MUST be %s: %s", TypeKind.SCHEMA, schema.type())
- );
-
- HashMap pathDupCheck = new HashMap<>(schema.properties().size());
-
- for (Property p : schema.properties()) {
- Assert.duplicateCheck(p.path(), p, pathDupCheck, "Property path", "Schema");
- }
-
- for (PartitionKey pk : schema.partitionKeys()) {
- Assert.exists(pk.path(), pathDupCheck, "Partition key column", "Schema");
- }
-
- for (PrimarySortKey ps : schema.primarySortKeys()) {
- Assert.exists(ps.path(), pathDupCheck, "Primary sort key column", "Schema");
- }
-
- for (StaticKey sk : schema.staticKeys()) {
- Assert.exists(sk.path(), pathDupCheck, "Static key column", "Schema");
- }
-
- for (Property p : schema.properties()) {
- SchemaValidator.visit(p, schema, schemas, ids);
- }
- }
-
- private static void visit(
- Property p, Schema s, Map schemas, Map ids) {
-
- Assert.isValidIdentifier(p.path(), "Property path");
- SchemaValidator.visit(p.propertyType(), null, schemas, ids);
- }
-
- private static void visit(
- PropertyType p,
- PropertyType parent,
- Map schemas,
- Map ids)
- {
- switch (p)
- {
- case PrimitivePropertyType pp:
- Assert.isTrue(pp.Length >= 0, "Length MUST be positive");
- if (parent != null)
- {
- Assert.areEqual(pp.Storage, StorageKind.Sparse, $"Nested fields MUST have storage {StorageKind.Sparse}");
- }
-
- break;
- case ArrayPropertyType ap:
- if (ap.Items != null)
- {
- SchemaValidator.visit(ap.Items, p, schemas, ids);
- }
-
- break;
- case MapPropertyType mp:
- SchemaValidator.visit(mp.Keys, p, schemas, ids);
- SchemaValidator.visit(mp.Values, p, schemas, ids);
- break;
- case SetPropertyType sp:
- SchemaValidator.visit(sp.Items, p, schemas, ids);
- break;
- case TaggedPropertyType gp:
- for (PropertyType item : gp.Items)
- {
- SchemaValidator.visit(item, p, schemas, ids);
- }
-
- break;
- case TuplePropertyType tp:
- for (PropertyType item : tp.Items)
- {
- SchemaValidator.visit(item, p, schemas, ids);
- }
-
- break;
- case ObjectPropertyType op:
- Map pathDupCheck = new HashMap<>(op.Properties.Count);
- for (Property nested : op.Properties)
- {
- Assert.duplicateCheck(nested.path(), nested, pathDupCheck, "Property path", "Object");
- SchemaValidator.visit(nested.propertyType(), p, schemas, ids);
- }
-
- break;
- case UdtPropertyType up:
- Assert.exists((up.Name, up.SchemaId), schemas, "Schema reference", "Namespace");
- if (up.SchemaId != SchemaId.Invalid)
- {
- Schema s = Assert.exists(up.SchemaId, ids, "Schema id", "Namespace");
- Assert.areEqual(
- up.Name,
- s.Name,
- $"Schema name '{up.Name}' does not match the name of schema with id '{up.SchemaId}': {s.Name}");
- }
-
- break;
- default:
- Contract.Fail("Unknown property type");
- break;
- }
- }
-
- private static class Assert {
-
- /// Validate does not already appear within the given scope.
- /// The type of the keys within the scope.
- /// The type of the values within the scope.
- /// The key to check.
- /// The value to add to the scope if there is no duplicate.
- /// The set of existing values within the scope.
- /// Diagnostic label describing .
- /// Diagnostic label describing .
- static void duplicateCheck(
- TKey key, TValue value, Map scope, String label, String scopeLabel) {
- if (scope.containsKey(key)) {
- throw new SchemaException(lenientFormat("%s must be unique within a %s: %s", label, scopeLabel, key));
- }
- scope.put(key, value);
- }
-
- /// Validate does appear within the given scope.
- /// The type of the keys within the scope.
- /// The type of the values within the scope.
- /// The key to check.
- /// The set of existing values within the scope.
- /// Diagnostic label describing .
- /// Diagnostic label describing .
- static TValue exists(TKey key, Map scope, String label, String scopeLabel) {
- TValue value = scope.get(key);
- if (value == null) {
- throw new SchemaException(lenientFormat("%s must exist within a %s: %s", label, scopeLabel, key));
- }
- return value;
- }
-
- /// Validate two values are equal.
- /// Type of the values to compare.
- /// The left value to compare.
- /// The right value to compare.
- /// Diagnostic message if the comparison fails.
- static void areEqual(T left, T right, String message) {
- if (!left.equals(right)) {
- throw new SchemaException(message);
- }
- }
-
- /// Validate a predicate is true.
- /// The predicate to check.
- /// Diagnostic message if the comparison fails.
- static void isTrue(boolean predicate, String message) {
- if (!predicate) {
- throw new SchemaException(message);
- }
- }
-
- ///
- /// Validate contains only characters valid in a schema
- /// identifier.
- ///
- /// The identifier to check.
- /// Diagnostic label describing .
- static void isValidIdentifier(String identifier, String label) {
- if (Strings.isNullOrEmpty(identifier)) {
- throw new SchemaException(lenientFormat("%s must be a valid identifier: %s", label, identifier));
- }
- }
-
- /// Validate is a valid .
- /// The id to check.
- /// Diagnostic label describing .
- static void isValidSchemaId(SchemaId id, String label) {
- if (id == SchemaId.INVALID) {
- throw new SchemaException(lenientFormat("%s cannot be 0", label));
- }
- }
}
}
\ No newline at end of file
diff --git a/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/StorageKind.java b/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/StorageKind.java
index 0658b7d..627df17 100644
--- a/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/StorageKind.java
+++ b/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/StorageKind.java
@@ -3,6 +3,13 @@
package com.azure.data.cosmos.serialization.hybridrow.schemas;
+import com.google.common.base.Suppliers;
+import it.unimi.dsi.fastutil.ints.Int2ObjectArrayMap;
+import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
+
+import java.util.Arrays;
+import java.util.function.Supplier;
+
/**
* Describes the storage placement for primitive properties.
*/
@@ -41,31 +48,26 @@ public enum StorageKind {
*/
VARIABLE(2);
- public static final int SIZE = java.lang.Integer.SIZE;
- private static java.util.HashMap mappings;
+ public static final int BYTES = Integer.BYTES;
+
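+ // Reverse-lookup table, built lazily because an enum constructor may not reference the enum's own
+ // static fields (the constants are constructed before the other static members are initialized).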
+ private static final Supplier<Int2ObjectMap<StorageKind>> mappings = Suppliers.memoize(() -> {
+ StorageKind[] storageKinds = StorageKind.class.getEnumConstants();
+ int[] values = new int[storageKinds.length];
+ Arrays.setAll(values, index -> storageKinds[index].value);
+ return new Int2ObjectArrayMap<>(values, storageKinds);
+ });
+
private int value;
StorageKind(int value) {
this.value = value;
- mappings().put(value, this);
}
public int value() {
return this.value;
}
- public static StorageKind forValue(int value) {
- return mappings().get(value);
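+ /**
+ * Returns the {@link StorageKind} with the given {@linkplain #value() value}.
+ *
+ * @param value The value to look up.
+ * @return The matching {@link StorageKind}, or {@code null} if there is no match.
+ */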
+ public static StorageKind from(int value) {
+ return mappings.get().get(value);
}
-
- private static java.util.HashMap mappings() {
- if (mappings == null) {
- synchronized (StorageKind.class) {
- if (mappings == null) {
- mappings = new java.util.HashMap<>();
- }
- }
- }
- return mappings;
- }
-}
\ No newline at end of file
+}
diff --git a/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/TypeKind.java b/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/TypeKind.java
index 5e5b0cf..6cdcf1e 100644
--- a/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/TypeKind.java
+++ b/java/src/main/java/com/azure/data/cosmos/serialization/hybridrow/schemas/TypeKind.java
@@ -5,9 +5,13 @@ package com.azure.data.cosmos.serialization.hybridrow.schemas;
// TODO: DANOBLE: Fixup JSON-serialized naming for agreement with the dotnet code
+import com.google.common.base.Suppliers;
import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
+import java.util.Arrays;
+import java.util.function.Supplier;
+
/**
* Describes the logical type of a property.
*/
@@ -185,12 +189,18 @@ public enum TypeKind {
ANY(30);
public static final int BYTES = Integer.BYTES;
- private static Int2ObjectMap mappings;
+
+ private static final Supplier<Int2ObjectMap<TypeKind>> mappings = Suppliers.memoize(() -> {
+ TypeKind[] typeKinds = TypeKind.class.getEnumConstants();
+ int[] values = new int[typeKinds.length];
+ Arrays.setAll(values, index -> typeKinds[index].value);
+ return new Int2ObjectOpenHashMap<>(values, typeKinds);
+ });
+
private int value;
TypeKind(int value) {
this.value = value;
- mappings().put(value, this);
}
public int value() {
@@ -198,17 +208,6 @@ public enum TypeKind {
}
public static TypeKind from(int value) {
- return mappings().get(value);
- }
-
- private static Int2ObjectMap mappings() {
- if (mappings == null) {
- synchronized (TypeKind.class) {
- if (mappings == null) {
- mappings = new Int2ObjectOpenHashMap<>();
- }
- }
- }
- return mappings;
+ return mappings.get().get(value);
}
}