summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--libmysqld/CMakeLists.txt1
-rw-r--r--mysql-test/main/func_json.result1901
-rw-r--r--mysql-test/main/func_json.test1603
-rw-r--r--mysql-test/main/json_debug_nonembedded.result7
-rw-r--r--mysql-test/main/json_debug_nonembedded.test9
-rw-r--r--sql/CMakeLists.txt2
-rw-r--r--sql/item_create.cc22
-rw-r--r--sql/item_jsonfunc.cc117
-rw-r--r--sql/item_jsonfunc.h31
-rw-r--r--sql/json_schema.cc2819
-rw-r--r--sql/json_schema.h827
-rw-r--r--sql/json_schema_helper.cc102
-rw-r--r--sql/json_schema_helper.h30
-rw-r--r--sql/json_table.cc16
-rw-r--r--sql/mysqld.cc3
-rw-r--r--sql/share/errmsg-utf8.txt4
-rw-r--r--sql/sql_parse.h16
17 files changed, 7477 insertions, 33 deletions
diff --git a/libmysqld/CMakeLists.txt b/libmysqld/CMakeLists.txt
index 5d5cc35e1be..c49ff603a47 100644
--- a/libmysqld/CMakeLists.txt
+++ b/libmysqld/CMakeLists.txt
@@ -65,6 +65,7 @@ SET(SQL_EMBEDDED_SOURCES emb_qcache.cc libmysqld.c lib_sql.cc
../sql/item_geofunc.cc ../sql/item_row.cc ../sql/item_strfunc.cc
../sql/item_subselect.cc ../sql/item_sum.cc ../sql/item_timefunc.cc
../sql/item_xmlfunc.cc ../sql/item_jsonfunc.cc
+ ../sql/json_schema.cc ../sql/json_schema_helper.cc
../sql/key.cc ../sql/lock.cc ../sql/log.cc
../sql/log_event.cc ../sql/log_event_server.cc
../sql/mf_iocache.cc ../sql/my_decimal.cc
diff --git a/mysql-test/main/func_json.result b/mysql-test/main/func_json.result
index 213cc798a6d..8b2b30cea8f 100644
--- a/mysql-test/main/func_json.result
+++ b/mysql-test/main/func_json.result
@@ -2562,3 +2562,1904 @@ JSON_EXTRACT('{ "my-key": 1 }', '$.my-key')
#
# End of 10.9 Test
#
+# Beginning of 11.1 test
+#
+# MDEV-27128: Implement JSON Schema Validation FUNCTION
+#
+# Checking annotations
+SET @schema_number= '{
+ "title" : "This is title 1",
+ "description":"this is description 1",
+ "$comment":"This is comment 1",
+ "type":"number",
+ "deprecated":true,
+ "readOnly":true,
+ "writeOnly":false,
+ "example":[2],
+ "default":4,
+ "$schema": "https://json-schema.org/draft/2019-09/json-schema-validation.html#rfc.section.9.5"
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_number, '"string1"');
+JSON_SCHEMA_VALID(@schema_number, '"string1"')
+0
+SELECT JSON_SCHEMA_VALID(@schema_number, '2');
+JSON_SCHEMA_VALID(@schema_number, '2')
+1
+# Checking empty schema with empty json document
+SET @schema= '{}';
+SELECT JSON_SCHEMA_VALID(@schema, '');
+JSON_SCHEMA_VALID(@schema, '')
+1
+SELECT JSON_SCHEMA_VALID(@schema, '{}');
+JSON_SCHEMA_VALID(@schema, '{}')
+1
+SELECT JSON_SCHEMA_VALID(@schema, '[]');
+JSON_SCHEMA_VALID(@schema, '[]')
+1
+SELECT JSON_SCHEMA_VALID(@schema, 'null');
+JSON_SCHEMA_VALID(@schema, 'null')
+1
+SELECT JSON_SCHEMA_VALID(@schema, 'true');
+JSON_SCHEMA_VALID(@schema, 'true')
+1
+SELECT JSON_SCHEMA_VALID(@schema, 'false');
+JSON_SCHEMA_VALID(@schema, 'false')
+1
+# Checking scalar against json schema
+# Checking boolean and null
+SET @schema_true= '{ "type": "boolean"}';
+SELECT JSON_SCHEMA_VALID(@schema_true, 'true');
+JSON_SCHEMA_VALID(@schema_true, 'true')
+1
+SELECT JSON_SCHEMA_VALID(@schema_true, 'false');
+JSON_SCHEMA_VALID(@schema_true, 'false')
+1
+SELECT JSON_SCHEMA_VALID(@schema_true, 'null');
+JSON_SCHEMA_VALID(@schema_true, 'null')
+0
+SET @schema_true= '{ "type": "boolean",
+ "const":"false"}';
+SELECT JSON_SCHEMA_VALID(@schema_true, 'true');
+JSON_SCHEMA_VALID(@schema_true, 'true')
+0
+SET @schema_true= '{ "type": "boolean",
+ "enum":[true, null, false]}';
+SELECT JSON_SCHEMA_VALID(@schema_true, 'true');
+JSON_SCHEMA_VALID(@schema_true, 'true')
+1
+SET @schema_true= '{ "type": "boolean",
+ "enum": [null, false]}';
+SELECT JSON_SCHEMA_VALID(@schema_true, 'true');
+JSON_SCHEMA_VALID(@schema_true, 'true')
+0
+SET @schema_true= '{ "type": "boolean",
+ "enum": [null, true]}';
+SELECT JSON_SCHEMA_VALID(@schema_true, 'true');
+JSON_SCHEMA_VALID(@schema_true, 'true')
+1
+SET @schema_true= '{ "type": "boolean",
+ "const":"false"}';
+SELECT JSON_SCHEMA_VALID(@schema_true, 'true');
+JSON_SCHEMA_VALID(@schema_true, 'true')
+0
+# Type can be more than one
+SET @schema= '
+ {
+ "type":["string", "number","array"]
+ }';
+SELECT JSON_SCHEMA_VALID(@schema, '{"key1":"val1"}');
+JSON_SCHEMA_VALID(@schema, '{"key1":"val1"}')
+0
+SELECT JSON_SCHEMA_VALID(@schema, '"abc"');
+JSON_SCHEMA_VALID(@schema, '"abc"')
+1
+SELECT JSON_SCHEMA_VALID(@schema, '3.14');
+JSON_SCHEMA_VALID(@schema, '3.14')
+1
+# Checking number
+SET @schema_number= '{
+ "maximum":7,
+ "minimum": 3,
+ "multipleOf":3
+}';
+SELECT JSON_SCHEMA_VALID(@schema_number, '2');
+JSON_SCHEMA_VALID(@schema_number, '2')
+0
+SET @schema_number= '{
+ "type": "number",
+ "maximum":13,
+ "minimum": 4,
+ "multipleOf":3,
+ "exclusiveMaximum": 9,
+ "exclusiveMinimum":4
+}';
+SELECT JSON_SCHEMA_VALID(@schema_number, '2');
+JSON_SCHEMA_VALID(@schema_number, '2')
+0
+SELECT JSON_SCHEMA_VALID(@schema_number, '6');
+JSON_SCHEMA_VALID(@schema_number, '6')
+1
+SELECT JSON_SCHEMA_VALID(@schema_number, '9');
+JSON_SCHEMA_VALID(@schema_number, '9')
+0
+SELECT JSON_SCHEMA_VALID(@schema_number, '5');
+JSON_SCHEMA_VALID(@schema_number, '5')
+0
+SET @schema_number= '{
+ "type": "number",
+ "maximum":100,
+ "minimum": 0,
+ "enum": [1, 2, "3", [4, 5, 6], {"key1":"val1"}]
+}';
+SELECT JSON_SCHEMA_VALID(@schema_number, 1);
+JSON_SCHEMA_VALID(@schema_number, 1)
+1
+SELECT JSON_SCHEMA_VALID(@schema_number, 3);
+JSON_SCHEMA_VALID(@schema_number, 3)
+0
+SET @schema_number= '{
+ "type":"number",
+ "maximum":10,
+ "const":2
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_number, '3');
+JSON_SCHEMA_VALID(@schema_number, '3')
+0
+SELECT JSON_SCHEMA_VALID(@schema_number, '2');
+JSON_SCHEMA_VALID(@schema_number, '2')
+1
+# Checking string
+# checking format keyword. (not validating for now)
+SET @schema_string= '{
+ "type": "string",
+ "format":"date-time"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_date-time"');
+JSON_SCHEMA_VALID(@schema_string, '"not_date-time"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"date"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_date"');
+JSON_SCHEMA_VALID(@schema_string, '"not_date"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"time"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_time"');
+JSON_SCHEMA_VALID(@schema_string, '"not_time"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"duration"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_duration"');
+JSON_SCHEMA_VALID(@schema_string, '"not_duration"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"email"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_email"');
+JSON_SCHEMA_VALID(@schema_string, '"not_email"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"idn-email"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_idn-email"');
+JSON_SCHEMA_VALID(@schema_string, '"not_idn-email"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"hostname"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_hostname"');
+JSON_SCHEMA_VALID(@schema_string, '"not_hostname"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"idn-hostname"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_idn-hostname"');
+JSON_SCHEMA_VALID(@schema_string, '"not_idn-hostname"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"ipv4"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_ipv4"');
+JSON_SCHEMA_VALID(@schema_string, '"not_ipv4"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"ipv6"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_ipv6"');
+JSON_SCHEMA_VALID(@schema_string, '"not_ipv6"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"uri"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_uri"');
+JSON_SCHEMA_VALID(@schema_string, '"not_uri"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"uri-reference"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_uri-reference"');
+JSON_SCHEMA_VALID(@schema_string, '"not_uri-reference"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"iri"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_iri"');
+JSON_SCHEMA_VALID(@schema_string, '"not_iri"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"iri-reference"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_iri-reference"');
+JSON_SCHEMA_VALID(@schema_string, '"not_iri-reference"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"uuid"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_uuid"');
+JSON_SCHEMA_VALID(@schema_string, '"not_uuid"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"json-pointer"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_json-pointer"');
+JSON_SCHEMA_VALID(@schema_string, '"not_json-pointer"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"relative-json-pointer"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_relative-json-pointer"');
+JSON_SCHEMA_VALID(@schema_string, '"not_relative-json-pointer"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "format":"regex"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_regex"');
+JSON_SCHEMA_VALID(@schema_string, '"not_regex"')
+1
+# Validating other string keywords
+SET @schema_string= '{
+ "type": "string",
+ "maxLength":7,
+ "minLength": 4
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"foobar"');
+JSON_SCHEMA_VALID(@schema_string, '"foobar"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "maxLength": 10,
+ "minLength": 8
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"foobar"');
+JSON_SCHEMA_VALID(@schema_string, '"foobar"')
+0
+SELECT JSON_SCHEMA_VALID(@schema_string, '"foobar123"');
+JSON_SCHEMA_VALID(@schema_string, '"foobar123"')
+1
+SET @schema_string= '{
+ "type": "string",
+ "maxLength": 10,
+ "minLength": 3,
+ "const": "foobar"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"foobar123"');
+JSON_SCHEMA_VALID(@schema_string, '"foobar123"')
+0
+SET @schema_string= '{
+ "type": "string",
+ "enum": ["red", "green", "blue"]
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"green"');
+JSON_SCHEMA_VALID(@schema_string, '"green"')
+1
+SELECT JSON_SCHEMA_VALID(@schema_string, '"orange"');
+JSON_SCHEMA_VALID(@schema_string, '"orange"')
+0
+SET @string_schema= '{
+ "type":"string",
+ "pattern":"ab+c"
+ }';
+SELECT JSON_SCHEMA_VALID(@string_schema, '"abc"');
+JSON_SCHEMA_VALID(@string_schema, '"abc"')
+1
+# Validating non-scalar
+# Validating array
+SET @schema_array= '{"type":"array"}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, {"key1":"val1"}]');
+JSON_SCHEMA_VALID(@schema_array, '[1, 2, {"key1":"val1"}]')
+1
+SET @schema_array= '{"type":"array",
+ "maxItems": 4,
+ "minItems": 2}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, {"key1":"val1"}]');
+JSON_SCHEMA_VALID(@schema_array, '[1, 2, {"key1":"val1"}]')
+1
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, false, null, "foobar"]');
+JSON_SCHEMA_VALID(@schema_array, '[1, 2, false, null, "foobar"]')
+0
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1]');
+JSON_SCHEMA_VALID(@schema_array, '[1]')
+0
+SET @schema_array= '{"maxItems": 4,
+ "minItems": 2}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, {"key1":"val1"}]');
+JSON_SCHEMA_VALID(@schema_array, '[1, 2, {"key1":"val1"}]')
+1
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, false, null, "foobar"]');
+JSON_SCHEMA_VALID(@schema_array, '[1, 2, false, null, "foobar"]')
+0
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2]');
+JSON_SCHEMA_VALID(@schema_array, '[1, 2]')
+1
+SET @schema_array= '{
+ "type":"array",
+ "items": {"type":"number"},
+ "maxItems": 4,
+ "minItems": 2}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3]');
+JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3]')
+1
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, "foobar"]');
+JSON_SCHEMA_VALID(@schema_array, '[1, 2, "foobar"]')
+0
+SET @schema_array= '{"type":"array",
+ "maxItems": 4,
+ "minItems": 2,
+ "const": [1, 2, 3, 4]}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, "foobar"]');
+JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, "foobar"]')
+0
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, 4]');
+JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, 4]')
+1
+SET @schema_array= '{"type":"array",
+ "enum":[[1,2,3], [4,5,6], [7,8,9]]}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[4,5,6]');
+JSON_SCHEMA_VALID(@schema_array, '[4,5,6]')
+1
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1,5,7]');
+JSON_SCHEMA_VALID(@schema_array, '[1,5,7]')
+0
+SET @schema_array= '{
+ "type": "array",
+ "uniqueItems":true
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, 2.0]');
+JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, 2.0]')
+0
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, 4.0]');
+JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, 4.0]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "contains": {
+ "type": "number"
+ },
+ "minContains": 2,
+ "maxContains": 3
+}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '["string1", "string2", "string3", 1, 2, 3, 4]');
+JSON_SCHEMA_VALID(@schema_array, '["string1", "string2", "string3", 1, 2, 3, 4]')
+0
+SELECT JSON_SCHEMA_VALID(@schema_array, '["string1", "string2", "string3", 1, 2, 3]');
+JSON_SCHEMA_VALID(@schema_array, '["string1", "string2", "string3", 1, 2, 3]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1",2]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1",2]')
+1
+SELECT JSON_SCHEMA_VALID(@schema_array, '[2, "string1",2]');
+JSON_SCHEMA_VALID(@schema_array, '[2, "string1",2]')
+0
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "items":true
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1",2]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1",2]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "items":false
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1",2]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1",2]')
+0
+# Checking objects
+SET @schema_object= '{"type":"object",
+ "properties":
+ {
+ "number1":{
+ "type":"number",
+ "maximum":12,
+ "minimum":1
+ },
+ "string1": {
+ "type":"string",
+ "maxLength":10,
+ "minLength": 4
+ },
+ "array1": {"type":"array",
+ "maxItems": 4,
+ "minItems": 2}
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_object, '{"number1":10, "string1":"foobar","array1":[1,2,3]}');
+JSON_SCHEMA_VALID(@schema_object, '{"number1":10, "string1":"foobar","array1":[1,2,3]}')
+1
+SELECT JSON_SCHEMA_VALID(@schema_object, '{"number1":10, "string1":"foobar","array1":[1,2,3,4,5]}');
+JSON_SCHEMA_VALID(@schema_object, '{"number1":10, "string1":"foobar","array1":[1,2,3,4,5]}')
+0
+SET @schema_obj= '{
+ "type": "object",
+ "properties": {
+ "number1":{"type":"number"},
+ "string1":{"type":"string"},
+ "array1":{"type":"array"}
+ },
+ "dependentRequired": {
+ "number1":["string1"]
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_obj, '{"array1":[1,2,3], "number1":2, "string1":"abc"}');
+JSON_SCHEMA_VALID(@schema_obj, '{"array1":[1,2,3], "number1":2, "string1":"abc"}')
+1
+SELECT JSON_SCHEMA_VALID(@schema_obj, '{"array1":[1,2,3], "number1":2}');
+JSON_SCHEMA_VALID(@schema_obj, '{"array1":[1,2,3], "number1":2}')
+0
+SET @schema_obj= '{"type":"object",
+ "properties":
+ {
+ "number1":{
+ "type":"number",
+ "maximum":12,
+ "minimum":1
+ },
+ "key1" : {
+ "type":"object",
+ "properties": {
+ "key2" :{
+ "type":"string"
+ }
+ }
+ }
+ },
+ "enum": [{"number1":3, "key1":{"key2":"string1"}}, {"number1":5, "key1":{"key2":"string3"}}, {"number1":7, "key1":{"key2":"string5"}}]
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_obj, '{"number1":5, "key1":{"key2":1}}');
+JSON_SCHEMA_VALID(@schema_obj, '{"number1":5, "key1":{"key2":1}}')
+0
+SELECT JSON_SCHEMA_VALID(@schema_obj, '{"number1":5, "key1":{"key2":"string1"}}');
+JSON_SCHEMA_VALID(@schema_obj, '{"number1":5, "key1":{"key2":"string1"}}')
+0
+SELECT JSON_SCHEMA_VALID(@schema_obj, '{"number1":5, "key1":{"key2":"string7"}}');
+JSON_SCHEMA_VALID(@schema_obj, '{"number1":5, "key1":{"key2":"string7"}}')
+0
+SET @schema_obj= '{"type":"object",
+ "properties":
+ {
+ "number1":{
+ "type":"number",
+ "maximum":12,
+ "minimum":1
+ },
+ "obj1" : {
+ "type":"object",
+ "properties": {
+ "obj1_1":{
+ "type":"string"
+ },
+ "obj1_2": {
+ "type":"array"
+ }
+ }
+ },
+ "obj2" : {
+ "type":"object",
+ "properties" : {
+ "obj2_1":{
+ "type":"number"
+ }
+ }
+ }
+ },
+ "required":["number1", "obj2"]
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}}');
+JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}}')
+0
+SELECT JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}, "obj2":{"obj2_1":7}}');
+JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}, "obj2":{"obj2_1":7}}')
+1
+SET @schema_obj= '{"type":"object",
+ "properties":
+ {
+ "number1":{
+ "type":"number",
+ "maximum":12,
+ "minimum":1
+ },
+ "obj1" : {
+ "type":"object",
+ "properties": {
+ "obj1_1":{
+ "type":"string"
+ },
+ "obj1_2": {
+ "type":"array"
+ }
+ }
+ },
+ "obj2" : {
+ "type":"object",
+ "properties" : {
+ "obj2_1":{
+ "type":"number"
+ }
+ }
+ }
+ },
+ "required":["number1", "obj2"],
+ "const": {"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}, "obj2":{"obj2_1":7}}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}, "obj2":{"obj2_1":7}}');
+JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}, "obj2":{"obj2_1":7}}')
+1
+SELECT JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}}');
+JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}}')
+0
+SET @schema_obj= '{"type":"object",
+ "properties":
+ {
+ "number1":{
+ "type":"number",
+ "maximum":12,
+ "minimum":1
+ },
+ "obj1" : {
+ "type":"object",
+ "properties": {
+ "obj1_1":{
+ "type":"string"
+ },
+ "obj1_2": {
+ "type":"array"
+ }
+ }
+ },
+ "obj2" : {
+ "type":"object",
+ "properties" : {
+ "obj2_1":{
+ "type":"number"
+ }
+ }
+ }
+ },
+ "maxProperties": 5,
+ "minProperties":2
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}, "obj2":{"obj2_1":7}}');
+JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}, "obj2":{"obj2_1":7}}')
+1
+SELECT JSON_SCHEMA_VALID(@schema_obj,'{"number1":1, "number2":2, "number3":3, "number4":4, "number5":5, "number6":6}');
+JSON_SCHEMA_VALID(@schema_obj,'{"number1":1, "number2":2, "number3":3, "number4":4, "number5":5, "number6":6}')
+0
+SET @schema_obj= '{"type":"object",
+ "properties":
+ {
+ "number1":{
+ "type":"number",
+ "maximum":12,
+ "minimum":1
+ },
+ "obj1" : {
+ "type":"object",
+ "properties": {
+ "obj1_1":{
+ "type":"string"
+ },
+ "obj1_2": {
+ "type":"array"
+ }
+ }
+ }
+ },
+ "maxProperties": 3,
+ "minProperties":1,
+ "additionalProperties":false
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}, "obj2":"string2"}');
+JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}, "obj2":"string2"}')
+0
+# Demonstrating that irrelavent keywords for a type and irrelavent type
+# are ignored, and none of the keywords are mandatory, including "type".
+SET @schema_properties= '{
+ "properties" : {
+ "number1":{ "maximum":10 },
+ "string1" : { "maxLength": 3}
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_properties, '{ "number1":25, "string1":"ab" }');
+JSON_SCHEMA_VALID(@schema_properties, '{ "number1":25, "string1":"ab" }')
+0
+SELECT JSON_SCHEMA_VALID(@schema_properties, '{ "number1":10, "string1":"ab" }');
+JSON_SCHEMA_VALID(@schema_properties, '{ "number1":10, "string1":"ab" }')
+1
+SET @schema_properties= '{
+ "properties" : {
+ "number1":{ "maximum":10 },
+ "string1" : { "maxLength": 3},
+ "obj1" : {
+ "properties":{
+ "number2": {"minimum":8},
+ "array2": {"uniqueItems":true}
+ }
+ }
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_properties, '{ "number1":2, "string1":"ab", "obj1":{"number2":2, "array2":[1,2,3]} }');
+JSON_SCHEMA_VALID(@schema_properties, '{ "number1":2, "string1":"ab", "obj1":{"number2":2, "array2":[1,2,3]} }')
+0
+SELECT JSON_SCHEMA_VALID(@schema_properties, '{ "number1":2, "string1":"ab", "obj1":{"number2":10, "array2":[1,2,3]} }');
+JSON_SCHEMA_VALID(@schema_properties, '{ "number1":2, "string1":"ab", "obj1":{"number2":10, "array2":[1,2,3]} }')
+1
+SELECT JSON_SCHEMA_VALID(@schema_properties, '{ "number1":2, "string1":"ab", "obj1":{"array2":[1,2,3]} }');
+JSON_SCHEMA_VALID(@schema_properties, '{ "number1":2, "string1":"ab", "obj1":{"array2":[1,2,3]} }')
+1
+SELECT JSON_SCHEMA_VALID(@schema_properties, '{ "number1":2, "string1":"ab", "obj1":{"number2":10, "array2":[1,2,3,2]} }');
+JSON_SCHEMA_VALID(@schema_properties, '{ "number1":2, "string1":"ab", "obj1":{"number2":10, "array2":[1,2,3,2]} }')
+0
+SET @schema_num= '{
+ "maximum":10,
+ "minimum":2
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_num, '5');
+JSON_SCHEMA_VALID(@schema_num, '5')
+1
+SELECT JSON_SCHEMA_VALID(@schema_num, '"abc"');
+JSON_SCHEMA_VALID(@schema_num, '"abc"')
+1
+SET @schema_str= '{
+ "maxLength":5,
+ "minLength":2,
+ "pattern":"a+bc"
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_str, '"abc"');
+JSON_SCHEMA_VALID(@schema_str, '"abc"')
+1
+SELECT JSON_SCHEMA_VALID(@schema_str, '"abcadef"');
+JSON_SCHEMA_VALID(@schema_str, '"abcadef"')
+0
+SELECT JSON_SCHEMA_VALID(@schema_str, '"bc"');
+JSON_SCHEMA_VALID(@schema_str, '"bc"')
+0
+SET @schema_arr= '{
+ "uniqueItems":true,
+ "items":{"type":"string"},
+ "maximum":10
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_arr,'["abc", "bcd", "cde"]');
+JSON_SCHEMA_VALID(@schema_arr,'["abc", "bcd", "cde"]')
+1
+SELECT JSON_SCHEMA_VALID(@schema_arr,'["abc", "bcd", "abc"]');
+JSON_SCHEMA_VALID(@schema_arr,'["abc", "bcd", "abc"]')
+0
+SELECT JSON_SCHEMA_VALID(@schema_arr,'["abc", "bcd", 1]');
+JSON_SCHEMA_VALID(@schema_arr,'["abc", "bcd", 1]')
+0
+SET @schema_const1= '{"const":2}';
+SELECT JSON_SCHEMA_VALID(@schema_const1, '2');
+JSON_SCHEMA_VALID(@schema_const1, '2')
+1
+SELECT JSON_SCHEMA_VALID(@schema_const1, '"abc"');
+JSON_SCHEMA_VALID(@schema_const1, '"abc"')
+0
+SET @schema_const2= '{"const":true}';
+SELECT JSON_SCHEMA_VALID(@schema_const2,'true');
+JSON_SCHEMA_VALID(@schema_const2,'true')
+1
+SELECT JSON_SCHEMA_VALID(@schema_const2,'false');
+JSON_SCHEMA_VALID(@schema_const2,'false')
+0
+SET @schema_enum='{"enum":[1,2,3,"abc", [4,5,6]]}';
+SELECT JSON_SCHEMA_VALID(@schema_enum,'[4,5,6]');
+JSON_SCHEMA_VALID(@schema_enum,'[4,5,6]')
+1
+SELECT JSON_SCHEMA_VALID(@schema_enum,'4');
+JSON_SCHEMA_VALID(@schema_enum,'4')
+0
+SELECT JSON_SCHEMA_VALID(@schema_enum,'"abc"');
+JSON_SCHEMA_VALID(@schema_enum,'"abc"')
+1
+SET @schema_required='{"required":["num1","str1", "arr1"]}';
+SELECT JSON_SCHEMA_VALID(@schema_required,'{"num1":1, "str1":"abc", "arr1":[1,2,3]}');
+JSON_SCHEMA_VALID(@schema_required,'{"num1":1, "str1":"abc", "arr1":[1,2,3]}')
+1
+SELECT JSON_SCHEMA_VALID(@schema_required,'{"num1":1, "arr1":[1,2,3]}');
+JSON_SCHEMA_VALID(@schema_required,'{"num1":1, "arr1":[1,2,3]}')
+0
+SELECT JSON_SCHEMA_VALID(@schema_required,'"abcd"');
+JSON_SCHEMA_VALID(@schema_required,'"abcd"')
+1
+SET @schema_dep_required='{
+ "dependentRequired": {
+ "num1":["num2","num3"],
+ "str1":["str2","str3"]
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_dep_required,'{"num1":1, "num2":"abc", "num3":[1,2,3]}');
+JSON_SCHEMA_VALID(@schema_dep_required,'{"num1":1, "num2":"abc", "num3":[1,2,3]}')
+1
+SELECT JSON_SCHEMA_VALID(@schema_dep_required,'{"num1":1, "num2":"abc", "num3":[1,2,3], "str1":"abcd"}');
+JSON_SCHEMA_VALID(@schema_dep_required,'{"num1":1, "num2":"abc", "num3":[1,2,3], "str1":"abcd"}')
+0
+SELECT JSON_SCHEMA_VALID(@schema_dep_required,'{"num1":1, "num2":"abc", "num3":[1,2,3], "arr1":[1,2,3]}');
+JSON_SCHEMA_VALID(@schema_dep_required,'{"num1":1, "num2":"abc", "num3":[1,2,3], "arr1":[1,2,3]}')
+1
+# Checking syntax error
+SET @invalid_schema= '{"type":"object"
+ "properties":{
+ "number1": {"type":"number"},
+ "obj2": {"type":"object",
+ "properties": {
+ "key1": {"type":"number"}
+ }
+ }
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@invalid_schema, '{"number1":3, "obj2":{"key1":3}}');
+JSON_SCHEMA_VALID(@invalid_schema, '{"number1":3, "obj2":{"key1":3}}')
+1
+Warnings:
+Warning 4038 Syntax error in JSON text in argument 2 to function 'json_schema_valid' at position 45
+SET @invalid_json= '{"type":"array",
+ "maxItems": 4,
+ "minItems": 2,
+ "const": [1, 2, 3, 4]}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, 4');
+JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, 4')
+0
+SET @schema_string= '{
+ "type": "string",
+ "maxLength":-2
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"abcxyz"');
+ERROR HY000: Invalid value for keyword maxLength
+SET @schema_number= '{
+ "type": "number",
+ "multipleOf":-3
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_number, '3');
+ERROR HY000: Invalid value for keyword multipleOf
+SET @schema_object= '{
+ "type": "object",
+ "properties":{"num1":{"type":"number"}},
+ "required":{}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_object, '{"num1":2}');
+ERROR HY000: Invalid value for keyword required
+SET @schema_string= '{
+ "type": "string",
+ "maxLength":-10
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"str1"');
+ERROR HY000: Invalid value for keyword maxLength
+SET @schema_number= '{"type":"numberz"}';
+SELECT JSON_SCHEMA_VALID(@schema_number, '"string1"');
+ERROR HY000: Invalid value for keyword type
+# Using JSON_SCHEMA_VALID() as a constraint validation to insert into table
+CREATE TABLE str_table (val_str JSON CHECK(JSON_SCHEMA_VALID('{
+ "type":"string",
+ "maxLength":5,
+ "minLength":2,
+ "enum":["ab", "cd", "abc", "def", "abcdef"]
+ }', val_str)));
+CREATE TABLE num_table(val_num JSON CHECK(JSON_SCHEMA_VALID('{
+ "type":"number",
+ "minimum":10,
+ "maximum":30,
+ "exclusiveMinimum":11,
+ "exclusiveMaximum":29,
+ "multipleOf":5,
+ "const":15
+ }', val_num)));
+CREATE TABLE true_table(val_true JSON CHECK(JSON_SCHEMA_VALID('{
+ "type":"boolean",
+ "enum":[true, false, null]
+ }', val_true)));
+CREATE TABLE false_table (val_false JSON CHECK(JSON_SCHEMA_VALID('{
+ "type":"boolean"
+ }', val_false)));
+CREATE TABLE null_table (val_null JSON CHECK(JSON_SCHEMA_VALID('{
+ "type":"null"
+ }', val_null)));
+CREATE TABLE arr_table (val_arr JSON CHECK(JSON_SCHEMA_VALID('{
+ "type":"array",
+ "uniqueItems":true,
+ "maxItems":5,
+ "minItems":1,
+ "items":true,
+ "prefixItems":[{"type":"number"}]
+ }', val_arr)));
+CREATE TABLE obj_table(val_obj JSON CHECK(JSON_SCHEMA_VALID('{
+ "type":"object",
+ "properties": {
+ "number1":{
+ "type":"number",
+ "maximum":5,
+ "const":4
+ },
+ "string1":{
+ "type":"string",
+ "maxLength":5,
+ "minLength":3
+ },
+ "object1":{
+ "type":"object",
+ "properties":{
+ "key1": {"type":"string"},
+ "key2":{"type":"array"},
+ "key3":{"type":"number", "minimum":3}
+ },
+ "dependentRequired": { "key1":["key3"] }
+ }
+ },
+ "required":["number1","object1"]
+ }', val_obj)));
+INSERT INTO str_table VALUES ('"ab"'), ('"cd"'), ('"abc"'), ('"def"');
+INSERT INTO str_table VALUES ('"feb"');
+ERROR 23000: CONSTRAINT `str_table.val_str` failed for `test`.`str_table`
+INSERT INTO str_table VALUES('"abcdef"');
+ERROR 23000: CONSTRAINT `str_table.val_str` failed for `test`.`str_table`
+INSERT INTO str_table VALUES('"fedcba"');
+ERROR 23000: CONSTRAINT `str_table.val_str` failed for `test`.`str_table`
+SELECT * FROM str_table;
+val_str
+"ab"
+"cd"
+"abc"
+"def"
+INSERT INTO num_table values('15');
+INSERT INTO num_table values('25');
+ERROR 23000: CONSTRAINT `num_table.val_num` failed for `test`.`num_table`
+SELECT * FROM num_table;
+val_num
+15
+INSERT INTO true_table VALUES ('true');
+SELECT * FROM true_table;
+val_true
+true
+INSERT INTO false_table VALUES('false');
+SELECT * FROM false_table;
+val_false
+false
+INSERT INTO arr_table VALUES ('[10, 2, "abc"]');
+INSERT INTO arr_table VALUES('[100]');
+INSERT INTO arr_table VALUES ('["str1", 2, "abc", 2.0]');
+ERROR 23000: CONSTRAINT `arr_table.val_arr` failed for `test`.`arr_table`
+SELECT * FROM arr_table;
+val_arr
+[10, 2, "abc"]
+[100]
+INSERT INTO obj_table VALUES('{"number1":4, "string1":"abcd", "object1":{"key1":"val1", "key2":[1,2,3, "string1"], "key3":4}}');
+INSERT INTO obj_table VALUES('{"number1":3, "string1":"abcd", "object1":{"key1":"val1", "key2":[1,2,3, "string1"], "key3":4}}');
+ERROR 23000: CONSTRAINT `obj_table.val_obj` failed for `test`.`obj_table`
+INSERT INTO obj_table VALUES('{"number1":3, "string1":"abcd"}');
+ERROR 23000: CONSTRAINT `obj_table.val_obj` failed for `test`.`obj_table`
+INSERT INTO obj_table VALUES('{"number1":3, "string1":"abcd", "object1":{"key1":"val1", "key2":[1,2,3, "string1"]}');
+ERROR 23000: CONSTRAINT `obj_table.val_obj` failed for `test`.`obj_table`
+SELECT * FROM obj_table;
+val_obj
+{"number1":4, "string1":"abcd", "object1":{"key1":"val1", "key2":[1,2,3, "string1"], "key3":4}}
+DROP TABLE str_table, num_table, true_table, false_table, null_table, arr_table, obj_table;
+# array validation
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "additionalItems" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"},
+ "items":{"type":"array"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], "2", "string"]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], "2", "string"]')
+0
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "2", "string"]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "2", "string"]')
+0
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], [1,2]]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], [1,2]]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "additionalItems" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"},
+ "items": true
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], "2", "string"]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], "2", "string"]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "additionalItems" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"},
+ "items": false
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], "2", "string"]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], "2", "string"]')
+0
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "additionalItems" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"},
+ "items": [{"type":"string"}]
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], "2", "string"]');
+ERROR HY000: Invalid value for keyword items
+# Removing items
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "additionalItems" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], "2", "string"]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], "2", "string"]')
+0
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "additionalItems" : true,
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "additionalItems" : false,
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]')
+0
+# Using items in place of additionalItems
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "items" : false,
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]')
+0
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "items" : true,
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "items" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "items" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]')
+0
+# Removing items and additionalItems both
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "unevaluatedItems": {"type":"number"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]')
+0
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "unevaluatedItems": true
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "unevaluatedItems": false
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]')
+0
+# Removing items, additionalItems, unevaluatedItems
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]');
+JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]')
+1
+# Removing prefixItems
+SET @schema_array= '{
+ "type": "array",
+ "items": { "type": "number", "maximum": 10, "minimum":3},
+ "additionalItems" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, 6,"2", "string"]');
+JSON_SCHEMA_VALID(@schema_array, '[5, 6,"2", "string"]')
+0
+SET @schema_array= '{
+ "type": "array",
+ "items": { "type": "number", "maximum": 10, "minimum":3},
+ "additionalItems" : {"type":"string"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, 6,"2", "string"]');
+JSON_SCHEMA_VALID(@schema_array, '[5, 6,"2", "string"]')
+0
+SET @schema_array= '{
+ "type": "array",
+ "items": true,
+ "additionalItems" : {"type":"string"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, 6,"2", "string"]');
+JSON_SCHEMA_VALID(@schema_array, '[5, 6,"2", "string"]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "items": false,
+ "additionalItems" : {"type":"string"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, 6,"2", "string"]');
+JSON_SCHEMA_VALID(@schema_array, '[5, 6,"2", "string"]')
+0
+SELECT JSON_SCHEMA_VALID(@schema_array, '[]');
+JSON_SCHEMA_VALID(@schema_array, '[]')
+1
+# Removing prefixItems and additionalItems
+SET @schema_array= '{
+ "type": "array",
+ "items": {"type":"string"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 2]');
+JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 2]')
+0
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2"]');
+JSON_SCHEMA_VALID(@schema_array, '["str1", "str2"]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "items": {"type":"string"},
+ "unevaluatedItems": {"type":"number"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 2]');
+JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 2]')
+0
+# removing prefixItems, additionalItems and unevaluatedItems
+SET @schema_array= '{
+ "type": "array",
+ "items": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 2]');
+JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 2]')
+0
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2"]');
+JSON_SCHEMA_VALID(@schema_array, '["str1", "str2"]')
+1
+# Checking that additionalItems alone has no effect on schema without items/prefixItems
+# regardless existence of unevaluatedItems
+SET @schema_array= '{
+ "type": "array",
+ "additionalItems": {"type":"string"},
+ "unevaluatedItems": {"type":"number"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 1]');
+JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 1]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "additionalItems": true,
+ "unevaluatedItems": {"type":"number"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 1]');
+JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 1]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "additionalItems": false,
+ "unevaluatedItems": {"type":"number"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 1]');
+JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 1]')
+1
+# checking that unevaluatedItems alone can have effect on schema validation
+SET @schema_array= '{
+ "type": "array",
+ "unevaluatedItems": {"type":"number"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1]');
+JSON_SCHEMA_VALID(@schema_array, '[1]')
+1
+SET @schema_array= '{
+ "type": "array",
+ "unevaluatedItems": {"type":"number"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, "str1"]');
+JSON_SCHEMA_VALID(@schema_array, '[1, "str1"]')
+0
+SET @schema_array= '{
+ "type": "array",
+ "unevaluatedItems": false
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, "str1"]');
+JSON_SCHEMA_VALID(@schema_array, '[1, "str1"]')
+0
+SELECT JSON_SCHEMA_VALID(@schema_array, '[]');
+JSON_SCHEMA_VALID(@schema_array, '[]')
+1
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1,2,3]');
+JSON_SCHEMA_VALID(@schema_array, '[1,2,3]')
+0
+# Object validation
+SET @property_names= '{
+ "PropertyNames":{
+ "pattern": "^I_"
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@property_names, '{"I_int1":3, "O_ob1":{"key1":"val1"}}');
+JSON_SCHEMA_VALID(@property_names, '{"I_int1":3, "O_ob1":{"key1":"val1"}}')
+0
+SELECT JSON_SCHEMA_VALID(@property_names, '{"I_int1":3, "I_ob1":{"key1":"val1"}}');
+JSON_SCHEMA_VALID(@property_names, '{"I_int1":3, "I_ob1":{"key1":"val1"}}')
+1
+# checking that when a match is found in properties or patternProperties, it must validate and
+# validation result affects the schema. If key is not found in properties or patternProperties, and
+# additionalProperties exists, it must validate regardless of existence or value for unevaluatedProperties
+# and the result of validation with additionalProperties affects result of whole schema
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "additionalProperties":{
+ "type":"array",
+ "maxItems":5
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+1
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": [1,2,3], "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": [1,2,3], "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+0
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":[1,2,3], "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":[1,2,3], "S_":"abc", "some_prop1":[1,2,3]}')
+0
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":"string1"}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":"string1"}')
+0
+# removing patternProperties to check that validation falls back on additionalProperties and
+# existence of unevaluatedProperties still does not change anything because of existence of additional
+# properties
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "additionalProperties":{
+ "type":"array",
+ "maxItems":5
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+0
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":[1,2,3], "S_":[1,2,3], "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":[1,2,3], "S_":[1,2,3], "some_prop1":[1,2,3]}')
+1
+# Remvoing additionalProperties to check that validation falls back on unevaluatedProperties
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":10, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":10, "S_":"abc", "some_prop1":[1,2,3]}')
+0
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":10, "S_":"abc", "some_prop1":"str"}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":10, "S_":"abc", "some_prop1":"str"}')
+1
+# Removing unevaluatedProperties has no effect on result when additionalProperties is present
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "additionalProperties":{
+ "type":"array",
+ "maxItems":5
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+1
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":"str1"}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":"str1"}')
+0
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "additionalProperties": false
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+0
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "additionalProperties": true
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+1
+# Checking that in absence of additionalProperties, validation falls back on evaluatedProperties
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 3, "I_int":20, "S_":"abc", "some_prop1":"str1"}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 3, "I_int":20, "S_":"abc", "some_prop1":"str1"}')
+1
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 3, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 3, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+0
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 3, "I_int":"str2", "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 3, "I_int":"str2", "S_":"abc", "some_prop1":[1,2,3]}')
+0
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 3, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 3, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+0
+# Properties to check if patternProperties get evaluated but keys not found in patternProperties get evaluated against
+# additionalProperties regardless of existence of unevaluatedProperperties
+SET @object_schema= '{
+ "type":"object",
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "additionalProperties":{
+ "type":"array",
+ "maxItems":5
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+0
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":[1, 2, 3], "key2": [1, 2, 3], "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":[1, 2, 3], "key2": [1, 2, 3], "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+1
+SET @object_schema= '{
+ "type":"object",
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "additionalProperties":{
+ "type":"array",
+ "maxItems":5
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+0
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":[1, 2, 3], "key2": [1, 2, 3], "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":[1, 2, 3], "key2": [1, 2, 3], "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+1
+# Checking that in absence of additionalProperties and properties, the keys not found in patternProperties are
+# evaluated against unevaluatedProperties
+SET @object_schema= '{
+ "type":"object",
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+0
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": "str2", "I_int":20, "S_":"abc", "some_prop1":"str1"}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": "str2", "I_int":20, "S_":"abc", "some_prop1":"str1"}')
+1
+# checking that in absence of properties, additionalProperties and unevaluatedPropoerties, the keys that are
+# not found are considered validated.
+SET @object_schema= '{
+ "type":"object",
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+1
+# checking that additionalProperties are evaluated in absence of patternProperties and properties, regardless
+# of presence of unevaluatedProperties
+SET @object_schema= '{
+ "type":"object",
+ "additionalProperties":{
+ "type":"array",
+ "maxItems":5
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+0
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":[1,2,3], "key2": [1,2,3], "I_int":[1,2,3], "S_":[1,2,3], "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":[1,2,3], "key2": [1,2,3], "I_int":[1,2,3], "S_":[1,2,3], "some_prop1":[1,2,3]}')
+1
+SET @object_schema= '{
+ "type":"object",
+ "additionalProperties":{
+ "type":"array",
+ "maxItems":5
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}')
+0
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":[1,2,3], "key2": [1,2,3], "I_int":[1,2,3], "S_":[1,2,3], "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":[1,2,3], "key2": [1,2,3], "I_int":[1,2,3], "S_":[1,2,3], "some_prop1":[1,2,3]}')
+1
+# Checking that in absence of properties, patternProperties and additionalProperties, validation falls back on unevaluatedProperties
+SET @object_schema= '{
+ "type":"object",
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": "str1", "I_int":"str2", "S_":"abc", "some_prop1":"str3"}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": "str1", "I_int":"str2", "S_":"abc", "some_prop1":"str3"}')
+1
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":[1,2,3], "key2": [1,2,3], "I_int":[1,2,3], "S_":[1,2,3], "some_prop1":[1,2,3]}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":[1,2,3], "key2": [1,2,3], "I_int":[1,2,3], "S_":[1,2,3], "some_prop1":[1,2,3]}')
+0
+SET @object_schema= '{
+ "type":"object",
+ "unevaluatedProperties": false
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": "str1", "I_int":"str2", "S_":"abc", "some_prop1":"str3"}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": "str1", "I_int":"str2", "S_":"abc", "some_prop1":"str3"}')
+0
+SELECT JSON_SCHEMA_VALID(@object_schema, '{}');
+JSON_SCHEMA_VALID(@object_schema, '{}')
+1
+SET @object_schema= '{
+ "type":"object",
+ "unevaluatedProperties": true
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": "str1", "I_int":"str2", "S_":"abc", "some_prop1":"str3"}');
+JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": "str1", "I_int":"str2", "S_":"abc", "some_prop1":"str3"}')
+1
+SELECT JSON_SCHEMA_VALID(@object_schema, '{}');
+JSON_SCHEMA_VALID(@object_schema, '{}')
+1
+SET @json_schema_dependent_schemas='{
+ "type": "object",
+ "properties": {
+ "str1": { "type": "string" },
+ "num1": { "type": "number" }
+ },
+
+ "required": ["str1"],
+
+ "dependentSchemas": {
+ "num1": {
+ "properties": {
+ "str2": { "type": "string" }
+ },
+ "required": ["str2"]
+ }
+ }
+}';
+SELECT JSON_SCHEMA_VALID(@json_schema_dependent_schemas, '{ "str1": "str", "num1":4}');
+JSON_SCHEMA_VALID(@json_schema_dependent_schemas, '{ "str1": "str", "num1":4}')
+0
+SELECT JSON_SCHEMA_VALID(@json_schema_dependent_schemas, '{ "str1": "str"}');
+JSON_SCHEMA_VALID(@json_schema_dependent_schemas, '{ "str1": "str"}')
+1
+# Validating logic
+SET @not_schema= '{
+ "not":{
+ "maximum": 4
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@not_schema, '{"I_int1":3, "O_ob1":{"key1":"val1"}}');
+JSON_SCHEMA_VALID(@not_schema, '{"I_int1":3, "O_ob1":{"key1":"val1"}}')
+0
+SELECT JSON_SCHEMA_VALID(@not_schema, '3');
+JSON_SCHEMA_VALID(@not_schema, '3')
+0
+SELECT JSON_SCHEMA_VALID(@not_schema, '10');
+JSON_SCHEMA_VALID(@not_schema, '10')
+1
+SET @not_schema= '{
+ "not":{
+ "properties": {
+ "num1" : {"type":"number", "maximum":5},
+ "string1" : { "maxLength": 3}
+ }
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@not_schema, '{"num1":10, "string2":"abcd"}');
+JSON_SCHEMA_VALID(@not_schema, '{"num1":10, "string2":"abcd"}')
+1
+SELECT JSON_SCHEMA_VALID(@not_schema, '{"num1":2, "string2":"abcd"}');
+JSON_SCHEMA_VALID(@not_schema, '{"num1":2, "string2":"abcd"}')
+0
+SET @any_of_schema= '{
+ "anyOf":[{
+ "properties": {
+ "num1" : {"type":"number", "maximum":5},
+ "string1" : { "maxLength": 3}
+ }
+ },
+ {
+ "properties":{
+ "num1" : {"type":"number", "maximum": 1},
+ "string1" : { "maxLength":5}
+ }
+ }
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@any_of_schema, '{"num1":2, "string1":"abcd"}');
+JSON_SCHEMA_VALID(@any_of_schema, '{"num1":2, "string1":"abcd"}')
+0
+SELECT JSON_SCHEMA_VALID(@any_of_schema, '{"num1":2, "string1":"abc"}');
+JSON_SCHEMA_VALID(@any_of_schema, '{"num1":2, "string1":"abc"}')
+1
+SET @any_of_schema= '{
+ "anyOf": [
+ {"type":"number", "maximum":5},
+ {"type":"string"}
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@any_of_schema, '2');
+JSON_SCHEMA_VALID(@any_of_schema, '2')
+1
+SELECT JSON_SCHEMA_VALID(@any_of_schema, '6');
+JSON_SCHEMA_VALID(@any_of_schema, '6')
+0
+SELECT JSON_SCHEMA_VALID(@any_of_schema, '[1, 2, 3]');
+JSON_SCHEMA_VALID(@any_of_schema, '[1, 2, 3]')
+0
+SET @one_of_schema= '{
+ "oneOf":[{
+ "properties": {
+ "num1" : {"type":"number", "maximum":5},
+ "string1" : { "maxLength": 3}
+ }
+ },
+ {
+ "properties":{
+ "num1" : {"type":"number", "maximum": 1},
+ "string1" : { "maxLength":5}
+ }
+ }
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@one_of_schema, '{"num1":2, "string1":"abcd"}');
+JSON_SCHEMA_VALID(@one_of_schema, '{"num1":2, "string1":"abcd"}')
+0
+SELECT JSON_SCHEMA_VALID(@one_of_schema, '{"num1":2, "string1":"abc"}');
+JSON_SCHEMA_VALID(@one_of_schema, '{"num1":2, "string1":"abc"}')
+1
+SET @one_of_schema= '{
+ "oneOf": [
+ {"type":"number", "maximum":5},
+ {"type":"number", "maximum":3}
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@one_of_schema, '2');
+JSON_SCHEMA_VALID(@one_of_schema, '2')
+0
+SELECT JSON_SCHEMA_VALID(@one_of_schema, '4');
+JSON_SCHEMA_VALID(@one_of_schema, '4')
+1
+SET @all_of_schema= '{
+ "allOf":[{
+ "properties": {
+ "num1" : {"type":"number", "maximum":5},
+ "string1" : { "maxLength": 3}
+ }
+ },
+ {
+ "properties":{
+ "num1" : {"type":"number", "maximum": 1},
+ "string1" : { "maxLength":5}
+ }
+ }
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@all_of_schema, '{"num1":2, "string1":"abcd"}');
+JSON_SCHEMA_VALID(@all_of_schema, '{"num1":2, "string1":"abcd"}')
+0
+SET @all_of_schema= '{
+ "allOf":[
+ {
+ "properties":{
+ "num1": {"type":"number"},
+ "string1": {"type":"string"}
+ }
+ },
+ {
+ "properties":{
+ "num1": {"maximum":10},
+ "string1": {"maxLength":5}
+ }
+ }
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@all_of_schema, '{"num1":5, "string1":"abc"}');
+JSON_SCHEMA_VALID(@all_of_schema, '{"num1":5, "string1":"abc"}')
+1
+SELECT JSON_SCHEMA_VALID(@all_of_schema, '{"num1":5, "string1":"foobar"}');
+JSON_SCHEMA_VALID(@all_of_schema, '{"num1":5, "string1":"foobar"}')
+0
+SET @condition_schema= '{
+ "if" : {"maximum":30, "multipleOf":3},
+ "then":{"minimum":5},
+ "else":{"minimum":10}
+ }';
+SELECT JSON_SCHEMA_VALID(@condition_schema, '6');
+JSON_SCHEMA_VALID(@condition_schema, '6')
+1
+SELECT JSON_SCHEMA_VALID(@condition_schema, '4');
+JSON_SCHEMA_VALID(@condition_schema, '4')
+0
+SELECT JSON_SCHEMA_VALID(@condition_schema, '13');
+JSON_SCHEMA_VALID(@condition_schema, '13')
+1
+SET @condition_schema= '{
+ "if" : {"maximum":30, "multipleOf":3}
+ }';
+SELECT JSON_SCHEMA_VALID(@condition_schema, '6');
+JSON_SCHEMA_VALID(@condition_schema, '6')
+1
+SELECT JSON_SCHEMA_VALID(@condition_schema, '7');
+JSON_SCHEMA_VALID(@condition_schema, '7')
+1
+SET @condition_schema= '{
+ "then":{"minimum":5},
+ "else":{"minimum":10}
+ }';
+SELECT JSON_SCHEMA_VALID(@condition_schema, '4');
+JSON_SCHEMA_VALID(@condition_schema, '4')
+1
+SELECT JSON_SCHEMA_VALID(@condition_schema, '11');
+JSON_SCHEMA_VALID(@condition_schema, '11')
+1
+# Checking unevaluatedProperperties with logical properties
+SET @all_of_unevaluated='{
+ "allOf": [
+ {
+ "type": "object",
+ "properties": {
+ "street_address": { "type": "string" },
+ "city": { "type": "string" },
+ "state": { "type": "string" }
+ },
+ "required": ["street_address", "city", "state"]
+ }
+ ],
+
+ "properties": {
+ "type": { "enum": ["residential", "business"] }
+ },
+ "required": ["type"],
+ "unevaluatedProperties": false
+}';
+SELECT JSON_SCHEMA_VALID(@all_of_unevaluated, '{
+ "street_address": "1600 Pennsylvania Avenue NW",
+ "city": "Washington",
+ "state": "DC",
+ "type": "business"
+}');
+JSON_SCHEMA_VALID(@all_of_unevaluated, '{
+ "street_address": "1600 Pennsylvania Avenue NW",
+ "city": "Washington",
+ "state": "DC",
+ "type": "business"
+}')
+1
+SET @all_of_unevaluated='{
+ "allOf": [
+ {
+ "type": "object",
+ "properties": {
+ "street_address": { "type": "string" },
+ "city": { "type": "string" },
+ "state": { "type": "string" }
+ },
+ "required": ["street_address", "city", "state"]
+ }
+ ],
+
+ "unevaluatedProperties": {"type":"number"}
+}';
+SELECT JSON_SCHEMA_VALID(@all_of_unevaluated, '{
+ "street_address": "1600 Pennsylvania Avenue NW",
+ "city": "Washington",
+ "state": "DC",
+ "type": "business"
+}');
+JSON_SCHEMA_VALID(@all_of_unevaluated, '{
+ "street_address": "1600 Pennsylvania Avenue NW",
+ "city": "Washington",
+ "state": "DC",
+ "type": "business"
+}')
+0
+SET @any_of_unevaluated='{
+ "anyOf": [
+ {
+ "type": "object",
+ "properties": {
+ "street_address": { "type": "string" },
+ "city": { "type": "string" },
+ "state": { "type": "string" }
+ },
+ "required": ["street_address", "city", "state"]
+ }
+ ],
+
+ "unevaluatedProperties": {"type":"number"}
+}';
+SELECT JSON_SCHEMA_VALID(@any_of_unevaluated, '{
+ "street_address": "1600 Pennsylvania Avenue NW",
+ "city": "Washington",
+ "state": "DC",
+ "type": "business"
+}');
+JSON_SCHEMA_VALID(@any_of_unevaluated, '{
+ "street_address": "1600 Pennsylvania Avenue NW",
+ "city": "Washington",
+ "state": "DC",
+ "type": "business"
+}')
+0
+SET @all_of_unevaluated='{
+ "allOf": [
+ {
+ "type": "array"
+ },
+ {
+ "maxItems":10
+ },
+ {
+ "prefixItems": [ {"type":"number"}, {"type":"string"}],
+ "additionalItems":{"type":"array"}
+ }
+ ],
+
+ "unevaluatedItems": {"type":"number"}
+}';
+SELECT JSON_SCHEMA_VALID(@all_of_unevaluated, '[1, "str", 2]');
+JSON_SCHEMA_VALID(@all_of_unevaluated, '[1, "str", 2]')
+0
+SET @all_of_unevaluated='{
+ "anyOf": [
+ {
+ "type": "array"
+ },
+ {
+ "maxItems":10
+ },
+ {
+ "prefixItems": [ {"type":"number"}, {"type":"string"}],
+ "additionalItems":{"type":"array"}
+ }
+ ],
+
+ "unevaluatedItems": {"type":"number"}
+}';
+SELECT JSON_SCHEMA_VALID(@all_of_unevaluated, '[1, "str", 2]');
+JSON_SCHEMA_VALID(@all_of_unevaluated, '[1, "str", 2]')
+1
+SET @all_of_unevaluated='{
+ "oneOf": [
+ {
+ "type": "array"
+ },
+ {
+ "maxItems":10
+ },
+ {
+ "prefixItems": [ {"type":"number"}, {"type":"string"}],
+ "additionalItems":{"type":"array"}
+ }
+ ],
+
+ "unevaluatedItems": {"type":"number"}
+}';
+SELECT JSON_SCHEMA_VALID(@all_of_unevaluated, '[1, "str", 2]');
+JSON_SCHEMA_VALID(@all_of_unevaluated, '[1, "str", 2]')
+0
+# Media string
+SET @schema_media_string= '{
+ "type": "string",
+ "contentMediaType": "text/html"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_media_string, '"str1"');
+JSON_SCHEMA_VALID(@schema_media_string, '"str1"')
+1
+SET @schema_reference= '{"$ref": "http://example.com/custom-email-validator.json#"}';
+SELECT JSON_SCHEMA_VALID(@schema_reference, '{}');
+ERROR HY000: $ref keyword is not supported
+SET @schema_reference= '{"$id": "http://example.com/custom-email-validator.json#"}';
+SELECT JSON_SCHEMA_VALID(@schema_reference, '{}');
+ERROR HY000: $id keyword is not supported
+SET @schema_reference= '{"$anchor": "http://example.com/custom-email-validator.json#"}';
+SELECT JSON_SCHEMA_VALID(@schema_reference, '{}');
+ERROR HY000: $anchor keyword is not supported
+SET @schema_reference= '{"$defs": "http://example.com/custom-email-validator.json#"}';
+SELECT JSON_SCHEMA_VALID(@schema_reference, '{}');
+ERROR HY000: $defs keyword is not supported
+# End of 11.1 test
diff --git a/mysql-test/main/func_json.test b/mysql-test/main/func_json.test
index 93d8c2a7d6a..7836a1f32ec 100644
--- a/mysql-test/main/func_json.test
+++ b/mysql-test/main/func_json.test
@@ -1755,3 +1755,1606 @@ SELECT JSON_EXTRACT('{ "my-key": 1 }', '$.my-key');
--echo #
--echo # End of 10.9 Test
--echo #
+
+--echo # Beginning of 11.1 test
+
+--echo #
+--echo # MDEV-27128: Implement JSON Schema Validation FUNCTION
+--echo #
+
+--echo # Checking annotations
+
+SET @schema_number= '{
+ "title" : "This is title 1",
+ "description":"this is description 1",
+ "$comment":"This is comment 1",
+ "type":"number",
+ "deprecated":true,
+ "readOnly":true,
+ "writeOnly":false,
+ "example":[2],
+ "default":4,
+ "$schema": "https://json-schema.org/draft/2019-09/json-schema-validation.html#rfc.section.9.5"
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_number, '"string1"');
+SELECT JSON_SCHEMA_VALID(@schema_number, '2');
+
+--echo # Checking empty schema with empty json document
+
+SET @schema= '{}';
+
+SELECT JSON_SCHEMA_VALID(@schema, '');
+SELECT JSON_SCHEMA_VALID(@schema, '{}');
+SELECT JSON_SCHEMA_VALID(@schema, '[]');
+SELECT JSON_SCHEMA_VALID(@schema, 'null');
+SELECT JSON_SCHEMA_VALID(@schema, 'true');
+SELECT JSON_SCHEMA_VALID(@schema, 'false');
+
+--echo # Checking scalar against json schema
+
+--echo # Checking boolean and null
+
+SET @schema_true= '{ "type": "boolean"}';
+SELECT JSON_SCHEMA_VALID(@schema_true, 'true');
+SELECT JSON_SCHEMA_VALID(@schema_true, 'false');
+SELECT JSON_SCHEMA_VALID(@schema_true, 'null');
+
+SET @schema_true= '{ "type": "boolean",
+ "const":"false"}';
+SELECT JSON_SCHEMA_VALID(@schema_true, 'true');
+SET @schema_true= '{ "type": "boolean",
+ "enum":[true, null, false]}';
+SELECT JSON_SCHEMA_VALID(@schema_true, 'true');
+SET @schema_true= '{ "type": "boolean",
+ "enum": [null, false]}';
+SELECT JSON_SCHEMA_VALID(@schema_true, 'true');
+SET @schema_true= '{ "type": "boolean",
+ "enum": [null, true]}';
+SELECT JSON_SCHEMA_VALID(@schema_true, 'true');
+SET @schema_true= '{ "type": "boolean",
+ "const":"false"}';
+SELECT JSON_SCHEMA_VALID(@schema_true, 'true');
+
+--echo # Type can be more than one
+
+SET @schema= '
+ {
+ "type":["string", "number","array"]
+ }';
+SELECT JSON_SCHEMA_VALID(@schema, '{"key1":"val1"}');
+SELECT JSON_SCHEMA_VALID(@schema, '"abc"');
+SELECT JSON_SCHEMA_VALID(@schema, '3.14');
+
+--echo # Checking number
+
+SET @schema_number= '{
+ "maximum":7,
+ "minimum": 3,
+ "multipleOf":3
+}';
+SELECT JSON_SCHEMA_VALID(@schema_number, '2');
+
+SET @schema_number= '{
+ "type": "number",
+ "maximum":13,
+ "minimum": 4,
+ "multipleOf":3,
+ "exclusiveMaximum": 9,
+ "exclusiveMinimum":4
+}';
+
+SELECT JSON_SCHEMA_VALID(@schema_number, '2');
+SELECT JSON_SCHEMA_VALID(@schema_number, '6');
+SELECT JSON_SCHEMA_VALID(@schema_number, '9');
+SELECT JSON_SCHEMA_VALID(@schema_number, '5');
+
+SET @schema_number= '{
+ "type": "number",
+ "maximum":100,
+ "minimum": 0,
+ "enum": [1, 2, "3", [4, 5, 6], {"key1":"val1"}]
+}';
+SELECT JSON_SCHEMA_VALID(@schema_number, 1);
+SELECT JSON_SCHEMA_VALID(@schema_number, 3);
+
+SET @schema_number= '{
+ "type":"number",
+ "maximum":10,
+ "const":2
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_number, '3');
+SELECT JSON_SCHEMA_VALID(@schema_number, '2');
+
+--echo # Checking string
+
+--echo # checking format keyword. (not validating for now)
+
+SET @schema_string= '{
+ "type": "string",
+ "format":"date-time"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_date-time"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"date"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_date"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"time"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_time"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"duration"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_duration"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"email"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_email"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"idn-email"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_idn-email"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"hostname"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_hostname"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"idn-hostname"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_idn-hostname"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"ipv4"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_ipv4"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"ipv6"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_ipv6"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"uri"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_uri"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"uri-reference"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_uri-reference"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"iri"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_iri"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"iri-reference"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_iri-reference"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"uuid"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_uuid"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"json-pointer"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_json-pointer"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"relative-json-pointer"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_relative-json-pointer"');
+SET @schema_string= '{
+ "type": "string",
+ "format":"regex"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"not_regex"');
+
+--echo # Validating other string keywords
+
+SET @schema_string= '{
+ "type": "string",
+ "maxLength":7,
+ "minLength": 4
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"foobar"');
+
+SET @schema_string= '{
+ "type": "string",
+ "maxLength": 10,
+ "minLength": 8
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"foobar"');
+SELECT JSON_SCHEMA_VALID(@schema_string, '"foobar123"');
+
+SET @schema_string= '{
+ "type": "string",
+ "maxLength": 10,
+ "minLength": 3,
+ "const": "foobar"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"foobar123"');
+
+SET @schema_string= '{
+ "type": "string",
+ "enum": ["red", "green", "blue"]
+}';
+SELECT JSON_SCHEMA_VALID(@schema_string, '"green"');
+SELECT JSON_SCHEMA_VALID(@schema_string, '"orange"');
+
+SET @string_schema= '{
+ "type":"string",
+ "pattern":"ab+c"
+ }';
+SELECT JSON_SCHEMA_VALID(@string_schema, '"abc"');
+
+--echo # Validating non-scalar
+
+--echo # Validating array
+
+SET @schema_array= '{"type":"array"}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, {"key1":"val1"}]');
+
+
+SET @schema_array= '{"type":"array",
+ "maxItems": 4,
+ "minItems": 2}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, {"key1":"val1"}]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, false, null, "foobar"]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1]');
+SET @schema_array= '{"maxItems": 4,
+ "minItems": 2}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, {"key1":"val1"}]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, false, null, "foobar"]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2]');
+
+SET @schema_array= '{
+ "type":"array",
+ "items": {"type":"number"},
+ "maxItems": 4,
+ "minItems": 2}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, "foobar"]');
+
+SET @schema_array= '{"type":"array",
+ "maxItems": 4,
+ "minItems": 2,
+ "const": [1, 2, 3, 4]}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, "foobar"]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, 4]');
+
+SET @schema_array= '{"type":"array",
+ "enum":[[1,2,3], [4,5,6], [7,8,9]]}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[4,5,6]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1,5,7]');
+
+SET @schema_array= '{
+ "type": "array",
+ "uniqueItems":true
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, 2.0]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, 4.0]');
+
+SET @schema_array= '{
+ "type": "array",
+ "contains": {
+ "type": "number"
+ },
+ "minContains": 2,
+ "maxContains": 3
+}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '["string1", "string2", "string3", 1, 2, 3, 4]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '["string1", "string2", "string3", 1, 2, 3]');
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1",2]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[2, "string1",2]');
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "items":true
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1",2]');
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "items":false
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1",2]');
+
+--echo # Checking objects
+
+SET @schema_object= '{"type":"object",
+ "properties":
+ {
+ "number1":{
+ "type":"number",
+ "maximum":12,
+ "minimum":1
+ },
+ "string1": {
+ "type":"string",
+ "maxLength":10,
+ "minLength": 4
+ },
+ "array1": {"type":"array",
+ "maxItems": 4,
+ "minItems": 2}
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_object, '{"number1":10, "string1":"foobar","array1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@schema_object, '{"number1":10, "string1":"foobar","array1":[1,2,3,4,5]}');
+
+SET @schema_obj= '{
+ "type": "object",
+ "properties": {
+ "number1":{"type":"number"},
+ "string1":{"type":"string"},
+ "array1":{"type":"array"}
+ },
+ "dependentRequired": {
+ "number1":["string1"]
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_obj, '{"array1":[1,2,3], "number1":2, "string1":"abc"}');
+SELECT JSON_SCHEMA_VALID(@schema_obj, '{"array1":[1,2,3], "number1":2}');
+
+SET @schema_obj= '{"type":"object",
+ "properties":
+ {
+ "number1":{
+ "type":"number",
+ "maximum":12,
+ "minimum":1
+ },
+ "key1" : {
+ "type":"object",
+ "properties": {
+ "key2" :{
+ "type":"string"
+ }
+ }
+ }
+ },
+ "enum": [{"number1":3, "key1":{"key2":"string1"}}, {"number1":5, "key1":{"key2":"string3"}}, {"number1":7, "key1":{"key2":"string5"}}]
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_obj, '{"number1":5, "key1":{"key2":1}}');
+SELECT JSON_SCHEMA_VALID(@schema_obj, '{"number1":5, "key1":{"key2":"string1"}}');
+SELECT JSON_SCHEMA_VALID(@schema_obj, '{"number1":5, "key1":{"key2":"string7"}}');
+
+SET @schema_obj= '{"type":"object",
+ "properties":
+ {
+ "number1":{
+ "type":"number",
+ "maximum":12,
+ "minimum":1
+ },
+ "obj1" : {
+ "type":"object",
+ "properties": {
+ "obj1_1":{
+ "type":"string"
+ },
+ "obj1_2": {
+ "type":"array"
+ }
+ }
+ },
+ "obj2" : {
+ "type":"object",
+ "properties" : {
+ "obj2_1":{
+ "type":"number"
+ }
+ }
+ }
+ },
+ "required":["number1", "obj2"]
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}}');
+SELECT JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}, "obj2":{"obj2_1":7}}');
+
+SET @schema_obj= '{"type":"object",
+ "properties":
+ {
+ "number1":{
+ "type":"number",
+ "maximum":12,
+ "minimum":1
+ },
+ "obj1" : {
+ "type":"object",
+ "properties": {
+ "obj1_1":{
+ "type":"string"
+ },
+ "obj1_2": {
+ "type":"array"
+ }
+ }
+ },
+ "obj2" : {
+ "type":"object",
+ "properties" : {
+ "obj2_1":{
+ "type":"number"
+ }
+ }
+ }
+ },
+ "required":["number1", "obj2"],
+ "const": {"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}, "obj2":{"obj2_1":7}}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}, "obj2":{"obj2_1":7}}');
+SELECT JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}}');
+
+SET @schema_obj= '{"type":"object",
+ "properties":
+ {
+ "number1":{
+ "type":"number",
+ "maximum":12,
+ "minimum":1
+ },
+ "obj1" : {
+ "type":"object",
+ "properties": {
+ "obj1_1":{
+ "type":"string"
+ },
+ "obj1_2": {
+ "type":"array"
+ }
+ }
+ },
+ "obj2" : {
+ "type":"object",
+ "properties" : {
+ "obj2_1":{
+ "type":"number"
+ }
+ }
+ }
+ },
+ "maxProperties": 5,
+ "minProperties":2
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}, "obj2":{"obj2_1":7}}');
+SELECT JSON_SCHEMA_VALID(@schema_obj,'{"number1":1, "number2":2, "number3":3, "number4":4, "number5":5, "number6":6}');
+
+SET @schema_obj= '{"type":"object",
+ "properties":
+ {
+ "number1":{
+ "type":"number",
+ "maximum":12,
+ "minimum":1
+ },
+ "obj1" : {
+ "type":"object",
+ "properties": {
+ "obj1_1":{
+ "type":"string"
+ },
+ "obj1_2": {
+ "type":"array"
+ }
+ }
+ }
+ },
+ "maxProperties": 3,
+ "minProperties":1,
+ "additionalProperties":false
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_obj,'{"number1":5, "obj1":{"obj1_1":"string1", "obj1_2":[1, 2, 3]}, "obj2":"string2"}');
+
+--echo # Demonstrating that irrelevant keywords for a type and irrelevant type
+--echo # are ignored, and none of the keywords are mandatory, including "type".
+
+SET @schema_properties= '{
+ "properties" : {
+ "number1":{ "maximum":10 },
+ "string1" : { "maxLength": 3}
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_properties, '{ "number1":25, "string1":"ab" }');
+SELECT JSON_SCHEMA_VALID(@schema_properties, '{ "number1":10, "string1":"ab" }');
+
+SET @schema_properties= '{
+ "properties" : {
+ "number1":{ "maximum":10 },
+ "string1" : { "maxLength": 3},
+ "obj1" : {
+ "properties":{
+ "number2": {"minimum":8},
+ "array2": {"uniqueItems":true}
+ }
+ }
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_properties, '{ "number1":2, "string1":"ab", "obj1":{"number2":2, "array2":[1,2,3]} }');
+SELECT JSON_SCHEMA_VALID(@schema_properties, '{ "number1":2, "string1":"ab", "obj1":{"number2":10, "array2":[1,2,3]} }');
+SELECT JSON_SCHEMA_VALID(@schema_properties, '{ "number1":2, "string1":"ab", "obj1":{"array2":[1,2,3]} }');
+SELECT JSON_SCHEMA_VALID(@schema_properties, '{ "number1":2, "string1":"ab", "obj1":{"number2":10, "array2":[1,2,3,2]} }');
+
+SET @schema_num= '{
+ "maximum":10,
+ "minimum":2
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_num, '5');
+SELECT JSON_SCHEMA_VALID(@schema_num, '"abc"');
+
+SET @schema_str= '{
+ "maxLength":5,
+ "minLength":2,
+ "pattern":"a+bc"
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_str, '"abc"');
+SELECT JSON_SCHEMA_VALID(@schema_str, '"abcadef"');
+SELECT JSON_SCHEMA_VALID(@schema_str, '"bc"');
+
+SET @schema_arr= '{
+ "uniqueItems":true,
+ "items":{"type":"string"},
+ "maximum":10
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_arr,'["abc", "bcd", "cde"]');
+SELECT JSON_SCHEMA_VALID(@schema_arr,'["abc", "bcd", "abc"]');
+SELECT JSON_SCHEMA_VALID(@schema_arr,'["abc", "bcd", 1]');
+
+SET @schema_const1= '{"const":2}';
+SELECT JSON_SCHEMA_VALID(@schema_const1, '2');
+SELECT JSON_SCHEMA_VALID(@schema_const1, '"abc"');
+
+SET @schema_const2= '{"const":true}';
+SELECT JSON_SCHEMA_VALID(@schema_const2,'true');
+SELECT JSON_SCHEMA_VALID(@schema_const2,'false');
+
+SET @schema_enum='{"enum":[1,2,3,"abc", [4,5,6]]}';
+SELECT JSON_SCHEMA_VALID(@schema_enum,'[4,5,6]');
+SELECT JSON_SCHEMA_VALID(@schema_enum,'4');
+SELECT JSON_SCHEMA_VALID(@schema_enum,'"abc"');
+
+SET @schema_required='{"required":["num1","str1", "arr1"]}';
+SELECT JSON_SCHEMA_VALID(@schema_required,'{"num1":1, "str1":"abc", "arr1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@schema_required,'{"num1":1, "arr1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@schema_required,'"abcd"');
+
+SET @schema_dep_required='{
+ "dependentRequired": {
+ "num1":["num2","num3"],
+ "str1":["str2","str3"]
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_dep_required,'{"num1":1, "num2":"abc", "num3":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@schema_dep_required,'{"num1":1, "num2":"abc", "num3":[1,2,3], "str1":"abcd"}');
+SELECT JSON_SCHEMA_VALID(@schema_dep_required,'{"num1":1, "num2":"abc", "num3":[1,2,3], "arr1":[1,2,3]}');
+
+--echo # Checking syntax error
+SET @invalid_schema= '{"type":"object"
+ "properties":{
+ "number1": {"type":"number"},
+ "obj2": {"type":"object",
+ "properties": {
+ "key1": {"type":"number"}
+ }
+ }
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@invalid_schema, '{"number1":3, "obj2":{"key1":3}}');
+
+SET @invalid_json= '{"type":"array",
+ "maxItems": 4,
+ "minItems": 2,
+ "const": [1, 2, 3, 4]}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3, 4');
+
+SET @schema_string= '{
+ "type": "string",
+ "maxLength":-2
+ }';
+--error ER_JSON_INVALID_VALUE_FOR_KEYWORD
+SELECT JSON_SCHEMA_VALID(@schema_string, '"abcxyz"');
+
+SET @schema_number= '{
+ "type": "number",
+ "multipleOf":-3
+ }';
+--error ER_JSON_INVALID_VALUE_FOR_KEYWORD
+SELECT JSON_SCHEMA_VALID(@schema_number, '3');
+
+SET @schema_object= '{
+ "type": "object",
+ "properties":{"num1":{"type":"number"}},
+ "required":{}
+ }';
+--error ER_JSON_INVALID_VALUE_FOR_KEYWORD
+SELECT JSON_SCHEMA_VALID(@schema_object, '{"num1":2}');
+
+SET @schema_string= '{
+ "type": "string",
+ "maxLength":-10
+ }';
+--error ER_JSON_INVALID_VALUE_FOR_KEYWORD
+SELECT JSON_SCHEMA_VALID(@schema_string, '"str1"');
+
+SET @schema_number= '{"type":"numberz"}';
+--error ER_JSON_INVALID_VALUE_FOR_KEYWORD
+SELECT JSON_SCHEMA_VALID(@schema_number, '"string1"');
+
+--echo # Using JSON_SCHEMA_VALID() as a constraint validation to insert into table
+
+CREATE TABLE str_table (val_str JSON CHECK(JSON_SCHEMA_VALID('{
+ "type":"string",
+ "maxLength":5,
+ "minLength":2,
+ "enum":["ab", "cd", "abc", "def", "abcdef"]
+ }', val_str)));
+CREATE TABLE num_table(val_num JSON CHECK(JSON_SCHEMA_VALID('{
+ "type":"number",
+ "minimum":10,
+ "maximum":30,
+ "exclusiveMinimum":11,
+ "exclusiveMaximum":29,
+ "multipleOf":5,
+ "const":15
+ }', val_num)));
+CREATE TABLE true_table(val_true JSON CHECK(JSON_SCHEMA_VALID('{
+ "type":"boolean",
+ "enum":[true, false, null]
+ }', val_true)));
+CREATE TABLE false_table (val_false JSON CHECK(JSON_SCHEMA_VALID('{
+ "type":"boolean"
+ }', val_false)));
+CREATE TABLE null_table (val_null JSON CHECK(JSON_SCHEMA_VALID('{
+ "type":"null"
+ }', val_null)));
+CREATE TABLE arr_table (val_arr JSON CHECK(JSON_SCHEMA_VALID('{
+ "type":"array",
+ "uniqueItems":true,
+ "maxItems":5,
+ "minItems":1,
+ "items":true,
+ "prefixItems":[{"type":"number"}]
+ }', val_arr)));
+CREATE TABLE obj_table(val_obj JSON CHECK(JSON_SCHEMA_VALID('{
+ "type":"object",
+ "properties": {
+ "number1":{
+ "type":"number",
+ "maximum":5,
+ "const":4
+ },
+ "string1":{
+ "type":"string",
+ "maxLength":5,
+ "minLength":3
+ },
+ "object1":{
+ "type":"object",
+ "properties":{
+ "key1": {"type":"string"},
+ "key2":{"type":"array"},
+ "key3":{"type":"number", "minimum":3}
+ },
+ "dependentRequired": { "key1":["key3"] }
+ }
+ },
+ "required":["number1","object1"]
+ }', val_obj)));
+
+INSERT INTO str_table VALUES ('"ab"'), ('"cd"'), ('"abc"'), ('"def"');
+--error ER_CONSTRAINT_FAILED
+INSERT INTO str_table VALUES ('"feb"');
+--error ER_CONSTRAINT_FAILED
+INSERT INTO str_table VALUES('"abcdef"');
+--error ER_CONSTRAINT_FAILED
+INSERT INTO str_table VALUES('"fedcba"');
+SELECT * FROM str_table;
+
+
+INSERT INTO num_table values('15');
+--error ER_CONSTRAINT_FAILED
+INSERT INTO num_table values('25');
+SELECT * FROM num_table;
+
+INSERT INTO true_table VALUES ('true');
+SELECT * FROM true_table;
+
+INSERT INTO false_table VALUES('false');
+SELECT * FROM false_table;
+
+INSERT INTO arr_table VALUES ('[10, 2, "abc"]');
+INSERT INTO arr_table VALUES('[100]');
+--error ER_CONSTRAINT_FAILED
+INSERT INTO arr_table VALUES ('["str1", 2, "abc", 2.0]');
+SELECT * FROM arr_table;
+
+INSERT INTO obj_table VALUES('{"number1":4, "string1":"abcd", "object1":{"key1":"val1", "key2":[1,2,3, "string1"], "key3":4}}');
+--error ER_CONSTRAINT_FAILED
+INSERT INTO obj_table VALUES('{"number1":3, "string1":"abcd", "object1":{"key1":"val1", "key2":[1,2,3, "string1"], "key3":4}}');
+--error ER_CONSTRAINT_FAILED
+INSERT INTO obj_table VALUES('{"number1":3, "string1":"abcd"}');
+--error ER_CONSTRAINT_FAILED
+INSERT INTO obj_table VALUES('{"number1":3, "string1":"abcd", "object1":{"key1":"val1", "key2":[1,2,3, "string1"]}');
+SELECT * FROM obj_table;
+
+DROP TABLE str_table, num_table, true_table, false_table, null_table, arr_table, obj_table;
+
+--echo # array validation
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "additionalItems" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"},
+ "items":{"type":"array"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], "2", "string"]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "2", "string"]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], [1,2]]');
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "additionalItems" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"},
+ "items": true
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], "2", "string"]');
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "additionalItems" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"},
+ "items": false
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], "2", "string"]');
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "additionalItems" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"},
+ "items": [{"type":"string"}]
+ }';
+--error ER_JSON_INVALID_VALUE_FOR_KEYWORD
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], "2", "string"]');
+
+--echo # Removing items
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "additionalItems" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", [1,2], "2", "string"]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]');
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "additionalItems" : true,
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]');
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "additionalItems" : false,
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]');
+
+--echo # Using items in place of additionalItems
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "items" : false,
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]');
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "items" : true,
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]');
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "items" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2, 3]');
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "items" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]');
+
+--echo # Removing items and additionalItems both
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "unevaluatedItems": {"type":"number"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", 1, 2]');
+
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "unevaluatedItems": true
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]');
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ],
+ "unevaluatedItems": false
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]');
+
+--echo # Removing items, additionalItems, unevaluatedItems
+
+SET @schema_array= '{
+ "type": "array",
+ "prefixItems": [
+ { "type": "number", "maximum": 10, "minimum":3},
+ { "type": "string" }
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, "string1", "string2", "string3"]');
+
+
+--echo # Removing prefixItems
+
+
+SET @schema_array= '{
+ "type": "array",
+ "items": { "type": "number", "maximum": 10, "minimum":3},
+ "additionalItems" : {"type":"number"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, 6,"2", "string"]');
+
+
+SET @schema_array= '{
+ "type": "array",
+ "items": { "type": "number", "maximum": 10, "minimum":3},
+ "additionalItems" : {"type":"string"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, 6,"2", "string"]');
+
+SET @schema_array= '{
+ "type": "array",
+ "items": true,
+ "additionalItems" : {"type":"string"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, 6,"2", "string"]');
+
+SET @schema_array= '{
+ "type": "array",
+ "items": false,
+ "additionalItems" : {"type":"string"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[5, 6,"2", "string"]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[]');
+
+--echo # Removing prefixItems and additionalItems
+
+SET @schema_array= '{
+ "type": "array",
+ "items": {"type":"string"},
+ "unevaluatedItems": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 2]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2"]');
+
+SET @schema_array= '{
+ "type": "array",
+ "items": {"type":"string"},
+ "unevaluatedItems": {"type":"number"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 2]');
+
+--echo # removing prefixItems, additionalItems and unevaluatedItems
+
+SET @schema_array= '{
+ "type": "array",
+ "items": {"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 2]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2"]');
+
+--echo # Checking that additionalItems alone has no effect on schema without items/prefixItems
+--echo # regardless of the existence of unevaluatedItems
+
+SET @schema_array= '{
+ "type": "array",
+ "additionalItems": {"type":"string"},
+ "unevaluatedItems": {"type":"number"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 1]');
+
+SET @schema_array= '{
+ "type": "array",
+ "additionalItems": true,
+ "unevaluatedItems": {"type":"number"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 1]');
+
+SET @schema_array= '{
+ "type": "array",
+ "additionalItems": false,
+ "unevaluatedItems": {"type":"number"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '["str1", "str2", 1]');
+
+
+--echo # checking that unevaluatedItems alone can have effect on schema validation
+
+SET @schema_array= '{
+ "type": "array",
+ "unevaluatedItems": {"type":"number"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1]');
+
+SET @schema_array= '{
+ "type": "array",
+ "unevaluatedItems": {"type":"number"}
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, "str1"]');
+
+SET @schema_array= '{
+ "type": "array",
+ "unevaluatedItems": false
+ }';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, "str1"]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[]');
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1,2,3]');
+
+
+--echo # Object validation
+
+SET @property_names= '{
+ "PropertyNames":{
+ "pattern": "^I_"
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@property_names, '{"I_int1":3, "O_ob1":{"key1":"val1"}}');
+SELECT JSON_SCHEMA_VALID(@property_names, '{"I_int1":3, "I_ob1":{"key1":"val1"}}');
+
+--echo # checking that when a match is found in properties or patternProperties, it must validate and
+--echo # validation result affects the schema. If key is not found in properties or patternProperties, and
+--echo # additionalProperties exists, it must validate regardless of existence or value for unevaluatedProperties
+--echo # and the result of validation with additionalProperties affects result of whole schema
+
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "additionalProperties":{
+ "type":"array",
+ "maxItems":5
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": [1,2,3], "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":[1,2,3], "S_":"abc", "some_prop1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":"string1"}');
+
+--echo # removing patternProperties to check that validation falls back on additionalProperties and
+--echo # existence of unevaluatedProperties still does not change anything because of existence of additional
+--echo # properties
+
+
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "additionalProperties":{
+ "type":"array",
+ "maxItems":5
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":[1,2,3], "S_":[1,2,3], "some_prop1":[1,2,3]}');
+
+--echo # Removing additionalProperties to check that validation falls back on unevaluatedProperties
+
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":10, "S_":"abc", "some_prop1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":10, "S_":"abc", "some_prop1":"str"}');
+
+--echo # Removing unevaluatedProperties has no effect on result when additionalProperties is present
+
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "additionalProperties":{
+ "type":"array",
+ "maxItems":5
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":"str1"}');
+
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "additionalProperties": false
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "additionalProperties": true
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+
+
+--echo # Checking that in absence of additionalProperties, validation falls back on evaluatedProperties
+
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 3, "I_int":20, "S_":"abc", "some_prop1":"str1"}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 3, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+
+SET @object_schema= '{
+ "type":"object",
+ "properties": {
+ "key1":{"type":"string"},
+ "key2":{"type":"number", "maximum":50}
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 3, "I_int":"str2", "S_":"abc", "some_prop1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 3, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+
+--echo # Properties to check if patternProperties get evaluated but keys not found in patternProperties get evaluated against
+--echo # additionalProperties regardless of existence of unevaluatedProperperties
+
+SET @object_schema= '{
+ "type":"object",
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "additionalProperties":{
+ "type":"array",
+ "maxItems":5
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":[1, 2, 3], "key2": [1, 2, 3], "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+
+SET @object_schema= '{
+ "type":"object",
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "additionalProperties":{
+ "type":"array",
+ "maxItems":5
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":[1, 2, 3], "key2": [1, 2, 3], "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+
+--echo # Checking that in absence of additionalProperties and properties, the keys not found in patternProperties are
+--echo # evaluated against unevaluatedProperties
+SET @object_schema= '{
+ "type":"object",
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": "str2", "I_int":20, "S_":"abc", "some_prop1":"str1"}');
+
+--echo # checking that in absence of properties, additionalProperties and unevaluatedPropoerties, the keys that are
+--echo # not found are considered validated.
+
+SET @object_schema= '{
+ "type":"object",
+ "patternProperties": {
+ "^I_": {"type":"number", "maximum":100},
+ "^S_" : {"type":"string", "maxLength":4}
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+
+--echo # checking that additionalProperties are evaluated in absence of patternProperties and properties, regardless
+--echo # of presence of unevaluatedProperties
+
+SET @object_schema= '{
+ "type":"object",
+ "additionalProperties":{
+ "type":"array",
+ "maxItems":5
+ },
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":[1,2,3], "key2": [1,2,3], "I_int":[1,2,3], "S_":[1,2,3], "some_prop1":[1,2,3]}');
+
+SET @object_schema= '{
+ "type":"object",
+ "additionalProperties":{
+ "type":"array",
+ "maxItems":5
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": 10, "I_int":20, "S_":"abc", "some_prop1":[1,2,3]}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":[1,2,3], "key2": [1,2,3], "I_int":[1,2,3], "S_":[1,2,3], "some_prop1":[1,2,3]}');
+
+--echo # Checking that in absence of properties, patternProperties and additionalProperties, validation falls back on unevaluatedProperties
+
+SET @object_schema= '{
+ "type":"object",
+ "unevaluatedProperties":{"type":"string"}
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": "str1", "I_int":"str2", "S_":"abc", "some_prop1":"str3"}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":[1,2,3], "key2": [1,2,3], "I_int":[1,2,3], "S_":[1,2,3], "some_prop1":[1,2,3]}');
+
+SET @object_schema= '{
+ "type":"object",
+ "unevaluatedProperties": false
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": "str1", "I_int":"str2", "S_":"abc", "some_prop1":"str3"}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{}');
+
+SET @object_schema= '{
+ "type":"object",
+ "unevaluatedProperties": true
+ }';
+SELECT JSON_SCHEMA_VALID(@object_schema, '{"key1":"val1", "key2": "str1", "I_int":"str2", "S_":"abc", "some_prop1":"str3"}');
+SELECT JSON_SCHEMA_VALID(@object_schema, '{}');
+
+SET @json_schema_dependent_schemas='{
+ "type": "object",
+ "properties": {
+ "str1": { "type": "string" },
+ "num1": { "type": "number" }
+ },
+
+ "required": ["str1"],
+
+ "dependentSchemas": {
+ "num1": {
+ "properties": {
+ "str2": { "type": "string" }
+ },
+ "required": ["str2"]
+ }
+ }
+}';
+SELECT JSON_SCHEMA_VALID(@json_schema_dependent_schemas, '{ "str1": "str", "num1":4}');
+SELECT JSON_SCHEMA_VALID(@json_schema_dependent_schemas, '{ "str1": "str"}');
+
+--echo # Validating logic
+
+SET @not_schema= '{
+ "not":{
+ "maximum": 4
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@not_schema, '{"I_int1":3, "O_ob1":{"key1":"val1"}}');
+SELECT JSON_SCHEMA_VALID(@not_schema, '3');
+SELECT JSON_SCHEMA_VALID(@not_schema, '10');
+
+SET @not_schema= '{
+ "not":{
+ "properties": {
+ "num1" : {"type":"number", "maximum":5},
+ "string1" : { "maxLength": 3}
+ }
+ }
+ }';
+SELECT JSON_SCHEMA_VALID(@not_schema, '{"num1":10, "string2":"abcd"}');
+SELECT JSON_SCHEMA_VALID(@not_schema, '{"num1":2, "string2":"abcd"}');
+
+SET @any_of_schema= '{
+ "anyOf":[{
+ "properties": {
+ "num1" : {"type":"number", "maximum":5},
+ "string1" : { "maxLength": 3}
+ }
+ },
+ {
+ "properties":{
+ "num1" : {"type":"number", "maximum": 1},
+ "string1" : { "maxLength":5}
+ }
+ }
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@any_of_schema, '{"num1":2, "string1":"abcd"}');
+SELECT JSON_SCHEMA_VALID(@any_of_schema, '{"num1":2, "string1":"abc"}');
+
+SET @any_of_schema= '{
+ "anyOf": [
+ {"type":"number", "maximum":5},
+ {"type":"string"}
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@any_of_schema, '2');
+SELECT JSON_SCHEMA_VALID(@any_of_schema, '6');
+SELECT JSON_SCHEMA_VALID(@any_of_schema, '[1, 2, 3]');
+
+SET @one_of_schema= '{
+ "oneOf":[{
+ "properties": {
+ "num1" : {"type":"number", "maximum":5},
+ "string1" : { "maxLength": 3}
+ }
+ },
+ {
+ "properties":{
+ "num1" : {"type":"number", "maximum": 1},
+ "string1" : { "maxLength":5}
+ }
+ }
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@one_of_schema, '{"num1":2, "string1":"abcd"}');
+SELECT JSON_SCHEMA_VALID(@one_of_schema, '{"num1":2, "string1":"abc"}');
+
+SET @one_of_schema= '{
+ "oneOf": [
+ {"type":"number", "maximum":5},
+ {"type":"number", "maximum":3}
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@one_of_schema, '2');
+SELECT JSON_SCHEMA_VALID(@one_of_schema, '4');
+
+SET @all_of_schema= '{
+ "allOf":[{
+ "properties": {
+ "num1" : {"type":"number", "maximum":5},
+ "string1" : { "maxLength": 3}
+ }
+ },
+ {
+ "properties":{
+ "num1" : {"type":"number", "maximum": 1},
+ "string1" : { "maxLength":5}
+ }
+ }
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@all_of_schema, '{"num1":2, "string1":"abcd"}');
+
+SET @all_of_schema= '{
+ "allOf":[
+ {
+ "properties":{
+ "num1": {"type":"number"},
+ "string1": {"type":"string"}
+ }
+ },
+ {
+ "properties":{
+ "num1": {"maximum":10},
+ "string1": {"maxLength":5}
+ }
+ }
+ ]
+ }';
+SELECT JSON_SCHEMA_VALID(@all_of_schema, '{"num1":5, "string1":"abc"}');
+SELECT JSON_SCHEMA_VALID(@all_of_schema, '{"num1":5, "string1":"foobar"}');
+
+SET @condition_schema= '{
+ "if" : {"maximum":30, "multipleOf":3},
+ "then":{"minimum":5},
+ "else":{"minimum":10}
+ }';
+SELECT JSON_SCHEMA_VALID(@condition_schema, '6');
+SELECT JSON_SCHEMA_VALID(@condition_schema, '4');
+SELECT JSON_SCHEMA_VALID(@condition_schema, '13');
+
+SET @condition_schema= '{
+ "if" : {"maximum":30, "multipleOf":3}
+ }';
+SELECT JSON_SCHEMA_VALID(@condition_schema, '6');
+SELECT JSON_SCHEMA_VALID(@condition_schema, '7');
+
+SET @condition_schema= '{
+ "then":{"minimum":5},
+ "else":{"minimum":10}
+ }';
+SELECT JSON_SCHEMA_VALID(@condition_schema, '4');
+SELECT JSON_SCHEMA_VALID(@condition_schema, '11');
+
+--echo # Checking unevaluatedProperperties with logical properties
+
+SET @all_of_unevaluated='{
+ "allOf": [
+ {
+ "type": "object",
+ "properties": {
+ "street_address": { "type": "string" },
+ "city": { "type": "string" },
+ "state": { "type": "string" }
+ },
+ "required": ["street_address", "city", "state"]
+ }
+ ],
+
+ "properties": {
+ "type": { "enum": ["residential", "business"] }
+ },
+ "required": ["type"],
+ "unevaluatedProperties": false
+}';
+SELECT JSON_SCHEMA_VALID(@all_of_unevaluated, '{
+ "street_address": "1600 Pennsylvania Avenue NW",
+ "city": "Washington",
+ "state": "DC",
+ "type": "business"
+}');
+
+SET @all_of_unevaluated='{
+ "allOf": [
+ {
+ "type": "object",
+ "properties": {
+ "street_address": { "type": "string" },
+ "city": { "type": "string" },
+ "state": { "type": "string" }
+ },
+ "required": ["street_address", "city", "state"]
+ }
+ ],
+
+ "unevaluatedProperties": {"type":"number"}
+}';
+SELECT JSON_SCHEMA_VALID(@all_of_unevaluated, '{
+ "street_address": "1600 Pennsylvania Avenue NW",
+ "city": "Washington",
+ "state": "DC",
+ "type": "business"
+}');
+
+SET @any_of_unevaluated='{
+ "anyOf": [
+ {
+ "type": "object",
+ "properties": {
+ "street_address": { "type": "string" },
+ "city": { "type": "string" },
+ "state": { "type": "string" }
+ },
+ "required": ["street_address", "city", "state"]
+ }
+ ],
+
+ "unevaluatedProperties": {"type":"number"}
+}';
+SELECT JSON_SCHEMA_VALID(@any_of_unevaluated, '{
+ "street_address": "1600 Pennsylvania Avenue NW",
+ "city": "Washington",
+ "state": "DC",
+ "type": "business"
+}');
+
+SET @all_of_unevaluated='{
+ "allOf": [
+ {
+ "type": "array"
+ },
+ {
+ "maxItems":10
+ },
+ {
+ "prefixItems": [ {"type":"number"}, {"type":"string"}],
+ "additionalItems":{"type":"array"}
+ }
+ ],
+
+ "unevaluatedItems": {"type":"number"}
+}';
+SELECT JSON_SCHEMA_VALID(@all_of_unevaluated, '[1, "str", 2]');
+
+SET @all_of_unevaluated='{
+ "anyOf": [
+ {
+ "type": "array"
+ },
+ {
+ "maxItems":10
+ },
+ {
+ "prefixItems": [ {"type":"number"}, {"type":"string"}],
+ "additionalItems":{"type":"array"}
+ }
+ ],
+
+ "unevaluatedItems": {"type":"number"}
+}';
+SELECT JSON_SCHEMA_VALID(@all_of_unevaluated, '[1, "str", 2]');
+
+SET @all_of_unevaluated='{
+ "oneOf": [
+ {
+ "type": "array"
+ },
+ {
+ "maxItems":10
+ },
+ {
+ "prefixItems": [ {"type":"number"}, {"type":"string"}],
+ "additionalItems":{"type":"array"}
+ }
+ ],
+
+ "unevaluatedItems": {"type":"number"}
+}';
+SELECT JSON_SCHEMA_VALID(@all_of_unevaluated, '[1, "str", 2]');
+
+--echo # Media string
+
+SET @schema_media_string= '{
+ "type": "string",
+ "contentMediaType": "text/html"
+}';
+SELECT JSON_SCHEMA_VALID(@schema_media_string, '"str1"');
+
+SET @schema_reference= '{"$ref": "http://example.com/custom-email-validator.json#"}';
+--error ER_JSON_SCHEMA_KEYWORD_UNSUPPORTED
+SELECT JSON_SCHEMA_VALID(@schema_reference, '{}');
+
+SET @schema_reference= '{"$id": "http://example.com/custom-email-validator.json#"}';
+--error ER_JSON_SCHEMA_KEYWORD_UNSUPPORTED
+SELECT JSON_SCHEMA_VALID(@schema_reference, '{}');
+
+SET @schema_reference= '{"$anchor": "http://example.com/custom-email-validator.json#"}';
+--error ER_JSON_SCHEMA_KEYWORD_UNSUPPORTED
+SELECT JSON_SCHEMA_VALID(@schema_reference, '{}');
+
+SET @schema_reference= '{"$defs": "http://example.com/custom-email-validator.json#"}';
+--error ER_JSON_SCHEMA_KEYWORD_UNSUPPORTED
+SELECT JSON_SCHEMA_VALID(@schema_reference, '{}');
+
+
+--echo # End of 11.1 test
diff --git a/mysql-test/main/json_debug_nonembedded.result b/mysql-test/main/json_debug_nonembedded.result
index e759cafa38b..0e7458edd9f 100644
--- a/mysql-test/main/json_debug_nonembedded.result
+++ b/mysql-test/main/json_debug_nonembedded.result
@@ -7,6 +7,13 @@ SET @json1= '{"key1":"val1"}';
SET @json2= '{"key1":"val1"}';
SELECT JSON_OVERLAPS(@json1, @json2);
ERROR HY000: Thread stack overrun: 'used bytes' used of a 'available' byte stack, and 'X' bytes needed. Consider increasing the thread_stack system variable.
+SET @schema_array= '{
+ "type":"array",
+ "items": {"type":"number"},
+ "maxItems": 4,
+ "minItems": 2}';
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3]');
+ERROR HY000: Thread stack overrun: 'used bytes' used of a 'available' byte stack, and 'X' bytes needed. Consider increasing the thread_stack system variable.
SET @@debug_dbug= @saved_dbug;
#
# End of 10.9 test
diff --git a/mysql-test/main/json_debug_nonembedded.test b/mysql-test/main/json_debug_nonembedded.test
index 5813b46e556..fb4fe67b5d3 100644
--- a/mysql-test/main/json_debug_nonembedded.test
+++ b/mysql-test/main/json_debug_nonembedded.test
@@ -15,6 +15,15 @@ SET @json2= '{"key1":"val1"}';
--error ER_STACK_OVERRUN_NEED_MORE
SELECT JSON_OVERLAPS(@json1, @json2);
+SET @schema_array= '{
+ "type":"array",
+ "items": {"type":"number"},
+ "maxItems": 4,
+ "minItems": 2}';
+--replace_regex /overrun: [0-9]* bytes used of a [0-9]* byte stack, and [0-9]* bytes needed/overrun: 'used bytes' used of a 'available' byte stack, and 'X' bytes needed/
+--error ER_STACK_OVERRUN_NEED_MORE
+SELECT JSON_SCHEMA_VALID(@schema_array, '[1, 2, 3]');
+
SET @@debug_dbug= @saved_dbug;
--echo #
diff --git a/sql/CMakeLists.txt b/sql/CMakeLists.txt
index 4938f8da02b..c2fe2e1ecae 100644
--- a/sql/CMakeLists.txt
+++ b/sql/CMakeLists.txt
@@ -159,7 +159,7 @@ SET (SQL_SOURCE
opt_table_elimination.cc sql_expression_cache.cc
gcalc_slicescan.cc gcalc_tools.cc
my_apc.cc mf_iocache_encr.cc item_jsonfunc.cc
- my_json_writer.cc
+ my_json_writer.cc json_schema.cc json_schema_helper.cc
rpl_gtid.cc rpl_parallel.cc
semisync.cc semisync_master.cc semisync_slave.cc
semisync_master_ack_receiver.cc
diff --git a/sql/item_create.cc b/sql/item_create.cc
index 0e2bcc7b1be..0486fc3ea1d 100644
--- a/sql/item_create.cc
+++ b/sql/item_create.cc
@@ -1339,6 +1339,18 @@ protected:
virtual ~Create_func_json_overlaps() {}
};
+class Create_func_json_schema_valid: public Create_func_arg2
+{
+public:
+ virtual Item *create_2_arg(THD *thd, Item *arg1, Item *arg2);
+
+ static Create_func_json_schema_valid s_singleton;
+
+protected:
+ Create_func_json_schema_valid() {}
+ virtual ~Create_func_json_schema_valid() {}
+};
+
class Create_func_last_day : public Create_func_arg1
{
@@ -4391,6 +4403,15 @@ Create_func_last_insert_id::create_native(THD *thd, const LEX_CSTRING *name,
return func;
}
+Create_func_json_schema_valid Create_func_json_schema_valid::s_singleton;
+
+Item*
+Create_func_json_schema_valid::create_2_arg(THD *thd, Item *arg1, Item *arg2)
+{
+ status_var_increment(thd->status_var.feature_json);
+ return new (thd->mem_root) Item_func_json_schema_valid(thd, arg1, arg2);
+}
+
Create_func_lcase Create_func_lcase::s_singleton;
@@ -5801,6 +5822,7 @@ Native_func_registry func_array[] =
{ { STRING_WITH_LEN("JSON_OVERLAPS") }, BUILDER(Create_func_json_overlaps)},
{ { STRING_WITH_LEN("JSON_REMOVE") }, BUILDER(Create_func_json_remove)},
{ { STRING_WITH_LEN("JSON_REPLACE") }, BUILDER(Create_func_json_replace)},
+ { { STRING_WITH_LEN("JSON_SCHEMA_VALID") }, BUILDER(Create_func_json_schema_valid)},
{ { STRING_WITH_LEN("JSON_SET") }, BUILDER(Create_func_json_set)},
{ { STRING_WITH_LEN("JSON_SEARCH") }, BUILDER(Create_func_json_search)},
{ { STRING_WITH_LEN("JSON_TYPE") }, BUILDER(Create_func_json_type)},
diff --git a/sql/item_jsonfunc.cc b/sql/item_jsonfunc.cc
index 590fde88a52..af0167b5d7e 100644
--- a/sql/item_jsonfunc.cc
+++ b/sql/item_jsonfunc.cc
@@ -19,21 +19,7 @@
#include "sql_class.h"
#include "item.h"
#include "sql_parse.h" // For check_stack_overrun
-
-/*
- Allocating memory and *also* using it (reading and
- writing from it) because some build instructions cause
- compiler to optimize out stack_used_up. Since alloca()
- here depends on stack_used_up, it doesnt get executed
- correctly and causes json_debug_nonembedded to fail
- ( --error ER_STACK_OVERRUN_NEED_MORE does not occur).
-*/
-#define ALLOCATE_MEM_ON_STACK(A) do \
- { \
- uchar *array= (uchar*)alloca(A); \
- bzero(array, A); \
- my_checksum(0, array, A); \
- } while(0)
+#include "json_schema_helper.h"
/*
Compare ASCII string against the string with the specified
@@ -4736,3 +4722,104 @@ bool Item_func_json_overlaps::fix_length_and_dec(THD *thd)
return Item_bool_func::fix_length_and_dec(thd);
}
+
+longlong Item_func_json_schema_valid::val_int()
+{
+ json_engine_t ve;
+ int is_valid= 1;
+
+ if (!schema_parsed)
+ return 0;
+
+ val= args[1]->val_json(&tmp_val);
+
+ if (!val || !val->length())
+ {
+ null_value= 0;
+ return 1;
+ }
+
+ json_scan_start(&ve, val->charset(), (const uchar *) val->ptr(),
+ (const uchar *) val->end());
+
+ if (json_read_value(&ve))
+ goto end;
+
+ if (!keyword_list.is_empty())
+ {
+    List_iterator<Json_schema_keyword> it(keyword_list);
+ Json_schema_keyword* curr_keyword= NULL;
+ while ((curr_keyword=it++))
+ {
+ if (curr_keyword->validate(&ve, NULL, NULL))
+ {
+ is_valid= 0;
+ break;
+ }
+ }
+ }
+
+end:
+ if (unlikely(ve.s.error))
+ {
+ is_valid= 0;
+ report_json_error(val, &ve, 2);
+ }
+
+ return is_valid;
+}
+
+/*
+  Idea behind the implementation:
+  A JSON schema has the same structure as a JSON object, i.e. it consists of
+  key-value pairs, so it can be parsed in the same manner as any JSON object.
+
+  However, none of the keywords are mandatory, so guessing the JSON value
+  type based only on the keywords would be incorrect. Hence we need a
+  separate object denoting each keyword.
+
+  So during create_object_and_handle_keyword() we create the appropriate
+  objects based on the keywords, and then validate each of them individually
+  on the JSON document by calling its respective validate() function if the
+  type matches. If any of them fails, return false; else return true.
+*/
+bool Item_func_json_schema_valid::fix_length_and_dec(THD *thd)
+{
+ json_engine_t je;
+ bool res= 0;
+
+ String *js= args[0]->val_json(&tmp_js);
+
+ if ((null_value= args[0]->null_value))
+ return 0;
+ json_scan_start(&je, js->charset(), (const uchar *) js->ptr(),
+ (const uchar *) js->ptr() + js->length());
+ if (!create_object_and_handle_keyword(thd, &je, &keyword_list,
+ &all_keywords))
+ schema_parsed= true;
+ else
+ res= true;
+
+ if (je.s.error)
+ report_json_error(js, &je, 1);
+
+ return res || Item_bool_func::fix_length_and_dec(thd);
+}
+
+void Item_func_json_schema_valid::cleanup()
+{
+ DBUG_ENTER("Item_func_json_schema_valid::cleanup");
+ Item_bool_func::cleanup();
+
+ List_iterator<Json_schema_keyword> it2(all_keywords);
+ Json_schema_keyword *curr_schema;
+ while ((curr_schema= it2++))
+ {
+ delete curr_schema;
+ curr_schema= nullptr;
+ }
+ all_keywords.empty();
+ keyword_list.empty();
+
+ DBUG_VOID_RETURN;
+}
diff --git a/sql/item_jsonfunc.h b/sql/item_jsonfunc.h
index 82efddb5a5a..4e857432b04 100644
--- a/sql/item_jsonfunc.h
+++ b/sql/item_jsonfunc.h
@@ -25,6 +25,7 @@
#include "item_strfunc.h" // Item_str_func
#include "item_sum.h"
#include "sql_type_json.h"
+#include "json_schema.h"
class json_path_with_flags
{
@@ -779,7 +780,8 @@ class Item_func_json_overlaps: public Item_bool_func
String tmp_val, *val;
public:
Item_func_json_overlaps(THD *thd, Item *a, Item *b):
- Item_bool_func(thd, a, b) {}
+ Item_bool_func(thd, a, b)
+ {}
LEX_CSTRING func_name_cstring() const override
{
static LEX_CSTRING name= {STRING_WITH_LEN("json_overlaps") };
@@ -791,4 +793,31 @@ public:
{ return get_item_copy<Item_func_json_overlaps>(thd, this); }
};
+class Item_func_json_schema_valid: public Item_bool_func
+{
+ String tmp_js;
+ bool schema_parsed;
+ String tmp_val, *val;
+ List<Json_schema_keyword> keyword_list;
+ List<Json_schema_keyword> all_keywords;
+
+public:
+ Item_func_json_schema_valid(THD *thd, Item *a, Item *b):
+ Item_bool_func(thd, a, b)
+ {
+ val= NULL;
+ schema_parsed= false;
+ }
+ LEX_CSTRING func_name_cstring() const override
+ {
+ static LEX_CSTRING name= {STRING_WITH_LEN("json_schema_valid") };
+ return name;
+ }
+ bool fix_length_and_dec(THD *thd) override;
+ longlong val_int() override;
+ Item *get_copy(THD *thd) override
+ { return get_item_copy<Item_func_json_schema_valid>(thd, this); }
+ void cleanup() override;
+};
+
#endif /* ITEM_JSONFUNC_INCLUDED */
diff --git a/sql/json_schema.cc b/sql/json_schema.cc
new file mode 100644
index 00000000000..b4b2e24d644
--- /dev/null
+++ b/sql/json_schema.cc
@@ -0,0 +1,2819 @@
+/* Copyright (c) 2016, 2022, MariaDB Corporation.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; version 2 of the License.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */
+
+
+#include "mariadb.h"
+#include "sql_class.h"
+#include "sql_parse.h" // For check_stack_overrun
+#include <m_string.h>
+#include "json_schema.h"
+#include "json_schema_helper.h"
+
+static HASH all_keywords_hash;
+
+static Json_schema_keyword *create_json_schema_keyword(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_keyword();
+}
+static Json_schema_keyword *create_json_schema_type(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_type();
+}
+static Json_schema_keyword *create_json_schema_enum(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_enum();
+}
+static Json_schema_keyword *create_json_schema_const(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_const();
+}
+static Json_schema_keyword *create_json_schema_maximum(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_maximum();
+}
+static Json_schema_keyword *create_json_schema_minimum(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_minimum();
+}
+static Json_schema_keyword *create_json_schema_ex_max(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_ex_maximum();
+}
+static Json_schema_keyword *create_json_schema_ex_min(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_ex_minimum();
+}
+static Json_schema_keyword *create_json_schema_multiple_of(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_multiple_of();
+}
+static Json_schema_keyword *create_json_schema_max_len(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_max_len();
+}
+static Json_schema_keyword *create_json_schema_min_len(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_min_len();
+}
+static Json_schema_keyword *create_json_schema_pattern(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_pattern();
+}
+static Json_schema_keyword *create_json_schema_items(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_items();
+}
+static Json_schema_keyword *create_json_schema_max_items(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_max_items();
+}
+static Json_schema_keyword *create_json_schema_min_items(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_min_items();
+}
+static Json_schema_keyword *create_json_schema_prefix_items(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_prefix_items();
+}
+static Json_schema_keyword *create_json_schema_contains(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_contains();
+}
+static Json_schema_keyword *create_json_schema_max_contains(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_max_contains();
+}
+static Json_schema_keyword *create_json_schema_min_contains(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_min_contains();
+}
+static Json_schema_keyword *create_json_schema_unique_items(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_unique_items();
+}
+static Json_schema_keyword *create_json_schema_additional_items(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_additional_items();
+}
+static Json_schema_keyword *create_json_schema_unevaluated_items(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_unevaluated_items();
+}
+static Json_schema_keyword *create_json_schema_properties(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_properties();
+}
+static Json_schema_keyword *create_json_schema_pattern_properties(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_pattern_properties();
+}
+static Json_schema_keyword *create_json_schema_additional_properties(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_additional_properties();
+}
+static Json_schema_keyword *create_json_schema_unevaluated_properties(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_unevaluated_properties();
+}
+static Json_schema_keyword *create_json_schema_property_names(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_property_names();
+}
+static Json_schema_keyword *create_json_schema_max_prop(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_max_prop();
+}
+static Json_schema_keyword *create_json_schema_min_prop(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_min_prop();
+}
+static Json_schema_keyword *create_json_schema_required(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_required();
+}
+static Json_schema_keyword *create_json_schema_dependent_required(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_dependent_required();
+}
+static Json_schema_keyword *create_json_schema_dependent_schemas(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_dependent_schemas();
+}
+static Json_schema_keyword *create_json_schema_not(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_not();
+}
+static Json_schema_keyword *create_json_schema_all_of(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_all_of();
+}
+static Json_schema_keyword *create_json_schema_any_of(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_any_of();
+}
+static Json_schema_keyword *create_json_schema_one_of(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_one_of();
+}
+static Json_schema_keyword *create_json_schema_if(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_if();
+}
+static Json_schema_keyword *create_json_schema_then(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_then();
+}
+static Json_schema_keyword *create_json_schema_else(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_else();
+}
+static Json_schema_keyword *create_json_schema_annotation(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_annotation();
+}
+static Json_schema_keyword *create_json_schema_format(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_format();
+}
+static Json_schema_keyword *create_json_schema_media_string(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_media_string();
+}
+static Json_schema_keyword *create_json_schema_reference(THD *thd)
+{
+ return new (thd->mem_root) Json_schema_reference();
+}
+
+static st_json_schema_keyword_map json_schema_func_array[]=
+{
+ {{ STRING_WITH_LEN("type") }, create_json_schema_type, JSON_SCHEMA_COMMON_KEYWORD},
+ {{ STRING_WITH_LEN("const") }, create_json_schema_const, JSON_SCHEMA_COMMON_KEYWORD},
+ {{ STRING_WITH_LEN("enum") }, create_json_schema_enum, JSON_SCHEMA_COMMON_KEYWORD},
+
+ {{ STRING_WITH_LEN("maximum") }, create_json_schema_maximum, JSON_SCHEMA_NUMBER_KEYWORD},
+ {{ STRING_WITH_LEN("minimum") }, create_json_schema_minimum, JSON_SCHEMA_NUMBER_KEYWORD},
+ {{ STRING_WITH_LEN("exclusiveMaximum") }, create_json_schema_ex_max, JSON_SCHEMA_NUMBER_KEYWORD},
+ {{ STRING_WITH_LEN("exclusiveMinimum") }, create_json_schema_ex_min, JSON_SCHEMA_NUMBER_KEYWORD},
+ {{ STRING_WITH_LEN("multipleOf") }, create_json_schema_multiple_of, JSON_SCHEMA_NUMBER_KEYWORD},
+
+ {{ STRING_WITH_LEN("maxLength") }, create_json_schema_max_len, JSON_SCHEMA_STRING_KEYWORD},
+ {{ STRING_WITH_LEN("minLength") }, create_json_schema_min_len, JSON_SCHEMA_STRING_KEYWORD},
+ {{ STRING_WITH_LEN("pattern") }, create_json_schema_pattern, JSON_SCHEMA_STRING_KEYWORD},
+
+ {{ STRING_WITH_LEN("items") }, create_json_schema_items, JSON_SCHEMA_ARRAY_KEYWORD},
+ {{ STRING_WITH_LEN("maxItems") }, create_json_schema_max_items, JSON_SCHEMA_ARRAY_KEYWORD},
+ {{ STRING_WITH_LEN("minItems") }, create_json_schema_min_items, JSON_SCHEMA_ARRAY_KEYWORD},
+ {{ STRING_WITH_LEN("additionalItems") }, create_json_schema_additional_items, JSON_SCHEMA_ARRAY_KEYWORD},
+ {{ STRING_WITH_LEN("unevaluatedItems") }, create_json_schema_unevaluated_items, JSON_SCHEMA_ARRAY_KEYWORD},
+ {{ STRING_WITH_LEN("prefixItems") }, create_json_schema_prefix_items, JSON_SCHEMA_ARRAY_KEYWORD},
+ {{ STRING_WITH_LEN("uniqueItems") }, create_json_schema_unique_items, JSON_SCHEMA_ARRAY_KEYWORD},
+ {{ STRING_WITH_LEN("contains") }, create_json_schema_contains, JSON_SCHEMA_ARRAY_KEYWORD},
+ {{ STRING_WITH_LEN("maxContains") }, create_json_schema_max_contains, JSON_SCHEMA_ARRAY_KEYWORD},
+ {{ STRING_WITH_LEN("minContains") }, create_json_schema_min_contains, JSON_SCHEMA_ARRAY_KEYWORD},
+
+ {{ STRING_WITH_LEN("properties") }, create_json_schema_properties, JSON_SCHEMA_OBJECT_KEYWORD},
+ {{ STRING_WITH_LEN("patternProperties") }, create_json_schema_pattern_properties, JSON_SCHEMA_OBJECT_KEYWORD},
+ {{ STRING_WITH_LEN("propertyNames") }, create_json_schema_property_names, JSON_SCHEMA_OBJECT_KEYWORD},
+ {{ STRING_WITH_LEN("maxProperties") }, create_json_schema_max_prop, JSON_SCHEMA_OBJECT_KEYWORD},
+ {{ STRING_WITH_LEN("minProperties") }, create_json_schema_min_prop, JSON_SCHEMA_OBJECT_KEYWORD},
+ {{ STRING_WITH_LEN("dependentRequired") }, create_json_schema_dependent_required, JSON_SCHEMA_OBJECT_KEYWORD},
+ {{ STRING_WITH_LEN("dependentSchemas") }, create_json_schema_dependent_schemas, JSON_SCHEMA_OBJECT_KEYWORD},
+ {{ STRING_WITH_LEN("required") }, create_json_schema_required, JSON_SCHEMA_OBJECT_KEYWORD},
+ {{ STRING_WITH_LEN("additionalProperties") }, create_json_schema_additional_properties, JSON_SCHEMA_OBJECT_KEYWORD},
+ {{ STRING_WITH_LEN("unevaluatedProperties") }, create_json_schema_unevaluated_properties, JSON_SCHEMA_OBJECT_KEYWORD},
+
+ {{ STRING_WITH_LEN("not") }, create_json_schema_not, JSON_SCHEMA_LOGIC_KEYWORD},
+ {{ STRING_WITH_LEN("allOf") }, create_json_schema_all_of, JSON_SCHEMA_LOGIC_KEYWORD},
+ {{ STRING_WITH_LEN("anyOf") }, create_json_schema_any_of, JSON_SCHEMA_LOGIC_KEYWORD},
+ {{ STRING_WITH_LEN("oneOf") }, create_json_schema_one_of, JSON_SCHEMA_LOGIC_KEYWORD},
+
+ {{ STRING_WITH_LEN("if") }, create_json_schema_if, JSON_SCHEMA_CONDITION_KEYWORD},
+ {{ STRING_WITH_LEN("then") }, create_json_schema_then, JSON_SCHEMA_CONDITION_KEYWORD},
+ {{ STRING_WITH_LEN("else") }, create_json_schema_else, JSON_SCHEMA_CONDITION_KEYWORD},
+
+ {{ STRING_WITH_LEN("title") }, create_json_schema_annotation, JSON_SCHEMA_ANNOTATION_KEYWORD},
+ {{ STRING_WITH_LEN("description") }, create_json_schema_annotation, JSON_SCHEMA_ANNOTATION_KEYWORD},
+ {{ STRING_WITH_LEN("comment") }, create_json_schema_annotation, JSON_SCHEMA_ANNOTATION_KEYWORD},
+ {{ STRING_WITH_LEN("$schema") }, create_json_schema_annotation, JSON_SCHEMA_ANNOTATION_KEYWORD},
+ {{ STRING_WITH_LEN("deprecated") }, create_json_schema_annotation, JSON_SCHEMA_ANNOTATION_KEYWORD},
+ {{ STRING_WITH_LEN("readOnly") }, create_json_schema_annotation, JSON_SCHEMA_ANNOTATION_KEYWORD},
+ {{ STRING_WITH_LEN("writeOnly") }, create_json_schema_annotation, JSON_SCHEMA_ANNOTATION_KEYWORD},
+ {{ STRING_WITH_LEN("example") }, create_json_schema_annotation, JSON_SCHEMA_ANNOTATION_KEYWORD},
+ {{ STRING_WITH_LEN("default") }, create_json_schema_annotation, JSON_SCHEMA_ANNOTATION_KEYWORD},
+ {{ STRING_WITH_LEN("$vocabulary") }, create_json_schema_annotation, JSON_SCHEMA_ANNOTATION_KEYWORD},
+
+ {{ STRING_WITH_LEN("date-time") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("date") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("time") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("duration") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("email") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("idn-email") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("hostname") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("idn-hostname") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("ipv4") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("ipv6") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("uri") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("uri-reference") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("iri") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("iri-reference") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("uuid") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("json-pointer") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("relative-json-pointer") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+ {{ STRING_WITH_LEN("regex") }, create_json_schema_format, JSON_SCHEMA_FORMAT_KEYWORD},
+
+ {{ STRING_WITH_LEN("contentMediaType") }, create_json_schema_media_string, JSON_SCHEMA_MEDIA_KEYWORD},
+ {{ STRING_WITH_LEN("conentEncoding") }, create_json_schema_media_string, JSON_SCHEMA_MEDIA_KEYWORD},
+ {{ STRING_WITH_LEN("contentSchema") }, create_json_schema_media_string, JSON_SCHEMA_MEDIA_KEYWORD},
+
+ {{ STRING_WITH_LEN("$ref") }, create_json_schema_reference, JSON_SCHEMA_REFERENCE_KEYWORD},
+ {{ STRING_WITH_LEN("$id") }, create_json_schema_reference, JSON_SCHEMA_REFERENCE_KEYWORD},
+ {{ STRING_WITH_LEN("$anchor") }, create_json_schema_reference, JSON_SCHEMA_REFERENCE_KEYWORD},
+ {{ STRING_WITH_LEN("$defs") }, create_json_schema_reference, JSON_SCHEMA_REFERENCE_KEYWORD},
+ {{ STRING_WITH_LEN("$dynamicRef") }, create_json_schema_reference, JSON_SCHEMA_REFERENCE_KEYWORD},
+ {{ STRING_WITH_LEN("$dynamicAnchor") }, create_json_schema_reference, JSON_SCHEMA_REFERENCE_KEYWORD},
+};
+
+/*
+  Catch-all map entry: an empty keyword name bound to the generic
+  create_json_schema_keyword factory. Presumably used for schema keywords
+  that have no dedicated entry in json_schema_func_array -- confirm at the
+  lookup site (outside this hunk).
+*/
+static st_json_schema_keyword_map empty_func_map=
+  {{ STRING_WITH_LEN("") }, create_json_schema_keyword, JSON_SCHEMA_EMPTY_KEYWORD};
+
+/*
+  When some schemas don't validate, we want to check the annotation
+  for an alternate schema. For example, when we have "properties" and
+  "patternProperties", if "properties" does not validate for a certain
+  keyname, then we want to check if it validates for "patternProperties".
+  In this case "patternProperties" will be the alternate schema for
+  "properties".
+*/
+bool Json_schema_keyword::fall_back_on_alternate_schema(const json_engine_t
+                                                             *je,
+                                                        const uchar* k_start,
+                                                        const uchar* k_end)
+{
+  /* No alternate schema at all: nothing to fall back on, so no failure. */
+  if (!alternate_schema)
+    return false;
+
+  /* An alternate schema that is not allowed (e.g. "false") always fails. */
+  if (!alternate_schema->allowed)
+    return true;
+
+  /* Otherwise the result is whatever the alternate validation says. */
+  return alternate_schema->validate_as_alternate(je, k_start, k_end);
+}
+
+/*
+  Handle purely informational annotation keywords (title, description,
+  comment, $schema, deprecated, readOnly, writeOnly, example, default,
+  $vocabulary). Annotations take no part in validation; this only checks
+  that the annotation's value has the expected JSON type.
+  Returns true (and raises ER_JSON_INVALID_VALUE_FOR_KEYWORD) on an
+  invalid value type, false otherwise.
+  NOTE(review): the indices 38..46 into json_schema_func_array are
+  presumably the annotation entries (four string-valued, three boolean,
+  "example" as array, "default" as anything) -- confirm against the array,
+  whose start is outside this hunk.
+*/
+bool Json_schema_annotation::handle_keyword(THD *thd, json_engine_t *je,
+                                            const char* key_start,
+                                            const char* key_end,
+                                            List<Json_schema_keyword>
+                                                 *all_keywords)
+{
+  bool is_invalid_value_type= false, res= false;
+
+  /* String-valued annotations. */
+  if (this->keyword_map == &(json_schema_func_array[38]) ||
+      this->keyword_map == &(json_schema_func_array[39]) ||
+      this->keyword_map == &(json_schema_func_array[40]) ||
+      this->keyword_map == &(json_schema_func_array[41]))
+  {
+    if (je->value_type != JSON_VALUE_STRING)
+      is_invalid_value_type= true;
+  }
+  /* Boolean-valued annotations. */
+  else if (this->keyword_map == &(json_schema_func_array[42]) ||
+           this->keyword_map == &(json_schema_func_array[43]) ||
+           this->keyword_map == &(json_schema_func_array[44]))
+  {
+    if (je->value_type != JSON_VALUE_TRUE &&
+        je->value_type != JSON_VALUE_FALSE)
+      is_invalid_value_type= true;
+  }
+  /* Array-valued annotation; its contents are skipped, not parsed. */
+  else if (this->keyword_map == &(json_schema_func_array[45]))
+  {
+    if (je->value_type != JSON_VALUE_ARRAY)
+      is_invalid_value_type= true;
+    if (json_skip_level(je))
+      return true;
+  }
+  /* This annotation accepts a value of any type. */
+  else if (this->keyword_map == &(json_schema_func_array[46]))
+    return false;
+
+  if (is_invalid_value_type)
+  {
+    res= true;
+    String keyword(0);
+    keyword.append((const char*)key_start, (int)(key_end-key_start));
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), keyword.ptr());
+  }
+  return res;
+}
+
+/*
+  Handle the "format" keyword. Format is treated as an annotation here:
+  the value must be a string, but no semantic format validation is done.
+  NOTE(review): on a non-string value my_error() is raised yet the function
+  still returns false (success); every other handler returns true after
+  my_error() -- confirm whether this asymmetry is intentional.
+*/
+bool Json_schema_format::handle_keyword(THD *thd, json_engine_t *je,
+                                        const char* key_start,
+                                        const char* key_end,
+                                        List<Json_schema_keyword>
+                                             *all_keywords)
+{
+  if (je->value_type != JSON_VALUE_STRING)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "format");
+  }
+  return false;
+}
+
+bool Json_schema_type::validate(const json_engine_t *je,
+                                const uchar *k_start,
+                                const uchar* k_end)
+{
+  /*
+    "type" keeps one bit per allowed json_value_types value.
+    Validation passes (returns false) when the bit for the current
+    value's type is set.
+  */
+  uint value_type_bit= 1 << je->value_type;
+  return (type & value_type_bit) == 0;
+}
+
+/*
+  Handle "type": accumulate the allowed JSON value types as a bitmap in
+  this->type (one bit per json_value_types value, set by
+  json_assign_type()). Accepts a single string or an array of strings.
+  Returns true on error.
+  NOTE(review): in the array branch the return value of json_assign_type()
+  is ignored, so an invalid array element is silently accepted -- confirm.
+*/
+bool Json_schema_type::handle_keyword(THD *thd, json_engine_t *je,
+                                      const char* key_start,
+                                      const char* key_end,
+                                      List<Json_schema_keyword>
+                                           *all_keywords)
+{
+  if (je->value_type == JSON_VALUE_ARRAY)
+  {
+    int level= je->stack_p;
+    while (json_scan_next(je)==0 && je->stack_p >= level)
+    {
+      if (json_read_value(je))
+        return true;
+      json_assign_type(&type, je);
+    }
+    return false;
+  }
+  else if (je->value_type == JSON_VALUE_STRING)
+  {
+    return json_assign_type(&type, je);
+  }
+  else
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "type");
+    return true;
+  }
+}
+
+/*
+  Validate "const": the value being validated must equal the stored
+  constant. The comparison is done on the normalized string form produced
+  by json_get_normalized_string() (strings are compared as-is).
+  Returns true when validation fails.
+*/
+bool Json_schema_const::validate(const json_engine_t *je,
+                                 const uchar *k_start,
+                                 const uchar* k_end)
+{
+  json_engine_t curr_je;
+  curr_je= *je;
+  const char *start= (char*)curr_je.value;
+  const char *end= (char*)curr_je.value+curr_je.value_len;
+  json_engine_t temp_je= *je;
+  json_engine_t temp_je_2;
+  String a_res("", 0, curr_je.s.cs);
+  int err;
+
+  /* "const" is type-sensitive: a type mismatch fails immediately. */
+  if (type != curr_je.value_type)
+    return true;
+
+  if (curr_je.value_type <= JSON_VALUE_NUMBER)
+  {
+    /* For a non-scalar, extend "end" to cover the whole nested value. */
+    if (!json_value_scalar(&temp_je))
+    {
+      if (json_skip_level(&temp_je))
+      {
+        curr_je= temp_je;
+        return true;
+      }
+      end= (char*)temp_je.s.c_str;
+    }
+    String val((char*)temp_je.value, end-start, temp_je.s.cs);
+
+    /* Re-scan the raw bytes so they can be normalized independently. */
+    json_scan_start(&temp_je_2, temp_je.s.cs, (const uchar *) val.ptr(),
+                    (const uchar *) val.end());
+
+    if (temp_je.value_type != JSON_VALUE_STRING)
+    {
+      if (json_read_value(&temp_je_2))
+      {
+        curr_je= temp_je;
+        return true;
+      }
+      json_get_normalized_string(&temp_je_2, &a_res, &err);
+      if (err)
+        return true;
+    }
+    else
+      a_res.append(val.ptr(), val.length(), temp_je.s.cs);
+
+    /* Compare normalized value with the stored normalized constant. */
+    if (a_res.length() == strlen(const_json_value) &&
+        !strncmp((const char*)const_json_value, a_res.ptr(),
+                 a_res.length()))
+      return false;
+    return true;
+  }
+  /* true/false/null: the type equality check above was sufficient. */
+  return false;
+}
+
+/*
+  Handle "const": record the keyword's JSON type and store its value in
+  normalized string form (const_json_value, NUL-terminated, allocated on
+  thd->mem_root) for later comparison in validate().
+  Returns true on error.
+*/
+bool Json_schema_const::handle_keyword(THD *thd, json_engine_t *je,
+                                       const char* key_start,
+                                       const char* key_end,
+                                       List<Json_schema_keyword>
+                                            *all_keywords)
+{
+  const char *start= (char*)je->value, *end= (char*)je->value+je->value_len;
+  json_engine_t temp_je;
+  String a_res("", 0, je->s.cs);
+  int err;
+
+  type= je->value_type;
+
+  /* For a non-scalar, extend "end" to cover the whole nested value. */
+  if (!json_value_scalar(je))
+  {
+    if (json_skip_level(je))
+      return true;
+    end= (char*)je->s.c_str;
+  }
+
+  String val((char*)je->value, end-start, je->s.cs);
+
+  /* Re-scan the raw bytes so they can be normalized independently. */
+  json_scan_start(&temp_je, je->s.cs, (const uchar *) val.ptr(),
+                  (const uchar *) val.end());
+  if (je->value_type != JSON_VALUE_STRING)
+  {
+    if (json_read_value(&temp_je))
+      return true;
+    json_get_normalized_string(&temp_je, &a_res, &err);
+    if (err)
+      return true;
+  }
+  else
+    a_res.append(val.ptr(), val.length(), je->s.cs);
+
+  /* Keep a NUL-terminated private copy with schema lifetime. */
+  this->const_json_value= (char*)alloc_root(thd->mem_root,
+                                            a_res.length()+1);
+  if (!const_json_value)
+    return true;
+
+  const_json_value[a_res.length()]= '\0';
+  strncpy(const_json_value, (const char*)a_res.ptr(), a_res.length());
+
+  return false;
+}
+
+/*
+  Validate "enum": scalars true/false/null are checked against the
+  enum_scalar bitmap; all other values are normalized and looked up in the
+  enum_values hash. Returns true when the value is not a member of the
+  enumeration (or normalization fails).
+*/
+bool Json_schema_enum::validate(const json_engine_t *je,
+                                const uchar *k_start,
+                                const uchar* k_end)
+{
+  json_engine_t temp_je;
+  temp_je= *je;
+
+  String norm_str((char*)"",0, je->s.cs);
+
+  String a_res("", 0, je->s.cs);
+  int err= 1;
+
+  /* true/false/null were recorded as bits, not hash entries. */
+  if (temp_je.value_type > JSON_VALUE_NUMBER)
+  {
+    if (temp_je.value_type == JSON_VALUE_TRUE)
+      return !(enum_scalar & HAS_TRUE_VAL);
+    if (temp_je.value_type == JSON_VALUE_FALSE)
+      return !(enum_scalar & HAS_FALSE_VAL);
+    if (temp_je.value_type == JSON_VALUE_NULL)
+      return !(enum_scalar & HAS_NULL_VAL);
+  }
+  json_get_normalized_string(&temp_je, &a_res, &err);
+  if (err)
+    return true;
+
+  norm_str.append((const char*)a_res.ptr(), a_res.length(), je->s.cs);
+
+  if (my_hash_search(&this->enum_values, (const uchar*)(norm_str.ptr()),
+                     strlen((const char*)(norm_str.ptr()))))
+    return false;
+  else
+    return true;
+}
+
+/*
+  Handle "enum": the value must be an array. Elements true/false/null are
+  recorded in the enum_scalar bitmap; every other element is stored in the
+  enum_values hash in normalized string form (copies allocated on
+  thd->mem_root). Returns true on error.
+*/
+bool Json_schema_enum::handle_keyword(THD *thd, json_engine_t *je,
+                                      const char* key_start,
+                                      const char* key_end,
+                                      List<Json_schema_keyword>
+                                           *all_keywords)
+{
+  if (my_hash_init(PSI_INSTRUMENT_ME,
+                   &this->enum_values,
+                   je->s.cs, 1024, 0, 0, (my_hash_get_key) get_key_name,
+                   NULL, 0))
+    return true;
+
+  if (je->value_type == JSON_VALUE_ARRAY)
+  {
+    int curr_level= je->stack_p;
+    while(json_scan_next(je) == 0 && curr_level <= je->stack_p)
+    {
+      if (json_read_value(je))
+        return true;
+      if (je->value_type > JSON_VALUE_NUMBER)
+      {
+        /* Literal scalars need no normalization: flag them in a bitmap. */
+        if (je->value_type == JSON_VALUE_TRUE)
+          enum_scalar|= HAS_TRUE_VAL;
+        else if (je->value_type == JSON_VALUE_FALSE)
+          enum_scalar|= HAS_FALSE_VAL;
+        else if (je->value_type == JSON_VALUE_NULL)
+          enum_scalar|= HAS_NULL_VAL;
+      }
+      else
+      {
+        char *norm_str;
+        int err= 1;
+        String a_res("", 0, je->s.cs);
+
+        json_get_normalized_string(je, &a_res, &err);
+        if (err)
+          return true;
+
+        /* NUL-terminated copy with schema lifetime for the hash. */
+        norm_str= (char*)alloc_root(thd->mem_root,
+                                    a_res.length()+1);
+        if (!norm_str)
+          return true;
+        else
+        {
+          norm_str[a_res.length()]= '\0';
+          strncpy(norm_str, (const char*)a_res.ptr(), a_res.length());
+          if (my_hash_insert(&this->enum_values, (uchar*)norm_str))
+            return true;
+        }
+      }
+    }
+    return false;
+  }
+  else
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "enum");
+    return true;
+  }
+}
+
+bool Json_schema_maximum::validate(const json_engine_t *je,
+                                   const uchar *k_start,
+                                   const uchar* k_end)
+{
+  char *num_end;
+  int conv_err= 0;
+
+  /* "maximum" only constrains numbers; any other type passes. */
+  if (je->value_type != JSON_VALUE_NUMBER)
+    return false;
+
+  double num= je->s.cs->strntod((char *) je->value, je->value_len,
+                                &num_end, &conv_err);
+  /* Fail (true) only when the number exceeds the stored bound. */
+  return num > value;
+}
+
+bool Json_schema_maximum::handle_keyword(THD *thd, json_engine_t *je,
+                                         const char* key_start,
+                                         const char* key_end,
+                                         List<Json_schema_keyword>
+                                              *all_keywords)
+{
+  char *num_end;
+  int conv_err= 0;
+
+  /* The bound itself must be a number. */
+  if (je->value_type != JSON_VALUE_NUMBER)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "maximum");
+    return true;
+  }
+
+  value= je->s.cs->strntod((char *) je->value, je->value_len,
+                           &num_end, &conv_err);
+  return false;
+}
+
+bool Json_schema_minimum::validate(const json_engine_t *je,
+                                   const uchar *k_start,
+                                   const uchar* k_end)
+{
+  char *num_end;
+  int conv_err= 0;
+
+  /* "minimum" only constrains numbers; any other type passes. */
+  if (je->value_type != JSON_VALUE_NUMBER)
+    return false;
+
+  double num= je->s.cs->strntod((char *) je->value, je->value_len,
+                                &num_end, &conv_err);
+  /* Fail (true) only when the number is below the stored bound. */
+  return num < value;
+}
+
+bool Json_schema_minimum::handle_keyword(THD *thd, json_engine_t *je,
+                                         const char* key_start,
+                                         const char* key_end,
+                                         List<Json_schema_keyword>
+                                              *all_keywords)
+{
+  char *num_end;
+  int conv_err= 0;
+
+  /* The bound itself must be a number. */
+  if (je->value_type != JSON_VALUE_NUMBER)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "minimum");
+    return true;
+  }
+
+  value= je->s.cs->strntod((char *) je->value, je->value_len,
+                           &num_end, &conv_err);
+  return false;
+}
+
+bool Json_schema_ex_minimum::validate(const json_engine_t *je,
+                                      const uchar *k_start,
+                                      const uchar* k_end)
+{
+  char *num_end;
+  int conv_err= 0;
+
+  /* "exclusiveMinimum" only constrains numbers; other types pass. */
+  if (je->value_type != JSON_VALUE_NUMBER)
+    return false;
+
+  double num= je->s.cs->strntod((char *) je->value, je->value_len,
+                                &num_end, &conv_err);
+  /* Exclusive bound: the number must be strictly greater to pass. */
+  return num <= value;
+}
+
+bool Json_schema_ex_minimum::handle_keyword(THD *thd, json_engine_t *je,
+                                            const char* key_start,
+                                            const char* key_end,
+                                            List<Json_schema_keyword>
+                                                 *all_keywords)
+{
+  char *num_end;
+  int conv_err= 0;
+
+  /* The bound itself must be a number. */
+  if (je->value_type != JSON_VALUE_NUMBER)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "exclusiveMinimum");
+    return true;
+  }
+
+  value= je->s.cs->strntod((char *) je->value, je->value_len,
+                           &num_end, &conv_err);
+  return false;
+}
+
+bool Json_schema_ex_maximum::validate(const json_engine_t *je,
+                                      const uchar *k_start,
+                                      const uchar* k_end)
+{
+  char *num_end;
+  int conv_err= 0;
+
+  /* "exclusiveMaximum" only constrains numbers; other types pass. */
+  if (je->value_type != JSON_VALUE_NUMBER)
+    return false;
+
+  double num= je->s.cs->strntod((char *) je->value, je->value_len,
+                                &num_end, &conv_err);
+  /* Exclusive bound: the number must be strictly smaller to pass. */
+  return num >= value;
+}
+
+bool Json_schema_ex_maximum::handle_keyword(THD *thd, json_engine_t *je,
+                                            const char* key_start,
+                                            const char* key_end,
+                                            List<Json_schema_keyword>
+                                                 *all_keywords)
+{
+  char *num_end;
+  int conv_err= 0;
+
+  /* The bound itself must be a number. */
+  if (je->value_type != JSON_VALUE_NUMBER)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "exclusiveMaximum");
+    return true;
+  }
+
+  value= je->s.cs->strntod((char *) je->value, je->value_len,
+                           &num_end, &conv_err);
+  return false;
+}
+
+/*
+  Validate "multipleOf": a numeric value must be an integer multiple of
+  this->value; non-numbers pass. Returns true on failure.
+  NOTE(review): assumes value > 0. A zero divisor would yield inf/nan and
+  the fractional check would fail -- confirm handle_keyword() rejects
+  non-positive bounds.
+*/
+bool Json_schema_multiple_of::validate(const json_engine_t *je,
+                                       const uchar *k_start,
+                                       const uchar* k_end)
+{
+  int err= 0;
+  char *end;
+
+  if (je->value_type != JSON_VALUE_NUMBER)
+    return false;
+
+  double val= je->s.cs->strntod((char *) je->value,
+                                je->value_len, &end, &err);
+  /* Integer multiple <=> the quotient has no fractional part. */
+  double temp= val / this->value;
+  bool res= (temp - (long long int)temp) == 0;
+
+  return !res;
+}
+
+/*
+  Handle "multipleOf": the value must be a number strictly greater than 0
+  (JSON Schema validation spec, "multipleOf"). Stores the divisor in
+  this->value. Returns true on error.
+*/
+bool Json_schema_multiple_of::handle_keyword(THD *thd, json_engine_t *je,
+                                             const char* key_start,
+                                             const char* key_end,
+                                             List<Json_schema_keyword>
+                                                  *all_keywords)
+{
+  int err= 0;
+  char *end;
+
+  if (je->value_type != JSON_VALUE_NUMBER)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "multipleOf");
+    return true;
+  }
+
+  double val= je->s.cs->strntod((char *) je->value,
+                                je->value_len, &end, &err);
+  /*
+    Bug fix: the error used to be raised without returning true, so an
+    invalid bound was reported yet still accepted. Also reject 0, which
+    would make validate() divide by zero (the spec requires > 0).
+  */
+  if (val <= 0)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "multipleOf");
+    return true;
+  }
+  value= val;
+
+  return false;
+}
+
+
+bool Json_schema_max_len::validate(const json_engine_t *je,
+                                   const uchar *k_start,
+                                   const uchar* k_end)
+{
+  /* Only strings are constrained by "maxLength". */
+  if (je->value_type != JSON_VALUE_STRING)
+    return false;
+  /*
+    NOTE(review): compares the raw byte length, not the character count --
+    confirm intent for multi-byte character sets.
+  */
+  return (uint) je->value_len > value;
+}
+
+/*
+  Handle "maxLength": the value must be a non-negative number; it is
+  stored in this->value. Returns true on error.
+*/
+bool Json_schema_max_len::handle_keyword(THD *thd, json_engine_t *je,
+                                         const char* key_start,
+                                         const char* key_end,
+                                         List<Json_schema_keyword>
+                                              *all_keywords)
+{
+  int err= 0;
+  char *end;
+
+  if (je->value_type != JSON_VALUE_NUMBER)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "maxLength");
+    return true;
+  }
+  double val= je->s.cs->strntod((char *) je->value,
+                                je->value_len, &end, &err);
+  if (val < 0)
+  {
+    /*
+      Bug fix: the error used to be raised without returning true, so an
+      invalid negative bound was reported yet still accepted.
+    */
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "maxLength");
+    return true;
+  }
+  value= val;
+
+  return false;
+}
+
+bool Json_schema_min_len::validate(const json_engine_t *je,
+                                   const uchar *k_start,
+                                   const uchar* k_end)
+{
+  /* Only strings are constrained by "minLength". */
+  if (je->value_type != JSON_VALUE_STRING)
+    return false;
+  /*
+    NOTE(review): compares the raw byte length, not the character count --
+    confirm intent for multi-byte character sets.
+  */
+  return (uint) je->value_len < value;
+}
+
+/*
+  Handle "minLength": the value must be a non-negative number; it is
+  stored in this->value. Returns true on error.
+*/
+bool Json_schema_min_len::handle_keyword(THD *thd, json_engine_t *je,
+                                         const char* key_start,
+                                         const char* key_end,
+                                         List<Json_schema_keyword>
+                                              *all_keywords)
+{
+  int err= 0;
+  char *end;
+
+  if (je->value_type != JSON_VALUE_NUMBER)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "minLength");
+    return true;
+  }
+
+  double val= je->s.cs->strntod((char *) je->value,
+                                je->value_len, &end, &err);
+  if (val < 0)
+  {
+    /*
+      Bug fix: the error used to be raised without returning true, so an
+      invalid negative bound was reported yet still accepted.
+    */
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "minLength");
+    return true;
+  }
+  value= val;
+
+  return false;
+}
+
+/*
+  Validate "pattern" (and "propertyNames" via the k_start/k_end form):
+  run the stored regular expression against either the JSON string value
+  or an explicit key name. Returns true when the pattern does not match
+  or the regex fails to compile/execute.
+*/
+bool Json_schema_pattern::validate(const json_engine_t *je,
+                                   const uchar *k_start,
+                                   const uchar* k_end)
+{
+  bool pattern_matches= false;
+
+  /*
+    We want to match a single pattern against multiple
+    strings when (see below):
+    1) with "pattern" when there are different json strings
+       to be validated against one pattern.
+    2) with "propertyNames", where there is one pattern but
+       multiple property names to be validated against one
+       pattern
+  */
+  if (!k_start && !k_end)
+  {
+    /* 1) */
+    if (je->value_type != JSON_VALUE_STRING)
+      return false;
+    str->str_value.set_or_copy_aligned((const char*)je->value,
+                                       (size_t)je->value_len, je->s.cs);
+  }
+  else
+  {
+    /* 2) */
+    str->str_value.set_or_copy_aligned((const char*)k_start,
+                                       (size_t)(k_end-k_start), je->s.cs);
+  }
+  /* Regex compile/exec errors count as validation failure. */
+  if (re.recompile(pattern))
+    return true;
+  if (re.exec(str, 0, 0))
+    return true;
+  pattern_matches= re.match();
+
+  return pattern_matches ? false : true;
+}
+
+/*
+  Handle "pattern": the value must be a string. Stores the regex source as
+  a string literal Item and prepares the regex processor for validate().
+  Returns true on error.
+*/
+bool Json_schema_pattern::handle_keyword(THD *thd, json_engine_t *je,
+                                         const char* key_start,
+                                         const char* key_end,
+                                         List<Json_schema_keyword>
+                                              *all_keywords)
+{
+  if (je->value_type != JSON_VALUE_STRING)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "pattern");
+    return true;
+  }
+
+  my_repertoire_t repertoire= my_charset_repertoire(je->s.cs);
+  pattern= thd->make_string_literal((const char*)je->value,
+                                    je->value_len, repertoire);
+  /*
+    Consistency fix: use the thd passed in rather than re-fetching
+    current_thd (presumably the same session; current_thd is a needless
+    thread-local lookup).
+  */
+  str= (Item_string*)thd->make_string_literal((const char*)"",
+                                              0, repertoire);
+  re.init(je->s.cs, 0);
+
+  return false;
+}
+
+bool Json_schema_max_items::validate(const json_engine_t *je,
+                                     const uchar *k_start,
+                                     const uchar* k_end)
+{
+  json_engine_t scan= *je;
+  uint n_elements= 0;
+
+  /* Only arrays are constrained by "maxItems". */
+  if (scan.value_type != JSON_VALUE_ARRAY)
+    return false;
+
+  int array_level= scan.stack_p;
+  while (json_scan_next(&scan) == 0 && array_level <= scan.stack_p)
+  {
+    if (json_read_value(&scan))
+      return true;
+    n_elements++;
+    /* A nested value counts as one element: skip its contents. */
+    if (!json_value_scalar(&scan) && json_skip_level(&scan))
+      return true;
+  }
+  return n_elements > value;
+}
+
+/*
+  Handle "maxItems": the value must be a non-negative number; it is
+  stored in this->value. Returns true on error.
+*/
+bool Json_schema_max_items::handle_keyword(THD *thd, json_engine_t *je,
+                                           const char* key_start,
+                                           const char* key_end,
+                                           List<Json_schema_keyword>
+                                                *all_keywords)
+{
+  int err= 0;
+  char *end;
+
+  if (je->value_type != JSON_VALUE_NUMBER)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "maxItems");
+    return true;
+  }
+
+  double val= je->s.cs->strntod((char *) je->value,
+                                je->value_len, &end, &err);
+  if (val < 0)
+  {
+    /*
+      Bug fix: the error used to be raised without returning true, so an
+      invalid negative bound was reported yet still accepted.
+    */
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "maxItems");
+    return true;
+  }
+  value= val;
+
+  return false;
+}
+
+bool Json_schema_min_items::validate(const json_engine_t *je,
+                                     const uchar *k_start,
+                                     const uchar* k_end)
+{
+  json_engine_t scan= *je;
+  uint n_elements= 0;
+
+  /* Only arrays are constrained by "minItems". */
+  if (scan.value_type != JSON_VALUE_ARRAY)
+    return false;
+
+  int array_level= scan.stack_p;
+  while (json_scan_next(&scan) == 0 && array_level <= scan.stack_p)
+  {
+    if (json_read_value(&scan))
+      return true;
+    n_elements++;
+    /* A nested value counts as one element: skip its contents. */
+    if (!json_value_scalar(&scan) && json_skip_level(&scan))
+      return true;
+  }
+  return n_elements < value;
+}
+
+/*
+  Handle "minItems": the value must be a non-negative number; it is
+  stored in this->value. Returns true on error.
+*/
+bool Json_schema_min_items::handle_keyword(THD *thd, json_engine_t *je,
+                                           const char* key_start,
+                                           const char* key_end,
+                                           List<Json_schema_keyword>
+                                                *all_keywords)
+{
+  int err= 0;
+  char *end;
+
+  if (je->value_type != JSON_VALUE_NUMBER)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "minItems");
+    return true;
+  }
+
+  double val= je->s.cs->strntod((char *) je->value,
+                                je->value_len, &end, &err);
+  if (val < 0)
+  {
+    /* Bug fix: the error used to name "maxLength" instead of "minItems". */
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "minItems");
+    return true;
+  }
+  value= val;
+
+  return false;
+}
+
+bool Json_schema_max_contains::handle_keyword(THD *thd, json_engine_t *je,
+                                              const char* key_start,
+                                              const char* key_end,
+                                              List<Json_schema_keyword>
+                                                   *all_keywords)
+{
+  char *num_end;
+  int conv_err= 0;
+
+  /* The upper bound for "contains" matches must be a number. */
+  if (je->value_type != JSON_VALUE_NUMBER)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "maxContains");
+    return true;
+  }
+
+  value= je->s.cs->strntod((char *) je->value, je->value_len,
+                           &num_end, &conv_err);
+  return false;
+}
+
+
+bool Json_schema_min_contains::handle_keyword(THD *thd, json_engine_t *je,
+                                              const char* key_start,
+                                              const char* key_end,
+                                              List<Json_schema_keyword>
+                                                   *all_keywords)
+{
+  char *num_end;
+  int conv_err= 0;
+
+  /* The lower bound for "contains" matches must be a number. */
+  if (je->value_type != JSON_VALUE_NUMBER)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "minContains");
+    return true;
+  }
+
+  value= je->s.cs->strntod((char *) je->value, je->value_len,
+                           &num_end, &conv_err);
+  return false;
+}
+
+
+/*
+  Validate "contains" together with "maxContains"/"minContains": count the
+  array elements that validate against the "contains" subschema and check
+  the count against the optional bounds. Non-arrays pass.
+  Returns true when validation fails.
+*/
+bool Json_schema_contains::validate(const json_engine_t *je,
+                                    const uchar *k_start,
+                                    const uchar* k_end)
+{
+  uint contains_count=0;
+  json_engine_t curr_je; curr_je= *je;
+  int level= je->stack_p;
+  bool validated= true;
+
+  if (curr_je.value_type != JSON_VALUE_ARRAY)
+    return false;
+
+  while(json_scan_next(&curr_je)==0 && level <= curr_je.stack_p)
+  {
+    if (json_read_value(&curr_je))
+      return true;
+    validated= true;
+    if (validate_schema_items(&curr_je, &contains))
+      validated= false;
+    /* A nested value counts as one element: skip its contents. */
+    if (!json_value_scalar(&curr_je))
+    {
+      if (json_skip_level(&curr_je))
+        return true;
+    }
+    if (validated)
+      contains_count++;
+  }
+
+  /* Without an explicit bound, at least one element must have matched. */
+  if ((max_contains ? contains_count <= max_contains->value :
+       contains_count>0) &&
+      (min_contains ? contains_count >= min_contains->value :
+       contains_count>0))
+    return false;
+
+  return true;
+}
+
+bool Json_schema_contains::handle_keyword(THD *thd, json_engine_t *je,
+                                          const char* key_start,
+                                          const char* key_end,
+                                          List<Json_schema_keyword>
+                                               *all_keywords)
+{
+  /* "contains" must hold a subschema object; parse it into `contains`. */
+  if (je->value_type == JSON_VALUE_OBJECT)
+    return create_object_and_handle_keyword(thd, je, &contains,
+                                            all_keywords);
+
+  my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "contains");
+  return true;
+}
+
+
+bool Json_schema_items::handle_keyword(THD *thd, json_engine_t *je,
+                                       const char* key_start,
+                                       const char* key_end,
+                                       List<Json_schema_keyword>
+                                            *all_keywords)
+{
+  switch (je->value_type)
+  {
+    case JSON_VALUE_FALSE:
+      /* "items": false -- no (further) items are allowed. */
+      set_allowed(false);
+      return false;
+    case JSON_VALUE_TRUE:
+      /* "items": true -- anything goes; nothing to record. */
+      return false;
+    case JSON_VALUE_OBJECT:
+      /* A subschema: parse it into items_schema. */
+      return create_object_and_handle_keyword(thd, je, &items_schema,
+                                              all_keywords);
+    default:
+      my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "items");
+      return true;
+  }
+}
+
+bool Json_schema_items::validate_as_alternate(const json_engine_t *je,
+                                              const uchar *k_start,
+                                              const uchar* k_end)
+{
+  /*
+    Called for array elements beyond the number of "prefixItems" schemas:
+    validate each remaining element against the "items" subschema.
+  */
+  return validate_schema_items(je, &items_schema);
+}
+
+bool Json_schema_items::validate(const json_engine_t *je,
+                                 const uchar *k_start,
+                                 const uchar* k_end)
+{
+
+  /*
+    There was no "prefixItems", so we validate all values in the
+    array using one schema.
+  */
+  int level= je->stack_p, count=0;
+  bool is_false= false;
+  json_engine_t curr_je= *je;
+
+  if (je->value_type != JSON_VALUE_ARRAY)
+    return false;
+
+  /* "items": false means any element at all is a failure. */
+  if (!allowed)
+    is_false= true;
+
+  while (json_scan_next(&curr_je)==0 && level <= curr_je.stack_p)
+  {
+    if (json_read_value(&curr_je))
+      return true;
+    count++;
+    if (validate_schema_items(&curr_je, &items_schema))
+      return true;
+  }
+
+  /* With "items": false, pass only when the array was empty. */
+  return is_false ? (!count ? false : true) : false;
+}
+
+/*
+  Validate "prefixItems": the i-th array element is validated against the
+  i-th prefix subschema. Elements beyond the number of prefix schemas fall
+  back on the alternate schema (typically "items"), if any. Non-arrays
+  pass. Returns true when validation fails.
+*/
+bool Json_schema_prefix_items::validate(const json_engine_t *je,
+                                        const uchar *k_start,
+                                        const uchar* k_end)
+{
+  int level= je->stack_p;
+  json_engine_t curr_je= *je;
+  List_iterator <List<Json_schema_keyword>> it1 (prefix_items);
+  List<Json_schema_keyword> *curr_prefix;
+
+  if (curr_je.value_type != JSON_VALUE_ARRAY)
+    return false;
+
+  while(curr_je.s.c_str < curr_je.s.str_end && json_scan_next(&curr_je)==0 &&
+        curr_je.stack_p >= level)
+  {
+    if (json_read_value(&curr_je))
+      return true;
+    if (!(curr_prefix=it1++))
+    {
+      /* More array elements than prefix schemas: defer to the alternate. */
+      if (fall_back_on_alternate_schema(&curr_je))
+        return true;
+      else
+      {
+        /* A nested value counts as one element: skip its contents. */
+        if (!json_value_scalar(&curr_je))
+        {
+          if (json_skip_level(&curr_je))
+            return true;
+        }
+      }
+    }
+    else
+    {
+      if (validate_schema_items(&curr_je, &(*curr_prefix)))
+        return true;
+      if (!json_value_scalar(&curr_je))
+      {
+        if (json_skip_level(&curr_je))
+          return true;
+      }
+    }
+  }
+  return false;
+}
+
+/*
+  Handle "prefixItems": the value must be an array; each element is parsed
+  as a standalone subschema (re-scanned from its raw bytes) and appended
+  as a keyword list to prefix_items (memory on thd->mem_root).
+  Returns true on error.
+*/
+bool Json_schema_prefix_items::handle_keyword(THD *thd, json_engine_t *je,
+                                              const char* key_start,
+                                              const char* key_end,
+                                              List<Json_schema_keyword>
+                                                   *all_keywords)
+{
+  if (je->value_type != JSON_VALUE_ARRAY)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "prefixItems");
+    return true;
+  }
+
+  int level= je->stack_p;
+  while(json_scan_next(je)==0 && je->stack_p >= level)
+  {
+    json_engine_t temp_je;
+    char *begin, *end;
+    int len;
+
+    if (json_read_value(je))
+      return true;
+    begin= (char*)je->value;
+
+    /* Skip over the element; [begin, end) now spans its raw bytes. */
+    if (json_skip_level(je))
+      return true;
+
+    end= (char*)je->s.c_str;
+    len= (int)(end-begin);
+
+    json_scan_start(&temp_je, je->s.cs, (const uchar *) begin,
+                    (const uchar *)begin+len);
+    List<Json_schema_keyword> *keyword_list=
+                           new (thd->mem_root) List<Json_schema_keyword>;
+
+    if (!keyword_list)
+      return true;
+    if (create_object_and_handle_keyword(thd, &temp_je, keyword_list,
+                                         all_keywords))
+      return true;
+
+    prefix_items.push_back(keyword_list, thd->mem_root);
+  }
+
+  return false;
+}
+
+/*
+  Validate "uniqueItems": fails (returns true) when the array contains two
+  equal elements. Scalars true/false/null are tracked in a per-type
+  bitmap; all other values are compared via their normalized string form
+  through a hash table. Normalized copies are malloc'ed and freed before
+  returning. Non-arrays pass.
+*/
+bool Json_schema_unique_items::validate(const json_engine_t *je,
+                                        const uchar *k_start,
+                                        const uchar* k_end)
+{
+  HASH unique_items;
+  List <char> norm_str_list;
+  json_engine_t curr_je= *je;
+  int res= true, level= curr_je.stack_p;
+  /*
+    Bug fix: scalar_val must survive across loop iterations. It used to be
+    declared (and reset to 0) inside the loop body, so duplicated
+    true/false/null elements were never detected.
+  */
+  int scalar_val= 0;
+
+  if (curr_je.value_type != JSON_VALUE_ARRAY)
+    return false;
+
+  if (my_hash_init(PSI_INSTRUMENT_ME, &unique_items, curr_je.s.cs,
+                   1024, 0, 0, (my_hash_get_key) get_key_name, NULL, 0))
+    return true;
+
+  while(json_scan_next(&curr_je)==0 && level <= curr_je.stack_p)
+  {
+    int err= 1;
+    char *norm_str;
+    String a_res("", 0, curr_je.s.cs);
+
+    if (json_read_value(&curr_je))
+      goto end;
+
+    json_get_normalized_string(&curr_je, &a_res, &err);
+
+    if (err)
+      goto end;
+
+    norm_str= (char*)malloc(a_res.length()+1);
+    if (!norm_str)
+      goto end;
+
+    norm_str[a_res.length()]= '\0';
+    strncpy(norm_str, (const char*)a_res.ptr(), a_res.length());
+    /* Register for cleanup regardless of how this iteration ends. */
+    norm_str_list.push_back(norm_str);
+
+    if (curr_je.value_type > JSON_VALUE_NUMBER)
+    {
+      /* true/false/null: one bit per type; a repeat is a duplicate. */
+      if (scalar_val & 1 << curr_je.value_type)
+        goto end;
+      scalar_val|= 1 << curr_je.value_type;
+    }
+    else
+    {
+      if (!my_hash_search(&unique_items, (uchar*)norm_str,
+                          strlen(((const char*)norm_str))))
+      {
+        if (my_hash_insert(&unique_items, (uchar*)norm_str))
+          goto end;
+      }
+      else
+        goto end;
+    }
+    a_res.set("", 0, curr_je.s.cs);
+  }
+  res= false;
+  end:
+  if (!norm_str_list.is_empty())
+  {
+    List_iterator<char> it(norm_str_list);
+    char *curr_norm_str;
+    while ((curr_norm_str= it++))
+      free(curr_norm_str);
+    norm_str_list.empty();
+  }
+  my_hash_free(&unique_items);
+  return res;
+}
+
+bool Json_schema_unique_items::handle_keyword(THD *thd, json_engine_t *je,
+                                              const char* key_start,
+                                              const char* key_end,
+                                              List<Json_schema_keyword>
+                                                   *all_keywords)
+{
+  /* "uniqueItems" takes a boolean value only. */
+  switch (je->value_type)
+  {
+    case JSON_VALUE_TRUE:
+      is_unique= true;
+      return false;
+    case JSON_VALUE_FALSE:
+      is_unique= false;
+      return false;
+    default:
+      my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "uniqueItems");
+      return true;
+  }
+}
+
+
+/*
+  Validate "maxProperties": count the object's keys and fail (return true)
+  when there are more than this->value. Non-objects pass.
+*/
+bool Json_schema_max_prop::validate(const json_engine_t *je,
+                                    const uchar *k_start,
+                                    const uchar* k_end)
+{
+  uint properties_count= 0;
+  json_engine_t curr_je= *je;
+  int curr_level= je->stack_p;
+
+  if (curr_je.value_type != JSON_VALUE_OBJECT)
+    return false;
+
+  /*
+    Bug fix: the loop condition must track curr_je.stack_p (the engine
+    being scanned); je->stack_p never changes, so the old condition could
+    let the scan run past the end of this object.
+  */
+  while (json_scan_next(&curr_je)== 0 && curr_je.stack_p >= curr_level)
+  {
+    switch (curr_je.state)
+    {
+      case JST_KEY:
+      {
+        if (json_read_value(&curr_je))
+          return true;
+        properties_count++;
+
+        /* A nested value counts as one property: skip its contents. */
+        if (!json_value_scalar(&curr_je))
+        {
+          if (json_skip_level(&curr_je))
+            return true;
+        }
+      }
+    }
+  }
+  return properties_count > value ? true : false;
+}
+
+bool Json_schema_max_prop::handle_keyword(THD *thd, json_engine_t *je,
+                                          const char* key_start,
+                                          const char* key_end,
+                                          List<Json_schema_keyword>
+                                               *all_keywords)
+{
+  char *num_end;
+  int conv_err= 0;
+
+  /* The bound must be a non-negative number. */
+  if (je->value_type != JSON_VALUE_NUMBER)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "maxProperties");
+    return true;
+  }
+
+  double bound= je->s.cs->strntod((char *) je->value, je->value_len,
+                                  &num_end, &conv_err);
+  if (bound < 0)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "maxProperties");
+    return true;
+  }
+  value= bound;
+
+  return false;
+}
+
+/*
+  Validate "minProperties": count the object's keys and fail (return true)
+  when there are fewer than this->value. Non-objects pass.
+*/
+bool Json_schema_min_prop::validate(const json_engine_t *je,
+                                    const uchar *k_start,
+                                    const uchar* k_end)
+{
+  uint properties_count= 0;
+  int curr_level= je->stack_p;
+  json_engine_t curr_je= *je;
+
+  if (curr_je.value_type != JSON_VALUE_OBJECT)
+    return false;
+
+  /*
+    Bug fix: the loop condition must track curr_je.stack_p (the engine
+    being scanned); je->stack_p never changes, so the old condition could
+    let the scan run past the end of this object.
+  */
+  while (json_scan_next(&curr_je)== 0 && curr_je.stack_p >= curr_level)
+  {
+    switch (curr_je.state)
+    {
+      case JST_KEY:
+      {
+        if (json_read_value(&curr_je))
+          return true;
+        properties_count++;
+
+        /* A nested value counts as one property: skip its contents. */
+        if (!json_value_scalar(&curr_je))
+        {
+          if (json_skip_level(&curr_je))
+            return true;
+        }
+      }
+    }
+  }
+  return properties_count < value ? true : false;
+}
+
+bool Json_schema_min_prop::handle_keyword(THD *thd, json_engine_t *je,
+                                          const char* key_start,
+                                          const char* key_end,
+                                          List<Json_schema_keyword>
+                                               *all_keywords)
+{
+  char *num_end;
+  int conv_err= 0;
+
+  /* The bound must be a non-negative number. */
+  if (je->value_type != JSON_VALUE_NUMBER)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "minProperties");
+    return true;
+  }
+
+  double bound= je->s.cs->strntod((char *) je->value, je->value_len,
+                                  &num_end, &conv_err);
+  if (bound < 0)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "minProperties");
+    return true;
+  }
+  value= bound;
+
+  return false;
+}
+
+/*
+  Validate "required": every property name in required_properties must be
+  present in the object being validated. Non-objects pass. The object's
+  keys are collected into a temporary hash; all temporary memory is freed
+  before returning. Returns true when a required property is missing or
+  on error.
+*/
+bool Json_schema_required::validate(const json_engine_t *je,
+                                    const uchar *k_start,
+                                    const uchar* k_end)
+{
+  json_engine_t curr_je= *je;
+  List<char> malloc_mem_list;
+  HASH required;
+  int res= true, curr_level= curr_je.stack_p;
+  List_iterator<String> it(required_properties);
+  String *curr_str;
+
+  if (curr_je.value_type != JSON_VALUE_OBJECT)
+    return false;
+
+  if(my_hash_init(PSI_INSTRUMENT_ME, &required,
+                  curr_je.s.cs, 1024, 0, 0, (my_hash_get_key) get_key_name,
+                  NULL, 0))
+    return true;
+  while (json_scan_next(&curr_je)== 0 && curr_je.stack_p >= curr_level)
+  {
+    switch (curr_je.state)
+    {
+      case JST_KEY:
+      {
+        const uchar *key_end, *key_start;
+        int key_len;
+        char *str;
+
+        key_start= curr_je.s.c_str;
+        do
+        {
+          key_end= curr_je.s.c_str;
+        } while (json_read_keyname_chr(&curr_je) == 0);
+
+        key_len= (int)(key_end-key_start);
+        /* Bug fix: check the allocation before writing through it. */
+        str= (char*)malloc((size_t)(key_len)+1);
+        if (!str)
+          goto error;
+        strncpy(str, (const char*)key_start, key_len);
+        str[key_len]='\0';
+
+        /*
+          Bug fix: register the buffer for cleanup before the insert so it
+          is not leaked when my_hash_insert() fails.
+        */
+        malloc_mem_list.push_back(str);
+        if (my_hash_insert(&required, (const uchar*)str))
+          goto error;
+      }
+    }
+  }
+  /* Every listed property must have been seen in the object. */
+  while ((curr_str= it++))
+  {
+    if (!my_hash_search(&required, (const uchar*)curr_str->ptr(),
+                        curr_str->length()))
+      goto error;
+  }
+  res= false;
+  error:
+  if (!malloc_mem_list.is_empty())
+  {
+    List_iterator<char> it(malloc_mem_list);
+    char *curr_ptr;
+    while ((curr_ptr= it++))
+      free(curr_ptr);
+    malloc_mem_list.empty();
+  }
+  my_hash_free(&required);
+  return res;
+}
+
+/*
+  Handle "required": the schema value must be an array of strings, each
+  naming a property that validated objects must contain.
+*/
+bool Json_schema_required::handle_keyword(THD *thd, json_engine_t *je,
+                                          const char* key_start,
+                                          const char* key_end,
+                                          List<Json_schema_keyword>
+                                               *all_keywords)
+{
+  int level= je->stack_p;
+
+  if (je->value_type != JSON_VALUE_ARRAY)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "required");
+    return true;
+  }
+  while(json_scan_next(je)==0 && level <= je->stack_p)
+  {
+    if (json_read_value(je))
+      return true;
+    /*
+      Elements must be property names (strings). The original accepted
+      any value here, unlike the equivalent check in dependentRequired.
+    */
+    if (je->value_type != JSON_VALUE_STRING)
+    {
+      my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "required");
+      return true;
+    }
+    String *str= new (thd->mem_root)String((char*)je->value,
+                                           je->value_len, je->s.cs);
+    this->required_properties.push_back(str, thd->mem_root);
+  }
+  return false;
+}
+
+/*
+  Validate "dependentRequired": when a trigger property is present in the
+  object, every property it depends on must be present as well.
+  Non-object values validate trivially.
+*/
+bool Json_schema_dependent_required::validate(const json_engine_t *je,
+                                              const uchar *k_start,
+                                              const uchar* k_end)
+{
+  json_engine_t curr_je= *je;
+  HASH properties;
+  bool res= true;
+  int curr_level= curr_je.stack_p;
+  List <char> malloc_mem_list;
+  List_iterator<st_dependent_keywords> it(dependent_required);
+  st_dependent_keywords *curr_keyword= NULL;
+
+  if (curr_je.value_type != JSON_VALUE_OBJECT)
+    return false;
+
+  if (my_hash_init(PSI_INSTRUMENT_ME, &properties,
+                   curr_je.s.cs, 1024, 0, 0, (my_hash_get_key) get_key_name,
+                   NULL, 0))
+    return true;
+
+  /* First pass: hash every key present in the object. */
+  while (json_scan_next(&curr_je)== 0 && curr_je.stack_p >= curr_level)
+  {
+    switch (curr_je.state)
+    {
+      case JST_KEY:
+      {
+        const uchar *key_end, *key_start;
+        int key_len;
+        char *str;
+
+        key_start= curr_je.s.c_str;
+        do
+        {
+          key_end= curr_je.s.c_str;
+        } while (json_read_keyname_chr(&curr_je) == 0);
+
+        key_len= (int)(key_end-key_start);
+        str= (char*)malloc((size_t)(key_len)+1);
+        if (!str)                    /* OOM: original dereferenced NULL */
+          goto error;
+        strncpy(str, (const char*)key_start, key_len);
+        str[key_len]='\0';
+
+        if (my_hash_insert(&properties, (const uchar*)str))
+        {
+          free(str);                 /* not yet owned by malloc_mem_list */
+          goto error;
+        }
+        malloc_mem_list.push_back(str);
+      }
+    }
+  }
+  /* Second pass: for each present trigger check all its dependents. */
+  while ((curr_keyword= it++))
+  {
+    if (my_hash_search(&properties,
+                       (const uchar*)curr_keyword->property->ptr(),
+                       curr_keyword->property->length()))
+    {
+      List_iterator<String> it2(curr_keyword->dependents);
+      String *curr_depended_keyword;
+      while ((curr_depended_keyword= it2++))
+      {
+        if (!my_hash_search(&properties,
+                            (const uchar*)curr_depended_keyword->ptr(),
+                            curr_depended_keyword->length()))
+        {
+          goto error;
+        }
+      }
+    }
+  }
+  res= false;
+
+  error:
+  my_hash_free(&properties);
+  if (!malloc_mem_list.is_empty())
+  {
+    List_iterator<char> it(malloc_mem_list);
+    char *curr_ptr;
+    while ((curr_ptr= it++))
+      free(curr_ptr);
+    malloc_mem_list.empty();
+  }
+  return res;
+}
+
+/*
+  Handle "dependentRequired": the value is an object whose keys are
+  trigger properties and whose values are arrays of property-name strings
+  that must accompany the trigger. Each pair is stored as an
+  st_dependent_keywords entry in dependent_required.
+*/
+bool Json_schema_dependent_required::handle_keyword(THD *thd, json_engine_t *je,
+                                                    const char* key_start,
+                                                    const char* key_end,
+                                                    List<Json_schema_keyword>
+                                                         *all_keywords)
+{
+  if (je->value_type == JSON_VALUE_OBJECT)
+  {
+    int level1= je->stack_p;
+    while (json_scan_next(je)==0 && level1 <= je->stack_p)
+    {
+      switch(je->state)
+      {
+        case JST_KEY:
+        {
+          const uchar *k_end, *k_start;
+          int k_len;
+
+          /* Scan to the end of the trigger-property name. */
+          k_start= je->s.c_str;
+          do
+          {
+            k_end= je->s.c_str;
+          } while (json_read_keyname_chr(je) == 0);
+
+          k_len= (int)(k_end-k_start);
+          if (json_read_value(je))
+            return true;
+
+          if (je->value_type == JSON_VALUE_ARRAY)
+          {
+            st_dependent_keywords *curr_dependent_keywords=
+                     (st_dependent_keywords *) alloc_root(thd->mem_root,
+                                                  sizeof(st_dependent_keywords));
+
+            if (curr_dependent_keywords)
+            {
+              curr_dependent_keywords->property=
+                            new (thd->mem_root)String((char*)k_start,
+                                                      k_len, je->s.cs);
+              curr_dependent_keywords->dependents.empty();
+              /* Collect the dependent names; each must be a string. */
+              int level2= je->stack_p;
+              while (json_scan_next(je)==0 && level2 <= je->stack_p)
+              {
+                if (json_read_value(je) || je->value_type != JSON_VALUE_STRING)
+                {
+                  my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0),
+                           "dependentRequired");
+                  return true;
+                }
+                else
+                {
+                  String *str=
+                        new (thd->mem_root)String((char*)je->value,
+                                                  je->value_len, je->s.cs);
+                  curr_dependent_keywords->dependents.push_back(str, thd->mem_root);
+                }
+              }
+              dependent_required.push_back(curr_dependent_keywords, thd->mem_root);
+            }
+            else
+            {
+              /* alloc_root failure: abort schema compilation. */
+              return true;
+            }
+          }
+          else
+          {
+            my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0),
+                     "dependentRequired");
+            return true;
+          }
+        }
+      }
+    }
+  }
+  else
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "dependentRequired");
+    return true;
+  }
+  return false;
+}
+
+/*
+  Validate "propertyNames": every key name of the object must satisfy the
+  compiled sub-schema. The key bounds (k_start/k_end) are what the
+  sub-keywords actually check; values are read/skipped only to keep the
+  scan position in sync. Non-objects validate trivially.
+*/
+bool Json_schema_property_names::validate(const json_engine_t *je,
+                                          const uchar *k_start,
+                                          const uchar* k_end)
+{
+  json_engine_t curr_je= *je;
+  int level= curr_je.stack_p;
+
+  if (je->value_type != JSON_VALUE_OBJECT)
+    return false;
+
+  while (json_scan_next(&curr_je)==0 && level <= curr_je.stack_p)
+  {
+    switch(curr_je.state)
+    {
+      case JST_KEY:
+      {
+        /* Record the bounds of this key's name. */
+        const uchar *k_end, *k_start;
+        k_start= curr_je.s.c_str;
+        do
+        {
+          k_end= curr_je.s.c_str;
+        } while (json_read_keyname_chr(&curr_je) == 0);
+
+        /* Step over the value; only the key name is validated here. */
+        if (json_read_value(&curr_je))
+          return true;
+        if (!json_value_scalar(&curr_je))
+        {
+          if (json_skip_level(&curr_je))
+            return true;
+        }
+
+        List_iterator <Json_schema_keyword> it1 (property_names);
+        Json_schema_keyword *curr_schema= NULL;
+        while((curr_schema= it1++))
+        {
+          if (curr_schema->validate(&curr_je, k_start, k_end))
+            return true;
+        }
+      }
+    }
+  }
+
+  return false;
+}
+
+/*
+  Handle "propertyNames": compile the sub-schema that every key name of a
+  validated object must satisfy. The schema value itself must be an
+  object.
+*/
+bool Json_schema_property_names::handle_keyword(THD *thd, json_engine_t *je,
+                                                const char* key_start,
+                                                const char* key_end,
+                                                List<Json_schema_keyword>
+                                                     *all_keywords)
+{
+  if (je->value_type != JSON_VALUE_OBJECT)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "propertyNames");
+    return true;
+  }
+  return create_object_and_handle_keyword(thd, je, &property_names,
+                                          all_keywords);
+}
+
+/*
+  additionalItems, additionalProperties, unevaluatedItems and
+  unevaluatedProperties all accept either a boolean or a full sub-schema
+  (an object), so a single handler serves them all.
+*/
+bool
+Json_schema_additional_and_unevaluated::handle_keyword(THD *thd,
+                                                       json_engine_t *je,
+                                                       const char* key_start,
+                                                       const char* key_end,
+                                                       List<Json_schema_keyword>
+                                                            *all_keywords)
+{
+  switch (je->value_type)
+  {
+  case JSON_VALUE_FALSE:
+    set_allowed(false);          /* "false" forbids anything extra */
+    return false;
+  case JSON_VALUE_TRUE:
+    return false;                /* "true" is the permissive default */
+  case JSON_VALUE_OBJECT:
+    /* A sub-schema: compile it like any other schema object. */
+    return create_object_and_handle_keyword(thd, je, &schema_list,
+                                            all_keywords);
+  default:
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0),
+             keyword_map->func_name.str);
+    return true;
+  }
+}
+
+/*
+  Validate one key/value against "properties" acting as an alternate
+  schema. This happens with logic keywords (allOf, ...): a key that the
+  logic schema rejects may still validate against "properties", in which
+  case the overall result must be true.
+*/
+bool Json_schema_properties::validate_as_alternate(const json_engine_t *je,
+                                                   const uchar* k_start,
+                                                   const uchar* k_end)
+{
+  json_engine_t local_je= *je;
+  st_property *prop=
+      (st_property*) my_hash_search(&properties, (const uchar*)k_start,
+                                    (size_t)(k_end-k_start));
+
+  if (!prop)
+    return false;       /* key not described here: nothing to check */
+
+  if (validate_schema_items(&local_je, prop->curr_schema))
+    return true;
+  /* Step over a container value so the caller's scan stays in sync. */
+  return json_value_scalar(&local_je) ? false : json_skip_level(&local_je);
+}
+
+/*
+  Alternate validation for the additional/unevaluated keywords: the value
+  is simply checked against the compiled sub-schema list (empty when the
+  keyword was given as a boolean).
+*/
+bool
+Json_schema_additional_and_unevaluated::
+  validate_as_alternate(const json_engine_t *je,
+                        const uchar* k_start,
+                        const uchar* k_end)
+{
+  return validate_schema_items(je, &schema_list);
+}
+
+
+/*
+  "unevaluatedProperties" makes sense on its own, without
+  additionalProperties, properties or patternProperties being present.
+  With a "false" schema the object may not contain any key at all;
+  otherwise every value must satisfy the compiled sub-schema.
+*/
+bool Json_schema_unevaluated_properties::validate(const json_engine_t *je,
+                                                  const uchar *k_start,
+                                                  const uchar* k_end)
+{
+  json_engine_t scan_je= *je;
+  int start_level= scan_je.stack_p;
+  int keys_seen= 0;
+
+  if (je->value_type != JSON_VALUE_OBJECT)
+    return false;
+
+  while (json_scan_next(&scan_je)==0 && start_level <= scan_je.stack_p)
+  {
+    if (json_read_value(&scan_je))
+      return true;
+    keys_seen++;
+    if (validate_schema_items(&scan_je, &schema_list))
+      return true;
+  }
+  /* When the keyword is "false", any property at all is a failure. */
+  return !allowed && keys_seen != 0;
+}
+
+/*
+  Unlike additionalItems, additionalProperties makes sense on its own
+  without existence of properties and patternProperties: every value of
+  the object must then satisfy the compiled sub-schema.
+*/
+bool Json_schema_additional_properties::validate(const json_engine_t *je,
+                                                 const uchar *k_start,
+                                                 const uchar* k_end)
+{
+  json_engine_t curr_je= *je;
+  int level= curr_je.stack_p;
+
+  if (je->value_type != JSON_VALUE_OBJECT)
+    return false;
+
+  while (json_scan_next(&curr_je)==0 && level <= curr_je.stack_p)
+  {
+    switch(curr_je.state)
+    {
+      case JST_KEY:
+        /* Position on this key's value and validate it. */
+        if (json_read_value(&curr_je))
+          return true;
+        /*
+          NOTE(review): container values are not skipped after validation,
+          so the scan appears to descend into nested objects and treat
+          their keys like top-level ones — confirm this is intended.
+        */
+        if (validate_schema_items(&curr_je, &schema_list))
+          return true;
+    }
+  }
+
+  return false;
+}
+
+/*
+  When "items"/"prefixItems" is present and an element is not validated
+  against it, additionalItems is tried as an "alternate validation": it
+  is linked as an alternate keyword and is not present in the schema list
+  for that level. This function is only reached when "items"/"prefixItems"
+  is absent, i.e. when additionalItems appears in the schema list on its
+  own — and on its own it constrains nothing, so it always validates.
+*/
+bool Json_schema_additional_items::validate(const json_engine_t *je,
+                                            const uchar *k_start,
+                                            const uchar* k_end)
+{
+  return false;
+}
+bool Json_schema_unevaluated_items::validate(const json_engine_t *je,
+                                             const uchar *k_start,
+                                             const uchar* k_end)
+{
+  /*
+    Makes sense on its own, without adjacent keywords: with a "false"
+    schema the array must be empty, otherwise each element must satisfy
+    the compiled sub-schema.
+  */
+  json_engine_t scan_je= *je;
+  int start_level= je->stack_p;
+  int items_seen= 0;
+
+  if (je->value_type != JSON_VALUE_ARRAY)
+    return false;
+
+  while (json_scan_next(&scan_je)==0 && start_level <= scan_je.stack_p)
+  {
+    if (json_read_value(&scan_je))
+      return true;
+    items_seen++;
+    if (validate_schema_items(&scan_je, &schema_list))
+      return true;
+  }
+
+  /* With "false", any element at all fails validation. */
+  return !allowed && items_seen != 0;
+}
+
+/*
+  Validate "properties": each key with an entry in the properties hash
+  must have a value satisfying the corresponding sub-schema; keys with no
+  entry are handed to the alternate schema (additionalProperties etc).
+  Non-objects validate trivially.
+*/
+bool Json_schema_properties::validate(const json_engine_t *je,
+                                      const uchar *k_start,
+                                      const uchar* k_end)
+{
+  json_engine_t curr_je= *je;
+
+  if (curr_je.value_type != JSON_VALUE_OBJECT)
+    return false;
+
+  int level= curr_je.stack_p;
+  while (json_scan_next(&curr_je)==0 && level <= curr_je.stack_p)
+  {
+    switch(curr_je.state)
+    {
+      case JST_KEY:
+      {
+        const uchar *k_end, *k_start= curr_je.s.c_str;
+        do
+        {
+          k_end= curr_je.s.c_str;
+        } while (json_read_keyname_chr(&curr_je) == 0);
+
+        if (json_read_value(&curr_je))
+          return true;
+
+        st_property *curr_property= NULL;
+        if ((curr_property=
+             (st_property*)my_hash_search(&properties,
+                                          (const uchar*)k_start,
+                                          (size_t)(k_end-k_start))))
+        {
+          if (validate_schema_items(&curr_je, curr_property->curr_schema))
+            return true;
+        }
+        else
+        {
+          /* Key not described by "properties": try the alternate schema. */
+          if (fall_back_on_alternate_schema(&curr_je, k_start, k_end))
+            return true;
+        }
+        /* Skip container values so the scan continues at the next key. */
+        if (!json_value_scalar(&curr_je))
+        {
+          if (json_skip_level(&curr_je))
+            return true;
+        }
+      }
+    }
+  }
+
+  return false;
+}
+
+/*
+  Handle "properties": the value is an object mapping property names to
+  sub-schemas. Each pair is compiled into an st_property and stored in a
+  hash keyed by property name for lookup during validation.
+*/
+bool Json_schema_properties::handle_keyword(THD *thd, json_engine_t *je,
+                                            const char* key_start,
+                                            const char* key_end,
+                                            List<Json_schema_keyword>
+                                                 *all_keywords)
+{
+
+  if (je->value_type != JSON_VALUE_OBJECT)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "properties");
+    return true;
+  }
+
+  if (my_hash_init(PSI_INSTRUMENT_ME,
+                   &this->properties,
+                   je->s.cs, 1024, 0, 0,
+                   (my_hash_get_key) get_key_name_for_property,
+                   NULL, 0))
+    return true;
+  /* Presumably checked by cleanup code elsewhere to free the hash. */
+  is_hash_inited= true;
+
+  int level= je->stack_p;
+  while (json_scan_next(je)==0 && level <= je->stack_p)
+  {
+    switch(je->state)
+    {
+      case JST_KEY:
+      {
+        const uchar *k_end, *k_start= je->s.c_str;
+        do
+        {
+          k_end= je->s.c_str;
+        } while (json_read_keyname_chr(je) == 0);
+
+        if (json_read_value(je))
+          return true;
+
+        st_property *curr_property=
+                         (st_property*)alloc_root(thd->mem_root,
+                                                  sizeof(st_property));
+        /*
+          NOTE(review): allocation failures below fall through silently
+          (the property is simply not registered, no error raised) —
+          confirm whether that is intended.
+        */
+        if (curr_property)
+        {
+          curr_property->key_name= (char*)alloc_root(thd->mem_root,
+                                                   (size_t)(k_end-k_start)+1);
+          curr_property->curr_schema=
+                         new (thd->mem_root) List<Json_schema_keyword>;
+          if (curr_property->key_name)
+          {
+            curr_property->key_name[(int)(k_end-k_start)]= '\0';
+            strncpy((char*)curr_property->key_name, (const char*)k_start,
+                    (size_t)(k_end-k_start));
+            if (create_object_and_handle_keyword(thd, je,
+                                                 curr_property->curr_schema,
+                                                 all_keywords))
+              return true;
+            if (my_hash_insert(&properties, (const uchar*)curr_property))
+              return true;
+          }
+        }
+      }
+    }
+  }
+  return false;
+}
+
+/*
+  Alternate validation for "patternProperties": try the key name against
+  each compiled pattern; validate the value against the first matching
+  sub-schema. If no pattern matches, defer to this keyword's own
+  alternate schema.
+*/
+bool Json_schema_pattern_properties::
+  validate_as_alternate(const json_engine_t *curr_je,
+                        const uchar *k_start,
+                        const uchar* k_end)
+{
+  bool match_found= false;
+  List_iterator <st_pattern_to_property> it1 (pattern_properties);
+  st_pattern_to_property *curr_pattern_property= NULL;
+
+  /* Load the key name into the reusable Item_string fed to the regex. */
+  str->str_value.set_or_copy_aligned((const char*)k_start,
+                                     (size_t)(k_end-k_start), curr_je->s.cs);
+
+  while ((curr_pattern_property= it1++))
+  {
+    if (curr_pattern_property->re.recompile(curr_pattern_property->pattern))
+      return true;
+    /* NOTE(review): a regex engine error is reported as a validation
+       failure here — confirm that is intended. */
+    if (curr_pattern_property->re.exec(str, 0, 0))
+      return true;
+    if (curr_pattern_property->re.match())
+    {
+      match_found= true;
+      if (validate_schema_items(curr_je, curr_pattern_property->curr_schema))
+        return true;
+      break;
+    }
+  }
+  if (!match_found)
+  {
+    if (fall_back_on_alternate_schema(curr_je))
+      return true;
+  }
+  return false;
+}
+
+
+/*
+  Validate "patternProperties": a value whose key matches one of the
+  compiled regex patterns must satisfy the corresponding sub-schema;
+  keys matching no pattern fall back on the alternate schema.
+*/
+bool Json_schema_pattern_properties::validate(const json_engine_t *je,
+                                              const uchar *k_start,
+                                              const uchar* k_end)
+{
+  json_engine_t curr_je= *je;
+  int level= je->stack_p;
+
+  if (je->value_type != JSON_VALUE_OBJECT)
+    return false;
+
+  while (json_scan_next(&curr_je)==0 && level <= curr_je.stack_p)
+  {
+    switch(curr_je.state)
+    {
+      case JST_KEY:
+      {
+        /*
+          match_found must be reset for every key: the original declared
+          it once per call, so after the first matching key the alternate
+          schema was never consulted for later non-matching keys.
+        */
+        bool match_found= false;
+        const uchar *k_end, *k_start= curr_je.s.c_str;
+        do
+        {
+          k_end= curr_je.s.c_str;
+        } while (json_read_keyname_chr(&curr_je) == 0);
+
+        str->str_value.set_or_copy_aligned((const char*)k_start,
+                                           (size_t)(k_end-k_start), curr_je.s.cs);
+
+        if (json_read_value(&curr_je))
+          return true;
+
+        List_iterator <st_pattern_to_property> it1 (pattern_properties);
+        st_pattern_to_property *curr_pattern_property= NULL;
+
+        while ((curr_pattern_property= it1++))
+        {
+          if (curr_pattern_property->re.recompile(curr_pattern_property->pattern))
+            return true;
+          if (curr_pattern_property->re.exec(str, 0, 0))
+            return true;
+          if (curr_pattern_property->re.match())
+          {
+            match_found= true;
+            if (validate_schema_items(&curr_je, curr_pattern_property->curr_schema))
+              return true;
+          }
+        }
+        if (!match_found)
+        {
+          if (fall_back_on_alternate_schema(&curr_je, k_start, k_end))
+            return true;
+        }
+      }
+    }
+  }
+  return false;
+}
+
+
+
+/*
+  Handle "patternProperties": the value is an object mapping regex
+  patterns (key names) to sub-schemas. Each pair is compiled into an
+  st_pattern_to_property; a reusable Item_string (str) is created to
+  carry key names into the regex engine during validation.
+*/
+bool Json_schema_pattern_properties::handle_keyword(THD *thd,
+                                                    json_engine_t *je,
+                                                    const char* key_start,
+                                                    const char* key_end,
+                                                    List<Json_schema_keyword>
+                                                    *all_keywords)
+{
+  if (je->value_type != JSON_VALUE_OBJECT)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "patternProperties");
+    return true;
+  }
+
+  str= (Item_string*)current_thd->make_string_literal((const char*)"",
+                                                      0,
+                                           my_charset_repertoire(je->s.cs));
+
+  int level= je->stack_p;
+  while (json_scan_next(je)==0 && level <= je->stack_p)
+  {
+    switch(je->state)
+    {
+      case JST_KEY:
+      {
+        /* The key name itself is the regex pattern. */
+        const uchar *k_end, *k_start= je->s.c_str;
+        do
+        {
+          k_end= je->s.c_str;
+        } while (json_read_keyname_chr(je) == 0);
+
+        if (json_read_value(je))
+          return true;
+
+        st_pattern_to_property *curr_pattern_to_property= NULL;
+
+        curr_pattern_to_property= new (thd->mem_root) pattern_to_property();
+        if (curr_pattern_to_property)
+        {
+          my_repertoire_t repertoire= my_charset_repertoire(je->s.cs);
+          curr_pattern_to_property->pattern=
+                        thd->make_string_literal((const char*)k_start,
+                                                 (size_t)(k_end-k_start),
+                                                 repertoire);
+          curr_pattern_to_property->re.init(je->s.cs, 0);
+          curr_pattern_to_property->curr_schema=
+                             new (thd->mem_root) List<Json_schema_keyword>;
+
+          /*
+            NOTE(review): when the List allocation fails, the entry is
+            still pushed with curr_schema == NULL — confirm validate()
+            cannot dereference it in that state.
+          */
+          if (curr_pattern_to_property->curr_schema)
+          {
+            if (create_object_and_handle_keyword(thd, je,
+                                              curr_pattern_to_property->curr_schema,
+                                              all_keywords))
+              return true;
+          }
+
+          pattern_properties.push_back(curr_pattern_to_property, thd->mem_root);
+        }
+      }
+    }
+  }
+  return false;
+}
+
+/*
+  Handle a logic keyword (allOf/anyOf/oneOf): the value must be an array
+  of schemas. Each element is re-scanned with a private engine and
+  compiled into its own keyword list, collected in schema_items.
+*/
+bool Json_schema_logic::handle_keyword(THD *thd, json_engine_t *je,
+                                       const char* key_start,
+                                       const char* key_end,
+                                       List<Json_schema_keyword>
+                                            *all_keywords)
+{
+  if (je->value_type != JSON_VALUE_ARRAY)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), keyword_map->func_name.str);
+    return true;
+  }
+
+  int level= je->stack_p;
+  while(json_scan_next(je)==0 && je->stack_p >= level)
+  {
+    json_engine_t temp_je;
+    char *begin, *end;
+    int len;
+
+    if (json_read_value(je))
+      return true;
+    /* Remember where this sub-schema starts, skip over it ... */
+    begin= (char*)je->value;
+
+    if (json_skip_level(je))
+      return true;
+
+    end= (char*)je->s.c_str;
+    len= (int)(end-begin);
+
+    /* ... then re-scan just that slice to compile it. */
+    json_scan_start(&temp_je, je->s.cs, (const uchar *) begin,
+                    (const uchar *)begin+len);
+    List<Json_schema_keyword> *keyword_list=
+                            new (thd->mem_root) List<Json_schema_keyword>;
+
+    if (!keyword_list)
+      return true;
+    if (create_object_and_handle_keyword(thd, &temp_je, keyword_list,
+                                         all_keywords))
+      return true;
+
+    schema_items.push_back(keyword_list, thd->mem_root);
+  }
+
+  return false;
+}
+
+/*
+  Run a value through every compiled sub-schema, counting how many of
+  them validate it, then let the concrete logic keyword decide via
+  validate_count() whether that count is acceptable. For "not" (HAS_NOT)
+  a single successful validation is an immediate failure.
+*/
+bool Json_schema_logic::check_validation(const json_engine_t *je,
+                                         const uchar *k_start,
+                                         const uchar *k_end)
+{
+  List_iterator <List<Json_schema_keyword>> it1 (schema_items);
+  List<Json_schema_keyword> *curr_schema= NULL;
+  Json_schema_keyword *curr_alternate_schema= NULL;
+  uint count_validations= 0;
+  bool validated= true;
+
+  /* Pick the fallback schema that matches the value's container type. */
+  if (je->value_type == JSON_VALUE_ARRAY)
+    curr_alternate_schema= alternate_choice1;
+  else if (je->value_type == JSON_VALUE_OBJECT)
+    curr_alternate_schema= alternate_choice2;
+
+  while ((curr_schema= it1++))
+  {
+    List_iterator<Json_schema_keyword> it2(*curr_schema);
+    Json_schema_keyword *curr_keyword= NULL;
+    validated= true;
+
+    while ((curr_keyword=it2++))
+    {
+      if (!curr_keyword->alternate_schema)
+        curr_keyword->alternate_schema= curr_alternate_schema;
+      if (curr_keyword->validate(je))
+      {
+        validated= false;
+        break;
+      }
+    }
+    if (validated)
+    {
+      count_validations++;
+      if (logic_flag & HAS_NOT)
+        return true;
+    }
+  }
+
+  if (validate_count(&count_validations, &schema_items.elements))
+    return true;
+
+  return false;
+}
+/* Entry point for the logic keywords: delegate to check_validation(). */
+bool Json_schema_logic::validate(const json_engine_t *je,
+                                 const uchar *k_start,
+                                 const uchar* k_end)
+{
+  return check_validation(je, k_start, k_end);
+}
+
+/*
+  Handle "not": compile the sub-schema whose failure to validate a value
+  is what makes "not" succeed. The schema value must be an object.
+*/
+bool Json_schema_not::handle_keyword(THD *thd, json_engine_t *je,
+                                     const char* key_start,
+                                     const char* key_end,
+                                     List<Json_schema_keyword>
+                                          *all_keywords)
+{
+  if (je->value_type != JSON_VALUE_OBJECT)
+  {
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0),
+             keyword_map->func_name.str);
+    return true;
+  }
+
+  bool failed= create_object_and_handle_keyword(thd, je, &schema_list,
+                                                all_keywords);
+  schema_items.push_back(&schema_list, thd->mem_root);
+  return failed;
+}
+
+
+/*
+  Run a value through every keyword of a compiled schema list.
+  Returns true as soon as one keyword fails to validate it.
+*/
+bool Json_schema_keyword::validate_schema_items(const json_engine_t *je,
+                                                List<Json_schema_keyword>
+                                                     *schema_items)
+{
+  json_engine_t local_je= *je;
+  List_iterator<Json_schema_keyword> iter(*schema_items);
+
+  for (Json_schema_keyword *keyword= iter++; keyword; keyword= iter++)
+  {
+    if (keyword->validate(&local_je))
+      return true;
+  }
+
+  return false;
+}
+
+/*
+  Implement if/then/else: when the "if" sub-schema accepts the value the
+  "then" sub-schema must accept it too, otherwise the "else" sub-schema
+  must. A missing branch validates trivially.
+*/
+bool Json_schema_conditional::validate(const json_engine_t *je,
+                                       const uchar *k_start,
+                                       const uchar *k_end)
+{
+  if (!if_cond)
+    return false;
+
+  /* validate_schema_items() returning true means the branch failed. */
+  Json_schema_keyword *branch=
+      if_cond->validate_schema_items(je, if_cond->get_validation_keywords())
+        ? else_cond : then_cond;
+
+  if (!branch)
+    return false;
+  return branch->validate_schema_items(je, branch->get_validation_keywords());
+}
+
+
+/*
+  Handle if/then/else: each of these keywords takes a schema object,
+  compiled into conditions_schema for later use by validate().
+*/
+bool Json_schema_conditional::handle_keyword(THD *thd, json_engine_t *je,
+                                             const char* key_start,
+                                             const char* key_end,
+                                             List<Json_schema_keyword>
+                                                  *all_keywords)
+{
+  if (je->value_type == JSON_VALUE_OBJECT)
+    return create_object_and_handle_keyword(thd, je, &conditions_schema,
+                                            all_keywords);
+
+  my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0),
+           keyword_map->func_name.str);
+  return true;
+}
+
+/*
+  Handle "dependentSchemas": the value is an object mapping a property
+  name to a sub-schema that the whole instance must satisfy whenever that
+  property is present. Entries are stored in a hash keyed by name.
+*/
+bool Json_schema_dependent_schemas::handle_keyword(THD *thd, json_engine_t *je,
+                                                   const char* key_start,
+                                                   const char* key_end,
+                                                   List<Json_schema_keyword>
+                                                        *all_keywords)
+{
+  if (je->value_type != JSON_VALUE_OBJECT)
+  {
+    /*
+      Name the keyword actually being processed: the original message
+      said "properties" (copy-paste from Json_schema_properties).
+    */
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "dependentSchemas");
+    return true;
+  }
+
+  if (my_hash_init(PSI_INSTRUMENT_ME,
+                   &this->properties,
+                   je->s.cs, 1024, 0, 0,
+                   (my_hash_get_key) get_key_name_for_property,
+                   NULL, 0))
+    return true;
+  is_hash_inited= true;
+
+  int level= je->stack_p;
+  while (json_scan_next(je)==0 && level <= je->stack_p)
+  {
+    switch(je->state)
+    {
+      case JST_KEY:
+      {
+        const uchar *k_end, *k_start= je->s.c_str;
+        do
+        {
+          k_end= je->s.c_str;
+        } while (json_read_keyname_chr(je) == 0);
+
+        if (json_read_value(je))
+          return true;
+
+        st_property *curr_property=
+                          (st_property*)alloc_root(thd->mem_root,
+                                                   sizeof(st_property));
+        if (curr_property)
+        {
+          curr_property->key_name= (char*)alloc_root(thd->mem_root,
+                                                  (size_t)(k_end-k_start)+1);
+          curr_property->curr_schema=
+                          new (thd->mem_root) List<Json_schema_keyword>;
+          if (curr_property->key_name)
+          {
+            curr_property->key_name[(int)(k_end-k_start)]= '\0';
+            strncpy((char*)curr_property->key_name, (const char*)k_start,
+                    (size_t)(k_end-k_start));
+            if (create_object_and_handle_keyword(thd, je,
+                                                 curr_property->curr_schema,
+                                                 all_keywords))
+              return true;
+            if (my_hash_insert(&properties, (const uchar*)curr_property))
+              return true;
+          }
+        }
+      }
+    }
+  }
+  return false;
+}
+
+/*
+  Validate "dependentSchemas": when a property named in the hash is
+  present, the WHOLE object (je, not just this property's value) must
+  satisfy the dependent sub-schema — which is why validate_schema_items()
+  is called with je rather than curr_je below.
+*/
+bool Json_schema_dependent_schemas::validate(const json_engine_t *je,
+                                             const uchar *k_start,
+                                             const uchar *k_end)
+{
+  json_engine_t curr_je= *je;
+
+  if (curr_je.value_type != JSON_VALUE_OBJECT)
+    return false;
+
+  int level= curr_je.stack_p;
+  while (json_scan_next(&curr_je)==0 && level <= curr_je.stack_p)
+  {
+    switch(curr_je.state)
+    {
+      case JST_KEY:
+      {
+        const uchar *k_end, *k_start= curr_je.s.c_str;
+        do
+        {
+          k_end= curr_je.s.c_str;
+        } while (json_read_keyname_chr(&curr_je) == 0);
+
+        if (json_read_value(&curr_je))
+          return true;
+
+        st_property *curr_property= NULL;
+        if ((curr_property=
+             (st_property*)my_hash_search(&properties,
+                                          (const uchar*)k_start,
+                                          (size_t)(k_end-k_start))))
+        {
+          /* Validate the entire instance against the dependent schema. */
+          if (validate_schema_items(je, curr_property->curr_schema))
+            return true;
+          if (!json_value_scalar(&curr_je))
+          {
+            if (json_skip_level(&curr_je))
+              return true;
+          }
+        }
+      }
+    }
+  }
+
+  return false;
+}
+
+/*
+  Handle the media/annotation string keywords (contentEncoding,
+  contentMediaType, ...): the value must be a string; nothing is stored
+  because these keywords are annotations only.
+*/
+bool Json_schema_media_string::handle_keyword(THD *thd, json_engine_t *je,
+                                              const char* key_start,
+                                              const char* key_end,
+                                              List<Json_schema_keyword>
+                                                   *all_keywords)
+{
+  if (je->value_type != JSON_VALUE_STRING)
+  {
+    String curr_keyword((char*)key_start, key_end-key_start, je->s.cs);
+    /*
+      my_error() needs a NUL-terminated string: ptr() is not guaranteed
+      to be terminated, c_ptr_safe() is.
+    */
+    my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0),
+             curr_keyword.c_ptr_safe());
+    return true;
+  }
+
+  return false;
+}
+
+/*
+  "$ref" and the other reference keywords are not supported: always raise
+  ER_JSON_SCHEMA_KEYWORD_UNSUPPORTED naming the offending keyword.
+*/
+bool Json_schema_reference::handle_keyword(THD *thd, json_engine_t *je,
+                                           const char* key_start,
+                                           const char* key_end,
+                                           List<Json_schema_keyword>
+                                                *all_keywords)
+{
+  String keyword(0);
+  keyword.append((const char*)key_start, (int)(key_end-key_start));
+  /* c_ptr_safe() guarantees the NUL terminator my_error() relies on. */
+  my_error(ER_JSON_SCHEMA_KEYWORD_UNSUPPORTED, MYF(0), keyword.c_ptr_safe());
+  return true;
+}
+
+/*
+  Look the keyword name up in the global keyword hash and instantiate the
+  matching Json_schema_keyword object. Unknown names get the "empty"
+  keyword object, which validates trivially.
+*/
+Json_schema_keyword* create_object(THD *thd,
+                                   Json_schema_keyword *curr_keyword,
+                                   const uchar* key_start,
+                                   const uchar* key_end)
+{
+  st_json_schema_keyword_map *map_entry=
+    (st_json_schema_keyword_map*)
+        my_hash_search(&all_keywords_hash, key_start,
+                       (size_t)(key_end-key_start));
+
+  if (!map_entry)
+    map_entry= &empty_func_map;
+
+  curr_keyword= (map_entry->func)(thd);
+  curr_keyword->keyword_map= map_entry;
+  return curr_keyword;
+}
+
+/* Comparator for bubble_sort(): order keywords by ascending priority. */
+static int sort_by_priority(Json_schema_keyword* el1, Json_schema_keyword* el2,
+                            void *arg)
+{
+  return el1->priority > el2->priority;
+}
+
+/*
+  Sort a keyword list by priority and link each keyword to the next one
+  as its alternate schema (the last keyword gets NULL from elem()).
+*/
+void fix_keyword_list(List <Json_schema_keyword> *keyword_list)
+{
+  /*
+    Check the pointer before using it: the original called bubble_sort()
+    first and only tested keyword_list for NULL afterwards.
+  */
+  if (!keyword_list || keyword_list->is_empty())
+    return;
+
+  bubble_sort<Json_schema_keyword>(keyword_list, sort_by_priority, NULL);
+
+  int count= 1;
+  List_iterator<Json_schema_keyword> it2(*keyword_list);
+  Json_schema_keyword *curr_schema= NULL;
+
+  while((curr_schema=it2++))
+  {
+    /* elem(count) is NULL past the end, leaving the last one unlinked. */
+    curr_schema->set_alternate_schema(keyword_list->elem(count));
+    count++;
+  }
+}
+
+/*
+  Some schemas are interdependent: they are evaluated only if their
+  adjacent schemas fail to evaluate, so they need to be linked such that
+  when one fails to evaluate a value, an alternate schema can be tried.
+  Hence keywords are first collected in a temporary list, the
+  interdependence is adjusted, and only then are they added to the main
+  schema list.
+*/
+bool
+add_schema_interdependence(THD *thd, List<Json_schema_keyword> *temporary,
+                           List<Json_schema_keyword> *keyword_list)
+{
+  List_iterator<Json_schema_keyword> temp_it(*temporary);
+  List<Json_schema_keyword> array_prop, object_prop, logic_prop, conditional_prop;
+  Json_schema_keyword *temp_keyword= NULL, *contains= NULL,
+                      *max_contains= NULL, *min_contains= NULL,
+                      *if_cond= NULL, *then_cond= NULL, *else_cond= NULL;
+
+  while((temp_keyword= temp_it++))
+  {
+    size_t len= strlen(temp_keyword->keyword_map->func_name.str);
+    st_json_schema_keyword_map *curr_element= NULL;
+    if ((curr_element= (st_json_schema_keyword_map*) my_hash_search(&all_keywords_hash,
+                            (uchar*)(temp_keyword->keyword_map->func_name.str), len)))
+    {
+      if (temp_keyword->priority > 0)
+      {
+        if (curr_element->flag == JSON_SCHEMA_ARRAY_KEYWORD)
+          array_prop.push_back(temp_keyword);
+        else if (curr_element->flag == JSON_SCHEMA_OBJECT_KEYWORD)
+          object_prop.push_back(temp_keyword);
+        else if (curr_element->flag == JSON_SCHEMA_LOGIC_KEYWORD)
+          logic_prop.push_back(temp_keyword);
+      }
+      /* Indexes below are fixed slots in json_schema_func_array. */
+      else if (temp_keyword->keyword_map == &(json_schema_func_array[35]))
+        if_cond= temp_keyword;
+      else if (temp_keyword->keyword_map == &(json_schema_func_array[36]))
+        then_cond= temp_keyword;
+      else if (temp_keyword->keyword_map == &(json_schema_func_array[37]))
+        else_cond= temp_keyword;
+      else if (temp_keyword->keyword_map == &(json_schema_func_array[18]))
+        contains= temp_keyword;
+      else if (temp_keyword->keyword_map == &(json_schema_func_array[20]))
+        min_contains= temp_keyword;
+      else if (temp_keyword->keyword_map == &(json_schema_func_array[19]))
+        max_contains= temp_keyword;
+      else
+        keyword_list->push_back(temp_keyword, thd->mem_root);
+    }
+  }
+
+  if (if_cond)
+  {
+    Json_schema_conditional *cond_schema=
+            new (current_thd->mem_root) Json_schema_conditional();
+    if (!cond_schema)
+      return true;   /* OOM: the original pushed a NULL pointer here */
+    cond_schema->set_conditions(if_cond, then_cond, else_cond);
+    keyword_list->push_back(cond_schema, thd->mem_root);
+  }
+  if (contains)
+  {
+    contains->set_dependents(min_contains, max_contains);
+    keyword_list->push_back(contains, thd->mem_root);
+  }
+
+  fix_keyword_list(&array_prop);
+  fix_keyword_list(&object_prop);
+
+  /*
+    We want to check for an alternate schema.
+    When a key is not validated by logic keywords, we also want to check
+    schemas like properties, items etc to make sure the key is not
+    validated by any schema, in order to return a correct result. So
+    "link" the other schemas as alternates when logic keywords are present
+    and only push the logic keywords to the schema list.
+  */
+  if (!logic_prop.is_empty())
+  {
+    List_iterator<Json_schema_keyword> it(logic_prop);
+    Json_schema_keyword *curr_schema= NULL;
+    while((curr_schema= it++))
+    {
+      curr_schema->set_alternate_schema_choice(array_prop.elem(0),
+                                               object_prop.elem(0));
+      keyword_list->push_back(curr_schema, thd->mem_root);
+    }
+    array_prop.empty();
+    object_prop.empty();
+  }
+  else
+  {
+    if (array_prop.elem(0))
+      keyword_list->push_back(array_prop.elem(0), thd->mem_root);
+    if (object_prop.elem(0))
+      keyword_list->push_back(object_prop.elem(0), thd->mem_root);
+  }
+  return false;
+}
+
+
+/*
+  Scan all keywords on the current level and put them in a temporary
+  list. Once scanning is done, adjust the dependencies if needed and
+  add the keywords to keyword_list.
+*/
+bool create_object_and_handle_keyword(THD *thd, json_engine_t *je,
+                                      List<Json_schema_keyword> *keyword_list,
+                                      List<Json_schema_keyword> *all_keywords)
+{
+  int level= je->stack_p;
+  List<Json_schema_keyword> temporary_list;
+
+  DBUG_EXECUTE_IF("json_check_min_stack_requirement",
+                  {
+                    long arbitrary_var;
+                    long stack_used_up=
+                         (available_stack_size(thd->thread_stack,
+                                               &arbitrary_var));
+                    ALLOCATE_MEM_ON_STACK(my_thread_stack_size-stack_used_up-STACK_MIN_SIZE);
+                  });
+  /* Schemas nest recursively: guard against stack exhaustion. */
+  if (check_stack_overrun(thd, STACK_MIN_SIZE , NULL))
+    return 1;
+
+  while (json_scan_next(je)== 0 && je->stack_p >= level)
+  {
+    switch(je->state)
+    {
+      case JST_KEY:
+      {
+        const uchar *key_end, *key_start;
+
+        key_start= je->s.c_str;
+        do
+        {
+          key_end= je->s.c_str;
+        } while (json_read_keyname_chr(je) == 0);
+
+        if (json_read_value(je))
+          return true;
+
+        /* Unknown keyword names become no-op "empty" handlers. */
+        Json_schema_keyword *curr_keyword= NULL;
+        curr_keyword= create_object(thd, curr_keyword,
+                                    key_start, key_end);
+        if (all_keywords)
+          all_keywords->push_back(curr_keyword, thd->mem_root);
+        if (curr_keyword->handle_keyword(thd, je,
+                                         (const char*)key_start,
+                                         (const char*)key_end, all_keywords))
+        {
+          return true;
+        }
+        temporary_list.push_back(curr_keyword, thd->mem_root);
+        break;
+      }
+    }
+  }
+
+  /* Link interdependent keywords and move them into keyword_list. */
+  if (add_schema_interdependence(thd, &temporary_list, keyword_list))
+    return true;
+
+  return false;
+}
+
+/* HASH callback: the key of an st_property is its NUL-terminated name. */
+uchar* get_key_name_for_property(const char *key_name, size_t *length,
+                                 my_bool /* unused */)
+{
+  st_property *property_entry= (st_property*) key_name;
+
+  *length= strlen(property_entry->key_name);
+  return (uchar*) property_entry->key_name;
+}
+
+/* HASH callback: the key of a keyword-map entry is its function name. */
+uchar* get_key_name_for_func(const char *key_name, size_t *length,
+                             my_bool /* unused */)
+{
+  st_json_schema_keyword_map *entry=
+                  (st_json_schema_keyword_map*) key_name;
+
+  *length= entry->func_name.length;
+  return (uchar*) entry->func_name.str;
+}
+
+/*
+  Build the global hash mapping schema keyword names to their factory
+  entries in json_schema_func_array. Returns true on failure.
+*/
+bool setup_json_schema_keyword_hash()
+{
+  if (my_hash_init(PSI_INSTRUMENT_ME,
+                   &all_keywords_hash,
+                   system_charset_info, 1024, 0, 0,
+                   (my_hash_get_key) get_key_name_for_func,
+                   NULL, 0))
+    return true;
+
+  size_t element_count=
+    sizeof(json_schema_func_array)/sizeof(json_schema_func_array[0]);
+  for (size_t i= 0; i < element_count; i++)
+  {
+    if (my_hash_insert(&all_keywords_hash,
+                       (uchar*)(&json_schema_func_array[i])))
+      return true;
+  }
+  return false;
+}
+
+/* Free the global keyword hash; counterpart of setup_json_schema_keyword_hash(). */
+void cleanup_json_schema_keyword_hash()
+{
+  my_hash_free(&all_keywords_hash);
+}
diff --git a/sql/json_schema.h b/sql/json_schema.h
new file mode 100644
index 00000000000..5da2576d7c4
--- /dev/null
+++ b/sql/json_schema.h
@@ -0,0 +1,827 @@
+#ifndef JSON_SCHEMA_INCLUDED
+#define JSON_SCHEMA_INCLUDED
+
+/* Copyright (c) 2016, 2021, MariaDB
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; version 2 of the License.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */
+
+
+/* This file defines all json schema classes. */
+
+#include "sql_class.h"
+#include "sql_type_json.h"
+#include "json_schema_helper.h"
+
+struct st_json_schema_keyword_map;
+
+class Json_schema_keyword : public Sql_alloc
+{
+ public:
+ Json_schema_keyword *alternate_schema;
+ st_json_schema_keyword_map *keyword_map;
+ double value;
+ uint priority;
+ bool allowed;
+
+ Json_schema_keyword() : alternate_schema(NULL), keyword_map(NULL),
+ value(0), priority(0), allowed(true)
+ {
+ }
+ virtual ~Json_schema_keyword() = default;
+
+ /*
+ Called for each keyword on the current level.
+ */
+ virtual bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL)
+ { return false; }
+ virtual bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords)
+ {
+ return false;
+ }
+ virtual List<Json_schema_keyword>* get_validation_keywords()
+ {
+ return NULL;
+ }
+ void set_alternate_schema(Json_schema_keyword *schema)
+ {
+ alternate_schema= schema;
+ }
+ virtual bool fall_back_on_alternate_schema(const json_engine_t *je,
+ const uchar* k_start= NULL,
+ const uchar* k_end= NULL);
+ virtual bool validate_as_alternate(const json_engine_t *je,
+ const uchar* k_start= NULL,
+ const uchar* k_end= NULL)
+ {
+ return false;
+ }
+ virtual bool validate_schema_items(const json_engine_t *je,
+ List<Json_schema_keyword>*schema_items);
+ virtual void set_alternate_schema_choice(Json_schema_keyword *schema1,
+ Json_schema_keyword *schema2)
+ {
+ return;
+ }
+ virtual void set_dependents(Json_schema_keyword *schema1,
+ Json_schema_keyword *schema2)
+ {
+ return;
+ }
+};
+
+/*
+  Additional and unevaluated keywords and items handle
+  keywords and validate schemas in the same way, so it makes
+  sense to have a base class for them.
+*/
+class Json_schema_additional_and_unevaluated : public Json_schema_keyword
+{
+ public:
+ List<Json_schema_keyword> schema_list;
+ Json_schema_additional_and_unevaluated()
+ {
+ allowed= true;
+ }
+ void set_allowed(bool allowed_val)
+ {
+ allowed= allowed_val;
+ }
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+ bool validate(const json_engine_t *je,
+ const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override
+ {
+ return false;
+ }
+ bool validate_as_alternate(const json_engine_t *je, const uchar *k_start,
+ const uchar *k_end) override;
+};
+
+
+class Json_schema_annotation : public Json_schema_keyword
+{
+ public:
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+class Json_schema_format : public Json_schema_keyword
+{
+ public:
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+typedef List<Json_schema_keyword> List_schema_keyword;
+
+class Json_schema_type : public Json_schema_keyword
+{
+ private:
+ uint type;
+
+ public:
+ bool validate(const json_engine_t *je,
+ const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd,
+ json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+ Json_schema_type()
+ {
+ type= 0;
+ }
+};
+
+class Json_schema_const : public Json_schema_keyword
+{
+ private:
+ char *const_json_value;
+
+ public:
+ enum json_value_types type;
+ bool validate(const json_engine_t *je,
+ const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+ Json_schema_const()
+ {
+ const_json_value= NULL;
+ }
+};
+
+enum enum_scalar_values {
+ HAS_NO_VAL= 0, HAS_TRUE_VAL= 2,
+ HAS_FALSE_VAL= 4, HAS_NULL_VAL= 8
+ };
+class Json_schema_enum : public Json_schema_keyword
+{
+ private:
+    HASH enum_values{}; /* zero-init: dtor calls my_hash_free() unconditionally */
+ uint enum_scalar;
+
+ public:
+ bool validate(const json_engine_t *je,
+ const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+ Json_schema_enum()
+ {
+ enum_scalar= HAS_NO_VAL;
+ }
+ ~Json_schema_enum()
+ {
+ my_hash_free(&enum_values);
+ }
+};
+
+class Json_schema_maximum : public Json_schema_keyword
+{
+ public:
+ bool validate(const json_engine_t *je,
+ const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+class Json_schema_minimum : public Json_schema_keyword
+{
+ public:
+ bool validate(const json_engine_t *je,
+ const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+class Json_schema_multiple_of : public Json_schema_keyword
+{
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+class Json_schema_ex_maximum : public Json_schema_keyword
+{
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+class Json_schema_ex_minimum : public Json_schema_keyword
+{
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+class Json_schema_max_len : public Json_schema_keyword
+{
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+class Json_schema_min_len : public Json_schema_keyword
+{
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+class Json_schema_pattern : public Json_schema_keyword
+{
+ private:
+ Regexp_processor_pcre re;
+ Item *pattern;
+ Item_string *str;
+
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+ Json_schema_pattern()
+ {
+ str= NULL;
+ pattern= NULL;
+ }
+ ~Json_schema_pattern() { re.cleanup(); }
+};
+
+class Json_schema_max_items : public Json_schema_keyword
+{
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+class Json_schema_min_items : public Json_schema_keyword
+{
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+class Json_schema_max_contains : public Json_schema_keyword
+{
+ public:
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+class Json_schema_min_contains : public Json_schema_keyword
+{
+ public:
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+/*
+ The value of max_contains and min_contains is only
+ relevant when contains keyword is present.
+ Hence the pointers to access them directly.
+*/
+class Json_schema_contains : public Json_schema_keyword
+{
+ public:
+ List <Json_schema_keyword> contains;
+  Json_schema_keyword *max_contains= NULL;
+  Json_schema_keyword *min_contains= NULL;
+
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+ void set_dependents(Json_schema_keyword *min, Json_schema_keyword *max)
+ {
+ min_contains= min;
+ max_contains= max;
+ }
+};
+
+class Json_schema_unique_items : public Json_schema_keyword
+{
+ private:
+ bool is_unique;
+
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+
+class Json_schema_prefix_items : public Json_schema_keyword
+{
+ public:
+ List <List_schema_keyword> prefix_items;
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+ Json_schema_prefix_items()
+ {
+ priority= 1;
+ }
+};
+
+class Json_schema_unevaluated_items :
+ public Json_schema_additional_and_unevaluated
+{
+ public:
+ Json_schema_unevaluated_items()
+ {
+ priority= 4;
+ }
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+};
+
+class Json_schema_additional_items :
+ public Json_schema_additional_and_unevaluated
+{
+ public:
+ Json_schema_additional_items()
+ {
+ priority= 3;
+ }
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+};
+
+class Json_schema_items : public Json_schema_keyword
+{
+ private:
+ List<Json_schema_keyword> items_schema;
+ public:
+ Json_schema_items()
+ {
+ priority= 2;
+ }
+ void set_allowed(bool allowed_val) { allowed= allowed_val; }
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+ bool validate_as_alternate(const json_engine_t *je, const uchar *k_start,
+ const uchar *k_end) override;
+};
+
+
+class Json_schema_property_names : public Json_schema_keyword
+{
+ protected:
+ List <Json_schema_keyword> property_names;
+
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+typedef struct property
+{
+ List<Json_schema_keyword> *curr_schema;
+ char *key_name;
+} st_property;
+
+class Json_schema_properties : public Json_schema_keyword
+{
+ private:
+ HASH properties;
+    bool is_hash_inited= false; /* dtor reads this; must not be indeterminate */
+
+ public:
+ Json_schema_properties()
+ {
+ priority= 1;
+ }
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+ ~Json_schema_properties()
+ {
+ if (is_hash_inited)
+ my_hash_free(&properties);
+ }
+ bool validate_as_alternate(const json_engine_t *je, const uchar *k_start,
+ const uchar *k_end) override;
+ };
+
+class Json_schema_dependent_schemas : public Json_schema_keyword
+{
+ private:
+ HASH properties;
+    bool is_hash_inited= false; /* dtor reads this; must not be indeterminate */
+
+ public:
+ ~Json_schema_dependent_schemas()
+ {
+ if (is_hash_inited)
+ my_hash_free(&properties);
+ }
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+
+class Json_schema_additional_properties :
+ public Json_schema_additional_and_unevaluated
+{
+ public:
+ Json_schema_additional_properties()
+ {
+ priority= 3;
+ }
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+};
+
+class Json_schema_unevaluated_properties :
+ public Json_schema_additional_and_unevaluated
+{
+ public:
+ Json_schema_unevaluated_properties()
+ {
+ priority= 4;
+ }
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+};
+
+typedef struct pattern_to_property : public Sql_alloc
+{
+ Regexp_processor_pcre re;
+ Item *pattern;
+ List<Json_schema_keyword> *curr_schema;
+}st_pattern_to_property;
+
+class Json_schema_pattern_properties : public Json_schema_keyword
+{
+ private:
+    Item_string *str= NULL;
+ List<st_pattern_to_property> pattern_properties;
+
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+ Json_schema_pattern_properties()
+ {
+ priority= 2;
+ }
+ ~Json_schema_pattern_properties()
+ {
+ str= NULL;
+ if (!pattern_properties.is_empty())
+ {
+ st_pattern_to_property *curr_pattern_to_property= NULL;
+ List_iterator<st_pattern_to_property> it(pattern_properties);
+ while((curr_pattern_to_property= it++))
+ {
+ curr_pattern_to_property->re.cleanup();
+ curr_pattern_to_property->pattern= NULL;
+ delete curr_pattern_to_property;
+ curr_pattern_to_property= nullptr;
+ }
+ }
+ }
+ bool validate_as_alternate(const json_engine_t *je, const uchar *k_start,
+ const uchar *k_end) override;
+};
+
+
+class Json_schema_max_prop : public Json_schema_keyword
+{
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+class Json_schema_min_prop : public Json_schema_keyword
+{
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+class Json_schema_required : public Json_schema_keyword
+{
+ private:
+ List <String> required_properties;
+
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+typedef struct dependent_keywords
+{
+ String *property;
+ List <String> dependents;
+} st_dependent_keywords;
+
+class Json_schema_dependent_required : public Json_schema_keyword
+{
+ private:
+ List<st_dependent_keywords> dependent_required;
+
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+enum logic_enum { HAS_ALL_OF= 2, HAS_ANY_OF= 4, HAS_ONE_OF= 8, HAS_NOT= 16};
+class Json_schema_logic : public Json_schema_keyword
+{
+ protected:
+ uint logic_flag;
+ List <List_schema_keyword> schema_items;
+ Json_schema_keyword *alternate_choice1, *alternate_choice2;
+ public:
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+ Json_schema_logic()
+ {
+ logic_flag= 0;
+ alternate_choice1= alternate_choice2= NULL;
+ priority= 1;
+ }
+ virtual bool validate_count(uint* count, uint* total) { return false; }
+ void set_alternate_schema_choice(Json_schema_keyword *schema1,
+ Json_schema_keyword* schema2) override
+ {
+ alternate_choice1= schema1;
+ alternate_choice2= schema2;
+ }
+ bool check_validation(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL);
+};
+
+class Json_schema_not : public Json_schema_logic
+{
+ private:
+ List <Json_schema_keyword> schema_list;
+ public:
+ Json_schema_not()
+ {
+ logic_flag= HAS_NOT;
+ }
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+ bool validate_count(uint *count, uint *total) override
+ {
+ return *count !=0;
+ }
+};
+
+class Json_schema_one_of : public Json_schema_logic
+{
+ public:
+ Json_schema_one_of()
+ {
+ logic_flag= HAS_ONE_OF;
+ }
+ bool validate_count(uint *count, uint *total) override
+ {
+ return !(*count == 1);
+ }
+};
+
+class Json_schema_any_of : public Json_schema_logic
+{
+ public:
+ Json_schema_any_of()
+ {
+ logic_flag= HAS_ANY_OF;
+ }
+ bool validate_count(uint *count, uint *total) override
+ {
+ return *count == 0;
+ }
+};
+
+class Json_schema_all_of : public Json_schema_logic
+{
+ public:
+ Json_schema_all_of()
+ {
+ logic_flag= HAS_ALL_OF;
+ }
+ bool validate_count(uint *count, uint *total) override
+ {
+ return *count != *total;
+ }
+};
+
+class Json_schema_conditional : public Json_schema_keyword
+{
+ private:
+ Json_schema_keyword *if_cond, *else_cond, *then_cond;
+
+ public:
+ List<Json_schema_keyword> conditions_schema;
+ Json_schema_conditional()
+ {
+ if_cond= NULL;
+ then_cond= NULL;
+ else_cond= NULL;
+ }
+ bool validate(const json_engine_t *je, const uchar *k_start= NULL,
+ const uchar *k_end= NULL) override;
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+ void set_conditions(Json_schema_keyword *if_val,
+ Json_schema_keyword* then_val,
+ Json_schema_keyword *else_val)
+ {
+ if_cond= if_val;
+ then_cond= then_val;
+ else_cond= else_val;
+ }
+ List<Json_schema_keyword>* get_validation_keywords() override
+ {
+ return &conditions_schema;
+ }
+
+};
+
+class Json_schema_if : public Json_schema_conditional
+{
+};
+
+class Json_schema_else : public Json_schema_conditional
+{
+};
+
+class Json_schema_then : public Json_schema_conditional
+{
+};
+
+class Json_schema_media_string : public Json_schema_keyword
+{
+ public:
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+class Json_schema_reference : public Json_schema_keyword
+{
+ public:
+ bool handle_keyword(THD *thd, json_engine_t *je,
+ const char* key_start,
+ const char* key_end,
+ List<Json_schema_keyword> *all_keywords) override;
+};
+
+bool create_object_and_handle_keyword(THD *thd, json_engine_t *je,
+ List<Json_schema_keyword> *keyword_list,
+ List<Json_schema_keyword> *all_keywords);
+uchar* get_key_name_for_property(const char *key_name, size_t *length,
+ my_bool /* unused */);
+uchar* get_key_name_for_func(const char *key_name, size_t *length,
+ my_bool /* unused */);
+
+enum keyword_flag
+{
+ JSON_SCHEMA_COMMON_KEYWORD= 0,
+ JSON_SCHEMA_NUMBER_KEYWORD= 1,
+ JSON_SCHEMA_STRING_KEYWORD= 2,
+ JSON_SCHEMA_ARRAY_KEYWORD= 3,
+ JSON_SCHEMA_OBJECT_KEYWORD= 4,
+ JSON_SCHEMA_LOGIC_KEYWORD= 5,
+ JSON_SCHEMA_CONDITION_KEYWORD= 6,
+ JSON_SCHEMA_ANNOTATION_KEYWORD= 7,
+ JSON_SCHEMA_FORMAT_KEYWORD= 8,
+ JSON_SCHEMA_MEDIA_KEYWORD= 9,
+ JSON_SCHEMA_REFERENCE_KEYWORD= 10,
+ JSON_SCHEMA_EMPTY_KEYWORD= 11
+};
+
+typedef struct st_json_schema_keyword_map
+{
+ LEX_CSTRING func_name;
+ Json_schema_keyword*(*func)(THD*);
+ enum keyword_flag flag;
+} json_schema_keyword_map;
+
+bool setup_json_schema_keyword_hash();
+void cleanup_json_schema_keyword_hash();
+
+#endif
diff --git a/sql/json_schema_helper.cc b/sql/json_schema_helper.cc
new file mode 100644
index 00000000000..e843b34b6b2
--- /dev/null
+++ b/sql/json_schema_helper.cc
@@ -0,0 +1,102 @@
+/* Copyright (c) 2016, 2022, MariaDB Corporation.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; version 2 of the License.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */
+
+
+#include "mariadb.h"
+#include "sql_class.h"
+#include "sql_parse.h" // For check_stack_overrun
+#include <m_string.h>
+#include "json_schema_helper.h"
+
+
+bool json_key_equals(const char* key, LEX_CSTRING val, int key_len)
+{
+ return (size_t)key_len == val.length && !strncmp(key, val.str, key_len);
+}
+
+bool json_assign_type(uint *curr_type, json_engine_t *je)
+{
+ const char* curr_value= (const char*)je->value;
+ int len= je->value_len;
+
+ if (json_key_equals(curr_value, { STRING_WITH_LEN("number") }, len))
+ *curr_type|= (1 << JSON_VALUE_NUMBER);
+ else if(json_key_equals(curr_value, { STRING_WITH_LEN("string") }, len))
+ *curr_type|= (1 << JSON_VALUE_STRING);
+ else if(json_key_equals(curr_value, { STRING_WITH_LEN("array") }, len))
+ *curr_type|= (1 << JSON_VALUE_ARRAY);
+ else if(json_key_equals(curr_value, { STRING_WITH_LEN("object") }, len))
+ *curr_type|= (1 << JSON_VALUE_OBJECT);
+ else if (json_key_equals(curr_value, { STRING_WITH_LEN("boolean") }, len))
+ *curr_type|= ((1 << JSON_VALUE_TRUE) | (1 << JSON_VALUE_FALSE));
+ else if (json_key_equals(curr_value, { STRING_WITH_LEN("null") }, len))
+ *curr_type|= (1 << JSON_VALUE_NULL);
+ else
+ {
+ my_error(ER_JSON_INVALID_VALUE_FOR_KEYWORD, MYF(0), "type");
+ return true;
+ }
+ return false;
+}
+
+uchar* get_key_name(const char *key_name, size_t *length,
+ my_bool /* unused */)
+{
+ *length= strlen(key_name);
+ return (uchar*) key_name;
+}
+
+void json_get_normalized_string(json_engine_t *je, String *res,
+ int *error)
+{
+ char *val_begin= (char*)je->value, *val_end;
+ String val;
+ DYNAMIC_STRING a_res;
+
+ if (init_dynamic_string(&a_res, NULL, 0, 0))
+ goto error;
+
+ if (!json_value_scalar(je))
+ {
+ if (json_skip_level(je))
+ goto error;
+ }
+
+ val_end= json_value_scalar(je) ? val_begin+je->value_len :
+ (char *)je->s.c_str;
+ val.set((const char*)val_begin, val_end-val_begin, je->s.cs);
+
+ if (je->value_type == JSON_VALUE_NUMBER ||
+ je->value_type == JSON_VALUE_ARRAY ||
+ je->value_type == JSON_VALUE_OBJECT)
+ {
+ if (json_normalize(&a_res, (const char*)val.ptr(),
+ val_end-val_begin, je->s.cs))
+ goto error;
+ }
+ else if(je->value_type == JSON_VALUE_STRING)
+ {
+    if (dynstr_append_mem(&a_res, val.ptr(), je->value_len))
+      goto error;
+ }
+
+ res->append(a_res.str, a_res.length, je->s.cs);
+ *error= 0;
+
+ error:
+ dynstr_free(&a_res);
+
+ return;
+}
diff --git a/sql/json_schema_helper.h b/sql/json_schema_helper.h
new file mode 100644
index 00000000000..4a596b0b40d
--- /dev/null
+++ b/sql/json_schema_helper.h
@@ -0,0 +1,30 @@
+#ifndef JSON_SCHEMA_HELPER
+#define JSON_SCHEMA_HELPER
+
+/* Copyright (c) 2016, 2021, MariaDB
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; version 2 of the License.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */
+
+#include "sql_type_json.h"
+#include <m_string.h>
+#include "json_schema.h"
+
+bool json_key_equals(const char* key, LEX_CSTRING val, int key_len);
+
+bool json_assign_type(uint *curr_type, json_engine_t *je);
+uchar* get_key_name(const char *key_name, size_t *length,
+ my_bool /* unused */);
+void json_get_normalized_string(json_engine_t *je, String *res,
+ int *error);
+#endif
diff --git a/sql/json_table.cc b/sql/json_table.cc
index d404a54bc3f..5735d34d238 100644
--- a/sql/json_table.cc
+++ b/sql/json_table.cc
@@ -29,22 +29,6 @@
#define HA_ERR_JSON_TABLE (HA_ERR_LAST+1)
-/*
- Allocating memory and *also* using it (reading and
- writing from it) because some build instructions cause
- compiler to optimize out stack_used_up. Since alloca()
- here depends on stack_used_up, it doesnt get executed
- correctly and causes json_debug_nonembedded to fail
- ( --error ER_STACK_OVERRUN_NEED_MORE does not occur).
-*/
-#define ALLOCATE_MEM_ON_STACK(A) do \
- { \
- uchar *array= (uchar*)alloca(A); \
- array[0]= 1; \
- array[0]++; \
- array[0] ? array[0]++ : array[0]--; \
- } while(0)
-
class table_function_handlerton
{
public:
diff --git a/sql/mysqld.cc b/sql/mysqld.cc
index ee368def9be..67191db72a4 100644
--- a/sql/mysqld.cc
+++ b/sql/mysqld.cc
@@ -44,6 +44,7 @@
#include "sql_base.h"
#include "sql_test.h" // mysql_print_status
#include "item_create.h" // item_create_cleanup, item_create_init
+#include "json_schema.h"
#include "sql_servers.h" // servers_free, servers_init
#include "init.h" // unireg_init
#include "derror.h" // init_errmessage
@@ -1991,6 +1992,7 @@ static void clean_up(bool print_message)
item_func_sleep_free();
lex_free(); /* Free some memory */
item_create_cleanup();
+ cleanup_json_schema_keyword_hash();
tdc_start_shutdown();
#ifdef HAVE_REPLICATION
semi_sync_master_deinit();
@@ -4264,6 +4266,7 @@ static int init_common_variables()
if (item_create_init())
return 1;
item_init();
+ setup_json_schema_keyword_hash();
/*
Process a comma-separated character set list and choose
the first available character set. This is mostly for
diff --git a/sql/share/errmsg-utf8.txt b/sql/share/errmsg-utf8.txt
index b66bd694207..a89f1435171 100644
--- a/sql/share/errmsg-utf8.txt
+++ b/sql/share/errmsg-utf8.txt
@@ -10076,3 +10076,7 @@ ER_CM_OPTION_MISSING_REQUIREMENT
eng "CHANGE MASTER TO option '%s=%s' is missing requirement %s"
ER_SLAVE_STATEMENT_TIMEOUT 70100
eng "Slave log event execution was interrupted (slave_max_statement_time exceeded)"
+ER_JSON_INVALID_VALUE_FOR_KEYWORD
+ eng "Invalid value for keyword %s"
+ER_JSON_SCHEMA_KEYWORD_UNSUPPORTED
+ eng "%s keyword is not supported"
diff --git a/sql/sql_parse.h b/sql/sql_parse.h
index eeb7f832adb..4c5bcc67a36 100644
--- a/sql/sql_parse.h
+++ b/sql/sql_parse.h
@@ -188,4 +188,20 @@ check_table_access(THD *thd, privilege_t requirements,TABLE_LIST *tables,
{ return false; }
#endif /*NO_EMBEDDED_ACCESS_CHECKS*/
+
+/*
+  Allocate stack memory and *also* use it (write with bzero()
+  and read it back via my_checksum()) so the compiler cannot
+  optimize the alloca() away. If the allocation is elided,
+  stack usage is not actually consumed and the
+  json_debug_nonembedded test fails (the expected
+  ER_STACK_OVERRUN_NEED_MORE error does not occur).
+*/
+#define ALLOCATE_MEM_ON_STACK(A) do \
+ { \
+ uchar *array= (uchar*)alloca(A); \
+ bzero(array, A); \
+ my_checksum(0, array, A); \
+ } while(0)
+
#endif /* SQL_PARSE_INCLUDED */