summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
author	Alexey Botchkov <holyfoot@askmonty.org>	2020-06-15 22:51:21 +0400
committer	Alexey Botchkov <holyfoot@askmonty.org>	2020-06-15 22:51:21 +0400
commit	6c573a9146caa76807db1190e0747f5befb5b170 (patch)
tree	a304df0f89fa87736623df6c313ff45fe093b526
parent	30d41c8102c36af7551b3ae77e48efbeb6d7ecea (diff)
download	mariadb-git-6c573a9146caa76807db1190e0747f5befb5b170.tar.gz
MDEV-22844 JSON_ARRAYAGG is limited by group_concat_max_len.
Warning message and function result fixed
-rw-r--r--mysql-test/main/func_json.result12
-rw-r--r--mysql-test/main/func_json.test8
-rw-r--r--sql/item_jsonfunc.cc18
-rw-r--r--sql/item_jsonfunc.h3
-rw-r--r--sql/item_sum.cc53
-rw-r--r--sql/item_sum.h2
-rw-r--r--sql/share/errmsg-utf8.txt2
7 files changed, 77 insertions, 21 deletions
diff --git a/mysql-test/main/func_json.result b/mysql-test/main/func_json.result
index 67eb4648872..1151d9761ea 100644
--- a/mysql-test/main/func_json.result
+++ b/mysql-test/main/func_json.result
@@ -1308,6 +1308,18 @@ SELECT JSON_ARRAYAGG(a ORDER BY a ASC) FROM t1;
JSON_ARRAYAGG(a ORDER BY a ASC)
[null,"blue","red"]
DROP TABLE t1;
+set group_concat_max_len=64;
+create table t1 (a varchar(254));
+insert into t1 values (concat('x64-', repeat('a', 60)));
+insert into t1 values (concat('x64-', repeat('b', 60)));
+insert into t1 values (concat('x64-', repeat('c', 60)));
+select json_arrayagg(a) from t1;
+json_arrayagg(a)
+["x64-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"]
+Warnings:
+Warning 1260 Row 1 was cut by JSON_ARRAYAGG()
+drop table t1;
+SET group_concat_max_len= default;
#
# End of 10.5 tests
#
diff --git a/mysql-test/main/func_json.test b/mysql-test/main/func_json.test
index 7e4c94d8061..1c6c940af0c 100644
--- a/mysql-test/main/func_json.test
+++ b/mysql-test/main/func_json.test
@@ -812,6 +812,14 @@ SELECT JSON_ARRAYAGG(a ORDER BY a DESC) FROM t1;
SELECT JSON_ARRAYAGG(a ORDER BY a ASC) FROM t1;
DROP TABLE t1;
+set group_concat_max_len=64;
+create table t1 (a varchar(254));
+insert into t1 values (concat('x64-', repeat('a', 60)));
+insert into t1 values (concat('x64-', repeat('b', 60)));
+insert into t1 values (concat('x64-', repeat('c', 60)));
+select json_arrayagg(a) from t1;
+drop table t1;
+SET group_concat_max_len= default;
+
--echo #
--echo # End of 10.5 tests
--echo #
diff --git a/sql/item_jsonfunc.cc b/sql/item_jsonfunc.cc
index 7e2dc82cf1f..b9a84775311 100644
--- a/sql/item_jsonfunc.cc
+++ b/sql/item_jsonfunc.cc
@@ -3689,6 +3689,24 @@ String *Item_func_json_arrayagg::get_str_from_field(Item *i,Field *f,
}
+void Item_func_json_arrayagg::cut_max_length(String *result,
+ uint old_length, uint max_length) const
+{
+ if (result->length() == 0)
+ return;
+
+ if (result->ptr()[result->length() - 1] != '"' ||
+ max_length == 0)
+ {
+ Item_func_group_concat::cut_max_length(result, old_length, max_length);
+ return;
+ }
+
+ Item_func_group_concat::cut_max_length(result, old_length, max_length-1);
+ result->append('"');
+}
+
+
String* Item_func_json_arrayagg::val_str(String *str)
{
if ((str= Item_func_group_concat::val_str(str)))
diff --git a/sql/item_jsonfunc.h b/sql/item_jsonfunc.h
index 0b02b8e4da2..eb9a0b1187d 100644
--- a/sql/item_jsonfunc.h
+++ b/sql/item_jsonfunc.h
@@ -546,7 +546,8 @@ protected:
String *get_str_from_item(Item *i, String *tmp);
String *get_str_from_field(Item *i, Field *f, String *tmp,
const uchar *key, size_t offset);
-
+ void cut_max_length(String *result,
+ uint old_length, uint max_length) const;
public:
String m_tmp_json; /* Used in get_str_from_*.. */
Item_func_json_arrayagg(THD *thd, Name_resolution_context *context_arg,
diff --git a/sql/item_sum.cc b/sql/item_sum.cc
index 12310ddce20..e79344507b2 100644
--- a/sql/item_sum.cc
+++ b/sql/item_sum.cc
@@ -3733,6 +3733,37 @@ int group_concat_key_cmp_with_order_with_nulls(void *arg, const void *key1_arg,
}
+static void report_cut_value_error(THD *thd, uint row_count, const char *fname)
+{
+ size_t fn_len= strlen(fname);
+ char *fname_upper= (char *) my_alloca(fn_len + 1);
+ fname_upper[fn_len]= 0;
+ for (; fn_len; fn_len--)
+ fname_upper[fn_len-1]= my_toupper(&my_charset_latin1, fname[fn_len-1]);
+ push_warning_printf(thd, Sql_condition::WARN_LEVEL_WARN,
+ ER_CUT_VALUE_GROUP_CONCAT,
+ ER_THD(thd, ER_CUT_VALUE_GROUP_CONCAT),
+ row_count, fname_upper);
+}
+
+
+void Item_func_group_concat::cut_max_length(String *result,
+ uint old_length, uint max_length) const
+{
+ const char *ptr= result->ptr();
+ /*
+ It's ok to use item->result.length() as the fourth argument
+ as this is never used to limit the length of the data.
+ Cut is done with the third argument.
+ */
+ size_t add_length= Well_formed_prefix(collation.collation,
+ ptr + old_length,
+ ptr + max_length,
+ result->length()).length();
+ result->length(old_length + add_length);
+}
+
+
/**
Append data from current leaf to item->result.
*/
@@ -3812,24 +3843,10 @@ int dump_leaf_key(void* key_arg, element_count count __attribute__((unused)),
/* stop if length of result more than max_length */
if (result->length() > max_length)
{
- CHARSET_INFO *cs= item->collation.collation;
- const char *ptr= result->ptr();
THD *thd= current_thd;
- /*
- It's ok to use item->result.length() as the fourth argument
- as this is never used to limit the length of the data.
- Cut is done with the third argument.
- */
- size_t add_length= Well_formed_prefix(cs,
- ptr + old_length,
- ptr + max_length,
- result->length()).length();
- result->length(old_length + add_length);
+ item->cut_max_length(result, old_length, max_length);
item->warning_for_row= TRUE;
- push_warning_printf(thd, Sql_condition::WARN_LEVEL_WARN,
- ER_CUT_VALUE_GROUP_CONCAT,
- ER_THD(thd, ER_CUT_VALUE_GROUP_CONCAT),
- item->row_count);
+ report_cut_value_error(thd, item->row_count, item->func_name());
/**
To avoid duplicated warnings in Item_func_group_concat::val_str()
@@ -4427,9 +4444,7 @@ String* Item_func_group_concat::val_str(String* str)
table->blob_storage->is_truncated_value())
{
warning_for_row= true;
- push_warning_printf(current_thd, Sql_condition::WARN_LEVEL_WARN,
- ER_CUT_VALUE_GROUP_CONCAT, ER(ER_CUT_VALUE_GROUP_CONCAT),
- row_count);
+ report_cut_value_error(current_thd, row_count, func_name());
}
return &result;
diff --git a/sql/item_sum.h b/sql/item_sum.h
index 118b5b958f3..dc520ce2578 100644
--- a/sql/item_sum.h
+++ b/sql/item_sum.h
@@ -1941,6 +1941,8 @@ protected:
virtual String *get_str_from_field(Item *i, Field *f, String *tmp,
const uchar *key, size_t offset)
{ return f->val_str(tmp, key + offset); }
+ virtual void cut_max_length(String *result,
+ uint old_length, uint max_length) const;
public:
// Methods used by ColumnStore
bool get_distinct() const { return distinct; }
diff --git a/sql/share/errmsg-utf8.txt b/sql/share/errmsg-utf8.txt
index 49cabec9916..275dc1605d8 100644
--- a/sql/share/errmsg-utf8.txt
+++ b/sql/share/errmsg-utf8.txt
@@ -4940,7 +4940,7 @@ ER_ZLIB_Z_DATA_ERROR
por "ZLIB: Dados de entrada está corrupto"
spa "ZLIB: Dato de entrada fué corrompido para zlib"
ER_CUT_VALUE_GROUP_CONCAT
- eng "Row %u was cut by GROUP_CONCAT()"
+ eng "Row %u was cut by %s)"
ER_WARN_TOO_FEW_RECORDS 01000
eng "Row %lu doesn't contain data for all columns"
ger "Zeile %lu enthält nicht für alle Felder Daten"