#
# Bug #45236: large blob inserts from mysqldump fail, possible memory issue ?
#
# This test consumes a significant amount of resources.
# Therefore it should be kept separated from other tests.
# Otherwise we might suffer from problems like
# Bug#43801 mysql.test takes too long, fails due to expired timeout
# on debx86-b in PB
#
--source include/not_embedded.inc
--disable_warnings
DROP TABLE IF EXISTS t1;
--enable_warnings
# Have to change the global variable as the session variable is
# read-only.
SET @old_max_allowed_packet= @@global.max_allowed_packet;
# ~1 MB blob length + some space for the rest of the INSERT query
SET @@global.max_allowed_packet = 1024 * 1024 + 1024;
# Create a new connection since the global max_allowed_packet
# has no effect on the current one.
connect (con1, localhost, root,,);
CREATE TABLE t1(data LONGBLOB);
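# The blob ends with an escaped quote, a carriage return and two dummy
# words; if the client mis-parsed the dumped line, "\r dummydb dummyhost"
# would resemble its \r (connect) command with db and host arguments.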
INSERT INTO t1 SELECT CONCAT(REPEAT('1', 1024*1024 - 27),
"\'\r dummydb dummyhost");
let $outfile= $MYSQLTEST_VARDIR/tmp/bug41486.sql;
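# Remove any dump file left over from a previous run; --error 0,1 lets
# the removal pass whether or not the file exists.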
--error 0,1
remove_file $outfile;
--exec $MYSQL_DUMP --compact -t test t1 > $outfile
# Check that the mysql client does not interpret the "\r" sequence as a command
--exec $MYSQL --max_allowed_packet=1M test < $outfile 2>&1
DROP TABLE t1;
# Cleanup
disconnect con1;
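# Wait for the disconnect to be fully processed before returning to the
# default connection.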
--source include/wait_until_disconnected.inc
remove_file $outfile;
connection default;
SET @@global.max_allowed_packet = @old_max_allowed_packet;