author    INADA Naoki <inada-n@klab.com>  2012-09-21 13:58:56 +0900
committer INADA Naoki <inada-n@klab.com>  2012-09-21 13:58:56 +0900
commit    0297b36bda332fe21ab7d4c4c549cc68ccd344bc (patch)
tree      10ba037856c3389f4897caec94d0f0e4d162aa37
parent    56ec7ee1b1bdca7dd528cb6337f6b391581aefd0 (diff)
download  msgpack-python-0297b36bda332fe21ab7d4c4c549cc68ccd344bc.tar.gz
Fix reading more than read_size.
-rw-r--r--  msgpack/_msgpack.pyx  |  31
1 file changed, 14 insertions, 17 deletions
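For context, a minimal sketch of the streaming path this commit touches: an Unpacker wrapping a file-like object pulls chunks via file_like.read() and yields decoded objects. The io.BytesIO source and the small read_size below are illustrative only, not part of the patch.

    import io
    import msgpack

    # Illustrative stream: three packed integers in one buffer.
    buf = io.BytesIO(msgpack.packb(1) + msgpack.packb(2) + msgpack.packb(3))

    # The Unpacker reads from the file-like object in chunks; read_size
    # bounds how much is requested per read (the behaviour fixed below).
    unpacker = msgpack.Unpacker(buf, read_size=16)
    for obj in unpacker:
        print(obj)   # 1, 2, 3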
diff --git a/msgpack/_msgpack.pyx b/msgpack/_msgpack.pyx
index 7ff0cff..15bf5a7 100644
--- a/msgpack/_msgpack.pyx
+++ b/msgpack/_msgpack.pyx
@@ -278,8 +278,8 @@ cdef class Unpacker(object):
`unicode_errors` is used for decoding bytes.
- `max_buffer_size` limits size of data waiting unpacked. 0 means unlimited
- (default).
+ `max_buffer_size` limits size of data waiting unpacked.
+ 0 means system's INT_MAX (default).
Raises `BufferFull` exception when it is insufficient.
You should set this parameter when unpacking data from an untrusted source.
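As a usage note (a sketch, not part of the patch): with a bounded max_buffer_size, feeding more pending data than the limit allows raises the BufferFull exception mentioned above instead of growing the buffer without bound. The 1 KiB limit and the oversized payload are arbitrary.

    import msgpack

    # Cap the amount of not-yet-unpacked data at 1 KiB.
    unpacker = msgpack.Unpacker(max_buffer_size=1024)
    try:
        unpacker.feed(b"\x00" * 4096)   # more pending bytes than the limit
    except msgpack.BufferFull:
        print("refused to buffer oversized input")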
@@ -332,11 +332,11 @@ cdef class Unpacker(object):
raise ValueError("`file_like.read` must be a callable.")
if not max_buffer_size:
max_buffer_size = INT_MAX
+ if read_size > max_buffer_size:
+ raise ValueError("read_size should be less or equal to max_buffer_size")
if not read_size:
read_size = min(max_buffer_size, 1024**2)
self.max_buffer_size = max_buffer_size
- if read_size > max_buffer_size:
- raise ValueError("read_size should be less or equal to max_buffer_size")
self.read_size = read_size
self.buf = <char*>malloc(read_size)
if self.buf == NULL:
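For reference, a sketch of how the argument check this hunk moves earlier surfaces to callers; the sizes are made up, and the message text is the one from the code above.

    import msgpack

    try:
        # read_size larger than max_buffer_size is rejected up front.
        msgpack.Unpacker(read_size=2 * 1024 ** 2, max_buffer_size=1024 ** 2)
    except ValueError as exc:
        print(exc)   # read_size should be less or equal to max_buffer_size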
@@ -419,18 +419,15 @@ cdef class Unpacker(object):
self.buf_size = buf_size
self.buf_tail = tail + _buf_len
- # prepare self.buf from file_like
- cdef fill_buffer(self):
- if self.file_like is not None:
- next_bytes = self.file_like_read(
- max(self.read_size,
- self.max_buffer_size - (self.buf_tail - self.buf_head)
- ))
- if next_bytes:
- self.append_buffer(PyBytes_AsString(next_bytes),
- PyBytes_Size(next_bytes))
- else:
- self.file_like = None
+ cdef read_from_file(self):
+ next_bytes = self.file_like_read(
+ min(self.read_size,
+ self.max_buffer_size - (self.buf_tail - self.buf_head)
+ ))
+ if next_bytes:
+ self.append_buffer(PyBytes_AsString(next_bytes), PyBytes_Size(next_bytes))
+ else:
+ self.file_like = None
cpdef unpack(self):
"""unpack one object"""
@@ -443,7 +440,7 @@ cdef class Unpacker(object):
return o
elif ret == 0:
if self.file_like is not None:
- self.fill_buffer()
+ self.read_from_file()
continue
raise StopIteration("No more unpack data.")
else: