author | Shubham Saini <shubham6405@gmail.com> | 2019-08-05 08:32:33 +0000 |
---|---|---|
committer | Shubham Saini <shubham6405@gmail.com> | 2019-08-05 08:32:33 +0000 |
commit | 227b2d30a8675b44918f9d9ca89b24144a938215 (patch) | |
tree | 9f8e6a28724514b6fdf463a9ab2067a7ef309b72 /venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack | |
parent | 842a8cfbbbdb1f92889d892e4859dbd5d40c5be8 (diff) | |
removing venv files
Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack')
4 files changed, 0 insertions, 1085 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/__init__.py
deleted file mode 100644
index dda626a..0000000
--- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/__init__.py
+++ /dev/null
@@ -1,66 +0,0 @@
1 | # coding: utf-8 | ||
2 | from pip._vendor.msgpack._version import version | ||
3 | from pip._vendor.msgpack.exceptions import * | ||
4 | |||
5 | from collections import namedtuple | ||
6 | |||
7 | |||
8 | class ExtType(namedtuple('ExtType', 'code data')): | ||
9 | """ExtType represents ext type in msgpack.""" | ||
10 | def __new__(cls, code, data): | ||
11 | if not isinstance(code, int): | ||
12 | raise TypeError("code must be int") | ||
13 | if not isinstance(data, bytes): | ||
14 | raise TypeError("data must be bytes") | ||
15 | if not 0 <= code <= 127: | ||
16 | raise ValueError("code must be 0~127") | ||
17 | return super(ExtType, cls).__new__(cls, code, data) | ||
18 | |||
19 | |||
20 | import os | ||
21 | if os.environ.get('MSGPACK_PUREPYTHON'): | ||
22 | from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker | ||
23 | else: | ||
24 | try: | ||
25 | from pip._vendor.msgpack._packer import Packer | ||
26 | from pip._vendor.msgpack._unpacker import unpackb, Unpacker | ||
27 | except ImportError: | ||
28 | from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker | ||
29 | |||
30 | |||
31 | def pack(o, stream, **kwargs): | ||
32 | """ | ||
33 | Pack object `o` and write it to `stream` | ||
34 | |||
35 | See :class:`Packer` for options. | ||
36 | """ | ||
37 | packer = Packer(**kwargs) | ||
38 | stream.write(packer.pack(o)) | ||
39 | |||
40 | |||
41 | def packb(o, **kwargs): | ||
42 | """ | ||
43 | Pack object `o` and return packed bytes | ||
44 | |||
45 | See :class:`Packer` for options. | ||
46 | """ | ||
47 | return Packer(**kwargs).pack(o) | ||
48 | |||
49 | |||
50 | def unpack(stream, **kwargs): | ||
51 | """ | ||
52 | Unpack an object from `stream`. | ||
53 | |||
54 | Raises `ExtraData` when `stream` contains extra bytes. | ||
55 | See :class:`Unpacker` for options. | ||
56 | """ | ||
57 | data = stream.read() | ||
58 | return unpackb(data, **kwargs) | ||
59 | |||
60 | |||
61 | # alias for compatibility to simplejson/marshal/pickle. | ||
62 | load = unpack | ||
63 | loads = unpackb | ||
64 | |||
65 | dump = pack | ||
66 | dumps = packb | ||
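For context, the deleted `__init__.py` wires up the top-level API: it prefers the C extension's `Packer`/`Unpacker`, falls back to the pure-Python implementation, and exposes `pack`/`packb`/`unpack`/`unpackb` plus the `load`/`loads`/`dump`/`dumps` aliases. A minimal round-trip sketch, assuming the vendored `pip._vendor.msgpack` package is still importable (that import path is an assumption, not part of this diff):

```python
# Minimal round-trip sketch for the deleted top-level API.
# Assumption: the vendored package is importable as pip._vendor.msgpack.
import io

from pip._vendor import msgpack

payload = {"name": "example", "values": [1, 2, 3]}

# packb()/unpackb() work on bytes; dumps/loads are the aliases defined above.
packed = msgpack.packb(payload, use_bin_type=True)
assert msgpack.unpackb(packed, raw=False) == payload

# pack()/unpack() work on file-like objects, mirroring marshal/pickle.
buf = io.BytesIO()
msgpack.pack(payload, buf, use_bin_type=True)
buf.seek(0)
assert msgpack.unpack(buf, raw=False) == payload
```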
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/_version.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/_version.py
deleted file mode 100644
index 91d97cd..0000000
--- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/_version.py
+++ /dev/null
@@ -1 +0,0 @@
1 | version = (0, 5, 6) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/exceptions.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/exceptions.py
deleted file mode 100644
index e0b5133..0000000
--- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/exceptions.py
+++ /dev/null
@@ -1,41 +0,0 @@
1 | class UnpackException(Exception): | ||
2 | """Deprecated. Use Exception instead to catch all exception during unpacking.""" | ||
3 | |||
4 | |||
5 | class BufferFull(UnpackException): | ||
6 | pass | ||
7 | |||
8 | |||
9 | class OutOfData(UnpackException): | ||
10 | pass | ||
11 | |||
12 | |||
13 | class UnpackValueError(UnpackException, ValueError): | ||
14 | """Deprecated. Use ValueError instead.""" | ||
15 | |||
16 | |||
17 | class ExtraData(UnpackValueError): | ||
18 | def __init__(self, unpacked, extra): | ||
19 | self.unpacked = unpacked | ||
20 | self.extra = extra | ||
21 | |||
22 | def __str__(self): | ||
23 | return "unpack(b) received extra data." | ||
24 | |||
25 | |||
26 | class PackException(Exception): | ||
27 | """Deprecated. Use Exception instead to catch all exception during packing.""" | ||
28 | |||
29 | |||
30 | class PackValueError(PackException, ValueError): | ||
31 | """PackValueError is raised when type of input data is supported but it's value is unsupported. | ||
32 | |||
33 | Deprecated. Use ValueError instead. | ||
34 | """ | ||
35 | |||
36 | |||
37 | class PackOverflowError(PackValueError, OverflowError): | ||
38 | """PackOverflowError is raised when integer value is out of range of msgpack support [-2**31, 2**32). | ||
39 | |||
40 | Deprecated. Use ValueError instead. | ||
41 | """ | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/fallback.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/fallback.py
deleted file mode 100644
index a1a9712..0000000
--- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/fallback.py
+++ /dev/null
@@ -1,977 +0,0 @@
1 | """Fallback pure Python implementation of msgpack""" | ||
2 | |||
3 | import sys | ||
4 | import struct | ||
5 | import warnings | ||
6 | |||
7 | if sys.version_info[0] == 3: | ||
8 | PY3 = True | ||
9 | int_types = int | ||
10 | Unicode = str | ||
11 | xrange = range | ||
12 | def dict_iteritems(d): | ||
13 | return d.items() | ||
14 | else: | ||
15 | PY3 = False | ||
16 | int_types = (int, long) | ||
17 | Unicode = unicode | ||
18 | def dict_iteritems(d): | ||
19 | return d.iteritems() | ||
20 | |||
21 | |||
22 | if hasattr(sys, 'pypy_version_info'): | ||
23 | # cStringIO is slow on PyPy, StringIO is faster. However: PyPy's own | ||
24 | # StringBuilder is fastest. | ||
25 | from __pypy__ import newlist_hint | ||
26 | try: | ||
27 | from __pypy__.builders import BytesBuilder as StringBuilder | ||
28 | except ImportError: | ||
29 | from __pypy__.builders import StringBuilder | ||
30 | USING_STRINGBUILDER = True | ||
31 | class StringIO(object): | ||
32 | def __init__(self, s=b''): | ||
33 | if s: | ||
34 | self.builder = StringBuilder(len(s)) | ||
35 | self.builder.append(s) | ||
36 | else: | ||
37 | self.builder = StringBuilder() | ||
38 | def write(self, s): | ||
39 | if isinstance(s, memoryview): | ||
40 | s = s.tobytes() | ||
41 | elif isinstance(s, bytearray): | ||
42 | s = bytes(s) | ||
43 | self.builder.append(s) | ||
44 | def getvalue(self): | ||
45 | return self.builder.build() | ||
46 | else: | ||
47 | USING_STRINGBUILDER = False | ||
48 | from io import BytesIO as StringIO | ||
49 | newlist_hint = lambda size: [] | ||
50 | |||
51 | |||
52 | from pip._vendor.msgpack.exceptions import ( | ||
53 | BufferFull, | ||
54 | OutOfData, | ||
55 | UnpackValueError, | ||
56 | PackValueError, | ||
57 | PackOverflowError, | ||
58 | ExtraData) | ||
59 | |||
60 | from pip._vendor.msgpack import ExtType | ||
61 | |||
62 | |||
63 | EX_SKIP = 0 | ||
64 | EX_CONSTRUCT = 1 | ||
65 | EX_READ_ARRAY_HEADER = 2 | ||
66 | EX_READ_MAP_HEADER = 3 | ||
67 | |||
68 | TYPE_IMMEDIATE = 0 | ||
69 | TYPE_ARRAY = 1 | ||
70 | TYPE_MAP = 2 | ||
71 | TYPE_RAW = 3 | ||
72 | TYPE_BIN = 4 | ||
73 | TYPE_EXT = 5 | ||
74 | |||
75 | DEFAULT_RECURSE_LIMIT = 511 | ||
76 | |||
77 | |||
78 | def _check_type_strict(obj, t, type=type, tuple=tuple): | ||
79 | if type(t) is tuple: | ||
80 | return type(obj) in t | ||
81 | else: | ||
82 | return type(obj) is t | ||
83 | |||
84 | |||
85 | def _get_data_from_buffer(obj): | ||
86 | try: | ||
87 | view = memoryview(obj) | ||
88 | except TypeError: | ||
89 | # try to use legacy buffer protocol if 2.7, otherwise re-raise | ||
90 | if not PY3: | ||
91 | view = memoryview(buffer(obj)) | ||
92 | warnings.warn("using old buffer interface to unpack %s; " | ||
93 | "this leads to unpacking errors if slicing is used and " | ||
94 | "will be removed in a future version" % type(obj), | ||
95 | RuntimeWarning) | ||
96 | else: | ||
97 | raise | ||
98 | if view.itemsize != 1: | ||
99 | raise ValueError("cannot unpack from multi-byte object") | ||
100 | return view | ||
101 | |||
102 | |||
103 | def unpack(stream, **kwargs): | ||
104 | warnings.warn( | ||
105 | "Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.", | ||
106 | PendingDeprecationWarning) | ||
107 | data = stream.read() | ||
108 | return unpackb(data, **kwargs) | ||
109 | |||
110 | |||
111 | def unpackb(packed, **kwargs): | ||
112 | """ | ||
113 | Unpack an object from `packed`. | ||
114 | |||
115 | Raises `ExtraData` when `packed` contains extra bytes. | ||
116 | See :class:`Unpacker` for options. | ||
117 | """ | ||
118 | unpacker = Unpacker(None, **kwargs) | ||
119 | unpacker.feed(packed) | ||
120 | try: | ||
121 | ret = unpacker._unpack() | ||
122 | except OutOfData: | ||
123 | raise UnpackValueError("Data is not enough.") | ||
124 | if unpacker._got_extradata(): | ||
125 | raise ExtraData(ret, unpacker._get_extradata()) | ||
126 | return ret | ||
127 | |||
128 | |||
129 | class Unpacker(object): | ||
130 | """Streaming unpacker. | ||
131 | |||
132 | arguments: | ||
133 | |||
134 | :param file_like: | ||
135 | File-like object having `.read(n)` method. | ||
136 | If specified, unpacker reads serialized data from it and :meth:`feed()` is not usable. | ||
137 | |||
138 | :param int read_size: | ||
139 | Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`) | ||
140 | |||
141 | :param bool use_list: | ||
142 | If true, unpack msgpack array to Python list. | ||
143 | Otherwise, unpack to Python tuple. (default: True) | ||
144 | |||
145 | :param bool raw: | ||
146 | If true, unpack msgpack raw to Python bytes (default). | ||
147 | Otherwise, unpack to Python str (or unicode on Python 2) by decoding | ||
148 | with UTF-8 encoding (recommended). | ||
149 | Currently, the default is true, but it will be changed to false in | ||
150 | the near future, so specify it explicitly if you need to keep | ||
151 | backward compatibility. | ||
152 | |||
153 | The deprecated *encoding* option overrides this option. | ||
154 | |||
155 | :param callable object_hook: | ||
156 | When specified, it should be callable. | ||
157 | Unpacker calls it with a dict argument after unpacking msgpack map. | ||
158 | (See also simplejson) | ||
159 | |||
160 | :param callable object_pairs_hook: | ||
161 | When specified, it should be callable. | ||
162 | Unpacker calls it with a list of key-value pairs after unpacking msgpack map. | ||
163 | (See also simplejson) | ||
164 | |||
165 | :param str encoding: | ||
166 | Encoding used for decoding msgpack raw. | ||
167 | If it is None (default), msgpack raw is deserialized to Python bytes. | ||
168 | |||
169 | :param str unicode_errors: | ||
170 | (deprecated) Used for decoding msgpack raw with *encoding*. | ||
171 | (default: `'strict'`) | ||
172 | |||
173 | :param int max_buffer_size: | ||
174 | Limits the size of data waiting to be unpacked. 0 means system's INT_MAX (default). | ||
175 | Raises a `BufferFull` exception when it is insufficient. | ||
176 | You should set this parameter when unpacking data from an untrusted source. | ||
177 | |||
178 | :param int max_str_len: | ||
179 | Limits max length of str. (default: 2**31-1) | ||
180 | |||
181 | :param int max_bin_len: | ||
182 | Limits max length of bin. (default: 2**31-1) | ||
183 | |||
184 | :param int max_array_len: | ||
185 | Limits max length of array. (default: 2**31-1) | ||
186 | |||
187 | :param int max_map_len: | ||
188 | Limits max length of map. (default: 2**31-1) | ||
189 | |||
190 | |||
191 | example of streaming deserialize from file-like object:: | ||
192 | |||
193 | unpacker = Unpacker(file_like, raw=False) | ||
194 | for o in unpacker: | ||
195 | process(o) | ||
196 | |||
197 | example of streaming deserialize from socket:: | ||
198 | |||
199 | unpacker = Unpacker(raw=False) | ||
200 | while True: | ||
201 | buf = sock.recv(1024**2) | ||
202 | if not buf: | ||
203 | break | ||
204 | unpacker.feed(buf) | ||
205 | for o in unpacker: | ||
206 | process(o) | ||
207 | """ | ||
208 | |||
209 | def __init__(self, file_like=None, read_size=0, use_list=True, raw=True, | ||
210 | object_hook=None, object_pairs_hook=None, list_hook=None, | ||
211 | encoding=None, unicode_errors=None, max_buffer_size=0, | ||
212 | ext_hook=ExtType, | ||
213 | max_str_len=2147483647, # 2**31-1 | ||
214 | max_bin_len=2147483647, | ||
215 | max_array_len=2147483647, | ||
216 | max_map_len=2147483647, | ||
217 | max_ext_len=2147483647): | ||
218 | |||
219 | if encoding is not None: | ||
220 | warnings.warn( | ||
221 | "encoding is deprecated, Use raw=False instead.", | ||
222 | PendingDeprecationWarning) | ||
223 | |||
224 | if unicode_errors is None: | ||
225 | unicode_errors = 'strict' | ||
226 | |||
227 | if file_like is None: | ||
228 | self._feeding = True | ||
229 | else: | ||
230 | if not callable(file_like.read): | ||
231 | raise TypeError("`file_like.read` must be callable") | ||
232 | self.file_like = file_like | ||
233 | self._feeding = False | ||
234 | |||
235 | #: array of bytes fed. | ||
236 | self._buffer = bytearray() | ||
237 | # Some very old pythons don't support `struct.unpack_from()` with a | ||
238 | # `bytearray`. So we wrap it in a `buffer()` there. | ||
239 | if sys.version_info < (2, 7, 6): | ||
240 | self._buffer_view = buffer(self._buffer) | ||
241 | else: | ||
242 | self._buffer_view = self._buffer | ||
243 | #: Which position we are currently reading. | ||
244 | self._buff_i = 0 | ||
245 | |||
246 | # When Unpacker is used as an iterable, between the calls to next(), | ||
247 | # the buffer is not "consumed" completely, for efficiency's sake. | ||
248 | # Instead, it is done sloppily. To make sure we raise BufferFull at | ||
249 | # the correct moments, we have to keep track of how sloppy we were. | ||
250 | # Furthermore, when the buffer is incomplete (that is: in the case | ||
251 | # we raise an OutOfData) we need to roll back the buffer to the correct | ||
252 | # state, which _buf_checkpoint records. | ||
253 | self._buf_checkpoint = 0 | ||
254 | |||
255 | self._max_buffer_size = max_buffer_size or 2**31-1 | ||
256 | if read_size > self._max_buffer_size: | ||
257 | raise ValueError("read_size must be smaller than max_buffer_size") | ||
258 | self._read_size = read_size or min(self._max_buffer_size, 16*1024) | ||
259 | self._raw = bool(raw) | ||
260 | self._encoding = encoding | ||
261 | self._unicode_errors = unicode_errors | ||
262 | self._use_list = use_list | ||
263 | self._list_hook = list_hook | ||
264 | self._object_hook = object_hook | ||
265 | self._object_pairs_hook = object_pairs_hook | ||
266 | self._ext_hook = ext_hook | ||
267 | self._max_str_len = max_str_len | ||
268 | self._max_bin_len = max_bin_len | ||
269 | self._max_array_len = max_array_len | ||
270 | self._max_map_len = max_map_len | ||
271 | self._max_ext_len = max_ext_len | ||
272 | self._stream_offset = 0 | ||
273 | |||
274 | if list_hook is not None and not callable(list_hook): | ||
275 | raise TypeError('`list_hook` is not callable') | ||
276 | if object_hook is not None and not callable(object_hook): | ||
277 | raise TypeError('`object_hook` is not callable') | ||
278 | if object_pairs_hook is not None and not callable(object_pairs_hook): | ||
279 | raise TypeError('`object_pairs_hook` is not callable') | ||
280 | if object_hook is not None and object_pairs_hook is not None: | ||
281 | raise TypeError("object_pairs_hook and object_hook are mutually " | ||
282 | "exclusive") | ||
283 | if not callable(ext_hook): | ||
284 | raise TypeError("`ext_hook` is not callable") | ||
285 | |||
286 | def feed(self, next_bytes): | ||
287 | assert self._feeding | ||
288 | view = _get_data_from_buffer(next_bytes) | ||
289 | if (len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size): | ||
290 | raise BufferFull | ||
291 | |||
292 | # Strip buffer before checkpoint before reading file. | ||
293 | if self._buf_checkpoint > 0: | ||
294 | del self._buffer[:self._buf_checkpoint] | ||
295 | self._buff_i -= self._buf_checkpoint | ||
296 | self._buf_checkpoint = 0 | ||
297 | |||
298 | self._buffer += view | ||
299 | |||
300 | def _consume(self): | ||
301 | """ Gets rid of the used parts of the buffer. """ | ||
302 | self._stream_offset += self._buff_i - self._buf_checkpoint | ||
303 | self._buf_checkpoint = self._buff_i | ||
304 | |||
305 | def _got_extradata(self): | ||
306 | return self._buff_i < len(self._buffer) | ||
307 | |||
308 | def _get_extradata(self): | ||
309 | return self._buffer[self._buff_i:] | ||
310 | |||
311 | def read_bytes(self, n): | ||
312 | return self._read(n) | ||
313 | |||
314 | def _read(self, n): | ||
315 | # (int) -> bytearray | ||
316 | self._reserve(n) | ||
317 | i = self._buff_i | ||
318 | self._buff_i = i+n | ||
319 | return self._buffer[i:i+n] | ||
320 | |||
321 | def _reserve(self, n): | ||
322 | remain_bytes = len(self._buffer) - self._buff_i - n | ||
323 | |||
324 | # Fast path: buffer has n bytes already | ||
325 | if remain_bytes >= 0: | ||
326 | return | ||
327 | |||
328 | if self._feeding: | ||
329 | self._buff_i = self._buf_checkpoint | ||
330 | raise OutOfData | ||
331 | |||
332 | # Strip buffer before checkpoint before reading file. | ||
333 | if self._buf_checkpoint > 0: | ||
334 | del self._buffer[:self._buf_checkpoint] | ||
335 | self._buff_i -= self._buf_checkpoint | ||
336 | self._buf_checkpoint = 0 | ||
337 | |||
338 | # Read from file | ||
339 | remain_bytes = -remain_bytes | ||
340 | while remain_bytes > 0: | ||
341 | to_read_bytes = max(self._read_size, remain_bytes) | ||
342 | read_data = self.file_like.read(to_read_bytes) | ||
343 | if not read_data: | ||
344 | break | ||
345 | assert isinstance(read_data, bytes) | ||
346 | self._buffer += read_data | ||
347 | remain_bytes -= len(read_data) | ||
348 | |||
349 | if len(self._buffer) < n + self._buff_i: | ||
350 | self._buff_i = 0 # rollback | ||
351 | raise OutOfData | ||
352 | |||
353 | def _read_header(self, execute=EX_CONSTRUCT): | ||
354 | typ = TYPE_IMMEDIATE | ||
355 | n = 0 | ||
356 | obj = None | ||
357 | self._reserve(1) | ||
358 | b = self._buffer[self._buff_i] | ||
359 | self._buff_i += 1 | ||
360 | if b & 0b10000000 == 0: | ||
361 | obj = b | ||
362 | elif b & 0b11100000 == 0b11100000: | ||
363 | obj = -1 - (b ^ 0xff) | ||
364 | elif b & 0b11100000 == 0b10100000: | ||
365 | n = b & 0b00011111 | ||
366 | typ = TYPE_RAW | ||
367 | if n > self._max_str_len: | ||
368 | raise UnpackValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len)) | ||
369 | obj = self._read(n) | ||
370 | elif b & 0b11110000 == 0b10010000: | ||
371 | n = b & 0b00001111 | ||
372 | typ = TYPE_ARRAY | ||
373 | if n > self._max_array_len: | ||
374 | raise UnpackValueError("%s exceeds max_array_len(%s)" % (n, self._max_array_len)) | ||
375 | elif b & 0b11110000 == 0b10000000: | ||
376 | n = b & 0b00001111 | ||
377 | typ = TYPE_MAP | ||
378 | if n > self._max_map_len: | ||
379 | raise UnpackValueError("%s exceeds max_map_len(%s)" % (n, self._max_map_len)) | ||
380 | elif b == 0xc0: | ||
381 | obj = None | ||
382 | elif b == 0xc2: | ||
383 | obj = False | ||
384 | elif b == 0xc3: | ||
385 | obj = True | ||
386 | elif b == 0xc4: | ||
387 | typ = TYPE_BIN | ||
388 | self._reserve(1) | ||
389 | n = self._buffer[self._buff_i] | ||
390 | self._buff_i += 1 | ||
391 | if n > self._max_bin_len: | ||
392 | raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) | ||
393 | obj = self._read(n) | ||
394 | elif b == 0xc5: | ||
395 | typ = TYPE_BIN | ||
396 | self._reserve(2) | ||
397 | n = struct.unpack_from(">H", self._buffer_view, self._buff_i)[0] | ||
398 | self._buff_i += 2 | ||
399 | if n > self._max_bin_len: | ||
400 | raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) | ||
401 | obj = self._read(n) | ||
402 | elif b == 0xc6: | ||
403 | typ = TYPE_BIN | ||
404 | self._reserve(4) | ||
405 | n = struct.unpack_from(">I", self._buffer_view, self._buff_i)[0] | ||
406 | self._buff_i += 4 | ||
407 | if n > self._max_bin_len: | ||
408 | raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) | ||
409 | obj = self._read(n) | ||
410 | elif b == 0xc7: # ext 8 | ||
411 | typ = TYPE_EXT | ||
412 | self._reserve(2) | ||
413 | L, n = struct.unpack_from('Bb', self._buffer_view, self._buff_i) | ||
414 | self._buff_i += 2 | ||
415 | if L > self._max_ext_len: | ||
416 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) | ||
417 | obj = self._read(L) | ||
418 | elif b == 0xc8: # ext 16 | ||
419 | typ = TYPE_EXT | ||
420 | self._reserve(3) | ||
421 | L, n = struct.unpack_from('>Hb', self._buffer_view, self._buff_i) | ||
422 | self._buff_i += 3 | ||
423 | if L > self._max_ext_len: | ||
424 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) | ||
425 | obj = self._read(L) | ||
426 | elif b == 0xc9: # ext 32 | ||
427 | typ = TYPE_EXT | ||
428 | self._reserve(5) | ||
429 | L, n = struct.unpack_from('>Ib', self._buffer_view, self._buff_i) | ||
430 | self._buff_i += 5 | ||
431 | if L > self._max_ext_len: | ||
432 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) | ||
433 | obj = self._read(L) | ||
434 | elif b == 0xca: | ||
435 | self._reserve(4) | ||
436 | obj = struct.unpack_from(">f", self._buffer_view, self._buff_i)[0] | ||
437 | self._buff_i += 4 | ||
438 | elif b == 0xcb: | ||
439 | self._reserve(8) | ||
440 | obj = struct.unpack_from(">d", self._buffer_view, self._buff_i)[0] | ||
441 | self._buff_i += 8 | ||
442 | elif b == 0xcc: | ||
443 | self._reserve(1) | ||
444 | obj = self._buffer[self._buff_i] | ||
445 | self._buff_i += 1 | ||
446 | elif b == 0xcd: | ||
447 | self._reserve(2) | ||
448 | obj = struct.unpack_from(">H", self._buffer_view, self._buff_i)[0] | ||
449 | self._buff_i += 2 | ||
450 | elif b == 0xce: | ||
451 | self._reserve(4) | ||
452 | obj = struct.unpack_from(">I", self._buffer_view, self._buff_i)[0] | ||
453 | self._buff_i += 4 | ||
454 | elif b == 0xcf: | ||
455 | self._reserve(8) | ||
456 | obj = struct.unpack_from(">Q", self._buffer_view, self._buff_i)[0] | ||
457 | self._buff_i += 8 | ||
458 | elif b == 0xd0: | ||
459 | self._reserve(1) | ||
460 | obj = struct.unpack_from("b", self._buffer_view, self._buff_i)[0] | ||
461 | self._buff_i += 1 | ||
462 | elif b == 0xd1: | ||
463 | self._reserve(2) | ||
464 | obj = struct.unpack_from(">h", self._buffer_view, self._buff_i)[0] | ||
465 | self._buff_i += 2 | ||
466 | elif b == 0xd2: | ||
467 | self._reserve(4) | ||
468 | obj = struct.unpack_from(">i", self._buffer_view, self._buff_i)[0] | ||
469 | self._buff_i += 4 | ||
470 | elif b == 0xd3: | ||
471 | self._reserve(8) | ||
472 | obj = struct.unpack_from(">q", self._buffer_view, self._buff_i)[0] | ||
473 | self._buff_i += 8 | ||
474 | elif b == 0xd4: # fixext 1 | ||
475 | typ = TYPE_EXT | ||
476 | if self._max_ext_len < 1: | ||
477 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len)) | ||
478 | self._reserve(2) | ||
479 | n, obj = struct.unpack_from("b1s", self._buffer_view, self._buff_i) | ||
480 | self._buff_i += 2 | ||
481 | elif b == 0xd5: # fixext 2 | ||
482 | typ = TYPE_EXT | ||
483 | if self._max_ext_len < 2: | ||
484 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len)) | ||
485 | self._reserve(3) | ||
486 | n, obj = struct.unpack_from("b2s", self._buffer_view, self._buff_i) | ||
487 | self._buff_i += 3 | ||
488 | elif b == 0xd6: # fixext 4 | ||
489 | typ = TYPE_EXT | ||
490 | if self._max_ext_len < 4: | ||
491 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len)) | ||
492 | self._reserve(5) | ||
493 | n, obj = struct.unpack_from("b4s", self._buffer_view, self._buff_i) | ||
494 | self._buff_i += 5 | ||
495 | elif b == 0xd7: # fixext 8 | ||
496 | typ = TYPE_EXT | ||
497 | if self._max_ext_len < 8: | ||
498 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len)) | ||
499 | self._reserve(9) | ||
500 | n, obj = struct.unpack_from("b8s", self._buffer_view, self._buff_i) | ||
501 | self._buff_i += 9 | ||
502 | elif b == 0xd8: # fixext 16 | ||
503 | typ = TYPE_EXT | ||
504 | if self._max_ext_len < 16: | ||
505 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len)) | ||
506 | self._reserve(17) | ||
507 | n, obj = struct.unpack_from("b16s", self._buffer_view, self._buff_i) | ||
508 | self._buff_i += 17 | ||
509 | elif b == 0xd9: | ||
510 | typ = TYPE_RAW | ||
511 | self._reserve(1) | ||
512 | n = self._buffer[self._buff_i] | ||
513 | self._buff_i += 1 | ||
514 | if n > self._max_str_len: | ||
515 | raise UnpackValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len)) | ||
516 | obj = self._read(n) | ||
517 | elif b == 0xda: | ||
518 | typ = TYPE_RAW | ||
519 | self._reserve(2) | ||
520 | n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) | ||
521 | self._buff_i += 2 | ||
522 | if n > self._max_str_len: | ||
523 | raise UnpackValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len)) | ||
524 | obj = self._read(n) | ||
525 | elif b == 0xdb: | ||
526 | typ = TYPE_RAW | ||
527 | self._reserve(4) | ||
528 | n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) | ||
529 | self._buff_i += 4 | ||
530 | if n > self._max_str_len: | ||
531 | raise UnpackValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len)) | ||
532 | obj = self._read(n) | ||
533 | elif b == 0xdc: | ||
534 | typ = TYPE_ARRAY | ||
535 | self._reserve(2) | ||
536 | n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) | ||
537 | self._buff_i += 2 | ||
538 | if n > self._max_array_len: | ||
539 | raise UnpackValueError("%s exceeds max_array_len(%s)" % (n, self._max_array_len)) | ||
540 | elif b == 0xdd: | ||
541 | typ = TYPE_ARRAY | ||
542 | self._reserve(4) | ||
543 | n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) | ||
544 | self._buff_i += 4 | ||
545 | if n > self._max_array_len: | ||
546 | raise UnpackValueError("%s exceeds max_array_len(%s)" % (n, self._max_array_len)) | ||
547 | elif b == 0xde: | ||
548 | self._reserve(2) | ||
549 | n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) | ||
550 | self._buff_i += 2 | ||
551 | if n > self._max_map_len: | ||
552 | raise UnpackValueError("%s exceeds max_map_len(%s)" % (n, self._max_map_len)) | ||
553 | typ = TYPE_MAP | ||
554 | elif b == 0xdf: | ||
555 | self._reserve(4) | ||
556 | n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) | ||
557 | self._buff_i += 4 | ||
558 | if n > self._max_map_len: | ||
559 | raise UnpackValueError("%s exceeds max_map_len(%s)" % (n, self._max_map_len)) | ||
560 | typ = TYPE_MAP | ||
561 | else: | ||
562 | raise UnpackValueError("Unknown header: 0x%x" % b) | ||
563 | return typ, n, obj | ||
564 | |||
565 | def _unpack(self, execute=EX_CONSTRUCT): | ||
566 | typ, n, obj = self._read_header(execute) | ||
567 | |||
568 | if execute == EX_READ_ARRAY_HEADER: | ||
569 | if typ != TYPE_ARRAY: | ||
570 | raise UnpackValueError("Expected array") | ||
571 | return n | ||
572 | if execute == EX_READ_MAP_HEADER: | ||
573 | if typ != TYPE_MAP: | ||
574 | raise UnpackValueError("Expected map") | ||
575 | return n | ||
576 | # TODO should we eliminate the recursion? | ||
577 | if typ == TYPE_ARRAY: | ||
578 | if execute == EX_SKIP: | ||
579 | for i in xrange(n): | ||
580 | # TODO check whether we need to call `list_hook` | ||
581 | self._unpack(EX_SKIP) | ||
582 | return | ||
583 | ret = newlist_hint(n) | ||
584 | for i in xrange(n): | ||
585 | ret.append(self._unpack(EX_CONSTRUCT)) | ||
586 | if self._list_hook is not None: | ||
587 | ret = self._list_hook(ret) | ||
588 | # TODO is the interaction between `list_hook` and `use_list` ok? | ||
589 | return ret if self._use_list else tuple(ret) | ||
590 | if typ == TYPE_MAP: | ||
591 | if execute == EX_SKIP: | ||
592 | for i in xrange(n): | ||
593 | # TODO check whether we need to call hooks | ||
594 | self._unpack(EX_SKIP) | ||
595 | self._unpack(EX_SKIP) | ||
596 | return | ||
597 | if self._object_pairs_hook is not None: | ||
598 | ret = self._object_pairs_hook( | ||
599 | (self._unpack(EX_CONSTRUCT), | ||
600 | self._unpack(EX_CONSTRUCT)) | ||
601 | for _ in xrange(n)) | ||
602 | else: | ||
603 | ret = {} | ||
604 | for _ in xrange(n): | ||
605 | key = self._unpack(EX_CONSTRUCT) | ||
606 | ret[key] = self._unpack(EX_CONSTRUCT) | ||
607 | if self._object_hook is not None: | ||
608 | ret = self._object_hook(ret) | ||
609 | return ret | ||
610 | if execute == EX_SKIP: | ||
611 | return | ||
612 | if typ == TYPE_RAW: | ||
613 | if self._encoding is not None: | ||
614 | obj = obj.decode(self._encoding, self._unicode_errors) | ||
615 | elif self._raw: | ||
616 | obj = bytes(obj) | ||
617 | else: | ||
618 | obj = obj.decode('utf_8') | ||
619 | return obj | ||
620 | if typ == TYPE_EXT: | ||
621 | return self._ext_hook(n, bytes(obj)) | ||
622 | if typ == TYPE_BIN: | ||
623 | return bytes(obj) | ||
624 | assert typ == TYPE_IMMEDIATE | ||
625 | return obj | ||
626 | |||
627 | def __iter__(self): | ||
628 | return self | ||
629 | |||
630 | def __next__(self): | ||
631 | try: | ||
632 | ret = self._unpack(EX_CONSTRUCT) | ||
633 | self._consume() | ||
634 | return ret | ||
635 | except OutOfData: | ||
636 | self._consume() | ||
637 | raise StopIteration | ||
638 | |||
639 | next = __next__ | ||
640 | |||
641 | def skip(self, write_bytes=None): | ||
642 | self._unpack(EX_SKIP) | ||
643 | if write_bytes is not None: | ||
644 | warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) | ||
645 | write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) | ||
646 | self._consume() | ||
647 | |||
648 | def unpack(self, write_bytes=None): | ||
649 | ret = self._unpack(EX_CONSTRUCT) | ||
650 | if write_bytes is not None: | ||
651 | warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) | ||
652 | write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) | ||
653 | self._consume() | ||
654 | return ret | ||
655 | |||
656 | def read_array_header(self, write_bytes=None): | ||
657 | ret = self._unpack(EX_READ_ARRAY_HEADER) | ||
658 | if write_bytes is not None: | ||
659 | warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) | ||
660 | write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) | ||
661 | self._consume() | ||
662 | return ret | ||
663 | |||
664 | def read_map_header(self, write_bytes=None): | ||
665 | ret = self._unpack(EX_READ_MAP_HEADER) | ||
666 | if write_bytes is not None: | ||
667 | warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) | ||
668 | write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) | ||
669 | self._consume() | ||
670 | return ret | ||
671 | |||
672 | def tell(self): | ||
673 | return self._stream_offset | ||
674 | |||
675 | |||
676 | class Packer(object): | ||
677 | """ | ||
678 | MessagePack Packer | ||
679 | |||
680 | usage: | ||
681 | |||
682 | packer = Packer() | ||
683 | astream.write(packer.pack(a)) | ||
684 | astream.write(packer.pack(b)) | ||
685 | |||
686 | Packer's constructor has some keyword arguments: | ||
687 | |||
688 | :param callable default: | ||
689 | Convert user type to builtin type that Packer supports. | ||
690 | See also simplejson's documentation. | ||
691 | |||
692 | :param bool use_single_float: | ||
693 | Use single precision float type for float. (default: False) | ||
694 | |||
695 | :param bool autoreset: | ||
696 | Reset buffer after each pack and return its content as `bytes`. (default: True). | ||
697 | If set to false, use `bytes()` to get the content and `.reset()` to clear the buffer. | ||
698 | |||
699 | :param bool use_bin_type: | ||
700 | Use bin type introduced in msgpack spec 2.0 for bytes. | ||
701 | It also enables str8 type for unicode. | ||
702 | |||
703 | :param bool strict_types: | ||
704 | If set to true, types will be checked to be exact. Classes derived | ||
705 | from serializable types will not be serialized; they will be | ||
706 | treated as unsupported types and forwarded to `default`. | ||
707 | Additionally, tuples will not be serialized as lists. | ||
708 | This is useful when trying to implement accurate serialization | ||
709 | for Python types. | ||
710 | |||
711 | :param str encoding: | ||
712 | (deprecated) Convert unicode to bytes with this encoding. (default: 'utf-8') | ||
713 | |||
714 | :param str unicode_errors: | ||
715 | Error handler for encoding unicode. (default: 'strict') | ||
716 | """ | ||
717 | def __init__(self, default=None, encoding=None, unicode_errors=None, | ||
718 | use_single_float=False, autoreset=True, use_bin_type=False, | ||
719 | strict_types=False): | ||
720 | if encoding is None: | ||
721 | encoding = 'utf_8' | ||
722 | else: | ||
723 | warnings.warn( | ||
724 | "encoding is deprecated, Use raw=False instead.", | ||
725 | PendingDeprecationWarning) | ||
726 | |||
727 | if unicode_errors is None: | ||
728 | unicode_errors = 'strict' | ||
729 | |||
730 | self._strict_types = strict_types | ||
731 | self._use_float = use_single_float | ||
732 | self._autoreset = autoreset | ||
733 | self._use_bin_type = use_bin_type | ||
734 | self._encoding = encoding | ||
735 | self._unicode_errors = unicode_errors | ||
736 | self._buffer = StringIO() | ||
737 | if default is not None: | ||
738 | if not callable(default): | ||
739 | raise TypeError("default must be callable") | ||
740 | self._default = default | ||
741 | |||
742 | def _pack(self, obj, nest_limit=DEFAULT_RECURSE_LIMIT, | ||
743 | check=isinstance, check_type_strict=_check_type_strict): | ||
744 | default_used = False | ||
745 | if self._strict_types: | ||
746 | check = check_type_strict | ||
747 | list_types = list | ||
748 | else: | ||
749 | list_types = (list, tuple) | ||
750 | while True: | ||
751 | if nest_limit < 0: | ||
752 | raise PackValueError("recursion limit exceeded") | ||
753 | if obj is None: | ||
754 | return self._buffer.write(b"\xc0") | ||
755 | if check(obj, bool): | ||
756 | if obj: | ||
757 | return self._buffer.write(b"\xc3") | ||
758 | return self._buffer.write(b"\xc2") | ||
759 | if check(obj, int_types): | ||
760 | if 0 <= obj < 0x80: | ||
761 | return self._buffer.write(struct.pack("B", obj)) | ||
762 | if -0x20 <= obj < 0: | ||
763 | return self._buffer.write(struct.pack("b", obj)) | ||
764 | if 0x80 <= obj <= 0xff: | ||
765 | return self._buffer.write(struct.pack("BB", 0xcc, obj)) | ||
766 | if -0x80 <= obj < 0: | ||
767 | return self._buffer.write(struct.pack(">Bb", 0xd0, obj)) | ||
768 | if 0xff < obj <= 0xffff: | ||
769 | return self._buffer.write(struct.pack(">BH", 0xcd, obj)) | ||
770 | if -0x8000 <= obj < -0x80: | ||
771 | return self._buffer.write(struct.pack(">Bh", 0xd1, obj)) | ||
772 | if 0xffff < obj <= 0xffffffff: | ||
773 | return self._buffer.write(struct.pack(">BI", 0xce, obj)) | ||
774 | if -0x80000000 <= obj < -0x8000: | ||
775 | return self._buffer.write(struct.pack(">Bi", 0xd2, obj)) | ||
776 | if 0xffffffff < obj <= 0xffffffffffffffff: | ||
777 | return self._buffer.write(struct.pack(">BQ", 0xcf, obj)) | ||
778 | if -0x8000000000000000 <= obj < -0x80000000: | ||
779 | return self._buffer.write(struct.pack(">Bq", 0xd3, obj)) | ||
780 | if not default_used and self._default is not None: | ||
781 | obj = self._default(obj) | ||
782 | default_used = True | ||
783 | continue | ||
784 | raise PackOverflowError("Integer value out of range") | ||
785 | if check(obj, (bytes, bytearray)): | ||
786 | n = len(obj) | ||
787 | if n >= 2**32: | ||
788 | raise PackValueError("%s is too large" % type(obj).__name__) | ||
789 | self._pack_bin_header(n) | ||
790 | return self._buffer.write(obj) | ||
791 | if check(obj, Unicode): | ||
792 | if self._encoding is None: | ||
793 | raise TypeError( | ||
794 | "Can't encode unicode string: " | ||
795 | "no encoding is specified") | ||
796 | obj = obj.encode(self._encoding, self._unicode_errors) | ||
797 | n = len(obj) | ||
798 | if n >= 2**32: | ||
799 | raise PackValueError("String is too large") | ||
800 | self._pack_raw_header(n) | ||
801 | return self._buffer.write(obj) | ||
802 | if check(obj, memoryview): | ||
803 | n = len(obj) * obj.itemsize | ||
804 | if n >= 2**32: | ||
805 | raise PackValueError("Memoryview is too large") | ||
806 | self._pack_bin_header(n) | ||
807 | return self._buffer.write(obj) | ||
808 | if check(obj, float): | ||
809 | if self._use_float: | ||
810 | return self._buffer.write(struct.pack(">Bf", 0xca, obj)) | ||
811 | return self._buffer.write(struct.pack(">Bd", 0xcb, obj)) | ||
812 | if check(obj, ExtType): | ||
813 | code = obj.code | ||
814 | data = obj.data | ||
815 | assert isinstance(code, int) | ||
816 | assert isinstance(data, bytes) | ||
817 | L = len(data) | ||
818 | if L == 1: | ||
819 | self._buffer.write(b'\xd4') | ||
820 | elif L == 2: | ||
821 | self._buffer.write(b'\xd5') | ||
822 | elif L == 4: | ||
823 | self._buffer.write(b'\xd6') | ||
824 | elif L == 8: | ||
825 | self._buffer.write(b'\xd7') | ||
826 | elif L == 16: | ||
827 | self._buffer.write(b'\xd8') | ||
828 | elif L <= 0xff: | ||
829 | self._buffer.write(struct.pack(">BB", 0xc7, L)) | ||
830 | elif L <= 0xffff: | ||
831 | self._buffer.write(struct.pack(">BH", 0xc8, L)) | ||
832 | else: | ||
833 | self._buffer.write(struct.pack(">BI", 0xc9, L)) | ||
834 | self._buffer.write(struct.pack("b", code)) | ||
835 | self._buffer.write(data) | ||
836 | return | ||
837 | if check(obj, list_types): | ||
838 | n = len(obj) | ||
839 | self._pack_array_header(n) | ||
840 | for i in xrange(n): | ||
841 | self._pack(obj[i], nest_limit - 1) | ||
842 | return | ||
843 | if check(obj, dict): | ||
844 | return self._pack_map_pairs(len(obj), dict_iteritems(obj), | ||
845 | nest_limit - 1) | ||
846 | if not default_used and self._default is not None: | ||
847 | obj = self._default(obj) | ||
848 | default_used = True | ||
849 | continue | ||
850 | raise TypeError("Cannot serialize %r" % (obj, )) | ||
851 | |||
852 | def pack(self, obj): | ||
853 | try: | ||
854 | self._pack(obj) | ||
855 | except: | ||
856 | self._buffer = StringIO() # force reset | ||
857 | raise | ||
858 | ret = self._buffer.getvalue() | ||
859 | if self._autoreset: | ||
860 | self._buffer = StringIO() | ||
861 | elif USING_STRINGBUILDER: | ||
862 | self._buffer = StringIO(ret) | ||
863 | return ret | ||
864 | |||
865 | def pack_map_pairs(self, pairs): | ||
866 | self._pack_map_pairs(len(pairs), pairs) | ||
867 | ret = self._buffer.getvalue() | ||
868 | if self._autoreset: | ||
869 | self._buffer = StringIO() | ||
870 | elif USING_STRINGBUILDER: | ||
871 | self._buffer = StringIO(ret) | ||
872 | return ret | ||
873 | |||
874 | def pack_array_header(self, n): | ||
875 | if n >= 2**32: | ||
876 | raise PackValueError | ||
877 | self._pack_array_header(n) | ||
878 | ret = self._buffer.getvalue() | ||
879 | if self._autoreset: | ||
880 | self._buffer = StringIO() | ||
881 | elif USING_STRINGBUILDER: | ||
882 | self._buffer = StringIO(ret) | ||
883 | return ret | ||
884 | |||
885 | def pack_map_header(self, n): | ||
886 | if n >= 2**32: | ||
887 | raise PackValueError | ||
888 | self._pack_map_header(n) | ||
889 | ret = self._buffer.getvalue() | ||
890 | if self._autoreset: | ||
891 | self._buffer = StringIO() | ||
892 | elif USING_STRINGBUILDER: | ||
893 | self._buffer = StringIO(ret) | ||
894 | return ret | ||
895 | |||
896 | def pack_ext_type(self, typecode, data): | ||
897 | if not isinstance(typecode, int): | ||
898 | raise TypeError("typecode must have int type.") | ||
899 | if not 0 <= typecode <= 127: | ||
900 | raise ValueError("typecode should be 0-127") | ||
901 | if not isinstance(data, bytes): | ||
902 | raise TypeError("data must have bytes type") | ||
903 | L = len(data) | ||
904 | if L > 0xffffffff: | ||
905 | raise PackValueError("Too large data") | ||
906 | if L == 1: | ||
907 | self._buffer.write(b'\xd4') | ||
908 | elif L == 2: | ||
909 | self._buffer.write(b'\xd5') | ||
910 | elif L == 4: | ||
911 | self._buffer.write(b'\xd6') | ||
912 | elif L == 8: | ||
913 | self._buffer.write(b'\xd7') | ||
914 | elif L == 16: | ||
915 | self._buffer.write(b'\xd8') | ||
916 | elif L <= 0xff: | ||
917 | self._buffer.write(b'\xc7' + struct.pack('B', L)) | ||
918 | elif L <= 0xffff: | ||
919 | self._buffer.write(b'\xc8' + struct.pack('>H', L)) | ||
920 | else: | ||
921 | self._buffer.write(b'\xc9' + struct.pack('>I', L)) | ||
922 | self._buffer.write(struct.pack('B', typecode)) | ||
923 | self._buffer.write(data) | ||
924 | |||
925 | def _pack_array_header(self, n): | ||
926 | if n <= 0x0f: | ||
927 | return self._buffer.write(struct.pack('B', 0x90 + n)) | ||
928 | if n <= 0xffff: | ||
929 | return self._buffer.write(struct.pack(">BH", 0xdc, n)) | ||
930 | if n <= 0xffffffff: | ||
931 | return self._buffer.write(struct.pack(">BI", 0xdd, n)) | ||
932 | raise PackValueError("Array is too large") | ||
933 | |||
934 | def _pack_map_header(self, n): | ||
935 | if n <= 0x0f: | ||
936 | return self._buffer.write(struct.pack('B', 0x80 + n)) | ||
937 | if n <= 0xffff: | ||
938 | return self._buffer.write(struct.pack(">BH", 0xde, n)) | ||
939 | if n <= 0xffffffff: | ||
940 | return self._buffer.write(struct.pack(">BI", 0xdf, n)) | ||
941 | raise PackValueError("Dict is too large") | ||
942 | |||
943 | def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT): | ||
944 | self._pack_map_header(n) | ||
945 | for (k, v) in pairs: | ||
946 | self._pack(k, nest_limit - 1) | ||
947 | self._pack(v, nest_limit - 1) | ||
948 | |||
949 | def _pack_raw_header(self, n): | ||
950 | if n <= 0x1f: | ||
951 | self._buffer.write(struct.pack('B', 0xa0 + n)) | ||
952 | elif self._use_bin_type and n <= 0xff: | ||
953 | self._buffer.write(struct.pack('>BB', 0xd9, n)) | ||
954 | elif n <= 0xffff: | ||
955 | self._buffer.write(struct.pack(">BH", 0xda, n)) | ||
956 | elif n <= 0xffffffff: | ||
957 | self._buffer.write(struct.pack(">BI", 0xdb, n)) | ||
958 | else: | ||
959 | raise PackValueError('Raw is too large') | ||
960 | |||
961 | def _pack_bin_header(self, n): | ||
962 | if not self._use_bin_type: | ||
963 | return self._pack_raw_header(n) | ||
964 | elif n <= 0xff: | ||
965 | return self._buffer.write(struct.pack('>BB', 0xc4, n)) | ||
966 | elif n <= 0xffff: | ||
967 | return self._buffer.write(struct.pack(">BH", 0xc5, n)) | ||
968 | elif n <= 0xffffffff: | ||
969 | return self._buffer.write(struct.pack(">BI", 0xc6, n)) | ||
970 | else: | ||
971 | raise PackValueError('Bin is too large') | ||
972 | |||
973 | def bytes(self): | ||
974 | return self._buffer.getvalue() | ||
975 | |||
976 | def reset(self): | ||
977 | self._buffer = StringIO() | ||
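Finally, the deleted `fallback.py` mirrors the C extension's streaming API. The pattern shown in the `Unpacker` docstring, forced onto this pure-Python path via the `MSGPACK_PUREPYTHON` switch that `__init__.py` checks, could be exercised roughly as follows (again assuming the vendored package is importable; chunk size and env handling are illustrative):

```python
# Streaming sketch: pack several objects, feed the bytes to an Unpacker in
# chunks, and iterate the decoded objects back out.
# Assumption: the vendored package is importable; the env var must be set
# before the first import so __init__.py picks fallback.py.
import os
os.environ["MSGPACK_PUREPYTHON"] = "1"

from pip._vendor import msgpack

packer = msgpack.Packer(use_bin_type=True)
stream = b"".join(packer.pack({"i": i}) for i in range(3))

unpacker = msgpack.Unpacker(raw=False, max_buffer_size=1024 * 1024)
for start in range(0, len(stream), 7):  # arbitrary small chunk size
    unpacker.feed(stream[start:start + 7])

assert list(unpacker) == [{"i": 0}, {"i": 1}, {"i": 2}]
```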