Ticket #15763: patch_limit_bytes_redundant.diff
File patch_limit_bytes_redundant.diff, 4.1 KB (added 14 years ago; author not recorded) |
---|
-
tests/regressiontests/file_uploads/tests.py
@@ -151,6 +151,32 @@
     151      151          got = simplejson.loads(self.client.request(**r).content)
     152      152          self.assertTrue(len(got['file']) < 256, "Got a long file name (%s characters)." % len(got['file']))
     153      153
              154      def test_input_stream_is_limited(self):
              155          """
              156          If passed an incomplete multipart message, MultiPartParser does not
              157          attempt to read beyond the end of the stream.
              158          """
              159          payload = "\r\n".join([
              160              '--' + client.BOUNDARY,
              161              'Content-Disposition: form-data; name="file"; filename="foo.txt"',
              162              'Content-Type: application/octet-stream',
              163              '',
              164              'file contents'
              165              '--' + client.BOUNDARY + '--',
              166              '',
              167          ])
              168          payload = payload[:-10]
              169          r = {
              170              'CONTENT_LENGTH': len(payload),
              171              'CONTENT_TYPE': client.MULTIPART_CONTENT,
              172              'PATH_INFO': "/file_uploads/echo/",
              173              'REQUEST_METHOD': 'POST',
              174              'wsgi.input': client.FakePayload(payload),
              175          }
              176          self.client.request(**r).content
              177          # FakePayload will have asserted a failure by now if MultiPartParser
              178          # attempted to read beyond the end of the payload.
              179
     154      180      def test_custom_upload_handler(self):
     155      181          # A small file (under the 5M quota)
     156      182          smallfile = tempfile.NamedTemporaryFile()
django/http/multipartparser.py
@@ -105,12 +105,10 @@
     105      105          encoding = self._encoding
     106      106          handlers = self._upload_handlers
     107      107
     108               -   limited_input_data = LimitBytes(self._input_data, self._content_length)
     109               -
     110      108          # See if the handler will want to take care of the parsing.
     111      109          # This allows overriding everything if somebody wants it.
     112      110          for handler in handlers:
     113               -       result = handler.handle_raw_input( limited_input_data,
              111      +       result = handler.handle_raw_input(self._input_data,
     114      112                                                  self._meta,
     115      113                                                  self._content_length,
     116      114                                                  self._boundary,
@@ -123,7 +121,7 @@
     123      121          self._files = MultiValueDict()
     124      122
     125      123          # Instantiate the parser and stream:
     126               -   stream = LazyStream(ChunkIter( limited_input_data, self._chunk_size))
              124      +   stream = LazyStream(ChunkIter(self._input_data, self._chunk_size))
     127      125
     128      126          # Whether or not to signal a file-completion at the beginning of the loop.
     129      127          old_field_name = None
@@ -218,10 +216,10 @@
     218      216                  exhaust(stream)
     219      217              except StopUpload, e:
     220      218                  if not e.connection_reset:
     221               -               exhaust( limited_input_data)
              219      +               exhaust(self._input_data)
     222      220              else:
     223      221                  # Make sure that the request data is all fed
     224               -           exhaust( limited_input_data)
              222      +           exhaust(self._input_data)
     225      223
     226      224          # Signal that the upload has completed.
     227      225          for handler in handlers:
@@ -383,26 +381,4 @@
     383      381          def __iter__(self):
     384      382              return self
     385      383
     386               -   class LimitBytes(object):
     387               -       """ Limit bytes for a file object. """
     388               -       def __init__(self, fileobject, length):
     389               -           self._file = fileobject
     390               -           self.remaining = length
     391               -
     392               -       def read(self, num_bytes=None):
     393               -           """
     394               -           Read data from the underlying file.
     395               -           If you ask for too much or there isn't anything left,
     396               -           this will raise an InputStreamExhausted error.
     397               -           """
     398               -           if self.remaining <= 0:
     399               -               raise InputStreamExhausted()
     400               -           if num_bytes is None:
     401               -               num_bytes = self.remaining
     402               -           else:
     403               -               num_bytes = min(num_bytes, self.remaining)
     404               -           self.remaining -= num_bytes
     405               -           return self._file.read(num_bytes)
     406               -
     407               -
     408      384      class InterBoundaryIter(object):
     409      385          """
     410      386          A Producer that will iterate over boundaries.