Lines Matching refs:data

75 # Below are some formats and associated data for reading/writing headers using
134 # size are zero in the local header and the real values are written in the data
135 # descriptor immediately following the compressed data.
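
When the writer cannot know the CRC and sizes up front (general-purpose flag bit 0x08), they are left as zero in the local header and emitted in a data descriptor right after the compressed bytes. A minimal sketch of that record, with the field layout from the ZIP APPNOTE (the function name and zip64 flag are illustrative, not the module's API):

    import struct

    def write_data_descriptor(fp, crc, compress_size, file_size, zip64=False):
        # The signature is optional in the spec but commonly written.
        fp.write(b'PK\x07\x08')
        # Sizes widen to 64 bits when the entry needed ZIP64 extensions.
        fmt = '<LQQ' if zip64 else '<LLL'
        fp.write(struct.pack(fmt, crc, compress_size, file_size))
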
254 data = fpin.read(sizeEndCentDir64Locator)
255 if len(data) != sizeEndCentDir64Locator:
257 sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
264 # Assume no 'zip64 extensible data'
266 data = fpin.read(sizeEndCentDir64)
267 if len(data) != sizeEndCentDir64:
271 struct.unpack(structEndArchive64, data)
275 # Update the original endrec using data from the ZIP64 record
287 """Return data from the "End of Central Directory" record, or None.
289 The data is a list of the nine items in the ZIP "End of central dir"
303 data = fpin.read()
304 if (len(data) == sizeEndCentDir and
305 data[0:4] == stringEndArchive and
306 data[-2:] == b"\000\000"):
308 endrec = struct.unpack(structEndArchive, data)
325 data = fpin.read()
326 start = data.rfind(stringEndArchive)
329 recData = data[start:start+sizeEndCentDir]
335 comment = data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize]
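
The fragments above come from the end-of-central-directory lookup: read the tail of the file, search backwards for the EOCD signature, unpack the fixed 22-byte record, then slice out the archive comment whose length the record declares. A hedged sketch of that scan (constants mirror the module's; the helper name is ours):

    import struct

    stringEndArchive = b'PK\x05\x06'                      # EOCD signature
    structEndArchive = b'<4s4H2LH'                        # fixed portion of the record
    sizeEndCentDir = struct.calcsize(structEndArchive)    # 22 bytes

    def find_eocd(fpin):
        fpin.seek(0, 2)
        filesize = fpin.tell()
        # The comment after the EOCD record is at most 64 KiB long.
        fpin.seek(max(filesize - (1 << 16) - sizeEndCentDir, 0))
        data = fpin.read()
        start = data.rfind(stringEndArchive)
        if start < 0:
            return None
        recData = data[start:start + sizeEndCentDir]
        endrec = list(struct.unpack(structEndArchive, recData))
        commentSize = endrec[7]        # declared comment length
        comment = data[start + sizeEndCentDir:start + sizeEndCentDir + commentSize]
        return endrec + [comment]      # the nine items mentioned in the docstring
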
398 self.extra = b"" # ZIP extra data
451 # Set these to zero because we write them after the file data
503 data = extra[4:ln+4]
508 self.file_size, = unpack('<Q', data[:8])
509 data = data[8:]
512 self.compress_size, = unpack('<Q', data[:8])
513 data = data[8:]
516 self.header_offset, = unpack('<Q', data[:8])
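
These unpacks walk the ZIP64 extended-information extra field (header ID 0x0001): each 64-bit value is only present when the corresponding 32-bit field in the central directory record is saturated. A sketch of that walk, written as a standalone function rather than the module's method:

    from struct import unpack

    def parse_zip64_extra(extra, file_size, compress_size, header_offset):
        while len(extra) >= 4:
            tp, ln = unpack('<HH', extra[:4])
            data = extra[4:ln + 4]
            if tp == 0x0001:                      # ZIP64 extended information
                if file_size == 0xFFFFFFFF:       # real value lives in the extra field
                    file_size, = unpack('<Q', data[:8])
                    data = data[8:]
                if compress_size == 0xFFFFFFFF:
                    compress_size, = unpack('<Q', data[:8])
                    data = data[8:]
                if header_offset == 0xFFFFFFFF:
                    header_offset, = unpack('<Q', data[:8])
            extra = extra[ln + 4:]
        return file_size, compress_size, header_offset
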
581 # to be able to get data out of such a file.
611 def decrypter(data):
615 for c in data:
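
The decrypter(data) closure implements the traditional PKWARE "ZipCrypto" stream cipher. A hedged sketch of that scheme as described in the APPNOTE (constants are the published ones; the factory name and table-based CRC helper are illustrative):

    def make_decrypter(pwd):
        # One-byte CRC-32 step over the standard reflected table.
        table = []
        for i in range(256):
            crc = i
            for _ in range(8):
                crc = (crc >> 1) ^ 0xEDB88320 if crc & 1 else crc >> 1
            table.append(crc)

        def crc32_byte(ch, crc):
            return (crc >> 8) ^ table[(crc ^ ch) & 0xFF]

        key0, key1, key2 = 0x12345678, 0x23456789, 0x34567890

        def update_keys(c):
            nonlocal key0, key1, key2
            key0 = crc32_byte(c, key0)
            key1 = (key1 + (key0 & 0xFF)) & 0xFFFFFFFF
            key1 = (key1 * 134775813 + 1) & 0xFFFFFFFF
            key2 = crc32_byte(key1 >> 24, key2)

        for p in pwd:                     # prime the keys with the password bytes
            update_keys(p)

        def decrypter(data):
            out = bytearray()
            for c in data:
                k = key2 | 2
                c ^= ((k * (k ^ 1)) >> 8) & 0xFF
                update_keys(c)            # keys advance on the decrypted byte
                out.append(c)
            return bytes(out)

        return decrypter
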
637 def compress(self, data):
639 return self._init() + self._comp.compress(data)
640 return self._comp.compress(data)
655 def decompress(self, data):
657 self._unconsumed += data
668 data = self._unconsumed[4 + psize:]
671 result = self._decomp.decompress(data)
778 data = self._file.read(n)
780 return data
794 def write(self, data):
795 n = self.fp.write(data)
935 If the argument is omitted, None, or negative, data is read and returned until EOF is reached.
958 data = self._read1(n)
959 if n < len(data):
960 self._readbuffer = data
962 buf += data[:n]
964 buf += data
965 n -= len(data)
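
From the caller's side, all this buffering just makes the member behave like an ordinary binary file. A usage sketch with placeholder archive and member names:

    import zipfile

    with zipfile.ZipFile('archive.zip') as zf:        # placeholder archive name
        with zf.open('member.txt') as member:         # ZipExtFile, read() shown above
            total = 0
            while True:
                chunk = member.read(8192)             # buffered, CRC-checked read
                if not chunk:                         # b'' signals EOF
                    break
                total += len(chunk)
            print(total, 'bytes read')
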
969 # Update the CRC using the given data.
986 data = self._read1(self.MAX_N)
987 if data:
988 buf += data
1004 data = self._read1(n)
1005 if n < len(data):
1006 self._readbuffer = data
1008 buf += data[:n]
1010 if data:
1011 buf += data
1023 ## Handle unconsumed data.
1024 data = self._decompressor.unconsumed_tail
1025 if n > len(data):
1026 data += self._read2(n - len(data))
1028 data = self._read2(n)
1034 data = self._decompressor.decompress(data, n)
1039 data += self._decompressor.flush()
1041 data = self._decompressor.decompress(data)
1044 data = data[:self._left]
1045 self._left -= len(data)
1048 self._update_crc(data)
1049 return data
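
For DEFLATE members, the reads above cap each zlib decompress call and let unconsumed_tail carry input that was not needed yet. A hedged sketch of that bounded-decompress loop (raw deflate, wbits=-15, as zipfile uses; the generator itself is illustrative):

    import zlib

    def inflate_bounded(fp, bufsize=4096):
        d = zlib.decompressobj(-15)              # raw DEFLATE, no zlib header
        while not d.eof:
            data = d.unconsumed_tail             # input held back by the size cap
            if len(data) < bufsize:
                data += fp.read(bufsize)         # top up with fresh compressed bytes
            if not data:
                break                            # ran out of input before end of stream
            chunk = d.decompress(data, bufsize)  # emit at most 'bufsize' bytes
            if chunk:
                yield chunk
        tail = d.flush()
        if tail:
            yield tail
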
1058 data = self._fileobj.read(n)
1059 self._compress_left -= len(data)
1060 if not data:
1064 data = self._decrypter(data)
1065 return data
1156 def write(self, data):
1160 # Accept any data that supports the buffer protocol
1161 if isinstance(data, (bytes, bytearray)):
1162 nbytes = len(data)
1164 data = memoryview(data)
1165 nbytes = data.nbytes
1168 self._crc = crc32(data, self._crc)
1170 data = self._compressor.compress(data)
1171 self._compress_size += len(data)
1172 self._fileobj.write(data)
1180 # Flush any data from the compressor, and update header info
1199 # Write CRC and file sizes after the file data
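
On the write side these lines keep three running values per entry: the CRC-32, the uncompressed byte count, and the compressed byte count; closing the entry drains the compressor, and the totals go into the header or the trailing data descriptor. A stand-in sketch of that bookkeeping (not the module's own write class):

    import zlib
    from binascii import crc32

    class CountingWriter:
        def __init__(self, fileobj):
            self._fileobj = fileobj
            self._compressor = zlib.compressobj(
                zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
            self._crc = 0
            self._file_size = 0
            self._compress_size = 0

        def write(self, data):
            data = memoryview(data)              # accept any buffer-protocol object
            self._file_size += data.nbytes
            self._crc = crc32(data, self._crc)
            out = self._compressor.compress(data)
            self._compress_size += len(out)
            self._fileobj.write(out)
            return data.nbytes

        def close(self):
            out = self._compressor.flush()       # drain pending compressed data
            self._compress_size += len(out)
            self._fileobj.write(out)
            # _crc, _file_size and _compress_size now hold the values written
            # into the header or the data descriptor after the file data.
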
1393 data = fp.read(size_cd)
1394 fp = io.BytesIO(data)
1452 return [data.filename for data in self.filelist]
1584 # Zip 2.7: compressed patched data
1585 raise NotImplementedError("compressed patched data (flag bit 5)")
1635 # Size and CRC are overwritten with correct data after processing the file
1641 # Compressed data includes an end-of-stream (EOS) marker
1809 def writestr(self, zinfo_or_arcname, data,
1811 """Write a file into the archive. The contents is 'data', which
1816 if isinstance(data, str):
1817 data = data.encode("utf-8")
1845 zinfo.file_size = len(data) # Uncompressed size
1848 dest.write(data)
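
writestr() accepts either text or bytes; a str payload is encoded as UTF-8 before the size and CRC are computed. A short in-memory usage sketch (member names are placeholders):

    import io, zipfile

    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w', compression=zipfile.ZIP_DEFLATED) as zf:
        zf.writestr('notes.txt', 'str payloads are encoded as UTF-8 first')
        zf.writestr('blob.bin', b'\x00\x01\x02')       # bytes are stored as-is
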
1874 # Compressed data includes an end-of-stream (EOS) marker
2394 >>> data = io.BytesIO()
2395 >>> zf = ZipFile(data, 'w')