  1. """
  2. Read and write ZIP files.
  3. XXX references to utf-8 need further investigation.
  4. """
  5. import binascii
  6. import importlib.util
  7. import io
  8. import os
  9. import shutil
  10. import stat
  11. import struct
  12. import sys
  13. import threading
  14. import time
  15. try:
  16. import zlib # We may need its compression method
  17. crc32 = zlib.crc32
  18. except ImportError:
  19. zlib = None
  20. crc32 = binascii.crc32
  21. try:
  22. import bz2 # We may need its compression method
  23. except ImportError:
  24. bz2 = None
  25. try:
  26. import lzma # We may need its compression method
  27. except ImportError:
  28. lzma = None
  29. __all__ = ["BadZipFile", "BadZipfile", "error",
  30. "ZIP_STORED", "ZIP_DEFLATED", "ZIP_BZIP2", "ZIP_LZMA",
  31. "is_zipfile", "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile",
  32. "Path"]
  33. class BadZipFile(Exception):
  34. pass
  35. class LargeZipFile(Exception):
  36. """
  37. Raised when writing a zipfile, the zipfile requires ZIP64 extensions
  38. and those extensions are disabled.
  39. """
  40. error = BadZipfile = BadZipFile # Pre-3.2 compatibility names
  41. ZIP64_LIMIT = (1 << 31) - 1
  42. ZIP_FILECOUNT_LIMIT = (1 << 16) - 1
  43. ZIP_MAX_COMMENT = (1 << 16) - 1
  44. # constants for Zip file compression methods
  45. ZIP_STORED = 0
  46. ZIP_DEFLATED = 8
  47. ZIP_BZIP2 = 12
  48. ZIP_LZMA = 14
  49. # Other ZIP compression methods not supported
  50. DEFAULT_VERSION = 20
  51. ZIP64_VERSION = 45
  52. BZIP2_VERSION = 46
  53. LZMA_VERSION = 63
  54. # we recognize (but not necessarily support) all features up to that version
  55. MAX_EXTRACT_VERSION = 63
  56. # Below are some formats and associated data for reading/writing headers using
  57. # the struct module. The names and structures of headers/records are those used
  58. # in the PKWARE description of the ZIP file format:
  59. # http://www.pkware.com/documents/casestudies/APPNOTE.TXT
  60. # (URL valid as of January 2008)
  61. # The "end of central directory" structure, magic number, size, and indices
  62. # (section V.I in the format document)
  63. structEndArchive = b"<4s4H2LH"
  64. stringEndArchive = b"PK\005\006"
  65. sizeEndCentDir = struct.calcsize(structEndArchive)
  66. _ECD_SIGNATURE = 0
  67. _ECD_DISK_NUMBER = 1
  68. _ECD_DISK_START = 2
  69. _ECD_ENTRIES_THIS_DISK = 3
  70. _ECD_ENTRIES_TOTAL = 4
  71. _ECD_SIZE = 5
  72. _ECD_OFFSET = 6
  73. _ECD_COMMENT_SIZE = 7
  74. # These last two indices are not part of the structure as defined in the
  75. # spec, but they are used internally by this module as a convenience
  76. _ECD_COMMENT = 8
  77. _ECD_LOCATION = 9
  78. # The "central directory" structure, magic number, size, and indices
  79. # of entries in the structure (section V.F in the format document)
  80. structCentralDir = "<4s4B4HL2L5H2L"
  81. stringCentralDir = b"PK\001\002"
  82. sizeCentralDir = struct.calcsize(structCentralDir)
  83. # indexes of entries in the central directory structure
  84. _CD_SIGNATURE = 0
  85. _CD_CREATE_VERSION = 1
  86. _CD_CREATE_SYSTEM = 2
  87. _CD_EXTRACT_VERSION = 3
  88. _CD_EXTRACT_SYSTEM = 4
  89. _CD_FLAG_BITS = 5
  90. _CD_COMPRESS_TYPE = 6
  91. _CD_TIME = 7
  92. _CD_DATE = 8
  93. _CD_CRC = 9
  94. _CD_COMPRESSED_SIZE = 10
  95. _CD_UNCOMPRESSED_SIZE = 11
  96. _CD_FILENAME_LENGTH = 12
  97. _CD_EXTRA_FIELD_LENGTH = 13
  98. _CD_COMMENT_LENGTH = 14
  99. _CD_DISK_NUMBER_START = 15
  100. _CD_INTERNAL_FILE_ATTRIBUTES = 16
  101. _CD_EXTERNAL_FILE_ATTRIBUTES = 17
  102. _CD_LOCAL_HEADER_OFFSET = 18
  103. # General purpose bit flags
  104. # Zip Appnote: 4.4.4 general purpose bit flag: (2 bytes)
  105. _MASK_ENCRYPTED = 1 << 0
  106. # Bits 1 and 2 have different meanings depending on the compression used.
  107. _MASK_COMPRESS_OPTION_1 = 1 << 1
  108. # _MASK_COMPRESS_OPTION_2 = 1 << 2
  109. # _MASK_USE_DATA_DESCRIPTOR: If set, crc-32, compressed size and uncompressed
  110. # size are zero in the local header and the real values are written in the data
  111. # descriptor immediately following the compressed data.
  112. _MASK_USE_DATA_DESCRIPTOR = 1 << 3
  113. # Bit 4: Reserved for use with compression method 8, for enhanced deflating.
  114. # _MASK_RESERVED_BIT_4 = 1 << 4
  115. _MASK_COMPRESSED_PATCH = 1 << 5
  116. _MASK_STRONG_ENCRYPTION = 1 << 6
  117. # _MASK_UNUSED_BIT_7 = 1 << 7
  118. # _MASK_UNUSED_BIT_8 = 1 << 8
  119. # _MASK_UNUSED_BIT_9 = 1 << 9
  120. # _MASK_UNUSED_BIT_10 = 1 << 10
  121. _MASK_UTF_FILENAME = 1 << 11
  122. # Bit 12: Reserved by PKWARE for enhanced compression.
  123. # _MASK_RESERVED_BIT_12 = 1 << 12
  124. # _MASK_ENCRYPTED_CENTRAL_DIR = 1 << 13
  125. # Bit 14, 15: Reserved by PKWARE
  126. # _MASK_RESERVED_BIT_14 = 1 << 14
  127. # _MASK_RESERVED_BIT_15 = 1 << 15
  128. # The "local file header" structure, magic number, size, and indices
  129. # (section V.A in the format document)
  130. structFileHeader = "<4s2B4HL2L2H"
  131. stringFileHeader = b"PK\003\004"
  132. sizeFileHeader = struct.calcsize(structFileHeader)
  133. _FH_SIGNATURE = 0
  134. _FH_EXTRACT_VERSION = 1
  135. _FH_EXTRACT_SYSTEM = 2
  136. _FH_GENERAL_PURPOSE_FLAG_BITS = 3
  137. _FH_COMPRESSION_METHOD = 4
  138. _FH_LAST_MOD_TIME = 5
  139. _FH_LAST_MOD_DATE = 6
  140. _FH_CRC = 7
  141. _FH_COMPRESSED_SIZE = 8
  142. _FH_UNCOMPRESSED_SIZE = 9
  143. _FH_FILENAME_LENGTH = 10
  144. _FH_EXTRA_FIELD_LENGTH = 11
  145. # The "Zip64 end of central directory locator" structure, magic number, and size
  146. structEndArchive64Locator = "<4sLQL"
  147. stringEndArchive64Locator = b"PK\x06\x07"
  148. sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator)
  149. # The "Zip64 end of central directory" record, magic number, size, and indices
  150. # (section V.G in the format document)
  151. structEndArchive64 = "<4sQ2H2L4Q"
  152. stringEndArchive64 = b"PK\x06\x06"
  153. sizeEndCentDir64 = struct.calcsize(structEndArchive64)
  154. _CD64_SIGNATURE = 0
  155. _CD64_DIRECTORY_RECSIZE = 1
  156. _CD64_CREATE_VERSION = 2
  157. _CD64_EXTRACT_VERSION = 3
  158. _CD64_DISK_NUMBER = 4
  159. _CD64_DISK_NUMBER_START = 5
  160. _CD64_NUMBER_ENTRIES_THIS_DISK = 6
  161. _CD64_NUMBER_ENTRIES_TOTAL = 7
  162. _CD64_DIRECTORY_SIZE = 8
  163. _CD64_OFFSET_START_CENTDIR = 9
  164. _DD_SIGNATURE = 0x08074b50
  165. _EXTRA_FIELD_STRUCT = struct.Struct('<HH')
  166. def _strip_extra(extra, xids):
  167. # Remove Extra Fields with specified IDs.
  168. unpack = _EXTRA_FIELD_STRUCT.unpack
  169. modified = False
  170. buffer = []
  171. start = i = 0
  172. while i + 4 <= len(extra):
  173. xid, xlen = unpack(extra[i : i + 4])
  174. j = i + 4 + xlen
  175. if xid in xids:
  176. if i != start:
  177. buffer.append(extra[start : i])
  178. start = j
  179. modified = True
  180. i = j
  181. if not modified:
  182. return extra
  183. if start != len(extra):
  184. buffer.append(extra[start:])
  185. return b''.join(buffer)
  186. def _check_zipfile(fp):
  187. try:
  188. if _EndRecData(fp):
  189. return True # file has correct magic number
  190. except OSError:
  191. pass
  192. return False
  193. def is_zipfile(filename):
  194. """Quickly see if a file is a ZIP file by checking the magic number.
  195. The filename argument may be a file or file-like object too.
  196. """
  197. result = False
  198. try:
  199. if hasattr(filename, "read"):
  200. result = _check_zipfile(fp=filename)
  201. else:
  202. with open(filename, "rb") as fp:
  203. result = _check_zipfile(fp)
  204. except OSError:
  205. pass
  206. return result
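
# Usage (illustrative sketch; "example.zip" is a placeholder path, not part of
# this module):
#
#     if is_zipfile("example.zip"):             # accepts a path...
#         ...
#     with open("example.zip", "rb") as fp:
#         ok = is_zipfile(fp)                   # ...or a binary file object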


def _EndRecData64(fpin, offset, endrec):
    """
    Read the ZIP64 end-of-archive records and use that to update endrec
    """
    try:
        fpin.seek(offset - sizeEndCentDir64Locator, 2)
    except OSError:
        # If the seek fails, the file is not large enough to contain a ZIP64
        # end-of-archive record, so just return the end record we were given.
        return endrec

    data = fpin.read(sizeEndCentDir64Locator)
    if len(data) != sizeEndCentDir64Locator:
        return endrec
    sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
    if sig != stringEndArchive64Locator:
        return endrec

    if diskno != 0 or disks > 1:
        raise BadZipFile("zipfiles that span multiple disks are not supported")

    # Assume no 'zip64 extensible data'
    fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2)
    data = fpin.read(sizeEndCentDir64)
    if len(data) != sizeEndCentDir64:
        return endrec
    sig, sz, create_version, read_version, disk_num, disk_dir, \
        dircount, dircount2, dirsize, diroffset = \
        struct.unpack(structEndArchive64, data)
    if sig != stringEndArchive64:
        return endrec

    # Update the original endrec using data from the ZIP64 record
    endrec[_ECD_SIGNATURE] = sig
    endrec[_ECD_DISK_NUMBER] = disk_num
    endrec[_ECD_DISK_START] = disk_dir
    endrec[_ECD_ENTRIES_THIS_DISK] = dircount
    endrec[_ECD_ENTRIES_TOTAL] = dircount2
    endrec[_ECD_SIZE] = dirsize
    endrec[_ECD_OFFSET] = diroffset
    return endrec


def _EndRecData(fpin):
    """Return data from the "End of Central Directory" record, or None.

    The data is a list of the nine items in the ZIP "End of central dir"
    record followed by a tenth item, the file seek offset of this record."""

    # Determine file size
    fpin.seek(0, 2)
    filesize = fpin.tell()

    # Check to see if this is ZIP file with no archive comment (the
    # "end of central directory" structure should be the last item in the
    # file if this is the case).
    try:
        fpin.seek(-sizeEndCentDir, 2)
    except OSError:
        return None
    data = fpin.read(sizeEndCentDir)
    if (len(data) == sizeEndCentDir and
        data[0:4] == stringEndArchive and
        data[-2:] == b"\000\000"):
        # the signature is correct and there's no comment, unpack structure
        endrec = struct.unpack(structEndArchive, data)
        endrec=list(endrec)

        # Append a blank comment and record start offset
        endrec.append(b"")
        endrec.append(filesize - sizeEndCentDir)

        # Try to read the "Zip64 end of central directory" structure
        return _EndRecData64(fpin, -sizeEndCentDir, endrec)

    # Either this is not a ZIP file, or it is a ZIP file with an archive
    # comment.  Search the end of the file for the "end of central directory"
    # record signature. The comment is the last item in the ZIP file and may be
    # up to 64K long.  It is assumed that the "end of central directory" magic
    # number does not appear in the comment.
    maxCommentStart = max(filesize - ZIP_MAX_COMMENT - sizeEndCentDir, 0)
    fpin.seek(maxCommentStart, 0)
    data = fpin.read(ZIP_MAX_COMMENT + sizeEndCentDir)
    start = data.rfind(stringEndArchive)
    if start >= 0:
        # found the magic number; attempt to unpack and interpret
        recData = data[start:start+sizeEndCentDir]
        if len(recData) != sizeEndCentDir:
            # Zip file is corrupted.
            return None
        endrec = list(struct.unpack(structEndArchive, recData))
        commentSize = endrec[_ECD_COMMENT_SIZE] #as claimed by the zip file
        comment = data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize]
        endrec.append(comment)
        endrec.append(maxCommentStart + start)

        # Try to read the "Zip64 end of central directory" structure
        return _EndRecData64(fpin, maxCommentStart + start - filesize,
                             endrec)

    # Unable to find a valid end of central directory structure
    return None


def _sanitize_filename(filename):
    """Terminate the file name at the first null byte and
    ensure paths always use forward slashes as the directory separator."""

    # Terminate the file name at the first null byte.  Null bytes in file
    # names are used as tricks by viruses in archives.
    null_byte = filename.find(chr(0))
    if null_byte >= 0:
        filename = filename[0:null_byte]
    # This is used to ensure paths in generated ZIP files always use
    # forward slashes as the directory separator, as required by the
    # ZIP format specification.
    if os.sep != "/" and os.sep in filename:
        filename = filename.replace(os.sep, "/")
    if os.altsep and os.altsep != "/" and os.altsep in filename:
        filename = filename.replace(os.altsep, "/")
    return filename


class ZipInfo (object):
    """Class with attributes describing each file in the ZIP archive."""

    __slots__ = (
        'orig_filename',
        'filename',
        'date_time',
        'compress_type',
        '_compresslevel',
        'comment',
        'extra',
        'create_system',
        'create_version',
        'extract_version',
        'reserved',
        'flag_bits',
        'volume',
        'internal_attr',
        'external_attr',
        'header_offset',
        'CRC',
        'compress_size',
        'file_size',
        '_raw_time',
        '_end_offset',
    )

    def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
        self.orig_filename = filename   # Original file name in archive

        # Terminate the file name at the first null byte and
        # ensure paths always use forward slashes as the directory separator.
        filename = _sanitize_filename(filename)

        self.filename = filename        # Normalized file name
        self.date_time = date_time      # year, month, day, hour, min, sec

        if date_time[0] < 1980:
            raise ValueError('ZIP does not support timestamps before 1980')

        # Standard values:
        self.compress_type = ZIP_STORED # Type of compression for the file
        self._compresslevel = None      # Level for the compressor
        self.comment = b""              # Comment for each file
        self.extra = b""                # ZIP extra data
        if sys.platform == 'win32':
            self.create_system = 0      # System which created ZIP archive
        else:
            # Assume everything else is unix-y
            self.create_system = 3      # System which created ZIP archive
        self.create_version = DEFAULT_VERSION   # Version which created ZIP archive
        self.extract_version = DEFAULT_VERSION  # Version needed to extract archive
        self.reserved = 0               # Must be zero
        self.flag_bits = 0              # ZIP flag bits
        self.volume = 0                 # Volume number of file header
        self.internal_attr = 0          # Internal attributes
        self.external_attr = 0          # External file attributes
        self.compress_size = 0          # Size of the compressed file
        self.file_size = 0              # Size of the uncompressed file
        self._end_offset = None         # Start of the next local header or central directory
        # Other attributes are set by class ZipFile:
        # header_offset         Byte offset to the file header
        # CRC                   CRC-32 of the uncompressed file

    def __repr__(self):
        result = ['<%s filename=%r' % (self.__class__.__name__, self.filename)]
        if self.compress_type != ZIP_STORED:
            result.append(' compress_type=%s' %
                          compressor_names.get(self.compress_type,
                                               self.compress_type))
        hi = self.external_attr >> 16
        lo = self.external_attr & 0xFFFF
        if hi:
            result.append(' filemode=%r' % stat.filemode(hi))
        if lo:
            result.append(' external_attr=%#x' % lo)
        isdir = self.is_dir()
        if not isdir or self.file_size:
            result.append(' file_size=%r' % self.file_size)
        if ((not isdir or self.compress_size) and
            (self.compress_type != ZIP_STORED or
             self.file_size != self.compress_size)):
            result.append(' compress_size=%r' % self.compress_size)
        result.append('>')
        return ''.join(result)

    def FileHeader(self, zip64=None):
        """Return the per-file header as a bytes object.

        When the optional zip64 arg is None rather than a bool, we will
        decide based upon the file_size and compress_size, if known,
        False otherwise.
        """
        dt = self.date_time
        dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
        dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
        if self.flag_bits & _MASK_USE_DATA_DESCRIPTOR:
            # Set these to zero because we write them after the file data
            CRC = compress_size = file_size = 0
        else:
            CRC = self.CRC
            compress_size = self.compress_size
            file_size = self.file_size

        extra = self.extra

        min_version = 0
        if zip64 is None:
            # We always explicitly pass zip64 within this module.... This
            # remains for anyone using ZipInfo.FileHeader as a public API.
            zip64 = file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT
        if zip64:
            fmt = '<HHQQ'
            extra = extra + struct.pack(fmt,
                                        1, struct.calcsize(fmt)-4, file_size, compress_size)
            file_size = 0xffffffff
            compress_size = 0xffffffff
            min_version = ZIP64_VERSION

        if self.compress_type == ZIP_BZIP2:
            min_version = max(BZIP2_VERSION, min_version)
        elif self.compress_type == ZIP_LZMA:
            min_version = max(LZMA_VERSION, min_version)

        self.extract_version = max(min_version, self.extract_version)
        self.create_version = max(min_version, self.create_version)
        filename, flag_bits = self._encodeFilenameFlags()
        header = struct.pack(structFileHeader, stringFileHeader,
                             self.extract_version, self.reserved, flag_bits,
                             self.compress_type, dostime, dosdate, CRC,
                             compress_size, file_size,
                             len(filename), len(extra))
        return header + filename + extra

    def _encodeFilenameFlags(self):
        try:
            return self.filename.encode('ascii'), self.flag_bits
        except UnicodeEncodeError:
            return self.filename.encode('utf-8'), self.flag_bits | _MASK_UTF_FILENAME

    def _decodeExtra(self, filename_crc):
        # Try to decode the extra field.
        extra = self.extra
        unpack = struct.unpack
        while len(extra) >= 4:
            tp, ln = unpack('<HH', extra[:4])
            if ln+4 > len(extra):
                raise BadZipFile("Corrupt extra field %04x (size=%d)" % (tp, ln))
            if tp == 0x0001:
                data = extra[4:ln+4]
                # ZIP64 extension (large files and/or large archives)
                try:
                    if self.file_size in (0xFFFF_FFFF_FFFF_FFFF, 0xFFFF_FFFF):
                        field = "File size"
                        self.file_size, = unpack('<Q', data[:8])
                        data = data[8:]
                    if self.compress_size == 0xFFFF_FFFF:
                        field = "Compress size"
                        self.compress_size, = unpack('<Q', data[:8])
                        data = data[8:]
                    if self.header_offset == 0xFFFF_FFFF:
                        field = "Header offset"
                        self.header_offset, = unpack('<Q', data[:8])
                except struct.error:
                    raise BadZipFile(f"Corrupt zip64 extra field. "
                                     f"{field} not found.") from None
            elif tp == 0x7075:
                data = extra[4:ln+4]
                # Unicode Path Extra Field
                try:
                    up_version, up_name_crc = unpack('<BL', data[:5])
                    if up_version == 1 and up_name_crc == filename_crc:
                        up_unicode_name = data[5:].decode('utf-8')
                        if up_unicode_name:
                            self.filename = _sanitize_filename(up_unicode_name)
                        else:
                            import warnings
                            warnings.warn("Empty unicode path extra field (0x7075)", stacklevel=2)
                except struct.error as e:
                    raise BadZipFile("Corrupt unicode path extra field (0x7075)") from e
                except UnicodeDecodeError as e:
                    raise BadZipFile('Corrupt unicode path extra field (0x7075): invalid utf-8 bytes') from e

            extra = extra[ln+4:]

    @classmethod
    def from_file(cls, filename, arcname=None, *, strict_timestamps=True):
        """Construct an appropriate ZipInfo for a file on the filesystem.

        filename should be the path to a file or directory on the filesystem.

        arcname is the name which it will have within the archive (by default,
        this will be the same as filename, but without a drive letter and with
        leading path separators removed).
        """
        if isinstance(filename, os.PathLike):
            filename = os.fspath(filename)
        st = os.stat(filename)
        isdir = stat.S_ISDIR(st.st_mode)
        mtime = time.localtime(st.st_mtime)
        date_time = mtime[0:6]
        if not strict_timestamps and date_time[0] < 1980:
            date_time = (1980, 1, 1, 0, 0, 0)
        elif not strict_timestamps and date_time[0] > 2107:
            date_time = (2107, 12, 31, 23, 59, 59)
        # Create ZipInfo instance to store file information
        if arcname is None:
            arcname = filename
        arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
        while arcname[0] in (os.sep, os.altsep):
            arcname = arcname[1:]
        if isdir:
            arcname += '/'
        zinfo = cls(arcname, date_time)
        zinfo.external_attr = (st.st_mode & 0xFFFF) << 16  # Unix attributes
        if isdir:
            zinfo.file_size = 0
            zinfo.external_attr |= 0x10  # MS-DOS directory flag
        else:
            zinfo.file_size = st.st_size

        return zinfo

    def is_dir(self):
        """Return True if this archive member is a directory."""
        if self.filename.endswith('/'):
            return True
        # The ZIP format specification requires to use forward slashes
        # as the directory separator, but in practice some ZIP files
        # created on Windows can use backward slashes.  For compatibility
        # with the extraction code which already handles this:
        if os.path.altsep:
            return self.filename.endswith((os.path.sep, os.path.altsep))
        return False
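
# Usage (illustrative sketch; "somefile.txt" and "out.zip" are placeholder
# paths, not part of this module):
#
#     zi = ZipInfo.from_file("somefile.txt", arcname="data/somefile.txt")
#     zi.compress_type = ZIP_DEFLATED           # requires zlib
#     with ZipFile("out.zip", "w") as zf:
#         with open("somefile.txt", "rb") as src, zf.open(zi, "w") as dst:
#             shutil.copyfileobj(src, dst)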


# ZIP encryption uses the CRC32 one-byte primitive for scrambling some
# internal keys. We noticed that a direct implementation is faster than
# relying on binascii.crc32().

_crctable = None
def _gen_crc(crc):
    for j in range(8):
        if crc & 1:
            crc = (crc >> 1) ^ 0xEDB88320
        else:
            crc >>= 1
    return crc

# ZIP supports a password-based form of encryption. Even though known
# plaintext attacks have been found against it, it is still useful
# to be able to get data out of such a file.
#
# Usage:
#    zd = _ZipDecrypter(mypwd)
#    plain_bytes = zd(cypher_bytes)

def _ZipDecrypter(pwd):
    key0 = 305419896
    key1 = 591751049
    key2 = 878082192

    global _crctable
    if _crctable is None:
        _crctable = list(map(_gen_crc, range(256)))
    crctable = _crctable

    def crc32(ch, crc):
        """Compute the CRC32 primitive on one byte."""
        return (crc >> 8) ^ crctable[(crc ^ ch) & 0xFF]

    def update_keys(c):
        nonlocal key0, key1, key2
        key0 = crc32(c, key0)
        key1 = (key1 + (key0 & 0xFF)) & 0xFFFFFFFF
        key1 = (key1 * 134775813 + 1) & 0xFFFFFFFF
        key2 = crc32(key1 >> 24, key2)

    for p in pwd:
        update_keys(p)

    def decrypter(data):
        """Decrypt a bytes object."""
        result = bytearray()
        append = result.append
        for c in data:
            k = key2 | 2
            c ^= ((k * (k^1)) >> 8) & 0xFF
            update_keys(c)
            append(c)
        return bytes(result)

    return decrypter


class LZMACompressor:

    def __init__(self):
        self._comp = None

    def _init(self):
        props = lzma._encode_filter_properties({'id': lzma.FILTER_LZMA1})
        self._comp = lzma.LZMACompressor(lzma.FORMAT_RAW, filters=[
            lzma._decode_filter_properties(lzma.FILTER_LZMA1, props)
        ])
        return struct.pack('<BBH', 9, 4, len(props)) + props

    def compress(self, data):
        if self._comp is None:
            return self._init() + self._comp.compress(data)
        return self._comp.compress(data)

    def flush(self):
        if self._comp is None:
            return self._init() + self._comp.flush()
        return self._comp.flush()


class LZMADecompressor:

    def __init__(self):
        self._decomp = None
        self._unconsumed = b''
        self.eof = False

    def decompress(self, data):
        if self._decomp is None:
            self._unconsumed += data
            if len(self._unconsumed) <= 4:
                return b''
            psize, = struct.unpack('<H', self._unconsumed[2:4])
            if len(self._unconsumed) <= 4 + psize:
                return b''

            self._decomp = lzma.LZMADecompressor(lzma.FORMAT_RAW, filters=[
                lzma._decode_filter_properties(lzma.FILTER_LZMA1,
                                               self._unconsumed[4:4 + psize])
            ])
            data = self._unconsumed[4 + psize:]
            del self._unconsumed

        result = self._decomp.decompress(data)
        self.eof = self._decomp.eof
        return result


compressor_names = {
    0: 'store',
    1: 'shrink',
    2: 'reduce',
    3: 'reduce',
    4: 'reduce',
    5: 'reduce',
    6: 'implode',
    7: 'tokenize',
    8: 'deflate',
    9: 'deflate64',
    10: 'implode',
    12: 'bzip2',
    14: 'lzma',
    18: 'terse',
    19: 'lz77',
    97: 'wavpack',
    98: 'ppmd',
}

def _check_compression(compression):
    if compression == ZIP_STORED:
        pass
    elif compression == ZIP_DEFLATED:
        if not zlib:
            raise RuntimeError(
                "Compression requires the (missing) zlib module")
    elif compression == ZIP_BZIP2:
        if not bz2:
            raise RuntimeError(
                "Compression requires the (missing) bz2 module")
    elif compression == ZIP_LZMA:
        if not lzma:
            raise RuntimeError(
                "Compression requires the (missing) lzma module")
    else:
        raise NotImplementedError("That compression method is not supported")


def _get_compressor(compress_type, compresslevel=None):
    if compress_type == ZIP_DEFLATED:
        if compresslevel is not None:
            return zlib.compressobj(compresslevel, zlib.DEFLATED, -15)
        return zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
    elif compress_type == ZIP_BZIP2:
        if compresslevel is not None:
            return bz2.BZ2Compressor(compresslevel)
        return bz2.BZ2Compressor()
    # compresslevel is ignored for ZIP_LZMA
    elif compress_type == ZIP_LZMA:
        return LZMACompressor()
    else:
        return None


def _get_decompressor(compress_type):
    _check_compression(compress_type)
    if compress_type == ZIP_STORED:
        return None
    elif compress_type == ZIP_DEFLATED:
        return zlib.decompressobj(-15)
    elif compress_type == ZIP_BZIP2:
        return bz2.BZ2Decompressor()
    elif compress_type == ZIP_LZMA:
        return LZMADecompressor()
    else:
        descr = compressor_names.get(compress_type)
        if descr:
            raise NotImplementedError("compression type %d (%s)" % (compress_type, descr))
        else:
            raise NotImplementedError("compression type %d" % (compress_type,))
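
# Usage (illustrative sketch of the private helpers above; ZIP stores raw
# deflate streams, hence the -15 window bits with no zlib header):
#
#     comp = _get_compressor(ZIP_DEFLATED, compresslevel=6)
#     blob = comp.compress(b"hello world") + comp.flush()
#     decomp = _get_decompressor(ZIP_DEFLATED)
#     assert decomp.decompress(blob) == b"hello world"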


class _SharedFile:
    def __init__(self, file, pos, close, lock, writing):
        self._file = file
        self._pos = pos
        self._close = close
        self._lock = lock
        self._writing = writing
        self.seekable = file.seekable

    def tell(self):
        return self._pos

    def seek(self, offset, whence=0):
        with self._lock:
            if self._writing():
                raise ValueError("Can't reposition in the ZIP file while "
                                 "there is an open writing handle on it. "
                                 "Close the writing handle before trying to read.")
            if whence == os.SEEK_CUR:
                self._file.seek(self._pos + offset)
            else:
                self._file.seek(offset, whence)
            self._pos = self._file.tell()
            return self._pos

    def read(self, n=-1):
        with self._lock:
            if self._writing():
                raise ValueError("Can't read from the ZIP file while there "
                                 "is an open writing handle on it. "
                                 "Close the writing handle before trying to read.")
            self._file.seek(self._pos)
            data = self._file.read(n)
            self._pos = self._file.tell()
            return data

    def close(self):
        if self._file is not None:
            fileobj = self._file
            self._file = None
            self._close(fileobj)

# Provide the tell method for unseekable stream
class _Tellable:
    def __init__(self, fp):
        self.fp = fp
        self.offset = 0

    def write(self, data):
        n = self.fp.write(data)
        self.offset += n
        return n

    def tell(self):
        return self.offset

    def flush(self):
        self.fp.flush()

    def close(self):
        self.fp.close()


class ZipExtFile(io.BufferedIOBase):
    """File-like object for reading an archive member.
       Is returned by ZipFile.open().
    """

    # Max size supported by decompressor.
    MAX_N = 1 << 31 - 1

    # Read from compressed files in 4k blocks.
    MIN_READ_SIZE = 4096

    # Chunk size to read during seek
    MAX_SEEK_READ = 1 << 24

    def __init__(self, fileobj, mode, zipinfo, pwd=None,
                 close_fileobj=False):
        self._fileobj = fileobj
        self._pwd = pwd
        self._close_fileobj = close_fileobj

        self._compress_type = zipinfo.compress_type
        self._compress_left = zipinfo.compress_size
        self._left = zipinfo.file_size

        self._decompressor = _get_decompressor(self._compress_type)

        self._eof = False
        self._readbuffer = b''
        self._offset = 0

        self.newlines = None

        self.mode = mode
        self.name = zipinfo.filename

        if hasattr(zipinfo, 'CRC'):
            self._expected_crc = zipinfo.CRC
            self._running_crc = crc32(b'')
        else:
            self._expected_crc = None

        self._seekable = False
        try:
            if fileobj.seekable():
                self._orig_compress_start = fileobj.tell()
                self._orig_compress_size = zipinfo.compress_size
                self._orig_file_size = zipinfo.file_size
                self._orig_start_crc = self._running_crc
                self._orig_crc = self._expected_crc
                self._seekable = True
        except AttributeError:
            pass

        self._decrypter = None
        if pwd:
            if zipinfo.flag_bits & _MASK_USE_DATA_DESCRIPTOR:
                # compare against the file type from extended local headers
                check_byte = (zipinfo._raw_time >> 8) & 0xff
            else:
                # compare against the CRC otherwise
                check_byte = (zipinfo.CRC >> 24) & 0xff
            h = self._init_decrypter()
            if h != check_byte:
                raise RuntimeError("Bad password for file %r" % zipinfo.orig_filename)

    def _init_decrypter(self):
        self._decrypter = _ZipDecrypter(self._pwd)
        # The first 12 bytes in the cypher stream is an encryption header
        # used to strengthen the algorithm. The first 11 bytes are
        # completely random, while the 12th contains the MSB of the CRC,
        # or the MSB of the file time depending on the header type
        # and is used to check the correctness of the password.
        header = self._fileobj.read(12)
        self._compress_left -= 12
        return self._decrypter(header)[11]

    def __repr__(self):
        result = ['<%s.%s' % (self.__class__.__module__,
                              self.__class__.__qualname__)]
        if not self.closed:
            result.append(' name=%r mode=%r' % (self.name, self.mode))
            if self._compress_type != ZIP_STORED:
                result.append(' compress_type=%s' %
                              compressor_names.get(self._compress_type,
                                                   self._compress_type))
        else:
            result.append(' [closed]')
        result.append('>')
        return ''.join(result)

    def readline(self, limit=-1):
        """Read and return a line from the stream.
        If limit is specified, at most limit bytes will be read.
        """

        if limit < 0:
            # Shortcut common case - newline found in buffer.
            i = self._readbuffer.find(b'\n', self._offset) + 1
            if i > 0:
                line = self._readbuffer[self._offset: i]
                self._offset = i
                return line

        return io.BufferedIOBase.readline(self, limit)

    def peek(self, n=1):
        """Returns buffered bytes without advancing the position."""
        if n > len(self._readbuffer) - self._offset:
            chunk = self.read(n)
            if len(chunk) > self._offset:
                self._readbuffer = chunk + self._readbuffer[self._offset:]
                self._offset = 0
            else:
                self._offset -= len(chunk)

        # Return up to 512 bytes to reduce allocation overhead for tight loops.
        return self._readbuffer[self._offset: self._offset + 512]

    def readable(self):
        if self.closed:
            raise ValueError("I/O operation on closed file.")
        return True

    def read(self, n=-1):
        """Read and return up to n bytes.
        If the argument is omitted, None, or negative, data is read and returned until EOF is reached.
        """
        if self.closed:
            raise ValueError("read from closed file.")
        if n is None or n < 0:
            buf = self._readbuffer[self._offset:]
            self._readbuffer = b''
            self._offset = 0
            while not self._eof:
                buf += self._read1(self.MAX_N)
            return buf

        end = n + self._offset
        if end < len(self._readbuffer):
            buf = self._readbuffer[self._offset:end]
            self._offset = end
            return buf

        n = end - len(self._readbuffer)
        buf = self._readbuffer[self._offset:]
        self._readbuffer = b''
        self._offset = 0
        while n > 0 and not self._eof:
            data = self._read1(n)
            if n < len(data):
                self._readbuffer = data
                self._offset = n
                buf += data[:n]
                break
            buf += data
            n -= len(data)
        return buf

    def _update_crc(self, newdata):
        # Update the CRC using the given data.
        if self._expected_crc is None:
            # No need to compute the CRC if we don't have a reference value
            return
        self._running_crc = crc32(newdata, self._running_crc)
        # Check the CRC if we're at the end of the file
        if self._eof and self._running_crc != self._expected_crc:
            raise BadZipFile("Bad CRC-32 for file %r" % self.name)

    def read1(self, n):
        """Read up to n bytes with at most one read() system call."""

        if n is None or n < 0:
            buf = self._readbuffer[self._offset:]
            self._readbuffer = b''
            self._offset = 0
            while not self._eof:
                data = self._read1(self.MAX_N)
                if data:
                    buf += data
                    break
            return buf

        end = n + self._offset
        if end < len(self._readbuffer):
            buf = self._readbuffer[self._offset:end]
            self._offset = end
            return buf

        n = end - len(self._readbuffer)
        buf = self._readbuffer[self._offset:]
        self._readbuffer = b''
        self._offset = 0
        if n > 0:
            while not self._eof:
                data = self._read1(n)
                if n < len(data):
                    self._readbuffer = data
                    self._offset = n
                    buf += data[:n]
                    break
                if data:
                    buf += data
                    break
        return buf

    def _read1(self, n):
        # Read up to n compressed bytes with at most one read() system call,
        # decrypt and decompress them.
        if self._eof or n <= 0:
            return b''

        # Read from file.
        if self._compress_type == ZIP_DEFLATED:
            ## Handle unconsumed data.
            data = self._decompressor.unconsumed_tail
            if n > len(data):
                data += self._read2(n - len(data))
        else:
            data = self._read2(n)

        if self._compress_type == ZIP_STORED:
            self._eof = self._compress_left <= 0
        elif self._compress_type == ZIP_DEFLATED:
            n = max(n, self.MIN_READ_SIZE)
            data = self._decompressor.decompress(data, n)
            self._eof = (self._decompressor.eof or
                         self._compress_left <= 0 and
                         not self._decompressor.unconsumed_tail)
            if self._eof:
                data += self._decompressor.flush()
        else:
            data = self._decompressor.decompress(data)
            self._eof = self._decompressor.eof or self._compress_left <= 0

        data = data[:self._left]
        self._left -= len(data)
        if self._left <= 0:
            self._eof = True
        self._update_crc(data)
        return data

    def _read2(self, n):
        if self._compress_left <= 0:
            return b''

        n = max(n, self.MIN_READ_SIZE)
        n = min(n, self._compress_left)

        data = self._fileobj.read(n)
        self._compress_left -= len(data)
        if not data:
            raise EOFError

        if self._decrypter is not None:
            data = self._decrypter(data)
        return data

    def close(self):
        try:
            if self._close_fileobj:
                self._fileobj.close()
        finally:
            super().close()

    def seekable(self):
        if self.closed:
            raise ValueError("I/O operation on closed file.")
        return self._seekable

    def seek(self, offset, whence=os.SEEK_SET):
        if self.closed:
            raise ValueError("seek on closed file.")
        if not self._seekable:
            raise io.UnsupportedOperation("underlying stream is not seekable")
        curr_pos = self.tell()
        if whence == os.SEEK_SET:
            new_pos = offset
        elif whence == os.SEEK_CUR:
            new_pos = curr_pos + offset
        elif whence == os.SEEK_END:
            new_pos = self._orig_file_size + offset
        else:
            raise ValueError("whence must be os.SEEK_SET (0), "
                             "os.SEEK_CUR (1), or os.SEEK_END (2)")

        if new_pos > self._orig_file_size:
            new_pos = self._orig_file_size

        if new_pos < 0:
            new_pos = 0

        read_offset = new_pos - curr_pos
        buff_offset = read_offset + self._offset

        if buff_offset >= 0 and buff_offset < len(self._readbuffer):
            # Just move the _offset index if the new position is in the _readbuffer
            self._offset = buff_offset
            read_offset = 0
        # Fast seek uncompressed unencrypted file
        elif self._compress_type == ZIP_STORED and self._decrypter is None and read_offset != 0:
            # disable CRC checking after first seeking - it would be invalid
            self._expected_crc = None
            # seek actual file taking already buffered data into account
            read_offset -= len(self._readbuffer) - self._offset
            self._fileobj.seek(read_offset, os.SEEK_CUR)
            self._left -= read_offset
            self._compress_left -= read_offset
            self._eof = self._left <= 0
            read_offset = 0
            # flush read buffer
            self._readbuffer = b''
            self._offset = 0
        elif read_offset < 0:
            # Position is before the current position. Reset the ZipExtFile
            self._fileobj.seek(self._orig_compress_start)
            self._running_crc = self._orig_start_crc
            self._expected_crc = self._orig_crc
            self._compress_left = self._orig_compress_size
            self._left = self._orig_file_size
            self._readbuffer = b''
            self._offset = 0
            self._decompressor = _get_decompressor(self._compress_type)
            self._eof = False
            read_offset = new_pos
            if self._decrypter is not None:
                self._init_decrypter()

        while read_offset > 0:
            read_len = min(self.MAX_SEEK_READ, read_offset)
            self.read(read_len)
            read_offset -= read_len

        return self.tell()

    def tell(self):
        if self.closed:
            raise ValueError("tell on closed file.")
        if not self._seekable:
            raise io.UnsupportedOperation("underlying stream is not seekable")
        filepos = self._orig_file_size - self._left - len(self._readbuffer) + self._offset
        return filepos


class _ZipWriteFile(io.BufferedIOBase):
    def __init__(self, zf, zinfo, zip64):
        self._zinfo = zinfo
        self._zip64 = zip64
        self._zipfile = zf
        self._compressor = _get_compressor(zinfo.compress_type,
                                           zinfo._compresslevel)
        self._file_size = 0
        self._compress_size = 0
        self._crc = 0

    @property
    def _fileobj(self):
        return self._zipfile.fp

    def writable(self):
        return True

    def write(self, data):
        if self.closed:
            raise ValueError('I/O operation on closed file.')

        # Accept any data that supports the buffer protocol
        if isinstance(data, (bytes, bytearray)):
            nbytes = len(data)
        else:
            data = memoryview(data)
            nbytes = data.nbytes
        self._file_size += nbytes

        self._crc = crc32(data, self._crc)
        if self._compressor:
            data = self._compressor.compress(data)
            self._compress_size += len(data)
        self._fileobj.write(data)
        return nbytes

    def close(self):
        if self.closed:
            return
        try:
            super().close()
            # Flush any data from the compressor, and update header info
            if self._compressor:
                buf = self._compressor.flush()
                self._compress_size += len(buf)
                self._fileobj.write(buf)
                self._zinfo.compress_size = self._compress_size
            else:
                self._zinfo.compress_size = self._file_size
            self._zinfo.CRC = self._crc
            self._zinfo.file_size = self._file_size

            if not self._zip64:
                if self._file_size > ZIP64_LIMIT:
                    raise RuntimeError("File size too large, try using force_zip64")
                if self._compress_size > ZIP64_LIMIT:
                    raise RuntimeError("Compressed size too large, try using force_zip64")

            # Write updated header info
            if self._zinfo.flag_bits & _MASK_USE_DATA_DESCRIPTOR:
                # Write CRC and file sizes after the file data
                fmt = '<LLQQ' if self._zip64 else '<LLLL'
                self._fileobj.write(struct.pack(fmt, _DD_SIGNATURE, self._zinfo.CRC,
                    self._zinfo.compress_size, self._zinfo.file_size))
                self._zipfile.start_dir = self._fileobj.tell()
            else:
                # Seek backwards and write file header (which will now include
                # correct CRC and file sizes)

                # Preserve current position in file
                self._zipfile.start_dir = self._fileobj.tell()
                self._fileobj.seek(self._zinfo.header_offset)
                self._fileobj.write(self._zinfo.FileHeader(self._zip64))
                self._fileobj.seek(self._zipfile.start_dir)

            # Successfully written: Add file to our caches
            self._zipfile.filelist.append(self._zinfo)
            self._zipfile.NameToInfo[self._zinfo.filename] = self._zinfo
        finally:
            self._zipfile._writing = False


class ZipFile:
    """ Class with methods to open, read, write, close, list zip files.

    z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=True,
                compresslevel=None)

    file: Either the path to the file, or a file-like object.
          If it is a path, the file will be opened and closed by ZipFile.
    mode: The mode can be either read 'r', write 'w', exclusive create 'x',
          or append 'a'.
    compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib),
                 ZIP_BZIP2 (requires bz2) or ZIP_LZMA (requires lzma).
    allowZip64: if True ZipFile will create files with ZIP64 extensions when
                needed, otherwise it will raise an exception when this would
                be necessary.
    compresslevel: None (default for the given compression type) or an integer
                   specifying the level to pass to the compressor.
                   When using ZIP_STORED or ZIP_LZMA this keyword has no effect.
                   When using ZIP_DEFLATED integers 0 through 9 are accepted.
                   When using ZIP_BZIP2 integers 1 through 9 are accepted.
    """
  1114. fp = None # Set here since __del__ checks it
  1115. _windows_illegal_name_trans_table = None
  1116. def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=True,
  1117. compresslevel=None, *, strict_timestamps=True, metadata_encoding=None):
  1118. """Open the ZIP file with mode read 'r', write 'w', exclusive create 'x',
  1119. or append 'a'."""
  1120. if mode not in ('r', 'w', 'x', 'a'):
  1121. raise ValueError("ZipFile requires mode 'r', 'w', 'x', or 'a'")
  1122. _check_compression(compression)
  1123. self._allowZip64 = allowZip64
  1124. self._didModify = False
  1125. self.debug = 0 # Level of printing: 0 through 3
  1126. self.NameToInfo = {} # Find file info given name
  1127. self.filelist = [] # List of ZipInfo instances for archive
  1128. self.compression = compression # Method of compression
  1129. self.compresslevel = compresslevel
  1130. self.mode = mode
  1131. self.pwd = None
  1132. self._comment = b''
  1133. self._strict_timestamps = strict_timestamps
  1134. self.metadata_encoding = metadata_encoding
  1135. # Check that we don't try to write with nonconforming codecs
  1136. if self.metadata_encoding and mode != 'r':
  1137. raise ValueError(
  1138. "metadata_encoding is only supported for reading files")
  1139. # Check if we were passed a file-like object
  1140. if isinstance(file, os.PathLike):
  1141. file = os.fspath(file)
  1142. if isinstance(file, str):
  1143. # No, it's a filename
  1144. self._filePassed = 0
  1145. self.filename = file
  1146. modeDict = {'r' : 'rb', 'w': 'w+b', 'x': 'x+b', 'a' : 'r+b',
  1147. 'r+b': 'w+b', 'w+b': 'wb', 'x+b': 'xb'}
  1148. filemode = modeDict[mode]
  1149. while True:
  1150. try:
  1151. self.fp = io.open(file, filemode)
  1152. except OSError:
  1153. if filemode in modeDict:
  1154. filemode = modeDict[filemode]
  1155. continue
  1156. raise
  1157. break
  1158. else:
  1159. self._filePassed = 1
  1160. self.fp = file
  1161. self.filename = getattr(file, 'name', None)
  1162. self._fileRefCnt = 1
  1163. self._lock = threading.RLock()
  1164. self._seekable = True
  1165. self._writing = False
  1166. try:
  1167. if mode == 'r':
  1168. self._RealGetContents()
  1169. elif mode in ('w', 'x'):
  1170. # set the modified flag so central directory gets written
  1171. # even if no files are added to the archive
  1172. self._didModify = True
  1173. try:
  1174. self.start_dir = self.fp.tell()
  1175. except (AttributeError, OSError):
  1176. self.fp = _Tellable(self.fp)
  1177. self.start_dir = 0
  1178. self._seekable = False
  1179. else:
  1180. # Some file-like objects can provide tell() but not seek()
  1181. try:
  1182. self.fp.seek(self.start_dir)
  1183. except (AttributeError, OSError):
  1184. self._seekable = False
  1185. elif mode == 'a':
  1186. try:
  1187. # See if file is a zip file
  1188. self._RealGetContents()
  1189. # seek to start of directory and overwrite
  1190. self.fp.seek(self.start_dir)
  1191. except BadZipFile:
  1192. # file is not a zip file, just append
  1193. self.fp.seek(0, 2)
  1194. # set the modified flag so central directory gets written
  1195. # even if no files are added to the archive
  1196. self._didModify = True
  1197. self.start_dir = self.fp.tell()
  1198. else:
  1199. raise ValueError("Mode must be 'r', 'w', 'x', or 'a'")
  1200. except:
  1201. fp = self.fp
  1202. self.fp = None
  1203. self._fpclose(fp)
  1204. raise
  1205. def __enter__(self):
  1206. return self
  1207. def __exit__(self, type, value, traceback):
  1208. self.close()
  1209. def __repr__(self):
  1210. result = ['<%s.%s' % (self.__class__.__module__,
  1211. self.__class__.__qualname__)]
  1212. if self.fp is not None:
  1213. if self._filePassed:
  1214. result.append(' file=%r' % self.fp)
  1215. elif self.filename is not None:
  1216. result.append(' filename=%r' % self.filename)
  1217. result.append(' mode=%r' % self.mode)
  1218. else:
  1219. result.append(' [closed]')
  1220. result.append('>')
  1221. return ''.join(result)

    def _RealGetContents(self):
        """Read in the table of contents for the ZIP file."""
        fp = self.fp
        try:
            endrec = _EndRecData(fp)
        except OSError:
            raise BadZipFile("File is not a zip file")
        if not endrec:
            raise BadZipFile("File is not a zip file")
        if self.debug > 1:
            print(endrec)
        size_cd = endrec[_ECD_SIZE]             # bytes in central directory
        offset_cd = endrec[_ECD_OFFSET]         # offset of central directory
        self._comment = endrec[_ECD_COMMENT]    # archive comment

        # "concat" is zero, unless zip was concatenated to another file
        concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
        if endrec[_ECD_SIGNATURE] == stringEndArchive64:
            # If Zip64 extension structures are present, account for them
            concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)

        if self.debug > 2:
            inferred = concat + offset_cd
            print("given, inferred, offset", offset_cd, inferred, concat)
        # self.start_dir:  Position of start of central directory
        self.start_dir = offset_cd + concat
        if self.start_dir < 0:
            raise BadZipFile("Bad offset for central directory")
        fp.seek(self.start_dir, 0)
        data = fp.read(size_cd)
        fp = io.BytesIO(data)
        total = 0
        while total < size_cd:
            centdir = fp.read(sizeCentralDir)
            if len(centdir) != sizeCentralDir:
                raise BadZipFile("Truncated central directory")
            centdir = struct.unpack(structCentralDir, centdir)
            if centdir[_CD_SIGNATURE] != stringCentralDir:
                raise BadZipFile("Bad magic number for central directory")
            if self.debug > 2:
                print(centdir)
            filename = fp.read(centdir[_CD_FILENAME_LENGTH])
            orig_filename_crc = crc32(filename)
            flags = centdir[_CD_FLAG_BITS]
            if flags & _MASK_UTF_FILENAME:
                # UTF-8 file names extension
                filename = filename.decode('utf-8')
            else:
                # Historical ZIP filename encoding
                filename = filename.decode(self.metadata_encoding or 'cp437')
            # Create ZipInfo instance to store file information
            x = ZipInfo(filename)
            x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
            x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
            x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
            (x.create_version, x.create_system, x.extract_version, x.reserved,
             x.flag_bits, x.compress_type, t, d,
             x.CRC, x.compress_size, x.file_size) = centdir[1:12]
            if x.extract_version > MAX_EXTRACT_VERSION:
                raise NotImplementedError("zip file version %.1f" %
                                          (x.extract_version / 10))
            x.volume, x.internal_attr, x.external_attr = centdir[15:18]
            # Convert date/time code to (year, month, day, hour, min, sec)
            x._raw_time = t
            x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
                            t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )

            x._decodeExtra(orig_filename_crc)
            x.header_offset = x.header_offset + concat
            self.filelist.append(x)
            self.NameToInfo[x.filename] = x

            # update total bytes read from central directory
            total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
                     + centdir[_CD_EXTRA_FIELD_LENGTH]
                     + centdir[_CD_COMMENT_LENGTH])

            if self.debug > 2:
                print("total", total)

        end_offset = self.start_dir
        for zinfo in sorted(self.filelist,
                            key=lambda zinfo: zinfo.header_offset,
                            reverse=True):
            zinfo._end_offset = end_offset
            end_offset = zinfo.header_offset

    def namelist(self):
        """Return a list of file names in the archive."""
        return [data.filename for data in self.filelist]

    def infolist(self):
        """Return a list of class ZipInfo instances for files in the
        archive."""
        return self.filelist

    def printdir(self, file=None):
        """Print a table of contents for the zip file."""
        print("%-46s %19s %12s" % ("File Name", "Modified    ", "Size"),
              file=file)
        for zinfo in self.filelist:
            date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
            print("%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size),
                  file=file)

    def testzip(self):
        """Read all the files and check the CRC.

        Return None if all files could be read successfully, or the name
        of the offending file otherwise."""
        chunk_size = 2 ** 20
        for zinfo in self.filelist:
            try:
                # Read by chunks, to avoid an OverflowError or a
                # MemoryError with very large embedded files.
                with self.open(zinfo.filename, "r") as f:
                    while f.read(chunk_size):     # Check CRC-32
                        pass
            except BadZipFile:
                return zinfo.filename
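
    # Illustrative sketch (editor's addition, not part of the original module):
    # a minimal integrity check of a whole archive; 'example.zip' is an
    # assumed file name.
    #
    #   from zipfile import ZipFile
    #   with ZipFile('example.zip') as zf:
    #       bad = zf.testzip()      # None means every member's CRC checked out
    #       print('first bad member:', bad)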

    def getinfo(self, name):
        """Return the instance of ZipInfo given 'name'."""
        info = self.NameToInfo.get(name)
        if info is None:
            raise KeyError(
                'There is no item named %r in the archive' % name)

        return info

    def setpassword(self, pwd):
        """Set default password for encrypted files."""
        if pwd and not isinstance(pwd, bytes):
            raise TypeError("pwd: expected bytes, got %s" % type(pwd).__name__)
        if pwd:
            self.pwd = pwd
        else:
            self.pwd = None

    @property
    def comment(self):
        """The comment text associated with the ZIP file."""
        return self._comment

    @comment.setter
    def comment(self, comment):
        if not isinstance(comment, bytes):
            raise TypeError("comment: expected bytes, got %s" % type(comment).__name__)
        # check for valid comment length
        if len(comment) > ZIP_MAX_COMMENT:
            import warnings
            warnings.warn('Archive comment is too long; truncating to %d bytes'
                          % ZIP_MAX_COMMENT, stacklevel=2)
            comment = comment[:ZIP_MAX_COMMENT]
        self._comment = comment
        self._didModify = True

    def read(self, name, pwd=None):
        """Return file bytes for name. 'pwd' is the password to decrypt
        encrypted files."""
        with self.open(name, "r", pwd) as fp:
            return fp.read()

    def open(self, name, mode="r", pwd=None, *, force_zip64=False):
        """Return file-like object for 'name'.

        name is a string for the file name within the ZIP file, or a ZipInfo
        object.

        mode should be 'r' to read a file already in the ZIP file, or 'w' to
        write to a file newly added to the archive.

        pwd is the password to decrypt files (only used for reading).

        When writing, if the file size is not known in advance but may exceed
        2 GiB, pass force_zip64 to use the ZIP64 format, which can handle large
        files.  If the size is known in advance, it is best to pass a ZipInfo
        instance for name, with zinfo.file_size set.
        """
        if mode not in {"r", "w"}:
            raise ValueError('open() requires mode "r" or "w"')
        if pwd and (mode == "w"):
            raise ValueError("pwd is only supported for reading files")
        if not self.fp:
            raise ValueError(
                "Attempt to use ZIP archive that was already closed")

        # Make sure we have an info object
        if isinstance(name, ZipInfo):
            # 'name' is already an info object
            zinfo = name
        elif mode == 'w':
            zinfo = ZipInfo(name)
            zinfo.compress_type = self.compression
            zinfo._compresslevel = self.compresslevel
        else:
            # Get info object for name
            zinfo = self.getinfo(name)

        if mode == 'w':
            return self._open_to_write(zinfo, force_zip64=force_zip64)

        if self._writing:
            raise ValueError("Can't read from the ZIP file while there "
                             "is an open writing handle on it. "
                             "Close the writing handle before trying to read.")

        # Open for reading:
        self._fileRefCnt += 1
        zef_file = _SharedFile(self.fp, zinfo.header_offset,
                               self._fpclose, self._lock, lambda: self._writing)
        try:
            # Skip the file header:
            fheader = zef_file.read(sizeFileHeader)
            if len(fheader) != sizeFileHeader:
                raise BadZipFile("Truncated file header")
            fheader = struct.unpack(structFileHeader, fheader)
            if fheader[_FH_SIGNATURE] != stringFileHeader:
                raise BadZipFile("Bad magic number for file header")

            fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
            if fheader[_FH_EXTRA_FIELD_LENGTH]:
                zef_file.seek(fheader[_FH_EXTRA_FIELD_LENGTH], whence=1)

            if zinfo.flag_bits & _MASK_COMPRESSED_PATCH:
                # Zip 2.7: compressed patched data
                raise NotImplementedError("compressed patched data (flag bit 5)")

            if zinfo.flag_bits & _MASK_STRONG_ENCRYPTION:
                # strong encryption
                raise NotImplementedError("strong encryption (flag bit 6)")

            if fheader[_FH_GENERAL_PURPOSE_FLAG_BITS] & _MASK_UTF_FILENAME:
                # UTF-8 filename
                fname_str = fname.decode("utf-8")
            else:
                fname_str = fname.decode(self.metadata_encoding or "cp437")

            if fname_str != zinfo.orig_filename:
                raise BadZipFile(
                    'File name in directory %r and header %r differ.'
                    % (zinfo.orig_filename, fname))

            if (zinfo._end_offset is not None and
                zef_file.tell() + zinfo.compress_size > zinfo._end_offset):
                raise BadZipFile(f"Overlapped entries: {zinfo.orig_filename!r} (possible zip bomb)")

            # check for encrypted flag & handle password
            is_encrypted = zinfo.flag_bits & _MASK_ENCRYPTED
            if is_encrypted:
                if not pwd:
                    pwd = self.pwd
                if pwd and not isinstance(pwd, bytes):
                    raise TypeError("pwd: expected bytes, got %s" % type(pwd).__name__)
                if not pwd:
                    raise RuntimeError("File %r is encrypted, password "
                                       "required for extraction" % name)
            else:
                pwd = None

            return ZipExtFile(zef_file, mode, zinfo, pwd, True)
        except:
            zef_file.close()
            raise
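
    # Illustrative sketch (editor's addition): the two supported modes of
    # open(); 'example.zip', 'notes.txt' and 'big.bin' are assumed names.
    #
    #   from zipfile import ZipFile
    #   with ZipFile('example.zip', 'a') as zf:
    #       with zf.open('notes.txt') as f:          # read an existing member
    #           header = f.readline()
    #       # write a member whose final size is unknown but may exceed 2 GiB
    #       with zf.open('big.bin', mode='w', force_zip64=True) as f:
    #           f.write(b'\0' * 1024)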

    def _open_to_write(self, zinfo, force_zip64=False):
        if force_zip64 and not self._allowZip64:
            raise ValueError(
                "force_zip64 is True, but allowZip64 was False when opening "
                "the ZIP file."
            )
        if self._writing:
            raise ValueError("Can't write to the ZIP file while there is "
                             "another write handle open on it. "
                             "Close the first handle before opening another.")

        # Size and CRC are overwritten with correct data after processing the file
        zinfo.compress_size = 0
        zinfo.CRC = 0

        zinfo.flag_bits = 0x00
        if zinfo.compress_type == ZIP_LZMA:
            # Compressed data includes an end-of-stream (EOS) marker
            zinfo.flag_bits |= _MASK_COMPRESS_OPTION_1
        if not self._seekable:
            zinfo.flag_bits |= _MASK_USE_DATA_DESCRIPTOR

        if not zinfo.external_attr:
            zinfo.external_attr = 0o600 << 16  # permissions: ?rw-------

        # Compressed size can be larger than uncompressed size
        zip64 = force_zip64 or (zinfo.file_size * 1.05 > ZIP64_LIMIT)
        if not self._allowZip64 and zip64:
            raise LargeZipFile("Filesize would require ZIP64 extensions")

        if self._seekable:
            self.fp.seek(self.start_dir)
        zinfo.header_offset = self.fp.tell()

        self._writecheck(zinfo)
        self._didModify = True

        self.fp.write(zinfo.FileHeader(zip64))

        self._writing = True
        return _ZipWriteFile(self, zinfo, zip64)

    def extract(self, member, path=None, pwd=None):
        """Extract a member from the archive to the current working directory,
        using its full name. Its file information is extracted as accurately
        as possible. `member' may be a filename or a ZipInfo object. You can
        specify a different directory using `path'. You can specify the
        password to decrypt the file using 'pwd'.
        """
        if path is None:
            path = os.getcwd()
        else:
            path = os.fspath(path)

        return self._extract_member(member, path, pwd)

    def extractall(self, path=None, members=None, pwd=None):
        """Extract all members from the archive to the current working
        directory. `path' specifies a different directory to extract to.
        `members' is optional and must be a subset of the list returned
        by namelist(). You can specify the password to decrypt all files
        using 'pwd'.
        """
        if members is None:
            members = self.namelist()

        if path is None:
            path = os.getcwd()
        else:
            path = os.fspath(path)

        for zipinfo in members:
            self._extract_member(zipinfo, path, pwd)
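
    # Illustrative sketch (editor's addition): extracting a subset of members
    # into a target directory; 'example.zip' and 'out' are assumed names.
    #
    #   from zipfile import ZipFile
    #   with ZipFile('example.zip') as zf:
    #       wanted = [n for n in zf.namelist() if n.endswith('.txt')]
    #       zf.extractall(path='out', members=wanted)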

    @classmethod
    def _sanitize_windows_name(cls, arcname, pathsep):
        """Replace bad characters and remove trailing dots from parts."""
        table = cls._windows_illegal_name_trans_table
        if not table:
            illegal = ':<>|"?*'
            table = str.maketrans(illegal, '_' * len(illegal))
            cls._windows_illegal_name_trans_table = table
        arcname = arcname.translate(table)
        # remove trailing dots and spaces
        arcname = (x.rstrip(' .') for x in arcname.split(pathsep))
        # rejoin, removing empty parts.
        arcname = pathsep.join(x for x in arcname if x)
        return arcname

    def _extract_member(self, member, targetpath, pwd):
        """Extract the ZipInfo object 'member' to a physical
        file on the path targetpath.
        """
        if not isinstance(member, ZipInfo):
            member = self.getinfo(member)

        # build the destination pathname, replacing
        # forward slashes to platform specific separators.
        arcname = member.filename.replace('/', os.path.sep)

        if os.path.altsep:
            arcname = arcname.replace(os.path.altsep, os.path.sep)
        # interpret absolute pathname as relative, remove drive letter or
        # UNC path, redundant separators, "." and ".." components.
        arcname = os.path.splitdrive(arcname)[1]
        invalid_path_parts = ('', os.path.curdir, os.path.pardir)
        arcname = os.path.sep.join(x for x in arcname.split(os.path.sep)
                                   if x not in invalid_path_parts)
        if os.path.sep == '\\':
            # filter illegal characters on Windows
            arcname = self._sanitize_windows_name(arcname, os.path.sep)

        if not arcname and not member.is_dir():
            raise ValueError("Empty filename.")

        targetpath = os.path.join(targetpath, arcname)
        targetpath = os.path.normpath(targetpath)

        # Create all upper directories if necessary.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            os.makedirs(upperdirs)

        if member.is_dir():
            if not os.path.isdir(targetpath):
                os.mkdir(targetpath)
            return targetpath

        with self.open(member, pwd=pwd) as source, \
             open(targetpath, "wb") as target:
            shutil.copyfileobj(source, target)

        return targetpath

    def _writecheck(self, zinfo):
        """Check for errors before writing a file to the archive."""
        if zinfo.filename in self.NameToInfo:
            import warnings
            warnings.warn('Duplicate name: %r' % zinfo.filename, stacklevel=3)
        if self.mode not in ('w', 'x', 'a'):
            raise ValueError("write() requires mode 'w', 'x', or 'a'")
        if not self.fp:
            raise ValueError(
                "Attempt to write ZIP archive that was already closed")
        _check_compression(zinfo.compress_type)
        if not self._allowZip64:
            requires_zip64 = None
            if len(self.filelist) >= ZIP_FILECOUNT_LIMIT:
                requires_zip64 = "Files count"
            elif zinfo.file_size > ZIP64_LIMIT:
                requires_zip64 = "Filesize"
            elif zinfo.header_offset > ZIP64_LIMIT:
                requires_zip64 = "Zipfile size"
            if requires_zip64:
                raise LargeZipFile(requires_zip64 +
                                   " would require ZIP64 extensions")

    def write(self, filename, arcname=None,
              compress_type=None, compresslevel=None):
        """Put the bytes from filename into the archive under the name
        arcname."""
        if not self.fp:
            raise ValueError(
                "Attempt to write to ZIP archive that was already closed")
        if self._writing:
            raise ValueError(
                "Can't write to ZIP archive while an open writing handle exists"
            )

        zinfo = ZipInfo.from_file(filename, arcname,
                                  strict_timestamps=self._strict_timestamps)

        if zinfo.is_dir():
            zinfo.compress_size = 0
            zinfo.CRC = 0
            self.mkdir(zinfo)
        else:
            if compress_type is not None:
                zinfo.compress_type = compress_type
            else:
                zinfo.compress_type = self.compression

            if compresslevel is not None:
                zinfo._compresslevel = compresslevel
            else:
                zinfo._compresslevel = self.compresslevel

            with open(filename, "rb") as src, self.open(zinfo, 'w') as dest:
                shutil.copyfileobj(src, dest, 1024*8)
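
    # Illustrative sketch (editor's addition): adding an on-disk file under a
    # different archive name with per-member compression settings;
    # 'report.csv' is an assumed file name.
    #
    #   from zipfile import ZipFile, ZIP_DEFLATED
    #   with ZipFile('example.zip', 'w') as zf:
    #       zf.write('report.csv', arcname='data/report.csv',
    #                compress_type=ZIP_DEFLATED, compresslevel=9)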

    def writestr(self, zinfo_or_arcname, data,
                 compress_type=None, compresslevel=None):
        """Write a file into the archive.  The contents is 'data', which
        may be either a 'str' or a 'bytes' instance; if it is a 'str',
        it is encoded as UTF-8 first.
        'zinfo_or_arcname' is either a ZipInfo instance or
        the name of the file in the archive."""
        if isinstance(data, str):
            data = data.encode("utf-8")
        if not isinstance(zinfo_or_arcname, ZipInfo):
            zinfo = ZipInfo(filename=zinfo_or_arcname,
                            date_time=time.localtime(time.time())[:6])
            zinfo.compress_type = self.compression
            zinfo._compresslevel = self.compresslevel
            if zinfo.filename.endswith('/'):
                zinfo.external_attr = 0o40775 << 16   # drwxrwxr-x
                zinfo.external_attr |= 0x10           # MS-DOS directory flag
            else:
                zinfo.external_attr = 0o600 << 16     # ?rw-------
        else:
            zinfo = zinfo_or_arcname

        if not self.fp:
            raise ValueError(
                "Attempt to write to ZIP archive that was already closed")
        if self._writing:
            raise ValueError(
                "Can't write to ZIP archive while an open writing handle exists."
            )

        if compress_type is not None:
            zinfo.compress_type = compress_type

        if compresslevel is not None:
            zinfo._compresslevel = compresslevel

        zinfo.file_size = len(data)            # Uncompressed size
        with self._lock:
            with self.open(zinfo, mode='w') as dest:
                dest.write(data)
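
    # Illustrative sketch (editor's addition): writing in-memory data; a str
    # payload is encoded as UTF-8 before being stored.
    #
    #   from zipfile import ZipFile
    #   with ZipFile('example.zip', 'w') as zf:
    #       zf.writestr('hello.txt', 'hello, world\n')
    #       zf.writestr('raw.bin', b'\x00\x01\x02')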

    def mkdir(self, zinfo_or_directory_name, mode=511):
        """Creates a directory inside the zip archive."""
        if isinstance(zinfo_or_directory_name, ZipInfo):
            zinfo = zinfo_or_directory_name
            if not zinfo.is_dir():
                raise ValueError("The given ZipInfo does not describe a directory")
        elif isinstance(zinfo_or_directory_name, str):
            directory_name = zinfo_or_directory_name
            if not directory_name.endswith("/"):
                directory_name += "/"
            zinfo = ZipInfo(directory_name)
            zinfo.compress_size = 0
            zinfo.CRC = 0
            zinfo.external_attr = ((0o40000 | mode) & 0xFFFF) << 16
            zinfo.file_size = 0
            zinfo.external_attr |= 0x10
        else:
            raise TypeError("Expected type str or ZipInfo")

        with self._lock:
            if self._seekable:
                self.fp.seek(self.start_dir)
            zinfo.header_offset = self.fp.tell()  # Start of header bytes
            if zinfo.compress_type == ZIP_LZMA:
                # Compressed data includes an end-of-stream (EOS) marker
                zinfo.flag_bits |= _MASK_COMPRESS_OPTION_1

            self._writecheck(zinfo)
            self._didModify = True

            self.filelist.append(zinfo)
            self.NameToInfo[zinfo.filename] = zinfo
            self.fp.write(zinfo.FileHeader(False))
            self.start_dir = self.fp.tell()
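
    # Illustrative sketch (editor's addition): creating an explicit directory
    # entry; mode defaults to 511 (0o777) and is stored in external_attr.
    #
    #   from zipfile import ZipFile
    #   with ZipFile('example.zip', 'w') as zf:
    #       zf.mkdir('logs', mode=0o755)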

    def __del__(self):
        """Call the "close()" method in case the user forgot."""
        self.close()

    def close(self):
        """Close the file, and for mode 'w', 'x' and 'a' write the ending
        records."""
        if self.fp is None:
            return

        if self._writing:
            raise ValueError("Can't close the ZIP file while there is "
                             "an open writing handle on it. "
                             "Close the writing handle before closing the zip.")

        try:
            if self.mode in ('w', 'x', 'a') and self._didModify: # write ending records
                with self._lock:
                    if self._seekable:
                        self.fp.seek(self.start_dir)
                    self._write_end_record()
        finally:
            fp = self.fp
            self.fp = None
            self._fpclose(fp)

    def _write_end_record(self):
        for zinfo in self.filelist:         # write central directory
            dt = zinfo.date_time
            dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
            dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
            extra = []
            if zinfo.file_size > ZIP64_LIMIT \
               or zinfo.compress_size > ZIP64_LIMIT:
                extra.append(zinfo.file_size)
                extra.append(zinfo.compress_size)
                file_size = 0xffffffff
                compress_size = 0xffffffff
            else:
                file_size = zinfo.file_size
                compress_size = zinfo.compress_size

            if zinfo.header_offset > ZIP64_LIMIT:
                extra.append(zinfo.header_offset)
                header_offset = 0xffffffff
            else:
                header_offset = zinfo.header_offset

            extra_data = zinfo.extra
            min_version = 0
            if extra:
                # Append a ZIP64 field to the extra's
                extra_data = _strip_extra(extra_data, (1,))
                extra_data = struct.pack(
                    '<HH' + 'Q'*len(extra),
                    1, 8*len(extra), *extra) + extra_data

                min_version = ZIP64_VERSION

            if zinfo.compress_type == ZIP_BZIP2:
                min_version = max(BZIP2_VERSION, min_version)
            elif zinfo.compress_type == ZIP_LZMA:
                min_version = max(LZMA_VERSION, min_version)

            extract_version = max(min_version, zinfo.extract_version)
            create_version = max(min_version, zinfo.create_version)
            filename, flag_bits = zinfo._encodeFilenameFlags()
            centdir = struct.pack(structCentralDir,
                                  stringCentralDir, create_version,
                                  zinfo.create_system, extract_version, zinfo.reserved,
                                  flag_bits, zinfo.compress_type, dostime, dosdate,
                                  zinfo.CRC, compress_size, file_size,
                                  len(filename), len(extra_data), len(zinfo.comment),
                                  0, zinfo.internal_attr, zinfo.external_attr,
                                  header_offset)
            self.fp.write(centdir)
            self.fp.write(filename)
            self.fp.write(extra_data)
            self.fp.write(zinfo.comment)

        pos2 = self.fp.tell()
        # Write end-of-zip-archive record
        centDirCount = len(self.filelist)
        centDirSize = pos2 - self.start_dir
        centDirOffset = self.start_dir
        requires_zip64 = None
        if centDirCount > ZIP_FILECOUNT_LIMIT:
            requires_zip64 = "Files count"
        elif centDirOffset > ZIP64_LIMIT:
            requires_zip64 = "Central directory offset"
        elif centDirSize > ZIP64_LIMIT:
            requires_zip64 = "Central directory size"
        if requires_zip64:
            # Need to write the ZIP64 end-of-archive records
            if not self._allowZip64:
                raise LargeZipFile(requires_zip64 +
                                   " would require ZIP64 extensions")
            zip64endrec = struct.pack(
                structEndArchive64, stringEndArchive64,
                44, 45, 45, 0, 0, centDirCount, centDirCount,
                centDirSize, centDirOffset)
            self.fp.write(zip64endrec)

            zip64locrec = struct.pack(
                structEndArchive64Locator,
                stringEndArchive64Locator, 0, pos2, 1)
            self.fp.write(zip64locrec)
            centDirCount = min(centDirCount, 0xFFFF)
            centDirSize = min(centDirSize, 0xFFFFFFFF)
            centDirOffset = min(centDirOffset, 0xFFFFFFFF)

        endrec = struct.pack(structEndArchive, stringEndArchive,
                             0, 0, centDirCount, centDirCount,
                             centDirSize, centDirOffset, len(self._comment))
        self.fp.write(endrec)
        self.fp.write(self._comment)
        if self.mode == "a":
            self.fp.truncate()
        self.fp.flush()

    def _fpclose(self, fp):
        assert self._fileRefCnt > 0
        self._fileRefCnt -= 1
        if not self._fileRefCnt and not self._filePassed:
            fp.close()


class PyZipFile(ZipFile):
    """Class to create ZIP archives with Python library files and packages."""

    def __init__(self, file, mode="r", compression=ZIP_STORED,
                 allowZip64=True, optimize=-1):
        ZipFile.__init__(self, file, mode=mode, compression=compression,
                         allowZip64=allowZip64)
        self._optimize = optimize

    def writepy(self, pathname, basename="", filterfunc=None):
        """Add all files from "pathname" to the ZIP archive.

        If pathname is a package directory, search the directory and
        all package subdirectories recursively for all *.py and enter
        the modules into the archive.  If pathname is a plain
        directory, listdir *.py and enter all modules.  Else, pathname
        must be a Python *.py file and the module will be put into the
        archive.  Added modules are always module.pyc.
        This method will compile the module.py into module.pyc if
        necessary.
        If filterfunc(pathname) is given, it is called for every path that
        would be added; if it returns a false value, that file or directory
        is skipped.
        """
        pathname = os.fspath(pathname)
        if filterfunc and not filterfunc(pathname):
            if self.debug:
                label = 'path' if os.path.isdir(pathname) else 'file'
                print('%s %r skipped by filterfunc' % (label, pathname))
            return
        dir, name = os.path.split(pathname)
        if os.path.isdir(pathname):
            initname = os.path.join(pathname, "__init__.py")
            if os.path.isfile(initname):
                # This is a package directory, add it
                if basename:
                    basename = "%s/%s" % (basename, name)
                else:
                    basename = name
                if self.debug:
                    print("Adding package in", pathname, "as", basename)
                fname, arcname = self._get_codename(initname[0:-3], basename)
                if self.debug:
                    print("Adding", arcname)
                self.write(fname, arcname)
                dirlist = sorted(os.listdir(pathname))
                dirlist.remove("__init__.py")
                # Add all *.py files and package subdirectories
                for filename in dirlist:
                    path = os.path.join(pathname, filename)
                    root, ext = os.path.splitext(filename)
                    if os.path.isdir(path):
                        if os.path.isfile(os.path.join(path, "__init__.py")):
                            # This is a package directory, add it
                            self.writepy(path, basename,
                                         filterfunc=filterfunc)  # Recursive call
                    elif ext == ".py":
                        if filterfunc and not filterfunc(path):
                            if self.debug:
                                print('file %r skipped by filterfunc' % path)
                            continue
                        fname, arcname = self._get_codename(path[0:-3],
                                                            basename)
                        if self.debug:
                            print("Adding", arcname)
                        self.write(fname, arcname)
            else:
                # This is NOT a package directory, add its files at top level
                if self.debug:
                    print("Adding files from directory", pathname)
                for filename in sorted(os.listdir(pathname)):
                    path = os.path.join(pathname, filename)
                    root, ext = os.path.splitext(filename)
                    if ext == ".py":
                        if filterfunc and not filterfunc(path):
                            if self.debug:
                                print('file %r skipped by filterfunc' % path)
                            continue
                        fname, arcname = self._get_codename(path[0:-3],
                                                            basename)
                        if self.debug:
                            print("Adding", arcname)
                        self.write(fname, arcname)
        else:
            if pathname[-3:] != ".py":
                raise RuntimeError(
                    'Files added with writepy() must end with ".py"')
            fname, arcname = self._get_codename(pathname[0:-3], basename)
            if self.debug:
                print("Adding file", arcname)
            self.write(fname, arcname)
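
    # Illustrative sketch (editor's addition): bundling a package as compiled
    # modules; 'mypkg' is an assumed package directory on disk.
    #
    #   from zipfile import PyZipFile
    #   with PyZipFile('lib.zip', 'w', optimize=2) as pzf:
    #       pzf.writepy('mypkg', filterfunc=lambda p: 'tests' not in p)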

    def _get_codename(self, pathname, basename):
        """Return (filename, archivename) for the path.

        Given a module name path, return the correct file path and
        archive name, compiling if necessary.  For example, given
        /python/lib/string, return (/python/lib/string.pyc, string).
        """
        def _compile(file, optimize=-1):
            import py_compile
            if self.debug:
                print("Compiling", file)
            try:
                py_compile.compile(file, doraise=True, optimize=optimize)
            except py_compile.PyCompileError as err:
                print(err.msg)
                return False
            return True

        file_py = pathname + ".py"
        file_pyc = pathname + ".pyc"
        pycache_opt0 = importlib.util.cache_from_source(file_py, optimization='')
        pycache_opt1 = importlib.util.cache_from_source(file_py, optimization=1)
        pycache_opt2 = importlib.util.cache_from_source(file_py, optimization=2)
        if self._optimize == -1:
            # legacy mode: use whatever file is present
            if (os.path.isfile(file_pyc) and
                  os.stat(file_pyc).st_mtime >= os.stat(file_py).st_mtime):
                # Use .pyc file.
                arcname = fname = file_pyc
            elif (os.path.isfile(pycache_opt0) and
                  os.stat(pycache_opt0).st_mtime >= os.stat(file_py).st_mtime):
                # Use the __pycache__/*.pyc file, but write it to the legacy pyc
                # file name in the archive.
                fname = pycache_opt0
                arcname = file_pyc
            elif (os.path.isfile(pycache_opt1) and
                  os.stat(pycache_opt1).st_mtime >= os.stat(file_py).st_mtime):
                # Use the __pycache__/*.pyc file, but write it to the legacy pyc
                # file name in the archive.
                fname = pycache_opt1
                arcname = file_pyc
            elif (os.path.isfile(pycache_opt2) and
                  os.stat(pycache_opt2).st_mtime >= os.stat(file_py).st_mtime):
                # Use the __pycache__/*.pyc file, but write it to the legacy pyc
                # file name in the archive.
                fname = pycache_opt2
                arcname = file_pyc
            else:
                # Compile py into PEP 3147 pyc file.
                if _compile(file_py):
                    if sys.flags.optimize == 0:
                        fname = pycache_opt0
                    elif sys.flags.optimize == 1:
                        fname = pycache_opt1
                    else:
                        fname = pycache_opt2
                    arcname = file_pyc
                else:
                    fname = arcname = file_py
        else:
            # new mode: use given optimization level
            if self._optimize == 0:
                fname = pycache_opt0
                arcname = file_pyc
            else:
                arcname = file_pyc
                if self._optimize == 1:
                    fname = pycache_opt1
                elif self._optimize == 2:
                    fname = pycache_opt2
                else:
                    msg = "invalid value for 'optimize': {!r}".format(self._optimize)
                    raise ValueError(msg)
            if not (os.path.isfile(fname) and
                    os.stat(fname).st_mtime >= os.stat(file_py).st_mtime):
                if not _compile(file_py, optimize=self._optimize):
                    fname = arcname = file_py
        archivename = os.path.split(arcname)[1]
        if basename:
            archivename = "%s/%s" % (basename, archivename)
        return (fname, archivename)


def main(args=None):
    import argparse
    description = 'A simple command-line interface for zipfile module.'
    parser = argparse.ArgumentParser(description=description)
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-l', '--list', metavar='<zipfile>',
                       help='Show listing of a zipfile')
    group.add_argument('-e', '--extract', nargs=2,
                       metavar=('<zipfile>', '<output_dir>'),
                       help='Extract zipfile into target dir')
    group.add_argument('-c', '--create', nargs='+',
                       metavar=('<name>', '<file>'),
                       help='Create zipfile from sources')
    group.add_argument('-t', '--test', metavar='<zipfile>',
                       help='Test if a zipfile is valid')
    parser.add_argument('--metadata-encoding', metavar='<encoding>',
                        help='Specify encoding of member names for -l, -e and -t')
    args = parser.parse_args(args)

    encoding = args.metadata_encoding

    if args.test is not None:
        src = args.test
        with ZipFile(src, 'r', metadata_encoding=encoding) as zf:
            badfile = zf.testzip()
        if badfile:
            print("The following enclosed file is corrupted: {!r}".format(badfile))
        print("Done testing")

    elif args.list is not None:
        src = args.list
        with ZipFile(src, 'r', metadata_encoding=encoding) as zf:
            zf.printdir()

    elif args.extract is not None:
        src, curdir = args.extract
        with ZipFile(src, 'r', metadata_encoding=encoding) as zf:
            zf.extractall(curdir)

    elif args.create is not None:
        if encoding:
            print("Non-conforming encodings not supported with -c.",
                  file=sys.stderr)
            sys.exit(1)

        zip_name = args.create.pop(0)
        files = args.create

        def addToZip(zf, path, zippath):
            if os.path.isfile(path):
                zf.write(path, zippath, ZIP_DEFLATED)
            elif os.path.isdir(path):
                if zippath:
                    zf.write(path, zippath)
                for nm in sorted(os.listdir(path)):
                    addToZip(zf,
                             os.path.join(path, nm), os.path.join(zippath, nm))
            # else: ignore

        with ZipFile(zip_name, 'w') as zf:
            for path in files:
                zippath = os.path.basename(path)
                if not zippath:
                    zippath = os.path.basename(os.path.dirname(path))
                if zippath in ('', os.curdir, os.pardir):
                    zippath = ''
                addToZip(zf, path, zippath)
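
# Illustrative sketch (editor's addition): the command-line interface defined
# by main() above can be driven via ``python -m zipfile``; 'archive.zip',
# 'src/' and 'README.md' are assumed paths.
#
#   python -m zipfile -c archive.zip src/ README.md     # create
#   python -m zipfile -l archive.zip                    # list
#   python -m zipfile -t archive.zip                    # test CRCs
#   python -m zipfile -e archive.zip out/               # extract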


from ._path import (  # noqa: E402
    Path,

    # used privately for tests
    CompleteDirs,  # noqa: F401
)