tarfile.py 90 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253125412551256125712581259126012611262126312641265126612671268126912701271127212731274127512761277127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502150315041505150615071508150915101511151215131514151515161517151815191520152115221523152415251526152715281529153015311532153315341535153615371538153915401541154215431544154515461547154815491550155115521553155415551556155715581559156015611562156315641565156615671568156915701571157215731574157515761577157815791580158115821583158415851586158715881589159015911592159315941595159615971598159916001601160216031604160516061607160816091610161116121613161416151616161716181619162016211622162316241625162616271628162916301631163216331634163516361637163816391640164116421643164416451646164716481649165016511652165316541655165616571658165916601661166216631664166516661667166816691670167116721673167416751676167716781679168016811682168316841685168616871688168916901691169216931694169516961697169816991700170117021703170417051706170717081709171017111712171317141715171617171718171917201721172217231724172517261727172817291730173117321733173417351736173717381739174017411742174317441745174617471748174917501751175217531754175517561757175817591760176117621763176417651766176717681769177017711772177317741775177617771778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387238823892390239123922393239423952396239723982399240024012402240324042405240624072408240924102411241224132414241524162417241824192420242124222423242424252426242724282429243024312432243324342435243624372438243924402441244224432444244524462447244824492450245124522453245424552456245724582459246024612462246324642465246624672468246924702471247224732474247524762477247824792480248124822483248424852486248724882489249024912492249324942495249624972498249925002501250225032504250525062507250825092510251125122513251425152516251725182519252025212522252325242525252625272528252925302531253225332534253525362537
  1. #!/usr/bin/env python3
  2. #-------------------------------------------------------------------
  3. # tarfile.py
  4. #-------------------------------------------------------------------
  5. # Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
  6. # All rights reserved.
  7. #
  8. # Permission is hereby granted, free of charge, to any person
  9. # obtaining a copy of this software and associated documentation
  10. # files (the "Software"), to deal in the Software without
  11. # restriction, including without limitation the rights to use,
  12. # copy, modify, merge, publish, distribute, sublicense, and/or sell
  13. # copies of the Software, and to permit persons to whom the
  14. # Software is furnished to do so, subject to the following
  15. # conditions:
  16. #
  17. # The above copyright notice and this permission notice shall be
  18. # included in all copies or substantial portions of the Software.
  19. #
  20. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
  21. # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
  22. # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
  23. # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
  24. # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
  25. # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
  26. # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  27. # OTHER DEALINGS IN THE SOFTWARE.
  28. #
  29. """Read from and write to tar format archives.
  30. """
  31. version = "0.9.0"
  32. __author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)"
  33. __credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
  34. #---------
  35. # Imports
  36. #---------
  37. from builtins import open as bltn_open
  38. import sys
  39. import os
  40. import io
  41. import shutil
  42. import stat
  43. import time
  44. import struct
  45. import copy
  46. import re
  47. try:
  48. import pwd
  49. except ImportError:
  50. pwd = None
  51. try:
  52. import grp
  53. except ImportError:
  54. grp = None
  55. # os.symlink on Windows prior to 6.0 raises NotImplementedError
  56. symlink_exception = (AttributeError, NotImplementedError)
  57. try:
  58. # OSError (winerror=1314) will be raised if the caller does not hold the
  59. # SeCreateSymbolicLinkPrivilege privilege
  60. symlink_exception += (OSError,)
  61. except NameError:
  62. pass
  63. # from tarfile import *
  64. __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
  65. "CompressionError", "StreamError", "ExtractError", "HeaderError",
  66. "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
  67. "DEFAULT_FORMAT", "open"]
  68. #---------------------------------------------------------
  69. # tar constants
  70. #---------------------------------------------------------
  71. NUL = b"\0" # the null character
  72. BLOCKSIZE = 512 # length of processing blocks
  73. RECORDSIZE = BLOCKSIZE * 20 # length of records
  74. GNU_MAGIC = b"ustar \0" # magic gnu tar string
  75. POSIX_MAGIC = b"ustar\x0000" # magic posix tar string
  76. LENGTH_NAME = 100 # maximum length of a filename
  77. LENGTH_LINK = 100 # maximum length of a linkname
  78. LENGTH_PREFIX = 155 # maximum length of the prefix field
  79. REGTYPE = b"0" # regular file
  80. AREGTYPE = b"\0" # regular file
  81. LNKTYPE = b"1" # link (inside tarfile)
  82. SYMTYPE = b"2" # symbolic link
  83. CHRTYPE = b"3" # character special device
  84. BLKTYPE = b"4" # block special device
  85. DIRTYPE = b"5" # directory
  86. FIFOTYPE = b"6" # fifo special device
  87. CONTTYPE = b"7" # contiguous file
  88. GNUTYPE_LONGNAME = b"L" # GNU tar longname
  89. GNUTYPE_LONGLINK = b"K" # GNU tar longlink
  90. GNUTYPE_SPARSE = b"S" # GNU tar sparse file
  91. XHDTYPE = b"x" # POSIX.1-2001 extended header
  92. XGLTYPE = b"g" # POSIX.1-2001 global header
  93. SOLARIS_XHDTYPE = b"X" # Solaris extended header
  94. USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format
  95. GNU_FORMAT = 1 # GNU tar format
  96. PAX_FORMAT = 2 # POSIX.1-2001 (pax) format
  97. DEFAULT_FORMAT = GNU_FORMAT
  98. #---------------------------------------------------------
  99. # tarfile constants
  100. #---------------------------------------------------------
  101. # File types that tarfile supports:
  102. SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
  103. SYMTYPE, DIRTYPE, FIFOTYPE,
  104. CONTTYPE, CHRTYPE, BLKTYPE,
  105. GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
  106. GNUTYPE_SPARSE)
  107. # File types that will be treated as a regular file.
  108. REGULAR_TYPES = (REGTYPE, AREGTYPE,
  109. CONTTYPE, GNUTYPE_SPARSE)
  110. # File types that are part of the GNU tar format.
  111. GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
  112. GNUTYPE_SPARSE)
  113. # Fields from a pax header that override a TarInfo attribute.
  114. PAX_FIELDS = ("path", "linkpath", "size", "mtime",
  115. "uid", "gid", "uname", "gname")
  116. # Fields from a pax header that are affected by hdrcharset.
  117. PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}
  118. # Fields in a pax header that are numbers, all other fields
  119. # are treated as strings.
  120. PAX_NUMBER_FIELDS = {
  121. "atime": float,
  122. "ctime": float,
  123. "mtime": float,
  124. "uid": int,
  125. "gid": int,
  126. "size": int
  127. }
  128. #---------------------------------------------------------
  129. # initialization
  130. #---------------------------------------------------------
  131. if os.name == "nt":
  132. ENCODING = "utf-8"
  133. else:
  134. ENCODING = sys.getfilesystemencoding()
  135. #---------------------------------------------------------
  136. # Some useful functions
  137. #---------------------------------------------------------
  138. def stn(s, length, encoding, errors):
  139. """Convert a string to a null-terminated bytes object.
  140. """
  141. s = s.encode(encoding, errors)
  142. return s[:length] + (length - len(s)) * NUL
  143. def nts(s, encoding, errors):
  144. """Convert a null-terminated bytes object to a string.
  145. """
  146. p = s.find(b"\0")
  147. if p != -1:
  148. s = s[:p]
  149. return s.decode(encoding, errors)
  150. def nti(s):
  151. """Convert a number field to a python number.
  152. """
  153. # There are two possible encodings for a number field, see
  154. # itn() below.
  155. if s[0] in (0o200, 0o377):
  156. n = 0
  157. for i in range(len(s) - 1):
  158. n <<= 8
  159. n += s[i + 1]
  160. if s[0] == 0o377:
  161. n = -(256 ** (len(s) - 1) - n)
  162. else:
  163. try:
  164. s = nts(s, "ascii", "strict")
  165. n = int(s.strip() or "0", 8)
  166. except ValueError:
  167. raise InvalidHeaderError("invalid header")
  168. return n
  169. def itn(n, digits=8, format=DEFAULT_FORMAT):
  170. """Convert a python number to a number field.
  171. """
  172. # POSIX 1003.1-1988 requires numbers to be encoded as a string of
  173. # octal digits followed by a null-byte, this allows values up to
  174. # (8**(digits-1))-1. GNU tar allows storing numbers greater than
  175. # that if necessary. A leading 0o200 or 0o377 byte indicate this
  176. # particular encoding, the following digits-1 bytes are a big-endian
  177. # base-256 representation. This allows values up to (256**(digits-1))-1.
  178. # A 0o200 byte indicates a positive number, a 0o377 byte a negative
  179. # number.
  180. n = int(n)
  181. if 0 <= n < 8 ** (digits - 1):
  182. s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL
  183. elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
  184. if n >= 0:
  185. s = bytearray([0o200])
  186. else:
  187. s = bytearray([0o377])
  188. n = 256 ** digits + n
  189. for i in range(digits - 1):
  190. s.insert(1, n & 0o377)
  191. n >>= 8
  192. else:
  193. raise ValueError("overflow in number field")
  194. return s
  195. def calc_chksums(buf):
  196. """Calculate the checksum for a member's header by summing up all
  197. characters except for the chksum field which is treated as if
  198. it was filled with spaces. According to the GNU tar sources,
  199. some tars (Sun and NeXT) calculate chksum with signed char,
  200. which will be different if there are chars in the buffer with
  201. the high bit set. So we calculate two checksums, unsigned and
  202. signed.
  203. """
  204. unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf))
  205. signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf))
  206. return unsigned_chksum, signed_chksum
  207. def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None):
  208. """Copy length bytes from fileobj src to fileobj dst.
  209. If length is None, copy the entire content.
  210. """
  211. bufsize = bufsize or 16 * 1024
  212. if length == 0:
  213. return
  214. if length is None:
  215. shutil.copyfileobj(src, dst, bufsize)
  216. return
  217. blocks, remainder = divmod(length, bufsize)
  218. for b in range(blocks):
  219. buf = src.read(bufsize)
  220. if len(buf) < bufsize:
  221. raise exception("unexpected end of data")
  222. dst.write(buf)
  223. if remainder != 0:
  224. buf = src.read(remainder)
  225. if len(buf) < remainder:
  226. raise exception("unexpected end of data")
  227. dst.write(buf)
  228. return
  229. def filemode(mode):
  230. """Deprecated in this location; use stat.filemode."""
  231. import warnings
  232. warnings.warn("deprecated in favor of stat.filemode",
  233. DeprecationWarning, 2)
  234. return stat.filemode(mode)
  235. def _safe_print(s):
  236. encoding = getattr(sys.stdout, 'encoding', None)
  237. if encoding is not None:
  238. s = s.encode(encoding, 'backslashreplace').decode(encoding)
  239. print(s, end=' ')
  240. class TarError(Exception):
  241. """Base exception."""
  242. pass
  243. class ExtractError(TarError):
  244. """General exception for extract errors."""
  245. pass
  246. class ReadError(TarError):
  247. """Exception for unreadable tar archives."""
  248. pass
  249. class CompressionError(TarError):
  250. """Exception for unavailable compression methods."""
  251. pass
  252. class StreamError(TarError):
  253. """Exception for unsupported operations on stream-like TarFiles."""
  254. pass
  255. class HeaderError(TarError):
  256. """Base exception for header errors."""
  257. pass
  258. class EmptyHeaderError(HeaderError):
  259. """Exception for empty headers."""
  260. pass
  261. class TruncatedHeaderError(HeaderError):
  262. """Exception for truncated headers."""
  263. pass
  264. class EOFHeaderError(HeaderError):
  265. """Exception for end of file headers."""
  266. pass
  267. class InvalidHeaderError(HeaderError):
  268. """Exception for invalid headers."""
  269. pass
  270. class SubsequentHeaderError(HeaderError):
  271. """Exception for missing and invalid extended headers."""
  272. pass
  273. #---------------------------
  274. # internal stream interface
  275. #---------------------------
  276. class _LowLevelFile:
  277. """Low-level file object. Supports reading and writing.
  278. It is used instead of a regular file object for streaming
  279. access.
  280. """
  281. def __init__(self, name, mode):
  282. mode = {
  283. "r": os.O_RDONLY,
  284. "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
  285. }[mode]
  286. if hasattr(os, "O_BINARY"):
  287. mode |= os.O_BINARY
  288. self.fd = os.open(name, mode, 0o666)
  289. def close(self):
  290. os.close(self.fd)
  291. def read(self, size):
  292. return os.read(self.fd, size)
  293. def write(self, s):
  294. os.write(self.fd, s)
  295. class _Stream:
  296. """Class that serves as an adapter between TarFile and
  297. a stream-like object. The stream-like object only
  298. needs to have a read() or write() method and is accessed
  299. blockwise. Use of gzip or bzip2 compression is possible.
  300. A stream-like object could be for example: sys.stdin,
  301. sys.stdout, a socket, a tape device etc.
  302. _Stream is intended to be used only internally.
  303. """
  304. def __init__(self, name, mode, comptype, fileobj, bufsize):
  305. """Construct a _Stream object.
  306. """
  307. self._extfileobj = True
  308. if fileobj is None:
  309. fileobj = _LowLevelFile(name, mode)
  310. self._extfileobj = False
  311. if comptype == '*':
  312. # Enable transparent compression detection for the
  313. # stream interface
  314. fileobj = _StreamProxy(fileobj)
  315. comptype = fileobj.getcomptype()
  316. self.name = name or ""
  317. self.mode = mode
  318. self.comptype = comptype
  319. self.fileobj = fileobj
  320. self.bufsize = bufsize
  321. self.buf = b""
  322. self.pos = 0
  323. self.closed = False
  324. try:
  325. if comptype == "gz":
  326. try:
  327. import zlib
  328. except ImportError:
  329. raise CompressionError("zlib module is not available")
  330. self.zlib = zlib
  331. self.crc = zlib.crc32(b"")
  332. if mode == "r":
  333. self._init_read_gz()
  334. self.exception = zlib.error
  335. else:
  336. self._init_write_gz()
  337. elif comptype == "bz2":
  338. try:
  339. import bz2
  340. except ImportError:
  341. raise CompressionError("bz2 module is not available")
  342. if mode == "r":
  343. self.dbuf = b""
  344. self.cmp = bz2.BZ2Decompressor()
  345. self.exception = OSError
  346. else:
  347. self.cmp = bz2.BZ2Compressor()
  348. elif comptype == "xz":
  349. try:
  350. import lzma
  351. except ImportError:
  352. raise CompressionError("lzma module is not available")
  353. if mode == "r":
  354. self.dbuf = b""
  355. self.cmp = lzma.LZMADecompressor()
  356. self.exception = lzma.LZMAError
  357. else:
  358. self.cmp = lzma.LZMACompressor()
  359. elif comptype != "tar":
  360. raise CompressionError("unknown compression type %r" % comptype)
  361. except:
  362. if not self._extfileobj:
  363. self.fileobj.close()
  364. self.closed = True
  365. raise
  366. def __del__(self):
  367. if hasattr(self, "closed") and not self.closed:
  368. self.close()
  369. def _init_write_gz(self):
  370. """Initialize for writing with gzip compression.
  371. """
  372. self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
  373. -self.zlib.MAX_WBITS,
  374. self.zlib.DEF_MEM_LEVEL,
  375. 0)
  376. timestamp = struct.pack("<L", int(time.time()))
  377. self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
  378. if self.name.endswith(".gz"):
  379. self.name = self.name[:-3]
  380. # RFC1952 says we must use ISO-8859-1 for the FNAME field.
  381. self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
  382. def write(self, s):
  383. """Write string s to the stream.
  384. """
  385. if self.comptype == "gz":
  386. self.crc = self.zlib.crc32(s, self.crc)
  387. self.pos += len(s)
  388. if self.comptype != "tar":
  389. s = self.cmp.compress(s)
  390. self.__write(s)
  391. def __write(self, s):
  392. """Write string s to the stream if a whole new block
  393. is ready to be written.
  394. """
  395. self.buf += s
  396. while len(self.buf) > self.bufsize:
  397. self.fileobj.write(self.buf[:self.bufsize])
  398. self.buf = self.buf[self.bufsize:]
  399. def close(self):
  400. """Close the _Stream object. No operation should be
  401. done on it afterwards.
  402. """
  403. if self.closed:
  404. return
  405. self.closed = True
  406. try:
  407. if self.mode == "w" and self.comptype != "tar":
  408. self.buf += self.cmp.flush()
  409. if self.mode == "w" and self.buf:
  410. self.fileobj.write(self.buf)
  411. self.buf = b""
  412. if self.comptype == "gz":
  413. self.fileobj.write(struct.pack("<L", self.crc))
  414. self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
  415. finally:
  416. if not self._extfileobj:
  417. self.fileobj.close()
  418. def _init_read_gz(self):
  419. """Initialize for reading a gzip compressed fileobj.
  420. """
  421. self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
  422. self.dbuf = b""
  423. # taken from gzip.GzipFile with some alterations
  424. if self.__read(2) != b"\037\213":
  425. raise ReadError("not a gzip file")
  426. if self.__read(1) != b"\010":
  427. raise CompressionError("unsupported compression method")
  428. flag = ord(self.__read(1))
  429. self.__read(6)
  430. if flag & 4:
  431. xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
  432. self.read(xlen)
  433. if flag & 8:
  434. while True:
  435. s = self.__read(1)
  436. if not s or s == NUL:
  437. break
  438. if flag & 16:
  439. while True:
  440. s = self.__read(1)
  441. if not s or s == NUL:
  442. break
  443. if flag & 2:
  444. self.__read(2)
  445. def tell(self):
  446. """Return the stream's file pointer position.
  447. """
  448. return self.pos
  449. def seek(self, pos=0):
  450. """Set the stream's file pointer to pos. Negative seeking
  451. is forbidden.
  452. """
  453. if pos - self.pos >= 0:
  454. blocks, remainder = divmod(pos - self.pos, self.bufsize)
  455. for i in range(blocks):
  456. self.read(self.bufsize)
  457. self.read(remainder)
  458. else:
  459. raise StreamError("seeking backwards is not allowed")
  460. return self.pos
  461. def read(self, size=None):
  462. """Return the next size number of bytes from the stream.
  463. If size is not defined, return all bytes of the stream
  464. up to EOF.
  465. """
  466. if size is None:
  467. t = []
  468. while True:
  469. buf = self._read(self.bufsize)
  470. if not buf:
  471. break
  472. t.append(buf)
  473. buf = b"".join(t)
  474. else:
  475. buf = self._read(size)
  476. self.pos += len(buf)
  477. return buf
  478. def _read(self, size):
  479. """Return size bytes from the stream.
  480. """
  481. if self.comptype == "tar":
  482. return self.__read(size)
  483. c = len(self.dbuf)
  484. t = [self.dbuf]
  485. while c < size:
  486. buf = self.__read(self.bufsize)
  487. if not buf:
  488. break
  489. try:
  490. buf = self.cmp.decompress(buf)
  491. except self.exception:
  492. raise ReadError("invalid compressed data")
  493. t.append(buf)
  494. c += len(buf)
  495. t = b"".join(t)
  496. self.dbuf = t[size:]
  497. return t[:size]
  498. def __read(self, size):
  499. """Return size bytes from stream. If internal buffer is empty,
  500. read another block from the stream.
  501. """
  502. c = len(self.buf)
  503. t = [self.buf]
  504. while c < size:
  505. buf = self.fileobj.read(self.bufsize)
  506. if not buf:
  507. break
  508. t.append(buf)
  509. c += len(buf)
  510. t = b"".join(t)
  511. self.buf = t[size:]
  512. return t[:size]
  513. # class _Stream
  514. class _StreamProxy(object):
  515. """Small proxy class that enables transparent compression
  516. detection for the Stream interface (mode 'r|*').
  517. """
  518. def __init__(self, fileobj):
  519. self.fileobj = fileobj
  520. self.buf = self.fileobj.read(BLOCKSIZE)
  521. def read(self, size):
  522. self.read = self.fileobj.read
  523. return self.buf
  524. def getcomptype(self):
  525. if self.buf.startswith(b"\x1f\x8b\x08"):
  526. return "gz"
  527. elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
  528. return "bz2"
  529. elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
  530. return "xz"
  531. else:
  532. return "tar"
  533. def close(self):
  534. self.fileobj.close()
  535. # class StreamProxy
  536. #------------------------
  537. # Extraction file object
  538. #------------------------
  539. class _FileInFile(object):
  540. """A thin wrapper around an existing file object that
  541. provides a part of its data as an individual file
  542. object.
  543. """
  544. def __init__(self, fileobj, offset, size, blockinfo=None):
  545. self.fileobj = fileobj
  546. self.offset = offset
  547. self.size = size
  548. self.position = 0
  549. self.name = getattr(fileobj, "name", None)
  550. self.closed = False
  551. if blockinfo is None:
  552. blockinfo = [(0, size)]
  553. # Construct a map with data and zero blocks.
  554. self.map_index = 0
  555. self.map = []
  556. lastpos = 0
  557. realpos = self.offset
  558. for offset, size in blockinfo:
  559. if offset > lastpos:
  560. self.map.append((False, lastpos, offset, None))
  561. self.map.append((True, offset, offset + size, realpos))
  562. realpos += size
  563. lastpos = offset + size
  564. if lastpos < self.size:
  565. self.map.append((False, lastpos, self.size, None))
  566. def flush(self):
  567. pass
  568. def readable(self):
  569. return True
  570. def writable(self):
  571. return False
  572. def seekable(self):
  573. return self.fileobj.seekable()
  574. def tell(self):
  575. """Return the current file position.
  576. """
  577. return self.position
  578. def seek(self, position, whence=io.SEEK_SET):
  579. """Seek to a position in the file.
  580. """
  581. if whence == io.SEEK_SET:
  582. self.position = min(max(position, 0), self.size)
  583. elif whence == io.SEEK_CUR:
  584. if position < 0:
  585. self.position = max(self.position + position, 0)
  586. else:
  587. self.position = min(self.position + position, self.size)
  588. elif whence == io.SEEK_END:
  589. self.position = max(min(self.size + position, self.size), 0)
  590. else:
  591. raise ValueError("Invalid argument")
  592. return self.position
  593. def read(self, size=None):
  594. """Read data from the file.
  595. """
  596. if size is None:
  597. size = self.size - self.position
  598. else:
  599. size = min(size, self.size - self.position)
  600. buf = b""
  601. while size > 0:
  602. while True:
  603. data, start, stop, offset = self.map[self.map_index]
  604. if start <= self.position < stop:
  605. break
  606. else:
  607. self.map_index += 1
  608. if self.map_index == len(self.map):
  609. self.map_index = 0
  610. length = min(size, stop - self.position)
  611. if data:
  612. self.fileobj.seek(offset + (self.position - start))
  613. b = self.fileobj.read(length)
  614. if len(b) != length:
  615. raise ReadError("unexpected end of data")
  616. buf += b
  617. else:
  618. buf += NUL * length
  619. size -= length
  620. self.position += length
  621. return buf
  622. def readinto(self, b):
  623. buf = self.read(len(b))
  624. b[:len(buf)] = buf
  625. return len(buf)
  626. def close(self):
  627. self.closed = True
  628. #class _FileInFile
  629. class ExFileObject(io.BufferedReader):
  630. def __init__(self, tarfile, tarinfo):
  631. fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
  632. tarinfo.size, tarinfo.sparse)
  633. super().__init__(fileobj)
  634. #class ExFileObject
  635. #------------------
  636. # Exported Classes
  637. #------------------
  638. class TarInfo(object):
  639. """Informational class which holds the details about an
  640. archive member given by a tar header block.
  641. TarInfo objects are returned by TarFile.getmember(),
  642. TarFile.getmembers() and TarFile.gettarinfo() and are
  643. usually created internally.
  644. """
  645. __slots__ = ("name", "mode", "uid", "gid", "size", "mtime",
  646. "chksum", "type", "linkname", "uname", "gname",
  647. "devmajor", "devminor",
  648. "offset", "offset_data", "pax_headers", "sparse",
  649. "tarfile", "_sparse_structs", "_link_target")
  650. def __init__(self, name=""):
  651. """Construct a TarInfo object. name is the optional name
  652. of the member.
  653. """
  654. self.name = name # member name
  655. self.mode = 0o644 # file permissions
  656. self.uid = 0 # user id
  657. self.gid = 0 # group id
  658. self.size = 0 # file size
  659. self.mtime = 0 # modification time
  660. self.chksum = 0 # header checksum
  661. self.type = REGTYPE # member type
  662. self.linkname = "" # link name
  663. self.uname = "" # user name
  664. self.gname = "" # group name
  665. self.devmajor = 0 # device major number
  666. self.devminor = 0 # device minor number
  667. self.offset = 0 # the tar header starts here
  668. self.offset_data = 0 # the file's data starts here
  669. self.sparse = None # sparse member information
  670. self.pax_headers = {} # pax header information
  671. # In pax headers the "name" and "linkname" field are called
  672. # "path" and "linkpath".
  673. @property
  674. def path(self):
  675. return self.name
  676. @path.setter
  677. def path(self, name):
  678. self.name = name
  679. @property
  680. def linkpath(self):
  681. return self.linkname
  682. @linkpath.setter
  683. def linkpath(self, linkname):
  684. self.linkname = linkname
  685. def __repr__(self):
  686. return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
  687. def get_info(self):
  688. """Return the TarInfo's attributes as a dictionary.
  689. """
  690. info = {
  691. "name": self.name,
  692. "mode": self.mode & 0o7777,
  693. "uid": self.uid,
  694. "gid": self.gid,
  695. "size": self.size,
  696. "mtime": self.mtime,
  697. "chksum": self.chksum,
  698. "type": self.type,
  699. "linkname": self.linkname,
  700. "uname": self.uname,
  701. "gname": self.gname,
  702. "devmajor": self.devmajor,
  703. "devminor": self.devminor
  704. }
  705. if info["type"] == DIRTYPE and not info["name"].endswith("/"):
  706. info["name"] += "/"
  707. return info
  708. def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
  709. """Return a tar header as a string of 512 byte blocks.
  710. """
  711. info = self.get_info()
  712. if format == USTAR_FORMAT:
  713. return self.create_ustar_header(info, encoding, errors)
  714. elif format == GNU_FORMAT:
  715. return self.create_gnu_header(info, encoding, errors)
  716. elif format == PAX_FORMAT:
  717. return self.create_pax_header(info, encoding)
  718. else:
  719. raise ValueError("invalid format")
  720. def create_ustar_header(self, info, encoding, errors):
  721. """Return the object as a ustar header block.
  722. """
  723. info["magic"] = POSIX_MAGIC
  724. if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
  725. raise ValueError("linkname is too long")
  726. if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
  727. info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors)
  728. return self._create_header(info, USTAR_FORMAT, encoding, errors)
  729. def create_gnu_header(self, info, encoding, errors):
  730. """Return the object as a GNU header block sequence.
  731. """
  732. info["magic"] = GNU_MAGIC
  733. buf = b""
  734. if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
  735. buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
  736. if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
  737. buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
  738. return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
  739. def create_pax_header(self, info, encoding):
  740. """Return the object as a ustar header block. If it cannot be
  741. represented this way, prepend a pax extended header sequence
  742. with supplement information.
  743. """
  744. info["magic"] = POSIX_MAGIC
  745. pax_headers = self.pax_headers.copy()
  746. # Test string fields for values that exceed the field length or cannot
  747. # be represented in ASCII encoding.
  748. for name, hname, length in (
  749. ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
  750. ("uname", "uname", 32), ("gname", "gname", 32)):
  751. if hname in pax_headers:
  752. # The pax header has priority.
  753. continue
  754. # Try to encode the string as ASCII.
  755. try:
  756. info[name].encode("ascii", "strict")
  757. except UnicodeEncodeError:
  758. pax_headers[hname] = info[name]
  759. continue
  760. if len(info[name]) > length:
  761. pax_headers[hname] = info[name]
  762. # Test number fields for values that exceed the field limit or values
  763. # that like to be stored as float.
  764. for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
  765. if name in pax_headers:
  766. # The pax header has priority. Avoid overflow.
  767. info[name] = 0
  768. continue
  769. val = info[name]
  770. if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
  771. pax_headers[name] = str(val)
  772. info[name] = 0
  773. # Create a pax extended header if necessary.
  774. if pax_headers:
  775. buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
  776. else:
  777. buf = b""
  778. return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
  779. @classmethod
  780. def create_pax_global_header(cls, pax_headers):
  781. """Return the object as a pax global header block sequence.
  782. """
  783. return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
  784. def _posix_split_name(self, name, encoding, errors):
  785. """Split a name longer than 100 chars into a prefix
  786. and a name part.
  787. """
  788. components = name.split("/")
  789. for i in range(1, len(components)):
  790. prefix = "/".join(components[:i])
  791. name = "/".join(components[i:])
  792. if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \
  793. len(name.encode(encoding, errors)) <= LENGTH_NAME:
  794. break
  795. else:
  796. raise ValueError("name is too long")
  797. return prefix, name
  798. @staticmethod
  799. def _create_header(info, format, encoding, errors):
  800. """Return a header block. info is a dictionary with file
  801. information, format must be one of the *_FORMAT constants.
  802. """
  803. parts = [
  804. stn(info.get("name", ""), 100, encoding, errors),
  805. itn(info.get("mode", 0) & 0o7777, 8, format),
  806. itn(info.get("uid", 0), 8, format),
  807. itn(info.get("gid", 0), 8, format),
  808. itn(info.get("size", 0), 12, format),
  809. itn(info.get("mtime", 0), 12, format),
  810. b" ", # checksum field
  811. info.get("type", REGTYPE),
  812. stn(info.get("linkname", ""), 100, encoding, errors),
  813. info.get("magic", POSIX_MAGIC),
  814. stn(info.get("uname", ""), 32, encoding, errors),
  815. stn(info.get("gname", ""), 32, encoding, errors),
  816. itn(info.get("devmajor", 0), 8, format),
  817. itn(info.get("devminor", 0), 8, format),
  818. stn(info.get("prefix", ""), 155, encoding, errors)
  819. ]
  820. buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
  821. chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
  822. buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
  823. return buf
  824. @staticmethod
  825. def _create_payload(payload):
  826. """Return the string payload filled with zero bytes
  827. up to the next 512 byte border.
  828. """
  829. blocks, remainder = divmod(len(payload), BLOCKSIZE)
  830. if remainder > 0:
  831. payload += (BLOCKSIZE - remainder) * NUL
  832. return payload
  833. @classmethod
  834. def _create_gnu_long_header(cls, name, type, encoding, errors):
  835. """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
  836. for name.
  837. """
  838. name = name.encode(encoding, errors) + NUL
  839. info = {}
  840. info["name"] = "././@LongLink"
  841. info["type"] = type
  842. info["size"] = len(name)
  843. info["magic"] = GNU_MAGIC
  844. # create extended header + name blocks.
  845. return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
  846. cls._create_payload(name)
  847. @classmethod
  848. def _create_pax_generic_header(cls, pax_headers, type, encoding):
  849. """Return a POSIX.1-2008 extended or global header sequence
  850. that contains a list of keyword, value pairs. The values
  851. must be strings.
  852. """
  853. # Check if one of the fields contains surrogate characters and thereby
  854. # forces hdrcharset=BINARY, see _proc_pax() for more information.
  855. binary = False
  856. for keyword, value in pax_headers.items():
  857. try:
  858. value.encode("utf-8", "strict")
  859. except UnicodeEncodeError:
  860. binary = True
  861. break
  862. records = b""
  863. if binary:
  864. # Put the hdrcharset field at the beginning of the header.
  865. records += b"21 hdrcharset=BINARY\n"
  866. for keyword, value in pax_headers.items():
  867. keyword = keyword.encode("utf-8")
  868. if binary:
  869. # Try to restore the original byte representation of `value'.
  870. # Needless to say, that the encoding must match the string.
  871. value = value.encode(encoding, "surrogateescape")
  872. else:
  873. value = value.encode("utf-8")
  874. l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
  875. n = p = 0
  876. while True:
  877. n = l + len(str(p))
  878. if n == p:
  879. break
  880. p = n
  881. records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
  882. # We use a hardcoded "././@PaxHeader" name like star does
  883. # instead of the one that POSIX recommends.
  884. info = {}
  885. info["name"] = "././@PaxHeader"
  886. info["type"] = type
  887. info["size"] = len(records)
  888. info["magic"] = POSIX_MAGIC
  889. # Create pax header + record blocks.
  890. return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
  891. cls._create_payload(records)
  892. @classmethod
  893. def frombuf(cls, buf, encoding, errors):
  894. """Construct a TarInfo object from a 512 byte bytes object.
  895. """
  896. if len(buf) == 0:
  897. raise EmptyHeaderError("empty header")
  898. if len(buf) != BLOCKSIZE:
  899. raise TruncatedHeaderError("truncated header")
  900. if buf.count(NUL) == BLOCKSIZE:
  901. raise EOFHeaderError("end of file header")
  902. chksum = nti(buf[148:156])
  903. if chksum not in calc_chksums(buf):
  904. raise InvalidHeaderError("bad checksum")
  905. obj = cls()
  906. obj.name = nts(buf[0:100], encoding, errors)
  907. obj.mode = nti(buf[100:108])
  908. obj.uid = nti(buf[108:116])
  909. obj.gid = nti(buf[116:124])
  910. obj.size = nti(buf[124:136])
  911. obj.mtime = nti(buf[136:148])
  912. obj.chksum = chksum
  913. obj.type = buf[156:157]
  914. obj.linkname = nts(buf[157:257], encoding, errors)
  915. obj.uname = nts(buf[265:297], encoding, errors)
  916. obj.gname = nts(buf[297:329], encoding, errors)
  917. obj.devmajor = nti(buf[329:337])
  918. obj.devminor = nti(buf[337:345])
  919. prefix = nts(buf[345:500], encoding, errors)
  920. # Old V7 tar format represents a directory as a regular
  921. # file with a trailing slash.
  922. if obj.type == AREGTYPE and obj.name.endswith("/"):
  923. obj.type = DIRTYPE
  924. # The old GNU sparse format occupies some of the unused
  925. # space in the buffer for up to 4 sparse structures.
  926. # Save them for later processing in _proc_sparse().
  927. if obj.type == GNUTYPE_SPARSE:
  928. pos = 386
  929. structs = []
  930. for i in range(4):
  931. try:
  932. offset = nti(buf[pos:pos + 12])
  933. numbytes = nti(buf[pos + 12:pos + 24])
  934. except ValueError:
  935. break
  936. structs.append((offset, numbytes))
  937. pos += 24
  938. isextended = bool(buf[482])
  939. origsize = nti(buf[483:495])
  940. obj._sparse_structs = (structs, isextended, origsize)
  941. # Remove redundant slashes from directories.
  942. if obj.isdir():
  943. obj.name = obj.name.rstrip("/")
  944. # Reconstruct a ustar longname.
  945. if prefix and obj.type not in GNU_TYPES:
  946. obj.name = prefix + "/" + obj.name
  947. return obj
  948. @classmethod
  949. def fromtarfile(cls, tarfile):
  950. """Return the next TarInfo object from TarFile object
  951. tarfile.
  952. """
  953. buf = tarfile.fileobj.read(BLOCKSIZE)
  954. obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
  955. obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
  956. return obj._proc_member(tarfile)
  957. #--------------------------------------------------------------------------
  958. # The following are methods that are called depending on the type of a
  959. # member. The entry point is _proc_member() which can be overridden in a
  960. # subclass to add custom _proc_*() methods. A _proc_*() method MUST
  961. # implement the following
  962. # operations:
  963. # 1. Set self.offset_data to the position where the data blocks begin,
  964. # if there is data that follows.
  965. # 2. Set tarfile.offset to the position where the next member's header will
  966. # begin.
  967. # 3. Return self or another valid TarInfo object.
  968. def _proc_member(self, tarfile):
  969. """Choose the right processing method depending on
  970. the type and call it.
  971. """
  972. if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
  973. return self._proc_gnulong(tarfile)
  974. elif self.type == GNUTYPE_SPARSE:
  975. return self._proc_sparse(tarfile)
  976. elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
  977. return self._proc_pax(tarfile)
  978. else:
  979. return self._proc_builtin(tarfile)
  980. def _proc_builtin(self, tarfile):
  981. """Process a builtin type or an unknown type which
  982. will be treated as a regular file.
  983. """
  984. self.offset_data = tarfile.fileobj.tell()
  985. offset = self.offset_data
  986. if self.isreg() or self.type not in SUPPORTED_TYPES:
  987. # Skip the following data blocks.
  988. offset += self._block(self.size)
  989. tarfile.offset = offset
  990. # Patch the TarInfo object with saved global
  991. # header information.
  992. self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
  993. return self
  994. def _proc_gnulong(self, tarfile):
  995. """Process the blocks that hold a GNU longname
  996. or longlink member.
  997. """
  998. buf = tarfile.fileobj.read(self._block(self.size))
  999. # Fetch the next header and process it.
  1000. try:
  1001. next = self.fromtarfile(tarfile)
  1002. except HeaderError:
  1003. raise SubsequentHeaderError("missing or bad subsequent header")
  1004. # Patch the TarInfo object from the next header with
  1005. # the longname information.
  1006. next.offset = self.offset
  1007. if self.type == GNUTYPE_LONGNAME:
  1008. next.name = nts(buf, tarfile.encoding, tarfile.errors)
  1009. elif self.type == GNUTYPE_LONGLINK:
  1010. next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
  1011. return next
  1012. def _proc_sparse(self, tarfile):
  1013. """Process a GNU sparse header plus extra headers.
  1014. """
  1015. # We already collected some sparse structures in frombuf().
  1016. structs, isextended, origsize = self._sparse_structs
  1017. del self._sparse_structs
  1018. # Collect sparse structures from extended header blocks.
  1019. while isextended:
  1020. buf = tarfile.fileobj.read(BLOCKSIZE)
  1021. pos = 0
  1022. for i in range(21):
  1023. try:
  1024. offset = nti(buf[pos:pos + 12])
  1025. numbytes = nti(buf[pos + 12:pos + 24])
  1026. except ValueError:
  1027. break
  1028. if offset and numbytes:
  1029. structs.append((offset, numbytes))
  1030. pos += 24
  1031. isextended = bool(buf[504])
  1032. self.sparse = structs
  1033. self.offset_data = tarfile.fileobj.tell()
  1034. tarfile.offset = self.offset_data + self._block(self.size)
  1035. self.size = origsize
  1036. return self
  1037. def _proc_pax(self, tarfile):
  1038. """Process an extended or global header as described in
  1039. POSIX.1-2008.
  1040. """
  1041. # Read the header information.
  1042. buf = tarfile.fileobj.read(self._block(self.size))
  1043. # A pax header stores supplemental information for either
  1044. # the following file (extended) or all following files
  1045. # (global).
  1046. if self.type == XGLTYPE:
  1047. pax_headers = tarfile.pax_headers
  1048. else:
  1049. pax_headers = tarfile.pax_headers.copy()
  1050. # Check if the pax header contains a hdrcharset field. This tells us
  1051. # the encoding of the path, linkpath, uname and gname fields. Normally,
  1052. # these fields are UTF-8 encoded but since POSIX.1-2008 tar
  1053. # implementations are allowed to store them as raw binary strings if
  1054. # the translation to UTF-8 fails.
  1055. match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
  1056. if match is not None:
  1057. pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
  1058. # For the time being, we don't care about anything other than "BINARY".
  1059. # The only other value that is currently allowed by the standard is
  1060. # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
  1061. hdrcharset = pax_headers.get("hdrcharset")
  1062. if hdrcharset == "BINARY":
  1063. encoding = tarfile.encoding
  1064. else:
  1065. encoding = "utf-8"
  1066. # Parse pax header information. A record looks like that:
  1067. # "%d %s=%s\n" % (length, keyword, value). length is the size
  1068. # of the complete record including the length field itself and
  1069. # the newline. keyword and value are both UTF-8 encoded strings.
  1070. regex = re.compile(br"(\d+) ([^=]+)=")
  1071. pos = 0
  1072. while True:
  1073. match = regex.match(buf, pos)
  1074. if not match:
  1075. break
  1076. length, keyword = match.groups()
  1077. length = int(length)
  1078. value = buf[match.end(2) + 1:match.start(1) + length - 1]
  1079. # Normally, we could just use "utf-8" as the encoding and "strict"
  1080. # as the error handler, but we better not take the risk. For
  1081. # example, GNU tar <= 1.23 is known to store filenames it cannot
  1082. # translate to UTF-8 as raw strings (unfortunately without a
  1083. # hdrcharset=BINARY header).
  1084. # We first try the strict standard encoding, and if that fails we
  1085. # fall back on the user's encoding and error handler.
  1086. keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
  1087. tarfile.errors)
  1088. if keyword in PAX_NAME_FIELDS:
  1089. value = self._decode_pax_field(value, encoding, tarfile.encoding,
  1090. tarfile.errors)
  1091. else:
  1092. value = self._decode_pax_field(value, "utf-8", "utf-8",
  1093. tarfile.errors)
  1094. pax_headers[keyword] = value
  1095. pos += length
  1096. # Fetch the next header.
  1097. try:
  1098. next = self.fromtarfile(tarfile)
  1099. except HeaderError:
  1100. raise SubsequentHeaderError("missing or bad subsequent header")
  1101. # Process GNU sparse information.
  1102. if "GNU.sparse.map" in pax_headers:
  1103. # GNU extended sparse format version 0.1.
  1104. self._proc_gnusparse_01(next, pax_headers)
  1105. elif "GNU.sparse.size" in pax_headers:
  1106. # GNU extended sparse format version 0.0.
  1107. self._proc_gnusparse_00(next, pax_headers, buf)
  1108. elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
  1109. # GNU extended sparse format version 1.0.
  1110. self._proc_gnusparse_10(next, pax_headers, tarfile)
  1111. if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
  1112. # Patch the TarInfo object with the extended header info.
  1113. next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
  1114. next.offset = self.offset
  1115. if "size" in pax_headers:
  1116. # If the extended header replaces the size field,
  1117. # we need to recalculate the offset where the next
  1118. # header starts.
  1119. offset = next.offset_data
  1120. if next.isreg() or next.type not in SUPPORTED_TYPES:
  1121. offset += next._block(next.size)
  1122. tarfile.offset = offset
  1123. return next
  1124. def _proc_gnusparse_00(self, next, pax_headers, buf):
  1125. """Process a GNU tar extended sparse header, version 0.0.
  1126. """
  1127. offsets = []
  1128. for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
  1129. offsets.append(int(match.group(1)))
  1130. numbytes = []
  1131. for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
  1132. numbytes.append(int(match.group(1)))
  1133. next.sparse = list(zip(offsets, numbytes))
  1134. def _proc_gnusparse_01(self, next, pax_headers):
  1135. """Process a GNU tar extended sparse header, version 0.1.
  1136. """
  1137. sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
  1138. next.sparse = list(zip(sparse[::2], sparse[1::2]))
  1139. def _proc_gnusparse_10(self, next, pax_headers, tarfile):
  1140. """Process a GNU tar extended sparse header, version 1.0.
  1141. """
  1142. fields = None
  1143. sparse = []
  1144. buf = tarfile.fileobj.read(BLOCKSIZE)
  1145. fields, buf = buf.split(b"\n", 1)
  1146. fields = int(fields)
  1147. while len(sparse) < fields * 2:
  1148. if b"\n" not in buf:
  1149. buf += tarfile.fileobj.read(BLOCKSIZE)
  1150. number, buf = buf.split(b"\n", 1)
  1151. sparse.append(int(number))
  1152. next.offset_data = tarfile.fileobj.tell()
  1153. next.sparse = list(zip(sparse[::2], sparse[1::2]))
  1154. def _apply_pax_info(self, pax_headers, encoding, errors):
  1155. """Replace fields with supplemental information from a previous
  1156. pax extended or global header.
  1157. """
  1158. for keyword, value in pax_headers.items():
  1159. if keyword == "GNU.sparse.name":
  1160. setattr(self, "path", value)
  1161. elif keyword == "GNU.sparse.size":
  1162. setattr(self, "size", int(value))
  1163. elif keyword == "GNU.sparse.realsize":
  1164. setattr(self, "size", int(value))
  1165. elif keyword in PAX_FIELDS:
  1166. if keyword in PAX_NUMBER_FIELDS:
  1167. try:
  1168. value = PAX_NUMBER_FIELDS[keyword](value)
  1169. except ValueError:
  1170. value = 0
  1171. if keyword == "path":
  1172. value = value.rstrip("/")
  1173. setattr(self, keyword, value)
  1174. self.pax_headers = pax_headers.copy()
  1175. def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
  1176. """Decode a single field from a pax record.
  1177. """
  1178. try:
  1179. return value.decode(encoding, "strict")
  1180. except UnicodeDecodeError:
  1181. return value.decode(fallback_encoding, fallback_errors)
  1182. def _block(self, count):
  1183. """Round up a byte count by BLOCKSIZE and return it,
  1184. e.g. _block(834) => 1024.
  1185. """
  1186. blocks, remainder = divmod(count, BLOCKSIZE)
  1187. if remainder:
  1188. blocks += 1
  1189. return blocks * BLOCKSIZE
  1190. def isreg(self):
  1191. return self.type in REGULAR_TYPES
  1192. def isfile(self):
  1193. return self.isreg()
  1194. def isdir(self):
  1195. return self.type == DIRTYPE
  1196. def issym(self):
  1197. return self.type == SYMTYPE
  1198. def islnk(self):
  1199. return self.type == LNKTYPE
  1200. def ischr(self):
  1201. return self.type == CHRTYPE
  1202. def isblk(self):
  1203. return self.type == BLKTYPE
  1204. def isfifo(self):
  1205. return self.type == FIFOTYPE
  1206. def issparse(self):
  1207. return self.sparse is not None
  1208. def isdev(self):
  1209. return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
  1210. # class TarInfo
  1211. class TarFile(object):
  1212. """The TarFile Class provides an interface to tar archives.
  1213. """
  1214. debug = 0 # May be set from 0 (no msgs) to 3 (all msgs)
  1215. dereference = False # If true, add content of linked file to the
  1216. # tar file, else the link.
  1217. ignore_zeros = False # If true, skips empty or invalid blocks and
  1218. # continues processing.
  1219. errorlevel = 1 # If 0, fatal errors only appear in debug
  1220. # messages (if debug >= 0). If > 0, errors
  1221. # are passed to the caller as exceptions.
  1222. format = DEFAULT_FORMAT # The format to use when creating an archive.
  1223. encoding = ENCODING # Encoding for 8-bit character strings.
  1224. errors = None # Error handler for unicode conversion.
  1225. tarinfo = TarInfo # The default TarInfo class to use.
  1226. fileobject = ExFileObject # The file-object for extractfile().
  1227. def __init__(self, name=None, mode="r", fileobj=None, format=None,
  1228. tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
  1229. errors="surrogateescape", pax_headers=None, debug=None,
  1230. errorlevel=None, copybufsize=None):
  1231. """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
  1232. read from an existing archive, 'a' to append data to an existing
  1233. file or 'w' to create a new file overwriting an existing one. `mode'
  1234. defaults to 'r'.
  1235. If `fileobj' is given, it is used for reading or writing data. If it
  1236. can be determined, `mode' is overridden by `fileobj's mode.
  1237. `fileobj' is not closed, when TarFile is closed.
  1238. """
  1239. modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
  1240. if mode not in modes:
  1241. raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
  1242. self.mode = mode
  1243. self._mode = modes[mode]
  1244. if not fileobj:
  1245. if self.mode == "a" and not os.path.exists(name):
  1246. # Create nonexistent files in append mode.
  1247. self.mode = "w"
  1248. self._mode = "wb"
  1249. fileobj = bltn_open(name, self._mode)
  1250. self._extfileobj = False
  1251. else:
  1252. if (name is None and hasattr(fileobj, "name") and
  1253. isinstance(fileobj.name, (str, bytes))):
  1254. name = fileobj.name
  1255. if hasattr(fileobj, "mode"):
  1256. self._mode = fileobj.mode
  1257. self._extfileobj = True
  1258. self.name = os.path.abspath(name) if name else None
  1259. self.fileobj = fileobj
  1260. # Init attributes.
  1261. if format is not None:
  1262. self.format = format
  1263. if tarinfo is not None:
  1264. self.tarinfo = tarinfo
  1265. if dereference is not None:
  1266. self.dereference = dereference
  1267. if ignore_zeros is not None:
  1268. self.ignore_zeros = ignore_zeros
  1269. if encoding is not None:
  1270. self.encoding = encoding
  1271. self.errors = errors
  1272. if pax_headers is not None and self.format == PAX_FORMAT:
  1273. self.pax_headers = pax_headers
  1274. else:
  1275. self.pax_headers = {}
  1276. if debug is not None:
  1277. self.debug = debug
  1278. if errorlevel is not None:
  1279. self.errorlevel = errorlevel
  1280. # Init datastructures.
  1281. self.copybufsize = copybufsize
  1282. self.closed = False
  1283. self.members = [] # list of members as TarInfo objects
  1284. self._loaded = False # flag if all members have been read
  1285. self.offset = self.fileobj.tell()
  1286. # current position in the archive file
  1287. self.inodes = {} # dictionary caching the inodes of
  1288. # archive members already added
  1289. try:
  1290. if self.mode == "r":
  1291. self.firstmember = None
  1292. self.firstmember = self.next()
  1293. if self.mode == "a":
  1294. # Move to the end of the archive,
  1295. # before the first empty block.
  1296. while True:
  1297. self.fileobj.seek(self.offset)
  1298. try:
  1299. tarinfo = self.tarinfo.fromtarfile(self)
  1300. self.members.append(tarinfo)
  1301. except EOFHeaderError:
  1302. self.fileobj.seek(self.offset)
  1303. break
  1304. except HeaderError as e:
  1305. raise ReadError(str(e))
  1306. if self.mode in ("a", "w", "x"):
  1307. self._loaded = True
  1308. if self.pax_headers:
  1309. buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
  1310. self.fileobj.write(buf)
  1311. self.offset += len(buf)
  1312. except:
  1313. if not self._extfileobj:
  1314. self.fileobj.close()
  1315. self.closed = True
  1316. raise
  1317. #--------------------------------------------------------------------------
  1318. # Below are the classmethods which act as alternate constructors to the
  1319. # TarFile class. The open() method is the only one that is needed for
  1320. # public use; it is the "super"-constructor and is able to select an
  1321. # adequate "sub"-constructor for a particular compression using the mapping
  1322. # from OPEN_METH.
  1323. #
  1324. # This concept allows one to subclass TarFile without losing the comfort of
  1325. # the super-constructor. A sub-constructor is registered and made available
  1326. # by adding it to the mapping in OPEN_METH.
  1327. @classmethod
  1328. def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
  1329. """Open a tar archive for reading, writing or appending. Return
  1330. an appropriate TarFile class.
  1331. mode:
  1332. 'r' or 'r:*' open for reading with transparent compression
  1333. 'r:' open for reading exclusively uncompressed
  1334. 'r:gz' open for reading with gzip compression
  1335. 'r:bz2' open for reading with bzip2 compression
  1336. 'r:xz' open for reading with lzma compression
  1337. 'a' or 'a:' open for appending, creating the file if necessary
  1338. 'w' or 'w:' open for writing without compression
  1339. 'w:gz' open for writing with gzip compression
  1340. 'w:bz2' open for writing with bzip2 compression
  1341. 'w:xz' open for writing with lzma compression
  1342. 'x' or 'x:' create a tarfile exclusively without compression, raise
  1343. an exception if the file is already created
  1344. 'x:gz' create a gzip compressed tarfile, raise an exception
  1345. if the file is already created
  1346. 'x:bz2' create a bzip2 compressed tarfile, raise an exception
  1347. if the file is already created
  1348. 'x:xz' create an lzma compressed tarfile, raise an exception
  1349. if the file is already created
  1350. 'r|*' open a stream of tar blocks with transparent compression
  1351. 'r|' open an uncompressed stream of tar blocks for reading
  1352. 'r|gz' open a gzip compressed stream of tar blocks
  1353. 'r|bz2' open a bzip2 compressed stream of tar blocks
  1354. 'r|xz' open an lzma compressed stream of tar blocks
  1355. 'w|' open an uncompressed stream for writing
  1356. 'w|gz' open a gzip compressed stream for writing
  1357. 'w|bz2' open a bzip2 compressed stream for writing
  1358. 'w|xz' open an lzma compressed stream for writing
  1359. """
  1360. if not name and not fileobj:
  1361. raise ValueError("nothing to open")
  1362. if mode in ("r", "r:*"):
  1363. # Find out which *open() is appropriate for opening the file.
  1364. def not_compressed(comptype):
  1365. return cls.OPEN_METH[comptype] == 'taropen'
  1366. for comptype in sorted(cls.OPEN_METH, key=not_compressed):
  1367. func = getattr(cls, cls.OPEN_METH[comptype])
  1368. if fileobj is not None:
  1369. saved_pos = fileobj.tell()
  1370. try:
  1371. return func(name, "r", fileobj, **kwargs)
  1372. except (ReadError, CompressionError):
  1373. if fileobj is not None:
  1374. fileobj.seek(saved_pos)
  1375. continue
  1376. raise ReadError("file could not be opened successfully")
  1377. elif ":" in mode:
  1378. filemode, comptype = mode.split(":", 1)
  1379. filemode = filemode or "r"
  1380. comptype = comptype or "tar"
  1381. # Select the *open() function according to
  1382. # given compression.
  1383. if comptype in cls.OPEN_METH:
  1384. func = getattr(cls, cls.OPEN_METH[comptype])
  1385. else:
  1386. raise CompressionError("unknown compression type %r" % comptype)
  1387. return func(name, filemode, fileobj, **kwargs)
  1388. elif "|" in mode:
  1389. filemode, comptype = mode.split("|", 1)
  1390. filemode = filemode or "r"
  1391. comptype = comptype or "tar"
  1392. if filemode not in ("r", "w"):
  1393. raise ValueError("mode must be 'r' or 'w'")
  1394. stream = _Stream(name, filemode, comptype, fileobj, bufsize)
  1395. try:
  1396. t = cls(name, filemode, stream, **kwargs)
  1397. except:
  1398. stream.close()
  1399. raise
  1400. t._extfileobj = False
  1401. return t
  1402. elif mode in ("a", "w", "x"):
  1403. return cls.taropen(name, mode, fileobj, **kwargs)
  1404. raise ValueError("undiscernible mode")
  1405. @classmethod
  1406. def taropen(cls, name, mode="r", fileobj=None, **kwargs):
  1407. """Open uncompressed tar archive name for reading or writing.
  1408. """
  1409. if mode not in ("r", "a", "w", "x"):
  1410. raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
  1411. return cls(name, mode, fileobj, **kwargs)
  1412. @classmethod
  1413. def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
  1414. """Open gzip compressed tar archive name for reading or writing.
  1415. Appending is not allowed.
  1416. """
  1417. if mode not in ("r", "w", "x"):
  1418. raise ValueError("mode must be 'r', 'w' or 'x'")
  1419. try:
  1420. from gzip import GzipFile
  1421. except ImportError:
  1422. raise CompressionError("gzip module is not available")
  1423. try:
  1424. fileobj = GzipFile(name, mode + "b", compresslevel, fileobj)
  1425. except OSError:
  1426. if fileobj is not None and mode == 'r':
  1427. raise ReadError("not a gzip file")
  1428. raise
  1429. try:
  1430. t = cls.taropen(name, mode, fileobj, **kwargs)
  1431. except OSError:
  1432. fileobj.close()
  1433. if mode == 'r':
  1434. raise ReadError("not a gzip file")
  1435. raise
  1436. except:
  1437. fileobj.close()
  1438. raise
  1439. t._extfileobj = False
  1440. return t
  1441. @classmethod
  1442. def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
  1443. """Open bzip2 compressed tar archive name for reading or writing.
  1444. Appending is not allowed.
  1445. """
  1446. if mode not in ("r", "w", "x"):
  1447. raise ValueError("mode must be 'r', 'w' or 'x'")
  1448. try:
  1449. from bz2 import BZ2File
  1450. except ImportError:
  1451. raise CompressionError("bz2 module is not available")
  1452. fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel)
  1453. try:
  1454. t = cls.taropen(name, mode, fileobj, **kwargs)
  1455. except (OSError, EOFError):
  1456. fileobj.close()
  1457. if mode == 'r':
  1458. raise ReadError("not a bzip2 file")
  1459. raise
  1460. except:
  1461. fileobj.close()
  1462. raise
  1463. t._extfileobj = False
  1464. return t
  1465. @classmethod
  1466. def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
  1467. """Open lzma compressed tar archive name for reading or writing.
  1468. Appending is not allowed.
  1469. """
  1470. if mode not in ("r", "w", "x"):
  1471. raise ValueError("mode must be 'r', 'w' or 'x'")
  1472. try:
  1473. from lzma import LZMAFile, LZMAError
  1474. except ImportError:
  1475. raise CompressionError("lzma module is not available")
  1476. fileobj = LZMAFile(fileobj or name, mode, preset=preset)
  1477. try:
  1478. t = cls.taropen(name, mode, fileobj, **kwargs)
  1479. except (LZMAError, EOFError):
  1480. fileobj.close()
  1481. if mode == 'r':
  1482. raise ReadError("not an lzma file")
  1483. raise
  1484. except:
  1485. fileobj.close()
  1486. raise
  1487. t._extfileobj = False
  1488. return t
  1489. # All *open() methods are registered here.
  1490. OPEN_METH = {
  1491. "tar": "taropen", # uncompressed tar
  1492. "gz": "gzopen", # gzip compressed tar
  1493. "bz2": "bz2open", # bzip2 compressed tar
  1494. "xz": "xzopen" # lzma compressed tar
  1495. }
  1496. #--------------------------------------------------------------------------
  1497. # The public methods which TarFile provides:
  1498. def close(self):
  1499. """Close the TarFile. In write-mode, two finishing zero blocks are
  1500. appended to the archive.
  1501. """
  1502. if self.closed:
  1503. return
  1504. self.closed = True
  1505. try:
  1506. if self.mode in ("a", "w", "x"):
  1507. self.fileobj.write(NUL * (BLOCKSIZE * 2))
  1508. self.offset += (BLOCKSIZE * 2)
  1509. # fill up the end with zero-blocks
  1510. # (like option -b20 for tar does)
  1511. blocks, remainder = divmod(self.offset, RECORDSIZE)
  1512. if remainder > 0:
  1513. self.fileobj.write(NUL * (RECORDSIZE - remainder))
  1514. finally:
  1515. if not self._extfileobj:
  1516. self.fileobj.close()
  1517. def getmember(self, name):
  1518. """Return a TarInfo object for member `name'. If `name' can not be
  1519. found in the archive, KeyError is raised. If a member occurs more
  1520. than once in the archive, its last occurrence is assumed to be the
  1521. most up-to-date version.
  1522. """
  1523. tarinfo = self._getmember(name)
  1524. if tarinfo is None:
  1525. raise KeyError("filename %r not found" % name)
  1526. return tarinfo
  1527. def getmembers(self):
  1528. """Return the members of the archive as a list of TarInfo objects. The
  1529. list has the same order as the members in the archive.
  1530. """
  1531. self._check()
  1532. if not self._loaded: # if we want to obtain a list of
  1533. self._load() # all members, we first have to
  1534. # scan the whole archive.
  1535. return self.members
  1536. def getnames(self):
  1537. """Return the members of the archive as a list of their names. It has
  1538. the same order as the list returned by getmembers().
  1539. """
  1540. return [tarinfo.name for tarinfo in self.getmembers()]
  1541. def gettarinfo(self, name=None, arcname=None, fileobj=None):
  1542. """Create a TarInfo object from the result of os.stat or equivalent
  1543. on an existing file. The file is either named by `name', or
  1544. specified as a file object `fileobj' with a file descriptor. If
  1545. given, `arcname' specifies an alternative name for the file in the
  1546. archive, otherwise, the name is taken from the 'name' attribute of
  1547. 'fileobj', or the 'name' argument. The name should be a text
  1548. string.
  1549. """
  1550. self._check("awx")
  1551. # When fileobj is given, replace name by
  1552. # fileobj's real name.
  1553. if fileobj is not None:
  1554. name = fileobj.name
  1555. # Building the name of the member in the archive.
  1556. # Backward slashes are converted to forward slashes,
  1557. # Absolute paths are turned to relative paths.
  1558. if arcname is None:
  1559. arcname = name
  1560. drv, arcname = os.path.splitdrive(arcname)
  1561. arcname = arcname.replace(os.sep, "/")
  1562. arcname = arcname.lstrip("/")
  1563. # Now, fill the TarInfo object with
  1564. # information specific for the file.
  1565. tarinfo = self.tarinfo()
  1566. tarinfo.tarfile = self # Not needed
  1567. # Use os.stat or os.lstat, depending on platform
  1568. # and if symlinks shall be resolved.
  1569. if fileobj is None:
  1570. if hasattr(os, "lstat") and not self.dereference:
  1571. statres = os.lstat(name)
  1572. else:
  1573. statres = os.stat(name)
  1574. else:
  1575. statres = os.fstat(fileobj.fileno())
  1576. linkname = ""
  1577. stmd = statres.st_mode
  1578. if stat.S_ISREG(stmd):
  1579. inode = (statres.st_ino, statres.st_dev)
  1580. if not self.dereference and statres.st_nlink > 1 and \
  1581. inode in self.inodes and arcname != self.inodes[inode]:
  1582. # Is it a hardlink to an already
  1583. # archived file?
  1584. type = LNKTYPE
  1585. linkname = self.inodes[inode]
  1586. else:
  1587. # The inode is added only if its valid.
  1588. # For win32 it is always 0.
  1589. type = REGTYPE
  1590. if inode[0]:
  1591. self.inodes[inode] = arcname
  1592. elif stat.S_ISDIR(stmd):
  1593. type = DIRTYPE
  1594. elif stat.S_ISFIFO(stmd):
  1595. type = FIFOTYPE
  1596. elif stat.S_ISLNK(stmd):
  1597. type = SYMTYPE
  1598. linkname = os.readlink(name)
  1599. elif stat.S_ISCHR(stmd):
  1600. type = CHRTYPE
  1601. elif stat.S_ISBLK(stmd):
  1602. type = BLKTYPE
  1603. else:
  1604. return None
  1605. # Fill the TarInfo object with all
  1606. # information we can get.
  1607. tarinfo.name = arcname
  1608. tarinfo.mode = stmd
  1609. tarinfo.uid = statres.st_uid
  1610. tarinfo.gid = statres.st_gid
  1611. if type == REGTYPE:
  1612. tarinfo.size = statres.st_size
  1613. else:
  1614. tarinfo.size = 0
  1615. tarinfo.mtime = statres.st_mtime
  1616. tarinfo.type = type
  1617. tarinfo.linkname = linkname
  1618. if pwd:
  1619. try:
  1620. tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
  1621. except KeyError:
  1622. pass
  1623. if grp:
  1624. try:
  1625. tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
  1626. except KeyError:
  1627. pass
  1628. if type in (CHRTYPE, BLKTYPE):
  1629. if hasattr(os, "major") and hasattr(os, "minor"):
  1630. tarinfo.devmajor = os.major(statres.st_rdev)
  1631. tarinfo.devminor = os.minor(statres.st_rdev)
  1632. return tarinfo
  1633. def list(self, verbose=True, *, members=None):
  1634. """Print a table of contents to sys.stdout. If `verbose' is False, only
  1635. the names of the members are printed. If it is True, an `ls -l'-like
  1636. output is produced. `members' is optional and must be a subset of the
  1637. list returned by getmembers().
  1638. """
  1639. self._check()
  1640. if members is None:
  1641. members = self
  1642. for tarinfo in members:
  1643. if verbose:
  1644. _safe_print(stat.filemode(tarinfo.mode))
  1645. _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
  1646. tarinfo.gname or tarinfo.gid))
  1647. if tarinfo.ischr() or tarinfo.isblk():
  1648. _safe_print("%10s" %
  1649. ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
  1650. else:
  1651. _safe_print("%10d" % tarinfo.size)
  1652. _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
  1653. % time.localtime(tarinfo.mtime)[:6])
  1654. _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))
  1655. if verbose:
  1656. if tarinfo.issym():
  1657. _safe_print("-> " + tarinfo.linkname)
  1658. if tarinfo.islnk():
  1659. _safe_print("link to " + tarinfo.linkname)
  1660. print()
  1661. def add(self, name, arcname=None, recursive=True, *, filter=None):
  1662. """Add the file `name' to the archive. `name' may be any type of file
  1663. (directory, fifo, symbolic link, etc.). If given, `arcname'
  1664. specifies an alternative name for the file in the archive.
  1665. Directories are added recursively by default. This can be avoided by
  1666. setting `recursive' to False. `filter' is a function
  1667. that expects a TarInfo object argument and returns the changed
  1668. TarInfo object, if it returns None the TarInfo object will be
  1669. excluded from the archive.
  1670. """
  1671. self._check("awx")
  1672. if arcname is None:
  1673. arcname = name
  1674. # Skip if somebody tries to archive the archive...
  1675. if self.name is not None and os.path.abspath(name) == self.name:
  1676. self._dbg(2, "tarfile: Skipped %r" % name)
  1677. return
  1678. self._dbg(1, name)
  1679. # Create a TarInfo object from the file.
  1680. tarinfo = self.gettarinfo(name, arcname)
  1681. if tarinfo is None:
  1682. self._dbg(1, "tarfile: Unsupported type %r" % name)
  1683. return
  1684. # Change or exclude the TarInfo object.
  1685. if filter is not None:
  1686. tarinfo = filter(tarinfo)
  1687. if tarinfo is None:
  1688. self._dbg(2, "tarfile: Excluded %r" % name)
  1689. return
  1690. # Append the tar header and data to the archive.
  1691. if tarinfo.isreg():
  1692. with bltn_open(name, "rb") as f:
  1693. self.addfile(tarinfo, f)
  1694. elif tarinfo.isdir():
  1695. self.addfile(tarinfo)
  1696. if recursive:
  1697. for f in sorted(os.listdir(name)):
  1698. self.add(os.path.join(name, f), os.path.join(arcname, f),
  1699. recursive, filter=filter)
  1700. else:
  1701. self.addfile(tarinfo)
  1702. def addfile(self, tarinfo, fileobj=None):
  1703. """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
  1704. given, it should be a binary file, and tarinfo.size bytes are read
  1705. from it and added to the archive. You can create TarInfo objects
  1706. directly, or by using gettarinfo().
  1707. """
  1708. self._check("awx")
  1709. tarinfo = copy.copy(tarinfo)
  1710. buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
  1711. self.fileobj.write(buf)
  1712. self.offset += len(buf)
  1713. bufsize=self.copybufsize
  1714. # If there's data to follow, append it.
  1715. if fileobj is not None:
  1716. copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize)
  1717. blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
  1718. if remainder > 0:
  1719. self.fileobj.write(NUL * (BLOCKSIZE - remainder))
  1720. blocks += 1
  1721. self.offset += blocks * BLOCKSIZE
  1722. self.members.append(tarinfo)
  1723. def extractall(self, path=".", members=None, *, numeric_owner=False):
  1724. """Extract all members from the archive to the current working
  1725. directory and set owner, modification time and permissions on
  1726. directories afterwards. `path' specifies a different directory
  1727. to extract to. `members' is optional and must be a subset of the
  1728. list returned by getmembers(). If `numeric_owner` is True, only
  1729. the numbers for user/group names are used and not the names.
  1730. """
  1731. directories = []
  1732. if members is None:
  1733. members = self
  1734. for tarinfo in members:
  1735. if tarinfo.isdir():
  1736. # Extract directories with a safe mode.
  1737. directories.append(tarinfo)
  1738. tarinfo = copy.copy(tarinfo)
  1739. tarinfo.mode = 0o700
  1740. # Do not set_attrs directories, as we will do that further down
  1741. self.extract(tarinfo, path, set_attrs=not tarinfo.isdir(),
  1742. numeric_owner=numeric_owner)
  1743. # Reverse sort directories.
  1744. directories.sort(key=lambda a: a.name)
  1745. directories.reverse()
  1746. # Set correct owner, mtime and filemode on directories.
  1747. for tarinfo in directories:
  1748. dirpath = os.path.join(path, tarinfo.name)
  1749. try:
  1750. self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
  1751. self.utime(tarinfo, dirpath)
  1752. self.chmod(tarinfo, dirpath)
  1753. except ExtractError as e:
  1754. if self.errorlevel > 1:
  1755. raise
  1756. else:
  1757. self._dbg(1, "tarfile: %s" % e)
  1758. def extract(self, member, path="", set_attrs=True, *, numeric_owner=False):
  1759. """Extract a member from the archive to the current working directory,
  1760. using its full name. Its file information is extracted as accurately
  1761. as possible. `member' may be a filename or a TarInfo object. You can
  1762. specify a different directory using `path'. File attributes (owner,
  1763. mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
  1764. is True, only the numbers for user/group names are used and not
  1765. the names.
  1766. """
  1767. self._check("r")
  1768. if isinstance(member, str):
  1769. tarinfo = self.getmember(member)
  1770. else:
  1771. tarinfo = member
  1772. # Prepare the link target for makelink().
  1773. if tarinfo.islnk():
  1774. tarinfo._link_target = os.path.join(path, tarinfo.linkname)
  1775. try:
  1776. self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
  1777. set_attrs=set_attrs,
  1778. numeric_owner=numeric_owner)
  1779. except OSError as e:
  1780. if self.errorlevel > 0:
  1781. raise
  1782. else:
  1783. if e.filename is None:
  1784. self._dbg(1, "tarfile: %s" % e.strerror)
  1785. else:
  1786. self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
  1787. except ExtractError as e:
  1788. if self.errorlevel > 1:
  1789. raise
  1790. else:
  1791. self._dbg(1, "tarfile: %s" % e)
  1792. def extractfile(self, member):
  1793. """Extract a member from the archive as a file object. `member' may be
  1794. a filename or a TarInfo object. If `member' is a regular file or a
  1795. link, an io.BufferedReader object is returned. Otherwise, None is
  1796. returned.
  1797. """
  1798. self._check("r")
  1799. if isinstance(member, str):
  1800. tarinfo = self.getmember(member)
  1801. else:
  1802. tarinfo = member
  1803. if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
  1804. # Members with unknown types are treated as regular files.
  1805. return self.fileobject(self, tarinfo)
  1806. elif tarinfo.islnk() or tarinfo.issym():
  1807. if isinstance(self.fileobj, _Stream):
  1808. # A small but ugly workaround for the case that someone tries
  1809. # to extract a (sym)link as a file-object from a non-seekable
  1810. # stream of tar blocks.
  1811. raise StreamError("cannot extract (sym)link as file object")
  1812. else:
  1813. # A (sym)link's file object is its target's file object.
  1814. return self.extractfile(self._find_link_target(tarinfo))
  1815. else:
  1816. # If there's no data associated with the member (directory, chrdev,
  1817. # blkdev, etc.), return None instead of a file object.
  1818. return None
  1819. def _extract_member(self, tarinfo, targetpath, set_attrs=True,
  1820. numeric_owner=False):
  1821. """Extract the TarInfo object tarinfo to a physical
  1822. file called targetpath.
  1823. """
  1824. # Fetch the TarInfo object for the given name
  1825. # and build the destination pathname, replacing
  1826. # forward slashes to platform specific separators.
  1827. targetpath = targetpath.rstrip("/")
  1828. targetpath = targetpath.replace("/", os.sep)
  1829. # Create all upper directories.
  1830. upperdirs = os.path.dirname(targetpath)
  1831. if upperdirs and not os.path.exists(upperdirs):
  1832. # Create directories that are not part of the archive with
  1833. # default permissions.
  1834. os.makedirs(upperdirs)
  1835. if tarinfo.islnk() or tarinfo.issym():
  1836. self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
  1837. else:
  1838. self._dbg(1, tarinfo.name)
  1839. if tarinfo.isreg():
  1840. self.makefile(tarinfo, targetpath)
  1841. elif tarinfo.isdir():
  1842. self.makedir(tarinfo, targetpath)
  1843. elif tarinfo.isfifo():
  1844. self.makefifo(tarinfo, targetpath)
  1845. elif tarinfo.ischr() or tarinfo.isblk():
  1846. self.makedev(tarinfo, targetpath)
  1847. elif tarinfo.islnk() or tarinfo.issym():
  1848. self.makelink(tarinfo, targetpath)
  1849. elif tarinfo.type not in SUPPORTED_TYPES:
  1850. self.makeunknown(tarinfo, targetpath)
  1851. else:
  1852. self.makefile(tarinfo, targetpath)
  1853. if set_attrs:
  1854. self.chown(tarinfo, targetpath, numeric_owner)
  1855. if not tarinfo.issym():
  1856. self.chmod(tarinfo, targetpath)
  1857. self.utime(tarinfo, targetpath)
  1858. #--------------------------------------------------------------------------
  1859. # Below are the different file methods. They are called via
  1860. # _extract_member() when extract() is called. They can be replaced in a
  1861. # subclass to implement other functionality.
  1862. def makedir(self, tarinfo, targetpath):
  1863. """Make a directory called targetpath.
  1864. """
  1865. try:
  1866. # Use a safe mode for the directory, the real mode is set
  1867. # later in _extract_member().
  1868. os.mkdir(targetpath, 0o700)
  1869. except FileExistsError:
  1870. pass
  1871. def makefile(self, tarinfo, targetpath):
  1872. """Make a file called targetpath.
  1873. """
  1874. source = self.fileobj
  1875. source.seek(tarinfo.offset_data)
  1876. bufsize = self.copybufsize
  1877. with bltn_open(targetpath, "wb") as target:
  1878. if tarinfo.sparse is not None:
  1879. for offset, size in tarinfo.sparse:
  1880. target.seek(offset)
  1881. copyfileobj(source, target, size, ReadError, bufsize)
  1882. target.seek(tarinfo.size)
  1883. target.truncate()
  1884. else:
  1885. copyfileobj(source, target, tarinfo.size, ReadError, bufsize)
  1886. def makeunknown(self, tarinfo, targetpath):
  1887. """Make a file from a TarInfo object with an unknown type
  1888. at targetpath.
  1889. """
  1890. self.makefile(tarinfo, targetpath)
  1891. self._dbg(1, "tarfile: Unknown file type %r, " \
  1892. "extracted as regular file." % tarinfo.type)
  1893. def makefifo(self, tarinfo, targetpath):
  1894. """Make a fifo called targetpath.
  1895. """
  1896. if hasattr(os, "mkfifo"):
  1897. os.mkfifo(targetpath)
  1898. else:
  1899. raise ExtractError("fifo not supported by system")
  1900. def makedev(self, tarinfo, targetpath):
  1901. """Make a character or block device called targetpath.
  1902. """
  1903. if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
  1904. raise ExtractError("special devices not supported by system")
  1905. mode = tarinfo.mode
  1906. if tarinfo.isblk():
  1907. mode |= stat.S_IFBLK
  1908. else:
  1909. mode |= stat.S_IFCHR
  1910. os.mknod(targetpath, mode,
  1911. os.makedev(tarinfo.devmajor, tarinfo.devminor))
  1912. def makelink(self, tarinfo, targetpath):
  1913. """Make a (symbolic) link called targetpath. If it cannot be created
  1914. (platform limitation), we try to make a copy of the referenced file
  1915. instead of a link.
  1916. """
  1917. try:
  1918. # For systems that support symbolic and hard links.
  1919. if tarinfo.issym():
  1920. os.symlink(tarinfo.linkname, targetpath)
  1921. else:
  1922. # See extract().
  1923. if os.path.exists(tarinfo._link_target):
  1924. os.link(tarinfo._link_target, targetpath)
  1925. else:
  1926. self._extract_member(self._find_link_target(tarinfo),
  1927. targetpath)
  1928. except symlink_exception:
  1929. try:
  1930. self._extract_member(self._find_link_target(tarinfo),
  1931. targetpath)
  1932. except KeyError:
  1933. raise ExtractError("unable to resolve link inside archive")
  1934. def chown(self, tarinfo, targetpath, numeric_owner):
  1935. """Set owner of targetpath according to tarinfo. If numeric_owner
  1936. is True, use .gid/.uid instead of .gname/.uname. If numeric_owner
  1937. is False, fall back to .gid/.uid when the search based on name
  1938. fails.
  1939. """
  1940. if hasattr(os, "geteuid") and os.geteuid() == 0:
  1941. # We have to be root to do so.
  1942. g = tarinfo.gid
  1943. u = tarinfo.uid
  1944. if not numeric_owner:
  1945. try:
  1946. if grp:
  1947. g = grp.getgrnam(tarinfo.gname)[2]
  1948. except KeyError:
  1949. pass
  1950. try:
  1951. if pwd:
  1952. u = pwd.getpwnam(tarinfo.uname)[2]
  1953. except KeyError:
  1954. pass
  1955. try:
  1956. if tarinfo.issym() and hasattr(os, "lchown"):
  1957. os.lchown(targetpath, u, g)
  1958. else:
  1959. os.chown(targetpath, u, g)
  1960. except OSError:
  1961. raise ExtractError("could not change owner")
  1962. def chmod(self, tarinfo, targetpath):
  1963. """Set file permissions of targetpath according to tarinfo.
  1964. """
  1965. if hasattr(os, 'chmod'):
  1966. try:
  1967. os.chmod(targetpath, tarinfo.mode)
  1968. except OSError:
  1969. raise ExtractError("could not change mode")
  1970. def utime(self, tarinfo, targetpath):
  1971. """Set modification time of targetpath according to tarinfo.
  1972. """
  1973. if not hasattr(os, 'utime'):
  1974. return
  1975. try:
  1976. os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
  1977. except OSError:
  1978. raise ExtractError("could not change modification time")
  1979. #--------------------------------------------------------------------------
  1980. def next(self):
  1981. """Return the next member of the archive as a TarInfo object, when
  1982. TarFile is opened for reading. Return None if there is no more
  1983. available.
  1984. """
  1985. self._check("ra")
  1986. if self.firstmember is not None:
  1987. m = self.firstmember
  1988. self.firstmember = None
  1989. return m
  1990. # Advance the file pointer.
  1991. if self.offset != self.fileobj.tell():
  1992. self.fileobj.seek(self.offset - 1)
  1993. if not self.fileobj.read(1):
  1994. raise ReadError("unexpected end of data")
  1995. # Read the next block.
  1996. tarinfo = None
  1997. while True:
  1998. try:
  1999. tarinfo = self.tarinfo.fromtarfile(self)
  2000. except EOFHeaderError as e:
  2001. if self.ignore_zeros:
  2002. self._dbg(2, "0x%X: %s" % (self.offset, e))
  2003. self.offset += BLOCKSIZE
  2004. continue
  2005. except InvalidHeaderError as e:
  2006. if self.ignore_zeros:
  2007. self._dbg(2, "0x%X: %s" % (self.offset, e))
  2008. self.offset += BLOCKSIZE
  2009. continue
  2010. elif self.offset == 0:
  2011. raise ReadError(str(e))
  2012. except EmptyHeaderError:
  2013. if self.offset == 0:
  2014. raise ReadError("empty file")
  2015. except TruncatedHeaderError as e:
  2016. if self.offset == 0:
  2017. raise ReadError(str(e))
  2018. except SubsequentHeaderError as e:
  2019. raise ReadError(str(e))
  2020. break
  2021. if tarinfo is not None:
  2022. self.members.append(tarinfo)
  2023. else:
  2024. self._loaded = True
  2025. return tarinfo
  2026. #--------------------------------------------------------------------------
  2027. # Little helper methods:
  2028. def _getmember(self, name, tarinfo=None, normalize=False):
  2029. """Find an archive member by name from bottom to top.
  2030. If tarinfo is given, it is used as the starting point.
  2031. """
  2032. # Ensure that all members have been loaded.
  2033. members = self.getmembers()
  2034. # Limit the member search list up to tarinfo.
  2035. if tarinfo is not None:
  2036. members = members[:members.index(tarinfo)]
  2037. if normalize:
  2038. name = os.path.normpath(name)
  2039. for member in reversed(members):
  2040. if normalize:
  2041. member_name = os.path.normpath(member.name)
  2042. else:
  2043. member_name = member.name
  2044. if name == member_name:
  2045. return member
  2046. def _load(self):
  2047. """Read through the entire archive file and look for readable
  2048. members.
  2049. """
  2050. while True:
  2051. tarinfo = self.next()
  2052. if tarinfo is None:
  2053. break
  2054. self._loaded = True
  2055. def _check(self, mode=None):
  2056. """Check if TarFile is still open, and if the operation's mode
  2057. corresponds to TarFile's mode.
  2058. """
  2059. if self.closed:
  2060. raise OSError("%s is closed" % self.__class__.__name__)
  2061. if mode is not None and self.mode not in mode:
  2062. raise OSError("bad operation for mode %r" % self.mode)
  2063. def _find_link_target(self, tarinfo):
  2064. """Find the target member of a symlink or hardlink member in the
  2065. archive.
  2066. """
  2067. if tarinfo.issym():
  2068. # Always search the entire archive.
  2069. linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
  2070. limit = None
  2071. else:
  2072. # Search the archive before the link, because a hard link is
  2073. # just a reference to an already archived file.
  2074. linkname = tarinfo.linkname
  2075. limit = tarinfo
  2076. member = self._getmember(linkname, tarinfo=limit, normalize=True)
  2077. if member is None:
  2078. raise KeyError("linkname %r not found" % linkname)
  2079. return member
  2080. def __iter__(self):
  2081. """Provide an iterator object.
  2082. """
  2083. if self._loaded:
  2084. yield from self.members
  2085. return
  2086. # Yield items using TarFile's next() method.
  2087. # When all members have been read, set TarFile as _loaded.
  2088. index = 0
  2089. # Fix for SF #1100429: Under rare circumstances it can
  2090. # happen that getmembers() is called during iteration,
  2091. # which will have already exhausted the next() method.
  2092. if self.firstmember is not None:
  2093. tarinfo = self.next()
  2094. index += 1
  2095. yield tarinfo
  2096. while True:
  2097. if index < len(self.members):
  2098. tarinfo = self.members[index]
  2099. elif not self._loaded:
  2100. tarinfo = self.next()
  2101. if not tarinfo:
  2102. self._loaded = True
  2103. return
  2104. else:
  2105. return
  2106. index += 1
  2107. yield tarinfo
  2108. def _dbg(self, level, msg):
  2109. """Write debugging output to sys.stderr.
  2110. """
  2111. if level <= self.debug:
  2112. print(msg, file=sys.stderr)
  2113. def __enter__(self):
  2114. self._check()
  2115. return self
  2116. def __exit__(self, type, value, traceback):
  2117. if type is None:
  2118. self.close()
  2119. else:
  2120. # An exception occurred. We must not call close() because
  2121. # it would try to write end-of-archive blocks and padding.
  2122. if not self._extfileobj:
  2123. self.fileobj.close()
  2124. self.closed = True
  2125. #--------------------
  2126. # exported functions
  2127. #--------------------
  2128. def is_tarfile(name):
  2129. """Return True if name points to a tar archive that we
  2130. are able to handle, else return False.
  2131. """
  2132. try:
  2133. t = open(name)
  2134. t.close()
  2135. return True
  2136. except TarError:
  2137. return False
  2138. open = TarFile.open
  2139. def main():
  2140. import argparse
  2141. description = 'A simple command-line interface for tarfile module.'
  2142. parser = argparse.ArgumentParser(description=description)
  2143. parser.add_argument('-v', '--verbose', action='store_true', default=False,
  2144. help='Verbose output')
  2145. group = parser.add_mutually_exclusive_group(required=True)
  2146. group.add_argument('-l', '--list', metavar='<tarfile>',
  2147. help='Show listing of a tarfile')
  2148. group.add_argument('-e', '--extract', nargs='+',
  2149. metavar=('<tarfile>', '<output_dir>'),
  2150. help='Extract tarfile into target dir')
  2151. group.add_argument('-c', '--create', nargs='+',
  2152. metavar=('<name>', '<file>'),
  2153. help='Create tarfile from sources')
  2154. group.add_argument('-t', '--test', metavar='<tarfile>',
  2155. help='Test if a tarfile is valid')
  2156. args = parser.parse_args()
  2157. if args.test is not None:
  2158. src = args.test
  2159. if is_tarfile(src):
  2160. with open(src, 'r') as tar:
  2161. tar.getmembers()
  2162. print(tar.getmembers(), file=sys.stderr)
  2163. if args.verbose:
  2164. print('{!r} is a tar archive.'.format(src))
  2165. else:
  2166. parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
  2167. elif args.list is not None:
  2168. src = args.list
  2169. if is_tarfile(src):
  2170. with TarFile.open(src, 'r:*') as tf:
  2171. tf.list(verbose=args.verbose)
  2172. else:
  2173. parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
  2174. elif args.extract is not None:
  2175. if len(args.extract) == 1:
  2176. src = args.extract[0]
  2177. curdir = os.curdir
  2178. elif len(args.extract) == 2:
  2179. src, curdir = args.extract
  2180. else:
  2181. parser.exit(1, parser.format_help())
  2182. if is_tarfile(src):
  2183. with TarFile.open(src, 'r:*') as tf:
  2184. tf.extractall(path=curdir)
  2185. if args.verbose:
  2186. if curdir == '.':
  2187. msg = '{!r} file is extracted.'.format(src)
  2188. else:
  2189. msg = ('{!r} file is extracted '
  2190. 'into {!r} directory.').format(src, curdir)
  2191. print(msg)
  2192. else:
  2193. parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
  2194. elif args.create is not None:
  2195. tar_name = args.create.pop(0)
  2196. _, ext = os.path.splitext(tar_name)
  2197. compressions = {
  2198. # gz
  2199. '.gz': 'gz',
  2200. '.tgz': 'gz',
  2201. # xz
  2202. '.xz': 'xz',
  2203. '.txz': 'xz',
  2204. # bz2
  2205. '.bz2': 'bz2',
  2206. '.tbz': 'bz2',
  2207. '.tbz2': 'bz2',
  2208. '.tb2': 'bz2',
  2209. }
  2210. tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
  2211. tar_files = args.create
  2212. with TarFile.open(tar_name, tar_mode) as tf:
  2213. for file_name in tar_files:
  2214. tf.add(file_name)
  2215. if args.verbose:
  2216. print('{!r} file created.'.format(tar_name))
  2217. if __name__ == '__main__':
  2218. main()