tarfile.py 91 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548
  1. #!/usr/bin/env python3
  2. #-------------------------------------------------------------------
  3. # tarfile.py
  4. #-------------------------------------------------------------------
  5. # Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
  6. # All rights reserved.
  7. #
  8. # Permission is hereby granted, free of charge, to any person
  9. # obtaining a copy of this software and associated documentation
  10. # files (the "Software"), to deal in the Software without
  11. # restriction, including without limitation the rights to use,
  12. # copy, modify, merge, publish, distribute, sublicense, and/or sell
  13. # copies of the Software, and to permit persons to whom the
  14. # Software is furnished to do so, subject to the following
  15. # conditions:
  16. #
  17. # The above copyright notice and this permission notice shall be
  18. # included in all copies or substantial portions of the Software.
  19. #
  20. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
  21. # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
  22. # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
  23. # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
  24. # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
  25. # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
  26. # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  27. # OTHER DEALINGS IN THE SOFTWARE.
  28. #
  29. """Read from and write to tar format archives.
  30. """
  31. version = "0.9.0"
  32. __author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)"
  33. __date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $"
  34. __cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $"
  35. __credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
  36. #---------
  37. # Imports
  38. #---------
  39. from builtins import open as bltn_open
  40. import sys
  41. import os
  42. import io
  43. import shutil
  44. import stat
  45. import time
  46. import struct
  47. import copy
  48. import re
  49. try:
  50. import grp, pwd
  51. except ImportError:
  52. grp = pwd = None
  53. # os.symlink on Windows prior to 6.0 raises NotImplementedError
  54. symlink_exception = (AttributeError, NotImplementedError)
  55. try:
  56. # OSError (winerror=1314) will be raised if the caller does not hold the
  57. # SeCreateSymbolicLinkPrivilege privilege
  58. symlink_exception += (OSError,)
  59. except NameError:
  60. pass
  61. # from tarfile import *
  62. __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
  63. #---------------------------------------------------------
  64. # tar constants
  65. #---------------------------------------------------------
  66. NUL = b"\0" # the null character
  67. BLOCKSIZE = 512 # length of processing blocks
  68. RECORDSIZE = BLOCKSIZE * 20 # length of records
  69. GNU_MAGIC = b"ustar \0" # magic gnu tar string
  70. POSIX_MAGIC = b"ustar\x0000" # magic posix tar string
  71. LENGTH_NAME = 100 # maximum length of a filename
  72. LENGTH_LINK = 100 # maximum length of a linkname
  73. LENGTH_PREFIX = 155 # maximum length of the prefix field
  74. REGTYPE = b"0" # regular file
  75. AREGTYPE = b"\0" # regular file
  76. LNKTYPE = b"1" # link (inside tarfile)
  77. SYMTYPE = b"2" # symbolic link
  78. CHRTYPE = b"3" # character special device
  79. BLKTYPE = b"4" # block special device
  80. DIRTYPE = b"5" # directory
  81. FIFOTYPE = b"6" # fifo special device
  82. CONTTYPE = b"7" # contiguous file
  83. GNUTYPE_LONGNAME = b"L" # GNU tar longname
  84. GNUTYPE_LONGLINK = b"K" # GNU tar longlink
  85. GNUTYPE_SPARSE = b"S" # GNU tar sparse file
  86. XHDTYPE = b"x" # POSIX.1-2001 extended header
  87. XGLTYPE = b"g" # POSIX.1-2001 global header
  88. SOLARIS_XHDTYPE = b"X" # Solaris extended header
  89. USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format
  90. GNU_FORMAT = 1 # GNU tar format
  91. PAX_FORMAT = 2 # POSIX.1-2001 (pax) format
  92. DEFAULT_FORMAT = GNU_FORMAT
  93. #---------------------------------------------------------
  94. # tarfile constants
  95. #---------------------------------------------------------
  96. # File types that tarfile supports:
  97. SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
  98. SYMTYPE, DIRTYPE, FIFOTYPE,
  99. CONTTYPE, CHRTYPE, BLKTYPE,
  100. GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
  101. GNUTYPE_SPARSE)
  102. # File types that will be treated as a regular file.
  103. REGULAR_TYPES = (REGTYPE, AREGTYPE,
  104. CONTTYPE, GNUTYPE_SPARSE)
  105. # File types that are part of the GNU tar format.
  106. GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
  107. GNUTYPE_SPARSE)
  108. # Fields from a pax header that override a TarInfo attribute.
  109. PAX_FIELDS = ("path", "linkpath", "size", "mtime",
  110. "uid", "gid", "uname", "gname")
  111. # Fields from a pax header that are affected by hdrcharset.
  112. PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}
  113. # Fields in a pax header that are numbers, all other fields
  114. # are treated as strings.
  115. PAX_NUMBER_FIELDS = {
  116. "atime": float,
  117. "ctime": float,
  118. "mtime": float,
  119. "uid": int,
  120. "gid": int,
  121. "size": int
  122. }
  123. #---------------------------------------------------------
  124. # initialization
  125. #---------------------------------------------------------
  126. if os.name in ("nt", "ce"):
  127. ENCODING = "utf-8"
  128. else:
  129. ENCODING = sys.getfilesystemencoding()
  130. #---------------------------------------------------------
  131. # Some useful functions
  132. #---------------------------------------------------------
  133. def stn(s, length, encoding, errors):
  134. """Convert a string to a null-terminated bytes object.
  135. """
  136. s = s.encode(encoding, errors)
  137. return s[:length] + (length - len(s)) * NUL
  138. def nts(s, encoding, errors):
  139. """Convert a null-terminated bytes object to a string.
  140. """
  141. p = s.find(b"\0")
  142. if p != -1:
  143. s = s[:p]
  144. return s.decode(encoding, errors)
  145. def nti(s):
  146. """Convert a number field to a python number.
  147. """
  148. # There are two possible encodings for a number field, see
  149. # itn() below.
  150. if s[0] in (0o200, 0o377):
  151. n = 0
  152. for i in range(len(s) - 1):
  153. n <<= 8
  154. n += s[i + 1]
  155. if s[0] == 0o377:
  156. n = -(256 ** (len(s) - 1) - n)
  157. else:
  158. try:
  159. s = nts(s, "ascii", "strict")
  160. n = int(s.strip() or "0", 8)
  161. except ValueError:
  162. raise InvalidHeaderError("invalid header")
  163. return n
  164. def itn(n, digits=8, format=DEFAULT_FORMAT):
  165. """Convert a python number to a number field.
  166. """
  167. # POSIX 1003.1-1988 requires numbers to be encoded as a string of
  168. # octal digits followed by a null-byte, this allows values up to
  169. # (8**(digits-1))-1. GNU tar allows storing numbers greater than
  170. # that if necessary. A leading 0o200 or 0o377 byte indicate this
  171. # particular encoding, the following digits-1 bytes are a big-endian
  172. # base-256 representation. This allows values up to (256**(digits-1))-1.
  173. # A 0o200 byte indicates a positive number, a 0o377 byte a negative
  174. # number.
  175. if 0 <= n < 8 ** (digits - 1):
  176. s = bytes("%0*o" % (digits - 1, int(n)), "ascii") + NUL
  177. elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
  178. if n >= 0:
  179. s = bytearray([0o200])
  180. else:
  181. s = bytearray([0o377])
  182. n = 256 ** digits + n
  183. for i in range(digits - 1):
  184. s.insert(1, n & 0o377)
  185. n >>= 8
  186. else:
  187. raise ValueError("overflow in number field")
  188. return s
  189. def calc_chksums(buf):
  190. """Calculate the checksum for a member's header by summing up all
  191. characters except for the chksum field which is treated as if
  192. it was filled with spaces. According to the GNU tar sources,
  193. some tars (Sun and NeXT) calculate chksum with signed char,
  194. which will be different if there are chars in the buffer with
  195. the high bit set. So we calculate two checksums, unsigned and
  196. signed.
  197. """
  198. unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf))
  199. signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf))
  200. return unsigned_chksum, signed_chksum
  201. def copyfileobj(src, dst, length=None, exception=OSError):
  202. """Copy length bytes from fileobj src to fileobj dst.
  203. If length is None, copy the entire content.
  204. """
  205. if length == 0:
  206. return
  207. if length is None:
  208. shutil.copyfileobj(src, dst)
  209. return
  210. BUFSIZE = 16 * 1024
  211. blocks, remainder = divmod(length, BUFSIZE)
  212. for b in range(blocks):
  213. buf = src.read(BUFSIZE)
  214. if len(buf) < BUFSIZE:
  215. raise exception("unexpected end of data")
  216. dst.write(buf)
  217. if remainder != 0:
  218. buf = src.read(remainder)
  219. if len(buf) < remainder:
  220. raise exception("unexpected end of data")
  221. dst.write(buf)
  222. return
  223. def filemode(mode):
  224. """Deprecated in this location; use stat.filemode."""
  225. import warnings
  226. warnings.warn("deprecated in favor of stat.filemode",
  227. DeprecationWarning, 2)
  228. return stat.filemode(mode)
  229. def _safe_print(s):
  230. encoding = getattr(sys.stdout, 'encoding', None)
  231. if encoding is not None:
  232. s = s.encode(encoding, 'backslashreplace').decode(encoding)
  233. print(s, end=' ')
  234. class TarError(Exception):
  235. """Base exception."""
  236. pass
  237. class ExtractError(TarError):
  238. """General exception for extract errors."""
  239. pass
  240. class ReadError(TarError):
  241. """Exception for unreadable tar archives."""
  242. pass
  243. class CompressionError(TarError):
  244. """Exception for unavailable compression methods."""
  245. pass
  246. class StreamError(TarError):
  247. """Exception for unsupported operations on stream-like TarFiles."""
  248. pass
  249. class HeaderError(TarError):
  250. """Base exception for header errors."""
  251. pass
  252. class EmptyHeaderError(HeaderError):
  253. """Exception for empty headers."""
  254. pass
  255. class TruncatedHeaderError(HeaderError):
  256. """Exception for truncated headers."""
  257. pass
  258. class EOFHeaderError(HeaderError):
  259. """Exception for end of file headers."""
  260. pass
  261. class InvalidHeaderError(HeaderError):
  262. """Exception for invalid headers."""
  263. pass
  264. class SubsequentHeaderError(HeaderError):
  265. """Exception for missing and invalid extended headers."""
  266. pass
  267. #---------------------------
  268. # internal stream interface
  269. #---------------------------
  270. class _LowLevelFile:
  271. """Low-level file object. Supports reading and writing.
  272. It is used instead of a regular file object for streaming
  273. access.
  274. """
  275. def __init__(self, name, mode):
  276. mode = {
  277. "r": os.O_RDONLY,
  278. "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
  279. }[mode]
  280. if hasattr(os, "O_BINARY"):
  281. mode |= os.O_BINARY
  282. self.fd = os.open(name, mode, 0o666)
  283. def close(self):
  284. os.close(self.fd)
  285. def read(self, size):
  286. return os.read(self.fd, size)
  287. def write(self, s):
  288. os.write(self.fd, s)
  289. class _Stream:
  290. """Class that serves as an adapter between TarFile and
  291. a stream-like object. The stream-like object only
  292. needs to have a read() or write() method and is accessed
  293. blockwise. Use of gzip or bzip2 compression is possible.
  294. A stream-like object could be for example: sys.stdin,
  295. sys.stdout, a socket, a tape device etc.
  296. _Stream is intended to be used only internally.
  297. """
  298. def __init__(self, name, mode, comptype, fileobj, bufsize):
  299. """Construct a _Stream object.
  300. """
  301. self._extfileobj = True
  302. if fileobj is None:
  303. fileobj = _LowLevelFile(name, mode)
  304. self._extfileobj = False
  305. if comptype == '*':
  306. # Enable transparent compression detection for the
  307. # stream interface
  308. fileobj = _StreamProxy(fileobj)
  309. comptype = fileobj.getcomptype()
  310. self.name = name or ""
  311. self.mode = mode
  312. self.comptype = comptype
  313. self.fileobj = fileobj
  314. self.bufsize = bufsize
  315. self.buf = b""
  316. self.pos = 0
  317. self.closed = False
  318. try:
  319. if comptype == "gz":
  320. try:
  321. import zlib
  322. except ImportError:
  323. raise CompressionError("zlib module is not available")
  324. self.zlib = zlib
  325. self.crc = zlib.crc32(b"")
  326. if mode == "r":
  327. self._init_read_gz()
  328. self.exception = zlib.error
  329. else:
  330. self._init_write_gz()
  331. elif comptype == "bz2":
  332. try:
  333. import bz2
  334. except ImportError:
  335. raise CompressionError("bz2 module is not available")
  336. if mode == "r":
  337. self.dbuf = b""
  338. self.cmp = bz2.BZ2Decompressor()
  339. self.exception = OSError
  340. else:
  341. self.cmp = bz2.BZ2Compressor()
  342. elif comptype == "xz":
  343. try:
  344. import lzma
  345. except ImportError:
  346. raise CompressionError("lzma module is not available")
  347. if mode == "r":
  348. self.dbuf = b""
  349. self.cmp = lzma.LZMADecompressor()
  350. self.exception = lzma.LZMAError
  351. else:
  352. self.cmp = lzma.LZMACompressor()
  353. elif comptype != "tar":
  354. raise CompressionError("unknown compression type %r" % comptype)
  355. except:
  356. if not self._extfileobj:
  357. self.fileobj.close()
  358. self.closed = True
  359. raise
  360. def __del__(self):
  361. if hasattr(self, "closed") and not self.closed:
  362. self.close()
  363. def _init_write_gz(self):
  364. """Initialize for writing with gzip compression.
  365. """
  366. self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
  367. -self.zlib.MAX_WBITS,
  368. self.zlib.DEF_MEM_LEVEL,
  369. 0)
  370. timestamp = struct.pack("<L", int(time.time()))
  371. self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
  372. if self.name.endswith(".gz"):
  373. self.name = self.name[:-3]
  374. # RFC1952 says we must use ISO-8859-1 for the FNAME field.
  375. self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
  376. def write(self, s):
  377. """Write string s to the stream.
  378. """
  379. if self.comptype == "gz":
  380. self.crc = self.zlib.crc32(s, self.crc)
  381. self.pos += len(s)
  382. if self.comptype != "tar":
  383. s = self.cmp.compress(s)
  384. self.__write(s)
  385. def __write(self, s):
  386. """Write string s to the stream if a whole new block
  387. is ready to be written.
  388. """
  389. self.buf += s
  390. while len(self.buf) > self.bufsize:
  391. self.fileobj.write(self.buf[:self.bufsize])
  392. self.buf = self.buf[self.bufsize:]
  393. def close(self):
  394. """Close the _Stream object. No operation should be
  395. done on it afterwards.
  396. """
  397. if self.closed:
  398. return
  399. self.closed = True
  400. try:
  401. if self.mode == "w" and self.comptype != "tar":
  402. self.buf += self.cmp.flush()
  403. if self.mode == "w" and self.buf:
  404. self.fileobj.write(self.buf)
  405. self.buf = b""
  406. if self.comptype == "gz":
  407. self.fileobj.write(struct.pack("<L", self.crc))
  408. self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
  409. finally:
  410. if not self._extfileobj:
  411. self.fileobj.close()
  412. def _init_read_gz(self):
  413. """Initialize for reading a gzip compressed fileobj.
  414. """
  415. self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
  416. self.dbuf = b""
  417. # taken from gzip.GzipFile with some alterations
  418. if self.__read(2) != b"\037\213":
  419. raise ReadError("not a gzip file")
  420. if self.__read(1) != b"\010":
  421. raise CompressionError("unsupported compression method")
  422. flag = ord(self.__read(1))
  423. self.__read(6)
  424. if flag & 4:
  425. xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
  426. self.read(xlen)
  427. if flag & 8:
  428. while True:
  429. s = self.__read(1)
  430. if not s or s == NUL:
  431. break
  432. if flag & 16:
  433. while True:
  434. s = self.__read(1)
  435. if not s or s == NUL:
  436. break
  437. if flag & 2:
  438. self.__read(2)
  439. def tell(self):
  440. """Return the stream's file pointer position.
  441. """
  442. return self.pos
  443. def seek(self, pos=0):
  444. """Set the stream's file pointer to pos. Negative seeking
  445. is forbidden.
  446. """
  447. if pos - self.pos >= 0:
  448. blocks, remainder = divmod(pos - self.pos, self.bufsize)
  449. for i in range(blocks):
  450. self.read(self.bufsize)
  451. self.read(remainder)
  452. else:
  453. raise StreamError("seeking backwards is not allowed")
  454. return self.pos
  455. def read(self, size=None):
  456. """Return the next size number of bytes from the stream.
  457. If size is not defined, return all bytes of the stream
  458. up to EOF.
  459. """
  460. if size is None:
  461. t = []
  462. while True:
  463. buf = self._read(self.bufsize)
  464. if not buf:
  465. break
  466. t.append(buf)
  467. buf = "".join(t)
  468. else:
  469. buf = self._read(size)
  470. self.pos += len(buf)
  471. return buf
  472. def _read(self, size):
  473. """Return size bytes from the stream.
  474. """
  475. if self.comptype == "tar":
  476. return self.__read(size)
  477. c = len(self.dbuf)
  478. while c < size:
  479. buf = self.__read(self.bufsize)
  480. if not buf:
  481. break
  482. try:
  483. buf = self.cmp.decompress(buf)
  484. except self.exception:
  485. raise ReadError("invalid compressed data")
  486. self.dbuf += buf
  487. c += len(buf)
  488. buf = self.dbuf[:size]
  489. self.dbuf = self.dbuf[size:]
  490. return buf
  491. def __read(self, size):
  492. """Return size bytes from stream. If internal buffer is empty,
  493. read another block from the stream.
  494. """
  495. c = len(self.buf)
  496. while c < size:
  497. buf = self.fileobj.read(self.bufsize)
  498. if not buf:
  499. break
  500. self.buf += buf
  501. c += len(buf)
  502. buf = self.buf[:size]
  503. self.buf = self.buf[size:]
  504. return buf
  505. # class _Stream
  506. class _StreamProxy(object):
  507. """Small proxy class that enables transparent compression
  508. detection for the Stream interface (mode 'r|*').
  509. """
  510. def __init__(self, fileobj):
  511. self.fileobj = fileobj
  512. self.buf = self.fileobj.read(BLOCKSIZE)
  513. def read(self, size):
  514. self.read = self.fileobj.read
  515. return self.buf
  516. def getcomptype(self):
  517. if self.buf.startswith(b"\x1f\x8b\x08"):
  518. return "gz"
  519. elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
  520. return "bz2"
  521. elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
  522. return "xz"
  523. else:
  524. return "tar"
  525. def close(self):
  526. self.fileobj.close()
  527. # class StreamProxy
  528. #------------------------
  529. # Extraction file object
  530. #------------------------
  531. class _FileInFile(object):
  532. """A thin wrapper around an existing file object that
  533. provides a part of its data as an individual file
  534. object.
  535. """
  536. def __init__(self, fileobj, offset, size, blockinfo=None):
  537. self.fileobj = fileobj
  538. self.offset = offset
  539. self.size = size
  540. self.position = 0
  541. self.name = getattr(fileobj, "name", None)
  542. self.closed = False
  543. if blockinfo is None:
  544. blockinfo = [(0, size)]
  545. # Construct a map with data and zero blocks.
  546. self.map_index = 0
  547. self.map = []
  548. lastpos = 0
  549. realpos = self.offset
  550. for offset, size in blockinfo:
  551. if offset > lastpos:
  552. self.map.append((False, lastpos, offset, None))
  553. self.map.append((True, offset, offset + size, realpos))
  554. realpos += size
  555. lastpos = offset + size
  556. if lastpos < self.size:
  557. self.map.append((False, lastpos, self.size, None))
  558. def flush(self):
  559. pass
  560. def readable(self):
  561. return True
  562. def writable(self):
  563. return False
  564. def seekable(self):
  565. return self.fileobj.seekable()
  566. def tell(self):
  567. """Return the current file position.
  568. """
  569. return self.position
  570. def seek(self, position, whence=io.SEEK_SET):
  571. """Seek to a position in the file.
  572. """
  573. if whence == io.SEEK_SET:
  574. self.position = min(max(position, 0), self.size)
  575. elif whence == io.SEEK_CUR:
  576. if position < 0:
  577. self.position = max(self.position + position, 0)
  578. else:
  579. self.position = min(self.position + position, self.size)
  580. elif whence == io.SEEK_END:
  581. self.position = max(min(self.size + position, self.size), 0)
  582. else:
  583. raise ValueError("Invalid argument")
  584. return self.position
  585. def read(self, size=None):
  586. """Read data from the file.
  587. """
  588. if size is None:
  589. size = self.size - self.position
  590. else:
  591. size = min(size, self.size - self.position)
  592. buf = b""
  593. while size > 0:
  594. while True:
  595. data, start, stop, offset = self.map[self.map_index]
  596. if start <= self.position < stop:
  597. break
  598. else:
  599. self.map_index += 1
  600. if self.map_index == len(self.map):
  601. self.map_index = 0
  602. length = min(size, stop - self.position)
  603. if data:
  604. self.fileobj.seek(offset + (self.position - start))
  605. b = self.fileobj.read(length)
  606. if len(b) != length:
  607. raise ReadError("unexpected end of data")
  608. buf += b
  609. else:
  610. buf += NUL * length
  611. size -= length
  612. self.position += length
  613. return buf
  614. def readinto(self, b):
  615. buf = self.read(len(b))
  616. b[:len(buf)] = buf
  617. return len(buf)
  618. def close(self):
  619. self.closed = True
  620. #class _FileInFile
  621. class ExFileObject(io.BufferedReader):
  622. def __init__(self, tarfile, tarinfo):
  623. fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
  624. tarinfo.size, tarinfo.sparse)
  625. super().__init__(fileobj)
  626. #class ExFileObject
  627. #------------------
  628. # Exported Classes
  629. #------------------
  630. class TarInfo(object):
  631. """Informational class which holds the details about an
  632. archive member given by a tar header block.
  633. TarInfo objects are returned by TarFile.getmember(),
  634. TarFile.getmembers() and TarFile.gettarinfo() and are
  635. usually created internally.
  636. """
  637. __slots__ = ("name", "mode", "uid", "gid", "size", "mtime",
  638. "chksum", "type", "linkname", "uname", "gname",
  639. "devmajor", "devminor",
  640. "offset", "offset_data", "pax_headers", "sparse",
  641. "tarfile", "_sparse_structs", "_link_target")
  642. def __init__(self, name=""):
  643. """Construct a TarInfo object. name is the optional name
  644. of the member.
  645. """
  646. self.name = name # member name
  647. self.mode = 0o644 # file permissions
  648. self.uid = 0 # user id
  649. self.gid = 0 # group id
  650. self.size = 0 # file size
  651. self.mtime = 0 # modification time
  652. self.chksum = 0 # header checksum
  653. self.type = REGTYPE # member type
  654. self.linkname = "" # link name
  655. self.uname = "" # user name
  656. self.gname = "" # group name
  657. self.devmajor = 0 # device major number
  658. self.devminor = 0 # device minor number
  659. self.offset = 0 # the tar header starts here
  660. self.offset_data = 0 # the file's data starts here
  661. self.sparse = None # sparse member information
  662. self.pax_headers = {} # pax header information
  663. # In pax headers the "name" and "linkname" field are called
  664. # "path" and "linkpath".
  665. def _getpath(self):
  666. return self.name
  667. def _setpath(self, name):
  668. self.name = name
  669. path = property(_getpath, _setpath)
  670. def _getlinkpath(self):
  671. return self.linkname
  672. def _setlinkpath(self, linkname):
  673. self.linkname = linkname
  674. linkpath = property(_getlinkpath, _setlinkpath)
  675. def __repr__(self):
  676. return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
  677. def get_info(self):
  678. """Return the TarInfo's attributes as a dictionary.
  679. """
  680. info = {
  681. "name": self.name,
  682. "mode": self.mode & 0o7777,
  683. "uid": self.uid,
  684. "gid": self.gid,
  685. "size": self.size,
  686. "mtime": self.mtime,
  687. "chksum": self.chksum,
  688. "type": self.type,
  689. "linkname": self.linkname,
  690. "uname": self.uname,
  691. "gname": self.gname,
  692. "devmajor": self.devmajor,
  693. "devminor": self.devminor
  694. }
  695. if info["type"] == DIRTYPE and not info["name"].endswith("/"):
  696. info["name"] += "/"
  697. return info
  698. def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
  699. """Return a tar header as a string of 512 byte blocks.
  700. """
  701. info = self.get_info()
  702. if format == USTAR_FORMAT:
  703. return self.create_ustar_header(info, encoding, errors)
  704. elif format == GNU_FORMAT:
  705. return self.create_gnu_header(info, encoding, errors)
  706. elif format == PAX_FORMAT:
  707. return self.create_pax_header(info, encoding)
  708. else:
  709. raise ValueError("invalid format")
  710. def create_ustar_header(self, info, encoding, errors):
  711. """Return the object as a ustar header block.
  712. """
  713. info["magic"] = POSIX_MAGIC
  714. if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
  715. raise ValueError("linkname is too long")
  716. if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
  717. info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors)
  718. return self._create_header(info, USTAR_FORMAT, encoding, errors)
  719. def create_gnu_header(self, info, encoding, errors):
  720. """Return the object as a GNU header block sequence.
  721. """
  722. info["magic"] = GNU_MAGIC
  723. buf = b""
  724. if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
  725. buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
  726. if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
  727. buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
  728. return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
  729. def create_pax_header(self, info, encoding):
  730. """Return the object as a ustar header block. If it cannot be
  731. represented this way, prepend a pax extended header sequence
  732. with supplement information.
  733. """
  734. info["magic"] = POSIX_MAGIC
  735. pax_headers = self.pax_headers.copy()
  736. # Test string fields for values that exceed the field length or cannot
  737. # be represented in ASCII encoding.
  738. for name, hname, length in (
  739. ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
  740. ("uname", "uname", 32), ("gname", "gname", 32)):
  741. if hname in pax_headers:
  742. # The pax header has priority.
  743. continue
  744. # Try to encode the string as ASCII.
  745. try:
  746. info[name].encode("ascii", "strict")
  747. except UnicodeEncodeError:
  748. pax_headers[hname] = info[name]
  749. continue
  750. if len(info[name]) > length:
  751. pax_headers[hname] = info[name]
  752. # Test number fields for values that exceed the field limit or values
  753. # that like to be stored as float.
  754. for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
  755. if name in pax_headers:
  756. # The pax header has priority. Avoid overflow.
  757. info[name] = 0
  758. continue
  759. val = info[name]
  760. if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
  761. pax_headers[name] = str(val)
  762. info[name] = 0
  763. # Create a pax extended header if necessary.
  764. if pax_headers:
  765. buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
  766. else:
  767. buf = b""
  768. return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
  769. @classmethod
  770. def create_pax_global_header(cls, pax_headers):
  771. """Return the object as a pax global header block sequence.
  772. """
  773. return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
  774. def _posix_split_name(self, name, encoding, errors):
  775. """Split a name longer than 100 chars into a prefix
  776. and a name part.
  777. """
  778. components = name.split("/")
  779. for i in range(1, len(components)):
  780. prefix = "/".join(components[:i])
  781. name = "/".join(components[i:])
  782. if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \
  783. len(name.encode(encoding, errors)) <= LENGTH_NAME:
  784. break
  785. else:
  786. raise ValueError("name is too long")
  787. return prefix, name
  788. @staticmethod
  789. def _create_header(info, format, encoding, errors):
  790. """Return a header block. info is a dictionary with file
  791. information, format must be one of the *_FORMAT constants.
  792. """
  793. parts = [
  794. stn(info.get("name", ""), 100, encoding, errors),
  795. itn(info.get("mode", 0) & 0o7777, 8, format),
  796. itn(info.get("uid", 0), 8, format),
  797. itn(info.get("gid", 0), 8, format),
  798. itn(info.get("size", 0), 12, format),
  799. itn(info.get("mtime", 0), 12, format),
  800. b" ", # checksum field
  801. info.get("type", REGTYPE),
  802. stn(info.get("linkname", ""), 100, encoding, errors),
  803. info.get("magic", POSIX_MAGIC),
  804. stn(info.get("uname", ""), 32, encoding, errors),
  805. stn(info.get("gname", ""), 32, encoding, errors),
  806. itn(info.get("devmajor", 0), 8, format),
  807. itn(info.get("devminor", 0), 8, format),
  808. stn(info.get("prefix", ""), 155, encoding, errors)
  809. ]
  810. buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
  811. chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
  812. buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
  813. return buf
  814. @staticmethod
  815. def _create_payload(payload):
  816. """Return the string payload filled with zero bytes
  817. up to the next 512 byte border.
  818. """
  819. blocks, remainder = divmod(len(payload), BLOCKSIZE)
  820. if remainder > 0:
  821. payload += (BLOCKSIZE - remainder) * NUL
  822. return payload
  823. @classmethod
  824. def _create_gnu_long_header(cls, name, type, encoding, errors):
  825. """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
  826. for name.
  827. """
  828. name = name.encode(encoding, errors) + NUL
  829. info = {}
  830. info["name"] = "././@LongLink"
  831. info["type"] = type
  832. info["size"] = len(name)
  833. info["magic"] = GNU_MAGIC
  834. # create extended header + name blocks.
  835. return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
  836. cls._create_payload(name)
  837. @classmethod
  838. def _create_pax_generic_header(cls, pax_headers, type, encoding):
  839. """Return a POSIX.1-2008 extended or global header sequence
  840. that contains a list of keyword, value pairs. The values
  841. must be strings.
  842. """
  843. # Check if one of the fields contains surrogate characters and thereby
  844. # forces hdrcharset=BINARY, see _proc_pax() for more information.
  845. binary = False
  846. for keyword, value in pax_headers.items():
  847. try:
  848. value.encode("utf-8", "strict")
  849. except UnicodeEncodeError:
  850. binary = True
  851. break
  852. records = b""
  853. if binary:
  854. # Put the hdrcharset field at the beginning of the header.
  855. records += b"21 hdrcharset=BINARY\n"
  856. for keyword, value in pax_headers.items():
  857. keyword = keyword.encode("utf-8")
  858. if binary:
  859. # Try to restore the original byte representation of `value'.
  860. # Needless to say, that the encoding must match the string.
  861. value = value.encode(encoding, "surrogateescape")
  862. else:
  863. value = value.encode("utf-8")
  864. l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
  865. n = p = 0
  866. while True:
  867. n = l + len(str(p))
  868. if n == p:
  869. break
  870. p = n
  871. records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
  872. # We use a hardcoded "././@PaxHeader" name like star does
  873. # instead of the one that POSIX recommends.
  874. info = {}
  875. info["name"] = "././@PaxHeader"
  876. info["type"] = type
  877. info["size"] = len(records)
  878. info["magic"] = POSIX_MAGIC
  879. # Create pax header + record blocks.
  880. return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
  881. cls._create_payload(records)
  882. @classmethod
  883. def frombuf(cls, buf, encoding, errors):
  884. """Construct a TarInfo object from a 512 byte bytes object.
  885. """
  886. if len(buf) == 0:
  887. raise EmptyHeaderError("empty header")
  888. if len(buf) != BLOCKSIZE:
  889. raise TruncatedHeaderError("truncated header")
  890. if buf.count(NUL) == BLOCKSIZE:
  891. raise EOFHeaderError("end of file header")
  892. chksum = nti(buf[148:156])
  893. if chksum not in calc_chksums(buf):
  894. raise InvalidHeaderError("bad checksum")
  895. obj = cls()
  896. obj.name = nts(buf[0:100], encoding, errors)
  897. obj.mode = nti(buf[100:108])
  898. obj.uid = nti(buf[108:116])
  899. obj.gid = nti(buf[116:124])
  900. obj.size = nti(buf[124:136])
  901. obj.mtime = nti(buf[136:148])
  902. obj.chksum = chksum
  903. obj.type = buf[156:157]
  904. obj.linkname = nts(buf[157:257], encoding, errors)
  905. obj.uname = nts(buf[265:297], encoding, errors)
  906. obj.gname = nts(buf[297:329], encoding, errors)
  907. obj.devmajor = nti(buf[329:337])
  908. obj.devminor = nti(buf[337:345])
  909. prefix = nts(buf[345:500], encoding, errors)
  910. # Old V7 tar format represents a directory as a regular
  911. # file with a trailing slash.
  912. if obj.type == AREGTYPE and obj.name.endswith("/"):
  913. obj.type = DIRTYPE
  914. # The old GNU sparse format occupies some of the unused
  915. # space in the buffer for up to 4 sparse structures.
  916. # Save the them for later processing in _proc_sparse().
  917. if obj.type == GNUTYPE_SPARSE:
  918. pos = 386
  919. structs = []
  920. for i in range(4):
  921. try:
  922. offset = nti(buf[pos:pos + 12])
  923. numbytes = nti(buf[pos + 12:pos + 24])
  924. except ValueError:
  925. break
  926. structs.append((offset, numbytes))
  927. pos += 24
  928. isextended = bool(buf[482])
  929. origsize = nti(buf[483:495])
  930. obj._sparse_structs = (structs, isextended, origsize)
  931. # Remove redundant slashes from directories.
  932. if obj.isdir():
  933. obj.name = obj.name.rstrip("/")
  934. # Reconstruct a ustar longname.
  935. if prefix and obj.type not in GNU_TYPES:
  936. obj.name = prefix + "/" + obj.name
  937. return obj
  938. @classmethod
  939. def fromtarfile(cls, tarfile):
  940. """Return the next TarInfo object from TarFile object
  941. tarfile.
  942. """
  943. buf = tarfile.fileobj.read(BLOCKSIZE)
  944. obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
  945. obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
  946. return obj._proc_member(tarfile)
  947. #--------------------------------------------------------------------------
  948. # The following are methods that are called depending on the type of a
  949. # member. The entry point is _proc_member() which can be overridden in a
  950. # subclass to add custom _proc_*() methods. A _proc_*() method MUST
  951. # implement the following
  952. # operations:
  953. # 1. Set self.offset_data to the position where the data blocks begin,
  954. # if there is data that follows.
  955. # 2. Set tarfile.offset to the position where the next member's header will
  956. # begin.
  957. # 3. Return self or another valid TarInfo object.
  958. def _proc_member(self, tarfile):
  959. """Choose the right processing method depending on
  960. the type and call it.
  961. """
  962. if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
  963. return self._proc_gnulong(tarfile)
  964. elif self.type == GNUTYPE_SPARSE:
  965. return self._proc_sparse(tarfile)
  966. elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
  967. return self._proc_pax(tarfile)
  968. else:
  969. return self._proc_builtin(tarfile)
  970. def _proc_builtin(self, tarfile):
  971. """Process a builtin type or an unknown type which
  972. will be treated as a regular file.
  973. """
  974. self.offset_data = tarfile.fileobj.tell()
  975. offset = self.offset_data
  976. if self.isreg() or self.type not in SUPPORTED_TYPES:
  977. # Skip the following data blocks.
  978. offset += self._block(self.size)
  979. tarfile.offset = offset
  980. # Patch the TarInfo object with saved global
  981. # header information.
  982. self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
  983. return self
  984. def _proc_gnulong(self, tarfile):
  985. """Process the blocks that hold a GNU longname
  986. or longlink member.
  987. """
  988. buf = tarfile.fileobj.read(self._block(self.size))
  989. # Fetch the next header and process it.
  990. try:
  991. next = self.fromtarfile(tarfile)
  992. except HeaderError:
  993. raise SubsequentHeaderError("missing or bad subsequent header")
  994. # Patch the TarInfo object from the next header with
  995. # the longname information.
  996. next.offset = self.offset
  997. if self.type == GNUTYPE_LONGNAME:
  998. next.name = nts(buf, tarfile.encoding, tarfile.errors)
  999. elif self.type == GNUTYPE_LONGLINK:
  1000. next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
  1001. return next
  1002. def _proc_sparse(self, tarfile):
  1003. """Process a GNU sparse header plus extra headers.
  1004. """
  1005. # We already collected some sparse structures in frombuf().
  1006. structs, isextended, origsize = self._sparse_structs
  1007. del self._sparse_structs
  1008. # Collect sparse structures from extended header blocks.
  1009. while isextended:
  1010. buf = tarfile.fileobj.read(BLOCKSIZE)
  1011. pos = 0
  1012. for i in range(21):
  1013. try:
  1014. offset = nti(buf[pos:pos + 12])
  1015. numbytes = nti(buf[pos + 12:pos + 24])
  1016. except ValueError:
  1017. break
  1018. if offset and numbytes:
  1019. structs.append((offset, numbytes))
  1020. pos += 24
  1021. isextended = bool(buf[504])
  1022. self.sparse = structs
  1023. self.offset_data = tarfile.fileobj.tell()
  1024. tarfile.offset = self.offset_data + self._block(self.size)
  1025. self.size = origsize
  1026. return self
  1027. def _proc_pax(self, tarfile):
  1028. """Process an extended or global header as described in
  1029. POSIX.1-2008.
  1030. """
  1031. # Read the header information.
  1032. buf = tarfile.fileobj.read(self._block(self.size))
  1033. # A pax header stores supplemental information for either
  1034. # the following file (extended) or all following files
  1035. # (global).
  1036. if self.type == XGLTYPE:
  1037. pax_headers = tarfile.pax_headers
  1038. else:
  1039. pax_headers = tarfile.pax_headers.copy()
  1040. # Check if the pax header contains a hdrcharset field. This tells us
  1041. # the encoding of the path, linkpath, uname and gname fields. Normally,
  1042. # these fields are UTF-8 encoded but since POSIX.1-2008 tar
  1043. # implementations are allowed to store them as raw binary strings if
  1044. # the translation to UTF-8 fails.
  1045. match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
  1046. if match is not None:
  1047. pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
  1048. # For the time being, we don't care about anything other than "BINARY".
  1049. # The only other value that is currently allowed by the standard is
  1050. # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
  1051. hdrcharset = pax_headers.get("hdrcharset")
  1052. if hdrcharset == "BINARY":
  1053. encoding = tarfile.encoding
  1054. else:
  1055. encoding = "utf-8"
  1056. # Parse pax header information. A record looks like that:
  1057. # "%d %s=%s\n" % (length, keyword, value). length is the size
  1058. # of the complete record including the length field itself and
  1059. # the newline. keyword and value are both UTF-8 encoded strings.
  1060. regex = re.compile(br"(\d+) ([^=]+)=")
  1061. pos = 0
  1062. while True:
  1063. match = regex.match(buf, pos)
  1064. if not match:
  1065. break
  1066. length, keyword = match.groups()
  1067. length = int(length)
  1068. value = buf[match.end(2) + 1:match.start(1) + length - 1]
  1069. # Normally, we could just use "utf-8" as the encoding and "strict"
  1070. # as the error handler, but we better not take the risk. For
  1071. # example, GNU tar <= 1.23 is known to store filenames it cannot
  1072. # translate to UTF-8 as raw strings (unfortunately without a
  1073. # hdrcharset=BINARY header).
  1074. # We first try the strict standard encoding, and if that fails we
  1075. # fall back on the user's encoding and error handler.
  1076. keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
  1077. tarfile.errors)
  1078. if keyword in PAX_NAME_FIELDS:
  1079. value = self._decode_pax_field(value, encoding, tarfile.encoding,
  1080. tarfile.errors)
  1081. else:
  1082. value = self._decode_pax_field(value, "utf-8", "utf-8",
  1083. tarfile.errors)
  1084. pax_headers[keyword] = value
  1085. pos += length
  1086. # Fetch the next header.
  1087. try:
  1088. next = self.fromtarfile(tarfile)
  1089. except HeaderError:
  1090. raise SubsequentHeaderError("missing or bad subsequent header")
  1091. # Process GNU sparse information.
  1092. if "GNU.sparse.map" in pax_headers:
  1093. # GNU extended sparse format version 0.1.
  1094. self._proc_gnusparse_01(next, pax_headers)
  1095. elif "GNU.sparse.size" in pax_headers:
  1096. # GNU extended sparse format version 0.0.
  1097. self._proc_gnusparse_00(next, pax_headers, buf)
  1098. elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
  1099. # GNU extended sparse format version 1.0.
  1100. self._proc_gnusparse_10(next, pax_headers, tarfile)
  1101. if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
  1102. # Patch the TarInfo object with the extended header info.
  1103. next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
  1104. next.offset = self.offset
  1105. if "size" in pax_headers:
  1106. # If the extended header replaces the size field,
  1107. # we need to recalculate the offset where the next
  1108. # header starts.
  1109. offset = next.offset_data
  1110. if next.isreg() or next.type not in SUPPORTED_TYPES:
  1111. offset += next._block(next.size)
  1112. tarfile.offset = offset
  1113. return next
  1114. def _proc_gnusparse_00(self, next, pax_headers, buf):
  1115. """Process a GNU tar extended sparse header, version 0.0.
  1116. """
  1117. offsets = []
  1118. for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
  1119. offsets.append(int(match.group(1)))
  1120. numbytes = []
  1121. for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
  1122. numbytes.append(int(match.group(1)))
  1123. next.sparse = list(zip(offsets, numbytes))
  1124. def _proc_gnusparse_01(self, next, pax_headers):
  1125. """Process a GNU tar extended sparse header, version 0.1.
  1126. """
  1127. sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
  1128. next.sparse = list(zip(sparse[::2], sparse[1::2]))
  1129. def _proc_gnusparse_10(self, next, pax_headers, tarfile):
  1130. """Process a GNU tar extended sparse header, version 1.0.
  1131. """
  1132. fields = None
  1133. sparse = []
  1134. buf = tarfile.fileobj.read(BLOCKSIZE)
  1135. fields, buf = buf.split(b"\n", 1)
  1136. fields = int(fields)
  1137. while len(sparse) < fields * 2:
  1138. if b"\n" not in buf:
  1139. buf += tarfile.fileobj.read(BLOCKSIZE)
  1140. number, buf = buf.split(b"\n", 1)
  1141. sparse.append(int(number))
  1142. next.offset_data = tarfile.fileobj.tell()
  1143. next.sparse = list(zip(sparse[::2], sparse[1::2]))
  1144. def _apply_pax_info(self, pax_headers, encoding, errors):
  1145. """Replace fields with supplemental information from a previous
  1146. pax extended or global header.
  1147. """
  1148. for keyword, value in pax_headers.items():
  1149. if keyword == "GNU.sparse.name":
  1150. setattr(self, "path", value)
  1151. elif keyword == "GNU.sparse.size":
  1152. setattr(self, "size", int(value))
  1153. elif keyword == "GNU.sparse.realsize":
  1154. setattr(self, "size", int(value))
  1155. elif keyword in PAX_FIELDS:
  1156. if keyword in PAX_NUMBER_FIELDS:
  1157. try:
  1158. value = PAX_NUMBER_FIELDS[keyword](value)
  1159. except ValueError:
  1160. value = 0
  1161. if keyword == "path":
  1162. value = value.rstrip("/")
  1163. setattr(self, keyword, value)
  1164. self.pax_headers = pax_headers.copy()
  1165. def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
  1166. """Decode a single field from a pax record.
  1167. """
  1168. try:
  1169. return value.decode(encoding, "strict")
  1170. except UnicodeDecodeError:
  1171. return value.decode(fallback_encoding, fallback_errors)
  1172. def _block(self, count):
  1173. """Round up a byte count by BLOCKSIZE and return it,
  1174. e.g. _block(834) => 1024.
  1175. """
  1176. blocks, remainder = divmod(count, BLOCKSIZE)
  1177. if remainder:
  1178. blocks += 1
  1179. return blocks * BLOCKSIZE
  1180. def isreg(self):
  1181. return self.type in REGULAR_TYPES
  1182. def isfile(self):
  1183. return self.isreg()
  1184. def isdir(self):
  1185. return self.type == DIRTYPE
  1186. def issym(self):
  1187. return self.type == SYMTYPE
  1188. def islnk(self):
  1189. return self.type == LNKTYPE
  1190. def ischr(self):
  1191. return self.type == CHRTYPE
  1192. def isblk(self):
  1193. return self.type == BLKTYPE
  1194. def isfifo(self):
  1195. return self.type == FIFOTYPE
  1196. def issparse(self):
  1197. return self.sparse is not None
  1198. def isdev(self):
  1199. return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
  1200. # class TarInfo
  1201. class TarFile(object):
  1202. """The TarFile Class provides an interface to tar archives.
  1203. """
  1204. debug = 0 # May be set from 0 (no msgs) to 3 (all msgs)
  1205. dereference = False # If true, add content of linked file to the
  1206. # tar file, else the link.
  1207. ignore_zeros = False # If true, skips empty or invalid blocks and
  1208. # continues processing.
  1209. errorlevel = 1 # If 0, fatal errors only appear in debug
  1210. # messages (if debug >= 0). If > 0, errors
  1211. # are passed to the caller as exceptions.
  1212. format = DEFAULT_FORMAT # The format to use when creating an archive.
  1213. encoding = ENCODING # Encoding for 8-bit character strings.
  1214. errors = None # Error handler for unicode conversion.
  1215. tarinfo = TarInfo # The default TarInfo class to use.
  1216. fileobject = ExFileObject # The file-object for extractfile().
  1217. def __init__(self, name=None, mode="r", fileobj=None, format=None,
  1218. tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
  1219. errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None):
  1220. """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
  1221. read from an existing archive, 'a' to append data to an existing
  1222. file or 'w' to create a new file overwriting an existing one. `mode'
  1223. defaults to 'r'.
  1224. If `fileobj' is given, it is used for reading or writing data. If it
  1225. can be determined, `mode' is overridden by `fileobj's mode.
  1226. `fileobj' is not closed, when TarFile is closed.
  1227. """
  1228. modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
  1229. if mode not in modes:
  1230. raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
  1231. self.mode = mode
  1232. self._mode = modes[mode]
  1233. if not fileobj:
  1234. if self.mode == "a" and not os.path.exists(name):
  1235. # Create nonexistent files in append mode.
  1236. self.mode = "w"
  1237. self._mode = "wb"
  1238. fileobj = bltn_open(name, self._mode)
  1239. self._extfileobj = False
  1240. else:
  1241. if (name is None and hasattr(fileobj, "name") and
  1242. isinstance(fileobj.name, (str, bytes))):
  1243. name = fileobj.name
  1244. if hasattr(fileobj, "mode"):
  1245. self._mode = fileobj.mode
  1246. self._extfileobj = True
  1247. self.name = os.path.abspath(name) if name else None
  1248. self.fileobj = fileobj
  1249. # Init attributes.
  1250. if format is not None:
  1251. self.format = format
  1252. if tarinfo is not None:
  1253. self.tarinfo = tarinfo
  1254. if dereference is not None:
  1255. self.dereference = dereference
  1256. if ignore_zeros is not None:
  1257. self.ignore_zeros = ignore_zeros
  1258. if encoding is not None:
  1259. self.encoding = encoding
  1260. self.errors = errors
  1261. if pax_headers is not None and self.format == PAX_FORMAT:
  1262. self.pax_headers = pax_headers
  1263. else:
  1264. self.pax_headers = {}
  1265. if debug is not None:
  1266. self.debug = debug
  1267. if errorlevel is not None:
  1268. self.errorlevel = errorlevel
  1269. # Init datastructures.
  1270. self.closed = False
  1271. self.members = [] # list of members as TarInfo objects
  1272. self._loaded = False # flag if all members have been read
  1273. self.offset = self.fileobj.tell()
  1274. # current position in the archive file
  1275. self.inodes = {} # dictionary caching the inodes of
  1276. # archive members already added
  1277. try:
  1278. if self.mode == "r":
  1279. self.firstmember = None
  1280. self.firstmember = self.next()
  1281. if self.mode == "a":
  1282. # Move to the end of the archive,
  1283. # before the first empty block.
  1284. while True:
  1285. self.fileobj.seek(self.offset)
  1286. try:
  1287. tarinfo = self.tarinfo.fromtarfile(self)
  1288. self.members.append(tarinfo)
  1289. except EOFHeaderError:
  1290. self.fileobj.seek(self.offset)
  1291. break
  1292. except HeaderError as e:
  1293. raise ReadError(str(e))
  1294. if self.mode in ("a", "w", "x"):
  1295. self._loaded = True
  1296. if self.pax_headers:
  1297. buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
  1298. self.fileobj.write(buf)
  1299. self.offset += len(buf)
  1300. except:
  1301. if not self._extfileobj:
  1302. self.fileobj.close()
  1303. self.closed = True
  1304. raise
  1305. #--------------------------------------------------------------------------
  1306. # Below are the classmethods which act as alternate constructors to the
  1307. # TarFile class. The open() method is the only one that is needed for
  1308. # public use; it is the "super"-constructor and is able to select an
  1309. # adequate "sub"-constructor for a particular compression using the mapping
  1310. # from OPEN_METH.
  1311. #
  1312. # This concept allows one to subclass TarFile without losing the comfort of
  1313. # the super-constructor. A sub-constructor is registered and made available
  1314. # by adding it to the mapping in OPEN_METH.
  1315. @classmethod
  1316. def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
  1317. """Open a tar archive for reading, writing or appending. Return
  1318. an appropriate TarFile class.
  1319. mode:
  1320. 'r' or 'r:*' open for reading with transparent compression
  1321. 'r:' open for reading exclusively uncompressed
  1322. 'r:gz' open for reading with gzip compression
  1323. 'r:bz2' open for reading with bzip2 compression
  1324. 'r:xz' open for reading with lzma compression
  1325. 'a' or 'a:' open for appending, creating the file if necessary
  1326. 'w' or 'w:' open for writing without compression
  1327. 'w:gz' open for writing with gzip compression
  1328. 'w:bz2' open for writing with bzip2 compression
  1329. 'w:xz' open for writing with lzma compression
  1330. 'x' or 'x:' create a tarfile exclusively without compression, raise
  1331. an exception if the file is already created
  1332. 'x:gz' create a gzip compressed tarfile, raise an exception
  1333. if the file is already created
  1334. 'x:bz2' create a bzip2 compressed tarfile, raise an exception
  1335. if the file is already created
  1336. 'x:xz' create an lzma compressed tarfile, raise an exception
  1337. if the file is already created
  1338. 'r|*' open a stream of tar blocks with transparent compression
  1339. 'r|' open an uncompressed stream of tar blocks for reading
  1340. 'r|gz' open a gzip compressed stream of tar blocks
  1341. 'r|bz2' open a bzip2 compressed stream of tar blocks
  1342. 'r|xz' open an lzma compressed stream of tar blocks
  1343. 'w|' open an uncompressed stream for writing
  1344. 'w|gz' open a gzip compressed stream for writing
  1345. 'w|bz2' open a bzip2 compressed stream for writing
  1346. 'w|xz' open an lzma compressed stream for writing
  1347. """
  1348. if not name and not fileobj:
  1349. raise ValueError("nothing to open")
  1350. if mode in ("r", "r:*"):
  1351. # Find out which *open() is appropriate for opening the file.
  1352. for comptype in cls.OPEN_METH:
  1353. func = getattr(cls, cls.OPEN_METH[comptype])
  1354. if fileobj is not None:
  1355. saved_pos = fileobj.tell()
  1356. try:
  1357. return func(name, "r", fileobj, **kwargs)
  1358. except (ReadError, CompressionError) as e:
  1359. if fileobj is not None:
  1360. fileobj.seek(saved_pos)
  1361. continue
  1362. raise ReadError("file could not be opened successfully")
  1363. elif ":" in mode:
  1364. filemode, comptype = mode.split(":", 1)
  1365. filemode = filemode or "r"
  1366. comptype = comptype or "tar"
  1367. # Select the *open() function according to
  1368. # given compression.
  1369. if comptype in cls.OPEN_METH:
  1370. func = getattr(cls, cls.OPEN_METH[comptype])
  1371. else:
  1372. raise CompressionError("unknown compression type %r" % comptype)
  1373. return func(name, filemode, fileobj, **kwargs)
  1374. elif "|" in mode:
  1375. filemode, comptype = mode.split("|", 1)
  1376. filemode = filemode or "r"
  1377. comptype = comptype or "tar"
  1378. if filemode not in ("r", "w"):
  1379. raise ValueError("mode must be 'r' or 'w'")
  1380. stream = _Stream(name, filemode, comptype, fileobj, bufsize)
  1381. try:
  1382. t = cls(name, filemode, stream, **kwargs)
  1383. except:
  1384. stream.close()
  1385. raise
  1386. t._extfileobj = False
  1387. return t
  1388. elif mode in ("a", "w", "x"):
  1389. return cls.taropen(name, mode, fileobj, **kwargs)
  1390. raise ValueError("undiscernible mode")
  1391. @classmethod
  1392. def taropen(cls, name, mode="r", fileobj=None, **kwargs):
  1393. """Open uncompressed tar archive name for reading or writing.
  1394. """
  1395. if mode not in ("r", "a", "w", "x"):
  1396. raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
  1397. return cls(name, mode, fileobj, **kwargs)
    @classmethod
    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open gzip compressed tar archive name for reading or writing.
        Appending is not allowed.

        `compresslevel' is passed straight through to gzip.GzipFile.
        Raises CompressionError if the gzip module is missing, and
        ReadError when reading something that is not a gzip file.
        """
        if mode not in ("r", "w", "x"):
            raise ValueError("mode must be 'r', 'w' or 'x'")

        try:
            import gzip
            # Touch the attribute so a dummy/stripped gzip module is
            # rejected here instead of failing later.
            gzip.GzipFile
        except (ImportError, AttributeError):
            raise CompressionError("gzip module is not available")

        try:
            fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj)
        except OSError:
            # GzipFile raises OSError for a bad magic number when an
            # external file object was supplied; report it as ReadError.
            if fileobj is not None and mode == 'r':
                raise ReadError("not a gzip file")
            raise

        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except OSError:
            # Valid gzip data, but not a tar archive inside (or I/O error).
            fileobj.close()
            if mode == 'r':
                raise ReadError("not a gzip file")
            raise
        except:
            # Any other failure: close the wrapper we created, re-raise.
            fileobj.close()
            raise
        # The GzipFile was created here, so TarFile.close() must close it.
        t._extfileobj = False
        return t
    @classmethod
    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open bzip2 compressed tar archive name for reading or writing.
        Appending is not allowed.

        Raises CompressionError if the bz2 module is missing, and
        ReadError when reading something that is not a bzip2 file.
        """
        if mode not in ("r", "w", "x"):
            raise ValueError("mode must be 'r', 'w' or 'x'")

        try:
            import bz2
        except ImportError:
            raise CompressionError("bz2 module is not available")

        # BZ2File accepts either an open file object or a filename.
        fileobj = bz2.BZ2File(fileobj or name, mode,
                              compresslevel=compresslevel)

        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except (OSError, EOFError):
            # Decompression failed or the stream ended prematurely.
            fileobj.close()
            if mode == 'r':
                raise ReadError("not a bzip2 file")
            raise
        except:
            fileobj.close()
            raise
        # The BZ2File was created here, so TarFile.close() must close it.
        t._extfileobj = False
        return t
    @classmethod
    def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
        """Open lzma compressed tar archive name for reading or writing.
        Appending is not allowed.

        `preset' selects the lzma compression level (None = default).
        Raises CompressionError if the lzma module is missing, and
        ReadError when reading something that is not an lzma file.
        """
        if mode not in ("r", "w", "x"):
            raise ValueError("mode must be 'r', 'w' or 'x'")

        try:
            import lzma
        except ImportError:
            raise CompressionError("lzma module is not available")

        # LZMAFile accepts either an open file object or a filename.
        fileobj = lzma.LZMAFile(fileobj or name, mode, preset=preset)

        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except (lzma.LZMAError, EOFError):
            # Corrupt/foreign data or a truncated stream.
            fileobj.close()
            if mode == 'r':
                raise ReadError("not an lzma file")
            raise
        except:
            fileobj.close()
            raise
        # The LZMAFile was created here, so TarFile.close() must close it.
        t._extfileobj = False
        return t
    # All *open() methods are registered here.
    # Maps the comptype part of a mode string ("r:gz" etc.) to the name of
    # the classmethod that handles it; open() resolves the name with
    # getattr(), so subclasses may override individual handlers.
    OPEN_METH = {
        "tar": "taropen",   # uncompressed tar
        "gz":  "gzopen",    # gzip compressed tar
        "bz2": "bz2open",   # bzip2 compressed tar
        "xz":  "xzopen"     # lzma compressed tar
    }
  1484. #--------------------------------------------------------------------------
  1485. # The public methods which TarFile provides:
    def close(self):
        """Close the TarFile. In write-mode, two finishing zero blocks are
        appended to the archive.
        """
        if self.closed:
            return
        self.closed = True
        try:
            if self.mode in ("a", "w", "x"):
                # End-of-archive marker: two consecutive zero blocks.
                self.fileobj.write(NUL * (BLOCKSIZE * 2))
                self.offset += (BLOCKSIZE * 2)
                # fill up the end with zero-blocks
                # (like option -b20 for tar does)
                blocks, remainder = divmod(self.offset, RECORDSIZE)
                if remainder > 0:
                    self.fileobj.write(NUL * (RECORDSIZE - remainder))
        finally:
            # Only close file objects we opened ourselves; an external
            # fileobj remains usable by the caller.
            if not self._extfileobj:
                self.fileobj.close()
  1505. def getmember(self, name):
  1506. """Return a TarInfo object for member `name'. If `name' can not be
  1507. found in the archive, KeyError is raised. If a member occurs more
  1508. than once in the archive, its last occurrence is assumed to be the
  1509. most up-to-date version.
  1510. """
  1511. tarinfo = self._getmember(name)
  1512. if tarinfo is None:
  1513. raise KeyError("filename %r not found" % name)
  1514. return tarinfo
  1515. def getmembers(self):
  1516. """Return the members of the archive as a list of TarInfo objects. The
  1517. list has the same order as the members in the archive.
  1518. """
  1519. self._check()
  1520. if not self._loaded: # if we want to obtain a list of
  1521. self._load() # all members, we first have to
  1522. # scan the whole archive.
  1523. return self.members
  1524. def getnames(self):
  1525. """Return the members of the archive as a list of their names. It has
  1526. the same order as the list returned by getmembers().
  1527. """
  1528. return [tarinfo.name for tarinfo in self.getmembers()]
    def gettarinfo(self, name=None, arcname=None, fileobj=None):
        """Create a TarInfo object from the result of os.stat or equivalent
        on an existing file. The file is either named by `name', or
        specified as a file object `fileobj' with a file descriptor. If
        given, `arcname' specifies an alternative name for the file in the
        archive, otherwise, the name is taken from the 'name' attribute of
        'fileobj', or the 'name' argument. The name should be a text
        string.
        """
        self._check("awx")

        # When fileobj is given, replace name by
        # fileobj's real name.
        if fileobj is not None:
            name = fileobj.name

        # Building the name of the member in the archive.
        # Backward slashes are converted to forward slashes,
        # Absolute paths are turned to relative paths.
        if arcname is None:
            arcname = name
        drv, arcname = os.path.splitdrive(arcname)
        arcname = arcname.replace(os.sep, "/")
        arcname = arcname.lstrip("/")

        # Now, fill the TarInfo object with
        # information specific for the file.
        tarinfo = self.tarinfo()
        tarinfo.tarfile = self  # Not needed

        # Use os.stat or os.lstat, depending on platform
        # and if symlinks shall be resolved.
        if fileobj is None:
            if hasattr(os, "lstat") and not self.dereference:
                statres = os.lstat(name)
            else:
                statres = os.stat(name)
        else:
            statres = os.fstat(fileobj.fileno())
        linkname = ""

        stmd = statres.st_mode
        if stat.S_ISREG(stmd):
            inode = (statres.st_ino, statres.st_dev)
            if not self.dereference and statres.st_nlink > 1 and \
                    inode in self.inodes and arcname != self.inodes[inode]:
                # Is it a hardlink to an already
                # archived file?
                type = LNKTYPE
                linkname = self.inodes[inode]
            else:
                # The inode is added only if its valid.
                # For win32 it is always 0.
                type = REGTYPE
                if inode[0]:
                    self.inodes[inode] = arcname
        elif stat.S_ISDIR(stmd):
            type = DIRTYPE
        elif stat.S_ISFIFO(stmd):
            type = FIFOTYPE
        elif stat.S_ISLNK(stmd):
            type = SYMTYPE
            linkname = os.readlink(name)
        elif stat.S_ISCHR(stmd):
            type = CHRTYPE
        elif stat.S_ISBLK(stmd):
            type = BLKTYPE
        else:
            # Anything else (e.g. a socket) cannot be represented in a
            # tar archive; signal that by returning None.
            return None

        # Fill the TarInfo object with all
        # information we can get.
        tarinfo.name = arcname
        tarinfo.mode = stmd
        tarinfo.uid = statres.st_uid
        tarinfo.gid = statres.st_gid
        if type == REGTYPE:
            tarinfo.size = statres.st_size
        else:
            # Only regular files carry payload data.
            tarinfo.size = 0
        tarinfo.mtime = statres.st_mtime
        tarinfo.type = type
        tarinfo.linkname = linkname
        # Resolve numeric ids to names where pwd/grp exist (posix);
        # unknown ids simply keep the defaults.
        if pwd:
            try:
                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
            except KeyError:
                pass
        if grp:
            try:
                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
            except KeyError:
                pass

        if type in (CHRTYPE, BLKTYPE):
            # Record device numbers for character and block devices.
            if hasattr(os, "major") and hasattr(os, "minor"):
                tarinfo.devmajor = os.major(statres.st_rdev)
                tarinfo.devminor = os.minor(statres.st_rdev)
        return tarinfo
    def list(self, verbose=True, *, members=None):
        """Print a table of contents to sys.stdout. If `verbose' is False, only
        the names of the members are printed. If it is True, an `ls -l'-like
        output is produced. `members' is optional and must be a subset of the
        list returned by getmembers().
        """
        self._check()

        if members is None:
            members = self
        for tarinfo in members:
            if verbose:
                # Permissions, owner/group, size (or device numbers) and
                # timestamp, roughly matching `ls -l` output.
                _safe_print(stat.filemode(tarinfo.mode))
                _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
                                       tarinfo.gname or tarinfo.gid))
                if tarinfo.ischr() or tarinfo.isblk():
                    _safe_print("%10s" %
                                ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
                else:
                    _safe_print("%10d" % tarinfo.size)
                _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
                            % time.localtime(tarinfo.mtime)[:6])

            # Directories get a trailing slash, like `ls -p`.
            _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))

            if verbose:
                # Show link targets like `ls` does.
                if tarinfo.issym():
                    _safe_print("-> " + tarinfo.linkname)
                if tarinfo.islnk():
                    _safe_print("link to " + tarinfo.linkname)
            print()
  1649. def add(self, name, arcname=None, recursive=True, exclude=None, *, filter=None):
  1650. """Add the file `name' to the archive. `name' may be any type of file
  1651. (directory, fifo, symbolic link, etc.). If given, `arcname'
  1652. specifies an alternative name for the file in the archive.
  1653. Directories are added recursively by default. This can be avoided by
  1654. setting `recursive' to False. `exclude' is a function that should
  1655. return True for each filename to be excluded. `filter' is a function
  1656. that expects a TarInfo object argument and returns the changed
  1657. TarInfo object, if it returns None the TarInfo object will be
  1658. excluded from the archive.
  1659. """
  1660. self._check("awx")
  1661. if arcname is None:
  1662. arcname = name
  1663. # Exclude pathnames.
  1664. if exclude is not None:
  1665. import warnings
  1666. warnings.warn("use the filter argument instead",
  1667. DeprecationWarning, 2)
  1668. if exclude(name):
  1669. self._dbg(2, "tarfile: Excluded %r" % name)
  1670. return
  1671. # Skip if somebody tries to archive the archive...
  1672. if self.name is not None and os.path.abspath(name) == self.name:
  1673. self._dbg(2, "tarfile: Skipped %r" % name)
  1674. return
  1675. self._dbg(1, name)
  1676. # Create a TarInfo object from the file.
  1677. tarinfo = self.gettarinfo(name, arcname)
  1678. if tarinfo is None:
  1679. self._dbg(1, "tarfile: Unsupported type %r" % name)
  1680. return
  1681. # Change or exclude the TarInfo object.
  1682. if filter is not None:
  1683. tarinfo = filter(tarinfo)
  1684. if tarinfo is None:
  1685. self._dbg(2, "tarfile: Excluded %r" % name)
  1686. return
  1687. # Append the tar header and data to the archive.
  1688. if tarinfo.isreg():
  1689. with bltn_open(name, "rb") as f:
  1690. self.addfile(tarinfo, f)
  1691. elif tarinfo.isdir():
  1692. self.addfile(tarinfo)
  1693. if recursive:
  1694. for f in os.listdir(name):
  1695. self.add(os.path.join(name, f), os.path.join(arcname, f),
  1696. recursive, exclude, filter=filter)
  1697. else:
  1698. self.addfile(tarinfo)
    def addfile(self, tarinfo, fileobj=None):
        """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
        given, it should be a binary file, and tarinfo.size bytes are read
        from it and added to the archive. You can create TarInfo objects
        directly, or by using gettarinfo().
        """
        self._check("awx")

        # Work on a copy so the caller's TarInfo is not mutated.
        tarinfo = copy.copy(tarinfo)

        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
        self.fileobj.write(buf)
        self.offset += len(buf)

        # If there's data to follow, append it.
        if fileobj is not None:
            copyfileobj(fileobj, self.fileobj, tarinfo.size)
            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
            if remainder > 0:
                # Pad the last data block with NULs up to a full block.
                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
                blocks += 1
            self.offset += blocks * BLOCKSIZE

        self.members.append(tarinfo)
    def extractall(self, path=".", members=None, *, numeric_owner=False):
        """Extract all members from the archive to the current working
        directory and set owner, modification time and permissions on
        directories afterwards. `path' specifies a different directory
        to extract to. `members' is optional and must be a subset of the
        list returned by getmembers(). If `numeric_owner` is True, only
        the numbers for user/group names are used and not the names.
        """
        directories = []

        if members is None:
            members = self

        for tarinfo in members:
            if tarinfo.isdir():
                # Extract directories with a safe mode.
                directories.append(tarinfo)
                tarinfo = copy.copy(tarinfo)
                tarinfo.mode = 0o700
            # Do not set_attrs directories, as we will do that further down
            self.extract(tarinfo, path, set_attrs=not tarinfo.isdir(),
                         numeric_owner=numeric_owner)

        # Reverse sort directories.
        # Deepest paths first, so attributes are applied to children
        # before their parents (writing into a parent would otherwise
        # clobber its mtime/mode).
        directories.sort(key=lambda a: a.name)
        directories.reverse()

        # Set correct owner, mtime and filemode on directories.
        for tarinfo in directories:
            dirpath = os.path.join(path, tarinfo.name)
            try:
                self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
                self.utime(tarinfo, dirpath)
                self.chmod(tarinfo, dirpath)
            except ExtractError as e:
                # errorlevel <= 1: attribute failures are only logged.
                if self.errorlevel > 1:
                    raise
                else:
                    self._dbg(1, "tarfile: %s" % e)
    def extract(self, member, path="", set_attrs=True, *, numeric_owner=False):
        """Extract a member from the archive to the current working directory,
        using its full name. Its file information is extracted as accurately
        as possible. `member' may be a filename or a TarInfo object. You can
        specify a different directory using `path'. File attributes (owner,
        mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
        is True, only the numbers for user/group names are used and not
        the names.
        """
        self._check("r")

        if isinstance(member, str):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member

        # Prepare the link target for makelink().
        if tarinfo.islnk():
            tarinfo._link_target = os.path.join(path, tarinfo.linkname)

        try:
            self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
                                 set_attrs=set_attrs,
                                 numeric_owner=numeric_owner)
        except OSError as e:
            # errorlevel 0: OS errors are logged instead of raised.
            if self.errorlevel > 0:
                raise
            else:
                if e.filename is None:
                    self._dbg(1, "tarfile: %s" % e.strerror)
                else:
                    self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
        except ExtractError as e:
            # errorlevel <= 1: non-fatal extraction problems are logged.
            if self.errorlevel > 1:
                raise
            else:
                self._dbg(1, "tarfile: %s" % e)
    def extractfile(self, member):
        """Extract a member from the archive as a file object. `member' may be
        a filename or a TarInfo object. If `member' is a regular file or a
        link, an io.BufferedReader object is returned. Otherwise, None is
        returned.
        """
        self._check("r")

        if isinstance(member, str):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member

        if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
            # Members with unknown types are treated as regular files.
            return self.fileobject(self, tarinfo)

        elif tarinfo.islnk() or tarinfo.issym():
            if isinstance(self.fileobj, _Stream):
                # A small but ugly workaround for the case that someone tries
                # to extract a (sym)link as a file-object from a non-seekable
                # stream of tar blocks.
                raise StreamError("cannot extract (sym)link as file object")
            else:
                # A (sym)link's file object is its target's file object.
                return self.extractfile(self._find_link_target(tarinfo))
        else:
            # If there's no data associated with the member (directory, chrdev,
            # blkdev, etc.), return None instead of a file object.
            return None
    def _extract_member(self, tarinfo, targetpath, set_attrs=True,
                        numeric_owner=False):
        """Extract the TarInfo object tarinfo to a physical
        file called targetpath.
        """
        # Fetch the TarInfo object for the given name
        # and build the destination pathname, replacing
        # forward slashes to platform specific separators.
        targetpath = targetpath.rstrip("/")
        targetpath = targetpath.replace("/", os.sep)

        # Create all upper directories.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            # Create directories that are not part of the archive with
            # default permissions.
            os.makedirs(upperdirs)

        if tarinfo.islnk() or tarinfo.issym():
            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
        else:
            self._dbg(1, tarinfo.name)

        # Dispatch on the member type; the make*() methods can be
        # overridden by subclasses.
        if tarinfo.isreg():
            self.makefile(tarinfo, targetpath)
        elif tarinfo.isdir():
            self.makedir(tarinfo, targetpath)
        elif tarinfo.isfifo():
            self.makefifo(tarinfo, targetpath)
        elif tarinfo.ischr() or tarinfo.isblk():
            self.makedev(tarinfo, targetpath)
        elif tarinfo.islnk() or tarinfo.issym():
            self.makelink(tarinfo, targetpath)
        elif tarinfo.type not in SUPPORTED_TYPES:
            self.makeunknown(tarinfo, targetpath)
        else:
            self.makefile(tarinfo, targetpath)

        if set_attrs:
            self.chown(tarinfo, targetpath, numeric_owner)
            if not tarinfo.issym():
                # mode/mtime are not applied to symlinks themselves.
                self.chmod(tarinfo, targetpath)
                self.utime(tarinfo, targetpath)
  1854. #--------------------------------------------------------------------------
  1855. # Below are the different file methods. They are called via
  1856. # _extract_member() when extract() is called. They can be replaced in a
  1857. # subclass to implement other functionality.
    def makedir(self, tarinfo, targetpath):
        """Make a directory called targetpath.
        """
        try:
            # Use a safe mode for the directory, the real mode is set
            # later in _extract_member().
            os.mkdir(targetpath, 0o700)
        except FileExistsError:
            # The directory may already exist (e.g. created implicitly
            # for an earlier child member); that is fine.
            pass
    def makefile(self, tarinfo, targetpath):
        """Make a file called targetpath.
        """
        source = self.fileobj
        # Position the archive at the member's data payload.
        source.seek(tarinfo.offset_data)
        with bltn_open(targetpath, "wb") as target:
            if tarinfo.sparse is not None:
                # Write only the recorded data segments, seeking over the
                # holes, then truncate to the nominal size so the holes at
                # the end of the file are materialized too.
                for offset, size in tarinfo.sparse:
                    target.seek(offset)
                    copyfileobj(source, target, size, ReadError)
                target.seek(tarinfo.size)
                target.truncate()
            else:
                copyfileobj(source, target, tarinfo.size, ReadError)
    def makeunknown(self, tarinfo, targetpath):
        """Make a file from a TarInfo object with an unknown type
        at targetpath.
        """
        # Best effort: extract the payload as a regular file and warn.
        self.makefile(tarinfo, targetpath)
        self._dbg(1, "tarfile: Unknown file type %r, " \
                     "extracted as regular file." % tarinfo.type)
  1888. def makefifo(self, tarinfo, targetpath):
  1889. """Make a fifo called targetpath.
  1890. """
  1891. if hasattr(os, "mkfifo"):
  1892. os.mkfifo(targetpath)
  1893. else:
  1894. raise ExtractError("fifo not supported by system")
  1895. def makedev(self, tarinfo, targetpath):
  1896. """Make a character or block device called targetpath.
  1897. """
  1898. if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
  1899. raise ExtractError("special devices not supported by system")
  1900. mode = tarinfo.mode
  1901. if tarinfo.isblk():
  1902. mode |= stat.S_IFBLK
  1903. else:
  1904. mode |= stat.S_IFCHR
  1905. os.mknod(targetpath, mode,
  1906. os.makedev(tarinfo.devmajor, tarinfo.devminor))
    def makelink(self, tarinfo, targetpath):
        """Make a (symbolic) link called targetpath. If it cannot be created
        (platform limitation), we try to make a copy of the referenced file
        instead of a link.
        """
        try:
            # For systems that support symbolic and hard links.
            if tarinfo.issym():
                os.symlink(tarinfo.linkname, targetpath)
            else:
                # See extract().
                if os.path.exists(tarinfo._link_target):
                    os.link(tarinfo._link_target, targetpath)
                else:
                    # The hard link target is not on disk; extract the
                    # archived target member directly instead.
                    self._extract_member(self._find_link_target(tarinfo),
                                         targetpath)
        except symlink_exception:
            # Platform cannot create links at all: fall back to copying
            # the referenced member's content.
            try:
                self._extract_member(self._find_link_target(tarinfo),
                                     targetpath)
            except KeyError:
                raise ExtractError("unable to resolve link inside archive")
    def chown(self, tarinfo, targetpath, numeric_owner):
        """Set owner of targetpath according to tarinfo. If numeric_owner
        is True, use .gid/.uid instead of .gname/.uname.
        """
        if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
            # We have to be root to do so.
            if numeric_owner:
                g = tarinfo.gid
                u = tarinfo.uid
            else:
                # Prefer the names stored in the archive; fall back to
                # the stored numeric ids if they are unknown here.
                try:
                    g = grp.getgrnam(tarinfo.gname)[2]
                except KeyError:
                    g = tarinfo.gid
                try:
                    u = pwd.getpwnam(tarinfo.uname)[2]
                except KeyError:
                    u = tarinfo.uid
            try:
                if tarinfo.issym() and hasattr(os, "lchown"):
                    # Change the link itself, not its target.
                    os.lchown(targetpath, u, g)
                else:
                    os.chown(targetpath, u, g)
            except OSError as e:
                raise ExtractError("could not change owner")
  1954. def chmod(self, tarinfo, targetpath):
  1955. """Set file permissions of targetpath according to tarinfo.
  1956. """
  1957. if hasattr(os, 'chmod'):
  1958. try:
  1959. os.chmod(targetpath, tarinfo.mode)
  1960. except OSError as e:
  1961. raise ExtractError("could not change mode")
  1962. def utime(self, tarinfo, targetpath):
  1963. """Set modification time of targetpath according to tarinfo.
  1964. """
  1965. if not hasattr(os, 'utime'):
  1966. return
  1967. try:
  1968. os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
  1969. except OSError as e:
  1970. raise ExtractError("could not change modification time")
  1971. #--------------------------------------------------------------------------
    def next(self):
        """Return the next member of the archive as a TarInfo object, when
        TarFile is opened for reading. Return None if there is no more
        available.
        """
        self._check("ra")
        if self.firstmember is not None:
            # A member was read ahead (e.g. while probing the archive);
            # hand it out exactly once.
            m = self.firstmember
            self.firstmember = None
            return m

        # Advance the file pointer.
        if self.offset != self.fileobj.tell():
            # Seek to one byte before the expected header and read it so
            # a truncated archive is detected here, with a clear error,
            # rather than during header parsing.
            self.fileobj.seek(self.offset - 1)
            if not self.fileobj.read(1):
                raise ReadError("unexpected end of data")

        # Read the next block.
        tarinfo = None
        while True:
            try:
                tarinfo = self.tarinfo.fromtarfile(self)
            except EOFHeaderError as e:
                # With ignore_zeros, skip stray zero blocks and keep going.
                if self.ignore_zeros:
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
            except InvalidHeaderError as e:
                if self.ignore_zeros:
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
                elif self.offset == 0:
                    # The very first header is invalid: not a tar file.
                    raise ReadError(str(e))
            except EmptyHeaderError:
                if self.offset == 0:
                    raise ReadError("empty file")
            except TruncatedHeaderError as e:
                if self.offset == 0:
                    raise ReadError(str(e))
            except SubsequentHeaderError as e:
                raise ReadError(str(e))
            break

        if tarinfo is not None:
            self.members.append(tarinfo)
        else:
            # End of archive: remember that everything has been read.
            self._loaded = True

        return tarinfo
  2018. #--------------------------------------------------------------------------
  2019. # Little helper methods:
  2020. def _getmember(self, name, tarinfo=None, normalize=False):
  2021. """Find an archive member by name from bottom to top.
  2022. If tarinfo is given, it is used as the starting point.
  2023. """
  2024. # Ensure that all members have been loaded.
  2025. members = self.getmembers()
  2026. # Limit the member search list up to tarinfo.
  2027. if tarinfo is not None:
  2028. members = members[:members.index(tarinfo)]
  2029. if normalize:
  2030. name = os.path.normpath(name)
  2031. for member in reversed(members):
  2032. if normalize:
  2033. member_name = os.path.normpath(member.name)
  2034. else:
  2035. member_name = member.name
  2036. if name == member_name:
  2037. return member
  2038. def _load(self):
  2039. """Read through the entire archive file and look for readable
  2040. members.
  2041. """
  2042. while True:
  2043. tarinfo = self.next()
  2044. if tarinfo is None:
  2045. break
  2046. self._loaded = True
  2047. def _check(self, mode=None):
  2048. """Check if TarFile is still open, and if the operation's mode
  2049. corresponds to TarFile's mode.
  2050. """
  2051. if self.closed:
  2052. raise OSError("%s is closed" % self.__class__.__name__)
  2053. if mode is not None and self.mode not in mode:
  2054. raise OSError("bad operation for mode %r" % self.mode)
  2055. def _find_link_target(self, tarinfo):
  2056. """Find the target member of a symlink or hardlink member in the
  2057. archive.
  2058. """
  2059. if tarinfo.issym():
  2060. # Always search the entire archive.
  2061. linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
  2062. limit = None
  2063. else:
  2064. # Search the archive before the link, because a hard link is
  2065. # just a reference to an already archived file.
  2066. linkname = tarinfo.linkname
  2067. limit = tarinfo
  2068. member = self._getmember(linkname, tarinfo=limit, normalize=True)
  2069. if member is None:
  2070. raise KeyError("linkname %r not found" % linkname)
  2071. return member
  2072. def __iter__(self):
  2073. """Provide an iterator object.
  2074. """
  2075. if self._loaded:
  2076. return iter(self.members)
  2077. else:
  2078. return TarIter(self)
  2079. def _dbg(self, level, msg):
  2080. """Write debugging output to sys.stderr.
  2081. """
  2082. if level <= self.debug:
  2083. print(msg, file=sys.stderr)
    def __enter__(self):
        """Context-manager entry; fails if the archive is already closed."""
        self._check()
        return self
    def __exit__(self, type, value, traceback):
        """Context-manager exit; finalizes or abandons the archive."""
        if type is None:
            # Normal exit: write end-of-archive blocks and padding.
            self.close()
        else:
            # An exception occurred. We must not call close() because
            # it would try to write end-of-archive blocks and padding.
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
  2096. # class TarFile
  2097. class TarIter:
  2098. """Iterator Class.
  2099. for tarinfo in TarFile(...):
  2100. suite...
  2101. """
  2102. def __init__(self, tarfile):
  2103. """Construct a TarIter object.
  2104. """
  2105. self.tarfile = tarfile
  2106. self.index = 0
  2107. def __iter__(self):
  2108. """Return iterator object.
  2109. """
  2110. return self
  2111. def __next__(self):
  2112. """Return the next item using TarFile's next() method.
  2113. When all members have been read, set TarFile as _loaded.
  2114. """
  2115. # Fix for SF #1100429: Under rare circumstances it can
  2116. # happen that getmembers() is called during iteration,
  2117. # which will cause TarIter to stop prematurely.
  2118. if self.index == 0 and self.tarfile.firstmember is not None:
  2119. tarinfo = self.tarfile.next()
  2120. elif self.index < len(self.tarfile.members):
  2121. tarinfo = self.tarfile.members[self.index]
  2122. elif not self.tarfile._loaded:
  2123. tarinfo = self.tarfile.next()
  2124. if not tarinfo:
  2125. self.tarfile._loaded = True
  2126. raise StopIteration
  2127. else:
  2128. raise StopIteration
  2129. self.index += 1
  2130. return tarinfo
  2131. #--------------------
  2132. # exported functions
  2133. #--------------------
  2134. def is_tarfile(name):
  2135. """Return True if name points to a tar archive that we
  2136. are able to handle, else return False.
  2137. """
  2138. try:
  2139. t = open(name)
  2140. t.close()
  2141. return True
  2142. except TarError:
  2143. return False
# Module-level convenience alias; note it shadows the builtin open()
# within this module (plain files are opened via bltn_open above).
open = TarFile.open
def main():
    """Command line interface (python -m tarfile) offering mutually
    exclusive --list, --extract, --create and --test operations.
    """
    import argparse

    description = 'A simple command line interface for tarfile module.'
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('-v', '--verbose', action='store_true', default=False,
                        help='Verbose output')
    # Exactly one operation may be chosen per invocation.
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-l', '--list', metavar='<tarfile>',
                       help='Show listing of a tarfile')
    group.add_argument('-e', '--extract', nargs='+',
                       metavar=('<tarfile>', '<output_dir>'),
                       help='Extract tarfile into target dir')
    group.add_argument('-c', '--create', nargs='+',
                       metavar=('<name>', '<file>'),
                       help='Create tarfile from sources')
    group.add_argument('-t', '--test', metavar='<tarfile>',
                       help='Test if a tarfile is valid')
    args = parser.parse_args()

    if args.test:
        src = args.test
        if is_tarfile(src):
            with open(src, 'r') as tar:
                # Reading all members validates the archive; the member
                # listing goes to stderr so stdout stays clean.
                tar.getmembers()
                print(tar.getmembers(), file=sys.stderr)
            if args.verbose:
                print('{!r} is a tar archive.'.format(src))
        else:
            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))

    elif args.list:
        src = args.list
        if is_tarfile(src):
            with TarFile.open(src, 'r:*') as tf:
                tf.list(verbose=args.verbose)
        else:
            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))

    elif args.extract:
        # One argument: extract into the current directory;
        # two arguments: extract into the given directory.
        if len(args.extract) == 1:
            src = args.extract[0]
            curdir = os.curdir
        elif len(args.extract) == 2:
            src, curdir = args.extract
        else:
            parser.exit(1, parser.format_help())

        if is_tarfile(src):
            with TarFile.open(src, 'r:*') as tf:
                tf.extractall(path=curdir)
            if args.verbose:
                if curdir == '.':
                    msg = '{!r} file is extracted.'.format(src)
                else:
                    msg = ('{!r} file is extracted '
                           'into {!r} directory.').format(src, curdir)
                print(msg)
        else:
            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))

    elif args.create:
        # First positional is the archive name, the rest are sources.
        tar_name = args.create.pop(0)
        _, ext = os.path.splitext(tar_name)
        # Guess the compression from the archive's file extension.
        compressions = {
            # gz
            '.gz': 'gz',
            '.tgz': 'gz',
            # xz
            '.xz': 'xz',
            '.txz': 'xz',
            # bz2
            '.bz2': 'bz2',
            '.tbz': 'bz2',
            '.tbz2': 'bz2',
            '.tb2': 'bz2',
        }
        tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
        tar_files = args.create

        with TarFile.open(tar_name, tar_mode) as tf:
            for file_name in tar_files:
                tf.add(file_name)

        if args.verbose:
            print('{!r} file created.'.format(tar_name))

    else:
        # No operation given: show usage and exit with an error.
        parser.exit(1, parser.format_help())
# Allow running the module as a script: python -m tarfile ...
if __name__ == '__main__':
    main()