#!/usr/bin/env python3
#-------------------------------------------------------------------
# tarfile.py
#-------------------------------------------------------------------
# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
"""Read from and write to tar format archives.
"""

version = "0.9.0"
__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)"
__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."

#---------
# Imports
#---------
from builtins import open as bltn_open
import sys
import os
import io
import shutil
import stat
import time
import struct
import copy
import re
try:
    import pwd
except ImportError:
    pwd = None
try:
    import grp
except ImportError:
    grp = None

# os.symlink on Windows prior to 6.0 raises NotImplementedError
symlink_exception = (AttributeError, NotImplementedError)
try:
    # OSError (winerror=1314) will be raised if the caller does not hold the
    # SeCreateSymbolicLinkPrivilege privilege
    symlink_exception += (OSError,)
except NameError:
    pass

# from tarfile import *
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
           "CompressionError", "StreamError", "ExtractError", "HeaderError",
           "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
           "DEFAULT_FORMAT", "open"]
#---------------------------------------------------------
# tar constants
#---------------------------------------------------------
NUL = b"\0"                     # the null character
BLOCKSIZE = 512                 # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20     # length of records
GNU_MAGIC = b"ustar  \0"        # magic gnu tar string
POSIX_MAGIC = b"ustar\x0000"    # magic posix tar string

LENGTH_NAME = 100               # maximum length of a filename
LENGTH_LINK = 100               # maximum length of a linkname
LENGTH_PREFIX = 155             # maximum length of the prefix field

REGTYPE = b"0"                  # regular file
AREGTYPE = b"\0"                # regular file
LNKTYPE = b"1"                  # link (inside tarfile)
SYMTYPE = b"2"                  # symbolic link
CHRTYPE = b"3"                  # character special device
BLKTYPE = b"4"                  # block special device
DIRTYPE = b"5"                  # directory
FIFOTYPE = b"6"                 # fifo special device
CONTTYPE = b"7"                 # contiguous file

GNUTYPE_LONGNAME = b"L"         # GNU tar longname
GNUTYPE_LONGLINK = b"K"         # GNU tar longlink
GNUTYPE_SPARSE = b"S"           # GNU tar sparse file

XHDTYPE = b"x"                  # POSIX.1-2001 extended header
XGLTYPE = b"g"                  # POSIX.1-2001 global header
SOLARIS_XHDTYPE = b"X"          # Solaris extended header

USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1                  # GNU tar format
PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
DEFAULT_FORMAT = PAX_FORMAT

#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
# File types that tarfile supports:
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
                   SYMTYPE, DIRTYPE, FIFOTYPE,
                   CONTTYPE, CHRTYPE, BLKTYPE,
                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
                   GNUTYPE_SPARSE)

# File types that will be treated as a regular file.
REGULAR_TYPES = (REGTYPE, AREGTYPE,
                 CONTTYPE, GNUTYPE_SPARSE)

# File types that are part of the GNU tar format.
GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
             GNUTYPE_SPARSE)

# Fields from a pax header that override a TarInfo attribute.
PAX_FIELDS = ("path", "linkpath", "size", "mtime",
              "uid", "gid", "uname", "gname")

# Fields from a pax header that are affected by hdrcharset.
PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}

# Fields in a pax header that are numbers, all other fields
# are treated as strings.
PAX_NUMBER_FIELDS = {
    "atime": float,
    "ctime": float,
    "mtime": float,
    "uid": int,
    "gid": int,
    "size": int
}
#---------------------------------------------------------
# initialization
#---------------------------------------------------------
if os.name == "nt":
    ENCODING = "utf-8"
else:
    ENCODING = sys.getfilesystemencoding()

#---------------------------------------------------------
# Some useful functions
#---------------------------------------------------------
def stn(s, length, encoding, errors):
    """Convert a string to a null-terminated bytes object.
    """
    s = s.encode(encoding, errors)
    return s[:length] + (length - len(s)) * NUL

def nts(s, encoding, errors):
    """Convert a null-terminated bytes object to a string.
    """
    p = s.find(b"\0")
    if p != -1:
        s = s[:p]
    return s.decode(encoding, errors)
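
# --- Illustrative sketch (not part of the original module) ---
# How the two helpers above round-trip a short name through a fixed-width,
# NUL-padded header field.  The field width of 32 is just an example value.
def _demo_fixed_width_field():
    field = stn("lars", 32, "utf-8", "strict")
    assert len(field) == 32 and field.startswith(b"lars\0")
    assert nts(field, "utf-8", "strict") == "lars"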
def nti(s):
    """Convert a number field to a python number.
    """
    # There are two possible encodings for a number field, see
    # itn() below.
    if s[0] in (0o200, 0o377):
        n = 0
        for i in range(len(s) - 1):
            n <<= 8
            n += s[i + 1]
        if s[0] == 0o377:
            n = -(256 ** (len(s) - 1) - n)
    else:
        try:
            s = nts(s, "ascii", "strict")
            n = int(s.strip() or "0", 8)
        except ValueError:
            raise InvalidHeaderError("invalid header")
    return n

def itn(n, digits=8, format=DEFAULT_FORMAT):
    """Convert a python number to a number field.
    """
    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
    # octal digits followed by a null-byte, this allows values up to
    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
    # that if necessary. A leading 0o200 or 0o377 byte indicate this
    # particular encoding, the following digits-1 bytes are a big-endian
    # base-256 representation. This allows values up to (256**(digits-1))-1.
    # A 0o200 byte indicates a positive number, a 0o377 byte a negative
    # number.
    n = int(n)
    if 0 <= n < 8 ** (digits - 1):
        s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL
    elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
        if n >= 0:
            s = bytearray([0o200])
        else:
            s = bytearray([0o377])
            n = 256 ** digits + n

        for i in range(digits - 1):
            s.insert(1, n & 0o377)
            n >>= 8
    else:
        raise ValueError("overflow in number field")

    return s
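
# --- Illustrative sketch (not part of the original module) ---
# Round trip through the two number-field encodings described above: small
# values use NUL-terminated octal, values that do not fit fall back to the
# GNU base-256 form with a leading 0o200 byte.
def _demo_number_field_roundtrip():
    octal = itn(1000, digits=8)
    assert octal == b"0001750\0" and nti(octal) == 1000
    base256 = itn(8 ** 7, digits=8, format=GNU_FORMAT)
    assert base256[0] == 0o200 and nti(base256) == 8 ** 7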
def calc_chksums(buf):
    """Calculate the checksum for a member's header by summing up all
       characters except for the chksum field which is treated as if
       it was filled with spaces. According to the GNU tar sources,
       some tars (Sun and NeXT) calculate chksum with signed char,
       which will be different if there are chars in the buffer with
       the high bit set. So we calculate two checksums, unsigned and
       signed.
    """
    unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf))
    signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf))
    return unsigned_chksum, signed_chksum

def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None):
    """Copy length bytes from fileobj src to fileobj dst.
       If length is None, copy the entire content.
    """
    bufsize = bufsize or 16 * 1024
    if length == 0:
        return
    if length is None:
        shutil.copyfileobj(src, dst, bufsize)
        return

    blocks, remainder = divmod(length, bufsize)
    for b in range(blocks):
        buf = src.read(bufsize)
        if len(buf) < bufsize:
            raise exception("unexpected end of data")
        dst.write(buf)

    if remainder != 0:
        buf = src.read(remainder)
        if len(buf) < remainder:
            raise exception("unexpected end of data")
        dst.write(buf)
    return
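
# --- Illustrative sketch (not part of the original module) ---
# copyfileobj() with an explicit length raises if the source runs dry early;
# this shows the normal case using in-memory file objects.
def _demo_copyfileobj():
    src = io.BytesIO(b"x" * 1024)
    dst = io.BytesIO()
    copyfileobj(src, dst, length=512)
    assert dst.getvalue() == b"x" * 512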
def _safe_print(s):
    encoding = getattr(sys.stdout, 'encoding', None)
    if encoding is not None:
        s = s.encode(encoding, 'backslashreplace').decode(encoding)
    print(s, end=' ')


class TarError(Exception):
    """Base exception."""
    pass
class ExtractError(TarError):
    """General exception for extract errors."""
    pass
class ReadError(TarError):
    """Exception for unreadable tar archives."""
    pass
class CompressionError(TarError):
    """Exception for unavailable compression methods."""
    pass
class StreamError(TarError):
    """Exception for unsupported operations on stream-like TarFiles."""
    pass
class HeaderError(TarError):
    """Base exception for header errors."""
    pass
class EmptyHeaderError(HeaderError):
    """Exception for empty headers."""
    pass
class TruncatedHeaderError(HeaderError):
    """Exception for truncated headers."""
    pass
class EOFHeaderError(HeaderError):
    """Exception for end of file headers."""
    pass
class InvalidHeaderError(HeaderError):
    """Exception for invalid headers."""
    pass
class SubsequentHeaderError(HeaderError):
    """Exception for missing and invalid extended headers."""
    pass

#---------------------------
# internal stream interface
#---------------------------
class _LowLevelFile:
    """Low-level file object. Supports reading and writing.
       It is used instead of a regular file object for streaming
       access.
    """

    def __init__(self, name, mode):
        mode = {
            "r": os.O_RDONLY,
            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
        }[mode]
        if hasattr(os, "O_BINARY"):
            mode |= os.O_BINARY
        self.fd = os.open(name, mode, 0o666)

    def close(self):
        os.close(self.fd)

    def read(self, size):
        return os.read(self.fd, size)

    def write(self, s):
        os.write(self.fd, s)
class _Stream:
    """Class that serves as an adapter between TarFile and
       a stream-like object.  The stream-like object only
       needs to have a read() or write() method and is accessed
       blockwise.  Use of gzip or bzip2 compression is possible.
       A stream-like object could be for example: sys.stdin,
       sys.stdout, a socket, a tape device etc.

       _Stream is intended to be used only internally.
    """

    def __init__(self, name, mode, comptype, fileobj, bufsize):
        """Construct a _Stream object.
        """
        self._extfileobj = True
        if fileobj is None:
            fileobj = _LowLevelFile(name, mode)
            self._extfileobj = False

        if comptype == '*':
            # Enable transparent compression detection for the
            # stream interface
            fileobj = _StreamProxy(fileobj)
            comptype = fileobj.getcomptype()

        self.name = name or ""
        self.mode = mode
        self.comptype = comptype
        self.fileobj = fileobj
        self.bufsize = bufsize
        self.buf = b""
        self.pos = 0
        self.closed = False

        try:
            if comptype == "gz":
                try:
                    import zlib
                except ImportError:
                    raise CompressionError("zlib module is not available")
                self.zlib = zlib
                self.crc = zlib.crc32(b"")
                if mode == "r":
                    self._init_read_gz()
                    self.exception = zlib.error
                else:
                    self._init_write_gz()

            elif comptype == "bz2":
                try:
                    import bz2
                except ImportError:
                    raise CompressionError("bz2 module is not available")
                if mode == "r":
                    self.dbuf = b""
                    self.cmp = bz2.BZ2Decompressor()
                    self.exception = OSError
                else:
                    self.cmp = bz2.BZ2Compressor()

            elif comptype == "xz":
                try:
                    import lzma
                except ImportError:
                    raise CompressionError("lzma module is not available")
                if mode == "r":
                    self.dbuf = b""
                    self.cmp = lzma.LZMADecompressor()
                    self.exception = lzma.LZMAError
                else:
                    self.cmp = lzma.LZMACompressor()

            elif comptype != "tar":
                raise CompressionError("unknown compression type %r" % comptype)

        except:
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
            raise
    def __del__(self):
        if hasattr(self, "closed") and not self.closed:
            self.close()

    def _init_write_gz(self):
        """Initialize for writing with gzip compression.
        """
        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
                                         -self.zlib.MAX_WBITS,
                                         self.zlib.DEF_MEM_LEVEL,
                                         0)
        timestamp = struct.pack("<L", int(time.time()))
        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
        if self.name.endswith(".gz"):
            self.name = self.name[:-3]
        # RFC1952 says we must use ISO-8859-1 for the FNAME field.
        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)

    def write(self, s):
        """Write string s to the stream.
        """
        if self.comptype == "gz":
            self.crc = self.zlib.crc32(s, self.crc)
        self.pos += len(s)
        if self.comptype != "tar":
            s = self.cmp.compress(s)
        self.__write(s)

    def __write(self, s):
        """Write string s to the stream if a whole new block
           is ready to be written.
        """
        self.buf += s
        while len(self.buf) > self.bufsize:
            self.fileobj.write(self.buf[:self.bufsize])
            self.buf = self.buf[self.bufsize:]

    def close(self):
        """Close the _Stream object. No operation should be
           done on it afterwards.
        """
        if self.closed:
            return
        self.closed = True
        try:
            if self.mode == "w" and self.comptype != "tar":
                self.buf += self.cmp.flush()

            if self.mode == "w" and self.buf:
                self.fileobj.write(self.buf)
                self.buf = b""
                if self.comptype == "gz":
                    self.fileobj.write(struct.pack("<L", self.crc))
                    self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
        finally:
            if not self._extfileobj:
                self.fileobj.close()

    def _init_read_gz(self):
        """Initialize for reading a gzip compressed fileobj.
        """
        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
        self.dbuf = b""

        # taken from gzip.GzipFile with some alterations
        if self.__read(2) != b"\037\213":
            raise ReadError("not a gzip file")
        if self.__read(1) != b"\010":
            raise CompressionError("unsupported compression method")

        flag = ord(self.__read(1))
        self.__read(6)

        if flag & 4:
            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
            self.read(xlen)
        if flag & 8:
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 16:
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 2:
            self.__read(2)
    def tell(self):
        """Return the stream's file pointer position.
        """
        return self.pos

    def seek(self, pos=0):
        """Set the stream's file pointer to pos. Negative seeking
           is forbidden.
        """
        if pos - self.pos >= 0:
            blocks, remainder = divmod(pos - self.pos, self.bufsize)
            for i in range(blocks):
                self.read(self.bufsize)
            self.read(remainder)
        else:
            raise StreamError("seeking backwards is not allowed")
        return self.pos

    def read(self, size):
        """Return the next size number of bytes from the stream."""
        assert size is not None
        buf = self._read(size)
        self.pos += len(buf)
        return buf

    def _read(self, size):
        """Return size bytes from the stream.
        """
        if self.comptype == "tar":
            return self.__read(size)

        c = len(self.dbuf)
        t = [self.dbuf]
        while c < size:
            # Skip underlying buffer to avoid unaligned double buffering.
            if self.buf:
                buf = self.buf
                self.buf = b""
            else:
                buf = self.fileobj.read(self.bufsize)
                if not buf:
                    break
            try:
                buf = self.cmp.decompress(buf)
            except self.exception:
                raise ReadError("invalid compressed data")
            t.append(buf)
            c += len(buf)
        t = b"".join(t)
        self.dbuf = t[size:]
        return t[:size]

    def __read(self, size):
        """Return size bytes from stream. If internal buffer is empty,
           read another block from the stream.
        """
        c = len(self.buf)
        t = [self.buf]
        while c < size:
            buf = self.fileobj.read(self.bufsize)
            if not buf:
                break
            t.append(buf)
            c += len(buf)
        t = b"".join(t)
        self.buf = t[size:]
        return t[:size]
# class _Stream
class _StreamProxy(object):
    """Small proxy class that enables transparent compression
       detection for the Stream interface (mode 'r|*').
    """

    def __init__(self, fileobj):
        self.fileobj = fileobj
        self.buf = self.fileobj.read(BLOCKSIZE)

    def read(self, size):
        self.read = self.fileobj.read
        return self.buf

    def getcomptype(self):
        if self.buf.startswith(b"\x1f\x8b\x08"):
            return "gz"
        elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
            return "bz2"
        elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
            return "xz"
        else:
            return "tar"

    def close(self):
        self.fileobj.close()
# class StreamProxy
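
# --- Illustrative sketch (not part of the original module) ---
# getcomptype() above sniffs the first block for compression magic bytes;
# io.BytesIO stands in for a real stream here.
def _demo_comptype_detection():
    gz_block = b"\x1f\x8b\x08" + bytes(BLOCKSIZE - 3)
    assert _StreamProxy(io.BytesIO(gz_block)).getcomptype() == "gz"
    xz_block = b"\xfd7zXZ\x00" + bytes(BLOCKSIZE - 6)
    assert _StreamProxy(io.BytesIO(xz_block)).getcomptype() == "xz"
    assert _StreamProxy(io.BytesIO(bytes(BLOCKSIZE))).getcomptype() == "tar"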
#------------------------
# Extraction file object
#------------------------
class _FileInFile(object):
    """A thin wrapper around an existing file object that
       provides a part of its data as an individual file
       object.
    """

    def __init__(self, fileobj, offset, size, blockinfo=None):
        self.fileobj = fileobj
        self.offset = offset
        self.size = size
        self.position = 0
        self.name = getattr(fileobj, "name", None)
        self.closed = False

        if blockinfo is None:
            blockinfo = [(0, size)]

        # Construct a map with data and zero blocks.
        self.map_index = 0
        self.map = []
        lastpos = 0
        realpos = self.offset
        for offset, size in blockinfo:
            if offset > lastpos:
                self.map.append((False, lastpos, offset, None))
            self.map.append((True, offset, offset + size, realpos))
            realpos += size
            lastpos = offset + size
        if lastpos < self.size:
            self.map.append((False, lastpos, self.size, None))

    def flush(self):
        pass

    def readable(self):
        return True

    def writable(self):
        return False

    def seekable(self):
        return self.fileobj.seekable()

    def tell(self):
        """Return the current file position.
        """
        return self.position

    def seek(self, position, whence=io.SEEK_SET):
        """Seek to a position in the file.
        """
        if whence == io.SEEK_SET:
            self.position = min(max(position, 0), self.size)
        elif whence == io.SEEK_CUR:
            if position < 0:
                self.position = max(self.position + position, 0)
            else:
                self.position = min(self.position + position, self.size)
        elif whence == io.SEEK_END:
            self.position = max(min(self.size + position, self.size), 0)
        else:
            raise ValueError("Invalid argument")
        return self.position

    def read(self, size=None):
        """Read data from the file.
        """
        if size is None:
            size = self.size - self.position
        else:
            size = min(size, self.size - self.position)

        buf = b""
        while size > 0:
            while True:
                data, start, stop, offset = self.map[self.map_index]
                if start <= self.position < stop:
                    break
                else:
                    self.map_index += 1
                    if self.map_index == len(self.map):
                        self.map_index = 0
            length = min(size, stop - self.position)
            if data:
                self.fileobj.seek(offset + (self.position - start))
                b = self.fileobj.read(length)
                if len(b) != length:
                    raise ReadError("unexpected end of data")
                buf += b
            else:
                buf += NUL * length
            size -= length
            self.position += length
        return buf

    def readinto(self, b):
        buf = self.read(len(b))
        b[:len(buf)] = buf
        return len(buf)

    def close(self):
        self.closed = True
#class _FileInFile
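
# --- Illustrative sketch (not part of the original module) ---
# How _FileInFile exposes a slice of a larger file and fills sparse holes
# with NULs.  io.BytesIO stands in for the archive file object.
def _demo_file_in_file():
    archive = io.BytesIO(b"HEADER" + b"payload" + b"TRAILER")
    # Member data starts at offset 6 and is 7 bytes long ("payload").
    member = _FileInFile(archive, offset=6, size=7)
    assert member.read() == b"payload"
    # A sparse member maps only part of the range to real data;
    # the unmapped tail (bytes 3..6) reads back as zero bytes.
    sparse = _FileInFile(archive, offset=6, size=7, blockinfo=[(0, 3)])
    assert sparse.read() == b"pay" + NUL * 4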
class ExFileObject(io.BufferedReader):

    def __init__(self, tarfile, tarinfo):
        fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
                              tarinfo.size, tarinfo.sparse)
        super().__init__(fileobj)
#class ExFileObject

#------------------
# Exported Classes
#------------------
class TarInfo(object):
    """Informational class which holds the details about an
       archive member given by a tar header block.
       TarInfo objects are returned by TarFile.getmember(),
       TarFile.getmembers() and TarFile.gettarinfo() and are
       usually created internally.
    """

    __slots__ = dict(
        name = 'Name of the archive member.',
        mode = 'Permission bits.',
        uid = 'User ID of the user who originally stored this member.',
        gid = 'Group ID of the user who originally stored this member.',
        size = 'Size in bytes.',
        mtime = 'Time of last modification.',
        chksum = 'Header checksum.',
        type = ('File type. type is usually one of these constants: '
                'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, '
                'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'),
        linkname = ('Name of the target file name, which is only present '
                    'in TarInfo objects of type LNKTYPE and SYMTYPE.'),
        uname = 'User name.',
        gname = 'Group name.',
        devmajor = 'Device major number.',
        devminor = 'Device minor number.',
        offset = 'The tar header starts here.',
        offset_data = "The file's data starts here.",
        pax_headers = ('A dictionary containing key-value pairs of an '
                       'associated pax extended header.'),
        sparse = 'Sparse member information.',
        tarfile = None,
        _sparse_structs = None,
        _link_target = None,
    )
    def __init__(self, name=""):
        """Construct a TarInfo object. name is the optional name
           of the member.
        """
        self.name = name        # member name
        self.mode = 0o644       # file permissions
        self.uid = 0            # user id
        self.gid = 0            # group id
        self.size = 0           # file size
        self.mtime = 0          # modification time
        self.chksum = 0         # header checksum
        self.type = REGTYPE     # member type
        self.linkname = ""      # link name
        self.uname = ""         # user name
        self.gname = ""         # group name
        self.devmajor = 0       # device major number
        self.devminor = 0       # device minor number

        self.offset = 0         # the tar header starts here
        self.offset_data = 0    # the file's data starts here

        self.sparse = None      # sparse member information
        self.pax_headers = {}   # pax header information

    @property
    def path(self):
        'In pax headers, "name" is called "path".'
        return self.name

    @path.setter
    def path(self, name):
        self.name = name

    @property
    def linkpath(self):
        'In pax headers, "linkname" is called "linkpath".'
        return self.linkname

    @linkpath.setter
    def linkpath(self, linkname):
        self.linkname = linkname

    def __repr__(self):
        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))

    def get_info(self):
        """Return the TarInfo's attributes as a dictionary.
        """
        info = {
            "name":     self.name,
            "mode":     self.mode & 0o7777,
            "uid":      self.uid,
            "gid":      self.gid,
            "size":     self.size,
            "mtime":    self.mtime,
            "chksum":   self.chksum,
            "type":     self.type,
            "linkname": self.linkname,
            "uname":    self.uname,
            "gname":    self.gname,
            "devmajor": self.devmajor,
            "devminor": self.devminor
        }

        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
            info["name"] += "/"

        return info

    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
        """Return a tar header as a string of 512 byte blocks.
        """
        info = self.get_info()

        if format == USTAR_FORMAT:
            return self.create_ustar_header(info, encoding, errors)
        elif format == GNU_FORMAT:
            return self.create_gnu_header(info, encoding, errors)
        elif format == PAX_FORMAT:
            return self.create_pax_header(info, encoding)
        else:
            raise ValueError("invalid format")
    def create_ustar_header(self, info, encoding, errors):
        """Return the object as a ustar header block.
        """
        info["magic"] = POSIX_MAGIC

        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
            raise ValueError("linkname is too long")

        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
            info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors)

        return self._create_header(info, USTAR_FORMAT, encoding, errors)

    def create_gnu_header(self, info, encoding, errors):
        """Return the object as a GNU header block sequence.
        """
        info["magic"] = GNU_MAGIC

        buf = b""
        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)

        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)

        return buf + self._create_header(info, GNU_FORMAT, encoding, errors)

    def create_pax_header(self, info, encoding):
        """Return the object as a ustar header block. If it cannot be
           represented this way, prepend a pax extended header sequence
           with supplement information.
        """
        info["magic"] = POSIX_MAGIC
        pax_headers = self.pax_headers.copy()

        # Test string fields for values that exceed the field length or cannot
        # be represented in ASCII encoding.
        for name, hname, length in (
                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
                ("uname", "uname", 32), ("gname", "gname", 32)):

            if hname in pax_headers:
                # The pax header has priority.
                continue

            # Try to encode the string as ASCII.
            try:
                info[name].encode("ascii", "strict")
            except UnicodeEncodeError:
                pax_headers[hname] = info[name]
                continue

            if len(info[name]) > length:
                pax_headers[hname] = info[name]

        # Test number fields for values that exceed the field limit or values
        # that like to be stored as float.
        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
            if name in pax_headers:
                # The pax header has priority. Avoid overflow.
                info[name] = 0
                continue

            val = info[name]
            if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
                pax_headers[name] = str(val)
                info[name] = 0

        # Create a pax extended header if necessary.
        if pax_headers:
            buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
        else:
            buf = b""

        return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")

    @classmethod
    def create_pax_global_header(cls, pax_headers):
        """Return the object as a pax global header block sequence.
        """
        return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
    def _posix_split_name(self, name, encoding, errors):
        """Split a name longer than 100 chars into a prefix
           and a name part.
        """
        components = name.split("/")
        for i in range(1, len(components)):
            prefix = "/".join(components[:i])
            name = "/".join(components[i:])
            if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \
                    len(name.encode(encoding, errors)) <= LENGTH_NAME:
                break
        else:
            raise ValueError("name is too long")

        return prefix, name

    @staticmethod
    def _create_header(info, format, encoding, errors):
        """Return a header block. info is a dictionary with file
           information, format must be one of the *_FORMAT constants.
        """
        parts = [
            stn(info.get("name", ""), 100, encoding, errors),
            itn(info.get("mode", 0) & 0o7777, 8, format),
            itn(info.get("uid", 0), 8, format),
            itn(info.get("gid", 0), 8, format),
            itn(info.get("size", 0), 12, format),
            itn(info.get("mtime", 0), 12, format),
            b"        ",            # checksum field (8 spaces)
            info.get("type", REGTYPE),
            stn(info.get("linkname", ""), 100, encoding, errors),
            info.get("magic", POSIX_MAGIC),
            stn(info.get("uname", ""), 32, encoding, errors),
            stn(info.get("gname", ""), 32, encoding, errors),
            itn(info.get("devmajor", 0), 8, format),
            itn(info.get("devminor", 0), 8, format),
            stn(info.get("prefix", ""), 155, encoding, errors)
        ]

        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
        buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
        return buf
    @staticmethod
    def _create_payload(payload):
        """Return the string payload filled with zero bytes
           up to the next 512 byte border.
        """
        blocks, remainder = divmod(len(payload), BLOCKSIZE)
        if remainder > 0:
            payload += (BLOCKSIZE - remainder) * NUL
        return payload

    @classmethod
    def _create_gnu_long_header(cls, name, type, encoding, errors):
        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
           for name.
        """
        name = name.encode(encoding, errors) + NUL

        info = {}
        info["name"] = "././@LongLink"
        info["type"] = type
        info["size"] = len(name)
        info["magic"] = GNU_MAGIC

        # create extended header + name blocks.
        return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
                cls._create_payload(name)
    @classmethod
    def _create_pax_generic_header(cls, pax_headers, type, encoding):
        """Return a POSIX.1-2008 extended or global header sequence
           that contains a list of keyword, value pairs. The values
           must be strings.
        """
        # Check if one of the fields contains surrogate characters and thereby
        # forces hdrcharset=BINARY, see _proc_pax() for more information.
        binary = False
        for keyword, value in pax_headers.items():
            try:
                value.encode("utf-8", "strict")
            except UnicodeEncodeError:
                binary = True
                break

        records = b""
        if binary:
            # Put the hdrcharset field at the beginning of the header.
            records += b"21 hdrcharset=BINARY\n"

        for keyword, value in pax_headers.items():
            keyword = keyword.encode("utf-8")
            if binary:
                # Try to restore the original byte representation of `value'.
                # Needless to say, that the encoding must match the string.
                value = value.encode(encoding, "surrogateescape")
            else:
                value = value.encode("utf-8")

            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
            n = p = 0
            while True:
                n = l + len(str(p))
                if n == p:
                    break
                p = n
            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"

        # We use a hardcoded "././@PaxHeader" name like star does
        # instead of the one that POSIX recommends.
        info = {}
        info["name"] = "././@PaxHeader"
        info["type"] = type
        info["size"] = len(records)
        info["magic"] = POSIX_MAGIC

        # Create pax header + record blocks.
        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
                cls._create_payload(records)
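
    # --- Illustrative note (not part of the original module) ---
    # Worked example of the record-length fixed point computed above,
    # assuming the pax keyword "path" and the value "example.txt":
    # the record without its length field is " path=example.txt\n" (18 bytes),
    # so l = 18; iterating gives p = 19, then p = 20, and 18 + len("20") == 20
    # is stable, so the record is written as b"20 path=example.txt\n",
    # which is exactly 20 bytes including its own length field.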
    @classmethod
    def frombuf(cls, buf, encoding, errors):
        """Construct a TarInfo object from a 512 byte bytes object.
        """
        if len(buf) == 0:
            raise EmptyHeaderError("empty header")
        if len(buf) != BLOCKSIZE:
            raise TruncatedHeaderError("truncated header")
        if buf.count(NUL) == BLOCKSIZE:
            raise EOFHeaderError("end of file header")

        chksum = nti(buf[148:156])
        if chksum not in calc_chksums(buf):
            raise InvalidHeaderError("bad checksum")

        obj = cls()
        obj.name = nts(buf[0:100], encoding, errors)
        obj.mode = nti(buf[100:108])
        obj.uid = nti(buf[108:116])
        obj.gid = nti(buf[116:124])
        obj.size = nti(buf[124:136])
        obj.mtime = nti(buf[136:148])
        obj.chksum = chksum
        obj.type = buf[156:157]
        obj.linkname = nts(buf[157:257], encoding, errors)
        obj.uname = nts(buf[265:297], encoding, errors)
        obj.gname = nts(buf[297:329], encoding, errors)
        obj.devmajor = nti(buf[329:337])
        obj.devminor = nti(buf[337:345])
        prefix = nts(buf[345:500], encoding, errors)

        # Old V7 tar format represents a directory as a regular
        # file with a trailing slash.
        if obj.type == AREGTYPE and obj.name.endswith("/"):
            obj.type = DIRTYPE

        # The old GNU sparse format occupies some of the unused
        # space in the buffer for up to 4 sparse structures.
        # Save them for later processing in _proc_sparse().
        if obj.type == GNUTYPE_SPARSE:
            pos = 386
            structs = []
            for i in range(4):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                structs.append((offset, numbytes))
                pos += 24
            isextended = bool(buf[482])
            origsize = nti(buf[483:495])
            obj._sparse_structs = (structs, isextended, origsize)

        # Remove redundant slashes from directories.
        if obj.isdir():
            obj.name = obj.name.rstrip("/")

        # Reconstruct a ustar longname.
        if prefix and obj.type not in GNU_TYPES:
            obj.name = prefix + "/" + obj.name
        return obj
    @classmethod
    def fromtarfile(cls, tarfile):
        """Return the next TarInfo object from TarFile object
           tarfile.
        """
        buf = tarfile.fileobj.read(BLOCKSIZE)
        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
        return obj._proc_member(tarfile)

    #--------------------------------------------------------------------------
    # The following are methods that are called depending on the type of a
    # member. The entry point is _proc_member() which can be overridden in a
    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
    # implement the following
    # operations:
    # 1. Set self.offset_data to the position where the data blocks begin,
    #    if there is data that follows.
    # 2. Set tarfile.offset to the position where the next member's header will
    #    begin.
    # 3. Return self or another valid TarInfo object.
    def _proc_member(self, tarfile):
        """Choose the right processing method depending on
           the type and call it.
        """
        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
            return self._proc_gnulong(tarfile)
        elif self.type == GNUTYPE_SPARSE:
            return self._proc_sparse(tarfile)
        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
            return self._proc_pax(tarfile)
        else:
            return self._proc_builtin(tarfile)

    def _proc_builtin(self, tarfile):
        """Process a builtin type or an unknown type which
           will be treated as a regular file.
        """
        self.offset_data = tarfile.fileobj.tell()
        offset = self.offset_data
        if self.isreg() or self.type not in SUPPORTED_TYPES:
            # Skip the following data blocks.
            offset += self._block(self.size)
        tarfile.offset = offset

        # Patch the TarInfo object with saved global
        # header information.
        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)

        return self

    def _proc_gnulong(self, tarfile):
        """Process the blocks that hold a GNU longname
           or longlink member.
        """
        buf = tarfile.fileobj.read(self._block(self.size))

        # Fetch the next header and process it.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")

        # Patch the TarInfo object from the next header with
        # the longname information.
        next.offset = self.offset
        if self.type == GNUTYPE_LONGNAME:
            next.name = nts(buf, tarfile.encoding, tarfile.errors)
        elif self.type == GNUTYPE_LONGLINK:
            next.linkname = nts(buf, tarfile.encoding, tarfile.errors)

        return next

    def _proc_sparse(self, tarfile):
        """Process a GNU sparse header plus extra headers.
        """
        # We already collected some sparse structures in frombuf().
        structs, isextended, origsize = self._sparse_structs
        del self._sparse_structs

        # Collect sparse structures from extended header blocks.
        while isextended:
            buf = tarfile.fileobj.read(BLOCKSIZE)
            pos = 0
            for i in range(21):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                if offset and numbytes:
                    structs.append((offset, numbytes))
                pos += 24
            isextended = bool(buf[504])
        self.sparse = structs

        self.offset_data = tarfile.fileobj.tell()
        tarfile.offset = self.offset_data + self._block(self.size)
        self.size = origsize
        return self
    def _proc_pax(self, tarfile):
        """Process an extended or global header as described in
           POSIX.1-2008.
        """
        # Read the header information.
        buf = tarfile.fileobj.read(self._block(self.size))

        # A pax header stores supplemental information for either
        # the following file (extended) or all following files
        # (global).
        if self.type == XGLTYPE:
            pax_headers = tarfile.pax_headers
        else:
            pax_headers = tarfile.pax_headers.copy()

        # Check if the pax header contains a hdrcharset field. This tells us
        # the encoding of the path, linkpath, uname and gname fields. Normally,
        # these fields are UTF-8 encoded but since POSIX.1-2008 tar
        # implementations are allowed to store them as raw binary strings if
        # the translation to UTF-8 fails.
        match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
        if match is not None:
            pax_headers["hdrcharset"] = match.group(1).decode("utf-8")

        # For the time being, we don't care about anything other than "BINARY".
        # The only other value that is currently allowed by the standard is
        # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
        hdrcharset = pax_headers.get("hdrcharset")
        if hdrcharset == "BINARY":
            encoding = tarfile.encoding
        else:
            encoding = "utf-8"

        # Parse pax header information. A record looks like that:
        # "%d %s=%s\n" % (length, keyword, value). length is the size
        # of the complete record including the length field itself and
        # the newline. keyword and value are both UTF-8 encoded strings.
        regex = re.compile(br"(\d+) ([^=]+)=")
        pos = 0
        while True:
            match = regex.match(buf, pos)
            if not match:
                break

            length, keyword = match.groups()
            length = int(length)
            value = buf[match.end(2) + 1:match.start(1) + length - 1]

            # Normally, we could just use "utf-8" as the encoding and "strict"
            # as the error handler, but we better not take the risk. For
            # example, GNU tar <= 1.23 is known to store filenames it cannot
            # translate to UTF-8 as raw strings (unfortunately without a
            # hdrcharset=BINARY header).
            # We first try the strict standard encoding, and if that fails we
            # fall back on the user's encoding and error handler.
            keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
                                             tarfile.errors)
            if keyword in PAX_NAME_FIELDS:
                value = self._decode_pax_field(value, encoding, tarfile.encoding,
                                               tarfile.errors)
            else:
                value = self._decode_pax_field(value, "utf-8", "utf-8",
                                               tarfile.errors)

            pax_headers[keyword] = value
            pos += length

        # Fetch the next header.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")

        # Process GNU sparse information.
        if "GNU.sparse.map" in pax_headers:
            # GNU extended sparse format version 0.1.
            self._proc_gnusparse_01(next, pax_headers)

        elif "GNU.sparse.size" in pax_headers:
            # GNU extended sparse format version 0.0.
            self._proc_gnusparse_00(next, pax_headers, buf)

        elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
            # GNU extended sparse format version 1.0.
            self._proc_gnusparse_10(next, pax_headers, tarfile)

        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
            # Patch the TarInfo object with the extended header info.
            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
            next.offset = self.offset

            if "size" in pax_headers:
                # If the extended header replaces the size field,
                # we need to recalculate the offset where the next
                # header starts.
                offset = next.offset_data
                if next.isreg() or next.type not in SUPPORTED_TYPES:
                    offset += next._block(next.size)
                tarfile.offset = offset

        return next
    def _proc_gnusparse_00(self, next, pax_headers, buf):
        """Process a GNU tar extended sparse header, version 0.0.
        """
        offsets = []
        for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
            offsets.append(int(match.group(1)))
        numbytes = []
        for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
            numbytes.append(int(match.group(1)))
        next.sparse = list(zip(offsets, numbytes))

    def _proc_gnusparse_01(self, next, pax_headers):
        """Process a GNU tar extended sparse header, version 0.1.
        """
        sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
        next.sparse = list(zip(sparse[::2], sparse[1::2]))

    def _proc_gnusparse_10(self, next, pax_headers, tarfile):
        """Process a GNU tar extended sparse header, version 1.0.
        """
        fields = None
        sparse = []
        buf = tarfile.fileobj.read(BLOCKSIZE)
        fields, buf = buf.split(b"\n", 1)
        fields = int(fields)
        while len(sparse) < fields * 2:
            if b"\n" not in buf:
                buf += tarfile.fileobj.read(BLOCKSIZE)
            number, buf = buf.split(b"\n", 1)
            sparse.append(int(number))
        next.offset_data = tarfile.fileobj.tell()
        next.sparse = list(zip(sparse[::2], sparse[1::2]))
    def _apply_pax_info(self, pax_headers, encoding, errors):
        """Replace fields with supplemental information from a previous
           pax extended or global header.
        """
        for keyword, value in pax_headers.items():
            if keyword == "GNU.sparse.name":
                setattr(self, "path", value)
            elif keyword == "GNU.sparse.size":
                setattr(self, "size", int(value))
            elif keyword == "GNU.sparse.realsize":
                setattr(self, "size", int(value))
            elif keyword in PAX_FIELDS:
                if keyword in PAX_NUMBER_FIELDS:
                    try:
                        value = PAX_NUMBER_FIELDS[keyword](value)
                    except ValueError:
                        value = 0
                if keyword == "path":
                    value = value.rstrip("/")
                setattr(self, keyword, value)

        self.pax_headers = pax_headers.copy()

    def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
        """Decode a single field from a pax record.
        """
        try:
            return value.decode(encoding, "strict")
        except UnicodeDecodeError:
            return value.decode(fallback_encoding, fallback_errors)

    def _block(self, count):
        """Round up a byte count by BLOCKSIZE and return it,
           e.g. _block(834) => 1024.
        """
        blocks, remainder = divmod(count, BLOCKSIZE)
        if remainder:
            blocks += 1
        return blocks * BLOCKSIZE
    def isreg(self):
        'Return True if the Tarinfo object is a regular file.'
        return self.type in REGULAR_TYPES

    def isfile(self):
        'Return True if the Tarinfo object is a regular file.'
        return self.isreg()

    def isdir(self):
        'Return True if it is a directory.'
        return self.type == DIRTYPE

    def issym(self):
        'Return True if it is a symbolic link.'
        return self.type == SYMTYPE

    def islnk(self):
        'Return True if it is a hard link.'
        return self.type == LNKTYPE

    def ischr(self):
        'Return True if it is a character device.'
        return self.type == CHRTYPE

    def isblk(self):
        'Return True if it is a block device.'
        return self.type == BLKTYPE

    def isfifo(self):
        'Return True if it is a FIFO.'
        return self.type == FIFOTYPE

    def issparse(self):
        return self.sparse is not None

    def isdev(self):
        'Return True if it is one of character device, block device or FIFO.'
        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
# class TarInfo
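
# --- Illustrative sketch (not part of the original module) ---
# TarInfo.tobuf() and TarInfo.frombuf() above are inverses for a simple
# ustar member: building a 512-byte header block and parsing it back
# yields the same name and size.
def _demo_header_roundtrip():
    member = TarInfo("example.txt")
    member.size = 11
    block = member.tobuf(format=USTAR_FORMAT)
    assert len(block) == BLOCKSIZE
    parsed = TarInfo.frombuf(block, ENCODING, "surrogateescape")
    assert parsed.name == "example.txt" and parsed.size == 11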

class TarFile(object):
    """The TarFile Class provides an interface to tar archives.
    """

    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)

    dereference = False         # If true, add content of linked file to the
                                # tar file, else the link.

    ignore_zeros = False        # If true, skips empty or invalid blocks and
                                # continues processing.

    errorlevel = 1              # If 0, fatal errors only appear in debug
                                # messages (if debug >= 0). If > 0, errors
                                # are passed to the caller as exceptions.

    format = DEFAULT_FORMAT     # The format to use when creating an archive.

    encoding = ENCODING         # Encoding for 8-bit character strings.

    errors = None               # Error handler for unicode conversion.

    tarinfo = TarInfo           # The default TarInfo class to use.

    fileobject = ExFileObject   # The file-object for extractfile().

    def __init__(self, name=None, mode="r", fileobj=None, format=None,
            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
            errors="surrogateescape", pax_headers=None, debug=None,
            errorlevel=None, copybufsize=None):
        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
           read from an existing archive, 'a' to append data to an existing
           file or 'w' to create a new file overwriting an existing one. `mode'
           defaults to 'r'.
           If `fileobj' is given, it is used for reading or writing data. If it
           can be determined, `mode' is overridden by `fileobj's mode.
           `fileobj' is not closed, when TarFile is closed.
        """
        modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
        if mode not in modes:
            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
        self.mode = mode
        self._mode = modes[mode]

        if not fileobj:
            if self.mode == "a" and not os.path.exists(name):
                # Create nonexistent files in append mode.
                self.mode = "w"
                self._mode = "wb"
            fileobj = bltn_open(name, self._mode)
            self._extfileobj = False
        else:
            if (name is None and hasattr(fileobj, "name") and
                isinstance(fileobj.name, (str, bytes))):
                name = fileobj.name
            if hasattr(fileobj, "mode"):
                self._mode = fileobj.mode
            self._extfileobj = True
        self.name = os.path.abspath(name) if name else None
        self.fileobj = fileobj

        # Init attributes.
        if format is not None:
            self.format = format
        if tarinfo is not None:
            self.tarinfo = tarinfo
        if dereference is not None:
            self.dereference = dereference
        if ignore_zeros is not None:
            self.ignore_zeros = ignore_zeros
        if encoding is not None:
            self.encoding = encoding
        self.errors = errors

        if pax_headers is not None and self.format == PAX_FORMAT:
            self.pax_headers = pax_headers
        else:
            self.pax_headers = {}

        if debug is not None:
            self.debug = debug
        if errorlevel is not None:
            self.errorlevel = errorlevel

        # Init datastructures.
        self.copybufsize = copybufsize
        self.closed = False
        self.members = []       # list of members as TarInfo objects
        self._loaded = False    # flag if all members have been read
        self.offset = self.fileobj.tell()
                                # current position in the archive file
        self.inodes = {}        # dictionary caching the inodes of
                                # archive members already added

        try:
            if self.mode == "r":
                self.firstmember = None
                self.firstmember = self.next()

            if self.mode == "a":
                # Move to the end of the archive,
                # before the first empty block.
                while True:
                    self.fileobj.seek(self.offset)
                    try:
                        tarinfo = self.tarinfo.fromtarfile(self)
                        self.members.append(tarinfo)
                    except EOFHeaderError:
                        self.fileobj.seek(self.offset)
                        break
                    except HeaderError as e:
                        raise ReadError(str(e))

            if self.mode in ("a", "w", "x"):
                self._loaded = True

                if self.pax_headers:
                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
                    self.fileobj.write(buf)
                    self.offset += len(buf)
        except:
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
            raise

    #--------------------------------------------------------------------------
    # Below are the classmethods which act as alternate constructors to the
    # TarFile class. The open() method is the only one that is needed for
    # public use; it is the "super"-constructor and is able to select an
    # adequate "sub"-constructor for a particular compression using the mapping
    # from OPEN_METH.
    #
    # This concept allows one to subclass TarFile without losing the comfort of
    # the super-constructor. A sub-constructor is registered and made available
    # by adding it to the mapping in OPEN_METH.

    @classmethod
    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
        """Open a tar archive for reading, writing or appending. Return
           an appropriate TarFile class.

           mode:
           'r' or 'r:*' open for reading with transparent compression
           'r:'         open for reading exclusively uncompressed
           'r:gz'       open for reading with gzip compression
           'r:bz2'      open for reading with bzip2 compression
           'r:xz'       open for reading with lzma compression
           'a' or 'a:'  open for appending, creating the file if necessary
           'w' or 'w:'  open for writing without compression
           'w:gz'       open for writing with gzip compression
           'w:bz2'      open for writing with bzip2 compression
           'w:xz'       open for writing with lzma compression

           'x' or 'x:'  create a tarfile exclusively without compression, raise
                        an exception if the file is already created
           'x:gz'       create a gzip compressed tarfile, raise an exception
                        if the file is already created
           'x:bz2'      create a bzip2 compressed tarfile, raise an exception
                        if the file is already created
           'x:xz'       create an lzma compressed tarfile, raise an exception
                        if the file is already created

           'r|*'        open a stream of tar blocks with transparent compression
           'r|'         open an uncompressed stream of tar blocks for reading
           'r|gz'       open a gzip compressed stream of tar blocks
           'r|bz2'      open a bzip2 compressed stream of tar blocks
           'r|xz'       open an lzma compressed stream of tar blocks
           'w|'         open an uncompressed stream for writing
           'w|gz'       open a gzip compressed stream for writing
           'w|bz2'      open a bzip2 compressed stream for writing
           'w|xz'       open an lzma compressed stream for writing
        """

        if not name and not fileobj:
            raise ValueError("nothing to open")

        if mode in ("r", "r:*"):
            # Find out which *open() is appropriate for opening the file.
            def not_compressed(comptype):
                return cls.OPEN_METH[comptype] == 'taropen'
            for comptype in sorted(cls.OPEN_METH, key=not_compressed):
                func = getattr(cls, cls.OPEN_METH[comptype])
                if fileobj is not None:
                    saved_pos = fileobj.tell()
                try:
                    return func(name, "r", fileobj, **kwargs)
                except (ReadError, CompressionError):
                    if fileobj is not None:
                        fileobj.seek(saved_pos)
                    continue
            raise ReadError("file could not be opened successfully")

        elif ":" in mode:
            filemode, comptype = mode.split(":", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"

            # Select the *open() function according to
            # given compression.
            if comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
            else:
                raise CompressionError("unknown compression type %r" % comptype)
            return func(name, filemode, fileobj, **kwargs)

        elif "|" in mode:
            filemode, comptype = mode.split("|", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"

            if filemode not in ("r", "w"):
                raise ValueError("mode must be 'r' or 'w'")

            stream = _Stream(name, filemode, comptype, fileobj, bufsize)
            try:
                t = cls(name, filemode, stream, **kwargs)
            except:
                stream.close()
                raise
            t._extfileobj = False
            return t

        elif mode in ("a", "w", "x"):
            return cls.taropen(name, mode, fileobj, **kwargs)

        raise ValueError("undiscernible mode")
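
    # Usage sketch (illustrative only, not part of the module): the mode string
    # passed to open() selects both the file mode and the compression scheme.
    # The archive names below are hypothetical.
    #
    #   with TarFile.open("backup.tar.gz", "r:gz") as tf:    # explicit gzip
    #       names = tf.getnames()
    #   with TarFile.open("backup.tar.xz", "r:*") as tf:     # autodetect compression
    #       names = tf.getnames()
    #   with TarFile.open(fileobj=sys.stdin.buffer, mode="r|*") as tf:
    #       pass                                             # non-seekable stream of tar blocks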

    @classmethod
    def taropen(cls, name, mode="r", fileobj=None, **kwargs):
        """Open uncompressed tar archive name for reading or writing.
        """
        if mode not in ("r", "a", "w", "x"):
            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
        return cls(name, mode, fileobj, **kwargs)

    @classmethod
    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open gzip compressed tar archive name for reading or writing.
           Appending is not allowed.
        """
        if mode not in ("r", "w", "x"):
            raise ValueError("mode must be 'r', 'w' or 'x'")

        try:
            import gzip
            gzip.GzipFile
        except (ImportError, AttributeError):
            raise CompressionError("gzip module is not available")

        try:
            fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj)
        except OSError:
            if fileobj is not None and mode == 'r':
                raise ReadError("not a gzip file")
            raise

        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except OSError:
            fileobj.close()
            if mode == 'r':
                raise ReadError("not a gzip file")
            raise
        except:
            fileobj.close()
            raise
        t._extfileobj = False
        return t

    @classmethod
    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open bzip2 compressed tar archive name for reading or writing.
           Appending is not allowed.
        """
        if mode not in ("r", "w", "x"):
            raise ValueError("mode must be 'r', 'w' or 'x'")

        try:
            import bz2
        except ImportError:
            raise CompressionError("bz2 module is not available")

        fileobj = bz2.BZ2File(fileobj or name, mode,
                              compresslevel=compresslevel)

        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except (OSError, EOFError):
            fileobj.close()
            if mode == 'r':
                raise ReadError("not a bzip2 file")
            raise
        except:
            fileobj.close()
            raise
        t._extfileobj = False
        return t

    @classmethod
    def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
        """Open lzma compressed tar archive name for reading or writing.
           Appending is not allowed.
        """
        if mode not in ("r", "w", "x"):
            raise ValueError("mode must be 'r', 'w' or 'x'")

        try:
            import lzma
        except ImportError:
            raise CompressionError("lzma module is not available")

        fileobj = lzma.LZMAFile(fileobj or name, mode, preset=preset)

        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except (lzma.LZMAError, EOFError):
            fileobj.close()
            if mode == 'r':
                raise ReadError("not an lzma file")
            raise
        except:
            fileobj.close()
            raise
        t._extfileobj = False
        return t

    # All *open() methods are registered here.
    OPEN_METH = {
        "tar": "taropen",   # uncompressed tar
        "gz":  "gzopen",    # gzip compressed tar
        "bz2": "bz2open",   # bzip2 compressed tar
        "xz":  "xzopen"     # lzma compressed tar
    }

    #--------------------------------------------------------------------------
    # The public methods which TarFile provides:

    def close(self):
        """Close the TarFile. In write-mode, two finishing zero blocks are
           appended to the archive.
        """
        if self.closed:
            return

        self.closed = True
        try:
            if self.mode in ("a", "w", "x"):
                self.fileobj.write(NUL * (BLOCKSIZE * 2))
                self.offset += (BLOCKSIZE * 2)
                # fill up the end with zero-blocks
                # (like option -b20 for tar does)
                blocks, remainder = divmod(self.offset, RECORDSIZE)
                if remainder > 0:
                    self.fileobj.write(NUL * (RECORDSIZE - remainder))
        finally:
            if not self._extfileobj:
                self.fileobj.close()

    def getmember(self, name):
        """Return a TarInfo object for member `name'. If `name' can not be
           found in the archive, KeyError is raised. If a member occurs more
           than once in the archive, its last occurrence is assumed to be the
           most up-to-date version.
        """
        tarinfo = self._getmember(name)
        if tarinfo is None:
            raise KeyError("filename %r not found" % name)
        return tarinfo

    def getmembers(self):
        """Return the members of the archive as a list of TarInfo objects. The
           list has the same order as the members in the archive.
        """
        self._check()
        if not self._loaded:    # if we want to obtain a list of
            self._load()        # all members, we first have to
                                # scan the whole archive.
        return self.members

    def getnames(self):
        """Return the members of the archive as a list of their names. It has
           the same order as the list returned by getmembers().
        """
        return [tarinfo.name for tarinfo in self.getmembers()]
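
    # Example (illustrative): getmembers()/getnames() scan the whole archive
    # the first time they are called; "example.tar" is a hypothetical path.
    #
    #   with TarFile.open("example.tar") as tf:
    #       for info in tf.getmembers():
    #           print(info.name, info.size, info.mtime)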

    def gettarinfo(self, name=None, arcname=None, fileobj=None):
        """Create a TarInfo object from the result of os.stat or equivalent
           on an existing file. The file is either named by `name', or
           specified as a file object `fileobj' with a file descriptor. If
           given, `arcname' specifies an alternative name for the file in the
           archive, otherwise, the name is taken from the 'name' attribute of
           'fileobj', or the 'name' argument. The name should be a text
           string.
        """
        self._check("awx")

        # When fileobj is given, replace name by
        # fileobj's real name.
        if fileobj is not None:
            name = fileobj.name

        # Building the name of the member in the archive.
        # Backward slashes are converted to forward slashes,
        # Absolute paths are turned to relative paths.
        if arcname is None:
            arcname = name
        drv, arcname = os.path.splitdrive(arcname)
        arcname = arcname.replace(os.sep, "/")
        arcname = arcname.lstrip("/")

        # Now, fill the TarInfo object with
        # information specific for the file.
        tarinfo = self.tarinfo()
        tarinfo.tarfile = self  # Not needed

        # Use os.stat or os.lstat, depending on if symlinks shall be resolved.
        if fileobj is None:
            if not self.dereference:
                statres = os.lstat(name)
            else:
                statres = os.stat(name)
        else:
            statres = os.fstat(fileobj.fileno())
        linkname = ""

        stmd = statres.st_mode
        if stat.S_ISREG(stmd):
            inode = (statres.st_ino, statres.st_dev)
            if not self.dereference and statres.st_nlink > 1 and \
                    inode in self.inodes and arcname != self.inodes[inode]:
                # Is it a hardlink to an already
                # archived file?
                type = LNKTYPE
                linkname = self.inodes[inode]
            else:
                # The inode is added only if its valid.
                # For win32 it is always 0.
                type = REGTYPE
                if inode[0]:
                    self.inodes[inode] = arcname
        elif stat.S_ISDIR(stmd):
            type = DIRTYPE
        elif stat.S_ISFIFO(stmd):
            type = FIFOTYPE
        elif stat.S_ISLNK(stmd):
            type = SYMTYPE
            linkname = os.readlink(name)
        elif stat.S_ISCHR(stmd):
            type = CHRTYPE
        elif stat.S_ISBLK(stmd):
            type = BLKTYPE
        else:
            return None

        # Fill the TarInfo object with all
        # information we can get.
        tarinfo.name = arcname
        tarinfo.mode = stmd
        tarinfo.uid = statres.st_uid
        tarinfo.gid = statres.st_gid

        if type == REGTYPE:
            tarinfo.size = statres.st_size
        else:
            tarinfo.size = 0
        tarinfo.mtime = statres.st_mtime
        tarinfo.type = type
        tarinfo.linkname = linkname
        if pwd:
            try:
                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
            except KeyError:
                pass
        if grp:
            try:
                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
            except KeyError:
                pass

        if type in (CHRTYPE, BLKTYPE):
            if hasattr(os, "major") and hasattr(os, "minor"):
                tarinfo.devmajor = os.major(statres.st_rdev)
                tarinfo.devminor = os.minor(statres.st_rdev)
        return tarinfo

    def list(self, verbose=True, *, members=None):
        """Print a table of contents to sys.stdout. If `verbose' is False, only
           the names of the members are printed. If it is True, an `ls -l'-like
           output is produced. `members' is optional and must be a subset of the
           list returned by getmembers().
        """
        self._check()

        if members is None:
            members = self
        for tarinfo in members:
            if verbose:
                _safe_print(stat.filemode(tarinfo.mode))
                _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
                                       tarinfo.gname or tarinfo.gid))
                if tarinfo.ischr() or tarinfo.isblk():
                    _safe_print("%10s" %
                                ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
                else:
                    _safe_print("%10d" % tarinfo.size)
                _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
                            % time.localtime(tarinfo.mtime)[:6])

            _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))

            if verbose:
                if tarinfo.issym():
                    _safe_print("-> " + tarinfo.linkname)
                if tarinfo.islnk():
                    _safe_print("link to " + tarinfo.linkname)
            print()

    def add(self, name, arcname=None, recursive=True, *, filter=None):
        """Add the file `name' to the archive. `name' may be any type of file
           (directory, fifo, symbolic link, etc.). If given, `arcname'
           specifies an alternative name for the file in the archive.
           Directories are added recursively by default. This can be avoided by
           setting `recursive' to False. `filter' is a function
           that expects a TarInfo object argument and returns the changed
           TarInfo object, if it returns None the TarInfo object will be
           excluded from the archive.
        """
        self._check("awx")

        if arcname is None:
            arcname = name

        # Skip if somebody tries to archive the archive...
        if self.name is not None and os.path.abspath(name) == self.name:
            self._dbg(2, "tarfile: Skipped %r" % name)
            return

        self._dbg(1, name)

        # Create a TarInfo object from the file.
        tarinfo = self.gettarinfo(name, arcname)

        if tarinfo is None:
            self._dbg(1, "tarfile: Unsupported type %r" % name)
            return

        # Change or exclude the TarInfo object.
        if filter is not None:
            tarinfo = filter(tarinfo)
            if tarinfo is None:
                self._dbg(2, "tarfile: Excluded %r" % name)
                return

        # Append the tar header and data to the archive.
        if tarinfo.isreg():
            with bltn_open(name, "rb") as f:
                self.addfile(tarinfo, f)

        elif tarinfo.isdir():
            self.addfile(tarinfo)
            if recursive:
                for f in sorted(os.listdir(name)):
                    self.add(os.path.join(name, f), os.path.join(arcname, f),
                             recursive, filter=filter)

        else:
            self.addfile(tarinfo)
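
    # Example (illustrative): a `filter' callable may modify or drop members
    # while adding a tree; the paths and the helper name are hypothetical.
    #
    #   def strip_owner(ti):
    #       if ti.name.endswith(".pyc"):
    #           return None                  # exclude compiled files
    #       ti.uid = ti.gid = 0
    #       ti.uname = ti.gname = "root"
    #       return ti
    #
    #   with TarFile.open("src.tar.gz", "w:gz") as tf:
    #       tf.add("src", filter=strip_owner)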

    def addfile(self, tarinfo, fileobj=None):
        """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
           given, it should be a binary file, and tarinfo.size bytes are read
           from it and added to the archive. You can create TarInfo objects
           directly, or by using gettarinfo().
        """
        self._check("awx")

        tarinfo = copy.copy(tarinfo)

        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
        self.fileobj.write(buf)
        self.offset += len(buf)
        bufsize = self.copybufsize
        # If there's data to follow, append it.
        if fileobj is not None:
            copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize)
            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
            if remainder > 0:
                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
                blocks += 1
            self.offset += blocks * BLOCKSIZE

        self.members.append(tarinfo)
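
    # Example (illustrative): addfile() can write members that do not exist on
    # disk by pairing a hand-built TarInfo with any binary file object of the
    # declared size; all names here are hypothetical.
    #
    #   import io
    #   data = b"hello world\n"
    #   info = TarInfo(name="greeting.txt")
    #   info.size = len(data)
    #   with TarFile.open("out.tar", "w") as tf:
    #       tf.addfile(info, io.BytesIO(data))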

    def extractall(self, path=".", members=None, *, numeric_owner=False):
        """Extract all members from the archive to the current working
           directory and set owner, modification time and permissions on
           directories afterwards. `path' specifies a different directory
           to extract to. `members' is optional and must be a subset of the
           list returned by getmembers(). If `numeric_owner` is True, only
           the numbers for user/group names are used and not the names.
        """
        directories = []

        if members is None:
            members = self

        for tarinfo in members:
            if tarinfo.isdir():
                # Extract directories with a safe mode.
                directories.append(tarinfo)
                tarinfo = copy.copy(tarinfo)
                tarinfo.mode = 0o700
            # Do not set_attrs directories, as we will do that further down
            self.extract(tarinfo, path, set_attrs=not tarinfo.isdir(),
                         numeric_owner=numeric_owner)

        # Reverse sort directories.
        directories.sort(key=lambda a: a.name)
        directories.reverse()

        # Set correct owner, mtime and filemode on directories.
        for tarinfo in directories:
            dirpath = os.path.join(path, tarinfo.name)
            try:
                self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
                self.utime(tarinfo, dirpath)
                self.chmod(tarinfo, dirpath)
            except ExtractError as e:
                if self.errorlevel > 1:
                    raise
                else:
                    self._dbg(1, "tarfile: %s" % e)

    def extract(self, member, path="", set_attrs=True, *, numeric_owner=False):
        """Extract a member from the archive to the current working directory,
           using its full name. Its file information is extracted as accurately
           as possible. `member' may be a filename or a TarInfo object. You can
           specify a different directory using `path'. File attributes (owner,
           mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
           is True, only the numbers for user/group names are used and not
           the names.
        """
        self._check("r")

        if isinstance(member, str):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member

        # Prepare the link target for makelink().
        if tarinfo.islnk():
            tarinfo._link_target = os.path.join(path, tarinfo.linkname)

        try:
            self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
                                 set_attrs=set_attrs,
                                 numeric_owner=numeric_owner)
        except OSError as e:
            if self.errorlevel > 0:
                raise
            else:
                if e.filename is None:
                    self._dbg(1, "tarfile: %s" % e.strerror)
                else:
                    self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
        except ExtractError as e:
            if self.errorlevel > 1:
                raise
            else:
                self._dbg(1, "tarfile: %s" % e)

    def extractfile(self, member):
        """Extract a member from the archive as a file object. `member' may be
           a filename or a TarInfo object. If `member' is a regular file or a
           link, an io.BufferedReader object is returned. Otherwise, None is
           returned.
        """
        self._check("r")

        if isinstance(member, str):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member

        if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
            # Members with unknown types are treated as regular files.
            return self.fileobject(self, tarinfo)

        elif tarinfo.islnk() or tarinfo.issym():
            if isinstance(self.fileobj, _Stream):
                # A small but ugly workaround for the case that someone tries
                # to extract a (sym)link as a file-object from a non-seekable
                # stream of tar blocks.
                raise StreamError("cannot extract (sym)link as file object")
            else:
                # A (sym)link's file object is its target's file object.
                return self.extractfile(self._find_link_target(tarinfo))
        else:
            # If there's no data associated with the member (directory, chrdev,
            # blkdev, etc.), return None instead of a file object.
            return None
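
    # Example (illustrative): extractfile() returns a readable binary file
    # object for regular files and links, and None for members without data;
    # the archive and member names are hypothetical.
    #
    #   with TarFile.open("example.tar") as tf:
    #       f = tf.extractfile("docs/README")
    #       if f is not None:
    #           text = f.read().decode("utf-8")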

    def _extract_member(self, tarinfo, targetpath, set_attrs=True,
                        numeric_owner=False):
        """Extract the TarInfo object tarinfo to a physical
           file called targetpath.
        """
        # Fetch the TarInfo object for the given name
        # and build the destination pathname, replacing
        # forward slashes to platform specific separators.
        targetpath = targetpath.rstrip("/")
        targetpath = targetpath.replace("/", os.sep)

        # Create all upper directories.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            # Create directories that are not part of the archive with
            # default permissions.
            os.makedirs(upperdirs)

        if tarinfo.islnk() or tarinfo.issym():
            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
        else:
            self._dbg(1, tarinfo.name)

        if tarinfo.isreg():
            self.makefile(tarinfo, targetpath)
        elif tarinfo.isdir():
            self.makedir(tarinfo, targetpath)
        elif tarinfo.isfifo():
            self.makefifo(tarinfo, targetpath)
        elif tarinfo.ischr() or tarinfo.isblk():
            self.makedev(tarinfo, targetpath)
        elif tarinfo.islnk() or tarinfo.issym():
            self.makelink(tarinfo, targetpath)
        elif tarinfo.type not in SUPPORTED_TYPES:
            self.makeunknown(tarinfo, targetpath)
        else:
            self.makefile(tarinfo, targetpath)

        if set_attrs:
            self.chown(tarinfo, targetpath, numeric_owner)
            if not tarinfo.issym():
                self.chmod(tarinfo, targetpath)
                self.utime(tarinfo, targetpath)

    #--------------------------------------------------------------------------
    # Below are the different file methods. They are called via
    # _extract_member() when extract() is called. They can be replaced in a
    # subclass to implement other functionality.

    def makedir(self, tarinfo, targetpath):
        """Make a directory called targetpath.
        """
        try:
            # Use a safe mode for the directory, the real mode is set
            # later in _extract_member().
            os.mkdir(targetpath, 0o700)
        except FileExistsError:
            pass

    def makefile(self, tarinfo, targetpath):
        """Make a file called targetpath.
        """
        source = self.fileobj
        source.seek(tarinfo.offset_data)
        bufsize = self.copybufsize
        with bltn_open(targetpath, "wb") as target:
            if tarinfo.sparse is not None:
                for offset, size in tarinfo.sparse:
                    target.seek(offset)
                    copyfileobj(source, target, size, ReadError, bufsize)
                target.seek(tarinfo.size)
                target.truncate()
            else:
                copyfileobj(source, target, tarinfo.size, ReadError, bufsize)

    def makeunknown(self, tarinfo, targetpath):
        """Make a file from a TarInfo object with an unknown type
           at targetpath.
        """
        self.makefile(tarinfo, targetpath)
        self._dbg(1, "tarfile: Unknown file type %r, " \
                     "extracted as regular file." % tarinfo.type)

    def makefifo(self, tarinfo, targetpath):
        """Make a fifo called targetpath.
        """
        if hasattr(os, "mkfifo"):
            os.mkfifo(targetpath)
        else:
            raise ExtractError("fifo not supported by system")

    def makedev(self, tarinfo, targetpath):
        """Make a character or block device called targetpath.
        """
        if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
            raise ExtractError("special devices not supported by system")

        mode = tarinfo.mode
        if tarinfo.isblk():
            mode |= stat.S_IFBLK
        else:
            mode |= stat.S_IFCHR

        os.mknod(targetpath, mode,
                 os.makedev(tarinfo.devmajor, tarinfo.devminor))

    def makelink(self, tarinfo, targetpath):
        """Make a (symbolic) link called targetpath. If it cannot be created
           (platform limitation), we try to make a copy of the referenced file
           instead of a link.
        """
        try:
            # For systems that support symbolic and hard links.
            if tarinfo.issym():
                os.symlink(tarinfo.linkname, targetpath)
            else:
                # See extract().
                if os.path.exists(tarinfo._link_target):
                    os.link(tarinfo._link_target, targetpath)
                else:
                    self._extract_member(self._find_link_target(tarinfo),
                                         targetpath)
        except symlink_exception:
            try:
                self._extract_member(self._find_link_target(tarinfo),
                                     targetpath)
            except KeyError:
                raise ExtractError("unable to resolve link inside archive")

    def chown(self, tarinfo, targetpath, numeric_owner):
        """Set owner of targetpath according to tarinfo. If numeric_owner
           is True, use .gid/.uid instead of .gname/.uname. If numeric_owner
           is False, fall back to .gid/.uid when the search based on name
           fails.
        """
        if hasattr(os, "geteuid") and os.geteuid() == 0:
            # We have to be root to do so.
            g = tarinfo.gid
            u = tarinfo.uid
            if not numeric_owner:
                try:
                    if grp:
                        g = grp.getgrnam(tarinfo.gname)[2]
                except KeyError:
                    pass
                try:
                    if pwd:
                        u = pwd.getpwnam(tarinfo.uname)[2]
                except KeyError:
                    pass
            try:
                if tarinfo.issym() and hasattr(os, "lchown"):
                    os.lchown(targetpath, u, g)
                else:
                    os.chown(targetpath, u, g)
            except OSError:
                raise ExtractError("could not change owner")

    def chmod(self, tarinfo, targetpath):
        """Set file permissions of targetpath according to tarinfo.
        """
        try:
            os.chmod(targetpath, tarinfo.mode)
        except OSError:
            raise ExtractError("could not change mode")

    def utime(self, tarinfo, targetpath):
        """Set modification time of targetpath according to tarinfo.
        """
        if not hasattr(os, 'utime'):
            return
        try:
            os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
        except OSError:
            raise ExtractError("could not change modification time")

    #--------------------------------------------------------------------------
    def next(self):
        """Return the next member of the archive as a TarInfo object, when
           TarFile is opened for reading. Return None if there is no more
           available.
        """
        self._check("ra")
        if self.firstmember is not None:
            m = self.firstmember
            self.firstmember = None
            return m

        # Advance the file pointer.
        if self.offset != self.fileobj.tell():
            self.fileobj.seek(self.offset - 1)
            if not self.fileobj.read(1):
                raise ReadError("unexpected end of data")

        # Read the next block.
        tarinfo = None
        while True:
            try:
                tarinfo = self.tarinfo.fromtarfile(self)
            except EOFHeaderError as e:
                if self.ignore_zeros:
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
            except InvalidHeaderError as e:
                if self.ignore_zeros:
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
                elif self.offset == 0:
                    raise ReadError(str(e))
            except EmptyHeaderError:
                if self.offset == 0:
                    raise ReadError("empty file")
            except TruncatedHeaderError as e:
                if self.offset == 0:
                    raise ReadError(str(e))
            except SubsequentHeaderError as e:
                raise ReadError(str(e))
            break

        if tarinfo is not None:
            self.members.append(tarinfo)
        else:
            self._loaded = True

        return tarinfo

    #--------------------------------------------------------------------------
    # Little helper methods:

    def _getmember(self, name, tarinfo=None, normalize=False):
        """Find an archive member by name from bottom to top.
           If tarinfo is given, it is used as the starting point.
        """
        # Ensure that all members have been loaded.
        members = self.getmembers()

        # Limit the member search list up to tarinfo.
        if tarinfo is not None:
            members = members[:members.index(tarinfo)]

        if normalize:
            name = os.path.normpath(name)

        for member in reversed(members):
            if normalize:
                member_name = os.path.normpath(member.name)
            else:
                member_name = member.name

            if name == member_name:
                return member

    def _load(self):
        """Read through the entire archive file and look for readable
           members.
        """
        while True:
            tarinfo = self.next()
            if tarinfo is None:
                break
        self._loaded = True

    def _check(self, mode=None):
        """Check if TarFile is still open, and if the operation's mode
           corresponds to TarFile's mode.
        """
        if self.closed:
            raise OSError("%s is closed" % self.__class__.__name__)
        if mode is not None and self.mode not in mode:
            raise OSError("bad operation for mode %r" % self.mode)

    def _find_link_target(self, tarinfo):
        """Find the target member of a symlink or hardlink member in the
           archive.
        """
        if tarinfo.issym():
            # Always search the entire archive.
            linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
            limit = None
        else:
            # Search the archive before the link, because a hard link is
            # just a reference to an already archived file.
            linkname = tarinfo.linkname
            limit = tarinfo

        member = self._getmember(linkname, tarinfo=limit, normalize=True)
        if member is None:
            raise KeyError("linkname %r not found" % linkname)
        return member

    def __iter__(self):
        """Provide an iterator object.
        """
        if self._loaded:
            yield from self.members
            return

        # Yield items using TarFile's next() method.
        # When all members have been read, set TarFile as _loaded.
        index = 0
        # Fix for SF #1100429: Under rare circumstances it can
        # happen that getmembers() is called during iteration,
        # which will have already exhausted the next() method.
        if self.firstmember is not None:
            tarinfo = self.next()
            index += 1
            yield tarinfo

        while True:
            if index < len(self.members):
                tarinfo = self.members[index]
            elif not self._loaded:
                tarinfo = self.next()
                if not tarinfo:
                    self._loaded = True
                    return
            else:
                return

            index += 1
            yield tarinfo

    def _dbg(self, level, msg):
        """Write debugging output to sys.stderr.
        """
        if level <= self.debug:
            print(msg, file=sys.stderr)

    def __enter__(self):
        self._check()
        return self

    def __exit__(self, type, value, traceback):
        if type is None:
            self.close()
        else:
            # An exception occurred. We must not call close() because
            # it would try to write end-of-archive blocks and padding.
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True

#--------------------
# exported functions
#--------------------
def is_tarfile(name):
    """Return True if name points to a tar archive that we
       are able to handle, else return False.
    """
    try:
        t = open(name)
        t.close()
        return True
    except TarError:
        return False


open = TarFile.open
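
# Example (illustrative): the module-level `open' alias and is_tarfile() are
# the usual entry points; "archive.tar.bz2" is a hypothetical path.
#
#   if is_tarfile("archive.tar.bz2"):
#       with open("archive.tar.bz2") as tf:   # same as TarFile.open(...)
#           tf.extractall(path="unpacked")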

def main():
    import argparse

    description = 'A simple command-line interface for tarfile module.'
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('-v', '--verbose', action='store_true', default=False,
                        help='Verbose output')
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-l', '--list', metavar='<tarfile>',
                       help='Show listing of a tarfile')
    group.add_argument('-e', '--extract', nargs='+',
                       metavar=('<tarfile>', '<output_dir>'),
                       help='Extract tarfile into target dir')
    group.add_argument('-c', '--create', nargs='+',
                       metavar=('<name>', '<file>'),
                       help='Create tarfile from sources')
    group.add_argument('-t', '--test', metavar='<tarfile>',
                       help='Test if a tarfile is valid')
    args = parser.parse_args()

    if args.test is not None:
        src = args.test
        if is_tarfile(src):
            with open(src, 'r') as tar:
                tar.getmembers()
                print(tar.getmembers(), file=sys.stderr)
            if args.verbose:
                print('{!r} is a tar archive.'.format(src))
        else:
            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))

    elif args.list is not None:
        src = args.list
        if is_tarfile(src):
            with TarFile.open(src, 'r:*') as tf:
                tf.list(verbose=args.verbose)
        else:
            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))

    elif args.extract is not None:
        if len(args.extract) == 1:
            src = args.extract[0]
            curdir = os.curdir
        elif len(args.extract) == 2:
            src, curdir = args.extract
        else:
            parser.exit(1, parser.format_help())

        if is_tarfile(src):
            with TarFile.open(src, 'r:*') as tf:
                tf.extractall(path=curdir)
            if args.verbose:
                if curdir == '.':
                    msg = '{!r} file is extracted.'.format(src)
                else:
                    msg = ('{!r} file is extracted '
                           'into {!r} directory.').format(src, curdir)
                print(msg)
        else:
            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))

    elif args.create is not None:
        tar_name = args.create.pop(0)
        _, ext = os.path.splitext(tar_name)
        compressions = {
            # gz
            '.gz': 'gz',
            '.tgz': 'gz',
            # xz
            '.xz': 'xz',
            '.txz': 'xz',
            # bz2
            '.bz2': 'bz2',
            '.tbz': 'bz2',
            '.tbz2': 'bz2',
            '.tb2': 'bz2',
        }
        tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
        tar_files = args.create

        with TarFile.open(tar_name, tar_mode) as tf:
            for file_name in tar_files:
                tf.add(file_name)

        if args.verbose:
            print('{!r} file created.'.format(tar_name))


if __name__ == '__main__':
    main()
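
# Command-line usage sketch (illustrative, assuming this file is importable as
# the `tarfile' module; the file names are hypothetical):
#
#   python -m tarfile -l archive.tar          # list contents
#   python -m tarfile -e archive.tar outdir   # extract into outdir
#   python -m tarfile -c new.tar.gz src/      # create, compression from suffix
#   python -m tarfile -t archive.tar -v       # test validity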