summaryrefslogtreecommitdiff
path: root/MAT
diff options
context:
space:
mode:
authorjvoisin2013-01-20 03:57:17 +0100
committerjvoisin2013-01-20 03:57:17 +0100
commitac4110fbfc60b112836d0b279ae07ea9a779b16d (patch)
tree2649b954ddb7fcfa15c5a7167907a27398008732 /MAT
parent8427e8f4aff71a40ea6ba920608c38edd7ee6905 (diff)
preliminary support of gtk3 (one test fails for PDF)
Diffstat (limited to 'MAT')
-rw-r--r--MAT/archive.py3
-rw-r--r--MAT/office.py4
-rw-r--r--MAT/strippers.py5
-rw-r--r--MAT/tarfile/__init__.py1
-rw-r--r--MAT/tarfile/tarfile.py2593
5 files changed, 7 insertions, 2599 deletions
diff --git a/MAT/archive.py b/MAT/archive.py
index 1dcddef..d3e6861 100644
--- a/MAT/archive.py
+++ b/MAT/archive.py
@@ -10,7 +10,8 @@ import tempfile
10 10
11import parser 11import parser
12import mat 12import mat
13from tarfile import tarfile 13#from tarfile import tarfile
14import tarfile
14 15
15 16
16class GenericArchiveStripper(parser.GenericParser): 17class GenericArchiveStripper(parser.GenericParser):
diff --git a/MAT/office.py b/MAT/office.py
index 20664d2..b74b49c 100644
--- a/MAT/office.py
+++ b/MAT/office.py
@@ -10,7 +10,7 @@ import xml.dom.minidom as minidom
10 10
11try: 11try:
12 import cairo 12 import cairo
13 import poppler 13 from gi.repository import Poppler
14except ImportError: 14except ImportError:
15 pass 15 pass
16 16
@@ -125,7 +125,7 @@ class PdfStripper(parser.GenericParser):
125 uri = 'file://' + os.path.abspath(self.filename) 125 uri = 'file://' + os.path.abspath(self.filename)
126 self.password = None 126 self.password = None
127 self.pdf_quality = kwargs['low_pdf_quality'] 127 self.pdf_quality = kwargs['low_pdf_quality']
128 self.document = poppler.document_new_from_file(uri, self.password) 128 self.document = Poppler.Document.new_from_file(uri, self.password)
129 self.meta_list = frozenset(['title', 'author', 'subject', 'keywords', 'creator', 129 self.meta_list = frozenset(['title', 'author', 'subject', 'keywords', 'creator',
130 'producer', 'metadata']) 130 'producer', 'metadata'])
131 131
diff --git a/MAT/strippers.py b/MAT/strippers.py
index 4b673fe..ad79d54 100644
--- a/MAT/strippers.py
+++ b/MAT/strippers.py
@@ -4,6 +4,7 @@
4 4
5import images 5import images
6import audio 6import audio
7import gi
7import office 8import office
8import archive 9import archive
9import misc 10import misc
@@ -24,9 +25,9 @@ STRIPPERS = {
24# PDF support 25# PDF support
25pdfSupport = True 26pdfSupport = True
26try: 27try:
27 import poppler 28 from gi.repository import Poppler
28except ImportError: 29except ImportError:
29 print('Unable to import python-poppler: not PDF support') 30 print('Unable to import Poppler')
30 pdfSupport = False 31 pdfSupport = False
31 32
32try: 33try:
diff --git a/MAT/tarfile/__init__.py b/MAT/tarfile/__init__.py
deleted file mode 100644
index 8b13789..0000000
--- a/MAT/tarfile/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
1
diff --git a/MAT/tarfile/tarfile.py b/MAT/tarfile/tarfile.py
deleted file mode 100644
index 6978eb1..0000000
--- a/MAT/tarfile/tarfile.py
+++ /dev/null
@@ -1,2593 +0,0 @@
1# -*- coding: utf-8 -*-
2#-------------------------------------------------------------------
3# tarfile.py
4#-------------------------------------------------------------------
5# Copyright (C) 2002 Lars Gustäbel <lars@gustaebel.de>
6# All rights reserved.
7#
8# Permission is hereby granted, free of charge, to any person
9# obtaining a copy of this software and associated documentation
10# files (the "Software"), to deal in the Software without
11# restriction, including without limitation the rights to use,
12# copy, modify, merge, publish, distribute, sublicense, and/or sell
13# copies of the Software, and to permit persons to whom the
14# Software is furnished to do so, subject to the following
15# conditions:
16#
17# The above copyright notice and this permission notice shall be
18# included in all copies or substantial portions of the Software.
19#
20# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
21# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
22# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
23# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
24# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
25# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
26# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
27# OTHER DEALINGS IN THE SOFTWARE.
28#
29"""Read from and write to tar format archives.
30"""
31
32__version__ = "$Revision$"
33# $Source$
34
35version = "0.9.0"
36__author__ = "Lars Gustäbel (lars@gustaebel.de)"
37__date__ = "$Date$"
38__cvsid__ = "$Id$"
39__credits__ = "Gustavo Niemeyer, Niels Gustäbel, Richard Townsend."
40
41#---------
42# Imports
43#---------
44import sys
45import os
46import shutil
47import stat
48import errno
49import time
50import struct
51import copy
52import re
53import operator
54
55try:
56 import grp, pwd
57except ImportError:
58 grp = pwd = None
59
60# from tarfile import *
61__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
62
63#---------------------------------------------------------
64# tar constants
65#---------------------------------------------------------
66NUL = "\0" # the null character
67BLOCKSIZE = 512 # length of processing blocks
68RECORDSIZE = BLOCKSIZE * 20 # length of records
69GNU_MAGIC = "ustar \0" # magic gnu tar string
70POSIX_MAGIC = "ustar\x0000" # magic posix tar string
71
72LENGTH_NAME = 100 # maximum length of a filename
73LENGTH_LINK = 100 # maximum length of a linkname
74LENGTH_PREFIX = 155 # maximum length of the prefix field
75
76REGTYPE = "0" # regular file
77AREGTYPE = "\0" # regular file
78LNKTYPE = "1" # link (inside tarfile)
79SYMTYPE = "2" # symbolic link
80CHRTYPE = "3" # character special device
81BLKTYPE = "4" # block special device
82DIRTYPE = "5" # directory
83FIFOTYPE = "6" # fifo special device
84CONTTYPE = "7" # contiguous file
85
86GNUTYPE_LONGNAME = "L" # GNU tar longname
87GNUTYPE_LONGLINK = "K" # GNU tar longlink
88GNUTYPE_SPARSE = "S" # GNU tar sparse file
89
90XHDTYPE = "x" # POSIX.1-2001 extended header
91XGLTYPE = "g" # POSIX.1-2001 global header
92SOLARIS_XHDTYPE = "X" # Solaris extended header
93
94USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format
95GNU_FORMAT = 1 # GNU tar format
96PAX_FORMAT = 2 # POSIX.1-2001 (pax) format
97DEFAULT_FORMAT = GNU_FORMAT
98
99#---------------------------------------------------------
100# tarfile constants
101#---------------------------------------------------------
102# File types that tarfile supports:
103SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
104 SYMTYPE, DIRTYPE, FIFOTYPE,
105 CONTTYPE, CHRTYPE, BLKTYPE,
106 GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
107 GNUTYPE_SPARSE)
108
109# File types that will be treated as a regular file.
110REGULAR_TYPES = (REGTYPE, AREGTYPE,
111 CONTTYPE, GNUTYPE_SPARSE)
112
113# File types that are part of the GNU tar format.
114GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
115 GNUTYPE_SPARSE)
116
117# Fields from a pax header that override a TarInfo attribute.
118PAX_FIELDS = ("path", "linkpath", "size", "mtime",
119 "uid", "gid", "uname", "gname")
120
121# Fields in a pax header that are numbers, all other fields
122# are treated as strings.
123PAX_NUMBER_FIELDS = {
124 "atime": float,
125 "ctime": float,
126 "mtime": float,
127 "uid": int,
128 "gid": int,
129 "size": int
130}
131
132#---------------------------------------------------------
133# Bits used in the mode field, values in octal.
134#---------------------------------------------------------
135S_IFLNK = 0120000 # symbolic link
136S_IFREG = 0100000 # regular file
137S_IFBLK = 0060000 # block device
138S_IFDIR = 0040000 # directory
139S_IFCHR = 0020000 # character device
140S_IFIFO = 0010000 # fifo
141
142TSUID = 04000 # set UID on execution
143TSGID = 02000 # set GID on execution
144TSVTX = 01000 # reserved
145
146TUREAD = 0400 # read by owner
147TUWRITE = 0200 # write by owner
148TUEXEC = 0100 # execute/search by owner
149TGREAD = 0040 # read by group
150TGWRITE = 0020 # write by group
151TGEXEC = 0010 # execute/search by group
152TOREAD = 0004 # read by other
153TOWRITE = 0002 # write by other
154TOEXEC = 0001 # execute/search by other
155
156#---------------------------------------------------------
157# initialization
158#---------------------------------------------------------
159ENCODING = sys.getfilesystemencoding()
160if ENCODING is None:
161 ENCODING = sys.getdefaultencoding()
162
163#---------------------------------------------------------
164# Some useful functions
165#---------------------------------------------------------
166
167def stn(s, length):
168 """Convert a python string to a null-terminated string buffer.
169 """
170 return s[:length] + (length - len(s)) * NUL
171
172def nts(s):
173 """Convert a null-terminated string field to a python string.
174 """
175 # Use the string up to the first null char.
176 p = s.find("\0")
177 if p == -1:
178 return s
179 return s[:p]
180
181def nti(s):
182 """Convert a number field to a python number.
183 """
184 # There are two possible encodings for a number field, see
185 # itn() below.
186 if s[0] != chr(0200):
187 try:
188 n = int(nts(s) or "0", 8)
189 except ValueError:
190 raise InvalidHeaderError("invalid header")
191 else:
192 n = 0L
193 for i in xrange(len(s) - 1):
194 n <<= 8
195 n += ord(s[i + 1])
196 return n
197
198def itn(n, digits=8, format=DEFAULT_FORMAT):
199 """Convert a python number to a number field.
200 """
201 # POSIX 1003.1-1988 requires numbers to be encoded as a string of
202 # octal digits followed by a null-byte, this allows values up to
203 # (8**(digits-1))-1. GNU tar allows storing numbers greater than
204 # that if necessary. A leading 0200 byte indicates this particular
205 # encoding, the following digits-1 bytes are a big-endian
206 # representation. This allows values up to (256**(digits-1))-1.
207 if 0 <= n < 8 ** (digits - 1):
208 s = "%0*o" % (digits - 1, n) + NUL
209 else:
210 if format != GNU_FORMAT or n >= 256 ** (digits - 1):
211 raise ValueError("overflow in number field")
212
213 if n < 0:
214 # XXX We mimic GNU tar's behaviour with negative numbers,
215 # this could raise OverflowError.
216 n = struct.unpack("L", struct.pack("l", n))[0]
217
218 s = ""
219 for i in xrange(digits - 1):
220 s = chr(n & 0377) + s
221 n >>= 8
222 s = chr(0200) + s
223 return s
224
225def uts(s, encoding, errors):
226 """Convert a unicode object to a string.
227 """
228 if errors == "utf-8":
229 # An extra error handler similar to the -o invalid=UTF-8 option
230 # in POSIX.1-2001. Replace untranslatable characters with their
231 # UTF-8 representation.
232 try:
233 return s.encode(encoding, "strict")
234 except UnicodeEncodeError:
235 x = []
236 for c in s:
237 try:
238 x.append(c.encode(encoding, "strict"))
239 except UnicodeEncodeError:
240 x.append(c.encode("utf8"))
241 return "".join(x)
242 else:
243 return s.encode(encoding, errors)
244
245def calc_chksums(buf):
246 """Calculate the checksum for a member's header by summing up all
247 characters except for the chksum field which is treated as if
248 it was filled with spaces. According to the GNU tar sources,
249 some tars (Sun and NeXT) calculate chksum with signed char,
250 which will be different if there are chars in the buffer with
251 the high bit set. So we calculate two checksums, unsigned and
252 signed.
253 """
254 unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512]))
255 signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512]))
256 return unsigned_chksum, signed_chksum
257
258def copyfileobj(src, dst, length=None):
259 """Copy length bytes from fileobj src to fileobj dst.
260 If length is None, copy the entire content.
261 """
262 if length == 0:
263 return
264 if length is None:
265 shutil.copyfileobj(src, dst)
266 return
267
268 BUFSIZE = 16 * 1024
269 blocks, remainder = divmod(length, BUFSIZE)
270 for b in xrange(blocks):
271 buf = src.read(BUFSIZE)
272 if len(buf) < BUFSIZE:
273 raise IOError("end of file reached")
274 dst.write(buf)
275
276 if remainder != 0:
277 buf = src.read(remainder)
278 if len(buf) < remainder:
279 raise IOError("end of file reached")
280 dst.write(buf)
281 return
282
283filemode_table = (
284 ((S_IFLNK, "l"),
285 (S_IFREG, "-"),
286 (S_IFBLK, "b"),
287 (S_IFDIR, "d"),
288 (S_IFCHR, "c"),
289 (S_IFIFO, "p")),
290
291 ((TUREAD, "r"),),
292 ((TUWRITE, "w"),),
293 ((TUEXEC|TSUID, "s"),
294 (TSUID, "S"),
295 (TUEXEC, "x")),
296
297 ((TGREAD, "r"),),
298 ((TGWRITE, "w"),),
299 ((TGEXEC|TSGID, "s"),
300 (TSGID, "S"),
301 (TGEXEC, "x")),
302
303 ((TOREAD, "r"),),
304 ((TOWRITE, "w"),),
305 ((TOEXEC|TSVTX, "t"),
306 (TSVTX, "T"),
307 (TOEXEC, "x"))
308)
309
310def filemode(mode):
311 """Convert a file's mode to a string of the form
312 -rwxrwxrwx.
313 Used by TarFile.list()
314 """
315 perm = []
316 for table in filemode_table:
317 for bit, char in table:
318 if mode & bit == bit:
319 perm.append(char)
320 break
321 else:
322 perm.append("-")
323 return "".join(perm)
324
325class TarError(Exception):
326 """Base exception."""
327 pass
328class ExtractError(TarError):
329 """General exception for extract errors."""
330 pass
331class ReadError(TarError):
332 """Exception for unreadble tar archives."""
333 pass
334class CompressionError(TarError):
335 """Exception for unavailable compression methods."""
336 pass
337class StreamError(TarError):
338 """Exception for unsupported operations on stream-like TarFiles."""
339 pass
340class HeaderError(TarError):
341 """Base exception for header errors."""
342 pass
343class EmptyHeaderError(HeaderError):
344 """Exception for empty headers."""
345 pass
346class TruncatedHeaderError(HeaderError):
347 """Exception for truncated headers."""
348 pass
349class EOFHeaderError(HeaderError):
350 """Exception for end of file headers."""
351 pass
352class InvalidHeaderError(HeaderError):
353 """Exception for invalid headers."""
354 pass
355class SubsequentHeaderError(HeaderError):
356 """Exception for missing and invalid extended headers."""
357 pass
358
359#---------------------------
360# internal stream interface
361#---------------------------
362class _LowLevelFile:
363 """Low-level file object. Supports reading and writing.
364 It is used instead of a regular file object for streaming
365 access.
366 """
367
368 def __init__(self, name, mode):
369 mode = {
370 "r": os.O_RDONLY,
371 "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
372 }[mode]
373 if hasattr(os, "O_BINARY"):
374 mode |= os.O_BINARY
375 self.fd = os.open(name, mode, 0666)
376
377 def close(self):
378 os.close(self.fd)
379
380 def read(self, size):
381 return os.read(self.fd, size)
382
383 def write(self, s):
384 os.write(self.fd, s)
385
386class _Stream:
387 """Class that serves as an adapter between TarFile and
388 a stream-like object. The stream-like object only
389 needs to have a read() or write() method and is accessed
390 blockwise. Use of gzip or bzip2 compression is possible.
391 A stream-like object could be for example: sys.stdin,
392 sys.stdout, a socket, a tape device etc.
393
394 _Stream is intended to be used only internally.
395 """
396
397 def __init__(self, name, mode, comptype, fileobj, bufsize):
398 """Construct a _Stream object.
399 """
400 self._extfileobj = True
401 if fileobj is None:
402 fileobj = _LowLevelFile(name, mode)
403 self._extfileobj = False
404
405 if comptype == '*':
406 # Enable transparent compression detection for the
407 # stream interface
408 fileobj = _StreamProxy(fileobj)
409 comptype = fileobj.getcomptype()
410
411 self.name = name or ""
412 self.mode = mode
413 self.comptype = comptype
414 self.fileobj = fileobj
415 self.bufsize = bufsize
416 self.buf = ""
417 self.pos = 0L
418 self.closed = False
419
420 if comptype == "gz":
421 try:
422 import zlib
423 except ImportError:
424 raise CompressionError("zlib module is not available")
425 self.zlib = zlib
426 self.crc = zlib.crc32("") & 0xffffffffL
427 if mode == "r":
428 self._init_read_gz()
429 else:
430 self._init_write_gz()
431
432 if comptype == "bz2":
433 try:
434 import bz2
435 except ImportError:
436 raise CompressionError("bz2 module is not available")
437 if mode == "r":
438 self.dbuf = ""
439 self.cmp = bz2.BZ2Decompressor()
440 else:
441 self.cmp = bz2.BZ2Compressor()
442
443 def __del__(self):
444 if hasattr(self, "closed") and not self.closed:
445 self.close()
446
447 def _init_write_gz(self):
448 """Initialize for writing with gzip compression.
449 """
450 self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
451 -self.zlib.MAX_WBITS,
452 self.zlib.DEF_MEM_LEVEL,
453 0)
454 timestamp = struct.pack("<L", long(time.time()))
455 self.__write("\037\213\010\010%s\002\377" % timestamp)
456 if self.name.endswith(".gz"):
457 self.name = self.name[:-3]
458 self.__write(self.name + NUL)
459
460 def write(self, s):
461 """Write string s to the stream.
462 """
463 if self.comptype == "gz":
464 self.crc = self.zlib.crc32(s, self.crc) & 0xffffffffL
465 self.pos += len(s)
466 if self.comptype != "tar":
467 s = self.cmp.compress(s)
468 self.__write(s)
469
470 def __write(self, s):
471 """Write string s to the stream if a whole new block
472 is ready to be written.
473 """
474 self.buf += s
475 while len(self.buf) > self.bufsize:
476 self.fileobj.write(self.buf[:self.bufsize])
477 self.buf = self.buf[self.bufsize:]
478
479 def close(self):
480 """Close the _Stream object. No operation should be
481 done on it afterwards.
482 """
483 if self.closed:
484 return
485
486 if self.mode == "w" and self.comptype != "tar":
487 self.buf += self.cmp.flush()
488
489 if self.mode == "w" and self.buf:
490 self.fileobj.write(self.buf)
491 self.buf = ""
492 if self.comptype == "gz":
493 # The native zlib crc is an unsigned 32-bit integer, but
494 # the Python wrapper implicitly casts that to a signed C
495 # long. So, on a 32-bit box self.crc may "look negative",
496 # while the same crc on a 64-bit box may "look positive".
497 # To avoid irksome warnings from the `struct` module, force
498 # it to look positive on all boxes.
499 self.fileobj.write(struct.pack("<L", self.crc & 0xffffffffL))
500 self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFFL))
501
502 if not self._extfileobj:
503 self.fileobj.close()
504
505 self.closed = True
506
507 def _init_read_gz(self):
508 """Initialize for reading a gzip compressed fileobj.
509 """
510 self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
511 self.dbuf = ""
512
513 # taken from gzip.GzipFile with some alterations
514 if self.__read(2) != "\037\213":
515 raise ReadError("not a gzip file")
516 if self.__read(1) != "\010":
517 raise CompressionError("unsupported compression method")
518
519 flag = ord(self.__read(1))
520 self.__read(6)
521
522 if flag & 4:
523 xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
524 self.read(xlen)
525 if flag & 8:
526 while True:
527 s = self.__read(1)
528 if not s or s == NUL:
529 break
530 if flag & 16:
531 while True:
532 s = self.__read(1)
533 if not s or s == NUL:
534 break
535 if flag & 2:
536 self.__read(2)
537
538 def tell(self):
539 """Return the stream's file pointer position.
540 """
541 return self.pos
542
543 def seek(self, pos=0):
544 """Set the stream's file pointer to pos. Negative seeking
545 is forbidden.
546 """
547 if pos - self.pos >= 0:
548 blocks, remainder = divmod(pos - self.pos, self.bufsize)
549 for i in xrange(blocks):
550 self.read(self.bufsize)
551 self.read(remainder)
552 else:
553 raise StreamError("seeking backwards is not allowed")
554 return self.pos
555
556 def read(self, size=None):
557 """Return the next size number of bytes from the stream.
558 If size is not defined, return all bytes of the stream
559 up to EOF.
560 """
561 if size is None:
562 t = []
563 while True:
564 buf = self._read(self.bufsize)
565 if not buf:
566 break
567 t.append(buf)
568 buf = "".join(t)
569 else:
570 buf = self._read(size)
571 self.pos += len(buf)
572 return buf
573
574 def _read(self, size):
575 """Return size bytes from the stream.
576 """
577 if self.comptype == "tar":
578 return self.__read(size)
579
580 c = len(self.dbuf)
581 t = [self.dbuf]
582 while c < size:
583 buf = self.__read(self.bufsize)
584 if not buf:
585 break
586 try:
587 buf = self.cmp.decompress(buf)
588 except IOError:
589 raise ReadError("invalid compressed data")
590 t.append(buf)
591 c += len(buf)
592 t = "".join(t)
593 self.dbuf = t[size:]
594 return t[:size]
595
596 def __read(self, size):
597 """Return size bytes from stream. If internal buffer is empty,
598 read another block from the stream.
599 """
600 c = len(self.buf)
601 t = [self.buf]
602 while c < size:
603 buf = self.fileobj.read(self.bufsize)
604 if not buf:
605 break
606 t.append(buf)
607 c += len(buf)
608 t = "".join(t)
609 self.buf = t[size:]
610 return t[:size]
611# class _Stream
612
613class _StreamProxy(object):
614 """Small proxy class that enables transparent compression
615 detection for the Stream interface (mode 'r|*').
616 """
617
618 def __init__(self, fileobj):
619 self.fileobj = fileobj
620 self.buf = self.fileobj.read(BLOCKSIZE)
621
622 def read(self, size):
623 self.read = self.fileobj.read
624 return self.buf
625
626 def getcomptype(self):
627 if self.buf.startswith("\037\213\010"):
628 return "gz"
629 if self.buf.startswith("BZh91"):
630 return "bz2"
631 return "tar"
632
633 def close(self):
634 self.fileobj.close()
635# class StreamProxy
636
637class _BZ2Proxy(object):
638 """Small proxy class that enables external file object
639 support for "r:bz2" and "w:bz2" modes. This is actually
640 a workaround for a limitation in bz2 module's BZ2File
641 class which (unlike gzip.GzipFile) has no support for
642 a file object argument.
643 """
644
645 blocksize = 16 * 1024
646
647 def __init__(self, fileobj, mode):
648 self.fileobj = fileobj
649 self.mode = mode
650 self.name = getattr(self.fileobj, "name", None)
651 self.init()
652
653 def init(self):
654 import bz2
655 self.pos = 0
656 if self.mode == "r":
657 self.bz2obj = bz2.BZ2Decompressor()
658 self.fileobj.seek(0)
659 self.buf = ""
660 else:
661 self.bz2obj = bz2.BZ2Compressor()
662
663 def read(self, size):
664 b = [self.buf]
665 x = len(self.buf)
666 while x < size:
667 raw = self.fileobj.read(self.blocksize)
668 if not raw:
669 break
670 data = self.bz2obj.decompress(raw)
671 b.append(data)
672 x += len(data)
673 self.buf = "".join(b)
674
675 buf = self.buf[:size]
676 self.buf = self.buf[size:]
677 self.pos += len(buf)
678 return buf
679
680 def seek(self, pos):
681 if pos < self.pos:
682 self.init()
683 self.read(pos - self.pos)
684
685 def tell(self):
686 return self.pos
687
688 def write(self, data):
689 self.pos += len(data)
690 raw = self.bz2obj.compress(data)
691 self.fileobj.write(raw)
692
693 def close(self):
694 if self.mode == "w":
695 raw = self.bz2obj.flush()
696 self.fileobj.write(raw)
697# class _BZ2Proxy
698
699#------------------------
700# Extraction file object
701#------------------------
702class _FileInFile(object):
703 """A thin wrapper around an existing file object that
704 provides a part of its data as an individual file
705 object.
706 """
707
708 def __init__(self, fileobj, offset, size, sparse=None):
709 self.fileobj = fileobj
710 self.offset = offset
711 self.size = size
712 self.sparse = sparse
713 self.position = 0
714
715 def tell(self):
716 """Return the current file position.
717 """
718 return self.position
719
720 def seek(self, position):
721 """Seek to a position in the file.
722 """
723 self.position = position
724
725 def read(self, size=None):
726 """Read data from the file.
727 """
728 if size is None:
729 size = self.size - self.position
730 else:
731 size = min(size, self.size - self.position)
732
733 if self.sparse is None:
734 return self.readnormal(size)
735 else:
736 return self.readsparse(size)
737
738 def readnormal(self, size):
739 """Read operation for regular files.
740 """
741 self.fileobj.seek(self.offset + self.position)
742 self.position += size
743 return self.fileobj.read(size)
744
745 def readsparse(self, size):
746 """Read operation for sparse files.
747 """
748 data = []
749 while size > 0:
750 buf = self.readsparsesection(size)
751 if not buf:
752 break
753 size -= len(buf)
754 data.append(buf)
755 return "".join(data)
756
757 def readsparsesection(self, size):
758 """Read a single section of a sparse file.
759 """
760 section = self.sparse.find(self.position)
761
762 if section is None:
763 return ""
764
765 size = min(size, section.offset + section.size - self.position)
766
767 if isinstance(section, _data):
768 realpos = section.realpos + self.position - section.offset
769 self.fileobj.seek(self.offset + realpos)
770 self.position += size
771 return self.fileobj.read(size)
772 else:
773 self.position += size
774 return NUL * size
775#class _FileInFile
776
777
778class ExFileObject(object):
779 """File-like object for reading an archive member.
780 Is returned by TarFile.extractfile().
781 """
782 blocksize = 1024
783
784 def __init__(self, tarfile, tarinfo):
785 self.fileobj = _FileInFile(tarfile.fileobj,
786 tarinfo.offset_data,
787 tarinfo.size,
788 getattr(tarinfo, "sparse", None))
789 self.name = tarinfo.name
790 self.mode = "r"
791 self.closed = False
792 self.size = tarinfo.size
793
794 self.position = 0
795 self.buffer = ""
796
797 def read(self, size=None):
798 """Read at most size bytes from the file. If size is not
799 present or None, read all data until EOF is reached.
800 """
801 if self.closed:
802 raise ValueError("I/O operation on closed file")
803
804 buf = ""
805 if self.buffer:
806 if size is None:
807 buf = self.buffer
808 self.buffer = ""
809 else:
810 buf = self.buffer[:size]
811 self.buffer = self.buffer[size:]
812
813 if size is None:
814 buf += self.fileobj.read()
815 else:
816 buf += self.fileobj.read(size - len(buf))
817
818 self.position += len(buf)
819 return buf
820
821 def readline(self, size=-1):
822 """Read one entire line from the file. If size is present
823 and non-negative, return a string with at most that
824 size, which may be an incomplete line.
825 """
826 if self.closed:
827 raise ValueError("I/O operation on closed file")
828
829 if "\n" in self.buffer:
830 pos = self.buffer.find("\n") + 1
831 else:
832 buffers = [self.buffer]
833 while True:
834 buf = self.fileobj.read(self.blocksize)
835 buffers.append(buf)
836 if not buf or "\n" in buf:
837 self.buffer = "".join(buffers)
838 pos = self.buffer.find("\n") + 1
839 if pos == 0:
840 # no newline found.
841 pos = len(self.buffer)
842 break
843
844 if size != -1:
845 pos = min(size, pos)
846
847 buf = self.buffer[:pos]
848 self.buffer = self.buffer[pos:]
849 self.position += len(buf)
850 return buf
851
852 def readlines(self):
853 """Return a list with all remaining lines.
854 """
855 result = []
856 while True:
857 line = self.readline()
858 if not line: break
859 result.append(line)
860 return result
861
862 def tell(self):
863 """Return the current file position.
864 """
865 if self.closed:
866 raise ValueError("I/O operation on closed file")
867
868 return self.position
869
870 def seek(self, pos, whence=os.SEEK_SET):
871 """Seek to a position in the file.
872 """
873 if self.closed:
874 raise ValueError("I/O operation on closed file")
875
876 if whence == os.SEEK_SET:
877 self.position = min(max(pos, 0), self.size)
878 elif whence == os.SEEK_CUR:
879 if pos < 0:
880 self.position = max(self.position + pos, 0)
881 else:
882 self.position = min(self.position + pos, self.size)
883 elif whence == os.SEEK_END:
884 self.position = max(min(self.size + pos, self.size), 0)
885 else:
886 raise ValueError("Invalid argument")
887
888 self.buffer = ""
889 self.fileobj.seek(self.position)
890
891 def close(self):
892 """Close the file object.
893 """
894 self.closed = True
895
896 def __iter__(self):
897 """Get an iterator over the file's lines.
898 """
899 while True:
900 line = self.readline()
901 if not line:
902 break
903 yield line
904#class ExFileObject
905
906#------------------
907# Exported Classes
908#------------------
909class TarInfo(object):
910 """Informational class which holds the details about an
911 archive member given by a tar header block.
912 TarInfo objects are returned by TarFile.getmember(),
913 TarFile.getmembers() and TarFile.gettarinfo() and are
914 usually created internally.
915 """
916
917 def __init__(self, name=""):
918 """Construct a TarInfo object. name is the optional name
919 of the member.
920 """
921 self.name = name # member name
922 self.mode = 0644 # file permissions
923 self.uid = 0 # user id
924 self.gid = 0 # group id
925 self.size = 0 # file size
926 self.mtime = 0 # modification time
927 self.chksum = 0 # header checksum
928 self.type = REGTYPE # member type
929 self.linkname = "" # link name
930 self.uname = "" # user name
931 self.gname = "" # group name
932 self.devmajor = 0 # device major number
933 self.devminor = 0 # device minor number
934
935 self.offset = 0 # the tar header starts here
936 self.offset_data = 0 # the file's data starts here
937
938 self.pax_headers = {} # pax header information
939
940 # In pax headers the "name" and "linkname" field are called
941 # "path" and "linkpath".
942 def _getpath(self):
943 return self.name
944 def _setpath(self, name):
945 self.name = name
946 path = property(_getpath, _setpath)
947
948 def _getlinkpath(self):
949 return self.linkname
950 def _setlinkpath(self, linkname):
951 self.linkname = linkname
952 linkpath = property(_getlinkpath, _setlinkpath)
953
954 def __repr__(self):
955 return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
956
957 def get_info(self, encoding, errors):
958 """Return the TarInfo's attributes as a dictionary.
959 """
960 info = {
961 "name": self.name,
962 "mode": self.mode & 07777,
963 "uid": self.uid,
964 "gid": self.gid,
965 "size": self.size,
966 "mtime": self.mtime,
967 "chksum": self.chksum,
968 "type": self.type,
969 "linkname": self.linkname,
970 "uname": self.uname,
971 "gname": self.gname,
972 "devmajor": self.devmajor,
973 "devminor": self.devminor
974 }
975
976 if info["type"] == DIRTYPE and not info["name"].endswith("/"):
977 info["name"] += "/"
978
979 for key in ("name", "linkname", "uname", "gname"):
980 if type(info[key]) is unicode:
981 info[key] = info[key].encode(encoding, errors)
982
983 return info
984
985 def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="strict"):
986 """Return a tar header as a string of 512 byte blocks.
987 """
988 info = self.get_info(encoding, errors)
989
990 if format == USTAR_FORMAT:
991 return self.create_ustar_header(info)
992 elif format == GNU_FORMAT:
993 return self.create_gnu_header(info)
994 elif format == PAX_FORMAT:
995 return self.create_pax_header(info, encoding, errors)
996 else:
997 raise ValueError("invalid format")
998
999 def create_ustar_header(self, info):
1000 """Return the object as a ustar header block.
1001 """
1002 info["magic"] = POSIX_MAGIC
1003
1004 if len(info["linkname"]) > LENGTH_LINK:
1005 raise ValueError("linkname is too long")
1006
1007 if len(info["name"]) > LENGTH_NAME:
1008 info["prefix"], info["name"] = self._posix_split_name(info["name"])
1009
1010 return self._create_header(info, USTAR_FORMAT)
1011
1012 def create_gnu_header(self, info):
1013 """Return the object as a GNU header block sequence.
1014 """
1015 info["magic"] = GNU_MAGIC
1016
1017 buf = ""
1018 if len(info["linkname"]) > LENGTH_LINK:
1019 buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK)
1020
1021 if len(info["name"]) > LENGTH_NAME:
1022 buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME)
1023
1024 return buf + self._create_header(info, GNU_FORMAT)
1025
    def create_pax_header(self, info, encoding, errors):
        """Return the object as a ustar header block. If it cannot be
           represented this way, prepend a pax extended header sequence
           with supplement information.
        """
        info["magic"] = POSIX_MAGIC
        pax_headers = self.pax_headers.copy()

        # Test string fields for values that exceed the field length or cannot
        # be represented in ASCII encoding.
        for name, hname, length in (
                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
                ("uname", "uname", 32), ("gname", "gname", 32)):

            if hname in pax_headers:
                # The pax header has priority.
                continue

            # info[] values are byte strings here; decode to unicode so
            # they can be stored in pax records if needed.
            val = info[name].decode(encoding, errors)

            # Try to encode the string as ASCII.
            try:
                val.encode("ascii")
            except UnicodeEncodeError:
                pax_headers[hname] = val
                continue

            if len(info[name]) > length:
                pax_headers[hname] = val

        # Test number fields for values that exceed the field limit or values
        # that like to be stored as float.
        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
            if name in pax_headers:
                # The pax header has priority. Avoid overflow.
                info[name] = 0
                continue

            # An octal field of `digits' characters holds digits - 1
            # octal digits plus a terminator, hence the 8 ** (digits - 1)
            # bound; floats (mtime) cannot be stored in octal either.
            val = info[name]
            if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
                pax_headers[name] = unicode(val)
                info[name] = 0

        # Create a pax extended header if necessary.
        if pax_headers:
            buf = self._create_pax_generic_header(pax_headers)
        else:
            buf = ""

        return buf + self._create_header(info, USTAR_FORMAT)
1076
1077 @classmethod
1078 def create_pax_global_header(cls, pax_headers):
1079 """Return the object as a pax global header block sequence.
1080 """
1081 return cls._create_pax_generic_header(pax_headers, type=XGLTYPE)
1082
1083 def _posix_split_name(self, name):
1084 """Split a name longer than 100 chars into a prefix
1085 and a name part.
1086 """
1087 prefix = name[:LENGTH_PREFIX + 1]
1088 while prefix and prefix[-1] != "/":
1089 prefix = prefix[:-1]
1090
1091 name = name[len(prefix):]
1092 prefix = prefix[:-1]
1093
1094 if not prefix or len(name) > LENGTH_NAME:
1095 raise ValueError("name is too long")
1096 return prefix, name
1097
    @staticmethod
    def _create_header(info, format):
        """Return a header block. info is a dictionary with file
           information, format must be one of the *_FORMAT constants.
        """
        # Field order and widths follow the ustar layout: name 100,
        # mode/uid/gid 8, size/mtime 12, chksum 8, type 1, linkname 100,
        # magic 8, uname/gname 32, devmajor/devminor 8, prefix 155.
        parts = [
            stn(info.get("name", ""), 100),
            itn(info.get("mode", 0) & 07777, 8, format),
            itn(info.get("uid", 0), 8, format),
            itn(info.get("gid", 0), 8, format),
            itn(info.get("size", 0), 12, format),
            itn(info.get("mtime", 0), 12, format),
            " ", # checksum field
            info.get("type", REGTYPE),
            stn(info.get("linkname", ""), 100),
            stn(info.get("magic", POSIX_MAGIC), 8),
            stn(info.get("uname", ""), 32),
            stn(info.get("gname", ""), 32),
            itn(info.get("devmajor", 0), 8, format),
            itn(info.get("devminor", 0), 8, format),
            stn(info.get("prefix", ""), 155)
        ]

        buf = struct.pack("%ds" % BLOCKSIZE, "".join(parts))
        # The checksum is computed over the block with the checksum
        # field blank, then patched in at offset 148 (512 - 364) as six
        # octal digits plus a NUL; the trailing space written above at
        # offset 155 (512 - 357) is preserved.
        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
        buf = buf[:-364] + "%06o\0" % chksum + buf[-357:]
        return buf
1125
1126 @staticmethod
1127 def _create_payload(payload):
1128 """Return the string payload filled with zero bytes
1129 up to the next 512 byte border.
1130 """
1131 blocks, remainder = divmod(len(payload), BLOCKSIZE)
1132 if remainder > 0:
1133 payload += (BLOCKSIZE - remainder) * NUL
1134 return payload
1135
1136 @classmethod
1137 def _create_gnu_long_header(cls, name, type):
1138 """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
1139 for name.
1140 """
1141 name += NUL
1142
1143 info = {}
1144 info["name"] = "././@LongLink"
1145 info["type"] = type
1146 info["size"] = len(name)
1147 info["magic"] = GNU_MAGIC
1148
1149 # create extended header + name blocks.
1150 return cls._create_header(info, USTAR_FORMAT) + \
1151 cls._create_payload(name)
1152
    @classmethod
    def _create_pax_generic_header(cls, pax_headers, type=XHDTYPE):
        """Return a POSIX.1-2001 extended or global header sequence
           that contains a list of keyword, value pairs. The values
           must be unicode objects.
        """
        records = []
        for keyword, value in pax_headers.iteritems():
            keyword = keyword.encode("utf8")
            value = value.encode("utf8")
            # Each record is "<length> <keyword>=<value>\n" where
            # <length> counts the entire record including itself, so
            # the length is found by fixpoint iteration: keep trying
            # until adding the digits of the length does not change it.
            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
            n = p = 0
            while True:
                n = l + len(str(p))
                if n == p:
                    break
                p = n
            records.append("%d %s=%s\n" % (p, keyword, value))
        records = "".join(records)

        # We use a hardcoded "././@PaxHeader" name like star does
        # instead of the one that POSIX recommends.
        info = {}
        info["name"] = "././@PaxHeader"
        info["type"] = type
        info["size"] = len(records)
        info["magic"] = POSIX_MAGIC

        # Create pax header + record blocks.
        return cls._create_header(info, USTAR_FORMAT) + \
                cls._create_payload(records)
1184
    @classmethod
    def frombuf(cls, buf):
        """Construct a TarInfo object from a 512 byte string buffer.

           Raises EmptyHeaderError, TruncatedHeaderError, EOFHeaderError
           or InvalidHeaderError (all subclasses of HeaderError) for the
           respective malformed inputs.
        """
        if len(buf) == 0:
            raise EmptyHeaderError("empty header")
        if len(buf) != BLOCKSIZE:
            raise TruncatedHeaderError("truncated header")
        if buf.count(NUL) == BLOCKSIZE:
            raise EOFHeaderError("end of file header")

        chksum = nti(buf[148:156])
        if chksum not in calc_chksums(buf):
            raise InvalidHeaderError("bad checksum")

        # Unpack the fixed ustar field layout. Bytes 257:265 (magic +
        # version) are intentionally skipped between linkname and uname.
        obj = cls()
        obj.buf = buf
        obj.name = nts(buf[0:100])
        obj.mode = nti(buf[100:108])
        obj.uid = nti(buf[108:116])
        obj.gid = nti(buf[116:124])
        obj.size = nti(buf[124:136])
        obj.mtime = nti(buf[136:148])
        obj.chksum = chksum
        obj.type = buf[156:157]
        obj.linkname = nts(buf[157:257])
        obj.uname = nts(buf[265:297])
        obj.gname = nts(buf[297:329])
        obj.devmajor = nti(buf[329:337])
        obj.devminor = nti(buf[337:345])
        prefix = nts(buf[345:500])

        # Old V7 tar format represents a directory as a regular
        # file with a trailing slash.
        if obj.type == AREGTYPE and obj.name.endswith("/"):
            obj.type = DIRTYPE

        # Remove redundant slashes from directories.
        if obj.isdir():
            obj.name = obj.name.rstrip("/")

        # Reconstruct a ustar longname.
        if prefix and obj.type not in GNU_TYPES:
            obj.name = prefix + "/" + obj.name
        return obj
1230
1231 @classmethod
1232 def fromtarfile(cls, tarfile):
1233 """Return the next TarInfo object from TarFile object
1234 tarfile.
1235 """
1236 buf = tarfile.fileobj.read(BLOCKSIZE)
1237 obj = cls.frombuf(buf)
1238 obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
1239 return obj._proc_member(tarfile)
1240
    #--------------------------------------------------------------------------
    # The following are methods that are called depending on the type of a
    # member. The entry point is _proc_member() which can be overridden in a
    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
    # implement the following operations:
    # 1. Set self.offset_data to the position where the data blocks begin,
    #    if there is data that follows.
    # 2. Set tarfile.offset to the position where the next member's header
    #    will begin.
    # 3. Return self or another valid TarInfo object.
1252 def _proc_member(self, tarfile):
1253 """Choose the right processing method depending on
1254 the type and call it.
1255 """
1256 if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
1257 return self._proc_gnulong(tarfile)
1258 elif self.type == GNUTYPE_SPARSE:
1259 return self._proc_sparse(tarfile)
1260 elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
1261 return self._proc_pax(tarfile)
1262 else:
1263 return self._proc_builtin(tarfile)
1264
    def _proc_builtin(self, tarfile):
        """Process a builtin type or an unknown type which
           will be treated as a regular file.

           Advances tarfile.offset past this member's data blocks.
        """
        self.offset_data = tarfile.fileobj.tell()
        offset = self.offset_data
        if self.isreg() or self.type not in SUPPORTED_TYPES:
            # Skip the following data blocks.
            offset += self._block(self.size)
        tarfile.offset = offset

        # Patch the TarInfo object with saved global
        # header information.
        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)

        return self
1281
1282 def _proc_gnulong(self, tarfile):
1283 """Process the blocks that hold a GNU longname
1284 or longlink member.
1285 """
1286 buf = tarfile.fileobj.read(self._block(self.size))
1287
1288 # Fetch the next header and process it.
1289 try:
1290 next = self.fromtarfile(tarfile)
1291 except HeaderError:
1292 raise SubsequentHeaderError("missing or bad subsequent header")
1293
1294 # Patch the TarInfo object from the next header with
1295 # the longname information.
1296 next.offset = self.offset
1297 if self.type == GNUTYPE_LONGNAME:
1298 next.name = nts(buf)
1299 elif self.type == GNUTYPE_LONGLINK:
1300 next.linkname = nts(buf)
1301
1302 return next
1303
    def _proc_sparse(self, tarfile):
        """Process a GNU sparse header plus extra headers.

           Builds self.sparse, a _ringbuffer of _hole/_data sections
           mapping archive offsets to file offsets, and restores the
           member's real (unsparsed) size.
        """
        buf = self.buf
        sp = _ringbuffer()
        pos = 386
        lastpos = 0L
        realpos = 0L
        # There are 4 possible sparse structs in the
        # first header.
        for i in xrange(4):
            try:
                # Each struct is 12 bytes offset + 12 bytes length.
                offset = nti(buf[pos:pos + 12])
                numbytes = nti(buf[pos + 12:pos + 24])
            except ValueError:
                break
            if offset > lastpos:
                sp.append(_hole(lastpos, offset - lastpos))
            sp.append(_data(offset, numbytes, realpos))
            realpos += numbytes
            lastpos = offset + numbytes
            pos += 24

        isextended = ord(buf[482])
        origsize = nti(buf[483:495])

        # If the isextended flag is given,
        # there are extra headers to process.
        while isextended == 1:
            buf = tarfile.fileobj.read(BLOCKSIZE)
            pos = 0
            # An extension block holds up to 21 sparse structs.
            for i in xrange(21):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                if offset > lastpos:
                    sp.append(_hole(lastpos, offset - lastpos))
                sp.append(_data(offset, numbytes, realpos))
                realpos += numbytes
                lastpos = offset + numbytes
                pos += 24
            isextended = ord(buf[504])

        # A trailing hole covers the rest of the original file.
        if lastpos < origsize:
            sp.append(_hole(lastpos, origsize - lastpos))

        self.sparse = sp

        self.offset_data = tarfile.fileobj.tell()
        tarfile.offset = self.offset_data + self._block(self.size)
        self.size = origsize

        return self
1359
1360 def _proc_pax(self, tarfile):
1361 """Process an extended or global header as described in
1362 POSIX.1-2001.
1363 """
1364 # Read the header information.
1365 buf = tarfile.fileobj.read(self._block(self.size))
1366
1367 # A pax header stores supplemental information for either
1368 # the following file (extended) or all following files
1369 # (global).
1370 if self.type == XGLTYPE:
1371 pax_headers = tarfile.pax_headers
1372 else:
1373 pax_headers = tarfile.pax_headers.copy()
1374
1375 # Parse pax header information. A record looks like that:
1376 # "%d %s=%s\n" % (length, keyword, value). length is the size
1377 # of the complete record including the length field itself and
1378 # the newline. keyword and value are both UTF-8 encoded strings.
1379 regex = re.compile(r"(\d+) ([^=]+)=", re.U)
1380 pos = 0
1381 while True:
1382 match = regex.match(buf, pos)
1383 if not match:
1384 break
1385
1386 length, keyword = match.groups()
1387 length = int(length)
1388 value = buf[match.end(2) + 1:match.start(1) + length - 1]
1389
1390 keyword = keyword.decode("utf8")
1391 value = value.decode("utf8")
1392
1393 pax_headers[keyword] = value
1394 pos += length
1395
1396 # Fetch the next header.
1397 try:
1398 next = self.fromtarfile(tarfile)
1399 except HeaderError:
1400 raise SubsequentHeaderError("missing or bad subsequent header")
1401
1402 if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
1403 # Patch the TarInfo object with the extended header info.
1404 next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
1405 next.offset = self.offset
1406
1407 if "size" in pax_headers:
1408 # If the extended header replaces the size field,
1409 # we need to recalculate the offset where the next
1410 # header starts.
1411 offset = next.offset_data
1412 if next.isreg() or next.type not in SUPPORTED_TYPES:
1413 offset += next._block(next.size)
1414 tarfile.offset = offset
1415
1416 return next
1417
    def _apply_pax_info(self, pax_headers, encoding, errors):
        """Replace fields with supplemental information from a previous
           pax extended or global header.
        """
        for keyword, value in pax_headers.iteritems():
            # Only keywords that map onto TarInfo attributes are applied.
            if keyword not in PAX_FIELDS:
                continue

            if keyword == "path":
                value = value.rstrip("/")

            if keyword in PAX_NUMBER_FIELDS:
                try:
                    value = PAX_NUMBER_FIELDS[keyword](value)
                except ValueError:
                    # Unparsable numbers fall back to 0 rather than fail.
                    value = 0
            else:
                value = uts(value, encoding, errors)

            setattr(self, keyword, value)

        # Keep a copy so the records survive on the member itself.
        self.pax_headers = pax_headers.copy()
1440
1441 def _block(self, count):
1442 """Round up a byte count by BLOCKSIZE and return it,
1443 e.g. _block(834) => 1024.
1444 """
1445 blocks, remainder = divmod(count, BLOCKSIZE)
1446 if remainder:
1447 blocks += 1
1448 return blocks * BLOCKSIZE
1449
    # Convenience predicates over self.type.
    def isreg(self):
        return self.type in REGULAR_TYPES
    def isfile(self):
        return self.isreg()
    def isdir(self):
        return self.type == DIRTYPE
    def issym(self):
        return self.type == SYMTYPE
    def islnk(self):
        return self.type == LNKTYPE
    def ischr(self):
        return self.type == CHRTYPE
    def isblk(self):
        return self.type == BLKTYPE
    def isfifo(self):
        return self.type == FIFOTYPE
    def issparse(self):
        return self.type == GNUTYPE_SPARSE
    def isdev(self):
        # Any device-like member: character, block or FIFO.
        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
1470# class TarInfo
1471
class TarFile(object):
    """The TarFile Class provides an interface to tar archives.
    """

    # Class-level defaults; each may be overridden per instance via
    # the corresponding __init__ keyword argument.

    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)

    dereference = False         # If true, add content of linked file to the
                                # tar file, else the link.

    ignore_zeros = False        # If true, skips empty or invalid blocks and
                                # continues processing.

    errorlevel = 1              # If 0, fatal errors only appear in debug
                                # messages (if debug >= 0). If > 0, errors
                                # are passed to the caller as exceptions.

    format = DEFAULT_FORMAT     # The format to use when creating an archive.

    encoding = ENCODING         # Encoding for 8-bit character strings.

    errors = None               # Error handler for unicode conversion.

    tarinfo = TarInfo           # The default TarInfo class to use.

    fileobject = ExFileObject   # The default ExFileObject class to use.
1497
    def __init__(self, name=None, mode="r", fileobj=None, format=None,
            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
            errors=None, pax_headers=None, debug=None, errorlevel=None):
        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
           read from an existing archive, 'a' to append data to an existing
           file or 'w' to create a new file overwriting an existing one. `mode'
           defaults to 'r'.
           If `fileobj' is given, it is used for reading or writing data. If it
           can be determined, `mode' is overridden by `fileobj's mode.
           `fileobj' is not closed, when TarFile is closed.
        """
        if len(mode) > 1 or mode not in "raw":
            raise ValueError("mode must be 'r', 'a' or 'w'")
        self.mode = mode
        self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]

        if not fileobj:
            if self.mode == "a" and not os.path.exists(name):
                # Create nonexistent files in append mode.
                self.mode = "w"
                self._mode = "wb"
            fileobj = bltn_open(name, self._mode)
            # We opened the file, so we are responsible for closing it.
            self._extfileobj = False
        else:
            if name is None and hasattr(fileobj, "name"):
                name = fileobj.name
            if hasattr(fileobj, "mode"):
                self._mode = fileobj.mode
            self._extfileobj = True
        self.name = os.path.abspath(name) if name else None
        self.fileobj = fileobj

        # Init attributes.
        if format is not None:
            self.format = format
        if tarinfo is not None:
            self.tarinfo = tarinfo
        if dereference is not None:
            self.dereference = dereference
        if ignore_zeros is not None:
            self.ignore_zeros = ignore_zeros
        if encoding is not None:
            self.encoding = encoding

        if errors is not None:
            self.errors = errors
        elif mode == "r":
            self.errors = "utf-8"
        else:
            self.errors = "strict"

        # Caller-supplied pax headers only make sense for PAX_FORMAT.
        if pax_headers is not None and self.format == PAX_FORMAT:
            self.pax_headers = pax_headers
        else:
            self.pax_headers = {}

        if debug is not None:
            self.debug = debug
        if errorlevel is not None:
            self.errorlevel = errorlevel

        # Init datastructures.
        self.closed = False
        self.members = []       # list of members as TarInfo objects
        self._loaded = False    # flag if all members have been read
        self.offset = self.fileobj.tell()
                                # current position in the archive file
        self.inodes = {}        # dictionary caching the inodes of
                                # archive members already added

        try:
            if self.mode == "r":
                # Pre-read the first member so next() can hand it out.
                self.firstmember = None
                self.firstmember = self.next()

            if self.mode == "a":
                # Move to the end of the archive,
                # before the first empty block.
                while True:
                    self.fileobj.seek(self.offset)
                    try:
                        tarinfo = self.tarinfo.fromtarfile(self)
                        self.members.append(tarinfo)
                    except EOFHeaderError:
                        self.fileobj.seek(self.offset)
                        break
                    except HeaderError, e:
                        raise ReadError(str(e))

            if self.mode in "aw":
                self._loaded = True

                if self.pax_headers:
                    # Emit the global pax header before any member.
                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
                    self.fileobj.write(buf)
                    self.offset += len(buf)
        except:
            # On any setup failure, release the file we opened ourselves
            # and mark the object closed before re-raising.
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
            raise
1599
    # Deprecated `posix' boolean property kept for backwards
    # compatibility: True maps to USTAR_FORMAT, False to GNU_FORMAT.
    def _getposix(self):
        return self.format == USTAR_FORMAT
    def _setposix(self, value):
        import warnings
        warnings.warn("use the format attribute instead", DeprecationWarning,
                      2)
        if value:
            self.format = USTAR_FORMAT
        else:
            self.format = GNU_FORMAT
    posix = property(_getposix, _setposix)
1611
1612 #--------------------------------------------------------------------------
1613 # Below are the classmethods which act as alternate constructors to the
1614 # TarFile class. The open() method is the only one that is needed for
1615 # public use; it is the "super"-constructor and is able to select an
1616 # adequate "sub"-constructor for a particular compression using the mapping
1617 # from OPEN_METH.
1618 #
1619 # This concept allows one to subclass TarFile without losing the comfort of
1620 # the super-constructor. A sub-constructor is registered and made available
1621 # by adding it to the mapping in OPEN_METH.
1622
    @classmethod
    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
        """Open a tar archive for reading, writing or appending. Return
           an appropriate TarFile class.

           mode:
           'r' or 'r:*' open for reading with transparent compression
           'r:'         open for reading exclusively uncompressed
           'r:gz'       open for reading with gzip compression
           'r:bz2'      open for reading with bzip2 compression
           'a' or 'a:'  open for appending, creating the file if necessary
           'w' or 'w:'  open for writing without compression
           'w:gz'       open for writing with gzip compression
           'w:bz2'      open for writing with bzip2 compression

           'r|*'        open a stream of tar blocks with transparent compression
           'r|'         open an uncompressed stream of tar blocks for reading
           'r|gz'       open a gzip compressed stream of tar blocks
           'r|bz2'      open a bzip2 compressed stream of tar blocks
           'w|'         open an uncompressed stream for writing
           'w|gz'       open a gzip compressed stream for writing
           'w|bz2'      open a bzip2 compressed stream for writing
        """

        if not name and not fileobj:
            raise ValueError("nothing to open")

        if mode in ("r", "r:*"):
            # Find out which *open() is appropriate for opening the file.
            for comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
                if fileobj is not None:
                    saved_pos = fileobj.tell()
                try:
                    return func(name, "r", fileobj, **kwargs)
                except (ReadError, CompressionError), e:
                    # Wrong compression: rewind the file object and try
                    # the next registered opener.
                    if fileobj is not None:
                        fileobj.seek(saved_pos)
                    continue
            raise ReadError("file could not be opened successfully")

        elif ":" in mode:
            # Explicit compression, e.g. "r:gz" or "w:bz2".
            filemode, comptype = mode.split(":", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"

            # Select the *open() function according to
            # given compression.
            if comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
            else:
                raise CompressionError("unknown compression type %r" % comptype)
            return func(name, filemode, fileobj, **kwargs)

        elif "|" in mode:
            # Non-seekable stream mode, e.g. "r|gz".
            filemode, comptype = mode.split("|", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"

            if filemode not in "rw":
                raise ValueError("mode must be 'r' or 'w'")

            t = cls(name, filemode,
                    _Stream(name, filemode, comptype, fileobj, bufsize),
                    **kwargs)
            # The _Stream is ours, so close() must close it.
            t._extfileobj = False
            return t

        elif mode in "aw":
            return cls.taropen(name, mode, fileobj, **kwargs)

        raise ValueError("undiscernible mode")
1695
1696 @classmethod
1697 def taropen(cls, name, mode="r", fileobj=None, **kwargs):
1698 """Open uncompressed tar archive name for reading or writing.
1699 """
1700 if len(mode) > 1 or mode not in "raw":
1701 raise ValueError("mode must be 'r', 'a' or 'w'")
1702 return cls(name, mode, fileobj, **kwargs)
1703
    @classmethod
    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open gzip compressed tar archive name for reading or writing.
           Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'")

        try:
            import gzip
            gzip.GzipFile
        except (ImportError, AttributeError):
            raise CompressionError("gzip module is not available")

        if fileobj is None:
            fileobj = bltn_open(name, mode + "b")

        try:
            # The GzipFile wraps fileobj; the TarFile wraps the GzipFile.
            t = cls.taropen(name, mode,
                    gzip.GzipFile(name, mode, compresslevel, fileobj),
                    **kwargs)
        except IOError:
            # GzipFile raises IOError on a bad magic number.
            raise ReadError("not a gzip file")
        # The GzipFile was created here, so close() must close it.
        t._extfileobj = False
        return t
1729
    @classmethod
    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open bzip2 compressed tar archive name for reading or writing.
           Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'.")

        try:
            import bz2
        except ImportError:
            raise CompressionError("bz2 module is not available")

        if fileobj is not None:
            # bz2.BZ2File cannot wrap an arbitrary file object directly,
            # so an adapter is used for caller-supplied file objects.
            fileobj = _BZ2Proxy(fileobj, mode)
        else:
            fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel)

        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except (IOError, EOFError):
            raise ReadError("not a bzip2 file")
        # The bz2 file object was created here, so close() must close it.
        t._extfileobj = False
        return t
1754
    # All *open() methods are registered here.
    # Maps the compression suffix used in mode strings (e.g. "r:gz")
    # to the name of the classmethod that handles it.
    OPEN_METH = {
        "tar": "taropen",   # uncompressed tar
        "gz":  "gzopen",    # gzip compressed tar
        "bz2": "bz2open"    # bzip2 compressed tar
    }
1761
1762 #--------------------------------------------------------------------------
1763 # The public methods which TarFile provides:
1764
1765 def close(self):
1766 """Close the TarFile. In write-mode, two finishing zero blocks are
1767 appended to the archive.
1768 """
1769 if self.closed:
1770 return
1771
1772 if self.mode in "aw":
1773 self.fileobj.write(NUL * (BLOCKSIZE * 2))
1774 self.offset += (BLOCKSIZE * 2)
1775 # fill up the end with zero-blocks
1776 # (like option -b20 for tar does)
1777 blocks, remainder = divmod(self.offset, RECORDSIZE)
1778 if remainder > 0:
1779 self.fileobj.write(NUL * (RECORDSIZE - remainder))
1780
1781 if not self._extfileobj:
1782 self.fileobj.close()
1783 self.closed = True
1784
1785 def getmember(self, name):
1786 """Return a TarInfo object for member `name'. If `name' can not be
1787 found in the archive, KeyError is raised. If a member occurs more
1788 than once in the archive, its last occurrence is assumed to be the
1789 most up-to-date version.
1790 """
1791 tarinfo = self._getmember(name)
1792 if tarinfo is None:
1793 raise KeyError("filename %r not found" % name)
1794 return tarinfo
1795
1796 def getmembers(self):
1797 """Return the members of the archive as a list of TarInfo objects. The
1798 list has the same order as the members in the archive.
1799 """
1800 self._check()
1801 if not self._loaded: # if we want to obtain a list of
1802 self._load() # all members, we first have to
1803 # scan the whole archive.
1804 return self.members
1805
1806 def getnames(self):
1807 """Return the members of the archive as a list of their names. It has
1808 the same order as the list returned by getmembers().
1809 """
1810 return [tarinfo.name for tarinfo in self.getmembers()]
1811
    def gettarinfo(self, name=None, arcname=None, fileobj=None):
        """Create a TarInfo object for either the file `name' or the file
           object `fileobj' (using os.fstat on its file descriptor). You can
           modify some of the TarInfo's attributes before you add it using
           addfile(). If given, `arcname' specifies an alternative name for the
           file in the archive.
        """
        self._check("aw")

        # When fileobj is given, replace name by
        # fileobj's real name.
        if fileobj is not None:
            name = fileobj.name

        # Building the name of the member in the archive.
        # Backward slashes are converted to forward slashes,
        # Absolute paths are turned to relative paths.
        if arcname is None:
            arcname = name
        drv, arcname = os.path.splitdrive(arcname)
        arcname = arcname.replace(os.sep, "/")
        arcname = arcname.lstrip("/")

        # Now, fill the TarInfo object with
        # information specific for the file.
        tarinfo = self.tarinfo()
        tarinfo.tarfile = self

        # Use os.stat or os.lstat, depending on platform
        # and if symlinks shall be resolved.
        if fileobj is None:
            if hasattr(os, "lstat") and not self.dereference:
                statres = os.lstat(name)
            else:
                statres = os.stat(name)
        else:
            statres = os.fstat(fileobj.fileno())
        linkname = ""

        # Map the stat mode to a tar member type.
        stmd = statres.st_mode
        if stat.S_ISREG(stmd):
            inode = (statres.st_ino, statres.st_dev)
            if not self.dereference and statres.st_nlink > 1 and \
                    inode in self.inodes and arcname != self.inodes[inode]:
                # Is it a hardlink to an already
                # archived file?
                type = LNKTYPE
                linkname = self.inodes[inode]
            else:
                # The inode is added only if its valid.
                # For win32 it is always 0.
                type = REGTYPE
                if inode[0]:
                    self.inodes[inode] = arcname
        elif stat.S_ISDIR(stmd):
            type = DIRTYPE
        elif stat.S_ISFIFO(stmd):
            type = FIFOTYPE
        elif stat.S_ISLNK(stmd):
            type = SYMTYPE
            linkname = os.readlink(name)
        elif stat.S_ISCHR(stmd):
            type = CHRTYPE
        elif stat.S_ISBLK(stmd):
            type = BLKTYPE
        else:
            # Sockets and other unsupported file types are skipped.
            return None

        # Fill the TarInfo object with all
        # information we can get.
        tarinfo.name = arcname
        tarinfo.mode = stmd
        tarinfo.uid = statres.st_uid
        tarinfo.gid = statres.st_gid
        if type == REGTYPE:
            tarinfo.size = statres.st_size
        else:
            # Only regular files carry data blocks.
            tarinfo.size = 0L
        tarinfo.mtime = statres.st_mtime
        tarinfo.type = type
        tarinfo.linkname = linkname
        # pwd/grp are None on platforms without them (e.g. Windows).
        if pwd:
            try:
                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
            except KeyError:
                pass
        if grp:
            try:
                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
            except KeyError:
                pass

        if type in (CHRTYPE, BLKTYPE):
            if hasattr(os, "major") and hasattr(os, "minor"):
                tarinfo.devmajor = os.major(statres.st_rdev)
                tarinfo.devminor = os.minor(statres.st_rdev)
        return tarinfo
1909
    def list(self, verbose=True):
        """Print a table of contents to sys.stdout. If `verbose' is False, only
           the names of the members are printed. If it is True, an `ls -l'-like
           output is produced.
        """
        self._check()

        # Trailing commas keep each member's fields on one line.
        for tarinfo in self:
            if verbose:
                print filemode(tarinfo.mode),
                print "%s/%s" % (tarinfo.uname or tarinfo.uid,
                                 tarinfo.gname or tarinfo.gid),
                if tarinfo.ischr() or tarinfo.isblk():
                    # Devices show major,minor instead of a size.
                    print "%10s" % ("%d,%d" \
                                    % (tarinfo.devmajor, tarinfo.devminor)),
                else:
                    print "%10d" % tarinfo.size,
                print "%d-%02d-%02d %02d:%02d:%02d" \
                      % time.localtime(tarinfo.mtime)[:6],

            print tarinfo.name + ("/" if tarinfo.isdir() else ""),

            if verbose:
                if tarinfo.issym():
                    print "->", tarinfo.linkname,
                if tarinfo.islnk():
                    print "link to", tarinfo.linkname,
            print
1938
    def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):
        """Add the file `name' to the archive. `name' may be any type of file
           (directory, fifo, symbolic link, etc.). If given, `arcname'
           specifies an alternative name for the file in the archive.
           Directories are added recursively by default. This can be avoided by
           setting `recursive' to False. `exclude' is a function that should
           return True for each filename to be excluded. `filter' is a function
           that expects a TarInfo object argument and returns the changed
           TarInfo object, if it returns None the TarInfo object will be
           excluded from the archive.
        """
        self._check("aw")

        if arcname is None:
            arcname = name

        # Exclude pathnames.
        if exclude is not None:
            import warnings
            warnings.warn("use the filter argument instead",
                    DeprecationWarning, 2)
            if exclude(name):
                self._dbg(2, "tarfile: Excluded %r" % name)
                return

        # Skip if somebody tries to archive the archive...
        if self.name is not None and os.path.abspath(name) == self.name:
            self._dbg(2, "tarfile: Skipped %r" % name)
            return

        self._dbg(1, name)

        # Create a TarInfo object from the file.
        tarinfo = self.gettarinfo(name, arcname)

        if tarinfo is None:
            # gettarinfo() returns None for unsupported file types
            # (e.g. sockets).
            self._dbg(1, "tarfile: Unsupported type %r" % name)
            return

        # Change or exclude the TarInfo object.
        if filter is not None:
            tarinfo = filter(tarinfo)
            if tarinfo is None:
                self._dbg(2, "tarfile: Excluded %r" % name)
                return

        # Append the tar header and data to the archive.
        if tarinfo.isreg():
            f = bltn_open(name, "rb")
            self.addfile(tarinfo, f)
            f.close()

        elif tarinfo.isdir():
            self.addfile(tarinfo)
            # Recurse into the directory, propagating the same
            # exclude/filter callbacks.
            if recursive:
                for f in os.listdir(name):
                    self.add(os.path.join(name, f), os.path.join(arcname, f),
                            recursive, exclude, filter)

        else:
            # Symlinks, fifos and devices carry no data payload.
            self.addfile(tarinfo)
2000
2001 def addfile(self, tarinfo, fileobj=None):
2002 """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
2003 given, tarinfo.size bytes are read from it and added to the archive.
2004 You can create TarInfo objects using gettarinfo().
2005 On Windows platforms, `fileobj' should always be opened with mode
2006 'rb' to avoid irritation about the file size.
2007 """
2008 self._check("aw")
2009
2010 tarinfo = copy.copy(tarinfo)
2011
2012 buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
2013 self.fileobj.write(buf)
2014 self.offset += len(buf)
2015
2016 # If there's data to follow, append it.
2017 if fileobj is not None:
2018 copyfileobj(fileobj, self.fileobj, tarinfo.size)
2019 blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
2020 if remainder > 0:
2021 self.fileobj.write(NUL * (BLOCKSIZE - remainder))
2022 blocks += 1
2023 self.offset += blocks * BLOCKSIZE
2024
2025 self.members.append(tarinfo)
2026
    def extractall(self, path=".", members=None):
        """Extract all members from the archive to the current working
        directory and set owner, modification time and permissions on
        directories afterwards. `path' specifies a different directory
        to extract to. `members' is optional and must be a subset of the
        list returned by getmembers().

        NOTE(review): member names are joined onto `path' without any
        sanitization (see extract()), so an archive containing absolute or
        "../"-style names can write outside `path'. Only extract archives
        from trusted sources.
        """
        directories = []

        if members is None:
            members = self

        for tarinfo in members:
            if tarinfo.isdir():
                # Extract directories with a safe mode.  The real
                # (possibly read-only) mode is applied in the second pass
                # below, after the directory's contents exist.
                directories.append(tarinfo)
                tarinfo = copy.copy(tarinfo)
                tarinfo.mode = 0700
            self.extract(tarinfo, path)

        # Reverse sort directories.
        # Deepest paths first, so a child's metadata is fixed before its
        # parent's mtime/mode could be disturbed or made read-only.
        directories.sort(key=operator.attrgetter('name'))
        directories.reverse()

        # Set correct owner, mtime and filemode on directories.
        for tarinfo in directories:
            dirpath = os.path.join(path, tarinfo.name)
            try:
                self.chown(tarinfo, dirpath)
                self.utime(tarinfo, dirpath)
                self.chmod(tarinfo, dirpath)
            except ExtractError, e:
                # Metadata failures are fatal only with errorlevel > 1.
                if self.errorlevel > 1:
                    raise
                else:
                    self._dbg(1, "tarfile: %s" % e)
2063
    def extract(self, member, path=""):
        """Extract a member from the archive to the current working directory,
        using its full name. Its file information is extracted as accurately
        as possible. `member' may be a filename or a TarInfo object. You can
        specify a different directory using `path'.
        """
        self._check("r")

        # Accept either a member name or a TarInfo object.
        if isinstance(member, basestring):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member

        # Prepare the link target for makelink().
        if tarinfo.islnk():
            tarinfo._link_target = os.path.join(path, tarinfo.linkname)

        try:
            self._extract_member(tarinfo, os.path.join(path, tarinfo.name))
        except EnvironmentError, e:
            # OS-level errors are fatal only with errorlevel > 0,
            # otherwise they are just logged.
            if self.errorlevel > 0:
                raise
            else:
                if e.filename is None:
                    self._dbg(1, "tarfile: %s" % e.strerror)
                else:
                    self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
        except ExtractError, e:
            # Extraction problems (fifo/device/link limitations) are fatal
            # only with errorlevel > 1.
            if self.errorlevel > 1:
                raise
            else:
                self._dbg(1, "tarfile: %s" % e)
2096
2097 def extractfile(self, member):
2098 """Extract a member from the archive as a file object. `member' may be
2099 a filename or a TarInfo object. If `member' is a regular file, a
2100 file-like object is returned. If `member' is a link, a file-like
2101 object is constructed from the link's target. If `member' is none of
2102 the above, None is returned.
2103 The file-like object is read-only and provides the following
2104 methods: read(), readline(), readlines(), seek() and tell()
2105 """
2106 self._check("r")
2107
2108 if isinstance(member, basestring):
2109 tarinfo = self.getmember(member)
2110 else:
2111 tarinfo = member
2112
2113 if tarinfo.isreg():
2114 return self.fileobject(self, tarinfo)
2115
2116 elif tarinfo.type not in SUPPORTED_TYPES:
2117 # If a member's type is unknown, it is treated as a
2118 # regular file.
2119 return self.fileobject(self, tarinfo)
2120
2121 elif tarinfo.islnk() or tarinfo.issym():
2122 if isinstance(self.fileobj, _Stream):
2123 # A small but ugly workaround for the case that someone tries
2124 # to extract a (sym)link as a file-object from a non-seekable
2125 # stream of tar blocks.
2126 raise StreamError("cannot extract (sym)link as file object")
2127 else:
2128 # A (sym)link's file object is its target's file object.
2129 return self.extractfile(self._find_link_target(tarinfo))
2130 else:
2131 # If there's no data associated with the member (directory, chrdev,
2132 # blkdev, etc.), return None instead of a file object.
2133 return None
2134
    def _extract_member(self, tarinfo, targetpath):
        """Extract the TarInfo object tarinfo to a physical
        file called targetpath, dispatching on the member's type and
        applying ownership/mode/mtime afterwards.
        """
        # Fetch the TarInfo object for the given name
        # and build the destination pathname, replacing
        # forward slashes to platform specific separators.
        targetpath = targetpath.rstrip("/")
        targetpath = targetpath.replace("/", os.sep)

        # Create all upper directories.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            # Create directories that are not part of the archive with
            # default permissions.
            os.makedirs(upperdirs)

        if tarinfo.islnk() or tarinfo.issym():
            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
        else:
            self._dbg(1, tarinfo.name)

        # Dispatch on member type; unrecognized type fields fall back to
        # makeunknown(), which extracts them as regular files.
        if tarinfo.isreg():
            self.makefile(tarinfo, targetpath)
        elif tarinfo.isdir():
            self.makedir(tarinfo, targetpath)
        elif tarinfo.isfifo():
            self.makefifo(tarinfo, targetpath)
        elif tarinfo.ischr() or tarinfo.isblk():
            self.makedev(tarinfo, targetpath)
        elif tarinfo.islnk() or tarinfo.issym():
            self.makelink(tarinfo, targetpath)
        elif tarinfo.type not in SUPPORTED_TYPES:
            self.makeunknown(tarinfo, targetpath)
        else:
            self.makefile(tarinfo, targetpath)

        # Apply metadata once the file exists. Symlinks are skipped for
        # chmod/utime -- only the owner can be set on the link itself.
        self.chown(tarinfo, targetpath)
        if not tarinfo.issym():
            self.chmod(tarinfo, targetpath)
            self.utime(tarinfo, targetpath)
2176
2177 #--------------------------------------------------------------------------
2178 # Below are the different file methods. They are called via
2179 # _extract_member() when extract() is called. They can be replaced in a
2180 # subclass to implement other functionality.
2181
2182 def makedir(self, tarinfo, targetpath):
2183 """Make a directory called targetpath.
2184 """
2185 try:
2186 # Use a safe mode for the directory, the real mode is set
2187 # later in _extract_member().
2188 os.mkdir(targetpath, 0700)
2189 except EnvironmentError, e:
2190 if e.errno != errno.EEXIST:
2191 raise
2192
2193 def makefile(self, tarinfo, targetpath):
2194 """Make a file called targetpath.
2195 """
2196 source = self.extractfile(tarinfo)
2197 target = bltn_open(targetpath, "wb")
2198 copyfileobj(source, target)
2199 source.close()
2200 target.close()
2201
2202 def makeunknown(self, tarinfo, targetpath):
2203 """Make a file from a TarInfo object with an unknown type
2204 at targetpath.
2205 """
2206 self.makefile(tarinfo, targetpath)
2207 self._dbg(1, "tarfile: Unknown file type %r, " \
2208 "extracted as regular file." % tarinfo.type)
2209
2210 def makefifo(self, tarinfo, targetpath):
2211 """Make a fifo called targetpath.
2212 """
2213 if hasattr(os, "mkfifo"):
2214 os.mkfifo(targetpath)
2215 else:
2216 raise ExtractError("fifo not supported by system")
2217
2218 def makedev(self, tarinfo, targetpath):
2219 """Make a character or block device called targetpath.
2220 """
2221 if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
2222 raise ExtractError("special devices not supported by system")
2223
2224 mode = tarinfo.mode
2225 if tarinfo.isblk():
2226 mode |= stat.S_IFBLK
2227 else:
2228 mode |= stat.S_IFCHR
2229
2230 os.mknod(targetpath, mode,
2231 os.makedev(tarinfo.devmajor, tarinfo.devminor))
2232
    def makelink(self, tarinfo, targetpath):
        """Make a (symbolic) link called targetpath. If it cannot be created
        (platform limitation), we try to make a copy of the referenced file
        instead of a link.
        """
        if hasattr(os, "symlink") and hasattr(os, "link"):
            # For systems that support symbolic and hard links.
            if tarinfo.issym():
                # Remove an existing entry first so os.symlink() cannot
                # fail with EEXIST.
                if os.path.lexists(targetpath):
                    os.unlink(targetpath)
                os.symlink(tarinfo.linkname, targetpath)
            else:
                # See extract(); _link_target was prepared there.
                if os.path.exists(tarinfo._link_target):
                    if os.path.lexists(targetpath):
                        os.unlink(targetpath)
                    os.link(tarinfo._link_target, targetpath)
                else:
                    # Hard-link target missing on disk: extract the archived
                    # target member to this path instead.
                    self._extract_member(self._find_link_target(tarinfo), targetpath)
        else:
            # No link support at all: fall back to extracting a copy of
            # the link's target.
            try:
                self._extract_member(self._find_link_target(tarinfo), targetpath)
            except KeyError:
                raise ExtractError("unable to resolve link inside archive")
2257
    def chown(self, tarinfo, targetpath):
        """Set owner of targetpath according to tarinfo. Only attempted
        when running as root; silently skipped otherwise.
        """
        if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
            # We have to be root to do so.
            # Resolve the group: prefer the symbolic name, fall back to the
            # numeric gid from the archive, then to our own gid.
            try:
                g = grp.getgrnam(tarinfo.gname)[2]
            except KeyError:
                try:
                    g = grp.getgrgid(tarinfo.gid)[2]
                except KeyError:
                    g = os.getgid()
            # Resolve the user the same way: name, then uid, then our own.
            try:
                u = pwd.getpwnam(tarinfo.uname)[2]
            except KeyError:
                try:
                    u = pwd.getpwuid(tarinfo.uid)[2]
                except KeyError:
                    u = os.getuid()
            try:
                if tarinfo.issym() and hasattr(os, "lchown"):
                    # Change the owner of the link itself, not its target.
                    os.lchown(targetpath, u, g)
                else:
                    if sys.platform != "os2emx":
                        os.chown(targetpath, u, g)
            except EnvironmentError, e:
                raise ExtractError("could not change owner")
2285
2286 def chmod(self, tarinfo, targetpath):
2287 """Set file permissions of targetpath according to tarinfo.
2288 """
2289 if hasattr(os, 'chmod'):
2290 try:
2291 os.chmod(targetpath, tarinfo.mode)
2292 except EnvironmentError, e:
2293 raise ExtractError("could not change mode")
2294
2295 def utime(self, tarinfo, targetpath):
2296 """Set modification time of targetpath according to tarinfo.
2297 """
2298 if not hasattr(os, 'utime'):
2299 return
2300 try:
2301 os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
2302 except EnvironmentError, e:
2303 raise ExtractError("could not change modification time")
2304
2305 #--------------------------------------------------------------------------
    def next(self):
        """Return the next member of the archive as a TarInfo object, when
        TarFile is opened for reading. Return None if there is no more
        available.
        """
        self._check("ra")
        # A member may already have been read ahead; hand it out first.
        if self.firstmember is not None:
            m = self.firstmember
            self.firstmember = None
            return m

        # Read the next block.
        self.fileobj.seek(self.offset)
        tarinfo = None
        while True:
            try:
                tarinfo = self.tarinfo.fromtarfile(self)
            except EOFHeaderError, e:
                # All-zero block: normally the end-of-archive marker, but
                # with ignore_zeros we skip it and keep scanning.
                if self.ignore_zeros:
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
            except InvalidHeaderError, e:
                if self.ignore_zeros:
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
                elif self.offset == 0:
                    # A bad header right at the start means this is not a
                    # tar file at all; later bad headers just end iteration.
                    raise ReadError(str(e))
            except EmptyHeaderError:
                if self.offset == 0:
                    raise ReadError("empty file")
            except TruncatedHeaderError, e:
                if self.offset == 0:
                    raise ReadError(str(e))
            except SubsequentHeaderError, e:
                raise ReadError(str(e))
            break

        if tarinfo is not None:
            self.members.append(tarinfo)
        else:
            # Nothing more could be read: the archive is fully scanned.
            self._loaded = True

        return tarinfo
2351
2352 #--------------------------------------------------------------------------
2353 # Little helper methods:
2354
2355 def _getmember(self, name, tarinfo=None, normalize=False):
2356 """Find an archive member by name from bottom to top.
2357 If tarinfo is given, it is used as the starting point.
2358 """
2359 # Ensure that all members have been loaded.
2360 members = self.getmembers()
2361
2362 # Limit the member search list up to tarinfo.
2363 if tarinfo is not None:
2364 members = members[:members.index(tarinfo)]
2365
2366 if normalize:
2367 name = os.path.normpath(name)
2368
2369 for member in reversed(members):
2370 if normalize:
2371 member_name = os.path.normpath(member.name)
2372 else:
2373 member_name = member.name
2374
2375 if name == member_name:
2376 return member
2377
2378 def _load(self):
2379 """Read through the entire archive file and look for readable
2380 members.
2381 """
2382 while True:
2383 tarinfo = self.next()
2384 if tarinfo is None:
2385 break
2386 self._loaded = True
2387
2388 def _check(self, mode=None):
2389 """Check if TarFile is still open, and if the operation's mode
2390 corresponds to TarFile's mode.
2391 """
2392 if self.closed:
2393 raise IOError("%s is closed" % self.__class__.__name__)
2394 if mode is not None and self.mode not in mode:
2395 raise IOError("bad operation for mode %r" % self.mode)
2396
2397 def _find_link_target(self, tarinfo):
2398 """Find the target member of a symlink or hardlink member in the
2399 archive.
2400 """
2401 if tarinfo.issym():
2402 # Always search the entire archive.
2403 linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname
2404 limit = None
2405 else:
2406 # Search the archive before the link, because a hard link is
2407 # just a reference to an already archived file.
2408 linkname = tarinfo.linkname
2409 limit = tarinfo
2410
2411 member = self._getmember(linkname, tarinfo=limit, normalize=True)
2412 if member is None:
2413 raise KeyError("linkname %r not found" % linkname)
2414 return member
2415
2416 def __iter__(self):
2417 """Provide an iterator object.
2418 """
2419 if self._loaded:
2420 return iter(self.members)
2421 else:
2422 return TarIter(self)
2423
2424 def _dbg(self, level, msg):
2425 """Write debugging output to sys.stderr.
2426 """
2427 if level <= self.debug:
2428 print >> sys.stderr, msg
2429
    def __enter__(self):
        """Context-management entry: refuse to enter a closed archive."""
        self._check()
        return self
2433
2434 def __exit__(self, type, value, traceback):
2435 if type is None:
2436 self.close()
2437 else:
2438 # An exception occurred. We must not call close() because
2439 # it would try to write end-of-archive blocks and padding.
2440 if not self._extfileobj:
2441 self.fileobj.close()
2442 self.closed = True
2443# class TarFile
2444
2445class TarIter:
2446 """Iterator Class.
2447
2448 for tarinfo in TarFile(...):
2449 suite...
2450 """
2451
2452 def __init__(self, tarfile):
2453 """Construct a TarIter object.
2454 """
2455 self.tarfile = tarfile
2456 self.index = 0
2457 def __iter__(self):
2458 """Return iterator object.
2459 """
2460 return self
2461 def next(self):
2462 """Return the next item using TarFile's next() method.
2463 When all members have been read, set TarFile as _loaded.
2464 """
2465 # Fix for SF #1100429: Under rare circumstances it can
2466 # happen that getmembers() is called during iteration,
2467 # which will cause TarIter to stop prematurely.
2468 if not self.tarfile._loaded:
2469 tarinfo = self.tarfile.next()
2470 if not tarinfo:
2471 self.tarfile._loaded = True
2472 raise StopIteration
2473 else:
2474 try:
2475 tarinfo = self.tarfile.members[self.index]
2476 except IndexError:
2477 raise StopIteration
2478 self.index += 1
2479 return tarinfo
2480
2481# Helper classes for sparse file support
class _section:
    """Base class for _data and _hole: a half-open region
    [offset, offset + size) of a sparse file.
    """
    def __init__(self, offset, size):
        self.offset = offset
        self.size = size
    def __contains__(self, offset):
        end = self.offset + self.size
        return self.offset <= offset < end
2490
class _data(_section):
    """Represent a data section in a sparse file.
    """
    def __init__(self, offset, size, realpos):
        _section.__init__(self, offset, size)
        # Position of this section's payload inside the archive file.
        self.realpos = realpos
2497
class _hole(_section):
    """Represent a hole section in a sparse file.

    A hole carries no payload of its own, so the base class already
    provides everything needed.
    """
    pass
2502
class _ringbuffer(list):
    """A list subclass that remembers where the previous find() hit was,
    which speeds up repeated, mostly-sequential lookups compared to
    scanning from the start each time.
    """
    def __init__(self):
        self.idx = 0
    def find(self, offset):
        pos = self.idx
        while offset not in self[pos]:
            pos += 1
            if pos == len(self):
                pos = 0
            if pos == self.idx:
                # Came full circle without a match: offset is past EOF.
                return None
        self.idx = pos
        return self[pos]
2523
2524#---------------------------------------------
2525# zipfile compatible TarFile class
2526#---------------------------------------------
# Pseudo compression constants, mirroring zipfile's, for use with the
# TarFileCompat class below.
TAR_PLAIN = 0 # zipfile.ZIP_STORED
TAR_GZIPPED = 8 # zipfile.ZIP_DEFLATED
class TarFileCompat:
    """TarFile class compatible with standard module zipfile's
       ZipFile class.

    Deprecated: construction emits a Py3k warning because this class was
    removed in Python 3.0.
    """
    def __init__(self, file, mode="r", compression=TAR_PLAIN):
        from warnings import warnpy3k
        warnpy3k("the TarFileCompat class has been removed in Python 3.0",
                 stacklevel=2)
        if compression == TAR_PLAIN:
            self.tarfile = TarFile.taropen(file, mode)
        elif compression == TAR_GZIPPED:
            self.tarfile = TarFile.gzopen(file, mode)
        else:
            raise ValueError("unknown compression constant")
        if mode[0:1] == "r":
            # Mirror zipfile's ZipInfo attribute names onto the members.
            members = self.tarfile.getmembers()
            for m in members:
                m.filename = m.name
                m.file_size = m.size
                m.date_time = time.gmtime(m.mtime)[:6]
    def namelist(self):
        """Return the names of the regular members (cf. ZipFile.namelist)."""
        return map(lambda m: m.name, self.infolist())
    def infolist(self):
        """Return the members of regular-file type (cf. ZipFile.infolist)."""
        return filter(lambda m: m.type in REGULAR_TYPES,
                      self.tarfile.getmembers())
    def printdir(self):
        """Print a table of contents (cf. ZipFile.printdir)."""
        self.tarfile.list()
    def testzip(self):
        """Compatibility no-op; always returns None."""
        return
    def getinfo(self, name):
        """Return the member object for `name' (cf. ZipFile.getinfo)."""
        return self.tarfile.getmember(name)
    def read(self, name):
        """Return the content of member `name' as a string."""
        return self.tarfile.extractfile(self.tarfile.getmember(name)).read()
    def write(self, filename, arcname=None, compress_type=None):
        """Add a file to the archive; `compress_type' is ignored."""
        self.tarfile.add(filename, arcname)
    def writestr(self, zinfo, bytes):
        """Add a member built from a ZipInfo-like object and a data string."""
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        import calendar
        tinfo = TarInfo(zinfo.filename)
        tinfo.size = len(bytes)
        tinfo.mtime = calendar.timegm(zinfo.date_time)
        self.tarfile.addfile(tinfo, StringIO(bytes))
    def close(self):
        """Close the underlying TarFile."""
        self.tarfile.close()
2576#class TarFileCompat
2577
2578#--------------------
2579# exported functions
2580#--------------------
def is_tarfile(name):
    """Return True if name points to a tar archive that we
    are able to handle, else return False.
    """
    # Opening probes the first header; any recognized failure mode
    # surfaces as a TarError subclass.
    try:
        open(name).close()
        return True
    except TarError:
        return False
2591
# Keep a reference to the real built-in open() before shadowing the
# module-level name with TarFile.open, the module's public entry point.
bltn_open = open
open = TarFile.open