path: root/src/3rdparty
author     Jake Petroules <jake.petroules@qt.io>  2017-04-26 16:29:13 -0700
committer  Jake Petroules <jake.petroules@qt.io>  2017-05-29 08:56:47 +0000
commit     035979a5eb56726001aca6d65757595139324f91 (patch)
tree       184459f5dc26211952151fa8483b324ad7d2000c /src/3rdparty
parent     2185e6b7e2b2518740ac14268d2bc6b190e30d5c (diff)
Add some third party Python modules for use in dmg module
They are all MIT or BSD 3-clause licensed. They will be used in a
follow-up commit to aid in the creation of DMG files on macOS.

Change-Id: Icdca2bc8604ee058ea807e9570a3ac9fb6d903d9
Reviewed-by: Lars Knoll <lars.knoll@qt.io>
Reviewed-by: Christian Kandeler <christian.kandeler@qt.io>
Diffstat (limited to 'src/3rdparty')
-rw-r--r--  src/3rdparty/python/.gitignore | 3
-rwxr-xr-x  src/3rdparty/python/bin/dmgbuild | 36
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/biplist/LICENSE | 25
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/biplist/__init__.py | 870
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/biplist/qt_attribution.json | 13
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/LICENSE | 19
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/__init__.py | 3
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/badge.py | 143
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/colors.py | 494
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/core.py | 592
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/licensing.py | 461
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/qt_attribution.json | 13
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/resources.py | 355
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/ds_store/LICENSE | 19
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/ds_store/__init__.py | 3
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/ds_store/buddy.py | 473
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/ds_store/qt_attribution.json | 13
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/ds_store/store.py | 1231
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/mac_alias/LICENSE | 19
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/mac_alias/__init__.py | 27
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/mac_alias/alias.py | 587
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/mac_alias/bookmark.py | 647
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/mac_alias/osx.py | 823
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/mac_alias/qt_attribution.json | 13
-rw-r--r--  src/3rdparty/python/lib/python2.7/site-packages/mac_alias/utils.py | 18
-rwxr-xr-x  src/3rdparty/python/update.sh | 3
26 files changed, 6903 insertions(+), 0 deletions(-)
diff --git a/src/3rdparty/python/.gitignore b/src/3rdparty/python/.gitignore
new file mode 100644
index 000000000..3a4de078a
--- /dev/null
+++ b/src/3rdparty/python/.gitignore
@@ -0,0 +1,3 @@
+*.pyc
+*.dist-info
+*.egg-info
diff --git a/src/3rdparty/python/bin/dmgbuild b/src/3rdparty/python/bin/dmgbuild
new file mode 100755
index 000000000..113ae0998
--- /dev/null
+++ b/src/3rdparty/python/bin/dmgbuild
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import dmgbuild
+import sys
+import argparse
+
+parser = argparse.ArgumentParser(description='Construct a disk image file.')
+parser.add_argument('volume_name', metavar='volume-name',
+ help='The name to give to the volume (this will appear in the title bar when the user mounts the disk image).')
+parser.add_argument('filename', metavar='output.dmg',
+ help='The filename of the disk image to create.')
+parser.add_argument('-s', '--settings',
+ help='The path of the settings file.')
+parser.add_argument('-D', dest='defines', action='append', default=[],
+ help='Define a value for the settings file (e.g. -Dfoo=bar).')
+parser.add_argument('--no-hidpi', dest='lookForHiDPI', action='store_false', default=True,
+ help='Do not search for HiDPI versions of the background image (if specified)')
+
+
+args = parser.parse_args()
+
+defines = {}
+for d in args.defines:
+ k,v = d.split('=', 1)
+ k = k.strip()
+ v = v.strip()
+ if (v.startswith("'") and v.endswith("'")) \
+ or (v.startswith('"') and v.endswith('"')):
+ v = v[1:-1]
+ defines[k] = v
+
+dmgbuild.build_dmg(args.filename, args.volume_name, args.settings, defines=defines, lookForHiDPI=args.lookForHiDPI)
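
For illustration, the wrapper script above is equivalent to calling the
dmgbuild API directly from Python (the file names and the define below are
hypothetical placeholders):

    import dmgbuild

    dmgbuild.build_dmg('MyApp.dmg', 'My App',
                       settings_file='settings.py',
                       defines={'app': 'dist/MyApp.app'},
                       lookForHiDPI=True)
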
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/biplist/LICENSE b/src/3rdparty/python/lib/python2.7/site-packages/biplist/LICENSE
new file mode 100644
index 000000000..1c7ba6cc1
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/biplist/LICENSE
@@ -0,0 +1,25 @@
+Copyright (c) 2010, Andrew Wooster
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of biplist nor the names of its contributors may be
+ used to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/biplist/__init__.py b/src/3rdparty/python/lib/python2.7/site-packages/biplist/__init__.py
new file mode 100644
index 000000000..9cab05ec3
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/biplist/__init__.py
@@ -0,0 +1,870 @@
+"""biplist -- a library for reading and writing binary property list files.
+
+Binary Property List (plist) files provide a faster and smaller serialization
+format for property lists on OS X. This is a library for generating binary
+plists which can be read by OS X, iOS, or other clients.
+
+The API models the plistlib API, and will call through to plistlib when
+XML serialization or deserialization is required.
+
+To generate plists with UID values, wrap the values with the Uid object. The
+value must be an int.
+
+To generate plists with NSData/CFData values, wrap the values with the
+Data object. The value must be a string.
+
+Date values can only be datetime.datetime objects.
+
+The exceptions InvalidPlistException and NotBinaryPlistException may be
+thrown to indicate that the data cannot be serialized or deserialized as
+a binary plist.
+
+Plist generation example:
+
+ from biplist import *
+ from datetime import datetime
+ plist = {'aKey':'aValue',
+ '0':1.322,
+ 'now':datetime.now(),
+ 'list':[1,2,3],
+ 'tuple':('a','b','c')
+ }
+ try:
+ writePlist(plist, "example.plist")
+ except (InvalidPlistException, NotBinaryPlistException), e:
+ print "Something bad happened:", e
+
+Plist parsing example:
+
+ from biplist import *
+ try:
+ plist = readPlist("example.plist")
+ print plist
+ except (InvalidPlistException, NotBinaryPlistException), e:
+ print "Not a plist:", e
+"""
+
+from collections import namedtuple
+import datetime
+import io
+import math
+import plistlib
+from struct import pack, unpack, unpack_from
+from struct import error as struct_error
+import sys
+import time
+
+try:
+ unicode
+ unicodeEmpty = r''
+except NameError:
+ unicode = str
+ unicodeEmpty = ''
+try:
+ long
+except NameError:
+ long = int
+try:
+ {}.iteritems
+ iteritems = lambda x: x.iteritems()
+except AttributeError:
+ iteritems = lambda x: x.items()
+
+__all__ = [
+ 'Uid', 'Data', 'readPlist', 'writePlist', 'readPlistFromString',
+ 'writePlistToString', 'InvalidPlistException', 'NotBinaryPlistException'
+]
+
+# Apple uses Jan 1, 2001 as a base for all plist date/times.
+apple_reference_date = datetime.datetime.utcfromtimestamp(978307200)
+
+class Uid(object):
+ """Wrapper around integers for representing UID values. This
+ is used in keyed archiving."""
+ integer = 0
+ def __init__(self, integer):
+ self.integer = integer
+
+ def __repr__(self):
+ return "Uid(%d)" % self.integer
+
+ def __eq__(self, other):
+ if isinstance(self, Uid) and isinstance(other, Uid):
+ return self.integer == other.integer
+ return False
+
+ def __cmp__(self, other):
+ return self.integer - other.integer
+
+ def __lt__(self, other):
+ return self.integer < other.integer
+
+ def __hash__(self):
+ return self.integer
+
+ def __int__(self):
+ return int(self.integer)
+
+class Data(bytes):
+ """Wrapper around bytes to distinguish Data values."""
+
+class InvalidPlistException(Exception):
+ """Raised when the plist is incorrectly formatted."""
+
+class NotBinaryPlistException(Exception):
+ """Raised when a binary plist was expected but not encountered."""
+
+def readPlist(pathOrFile):
+ """Raises NotBinaryPlistException, InvalidPlistException"""
+ didOpen = False
+ result = None
+ if isinstance(pathOrFile, (bytes, unicode)):
+ pathOrFile = open(pathOrFile, 'rb')
+ didOpen = True
+ try:
+ reader = PlistReader(pathOrFile)
+ result = reader.parse()
+ except NotBinaryPlistException as e:
+ try:
+ pathOrFile.seek(0)
+ result = None
+ if hasattr(plistlib, 'loads'):
+ contents = None
+ if isinstance(pathOrFile, (bytes, unicode)):
+ with open(pathOrFile, 'rb') as f:
+ contents = f.read()
+ else:
+ contents = pathOrFile.read()
+ result = plistlib.loads(contents)
+ else:
+ result = plistlib.readPlist(pathOrFile)
+ result = wrapDataObject(result, for_binary=True)
+ except Exception as e:
+ raise InvalidPlistException(e)
+ finally:
+ if didOpen:
+ pathOrFile.close()
+ return result
+
+def wrapDataObject(o, for_binary=False):
+ if isinstance(o, Data) and not for_binary:
+ v = sys.version_info
+ if not (v[0] >= 3 and v[1] >= 4):
+ o = plistlib.Data(o)
+ elif isinstance(o, (bytes, plistlib.Data)) and for_binary:
+ if hasattr(o, 'data'):
+ o = Data(o.data)
+ elif isinstance(o, tuple):
+ o = wrapDataObject(list(o), for_binary)
+ o = tuple(o)
+ elif isinstance(o, list):
+ for i in range(len(o)):
+ o[i] = wrapDataObject(o[i], for_binary)
+ elif isinstance(o, dict):
+ for k in o:
+ o[k] = wrapDataObject(o[k], for_binary)
+ return o
+
+def writePlist(rootObject, pathOrFile, binary=True):
+ if not binary:
+ rootObject = wrapDataObject(rootObject, binary)
+ if hasattr(plistlib, "dump"):
+ if isinstance(pathOrFile, (bytes, unicode)):
+ with open(pathOrFile, 'wb') as f:
+ return plistlib.dump(rootObject, f)
+ else:
+ return plistlib.dump(rootObject, pathOrFile)
+ else:
+ return plistlib.writePlist(rootObject, pathOrFile)
+ else:
+ didOpen = False
+ if isinstance(pathOrFile, (bytes, unicode)):
+ pathOrFile = open(pathOrFile, 'wb')
+ didOpen = True
+ writer = PlistWriter(pathOrFile)
+ result = writer.writeRoot(rootObject)
+ if didOpen:
+ pathOrFile.close()
+ return result
+
+def readPlistFromString(data):
+ return readPlist(io.BytesIO(data))
+
+def writePlistToString(rootObject, binary=True):
+ if not binary:
+ rootObject = wrapDataObject(rootObject, binary)
+ if hasattr(plistlib, "dumps"):
+ return plistlib.dumps(rootObject)
+ elif hasattr(plistlib, "writePlistToBytes"):
+ return plistlib.writePlistToBytes(rootObject)
+ else:
+ return plistlib.writePlistToString(rootObject)
+ else:
+ ioObject = io.BytesIO()
+ writer = PlistWriter(ioObject)
+ writer.writeRoot(rootObject)
+ return ioObject.getvalue()
+
+def is_stream_binary_plist(stream):
+ stream.seek(0)
+ header = stream.read(7)
+ if header == b'bplist0':
+ return True
+ else:
+ return False
+
+PlistTrailer = namedtuple('PlistTrailer', 'offsetSize, objectRefSize, offsetCount, topLevelObjectNumber, offsetTableOffset')
+PlistByteCounts = namedtuple('PlistByteCounts', 'nullBytes, boolBytes, intBytes, realBytes, dateBytes, dataBytes, stringBytes, uidBytes, arrayBytes, setBytes, dictBytes')
+
+class PlistReader(object):
+ file = None
+ contents = ''
+ offsets = None
+ trailer = None
+ currentOffset = 0
+
+ def __init__(self, fileOrStream):
+ """Raises NotBinaryPlistException."""
+ self.reset()
+ self.file = fileOrStream
+
+ def parse(self):
+ return self.readRoot()
+
+ def reset(self):
+ self.trailer = None
+ self.contents = ''
+ self.offsets = []
+ self.currentOffset = 0
+
+ def readRoot(self):
+ result = None
+ self.reset()
+ # Get the header, make sure it's a valid file.
+ if not is_stream_binary_plist(self.file):
+ raise NotBinaryPlistException()
+ self.file.seek(0)
+ self.contents = self.file.read()
+ if len(self.contents) < 32:
+ raise InvalidPlistException("File is too short.")
+ trailerContents = self.contents[-32:]
+ try:
+ self.trailer = PlistTrailer._make(unpack("!xxxxxxBBQQQ", trailerContents))
+ offset_size = self.trailer.offsetSize * self.trailer.offsetCount
+ offset = self.trailer.offsetTableOffset
+ offset_contents = self.contents[offset:offset+offset_size]
+ offset_i = 0
+ while offset_i < self.trailer.offsetCount:
+ begin = self.trailer.offsetSize*offset_i
+ tmp_contents = offset_contents[begin:begin+self.trailer.offsetSize]
+ tmp_sized = self.getSizedInteger(tmp_contents, self.trailer.offsetSize)
+ self.offsets.append(tmp_sized)
+ offset_i += 1
+ self.setCurrentOffsetToObjectNumber(self.trailer.topLevelObjectNumber)
+ result = self.readObject()
+ except TypeError as e:
+ raise InvalidPlistException(e)
+ return result
+
+ def setCurrentOffsetToObjectNumber(self, objectNumber):
+ self.currentOffset = self.offsets[objectNumber]
+
+ def readObject(self):
+ result = None
+ tmp_byte = self.contents[self.currentOffset:self.currentOffset+1]
+ marker_byte = unpack("!B", tmp_byte)[0]
+ format = (marker_byte >> 4) & 0x0f
+ extra = marker_byte & 0x0f
+ self.currentOffset += 1
+
+ def proc_extra(extra):
+ if extra == 0b1111:
+ #self.currentOffset += 1
+ extra = self.readObject()
+ return extra
+
+ # bool, null, or fill byte
+ if format == 0b0000:
+ if extra == 0b0000:
+ result = None
+ elif extra == 0b1000:
+ result = False
+ elif extra == 0b1001:
+ result = True
+ elif extra == 0b1111:
+ pass # fill byte
+ else:
+ raise InvalidPlistException("Invalid object found at offset: %d" % (self.currentOffset - 1))
+ # int
+ elif format == 0b0001:
+ extra = proc_extra(extra)
+ result = self.readInteger(pow(2, extra))
+ # real
+ elif format == 0b0010:
+ extra = proc_extra(extra)
+ result = self.readReal(extra)
+ # date
+ elif format == 0b0011 and extra == 0b0011:
+ result = self.readDate()
+ # data
+ elif format == 0b0100:
+ extra = proc_extra(extra)
+ result = self.readData(extra)
+ # ascii string
+ elif format == 0b0101:
+ extra = proc_extra(extra)
+ result = self.readAsciiString(extra)
+ # Unicode string
+ elif format == 0b0110:
+ extra = proc_extra(extra)
+ result = self.readUnicode(extra)
+ # uid
+ elif format == 0b1000:
+ result = self.readUid(extra)
+ # array
+ elif format == 0b1010:
+ extra = proc_extra(extra)
+ result = self.readArray(extra)
+ # set
+ elif format == 0b1100:
+ extra = proc_extra(extra)
+ result = set(self.readArray(extra))
+ # dict
+ elif format == 0b1101:
+ extra = proc_extra(extra)
+ result = self.readDict(extra)
+ else:
+ raise InvalidPlistException("Invalid object found: {format: %s, extra: %s}" % (bin(format), bin(extra)))
+ return result
+
+ def readInteger(self, byteSize):
+ result = 0
+ original_offset = self.currentOffset
+ data = self.contents[self.currentOffset:self.currentOffset + byteSize]
+ result = self.getSizedInteger(data, byteSize, as_number=True)
+ self.currentOffset = original_offset + byteSize
+ return result
+
+ def readReal(self, length):
+ result = 0.0
+ to_read = pow(2, length)
+ data = self.contents[self.currentOffset:self.currentOffset+to_read]
+ if length == 2: # 4 bytes
+ result = unpack('>f', data)[0]
+ elif length == 3: # 8 bytes
+ result = unpack('>d', data)[0]
+ else:
+ raise InvalidPlistException("Unknown real of length %d bytes" % to_read)
+ return result
+
+ def readRefs(self, count):
+ refs = []
+ i = 0
+ while i < count:
+ fragment = self.contents[self.currentOffset:self.currentOffset+self.trailer.objectRefSize]
+ ref = self.getSizedInteger(fragment, len(fragment))
+ refs.append(ref)
+ self.currentOffset += self.trailer.objectRefSize
+ i += 1
+ return refs
+
+ def readArray(self, count):
+ result = []
+ values = self.readRefs(count)
+ i = 0
+ while i < len(values):
+ self.setCurrentOffsetToObjectNumber(values[i])
+ value = self.readObject()
+ result.append(value)
+ i += 1
+ return result
+
+ def readDict(self, count):
+ result = {}
+ keys = self.readRefs(count)
+ values = self.readRefs(count)
+ i = 0
+ while i < len(keys):
+ self.setCurrentOffsetToObjectNumber(keys[i])
+ key = self.readObject()
+ self.setCurrentOffsetToObjectNumber(values[i])
+ value = self.readObject()
+ result[key] = value
+ i += 1
+ return result
+
+ def readAsciiString(self, length):
+ result = unpack("!%ds" % length, self.contents[self.currentOffset:self.currentOffset+length])[0]
+ self.currentOffset += length
+ return str(result.decode('ascii'))
+
+ def readUnicode(self, length):
+ actual_length = length*2
+ data = self.contents[self.currentOffset:self.currentOffset+actual_length]
+        # No unpack needed here; the raw UTF-16BE bytes are decoded directly.
+ self.currentOffset += actual_length
+ return data.decode('utf_16_be')
+
+ def readDate(self):
+ result = unpack(">d", self.contents[self.currentOffset:self.currentOffset+8])[0]
+ # Use timedelta to workaround time_t size limitation on 32-bit python.
+ result = datetime.timedelta(seconds=result) + apple_reference_date
+ self.currentOffset += 8
+ return result
+
+ def readData(self, length):
+ result = self.contents[self.currentOffset:self.currentOffset+length]
+ self.currentOffset += length
+ return Data(result)
+
+ def readUid(self, length):
+ return Uid(self.readInteger(length+1))
+
+ def getSizedInteger(self, data, byteSize, as_number=False):
+ """Numbers of 8 bytes are signed integers when they refer to numbers, but unsigned otherwise."""
+ result = 0
+ # 1, 2, and 4 byte integers are unsigned
+ if byteSize == 1:
+ result = unpack('>B', data)[0]
+ elif byteSize == 2:
+ result = unpack('>H', data)[0]
+ elif byteSize == 4:
+ result = unpack('>L', data)[0]
+ elif byteSize == 8:
+ if as_number:
+ result = unpack('>q', data)[0]
+ else:
+ result = unpack('>Q', data)[0]
+ elif byteSize <= 16:
+ # Handle odd-sized or integers larger than 8 bytes
+ # Don't naively go over 16 bytes, in order to prevent infinite loops.
+ result = 0
+ if hasattr(int, 'from_bytes'):
+ result = int.from_bytes(data, 'big')
+ else:
+ for byte in data:
+ if not isinstance(byte, int): # Python3.0-3.1.x return ints, 2.x return str
+ byte = unpack_from('>B', byte)[0]
+ result = (result << 8) | byte
+ else:
+ raise InvalidPlistException("Encountered integer longer than 16 bytes.")
+ return result
+
+class HashableWrapper(object):
+ def __init__(self, value):
+ self.value = value
+ def __repr__(self):
+ return "<HashableWrapper: %s>" % [self.value]
+
+class BoolWrapper(object):
+ def __init__(self, value):
+ self.value = value
+ def __repr__(self):
+ return "<BoolWrapper: %s>" % self.value
+
+class FloatWrapper(object):
+ _instances = {}
+ def __new__(klass, value):
+ # Ensure FloatWrapper(x) for a given float x is always the same object
+ wrapper = klass._instances.get(value)
+ if wrapper is None:
+ wrapper = object.__new__(klass)
+ wrapper.value = value
+ klass._instances[value] = wrapper
+ return wrapper
+ def __repr__(self):
+ return "<FloatWrapper: %s>" % self.value
+
+class StringWrapper(object):
+ __instances = {}
+
+ encodedValue = None
+ encoding = None
+
+ def __new__(cls, value):
+        '''Ensure we only have one instance for any string,
+        and that we encode ascii as 1 byte per character when possible'''
+
+ encodedValue = None
+
+ for encoding in ('ascii', 'utf_16_be'):
+ try:
+ encodedValue = value.encode(encoding)
+ except: pass
+ if encodedValue is not None:
+ if encodedValue not in cls.__instances:
+ cls.__instances[encodedValue] = super(StringWrapper, cls).__new__(cls)
+ cls.__instances[encodedValue].encodedValue = encodedValue
+ cls.__instances[encodedValue].encoding = encoding
+ return cls.__instances[encodedValue]
+
+ raise ValueError('Unable to get ascii or utf_16_be encoding for %s' % repr(value))
+
+ def __len__(self):
+ '''Return roughly the number of characters in this string (half the byte length)'''
+ if self.encoding == 'ascii':
+ return len(self.encodedValue)
+ else:
+ return len(self.encodedValue)//2
+
+ @property
+ def encodingMarker(self):
+ if self.encoding == 'ascii':
+ return 0b0101
+ else:
+ return 0b0110
+
+ def __repr__(self):
+ return '<StringWrapper (%s): %s>' % (self.encoding, self.encodedValue)
+
+class PlistWriter(object):
+ header = b'bplist00bybiplist1.0'
+ file = None
+ byteCounts = None
+ trailer = None
+ computedUniques = None
+ writtenReferences = None
+ referencePositions = None
+ wrappedTrue = None
+ wrappedFalse = None
+
+ def __init__(self, file):
+ self.reset()
+ self.file = file
+ self.wrappedTrue = BoolWrapper(True)
+ self.wrappedFalse = BoolWrapper(False)
+
+ def reset(self):
+ self.byteCounts = PlistByteCounts(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
+ self.trailer = PlistTrailer(0, 0, 0, 0, 0)
+
+ # A set of all the uniques which have been computed.
+ self.computedUniques = set()
+ # A list of all the uniques which have been written.
+ self.writtenReferences = {}
+ # A dict of the positions of the written uniques.
+ self.referencePositions = {}
+
+ def positionOfObjectReference(self, obj):
+ """If the given object has been written already, return its
+ position in the offset table. Otherwise, return None."""
+ return self.writtenReferences.get(obj)
+
+ def writeRoot(self, root):
+ """
+ Strategy is:
+ - write header
+ - wrap root object so everything is hashable
+ - compute size of objects which will be written
+ - need to do this in order to know how large the object refs
+ will be in the list/dict/set reference lists
+ - write objects
+ - keep objects in writtenReferences
+ - keep positions of object references in referencePositions
+ - write object references with the length computed previously
+         - compute object reference length
+ - write object reference positions
+ - write trailer
+ """
+ output = self.header
+ wrapped_root = self.wrapRoot(root)
+ self.computeOffsets(wrapped_root, asReference=True, isRoot=True)
+ self.trailer = self.trailer._replace(**{'objectRefSize':self.intSize(len(self.computedUniques))})
+ self.writeObjectReference(wrapped_root, output)
+ output = self.writeObject(wrapped_root, output, setReferencePosition=True)
+
+ # output size at this point is an upper bound on how big the
+ # object reference offsets need to be.
+ self.trailer = self.trailer._replace(**{
+ 'offsetSize':self.intSize(len(output)),
+ 'offsetCount':len(self.computedUniques),
+ 'offsetTableOffset':len(output),
+ 'topLevelObjectNumber':0
+ })
+
+ output = self.writeOffsetTable(output)
+ output += pack('!xxxxxxBBQQQ', *self.trailer)
+ self.file.write(output)
+
+ def wrapRoot(self, root):
+ if isinstance(root, bool):
+ if root is True:
+ return self.wrappedTrue
+ else:
+ return self.wrappedFalse
+ elif isinstance(root, float):
+ return FloatWrapper(root)
+ elif isinstance(root, set):
+ n = set()
+ for value in root:
+ n.add(self.wrapRoot(value))
+ return HashableWrapper(n)
+ elif isinstance(root, dict):
+ n = {}
+ for key, value in iteritems(root):
+ n[self.wrapRoot(key)] = self.wrapRoot(value)
+ return HashableWrapper(n)
+ elif isinstance(root, list):
+ n = []
+ for value in root:
+ n.append(self.wrapRoot(value))
+ return HashableWrapper(n)
+ elif isinstance(root, tuple):
+ n = tuple([self.wrapRoot(value) for value in root])
+ return HashableWrapper(n)
+ elif isinstance(root, (str, unicode)) and not isinstance(root, Data):
+ return StringWrapper(root)
+ elif isinstance(root, bytes):
+ return Data(root)
+ else:
+ return root
+
+ def incrementByteCount(self, field, incr=1):
+ self.byteCounts = self.byteCounts._replace(**{field:self.byteCounts.__getattribute__(field) + incr})
+
+ def computeOffsets(self, obj, asReference=False, isRoot=False):
+ def check_key(key):
+ if key is None:
+ raise InvalidPlistException('Dictionary keys cannot be null in plists.')
+ elif isinstance(key, Data):
+ raise InvalidPlistException('Data cannot be dictionary keys in plists.')
+ elif not isinstance(key, StringWrapper):
+ raise InvalidPlistException('Keys must be strings.')
+
+ def proc_size(size):
+ if size > 0b1110:
+ size += self.intSize(size)
+ return size
+ # If this should be a reference, then we keep a record of it in the
+ # uniques table.
+ if asReference:
+ if obj in self.computedUniques:
+ return
+ else:
+ self.computedUniques.add(obj)
+
+ if obj is None:
+ self.incrementByteCount('nullBytes')
+ elif isinstance(obj, BoolWrapper):
+ self.incrementByteCount('boolBytes')
+ elif isinstance(obj, Uid):
+ size = self.intSize(obj.integer)
+ self.incrementByteCount('uidBytes', incr=1+size)
+ elif isinstance(obj, (int, long)):
+ size = self.intSize(obj)
+ self.incrementByteCount('intBytes', incr=1+size)
+ elif isinstance(obj, FloatWrapper):
+ size = self.realSize(obj)
+ self.incrementByteCount('realBytes', incr=1+size)
+ elif isinstance(obj, datetime.datetime):
+ self.incrementByteCount('dateBytes', incr=2)
+ elif isinstance(obj, Data):
+ size = proc_size(len(obj))
+ self.incrementByteCount('dataBytes', incr=1+size)
+ elif isinstance(obj, StringWrapper):
+ size = proc_size(len(obj))
+ self.incrementByteCount('stringBytes', incr=1+size)
+ elif isinstance(obj, HashableWrapper):
+ obj = obj.value
+ if isinstance(obj, set):
+ size = proc_size(len(obj))
+ self.incrementByteCount('setBytes', incr=1+size)
+ for value in obj:
+ self.computeOffsets(value, asReference=True)
+ elif isinstance(obj, (list, tuple)):
+ size = proc_size(len(obj))
+ self.incrementByteCount('arrayBytes', incr=1+size)
+ for value in obj:
+ self.computeOffsets(value, asReference=True)
+ elif isinstance(obj, dict):
+ size = proc_size(len(obj))
+ self.incrementByteCount('dictBytes', incr=1+size)
+ for key, value in iteritems(obj):
+ check_key(key)
+ self.computeOffsets(key, asReference=True)
+ self.computeOffsets(value, asReference=True)
+ else:
+ raise InvalidPlistException("Unknown object type: %s (%s)" % (type(obj).__name__, repr(obj)))
+
+ def writeObjectReference(self, obj, output):
+ """Tries to write an object reference, adding it to the references
+ table. Does not write the actual object bytes or set the reference
+ position. Returns a tuple of whether the object was a new reference
+ (True if it was, False if it already was in the reference table)
+ and the new output.
+ """
+ position = self.positionOfObjectReference(obj)
+ if position is None:
+ self.writtenReferences[obj] = len(self.writtenReferences)
+ output += self.binaryInt(len(self.writtenReferences) - 1, byteSize=self.trailer.objectRefSize)
+ return (True, output)
+ else:
+ output += self.binaryInt(position, byteSize=self.trailer.objectRefSize)
+ return (False, output)
+
+ def writeObject(self, obj, output, setReferencePosition=False):
+ """Serializes the given object to the output. Returns output.
+ If setReferencePosition is True, will set the position the
+ object was written.
+ """
+ def proc_variable_length(format, length):
+ result = b''
+ if length > 0b1110:
+ result += pack('!B', (format << 4) | 0b1111)
+ result = self.writeObject(length, result)
+ else:
+ result += pack('!B', (format << 4) | length)
+ return result
+
+ def timedelta_total_seconds(td):
+ # Shim for Python 2.6 compatibility, which doesn't have total_seconds.
+ # Make one argument a float to ensure the right calculation.
+ return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10.0**6) / 10.0**6
+
+ if setReferencePosition:
+ self.referencePositions[obj] = len(output)
+
+ if obj is None:
+ output += pack('!B', 0b00000000)
+ elif isinstance(obj, BoolWrapper):
+ if obj.value is False:
+ output += pack('!B', 0b00001000)
+ else:
+ output += pack('!B', 0b00001001)
+ elif isinstance(obj, Uid):
+ size = self.intSize(obj.integer)
+ output += pack('!B', (0b1000 << 4) | size - 1)
+ output += self.binaryInt(obj.integer)
+ elif isinstance(obj, (int, long)):
+ byteSize = self.intSize(obj)
+ root = math.log(byteSize, 2)
+ output += pack('!B', (0b0001 << 4) | int(root))
+ output += self.binaryInt(obj, as_number=True)
+ elif isinstance(obj, FloatWrapper):
+ # just use doubles
+ output += pack('!B', (0b0010 << 4) | 3)
+ output += self.binaryReal(obj)
+ elif isinstance(obj, datetime.datetime):
+ try:
+ timestamp = (obj - apple_reference_date).total_seconds()
+ except AttributeError:
+ timestamp = timedelta_total_seconds(obj - apple_reference_date)
+ output += pack('!B', 0b00110011)
+ output += pack('!d', float(timestamp))
+ elif isinstance(obj, Data):
+ output += proc_variable_length(0b0100, len(obj))
+ output += obj
+ elif isinstance(obj, StringWrapper):
+ output += proc_variable_length(obj.encodingMarker, len(obj))
+ output += obj.encodedValue
+ elif isinstance(obj, bytes):
+ output += proc_variable_length(0b0101, len(obj))
+ output += obj
+ elif isinstance(obj, HashableWrapper):
+ obj = obj.value
+ if isinstance(obj, (set, list, tuple)):
+ if isinstance(obj, set):
+ output += proc_variable_length(0b1100, len(obj))
+ else:
+ output += proc_variable_length(0b1010, len(obj))
+
+ objectsToWrite = []
+ for objRef in obj:
+ (isNew, output) = self.writeObjectReference(objRef, output)
+ if isNew:
+ objectsToWrite.append(objRef)
+ for objRef in objectsToWrite:
+ output = self.writeObject(objRef, output, setReferencePosition=True)
+ elif isinstance(obj, dict):
+ output += proc_variable_length(0b1101, len(obj))
+ keys = []
+ values = []
+ objectsToWrite = []
+ for key, value in iteritems(obj):
+ keys.append(key)
+ values.append(value)
+ for key in keys:
+ (isNew, output) = self.writeObjectReference(key, output)
+ if isNew:
+ objectsToWrite.append(key)
+ for value in values:
+ (isNew, output) = self.writeObjectReference(value, output)
+ if isNew:
+ objectsToWrite.append(value)
+ for objRef in objectsToWrite:
+ output = self.writeObject(objRef, output, setReferencePosition=True)
+ return output
+
+ def writeOffsetTable(self, output):
+ """Writes all of the object reference offsets."""
+ all_positions = []
+ writtenReferences = list(self.writtenReferences.items())
+ writtenReferences.sort(key=lambda x: x[1])
+ for obj,order in writtenReferences:
+            # Porting note: Elsewhere we deliberately replace empty unicode strings
+ # with empty binary strings, but the empty unicode string
+ # goes into writtenReferences. This isn't an issue in Py2
+ # because u'' and b'' have the same hash; but it is in
+ # Py3, where they don't.
+ if bytes != str and obj == unicodeEmpty:
+ obj = b''
+ position = self.referencePositions.get(obj)
+ if position is None:
+ raise InvalidPlistException("Error while writing offsets table. Object not found. %s" % obj)
+ output += self.binaryInt(position, self.trailer.offsetSize)
+ all_positions.append(position)
+ return output
+
+ def binaryReal(self, obj):
+ # just use doubles
+ result = pack('>d', obj.value)
+ return result
+
+ def binaryInt(self, obj, byteSize=None, as_number=False):
+ result = b''
+ if byteSize is None:
+ byteSize = self.intSize(obj)
+ if byteSize == 1:
+ result += pack('>B', obj)
+ elif byteSize == 2:
+ result += pack('>H', obj)
+ elif byteSize == 4:
+ result += pack('>L', obj)
+ elif byteSize == 8:
+ if as_number:
+ result += pack('>q', obj)
+ else:
+ result += pack('>Q', obj)
+ elif byteSize <= 16:
+ try:
+ result = pack('>Q', 0) + pack('>Q', obj)
+ except struct_error as e:
+ raise InvalidPlistException("Unable to pack integer %d: %s" % (obj, e))
+ else:
+ raise InvalidPlistException("Core Foundation can't handle integers with size greater than 16 bytes.")
+ return result
+
+ def intSize(self, obj):
+ """Returns the number of bytes necessary to store the given integer."""
+ # SIGNED
+ if obj < 0: # Signed integer, always 8 bytes
+ return 8
+ # UNSIGNED
+ elif obj <= 0xFF: # 1 byte
+ return 1
+ elif obj <= 0xFFFF: # 2 bytes
+ return 2
+ elif obj <= 0xFFFFFFFF: # 4 bytes
+ return 4
+ # SIGNED
+ # 0x7FFFFFFFFFFFFFFF is the max.
+ elif obj <= 0x7FFFFFFFFFFFFFFF: # 8 bytes signed
+ return 8
+ elif obj <= 0xffffffffffffffff: # 8 bytes unsigned
+ return 16
+ else:
+ raise InvalidPlistException("Core Foundation can't handle integers with size greater than 8 bytes.")
+
+ def realSize(self, obj):
+ return 8
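
As a quick sketch of the biplist API added above (the file name is a
hypothetical placeholder):

    import datetime
    from biplist import readPlist, writePlist, Data, Uid

    plist = {'aKey': 'aValue',
             'now': datetime.datetime.utcnow(),
             'blob': Data(b'\x00\x01'),
             'ref': Uid(1)}
    writePlist(plist, 'example.plist')        # binary format by default
    roundtripped = readPlist('example.plist')
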
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/biplist/qt_attribution.json b/src/3rdparty/python/lib/python2.7/site-packages/biplist/qt_attribution.json
new file mode 100644
index 000000000..6b0df2d21
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/biplist/qt_attribution.json
@@ -0,0 +1,13 @@
+{
+    "Id": "biplist",
+    "Name": "biplist",
+    "QDocModule": "qbs",
+    "QtUsage": "Used in the qbs dmg module for building Apple disk images.",
+    "Description": "biplist is a library for reading/writing binary plists.",
+    "Homepage": "https://bitbucket.org/wooster/biplist",
+    "Version": "1.0.1",
+    "License": "BSD 3-clause \"New\" or \"Revised\" License",
+    "LicenseId": "BSD-3-Clause",
+    "LicenseFile": "LICENSE",
+    "Copyright": "Copyright (c) 2010, Andrew Wooster"
+}
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/LICENSE b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/LICENSE
new file mode 100644
index 000000000..e91f4eb38
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2014 Alastair Houghton
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/__init__.py b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/__init__.py
new file mode 100644
index 000000000..e7f985c32
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/__init__.py
@@ -0,0 +1,3 @@
+from .core import build_dmg
+
+__all__ = ['build_dmg']
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/badge.py b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/badge.py
new file mode 100644
index 000000000..159a53708
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/badge.py
@@ -0,0 +1,143 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from Quartz import *
+import math
+
+_REMOVABLE_DISK_PATH = '/System/Library/Extensions/IOStorageFamily.kext/Contents/Resources/Removable.icns'
+
+def badge_disk_icon(badge_file, output_file):
+ # Load the Removable disk icon
+ url = CFURLCreateWithFileSystemPath(None, _REMOVABLE_DISK_PATH,
+ kCFURLPOSIXPathStyle, False)
+ backdrop = CGImageSourceCreateWithURL(url, None)
+ backdropCount = CGImageSourceGetCount(backdrop)
+
+ # Load the badge
+ url = CFURLCreateWithFileSystemPath(None, badge_file,
+ kCFURLPOSIXPathStyle, False)
+ badge = CGImageSourceCreateWithURL(url, None)
+ assert badge is not None, 'Unable to process image file: %s' % badge_file
+ badgeCount = CGImageSourceGetCount(badge)
+
+ # Set up a destination for our target
+ url = CFURLCreateWithFileSystemPath(None, output_file,
+ kCFURLPOSIXPathStyle, False)
+ target = CGImageDestinationCreateWithURL(url, 'com.apple.icns',
+ backdropCount, None)
+
+ # Get the RGB colorspace
+ rgbColorSpace = CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB)
+
+ # Scale
+ scale = 1.0
+
+ # Perspective transform
+ corners = ((0.2, 0.95), (0.8, 0.95), (0.85, 0.35), (0.15, 0.35))
+
+ # Translation
+ position = (0.5, 0.5)
+
+ for n in range(backdropCount):
+ props = CGImageSourceCopyPropertiesAtIndex(backdrop, n, None)
+ width = props['PixelWidth']
+ height = props['PixelHeight']
+ dpi = props['DPIWidth']
+ depth = props['Depth']
+
+ # Choose the best sized badge image
+ bestWidth = None
+ bestHeight = None
+ bestBadge = None
+ bestDepth = None
+ bestDPI = None
+ for m in range(badgeCount):
+ badgeProps = CGImageSourceCopyPropertiesAtIndex(badge, m, None)
+ badgeWidth = badgeProps['PixelWidth']
+ badgeHeight = badgeProps['PixelHeight']
+ badgeDPI = badgeProps['DPIWidth']
+ badgeDepth = badgeProps['Depth']
+
+ if bestBadge is None or (badgeWidth <= width
+ and (bestWidth > width
+ or badgeWidth > bestWidth
+ or (badgeWidth == bestWidth
+ and badgeDPI == dpi
+ and badgeDepth <= depth
+ and (bestDepth is None
+ or badgeDepth > bestDepth)))):
+ bestBadge = m
+ bestWidth = badgeWidth
+ bestHeight = badgeHeight
+ bestDPI = badgeDPI
+ bestDepth = badgeDepth
+
+ badgeImage = CGImageSourceCreateImageAtIndex(badge, bestBadge, None)
+ badgeCI = CIImage.imageWithCGImage_(badgeImage)
+
+ backgroundImage = CGImageSourceCreateImageAtIndex(backdrop, n, None)
+ backgroundCI = CIImage.imageWithCGImage_(backgroundImage)
+
+ compositor = CIFilter.filterWithName_('CISourceOverCompositing')
+ lanczos = CIFilter.filterWithName_('CILanczosScaleTransform')
+ perspective = CIFilter.filterWithName_('CIPerspectiveTransform')
+ transform = CIFilter.filterWithName_('CIAffineTransform')
+
+ lanczos.setValue_forKey_(badgeCI, kCIInputImageKey)
+ lanczos.setValue_forKey_(scale * float(width)/bestWidth, kCIInputScaleKey)
+ lanczos.setValue_forKey_(1.0, kCIInputAspectRatioKey)
+
+ topLeft = (width * scale * corners[0][0],
+ width * scale * corners[0][1])
+ topRight = (width * scale * corners[1][0],
+ width * scale * corners[1][1])
+ bottomRight = (width * scale * corners[2][0],
+ width * scale * corners[2][1])
+ bottomLeft = (width * scale * corners[3][0],
+ width * scale * corners[3][1])
+
+ out = lanczos.valueForKey_(kCIOutputImageKey)
+ if width >= 16:
+ perspective.setValue_forKey_(out, kCIInputImageKey)
+ perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*topLeft),
+ 'inputTopLeft')
+ perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*topRight),
+ 'inputTopRight')
+ perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*bottomRight),
+ 'inputBottomRight')
+ perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*bottomLeft),
+ 'inputBottomLeft')
+ out = perspective.valueForKey_(kCIOutputImageKey)
+
+ tfm = NSAffineTransform.transform()
+ tfm.translateXBy_yBy_(math.floor((position[0] - 0.5 * scale) * width),
+ math.floor((position[1] - 0.5 * scale) * height))
+
+ transform.setValue_forKey_(out, kCIInputImageKey)
+ transform.setValue_forKey_(tfm, 'inputTransform')
+ out = transform.valueForKey_(kCIOutputImageKey)
+
+ compositor.setValue_forKey_(out, kCIInputImageKey)
+ compositor.setValue_forKey_(backgroundCI, kCIInputBackgroundImageKey)
+
+ result = compositor.valueForKey_(kCIOutputImageKey)
+
+ cgContext = CGBitmapContextCreate(None,
+ width,
+ height,
+ 8,
+ 0,
+ rgbColorSpace,
+ kCGImageAlphaPremultipliedLast)
+ context = CIContext.contextWithCGContext_options_(cgContext, None)
+
+ context.drawImage_inRect_fromRect_(result,
+ ((0, 0), (width, height)),
+ ((0, 0), (width, height)))
+
+ image = CGBitmapContextCreateImage(cgContext)
+
+ CGImageDestinationAddImage(target, image, props)
+
+ CGImageDestinationFinalize(target)
+
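A minimal usage sketch for the badge module above; it is macOS-only, since
it relies on the PyObjC Quartz bindings, and both .icns paths here are
hypothetical:

    from dmgbuild import badge

    # Composites the badge image onto the stock removable-disk icon.
    badge.badge_disk_icon('MyAppBadge.icns', 'VolumeIcon.icns')
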
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/colors.py b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/colors.py
new file mode 100644
index 000000000..1d252a6bd
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/colors.py
@@ -0,0 +1,494 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+import re
+import math
+
+class Color (object):
+ def to_rgb(self):
+ raise Exception('Must implement to_rgb() in subclasses')
+
+class RGB (Color):
+ def __init__(self, r, g, b):
+ self.r = r
+ self.g = g
+ self.b = b
+
+ def to_rgb(self):
+ return self
+
+class HSL (Color):
+ def __init__(self, h, s, l):
+ self.h = h
+ self.s = s
+ self.l = l
+
+ @staticmethod
+ def _hue_to_rgb(t1, t2, hue):
+ if hue < 0:
+ hue += 6
+ elif hue >= 6:
+ hue -= 6
+
+ if hue < 1:
+ return (t2 - t1) * hue + t1
+ elif hue < 3:
+ return t2
+ elif hue < 4:
+ return (t2 - t1) * (4 - hue) + t1
+ else:
+ return t1
+
+ def to_rgb(self):
+ hue = self.h / 60.0
+ if self.l <= 0.5:
+ t2 = self.l * (self.s + 1)
+ else:
+ t2 = self.l + self.s - (self.l * self.s)
+ t1 = self.l * 2 - t2
+ r = self._hue_to_rgb(t1, t2, hue + 2)
+ g = self._hue_to_rgb(t1, t2, hue)
+ b = self._hue_to_rgb(t1, t2, hue - 2)
+ return RGB(r, g, b)
+
+class HWB (Color):
+ def __init__(self, h, w, b):
+ self.h = h
+ self.w = w
+ self.b = b
+
+ @staticmethod
+ def _hue_to_rgb(hue):
+ if hue < 0:
+ hue += 6
+ elif hue >= 6:
+ hue -= 6
+
+ if hue < 1:
+ return hue
+ elif hue < 3:
+ return 1
+ elif hue < 4:
+ return (4 - hue)
+ else:
+ return 0
+
+ def to_rgb(self):
+ hue = self.h / 60.0
+ t1 = 1 - self.w - self.b
+ r = self._hue_to_rgb(hue + 2) * t1 + self.w
+ g = self._hue_to_rgb(hue) * t1 + self.w
+ b = self._hue_to_rgb(hue - 2) * t1 + self.w
+ return RGB(r, g, b)
+
+class CMYK (Color):
+ def __init__(self, c, m, y, k):
+ self.c = c
+ self.m = m
+ self.y = y
+ self.k = k
+
+ def to_rgb(self):
+ r = 1.0 - min(1.0, self.c + self.k)
+ g = 1.0 - min(1.0, self.m + self.k)
+ b = 1.0 - min(1.0, self.y + self.k)
+ return RGB(r, g, b)
+
+class Gray (Color):
+ def __init__(self, g):
+ self.g = g
+
+ def to_rgb(self):
+        return RGB(self.g, self.g, self.g)
+
+_x11_colors = {
+ 'aliceblue': (240, 248, 255),
+ 'antiquewhite': (250, 235, 215),
+ 'aqua': ( 0, 255, 255),
+ 'aquamarine': (127, 255, 212),
+ 'azure': (240, 255, 255),
+ 'beige': (245, 245, 220),
+ 'bisque': (255, 228, 196),
+ 'black': ( 0, 0, 0),
+ 'blanchedalmond': (255, 235, 205),
+ 'blue': ( 0, 0, 255),
+ 'blueviolet': (138, 43, 226),
+ 'brown': (165, 42, 42),
+ 'burlywood': (222, 184, 135),
+ 'cadetblue': ( 95, 158, 160),
+ 'chartreuse': (127, 255, 0),
+ 'chocolate': (210, 105, 30),
+ 'coral': (255, 127, 80),
+ 'cornflowerblue': (100, 149, 237),
+ 'cornsilk': (255, 248, 220),
+ 'crimson': (220, 20, 60),
+ 'cyan': ( 0, 255, 255),
+ 'darkblue': ( 0, 0, 139),
+ 'darkcyan': ( 0, 139, 139),
+ 'darkgoldenrod': (184, 134, 11),
+ 'darkgray': (169, 169, 169),
+ 'darkgreen': ( 0, 100, 0),
+ 'darkgrey': (169, 169, 169),
+ 'darkkhaki': (189, 183, 107),
+ 'darkmagenta': (139, 0, 139),
+ 'darkolivegreen': ( 85, 107, 47),
+ 'darkorange': (255, 140, 0),
+ 'darkorchid': (153, 50, 204),
+ 'darkred': (139, 0, 0),
+ 'darksalmon': (233, 150, 122),
+ 'darkseagreen': (143, 188, 143),
+ 'darkslateblue': ( 72, 61, 139),
+ 'darkslategray': ( 47, 79, 79),
+ 'darkslategrey': ( 47, 79, 79),
+ 'darkturquoise': ( 0, 206, 209),
+ 'darkviolet': (148, 0, 211),
+ 'deeppink': (255, 20, 147),
+ 'deepskyblue': ( 0, 191, 255),
+ 'dimgray': (105, 105, 105),
+ 'dimgrey': (105, 105, 105),
+ 'dodgerblue': ( 30, 144, 255),
+ 'firebrick': (178, 34, 34),
+ 'floralwhite': (255, 250, 240),
+ 'forestgreen': ( 34, 139, 34),
+ 'fuchsia': (255, 0, 255),
+ 'gainsboro': (220, 220, 220),
+ 'ghostwhite': (248, 248, 255),
+ 'gold': (255, 215, 0),
+ 'goldenrod': (218, 165, 32),
+ 'gray': (128, 128, 128),
+ 'grey': (128, 128, 128),
+ 'green': ( 0, 128, 0),
+ 'greenyellow': (173, 255, 47),
+ 'honeydew': (240, 255, 240),
+ 'hotpink': (255, 105, 180),
+ 'indianred': (205, 92, 92),
+ 'indigo': ( 75, 0, 130),
+ 'ivory': (255, 255, 240),
+ 'khaki': (240, 230, 140),
+ 'lavender': (230, 230, 250),
+ 'lavenderblush': (255, 240, 245),
+ 'lawngreen': (124, 252, 0),
+ 'lemonchiffon': (255, 250, 205),
+ 'lightblue': (173, 216, 230),
+ 'lightcoral': (240, 128, 128),
+ 'lightcyan': (224, 255, 255),
+ 'lightgoldenrodyellow': (250, 250, 210),
+ 'lightgray': (211, 211, 211),
+ 'lightgreen': (144, 238, 144),
+ 'lightgrey': (211, 211, 211),
+ 'lightpink': (255, 182, 193),
+ 'lightsalmon': (255, 160, 122),
+ 'lightseagreen': ( 32, 178, 170),
+ 'lightskyblue': (135, 206, 250),
+ 'lightslategray': (119, 136, 153),
+ 'lightslategrey': (119, 136, 153),
+ 'lightsteelblue': (176, 196, 222),
+ 'lightyellow': (255, 255, 224),
+ 'lime': ( 0, 255, 0),
+ 'limegreen': ( 50, 205, 50),
+ 'linen': (250, 240, 230),
+ 'magenta': (255, 0, 255),
+ 'maroon': (128, 0, 0),
+ 'mediumaquamarine': (102, 205, 170),
+ 'mediumblue': ( 0, 0, 205),
+ 'mediumorchid': (186, 85, 211),
+ 'mediumpurple': (147, 112, 219),
+ 'mediumseagreen': ( 60, 179, 113),
+ 'mediumslateblue': (123, 104, 238),
+ 'mediumspringgreen': ( 0, 250, 154),
+ 'mediumturquoise': ( 72, 209, 204),
+ 'mediumvioletred': (199, 21, 133),
+ 'midnightblue': ( 25, 25, 112),
+ 'mintcream': (245, 255, 250),
+ 'mistyrose': (255, 228, 225),
+ 'moccasin': (255, 228, 181),
+ 'navajowhite': (255, 222, 173),
+ 'navy': ( 0, 0, 128),
+ 'oldlace': (253, 245, 230),
+ 'olive': (128, 128, 0),
+ 'olivedrab': (107, 142, 35),
+ 'orange': (255, 165, 0),
+ 'orangered': (255, 69, 0),
+ 'orchid': (218, 112, 214),
+ 'palegoldenrod': (238, 232, 170),
+ 'palegreen': (152, 251, 152),
+ 'paleturquoise': (175, 238, 238),
+ 'palevioletred': (219, 112, 147),
+ 'papayawhip': (255, 239, 213),
+ 'peachpuff': (255, 218, 185),
+ 'peru': (205, 133, 63),
+ 'pink': (255, 192, 203),
+ 'plum': (221, 160, 221),
+ 'powderblue': (176, 224, 230),
+ 'purple': (128, 0, 128),
+ 'red': (255, 0, 0),
+ 'rosybrown': (188, 143, 143),
+ 'royalblue': ( 65, 105, 225),
+ 'saddlebrown': (139, 69, 19),
+ 'salmon': (250, 128, 114),
+ 'sandybrown': (244, 164, 96),
+ 'seagreen': ( 46, 139, 87),
+ 'seashell': (255, 245, 238),
+ 'sienna': (160, 82, 45),
+ 'silver': (192, 192, 192),
+ 'skyblue': (135, 206, 235),
+ 'slateblue': (106, 90, 205),
+ 'slategray': (112, 128, 144),
+ 'slategrey': (112, 128, 144),
+ 'snow': (255, 250, 250),
+ 'springgreen': ( 0, 255, 127),
+ 'steelblue': ( 70, 130, 180),
+ 'tan': (210, 180, 140),
+ 'teal': ( 0, 128, 128),
+ 'thistle': (216, 191, 216),
+ 'tomato': (255, 99, 71),
+ 'turquoise': ( 64, 224, 208),
+ 'violet': (238, 130, 238),
+ 'wheat': (245, 222, 179),
+ 'white': (255, 255, 255),
+ 'whitesmoke': (245, 245, 245),
+ 'yellow': (255, 255, 0),
+ 'yellowgreen': (154, 205, 50)
+ }
+
+_ws_re = re.compile(r'\s+')
+_token_re = re.compile(r'[A-Za-z_][A-Za-z0-9_]*')
+_hex_re = re.compile(r'#([0-9a-f]{3}(?:[0-9a-f]{3})?)$')
+_number_re = re.compile(r'[0-9]+(\.[0-9]*)?|\.[0-9]+')
+
+class ColorParser (object):
+ def __init__(self, s):
+ self._string = s
+ self._pos = 0
+
+ def skipws(self):
+ m = _ws_re.match(self._string, self._pos)
+ if m:
+ self._pos = m.end(0)
+
+ def expect(self, s, context=''):
+ if len(self._string) - self._pos < len(s) \
+ or self._string[self._pos:self._pos + len(s)] != s:
+ raise ValueError('bad color "%s" - expected "%s"%s'
+ % (self._string, s, context))
+ self._pos += len(s)
+
+ def expectEnd(self):
+ if self._pos != len(self._string):
+ raise ValueError('junk at end of color "%s"' % self._string)
+
+ def getToken(self):
+ m = _token_re.match(self._string, self._pos)
+ if m:
+ token = m.group(0)
+
+ self._pos = m.end(0)
+ return token
+ return None
+
+ def parseNumber(self, context=''):
+ m = _number_re.match(self._string, self._pos)
+ if m:
+ self._pos = m.end(0)
+ return float(m.group(0))
+ raise ValueError('bad color "%s" - expected a number%s'
+ % (self._string, context))
+
+ def parseColor(self):
+ self.skipws()
+
+ token = self.getToken()
+ if token:
+ if token == 'rgb':
+ return self.parseRGB()
+ elif token == 'hsl':
+ return self.parseHSL()
+ elif token == 'hwb':
+ return self.parseHWB()
+ elif token == 'cmyk':
+ return self.parseCMYK()
+ elif token == 'gray' or token == 'grey':
+ return self.parseGray()
+
+ try:
+ r, g, b = _x11_colors[token]
+ except KeyError:
+ raise ValueError('unknown color name "%s"' % token)
+
+ self.expectEnd()
+
+ return RGB(r / 255.0, g / 255.0, b / 255.0)
+
+ m = _hex_re.match(self._string, self._pos)
+ if m:
+ hrgb = m.group(1)
+
+ if len(hrgb) == 3:
+ r = int('0x' + 2 * hrgb[0], 16)
+ g = int('0x' + 2 * hrgb[1], 16)
+ b = int('0x' + 2 * hrgb[2], 16)
+ else:
+ r = int('0x' + hrgb[0:2], 16)
+ g = int('0x' + hrgb[2:4], 16)
+ b = int('0x' + hrgb[4:6], 16)
+
+ self._pos = m.end(0)
+ self.skipws()
+
+ self.expectEnd()
+
+ return RGB(r / 255.0, g / 255.0, b / 255.0)
+
+ raise ValueError('bad color syntax "%s"' % self._string)
+
+ def parseRGB(self):
+ self.expect('(', 'after "rgb"')
+ self.skipws()
+
+ r = self.parseValue()
+
+ self.skipws()
+ self.expect(',', 'in "rgb"')
+ self.skipws()
+
+ g = self.parseValue()
+
+ self.skipws()
+ self.expect(',', 'in "rgb"')
+ self.skipws()
+
+ b = self.parseValue()
+
+ self.skipws()
+ self.expect(')', 'at end of "rgb"')
+
+ self.skipws()
+ self.expectEnd()
+
+ return RGB(r, g, b)
+
+ def parseHSL(self):
+ self.expect('(', 'after "hsl"')
+ self.skipws()
+
+ h = self.parseAngle()
+
+ self.skipws()
+ self.expect(',', 'in "hsl"')
+ self.skipws()
+
+ s = self.parseValue()
+
+ self.skipws()
+ self.expect(',', 'in "hsl"')
+ self.skipws()
+
+ l = self.parseValue()
+
+ self.skipws()
+ self.expect(')', 'at end of "hsl"')
+
+ self.skipws()
+ self.expectEnd()
+
+ return HSL(h, s, l)
+
+ def parseHWB(self):
+ self.expect('(', 'after "hwb"')
+ self.skipws()
+
+ h = self.parseAngle()
+
+ self.skipws()
+ self.expect(',', 'in "hwb"')
+ self.skipws()
+
+ w = self.parseValue()
+
+ self.skipws()
+ self.expect(',', 'in "hwb"')
+ self.skipws()
+
+ b = self.parseValue()
+
+ self.skipws()
+ self.expect(')', 'at end of "hwb"')
+
+ self.skipws()
+ self.expectEnd()
+
+ return HWB(h, w, b)
+
+ def parseCMYK(self):
+ self.expect('(', 'after "cmyk"')
+ self.skipws()
+
+ c = self.parseValue()
+
+ self.skipws()
+ self.expect(',', 'in "cmyk"')
+ self.skipws()
+
+ m = self.parseValue()
+
+ self.skipws()
+ self.expect(',', 'in "cmyk"')
+ self.skipws()
+
+ y = self.parseValue()
+
+ self.skipws()
+ self.expect(',', 'in "cmyk"')
+ self.skipws()
+
+ k = self.parseValue()
+
+ self.skipws()
+ self.expect(')', 'at end of "cmyk"')
+
+ self.skipws()
+ self.expectEnd()
+
+ return CMYK(c, m, y, k)
+
+ def parseGray(self):
+ self.expect('(', 'after "gray"')
+ self.skipws()
+
+ g = self.parseValue()
+
+ self.skipws()
+        self.expect(')', 'at end of "gray"')
+
+ self.skipws()
+ self.expectEnd()
+
+ return Gray(g)
+
+ def parseValue(self):
+ n = self.parseNumber()
+ self.skipws()
+ if self._string[self._pos] == '%':
+ n = n / 100.0
+            self._pos += 1
+ return n
+
+ def parseAngle(self):
+ n = self.parseNumber()
+ self.skipws()
+ tok = self.getToken()
+ if tok == 'rad':
+ n = n * 180.0 / math.pi
+ elif tok == 'grad' or tok == 'gon':
+ n = n * 0.9
+ elif tok != 'deg':
+ raise ValueError('bad angle unit "%s"' % tok)
+ return n
+
+_color_re = re.compile(r'\s*(#|rgb|hsl|hwb|cmyk|gray|grey|%s)'
+ % '|'.join(_x11_colors.keys()))
+def isAColor(s):
+ return _color_re.match(s)
+
+def parseColor(s):
+ return ColorParser(s).parseColor()
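
For illustration, parseColor() accepts hex strings, X11 color names, and
functional forms such as rgb()/hsl()/hwb()/cmyk()/gray(), with percentage
or fractional components:

    from dmgbuild import colors

    c = colors.parseColor('rgb(100%, 50%, 0%)').to_rgb()
    print(c.r, c.g, c.b)                  # 1.0 0.5 0.0
    colors.parseColor('#ff8000')          # hex form
    colors.parseColor('cornflowerblue')   # X11 color name
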
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/core.py b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/core.py
new file mode 100644
index 000000000..91f5e6c24
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/core.py
@@ -0,0 +1,592 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+import os
+import pkg_resources
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tempfile
+import tokenize
+import json
+
+try:
+ {}.iteritems
+ iteritems = lambda x: x.iteritems()
+ iterkeys = lambda x: x.iterkeys()
+except AttributeError:
+ iteritems = lambda x: x.items()
+ iterkeys = lambda x: x.keys()
+try:
+ unicode
+except NameError:
+ unicode = str
+
+import biplist
+from mac_alias import *
+from ds_store import *
+
+from . import colors
+from . import licensing
+
+try:
+ from . import badge
+except ImportError:
+ badge = None
+
+_hexcolor_re = re.compile(r'#[0-9a-f]{3}(?:[0-9a-f]{3})?')
+
+class DMGError(Exception):
+ pass
+
+def hdiutil(cmd, *args, **kwargs):
+ plist = kwargs.get('plist', True)
+ all_args = ['/usr/bin/hdiutil', cmd]
+ all_args.extend(args)
+ if plist:
+ all_args.append('-plist')
+ p = subprocess.Popen(all_args, stdout=subprocess.PIPE, close_fds=True)
+ output, errors = p.communicate()
+ if plist:
+ results = biplist.readPlistFromString(output)
+ else:
+ results = output
+ retcode = p.wait()
+ return retcode, results
+
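+# Illustrative note: hdiutil('info') runs "/usr/bin/hdiutil info -plist" and
+# returns an (exit status, parsed plist) tuple; pass plist=False to get the
+# raw output instead.
+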
+# On Python 2 we can just execfile() it, but Python 3 removed execfile()
+def load_settings(filename, settings):
+ if sys.version_info[0] == 2:
+ execfile(filename, settings, settings)
+ else:
+ encoding = 'utf-8'
+ with open(filename, 'rb') as fp:
+ try:
+ encoding = tokenize.detect_encoding(fp.readline)[0]
+ except SyntaxError:
+ pass
+
+ with open(filename, 'r', encoding=encoding) as fp:
+ exec(compile(fp.read(), filename, 'exec'), settings, settings)
+
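+# For reference, a minimal settings file for load_settings() contains plain
+# module-level assignments (the values here are illustrative); because the
+# settings dict is passed as the globals, each assignment lands in it directly:
+#
+#     files = ['dist/MyApp.app']
+#     symlinks = {'Applications': '/Applications'}
+#     icon_size = 128.0
+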
+def load_json(filename, settings):
+ """Read an appdmg .json spec. Uses the defaults for appdmg, rather than
+ the usual defaults for dmgbuild. """
+
+ with open(filename, 'r') as fp:
+ json_data = json.load(fp)
+
+ if 'title' not in json_data:
+ raise ValueError('missing \'title\' in JSON settings file')
+ if 'contents' not in json_data:
+ raise ValueError('missing \'contents\' in JSON settings file')
+
+ settings['volume_name'] = json_data['title']
+ settings['icon'] = json_data.get('icon', None)
+ settings['badge_icon'] = json_data.get('badge-icon', None)
+ bk = json_data.get('background', None)
+ if bk is None:
+ bk = json_data.get('background-color', None)
+ if bk is not None:
+ settings['background'] = bk
+ settings['icon_size'] = json_data.get('icon-size', 80)
+ wnd = json_data.get('window', { 'position': (100, 100),
+ 'size': (640, 480) })
+ pos = wnd.get('position', { 'x': 100, 'y': 100 })
+ siz = wnd.get('size', { 'width': 640, 'height': 480 })
+ settings['window_rect'] = ((pos.get('x', 100), pos.get('y', 100)),
+ (siz.get('width', 640), siz.get('height', 480)))
+ settings['format'] = json_data.get('format', 'UDZO')
+ settings['compression_level'] = json_data.get('compression-level', None)
+ settings['license'] = json_data.get('license', None)
+ files = []
+ symlinks = {}
+ icon_locations = {}
+ for fileinfo in json_data.get('contents', []):
+ if 'path' not in fileinfo:
+ raise ValueError('missing \'path\' in contents in JSON settings file')
+ if 'x' not in fileinfo:
+ raise ValueError('missing \'x\' in contents in JSON settings file')
+ if 'y' not in fileinfo:
+ raise ValueError('missing \'y\' in contents in JSON settings file')
+
+ kind = fileinfo.get('type', 'file')
+ path = fileinfo['path']
+ name = fileinfo.get('name', os.path.basename(path.rstrip('/')))
+ if kind == 'file':
+ files.append((path, name))
+ elif kind == 'link':
+ symlinks[name] = path
+ elif kind == 'position':
+ pass
+ icon_locations[name] = (fileinfo['x'], fileinfo['y'])
+
+ settings['files'] = files
+ settings['symlinks'] = symlinks
+ settings['icon_locations'] = icon_locations
+
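+# For reference, a minimal appdmg-style .json spec accepted by load_json()
+# could look like this (paths and names are illustrative):
+#
+#     {
+#       "title": "My App",
+#       "icon-size": 80,
+#       "contents": [
+#         {"x": 140, "y": 120, "type": "file", "path": "dist/MyApp.app"},
+#         {"x": 400, "y": 120, "type": "link", "path": "/Applications"}
+#       ]
+#     }
+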
+def build_dmg(filename, volume_name, settings_file=None, defines={}, lookForHiDPI=True):
+ settings = {
+ # Default settings
+ 'filename': filename,
+ 'volume_name': volume_name,
+ 'format': 'UDBZ',
+ 'compression_level': None,
+ 'size': None,
+ 'files': [],
+ 'symlinks': {},
+ 'icon': None,
+ 'badge_icon': None,
+ 'background': None,
+ 'show_status_bar': False,
+ 'show_tab_view': False,
+ 'show_toolbar': False,
+ 'show_pathbar': False,
+ 'show_sidebar': False,
+ 'sidebar_width': 180,
+ 'arrange_by': None,
+ 'grid_offset': (0, 0),
+ 'grid_spacing': 100.0,
+ 'scroll_position': (0.0, 0.0),
+ 'show_icon_preview': False,
+ 'show_item_info': False,
+ 'label_pos': 'bottom',
+ 'text_size': 16.0,
+ 'icon_size': 128.0,
+ 'include_icon_view_settings': 'auto',
+ 'include_list_view_settings': 'auto',
+ 'list_icon_size': 16.0,
+ 'list_text_size': 12.0,
+ 'list_scroll_position': (0, 0),
+ 'list_sort_by': 'name',
+ 'list_use_relative_dates': True,
+ 'list_calculate_all_sizes': False,
+ 'list_columns': ('name', 'date-modified', 'size', 'kind', 'date-added'),
+ 'list_column_widths': {
+ 'name': 300,
+ 'date-modified': 181,
+ 'date-created': 181,
+ 'date-added': 181,
+ 'date-last-opened': 181,
+ 'size': 97,
+ 'kind': 115,
+ 'label': 100,
+ 'version': 75,
+ 'comments': 300,
+ },
+ 'list_column_sort_directions': {
+ 'name': 'ascending',
+ 'date-modified': 'descending',
+ 'date-created': 'descending',
+ 'date-added': 'descending',
+ 'date-last-opened': 'descending',
+ 'size': 'descending',
+ 'kind': 'ascending',
+ 'label': 'ascending',
+ 'version': 'ascending',
+ 'comments': 'ascending',
+ },
+ 'window_rect': ((100, 100), (640, 280)),
+ 'default_view': 'icon-view',
+ 'icon_locations': {},
+ 'license': None,
+ 'defines': defines
+ }
+
+ # Execute the settings file
+ if settings_file:
+ # We now support JSON settings files using appdmg's format
+ if settings_file.endswith('.json'):
+ load_json(settings_file, settings)
+ else:
+ load_settings(settings_file, settings)
+
+ # Set up the finder data
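+ # (bwsp, icvp and lsvp are the property-list blobs the Finder stores in a
+ # volume's .DS_Store: window geometry, icon-view options and list-view
+ # options respectively; see the codecs table in ds_store/store.py.)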
+ bounds = settings['window_rect']
+
+ bwsp = {
+ b'ShowStatusBar': settings['show_status_bar'],
+ b'WindowBounds': b'{{%s, %s}, {%s, %s}}' % (bounds[0][0],
+ bounds[0][1],
+ bounds[1][0],
+ bounds[1][1]),
+ b'ContainerShowSidebar': False,
+ b'PreviewPaneVisibility': False,
+ b'SidebarWidth': settings['sidebar_width'],
+ b'ShowTabView': settings['show_tab_view'],
+ b'ShowToolbar': settings['show_toolbar'],
+ b'ShowPathbar': settings['show_pathbar'],
+ b'ShowSidebar': settings['show_sidebar']
+ }
+
+ arrange_options = {
+ 'name': 'name',
+ 'date-modified': 'dateModified',
+ 'date-created': 'dateCreated',
+ 'date-added': 'dateAdded',
+ 'date-last-opened': 'dateLastOpened',
+ 'size': 'size',
+ 'kind': 'kind',
+ 'label': 'label',
+ }
+
+ icvp = {
+ b'viewOptionsVersion': 1,
+ b'backgroundType': 0,
+ b'backgroundColorRed': 1.0,
+ b'backgroundColorGreen': 1.0,
+ b'backgroundColorBlue': 1.0,
+ b'gridOffsetX': float(settings['grid_offset'][0]),
+ b'gridOffsetY': float(settings['grid_offset'][1]),
+ b'gridSpacing': float(settings['grid_spacing']),
+ b'arrangeBy': str(arrange_options.get(settings['arrange_by'], 'none')),
+ b'showIconPreview': settings['show_icon_preview'] == True,
+ b'showItemInfo': settings['show_item_info'] == True,
+ b'labelOnBottom': settings['label_pos'] == 'bottom',
+ b'textSize': float(settings['text_size']),
+ b'iconSize': float(settings['icon_size']),
+ b'scrollPositionX': float(settings['scroll_position'][0]),
+ b'scrollPositionY': float(settings['scroll_position'][1])
+ }
+
+ background = settings['background']
+
+ columns = {
+ 'name': 'name',
+ 'date-modified': 'dateModified',
+ 'date-created': 'dateCreated',
+ 'date-added': 'dateAdded',
+ 'date-last-opened': 'dateLastOpened',
+ 'size': 'size',
+ 'kind': 'kind',
+ 'label': 'label',
+ 'version': 'version',
+ 'comments': 'comments'
+ }
+
+ default_widths = {
+ 'name': 300,
+ 'date-modified': 181,
+ 'date-created': 181,
+ 'date-added': 181,
+ 'date-last-opened': 181,
+ 'size': 97,
+ 'kind': 115,
+ 'label': 100,
+ 'version': 75,
+ 'comments': 300,
+ }
+
+ default_sort_directions = {
+ 'name': 'ascending',
+ 'date-modified': 'descending',
+ 'date-created': 'descending',
+ 'date-added': 'descending',
+ 'date-last-opened': 'descending',
+ 'size': 'descending',
+ 'kind': 'ascending',
+ 'label': 'ascending',
+ 'version': 'ascending',
+ 'comments': 'ascending',
+ }
+
+ lsvp = {
+ b'viewOptionsVersion': 1,
+ b'sortColumn': columns.get(settings['list_sort_by'], 'name'),
+ b'textSize': float(settings['list_text_size']),
+ b'iconSize': float(settings['list_icon_size']),
+ b'showIconPreview': settings['show_icon_preview'],
+ b'scrollPositionX': settings['list_scroll_position'][0],
+ b'scrollPositionY': settings['list_scroll_position'][1],
+ b'useRelativeDates': settings['list_use_relative_dates'],
+ b'calculateAllSizes': settings['list_calculate_all_sizes'],
+ }
+
+ lsvp['columns'] = {}
+ cndx = {}
+
+ for n, column in enumerate(settings['list_columns']):
+ cndx[column] = n
+ width = settings['list_column_widths'].get(column,
+ default_widths[column])
+ asc = 'ascending' == settings['list_column_sort_directions'].get(column,
+ default_sort_directions[column])
+
+ lsvp['columns'][columns[column]] = {
+ 'index': n,
+ 'width': width,
+ 'identifier': columns[column],
+ 'visible': True,
+ 'ascending': asc
+ }
+
+ n = len(settings['list_columns'])
+ for k in iterkeys(columns):
+ if cndx.get(k, None) is None:
+ cndx[k] = n
+ width = default_widths[k]
+ asc = 'ascending' == default_sort_directions[k]
+
+ lsvp['columns'][columns[k]] = {
+ 'index': n,
+ 'width': width,
+ 'identifier': columns[k],
+ 'visible': False,
+ 'ascending': asc
+ }
+
+ n += 1
+
+ default_view = settings['default_view']
+ views = {
+ 'icon-view': b'icnv',
+ 'column-view': b'clmv',
+ 'list-view': b'Nlsv',
+ 'coverflow': b'Flwv'
+ }
+
+ icvl = (b'type', views.get(default_view, b'icnv'))
+
+ include_icon_view_settings = default_view == 'icon-view' \
+ or settings['include_icon_view_settings'] not in \
+ ('auto', 'no', 0, False, None)
+ include_list_view_settings = default_view in ('list-view', 'coverflow') \
+ or settings['include_list_view_settings'] not in \
+ ('auto', 'no', 0, False, None)
+
+ filename = settings['filename']
+ volume_name = settings['volume_name']
+
+ # Construct a writeable image to start with
+ dirname, basename = os.path.split(os.path.realpath(filename))
+ if not basename.endswith('.dmg'):
+ basename += '.dmg'
+ writableFile = tempfile.NamedTemporaryFile(dir=dirname, prefix='.temp',
+ suffix=basename)
+
+ total_size = settings['size']
+ if total_size is None:
+ # Start with a size of 128MB - this way we don't need to calculate the
+ # size of the background image, volume icon, and .DS_Store file (and
+ # 128 MB should be well sufficient for even the most outlandish image
+ # sizes, like an uncompressed 5K multi-resolution TIFF)
+ total_size = 128 * 1024 * 1024
+
+ def roundup(x, n):
+ return x if x % n == 0 else x + n - x % n
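+ # e.g. roundup(5000, 4096) == 8192; file sizes below are padded to whole
+ # 4 KiB allocation blocks when estimating the image size.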
+
+ for path in settings['files']:
+ if isinstance(path, tuple):
+ path = path[0]
+
+ if not os.path.islink(path) and os.path.isdir(path):
+ for dirpath, dirnames, filenames in os.walk(path):
+ for f in filenames:
+ fp = os.path.join(dirpath, f)
+ total_size += roundup(os.lstat(fp).st_size, 4096)
+ else:
+ total_size += roundup(os.lstat(path).st_size, 4096)
+
+ for name,target in iteritems(settings['symlinks']):
+ total_size += 4096
+
+ total_size = str(max(total_size // 1024, 1024)) + 'K' # floor division keeps the size integral on Python 3
+
+ ret, output = hdiutil('create',
+ '-ov',
+ '-volname', volume_name,
+ '-fs', 'HFS+',
+ '-fsargs', '-c c=64,a=16,e=16',
+ '-size', total_size,
+ writableFile.name)
+
+ if ret:
+ raise DMGError('Unable to create disk image')
+
+ ret, output = hdiutil('attach',
+ '-nobrowse',
+ '-owners', 'off',
+ '-noidme',
+ writableFile.name)
+
+ if ret:
+ raise DMGError('Unable to attach disk image')
+
+ try:
+ for info in output['system-entities']:
+ if info.get('mount-point', None):
+ device = info['dev-entry']
+ mount_point = info['mount-point']
+
+ icon = settings['icon']
+ if badge:
+ badge_icon = settings['badge_icon']
+ else:
+ badge_icon = None
+ icon_target_path = os.path.join(mount_point, '.VolumeIcon.icns')
+ if icon:
+ shutil.copyfile(icon, icon_target_path)
+ elif badge_icon:
+ badge.badge_disk_icon(badge_icon, icon_target_path)
+
+ if icon or badge_icon:
+ subprocess.call(['/usr/bin/SetFile', '-a', 'C', mount_point])
+
+ background_bmk = None
+
+ if not isinstance(background, (str, unicode)):
+ pass
+ elif colors.isAColor(background):
+ c = colors.parseColor(background).to_rgb()
+
+ icvp['backgroundType'] = 1
+ icvp['backgroundColorRed'] = float(c.r)
+ icvp['backgroundColorGreen'] = float(c.g)
+ icvp['backgroundColorBlue'] = float(c.b)
+ else:
+ if os.path.isfile(background):
+ # look to see if there are HiDPI resources available
+
+ if lookForHiDPI is True:
+ name, extension = os.path.splitext(os.path.basename(background))
+ orderedImages = [background]
+ imageDirectory = os.path.dirname(background)
+ if imageDirectory == '':
+ imageDirectory = '.'
+ for candidateName in os.listdir(imageDirectory):
+ hasScale = re.match(
+ r'^(?P<name>.+)@(?P<scale>\d+)x(?P<extension>\.\w+)$',
+ candidateName)
+ if hasScale and name == hasScale.group('name') and \
+ extension == hasScale.group('extension'):
+ scale = int(hasScale.group('scale'))
+ if len(orderedImages) < scale:
+ orderedImages += [None] * (scale - len(orderedImages))
+ orderedImages[scale - 1] = os.path.join(imageDirectory, candidateName)
+
+ if len(orderedImages) > 1:
+ # compile the grouped tiff
+ backgroundFile = tempfile.NamedTemporaryFile(suffix='.tiff')
+ background = backgroundFile.name
+ output = tempfile.TemporaryFile(mode='w+')
+ try:
+ subprocess.check_call(
+ ['/usr/bin/tiffutil', '-cathidpicheck'] +
+ list(filter(None, orderedImages)) +
+ ['-out', background], stdout=output, stderr=output)
+ except Exception as e:
+ output.seek(0)
+ raise ValueError(
+ 'unable to compile combined HiDPI file "%s" got error: %s\noutput: %s'
+ % (background, str(e), output.read()))
+
+ _, kind = os.path.splitext(background)
+ path_in_image = os.path.join(mount_point, '.background' + kind)
+ shutil.copyfile(background, path_in_image)
+ elif pkg_resources.resource_exists('dmgbuild', 'resources/' + background + '.tiff'):
+ tiffdata = pkg_resources.resource_string(
+ 'dmgbuild',
+ 'resources/' + background + '.tiff')
+ path_in_image = os.path.join(mount_point, '.background.tiff')
+
+ with open(path_in_image, 'wb') as f: # resource_string() returns bytes
+ f.write(tiffdata)
+ else:
+ raise ValueError('background file "%s" not found' % background)
+
+ alias = Alias.for_file(path_in_image)
+ background_bmk = Bookmark.for_file(path_in_image)
+
+ icvp['backgroundType'] = 2
+ icvp['backgroundImageAlias'] = biplist.Data(alias.to_bytes())
+
+ for f in settings['files']:
+ if isinstance(f, tuple):
+ f_in_image = os.path.join(mount_point, f[1])
+ f = f[0]
+ else:
+ basename = os.path.basename(f.rstrip('/'))
+ f_in_image = os.path.join(mount_point, basename)
+
+ # use system ditto command to preserve code signing, etc.
+ subprocess.call(['/usr/bin/ditto', f, f_in_image])
+
+ for name,target in iteritems(settings['symlinks']):
+ name_in_image = os.path.join(mount_point, name)
+ os.symlink(target, name_in_image)
+
+ userfn = settings.get('create_hook', None)
+ if callable(userfn):
+ userfn(mount_point, settings)
+
+ image_dsstore = os.path.join(mount_point, '.DS_Store')
+
+ with DSStore.open(image_dsstore, 'w+') as d:
+ d['.']['vSrn'] = ('long', 1)
+ d['.']['bwsp'] = bwsp
+ if include_icon_view_settings:
+ d['.']['icvp'] = icvp
+ if background_bmk:
+ d['.']['pBBk'] = background_bmk
+ if include_list_view_settings:
+ d['.']['lsvp'] = lsvp
+ d['.']['icvl'] = icvl
+
+ for k,v in iteritems(settings['icon_locations']):
+ d[k]['Iloc'] = v
+
+ # Delete .Trashes, if it gets created
+ shutil.rmtree(os.path.join(mount_point, '.Trashes'), True)
+ except:
+ # Always try to detach
+ hdiutil('detach', '-force', device, plist=False)
+ raise
+
+ ret, output = hdiutil('detach', device, plist=False)
+
+ if ret:
+ hdiutil('detach', '-force', device, plist=False)
+ raise DMGError('Unable to detach device cleanly')
+
+ # Shrink the output to the minimum possible size
+ ret, output = hdiutil('resize',
+ '-quiet',
+ '-sectors', 'min',
+ writableFile.name,
+ plist=False)
+
+ if ret:
+ raise DMGError('Unable to shrink')
+
+ key_prefix = {'UDZO': 'zlib', 'UDBZ': 'bzip2', 'ULFO': 'lzfse'}
+ compression_level = settings['compression_level']
+ if settings['format'] in key_prefix and compression_level:
+ compression_args = [
+ '-imagekey',
+ key_prefix[settings['format']] + '-level=' + str(compression_level)
+ ]
+ else:
+ compression_args = []
+
+ ret, output = hdiutil('convert', writableFile.name,
+ '-format', settings['format'],
+ '-ov',
+ '-o', filename, *compression_args)
+
+ if ret:
+ raise DMGError('Unable to convert')
+
+ if settings['license']:
+ ret, output = hdiutil('unflatten', '-quiet', filename, plist=False)
+
+ if ret:
+ raise DMGError('Unable to unflatten to add license')
+
+ licensing.add_license(filename, settings['license'])
+
+ ret, output = hdiutil('flatten', '-quiet', filename, plist=False)
+
+ if ret:
+ raise DMGError('Unable to flatten after adding license')
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/licensing.py b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/licensing.py
new file mode 100644
index 000000000..5c2679096
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/licensing.py
@@ -0,0 +1,461 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+import os
+import struct
+
+from .resources import *
+
+# ISO language and country codes to Macintosh Region codes (from Script.h)
+# <key> == CFLocaleCreateCanonicalLocaleIdentifierFromScriptManagerCodes(NULL,
+# kTextLanguageDontCare,
+# <value>)
+region_codes = {
+ "en_US": 0,
+ "fr_FR": 1,
+ "en_GB": 2,
+ "de_DE": 3,
+ "it_IT": 4,
+ "nl_NL": 5,
+ "nl_BE": 6,
+ "sv_SE": 7,
+ "es_ES": 8,
+ "da_DK": 9,
+ "pt_PT": 10,
+ "fr_CA": 11,
+ "nb_NO": 12,
+ "he_IL": 13,
+ "ja_JP": 14,
+ "en_AU": 15,
+ "ar": 16,
+ "fi_FI": 17,
+ "fr_CH": 18,
+ "de_CH": 19,
+ "el_GR": 20,
+ "is_IS": 21,
+ "mt_MT": 22,
+ "el_CY": 23,
+ "tr_TR": 24,
+ "hi_IN": 33,
+ "ur_PK": 34,
+ "it_CH": 36,
+ "ro_RO": 39,
+ "grc": 40,
+ "lt_LT": 41,
+ "pl_PL": 42,
+ "hu_HU": 43,
+ "et_EE": 44,
+ "lv_LV": 45,
+ "se": 46,
+ "fo_FO": 47,
+ "fa_IR": 48,
+ "ru_RU": 49,
+ "ga_IE": 50,
+ "ko_KR": 51,
+ "zh_CN": 52,
+ "zh_TW": 53,
+ "th_TH": 54,
+ "cs_CZ": 56,
+ "sk_SK": 57,
+ "bn": 60,
+ "be_BY": 61,
+ "uk_UA": 62,
+ "sr_RS": 65,
+ "sl_SI": 66,
+ "mk_MK": 67,
+ "hr_HR": 68,
+ "pt_BR": 71,
+ "bg_BG": 72,
+ "ca_ES": 73,
+ "gd": 75,
+ "gv": 76,
+ "br": 77,
+ "iu_CA": 78,
+ "cy": 79,
+ "ga-Latg_IE": 81,
+ "en_CA": 82,
+ "dz_BT": 83,
+ "hy_AM": 84,
+ "ka_GE": 85,
+ "es_419": 86,
+ "to_TO": 88,
+ "fr_001": 91,
+ "de_AT": 92,
+ "gu_IN": 94,
+ "pa": 95,
+ "ur_IN": 96,
+ "vi_VN": 97,
+ "fr_BE": 98,
+ "uz_UZ": 99,
+ "en_SG": 100,
+ "nn_NO": 101,
+ "af_ZA": 102,
+ "eo": 103,
+ "mr_IN": 104,
+ "bo": 105,
+ "ne_NP": 106,
+ "kl": 107,
+ "en_IE": 108
+}
+
+# Map of region constants to script constants (from Script.h)
+# TextEncoding textEncoding;
+# GetTextEncodingFromScriptInfo(kTextScriptDontCare, kTextLanguageDontCare, <key>, &textEncoding);
+# <value> == GetTextEncodingBase(textEncoding);
+script_codes = {
+ 0: 0,
+ 1: 0,
+ 2: 0,
+ 3: 0,
+ 4: 0,
+ 5: 0,
+ 6: 0,
+ 7: 0,
+ 8: 0,
+ 9: 0,
+ 10: 0,
+ 11: 0,
+ 12: 0,
+ 13: 5,
+ 14: 1,
+ 15: 0,
+ 16: 4,
+ 17: 0,
+ 18: 0,
+ 19: 0,
+ 20: 6,
+ 21: 37,
+ 22: 0,
+ 23: 6,
+ 24: 35,
+ 25: 36,
+ 26: 0,
+ 27: 0,
+ 30: 0,
+ 31: 0,
+ 32: 0,
+ 33: 9,
+ 34: 4,
+ 35: 35,
+ 36: 0,
+ 37: 0,
+ 39: 38,
+ 40: 6,
+ 41: 29,
+ 42: 29,
+ 43: 29,
+ 44: 29,
+ 45: 29,
+ 46: 0,
+ 47: 37,
+ 48: 140,
+ 49: 7,
+ 50: 39,
+ 51: 3,
+ 52: 25,
+ 53: 2,
+ 54: 21,
+ 56: 29,
+ 57: 29,
+ 59: 29,
+ 60: 13,
+ 61: 7,
+ 62: 7,
+ 64: 6,
+ 65: 7,
+ 66: 36,
+ 67: 7,
+ 68: 36,
+ 70: 0,
+ 71: 0,
+ 72: 7,
+ 73: 0,
+ 75: 39,
+ 76: 39,
+ 77: 39,
+ 78: 236,
+ 79: 39,
+ 81: 40,
+ 82: 0,
+ 83: 26,
+ 84: 24,
+ 85: 23,
+ 86: 0,
+ 88: 0,
+ 91: 0,
+ 92: 0,
+ 94: 11,
+ 95: 10,
+ 96: 4,
+ 97: 30,
+ 98: 0,
+ 99: 7,
+ 100: 0,
+ 101: 0,
+ 102: 0,
+ 103: 0,
+ 104: 9,
+ 105: 26,
+ 106: 9,
+ 107: 0,
+ 108: 0
+}
+
+# Map of TextEncodingBase constants to Python encoder names (from TextCommon.h)
+encodings_map = {
+ 0: 'mac_roman', # kTextEncodingMacRoman
+ 1: 'shift_jis', # kTextEncodingMacJapanese
+ 2: 'big5', # kTextEncodingMacChineseTrad
+ 3: 'euc_kr', # kTextEncodingMacKorean
+ 4: 'mac_arabic', # kTextEncodingMacArabic
+ 6: 'mac_greek', # kTextEncodingMacGreek
+ 7: 'mac_cyrillic', # kTextEncodingMacCyrillic
+ 21: 'iso8859_11', # kTextEncodingMacThai
+ 25: 'euc-cn', # kTextEncodingMacChineseSimp
+ 29: 'mac_centeuro', # kTextEncodingMacCentralEurRoman
+ 35: 'mac_turkish', # kTextEncodingMacTurkish
+ 36: 'mac_croatian', # kTextEncodingMacCroatian
+ 37: 'mac_iceland', # kTextEncodingMacIcelandic
+ 38: 'mac_romanian', # kTextEncodingMacRomanian
+ 140: 'mac_farsi' # kTextEncodingMacFarsi
+}
+
+# Standard fonts
+fonts = {
+ 'New York': 2,
+ 'Geneva': 3,
+ 'Monaco': 4,
+ 'Venice': 5,
+ 'London': 6,
+ 'Athens': 7,
+ 'San Francisco': 8,
+ 'Toronto': 9,
+ 'Cairo': 11,
+ 'Los Angeles': 12,
+ 'Times': 20,
+ 'Helvetica': 21,
+ 'Courier': 22,
+ 'Symbol': 23,
+ 'Mobile': 24
+}
+
+# Buttons (these come from the SLAResources file which you can find in the SLA
+# SDK on developer.apple.com)
+default_buttons = {
+ 0: (
+ b'English',
+ b'Agree',
+ b'Disagree',
+ b'Print',
+ b'Save',
+ b'If you agree with the terms of this license, press "Agree" to '
+ b'install the software. If you do not agree, press "Disagree".'
+ ),
+
+ 3: (
+ b'Deutsch',
+ b'Akzeptieren',
+ b'Ablehnen',
+ b'Drucken',
+ b'Sichern...',
+ b'Klicken Sie in \xd2Akzeptieren\xd3, wenn Sie mit den Bestimmungen des Software-Lizenzvertrags einverstanden sind. Falls nicht, bitte \xd2Ablehnen\xd3 anklicken. Sie k\x9annen die Software nur installieren, wenn Sie \xd2Akzeptieren\xd3 angeklickt haben.'
+ ),
+
+ 8: (
+ b'Espa\x96ol',
+ b'Aceptar',
+ b'No aceptar',
+ b'Imprimir',
+ b'Guardar...',
+ b'Si est\x87 de acuerdo con los t\x8erminos de esta licencia, pulse "Aceptar" para instalar el software. En el supuesto de que no est\x8e de acuerdo con los t\x8erminos de esta licencia, pulse "No aceptar."'
+ ),
+
+ 1: (
+ b'Fran\x8dais',
+ b'Accepter',
+ b'Refuser',
+ b'Imprimer',
+ b'Enregistrer...',
+ b'Si vous acceptez les termes de la pr\x8esente licence, cliquez sur "Accepter" afin d\'installer le logiciel. Si vous n\'\x90tes pas d\'accord avec les termes de la licence, cliquez sur "Refuser".'
+ ),
+
+ 4: (
+ b'Italiano',
+ b'Accetto',
+ b'Rifiuto',
+ b'Stampa',
+ b'Registra...',
+ b'Se accetti le condizioni di questa licenza, fai clic su "Accetto" per installare il software. Altrimenti fai clic su "Rifiuto".'
+ ),
+
+ 14: (
+ b'Japanese',
+ b'\x93\xaf\x88\xd3\x82\xb5\x82\xdc\x82\xb7',
+ b'\x93\xaf\x88\xd3\x82\xb5\x82\xdc\x82\xb9\x82\xf1',
+ b'\x88\xf3\x8d\xfc\x82\xb7\x82\xe9',
+ b'\x95\xdb\x91\xb6...',
+ b'\x96{\x83\\\x83t\x83g\x83E\x83G\x83A\x8eg\x97p\x8b\x96\x91\xf8\x8c_\x96\xf1\x82\xcc\x8f\xf0\x8c\x8f\x82\xc9\x93\xaf\x88\xd3\x82\xb3\x82\xea\x82\xe9\x8f\xea\x8d\x87\x82\xc9\x82\xcd\x81A\x83\\\x83t\x83g\x83E\x83G\x83A\x82\xf0\x83C\x83\x93\x83X\x83g\x81[\x83\x8b\x82\xb7\x82\xe9\x82\xbd\x82\xdf\x82\xc9\x81u\x93\xaf\x88\xd3\x82\xb5\x82\xdc\x82\xb7\x81v\x82\xf0\x89\x9f\x82\xb5\x82\xc4\x82\xad\x82\xbe\x82\xb3\x82\xa2\x81B\x81@\x93\xaf\x88\xd3\x82\xb3\x82\xea\x82\xc8\x82\xa2\x8f\xea\x8d\x87\x82\xc9\x82\xcd\x81A\x81u\x93\xaf\x88\xd3\x82\xb5\x82\xdc\x82\xb9\x82\xf1\x81v\x82\xf0\x89\x9f\x82\xb5\x82\xc4\x82\xad\x82\xbe\x82\xb3\x82\xa2\x81B'
+ ),
+
+ 5: (
+ b'Nederlands',
+ b'Ja',
+ b'Nee',
+ b'Print',
+ b'Bewaar...',
+ b'Indien u akkoord gaat met de voorwaarden van deze licentie, kunt u op \'Ja\' klikken om de programmatuur te installeren. Indien u niet akkoord gaat, klikt u op \'Nee\'.'
+ ),
+
+ 7: (
+ b'Svensk',
+ b'Godk\x8anns',
+ b'Avb\x9ajs',
+ b'Skriv ut',
+ b'Spara...',
+ b'Om Du godk\x8anner licensvillkoren klicka p\x8c "Godk\x8anns" f\x9ar att installera programprodukten. Om Du inte godk\x8anner licensvillkoren, klicka p\x8c "Avb\x9ajs".'
+ ),
+
+ 71: (
+ b'Portugu\x90s',
+ b'Concordar',
+ b'Discordar',
+ b'Imprimir',
+ b'Salvar...',
+ b'Se est\x87 de acordo com os termos desta licen\x8da, pressione "Concordar" para instalar o software. Se n\x8bo est\x87 de acordo, pressione "Discordar".'
+ ),
+
+ 52: (
+ b'Simplified Chinese',
+ b'\xcd\xac\xd2\xe2',
+ b'\xb2\xbb\xcd\xac\xd2\xe2',
+ b'\xb4\xf2\xd3\xa1',
+ b'\xb4\xe6\xb4\xa2\xa1\xad',
+ b'\xc8\xe7\xb9\xfb\xc4\xfa\xcd\xac\xd2\xe2\xb1\xbe\xd0\xed\xbf\xc9\xd0\xad\xd2\xe9\xb5\xc4\xcc\xf5\xbf\xee\xa3\xac\xc7\xeb\xb0\xb4\xa1\xb0\xcd\xac\xd2\xe2\xa1\xb1\xc0\xb4\xb0\xb2\xd7\xb0\xb4\xcb\xc8\xed\xbc\xfe\xa1\xa3\xc8\xe7\xb9\xfb\xc4\xfa\xb2\xbb\xcd\xac\xd2\xe2\xa3\xac\xc7\xeb\xb0\xb4\xa1\xb0\xb2\xbb\xcd\xac\xd2\xe2\xa1\xb1\xa1\xa3'
+ ),
+
+ 53: (
+ b'Traditional Chinese',
+ b'\xa6P\xb7N',
+ b'\xa4\xa3\xa6P\xb7N',
+ b'\xa6C\xa6L',
+ b'\xc0x\xa6s\xa1K',
+ b'\xa6p\xaaG\xb1z\xa6P\xb7N\xa5\xbb\xb3\\\xa5i\xc3\xd2\xb8\xcc\xaa\xba\xb1\xf8\xb4\xda\xa1A\xbd\xd0\xab\xf6\xa1\xa7\xa6P\xb7N\xa1\xa8\xa5H\xa6w\xb8\xcb\xb3n\xc5\xe9\xa1C\xa6p\xaaG\xa4\xa3\xa6P\xb7N\xa1A\xbd\xd0\xab\xf6\xa1\xa7\xa4\xa3\xa6P\xb7N\xa1\xa8\xa1C'
+ ),
+
+ 9: (
+ b'Dansk',
+ b'Enig',
+ b'Uenig',
+ b'Udskriv',
+ b'Arkiver...',
+ b'Hvis du accepterer betingelserne i licensaftalen, skal du klikke p\x8c \xd2Enig\xd3 for at installere softwaren. Klik p\x8c \xd2Uenig\xd3 for at annullere installeringen.'
+ ),
+
+ 17: (
+ b'Suomi',
+ b'Hyv\x8aksyn',
+ b'En hyv\x8aksy',
+ b'Tulosta',
+ b'Tallenna\xc9',
+ b'Hyv\x8aksy lisenssisopimuksen ehdot osoittamalla \xd5Hyv\x8aksy\xd5. Jos et hyv\x8aksy sopimuksen ehtoja, osoita \xd5En hyv\x8aksy\xd5.'
+ ),
+
+ 51: (
+ b'Korean',
+ b'\xb5\xbf\xc0\xc7',
+ b'\xb5\xbf\xc0\xc7 \xbe\xc8\xc7\xd4',
+ b'\xc7\xc1\xb8\xb0\xc6\xae',
+ b'\xc0\xfa\xc0\xe5...',
+ b'\xbb\xe7\xbf\xeb \xb0\xe8\xbe\xe0\xbc\xad\xc0\xc7 \xb3\xbb\xbf\xeb\xbf\xa1 \xb5\xbf\xc0\xc7\xc7\xcf\xb8\xe9, "\xb5\xbf\xc0\xc7" \xb4\xdc\xc3\xdf\xb8\xa6 \xb4\xad\xb7\xaf \xbc\xd2\xc7\xc1\xc6\xae\xbf\xfe\xbe\xee\xb8\xa6 \xbc\xb3\xc4\xa1\xc7\xcf\xbd\xca\xbd\xc3\xbf\xc0. \xb5\xbf\xc0\xc7\xc7\xcf\xc1\xf6 \xbe\xca\xb4\xc2\xb4\xd9\xb8\xe9, "\xb5\xbf\xc0\xc7 \xbe\xc8\xc7\xd4" \xb4\xdc\xc3\xdf\xb8\xa6 \xb4\xa9\xb8\xa3\xbd\xca\xbd\xc3\xbf\xc0.'
+ ),
+
+ 12: (
+ b'Norsk',
+ b'Enig',
+ b'Ikke enig',
+ b'Skriv ut',
+ b'Arkiver...',
+ b'Hvis De er enig i bestemmelsene i denne lisensavtalen, klikker De p\x8c "Enig"-knappen for \x8c installere programvaren. Hvis De ikke er enig, klikker De p\x8c "Ikke enig".'
+ ),
+}
+
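+# The 'LPic' resource built below is a small big-endian table: a header
+# holding the default language ID and the entry count, then one (language
+# ID, resource ID offset, two-byte-charset flag) triple per localised
+# license; add_license() adds each offset to 5000 to number the matching
+# 'TEXT'/'STR#' resources.
+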
+class LPicResource (Resource):
+ def __init__(self, res_id, res_name, default_lang, lpic, res_attrs=0):
+ data = []
+ data.append(struct.pack(b'>HH', default_lang, len(lpic)))
+ for lang,rid,two_byte in lpic:
+ data.append(struct.pack(b'>HHH', lang, rid, int(two_byte)))
+ super(LPicResource, self).__init__(b'LPic', res_id, res_name,
+ b''.join(data), res_attrs)
+
+def get_encoder_name(locale):
+ if locale not in region_codes:
+ raise Exception("Cannot determine region code for locale '%s'" % locale)
+ region_code = region_codes[locale]
+
+ if region_code not in script_codes:
+ raise Exception("Cannot determine script code for locale '%s'" % locale)
+ script_code = script_codes[region_code]
+
+ if script_code not in encodings_map:
+ raise Exception("Cannot determine Python encoder name for locale '%s' - "
+ "encode the string data manually as a byte array instead" % locale)
+ return encodings_map[script_code]
+
+def maybe_encode(s, encoding):
+ if isinstance(s, bytes):
+ return s
+ return s.encode(encoding)
+
+def add_license(filename, license_info):
+ """Add a license agreement to the specified disk image file, which should
+ have been unflattened first."""
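+ # license_info is a dictionary; the keys consulted below look like this
+ # (values are illustrative):
+ #
+ # { 'default-language': 'en_US',
+ # 'licenses': { 'en_US': '/path/to/license.txt' }, # text, RTF, or a file path
+ # 'buttons': { 'en_US': [ ... ] } } # optional; six strings per language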
+
+ fork = ResourceFork.from_file(filename)
+
+ default_lang = license_info.get('default-language', 'en_US')
+ default_lang_id = region_codes.get(default_lang, 0)
+
+ lpic = []
+ ndx = 1
+ for language,license_data in license_info['licenses'].items():
+ if language not in region_codes:
+ raise Exception("Unknown language '" + language + "'. Valid languages are: " +
+ ", ".join(sorted(region_codes.keys())))
+ encoding_name = get_encoder_name(language)
+ lang_id = region_codes[language]
+
+ is_two_byte = lang_id in (14, 51, 52, 53) # Japanese, Korean, SimpChinese, TradChinese
+
+ if os.path.isfile(license_data):
+ with open(license_data) as f:
+ license_data = f.read()
+
+ if license_data.startswith('{\\rtf1'):
+ fork.add(Resource(b'RTF ', 5000 + ndx, language + ' SLA',
+ str(license_data)))
+ else:
+ fork.add(TextResource(5000 + ndx, language + ' SLA',
+ maybe_encode(license_data, encoding_name)))
+ fork.add(StyleResource(5000 + ndx, language + ' SLA',
+ [Style(0, 12, 9, Style.Helvetica,
+ 0, 0, (0, 0, 0))]))
+
+ buttons = license_info.get('buttons', {}).get(language, None)
+ if buttons is None:
+ buttons = default_buttons.get(lang_id, None)
+ if buttons is None:
+ buttons = default_buttons[0]
+
+ buttons = [maybe_encode(b, encoding_name) for b in buttons]
+
+ fork.add(StringListResource(5000 + ndx, language + ' Buttons',
+ buttons))
+
+ lpic.append((lang_id, ndx, is_two_byte))
+
+ ndx += 1
+
+ fork.add(LPicResource(5000, None, default_lang_id, lpic))
+
+ fork.write_to_file(filename)
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/qt_attribution.json b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/qt_attribution.json
new file mode 100644
index 000000000..9f318e6d2
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/qt_attribution.json
@@ -0,0 +1,13 @@
+{
+ "Id": "dmgbuild",
+ "Name": "dmgbuild",
+ "QDocModule": "qbs",
+ "QtUsage": "Used in the qbs dmg module for building Apple disk images.",
+ "Description": "macOS command line utility to build disk images",
+ "Homepage": "https://bitbucket.org/al45tair/dmgbuild",
+ "Version": "1.3.0~16",
+ "License": "MIT License",
+ "LicenseId": "MIT",
+ "LicenseFile": "LICENSE",
+ "Copyright": "Copyright (c) 2014 Alastair Houghton"
+}
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/resources.py b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/resources.py
new file mode 100644
index 000000000..d2f58e64a
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/dmgbuild/resources.py
@@ -0,0 +1,355 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+import struct
+
+class Resource (object):
+ def __init__(self, res_type, res_id, res_name, data=None, res_attrs=0):
+ self.res_type = str(res_type)
+ self.res_id = res_id
+ if isinstance(res_name, basestring):
+ res_name = str(res_name)
+ self.res_name = res_name
+ self.res_attrs = res_attrs
+ if data is None:
+ self.data = None
+ else:
+ self.data = str(data)
+
+ self.data_offset = None
+ self.name_offset = None
+
+ def __repr__(self):
+ return 'Resource(%r, %r, %r, data=%r, res_attrs=%r)' % (self.res_type,
+ self.res_id,
+ self.res_name,
+ self.data,
+ self.res_attrs)
+
+class TMPLResource (Resource):
+ def __init__(self, res_id, res_name, tmpl, res_attrs=0):
+ data = []
+ for name,typecode in tmpl:
+ data.append(struct.pack(b'B', len(name)))
+ data.append(str(name))
+ data.append(str(typecode))
+ super(TMPLResource, self).__init__(b'TMPL', res_id, res_name,
+ b''.join(data), res_attrs)
+
+class StringListResource (Resource):
+ def __init__(self, res_id, res_name, strings, res_attrs=0):
+ data = []
+ data.append(struct.pack(b'>H', len(strings)))
+ for s in strings:
+ data.append(struct.pack(b'B', len(s)))
+ data.append(str(s))
+ super(StringListResource, self).__init__(b'STR#', res_id, res_name,
+ b''.join(data), res_attrs)
+
+class TextResource (Resource):
+ def __init__(self, res_id, res_name, string, res_attrs=0):
+ super(TextResource, self).__init__(b'TEXT', res_id, res_name,
+ str(string), res_attrs)
+
+class Style (object):
+ # Fonts
+ NewYork = 2
+ Geneva = 3
+ Monaco = 4
+ Venice = 5
+ London = 6
+ Athens = 7
+ SanFrancisco = 8
+ Toronto = 9
+ Cairo = 11
+ LosAngeles = 12
+ Times = 20
+ Helvetica = 21
+ Courier = 22
+ Symbol = 23
+ Mobile = 24
+
+ # Styles
+ Bold = 0x0100
+ Italic = 0x0200
+ Underline = 0x0400
+ Outline = 0x0800
+ Shadow = 0x1000
+ Condense = 0x2000
+ Expand = 0x4000
+
+ def __init__(self, start_character, height, ascent, font_id, face,
+ size, color):
+ self.start_character = start_character
+ self.height = height
+ self.ascent = ascent
+ self.font_id = font_id
+ self.face = face
+ self.size = size
+ self.color = color
+
+ def __repr__(self):
+ styles = []
+ if self.face & Style.Bold:
+ styles.append('Style.Bold')
+ if self.face & Style.Italic:
+ styles.append('Style.Italic')
+ if self.face & Style.Underline:
+ styles.append('Style.Underline')
+ if self.face & Style.Outline:
+ styles.append('Style.Outline')
+ if self.face & Style.Shadow:
+ styles.append('Style.Shadow')
+ if self.face & Style.Condense:
+ styles.append('Style.Condense')
+ if self.face & Style.Expand:
+ styles.append('Style.Expand')
+ if self.face & ~0x4f00:
+ styles.append('%#06x' % (self.face & ~0x4f00))
+ if styles:
+ styles = '|'.join(styles)
+ else:
+ styles = '0'
+
+ font_revmap = {
+ 2: 'Style.NewYork',
+ 3: 'Style.Geneva',
+ 4: 'Style.Monaco',
+ 5: 'Style.Venice',
+ 6: 'Style.London',
+ 7: 'Style.Athens',
+ 8: 'Style.SanFrancisco',
+ 9: 'Style.Toronto',
+ 11: 'Style.Cairo',
+ 12: 'Style.LosAngeles',
+ 20: 'Style.Times',
+ 21: 'Style.Helvetica',
+ 22: 'Style.Courier',
+ 23: 'Style.Symbol',
+ 24: 'Style.Mobile'
+ }
+
+ font = font_revmap.get(self.font_id, '%s' % self.font_id)
+
+ return 'Style(%r, %r, %r, %s, %s, %r, %r)' % (
+ self.start_character,
+ self.height,
+ self.ascent,
+ font,
+ styles,
+ self.size,
+ self.color)
+
+class StyleResource (Resource):
+ def __init__(self, res_id, res_name, styles, res_attrs=0):
+ data = []
+ data.append(struct.pack(b'>H', len(styles)))
+ for style in styles:
+ data.append(struct.pack(b'>LHHHHHHHH',
+ style.start_character,
+ style.height,
+ style.ascent,
+ style.font_id,
+ style.face,
+ style.size,
+ style.color[0],
+ style.color[1],
+ style.color[2]))
+ super(StyleResource, self).__init__(b'styl', res_id, res_name,
+ b''.join(data), res_attrs)
+
+class ResourceFork (object):
+ def __init__(self, resources=None):
+ self.types = {}
+ self.attrs = 0
+ if resources is not None:
+ for res in resources:
+ self.add(res)
+
+ @classmethod
+ def from_data(cls, data):
+ if len(data) < 16:
+ raise ValueError('Bad resource data - data too short')
+
+ # Read the header
+ data_start, map_start, data_len, map_len = struct.unpack(b'>LLLL',
+ data[0:16])
+
+ if data_start + data_len > len(data):
+ raise ValueError('Bad resource data - data out of range')
+ if map_start + map_len > len(data):
+ raise ValueError('Bad resource data - map out of range')
+ if map_len < 30:
+ raise ValueError('Bad resource data - map too short')
+
+ # Read the map header
+ fork_attrs, type_offset, name_offset, max_type_ndx \
+ = struct.unpack(b'>HHHH', data[map_start + 22:map_start + 30])
+ num_types = max_type_ndx + 1
+
+ if type_offset + 8 * num_types > map_len:
+ raise ValueError('Bad resource data - type data outside map')
+
+ if name_offset > map_len:
+ raise ValueError('Bad resource data - names outside map')
+
+ type_offset += map_start
+ name_offset += map_start
+
+ result = ResourceFork()
+
+ # Now read the type list
+ for ntype in range(0, num_types):
+ type_pos = 2 + type_offset + 8 * ntype
+ res_type, max_item_ndx, ref_offset \
+ = struct.unpack(b'>4sHH', data[type_pos:type_pos+8])
+ num_items = max_item_ndx + 1
+
+ result.types[res_type] = []
+
+ ref_list_offset = type_offset + ref_offset
+ if ref_list_offset + 12 * num_items > map_start + map_len:
+ raise ValueError('Bad resource data - ref list outside map')
+
+ for nitem in range(0, num_items):
+ ref_elt = ref_list_offset + 12 * nitem
+ res_id, res_name_offset, data_offset \
+ = struct.unpack(b'>hHL', data[ref_elt:ref_elt+8])
+
+ res_attrs = data_offset >> 24
+ data_offset &= 0xffffff
+
+ if data_offset >= data_len:
+ raise ValueError('Bad resource data - item data out of range')
+
+ data_offset += data_start
+ res_len = struct.unpack(b'>L', data[data_offset:data_offset+4])[0]
+ if data_offset + res_len >= data_start + data_len:
+ raise ValueError('Bad resource data - item data too large')
+
+ res_data = data[data_offset + 4:data_offset + res_len + 4]
+
+ if res_name_offset == 0xffff:
+ res_name = None
+ else:
+ res_name_offset += name_offset
+ if res_name_offset >= map_start + map_len:
+ raise ValueError('Bad resource data - name out of range')
+ res_name_len = struct.unpack(b'B', data[res_name_offset])[0]
+ res_name = data[res_name_offset + 1:res_name_offset + res_name_len + 1]
+
+ result.types[res_type].append(Resource(res_type, res_id,
+ res_name,
+ res_data, res_attrs))
+
+ return result
+
+ @classmethod
+ def from_file(cls, filename):
+ with open(filename + '/..namedfork/rsrc', 'rb') as f:
+ data = f.read()
+ return cls.from_data(data)
+
+ def to_data(self):
+ data = []
+ data_len = 0
+ names = []
+ names_len = 0
+ types_len = len(self.types) * 8
+ types_data = []
+ reflist_data = []
+ reflist_len = 0
+
+ for res_type, items in self.types.items():
+ types_data.append(struct.pack(b'>4sHH',
+ res_type,
+ len(items) - 1,
+ 2 + types_len + reflist_len))
+ for item in items:
+ data_offset = data_len
+
+ if item.res_name is None:
+ name_offset = 65535
+ else:
+ name_offset = names_len
+ n = str(item.res_name)
+ names.append(struct.pack(b'B', len(n)) + n)
+ names_len += 1 + len(n)
+
+ if item.data is None:
+ data_len += 4
+ else:
+ data_len += 4 + ((len(item.data) + 3) & ~3)
+
+ reflist_len += 12
+ reflist_data.append(struct.pack(b'>hHLL',
+ item.res_id,
+ name_offset,
+ (item.res_attrs << 24) \
+ | data_offset,
+ 0))
+
+ # Header
+ data.append(struct.pack(b'>LLLL240s', 256, 256 + data_len, data_len,
+ 30 + types_len + reflist_len + names_len,
+ b''))
+
+ # Resource data
+ for res_type, items in self.types.items():
+ for item in items:
+ if item.data is None:
+ dlen = 0
+ else:
+ dlen = len(item.data)
+ plen = (dlen + 3) & ~3
+ data.append(struct.pack(b'>L', dlen))
+ if item.data is not None:
+ data.append(item.data)
+ if plen != dlen:
+ data.append(b'\0' * (plen - dlen))
+
+ # Resource map header
+ data.append(struct.pack(b'>16sLHHHHH',
+ b'', 0, 0,
+ self.attrs, 28, 30 + types_len + reflist_len,
+ len(self.types) - 1))
+
+ # Type list
+ data.append(b''.join(types_data))
+
+ # Reference lists
+ data.append(b''.join(reflist_data))
+
+ # Name list
+ data.append(b''.join(names))
+
+ return b''.join(data)
+
+ def write_to_file(self, filename):
+ with open(filename + '/..namedfork/rsrc', 'wb') as f:
+ f.write(self.to_data())
+
+ def __len__(self):
+ return len(self.types)
+
+ def __getitem__(self, key):
+ return self.types[key]
+
+ def __iter__(self):
+ for res_type, items in self.types.items():
+ for item in items:
+ yield item
+
+ def __repr__(self):
+ output = []
+ for item in self:
+ output.append(repr(item))
+ return 'ResourceFork([%s])' % ', '.join(output)
+
+ def add(self, res):
+ if res.res_type in self.types:
+ self.types[res.res_type].append(res)
+ else:
+ self.types[res.res_type] = [res]
+
+ def remove(self, res):
+ self.types[res.res_type].remove(res)
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/ds_store/LICENSE b/src/3rdparty/python/lib/python2.7/site-packages/ds_store/LICENSE
new file mode 100644
index 000000000..e91f4eb38
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/ds_store/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2014 Alastair Houghton
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/ds_store/__init__.py b/src/3rdparty/python/lib/python2.7/site-packages/ds_store/__init__.py
new file mode 100644
index 000000000..a6b812104
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/ds_store/__init__.py
@@ -0,0 +1,3 @@
+from .store import DSStore, DSStoreEntry
+
+__all__ = ['DSStore', 'DSStoreEntry']
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/ds_store/buddy.py b/src/3rdparty/python/lib/python2.7/site-packages/ds_store/buddy.py
new file mode 100644
index 000000000..a94ab6e22
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/ds_store/buddy.py
@@ -0,0 +1,473 @@
+# -*- coding: utf-8 -*-
+import os
+import bisect
+import struct
+import binascii
+
+try:
+ {}.iterkeys
+ iterkeys = lambda x: x.iterkeys()
+except AttributeError:
+ iterkeys = lambda x: x.keys()
+try:
+ unicode
+except NameError:
+ unicode = str
+
+class BuddyError(Exception):
+ pass
+
+class Block(object):
+ def __init__(self, allocator, offset, size):
+ self._allocator = allocator
+ self._offset = offset
+ self._size = size
+ self._value = bytearray(allocator.read(offset, size))
+ self._pos = 0
+ self._dirty = False
+
+ def __len__(self):
+ return self._size
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+
+ def close(self):
+ if self._dirty:
+ self.flush()
+
+ def flush(self):
+ if self._dirty:
+ self._dirty = False
+ self._allocator.write(self._offset, self._value)
+
+ def invalidate(self):
+ self._dirty = False
+
+ def zero_fill(self):
+ count = self._size - self._pos
+ zeroes = b'\0' * count
+ self._value[self._pos:self._size] = zeroes
+ self._dirty = True
+
+ def tell(self):
+ return self._pos
+
+ def seek(self, pos, whence=os.SEEK_SET):
+ if whence == os.SEEK_CUR:
+ pos += self._pos
+ elif whence == os.SEEK_END:
+ pos = self._size - pos
+
+ if pos < 0 or pos > self._size:
+ raise ValueError('Seek out of range in Block instance')
+
+ self._pos = pos
+
+ def read(self, size_or_format):
+ if isinstance(size_or_format, (str, unicode, bytes)):
+ size = struct.calcsize(size_or_format)
+ fmt = size_or_format
+ else:
+ size = size_or_format
+ fmt = None
+
+ if self._size - self._pos < size:
+ raise BuddyError('Unable to read %lu bytes in block' % size)
+
+ data = self._value[self._pos:self._pos + size]
+ self._pos += size
+
+ if fmt is not None:
+ if isinstance(data, bytearray):
+ return struct.unpack_from(fmt, bytes(data))
+ else:
+ return struct.unpack(fmt, data)
+ else:
+ return data
+
+ def write(self, data_or_format, *args):
+ if len(args):
+ data = struct.pack(data_or_format, *args)
+ else:
+ data = data_or_format
+
+ if self._pos + len(data) > self._size:
+ raise ValueError('Attempt to write past end of Block')
+
+ self._value[self._pos:self._pos + len(data)] = data
+ self._pos += len(data)
+
+ self._dirty = True
+
+ def insert(self, data_or_format, *args):
+ if len(args):
+ data = struct.pack(data_or_format, *args)
+ else:
+ data = data_or_format
+
+ del self._value[-len(data):]
+ self._value[self._pos:self._pos] = data
+ self._pos += len(data)
+
+ self._dirty = True
+
+ def delete(self, size):
+ if self._pos + size > self._size:
+ raise ValueError('Attempt to delete past end of Block')
+ del self._value[self._pos:self._pos + size]
+ self._value += b'\0' * size
+ self._dirty = True
+
+ def __str__(self):
+ return binascii.b2a_hex(bytes(self._value)).decode('ascii')
+
+class Allocator(object):
+ def __init__(self, the_file):
+ self._file = the_file
+ self._dirty = False
+
+ self._file.seek(0)
+
+ # Read the header
+ magic1, magic2, offset, size, offset2, self._unknown1 \
+ = self.read(-4, '>I4sIII16s')
+
+ if magic2 != b'Bud1' or magic1 != 1:
+ raise BuddyError('Not a buddy file')
+
+ if offset != offset2:
+ raise BuddyError('Root addresses differ')
+
+ self._root = Block(self, offset, size)
+
+ # Read the block offsets
+ count, self._unknown2 = self._root.read('>II')
+ self._offsets = []
+ c = (count + 255) & ~255
+ while c:
+ self._offsets += self._root.read('>256I')
+ c -= 256
+ self._offsets = self._offsets[:count]
+
+ # Read the TOC
+ self._toc = {}
+ count = self._root.read('>I')[0]
+ for n in range(count):
+ nlen = self._root.read('B')[0]
+ name = bytes(self._root.read(nlen)).decode('utf-8')
+ value = self._root.read('>I')[0]
+ self._toc[name] = value
+
+ # Read the free lists
+ self._free = []
+ for n in range(32):
+ count = self._root.read('>I')[0]
+ self._free.append(list(self._root.read('>%uI' % count)))
+
+ @classmethod
+ def open(cls, file_or_name, mode='r+'):
+ if isinstance(file_or_name, (str, unicode)):
+ if 'b' not in mode:
+ mode = mode[:1] + 'b' + mode[1:]
+ f = open(file_or_name, mode)
+ else:
+ f = file_or_name
+
+ if 'w' in mode:
+ # Create an empty file in this case
+ f.truncate()
+
+ # An empty root block needs 1264 bytes:
+ #
+ # 0 4 offset count
+ # 4 4 unknown
+ # 8 4 root block offset (2048)
+ # 12 255 * 4 padding (offsets are in multiples of 256)
+ # 1032 4 toc count (0)
+ # 1036 228 free list
+ # total 1264
+
+ # The free list will contain the following:
+ #
+ # 0 5 * 4 no blocks of width less than 5
+ # 20 6 * 8 1 block each of widths 5 to 10
+ # 68 4 no blocks of width 11 (allocated for the root)
+ # 72 19 * 8 1 block each of widths 12 to 30
+ # 224 4 no blocks of width 31
+ # total 228
+ #
+ # (The reason for this layout is that we allocate 2**5 bytes for
+ # the header, which splits the initial 2GB region into every size
+ # below 2**31, including *two* blocks of size 2**5, one of which
+ # we take. The root block itself then needs a block of size
+ # 2**11. Conveniently, each of these initial blocks will be
+ # located at offset 2**n where n is its width.)
+
+ # Write the header
+ header = struct.pack(b'>I4sIII16s',
+ 1, b'Bud1',
+ 2048, 1264, 2048,
+ b'\x00\x00\x10\x0c'
+ b'\x00\x00\x00\x87'
+ b'\x00\x00\x20\x0b'
+ b'\x00\x00\x00\x00')
+ f.write(header)
+ f.write(b'\0' * 2016)
+
+ # Write the root block
+ free_list = [struct.pack(b'>5I', 0, 0, 0, 0, 0)]
+ for n in range(5, 11):
+ free_list.append(struct.pack(b'>II', 1, 2**n))
+ free_list.append(struct.pack(b'>I', 0))
+ for n in range(12, 31):
+ free_list.append(struct.pack(b'>II', 1, 2**n))
+ free_list.append(struct.pack(b'>I', 0))
+
+ root = b''.join([struct.pack(b'>III', 1, 0, 2048 | 5),
+ struct.pack(b'>I', 0) * 255,
+ struct.pack(b'>I', 0)] + free_list)
+ f.write(root)
+
+ return Allocator(f)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+
+ def close(self):
+ self.flush()
+ self._file.close()
+
+ def flush(self):
+ if self._dirty:
+ size = self._root_block_size()
+ self.allocate(size, 0)
+ with self.get_block(0) as rblk:
+ self._write_root_block_into(rblk)
+
+ addr = self._offsets[0]
+ offset = addr & ~0x1f
+ size = 1 << (addr & 0x1f)
+
+ self._file.seek(0, os.SEEK_SET)
+ self._file.write(struct.pack(b'>I4sIII16s',
+ 1, b'Bud1',
+ offset, size, offset,
+ self._unknown1))
+
+ self._dirty = False
+
+ self._file.flush()
+
+ def read(self, offset, size_or_format):
+ """Read data at `offset', or raise an exception. `size_or_format'
+ may either be a byte count, in which case we return raw data,
+ or a format string for `struct.unpack', in which case we
+ work out the size and unpack the data before returning it."""
+ # N.B. There is a fixed offset of four bytes(!)
+ self._file.seek(offset + 4, os.SEEK_SET)
+
+ if isinstance(size_or_format, (str, unicode)):
+ size = struct.calcsize(size_or_format)
+ fmt = size_or_format
+ else:
+ size = size_or_format
+ fmt = None
+
+ ret = self._file.read(size)
+ if len(ret) < size:
+ ret += b'\0' * (size - len(ret))
+
+ if fmt is not None:
+ if isinstance(ret, bytearray):
+ ret = struct.unpack_from(fmt, bytes(ret))
+ else:
+ ret = struct.unpack(fmt, ret)
+
+ return ret
+
+ def write(self, offset, data_or_format, *args):
+ """Write data at `offset', or raise an exception. `data_or_format'
+ may either be the data to write, or a format string for `struct.pack',
+ in which case we pack the additional arguments and write the
+ resulting data."""
+ # N.B. There is a fixed offset of four bytes(!)
+ self._file.seek(offset + 4, os.SEEK_SET)
+
+ if len(args):
+ data = struct.pack(data_or_format, *args)
+ else:
+ data = data_or_format
+
+ self._file.write(data)
+
+ def get_block(self, block):
+ try:
+ addr = self._offsets[block]
+ except IndexError:
+ return None
+
+ offset = addr & ~0x1f
+ size = 1 << (addr & 0x1f)
+
+ return Block(self, offset, size)
+
+ def _root_block_size(self):
+ """Return the number of bytes required by the root block."""
+ # Offsets
+ size = 8
+ size += 4 * ((len(self._offsets) + 255) & ~255)
+
+ # TOC
+ size += 4
+ size += sum([5 + len(s) for s in self._toc])
+
+ # Free list
+ size += sum([4 + 4 * len(fl) for fl in self._free])
+
+ return size
+
+ def _write_root_block_into(self, block):
+ # Offsets
+ block.write('>II', len(self._offsets), self._unknown2)
+ block.write('>%uI' % len(self._offsets), *self._offsets)
+ extra = len(self._offsets) & 255
+ if extra:
+ block.write(b'\0\0\0\0' * (256 - extra))
+
+ # TOC
+ keys = list(self._toc.keys())
+ keys.sort()
+
+ block.write('>I', len(keys))
+ for k in keys:
+ b = k.encode('utf-8')
+ block.write('B', len(b))
+ block.write(b)
+ block.write('>I', self._toc[k])
+
+ # Free list
+ for w, f in enumerate(self._free):
+ block.write('>I', len(f))
+ if len(f):
+ block.write('>%uI' % len(f), *f)
+
+ def _buddy(self, offset, width):
+ f = self._free[width]
+ b = offset ^ (1 << width)
+
+ try:
+ ndx = f.index(b)
+ except ValueError:
+ ndx = None
+
+ return (f, b, ndx)
+
+ def _release(self, offset, width):
+ # Coalesce
+ while True:
+ f,b,ndx = self._buddy(offset, width)
+
+ if ndx is None:
+ break
+
+ offset &= b
+ width += 1
+ del f[ndx]
+
+ # Add to the list
+ bisect.insort(f, offset)
+
+ # Mark as dirty
+ self._dirty = True
+
+ def _alloc(self, width):
+ w = width
+ while not self._free[w]:
+ w += 1
+ while w > width:
+ offset = self._free[w].pop(0)
+ w -= 1
+ self._free[w] = [offset, offset ^ (1 << w)]
+ self._dirty = True
+ return self._free[width].pop(0)
+
+ def allocate(self, bytes, block=None):
+ """Allocate or reallocate a block such that it has space for at least
+ `bytes' bytes."""
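+ # Sizes round up to a power of two (the block "width"); e.g. a request
+ # for 100 bytes has bit_length 7 and so occupies a 2**7 = 128-byte buddy
+ # block, with 2**5 = 32 bytes as the minimum.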
+ if block is None:
+ # Find the first unused block
+ try:
+ block = self._offsets.index(0)
+ except ValueError:
+ block = len(self._offsets)
+ self._offsets.append(0)
+
+ # Compute block width
+ width = max(bytes.bit_length(), 5)
+
+ addr = self._offsets[block]
+ offset = addr & ~0x1f
+
+ if addr:
+ blkwidth = addr & 0x1f
+ if blkwidth == width:
+ return block
+ self._release(offset, width)
+ self._offsets[block] = 0
+
+ offset = self._alloc(width)
+ self._offsets[block] = offset | width
+ return block
+
+ def release(self, block):
+ addr = self._offsets[block]
+
+ if addr:
+ width = addr & 0x1f
+ offset = addr & ~0x1f
+ self._release(offset, width)
+
+ if block == len(self._offsets) - 1: # last slot: drop it entirely
+ del self._offsets[block]
+ else:
+ self._offsets[block] = 0
+
+ def __len__(self):
+ return len(self._toc)
+
+ def __getitem__(self, key):
+ if not isinstance(key, (str, unicode)):
+ raise TypeError('Keys must be of string type')
+ return self._toc[key]
+
+ def __setitem__(self, key, value):
+ if not isinstance(key, (str, unicode)):
+ raise TypeError('Keys must be of string type')
+ self._toc[key] = value
+ self._dirty = True
+
+ def __delitem__(self, key):
+ if not isinstance(key, (str, unicode)):
+ raise TypeError('Keys must be of string type')
+ del self._toc[key]
+ self._dirty = True
+
+ def iterkeys(self):
+ return iterkeys(self._toc)
+
+ def keys(self):
+ return iterkeys(self._toc)
+
+ def __iter__(self):
+ return iterkeys(self._toc)
+
+ def __contains__(self, key):
+ return key in self._toc
+
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/ds_store/qt_attribution.json b/src/3rdparty/python/lib/python2.7/site-packages/ds_store/qt_attribution.json
new file mode 100644
index 000000000..dda98b937
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/ds_store/qt_attribution.json
@@ -0,0 +1,13 @@
+{
+ "Id": "ds_store",
+ "Name": "ds_store",
+ "QDocModule": "qbs",
+ "QtUsage": "Used in the qbs dmg module for building Apple disk images.",
+ "Description": "Manipulate Finder .DS_Store files from Python",
+ "Homepage": "https://bitbucket.org/al45tair/ds_store",
+ "Version": "1.1.0",
+ "License": "MIT License",
+ "LicenseId": "MIT",
+ "LicenseFile": "LICENSE",
+ "Copyright": "Copyright (c) 2014 Alastair Houghton"
+}
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/ds_store/store.py b/src/3rdparty/python/lib/python2.7/site-packages/ds_store/store.py
new file mode 100644
index 000000000..bf680d77a
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/ds_store/store.py
@@ -0,0 +1,1231 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+from __future__ import print_function
+from __future__ import division
+
+import binascii
+import struct
+import biplist
+import mac_alias
+
+try:
+ next
+except NameError:
+ next = lambda x: x.next()
+try:
+ unicode
+except NameError:
+ unicode = str
+
+from . import buddy
+
+class ILocCodec(object):
+ @staticmethod
+ def encode(point):
+ return struct.pack(b'>IIII', point[0], point[1],
+ 0xffffffff, 0xffff0000)
+
+ @staticmethod
+ def decode(bytesData):
+ if isinstance(bytesData, bytearray):
+ x, y = struct.unpack_from(b'>II', bytes(bytesData[:8]))
+ else:
+ x, y = struct.unpack(b'>II', bytesData[:8])
+ return (x, y)
+
+class PlistCodec(object):
+ @staticmethod
+ def encode(plist):
+ return biplist.writePlistToString(plist)
+
+ @staticmethod
+ def decode(bytes):
+ return biplist.readPlistFromString(bytes)
+
+class BookmarkCodec(object):
+ @staticmethod
+ def encode(bmk):
+ return bmk.to_bytes()
+
+ @staticmethod
+ def decode(bytes):
+ return mac_alias.Bookmark.from_bytes(bytes)
+
+# This list tells the code how to decode particular kinds of entry in the
+# .DS_Store file. This is really a convenience, and we currently only
+# support a tiny subset of the possible entry types.
+codecs = {
+ 'Iloc': ILocCodec,
+ 'bwsp': PlistCodec,
+ 'lsvp': PlistCodec,
+ 'lsvP': PlistCodec,
+ 'icvp': PlistCodec,
+ 'pBBk': BookmarkCodec
+ }
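+
+# For instance, an icon position survives a round trip through the codec:
+# ILocCodec.decode(ILocCodec.encode((128, 64))) == (128, 64)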
+
+class DSStoreEntry(object):
+ """Holds the data from an entry in a ``.DS_Store`` file. Note that this is
+ not meant to represent the entry itself---i.e. if you change the type
+ or value, your changes will *not* be reflected in the underlying file.
+
+ If you want to make a change, you should either use the :class:`DSStore`
+ object's :meth:`DSStore.insert` method (which will replace a key if it
+ already exists), or the mapping access mode for :class:`DSStore` (often
+ simpler anyway).
+ """
+ def __init__(self, filename, code, typecode, value=None):
+ if str != bytes and type(filename) == bytes:
+ filename = filename.decode('utf-8')
+ self.filename = filename
+ self.code = code
+ self.type = typecode
+ self.value = value
+
+ @classmethod
+ def read(cls, block):
+ """Read a ``.DS_Store`` entry from the containing Block"""
+ # First read the filename
+ nlen = block.read(b'>I')[0]
+ filename = block.read(2 * nlen).decode('utf-16be')
+
+ # Next, read the code and type
+ code, typecode = block.read(b'>4s4s')
+
+ # Finally, read the data
+ if typecode == b'bool':
+ value = block.read(b'>?')[0]
+ elif typecode == b'long' or typecode == b'shor':
+ value = block.read(b'>I')[0]
+ elif typecode == b'blob':
+ vlen = block.read(b'>I')[0]
+ value = block.read(vlen)
+
+ codec = codecs.get(code, None)
+ if codec:
+ value = codec.decode(value)
+ typecode = codec
+ elif typecode == b'ustr':
+ vlen = block.read(b'>I')[0]
+ value = block.read(2 * vlen).decode('utf-16be')
+ elif typecode == b'type':
+ value = block.read(b'>4s')[0]
+ elif typecode == b'comp' or typecode == b'dutc':
+ value = block.read(b'>Q')[0]
+ else:
+ raise ValueError('Unknown type code "%s"' % typecode)
+
+ return DSStoreEntry(filename, code, typecode, value)
+
+ def __lt__(self, other):
+ if not isinstance(other, DSStoreEntry):
+ raise TypeError('Can only compare against other DSStoreEntry objects')
+ sfl = self.filename.lower()
+ ofl = other.filename.lower()
+ return (sfl < ofl
+ or (self.filename == other.filename
+ and self.code < other.code))
+
+ def __le__(self, other):
+ if not isinstance(other, DSStoreEntry):
+ raise TypeError('Can only compare against other DSStoreEntry objects')
+ sfl = self.filename.lower()
+ ofl = other.filename.lower()
+ return (sfl < ofl
+ or (sfl == ofl
+ and self.code <= other.code))
+
+ def __eq__(self, other):
+ if not isinstance(other, DSStoreEntry):
+ raise TypeError('Can only compare against other DSStoreEntry objects')
+ sfl = self.filename.lower()
+ ofl = other.filename.lower()
+ return (sfl == ofl
+ and self.code == other.code)
+
+ def __ne__(self, other):
+ if not isinstance(other, DSStoreEntry):
+ raise TypeError('Can only compare against other DSStoreEntry objects')
+ sfl = self.filename.lower()
+ ofl = other.filename.lower()
+ return (sfl != ofl
+ or self.code != other.code)
+
+ def __gt__(self, other):
+ if not isinstance(other, DSStoreEntry):
+ raise TypeError('Can only compare against other DSStoreEntry objects')
+ sfl = self.filename.lower()
+ ofl = other.filename.lower()
+
+ selfCode = self.code
+ if str != bytes and type(selfCode) is bytes:
+ selfCode = selfCode.decode('utf-8')
+ otherCode = other.code
+ if str != bytes and type(otherCode) is bytes:
+ otherCode = otherCode.decode('utf-8')
+
+ return (sfl > ofl or (sfl == ofl and selfCode > otherCode))
+
+ def __ge__(self, other):
+ if not isinstance(other, DSStoreEntry):
+ raise TypeError('Can only compare against other DSStoreEntry objects')
+ sfl = self.filename.lower()
+ ofl = other.filename.lower()
+ return (sfl > ofl
+ or (sfl == ofl
+ and self.code >= other.code))
+
+ def __cmp__(self, other):
+ if not isinstance(other, DSStoreEntry):
+ raise TypeError('Can only compare against other DSStoreEntry objects')
+ r = cmp(self.filename.lower(), other.filename.lower())
+ if r:
+ return r
+ return cmp(self.code, other.code)
+
+ def byte_length(self):
+ """Compute the length of this entry, in bytes"""
+ utf16 = self.filename.encode('utf-16be')
+ length = 4 + len(utf16) + 8
+
+ if isinstance(self.type, (str, unicode)):
+ entry_type = self.type
+ value = self.value
+ else:
+ entry_type = 'blob'
+ value = self.type.encode(self.value)
+
+ if entry_type == 'bool':
+ length += 1
+ elif entry_type == 'long' or entry_type == 'shor':
+ length += 4
+ elif entry_type == 'blob':
+ length += 4 + len(value)
+ elif entry_type == 'ustr':
+ utf16 = value.encode('utf-16be')
+ length += 4 + len(utf16)
+ elif entry_type == 'type':
+ length += 4
+ elif entry_type == 'comp' or entry_type == 'dutc':
+ length += 8
+ else:
+ raise ValueError('Unknown type code "%s"' % entry_type)
+
+ return length
+
+ def write(self, block, insert=False):
+ """Write this entry to the specified Block"""
+ if insert:
+ w = block.insert
+ else:
+ w = block.write
+
+ if isinstance(self.type, (str, unicode)):
+ entry_type = self.type
+ value = self.value
+ else:
+ entry_type = 'blob'
+ value = self.type.encode(self.value)
+
+ utf16 = self.filename.encode('utf-16be')
+ w(b'>I', len(utf16) // 2)
+ w(utf16)
+ w(b'>4s4s', self.code.encode('utf-8'), entry_type.encode('utf-8'))
+
+ if entry_type == 'bool':
+ w(b'>?', value)
+ elif entry_type == 'long' or entry_type == 'shor':
+ w(b'>I', value)
+ elif entry_type == 'blob':
+ w(b'>I', len(value))
+ w(value)
+ elif entry_type == 'ustr':
+ utf16 = value.encode('utf-16be')
+ w(b'>I', len(utf16) // 2)
+ w(utf16)
+ elif entry_type == 'type':
+ w(b'>4s', value.encode('utf-8'))
+ elif entry_type == 'comp' or entry_type == 'dutc':
+ w(b'>Q', value)
+ else:
+ raise ValueError('Unknown type code "%s"' % entry_type)
+
+ def __repr__(self):
+ return '<%s %s>' % (self.filename, self.code)
+
+class DSStore(object):
+ """Python interface to a ``.DS_Store`` file. Works by manipulating the file
+ on the disk---so this code will work with ``.DS_Store`` files for *very*
+ large directories.
+
+ A :class:`DSStore` object can be used as if it was a mapping, e.g.::
+
+ d['foobar.dat']['Iloc']
+
+ will fetch the "Iloc" record for "foobar.dat", or raise :class:`KeyError` if
+ there is no such record. If used in this manner, the :class:`DSStore` object
+ will return (type, value) tuples, unless the type is "blob" and the module
+ knows how to decode it.
+
+ Currently, we know how to decode "Iloc", "bwsp", "lsvp", "lsvP", "icvp"
+ and "pBBk" blobs. "Iloc" decodes to an (x, y) tuple, "pBBk" to a
+ ``mac_alias.Bookmark``, and the rest are decoded using ``biplist``.
+
+ Assignment also works, e.g.::
+
+ d['foobar.dat']['note'] = ('ustr', u'Hello World!')
+
+ as does deletion with ``del``::
+
+ del d['foobar.dat']['note']
+
+ This is usually going to be the most convenient interface, though
+ occasionally (for instance when creating a new ``.DS_Store`` file) you
+ may wish to drop down to using :class:`DSStoreEntry` objects directly."""
+ def __init__(self, store):
+ self._store = store
+ self._superblk = self._store['DSDB']
+ with self._get_block(self._superblk) as s:
+ self._rootnode, self._levels, self._records, \
+ self._nodes, self._page_size = s.read(b'>IIIII')
+ self._min_usage = 2 * self._page_size // 3
+ self._dirty = False
+
+ @classmethod
+ def open(cls, file_or_name, mode='r+', initial_entries=None):
+ """Open a ``.DS_Store`` file; pass either a Python file object, or a
+ filename in the ``file_or_name`` argument and a file access mode in
+ the ``mode`` argument. If you are creating a new file using the "w"
+ or "w+" modes, you may also specify a list of entries with which
+ to initialise the file."""
+ store = buddy.Allocator.open(file_or_name, mode)
+
+ if mode == 'w' or mode == 'w+':
+ superblk = store.allocate(20)
+ store['DSDB'] = superblk
+ page_size = 4096
+
+ if not initial_entries:
+ root = store.allocate(page_size)
+
+ with store.get_block(root) as rootblk:
+ rootblk.zero_fill()
+
+ with store.get_block(superblk) as s:
+ s.write(b'>IIIII', root, 0, 0, 1, page_size)
+ else:
+ # Make sure they're in sorted order
+ initial_entries = list(initial_entries)
+ initial_entries.sort()
+
+ # Construct the tree
+ current_level = initial_entries
+ next_level = []
+ levels = []
+ ptr_size = 0
+ node_count = 0
+ while True:
+ total = 8
+ nodes = []
+ node = []
+ for e in current_level:
+ new_total = total + ptr_size + e.byte_length()
+ if new_total > page_size:
+ nodes.append(node)
+ next_level.append(e)
+ total = 8
+ node = []
+ else:
+ total = new_total
+ node.append(e)
+ if node:
+ nodes.append(node)
+
+ node_count += len(nodes)
+ levels.append(nodes)
+
+ if len(nodes) == 1:
+ break
+
+ current_level = next_level
+ next_level = []
+ ptr_size = 4
+
+ # Allocate nodes
+ ptrs = [store.allocate(page_size) for n in range(node_count)]
+
+ # Generate nodes
+ pointers = []
+ prev_pointers = None
+ for level in levels:
+ ppndx = 0
+ lptrs = ptrs[-len(level):]
+ del ptrs[-len(level):]
+ for node in level:
+ ndx = lptrs.pop(0)
+ if prev_pointers is None:
+ with store.get_block(ndx) as block:
+ block.write(b'>II', 0, len(node))
+ for e in node:
+ e.write(block)
+ else:
+ next_node = prev_pointers[ppndx + len(node)]
+ node_ptrs = prev_pointers[ppndx:ppndx+len(node)]
+
+                        with store.get_block(ndx) as block:
+                            block.write(b'>II', next_node, len(node))
+                            for ptr, e in zip(node_ptrs, node):
+                                block.write(b'>I', ptr)
+                                e.write(block)
+
+                        # Skip the len(node) child pointers consumed above,
+                        # plus the trailing `next_node' pointer
+                        ppndx += len(node) + 1
+
+                    pointers.append(ndx)
+ prev_pointers = pointers
+ pointers = []
+
+ root = prev_pointers[0]
+
+ with store.get_block(superblk) as s:
+ s.write(b'>IIIII', root, len(levels), len(initial_entries),
+ node_count, page_size)
+
+ return DSStore(store)
+
+ def _get_block(self, number):
+ return self._store.get_block(number)
+
+ def flush(self):
+ """Flush any dirty data back to the file."""
+ if self._dirty:
+ self._dirty = False
+
+ with self._get_block(self._superblk) as s:
+ s.write(b'>IIIII', self._rootnode, self._levels, self._records,
+ self._nodes, self._page_size)
+ self._store.flush()
+
+ def close(self):
+ """Flush dirty data and close the underlying file."""
+ self.flush()
+ self._store.close()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+
+ # Internal B-Tree nodes look like this:
+ #
+ # [ next | count | (ptr0 | rec0) | (ptr1 | rec1) ... (ptrN | recN) ]
+
+ # Leaf nodes look like this:
+ #
+ # [ 0 | count | rec0 | rec1 ... recN ]
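+    #
+    # Each record (a sketch; see DSStoreEntry.write above) is a '>I' count
+    # of UTF-16 code units, the filename in UTF-16-BE, the 4-byte record
+    # code (e.g. 'Iloc'), the 4-byte type code (e.g. 'blob'), and then the
+    # value in its type-specific encoding.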
+
+ # Iterate over the tree, starting at `node'
+ def _traverse(self, node):
+ if node is None:
+ node = self._rootnode
+ with self._get_block(node) as block:
+ next_node, count = block.read(b'>II')
+ if next_node:
+ for n in range(count):
+ ptr = block.read(b'>I')[0]
+ for e in self._traverse(ptr):
+ yield e
+ e = DSStoreEntry.read(block)
+ yield e
+ for e in self._traverse(next_node):
+ yield e
+ else:
+ for n in range(count):
+ e = DSStoreEntry.read(block)
+ yield e
+
+ # Display the data in `node'
+ def _dump_node(self, node):
+ with self._get_block(node) as block:
+ next_node, count = block.read(b'>II')
+ print('next: %u\ncount: %u\n' % (next_node, count))
+ for n in range(count):
+ if next_node:
+ ptr = block.read(b'>I')[0]
+ print('%8u ' % ptr, end=' ')
+ else:
+ print(' ', end=' ')
+ e = DSStoreEntry.read(block)
+ print(e, ' (%u)' % e.byte_length())
+ print('used: %u' % block.tell())
+
+ # Display the data in the super block
+ def _dump_super(self):
+ print('root: %u\nlevels: %u\nrecords: %u\nnodes: %u\npage-size: %u' \
+ % (self._rootnode, self._levels, self._records,
+ self._nodes, self._page_size))
+
+ # Splits entries across two blocks, returning one pivot
+ #
+ # Tries to balance the block usage across the two as best it can
+ def _split2(self, blocks, entries, pointers, before, internal):
+ left_block = blocks[0]
+ right_block = blocks[1]
+
+ count = len(entries)
+
+ # Find the feasible splits
+ best_split = None
+ best_diff = None
+ total = before[count]
+
+ if 8 + total <= self._page_size:
+ # We can use a *single* node for this
+ best_split = count
+ else:
+ # Split into two nodes
+ for n in range(1, count - 1):
+ left_size = 8 + before[n]
+ right_size = 8 + total - before[n + 1]
+
+ if left_size > self._page_size:
+ break
+ if right_size > self._page_size:
+ continue
+
+ diff = abs(left_size - right_size)
+
+ if best_split is None or diff < best_diff:
+ best_split = n
+ best_diff = diff
+
+ if best_split is None:
+ return None
+
+ # Write the nodes
+ left_block.seek(0)
+ if internal:
+ next_node = pointers[best_split]
+ else:
+ next_node = 0
+ left_block.write(b'>II', next_node, best_split)
+
+ for n in range(best_split):
+ if internal:
+ left_block.write(b'>I', pointers[n])
+ entries[n].write(left_block)
+
+ left_block.zero_fill()
+
+ if best_split == count:
+ return []
+
+ right_block.seek(0)
+ if internal:
+ next_node = pointers[count]
+ else:
+ next_node = 0
+ right_block.write(b'>II', next_node, count - best_split - 1)
+
+ for n in range(best_split + 1, count):
+ if internal:
+ right_block.write(b'>I', pointers[n])
+ entries[n].write(right_block)
+
+ right_block.zero_fill()
+
+ pivot = entries[best_split]
+
+ return [pivot]
+
+ def _split(self, node, entry, right_ptr=0):
+ self._nodes += 1
+ self._dirty = True
+ new_right = self._store.allocate(self._page_size)
+ with self._get_block(node) as block, \
+ self._get_block(new_right) as right_block:
+
+ # First, measure and extract all the elements
+ entry_size = entry.byte_length()
+ entry_pos = None
+ next_node, count = block.read(b'>II')
+ if next_node:
+ entry_size += 4
+ pointers = []
+ entries = []
+ before = []
+ total = 0
+ for n in range(count):
+ pos = block.tell()
+ if next_node:
+ ptr = block.read(b'>I')[0]
+ pointers.append(ptr)
+ e = DSStoreEntry.read(block)
+ if e > entry:
+ entry_pos = n
+ entries.append(entry)
+ pointers.append(right_ptr)
+ before.append(total)
+ total += entry_size
+ entries.append(e)
+ before.append(total)
+ total += block.tell() - pos
+ before.append(total)
+ if next_node:
+ pointers.append(next_node)
+
+ pivot = self._split2([block, right_block],
+ entries, pointers, before,
+ bool(next_node))[0]
+
+ self._records += 1
+ self._nodes += 1
+ self._dirty = True
+
+ return (pivot, new_right)
+
+ # Allocate a new root node containing the element `pivot' and the pointers
+ # `left' and `right'
+ def _new_root(self, left, pivot, right):
+ new_root = self._store.allocate(self._page_size)
+ with self._get_block(new_root) as block:
+ block.write(b'>III', right, 1, left)
+ pivot.write(block)
+ self._rootnode = new_root
+ self._levels += 1
+ self._nodes += 1
+ self._dirty = True
+
+ # Insert an entry into an inner node; `path' is the path from the root
+ # to `node', not including `node' itself. `right_ptr' is the new node
+ # pointer (inserted to the RIGHT of `entry')
+ def _insert_inner(self, path, node, entry, right_ptr):
+ with self._get_block(node) as block:
+ next_node, count = block.read(b'>II')
+ insert_pos = None
+ insert_ndx = None
+ n = 0
+ while n < count:
+ pos = block.tell()
+ ptr = block.read(b'>I')[0]
+ e = DSStoreEntry.read(block)
+ if e == entry:
+ if n == count - 1:
+ right_ptr = next_node
+ next_node = ptr
+                        block.seek(pos)
+ else:
+ right_ptr = block.read(b'>I')[0]
+ block.seek(pos + 4)
+ insert_pos = pos
+ insert_ndx = n
+ block.delete(e.byte_length() + 4)
+ count -= 1
+ self._records += 1
+ self._dirty = True
+ continue
+ elif insert_pos is None and e > entry:
+ insert_pos = pos
+ insert_ndx = n
+ n += 1
+ if insert_pos is None:
+ insert_pos = block.tell()
+ insert_ndx = count
+ remaining = self._page_size - block.tell()
+
+ if remaining < entry.byte_length() + 4:
+ pivot, new_right = self._split(node, entry, right_ptr)
+ if path:
+ self._insert_inner(path[:-1], path[-1], pivot, new_right)
+ else:
+ self._new_root(node, pivot, new_right)
+ else:
+ if insert_ndx == count:
+ block.seek(insert_pos)
+ block.write(b'>I', next_node)
+ entry.write(block)
+ next_node = right_ptr
+ else:
+ block.seek(insert_pos + 4)
+ entry.write(block, True)
+                    block.insert(b'>I', right_ptr)
+ block.seek(0)
+ count += 1
+ block.write(b'>II', next_node, count)
+ self._records += 1
+ self._dirty = True
+
+ # Insert `entry' into the leaf node `node'
+ def _insert_leaf(self, path, node, entry):
+ with self._get_block(node) as block:
+ next_node, count = block.read(b'>II')
+ insert_pos = None
+ insert_ndx = None
+ n = 0
+ while n < count:
+ pos = block.tell()
+ e = DSStoreEntry.read(block)
+ if e == entry:
+ insert_pos = pos
+ insert_ndx = n
+ block.seek(pos)
+ block.delete(e.byte_length())
+ count -= 1
+ self._records += 1
+ self._dirty = True
+ continue
+ elif insert_pos is None and e > entry:
+ insert_pos = pos
+ insert_ndx = n
+ n += 1
+ if insert_pos is None:
+ insert_pos = block.tell()
+ insert_ndx = count
+ remaining = self._page_size - block.tell()
+
+ if remaining < entry.byte_length():
+ pivot, new_right = self._split(node, entry)
+ if path:
+ self._insert_inner(path[:-1], path[-1], pivot, new_right)
+ else:
+ self._new_root(node, pivot, new_right)
+ else:
+ block.seek(insert_pos)
+ entry.write(block, True)
+ block.seek(0)
+ count += 1
+ block.write(b'>II', next_node, count)
+ self._records += 1
+ self._dirty = True
+
+ def insert(self, entry):
+ """Insert ``entry`` (which should be a :class:`DSStoreEntry`)
+ into the B-Tree."""
+ path = []
+ node = self._rootnode
+ while True:
+ with self._get_block(node) as block:
+ next_node, count = block.read(b'>II')
+ if next_node:
+ for n in range(count):
+ ptr = block.read(b'>I')[0]
+ e = DSStoreEntry.read(block)
+ if entry < e:
+ next_node = ptr
+ break
+ elif entry == e:
+                        # If we find a matching entry, replace it
+ self._insert_inner(path, node, entry, None)
+ return
+ path.append(node)
+ node = next_node
+ else:
+ self._insert_leaf(path, node, entry)
+ return
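+
+    # e.g. (a sketch mirroring the class docstring):
+    #
+    #     d.insert(DSStoreEntry('foobar.dat', 'note', 'ustr', u'Hello World!'))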
+
+ # Return usage information for the specified `node'
+ def _block_usage(self, node):
+ with self._get_block(node) as block:
+ next_node, count = block.read(b'>II')
+
+ for n in range(count):
+ if next_node:
+ ptr = block.read(b'>I')[0]
+ e = DSStoreEntry.read(block)
+
+ used = block.tell()
+
+ return (count, used)
+
+ # Splits entries across three blocks, returning two pivots
+ def _split3(self, blocks, entries, pointers, before, internal):
+ count = len(entries)
+
+ # Find the feasible splits
+ best_split = None
+ best_diff = None
+ total = before[count]
+ for n in range(1, count - 3):
+ left_size = 8 + before[n]
+ remaining = 16 + total - before[n + 1]
+
+ if left_size > self._page_size:
+ break
+ if remaining > 2 * self._page_size:
+ continue
+
+ for m in range(n + 2, count - 1):
+ mid_size = 8 + before[m] - before[n + 1]
+ right_size = 8 + total - before[m + 1]
+
+ if mid_size > self._page_size:
+ break
+ if right_size > self._page_size:
+ continue
+
+ diff = abs(left_size - mid_size) * abs(right_size - mid_size)
+
+ if best_split is None or diff < best_diff:
+ best_split = (n, m, count)
+ best_diff = diff
+
+ if best_split is None:
+ return None
+
+ # Write the nodes
+ prev_split = -1
+ for block, split in zip(blocks, best_split):
+ block.seek(0)
+ if internal:
+ next_node = pointers[split]
+ else:
+ next_node = 0
+ block.write(b'>II', next_node, split)
+
+ for n in range(prev_split + 1, split):
+ if internal:
+ block.write(b'>I', pointers[n])
+ entries[n].write(block)
+
+ block.zero_fill()
+
+ prev_split = split
+
+ return (entries[best_split[0]], entries[best_split[1]])
+
+ # Extract all of the entries from the specified list of `blocks',
+ # separating them by the specified `pivots'. Also computes the
+ # amount of space used before each entry.
+ def _extract(self, blocks, pivots):
+ pointers = []
+ entries = []
+ before = []
+ total = 0
+ ppivots = pivots + [None]
+ for b,p in zip(blocks, ppivots):
+ b.seek(0)
+ next_node, count = b.read(b'>II')
+ for n in range(count):
+ pos = b.tell()
+ if next_node:
+ ptr = b.read(b'>I')[0]
+ pointers.append(ptr)
+ e = DSStoreEntry.read(b)
+ entries.append(e)
+ before.append(total)
+ total += b.tell() - pos
+ if next_node:
+ pointers.append(next_node)
+ if p:
+ entries.append(p)
+ before.append(total)
+ total += p.byte_length()
+ if next_node:
+ total += 4
+ before.append(total)
+
+ return (entries, pointers, before)
+
+ # Rebalance the specified `node', whose path from the root is `path'.
+ def _rebalance(self, path, node):
+ # Can't rebalance the root
+ if not path:
+ return
+
+ with self._get_block(node) as block:
+ next_node, count = block.read(b'>II')
+
+ with self._get_block(path[-1]) as parent:
+ # Find the left and right siblings and respective pivots
+ parent_next, parent_count = parent.read(b'>II')
+ left_pos = None
+ left_node = None
+ left_pivot = None
+ node_pos = None
+ right_pos = None
+ right_node = None
+ right_pivot = None
+ prev_e = prev_ptr = prev_pos = None
+ for n in range(parent_count):
+ pos = parent.tell()
+ ptr = parent.read(b'>I')[0]
+ e = DSStoreEntry.read(parent)
+
+ if ptr == node:
+ node_pos = pos
+ right_pivot = e
+ left_pos = prev_pos
+ left_pivot = prev_e
+ left_node = prev_ptr
+ elif prev_ptr == node:
+ right_node = ptr
+ right_pos = pos
+ break
+
+ prev_e = e
+ prev_ptr = ptr
+ prev_pos = pos
+
+ if parent_next == node:
+ node_pos = parent.tell()
+ left_pos = prev_pos
+ left_pivot = prev_e
+ left_node = prev_ptr
+ elif right_node is None:
+ right_node = parent_next
+ right_pos = parent.tell()
+
+ parent_used = parent.tell()
+
+ if left_node and right_node:
+ with self._get_block(left_node) as left, \
+ self._get_block(right_node) as right:
+ blocks = [left, block, right]
+ pivots = [left_pivot, right_pivot]
+
+ entries, pointers, before = self._extract(blocks, pivots)
+
+ # If there's a chance that we could use two pages instead
+ # of three, go for it
+ pivots = self._split2(blocks, entries, pointers,
+ before, bool(next_node))
+ if pivots is None:
+ ptrs = [left_node, node, right_node]
+ pivots = self._split3(blocks, entries, pointers,
+ before, bool(next_node))
+ else:
+ if pivots:
+ ptrs = [left_node, node]
+ else:
+ ptrs = [left_node]
+ self._store.release(node)
+ self._nodes -= 1
+ node = left_node
+ self._store.release(right_node)
+ self._nodes -= 1
+ self._dirty = True
+
+ # Remove the pivots from the parent
+ with self._get_block(path[-1]) as parent:
+ if right_node == parent_next:
+ parent.seek(left_pos)
+ parent.delete(right_pos - left_pos)
+ parent_next = left_node
+ else:
+ parent.seek(left_pos + 4)
+ parent.delete(right_pos - left_pos)
+ parent.seek(0)
+ parent_count -= 2
+ parent.write(b'>II', parent_next, parent_count)
+ self._records -= 2
+
+ # Replace with those in pivots
+ for e,rp in zip(pivots, ptrs[1:]):
+ self._insert_inner(path[:-1], path[-1], e, rp)
+ elif left_node:
+ with self._get_block(left_node) as left:
+ blocks = [left, block]
+ pivots = [left_pivot]
+
+ entries, pointers, before = self._extract(blocks, pivots)
+
+ pivots = self._split2(blocks, entries, pointers,
+ before, bool(next_node))
+
+ # Remove the pivot from the parent
+ with self._get_block(path[-1]) as parent:
+ if node == parent_next:
+ parent.seek(left_pos)
+ parent.delete(node_pos - left_pos)
+ parent_next = left_node
+ else:
+ parent.seek(left_pos + 4)
+ parent.delete(node_pos - left_pos)
+ parent.seek(0)
+ parent_count -= 1
+ parent.write(b'>II', parent_next, parent_count)
+ self._records -= 1
+
+ # Replace the pivot
+ if pivots:
+ self._insert_inner(path[:-1], path[-1], pivots[0], node)
+ elif right_node:
+ with self._get_block(right_node) as right:
+ blocks = [block, right]
+ pivots = [right_pivot]
+
+ entries, pointers, before = self._extract(blocks, pivots)
+
+ pivots = self._split2(blocks, entries, pointers,
+ before, bool(next_node))
+
+ # Remove the pivot from the parent
+ with self._get_block(path[-1]) as parent:
+                    if right_node == parent_next:
+                        parent.seek(node_pos)
+                        parent.delete(right_pos - node_pos)
+                        parent_next = node
+                    else:
+                        parent.seek(node_pos + 4)
+                        parent.delete(right_pos - node_pos)
+ parent.seek(0)
+ parent_count -= 1
+ parent.write(b'>II', parent_next, parent_count)
+ self._records -= 1
+
+ # Replace the pivot
+ if pivots:
+ self._insert_inner(path[:-1], path[-1], pivots[0],
+ right_node)
+
+        if len(path) == 1 and not parent_count:
+            # The parent is the (now empty) root node; make `node' the
+            # new root and collapse the tree by one level
+            self._store.release(path[-1])
+            self._nodes -= 1
+            self._levels -= 1
+            self._dirty = True
+            self._rootnode = node
+ else:
+ count, used = self._block_usage(path[-1])
+
+ if used < self._page_size // 2:
+ self._rebalance(path[:-1], path[-1])
+
+ # Delete from the leaf node `node'. `filename_lc' has already been
+ # lower-cased.
+ def _delete_leaf(self, node, filename_lc, code):
+ found = False
+
+ with self._get_block(node) as block:
+ next_node, count = block.read(b'>II')
+
+ for n in range(count):
+ pos = block.tell()
+ e = DSStoreEntry.read(block)
+ if e.filename.lower() == filename_lc \
+ and (code is None or e.code == code):
+ block.seek(pos)
+ block.delete(e.byte_length())
+ found = True
+
+ # This does not affect the loop; THIS IS NOT A BUG
+ count -= 1
+
+ self._records -= 1
+ self._dirty = True
+
+ if found:
+ used = block.tell()
+
+ block.seek(0)
+ block.write(b'>II', next_node, count)
+
+ return used < self._page_size // 2
+ else:
+ return False
+
+ # Remove the largest entry from the subtree starting at `node' (with
+ # path from root `path'). Returns a tuple (rebalance, entry) where
+ # rebalance is either None if no rebalancing is required, or a
+ # (path, node) tuple giving the details of the node to rebalance.
+ def _take_largest(self, path, node):
+ path = list(path)
+ rebalance = None
+ while True:
+ with self._get_block(node) as block:
+ next_node, count = block.read(b'>II')
+
+ if next_node:
+ path.append(node)
+ node = next_node
+ continue
+
+ for n in range(count):
+ pos = block.tell()
+ e = DSStoreEntry.read(block)
+
+ count -= 1
+ block.seek(0)
+ block.write(b'>II', next_node, count)
+
+ if pos < self._page_size // 2:
+ rebalance = (path, node)
+ break
+
+ return rebalance, e
+
+ # Delete an entry from an inner node, `node'
+ def _delete_inner(self, path, node, filename_lc, code):
+ rebalance = False
+
+ with self._get_block(node) as block:
+ next_node, count = block.read(b'>II')
+
+ for n in range(count):
+ pos = block.tell()
+ ptr = block.read(b'>I')[0]
+ e = DSStoreEntry.read(block)
+ if e.filename.lower() == filename_lc \
+ and (code is None or e.code == code):
+ # Take the largest from the left subtree
+ rebalance, largest = self._take_largest(path, ptr)
+
+ # Delete this entry
+ if n == count - 1:
+ right_ptr = next_node
+ next_node = ptr
+ block.seek(pos)
+ else:
+ right_ptr = block.read(b'>I')[0]
+ block.seek(pos + 4)
+
+ block.delete(e.byte_length() + 4)
+
+ count -= 1
+ block.seek(0)
+ block.write(b'>II', next_node, count)
+
+ self._records -= 1
+ self._dirty = True
+
+ break
+
+ # Replace the pivot value
+ self._insert_inner(path, node, largest, right_ptr)
+
+ # Rebalance from the node we stole from
+ if rebalance:
+ self._rebalance(rebalance[0], rebalance[1])
+ return True
+ return False
+
+ def delete(self, filename, code):
+ """Delete an item, identified by ``filename`` and ``code``
+ from the B-Tree."""
+ if isinstance(filename, DSStoreEntry):
+ code = filename.code
+ filename = filename.filename
+
+ # If we're deleting *every* node for "filename", we must recurse
+ if code is None:
+ ###TODO: Fix this so we can do bulk deletes
+ raise ValueError('You must delete items individually. Sorry')
+
+ # Otherwise, we're deleting *one* specific node
+ filename_lc = filename.lower()
+ path = []
+ node = self._rootnode
+ while True:
+ with self._get_block(node) as block:
+ next_node, count = block.read(b'>II')
+ if next_node:
+ for n in range(count):
+ ptr = block.read(b'>I')[0]
+ e = DSStoreEntry.read(block)
+ e_lc = e.filename.lower()
+ if filename_lc < e_lc \
+ or (filename_lc == e_lc and code < e.code):
+ next_node = ptr
+ break
+ elif filename_lc == e_lc and code == e.code:
+ self._delete_inner(path, node, filename_lc, code)
+ return
+ path.append(node)
+ node = next_node
+ else:
+ if self._delete_leaf(node, filename_lc, code):
+ self._rebalance(path, node)
+ return
+
+ # Find implementation
+ def _find(self, node, filename_lc, code=None):
+ with self._get_block(node) as block:
+ next_node, count = block.read(b'>II')
+ if next_node:
+ for n in range(count):
+ ptr = block.read(b'>I')[0]
+ e = DSStoreEntry.read(block)
+ if filename_lc < e.filename.lower():
+ for e in self._find(ptr, filename_lc, code):
+ yield e
+ return
+ elif filename_lc == e.filename.lower():
+ if code is None or (code and code < e.code):
+ for e in self._find(ptr, filename_lc, code):
+ yield e
+ if code is None or code == e.code:
+ yield e
+ elif code < e.code:
+ return
+ for e in self._find(next_node, filename_lc, code):
+ yield e
+ else:
+ for n in range(count):
+ e = DSStoreEntry.read(block)
+ if filename_lc == e.filename.lower():
+ if code is None or code == e.code:
+ yield e
+ elif code < e.code:
+ return
+
+ def find(self, filename, code=None):
+ """Returns a generator that will iterate over matching entries in
+ the B-Tree."""
+ if isinstance(filename, DSStoreEntry):
+ code = filename.code
+ filename = filename.filename
+
+ filename_lc = filename.lower()
+
+ return self._find(self._rootnode, filename_lc, code)
+
+ def __len__(self):
+ return self._records
+
+ def __iter__(self):
+ return self._traverse(self._rootnode)
+
+ class Partial(object):
+ """This is used to implement indexing."""
+ def __init__(self, store, filename):
+ self._store = store
+ self._filename = filename
+
+ def __getitem__(self, code):
+ if code is None:
+ raise KeyError('no such key - [%s][None]' % self._filename)
+
+ try:
+ item = next(self._store.find(self._filename, code))
+ except StopIteration:
+ raise KeyError('no such key - [%s][%s]' % (self._filename,
+ code))
+
+ if not isinstance(item.type, (str, unicode)):
+ return item.value
+
+ return (item.type, item.value)
+
+ def __setitem__(self, code, value):
+ if code is None:
+ raise KeyError('bad key - [%s][None]' % self._filename)
+
+ codec = codecs.get(code, None)
+ if codec:
+ entry_type = codec
+ entry_value = value
+ else:
+ entry_type = value[0]
+ entry_value = value[1]
+
+ self._store.insert(DSStoreEntry(self._filename, code,
+ entry_type, entry_value))
+
+ def __delitem__(self, code):
+ if code is None:
+ raise KeyError('no such key - [%s][None]' % self._filename)
+
+ self._store.delete(self._filename, code)
+
+ def __iter__(self):
+ for item in self._store.find(self._filename):
+ yield item
+
+ def __getitem__(self, filename):
+ return self.Partial(self, filename)
+
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/LICENSE b/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/LICENSE
new file mode 100644
index 000000000..e91f4eb38
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2014 Alastair Houghton
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/__init__.py b/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/__init__.py
new file mode 100644
index 000000000..7eb314107
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/__init__.py
@@ -0,0 +1,27 @@
+from .alias import *
+from .bookmark import *
+
+__all__ = [ 'ALIAS_KIND_FILE', 'ALIAS_KIND_FOLDER',
+ 'ALIAS_HFS_VOLUME_SIGNATURE',
+ 'ALIAS_FIXED_DISK', 'ALIAS_NETWORK_DISK', 'ALIAS_400KB_FLOPPY_DISK',
+ 'ALIAS_800KB_FLOPPY_DISK', 'ALIAS_1_44MB_FLOPPY_DISK',
+ 'ALIAS_EJECTABLE_DISK',
+ 'ALIAS_NO_CNID',
+ 'kBookmarkPath', 'kBookmarkCNIDPath', 'kBookmarkFileProperties',
+ 'kBookmarkFileName', 'kBookmarkFileID', 'kBookmarkFileCreationDate',
+ 'kBookmarkTOCPath', 'kBookmarkVolumePath',
+ 'kBookmarkVolumeURL', 'kBookmarkVolumeName', 'kBookmarkVolumeUUID',
+ 'kBookmarkVolumeSize', 'kBookmarkVolumeCreationDate',
+ 'kBookmarkVolumeProperties', 'kBookmarkContainingFolder',
+ 'kBookmarkUserName', 'kBookmarkUID', 'kBookmarkWasFileReference',
+ 'kBookmarkCreationOptions', 'kBookmarkURLLengths',
+ 'kBookmarkSecurityExtension',
+ 'AppleShareInfo',
+ 'VolumeInfo',
+ 'TargetInfo',
+ 'Alias',
+ 'Bookmark',
+ 'Data',
+ 'URL' ]
+
+
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/alias.py b/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/alias.py
new file mode 100644
index 000000000..b35f4029b
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/alias.py
@@ -0,0 +1,587 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+from __future__ import division
+
+import struct
+import datetime
+import io
+import re
+import os
+import os.path
+import stat
+import sys
+
+if sys.platform == 'darwin':
+ from . import osx
+
+from .utils import *
+
+ALIAS_KIND_FILE = 0
+ALIAS_KIND_FOLDER = 1
+
+ALIAS_HFS_VOLUME_SIGNATURE = b'H+'
+
+ALIAS_FIXED_DISK = 0
+ALIAS_NETWORK_DISK = 1
+ALIAS_400KB_FLOPPY_DISK = 2
+ALIAS_800KB_FLOPPY_DISK = 3
+ALIAS_1_44MB_FLOPPY_DISK = 4
+ALIAS_EJECTABLE_DISK = 5
+
+ALIAS_NO_CNID = 0xffffffff
+
+class AppleShareInfo (object):
+ def __init__(self, zone=None, server=None, user=None):
+ #: The AppleShare zone
+ self.zone = zone
+ #: The AFP server
+ self.server = server
+ #: The username
+ self.user = user
+
+ def __repr__(self):
+ return 'AppleShareInfo(%r,%r,%r)' % (self.zone, self.server, self.user)
+
+class VolumeInfo (object):
+ def __init__(self, name, creation_date, fs_type, disk_type,
+ attribute_flags, fs_id, appleshare_info=None,
+ driver_name=None, posix_path=None, disk_image_alias=None,
+ dialup_info=None, network_mount_info=None):
+ #: The name of the volume on which the target resides
+ self.name = name
+
+ #: The creation date of the target's volume
+ self.creation_date = creation_date
+
+ #: The filesystem type (a two character code, e.g. ``b'H+'`` for HFS+)
+ self.fs_type = fs_type
+
+ #: The type of disk; should be one of
+ #:
+ #: * ALIAS_FIXED_DISK
+ #: * ALIAS_NETWORK_DISK
+ #: * ALIAS_400KB_FLOPPY_DISK
+ #: * ALIAS_800KB_FLOPPY_DISK
+ #: * ALIAS_1_44MB_FLOPPY_DISK
+ #: * ALIAS_EJECTABLE_DISK
+ self.disk_type = disk_type
+
+ #: Filesystem attribute flags (from HFS volume header)
+ self.attribute_flags = attribute_flags
+
+ #: Filesystem identifier
+ self.fs_id = fs_id
+
+ #: AppleShare information (for automatic remounting of network shares)
+ #: *(optional)*
+ self.appleshare_info = appleshare_info
+
+ #: Driver name (*probably* contains a disk driver name on older Macs)
+ #: *(optional)*
+ self.driver_name = driver_name
+
+ #: POSIX path of the mount point of the target's volume
+ #: *(optional)*
+ self.posix_path = posix_path
+
+ #: :class:`Alias` object pointing at the disk image on which the
+ #: target's volume resides *(optional)*
+ self.disk_image_alias = disk_image_alias
+
+ #: Dialup information (for automatic establishment of dialup connections)
+ self.dialup_info = dialup_info
+
+ #: Network mount information (for automatic remounting)
+ self.network_mount_info = network_mount_info
+
+ def __repr__(self):
+ args = ['name', 'creation_date', 'fs_type', 'disk_type',
+ 'attribute_flags', 'fs_id']
+ values = []
+ for a in args:
+ v = getattr(self, a)
+ values.append(repr(v))
+
+ kwargs = ['appleshare_info', 'driver_name', 'posix_path',
+ 'disk_image_alias', 'dialup_info', 'network_mount_info']
+ for a in kwargs:
+ v = getattr(self, a)
+ if v is not None:
+ values.append('%s=%r' % (a, v))
+ return 'VolumeInfo(%s)' % ','.join(values)
+
+class TargetInfo (object):
+ def __init__(self, kind, filename, folder_cnid, cnid, creation_date,
+ creator_code, type_code, levels_from=-1, levels_to=-1,
+ folder_name=None, cnid_path=None, carbon_path=None,
+ posix_path=None, user_home_prefix_len=None):
+ #: Either ALIAS_KIND_FILE or ALIAS_KIND_FOLDER
+ self.kind = kind
+
+ #: The filename of the target
+ self.filename = filename
+
+ #: The CNID (Catalog Node ID) of the target's containing folder;
+ #: CNIDs are similar to but different than traditional UNIX inode
+ #: numbers
+ self.folder_cnid = folder_cnid
+
+ #: The CNID (Catalog Node ID) of the target
+ self.cnid = cnid
+
+ #: The target's *creation* date.
+ self.creation_date = creation_date
+
+ #: The target's Mac creator code (a four-character binary string)
+ self.creator_code = creator_code
+
+ #: The target's Mac type code (a four-character binary string)
+ self.type_code = type_code
+
+ #: The depth of the alias? Always seems to be -1 on OS X.
+ self.levels_from = levels_from
+
+ #: The depth of the target? Always seems to be -1 on OS X.
+ self.levels_to = levels_to
+
+ #: The (POSIX) name of the target's containing folder. *(optional)*
+ self.folder_name = folder_name
+
+ #: The path from the volume root as a sequence of CNIDs. *(optional)*
+ self.cnid_path = cnid_path
+
+ #: The Carbon path of the target *(optional)*
+ self.carbon_path = carbon_path
+
+ #: The POSIX path of the target relative to the volume root. Note
+ #: that this may or may not have a leading '/' character, but it is
+ #: always relative to the containing volume. *(optional)*
+ self.posix_path = posix_path
+
+ #: If the path points into a user's home folder, the number of folders
+ #: deep that we go before we get to that home folder. *(optional)*
+ self.user_home_prefix_len = user_home_prefix_len
+
+ def __repr__(self):
+ args = ['kind', 'filename', 'folder_cnid', 'cnid', 'creation_date',
+ 'creator_code', 'type_code']
+ values = []
+ for a in args:
+ v = getattr(self, a)
+ values.append(repr(v))
+
+ if self.levels_from != -1:
+ values.append('levels_from=%r' % self.levels_from)
+ if self.levels_to != -1:
+ values.append('levels_to=%r' % self.levels_to)
+
+ kwargs = ['folder_name', 'cnid_path', 'carbon_path',
+ 'posix_path', 'user_home_prefix_len']
+ for a in kwargs:
+ v = getattr(self, a)
+ values.append('%s=%r' % (a, v))
+
+ return 'TargetInfo(%s)' % ','.join(values)
+
+TAG_CARBON_FOLDER_NAME = 0
+TAG_CNID_PATH = 1
+TAG_CARBON_PATH = 2
+TAG_APPLESHARE_ZONE = 3
+TAG_APPLESHARE_SERVER_NAME = 4
+TAG_APPLESHARE_USERNAME = 5
+TAG_DRIVER_NAME = 6
+TAG_NETWORK_MOUNT_INFO = 9
+TAG_DIALUP_INFO = 10
+TAG_UNICODE_FILENAME = 14
+TAG_UNICODE_VOLUME_NAME = 15
+TAG_HIGH_RES_VOLUME_CREATION_DATE = 16
+TAG_HIGH_RES_CREATION_DATE = 17
+TAG_POSIX_PATH = 18
+TAG_POSIX_PATH_TO_MOUNTPOINT = 19
+TAG_RECURSIVE_ALIAS_OF_DISK_IMAGE = 20
+TAG_USER_HOME_LENGTH_PREFIX = 21
+
+class Alias (object):
+ def __init__(self, appinfo=b'\0\0\0\0', version=2, volume=None,
+ target=None, extra=[]):
+ """Construct a new :class:`Alias` object with the specified
+ contents."""
+
+ #: Application specific information (four byte byte-string)
+ self.appinfo = appinfo
+
+ #: Version (we support only version 2)
+ self.version = version
+
+ #: A :class:`VolumeInfo` object describing the target's volume
+ self.volume = volume
+
+ #: A :class:`TargetInfo` object describing the target
+ self.target = target
+
+ #: A list of extra `(tag, value)` pairs
+ self.extra = list(extra)
+
+ @classmethod
+ def _from_fd(cls, b):
+ appinfo, recsize, version = struct.unpack(b'>4shh', b.read(8))
+
+ if recsize < 150:
+ raise ValueError('Incorrect alias length')
+
+ if version != 2:
+ raise ValueError('Unsupported alias version %u' % version)
+
+ kind, volname, voldate, fstype, disktype, \
+ folder_cnid, filename, cnid, crdate, creator_code, type_code, \
+ levels_from, levels_to, volattrs, volfsid, reserved = \
+ struct.unpack(b'>h28pI2shI64pII4s4shhI2s10s', b.read(142))
+
+ voldate = mac_epoch + datetime.timedelta(seconds=voldate)
+ crdate = mac_epoch + datetime.timedelta(seconds=crdate)
+
+ alias = Alias()
+ alias.appinfo = appinfo
+
+ alias.volume = VolumeInfo (volname.replace('/',':'),
+ voldate, fstype, disktype,
+ volattrs, volfsid)
+ alias.target = TargetInfo (kind, filename.replace('/',':'),
+ folder_cnid, cnid,
+ crdate, creator_code, type_code)
+ alias.target.levels_from = levels_from
+ alias.target.levels_to = levels_to
+
+ tag = struct.unpack(b'>h', b.read(2))[0]
+
+ while tag != -1:
+ length = struct.unpack(b'>h', b.read(2))[0]
+ value = b.read(length)
+ if length & 1:
+ b.read(1)
+
+ if tag == TAG_CARBON_FOLDER_NAME:
+ alias.target.folder_name = value.replace('/',':')
+ elif tag == TAG_CNID_PATH:
+ alias.target.cnid_path = struct.unpack(b'>%uI' % (length // 4),
+ value)
+ elif tag == TAG_CARBON_PATH:
+ alias.target.carbon_path = value
+ elif tag == TAG_APPLESHARE_ZONE:
+ if alias.volume.appleshare_info is None:
+ alias.volume.appleshare_info = AppleShareInfo()
+ alias.volume.appleshare_info.zone = value
+ elif tag == TAG_APPLESHARE_SERVER_NAME:
+ if alias.volume.appleshare_info is None:
+ alias.volume.appleshare_info = AppleShareInfo()
+ alias.volume.appleshare_info.server = value
+ elif tag == TAG_APPLESHARE_USERNAME:
+ if alias.volume.appleshare_info is None:
+ alias.volume.appleshare_info = AppleShareInfo()
+ alias.volume.appleshare_info.user = value
+ elif tag == TAG_DRIVER_NAME:
+ alias.volume.driver_name = value
+ elif tag == TAG_NETWORK_MOUNT_INFO:
+ alias.volume.network_mount_info = value
+ elif tag == TAG_DIALUP_INFO:
+ alias.volume.dialup_info = value
+ elif tag == TAG_UNICODE_FILENAME:
+ alias.target.filename = value[2:].decode('utf-16be')
+ elif tag == TAG_UNICODE_VOLUME_NAME:
+ alias.volume.name = value[2:].decode('utf-16be')
+ elif tag == TAG_HIGH_RES_VOLUME_CREATION_DATE:
+ seconds = struct.unpack(b'>Q', value)[0] / 65536.0
+ alias.volume.creation_date \
+ = mac_epoch + datetime.timedelta(seconds=seconds)
+ elif tag == TAG_HIGH_RES_CREATION_DATE:
+ seconds = struct.unpack(b'>Q', value)[0] / 65536.0
+ alias.target.creation_date \
+ = mac_epoch + datetime.timedelta(seconds=seconds)
+ elif tag == TAG_POSIX_PATH:
+ alias.target.posix_path = value
+ elif tag == TAG_POSIX_PATH_TO_MOUNTPOINT:
+ alias.volume.posix_path = value
+ elif tag == TAG_RECURSIVE_ALIAS_OF_DISK_IMAGE:
+ alias.volume.disk_image_alias = Alias.from_bytes(value)
+ elif tag == TAG_USER_HOME_LENGTH_PREFIX:
+ alias.target.user_home_prefix_len = struct.unpack(b'>h', value)[0]
+ else:
+ alias.extra.append((tag, value))
+
+ tag = struct.unpack(b'>h', b.read(2))[0]
+
+ return alias
+
+ @classmethod
+ def from_bytes(cls, bytes):
+ """Construct an :class:`Alias` object given binary Alias data."""
+ with io.BytesIO(bytes) as b:
+ return cls._from_fd(b)
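+
+    # Round-trip sketch (assumes `raw' holds a version-2 alias record):
+    #
+    #     a = Alias.from_bytes(raw)
+    #     assert Alias.from_bytes(a.to_bytes()).target.filename \
+    #         == a.target.filename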
+
+ @classmethod
+ def for_file(cls, path):
+ """Create an :class:`Alias` that points at the specified file."""
+ if sys.platform != 'darwin':
+ raise Exception('Not implemented (requires special support)')
+
+ a = Alias()
+
+ # Find the filesystem
+ st = osx.statfs(path)
+ vol_path = st.f_mntonname
+
+ # Grab its attributes
+ attrs = [osx.ATTR_CMN_CRTIME,
+ osx.ATTR_VOL_NAME,
+ 0, 0, 0]
+ volinfo = osx.getattrlist(vol_path, attrs, 0)
+
+ vol_crtime = volinfo[0]
+ vol_name = volinfo[1]
+
+ # Also grab various attributes of the file
+ attrs = [(osx.ATTR_CMN_OBJTYPE
+ | osx.ATTR_CMN_CRTIME
+ | osx.ATTR_CMN_FNDRINFO
+ | osx.ATTR_CMN_FILEID
+ | osx.ATTR_CMN_PARENTID), 0, 0, 0, 0]
+ info = osx.getattrlist(path, attrs, osx.FSOPT_NOFOLLOW)
+
+ if info[0] == osx.VDIR:
+ kind = ALIAS_KIND_FOLDER
+ else:
+ kind = ALIAS_KIND_FILE
+
+ cnid = info[3]
+ folder_cnid = info[4]
+
+ dirname, filename = os.path.split(path)
+
+ if dirname == '' or dirname == '.':
+ dirname = os.getcwd()
+
+ foldername = os.path.basename(dirname)
+
+ creation_date = info[1]
+
+ if kind == ALIAS_KIND_FILE:
+ creator_code = struct.pack(b'I', info[2].fileInfo.fileCreator)
+ type_code = struct.pack(b'I', info[2].fileInfo.fileType)
+ else:
+ creator_code = b'\0\0\0\0'
+ type_code = b'\0\0\0\0'
+
+ a.target = TargetInfo(kind, filename, folder_cnid, cnid, creation_date,
+ creator_code, type_code)
+ a.volume = VolumeInfo(vol_name, vol_crtime, b'H+',
+ ALIAS_FIXED_DISK, 0, b'\0\0')
+
+ a.target.folder_name = foldername
+ a.volume.posix_path = vol_path
+
+ rel_path = os.path.relpath(path, vol_path)
+
+ # Leave off the initial '/' if vol_path is '/' (no idea why)
+ if vol_path == '/':
+ a.target.posix_path = rel_path
+ else:
+ a.target.posix_path = '/' + rel_path
+
+ # Construct the Carbon and CNID paths
+ carbon_path = []
+ cnid_path = []
+ head, tail = os.path.split(rel_path)
+ if not tail:
+ head, tail = os.path.split(head)
+ while head or tail:
+ if head:
+ attrs = [osx.ATTR_CMN_FILEID, 0, 0, 0, 0]
+ info = osx.getattrlist(os.path.join(vol_path, head), attrs, 0)
+ cnid_path.append(info[0])
+ carbon_tail = tail.replace(':','/')
+ carbon_path.insert(0, carbon_tail)
+ head, tail = os.path.split(head)
+ carbon_path = vol_name + ':' + ':\0'.join(carbon_path)
+
+ a.target.carbon_path = carbon_path
+ a.target.cnid_path = cnid_path
+
+ return a
+
+ def _to_fd(self, b):
+ # We'll come back and fix the length when we're done
+ pos = b.tell()
+ b.write(struct.pack(b'>4shh', self.appinfo, 0, self.version))
+
+ carbon_volname = self.volume.name.replace(':','/').encode('utf-8')
+ carbon_filename = self.target.filename.replace(':','/').encode('utf-8')
+ voldate = (self.volume.creation_date - mac_epoch).total_seconds()
+ crdate = (self.target.creation_date - mac_epoch).total_seconds()
+
+        # NOTE: crdate should be in local time, but that's system dependent
+        # (so storing it that way would be ridiculous, and nothing can
+        # rely on it anyway).
+ b.write(struct.pack(b'>h28pI2shI64pII4s4shhI2s10s',
+ self.target.kind,
+ carbon_volname, voldate,
+ self.volume.fs_type,
+ self.volume.disk_type,
+ self.target.folder_cnid,
+ carbon_filename,
+ self.target.cnid,
+ crdate,
+ self.target.creator_code,
+ self.target.type_code,
+ self.target.levels_from,
+ self.target.levels_to,
+ self.volume.attribute_flags,
+ self.volume.fs_id,
+ b'\0'*10))
+
+ # Excuse the odd order; we're copying Finder
+ if self.target.folder_name:
+ carbon_foldername = self.target.folder_name.replace(':','/')\
+ .encode('utf-8')
+ b.write(struct.pack(b'>hh', TAG_CARBON_FOLDER_NAME,
+ len(carbon_foldername)))
+ b.write(carbon_foldername)
+ if len(carbon_foldername) & 1:
+ b.write(b'\0')
+
+ b.write(struct.pack(b'>hhQhhQ',
+ TAG_HIGH_RES_VOLUME_CREATION_DATE,
+ 8, long(voldate * 65536),
+ TAG_HIGH_RES_CREATION_DATE,
+ 8, long(crdate * 65536)))
+
+ if self.target.cnid_path:
+ cnid_path = struct.pack(b'>%uI' % len(self.target.cnid_path),
+ *self.target.cnid_path)
+ b.write(struct.pack(b'>hh', TAG_CNID_PATH,
+ len(cnid_path)))
+ b.write(cnid_path)
+
+ if self.target.carbon_path:
+ carbon_path=self.target.carbon_path.encode('utf-8')
+ b.write(struct.pack(b'>hh', TAG_CARBON_PATH,
+ len(carbon_path)))
+ b.write(carbon_path)
+ if len(carbon_path) & 1:
+ b.write(b'\0')
+
+ if self.volume.appleshare_info:
+ ai = self.volume.appleshare_info
+ if ai.zone:
+ b.write(struct.pack(b'>hh', TAG_APPLESHARE_ZONE,
+ len(ai.zone)))
+ b.write(ai.zone)
+ if len(ai.zone) & 1:
+ b.write(b'\0')
+ if ai.server:
+ b.write(struct.pack(b'>hh', TAG_APPLESHARE_SERVER_NAME,
+ len(ai.server)))
+ b.write(ai.server)
+ if len(ai.server) & 1:
+ b.write(b'\0')
+            if ai.user:
+                b.write(struct.pack(b'>hh', TAG_APPLESHARE_USERNAME,
+                                    len(ai.user)))
+                b.write(ai.user)
+                if len(ai.user) & 1:
+                    b.write(b'\0')
+
+ if self.volume.driver_name:
+ driver_name = self.volume.driver_name.encode('utf-8')
+ b.write(struct.pack(b'>hh', TAG_DRIVER_NAME,
+ len(driver_name)))
+ b.write(driver_name)
+ if len(driver_name) & 1:
+ b.write(b'\0')
+
+ if self.volume.network_mount_info:
+ b.write(struct.pack(b'>hh', TAG_NETWORK_MOUNT_INFO,
+ len(self.volume.network_mount_info)))
+ b.write(self.volume.network_mount_info)
+ if len(self.volume.network_mount_info) & 1:
+ b.write(b'\0')
+
+ if self.volume.dialup_info:
+            b.write(struct.pack(b'>hh', TAG_DIALUP_INFO,
+                                len(self.volume.dialup_info)))
+            b.write(self.volume.dialup_info)
+            if len(self.volume.dialup_info) & 1:
+                b.write(b'\0')
+
+ utf16 = self.target.filename.replace(':','/').encode('utf-16-be')
+ b.write(struct.pack(b'>hhh', TAG_UNICODE_FILENAME,
+ len(utf16) + 2,
+ len(utf16) // 2))
+ b.write(utf16)
+
+ utf16 = self.volume.name.replace(':','/').encode('utf-16-be')
+ b.write(struct.pack(b'>hhh', TAG_UNICODE_VOLUME_NAME,
+ len(utf16) + 2,
+ len(utf16) // 2))
+ b.write(utf16)
+
+ if self.target.posix_path:
+ posix_path = self.target.posix_path.encode('utf-8')
+ b.write(struct.pack(b'>hh', TAG_POSIX_PATH,
+ len(posix_path)))
+ b.write(posix_path)
+ if len(posix_path) & 1:
+ b.write(b'\0')
+
+ if self.volume.posix_path:
+ posix_path = self.volume.posix_path.encode('utf-8')
+ b.write(struct.pack(b'>hh', TAG_POSIX_PATH_TO_MOUNTPOINT,
+ len(posix_path)))
+ b.write(posix_path)
+ if len(posix_path) & 1:
+ b.write(b'\0')
+
+ if self.volume.disk_image_alias:
+ d = self.volume.disk_image_alias.to_bytes()
+ b.write(struct.pack(b'>hh', TAG_RECURSIVE_ALIAS_OF_DISK_IMAGE,
+ len(d)))
+ b.write(d)
+ if len(d) & 1:
+ b.write(b'\0')
+
+ if self.target.user_home_prefix_len is not None:
+ b.write(struct.pack(b'>hhh', TAG_USER_HOME_LENGTH_PREFIX,
+ 2, self.target.user_home_prefix_len))
+
+ for t,v in self.extra:
+ b.write(struct.pack(b'>hh', t, len(v)))
+ b.write(v)
+ if len(v) & 1:
+ b.write(b'\0')
+
+ b.write(struct.pack(b'>hh', -1, 0))
+
+ blen = b.tell() - pos
+ b.seek(pos + 4, os.SEEK_SET)
+ b.write(struct.pack(b'>h', blen))
+
+ def to_bytes(self):
+ """Returns the binary representation for this :class:`Alias`."""
+ with io.BytesIO() as b:
+ self._to_fd(b)
+ return b.getvalue()
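+
+    # Sketch (macOS only; hypothetical path): the bytes returned by
+    # Alias.for_file('/Applications').to_bytes() parse back via from_bytes().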
+
+ def __str__(self):
+ return '<Alias target=%s>' % self.target.filename
+
+ def __repr__(self):
+ values = []
+ if self.appinfo != b'\0\0\0\0':
+ values.append('appinfo=%r' % self.appinfo)
+ if self.version != 2:
+ values.append('version=%r' % self.version)
+ if self.volume is not None:
+ values.append('volume=%r' % self.volume)
+ if self.target is not None:
+ values.append('target=%r' % self.target)
+ if self.extra:
+ values.append('extra=%r' % self.extra)
+ return 'Alias(%s)' % ','.join(values)
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/bookmark.py b/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/bookmark.py
new file mode 100644
index 000000000..58a76a6e1
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/bookmark.py
@@ -0,0 +1,647 @@
+# -*- coding: utf-8 -*-
+#
+# This file implements the Apple "bookmark" format, which is the replacement
+# for the old-fashioned alias format. The details of this format were
+# reverse engineered; some things are still not entirely clear.
+#
+from __future__ import unicode_literals
+
+import struct
+import uuid
+import datetime
+import os
+import sys
+import pprint
+
+from urlparse import urljoin
+
+if sys.platform == 'darwin':
+ from . import osx
+
+from .utils import *
+
+BMK_DATA_TYPE_MASK = 0xffffff00
+BMK_DATA_SUBTYPE_MASK = 0x000000ff
+
+BMK_STRING = 0x0100
+BMK_DATA = 0x0200
+BMK_NUMBER = 0x0300
+BMK_DATE = 0x0400
+BMK_BOOLEAN = 0x0500
+BMK_ARRAY = 0x0600
+BMK_DICT = 0x0700
+BMK_UUID = 0x0800
+BMK_URL = 0x0900
+BMK_NULL = 0x0a00
+
+BMK_ST_ZERO = 0x0000
+BMK_ST_ONE = 0x0001
+
+BMK_BOOLEAN_ST_FALSE = 0x0000
+BMK_BOOLEAN_ST_TRUE = 0x0001
+
+# Subtypes for BMK_NUMBER are really CFNumberType values
+kCFNumberSInt8Type = 1
+kCFNumberSInt16Type = 2
+kCFNumberSInt32Type = 3
+kCFNumberSInt64Type = 4
+kCFNumberFloat32Type = 5
+kCFNumberFloat64Type = 6
+kCFNumberCharType = 7
+kCFNumberShortType = 8
+kCFNumberIntType = 9
+kCFNumberLongType = 10
+kCFNumberLongLongType = 11
+kCFNumberFloatType = 12
+kCFNumberDoubleType = 13
+kCFNumberCFIndexType = 14
+kCFNumberNSIntegerType = 15
+kCFNumberCGFloatType = 16
+
+# Resource property flags (from CFURLPriv.h)
+kCFURLResourceIsRegularFile = 0x00000001
+kCFURLResourceIsDirectory = 0x00000002
+kCFURLResourceIsSymbolicLink = 0x00000004
+kCFURLResourceIsVolume = 0x00000008
+kCFURLResourceIsPackage = 0x00000010
+kCFURLResourceIsSystemImmutable = 0x00000020
+kCFURLResourceIsUserImmutable = 0x00000040
+kCFURLResourceIsHidden = 0x00000080
+kCFURLResourceHasHiddenExtension = 0x00000100
+kCFURLResourceIsApplication = 0x00000200
+kCFURLResourceIsCompressed = 0x00000400
+kCFURLResourceIsSystemCompressed = 0x00000400
+kCFURLCanSetHiddenExtension = 0x00000800
+kCFURLResourceIsReadable = 0x00001000
+kCFURLResourceIsWriteable = 0x00002000
+kCFURLResourceIsExecutable = 0x00004000
+kCFURLIsAliasFile = 0x00008000
+kCFURLIsMountTrigger = 0x00010000
+
+# Volume property flags (from CFURLPriv.h)
+kCFURLVolumeIsLocal = 0x1 #
+kCFURLVolumeIsAutomount = 0x2 #
+kCFURLVolumeDontBrowse = 0x4 #
+kCFURLVolumeIsReadOnly = 0x8 #
+kCFURLVolumeIsQuarantined = 0x10
+kCFURLVolumeIsEjectable = 0x20 #
+kCFURLVolumeIsRemovable = 0x40 #
+kCFURLVolumeIsInternal = 0x80 #
+kCFURLVolumeIsExternal = 0x100 #
+kCFURLVolumeIsDiskImage = 0x200 #
+kCFURLVolumeIsFileVault = 0x400
+kCFURLVolumeIsLocaliDiskMirror = 0x800
+kCFURLVolumeIsiPod = 0x1000 #
+kCFURLVolumeIsiDisk = 0x2000
+kCFURLVolumeIsCD = 0x4000
+kCFURLVolumeIsDVD = 0x8000
+kCFURLVolumeIsDeviceFileSystem = 0x10000
+kCFURLVolumeSupportsPersistentIDs = 0x100000000
+kCFURLVolumeSupportsSearchFS = 0x200000000
+kCFURLVolumeSupportsExchange = 0x400000000
+# reserved 0x800000000
+kCFURLVolumeSupportsSymbolicLinks = 0x1000000000
+kCFURLVolumeSupportsDenyModes = 0x2000000000
+kCFURLVolumeSupportsCopyFile = 0x4000000000
+kCFURLVolumeSupportsReadDirAttr = 0x8000000000
+kCFURLVolumeSupportsJournaling = 0x10000000000
+kCFURLVolumeSupportsRename = 0x20000000000
+kCFURLVolumeSupportsFastStatFS = 0x40000000000
+kCFURLVolumeSupportsCaseSensitiveNames = 0x80000000000
+kCFURLVolumeSupportsCasePreservedNames = 0x100000000000
+kCFURLVolumeSupportsFLock = 0x200000000000
+kCFURLVolumeHasNoRootDirectoryTimes = 0x400000000000
+kCFURLVolumeSupportsExtendedSecurity = 0x800000000000
+kCFURLVolumeSupports2TBFileSize = 0x1000000000000
+kCFURLVolumeSupportsHardLinks = 0x2000000000000
+kCFURLVolumeSupportsMandatoryByteRangeLocks = 0x4000000000000
+kCFURLVolumeSupportsPathFromID = 0x8000000000000
+# reserved 0x10000000000000
+kCFURLVolumeIsJournaling = 0x20000000000000
+kCFURLVolumeSupportsSparseFiles = 0x40000000000000
+kCFURLVolumeSupportsZeroRuns = 0x80000000000000
+kCFURLVolumeSupportsVolumeSizes = 0x100000000000000
+kCFURLVolumeSupportsRemoteEvents = 0x200000000000000
+kCFURLVolumeSupportsHiddenFiles = 0x400000000000000
+kCFURLVolumeSupportsDecmpFSCompression = 0x800000000000000
+kCFURLVolumeHas64BitObjectIDs = 0x1000000000000000
+kCFURLVolumePropertyFlagsAll = 0xffffffffffffffff
+
+BMK_URL_ST_ABSOLUTE = 0x0001
+BMK_URL_ST_RELATIVE = 0x0002
+
+# Bookmark keys
+# = 0x1003
+kBookmarkPath = 0x1004 # Array of path components
+kBookmarkCNIDPath = 0x1005 # Array of CNIDs
+kBookmarkFileProperties = 0x1010 # (CFURL rp flags,
+ # CFURL rp flags asked for,
+ # 8 bytes NULL)
+kBookmarkFileName = 0x1020
+kBookmarkFileID = 0x1030
+kBookmarkFileCreationDate = 0x1040
+# = 0x1054 # ?
+# = 0x1055 # ?
+# = 0x1056 # ?
+# = 0x1101 # ?
+# = 0x1102 # ?
+kBookmarkTOCPath = 0x2000 # A list of (TOC id, ?) pairs
+kBookmarkVolumePath = 0x2002
+kBookmarkVolumeURL = 0x2005
+kBookmarkVolumeName = 0x2010
+kBookmarkVolumeUUID = 0x2011 # Stored (perversely) as a string
+kBookmarkVolumeSize = 0x2012
+kBookmarkVolumeCreationDate = 0x2013
+kBookmarkVolumeProperties = 0x2020 # (CFURL vp flags,
+ # CFURL vp flags asked for,
+ # 8 bytes NULL)
+kBookmarkVolumeIsRoot = 0x2030 # True if volume is FS root
+kBookmarkVolumeBookmark = 0x2040 # Embedded bookmark for disk image (TOC id)
+kBookmarkVolumeMountPoint = 0x2050 # A URL
+# = 0x2070
+kBookmarkContainingFolder = 0xc001 # Index of containing folder in path
+kBookmarkUserName = 0xc011 # User that created bookmark
+kBookmarkUID = 0xc012 # UID that created bookmark
+kBookmarkWasFileReference = 0xd001 # True if the URL was a file reference
+kBookmarkCreationOptions = 0xd010
+kBookmarkURLLengths = 0xe003 # See below
+# = 0xf017 # Localized name?
+# = 0xf022
+kBookmarkSecurityExtension = 0xf080
+# = 0xf081
+
+# kBookmarkURLLengths is an array that is set if the URL encoded by the
+# bookmark had a base URL; in that case, each entry is the length of the
+# base URL in question. Thus a URL
+#
+# file:///foo/bar/baz blam/blat.html
+#
+# will result in [3, 2], while the URL
+#
+# file:///foo bar/baz blam blat.html
+#
+# would result in [1, 2, 1, 1]
+
+
+class Data (object):
+ def __init__(self, bytedata=None):
+ #: The bytes, stored as a byte string
+ self.bytes = bytes(bytedata)
+
+ def __repr__(self):
+ return 'Data(%r)' % self.bytes
+
+class URL (object):
+ def __init__(self, base, rel=None):
+ if rel is not None:
+ #: The base URL, if any (a :class:`URL`)
+ self.base = base
+ #: The rest of the URL (a string)
+ self.relative = rel
+ else:
+ self.base = None
+ self.relative = base
+
+ @property
+ def absolute(self):
+ """Return an absolute URL."""
+ if self.base is None:
+ return self.relative
+ else:
+            return urljoin(self.base.absolute, self.relative)
+
+ def __repr__(self):
+ return 'URL(%r)' % self.absolute
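+
+# Example (a sketch): URL(URL('file:///foo/bar/'), 'baz blam/blat.html')
+# has .absolute == 'file:///foo/bar/baz blam/blat.html', per urljoin.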
+
+class Bookmark (object):
+ def __init__(self, tocs=None):
+ if tocs is None:
+ #: The TOCs for this Bookmark
+ self.tocs = []
+ else:
+ self.tocs = tocs
+
+ @classmethod
+ def _get_item(cls, data, hdrsize, offset):
+ offset += hdrsize
+ if offset > len(data) - 8:
+ raise ValueError('Offset out of range')
+
+ length,typecode = struct.unpack(b'<II', data[offset:offset+8])
+
+ if len(data) - offset < 8 + length:
+ raise ValueError('Data item truncated')
+
+ databytes = data[offset+8:offset+8+length]
+
+ dsubtype = typecode & BMK_DATA_SUBTYPE_MASK
+ dtype = typecode & BMK_DATA_TYPE_MASK
+
+ if dtype == BMK_STRING:
+ return databytes.decode('utf-8')
+ elif dtype == BMK_DATA:
+ return Data(databytes)
+ elif dtype == BMK_NUMBER:
+ if dsubtype == kCFNumberSInt8Type:
+ return ord(databytes[0])
+ elif dsubtype == kCFNumberSInt16Type:
+ return struct.unpack(b'<h', databytes)[0]
+ elif dsubtype == kCFNumberSInt32Type:
+ return struct.unpack(b'<i', databytes)[0]
+ elif dsubtype == kCFNumberSInt64Type:
+ return struct.unpack(b'<q', databytes)[0]
+ elif dsubtype == kCFNumberFloat32Type:
+ return struct.unpack(b'<f', databytes)[0]
+ elif dsubtype == kCFNumberFloat64Type:
+ return struct.unpack(b'<d', databytes)[0]
+ elif dtype == BMK_DATE:
+ # Yes, dates really are stored as *BIG-endian* doubles; everything
+ # else is little-endian
+ secs = datetime.timedelta(seconds=struct.unpack(b'>d', databytes)[0])
+ return osx_epoch + secs
+ elif dtype == BMK_BOOLEAN:
+ if dsubtype == BMK_BOOLEAN_ST_TRUE:
+ return True
+ elif dsubtype == BMK_BOOLEAN_ST_FALSE:
+ return False
+ elif dtype == BMK_UUID:
+ return uuid.UUID(bytes=databytes)
+ elif dtype == BMK_URL:
+ if dsubtype == BMK_URL_ST_ABSOLUTE:
+ return URL(databytes.decode('utf-8'))
+ elif dsubtype == BMK_URL_ST_RELATIVE:
+ baseoff,reloff = struct.unpack(b'<II', databytes)
+ base = cls._get_item(data, hdrsize, baseoff)
+ rel = cls._get_item(data, hdrsize, reloff)
+ return URL(base, rel)
+ elif dtype == BMK_ARRAY:
+ result = []
+ for aoff in xrange(offset+8,offset+8+length,4):
+ eltoff, = struct.unpack(b'<I', data[aoff:aoff+4])
+ result.append(cls._get_item(data, hdrsize, eltoff))
+ return result
+ elif dtype == BMK_DICT:
+ result = {}
+ for eoff in xrange(offset+8,offset+8+length,8):
+ keyoff,valoff = struct.unpack(b'<II', data[eoff:eoff+8])
+ key = cls._get_item(data, hdrsize, keyoff)
+ val = cls._get_item(data, hdrsize, valoff)
+ result[key] = val
+ return result
+ elif dtype == BMK_NULL:
+ return None
+
+        print('Unknown data type %08x' % typecode)
+ return (typecode, databytes)
+
+ @classmethod
+ def from_bytes(cls, data):
+ """Create a :class:`Bookmark` given byte data."""
+
+ if len(data) < 16:
+ raise ValueError('Not a bookmark file (too short)')
+
+ magic,size,dummy,hdrsize = struct.unpack(b'<4sIII', data[0:16])
+
+        if magic != b'book':
+ raise ValueError('Not a bookmark file (bad magic)')
+
+ if hdrsize < 16:
+ raise ValueError('Not a bookmark file (header size too short)')
+
+ if hdrsize > size:
+ raise ValueError('Not a bookmark file (header size too large)')
+
+ if size != len(data):
+ raise ValueError('Not a bookmark file (truncated)')
+
+ tocoffset, = struct.unpack(b'<I', data[hdrsize:hdrsize+4])
+
+ tocs = []
+
+ while tocoffset != 0:
+ tocbase = hdrsize + tocoffset
+ if tocoffset > size - hdrsize \
+ or size - tocbase < 20:
+ raise ValueError('TOC offset out of range')
+
+ tocsize,tocmagic,tocid,nexttoc,toccount \
+ = struct.unpack(b'<IIIII',
+ data[tocbase:tocbase+20])
+
+ if tocmagic != 0xfffffffe:
+ break
+
+ tocsize += 8
+
+ if size - tocbase < tocsize:
+ raise ValueError('TOC truncated')
+
+ if tocsize < 12 * toccount:
+ raise ValueError('TOC entries overrun TOC size')
+
+ toc = {}
+ for n in xrange(0,toccount):
+ ebase = tocbase + 20 + 12 * n
+ eid,eoffset,edummy = struct.unpack(b'<III',
+ data[ebase:ebase+12])
+
+ if eid & 0x80000000:
+ eid = cls._get_item(data, hdrsize, eid & 0x7fffffff)
+
+ toc[eid] = cls._get_item(data, hdrsize, eoffset)
+
+ tocs.append((tocid, toc))
+
+ tocoffset = nexttoc
+
+ return cls(tocs)
+
+ def __getitem__(self, key):
+ for tid,toc in self.tocs:
+ if key in toc:
+ return toc[key]
+ raise KeyError('Key not found')
+
+ def __setitem__(self, key, value):
+ if len(self.tocs) == 0:
+ self.tocs = [(1, {})]
+ self.tocs[0][1][key] = value
+
+ def get(self, key, default=None):
+ """Lookup the value for a given key, returning a default if not
+ present."""
+ for tid,toc in self.tocs:
+ if key in toc:
+ return toc[key]
+ return default
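+
+    # Usage sketch (assumes `data' holds the raw bytes of an Apple
+    # bookmark record):
+    #
+    #     bm = Bookmark.from_bytes(data)
+    #     print(bm.get(kBookmarkPath, default=None))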
+
+ @classmethod
+ def _encode_item(cls, item, offset):
+ if item is True:
+ result = struct.pack(b'<II', 0, BMK_BOOLEAN | BMK_BOOLEAN_ST_TRUE)
+ elif item is False:
+ result = struct.pack(b'<II', 0, BMK_BOOLEAN | BMK_BOOLEAN_ST_FALSE)
+ elif isinstance(item, unicode):
+ encoded = item.encode('utf-8')
+ result = (struct.pack(b'<II', len(encoded), BMK_STRING | BMK_ST_ONE)
+ + encoded)
+ elif isinstance(item, bytes):
+ result = (struct.pack(b'<II', len(item), BMK_STRING | BMK_ST_ONE)
+ + item)
+ elif isinstance(item, Data):
+ result = (struct.pack(b'<II', len(item.bytes),
+ BMK_DATA | BMK_ST_ONE)
+ + bytes(item.bytes))
+ elif isinstance(item, bytearray):
+ result = (struct.pack(b'<II', len(item),
+ BMK_DATA | BMK_ST_ONE)
+ + bytes(item))
+ elif isinstance(item, int) or isinstance(item, long):
+ if item > -0x80000000 and item < 0x7fffffff:
+ result = struct.pack(b'<IIi', 4,
+ BMK_NUMBER | kCFNumberSInt32Type, item)
+ else:
+ result = struct.pack(b'<IIq', 8,
+ BMK_NUMBER | kCFNumberSInt64Type, item)
+ elif isinstance(item, float):
+ result = struct.pack(b'<IId', 8,
+ BMK_NUMBER | kCFNumberFloat64Type, item)
+ elif isinstance(item, datetime.datetime):
+ secs = item - osx_epoch
+ result = struct.pack(b'<II', 8, BMK_DATE | BMK_ST_ZERO) \
+ + struct.pack(b'>d', float(secs.total_seconds()))
+ elif isinstance(item, uuid.UUID):
+ result = struct.pack(b'<II', 16, BMK_UUID | BMK_ST_ONE) \
+ + item.bytes
+ elif isinstance(item, URL):
+ if item.base:
+ baseoff = offset + 16
+ reloff, baseenc = cls._encode_item(item.base, baseoff)
+ xoffset, relenc = cls._encode_item(item.relative, reloff)
+ result = b''.join([
+ struct.pack(b'<IIII', 8, BMK_URL | BMK_URL_ST_RELATIVE,
+ baseoff, reloff),
+ baseenc,
+ relenc])
+ else:
+ encoded = item.relative.encode('utf-8')
+ result = struct.pack(b'<II', len(encoded),
+ BMK_URL | BMK_URL_ST_ABSOLUTE) + encoded
+ elif isinstance(item, list):
+ ioffset = offset + 8 + len(item) * 4
+ result = [struct.pack(b'<II', len(item) * 4, BMK_ARRAY | BMK_ST_ONE)]
+ enc = []
+ for elt in item:
+ result.append(struct.pack(b'<I', ioffset))
+ ioffset, ienc = cls._encode_item(elt, ioffset)
+ enc.append(ienc)
+ result = b''.join(result + enc)
+ elif isinstance(item, dict):
+ ioffset = offset + 8 + len(item) * 8
+ result = [struct.pack(b'<II', len(item) * 8, BMK_DICT | BMK_ST_ONE)]
+ enc = []
+ for k,v in item.iteritems():
+ result.append(struct.pack(b'<I', ioffset))
+ ioffset, ienc = cls._encode_item(k, ioffset)
+ enc.append(ienc)
+ result.append(struct.pack(b'<I', ioffset))
+ ioffset, ienc = cls._encode_item(v, ioffset)
+ enc.append(ienc)
+ result = b''.join(result + enc)
+ elif item is None:
+ result = struct.pack(b'<II', 0, BMK_NULL | BMK_ST_ONE)
+ else:
+ raise ValueError('Unknown item type when encoding: %s' % item)
+
+ offset += len(result)
+
+ # Pad to a multiple of 4 bytes
+ if offset & 3:
+ extra = 4 - (offset & 3)
+ result += b'\0' * extra
+ offset += extra
+
+ return (offset, result)
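+
+ # For illustration (not part of the upstream module): a small integer
+ # encodes to a 12-byte record that needs no padding, e.g.
+ # Bookmark._encode_item(1, 0)
+ # == (12, struct.pack(b'<IIi', 4, BMK_NUMBER | kCFNumberSInt32Type, 1))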
+
+ def to_bytes(self):
+ """Convert this :class:`Bookmark` to a byte representation."""
+
+ result = []
+ tocs = []
+ offset = 4 # For the offset to the first TOC
+
+ # Generate the data and build the TOCs
+ for tid,toc in self.tocs:
+ entries = []
+
+ for k,v in toc.iteritems():
+ if isinstance(k, basestring):
+ noffset = offset
+ voffset, enc = self._encode_item(k, offset)
+ result.append(enc)
+ offset, enc = self._encode_item(v, voffset)
+ result.append(enc)
+ entries.append((noffset | 0x80000000, voffset))
+ else:
+ entries.append((k, offset))
+ offset, enc = self._encode_item(v, offset)
+ result.append(enc)
+
+ # TOC entries must be sorted - CoreServicesInternal does a
+ # binary search to find data
+ entries.sort()
+
+ tocs.append((tid, b''.join([struct.pack(b'<III', k, o, 0)
+ for k, o in entries])))
+
+ first_toc_offset = offset
+
+ # Now generate the TOC headers
+ for ndx,toc in enumerate(tocs):
+ tid, data = toc
+ if ndx == len(tocs) - 1:
+ next_offset = 0
+ else:
+ next_offset = offset + 20 + len(data)
+
+ result.append(struct.pack(b'<IIIII', len(data) - 8,
+ 0xfffffffe,
+ tid,
+ next_offset,
+ len(data) // 12))
+ result.append(data)
+
+ offset += 20 + len(data)
+
+ # Finally, add the header (and the first TOC offset, which isn't part
+ # of the header, but goes just after it)
+ header = struct.pack(b'<4sIIIQQQQI', b'book',
+ offset + 48,
+ 0x10040000,
+ 48,
+ 0, 0, 0, 0, first_toc_offset)
+
+ result.insert(0, header)
+
+ return b''.join(result)
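+
+ # For illustration (not part of the upstream module): to_bytes() is
+ # intended as the inverse of the parser above, so
+ # Bookmark.from_bytes(bm.to_bytes())
+ # should yield an equivalent set of TOCs (assuming the upstream name
+ # from_bytes for that classmethod).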
+
+ @classmethod
+ def for_file(cls, path):
+ """Construct a :class:`Bookmark` for a given file."""
+
+ # Find the filesystem
+ st = osx.statfs(path)
+ vol_path = st.f_mntonname
+
+ # Grab its attributes
+ attrs = [osx.ATTR_CMN_CRTIME,
+ osx.ATTR_VOL_SIZE
+ | osx.ATTR_VOL_NAME
+ | osx.ATTR_VOL_UUID,
+ 0, 0, 0]
+ volinfo = osx.getattrlist(vol_path, attrs, 0)
+
+ vol_crtime = volinfo[0]
+ vol_size = volinfo[1]
+ vol_name = volinfo[2]
+ vol_uuid = volinfo[3]
+
+ # Also grab various attributes of the file
+ attrs = [(osx.ATTR_CMN_OBJTYPE
+ | osx.ATTR_CMN_CRTIME
+ | osx.ATTR_CMN_FILEID), 0, 0, 0, 0]
+ info = osx.getattrlist(path, attrs, osx.FSOPT_NOFOLLOW)
+
+ cnid = info[2]
+ crtime = info[1]
+
+ if info[0] == osx.VREG:
+ flags = kCFURLResourceIsRegularFile
+ elif info[0] == osx.VDIR:
+ flags = kCFURLResourceIsDirectory
+ elif info[0] == osx.VLNK:
+ flags = kCFURLResourceIsSymbolicLink
+ else:
+ flags = kCFURLResourceIsRegularFile
+
+ dirname, filename = os.path.split(path)
+
+ relcount = 0
+ if not os.path.isabs(dirname):
+ curdir = os.getcwd()
+ head, tail = os.path.split(curdir)
+ while head and tail:
+ relcount += 1
+ head, tail = os.path.split(head)
+ dirname = os.path.join(curdir, dirname)
+
+ foldername = os.path.basename(dirname)
+
+ rel_path = os.path.relpath(path, vol_path)
+
+ # Build the path arrays
+ name_path = []
+ cnid_path = []
+ head, tail = os.path.split(rel_path)
+ if not tail:
+ head, tail = os.path.split(head)
+ while head or tail:
+ if head:
+ attrs = [osx.ATTR_CMN_FILEID, 0, 0, 0, 0]
+ info = osx.getattrlist(os.path.join(vol_path, head), attrs, 0)
+ cnid_path.insert(0, info[0])
+ head, tail = os.path.split(head)
+ name_path.insert(0, tail)
+ else:
+ head, tail = os.path.split(head)
+ name_path.append(filename)
+ cnid_path.append(cnid)
+
+ url_lengths = [relcount, len(name_path) - relcount]
+
+ fileprops = Data(struct.pack(b'<QQQ', flags, 0x0f, 0))
+ volprops = Data(struct.pack(b'<QQQ', 0x81 | kCFURLVolumeSupportsPersistentIDs,
+ 0x13ef | kCFURLVolumeSupportsPersistentIDs, 0))
+
+ toc = {
+ kBookmarkPath: name_path,
+ kBookmarkCNIDPath: cnid_path,
+ kBookmarkFileCreationDate: crtime,
+ kBookmarkFileProperties: fileprops,
+ kBookmarkContainingFolder: len(name_path) - 2,
+ kBookmarkVolumePath: vol_path,
+ kBookmarkVolumeIsRoot: vol_path == '/',
+ kBookmarkVolumeURL: URL('file://' + vol_path),
+ kBookmarkVolumeName: vol_name,
+ kBookmarkVolumeSize: vol_size,
+ kBookmarkVolumeCreationDate: vol_crtime,
+ kBookmarkVolumeUUID: str(vol_uuid).upper(),
+ kBookmarkVolumeProperties: volprops,
+ kBookmarkCreationOptions: 512,
+ kBookmarkWasFileReference: True,
+ kBookmarkUserName: 'unknown',
+ kBookmarkUID: 99,
+ }
+
+ if relcount:
+ toc[kBookmarkURLLengths] = url_lengths
+
+ return Bookmark([(1, toc)])
+
+ def __repr__(self):
+ result = ['Bookmark([']
+ for tid,toc in self.tocs:
+ result.append('(0x%x, {\n' % tid)
+ for k,v in toc.iteritems():
+ if isinstance(k, basestring):
+ kf = repr(k)
+ else:
+ kf = '0x%04x' % k
+ result.append(' %s: %r\n' % (kf, v))
+ result.append('}),\n')
+ result.append('])')
+
+ return ''.join(result)
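+
+if __name__ == '__main__':
+ # Illustrative smoke test, not part of the upstream module: build a
+ # bookmark for this file, serialize it with to_bytes(), and parse it
+ # back (assuming the parser classmethod above is named from_bytes, as
+ # in upstream mac_alias 2.0.1).
+ import sys
+ bm = Bookmark.for_file(sys.argv[0])
+ print(Bookmark.from_bytes(bm.to_bytes()))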
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/osx.py b/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/osx.py
new file mode 100644
index 000000000..bdd5d09e9
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/osx.py
@@ -0,0 +1,823 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from ctypes import *
+import struct
+import os
+import datetime
+import uuid
+
+from .utils import *
+
+libc = cdll.LoadLibrary('/usr/lib/libc.dylib')
+
+# Constants
+FSOPT_NOFOLLOW = 0x00000001
+FSOPT_NOINMEMUPDATE = 0x00000002
+FSOPT_REPORT_FULLSIZE = 0x00000004
+FSOPT_PACK_INVAL_ATTRS = 0x00000008
+
+VOL_CAPABILITIES_FORMAT = 0
+VOL_CAPABILITIES_INTERFACES = 1
+
+VOL_CAP_FMT_PERSISTENTOBJECTIDS = 0x00000001
+VOL_CAP_FMT_SYMBOLICLINKS = 0x00000002
+VOL_CAP_FMT_HARDLINKS = 0x00000004
+VOL_CAP_FMT_JOURNAL = 0x00000008
+VOL_CAP_FMT_JOURNAL_ACTIVE = 0x00000010
+VOL_CAP_FMT_NO_ROOT_TIMES = 0x00000020
+VOL_CAP_FMT_SPARSE_FILES = 0x00000040
+VOL_CAP_FMT_ZERO_RUNS = 0x00000080
+VOL_CAP_FMT_CASE_SENSITIVE = 0x00000100
+VOL_CAP_FMT_CASE_PRESERVING = 0x00000200
+VOL_CAP_FMT_FAST_STATFS = 0x00000400
+VOL_CAP_FMT_2TB_FILESIZE = 0x00000800
+VOL_CAP_FMT_OPENDENYMODES = 0x00001000
+VOL_CAP_FMT_HIDDEN_FILES = 0x00002000
+VOL_CAP_FMT_PATH_FROM_ID = 0x00004000
+VOL_CAP_FMT_NO_VOLUME_SIZES = 0x00008000
+VOL_CAP_FMT_DECMPFS_COMPRESSION = 0x00010000
+VOL_CAP_FMT_64BIT_OBJECT_IDS = 0x00020000
+
+VOL_CAP_INT_SEARCHFS = 0x00000001
+VOL_CAP_INT_ATTRLIST = 0x00000002
+VOL_CAP_INT_NFSEXPORT = 0x00000004
+VOL_CAP_INT_READDIRATTR = 0x00000008
+VOL_CAP_INT_EXCHANGEDATA = 0x00000010
+VOL_CAP_INT_COPYFILE = 0x00000020
+VOL_CAP_INT_ALLOCATE = 0x00000040
+VOL_CAP_INT_VOL_RENAME = 0x00000080
+VOL_CAP_INT_ADVLOCK = 0x00000100
+VOL_CAP_INT_FLOCK = 0x00000200
+VOL_CAP_INT_EXTENDED_SECURITY = 0x00000400
+VOL_CAP_INT_USERACCESS = 0x00000800
+VOL_CAP_INT_MANLOCK = 0x00001000
+VOL_CAP_INT_NAMEDSTREAMS = 0x00002000
+VOL_CAP_INT_EXTENDED_ATTR = 0x00004000
+
+ATTR_CMN_NAME = 0x00000001
+ATTR_CMN_DEVID = 0x00000002
+ATTR_CMN_FSID = 0x00000004
+ATTR_CMN_OBJTYPE = 0x00000008
+ATTR_CMN_OBJTAG = 0x00000010
+ATTR_CMN_OBJID = 0x00000020
+ATTR_CMN_OBJPERMANENTID = 0x00000040
+ATTR_CMN_PAROBJID = 0x00000080
+ATTR_CMN_SCRIPT = 0x00000100
+ATTR_CMN_CRTIME = 0x00000200
+ATTR_CMN_MODTIME = 0x00000400
+ATTR_CMN_CHGTIME = 0x00000800
+ATTR_CMN_ACCTIME = 0x00001000
+ATTR_CMN_BKUPTIME = 0x00002000
+ATTR_CMN_FNDRINFO = 0x00004000
+ATTR_CMN_OWNERID = 0x00008000
+ATTR_CMN_GRPID = 0x00010000
+ATTR_CMN_ACCESSMASK = 0x00020000
+ATTR_CMN_FLAGS = 0x00040000
+ATTR_CMN_USERACCESS = 0x00200000
+ATTR_CMN_EXTENDED_SECURITY = 0x00400000
+ATTR_CMN_UUID = 0x00800000
+ATTR_CMN_GRPUUID = 0x01000000
+ATTR_CMN_FILEID = 0x02000000
+ATTR_CMN_PARENTID = 0x04000000
+ATTR_CMN_FULLPATH = 0x08000000
+ATTR_CMN_ADDEDTIME = 0x10000000
+ATTR_CMN_RETURNED_ATTRS = 0x80000000
+ATTR_CMN_ALL_ATTRS = 0x9fe7ffff
+
+ATTR_VOL_FSTYPE = 0x00000001
+ATTR_VOL_SIGNATURE = 0x00000002
+ATTR_VOL_SIZE = 0x00000004
+ATTR_VOL_SPACEFREE = 0x00000008
+ATTR_VOL_SPACEAVAIL = 0x00000010
+ATTR_VOL_MINALLOCATION = 0x00000020
+ATTR_VOL_ALLOCATIONCLUMP = 0x00000040
+ATTR_VOL_IOBLOCKSIZE = 0x00000080
+ATTR_VOL_OBJCOUNT = 0x00000100
+ATTR_VOL_FILECOUNT = 0x00000200
+ATTR_VOL_DIRCOUNT = 0x00000400
+ATTR_VOL_MAXOBJCOUNT = 0x00000800
+ATTR_VOL_MOUNTPOINT = 0x00001000
+ATTR_VOL_NAME = 0x00002000
+ATTR_VOL_MOUNTFLAGS = 0x00004000
+ATTR_VOL_MOUNTEDDEVICE = 0x00008000
+ATTR_VOL_ENCODINGSUSED = 0x00010000
+ATTR_VOL_CAPABILITIES = 0x00020000
+ATTR_VOL_UUID = 0x00040000
+ATTR_VOL_ATTRIBUTES = 0x40000000
+ATTR_VOL_INFO = 0x80000000
+ATTR_VOL_ALL_ATTRS = 0xc007ffff
+
+ATTR_DIR_LINKCOUNT = 0x00000001
+ATTR_DIR_ENTRYCOUNT = 0x00000002
+ATTR_DIR_MOUNTSTATUS = 0x00000004
+DIR_MNTSTATUS_MNTPOINT = 0x00000001
+DIR_MNTSTATUS_TRIGGER = 0x00000002
+ATTR_DIR_ALL_ATTRS = 0x00000007
+
+ATTR_FILE_LINKCOUNT = 0x00000001
+ATTR_FILE_TOTALSIZE = 0x00000002
+ATTR_FILE_ALLOCSIZE = 0x00000004
+ATTR_FILE_IOBLOCKSIZE = 0x00000008
+ATTR_FILE_DEVTYPE = 0x00000020
+ATTR_FILE_DATALENGTH = 0x00000200
+ATTR_FILE_DATAALLOCSIZE = 0x00000400
+ATTR_FILE_RSRCLENGTH = 0x00001000
+ATTR_FILE_RSRCALLOCSIZE = 0x00002000
+
+ATTR_FILE_ALL_ATTRS = 0x0000362f
+
+ATTR_FORK_TOTALSIZE = 0x00000001
+ATTR_FORK_ALLOCSIZE = 0x00000002
+ATTR_FORK_ALL_ATTRS = 0x00000003
+
+# These can't be used
+ATTR_FILE_FORKCOUNT = 0x00000080
+ATTR_FILE_FORKLIST = 0x00000100
+ATTR_CMN_NAMEDATTRCOUNT = 0x00080000
+ATTR_CMN_NAMEDATTRLIST = 0x00100000
+ATTR_FILE_DATAEXTENTS = 0x00000800
+ATTR_FILE_RSRCEXTENTS = 0x00004000
+ATTR_FILE_CLUMPSIZE = 0x00000010
+ATTR_FILE_FILETYPE = 0x00000040
+
+class attrlist(Structure):
+ _fields_ = [('bitmapcount', c_ushort),
+ ('reserved', c_ushort),
+ ('commonattr', c_uint),
+ ('volattr', c_uint),
+ ('dirattr', c_uint),
+ ('fileattr', c_uint),
+ ('forkattr', c_uint)]
+
+class attribute_set_t(Structure):
+ _fields_ = [('commonattr', c_uint),
+ ('volattr', c_uint),
+ ('dirattr', c_uint),
+ ('fileattr', c_uint),
+ ('forkattr', c_uint)]
+
+class fsobj_id_t(Structure):
+ _fields_ = [('fid_objno', c_uint),
+ ('fid_generation', c_uint)]
+
+class timespec(Structure):
+ _fields_ = [('tv_sec', c_long),
+ ('tv_nsec', c_long)]
+
+class attrreference_t(Structure):
+ _fields_ = [('attr_dataoffset', c_int),
+ ('attr_length', c_uint)]
+
+class fsid_t(Structure):
+ _fields_ = [('val', c_uint * 2)]
+
+class guid_t(Structure):
+ _fields_ = [('g_guid', c_byte*16)]
+
+class kauth_ace(Structure):
+ _fields_ = [('ace_applicable', guid_t),
+ ('ace_flags', c_uint)]
+
+class kauth_acl(Structure):
+ _fields_ = [('acl_entrycount', c_uint),
+ ('acl_flags', c_uint),
+ ('acl_ace', kauth_ace * 128)]
+
+class kauth_filesec(Structure):
+ _fields_ = [('fsec_magic', c_uint),
+ ('fsec_owner', guid_t),
+ ('fsec_group', guid_t),
+ ('fsec_acl', kauth_acl)]
+
+class diskextent(Structure):
+ _fields_ = [('startblock', c_uint),
+ ('blockcount', c_uint)]
+
+OSType = c_uint
+UInt16 = c_ushort
+SInt16 = c_short
+SInt32 = c_int
+
+class Point(Structure):
+ _fields_ = [('x', SInt16),
+ ('y', SInt16)]
+class Rect(Structure):
+ _fields_ = [('x', SInt16),
+ ('y', SInt16),
+ ('w', SInt16),
+ ('h', SInt16)]
+class FileInfo(Structure):
+ _fields_ = [('fileType', OSType),
+ ('fileCreator', OSType),
+ ('finderFlags', UInt16),
+ ('location', Point),
+ ('reservedField', UInt16),
+ ('reserved1', SInt16 * 4),
+ ('extendedFinderFlags', UInt16),
+ ('reserved2', SInt16),
+ ('putAwayFolderID', SInt32)]
+class FolderInfo(Structure):
+ _fields_ = [('windowBounds', Rect),
+ ('finderFlags', UInt16),
+ ('location', Point),
+ ('reservedField', UInt16),
+ ('scrollPosition', Point),
+ ('reserved1', SInt32),
+ ('extendedFinderFlags', UInt16),
+ ('reserved2', SInt16),
+ ('putAwayFolderID', SInt32)]
+class FinderInfo(Union):
+ _fields_ = [('fileInfo', FileInfo),
+ ('folderInfo', FolderInfo)]
+
+extentrecord = diskextent * 8
+
+vol_capabilities_set_t = c_uint * 4
+
+class vol_capabilities_attr_t(Structure):
+ _fields_ = [('capabilities', vol_capabilities_set_t),
+ ('valid', vol_capabilities_set_t)]
+
+class vol_attributes_attr_t(Structure):
+ _fields_ = [('validattr', attribute_set_t),
+ ('nativeattr', attribute_set_t)]
+
+dev_t = c_uint
+
+fsobj_type_t = c_uint
+
+VNON = 0
+VREG = 1
+VDIR = 2
+VBLK = 3
+VCHR = 4
+VLNK = 5
+VSOCK = 6
+VFIFO = 7
+VBAD = 8
+VSTR = 9
+VCPLX = 10
+
+fsobj_tag_t = c_uint
+
+VT_NON = 0
+VT_UFS = 1
+VT_NFS = 2
+VT_MFS = 3
+VT_MSDOSFS = 4
+VT_LFS = 5
+VT_LOFS = 6
+VT_FDESC = 7
+VT_PORTAL = 8
+VT_NULL = 9
+VT_UMAP = 10
+VT_KERNFS = 11
+VT_PROCFS = 12
+VT_AFS = 13
+VT_ISOFS = 14
+VT_UNION = 15
+VT_HFS = 16
+VT_ZFS = 17
+VT_DEVFS = 18
+VT_WEBDAV = 19
+VT_UDF = 20
+VT_AFP = 21
+VT_CDDA = 22
+VT_CIFS = 23
+VT_OTHER = 24
+
+fsfile_type_t = c_uint
+fsvolid_t = c_uint
+text_encoding_t = c_uint
+uid_t = c_uint
+gid_t = c_uint
+int32_t = c_int
+uint32_t = c_uint
+int64_t = c_longlong
+uint64_t = c_ulonglong
+off_t = c_long
+size_t = c_ulong
+uuid_t = c_byte*16
+
+NAME_MAX = 255
+PATH_MAX = 1024
+
+class struct_statfs(Structure):
+ _fields_ = [('f_bsize', uint32_t),
+ ('f_iosize', int32_t),
+ ('f_blocks', uint64_t),
+ ('f_bfree', uint64_t),
+ ('f_bavail', uint64_t),
+ ('f_files', uint64_t),
+ ('f_ffree', uint64_t),
+ ('f_fsid', fsid_t),
+ ('f_owner', uid_t),
+ ('f_type', uint32_t),
+ ('f_flags', uint32_t),
+ ('f_fssubtype', uint32_t),
+ ('f_fstypename', c_char * 16),
+ ('f_mntonname', c_char * PATH_MAX),
+ ('f_mntfromname', c_char * PATH_MAX),
+ ('f_reserved', uint32_t * 8)]
+
+# Calculate the maximum number of bytes required for the attribute buffer
+_attr_info = (
+ # Common attributes
+ (0, ATTR_CMN_RETURNED_ATTRS, sizeof(attribute_set_t)),
+ (0, ATTR_CMN_NAME, sizeof(attrreference_t) + NAME_MAX * 3 + 1),
+ (0, ATTR_CMN_DEVID, sizeof(dev_t)),
+ (0, ATTR_CMN_FSID, sizeof(fsid_t)),
+ (0, ATTR_CMN_OBJTYPE, sizeof(fsobj_type_t)),
+ (0, ATTR_CMN_OBJTAG, sizeof(fsobj_tag_t)),
+ (0, ATTR_CMN_OBJPERMANENTID, sizeof(fsobj_id_t)),
+ (0, ATTR_CMN_PAROBJID, sizeof(fsobj_id_t)),
+ (0, ATTR_CMN_SCRIPT, sizeof(text_encoding_t)),
+ (0, ATTR_CMN_CRTIME, sizeof(timespec)),
+ (0, ATTR_CMN_MODTIME, sizeof(timespec)),
+ (0, ATTR_CMN_CHGTIME, sizeof(timespec)),
+ (0, ATTR_CMN_ACCTIME, sizeof(timespec)),
+ (0, ATTR_CMN_BKUPTIME, sizeof(timespec)),
+ (0, ATTR_CMN_FNDRINFO, sizeof(FinderInfo)),
+ (0, ATTR_CMN_OWNERID, sizeof(uid_t)),
+ (0, ATTR_CMN_GRPID, sizeof(gid_t)),
+ (0, ATTR_CMN_ACCESSMASK, sizeof(uint32_t)),
+ (0, ATTR_CMN_NAMEDATTRCOUNT, None),
+ (0, ATTR_CMN_NAMEDATTRLIST, None),
+ (0, ATTR_CMN_FLAGS, sizeof(uint32_t)),
+ (0, ATTR_CMN_USERACCESS, sizeof(uint32_t)),
+ (0, ATTR_CMN_EXTENDED_SECURITY, sizeof(attrreference_t) + sizeof(kauth_filesec)),
+ (0, ATTR_CMN_UUID, sizeof(guid_t)),
+ (0, ATTR_CMN_GRPUUID, sizeof(guid_t)),
+ (0, ATTR_CMN_FILEID, sizeof(uint64_t)),
+ (0, ATTR_CMN_PARENTID, sizeof(uint64_t)),
+ (0, ATTR_CMN_FULLPATH, sizeof(attrreference_t) + PATH_MAX),
+ (0, ATTR_CMN_ADDEDTIME, sizeof(timespec)),
+
+ # Volume attributes
+ (1, ATTR_VOL_FSTYPE, sizeof(uint32_t)),
+ (1, ATTR_VOL_SIGNATURE, sizeof(uint32_t)),
+ (1, ATTR_VOL_SIZE, sizeof(off_t)),
+ (1, ATTR_VOL_SPACEFREE, sizeof(off_t)),
+ (1, ATTR_VOL_SPACEAVAIL, sizeof(off_t)),
+ (1, ATTR_VOL_MINALLOCATION, sizeof(off_t)),
+ (1, ATTR_VOL_ALLOCATIONCLUMP, sizeof(off_t)),
+ (1, ATTR_VOL_IOBLOCKSIZE, sizeof(uint32_t)),
+ (1, ATTR_VOL_OBJCOUNT, sizeof(uint32_t)),
+ (1, ATTR_VOL_FILECOUNT, sizeof(uint32_t)),
+ (1, ATTR_VOL_DIRCOUNT, sizeof(uint32_t)),
+ (1, ATTR_VOL_MAXOBJCOUNT, sizeof(uint32_t)),
+ (1, ATTR_VOL_MOUNTPOINT, sizeof(attrreference_t) + PATH_MAX),
+ (1, ATTR_VOL_NAME, sizeof(attrreference_t) + NAME_MAX + 1),
+ (1, ATTR_VOL_MOUNTFLAGS, sizeof(uint32_t)),
+ (1, ATTR_VOL_MOUNTEDDEVICE, sizeof(attrreference_t) + PATH_MAX),
+ (1, ATTR_VOL_ENCODINGSUSED, sizeof(c_ulonglong)),
+ (1, ATTR_VOL_CAPABILITIES, sizeof(vol_capabilities_attr_t)),
+ (1, ATTR_VOL_UUID, sizeof(uuid_t)),
+ (1, ATTR_VOL_ATTRIBUTES, sizeof(vol_attributes_attr_t)),
+
+ # Directory attributes
+ (2, ATTR_DIR_LINKCOUNT, sizeof(uint32_t)),
+ (2, ATTR_DIR_ENTRYCOUNT, sizeof(uint32_t)),
+ (2, ATTR_DIR_MOUNTSTATUS, sizeof(uint32_t)),
+
+ # File attributes
+ (3, ATTR_FILE_LINKCOUNT, sizeof(uint32_t)),
+ (3, ATTR_FILE_TOTALSIZE, sizeof(off_t)),
+ (3, ATTR_FILE_ALLOCSIZE, sizeof(off_t)),
+ (3, ATTR_FILE_IOBLOCKSIZE, sizeof(uint32_t)),
+ (3, ATTR_FILE_CLUMPSIZE, sizeof(uint32_t)),
+ (3, ATTR_FILE_DEVTYPE, sizeof(uint32_t)),
+ (3, ATTR_FILE_FILETYPE, sizeof(uint32_t)),
+ (3, ATTR_FILE_FORKCOUNT, sizeof(uint32_t)),
+ (3, ATTR_FILE_FORKLIST, None),
+ (3, ATTR_FILE_DATALENGTH, sizeof(off_t)),
+ (3, ATTR_FILE_DATAALLOCSIZE, sizeof(off_t)),
+ (3, ATTR_FILE_DATAEXTENTS, sizeof(extentrecord)),
+ (3, ATTR_FILE_RSRCLENGTH, sizeof(off_t)),
+ (3, ATTR_FILE_RSRCALLOCSIZE, sizeof(off_t)),
+ (3, ATTR_FILE_RSRCEXTENTS, sizeof(extentrecord)),
+
+ # Fork attributes
+ (4, ATTR_FORK_TOTALSIZE, sizeof(off_t)),
+ (4, ATTR_FORK_ALLOCSIZE, sizeof(off_t))
+ )
+
+def _attrbuf_size(attrs):
+ size = 4
+ for entry in _attr_info:
+ if attrs[entry[0]] & entry[1]:
+ if entry[2] is None:
+ raise ValueError('Unsupported attribute (%u, %x)'
+ % (entry[0], entry[1]))
+ size += entry[2]
+ return size
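+
+# For illustration (not part of the upstream module): requesting only
+# ATTR_CMN_OBJTYPE needs 4 bytes for the length field plus
+# sizeof(fsobj_type_t), so
+# _attrbuf_size([ATTR_CMN_OBJTYPE, 0, 0, 0, 0]) == 8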
+
+_getattrlist = libc.getattrlist
+_getattrlist.argtypes = [c_char_p, POINTER(attrlist), c_void_p, c_ulong, c_ulong]
+_getattrlist.restype = c_int
+
+_fgetattrlist = libc.fgetattrlist
+_fgetattrlist.argtypes = [c_int, POINTER(attrlist), c_void_p, c_ulong, c_ulong]
+_fgetattrlist.restype = c_int
+
+_statfs = libc['statfs$INODE64']
+_statfs.argtypes = [c_char_p, POINTER(struct_statfs)]
+_statfs.restype = c_int
+
+_fstatfs = libc['fstatfs$INODE64']
+_fstatfs.argtypes = [c_int, POINTER(struct_statfs)]
+_fstatfs.restype = c_int
+
+def _datetime_from_timespec(ts):
+ td = datetime.timedelta(seconds=ts.tv_sec + 1.0e-9 * ts.tv_nsec)
+ return unix_epoch + td
+
+def _decode_utf8_nul(sz):
+ nul = sz.find(b'\0')
+ if nul > -1:
+ sz = sz[:nul]
+ return sz.decode('utf-8')
+
+def _decode_attrlist_result(buf, attrs, options):
+ result = []
+
+ assert len(buf) >= 4
+ total_size = uint32_t.from_buffer(buf, 0).value
+ assert total_size <= len(buf)
+
+ offset = 4
+
+ # Common attributes
+ if attrs[0] & ATTR_CMN_RETURNED_ATTRS:
+ a = attribute_set_t.from_buffer(buf, offset)
+ result.append(a)
+ offset += sizeof (attribute_set_t)
+ if not (options & FSOPT_PACK_INVAL_ATTRS):
+ attrs = [a.commonattr, a.volattr, a.dirattr, a.fileattr, a.forkattr]
+ if attrs[0] & ATTR_CMN_NAME:
+ a = attrreference_t.from_buffer(buf, offset)
+ ofs = offset + a.attr_dataoffset
+ name = _decode_utf8_nul(buf[ofs:ofs+a.attr_length])
+ offset += sizeof (attrreference_t)
+ result.append(name)
+ if attrs[0] & ATTR_CMN_DEVID:
+ a = dev_t.from_buffer(buf, offset)
+ offset += sizeof(dev_t)
+ result.append(a.value)
+ if attrs[0] & ATTR_CMN_FSID:
+ a = fsid_t.from_buffer(buf, offset)
+ offset += sizeof(fsid_t)
+ result.append(a)
+ if attrs[0] & ATTR_CMN_OBJTYPE:
+ a = fsobj_type_t.from_buffer(buf, offset)
+ offset += sizeof(fsobj_type_t)
+ result.append(a.value)
+ if attrs[0] & ATTR_CMN_OBJTAG:
+ a = fsobj_tag_t.from_buffer(buf, offset)
+ offset += sizeof(fsobj_tag_t)
+ result.append(a.value)
+ if attrs[0] & ATTR_CMN_OBJID:
+ a = fsobj_id_t.from_buffer(buf, offset)
+ offset += sizeof(fsobj_id_t)
+ result.append(a)
+ if attrs[0] & ATTR_CMN_OBJPERMANENTID:
+ a = fsobj_id_t.from_buffer(buf, offset)
+ offset += sizeof(fsobj_id_t)
+ result.append(a)
+ if attrs[0] & ATTR_CMN_PAROBJID:
+ a = fsobj_id_t.from_buffer(buf, offset)
+ offset += sizeof(fsobj_id_t)
+ result.append(a)
+ if attrs[0] & ATTR_CMN_SCRIPT:
+ a = text_encoding_t.from_buffer(buf, offset)
+ offset += sizeof(text_encoding_t)
+ result.append(a.value)
+ if attrs[0] & ATTR_CMN_CRTIME:
+ a = timespec.from_buffer(buf, offset)
+ offset += sizeof(timespec)
+ result.append(_datetime_from_timespec(a))
+ if attrs[0] & ATTR_CMN_MODTIME:
+ a = timespec.from_buffer(buf, offset)
+ offset += sizeof(timespec)
+ result.append(_datetime_from_timespec(a))
+ if attrs[0] & ATTR_CMN_CHGTIME:
+ a = timespec.from_buffer(buf, offset)
+ offset += sizeof(timespec)
+ result.append(_datetime_from_timespec(a))
+ if attrs[0] & ATTR_CMN_ACCTIME:
+ a = timespec.from_buffer(buf, offset)
+ offset += sizeof(timespec)
+ result.append(_datetime_from_timespec(a))
+ if attrs[0] & ATTR_CMN_BKUPTIME:
+ a = timespec.from_buffer(buf, offset)
+ offset += sizeof(timespec)
+ result.append(_datetime_from_timespec(a))
+ if attrs[0] & ATTR_CMN_FNDRINFO:
+ a = FinderInfo.from_buffer(buf, offset)
+ offset += sizeof(FinderInfo)
+ result.append(a)
+ if attrs[0] & ATTR_CMN_OWNERID:
+ a = uid_t.from_buffer(buf, offset)
+ offset += sizeof(uid_t)
+ result.append(a.value)
+ if attrs[0] & ATTR_CMN_GRPID:
+ a = gid_t.from_buffer(buf, offset)
+ offset += sizeof(gid_t)
+ result.append(a.value)
+ if attrs[0] & ATTR_CMN_ACCESSMASK:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[0] & ATTR_CMN_FLAGS:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[0] & ATTR_CMN_USERACCESS:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[0] & ATTR_CMN_EXTENDED_SECURITY:
+ a = attrreference_t.from_buffer(buf, offset)
+ ofs = offset + a.attr_dataoffset
+ offset += sizeof(attrreference_t)
+ ec = uint32_t.from_buffer(buf, ofs + 36).value
+ class kauth_acl(Structure):
+ _fields_ = [('acl_entrycount', c_uint),
+ ('acl_flags', c_uint),
+ ('acl_ace', kauth_ace * ec)]
+ class kauth_filesec(Structure):
+ _fields_ = [('fsec_magic', c_uint),
+ ('fsec_owner', guid_t),
+ ('fsec_group', guid_t),
+ ('fsec_acl', kauth_acl)]
+ a = kauth_filesec.from_buffer(buf, ofs)
+ result.append(a)
+ if attrs[0] & ATTR_CMN_UUID:
+ result.append(uuid.UUID(bytes=buf[offset:offset+16]))
+ offset += sizeof(guid_t)
+ if attrs[0] & ATTR_CMN_GRPUUID:
+ result.append(uuid.UUID(bytes=buf[offset:offset+16]))
+ offset += sizeof(guid_t)
+ if attrs[0] & ATTR_CMN_FILEID:
+ a = uint64_t.from_buffer(buf, offset)
+ offset += sizeof(uint64_t)
+ result.append(a.value)
+ if attrs[0] & ATTR_CMN_PARENTID:
+ a = uint64_t.from_buffer(buf, offset)
+ offset += sizeof(uint64_t)
+ result.append(a.value)
+ if attrs[0] & ATTR_CMN_FULLPATH:
+ a = attrreference_t.from_buffer(buf, offset)
+ ofs = offset + a.attr_dataoffset
+ path = _decode_utf8_nul(buf[ofs:ofs+a.attr_length])
+ offset += sizeof (attrreference_t)
+ result.append(path)
+ if attrs[0] & ATTR_CMN_ADDEDTIME:
+ a = timespec.from_buffer(buf, offset)
+ offset += sizeof(timespec)
+ result.append(_datetime_from_timespec(a))
+
+ # Volume attributes
+ if attrs[1] & ATTR_VOL_FSTYPE:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[1] & ATTR_VOL_SIGNATURE:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[1] & ATTR_VOL_SIZE:
+ a = off_t.from_buffer(buf, offset)
+ offset += sizeof(off_t)
+ result.append(a.value)
+ if attrs[1] & ATTR_VOL_SPACEFREE:
+ a = off_t.from_buffer(buf, offset)
+ offset += sizeof(off_t)
+ result.append(a.value)
+ if attrs[1] & ATTR_VOL_SPACEAVAIL:
+ a = off_t.from_buffer(buf, offset)
+ offset += sizeof(off_t)
+ result.append(a.value)
+ if attrs[1] & ATTR_VOL_MINALLOCATION:
+ a = off_t.from_buffer(buf, offset)
+ offset += sizeof(off_t)
+ result.append(a.value)
+ if attrs[1] & ATTR_VOL_ALLOCATIONCLUMP:
+ a = off_t.from_buffer(buf, offset)
+ offset += sizeof(off_t)
+ result.append(a.value)
+ if attrs[1] & ATTR_VOL_IOBLOCKSIZE:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[1] & ATTR_VOL_OBJCOUNT:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[1] & ATTR_VOL_FILECOUNT:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[1] & ATTR_VOL_DIRCOUNT:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[1] & ATTR_VOL_MAXOBJCOUNT:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[1] & ATTR_VOL_MOUNTPOINT:
+ a = attrreference_t.from_buffer(buf, offset)
+ ofs = offset + a.attr_dataoffset
+ path = _decode_utf8_nul(buf[ofs:ofs+a.attr_length])
+ offset += sizeof (attrreference_t)
+ result.append(path)
+ if attrs[1] & ATTR_VOL_NAME:
+ a = attrreference_t.from_buffer(buf, offset)
+ ofs = offset + a.attr_dataoffset
+ name = _decode_utf8_nul(buf[ofs:ofs+a.attr_length])
+ offset += sizeof (attrreference_t)
+ result.append(name)
+ if attrs[1] & ATTR_VOL_MOUNTFLAGS:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[1] & ATTR_VOL_MOUNTEDDEVICE:
+ a = attrreference_t.from_buffer(buf, offset)
+ ofs = offset + a.attr_dataoffset
+ path = _decode_utf8_nul(buf[ofs:ofs+a.attr_length])
+ offset += sizeof (attrreference_t)
+ result.append(path)
+ if attrs[1] & ATTR_VOL_ENCODINGSUSED:
+ a = c_ulonglong.from_buffer(buf, offset)
+ offset += sizeof(c_ulonglong)
+ result.append(a.value)
+ if attrs[1] & ATTR_VOL_CAPABILITIES:
+ a = vol_capabilities_attr_t.from_buffer(buf, offset)
+ offset += sizeof(vol_capabilities_attr_t)
+ result.append(a)
+ if attrs[1] & ATTR_VOL_UUID:
+ result.append(uuid.UUID(bytes=buf[offset:offset+16]))
+ offset += sizeof(uuid_t)
+ if attrs[1] & ATTR_VOL_ATTRIBUTES:
+ a = vol_attributes_attr_t.from_buffer(buf, offset)
+ offset += sizeof(vol_attributes_attr_t)
+ result.append(a)
+
+ # Directory attributes
+ if attrs[2] & ATTR_DIR_LINKCOUNT:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[2] & ATTR_DIR_ENTRYCOUNT:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[2] & ATTR_DIR_MOUNTSTATUS:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+
+ # File attributes
+ if attrs[3] & ATTR_FILE_LINKCOUNT:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[3] & ATTR_FILE_TOTALSIZE:
+ a = off_t.from_buffer(buf, offset)
+ offset += sizeof(off_t)
+ result.append(a.value)
+ if attrs[3] & ATTR_FILE_ALLOCSIZE:
+ a = off_t.from_buffer(buf, offset)
+ offset += sizeof(off_t)
+ result.append(a.value)
+ if attrs[3] & ATTR_FILE_IOBLOCKSIZE:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[3] & ATTR_FILE_CLUMPSIZE:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[3] & ATTR_FILE_DEVTYPE:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[3] & ATTR_FILE_FILETYPE:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[3] & ATTR_FILE_FORKCOUNT:
+ a = uint32_t.from_buffer(buf, offset)
+ offset += sizeof(uint32_t)
+ result.append(a.value)
+ if attrs[3] & ATTR_FILE_DATALENGTH:
+ a = off_t.from_buffer(buf, offset)
+ offset += sizeof(off_t)
+ result.append(a.value)
+ if attrs[3] & ATTR_FILE_DATAALLOCSIZE:
+ a = off_t.from_buffer(buf, offset)
+ offset += sizeof(off_t)
+ result.append(a.value)
+ if attrs[3] & ATTR_FILE_DATAEXTENTS:
+ a = extentrecord.from_buffer(buf, offset)
+ offset += sizeof(extentrecord)
+ result.append(a.value)
+ if attrs[3] & ATTR_FILE_RSRCLENGTH:
+ a = off_t.from_buffer(buf, offset)
+ offset += sizeof(off_t)
+ result.append(a.value)
+ if attrs[3] & ATTR_FILE_RSRCALLOCSIZE:
+ a = off_t.from_buffer(buf, offset)
+ offset += sizeof(off_t)
+ result.append(a.value)
+ if attrs[3] & ATTR_FILE_RSRCEXTENTS:
+ a = extentrecord.from_buffer(buf, offset)
+ offset += sizeof(extentrecord)
+ result.append(a.value)
+
+ # Fork attributes
+ if attrs[4] & ATTR_FORK_TOTALSIZE:
+ a = off_t.from_buffer(buf, offset)
+ offset += sizeof(off_t)
+ result.append(a.value)
+ if attrs[4] & ATTR_FORK_ALLOCSIZE:
+ a = off_t.from_buffer(buf, offset)
+ offset += sizeof(off_t)
+ result.append(a.value)
+
+ return result
+
+# Sadly, ctypes.get_errno() seems not to work here, so read errno via
+# libc's __error() instead
+__error = libc.__error
+__error.restype = POINTER(c_int)
+
+def _get_errno():
+ return __error().contents.value
+
+def getattrlist(path, attrs, options):
+ attrs = list(attrs)
+ if attrs[1]:
+ attrs[1] |= ATTR_VOL_INFO
+ alist = attrlist(bitmapcount=5,
+ commonattr=attrs[0],
+ volattr=attrs[1],
+ dirattr=attrs[2],
+ fileattr=attrs[3],
+ forkattr=attrs[4])
+
+ bufsize = _attrbuf_size(attrs)
+ buf = create_string_buffer(bufsize)
+
+ ret = _getattrlist(path, byref(alist), buf, bufsize,
+ options | FSOPT_REPORT_FULLSIZE)
+
+ if ret < 0:
+ err = _get_errno()
+ raise OSError(err, os.strerror(err), path)
+
+ return _decode_attrlist_result(buf, attrs, options)
+
+def fgetattrlist(fd, attrs, options):
+ if hasattr(fd, 'fileno'):
+ fd = fd.fileno()
+ attrs = list(attrs)
+ if attrs[1]:
+ attrs[1] |= ATTR_VOL_INFO
+ alist = attrlist(bitmapcount=5,
+ commonattr=attrs[0],
+ volattr=attrs[1],
+ dirattr=attrs[2],
+ fileattr=attrs[3],
+ forkattr=attrs[4])
+
+ bufsize = _attrbuf_size(attrs)
+ buf = create_string_buffer(bufsize)
+
+ ret = _fgetattrlist(fd, byref(alist), buf, bufsize,
+ options | FSOPT_REPORT_FULLSIZE)
+
+ if ret < 0:
+ err = _get_errno()
+ raise OSError(err, os.strerror(err))
+
+ return _decode_attrlist_result(buf, attrs, options)
+
+def statfs(path):
+ result = struct_statfs()
+ ret = _statfs(path, byref(result))
+ if ret < 0:
+ err = _get_errno()
+ raise OSError(err, os.strerror(err), path)
+ return result
+
+def fstatfs(fd):
+ if hasattr(fd, 'fileno'):
+ fd = fd.fileno()
+ result = struct_statfs()
+ ret = _fstatfs(fd, byref(result))
+ if ret < 0:
+ err = _get_errno()
+ raise OSError(err, os.strerror(err))
+ return result
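+
+if __name__ == '__main__':
+ # Illustrative usage, not part of the upstream module: query the root
+ # volume the same way bookmark.py's Bookmark.for_file() does.
+ st = statfs(b'/')
+ print('mounted on: %s' % st.f_mntonname)
+ info = getattrlist(b'/', [ATTR_CMN_OBJTYPE, 0, 0, 0, 0], 0)
+ print('object type: %d (VDIR == %d)' % (info[0], VDIR))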
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/qt_attribution.json b/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/qt_attribution.json
new file mode 100644
index 000000000..562440cd6
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/qt_attribution.json
@@ -0,0 +1,13 @@
+{
+ "Id": "mac_alias",
+ "Name": "mac_alias",
+ "QDocModule": "qbs",
+ "QtUsage": "Used in the qbs dmg module for building Apple disk images.",
+ "Description": "Generate/parse Mac OS Alias records from Python",
+ "Homepage": "https://bitbucket.org/al45tair/mac_alias",
+ "Version": "2.0.1",
+ "License": "MIT License",
+ "LicenseId": "MIT",
+ "LicenseFile": "LICENSE",
+ "Copyright": "Copyright (c) 2014 Alastair Houghton"
+}
diff --git a/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/utils.py b/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/utils.py
new file mode 100644
index 000000000..6a7d0a121
--- /dev/null
+++ b/src/3rdparty/python/lib/python2.7/site-packages/mac_alias/utils.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+import datetime
+
+ZERO = datetime.timedelta(0)
+class UTC (datetime.tzinfo):
+ def utcoffset(self, dt):
+ return ZERO
+ def dst(self, dt):
+ return ZERO
+ def tzname(self, dt):
+ return 'UTC'
+
+utc = UTC()
+mac_epoch = datetime.datetime(1904,1,1,0,0,0,0,utc)
+unix_epoch = datetime.datetime(1970,1,1,0,0,0,0,utc)
+osx_epoch = datetime.datetime(2001,1,1,0,0,0,0,utc)
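+
+# For illustration (not part of the upstream module): bookmark timestamps
+# are stored as seconds relative to osx_epoch, so a raw value t converts
+# back with osx_epoch + datetime.timedelta(seconds=t)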
diff --git a/src/3rdparty/python/update.sh b/src/3rdparty/python/update.sh
new file mode 100755
index 000000000..2eb3138c9
--- /dev/null
+++ b/src/3rdparty/python/update.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+pip install -U --isolated --prefix="$PWD" --no-binary :all: --no-compile --no-deps biplist dmgbuild ds_store mac_alias
+rm lib/python2.7/site-packages/dmgbuild/resources/*.tiff