aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--checklibs.py153
-rw-r--r--ez_setup.py49
-rw-r--r--missing_bindings.py67
-rw-r--r--popenasync.py70
-rw-r--r--prepare_coin_sources.py43
-rw-r--r--qtinfo.py15
-rw-r--r--utils.py322
7 files changed, 421 insertions, 298 deletions
diff --git a/checklibs.py b/checklibs.py
index 172508706..ecedf836d 100644
--- a/checklibs.py
+++ b/checklibs.py
@@ -64,7 +64,7 @@ class MachOFile:
self.header_info = {}
self.load_info()
self.add_to_cache()
-
+
def load_info(self):
if not self.image_path.exists():
return
@@ -72,18 +72,24 @@ class MachOFile:
self.load_rpaths()
def load_header(self):
- # Get the mach-o header info, we're interested in the file type (executable, dylib)
+ # Get the mach-o header info, we're interested in the file type
+ # (executable, dylib)
cmd = 'otool -arch {0} -h "{1}"'
- output = self.shell(cmd, [self.arch, self.image_path.resolved_path], fatal = True)
+ output = self.shell(cmd, [self.arch, self.image_path.resolved_path],
+ fatal = True)
if not output:
- print >> sys.stderr, 'Unable to load mach header for {0} ({1}), architecture mismatch? Use --arch option to pick architecture'.format(self.image_path.resolved_path, self.arch)
+ print("Unable to load mach header for {} ({}), architecture "
+ "mismatch? Use --arch option to pick architecture".format(
+ self.image_path.resolved_path, self.arch), file=sys.stderr)
exit()
(keys, values) = output.splitlines()[2:]
self.header_info = dict(zip(keys.split(), values.split()))
def load_rpaths(self):
- output = self.shell('otool -arch {0} -l "{1}"', [self.arch, self.image_path.resolved_path], fatal = True)
- load_commands = re.split('Load command (\d+)', output)[1:] # skip file name on first line
+ output = self.shell('otool -arch {0} -l "{1}"',
+ [self.arch, self.image_path.resolved_path], fatal = True)
+ # skip file name on first line
+ load_commands = re.split('Load command (\d+)', output)[1:]
self._rpaths = []
load_commands = collections.deque(load_commands)
while load_commands:
@@ -91,7 +97,7 @@ class MachOFile:
command = load_commands.popleft().strip().splitlines()
if command[0].find('LC_RPATH') == -1:
continue
-
+
path = re.findall('path (.+) \(offset \d+\)$', command[2])[0]
image_path = self.image_path_for_recorded_path(path)
image_path.rpath_source = self
@@ -103,26 +109,26 @@ class MachOFile:
while parent:
ancestors.append(parent)
parent = parent.parent
-
+
return ancestors
def self_and_ancestors(self):
return [self] + self.ancestors()
-
+
def rpaths(self):
return self._rpaths
-
+
def all_rpaths(self):
rpaths = []
for image in self.self_and_ancestors():
rpaths.extend(image.rpaths())
return rpaths
-
+
def root(self):
if not self.parent:
return self
return self.ancestors()[-1]
-
+
def executable_path(self):
root = self.root()
if root.is_executable():
@@ -131,26 +137,26 @@ class MachOFile:
def filetype(self):
return long(self.header_info.get('filetype', 0))
-
+
def is_dylib(self):
return self.filetype() == MachOFile.MH_DYLIB
def is_executable(self):
return self.filetype() == MachOFile.MH_EXECUTE
-
+
def all_dependencies(self):
self.walk_dependencies()
return self.cache()['order']
-
+
def walk_dependencies(self, known = {}):
if known.get(self.image_path.resolved_path):
return
-
+
known[self.image_path.resolved_path] = self
-
+
for item in self.dependencies():
item.walk_dependencies(known)
-
+
def dependencies(self):
if not self.image_path.exists():
return []
@@ -158,11 +164,13 @@ class MachOFile:
if self._dependencies:
return self._dependencies
- output = self.shell('otool -arch {0} -L "{1}"', [self.arch, self.image_path.resolved_path], fatal = True)
+ output = self.shell('otool -arch {0} -L "{1}"',
+ [self.arch, self.image_path.resolved_path], fatal = True)
output = [line.strip() for line in output.splitlines()]
del(output[0])
if self.is_dylib():
- del(output[0]) # In the case of dylibs, the first line is the id line
+ # In the case of dylibs, the first line is the id line
+ del(output[0])
self._dependencies = []
for line in output:
@@ -173,25 +181,26 @@ class MachOFile:
image_path = self.image_path_for_recorded_path(recorded_path)
image = self.lookup_or_make_item(image_path)
self._dependencies.append(image)
-
+
return self._dependencies
- # The root item holds the cache, all lower-level requests bubble up the parent chain
+ # The root item holds the cache, all lower-level requests bubble up
+ # the parent chain
def cache(self):
if self.parent:
return self.parent.cache()
return self._cache
-
+
def add_to_cache(self):
cache = self.cache()
cache['paths'][self.image_path.resolved_path] = self
cache['order'].append(self)
-
+
def cached_item_for_path(self, path):
if not path:
return None
return self.cache()['paths'].get(path)
-
+
def lookup_or_make_item(self, image_path):
image = self.cached_item_for_path(image_path.resolved_path)
if not image: # cache miss
@@ -201,20 +210,26 @@ class MachOFile:
def image_path_for_recorded_path(self, recorded_path):
path = ImagePath(None, recorded_path)
- # handle @executable_path
+ # handle @executable_path
if recorded_path.startswith(ImagePath.EXECUTABLE_PATH_TOKEN):
executable_image_path = self.executable_path()
if executable_image_path:
- path.resolved_path = os.path.normpath(recorded_path.replace(ImagePath.EXECUTABLE_PATH_TOKEN, os.path.dirname(executable_image_path.resolved_path)))
+ path.resolved_path = os.path.normpath(
+ recorded_path.replace(
+ ImagePath.EXECUTABLE_PATH_TOKEN,
+ os.path.dirname(executable_image_path.resolved_path)))
# handle @loader_path
elif recorded_path.startswith(ImagePath.LOADER_PATH_TOKEN):
- path.resolved_path = os.path.normpath(recorded_path.replace(ImagePath.LOADER_PATH_TOKEN, os.path.dirname(self.image_path.resolved_path)))
+ path.resolved_path = os.path.normpath(recorded_path.replace(
+ ImagePath.LOADER_PATH_TOKEN,
+ os.path.dirname(self.image_path.resolved_path)))
# handle @rpath
elif recorded_path.startswith(ImagePath.RPATH_TOKEN):
for rpath in self.all_rpaths():
- resolved_path = os.path.normpath(recorded_path.replace(ImagePath.RPATH_TOKEN, rpath.resolved_path))
+ resolved_path = os.path.normpath(recorded_path.replace(
+ ImagePath.RPATH_TOKEN, rpath.resolved_path))
if os.path.exists(resolved_path):
path.resolved_path = resolved_path
path.rpath_source = rpath.rpath_source
@@ -228,19 +243,20 @@ class MachOFile:
def __repr__(self):
return str(self.image_path)
-
+
def dump(self):
- print self.image_path
+ print(self.image_path)
for dependency in self.dependencies():
- print '\t{0}'.format(dependency)
-
+ print('\t{0}'.format(dependency))
+
@staticmethod
def shell(cmd_format, args, fatal = False):
cmd = cmd_format.format(*args)
popen = subprocess.Popen(cmd, shell = True, stdout = subprocess.PIPE)
output = popen.communicate()[0]
if popen.returncode and fatal:
- print >> sys.stderr, 'Nonzero exit status for shell command "{0}"'.format(cmd)
+ print("Nonzero exit status for shell command '{}'".format(cmd),
+ file=sys.stderr)
sys.exit(1)
return output
@@ -250,13 +266,14 @@ class MachOFile:
output = cls.shell('file "{}"', [path])
file_architectures = re.findall(r' executable (\w+)', output)
ordering = 'x86_64 i386'.split()
- file_architectures = sorted(file_architectures, lambda a, b: cmp(ordering.index(a), ordering.index(b)))
+ file_architectures = sorted(file_architectures,
+ key=ordering.index)
return file_architectures
MH_EXECUTE = 0x2
MH_DYLIB = 0x6
MH_BUNDLE = 0x8
-
+
# ANSI terminal coloring sequences
class Color:
@@ -265,25 +282,27 @@ class Color:
GREEN = '\033[92m'
RED = '\033[91m'
ENDC = '\033[0m'
-
+
@staticmethod
def red(string):
return Color.wrap(string, Color.RED)
-
+
@staticmethod
def blue(string):
return Color.wrap(string, Color.BLUE)
-
+
@staticmethod
def wrap(string, color):
return Color.HEADER + color + string + Color.ENDC
-# This class holds path information for a mach-0 image file. It holds the path as it was recorded
-# in the loading binary as well as the effective, resolved file system path.
+# This class holds path information for a mach-0 image file.
+# It holds the path as it was recorded in the loading binary as well as
+# the effective, resolved file system path.
# The former can contain @-replacement tokens.
-# In the case where the recorded path contains an @rpath token that was resolved successfully, we also
-# capture the path of the binary that supplied the rpath value that was used.
+# In the case where the recorded path contains an @rpath token that was
+# resolved successfully, we also capture the path of the binary that
+# supplied the rpath value that was used.
# That path itself can contain replacement tokens such as @loader_path.
class ImagePath:
@@ -291,35 +310,38 @@ class ImagePath:
self.recorded_path = recorded_path
self.resolved_path = resolved_path
self.rpath_source = None
-
+
def __repr__(self):
description = None
-
+
if self.resolved_equals_recorded() or self.recorded_path == None:
description = self.resolved_path
else:
- description = '{0} ({1})'.format(self.resolved_path, self.recorded_path)
-
+ description = '{0} ({1})'.format(self.resolved_path,
+ self.recorded_path)
+
if (not self.is_system_location()) and (not self.uses_dyld_token()):
description = Color.blue(description)
-
+
if self.rpath_source:
- description += ' (rpath source: {0})'.format(self.rpath_source.image_path.resolved_path)
-
+ description += ' (rpath source: {0})'.format(
+ self.rpath_source.image_path.resolved_path)
+
if not self.exists():
description += Color.red(' (missing)')
-
+
return description
-
+
def exists(self):
return self.resolved_path and os.path.exists(self.resolved_path)
-
+
def resolved_equals_recorded(self):
- return self.resolved_path and self.recorded_path and self.resolved_path == self.recorded_path
-
+ return (self.resolved_path and self.recorded_path and
+ self.resolved_path == self.recorded_path)
+
def uses_dyld_token(self):
return self.recorded_path and self.recorded_path.startswith('@')
-
+
def is_system_location(self):
system_prefixes = ['/System/Library', '/usr/lib']
for prefix in system_prefixes:
@@ -332,9 +354,13 @@ class ImagePath:
# Command line driver
-parser = optparse.OptionParser(usage = "Usage: %prog [options] path_to_mach_o_file")
-parser.add_option("--arch", dest = "arch", help = "architecture", metavar = "ARCH")
-parser.add_option("--all", dest = "include_system_libraries", help = "Include system frameworks and libraries", action="store_true")
+parser = optparse.OptionParser(
+ usage = "Usage: %prog [options] path_to_mach_o_file")
+parser.add_option(
+ "--arch", dest = "arch", help = "architecture", metavar = "ARCH")
+parser.add_option(
+ "--all", dest = "include_system_libraries",
+ help = "Include system frameworks and libraries", action="store_true")
(options, args) = parser.parse_args()
if len(args) < 1:
@@ -343,15 +369,18 @@ if len(args) < 1:
archs = MachOFile.architectures_for_image_at_path(args[0])
if archs and not options.arch:
- print >> sys.stderr, 'Analyzing architecture {}, override with --arch if needed'.format(archs[0])
+ print('Analyzing architecture {}, override with --arch if needed'.format(
+ archs[0]), file=sys.stderr)
options.arch = archs[0]
toplevel_image = MachOFile(ImagePath(args[0]), options.arch)
for dependency in toplevel_image.all_dependencies():
- if dependency.image_path.exists() and (not options.include_system_libraries) and dependency.image_path.is_system_location():
+ if (dependency.image_path.exists() and
+ (not options.include_system_libraries) and
+ dependency.image_path.is_system_location()):
continue
dependency.dump()
- print
+ print()
diff --git a/ez_setup.py b/ez_setup.py
index eee5013c4..48d86d2a4 100644
--- a/ez_setup.py
+++ b/ez_setup.py
@@ -38,7 +38,8 @@
##
#############################################################################
-"""Bootstrap setuptools installation
+"""
+Bootstrap setuptools installation
To use setuptools in your package's setup.py, include this
file in the same directory and add this to the top of your setup.py::
@@ -100,7 +101,7 @@ def _install(archive_filename, install_args=()):
def _build_egg(egg, archive_filename, to_dir):
with archive_context(archive_filename):
# building an egg
- log.warn('Building a Setuptools egg in %s', to_dir)
+ log.warn('Building a Setuptools egg in {}'.format(to_dir))
_python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
# returning the result
log.warn(egg)
@@ -132,7 +133,7 @@ class ContextualZipFile(zipfile.ZipFile):
def archive_context(filename):
# extracting the archive
tmpdir = tempfile.mkdtemp()
- log.warn('Extracting in %s', tmpdir)
+ log.warn('Extracting in {}'.format(tmpdir))
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
@@ -142,7 +143,7 @@ def archive_context(filename):
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
- log.warn('Now working in %s', subdir)
+ log.warn('Now working in {}'.format(subdir))
yield
finally:
@@ -185,9 +186,9 @@ def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
except pkg_resources.VersionConflict as VC_err:
if imported:
msg = textwrap.dedent("""
- The required version of setuptools (>={version}) is not available,
- and can't be installed while this script is running. Please
- install a more recent version first, using
+ The required version of setuptools (>={version}) is not
+ available, and can't be installed while this script is running.
+ Please install a more recent version first, using
'easy_install -U setuptools'.
(Currently using {VC_err.args[0]!r})
@@ -201,8 +202,8 @@ def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
def _clean_check(cmd, target):
"""
- Run the command to download target. If the command fails, clean up before
- re-raising the error.
+ Run the command to download target.
+ If the command fails, clean up before re-raising the error.
"""
try:
subprocess.check_call(cmd)
@@ -213,15 +214,16 @@ def _clean_check(cmd, target):
def download_file_powershell(url, target):
"""
- Download the file at url to target using Powershell (which will validate
- trust). Raise an exception if the command cannot complete.
+ Download the file at url to target using Powershell
+ (which will validate trust).
+ Raise an exception if the command cannot complete.
"""
target = os.path.abspath(target)
ps_cmd = (
"[System.Net.WebRequest]::DefaultWebProxy.Credentials = "
"[System.Net.CredentialCache]::DefaultCredentials; "
- "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)"
- % vars()
+ "(new-object System.Net.WebClient).DownloadFile({!r}, {!r})".format(
+ url, target)
)
cmd = [
'powershell',
@@ -275,8 +277,8 @@ download_file_wget.viable = has_wget
def download_file_insecure(url, target):
"""
- Use Python to download the file, even though it cannot authenticate the
- connection.
+ Use Python to download the file, even though it cannot authenticate
+ the connection.
"""
src = urlopen(url)
try:
@@ -304,11 +306,13 @@ def get_best_downloader():
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, delay=15, downloader_factory=get_best_downloader):
"""
- Download setuptools from a specified location and return its filename
+ Download setuptools from a specified location and return its
+ filename
- `version` should be a valid setuptools version number that is available
- as an sdist for download under the `download_base` URL (which should end
- with a '/'). `to_dir` is the directory where the egg will be downloaded.
+ `version` should be a valid setuptools version number that is
+ available as an sdist for download under the `download_base` URL
+ (which should end with a '/').
+ `to_dir` is the directory where the egg will be downloaded.
`delay` is the number of seconds to pause before an actual download
attempt.
@@ -317,18 +321,19 @@ def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
"""
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
- zip_name = "setuptools-%s.zip" % version
+ zip_name = "setuptools-{}.zip".format(version)
url = download_base + zip_name
saveto = os.path.join(to_dir, zip_name)
if not os.path.exists(saveto): # Avoid repeated downloads
- log.warn("Downloading %s", url)
+ log.warn("Downloading {}".format(url))
downloader = downloader_factory()
downloader(url, saveto)
return os.path.realpath(saveto)
def _build_install_args(options):
"""
- Build the arguments to 'python setup.py install' on the setuptools package
+ Build the arguments to 'python setup.py install' on the
+ setuptools package
"""
return ['--user'] if options.user_install else []
diff --git a/missing_bindings.py b/missing_bindings.py
index 894fdbd58..ddc33a04d 100644
--- a/missing_bindings.py
+++ b/missing_bindings.py
@@ -37,19 +37,21 @@
##
#############################################################################
-# This script is used to generate a summary of missing types / classes which are present in C++ Qt5,
-# but are missing in PySide2.
+# This script is used to generate a summary of missing types / classes
+# which are present in C++ Qt5, but are missing in PySide2.
#
# Required packages: bs4
# Installed via: pip install bs4
#
-# The script uses beautiful soup 4 to parse out the class names from the online Qt
-# documentation. It then tries to import the types from PySide2.
+# The script uses beautiful soup 4 to parse out the class names from
+# the online Qt documentation. It then tries to import the types from
+# PySide2.
#
# Example invocation of script:
# python missing_bindings.py --qt-version 5.9 -w all
# --qt-version - specify which version of qt documentation to load.
-# -w - if PyQt5 is an installed package, check if the tested class also exists there.
+# -w - if PyQt5 is an installed package, check if the tested
+# class also exists there.
from __future__ import print_function
@@ -237,8 +239,8 @@ parser.add_argument("--which-missing",
choices=['all', 'in-pyqt', 'not-in-pyqt'],
type=str,
dest='which_missing',
- help="Which missing types to show (all, or just those that are not present in "
- "PyQt)")
+ help="Which missing types to show (all, or just those "
+ "that are not present in PyQt)")
args = parser.parse_args()
@@ -292,18 +294,18 @@ def log(*pargs, **kw):
log('PySide2 bindings for Qt {}'.format(args.version), style='heading1')
-log("""
-Using Qt version {} documentation to find public API Qt types and test if the types are present \
-in the PySide2 package.""".format(args.version))
+log("""Using Qt version {} documentation to find public API Qt types and test
+if the types are present in the PySide2 package.""".format(args.version))
-log("""
-Results are usually stored at https://wiki.qt.io/PySide2_Missing_Bindings so consider \
-taking the contents of the generated missing_bindings_for_wiki_qt_io.txt file and updating \
-the linked wiki page.""", style='end')
+log("""Results are usually stored at
+https://wiki.qt.io/PySide2_Missing_Bindings
+so consider taking the contents of the generated
+missing_bindings_for_wiki_qt_io.txt file and updating the linked wiki page.""",
+style='end')
-log("""
-Similar report: https://gist.github.com/ethanhs/6c626ca4e291f3682589699296377d3a \
-""", style='text_with_link')
+log("""Similar report:
+https://gist.github.com/ethanhs/6c626ca4e291f3682589699296377d3a""",
+style='text_with_link')
python_executable = os.path.basename(sys.executable or '')
command_line_arguments = ' '.join(sys.argv)
@@ -324,20 +326,22 @@ for module_name in modules_to_test.keys():
# Import the tested module
try:
- pyside_tested_module = getattr(__import__(pyside_package_name, fromlist=[module_name]),
- module_name)
+ pyside_tested_module = getattr(__import__(pyside_package_name,
+ fromlist=[module_name]), module_name)
except Exception as e:
- log('\nCould not load {}.{}. Received error: {}. Skipping.\n'
- .format(pyside_package_name, module_name, str(e).replace("'", '')), style='error')
+ log('\nCould not load {}.{}. Received error: {}. Skipping.\n'.format(
+ pyside_package_name, module_name, str(e).replace("'", '')),
+ style='error')
total_missing_modules_count += 1
continue
try:
- pyqt_tested_module = getattr(__import__(pyqt_package_name, fromlist=[module_name]),
- module_name)
+ pyqt_tested_module = getattr(__import__(pyqt_package_name,
+ fromlist=[module_name]), module_name)
except Exception as e:
- log('\nCould not load {}.{} for comparison. Received error: {}.\n'
- .format(pyqt_package_name, module_name, str(e).replace("'", '')), style='error')
+ log("\nCould not load {}.{} for comparison. "
+ "Received error: {}.\n".format(pyqt_package_name, module_name,
+ str(e).replace("'", '')), style='error')
# Get C++ class list from documentation page.
page = urllib2.urlopen(url)
@@ -353,8 +357,8 @@ for module_name in modules_to_test.keys():
if link_text not in types_to_ignore:
types_on_html_page.append(link_text)
- log('Number of types in {}: {}'.format(module_name, len(types_on_html_page)),
- style='bold_colon')
+ log('Number of types in {}: {}'.format(module_name,
+ len(types_on_html_page)), style='bold_colon')
missing_types_count = 0
missing_types_compared_to_pyqt = 0
@@ -383,7 +387,8 @@ for module_name in modules_to_test.keys():
missing_types.append(missing_type)
elif args.which_missing == 'in-pyqt' and is_present_in_pyqt:
missing_types.append(missing_type)
- elif args.which_missing == 'not-in-pyqt' and not is_present_in_pyqt:
+ elif (args.which_missing == 'not-in-pyqt' and
+ not is_present_in_pyqt):
missing_types.append(missing_type)
if len(missing_types) > 0:
@@ -393,7 +398,8 @@ for module_name in modules_to_test.keys():
log(missing_type, style='code')
log('')
- log('Number of missing types: {}'.format(missing_types_count), style='bold_colon')
+ log('Number of missing types: {}'.format(missing_types_count),
+ style='bold_colon')
if len(missing_types) > 0:
log('Number of missing types that are present in PyQt5: {}'
.format(missing_types_compared_to_pyqt), style='bold_colon')
@@ -402,7 +408,8 @@ for module_name in modules_to_test.keys():
log('', style='end')
log('Summary', style='heading5')
-log('Total number of missing types: {}'.format(total_missing_types_count), style='bold_colon')
+log('Total number of missing types: {}'.format(total_missing_types_count),
+ style='bold_colon')
log('Total number of missing types that are present in PyQt5: {}'
.format(total_missing_types_count_compared_to_pyqt), style='bold_colon')
log('Total number of missing modules: {}'
diff --git a/popenasync.py b/popenasync.py
index eedc2fd8b..2a5af3dd8 100644
--- a/popenasync.py
+++ b/popenasync.py
@@ -76,10 +76,10 @@ if mswindows:
# Strings only; do nothing
def encode(s):
return s
-
+
def decode(b):
return b
-
+
try:
import ctypes
from ctypes.wintypes import DWORD
@@ -87,12 +87,15 @@ if mswindows:
TerminateProcess = ctypes.windll.kernel32.TerminateProcess
def WriteFile(handle, data, ol = None):
c_written = DWORD()
- success = ctypes.windll.kernel32.WriteFile(handle, ctypes.create_string_buffer(encode(data)), len(data), ctypes.byref(c_written), ol)
+ success = ctypes.windll.kernel32.WriteFile(handle,
+ ctypes.create_string_buffer(encode(data)), len(data),
+ ctypes.byref(c_written), ol)
return ctypes.windll.kernel32.GetLastError(), c_written.value
def ReadFile(handle, desired_bytes, ol = None):
c_read = DWORD()
buffer = ctypes.create_string_buffer(desired_bytes+1)
- success = ctypes.windll.kernel32.ReadFile(handle, buffer, desired_bytes, ctypes.byref(c_read), ol)
+ success = ctypes.windll.kernel32.ReadFile(handle, buffer,
+ desired_bytes, ctypes.byref(c_read), ol)
buffer[c_read.value] = null_byte
return ctypes.windll.kernel32.GetLastError(), decode(buffer.value)
def PeekNamedPipe(handle, desired_bytes):
@@ -101,19 +104,23 @@ if mswindows:
if desired_bytes > 0:
c_read = DWORD()
buffer = ctypes.create_string_buffer(desired_bytes+1)
- success = ctypes.windll.kernel32.PeekNamedPipe(handle, buffer, desired_bytes, ctypes.byref(c_read), ctypes.byref(c_avail), ctypes.byref(c_message))
+ success = ctypes.windll.kernel32.PeekNamedPipe(handle, buffer,
+ desired_bytes, ctypes.byref(c_read), ctypes.byref(c_avail),
+ ctypes.byref(c_message))
buffer[c_read.value] = null_byte
return decode(buffer.value), c_avail.value, c_message.value
else:
- success = ctypes.windll.kernel32.PeekNamedPipe(handle, None, desired_bytes, None, ctypes.byref(c_avail), ctypes.byref(c_message))
+ success = ctypes.windll.kernel32.PeekNamedPipe(handle, None,
+ desired_bytes, None, ctypes.byref(c_avail),
+ ctypes.byref(c_message))
return "", c_avail.value, c_message.value
-
+
except ImportError:
from win32file import ReadFile, WriteFile
from win32pipe import PeekNamedPipe
from win32api import TerminateProcess
import msvcrt
-
+
else:
from signal import SIGINT, SIGTERM, SIGKILL
import select
@@ -128,16 +135,16 @@ PIPE = subprocess.PIPE
class Popen(subprocess.Popen):
def __init__(self, *args, **kwargs):
subprocess.Popen.__init__(self, *args, **kwargs)
-
+
def recv(self, maxsize=None):
return self._recv('stdout', maxsize)
-
+
def recv_err(self, maxsize=None):
return self._recv('stderr', maxsize)
def send_recv(self, input='', maxsize=None):
return self.send(input), self.recv(maxsize), self.recv_err(maxsize)
-
+
def read_async(self, wait=.1, e=1, tr=5, stderr=0):
if tr < 1:
tr = 1
@@ -159,21 +166,21 @@ class Popen(subprocess.Popen):
else:
time.sleep(max((x-time.time())/tr, 0))
return ''.join(y)
-
+
def send_all(self, data):
while len(data):
sent = self.send(data)
if sent is None:
raise Exception("Other end disconnected!")
data = buffer(data, sent)
-
+
def get_conn_maxsize(self, which, maxsize):
if maxsize is None:
maxsize = 1024
elif maxsize < 1:
maxsize = 1
return getattr(self, which), maxsize
-
+
def _close(self, which):
conn = getattr(self, which)
flags = fcntl.fcntl(conn, fcntl.F_GETFL)
@@ -182,13 +189,13 @@ class Popen(subprocess.Popen):
assert conn.read() == ''
getattr(self, which).close()
setattr(self, which, None)
-
+
if mswindows:
def kill(self):
# Recipes
#http://me.in-berlin.de/doc/python/faq/windows.html#how-do-i-emulate-os-kill-in-windows
#http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/347462
-
+
"""kill function for Win32"""
TerminateProcess(int(self._handle), 0) # returns None
@@ -212,7 +219,7 @@ class Popen(subprocess.Popen):
conn, maxsize = self.get_conn_maxsize(which, maxsize)
if conn is None:
return None
-
+
try:
x = msvcrt.get_osfhandle(conn.fileno())
(read, nAvail, nMessage) = PeekNamedPipe(x, 0)
@@ -226,7 +233,7 @@ class Popen(subprocess.Popen):
if geterror()[0] in (109, errno.ESHUTDOWN):
return self._close(which)
raise
-
+
if self.universal_newlines:
# Translate newlines. For Python 3.x assume read is text.
# If bytes then another solution is needed.
@@ -241,7 +248,7 @@ class Popen(subprocess.Popen):
killed_pid, stat = os.waitpid(self.pid, os.WNOHANG)
if killed_pid != 0: return
-
+
def send(self, input):
if not self.stdin:
return None
@@ -262,15 +269,15 @@ class Popen(subprocess.Popen):
conn, maxsize = self.get_conn_maxsize(which, maxsize)
if conn is None:
return None
-
+
flags = fcntl.fcntl(conn, fcntl.F_GETFL)
if not conn.closed:
fcntl.fcntl(conn, fcntl.F_SETFL, flags| os.O_NONBLOCK)
-
+
try:
if not select.select([conn], [], [], 0)[0]:
return ''
-
+
try:
r = conn.read(maxsize)
except IOError as e:
@@ -279,7 +286,7 @@ class Popen(subprocess.Popen):
raise
if not r:
return self._close(which)
-
+
if self.universal_newlines:
r = r.replace("\r\n", "\n").replace("\r", "\n")
return r
@@ -292,7 +299,7 @@ class Popen(subprocess.Popen):
def proc_in_time_or_kill(cmd, time_out, wd = None, env = None):
proc = Popen (
cmd, cwd = wd, env = env,
- stdin = subprocess.PIPE, stdout = subprocess.PIPE,
+ stdin = subprocess.PIPE, stdout = subprocess.PIPE,
stderr = subprocess.STDOUT, universal_newlines = 1
)
@@ -305,13 +312,13 @@ def proc_in_time_or_kill(cmd, time_out, wd = None, env = None):
response += [proc.read_async(wait=0.1, e=0)]
if ret_code is None:
- ret_code = '"Process timed out (time_out = %s secs) ' % time_out
+ ret_code = '"Process timed out (time_out = {} secs) '.format(time_out)
try:
proc.kill()
ret_code += 'and was successfully terminated"'
except Exception:
- ret_code += ('and termination failed (exception: %s)"' %
- (geterror(),))
+ ret_code += ("and termination failed "
+ "(exception: {})\"".format(geterror()))
return ret_code, ''.join(response)
@@ -322,7 +329,7 @@ class AsyncTest(unittest.TestCase):
ret_code, response = proc_in_time_or_kill(
[sys.executable, '-c', 'while 1: pass'], time_out = 1
)
-
+
self.assert_( 'rocess timed out' in ret_code )
self.assert_( 'successfully terminated' in ret_code )
@@ -330,10 +337,11 @@ class AsyncTest(unittest.TestCase):
def _example():
if sys.platform == 'win32':
- shell, commands, tail = ('cmd', ('echo "hello"', 'echo "HELLO WORLD"'), '\r\n')
+ shell, commands, tail = ('cmd', ('echo "hello"', 'echo "HELLO WORLD"'),
+ '\r\n')
else:
shell, commands, tail = ('sh', ('ls', 'echo HELLO WORLD'), '\n')
-
+
a = Popen(shell, stdin=PIPE, stdout=PIPE)
sys.stdout.write(a.read_async())
sys.stdout.write(" ")
@@ -346,7 +354,7 @@ def _example():
a.wait()
################################################################################
-
+
if __name__ == '__main__':
if 1: unittest.main()
else: _example()
diff --git a/prepare_coin_sources.py b/prepare_coin_sources.py
index 7ef291d77..58b83d7c3 100644
--- a/prepare_coin_sources.py
+++ b/prepare_coin_sources.py
@@ -55,12 +55,12 @@ submodules = {
}
def usage():
- print("""\
-This is a utility script for pyside-setup to prepare its source tree for testing
-by the Qt Continuous Integration (CI). The script will checkout all submodules in the
-pyside-setup/ sources directory except the one under test in the CI. The submodule
-to be tested is expected to be found as a sibling directory of pyside-setup,
-from which it is moved under the pyside-setup/sources directory.
+ print("""This is a utility script for pyside-setup to prepare its source
+tree for testing by the Qt Continuous Integration (CI). The script will
+checkout all submodules in the pyside-setup/ sources directory except the one
+under test in the CI.
+The submodule to be tested is expected to be found as a sibling directory of
+pyside-setup, from which it is moved under the pyside-setup/sources directory.
Usage:
python prepare-sources.py --module=pyside/<submodule> --branch=<branch>
@@ -85,40 +85,49 @@ def prepare_sources():
try:
shutil.move(module_dir, module_dir + "_removed")
except Exception as e:
- raise Exception("!!!!!!!!!!!!! Failed to rename %s " % module_dir)
+ raise Exception("!!!!!!!!!!!!! Failed to rename {} ".format(
+ module_dir))
git_checkout_cmd = ["git", "clone", sub_repo, module_dir]
if run_process(git_checkout_cmd) != 0:
- raise Exception("!!!!!!!!!!!!! Failed to clone the git submodule %s" % sub_repo)
+ raise Exception("!!!!!!!!!!!!! Failed to clone the git "
+ "submodule {}".format(sub_repo))
print("************************* CLONED **********************")
for module_name, repo_name in submodules.items():
- print("***** Preparing %s" % module_name)
+ print("***** Preparing {}".format(module_name))
if repo_name == QT_CI_TESTED_SUBMODULE:
- print("Skipping tested module %s and using sources from Coin storage instead" % module_name)
+ print("Skipping tested module {} and using sources from Coin "
+ "storage instead".format(module_name))
module_dir = os.path.join("sources", module_name)
storage_dir = os.path.join("..", QT_CI_TESTED_SUBMODULE)
try:
shutil.move(module_dir, module_dir + "_replaced_as_tested")
except Exception as e:
- raise Exception("!!!!!!!!!!!!! Failed to rename %s " % module_dir)
+ raise Exception("!!!!!!!!!!!!! Failed to rename {} ".format(
+ module_dir))
shutil.move(storage_dir, module_dir)
else:
module_dir = os.path.join("sources", module_name)
os.chdir(module_dir)
#Make sure the branch exists, if not use dev
_branch = SUBMODULE_BRANCH
- git_list_branch_cmd = ["git", "ls-remote", "origin", "refs/heads/" + _branch]
+ git_list_branch_cmd = ["git", "ls-remote", "origin",
+ "refs/heads/" + _branch]
shell = (sys.platform == "win32")
result = Popen(git_list_branch_cmd, stdout=PIPE, shell=shell)
if len(result.communicate()[0].split())==0:
- print("Warning: Requested %s branch doesn't exist so we'll fall back to 'dev' branch instead"\
- % SUBMODULE_BRANCH)
+ print("Warning: Requested {} branch doesn't exist so we'll "
+ "fall back to 'dev' branch instead".format(
+ SUBMODULE_BRANCH))
_branch = "dev"
- print("Checking out submodule %s to branch %s" % (module_name, _branch))
+ print("Checking out submodule {} to branch {}".format(module_name,
+ _branch))
git_checkout_cmd = ["git", "checkout", _branch]
if run_process(git_checkout_cmd) != 0:
- print("Failed to initialize the git submodule %s" % module_name)
+ print("Failed to initialize the git submodule {}".format(
+ module_name))
else:
- print("Submodule %s has branch %s checked out" % (module_name, _branch))
+ print("Submodule {} has branch {} checked out".format(
+ module_name, _branch))
os.chdir(script_dir)
diff --git a/qtinfo.py b/qtinfo.py
index c61e796b0..e3591d987 100644
--- a/qtinfo.py
+++ b/qtinfo.py
@@ -100,7 +100,9 @@ class QtInfo(object):
return self.getProperty("QMAKE_MACOSX_DEPLOYMENT_TARGET")
def getBuildType(self):
- """ Return value is either debug, release, debug_release, or None. """
+ """
+ Return value is either debug, release, debug_release, or None.
+ """
return self.getProperty("BUILD_TYPE")
def getSrcDir(self):
@@ -168,8 +170,8 @@ class QtInfo(object):
return None
def _getOtherProperties(self):
- # Get the src property separately, because it is not returned by qmake unless explicitly
- # specified.
+ # Get the src property separately, because it is not returned by
+ # qmake unless explicitly specified.
key = 'QT_INSTALL_PREFIX/src'
result = self._getQMakeOutput(['-query', key])
self._query_dict[key] = result
@@ -182,7 +184,8 @@ class QtInfo(object):
if key in self._mkspecs_dict:
self._query_dict[key] = self._mkspecs_dict[key]
- # Figure out how Qt was built: debug mode, release mode, or both.
+ # Figure out how Qt was built:
+ # debug mode, release mode, or both.
build_type = self._parseQtBuildType()
if build_type:
self._query_dict['BUILD_TYPE'] = build_type
@@ -207,8 +210,8 @@ class QtInfo(object):
value = found.group(2).strip()
self._mkspecs_dict[key] = value
- # We need to clean up after qmake, which always creates a .qmake.stash file after a -E
- # invocation.
+ # We need to clean up after qmake, which always creates a
+ # .qmake.stash file after a -E invocation.
qmake_stash_file = os.path.join(os.getcwd(), ".qmake.stash")
if os.path.exists(qmake_stash_file):
os.remove(qmake_stash_file)
diff --git a/utils.py b/utils.py
index d1bf2d6b9..c80e6dd6c 100644
--- a/utils.py
+++ b/utils.py
@@ -71,7 +71,7 @@ except NameError:
def has_option(name):
try:
- sys.argv.remove('--%s' % name)
+ sys.argv.remove("--{}".format(name))
return True
except ValueError:
pass
@@ -82,8 +82,8 @@ def option_value(name):
for index, option in enumerate(sys.argv):
if option == '--' + name:
if index+1 >= len(sys.argv):
- raise DistutilsOptionError(
- 'The option %s requires a value' % option)
+ raise DistutilsOptionError("The option {} requires a "
+ "value".format(option))
value = sys.argv[index+1]
sys.argv[index:index+2] = []
return value
@@ -108,7 +108,7 @@ def update_env_path(newpaths):
paths = os.environ['PATH'].lower().split(os.pathsep)
for path in newpaths:
if not path.lower() in paths:
- log.info("Inserting path \"%s\" to environment" % path)
+ log.info("Inserting path '{}' to environment".format(path))
paths.insert(0, path)
os.environ['PATH'] = path + os.pathsep + os.environ['PATH']
@@ -127,7 +127,8 @@ def winsdk_setenv(platform_arch, build_type):
"v7.1": 10.0
}
- log.info("Searching Windows SDK with MSVC compiler version %s" % MSVC_VERSION)
+ log.info("Searching Windows SDK with MSVC compiler version {}".format(
+ MSVC_VERSION))
setenv_paths = []
for base in HKEYS:
sdk_versions = Reg.read_keys(base, WINSDK_BASE)
@@ -148,14 +149,14 @@ def winsdk_setenv(platform_arch, build_type):
setenv_paths.append(setenv_path)
if len(setenv_paths) == 0:
raise DistutilsSetupError(
- "Failed to find the Windows SDK with MSVC compiler version %s"
- % MSVC_VERSION)
+ "Failed to find the Windows SDK with MSVC compiler "
+ "version {}".format(MSVC_VERSION))
for setenv_path in setenv_paths:
- log.info("Found %s" % setenv_path)
+ log.info("Found {}".format(setenv_path))
# Get SDK env (use latest SDK version installed on system)
setenv_path = setenv_paths[-1]
- log.info("Using %s " % setenv_path)
+ log.info("Using {} ".format(setenv_path))
build_arch = "/x86" if platform_arch.startswith("32") else "/x64"
build_type = "/Debug" if build_type.lower() == "debug" else "/Release"
setenv_cmd = [setenv_path, build_arch, build_type]
@@ -168,22 +169,22 @@ def winsdk_setenv(platform_arch, build_type):
update_env_path(setenv_env_paths)
for k in sorted(setenv_env_without_paths):
v = setenv_env_without_paths[k]
- log.info("Inserting \"%s = %s\" to environment" % (k, v))
+ log.info("Inserting '{} = {}' to environment".format(k, v))
os.environ[k] = v
log.info("Done initializing Windows SDK env")
def find_vcdir(version):
"""
- This is the customized version of distutils.msvc9compiler.find_vcvarsall method
+ This is the customized version of
+ distutils.msvc9compiler.find_vcvarsall method
"""
from distutils.msvc9compiler import VS_BASE
from distutils.msvc9compiler import Reg
from distutils import log
vsbase = VS_BASE % version
try:
- productdir = Reg.get_value(r"%s\Setup\VC" % vsbase,
- "productdir")
+ productdir = Reg.get_value(r"{}\Setup\VC".format(vsbase), "productdir")
except KeyError:
productdir = None
@@ -196,7 +197,7 @@ def find_vcdir(version):
else:
vsbase = VSEXPRESS_BASE % version
try:
- productdir = Reg.get_value(r"%s\Setup\VC" % vsbase,
+ productdir = Reg.get_value(r"{}\Setup\VC".format(vsbase),
"productdir")
except KeyError:
productdir = None
@@ -210,10 +211,10 @@ def find_vcdir(version):
productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC")
productdir = os.path.abspath(productdir)
if not os.path.isdir(productdir):
- log.debug("%s is not a valid directory" % productdir)
+ log.debug("{} is not a valid directory".format(productdir))
return None
else:
- log.debug("Env var %s is not set or invalid" % toolskey)
+ log.debug("Env var {} is not set or invalid".format(toolskey))
if not productdir:
log.debug("No productdir found")
return None
@@ -223,37 +224,41 @@ def find_vcdir(version):
def init_msvc_env(platform_arch, build_type):
from distutils.msvc9compiler import VERSION as MSVC_VERSION
- log.info("Searching MSVC compiler version %s" % MSVC_VERSION)
+ log.info("Searching MSVC compiler version {}".format(MSVC_VERSION))
vcdir_path = find_vcdir(MSVC_VERSION)
if not vcdir_path:
raise DistutilsSetupError(
- "Failed to find the MSVC compiler version %s on your system." % MSVC_VERSION)
+            "Failed to find the MSVC compiler version {} on your "
+            "system.".format(MSVC_VERSION))
else:
- log.info("Found %s" % vcdir_path)
+ log.info("Found {}".format(vcdir_path))
- log.info("Searching MSVC compiler %s environment init script" % MSVC_VERSION)
+ log.info("Searching MSVC compiler {} environment init script".format(
+ MSVC_VERSION))
if platform_arch.startswith("32"):
vcvars_path = os.path.join(vcdir_path, "bin", "vcvars32.bat")
else:
vcvars_path = os.path.join(vcdir_path, "bin", "vcvars64.bat")
if not os.path.exists(vcvars_path):
- vcvars_path = os.path.join(vcdir_path, "bin", "amd64", "vcvars64.bat")
+ vcvars_path = os.path.join(vcdir_path, "bin", "amd64",
+ "vcvars64.bat")
if not os.path.exists(vcvars_path):
- vcvars_path = os.path.join(vcdir_path, "bin", "amd64", "vcvarsamd64.bat")
+ vcvars_path = os.path.join(vcdir_path, "bin", "amd64",
+ "vcvarsamd64.bat")
if not os.path.exists(vcvars_path):
# MSVC init script not found, try to find and init Windows SDK env
- log.error(
- "Failed to find the MSVC compiler environment init script (vcvars.bat) on your system.")
+ log.error("Failed to find the MSVC compiler environment init script "
+ "(vcvars.bat) on your system.")
winsdk_setenv(platform_arch, build_type)
return
else:
- log.info("Found %s" % vcvars_path)
+ log.info("Found {}".format(vcvars_path))
# Get MSVC env
- log.info("Using MSVC %s in %s" % (MSVC_VERSION, vcvars_path))
+ log.info("Using MSVC {} in {}".format(MSVC_VERSION, vcvars_path))
msvc_arch = "x86" if platform_arch.startswith("32") else "amd64"
- log.info("Getting MSVC env for %s architecture" % msvc_arch)
+ log.info("Getting MSVC env for {} architecture".format(msvc_arch))
vcvars_cmd = [vcvars_path, msvc_arch]
msvc_env = get_environment_from_batch_command(vcvars_cmd)
msvc_env_paths = os.pathsep.join([msvc_env[k] for k in msvc_env if k.upper() == 'PATH']).split(os.pathsep)
@@ -264,7 +269,7 @@ def init_msvc_env(platform_arch, build_type):
update_env_path(msvc_env_paths)
for k in sorted(msvc_env_without_paths):
v = msvc_env_without_paths[k]
- log.info("Inserting \"%s = %s\" to environment" % (k, v))
+ log.info("Inserting '{} = {}' to environment".format(k, v))
os.environ[k] = v
log.info("Done initializing MSVC env")
@@ -275,11 +280,12 @@ def copyfile(src, dst, force=True, vars=None, force_copy_symlink=False):
dst = dst.format(**vars)
if not os.path.exists(src) and not force:
- log.info("**Skiping copy file %s to %s. Source does not exists." % (src, dst))
+        log.info("**Skipping copy file {} to {}. "
+                 "Source does not exist.".format(src, dst))
return
if not os.path.islink(src) or force_copy_symlink:
- log.info("Copying file %s to %s." % (src, dst))
+ log.info("Copying file {} to {}.".format(src, dst))
shutil.copy2(src, dst)
else:
linkTargetPath = os.path.realpath(src)
@@ -292,14 +298,17 @@ def copyfile(src, dst, force=True, vars=None, force_copy_symlink=False):
os.chdir(targetDir)
if os.path.exists(linkName):
os.remove(linkName)
- log.info("Symlinking %s -> %s in %s." % (linkName, linkTarget, targetDir))
+ log.info("Symlinking {} -> {} in {}.".format(linkName,
+ linkTarget, targetDir))
os.symlink(linkTarget, linkName)
except OSError:
- log.error("%s -> %s: Error creating symlink" % (linkName, linkTarget))
+ log.error("{} -> {}: Error creating symlink".format(linkName,
+ linkTarget))
finally:
os.chdir(currentDirectory)
else:
- log.error("%s -> %s: Can only create symlinks within the same directory" % (src, linkTargetPath))
+ log.error("{} -> {}: Can only create symlinks within the same "
+ "directory".format(src, linkTargetPath))
return dst
@@ -310,7 +319,7 @@ def makefile(dst, content=None, vars=None):
content = content.format(**vars)
dst = dst.format(**vars)
- log.info("Making file %s." % (dst))
+ log.info("Making file {}.".format(dst))
dstdir = os.path.dirname(dst)
if not os.path.exists(dstdir):
@@ -322,8 +331,9 @@ def makefile(dst, content=None, vars=None):
f.close()
-def copydir(src, dst, filter=None, ignore=None, force=True, recursive=True, vars=None,
- dir_filter_function=None, file_filter_function=None, force_copy_symlinks=False):
+def copydir(src, dst, filter=None, ignore=None, force=True, recursive=True,
+ vars=None, dir_filter_function=None, file_filter_function=None,
+ force_copy_symlinks=False):
if vars is not None:
src = src.format(**vars)
@@ -336,12 +346,12 @@ def copydir(src, dst, filter=None, ignore=None, force=True, recursive=True, vars
ignore[i] = ignore[i].format(**vars)
if not os.path.exists(src) and not force:
- log.info("**Skiping copy tree %s to %s. Source does not exists. filter=%s. ignore=%s." % \
- (src, dst, filter, ignore))
+        log.info("**Skipping copy tree {} to {}. Source does not exist. "
+                 "filter={}. ignore={}.".format(src, dst, filter, ignore))
return []
- log.info("Copying tree %s to %s. filter=%s. ignore=%s." % \
- (src, dst, filter, ignore))
+ log.info("Copying tree {} to {}. filter={}. ignore={}.".format(src, dst,
+ filter, ignore))
names = os.listdir(src)
@@ -352,21 +362,25 @@ def copydir(src, dst, filter=None, ignore=None, force=True, recursive=True, vars
dstname = os.path.join(dst, name)
try:
if os.path.isdir(srcname):
- if dir_filter_function and not dir_filter_function(name, src, srcname):
+ if (dir_filter_function and
+ not dir_filter_function(name, src, srcname)):
continue
if recursive:
results.extend(
- copydir(srcname, dstname, filter, ignore, force, recursive,
- vars, dir_filter_function, file_filter_function,
- force_copy_symlinks))
+ copydir(srcname, dstname, filter, ignore, force,
+ recursive, vars, dir_filter_function,
+ file_filter_function, force_copy_symlinks))
else:
- if (file_filter_function is not None and not file_filter_function(name, srcname)) \
- or (filter is not None and not filter_match(name, filter)) \
- or (ignore is not None and filter_match(name, ignore)):
+ if ((file_filter_function is not None and
+ not file_filter_function(name, srcname)) or
+ (filter is not None and
+ not filter_match(name, filter)) or
+ (ignore is not None and filter_match(name, ignore))):
continue
if not os.path.exists(dst):
os.makedirs(dst)
- results.append(copyfile(srcname, dstname, True, vars, force_copy_symlinks))
+ results.append(copyfile(srcname, dstname, True, vars,
+ force_copy_symlinks))
# catch the Error from the recursive copytree so that we can
# continue with other files
except shutil.Error as err:
@@ -423,7 +437,8 @@ def run_process(args, initial_env=None):
for line in lines:
log.info(line.rstrip('\r'))
return buffer
- _log("Running process in {0}: {1}".format(os.getcwd(), " ".join([(" " in x and '"{0}"'.format(x) or x) for x in args])))
+ _log("Running process in {0}: {1}".format(os.getcwd(),
+ " ".join([(" " in x and '"{0}"'.format(x) or x) for x in args])))
if sys.platform != "win32":
try:
@@ -471,7 +486,7 @@ def get_environment_from_batch_command(env_cmd, initial=None):
def validate_pair(ob):
try:
if not (len(ob) == 2):
- print("Unexpected result: %s" % ob)
+ print("Unexpected result: {}".format(ob))
raise ValueError
except:
return False
@@ -490,7 +505,8 @@ def get_environment_from_batch_command(env_cmd, initial=None):
# create a tag so we can tell in the output when the proc is done
tag = 'Done running command'
# construct a cmd.exe command to do accomplish this
- cmd = 'cmd.exe /E:ON /V:ON /s /c "{env_cmd} && echo "{tag}" && set"'.format(**vars())
+ cmd = 'cmd.exe /E:ON /V:ON /s /c "{} && echo "{}" && set"'.format(env_cmd,
+ tag)
# launch the process
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=initial)
# parse the output sent to stdout
@@ -527,15 +543,17 @@ def regenerate_qt_resources(src, pyside_rcc_path, pyside_rcc_options):
srcname_split = srcname.rsplit('.qrc', 1)
dstname = '_rc.py'.join(srcname_split)
if os.path.exists(dstname):
- log.info('Regenerating %s from %s' % \
- (dstname, os.path.basename(srcname)))
+ log.info('Regenerating {} from {}'.format(dstname,
+ os.path.basename(srcname)))
run_process([pyside_rcc_path,
pyside_rcc_options,
srcname, '-o', dstname])
def back_tick(cmd, ret_err=False):
- """ Run command `cmd`, return stdout, or stdout, stderr, return_code if `ret_err` is True.
+ """
+ Run command `cmd`, return stdout, or stdout, stderr,
+ return_code if `ret_err` is True.
Roughly equivalent to ``check_output`` in Python 2.7
@@ -544,22 +562,25 @@ def back_tick(cmd, ret_err=False):
cmd : str
command to execute
ret_err : bool, optional
- If True, return stderr and return_code in addition to stdout. If False, just return
- stdout
+ If True, return stderr and return_code in addition to stdout.
+ If False, just return stdout
Returns
-------
out : str or tuple
If `ret_err` is False, return stripped string containing stdout from
- `cmd`. If `ret_err` is True, return tuple of (stdout, stderr, return_code) where
- ``stdout`` is the stripped stdout, and ``stderr`` is the stripped
+ `cmd`.
+ If `ret_err` is True, return tuple of (stdout, stderr, return_code)
+ where ``stdout`` is the stripped stdout, and ``stderr`` is the stripped
stderr, and ``return_code`` is the process exit code.
Raises
------
- Raises RuntimeError if command returns non-zero exit code when ret_err isn't set.
+ Raises RuntimeError if command returns non-zero exit code when ret_err
+ isn't set.
"""
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE, shell=True)
out, err = proc.communicate()
if not isinstance(out, str):
# python 3
@@ -570,8 +591,8 @@ def back_tick(cmd, ret_err=False):
proc.terminate()
raise RuntimeError(cmd + ' process did not terminate')
if retcode != 0 and not ret_err:
- raise RuntimeError(cmd + ' process returned code %d\n*** %s' %
- (retcode, err))
+        raise RuntimeError("{} process returned code {}\n*** {}".format(
+            cmd, retcode, err))
out = out.strip()
if not ret_err:
return out
@@ -582,7 +603,8 @@ OSX_OUTNAME_RE = re.compile(r'\(compatibility version [\d.]+, current version '
'[\d.]+\)')
def osx_get_install_names(libpath):
- """ Get OSX library install names from library `libpath` using ``otool``
+ """
+ Get macOS library install names from library `libpath` using ``otool``
Parameters
----------
@@ -639,12 +661,13 @@ def osx_get_rpaths(libpath):
def osx_fix_rpaths_for_library(library_path, qt_lib_dir):
""" Adds required rpath load commands to given library.
- This is a necessary post-installation step, to allow loading PySide modules without setting
- DYLD_LIBRARY_PATH or DYLD_FRAMEWORK_PATH.
- The CMake rpath commands which are added at build time are used only for testing (make check),
- and they are stripped once the equivalent of make install is executed (except for shiboken,
- which currently uses CMAKE_INSTALL_RPATH_USE_LINK_PATH, which might be necessary to remove in
- the future).
+ This is a necessary post-installation step, to allow loading PySide
+ modules without setting DYLD_LIBRARY_PATH or DYLD_FRAMEWORK_PATH.
+ The CMake rpath commands which are added at build time are used only
+ for testing (make check), and they are stripped once the equivalent
+ of make install is executed (except for shiboken, which currently
+ uses CMAKE_INSTALL_RPATH_USE_LINK_PATH, which might be necessary to
+ remove in the future).
Parameters
----------
@@ -663,9 +686,10 @@ def osx_fix_rpaths_for_library(library_path, qt_lib_dir):
if install_name[0] == '/':
continue
- # If there are dynamic library install names that contain @rpath tokens, we will
- # provide an rpath load command with the value of "@loader_path". This will allow loading
- # dependent libraries from within the same directory as 'library_path'.
+ # If there are dynamic library install names that contain
+ # @rpath tokens, we will provide an rpath load command with the
+ # value of "@loader_path". This will allow loading dependent
+ # libraries from within the same directory as 'library_path'.
if install_name[0] == '@':
needs_loader_path = True
break
@@ -674,16 +698,19 @@ def osx_fix_rpaths_for_library(library_path, qt_lib_dir):
back_tick('install_name_tool -add_rpath {rpath} {library_path}'.format(
rpath="@loader_path", library_path=library_path))
- # If the library depends on a Qt library, add an rpath load comment pointing to the Qt lib
- # directory.
- osx_add_qt_rpath(library_path, qt_lib_dir, existing_rpath_commands, install_names)
+ # If the library depends on a Qt library, add an rpath load comment
+ # pointing to the Qt lib directory.
+ osx_add_qt_rpath(library_path, qt_lib_dir, existing_rpath_commands,
+ install_names)
def osx_add_qt_rpath(library_path, qt_lib_dir,
existing_rpath_commands = [], library_dependencies = []):
- """ Adds an rpath load command to the Qt lib directory if necessary
+ """
+ Adds an rpath load command to the Qt lib directory if necessary
- Checks if library pointed to by 'library_path' has Qt dependencies, and adds an rpath load
- command that points to the Qt lib directory (qt_lib_dir).
+ Checks if library pointed to by 'library_path' has Qt dependencies,
+ and adds an rpath load command that points to the Qt lib directory
+ (qt_lib_dir).
"""
if not existing_rpath_commands:
existing_rpath_commands = osx_get_rpaths(library_path)
@@ -760,27 +787,29 @@ def detectClang():
def download_and_extract_7z(fileurl, target):
""" Downloads 7z file from fileurl and extract to target """
- print("Downloading fileUrl %s " % fileurl)
+ print("Downloading fileUrl {} ".format(fileurl))
info = ""
try:
localfile, info = urllib.urlretrieve(fileurl)
except:
- print("Error downloading %r : %r" % (fileurl, info))
- raise RuntimeError(' Error downloading ' + fileurl)
+ print("Error downloading {} : {}".format(fileurl, info))
+ raise RuntimeError(' Error downloading {}'.format(fileurl))
try:
outputDir = "-o" + target
- print("calling 7z x %s %s" % (localfile, outputDir))
+ print("calling 7z x {} {}".format(localfile, outputDir))
subprocess.call(["7z", "x", "-y", localfile, outputDir])
except:
- raise RuntimeError(' Error extracting ' + localfile)
+ raise RuntimeError(' Error extracting {}'.format(localfile))
def split_and_strip(input):
lines = [s.strip() for s in input.splitlines()]
return lines
def ldd_get_dependencies(executable_path):
- """ Returns a dictionary of dependencies that `executable_path` depends on.
+ """
+ Returns a dictionary of dependencies that `executable_path`
+ depends on.
The keys are library names and the values are the library paths.
@@ -795,16 +824,20 @@ def ldd_get_dependencies(executable_path):
dependencies[match.group(1)] = match.group(2)
return dependencies
-def ldd_get_paths_for_dependencies(dependencies_regex, executable_path = None, dependencies = None):
- """ Returns file paths to shared library dependencies that match given `dependencies_regex`
- against given `executable_path`.
+def ldd_get_paths_for_dependencies(dependencies_regex, executable_path = None,
+ dependencies = None):
+ """
+ Returns file paths to shared library dependencies that match given
+ `dependencies_regex` against given `executable_path`.
- The function retrieves the list of shared library dependencies using ld.so for the given
- `executable_path` in order to search for libraries that match the `dependencies_regex`, and
- then returns a list of absolute paths of the matching libraries.
+ The function retrieves the list of shared library dependencies using
+ ld.so for the given `executable_path` in order to search for
+ libraries that match the `dependencies_regex`, and then returns a
+ list of absolute paths of the matching libraries.
- If no matching library is found in the list of dependencies, an empty list is returned.
- """
+ If no matching library is found in the list of dependencies,
+ an empty list is returned.
+ """
if not dependencies and not executable_path:
return None
@@ -823,14 +856,19 @@ def ldd_get_paths_for_dependencies(dependencies_regex, executable_path = None, d
return paths
def ldd(executable_path):
- """ Returns ld.so output of shared library dependencies for given `executable_path`.
-
- This is a partial port of /usr/bin/ldd from bash to Python. The dependency list is retrieved
- by setting the LD_TRACE_LOADED_OBJECTS=1 environment variable, and executing the given path
- via the dynamic loader ld.so.
-
- Only works on Linux. The port is required to make this work on systems that might not have ldd.
- This is because ldd (on Ubuntu) is shipped in the libc-bin package that, which might have a
+ """
+ Returns ld.so output of shared library dependencies for given
+ `executable_path`.
+
+ This is a partial port of /usr/bin/ldd from bash to Python.
+ The dependency list is retrieved by setting the
+ LD_TRACE_LOADED_OBJECTS=1 environment variable, and executing the
+ given path via the dynamic loader ld.so.
+
+ Only works on Linux. The port is required to make this work on
+ systems that might not have ldd.
+ This is because ldd (on Ubuntu) is shipped in the libc-bin package
+ that, which might have a
minuscule percentage of not being installed.
Parameters
@@ -845,24 +883,29 @@ def ldd(executable_path):
"""
chosen_rtld = None
- # List of ld's considered by ldd on Ubuntu (here's hoping it's the same on all distros).
- rtld_list = ["/lib/ld-linux.so.2", "/lib64/ld-linux-x86-64.so.2", "/libx32/ld-linux-x32.so.2"]
+ # List of ld's considered by ldd on Ubuntu (here's hoping it's the
+ # same on all distros).
+ rtld_list = ["/lib/ld-linux.so.2", "/lib64/ld-linux-x86-64.so.2",
+ "/libx32/ld-linux-x32.so.2"]
# Choose appropriate runtime dynamic linker.
for rtld in rtld_list:
if os.path.isfile(rtld) and os.access(rtld, os.X_OK):
(_, _, code) = back_tick(rtld, True)
- # Code 127 is returned by ld.so when called without any arguments (some kind of sanity
- # check I guess).
+ # Code 127 is returned by ld.so when called without any
+ # arguments (some kind of sanity check I guess).
if code == 127:
- (_, _, code) = back_tick("{} --verify {}".format(rtld, executable_path), True)
- # Codes 0 and 2 mean given executable_path can be understood by ld.so.
+ (_, _, code) = back_tick("{} --verify {}".format(rtld,
+ executable_path), True)
+ # Codes 0 and 2 mean given executable_path can be
+ # understood by ld.so.
if code in [0, 2]:
chosen_rtld = rtld
break
if not chosen_rtld:
- raise RuntimeError('Could not find appropriate ld.so to query for dependencies.')
+ raise RuntimeError("Could not find appropriate ld.so to query "
+ "for dependencies.")
# Query for shared library dependencies.
rtld_env = "LD_TRACE_LOADED_OBJECTS=1"
@@ -871,8 +914,8 @@ def ldd(executable_path):
if return_code == 0:
return out
else:
- raise RuntimeError('ld.so failed to query for dependent shared libraries '
- 'of {} '.format(executable_path))
+ raise RuntimeError("ld.so failed to query for dependent shared "
+ "libraries of {} ".format(executable_path))
def find_files_using_glob(path, pattern):
""" Returns list of files that matched glob `pattern` in `path`. """
@@ -887,17 +930,21 @@ def find_qt_core_library_glob(lib_dir):
return maybe_file[0]
return None
-# @TODO: Possibly fix ICU library copying on macOS and Windows. This would require
-# to implement the equivalent of the custom written ldd for the specified platforms.
-# This has less priority because ICU libs are not used in the default Qt configuration build.
-
+# @TODO: Possibly fix ICU library copying on macOS and Windows.
+# This would require to implement the equivalent of the custom written
+# ldd for the specified platforms.
+# This has less priority because ICU libs are not used in the default
+# Qt configuration build.
def copy_icu_libs(destination_lib_dir):
- """ Copy ICU libraries that QtCore depends on, to given `destination_lib_dir`. """
+ """
+ Copy ICU libraries that QtCore depends on,
+ to given `destination_lib_dir`.
+ """
qt_core_library_path = find_qt_core_library_glob(destination_lib_dir)
if not qt_core_library_path or not os.path.exists(qt_core_library_path):
raise RuntimeError('QtCore library does not exist at path: {}. '
- 'Failed to copy ICU libraries.'.format(qt_core_library_path))
+ 'Failed to copy ICU libraries.'.format(qt_core_library_path))
dependencies = ldd_get_dependencies(qt_core_library_path)
@@ -911,9 +958,11 @@ def copy_icu_libs(destination_lib_dir):
break
if icu_required:
- paths = ldd_get_paths_for_dependencies(icu_regex, dependencies=dependencies)
+ paths = ldd_get_paths_for_dependencies(icu_regex,
+ dependencies=dependencies)
if not paths:
- raise RuntimeError('Failed to find the necessary ICU libraries required by QtCore.')
+ raise RuntimeError("Failed to find the necessary ICU libraries "
+ "required by QtCore.")
log.info('Copying the detected ICU libraries required by QtCore.')
if not os.path.exists(destination_lib_dir):
@@ -923,12 +972,14 @@ def copy_icu_libs(destination_lib_dir):
basename = os.path.basename(path)
destination = os.path.join(destination_lib_dir, basename)
copyfile(path, destination, force_copy_symlink=True)
- # Patch the ICU libraries to contain the $ORIGIN rpath value, so that only the local
- # package libraries are used.
+ # Patch the ICU libraries to contain the $ORIGIN rpath
+ # value, so that only the local package libraries are used.
linuxSetRPaths(destination, '$ORIGIN')
- # Patch the QtCore library to find the copied over ICU libraries (if necessary).
- log.info('Checking if QtCore library needs a new rpath to make it work with ICU libs.')
+ # Patch the QtCore library to find the copied over ICU libraries
+ # (if necessary).
+ log.info("Checking if QtCore library needs a new rpath to make it "
+ "work with ICU libs.")
rpaths = linuxGetRPaths(qt_core_library_path)
if not rpaths or not rpathsHasOrigin(rpaths):
log.info('Patching QtCore library to contain $ORIGIN rpath.')
@@ -944,19 +995,24 @@ def linuxSetRPaths(executable_path, rpath_string):
patchelf_path = os.path.join(script_dir, "patchelf")
setattr(linuxSetRPaths, "patchelf_path", patchelf_path)
- cmd = [linuxSetRPaths.patchelf_path, '--set-rpath', rpath_string, executable_path]
+ cmd = [linuxSetRPaths.patchelf_path, '--set-rpath',
+ rpath_string, executable_path]
if run_process(cmd) != 0:
- raise RuntimeError("Error patching rpath in {}".format(executable_path))
+ raise RuntimeError("Error patching rpath in {}".format(
+ executable_path))
def linuxGetRPaths(executable_path):
- """ Returns a list of run path values embedded in the executable or just an empty list. """
+ """
+ Returns a list of run path values embedded in the executable or just
+ an empty list.
+ """
cmd = "readelf -d {}".format(executable_path)
(out, err, code) = back_tick(cmd, True)
if code != 0:
- raise RuntimeError('Running `readelf -d {}` failed with '
- 'error output:\n {}. '.format(executable_path, err))
+ raise RuntimeError("Running `readelf -d {}` failed with error "
+ "output:\n {}. ".format(executable_path, err))
lines = split_and_strip(out)
pattern = re.compile(r"^.+?\(RUNPATH\).+?\[(.+?)\]$")
@@ -975,7 +1031,10 @@ def linuxGetRPaths(executable_path):
return rpaths
def rpathsHasOrigin(rpaths):
- """ Return True if the specified list of rpaths has an "$ORIGIN" value (aka current dir). """
+ """
+ Return True if the specified list of rpaths has an "$ORIGIN" value
+ (aka current dir).
+ """
if not rpaths:
return False
@@ -987,8 +1046,10 @@ def rpathsHasOrigin(rpaths):
return False
def memoize(function):
- """ Decorator to wrap a function with a memoizing callable.
- It returns cached values when the wrapped function is called with the same arguments.
+ """
+ Decorator to wrap a function with a memoizing callable.
+ It returns cached values when the wrapped function is called with
+ the same arguments.
"""
memo = {}
def wrapper(*args):
@@ -1008,5 +1069,6 @@ def get_python_dict(python_script_path):
exec(code, {}, python_dict)
return python_dict
except IOError as e:
- print("get_python_dict: Couldn't get dict from python file: {}.".format(python_script_path))
+ print("get_python_dict: Couldn't get dict from python "
+ "file: {}.".format(python_script_path))
raise