Format code using yapf

Author: Eloston
Date: 2018-07-29 07:24:29 +00:00
parent 2bcee6553d
commit 4d527713d1
17 changed files with 335 additions and 229 deletions
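
The reformatting below is mechanical. As a rough sketch of how it can be reproduced (the directory list is an assumption based on the files touched, not taken from the commit), yapf can be run recursively over the Python sources; it picks up the .style.yapf file automatically when invoked from the repository root:

    # Sketch: re-run the formatter the way this commit was likely produced.
    # Assumes yapf is installed; --in-place and --recursive are standard yapf flags.
    # The directory arguments are an assumption, not part of the commit.
    import subprocess
    subprocess.run(['yapf', '--in-place', '--recursive', 'buildkit/', 'devutils/'], check=True)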

.style.yapf (new file)

@@ -0,0 +1,8 @@
+[style]
+based_on_style = pep8
+allow_split_before_dict_value = false
+coalesce_brackets = true
+column_limit = 100
+indent_width = 4
+join_multiple_lines = true
+spaces_before_comment = 1
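
To preview what these options do to a fragment of code, yapf's Python API can be used directly; a minimal sketch, assuming yapf is installed and the working directory contains the .style.yapf above (the snippet itself is hypothetical):

    # Sketch: format a code fragment under the style defined above.
    # FormatCode is yapf's public API; it returns (formatted_code, changed).
    from yapf.yapflib.yapf_api import FormatCode

    snippet = ("parser.add_argument('-b', '--bundle', metavar='PATH', dest='bundle', "
               "required=True, action=NewBundleAction, help='Path to the bundle.')\n")
    formatted, changed = FormatCode(snippet, style_config='.style.yapf')
    print(formatted)  # Long calls are split across lines under the 100-column limit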

@@ -4,7 +4,6 @@
 # Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """
 CLI entry point when invoking the module directly

@@ -4,7 +4,6 @@
 # Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """
 buildkit: A small helper utility for building ungoogled-chromium.
@@ -24,9 +23,11 @@ from .extraction import prune_dir
 # Classes
 class _CLIError(RuntimeError):
     """Custom exception for printing argument parser errors from callbacks"""
+
+
 class NewBundleAction(argparse.Action): #pylint: disable=too-few-public-methods
     """argparse.ArgumentParser action handler with more verbose logging"""
@@ -46,35 +47,50 @@ class NewBundleAction(argparse.Action): #pylint: disable=too-few-public-methods
             parser.exit(status=1)
         setattr(namespace, self.dest, bundle)

 # Methods
 def setup_bundle_arg(parser):
     """Helper to add an argparse.ArgumentParser argument for a config bundle"""
     parser.add_argument(
-        '-b', '--bundle', metavar='PATH', dest='bundle', required=True, action=NewBundleAction,
+        '-b',
+        '--bundle',
+        metavar='PATH',
+        dest='bundle',
+        required=True,
+        action=NewBundleAction,
         help='Path to the bundle. Dependencies must reside next to the bundle.')

 def _add_downloads(subparsers):
     """Retrieve, check, and unpack downloads"""

     def _add_common_args(parser):
         setup_bundle_arg(parser)
         parser.add_argument(
-            '-c', '--cache', type=Path, required=True,
+            '-c',
+            '--cache',
+            type=Path,
+            required=True,
             help='Path to the directory to cache downloads.')

     def _retrieve_callback(args):
-        downloads.retrieve_downloads(
-            args.bundle, args.cache, args.show_progress, args.disable_ssl_verification)
+        downloads.retrieve_downloads(args.bundle, args.cache, args.show_progress,
+                                     args.disable_ssl_verification)
         try:
             downloads.check_downloads(args.bundle, args.cache)
         except downloads.HashMismatchError as exc:
             get_logger().error('File checksum does not match: %s', exc)
             raise _CLIError()

     def _unpack_callback(args):
         extractors = {
             ExtractorEnum.SEVENZIP: args.sevenz_path,
             ExtractorEnum.TAR: args.tar_path,
         }
         downloads.unpack_downloads(args.bundle, args.cache, args.output, extractors)

     # downloads
     parser = subparsers.add_parser(
         'downloads', help=_add_downloads.__doc__ + '.', description=_add_downloads.__doc__)
@@ -83,36 +99,45 @@ def _add_downloads(subparsers):
     # downloads retrieve
     retrieve_parser = subsubparsers.add_parser(
-        'retrieve', help='Retrieve and check download files',
+        'retrieve',
+        help='Retrieve and check download files',
         description='Retrieves and checks downloads without unpacking.')
     _add_common_args(retrieve_parser)
     retrieve_parser.add_argument(
-        '--hide-progress-bar', action='store_false', dest='show_progress',
+        '--hide-progress-bar',
+        action='store_false',
+        dest='show_progress',
         help='Hide the download progress.')
     retrieve_parser.add_argument(
-        '--disable-ssl-verification', action='store_true',
+        '--disable-ssl-verification',
+        action='store_true',
         help='Disables certification verification for downloads using HTTPS.')
     retrieve_parser.set_defaults(callback=_retrieve_callback)

     # downloads unpack
     unpack_parser = subsubparsers.add_parser(
-        'unpack', help='Unpack download files',
+        'unpack',
+        help='Unpack download files',
         description='Verifies hashes of and unpacks download files into the specified directory.')
     _add_common_args(unpack_parser)
     unpack_parser.add_argument(
-        '--tar-path', default='tar',
+        '--tar-path',
+        default='tar',
         help=('(Linux and macOS only) Command or path to the BSD or GNU tar '
               'binary for extraction. Default: %(default)s'))
     unpack_parser.add_argument(
-        '--7z-path', dest='sevenz_path', default=SEVENZIP_USE_REGISTRY,
+        '--7z-path',
+        dest='sevenz_path',
+        default=SEVENZIP_USE_REGISTRY,
         help=('Command or path to 7-Zip\'s "7z" binary. If "_use_registry" is '
               'specified, determine the path from the registry. Default: %(default)s'))
-    unpack_parser.add_argument(
-        'output', type=Path, help='The directory to unpack to.')
+    unpack_parser.add_argument('output', type=Path, help='The directory to unpack to.')
     unpack_parser.set_defaults(callback=_unpack_callback)

 def _add_prune(subparsers):
     """Prunes binaries in the given path."""

     def _callback(args):
         if not args.directory.exists():
             get_logger().error('Specified directory does not exist: %s', args.directory)
@@ -121,15 +146,16 @@ def _add_prune(subparsers):
         if unremovable_files:
             get_logger().error('Files could not be pruned: %s', unremovable_files)
             raise _CLIError()

-    parser = subparsers.add_parser(
-        'prune', help=_add_prune.__doc__, description=_add_prune.__doc__)
+    parser = subparsers.add_parser('prune', help=_add_prune.__doc__, description=_add_prune.__doc__)
     setup_bundle_arg(parser)
-    parser.add_argument(
-        'directory', type=Path, help='The directory to apply binary pruning.')
+    parser.add_argument('directory', type=Path, help='The directory to apply binary pruning.')
     parser.set_defaults(callback=_callback)

 def _add_domains(subparsers):
     """Operations with domain substitution"""

     def _callback(args):
         try:
             if args.reverting:
@@ -148,6 +174,7 @@ def _add_domains(subparsers):
         except KeyError as exc:
             get_logger().error('%s', exc)
             raise _CLIError()
+
     # domains
     parser = subparsers.add_parser(
         'domains', help=_add_domains.__doc__, description=_add_domains.__doc__)
@@ -158,39 +185,49 @@ def _add_domains(subparsers):
     # domains apply
     apply_parser = subsubparsers.add_parser(
-        'apply', help='Apply domain substitution',
+        'apply',
+        help='Apply domain substitution',
         description='Applies domain substitution and creates the domain substitution cache.')
     setup_bundle_arg(apply_parser)
     apply_parser.add_argument(
-        '-c', '--cache', type=Path, required=True,
+        '-c',
+        '--cache',
+        type=Path,
+        required=True,
         help='The path to the domain substitution cache. The path must not already exist.')
     apply_parser.add_argument(
-        'directory', type=Path,
-        help='The directory to apply domain substitution')
+        'directory', type=Path, help='The directory to apply domain substitution')
     apply_parser.set_defaults(reverting=False)

     # domains revert
     revert_parser = subsubparsers.add_parser(
-        'revert', help='Revert domain substitution',
+        'revert',
+        help='Revert domain substitution',
         description='Reverts domain substitution based only on the domain substitution cache.')
     revert_parser.add_argument(
-        'directory', type=Path,
-        help='The directory to reverse domain substitution')
+        'directory', type=Path, help='The directory to reverse domain substitution')
     revert_parser.add_argument(
-        '-c', '--cache', type=Path, required=True,
+        '-c',
+        '--cache',
+        type=Path,
+        required=True,
         help=('The path to the domain substitution cache. '
               'The path must exist and will be removed if successful.'))
     revert_parser.set_defaults(reverting=True)

 def _add_patches(subparsers):
     """Operations with patches"""

     def _export_callback(args):
         patches.export_patches(args.bundle, args.output)

     def _apply_callback(args):
         patches.apply_patches(
             patches.patch_paths_by_bundle(args.bundle),
             args.directory,
             patch_bin_path=args.patch_bin)

     # patches
     parser = subparsers.add_parser(
         'patches', help=_add_patches.__doc__, description=_add_patches.__doc__)
@@ -199,11 +236,13 @@ def _add_patches(subparsers):
     # patches export
     export_parser = subsubparsers.add_parser(
-        'export', help='Export patches in GNU quilt-compatible format',
+        'export',
+        help='Export patches in GNU quilt-compatible format',
         description='Export a config bundle\'s patches to a quilt-compatible format')
     setup_bundle_arg(export_parser)
     export_parser.add_argument(
-        'output', type=Path,
+        'output',
+        type=Path,
         help='The directory to write to. It must either be empty or not exist.')
     export_parser.set_defaults(callback=_export_callback)
@@ -216,10 +255,13 @@ def _add_patches(subparsers):
     apply_parser.add_argument('directory', type=Path, help='The source tree to apply patches.')
     apply_parser.set_defaults(callback=_apply_callback)

+
 def _add_gnargs(subparsers):
     """Operations with GN arguments"""
+
     def _print_callback(args):
         print(str(args.bundle.gn_flags), end='')
+
     # gnargs
     parser = subparsers.add_parser(
         'gnargs', help=_add_gnargs.__doc__, description=_add_gnargs.__doc__)
@@ -227,15 +269,17 @@ def _add_gnargs(subparsers):
     # gnargs print
     print_parser = subsubparsers.add_parser(
-        'print', help='Prints GN args in args.gn format',
+        'print',
+        help='Prints GN args in args.gn format',
         description='Prints a list of GN args in args.gn format to standard output')
     setup_bundle_arg(print_parser)
     print_parser.set_defaults(callback=_print_callback)

+
 def main(arg_list=None):
     """CLI entry point"""
-    parser = argparse.ArgumentParser(description=__doc__,
-                                     formatter_class=argparse.RawTextHelpFormatter)
+    parser = argparse.ArgumentParser(
+        description=__doc__, formatter_class=argparse.RawTextHelpFormatter)
     subparsers = parser.add_subparsers(title='Available commands', dest='command')
     subparsers.required = True # Workaround for http://bugs.python.org/issue9253#msg186387

@@ -3,7 +3,6 @@
 # Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Common code and constants"""

 import configparser
@@ -25,19 +24,24 @@ _ENV_FORMAT = "BUILDKIT_{}"
 # Helpers for third_party.schema
 def schema_dictcast(data):
     """Cast data to dictionary for third_party.schema and configparser data structures"""
     return schema.And(schema.Use(dict), data)

+
 def schema_inisections(data):
     """Cast configparser data structure to dict and remove DEFAULT section"""
     return schema_dictcast({configparser.DEFAULTSECT: object, **data})

+
 # Public classes
 class BuildkitError(Exception):
     """Represents a generic custom error from buildkit"""
+
+
 class BuildkitAbort(BuildkitError):
     """
     Exception thrown when all details have been logged and buildkit aborts.
@@ -45,20 +49,24 @@ class BuildkitAbort(BuildkitError):
     It should only be caught by the user of buildkit's library interface.
     """

+
 class PlatformEnum(enum.Enum):
     """Enum for platforms that need distinction for certain functionality"""
     UNIX = 'unix' # Currently covers anything that isn't Windows
     WINDOWS = 'windows'

+
 class ExtractorEnum: #pylint: disable=too-few-public-methods
     """Enum for extraction binaries"""
     SEVENZIP = '7z'
     TAR = 'tar'

+
 # Public methods
-def get_logger(name=__package__, initial_level=logging.DEBUG,
-               prepend_timestamp=True, log_init=True):
+def get_logger(name=__package__, initial_level=logging.DEBUG, prepend_timestamp=True,
+               log_init=True):
     '''Gets the named logger'''
     logger = logging.getLogger(name)
@@ -84,6 +92,7 @@ def get_logger(name=__package__, initial_level=logging.DEBUG,
         logger.debug("Initialized logger '%s'", name)
     return logger

+
 def dir_empty(path):
     """
     Returns True if the directory is empty; False otherwise
@@ -96,6 +105,7 @@ def dir_empty(path):
             return True
     return False

+
 def ensure_empty_dir(path, parents=False):
     """
     Makes a directory at path if it doesn't exist. If it exists, check if it is empty.
@@ -111,6 +121,7 @@ def ensure_empty_dir(path, parents=False):
         if not dir_empty(path):
             raise exc

+
 def get_running_platform():
     """
     Returns a PlatformEnum value indicating the platform that buildkit is running on.
@@ -124,18 +135,19 @@ def get_running_platform():
     # Only Windows and UNIX-based platforms need to be distinguished right now.
     return PlatformEnum.UNIX

+
 def _read_version_ini():
-    version_schema = schema.Schema(schema_inisections({
-        'version': schema_dictcast({
-            'chromium_version': schema.And(str, len),
-            'release_revision': schema.And(str, len),
-            schema.Optional('release_extra'): schema.And(str, len),
-        })
-    }))
+    version_schema = schema.Schema(
+        schema_inisections({
+            'version': schema_dictcast({
+                'chromium_version': schema.And(str, len),
+                'release_revision': schema.And(str, len),
+                schema.Optional('release_extra'): schema.And(str, len),
+            })
+        }))
     version_parser = configparser.ConfigParser()
     version_parser.read(
-        str(Path(__file__).absolute().parent.parent / 'version.ini'),
-        encoding=ENCODING)
+        str(Path(__file__).absolute().parent.parent / 'version.ini'), encoding=ENCODING)
     try:
         version_schema.validate(version_parser)
     except schema.SchemaError as exc:
@@ -143,20 +155,24 @@ def _read_version_ini():
         raise exc
     return version_parser

+
 def get_chromium_version():
     """Returns the Chromium version."""
     return _VERSION_INI['version']['chromium_version']

+
 def get_release_revision():
     """Returns the release revision."""
     return _VERSION_INI['version']['release_revision']

+
 def get_release_extra(fallback=None):
     """
     Return the release revision extra info, or returns fallback if it is not defined.
     """
     return _VERSION_INI['version'].get('release_extra', fallback=fallback)

+
 def get_version_string():
     """
     Returns a version string containing all information in a Debian-like format.
@@ -167,4 +183,5 @@ def get_version_string():
         result += '~{}'.format(release_extra)
     return result

+
 _VERSION_INI = _read_version_ini()

@@ -3,7 +3,6 @@
 # Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """
 Build configuration generation implementation
 """
@@ -16,16 +15,17 @@ import io
 import re
 from pathlib import Path

-from .common import (
-    ENCODING, BuildkitError, ExtractorEnum, get_logger, get_chromium_version)
+from .common import (ENCODING, BuildkitError, ExtractorEnum, get_logger, get_chromium_version)
 from .downloads import HashesURLEnum
 from .third_party import schema

 # Classes
 class BuildkitConfigError(BuildkitError):
     """Exception class for the config module"""
+
+
 class _ConfigFile(abc.ABC): #pylint: disable=too-few-public-methods
     """
     Base config file class
@@ -66,6 +66,7 @@ class _ConfigFile(abc.ABC): #pylint: disable=too-few-public-methods
     def __str__(self):
         """String contents of the config file"""

+
 class _IniConfigFile(_ConfigFile): #pylint: disable=too-few-public-methods
     """
     Base class for INI config files
@@ -82,13 +83,14 @@ class _IniConfigFile(_ConfigFile): #pylint: disable=too-few-public-methods
         Raises schema.SchemaError if validation fails
         """
+
         def _section_generator(data):
             for section in data:
                 if section == configparser.DEFAULTSECT:
                     continue
-                yield section, dict(filter(
-                    lambda x: x[0] not in self._ini_vars,
-                    data.items(section)))
+                yield section, dict(
+                    filter(lambda x: x[0] not in self._ini_vars, data.items(section)))
+
         new_data = configparser.ConfigParser(defaults=self._ini_vars)
         with path.open(encoding=ENCODING) as ini_file:
             new_data.read_file(ini_file, source=str(path))
@@ -97,8 +99,8 @@ class _IniConfigFile(_ConfigFile): #pylint: disable=too-few-public-methods
         try:
             self._schema.validate(dict(_section_generator(new_data)))
         except schema.SchemaError as exc:
-            get_logger().error(
-                'INI file for %s failed schema validation: %s', type(self).__name__, path)
+            get_logger().error('INI file for %s failed schema validation: %s',
+                               type(self).__name__, path)
             raise exc
         return new_data
@@ -138,6 +140,7 @@ class _IniConfigFile(_ConfigFile): #pylint: disable=too-few-public-methods
         """Returns an iterator over the section names"""
         return iter(self._data.sections())

+
 class ListConfigFile(_ConfigFile): #pylint: disable=too-few-public-methods
     """
     Represents a simple newline-delimited list
@@ -165,6 +168,7 @@ class ListConfigFile(_ConfigFile): #pylint: disable=too-few-public-methods
         """Returns an iterator over the list items"""
         return iter(self._data)

+
 class MapConfigFile(_ConfigFile):
     """Represents a simple string-keyed and string-valued dictionary"""
@@ -178,8 +182,7 @@ class MapConfigFile(_ConfigFile):
                 key, value = line.split('=')
                 if key in new_data:
                     raise ValueError(
-                        'Map file "%s" contains key "%s" at least twice.' %
-                        (path, key))
+                        'Map file "%s" contains key "%s" at least twice.' % (path, key))
                 new_data[key] = value
         return new_data
@@ -218,6 +221,7 @@ class MapConfigFile(_ConfigFile):
         """
         return self._data.items()

+
 class BundleMetaIni(_IniConfigFile):
     """Represents bundlemeta.ini files"""
@@ -245,6 +249,7 @@ class BundleMetaIni(_IniConfigFile):
             return [x.strip() for x in self['bundle']['depends'].split(',')]
         return tuple()

+
 class DomainRegexList(ListConfigFile):
     """Representation of a domain_regex_list file"""
     _regex_pair_tuple = collections.namedtuple('DomainRegexPair', ('pattern', 'replacement'))
@@ -278,15 +283,18 @@ class DomainRegexList(ListConfigFile):
         """
         Returns a single expression to search for domains
         """
-        return re.compile('|'.join(
-            map(lambda x: x.split(self._PATTERN_REPLACE_DELIM, 1)[0], self)))
+        return re.compile('|'.join(map(lambda x: x.split(self._PATTERN_REPLACE_DELIM, 1)[0], self)))

+
 class DownloadsIni(_IniConfigFile): #pylint: disable=too-few-public-methods
     """Representation of an downloads.ini file"""
     _hashes = ('md5', 'sha1', 'sha256', 'sha512')
     _nonempty_keys = ('url', 'download_filename')
-    _optional_keys = ('version', 'strip_leading_dirs',)
+    _optional_keys = (
+        'version',
+        'strip_leading_dirs',
+    )
     _passthrough_properties = (*_nonempty_keys, *_optional_keys, 'extractor')
     _ini_vars = {
         '_chromium_version': get_chromium_version(),
@@ -294,9 +302,11 @@ class DownloadsIni(_IniConfigFile): #pylint: disable=too-few-public-methods
     _schema = schema.Schema({
         schema.Optional(schema.And(str, len)): {
-            **{x: schema.And(str, len) for x in _nonempty_keys},
+            **{x: schema.And(str, len)
+               for x in _nonempty_keys},
             'output_path': (lambda x: str(Path(x).relative_to(''))),
-            **{schema.Optional(x): schema.And(str, len) for x in _optional_keys},
+            **{schema.Optional(x): schema.And(str, len)
+               for x in _optional_keys},
             schema.Optional('extractor'): schema.Or(ExtractorEnum.TAR, ExtractorEnum.SEVENZIP),
             schema.Optional(schema.Or(*_hashes)): schema.And(str, len),
             schema.Optional('hash_url'): (
@@ -329,17 +339,16 @@ class DownloadsIni(_IniConfigFile): #pylint: disable=too-few-public-methods
                         hashes_dict[hash_name] = value
                 return hashes_dict
             else:
-                raise AttributeError(
-                    '"{}" has no attribute "{}"'.format(type(self).__name__, name))
+                raise AttributeError('"{}" has no attribute "{}"'.format(type(self).__name__, name))

     def __getitem__(self, section):
         """
         Returns an object with keys as attributes and
         values already pre-processed strings
         """
-        return self._DownloadsProperties(
-            self._data[section], self._passthrough_properties,
-            self._hashes)
+        return self._DownloadsProperties(self._data[section], self._passthrough_properties,
+                                         self._hashes)

+
 class ConfigBundle: #pylint: disable=too-few-public-methods
     """Config bundle implementation"""
@@ -409,8 +418,7 @@ class ConfigBundle: #pylint: disable=too-few-public-methods
         if name in self._ATTR_MAPPING:
             return self.files[self._ATTR_MAPPING[name]]
         else:
-            raise AttributeError(
-                '%s has no attribute "%s"' % type(self).__name__, name)
+            raise AttributeError('%s has no attribute "%s"' % type(self).__name__, name)

     def rebase(self, other):
         """Rebase the current bundle onto other, saving changes into self"""

@@ -3,7 +3,6 @@
 # Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """
 Module for substituting domain names in the source tree with blockable strings.
 """
@@ -28,6 +27,7 @@ _ORIG_DIR = 'orig'

+
 # Private Methods
 def _substitute_path(path, regex_iter):
     """
     Perform domain substitution on path and add it to the domain substitution cache.
@@ -58,8 +58,7 @@
         raise UnicodeDecodeError('Unable to decode with any encoding: %s' % path)
     file_subs = 0
     for regex_pair in regex_iter:
-        content, sub_count = regex_pair.pattern.subn(
-            regex_pair.replacement, content)
+        content, sub_count = regex_pair.pattern.subn(regex_pair.replacement, content)
         file_subs += sub_count
     if file_subs > 0:
         substituted_content = content.encode(encoding)
@@ -69,6 +68,7 @@
         return (zlib.crc32(substituted_content), original_content)
     return (None, None)

+
 def _validate_file_index(index_file, resolved_tree, cache_index_files):
     """
     Validation of file index and hashes against the source tree.
@@ -85,31 +85,30 @@
             get_logger().error('Could not split entry "%s": %s', entry, exc)
             continue
         if not relative_path or not file_hash:
-            get_logger().error(
-                'Entry %s of domain substitution cache file index is not valid',
-                _INDEX_HASH_DELIMITER.join((relative_path, file_hash)))
+            get_logger().error('Entry %s of domain substitution cache file index is not valid',
+                               _INDEX_HASH_DELIMITER.join((relative_path, file_hash)))
             all_hashes_valid = False
             continue
         if not crc32_regex.match(file_hash):
-            get_logger().error(
-                'File index hash for %s does not appear to be a CRC32 hash', relative_path)
+            get_logger().error('File index hash for %s does not appear to be a CRC32 hash',
+                               relative_path)
             all_hashes_valid = False
             continue
         if zlib.crc32((resolved_tree / relative_path).read_bytes()) != int(file_hash, 16):
-            get_logger().error(
-                'Hashes do not match for: %s', relative_path)
+            get_logger().error('Hashes do not match for: %s', relative_path)
            all_hashes_valid = False
             continue
         if relative_path in cache_index_files:
-            get_logger().error(
-                'File %s shows up at least twice in the file index', relative_path)
+            get_logger().error('File %s shows up at least twice in the file index', relative_path)
             all_hashes_valid = False
             continue
         cache_index_files.add(relative_path)
     return all_hashes_valid

+
 # Public Methods
 def apply_substitution(config_bundle, source_tree, domainsub_cache):
     """
     Substitute domains in source_tree with files and substitutions from config_bundle,
@@ -132,8 +131,9 @@ def apply_substitution(config_bundle, source_tree, domainsub_cache):
     resolved_tree = source_tree.resolve()
     regex_pairs = config_bundle.domain_regex.get_pairs()
     fileindex_content = io.BytesIO()
-    with tarfile.open(str(domainsub_cache),
-                      'w:%s' % domainsub_cache.suffix[1:], compresslevel=1) as cache_tar:
+    with tarfile.open(
+            str(domainsub_cache), 'w:%s' % domainsub_cache.suffix[1:],
+            compresslevel=1) as cache_tar:
         orig_dir = Path(_ORIG_DIR)
         for relative_path in config_bundle.domain_substitution:
             if _INDEX_HASH_DELIMITER in relative_path:
@@ -141,8 +141,8 @@ def apply_substitution(config_bundle, source_tree, domainsub_cache):
                 cache_tar.close()
                 domainsub_cache.unlink()
                 raise ValueError(
-                    'Path "%s" contains the file index hash delimiter "%s"' %
-                    relative_path, _INDEX_HASH_DELIMITER)
+                    'Path "%s" contains the file index hash delimiter "%s"' % relative_path,
+                    _INDEX_HASH_DELIMITER)
             path = resolved_tree / relative_path
             if not path.exists():
                 get_logger().warning('Skipping non-existant path: %s', path)
@@ -150,8 +150,8 @@ def apply_substitution(config_bundle, source_tree, domainsub_cache):
             if crc32_hash is None:
                 get_logger().info('Path has no substitutions: %s', relative_path)
                 continue
-            fileindex_content.write('{}{}{:08x}\n'.format(
-                relative_path, _INDEX_HASH_DELIMITER, crc32_hash).encode(ENCODING))
+            fileindex_content.write('{}{}{:08x}\n'.format(relative_path, _INDEX_HASH_DELIMITER,
+                                                          crc32_hash).encode(ENCODING))
             orig_tarinfo = tarfile.TarInfo(str(orig_dir / relative_path))
             orig_tarinfo.size = len(orig_content)
             with io.BytesIO(orig_content) as orig_file:
@@ -161,6 +161,7 @@ def apply_substitution(config_bundle, source_tree, domainsub_cache):
         fileindex_content.seek(0)
         cache_tar.addfile(fileindex_tarinfo, fileindex_content)

+
 def revert_substitution(domainsub_cache, source_tree):
     """
     Revert domain substitution on source_tree using the pre-domain
@@ -196,8 +197,8 @@ def revert_substitution(domainsub_cache, source_tree):
     cache_index_files = set() # All files in the file index
-    with tempfile.TemporaryDirectory(prefix='domsubcache_files',
-                                     dir=str(resolved_tree)) as tmp_extract_name:
+    with tempfile.TemporaryDirectory(
+            prefix='domsubcache_files', dir=str(resolved_tree)) as tmp_extract_name:
         extract_path = Path(tmp_extract_name)
         get_logger().debug('Extracting domain substitution cache...')
         extract_tar_file(domainsub_cache, extract_path, Path())
@@ -206,9 +207,8 @@ def revert_substitution(domainsub_cache, source_tree):
         get_logger().debug('Validating substituted files in source tree...')
         with (extract_path / _INDEX_LIST).open('rb') as index_file: #pylint: disable=no-member
             if not _validate_file_index(index_file, resolved_tree, cache_index_files):
-                raise KeyError(
-                    'Domain substitution cache file index is corrupt or hashes mismatch '
-                    'the source tree.')
+                raise KeyError('Domain substitution cache file index is corrupt or hashes mismatch '
+                               'the source tree.')

         # Move original files over substituted ones
         get_logger().debug('Moving original files over substituted ones...')

@@ -3,7 +3,6 @@
 # Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """
 Module for the downloading, checking, and unpacking of necessary files into the source tree
 """
@@ -18,18 +17,23 @@ from .extraction import extract_tar_file, extract_with_7z
 # Constants
 class HashesURLEnum(str, enum.Enum):
     """Enum for supported hash URL schemes"""
     chromium = 'chromium'

+
 # Custom Exceptions
 class HashMismatchError(BuildkitError):
     """Exception for computed hashes not matching expected hashes"""
     pass

+
 class _UrlRetrieveReportHook: #pylint: disable=too-few-public-methods
     """Hook for urllib.request.urlretrieve to log progress information to console"""
+
     def __init__(self):
         self._max_len_printed = 0
         self._last_percentage = None
@@ -48,6 +52,7 @@ class _UrlRetrieveReportHook: #pylint: disable=too-few-public-methods
         self._max_len_printed = len(status_line)
         print('\r' + status_line, end='')

+
 def _download_if_needed(file_path, url, show_progress):
     """
     Downloads a file from url to the specified path file_path if necessary.
@@ -65,6 +70,7 @@ def _download_if_needed(file_path, url, show_progress):
     if show_progress:
         print()

+
 def _chromium_hashes_generator(hashes_path):
     with hashes_path.open(encoding=ENCODING) as hashes_file:
         hash_lines = hashes_file.read().splitlines()
@@ -74,10 +80,12 @@ def _chromium_hashes_generator(hashes_path):
         else:
             get_logger().warning('Skipping unknown hash algorithm: %s', hash_name)

+
 def _downloads_iter(config_bundle):
     """Iterator for the downloads ordered by output path"""
     return sorted(config_bundle.downloads, key=(lambda x: str(Path(x.output_path))))

+
 def _get_hash_pairs(download_properties, cache_dir):
     """Generator of (hash_name, hash_hex) for the given download"""
     for entry_type, entry_value in download_properties.hashes.items():
@@ -90,6 +98,7 @@ def _get_hash_pairs(download_properties, cache_dir):
         else:
             yield entry_type, entry_value

+
 def retrieve_downloads(config_bundle, cache_dir, show_progress, disable_ssl_verification=False):
     """
     Retrieve downloads into the downloads cache.
@@ -128,6 +137,7 @@ def retrieve_downloads(config_bundle, cache_dir, show_progress, disable_ssl_veri
         if disable_ssl_verification:
             ssl._create_default_https_context = orig_https_context #pylint: disable=protected-access

+
 def check_downloads(config_bundle, cache_dir):
     """
     Check integrity of the downloads cache.
@@ -149,6 +159,7 @@ def check_downloads(config_bundle, cache_dir):
         if not hasher.hexdigest().lower() == hash_hex.lower():
             raise HashMismatchError(download_path)

+
 def unpack_downloads(config_bundle, cache_dir, output_dir, extractors=None):
     """
     Unpack downloads in the downloads cache to output_dir. Assumes all downloads are retrieved.
@@ -180,6 +191,8 @@ def unpack_downloads(config_bundle, cache_dir, output_dir, extractors=None):
             strip_leading_dirs_path = Path(download_properties.strip_leading_dirs)
         extractor_func(
-            archive_path=download_path, output_dir=output_dir,
+            archive_path=download_path,
+            output_dir=output_dir,
             unpack_dir=Path(download_properties.output_path),
-            relative_to=strip_leading_dirs_path, extractors=extractors)
+            relative_to=strip_leading_dirs_path,
+            extractors=extractors)

@@ -3,7 +3,6 @@
 # Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """
 Archive extraction utilities
 """
@@ -14,15 +13,15 @@ import subprocess
 import tarfile
 from pathlib import Path, PurePosixPath

-from .common import (
-    SEVENZIP_USE_REGISTRY, BuildkitAbort, PlatformEnum, ExtractorEnum, get_logger,
-    get_running_platform)
+from .common import (SEVENZIP_USE_REGISTRY, BuildkitAbort, PlatformEnum, ExtractorEnum, get_logger,
+                     get_running_platform)

 DEFAULT_EXTRACTORS = {
     ExtractorEnum.SEVENZIP: SEVENZIP_USE_REGISTRY,
     ExtractorEnum.TAR: 'tar',
 }

+
 def _find_7z_by_registry():
     """
     Return a string to 7-zip's 7z.exe from the Windows Registry.
@@ -42,6 +41,7 @@ def _find_7z_by_registry():
         get_logger().error('7z.exe not found at path from registry: %s', sevenzip_path)
     return sevenzip_path

+
 def _find_extractor_by_cmd(extractor_cmd):
     """Returns a string path to the binary; None if it couldn't be found"""
     if not extractor_cmd:
@@ -50,6 +50,7 @@ def _find_extractor_by_cmd(extractor_cmd):
         return extractor_cmd
     return shutil.which(extractor_cmd)

+
 def _process_relative_to(unpack_root, relative_to):
     """
     For an extractor that doesn't support an automatic transform, move the extracted
@@ -57,14 +58,15 @@ def _process_relative_to(unpack_root, relative_to):
     """
     relative_root = unpack_root / relative_to
     if not relative_root.is_dir():
-        get_logger().error(
-            'Could not find relative_to directory in extracted files: %s', relative_to)
+        get_logger().error('Could not find relative_to directory in extracted files: %s',
+                           relative_to)
         raise BuildkitAbort()
     for src_path in relative_root.iterdir():
         dest_path = unpack_root / src_path.name
         src_path.rename(dest_path)
     relative_root.rmdir()

+
 def prune_dir(unpack_root, ignore_files):
     """
     Delete files under unpack_root listed in ignore_files. Returns an iterable of unremovable files.
@@ -81,16 +83,16 @@ def prune_dir(unpack_root, ignore_files):
             unremovable_files.add(Path(relative_file).as_posix())
     return unremovable_files

+
 def _extract_tar_with_7z(binary, archive_path, output_dir, relative_to):
     get_logger().debug('Using 7-zip extractor')
     if not relative_to is None and (output_dir / relative_to).exists():
-        get_logger().error(
-            'Temporary unpacking directory already exists: %s', output_dir / relative_to)
+        get_logger().error('Temporary unpacking directory already exists: %s',
+                           output_dir / relative_to)
         raise BuildkitAbort()
     cmd1 = (binary, 'x', str(archive_path), '-so')
     cmd2 = (binary, 'x', '-si', '-aoa', '-ttar', '-o{}'.format(str(output_dir)))
-    get_logger().debug('7z command line: %s | %s',
-                       ' '.join(cmd1), ' '.join(cmd2))
+    get_logger().debug('7z command line: %s | %s', ' '.join(cmd1), ' '.join(cmd2))

     proc1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE)
     proc2 = subprocess.Popen(cmd2, stdin=proc1.stdout, stdout=subprocess.PIPE)
@@ -105,6 +107,7 @@ def _extract_tar_with_7z(binary, archive_path, output_dir, relative_to):
     if not relative_to is None:
         _process_relative_to(output_dir, relative_to)

+
 def _extract_tar_with_tar(binary, archive_path, output_dir, relative_to):
     get_logger().debug('Using BSD or GNU tar extractor')
     output_dir.mkdir(exist_ok=True)
@@ -120,10 +123,13 @@ def _extract_tar_with_tar(binary, archive_path, output_dir, relative_to):
     if not relative_to is None:
         _process_relative_to(output_dir, relative_to)

+
 def _extract_tar_with_python(archive_path, output_dir, relative_to):
     get_logger().debug('Using pure Python tar extractor')
+
     class NoAppendList(list):
         """Hack to workaround memory issues with large tar files"""
+
         def append(self, obj):
             pass
@@ -149,8 +155,7 @@ def _extract_tar_with_python(archive_path, output_dir, relative_to):
             if relative_to is None:
                 destination = output_dir / PurePosixPath(tarinfo.name)
             else:
-                destination = output_dir / PurePosixPath(tarinfo.name).relative_to(
-                    relative_to)
+                destination = output_dir / PurePosixPath(tarinfo.name).relative_to(relative_to)
             if tarinfo.issym() and not symlink_supported:
                 # In this situation, TarFile.makelink() will try to create a copy of the
                 # target. But this fails because TarFile.members is empty
@@ -159,8 +164,8 @@ def _extract_tar_with_python(archive_path, output_dir, relative_to):
                 continue
             if tarinfo.islnk():
                 # Derived from TarFile.extract()
-                new_target = output_dir / PurePosixPath(tarinfo.linkname).relative_to(
-                    relative_to)
+                new_target = output_dir / PurePosixPath(
+                    tarinfo.linkname).relative_to(relative_to)
                 tarinfo._link_target = new_target.as_posix() # pylint: disable=protected-access
             if destination.is_symlink():
                 destination.unlink()
@@ -169,8 +174,12 @@ def _extract_tar_with_python(archive_path, output_dir, relative_to):
             get_logger().exception('Exception thrown for tar member: %s', tarinfo.name)
             raise BuildkitAbort()

-def extract_tar_file(archive_path, output_dir, relative_to, #pylint: disable=too-many-arguments
-                     extractors=None):
+
+def extract_tar_file(
+        archive_path,
+        output_dir,
+        relative_to, #pylint: disable=too-many-arguments
+        extractors=None):
     """
     Extract regular or compressed tar archive into the output directory.
@@ -208,8 +217,12 @@ def extract_tar_file(archive_path, output_dir, relative_to, #pylint: disable=too
         # Fallback to Python-based extractor on all platforms
         _extract_tar_with_python(archive_path, output_dir, relative_to)

-def extract_with_7z(archive_path, output_dir, relative_to, #pylint: disable=too-many-arguments
-                    extractors=None):
+
+def extract_with_7z(
+        archive_path,
+        output_dir,
+        relative_to, #pylint: disable=too-many-arguments
+        extractors=None):
     """
     Extract archives with 7-zip into the output directory.
     Only supports archives with one layer of unpacking, so compressed tar archives don't work.
@@ -237,8 +250,8 @@ def extract_with_7z(archive_path, output_dir, relative_to, #pylint: disable=too-
     sevenzip_bin = _find_extractor_by_cmd(sevenzip_cmd)
     if not relative_to is None and (output_dir / relative_to).exists():
-        get_logger().error(
-            'Temporary unpacking directory already exists: %s', output_dir / relative_to)
+        get_logger().error('Temporary unpacking directory already exists: %s',
+                           output_dir / relative_to)
         raise BuildkitAbort()
     cmd = (sevenzip_bin, 'x', str(archive_path), '-aoa', '-o{}'.format(str(output_dir)))
     get_logger().debug('7z command line: %s', ' '.join(cmd))

View File

@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved. # Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
"""Utilities for reading and copying patches""" """Utilities for reading and copying patches"""
import shutil import shutil
@@ -15,6 +14,7 @@ from .common import ENCODING, get_logger, ensure_empty_dir
# Default patches/ directory is next to buildkit # Default patches/ directory is next to buildkit
_DEFAULT_PATCH_DIR = Path(__file__).absolute().parent.parent / 'patches' _DEFAULT_PATCH_DIR = Path(__file__).absolute().parent.parent / 'patches'
def patch_paths_by_bundle(config_bundle, patch_dir=_DEFAULT_PATCH_DIR): def patch_paths_by_bundle(config_bundle, patch_dir=_DEFAULT_PATCH_DIR):
""" """
Returns an iterator of pathlib.Path to patch files in the proper order Returns an iterator of pathlib.Path to patch files in the proper order
@@ -29,6 +29,7 @@ def patch_paths_by_bundle(config_bundle, patch_dir=_DEFAULT_PATCH_DIR):
for relative_path in config_bundle.patch_order: for relative_path in config_bundle.patch_order:
yield patch_dir / relative_path yield patch_dir / relative_path
def export_patches(config_bundle, path, series=Path('series'), patch_dir=_DEFAULT_PATCH_DIR): def export_patches(config_bundle, path, series=Path('series'), patch_dir=_DEFAULT_PATCH_DIR):
""" """
Writes patches and a series file to the directory specified by path. Writes patches and a series file to the directory specified by path.
@@ -53,6 +54,7 @@ def export_patches(config_bundle, path, series=Path('series'), patch_dir=_DEFAUL
with (path / series).open('w', encoding=ENCODING) as file_obj: with (path / series).open('w', encoding=ENCODING) as file_obj:
file_obj.write(str(config_bundle.patch_order)) file_obj.write(str(config_bundle.patch_order))
def apply_patches(patch_path_iter, tree_path, reverse=False, patch_bin_path=None): def apply_patches(patch_path_iter, tree_path, reverse=False, patch_bin_path=None):
""" """
Applies or reverses a list of patches Applies or reverses a list of patches
@@ -68,8 +70,7 @@ def apply_patches(patch_path_iter, tree_path, reverse=False, patch_bin_path=None
""" """
patch_paths = list(patch_path_iter) patch_paths = list(patch_path_iter)
if patch_bin_path is None: if patch_bin_path is None:
windows_patch_bin_path = (tree_path / windows_patch_bin_path = (tree_path / 'third_party' / 'git' / 'usr' / 'bin' / 'patch.exe')
'third_party' / 'git' / 'usr' / 'bin' / 'patch.exe')
patch_bin_path = Path(shutil.which('patch') or windows_patch_bin_path) patch_bin_path = Path(shutil.which('patch') or windows_patch_bin_path)
if not patch_bin_path.exists(): if not patch_bin_path.exists():
raise ValueError('Could not find the patch binary') raise ValueError('Could not find the patch binary')
@@ -79,15 +80,16 @@ def apply_patches(patch_path_iter, tree_path, reverse=False, patch_bin_path=None
logger = get_logger() logger = get_logger()
for patch_path, patch_num in zip(patch_paths, range(1, len(patch_paths) + 1)): for patch_path, patch_num in zip(patch_paths, range(1, len(patch_paths) + 1)):
cmd = [ cmd = [
str(patch_bin_path), '-p1', '--ignore-whitespace', '-i', str(patch_path), str(patch_bin_path), '-p1', '--ignore-whitespace', '-i',
'-d', str(tree_path), '--no-backup-if-mismatch'] str(patch_path), '-d',
str(tree_path), '--no-backup-if-mismatch'
]
if reverse: if reverse:
cmd.append('--reverse') cmd.append('--reverse')
log_word = 'Reversing' log_word = 'Reversing'
else: else:
cmd.append('--forward') cmd.append('--forward')
log_word = 'Applying' log_word = 'Applying'
logger.info( logger.info('* %s %s (%s/%s)', log_word, patch_path.name, patch_num, len(patch_paths))
'* %s %s (%s/%s)', log_word, patch_path.name, patch_num, len(patch_paths))
logger.debug(' '.join(cmd)) logger.debug(' '.join(cmd))
subprocess.run(cmd, check=True) subprocess.run(cmd, check=True)
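Each pass through the loop above shells out to GNU patch once. A self-contained sketch of a single application under the same flags (the function name is hypothetical; the real code also falls back to a bundled patch.exe on Windows, as shown above):

    import shutil
    import subprocess
    from pathlib import Path

    def apply_one_patch(patch_path: Path, tree_path: Path, reverse: bool = False) -> None:
        patch_bin = shutil.which('patch')
        if patch_bin is None:
            raise ValueError('Could not find the patch binary')
        # -p1 strips the leading path component, -d chdirs into the source tree,
        # --forward skips already-applied patches while --reverse backs one out.
        cmd = [
            patch_bin, '-p1', '--ignore-whitespace', '-i', str(patch_path),
            '-d', str(tree_path), '--no-backup-if-mismatch',
            '--reverse' if reverse else '--forward'
        ]
        subprocess.run(cmd, check=True)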

View File

@@ -4,7 +4,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved. # Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
"""Generates updating_patch_order.list in the buildspace for updating patches""" """Generates updating_patch_order.list in the buildspace for updating patches"""
import argparse import argparse
@@ -16,18 +15,25 @@ from buildkit.common import ENCODING
from buildkit.cli import NewBaseBundleAction from buildkit.cli import NewBaseBundleAction
sys.path.pop(0) sys.path.pop(0)
def main(arg_list=None): def main(arg_list=None):
"""CLI entrypoint""" """CLI entrypoint"""
parser = argparse.ArgumentParser(description=__doc__) parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('base_bundle', action=NewBaseBundleAction, parser.add_argument(
help='The base bundle to generate a patch order from') 'base_bundle',
parser.add_argument('--output', metavar='PATH', type=Path, action=NewBaseBundleAction,
default='buildspace/updating_patch_order.list', help='The base bundle to generate a patch order from')
help='The patch order file to write') parser.add_argument(
'--output',
metavar='PATH',
type=Path,
default='buildspace/updating_patch_order.list',
help='The patch order file to write')
args = parser.parse_args(args=arg_list) args = parser.parse_args(args=arg_list)
with args.output.open('w', encoding=ENCODING) as file_obj: with args.output.open('w', encoding=ENCODING) as file_obj:
file_obj.writelines('%s\n' % x for x in args.base_bundle.patches) file_obj.writelines('%s\n' % x for x in args.base_bundle.patches)
if __name__ == "__main__": if __name__ == "__main__":
main() main()
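For context, the generated updating_patch_order.list is a plain series file, one relative patch path per line, which is also the format quilt reads; a plausible workflow is to point QUILT_SERIES at it before running the quilt-based refresh script that appears later in this diff.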

View File

@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved. # Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
"""Run Pylint over buildkit""" """Run Pylint over buildkit"""
import argparse import argparse
@@ -14,18 +13,18 @@ sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
import pylint_devutils import pylint_devutils
sys.path.pop(0) sys.path.pop(0)
def main(): def main():
"""CLI entrypoint""" """CLI entrypoint"""
parser = argparse.ArgumentParser(description='Run Pylint over buildkit') parser = argparse.ArgumentParser(description='Run Pylint over buildkit')
parser.add_argument('--hide-fixme', action='store_true', help='Hide "fixme" Pylint warnings.')
parser.add_argument( parser.add_argument(
'--hide-fixme', action='store_true', '--show-locally-disabled',
help='Hide "fixme" Pylint warnings.') action='store_true',
parser.add_argument(
'--show-locally-disabled', action='store_true',
help='Show "locally-disabled" Pylint warnings.') help='Show "locally-disabled" Pylint warnings.')
args = parser.parse_args() args = parser.parse_args()
disable = list() disable = ['bad-continuation']
if args.hide_fixme: if args.hide_fixme:
disable.append('fixme') disable.append('fixme')
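The freshly seeded 'bad-continuation' entry is the companion to the yapf reformat: pylint's continued-indentation checker and yapf's column-limit-driven wrapping disagree about where continuation lines should align, so the warning is presumably disabled wholesale rather than fought file by file.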
@@ -46,5 +45,6 @@ def main():
exit(1) exit(1)
exit(0) exit(0)
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved. # Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
"""Run Pylint over any module""" """Run Pylint over any module"""
import argparse import argparse
@@ -13,6 +12,7 @@ from pathlib import Path
from pylint import epylint as lint from pylint import epylint as lint
def run_pylint(modulepath, pylint_options): def run_pylint(modulepath, pylint_options):
"""Runs Pylint. Returns a boolean indicating success""" """Runs Pylint. Returns a boolean indicating success"""
pylint_stats = Path('/run/user/{}/pylint_stats'.format(os.getuid())) pylint_stats = Path('/run/user/{}/pylint_stats'.format(os.getuid()))
@@ -34,19 +34,17 @@ def run_pylint(modulepath, pylint_options):
return False return False
return True return True
def main(): def main():
"""CLI entrypoint""" """CLI entrypoint"""
parser = argparse.ArgumentParser(description='Run Pylint over an arbitrary module') parser = argparse.ArgumentParser(description='Run Pylint over an arbitrary module')
parser.add_argument('--hide-fixme', action='store_true', help='Hide "fixme" Pylint warnings.')
parser.add_argument( parser.add_argument(
'--hide-fixme', action='store_true', '--show-locally-disabled',
help='Hide "fixme" Pylint warnings.') action='store_true',
parser.add_argument(
'--show-locally-disabled', action='store_true',
help='Show "locally-disabled" Pylint warnings.') help='Show "locally-disabled" Pylint warnings.')
parser.add_argument( parser.add_argument('modulepath', type=Path, help='Path to the module to check')
'modulepath', type=Path,
help='Path to the module to check')
args = parser.parse_args() args = parser.parse_args()
if not args.modulepath.exists(): if not args.modulepath.exists():
@@ -55,6 +53,7 @@ def main():
disables = [ disables = [
'wrong-import-position', 'wrong-import-position',
'bad-continuation',
] ]
if args.hide_fixme: if args.hide_fixme:
@@ -71,5 +70,6 @@ def main():
exit(1) exit(1)
exit(0) exit(0)
if __name__ == '__main__': if __name__ == '__main__':
main() main()
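Since this checker drives pylint through the epylint wrapper imported above, here is one way the disables list could reach it, sketched with epylint.py_run from 2018-era pylint (the exact plumbing inside run_pylint is not shown in this hunk, so treat this as an assumption):

    from pylint import epylint

    disables = ['wrong-import-position', 'bad-continuation', 'fixme']
    options = 'buildkit --disable={}'.format(','.join(disables))
    # py_run returns (stdout, stderr) file-like objects when return_std=True
    pylint_stdout, pylint_stderr = epylint.py_run(options, return_std=True)
    print(pylint_stdout.read())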

View File

@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved. # Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
""" """
Update binary pruning and domain substitution lists automatically. Update binary pruning and domain substitution lists automatically.
@@ -19,17 +18,15 @@ from pathlib import Path, PurePosixPath
sys.path.insert(0, str(Path(__file__).resolve().parent.parent)) sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
from buildkit.cli import get_basebundle_verbosely from buildkit.cli import get_basebundle_verbosely
from buildkit.common import ( from buildkit.common import (BUILDSPACE_DOWNLOADS, BUILDSPACE_TREE, ENCODING, BuildkitAbort,
BUILDSPACE_DOWNLOADS, BUILDSPACE_TREE, ENCODING, BuildkitAbort, get_logger, dir_empty) get_logger, dir_empty)
from buildkit.domain_substitution import TREE_ENCODINGS from buildkit.domain_substitution import TREE_ENCODINGS
from buildkit import source_retrieval from buildkit import source_retrieval
sys.path.pop(0) sys.path.pop(0)
# NOTE: Include patterns have precedence over exclude patterns # NOTE: Include patterns have precedence over exclude patterns
# pathlib.Path.match() paths to include in binary pruning # pathlib.Path.match() paths to include in binary pruning
PRUNING_INCLUDE_PATTERNS = [ PRUNING_INCLUDE_PATTERNS = ['components/domain_reliability/baked_in_configs/*']
'components/domain_reliability/baked_in_configs/*'
]
# pathlib.Path.match() paths to exclude from binary pruning # pathlib.Path.match() paths to exclude from binary pruning
PRUNING_EXCLUDE_PATTERNS = [ PRUNING_EXCLUDE_PATTERNS = [
@@ -72,43 +69,19 @@ PRUNING_EXCLUDE_PATTERNS = [
# NOTE: Domain substitution path prefix exclusion has precedence over inclusion patterns # NOTE: Domain substitution path prefix exclusion has precedence over inclusion patterns
# Paths to exclude by prefixes of the POSIX representation for domain substitution # Paths to exclude by prefixes of the POSIX representation for domain substitution
DOMAIN_EXCLUDE_PREFIXES = [ DOMAIN_EXCLUDE_PREFIXES = ['components/test/', 'net/http/transport_security_state_static.json']
'components/test/',
'net/http/transport_security_state_static.json'
]
# pathlib.Path.match() patterns to include in domain substitution # pathlib.Path.match() patterns to include in domain substitution
DOMAIN_INCLUDE_PATTERNS = [ DOMAIN_INCLUDE_PATTERNS = [
'*.h', '*.h', '*.hh', '*.hpp', '*.hxx', '*.cc', '*.cpp', '*.cxx', '*.c', '*.h', '*.json', '*.js',
'*.hh', '*.html', '*.htm', '*.css', '*.py*', '*.grd', '*.sql', '*.idl', '*.mk', '*.gyp*', 'makefile',
'*.hpp', '*.txt', '*.xml', '*.mm', '*.jinja*'
'*.hxx',
'*.cc',
'*.cpp',
'*.cxx',
'*.c',
'*.h',
'*.json',
'*.js',
'*.html',
'*.htm',
'*.css',
'*.py*',
'*.grd',
'*.sql',
'*.idl',
'*.mk',
'*.gyp*',
'makefile',
'*.txt',
'*.xml',
'*.mm',
'*.jinja*'
] ]
# Binary-detection constant # Binary-detection constant
_TEXTCHARS = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7f}) _TEXTCHARS = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7f})
def _is_binary(bytes_data): def _is_binary(bytes_data):
""" """
Returns True if the data seems to be binary data (i.e. not human readable); False otherwise Returns True if the data seems to be binary data (i.e. not human readable); False otherwise
@@ -116,6 +89,7 @@ def _is_binary(bytes_data):
# From: https://stackoverflow.com/a/7392391 # From: https://stackoverflow.com/a/7392391
return bool(bytes_data.translate(None, _TEXTCHARS)) return bool(bytes_data.translate(None, _TEXTCHARS))
def should_prune(path, relative_path): def should_prune(path, relative_path):
""" """
Returns True if a path should be pruned from the buildspace tree; False otherwise Returns True if a path should be pruned from the buildspace tree; False otherwise
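The _is_binary helper above deserves a gloss: _TEXTCHARS enumerates every byte that can occur in human-readable text (bell through escape, plus the printable range minus DEL), and bytes.translate(None, delete_chars) strips them all, so any surviving byte marks the data as binary. A quick self-check:

    _TEXTCHARS = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7f})

    def _is_binary(bytes_data):
        return bool(bytes_data.translate(None, _TEXTCHARS))

    assert not _is_binary(b'plain ASCII text\n')  # every byte is deleted -> b'' -> False
    assert _is_binary(b'\x00\x01\x02ELF')         # the control bytes survive -> True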
@@ -141,6 +115,7 @@ def should_prune(path, relative_path):
# Passed all filtering; do not prune # Passed all filtering; do not prune
return False return False
def _check_regex_match(file_path, search_regex): def _check_regex_match(file_path, search_regex):
""" """
Returns True if a regex pattern matches a file; False otherwise Returns True if a regex pattern matches a file; False otherwise
@@ -161,6 +136,7 @@ def _check_regex_match(file_path, search_regex):
return True return True
return False return False
def should_domain_substitute(path, relative_path, search_regex): def should_domain_substitute(path, relative_path, search_regex):
""" """
Returns True if a path should be domain substituted in the buildspace tree; False otherwise Returns True if a path should be domain substituted in the buildspace tree; False otherwise
@@ -178,6 +154,7 @@ def should_domain_substitute(path, relative_path, search_regex):
return _check_regex_match(path, search_regex) return _check_regex_match(path, search_regex)
return False return False
def compute_lists(buildspace_tree, search_regex): def compute_lists(buildspace_tree, search_regex):
""" """
Compute the binary pruning and domain substitution lists of the buildspace tree. Compute the binary pruning and domain substitution lists of the buildspace tree.
@@ -229,32 +206,51 @@ def compute_lists(buildspace_tree, search_regex):
raise BuildkitAbort() raise BuildkitAbort()
return sorted(pruning_set), sorted(domain_substitution_set) return sorted(pruning_set), sorted(domain_substitution_set)
def main(args_list=None): def main(args_list=None):
"""CLI entrypoint""" """CLI entrypoint"""
parser = argparse.ArgumentParser(description=__doc__) parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument( parser.add_argument(
'-a', '--auto-download', action='store_true', '-a',
'--auto-download',
action='store_true',
help='If specified, it will download the source code and dependencies ' help='If specified, it will download the source code and dependencies '
'for the --base-bundle given. Otherwise, only an existing ' 'for the --base-bundle given. Otherwise, only an existing '
'buildspace tree will be used.') 'buildspace tree will be used.')
parser.add_argument( parser.add_argument(
'-b', '--base-bundle', metavar='NAME', type=get_basebundle_verbosely, '-b',
default='common', help='The base bundle to use. Default: %(default)s') '--base-bundle',
metavar='NAME',
type=get_basebundle_verbosely,
default='common',
help='The base bundle to use. Default: %(default)s')
parser.add_argument( parser.add_argument(
'-p', '--pruning', metavar='PATH', type=Path, '-p',
'--pruning',
metavar='PATH',
type=Path,
default='resources/config_bundles/common/pruning.list', default='resources/config_bundles/common/pruning.list',
help='The path to store pruning.list. Default: %(default)s') help='The path to store pruning.list. Default: %(default)s')
parser.add_argument( parser.add_argument(
'-d', '--domain-substitution', metavar='PATH', type=Path, '-d',
'--domain-substitution',
metavar='PATH',
type=Path,
default='resources/config_bundles/common/domain_substitution.list', default='resources/config_bundles/common/domain_substitution.list',
help='The path to store domain_substitution.list. Default: %(default)s') help='The path to store domain_substitution.list. Default: %(default)s')
parser.add_argument( parser.add_argument(
'--tree', metavar='PATH', type=Path, default=BUILDSPACE_TREE, '--tree',
metavar='PATH',
type=Path,
default=BUILDSPACE_TREE,
help=('The path to the buildspace tree to create. ' help=('The path to the buildspace tree to create. '
'If it is not empty, the source will not be unpacked. ' 'If it is not empty, the source will not be unpacked. '
'Default: %(default)s')) 'Default: %(default)s'))
parser.add_argument( parser.add_argument(
'--downloads', metavar='PATH', type=Path, default=BUILDSPACE_DOWNLOADS, '--downloads',
metavar='PATH',
type=Path,
default=BUILDSPACE_DOWNLOADS,
help=('The path to the buildspace downloads directory. ' help=('The path to the buildspace downloads directory. '
'It must already exist. Default: %(default)s')) 'It must already exist. Default: %(default)s'))
try: try:
@@ -278,5 +274,6 @@ def main(args_list=None):
with args.domain_substitution.open('w', encoding=ENCODING) as file_obj: with args.domain_substitution.open('w', encoding=ENCODING) as file_obj:
file_obj.writelines('%s\n' % line for line in domain_substitution_list) file_obj.writelines('%s\n' % line for line in domain_substitution_list)
if __name__ == "__main__": if __name__ == "__main__":
main() main()
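Behavior is unchanged by the reflow: with the defaults above, passing -a -b common downloads and unpacks the common base bundle's source into the buildspace tree, then regenerates pruning.list and domain_substitution.list under resources/config_bundles/common/.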

View File

@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved. # Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
""" """
Refreshes patches of all configs via quilt until the first patch that Refreshes patches of all configs via quilt until the first patch that
requires manual modification requires manual modification

View File

@@ -4,7 +4,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved. # Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
"""Run sanity checking algorithms over the base bundles and patches. """Run sanity checking algorithms over the base bundles and patches.
It checks the following: It checks the following:
@@ -33,19 +32,16 @@ import sys
from pathlib import Path from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent)) sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
from buildkit.common import ( from buildkit.common import (CONFIG_BUNDLES_DIR, ENCODING, PATCHES_DIR, BuildkitAbort, get_logger,
CONFIG_BUNDLES_DIR, ENCODING, PATCHES_DIR, BuildkitAbort, get_logger, get_resources_dir)
get_resources_dir)
from buildkit.config import BASEBUNDLEMETA_INI, BaseBundleMetaIni, ConfigBundle from buildkit.config import BASEBUNDLEMETA_INI, BaseBundleMetaIni, ConfigBundle
from buildkit.third_party import unidiff from buildkit.third_party import unidiff
sys.path.pop(0) sys.path.pop(0)
BaseBundleResult = collections.namedtuple( BaseBundleResult = collections.namedtuple('BaseBundleResult', ('leaves', 'gn_flags', 'patches'))
'BaseBundleResult',
('leaves', 'gn_flags', 'patches'))
ExplorationJournal = collections.namedtuple( ExplorationJournal = collections.namedtuple(
'ExplorationJournal', 'ExplorationJournal', ('unexplored_set', 'results', 'dependents', 'unused_patches'))
('unexplored_set', 'results', 'dependents', 'unused_patches'))
def _check_patches(bundle, logger): def _check_patches(bundle, logger):
""" """
@@ -69,6 +65,7 @@ def _check_patches(bundle, logger):
warnings = False warnings = False
return warnings return warnings
def _merge_disjoints(pair_iterable, current_name, logger): def _merge_disjoints(pair_iterable, current_name, logger):
""" """
Merges disjoint sets with errors Merges disjoint sets with errors
@@ -93,6 +90,7 @@ def _merge_disjoints(pair_iterable, current_name, logger):
warnings = True warnings = True
return warnings return warnings
def _populate_set_with_gn_flags(new_set, base_bundle, logger): def _populate_set_with_gn_flags(new_set, base_bundle, logger):
""" """
Adds items into set new_set from the base bundle's GN flags Adds items into set new_set from the base bundle's GN flags
@@ -111,14 +109,14 @@ def _populate_set_with_gn_flags(new_set, base_bundle, logger):
return warnings return warnings
for current in iterator: for current in iterator:
if current < previous: if current < previous:
logger.warning( logger.warning('In base bundle "%s" GN flags: "%s" should be sorted before "%s"',
'In base bundle "%s" GN flags: "%s" should be sorted before "%s"', base_bundle.name, current, previous)
base_bundle.name, current, previous)
warnings = True warnings = True
new_set.add('%s=%s' % (current, base_bundle.gn_flags[current])) new_set.add('%s=%s' % (current, base_bundle.gn_flags[current]))
previous = current previous = current
return warnings return warnings
def _populate_set_with_patches(new_set, unused_patches, base_bundle, logger): def _populate_set_with_patches(new_set, unused_patches, base_bundle, logger):
""" """
Adds entries to set new_set from the base bundle's patch_order if they are unique. Adds entries to set new_set from the base bundle's patch_order if they are unique.
@@ -128,15 +126,15 @@ def _populate_set_with_patches(new_set, unused_patches, base_bundle, logger):
warnings = False warnings = False
for current in base_bundle.patches: for current in base_bundle.patches:
if current in new_set: if current in new_set:
logger.warning( logger.warning('In base bundle "%s" patch_order: "%s" already appeared once',
'In base bundle "%s" patch_order: "%s" already appeared once', base_bundle.name, current)
base_bundle.name, current)
warnings = True warnings = True
else: else:
unused_patches.discard(current) unused_patches.discard(current)
new_set.add(current) new_set.add(current)
return warnings return warnings
def _explore_base_bundle(current_name, journal, logger): def _explore_base_bundle(current_name, journal, logger):
""" """
Explore the base bundle given by current_name. Modifies journal Explore the base bundle given by current_name. Modifies journal
@@ -162,16 +160,12 @@ def _explore_base_bundle(current_name, journal, logger):
current_meta = BaseBundleMetaIni(current_base_bundle.path / BASEBUNDLEMETA_INI) current_meta = BaseBundleMetaIni(current_base_bundle.path / BASEBUNDLEMETA_INI)
# Populate current base bundle's data # Populate current base bundle's data
current_results = BaseBundleResult( current_results = BaseBundleResult(leaves=set(), gn_flags=set(), patches=set())
leaves=set(), warnings = _populate_set_with_gn_flags(current_results.gn_flags, current_base_bundle,
gn_flags=set(), logger) or warnings
patches=set()) warnings = _populate_set_with_patches(current_results.patches, journal.unused_patches,
warnings = _populate_set_with_gn_flags( current_base_bundle, logger) or warnings
current_results.gn_flags, current_base_bundle, logger) or warnings warnings = _check_patches(current_base_bundle, logger) or warnings
warnings = _populate_set_with_patches(
current_results.patches, journal.unused_patches, current_base_bundle, logger) or warnings
warnings = _check_patches(
current_base_bundle, logger) or warnings
# Set an empty set just in case this node has no dependents # Set an empty set just in case this node has no dependents
if current_name not in journal.dependents: if current_name not in journal.dependents:
@@ -188,12 +182,10 @@ def _explore_base_bundle(current_name, journal, logger):
# Merge sets of dependencies with the current # Merge sets of dependencies with the current
warnings = _merge_disjoints(( warnings = _merge_disjoints((
('Patches', current_results.patches, ('Patches', current_results.patches, journal.results[dependency_name].patches, False),
journal.results[dependency_name].patches, False), ('GN flags', current_results.gn_flags, journal.results[dependency_name].gn_flags,
('GN flags', current_results.gn_flags, False),
journal.results[dependency_name].gn_flags, False), ('Dependencies', current_results.leaves, journal.results[dependency_name].leaves, True),
('Dependencies', current_results.leaves,
journal.results[dependency_name].leaves, True),
), current_name, logger) or warnings ), current_name, logger) or warnings
if not current_results.leaves: if not current_results.leaves:
# This node is a leaf node # This node is a leaf node
@@ -204,6 +196,7 @@ def _explore_base_bundle(current_name, journal, logger):
return warnings return warnings
def _check_mergability(info_tuple_list, dependents, logger): def _check_mergability(info_tuple_list, dependents, logger):
""" """
Checks if entries of config files from dependents can be combined into a common dependency Checks if entries of config files from dependents can be combined into a common dependency
@@ -222,19 +215,18 @@ def _check_mergability(info_tuple_list, dependents, logger):
# Keep only common entries between the current dependent and # Keep only common entries between the current dependent and
# other processed dependents for the current dependency # other processed dependents for the current dependency
for display_name, set_getter in info_tuple_list: for display_name, set_getter in info_tuple_list:
set_dict[display_name].intersection_update( set_dict[display_name].intersection_update(set_getter(dependent_name))
set_getter(dependent_name))
# Check if there are any common entries in all dependents for the # Check if there are any common entries in all dependents for the
# given dependency # given dependency
for display_name, common_set in set_dict.items(): for display_name, common_set in set_dict.items():
if common_set: if common_set:
logger.warning( logger.warning('Base bundles %s can combine %s into "%s": %s',
'Base bundles %s can combine %s into "%s": %s', dependents[dependency_name], display_name, dependency_name,
dependents[dependency_name], display_name, dependency_name, common_set)
common_set)
warnings = True warnings = True
return warnings return warnings
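To make the intersection logic above concrete, here is a toy run with invented bundle data (the names and flags are illustrative only): an entry that survives every intersection_update is shared by all dependents and could therefore be hoisted into the dependency.

    gn_flags = {
        'linux_portable': {'is_debug=false', 'use_sysroot=false'},
        'linux_rooted': {'is_debug=false', 'use_sysroot=true'},
    }
    dependents = {'common': ('linux_portable', 'linux_rooted')}

    common_set = set(gn_flags[dependents['common'][0]])
    for name in dependents['common'][1:]:
        common_set.intersection_update(gn_flags[name])
    print(common_set)  # {'is_debug=false'}: both dependents could push this into "common"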
def main(): def main():
"""CLI entrypoint""" """CLI entrypoint"""
@@ -246,23 +238,20 @@ def main():
journal = ExplorationJournal( journal = ExplorationJournal(
# base bundles not explored yet # base bundles not explored yet
unexplored_set=set(map( unexplored_set=set(map(lambda x: x.name, config_bundles_dir.iterdir())),
lambda x: x.name,
config_bundles_dir.iterdir())),
# base bundle name -> namedtuple(leaves=set(), gn_flags=set()) # base bundle name -> namedtuple(leaves=set(), gn_flags=set())
results=dict(), results=dict(),
# dependency -> set of dependents # dependency -> set of dependents
dependents=dict(), dependents=dict(),
# patches unused by patch orders # patches unused by patch orders
unused_patches=set(map( unused_patches=set(
lambda x: str(x.relative_to(patches_dir)), map(lambda x: str(x.relative_to(patches_dir)),
filter(lambda x: not x.is_dir(), patches_dir.rglob('*')))) filter(lambda x: not x.is_dir(), patches_dir.rglob('*')))))
)
try: try:
# Explore and validate base bundles # Explore and validate base bundles
while journal.unexplored_set: while journal.unexplored_set:
warnings = _explore_base_bundle( warnings = _explore_base_bundle(next(iter(journal.unexplored_set)), journal,
next(iter(journal.unexplored_set)), journal, logger) or warnings logger) or warnings
# Check for config file entries that should be merged into dependencies # Check for config file entries that should be merged into dependencies
warnings = _check_mergability(( warnings = _check_mergability((
('GN flags', lambda x: journal.results[x].gn_flags), ('GN flags', lambda x: journal.results[x].gn_flags),
@@ -278,6 +267,7 @@ def main():
exit(1) exit(1)
exit(0) exit(0)
if __name__ == '__main__': if __name__ == '__main__':
if sys.argv[1:]: if sys.argv[1:]:
print(__doc__) print(__doc__)

5
devutils/yapf_buildkit.sh Executable file
View File

@@ -0,0 +1,5 @@
#!/bin/bash
set -eux
python3 -m yapf --style '.style.yapf' -e '*/third_party/*' -rpi buildkit
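For reference, the clustered -rpi expands to yapf's --recursive, --parallel, and --in-place flags, while -e excludes the vendored code under third_party; the relative '.style.yapf' path means the script expects to be run from the repository root.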

5
devutils/yapf_devutils.sh Executable file
View File

@@ -0,0 +1,5 @@
#!/bin/bash
set -eux
python3 -m yapf --style '.style.yapf' -ri "$@"
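Unlike yapf_buildkit.sh, this script takes its targets from the command line, e.g. ./devutils/yapf_devutils.sh devutils/*.py from the repository root; quoting the expansion as "$@" keeps any filename containing spaces intact as a single argument.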