Reformat scripts for new yapf

This commit is contained in:
Blaise
2024-03-30 11:43:39 -05:00
parent cd55fa9f8a
commit 75654057f1
18 changed files with 217 additions and 232 deletions

View File

@@ -46,13 +46,12 @@ def main():
default_downloads_ini = [str(root_dir / 'downloads.ini')]
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'-d',
'--downloads-ini',
type=Path,
nargs='*',
default=default_downloads_ini,
help='List of downloads.ini files to check. Default: %(default)s')
parser.add_argument('-d',
'--downloads-ini',
type=Path,
nargs='*',
default=default_downloads_ini,
help='List of downloads.ini files to check. Default: %(default)s')
args = parser.parse_args()
if check_downloads_ini(args.downloads_ini):

View File

@@ -27,9 +27,9 @@ def main():
Path(input_name).read_text(encoding='UTF-8').splitlines()))
for file_name in file_iter:
if not Path(args.root_dir, file_name).exists():
print(
'ERROR: Path "{}" from file "{}" does not exist.'.format(file_name, input_name),
file=sys.stderr)
print('ERROR: Path "{}" from file "{}" does not exist.'.format(
file_name, input_name),
file=sys.stderr)
exit(1)

View File

@@ -63,12 +63,11 @@ def main():
default_flags_gn = root_dir / 'flags.gn'
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'-f',
'--flags-gn',
type=Path,
default=default_flags_gn,
help='Path to the GN flags to use. Default: %(default)s')
parser.add_argument('-f',
'--flags-gn',
type=Path,
default=default_flags_gn,
help='Path to the GN flags to use. Default: %(default)s')
args = parser.parse_args()
if check_gn_flags(args.flags_gn):

View File

@@ -118,12 +118,11 @@ def main():
default_patches_dir = root_dir / 'patches'
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'-p',
'--patches',
type=Path,
default=default_patches_dir,
help='Path to the patches directory to use. Default: %(default)s')
parser.add_argument('-p',
'--patches',
type=Path,
default=default_patches_dir,
help='Path to the patches directory to use. Default: %(default)s')
args = parser.parse_args()
warnings = False

View File

@@ -16,10 +16,9 @@ def main():
"""CLI entrypoint"""
parser = argparse.ArgumentParser(description='Run Pylint over devutils')
parser.add_argument('--hide-fixme', action='store_true', help='Hide "fixme" Pylint warnings.')
parser.add_argument(
'--show-locally-disabled',
action='store_true',
help='Show "locally-disabled" Pylint warnings.')
parser.add_argument('--show-locally-disabled',
action='store_true',
help='Show "locally-disabled" Pylint warnings.')
args = parser.parse_args()
disables = [

View File

@@ -17,7 +17,6 @@ class ChangeDir:
"""
Changes directory to path in with statement
"""
def __init__(self, path):
self._path = path
self._orig_path = os.getcwd()
@@ -68,10 +67,9 @@ def main():
parser = argparse.ArgumentParser(description='Run Pylint over arbitrary module')
parser.add_argument('--hide-fixme', action='store_true', help='Hide "fixme" Pylint warnings.')
parser.add_argument(
'--show-locally-disabled',
action='store_true',
help='Show "locally-disabled" Pylint warnings.')
parser.add_argument('--show-locally-disabled',
action='store_true',
help='Show "locally-disabled" Pylint warnings.')
parser.add_argument('module_path', type=Path, help='Path to the module to check')
args = parser.parse_args()

View File

@@ -16,10 +16,9 @@ def main():
"""CLI entrypoint"""
parser = argparse.ArgumentParser(description='Run Pylint over utils')
parser.add_argument('--hide-fixme', action='store_true', help='Hide "fixme" Pylint warnings.')
parser.add_argument(
'--show-locally-disabled',
action='store_true',
help='Show "locally-disabled" Pylint warnings.')
parser.add_argument('--show-locally-disabled',
action='store_true',
help='Show "locally-disabled" Pylint warnings.')
args = parser.parse_args()
disable = ['bad-continuation']

View File

@@ -322,31 +322,27 @@ def compute_lists(source_tree, search_regex, processes):
def main(args_list=None):
"""CLI entrypoint"""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--pruning',
metavar='PATH',
type=Path,
default='pruning.list',
help='The path to store pruning.list. Default: %(default)s')
parser.add_argument(
'--domain-substitution',
metavar='PATH',
type=Path,
default='domain_substitution.list',
help='The path to store domain_substitution.list. Default: %(default)s')
parser.add_argument(
'--domain-regex',
metavar='PATH',
type=Path,
default='domain_regex.list',
help='The path to domain_regex.list. Default: %(default)s')
parser.add_argument(
'-t',
'--tree',
metavar='PATH',
type=Path,
required=True,
help='The path to the source tree to use.')
parser.add_argument('--pruning',
metavar='PATH',
type=Path,
default='pruning.list',
help='The path to store pruning.list. Default: %(default)s')
parser.add_argument('--domain-substitution',
metavar='PATH',
type=Path,
default='domain_substitution.list',
help='The path to store domain_substitution.list. Default: %(default)s')
parser.add_argument('--domain-regex',
metavar='PATH',
type=Path,
default='domain_regex.list',
help='The path to domain_regex.list. Default: %(default)s')
parser.add_argument('-t',
'--tree',
metavar='PATH',
type=Path,
required=True,
help='The path to the source tree to use.')
parser.add_argument(
'--processes',
metavar='NUM',
@@ -354,17 +350,15 @@ def main(args_list=None):
default=None,
help=
'The maximum number of worker processes to create. Defaults to the number of system CPUs.')
parser.add_argument(
'--domain-exclude-prefix',
metavar='PREFIX',
type=str,
action='append',
help='Additional exclusion for domain_substitution.list.')
parser.add_argument(
'--no-error-unused',
action='store_false',
dest='error_unused',
help='Do not treat unused patterns/prefixes as an error.')
parser.add_argument('--domain-exclude-prefix',
metavar='PREFIX',
type=str,
action='append',
help='Additional exclusion for domain_substitution.list.')
parser.add_argument('--no-error-unused',
action='store_false',
dest='error_unused',
help='Do not treat unused patterns/prefixes as an error.')
args = parser.parse_args(args_list)
if args.domain_exclude_prefix is not None:
DOMAIN_EXCLUDE_PREFIXES.extend(args.domain_exclude_prefix)

View File

@@ -127,8 +127,8 @@ def unmerge_platform_patches(platform_patches_dir):
get_logger().error('Unable to find series.merged at: %s',
platform_patches_dir / _SERIES_MERGED)
return False
new_series = filter(
len, (platform_patches_dir / _SERIES_MERGED).read_text(encoding=ENCODING).splitlines())
new_series = filter(len, (platform_patches_dir /
_SERIES_MERGED).read_text(encoding=ENCODING).splitlines())
new_series = filter((lambda x: x not in prepend_series), new_series)
new_series = list(new_series)
series_index = 0
@@ -157,14 +157,12 @@ def unmerge_platform_patches(platform_patches_dir):
def main():
"""CLI Entrypoint"""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'command',
choices=('merge', 'unmerge'),
help='Merge or unmerge ungoogled-chromium patches with platform patches')
parser.add_argument(
'platform_patches',
type=Path,
help='The path to the platform patches in GNU Quilt format to merge into')
parser.add_argument('command',
choices=('merge', 'unmerge'),
help='Merge or unmerge ungoogled-chromium patches with platform patches')
parser.add_argument('platform_patches',
type=Path,
help='The path to the platform patches in GNU Quilt format to merge into')
args = parser.parse_args()
repo_dir = Path(__file__).resolve().parent.parent

View File

@@ -38,7 +38,6 @@ try:
class _VerboseRetry(urllib3.util.Retry):
        """A more verbose version of HTTP Adapter about retries"""
def sleep_for_retry(self, response=None):
"""Sleeps for Retry-After, and logs the sleep time"""
if response:
@@ -61,13 +60,12 @@ try:
def _get_requests_session():
session = requests.Session()
http_adapter = requests.adapters.HTTPAdapter(
max_retries=_VerboseRetry(
total=10,
read=10,
connect=10,
backoff_factor=8,
status_forcelist=urllib3.Retry.RETRY_AFTER_STATUS_CODES,
raise_on_status=False))
max_retries=_VerboseRetry(total=10,
read=10,
connect=10,
backoff_factor=8,
status_forcelist=urllib3.Retry.RETRY_AFTER_STATUS_CODES,
raise_on_status=False))
session.mount('http://', http_adapter)
session.mount('https://', http_adapter)
return session
@@ -126,7 +124,6 @@ def _validate_deps(deps_text):
def _deps_var(deps_globals):
"""Return a function that implements DEPS's Var() function"""
def _var_impl(var_name):
"""Implementation of Var() in DEPS"""
return deps_globals['vars'][var_name]
@@ -445,8 +442,9 @@ def _retrieve_remote_files(file_iter):
last_progress = current_progress
logger.info('%d files downloaded', current_progress)
try:
files[file_path] = _download_source_file(
download_session, root_deps_tree, fallback_repo_manager, file_path).split('\n')
files[file_path] = _download_source_file(download_session, root_deps_tree,
fallback_repo_manager,
file_path).split('\n')
except _NotInRepoError:
get_logger().warning('Could not find "%s" remotely. Skipping...', file_path)
return files
@@ -580,10 +578,9 @@ def _test_patches(series_iter, patch_cache, files_under_test):
return True
except: #pylint: disable=bare-except
get_logger().warning('Patch failed validation: %s', patch_path_str)
get_logger().debug(
'Specifically, file "%s" caused exception while applying:',
patched_file.path,
exc_info=True)
get_logger().debug('Specifically, file "%s" caused exception while applying:',
patched_file.path,
exc_info=True)
return True
return False
@@ -599,8 +596,9 @@ def _load_all_patches(series_iter, patches_dir):
for relative_path in series_iter:
if relative_path in unidiff_dict:
continue
unidiff_dict[relative_path] = unidiff.PatchSet.from_filename(
str(patches_dir / relative_path), encoding=ENCODING)
unidiff_dict[relative_path] = unidiff.PatchSet.from_filename(str(patches_dir /
relative_path),
encoding=ENCODING)
if not (patches_dir / relative_path).read_text(encoding=ENCODING).endswith('\n'):
had_failure = True
get_logger().warning('Patch file does not end with newline: %s',
@@ -644,20 +642,18 @@ def _get_files_under_test(args, required_files, parser):
def main():
"""CLI Entrypoint"""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'-s',
'--series',
type=Path,
metavar='FILE',
default=str(Path('patches', 'series')),
help='The series file listing patches to apply. Default: %(default)s')
parser.add_argument(
'-p',
'--patches',
type=Path,
metavar='DIRECTORY',
default='patches',
help='The patches directory to read from. Default: %(default)s')
parser.add_argument('-s',
'--series',
type=Path,
metavar='FILE',
default=str(Path('patches', 'series')),
help='The series file listing patches to apply. Default: %(default)s')
parser.add_argument('-p',
'--patches',
type=Path,
metavar='DIRECTORY',
default='patches',
help='The patches directory to read from. Default: %(default)s')
add_common_params(parser)
file_source_group = parser.add_mutually_exclusive_group(required=True)