merge: update to ungoogled-chromium 138.0.7204.100

wukko
2025-07-10 22:20:03 +06:00
28 changed files with 177 additions and 154 deletions

View File

@@ -9,7 +9,7 @@ container:
code_check_task:
pip_cache:
- folder: /usr/local/lib/python3.9/site-packages
+ folder: /usr/local/lib/python3.10/site-packages
fingerprint_script: cat .cirrus_requirements.txt
populate_script: pip install -r .cirrus_requirements.txt
utils_script:
@@ -26,7 +26,7 @@ validate_config_task:
validate_with_source_task:
pip_cache:
- folder: /usr/local/lib/python3.9/site-packages
+ folder: /usr/local/lib/python3.10/site-packages
fingerprint_script: cat .cirrus_requirements.txt
populate_script: pip install -r .cirrus_requirements.txt
chromium_download_script: |
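
The pip_cache folder must track the Python version baked into the CI image. A minimal sketch, standard library only, that prints the path the cache folder should match:

    import sysconfig

    # The 'purelib' path embeds the interpreter's minor version, which is
    # why the cached folder changes from python3.9 to python3.10 here.
    print(sysconfig.get_paths()['purelib'])
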

View File

@@ -1,5 +1,5 @@
# Dockerfile for Python 3 with xz-utils (for tar.xz unpacking)
- FROM python:3.9-slim-bullseye
+ FROM python:3.10-slim-bookworm
RUN apt update && apt install -y xz-utils patch axel curl git

View File

@@ -1,9 +1,9 @@
- # Based on Python package versions in Debian bullseye
- # https://packages.debian.org/bullseye/python/
- astroid==2.5.1 # via pylint
- pylint==2.7.2
- pytest-cov==2.10.1
- pytest==6.0.2
- httplib2==0.18.1
- requests==2.25.1
- yapf==0.30.0
+ # Based on Python package versions in Debian bookworm
+ # https://packages.debian.org/bookworm/python/
+ astroid==2.14.2 # via pylint
+ pylint==2.16.2
+ pytest-cov==4.0.0
+ pytest==7.2.1
+ httplib2==0.20.4
+ requests==2.28.1
+ yapf==0.32.0
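
A hedged spot-check of the new pins, assuming the packages are installed in the current environment; the three names and versions are taken from the list above, the loop itself is purely illustrative:

    from importlib.metadata import PackageNotFoundError, version

    # Compare a few bookworm-based pins against what is actually installed.
    for pkg, want in (('pylint', '2.16.2'), ('pytest', '7.2.1'), ('requests', '2.28.1')):
        try:
            have = version(pkg)
        except PackageNotFoundError:
            have = 'not installed'
        print(f'{pkg}: pinned {want}, found {have}')
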

View File

@@ -1 +1 @@
- 138.0.7204.96
+ 138.0.7204.100

View File

@@ -21,6 +21,7 @@ from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent / 'utils'))
from downloads import DownloadInfo, schema
sys.path.pop(0)

View File

@@ -27,8 +27,7 @@ def main():
Path(input_name).read_text(encoding='UTF-8').splitlines()))
for file_name in file_iter:
if not Path(args.root_dir, file_name).exists():
- print('ERROR: Path "{}" from file "{}" does not exist.'.format(
- file_name, input_name),
+ print(f'ERROR: Path "{file_name}" from file "{input_name}" does not exist.',
file=sys.stderr)
sys.exit(1)
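
This is the `.format()`-to-f-string pattern applied throughout this commit; a minimal sketch with stand-in values for the loop variables above:

    import sys

    file_name, input_name = 'foo.txt', 'files.list'  # stand-in values
    # Before: '...{}...{}...'.format(file_name, input_name) split over two lines.
    # After: one f-string, which also collapses the wrapped call onto one line.
    print(f'ERROR: Path "{file_name}" from file "{input_name}" does not exist.',
          file=sys.stderr)
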

View File

@@ -21,6 +21,7 @@ from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent / 'utils'))
from _common import ENCODING, get_logger
sys.path.pop(0)

View File

@@ -24,6 +24,7 @@ from third_party import unidiff
sys.path.insert(0, str(Path(__file__).resolve().parent.parent / 'utils'))
from _common import ENCODING, get_logger, parse_series # pylint: disable=wrong-import-order
sys.path.pop(0)
# File suffixes to ignore for checking unused patches

View File

@@ -23,7 +23,6 @@ def main():
disables = [
'wrong-import-position',
- 'bad-continuation',
'duplicate-code',
]
@@ -33,7 +32,7 @@ def main():
disables.append('locally-disabled')
pylint_options = [
- '--disable={}'.format(','.join(disables)),
+ f"--disable={','.join(disables)}",
'--jobs=4',
'--score=n',
'--persistent=n',

View File

@@ -18,6 +18,7 @@ class ChangeDir:
"""
Changes directory to path in with statement
"""
def __init__(self, path):
self._path = path
self._orig_path = os.getcwd()
@@ -31,12 +32,12 @@ class ChangeDir:
def run_pylint(module_path, pylint_options, ignore_prefixes=tuple()):
"""Runs Pylint. Returns a boolean indicating success"""
- pylint_stats = Path('/run/user/{}/pylint_stats'.format(os.getuid()))
+ pylint_stats = Path(f'/run/user/{os.getuid()}/pylint_stats')
if not pylint_stats.parent.is_dir(): #pylint: disable=no-member
pylint_stats = Path('/run/shm/pylint_stats')
os.environ['PYLINTHOME'] = str(pylint_stats)
- input_paths = list()
+ input_paths = []
if not module_path.exists():
print('ERROR: Cannot find', module_path)
sys.exit(1)
@@ -75,12 +76,11 @@ def main():
args = parser.parse_args()
if not args.module_path.exists():
- print('ERROR: Module path "{}" does not exist'.format(args.module_path))
+ print(f'ERROR: Module path "{args.module_path}" does not exist')
sys.exit(1)
disables = [
'wrong-import-position',
- 'bad-continuation',
]
if args.hide_fixme:
@@ -89,7 +89,7 @@ def main():
disables.append('locally-disabled')
pylint_options = [
- '--disable={}'.format(','.join(disables)),
+ f"--disable={','.join(disables)}",
'--jobs=4',
'--score=n',
'--persistent=n',
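
For reference, a sketch of how options built this way reach pylint; 'utils' is a hypothetical target module and the subprocess call mirrors, rather than reproduces, run_pylint:

    import subprocess

    disables = ['wrong-import-position']
    cmd = ['pylint', f"--disable={','.join(disables)}", '--jobs=4',
           '--score=n', '--persistent=n', 'utils']  # 'utils' is hypothetical
    result = subprocess.run(cmd, check=False)
    print('success' if result.returncode == 0 else 'failure')
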

View File

@@ -21,21 +21,18 @@ def main():
help='Show "locally-disabled" Pylint warnings.')
args = parser.parse_args()
- disable = ['bad-continuation']
- if args.hide_fixme:
- disable.append('fixme')
- if not args.show_locally_disabled:
- disable.append('locally-disabled')
pylint_options = [
- '--disable={}'.format(','.join(disable)),
'--jobs=4',
'--max-args=7',
'--score=n',
'--persistent=n',
]
+ if args.hide_fixme:
+ pylint_options.append('--disable=fixme')
+ if not args.show_locally_disabled:
+ pylint_options.append('--disable=locally-disabled')
ignore_prefixes = [
('third_party', ),
('tests', ),

View File

@@ -11,11 +11,13 @@ import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent.parent / 'utils'))
- from _common import get_logger, set_logging_level
+ from _common import ENCODING, get_logger, set_logging_level
sys.path.pop(0)
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
from check_patch_files import check_series_duplicates
sys.path.pop(0)
@@ -33,7 +35,7 @@ def test_check_series_duplicates():
'a.patch',
'b.patch',
'c.patch',
- ]))
+ ]), encoding=ENCODING)
assert not check_series_duplicates(patches_dir)
get_logger().info('Check duplicates')
@@ -42,7 +44,8 @@ def test_check_series_duplicates():
'b.patch',
'c.patch',
'a.patch',
- ]))
+ ]),
+ encoding=ENCODING)
assert check_series_duplicates(patches_dir)
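
The encoding= argument now passed to write_text avoids platform-dependent default encodings. A self-contained sketch of the duplicate check exercised above, with ENCODING mirroring _common.ENCODING:

    import tempfile
    from pathlib import Path

    ENCODING = 'UTF-8'  # mirrors _common.ENCODING in this repo
    with tempfile.TemporaryDirectory() as tmp:
        series = Path(tmp, 'series')
        series.write_text('\n'.join(['a.patch', 'b.patch', 'a.patch']),
                          encoding=ENCODING)
        lines = series.read_text(encoding=ENCODING).splitlines()
        print('duplicates found:', len(lines) != len(set(lines)))  # True
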

View File

@@ -11,11 +11,13 @@ import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent.parent / 'utils'))
- from _common import get_logger, set_logging_level
+ from _common import ENCODING, get_logger, set_logging_level
sys.path.pop(0)
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
import validate_patches
sys.path.pop(0)
@@ -30,8 +32,8 @@ def test_test_patches():
def _run_test_patches(patch_content):
with tempfile.TemporaryDirectory() as tmpdirname:
- Path(tmpdirname, 'foobar.txt').write_text(orig_file_content)
- Path(tmpdirname, 'test.patch').write_text(patch_content)
+ Path(tmpdirname, 'foobar.txt').write_text(orig_file_content, encoding=ENCODING)
+ Path(tmpdirname, 'test.patch').write_text(patch_content, encoding=ENCODING)
_, patch_cache = validate_patches._load_all_patches(series_iter, Path(tmpdirname))
required_files = validate_patches._get_required_files(patch_cache)
files_under_test = validate_patches._retrieve_local_files(required_files,

View File

@@ -28,6 +28,7 @@ sys.path.insert(0, str(Path(__file__).resolve().parent.parent / 'utils'))
from _common import get_logger
from domain_substitution import DomainRegexList, TREE_ENCODINGS
from prune_binaries import CONTINGENT_PATHS
sys.path.pop(0)
# Encoding for output files
@@ -398,9 +399,9 @@ def main(args_list=None):
args.tree,
DomainRegexList(args.domain_regex).search_regex, args.processes)
with args.pruning.open('w', encoding=_ENCODING) as file_obj:
- file_obj.writelines('%s\n' % line for line in pruning_set)
+ file_obj.writelines(f'{line}\n' for line in pruning_set)
with args.domain_substitution.open('w', encoding=_ENCODING) as file_obj:
- file_obj.writelines('%s\n' % line for line in domain_substitution_set)
+ file_obj.writelines(f'{line}\n' for line in domain_substitution_set)
if unused_patterns.log_unused(args.error_unused) and args.error_unused:
get_logger().error('Please update or remove unused patterns and/or prefixes. '
'The lists have still been updated with the remaining valid entries.')
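
The writelines generator pattern above, as a standalone sketch; the set contents and output filename are made up:

    pruning_set = {'third_party/bin/blob.bin', 'tools/data.bin'}  # example paths
    with open('pruning.list', 'w', encoding='UTF-8') as file_obj:
        # A generator of f-strings streams lines without building one big string.
        file_obj.writelines(f'{line}\n' for line in sorted(pruning_set))
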

View File

@@ -22,6 +22,7 @@ from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent / 'utils'))
from _common import ENCODING, get_logger
from patches import merge_patches
sys.path.pop(0)
_SERIES = 'series'
@@ -114,14 +115,14 @@ def unmerge_platform_patches(platform_patches_dir):
return False
orig_series = (platform_patches_dir / _SERIES_ORIG).read_text(encoding=ENCODING).splitlines()
# patch path -> list of lines after patch path and before next patch path
- path_comments = dict()
+ path_comments = {}
# patch path -> inline comment for patch
- path_inline_comments = dict()
+ path_inline_comments = {}
previous_path = None
for partial_path in orig_series:
if not partial_path or partial_path.startswith('#'):
if partial_path not in path_comments:
- path_comments[previous_path] = list()
+ path_comments[previous_path] = []
path_comments[previous_path].append(partial_path)
else:
path_parts = partial_path.split(' #', maxsplit=1)
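
A condensed sketch of the comment-tracking loop above using the new literal containers; setdefault stands in for the explicit membership check, and the series entries are invented:

    path_comments = {}
    previous_path = None
    for partial_path in ('# group comment', 'a.patch # inline note', 'b.patch'):
        if not partial_path or partial_path.startswith('#'):
            # Comments attach to the most recently seen patch path (None at start).
            path_comments.setdefault(previous_path, []).append(partial_path)
        else:
            previous_path = partial_path.split(' #', maxsplit=1)[0]
    print(path_comments)  # {None: ['# group comment']}
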

View File

@@ -23,12 +23,14 @@ from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent / 'third_party'))
import unidiff
from unidiff.constants import LINE_TYPE_EMPTY, LINE_TYPE_NO_NEWLINE
sys.path.pop(0)
sys.path.insert(0, str(Path(__file__).resolve().parent.parent / 'utils'))
from domain_substitution import TREE_ENCODINGS
from _common import ENCODING, get_logger, get_chromium_version, parse_series, add_common_params
from patches import dry_run_check
sys.path.pop(0)
try:
@@ -38,6 +40,7 @@ try:
class _VerboseRetry(urllib3.util.Retry):
"""A more verbose version of HTTP Adatper about retries"""
def sleep_for_retry(self, response=None):
"""Sleeps for Retry-After, and logs the sleep time"""
if response:
@@ -100,16 +103,16 @@ class _DepsNodeVisitor(ast.NodeVisitor):
def visit_Call(self, node): #pylint: disable=invalid-name
"""Override Call syntax handling"""
if node.func.id not in self._allowed_callables:
- raise _UnexpectedSyntaxError('Unexpected call of "%s" at line %s, column %s' %
- (node.func.id, node.lineno, node.col_offset))
+ raise _UnexpectedSyntaxError(f'Unexpected call of "{node.func.id}" '
+ f'at line {node.lineno}, column {node.col_offset}')
def generic_visit(self, node):
for ast_type in self._valid_syntax_types:
if isinstance(node, ast_type):
super().generic_visit(node)
return
- raise _UnexpectedSyntaxError('Unexpected {} at line {}, column {}'.format(
- type(node).__name__, node.lineno, node.col_offset))
+ raise _UnexpectedSyntaxError(f'Unexpected {type(node).__name__} '
+ f'at line {node.lineno}, column {node.col_offset}')
def _validate_deps(deps_text):
@@ -124,6 +127,7 @@ def _validate_deps(deps_text):
def _deps_var(deps_globals):
"""Return a function that implements DEPS's Var() function"""
def _var_impl(var_name):
"""Implementation of Var() in DEPS"""
return deps_globals['vars'][var_name]
@@ -145,8 +149,8 @@ def _download_googlesource_file(download_session, repo_url, version, relative_pa
googlesource.com repo as a string.
"""
if 'googlesource.com' not in repo_url:
- raise ValueError('Repository URL is not a googlesource.com URL: {}'.format(repo_url))
- full_url = repo_url + '/+/{}/{}?format=TEXT'.format(version, str(relative_path))
+ raise ValueError(f'Repository URL is not a googlesource.com URL: {repo_url}')
+ full_url = repo_url + f'/+/{version}/{str(relative_path)}?format=TEXT'
get_logger().debug('Downloading: %s', full_url)
response = download_session.get(full_url)
if response.status_code == 404:
@@ -172,13 +176,13 @@ def _get_dep_value_url(deps_globals, dep_value):
# Probably a Python format string
url = url.format(**deps_globals['vars'])
if url.count('@') != 1:
- raise _PatchValidationError('Invalid number of @ symbols in URL: {}'.format(url))
+ raise _PatchValidationError(f'Invalid number of @ symbols in URL: {url}')
return url
def _process_deps_entries(deps_globals, child_deps_tree, child_path, deps_use_relative_paths):
"""Helper for _get_child_deps_tree"""
- for dep_path_str, dep_value in deps_globals.get('deps', dict()).items():
+ for dep_path_str, dep_value in deps_globals.get('deps', {}).items():
url = _get_dep_value_url(deps_globals, dep_value)
if url is None:
continue
@@ -200,7 +204,7 @@ def _process_deps_entries(deps_globals, child_deps_tree, child_path, deps_use_re
grandchild_deps_tree = recursedeps_item_depsfile
if grandchild_deps_tree is None:
# This dep is not recursive; i.e. it is fully loaded
- grandchild_deps_tree = dict()
+ grandchild_deps_tree = {}
child_deps_tree[dep_path] = (*url.split('@'), grandchild_deps_tree)
@@ -211,7 +215,7 @@ def _get_child_deps_tree(download_session, current_deps_tree, child_path, deps_u
# Load unloaded DEPS
deps_globals = _parse_deps(
_download_googlesource_file(download_session, repo_url, version, child_deps_tree))
- child_deps_tree = dict()
+ child_deps_tree = {}
current_deps_tree[child_path] = (repo_url, version, child_deps_tree)
deps_use_relative_paths = deps_globals.get('use_relative_paths', False)
_process_deps_entries(deps_globals, child_deps_tree, child_path, deps_use_relative_paths)
@@ -221,9 +225,8 @@ def _get_child_deps_tree(download_session, current_deps_tree, child_path, deps_u
def _get_last_chromium_modification():
"""Returns the last modification date of the chromium-browser-official tar file"""
with _get_requests_session() as session:
- response = session.head(
- 'https://storage.googleapis.com/chromium-browser-official/chromium-{}.tar.xz'.format(
- get_chromium_version()))
+ response = session.head('https://storage.googleapis.com/chromium-browser-official/'
+ f'chromium-{get_chromium_version()}.tar.xz')
response.raise_for_status()
return email.utils.parsedate_to_datetime(response.headers['Last-Modified'])
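
A minimal sketch of the Last-Modified lookup above, assuming requests is available; the version string is the one this commit pins:

    import email.utils
    import requests  # assumed available, as in the module above

    url = ('https://storage.googleapis.com/chromium-browser-official/'
           'chromium-138.0.7204.100.tar.xz')
    response = requests.head(url)
    response.raise_for_status()
    # Parse the RFC 2822 date header into a timezone-aware datetime.
    print(email.utils.parsedate_to_datetime(response.headers['Last-Modified']))
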
@@ -235,7 +238,7 @@ def _get_gitiles_git_log_date(log_entry):
def _get_gitiles_commit_before_date(repo_url, target_branch, target_datetime):
"""Returns the hexadecimal hash of the closest commit before target_datetime"""
- json_log_url = '{repo}/+log/{branch}?format=JSON'.format(repo=repo_url, branch=target_branch)
+ json_log_url = f'{repo_url}/+log/{target_branch}?format=JSON'
with _get_requests_session() as session:
response = session.get(json_log_url)
response.raise_for_status()
@@ -410,7 +413,7 @@ def _retrieve_remote_files(file_iter):
Returns a dict of relative UNIX path strings to a list of lines in the file as strings
"""
- files = dict()
+ files = {}
root_deps_tree = _initialize_deps_tree()
@@ -459,7 +462,7 @@ def _retrieve_local_files(file_iter, source_dir):
Returns a dict of relative UNIX path strings to a list of lines in the file as strings
"""
- files = dict()
+ files = {}
for file_path in file_iter:
try:
raw_content = (source_dir / file_path).read_bytes()
@@ -473,7 +476,7 @@ def _retrieve_local_files(file_iter, source_dir):
except UnicodeDecodeError:
continue
if not content:
- raise UnicodeDecodeError('Unable to decode with any encoding: %s' % file_path)
+ raise UnicodeDecodeError(f'Unable to decode with any encoding: {file_path}')
files[file_path] = content.split('\n')
if not files:
get_logger().error('All files used by patches are missing!')
@@ -488,7 +491,7 @@ def _modify_file_lines(patched_file, file_lines):
for hunk in patched_file:
# Validate hunk will match
if not hunk.is_valid():
- raise _PatchValidationError('Hunk is not valid: {}'.format(repr(hunk)))
+ raise _PatchValidationError(f'Hunk is not valid: {repr(hunk)}')
line_cursor = hunk.target_start - 1
for line in hunk:
normalized_line = line.value.rstrip('\n')
@@ -497,18 +500,16 @@ def _modify_file_lines(patched_file, file_lines):
line_cursor += 1
elif line.is_removed:
if normalized_line != file_lines[line_cursor]:
- raise _PatchValidationError(
- "Line '{}' does not match removal line '{}' from patch".format(
- file_lines[line_cursor], normalized_line))
+ raise _PatchValidationError(f"Line '{file_lines[line_cursor]}' does not match "
+ f"removal line '{normalized_line}' from patch")
del file_lines[line_cursor]
elif line.is_context:
if not normalized_line and line_cursor == len(file_lines):
# We reached the end of the file
break
if normalized_line != file_lines[line_cursor]:
- raise _PatchValidationError(
- "Line '{}' does not match context line '{}' from patch".format(
- file_lines[line_cursor], normalized_line))
+ raise _PatchValidationError(f"Line '{file_lines[line_cursor]}' does not match "
+ f"context line '{normalized_line}' from patch")
line_cursor += 1
else:
assert line.line_type in (LINE_TYPE_EMPTY, LINE_TYPE_NO_NEWLINE)
@@ -592,7 +593,7 @@ def _load_all_patches(series_iter, patches_dir):
- dict of relative UNIX path strings to unidiff.PatchSet
"""
had_failure = False
- unidiff_dict = dict()
+ unidiff_dict = {}
for relative_path in series_iter:
if relative_path in unidiff_dict:
continue
@@ -682,12 +683,12 @@ def main():
if args.cache_remote.parent.exists():
args.cache_remote.mkdir()
else:
- parser.error('Parent of cache path {} does not exist'.format(args.cache_remote))
+ parser.error(f'Parent of cache path {args.cache_remote} does not exist')
if not args.series.is_file():
- parser.error('--series path is not a file or not found: {}'.format(args.series))
+ parser.error(f'--series path is not a file or not found: {args.series}')
if not args.patches.is_dir():
- parser.error('--patches path is not a directory or not found: {}'.format(args.patches))
+ parser.error(f'--patches path is not a directory or not found: {args.patches}')
series_iterable = tuple(parse_series(args.series))
had_failure, patch_cache = _load_all_patches(series_iterable, args.patches)
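
For orientation, a small sketch of the unidiff objects this module validates; PatchSet.from_string and Hunk.is_valid are the same APIs used in the code above, and the patch text is a toy example:

    import unidiff  # the third_party library imported above

    patch_text = """--- a/foobar.txt
    +++ b/foobar.txt
    @@ -1 +1 @@
    -hello
    +goodbye
    """
    for patched_file in unidiff.PatchSet.from_string(patch_text):
        for hunk in patched_file:
            print(patched_file.path, hunk.is_valid())  # foobar.txt True
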

View File

@@ -3604,7 +3604,7 @@
#include "components/saved_tab_groups/public/features.h"
#include "components/signin/public/base/signin_buildflags.h"
#include "components/signin/public/base/signin_switches.h"
- @@ -3450,11 +3449,6 @@ void AddSiteSettingsStrings(content::Web
+ @@ -3453,11 +3452,6 @@ void AddSiteSettingsStrings(content::Web
html_source->AddLocalizedStrings(kSensorsLocalizedStrings);
html_source->AddBoolean(
@@ -3618,15 +3618,15 @@
--- a/chrome/browser/ui/webui/settings/settings_ui.cc
+++ b/chrome/browser/ui/webui/settings/settings_ui.cc
@@ -114,7 +114,6 @@
#include "components/regional_capabilities/regional_capabilities_service.h"
@@ -116,7 +116,6 @@
#include "components/safe_browsing/core/common/features.h"
#include "components/safe_browsing/core/common/hashprefix_realtime/hash_realtime_utils.h"
#include "components/search_engines/template_url_service.h"
-#include "components/signin/public/base/signin_pref_names.h"
#include "components/signin/public/base/signin_switches.h"
#include "components/sync/base/features.h"
#include "content/public/browser/url_data_source.h"
- @@ -296,9 +295,7 @@ SettingsUI::SettingsUI(content::WebUI* w
+ @@ -298,9 +297,7 @@ SettingsUI::SettingsUI(content::WebUI* w
}
#endif // BUILDFLAG(IS_WIN) && BUILDFLAG(GOOGLE_CHROME_BRANDING)

View File

@@ -99,19 +99,6 @@ approach to change color components.
+}
+
} // namespace blink
--- a/third_party/blink/renderer/platform/graphics/image_data_buffer.cc
+++ b/third_party/blink/renderer/platform/graphics/image_data_buffer.cc
@@ -130,6 +130,10 @@ base::span<const uint8_t> ImageDataBuffe
bool ImageDataBuffer::EncodeImage(const ImageEncodingMimeType mime_type,
const double& quality,
Vector<unsigned char>* encoded_image) const {
+ if (RuntimeEnabledFeatures::FingerprintingCanvasImageDataNoiseEnabled()) {
+ // shuffle subchannel color data within the pixmap
+ StaticBitmapImage::ShuffleSubchannelColorData(pixmap_.writable_addr(), pixmap_.info(), 0, 0);
+ }
return ImageEncoder::Encode(encoded_image, pixmap_, mime_type, quality);
}
--- a/third_party/blink/renderer/platform/graphics/static_bitmap_image.cc
+++ b/third_party/blink/renderer/platform/graphics/static_bitmap_image.cc
@@ -4,6 +4,8 @@
@@ -303,6 +290,28 @@ approach to change color components.
bool IsStaticBitmapImage() const override { return true; }
// Methods overridden by all sub-classes
--- a/third_party/blink/renderer/platform/image-encoders/image_encoder.cc
+++ b/third_party/blink/renderer/platform/image-encoders/image_encoder.cc
@@ -14,6 +14,8 @@
#include <stdio.h> // Needed by jpeglib.h
#include "jpeglib.h" // for JPEG_MAX_DIMENSION
+#include "third_party/blink/renderer/platform/graphics/static_bitmap_image.h"
+#include "third_party/blink/renderer/platform/runtime_enabled_features.h"
#include "third_party/libwebp/src/src/webp/encode.h" // for WEBP_MAX_DIMENSION
namespace blink {
@@ -43,6 +45,10 @@ bool ImageEncoder::Encode(Vector<unsigne
const SkPixmap& src,
ImageEncodingMimeType mime_type,
double quality) {
+ if (RuntimeEnabledFeatures::FingerprintingCanvasImageDataNoiseEnabled()) {
+ // shuffle subchannel color data within the pixmap
+ StaticBitmapImage::ShuffleSubchannelColorData(src.writable_addr(), src.info(), 0, 0);
+ }
switch (mime_type) {
case kMimeTypeJpeg: {
SkJpegEncoder::Options options;
--- a/third_party/blink/renderer/platform/runtime_enabled_features.json5
+++ b/third_party/blink/renderer/platform/runtime_enabled_features.json5
@@ -2260,6 +2260,9 @@

View File

@@ -15,7 +15,7 @@
#include "build/chromeos_buildflags.h"
--- /dev/null
+++ b/chrome/browser/existing_switch_flag_choices.h
- @@ -0,0 +1,21 @@
+ @@ -0,0 +1,19 @@
+// Copyright (c) 2023 The ungoogled-chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE.ungoogled_chromium file.
@@ -23,9 +23,7 @@
+#ifndef CHROME_BROWSER_EXISTING_SWITCH_FLAG_CHOICES_H_
+#define CHROME_BROWSER_EXISTING_SWITCH_FLAG_CHOICES_H_
+const FeatureEntry::Choice kWebRTCIPPolicy[] = {
+ {"Disable non proxied udp",
+ "",
+ ""},
+ {"Disable non proxied udp", "", ""},
+ {"Default",
+ "webrtc-ip-handling-policy",
+ "default"},

View File

@@ -34,7 +34,7 @@
--- a/chrome/app/settings_chromium_strings.grdp
+++ b/chrome/app/settings_chromium_strings.grdp
- @@ -369,13 +369,13 @@
+ @@ -372,13 +372,13 @@
<!-- Performance Page -->
<message name="IDS_SETTINGS_PERFORMANCE_MEMORY_SAVER_MODE_SETTING_DESCRIPTION" desc="Description for the memory saver mode setting">
@@ -304,7 +304,7 @@
<settings-toggle-button id="spellCheckControl"
--- a/chrome/browser/resources/settings/privacy_page/personalization_options.ts
+++ b/chrome/browser/resources/settings/privacy_page/personalization_options.ts
- @@ -179,10 +179,6 @@ export class SettingsPersonalizationOpti
+ @@ -201,10 +201,6 @@ export class SettingsPersonalizationOpti
'chrome-signin-user-choice-info-change',
this.setChromeSigninUserChoiceInfo_.bind(this));
// </if>
@@ -547,7 +547,7 @@
html_source->AddString("discardRingTreatmentLearnMoreUrl",
chrome::kDiscardRingTreatmentLearnMoreUrl);
- @@ -3468,7 +3443,7 @@ void AddSiteSettingsStrings(content::Web
+ @@ -3471,7 +3446,7 @@ void AddSiteSettingsStrings(content::Web
base::FeatureList::IsEnabled(blink::features::kWebPrinting));
html_source->AddBoolean("enableFederatedIdentityApiContentSetting",
@@ -558,7 +558,7 @@
html_source->AddBoolean(
--- a/chrome/browser/ui/webui/settings/settings_ui.cc
+++ b/chrome/browser/ui/webui/settings/settings_ui.cc
- @@ -619,27 +619,20 @@ SettingsUI::SettingsUI(content::WebUI* w
+ @@ -627,27 +627,20 @@ SettingsUI::SettingsUI(content::WebUI* w
autofill::AutofillAiAction::kListEntityInstancesInSettings)},
};

View File

@@ -36,6 +36,7 @@ class ExtractorEnum: #pylint: disable=too-few-public-methods
class SetLogLevel(argparse.Action): #pylint: disable=too-few-public-methods
"""Sets logging level based on command line arguments it receives"""
def __init__(self, option_strings, dest, nargs=None, **kwargs):
super().__init__(option_strings, dest, nargs=nargs, **kwargs)

View File

@@ -80,7 +80,7 @@ def _process_relative_to(unpack_root, relative_to):
if not relative_root.is_dir():
get_logger().error('Could not find relative_to directory in extracted files: %s',
relative_to)
- raise Exception()
+ raise FileNotFoundError()
for src_path in relative_root.iterdir():
dest_path = unpack_root / src_path.name
if os.path.isdir(dest_path):
@@ -94,20 +94,20 @@ def _extract_tar_with_7z(binary, archive_path, output_dir, relative_to):
if not relative_to is None and (output_dir / relative_to).exists():
get_logger().error('Temporary unpacking directory already exists: %s',
output_dir / relative_to)
- raise Exception()
+ raise FileExistsError()
cmd1 = (binary, 'x', str(archive_path), '-so')
- cmd2 = (binary, 'x', '-si', '-aoa', '-ttar', '-o{}'.format(str(output_dir)))
+ cmd2 = (binary, 'x', '-si', '-aoa', '-ttar', f'-o{str(output_dir)}')
get_logger().debug('7z command line: %s | %s', ' '.join(cmd1), ' '.join(cmd2))
- proc1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE)
- proc2 = subprocess.Popen(cmd2, stdin=proc1.stdout, stdout=subprocess.PIPE)
+ proc1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE) #pylint: disable=consider-using-with
+ proc2 = subprocess.Popen(cmd2, stdin=proc1.stdout, stdout=subprocess.PIPE) #pylint: disable=consider-using-with
proc1.stdout.close()
(stdout_data, stderr_data) = proc2.communicate()
if proc2.returncode != 0:
get_logger().error('7z commands returned non-zero status: %s', proc2.returncode)
get_logger().debug('stdout: %s', stdout_data)
get_logger().debug('stderr: %s', stderr_data)
- raise Exception()
+ raise ChildProcessError()
_process_relative_to(output_dir, relative_to)
@@ -120,7 +120,7 @@ def _extract_tar_with_tar(binary, archive_path, output_dir, relative_to):
result = subprocess.run(cmd, check=False)
if result.returncode != 0:
get_logger().error('tar command returned %s', result.returncode)
- raise Exception()
+ raise ChildProcessError()
# for gnu tar, the --transform option could be used. but to keep compatibility with
# bsdtar on macos, we just do this ourselves
@@ -135,7 +135,7 @@ def _extract_tar_with_winrar(binary, archive_path, output_dir, relative_to):
result = subprocess.run(cmd, check=False)
if result.returncode != 0:
get_logger().error('WinRAR command returned %s', result.returncode)
- raise Exception()
+ raise ChildProcessError()
_process_relative_to(output_dir, relative_to)
@@ -145,10 +145,12 @@ def _extract_tar_with_python(archive_path, output_dir, relative_to):
class NoAppendList(list):
"""Hack to workaround memory issues with large tar files"""
def append(self, obj):
pass
# Simple hack to check if symlinks are supported
symlink_supported = False
try:
os.symlink('', '')
except FileNotFoundError:
@@ -157,13 +159,12 @@ def _extract_tar_with_python(archive_path, output_dir, relative_to):
except OSError:
# Symlinks probably not supported
get_logger().info('System does not support symlinks. Ignoring them.')
symlink_supported = False
except BaseException:
# Unexpected exception
get_logger().exception('Unexpected exception during symlink support check.')
raise
- with tarfile.open(str(archive_path), 'r|%s' % archive_path.suffix[1:]) as tar_file_obj:
+ with tarfile.open(str(archive_path), f'r|{archive_path.suffix[1:]}') as tar_file_obj:
tar_file_obj.members = NoAppendList()
for tarinfo in tar_file_obj:
try:
@@ -260,21 +261,21 @@ def extract_with_7z(archive_path, output_dir, relative_to, extractors=None):
if sevenzip_cmd == USE_REGISTRY:
if not get_running_platform() == PlatformEnum.WINDOWS:
get_logger().error('"%s" for 7-zip is only available on Windows', sevenzip_cmd)
- raise Exception()
+ raise EnvironmentError()
sevenzip_cmd = str(_find_7z_by_registry())
sevenzip_bin = _find_extractor_by_cmd(sevenzip_cmd)
if not relative_to is None and (output_dir / relative_to).exists():
get_logger().error('Temporary unpacking directory already exists: %s',
output_dir / relative_to)
- raise Exception()
- cmd = (sevenzip_bin, 'x', str(archive_path), '-aoa', '-o{}'.format(str(output_dir)))
+ raise FileExistsError()
+ cmd = (sevenzip_bin, 'x', str(archive_path), '-aoa', f'-o{str(output_dir)}')
get_logger().debug('7z command line: %s', ' '.join(cmd))
result = subprocess.run(cmd, check=False)
if result.returncode != 0:
get_logger().error('7z command returned %s', result.returncode)
- raise Exception()
+ raise ChildProcessError()
_process_relative_to(output_dir, relative_to)
@@ -298,20 +299,20 @@ def extract_with_winrar(archive_path, output_dir, relative_to, extractors=None):
if winrar_cmd == USE_REGISTRY:
if not get_running_platform() == PlatformEnum.WINDOWS:
get_logger().error('"%s" for WinRAR is only available on Windows', winrar_cmd)
- raise Exception()
+ raise EnvironmentError()
winrar_cmd = str(_find_winrar_by_registry())
winrar_bin = _find_extractor_by_cmd(winrar_cmd)
if not relative_to is None and (output_dir / relative_to).exists():
get_logger().error('Temporary unpacking directory already exists: %s',
output_dir / relative_to)
- raise Exception()
+ raise FileExistsError()
cmd = (winrar_bin, 'x', '-o+', str(archive_path), str(output_dir))
get_logger().debug('WinRAR command line: %s', ' '.join(cmd))
result = subprocess.run(cmd, check=False)
if result.returncode != 0:
get_logger().error('WinRAR command returned %s', result.returncode)
- raise Exception()
+ raise ChildProcessError()
_process_relative_to(output_dir, relative_to)
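
The pattern behind these changes, as a sketch: raise a specific built-in OSError subclass so callers can distinguish failure modes instead of catching a bare Exception; the command here is an arbitrary example:

    import subprocess

    def run_extractor(cmd):
        """Raise a specific built-in instead of bare Exception() on failure."""
        result = subprocess.run(cmd, check=False)
        if result.returncode != 0:
            # ChildProcessError is an OSError subclass, matching the change above.
            raise ChildProcessError(f'command returned {result.returncode}')

    run_extractor(['tar', '--version'])
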

View File

@@ -17,7 +17,7 @@ from shutil import copytree, copy, move
from stat import S_IWRITE
from subprocess import run
- from _common import add_common_params, get_chromium_version, get_logger
+ from _common import ENCODING, add_common_params, get_chromium_version, get_logger
# Config file for gclient
# Instances of 'src' replaced with UC_OUT, which will be replaced with the output directory
@@ -53,7 +53,7 @@ def clone(args): # pylint: disable=too-many-branches, too-many-locals, too-many-
ucstaging = args.output / 'uc_staging'
dtpath = ucstaging / 'depot_tools'
gsuver = '5.30'
- gsupath = dtpath / 'external_bin' / 'gsutil' / ('gsutil_%s' % gsuver) / 'gsutil'
+ gsupath = dtpath / 'external_bin' / 'gsutil' / f'gsutil_{gsuver}' / 'gsutil'
gnpath = ucstaging / 'gn'
environ['GCLIENT_FILE'] = str(ucstaging / '.gclient')
environ['PATH'] += pathsep + str(dtpath)
@@ -88,7 +88,7 @@ def clone(args): # pylint: disable=too-many-branches, too-many-locals, too-many-
get_logger().info('Cloning depot_tools')
dt_commit = re.search(r"depot_tools\.git'\s*\+\s*'@'\s*\+\s*'([^']+)',",
- Path(args.output / 'DEPS').read_text()).group(1)
+ Path(args.output / 'DEPS').read_text(encoding=ENCODING)).group(1)
if not dt_commit:
get_logger().error('Unable to obtain commit for depot_tools checkout')
sys.exit(1)
@@ -108,7 +108,7 @@ def clone(args): # pylint: disable=too-many-branches, too-many-locals, too-many-
(dtpath / 'git.bat').write_text('git')
# Apply changes to gclient
run(['git', 'apply', '--ignore-whitespace'],
- input=Path(__file__).with_name('depot_tools.patch').read_text().replace(
+ input=Path(__file__).with_name('depot_tools.patch').read_text(encoding=ENCODING).replace(
'UC_OUT', str(args.output)).replace('UC_STAGING',
str(ucstaging)).replace('GSUVER', gsuver),
cwd=dtpath,
@@ -123,7 +123,7 @@ def clone(args): # pylint: disable=too-many-branches, too-many-locals, too-many-
run(['git', 'remote', 'add', 'origin', 'https://github.com/GoogleCloudPlatform/gsutil'],
cwd=gsupath,
check=True)
- run(['git', 'fetch', '--depth=1', 'origin', 'v%s' % gsuver], cwd=gsupath, check=True)
+ run(['git', 'fetch', '--depth=1', 'origin', f'v{gsuver}'], cwd=gsupath, check=True)
run(['git', 'reset', '--hard', 'FETCH_HEAD'], cwd=gsupath, check=True)
run(['git', 'clean', '-ffdx'], cwd=gsupath, check=True)
get_logger().info('Updating gsutil submodules')
@@ -142,7 +142,7 @@ def clone(args): # pylint: disable=too-many-branches, too-many-locals, too-many-
# gn requires full history to be able to generate last_commit_position.h
get_logger().info('Cloning gn')
gn_commit = re.search(r"gn_version': 'git_revision:([^']+)',",
- Path(args.output / 'DEPS').read_text()).group(1)
+ Path(args.output / 'DEPS').read_text(encoding=ENCODING)).group(1)
if not gn_commit:
get_logger().error('Unable to obtain commit for gn checkout')
sys.exit(1)
@@ -226,7 +226,7 @@ def clone(args): # pylint: disable=too-many-branches, too-many-locals, too-many-
for item in gnpath.iterdir():
if not item.is_dir():
copy(item, args.output / 'tools' / 'gn')
- elif item.name != '.git' and item.name != 'out':
+ elif item.name not in ('.git', 'out'):
copytree(item, args.output / 'tools' / 'gn' / item.name)
move(str(gnpath / 'out' / 'last_commit_position.h'),
str(args.output / 'tools' / 'gn' / 'bootstrap'))
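
A sketch of the DEPS commit extraction seen above, run against a made-up DEPS fragment rather than the real file fetched into args.output:

    import re

    # Hypothetical DEPS fragment; the real one lives in the Chromium tree.
    deps_text = ("'https://chromium.googlesource.com/chromium/tools/"
                 "depot_tools.git' + '@' + 'deadbeef',")
    match = re.search(r"depot_tools\.git'\s*\+\s*'@'\s*\+\s*'([^']+)',", deps_text)
    print(match.group(1) if match else 'no commit found')  # deadbeef
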

View File

@@ -111,7 +111,7 @@ def _substitute_path(path, regex_iter):
except UnicodeDecodeError:
continue
if not content:
- raise UnicodeDecodeError('Unable to decode with any encoding: %s' % path)
+ raise UnicodeDecodeError(f'Unable to decode with any encoding: {path}')
file_subs = 0
for regex_pair in regex_iter:
content, sub_count = regex_pair.pattern.subn(regex_pair.replacement, content)
@@ -211,16 +211,17 @@ def apply_substitution(regex_path, files_path, source_tree, domainsub_cache):
resolved_tree = source_tree.resolve()
regex_pairs = DomainRegexList(regex_path).regex_pairs
fileindex_content = io.BytesIO()
- with tarfile.open(str(domainsub_cache), 'w:%s' % domainsub_cache.suffix[1:]) if domainsub_cache else open(os.devnull, 'w') as cache_tar:
+ with tarfile.open(str(domainsub_cache), f'w:{domainsub_cache.suffix[1:]}',
+ compresslevel=1) if domainsub_cache else open(
+ os.devnull, 'w', encoding=ENCODING) as cache_tar:
for relative_path in filter(len, files_path.read_text().splitlines()):
if _INDEX_HASH_DELIMITER in relative_path:
if domainsub_cache:
# Cache tar will be incomplete; remove it for convenience
cache_tar.close()
domainsub_cache.unlink()
- raise ValueError(
- 'Path "%s" contains the file index hash delimiter "%s"' % relative_path,
- _INDEX_HASH_DELIMITER)
+ raise ValueError(f'Path "{relative_path}" contains '
+ f'the file index hash delimiter "{_INDEX_HASH_DELIMITER}"')
path = resolved_tree / relative_path
if not path.exists():
get_logger().warning('Skipping non-existent path: %s', path)
@@ -234,8 +235,8 @@ def apply_substitution(regex_path, files_path, source_tree, domainsub_cache):
get_logger().info('Path has no substitutions: %s', relative_path)
continue
if domainsub_cache:
- fileindex_content.write('{}{}{:08x}\n'.format(relative_path, _INDEX_HASH_DELIMITER,
- crc32_hash).encode(ENCODING))
+ fileindex_content.write(
+ f'{relative_path}{_INDEX_HASH_DELIMITER}{crc32_hash:08x}\n'.encode(ENCODING))
orig_tarinfo = tarfile.TarInfo(str(Path(_ORIG_DIR) / relative_path))
orig_tarinfo.size = len(orig_content)
with io.BytesIO(orig_content) as orig_file:
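
A sketch of the cache-writing call with the new compresslevel argument; note that gzip accepts compresslevel while 'w:xz' takes preset instead, so the cache suffix matters. File name and contents are examples:

    import io
    import tarfile

    with tarfile.open('domsubcache.tar.gz', 'w:gz', compresslevel=1) as cache_tar:
        data = b'original content'
        info = tarfile.TarInfo('orig/file.txt')
        info.size = len(data)
        # addfile() writes an in-memory member, as the code above does.
        cache_tar.addfile(info, io.BytesIO(data))
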

View File

@@ -25,6 +25,7 @@ from _extraction import extract_tar_file, extract_with_7z, extract_with_winrar
sys.path.insert(0, str(Path(__file__).parent / 'third_party'))
import schema #pylint: disable=wrong-import-position, wrong-import-order
sys.path.pop(0)
# Constants
@@ -74,6 +75,7 @@ class DownloadInfo: #pylint: disable=too-few-public-methods
})
class _DownloadsProperties: #pylint: disable=too-few-public-methods
def __init__(self, section_dict, passthrough_properties, hashes):
self._section_dict = section_dict
self._passthrough_properties = passthrough_properties
@@ -97,7 +99,7 @@ class DownloadInfo: #pylint: disable=too-few-public-methods
value = value.split(DownloadInfo.hash_url_delimiter)
hashes_dict[hash_name] = value
return hashes_dict
- raise AttributeError('"{}" has no attribute "{}"'.format(type(self).__name__, name))
+ raise AttributeError(f'"{type(self).__name__}" has no attribute "{name}"')
def _parse_data(self, path):
"""
@@ -105,6 +107,7 @@ class DownloadInfo: #pylint: disable=too-few-public-methods
Raises schema.SchemaError if validation fails
"""
def _section_generator(data):
for section in data:
if section == configparser.DEFAULTSECT:
@@ -157,11 +160,12 @@ class DownloadInfo: #pylint: disable=too-few-public-methods
return
for name in section_names:
if name not in self:
- raise KeyError('"{}" has no section "{}"'.format(type(self).__name__, name))
+ raise KeyError(f'"{type(self).__name__}" has no section "{name}"')
class _UrlRetrieveReportHook: #pylint: disable=too-few-public-methods
"""Hook for urllib.request.urlretrieve to log progress information to console"""
def __init__(self):
self._max_len_printed = 0
self._last_percentage = None
@@ -181,10 +185,10 @@ class _UrlRetrieveReportHook: #pylint: disable=too-few-public-methods
return
self._last_percentage = percentage
print('\r' + ' ' * self._max_len_printed, end='')
- status_line = 'Progress: {:.1%} of {:,d} B'.format(percentage, total_size)
+ status_line = f'Progress: {percentage:.1%} of {total_size:,d} B'
else:
downloaded_estimate = block_count * block_size
- status_line = 'Progress: {:,d} B of unknown size'.format(downloaded_estimate)
+ status_line = f'Progress: {downloaded_estimate:,d} B of unknown size'
self._max_len_printed = len(status_line)
print('\r' + status_line, end='')
@@ -259,7 +263,7 @@ def _get_hash_pairs(download_properties, cache_dir):
if hash_processor == 'chromium':
yield from _chromium_hashes_generator(cache_dir / hash_filename)
else:
- raise ValueError('Unknown hash_url processor: %s' % hash_processor)
+ raise ValueError(f'Unknown hash_url processor: {hash_processor}')
else:
yield entry_type, entry_value
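
A hedged sketch of verifying one (hash name, value) pair like those yielded above; hashlib.new accepts any algorithm name the platform supports, and the helper name is invented:

    import hashlib

    def check_hash(path, hash_name, expected_hex):
        """Verify a downloaded file against a single hash pair."""
        hasher = hashlib.new(hash_name)
        with open(path, 'rb') as file_obj:
            hasher.update(file_obj.read())
        return hasher.hexdigest() == expected_hex
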

View File

@@ -57,10 +57,10 @@ def _get_archive_writer(output_path, timestamp=None):
timestamp is a file timestamp to use for all files, if set.
"""
if not output_path.suffixes:
- raise ValueError('Output name has no suffix: %s' % output_path.name)
+ raise ValueError(f'Output name has no suffix: {output_path.name}')
if output_path.suffixes[-1].lower() == '.zip':
archive_root = Path(output_path.stem)
- output_archive = zipfile.ZipFile(str(output_path), 'w', zipfile.ZIP_DEFLATED)
+ output_archive = zipfile.ZipFile(str(output_path), 'w', zipfile.ZIP_DEFLATED) # pylint: disable=consider-using-with
zip_date_time = None
if timestamp:
zip_date_time = datetime.datetime.fromtimestamp(timestamp).timetuple()[:6]
@@ -83,17 +83,18 @@ def _get_archive_writer(output_path, timestamp=None):
zip_write(str(in_path), str(arc_path))
elif '.tar' in output_path.name.lower():
if len(output_path.suffixes) >= 2 and output_path.suffixes[-2].lower() == '.tar':
- tar_mode = 'w:%s' % output_path.suffixes[-1][1:]
+ tar_mode = f'w:{output_path.suffixes[-1][1:]}'
archive_root = Path(output_path.with_suffix('').stem)
elif output_path.suffixes[-1].lower() == '.tar':
tar_mode = 'w'
archive_root = Path(output_path.stem)
else:
- raise ValueError('Could not detect tar format for output: %s' % output_path.name)
+ raise ValueError(f'Could not detect tar format for output: {output_path.name}')
if timestamp:
class TarInfoFixedTimestamp(tarfile.TarInfo):
"""TarInfo class with predefined constant mtime"""
@property
def mtime(self):
"""Return predefined timestamp"""
@@ -106,10 +107,13 @@ def _get_archive_writer(output_path, timestamp=None):
tarinfo_class = TarInfoFixedTimestamp
else:
tarinfo_class = tarfile.TarInfo
- output_archive = tarfile.open(str(output_path), tar_mode, tarinfo=tarinfo_class)
- add_func = lambda in_path, arc_path: output_archive.add(str(in_path), str(arc_path))
+ output_archive = tarfile.open(str(output_path), tar_mode, tarinfo=tarinfo_class) # pylint: disable=consider-using-with
+ def add_func(in_path, arc_path):
+ """Add files to tar archive"""
+ output_archive.add(str(in_path), str(arc_path))
else:
- raise ValueError('Unknown archive extension with name: %s' % output_path.name)
+ raise ValueError(f'Unknown archive extension with name: {output_path.name}')
return output_archive, add_func, archive_root
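
An alternative to the TarInfo subclass above, shown as a sketch: tarfile's add() accepts a filter callable, which achieves the same fixed-mtime effect; the file name and timestamp are examples:

    import tarfile

    FIXED_TIMESTAMP = 1600000000  # arbitrary example value

    def _fixed_mtime(tarinfo):
        """Pin every member's mtime for reproducible archives."""
        tarinfo.mtime = FIXED_TIMESTAMP
        return tarinfo

    with tarfile.open('chromium.tar', 'w') as output_archive:
        output_archive.add('somefile.txt', arcname='root/somefile.txt',
                           filter=_fixed_mtime)  # 'somefile.txt' must exist
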
@@ -147,7 +151,7 @@ def _files_generator_by_args(args):
def _list_callback(args):
"""List files needed to run Chromium."""
- sys.stdout.writelines('%s\n' % x for x in _files_generator_by_args(args))
+ sys.stdout.writelines(f'{x}\n' for x in _files_generator_by_args(args))
def _archive_callback(args):

View File

@@ -11,6 +11,7 @@ Generate standalone script that performs the domain substitution.
from pathlib import Path
import argparse
import re
+ from _common import ENCODING
def make_domain_substitution_script(regex_path, files_path, output_path):
@@ -41,8 +42,8 @@ def make_domain_substitution_script(regex_path, files_path, output_path):
files_list_str = '\n'.join(files_list)
perl_replace_list_str = '\n'.join([f' {x};' for x in perl_replace_list])
- with open(output_path, 'w') as out:
- out.write("""#!/bin/sh -e
+ with open(output_path, 'w', encoding=ENCODING) as out:
+ out.write(f"""#!/bin/sh -e
#
# This script performs domain substitution on the Chromium source files.
#
@@ -54,25 +55,25 @@ def make_domain_substitution_script(regex_path, files_path, output_path):
test -f build/config/compiler/BUILD.gn
# These filenames may contain spaces and/or other unusual characters
- print_file_list() {
+ print_file_list() {{
cat <<'__END__'
- %s
+ {files_list_str}
__END__
- }
+ }}
echo "Creating backup archive ..."
backup=domain-substitution.orig.tar
print_file_list | tar cf $backup --verbatim-files-from --files-from=-
echo "Applying ungoogled-chromium domain substitution to %d files ..."
echo "Applying ungoogled-chromium domain substitution to {len(files_list)} files ..."
print_file_list | xargs -d '\\n' perl -0777 -C0 -pwi -e '
- %s
+ {perl_replace_list_str}
'
# end
""" % (files_list_str, len(files_list), perl_replace_list_str))
""")
def _callback(args):
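
The rewrite above switches the shell-script template from %-interpolation to an f-string, which requires doubling the shell braces so they render literally. A minimal sketch with example entries:

    files_list = ['build/args.gn', 'net/dns/dns util.cc']  # example entries
    files_list_str = '\n'.join(files_list)  # precomputed: no backslashes in f-string expressions
    script = f"""#!/bin/sh -e
    print_file_list() {{
    cat <<'__END__'
    {files_list_str}
    __END__
    }}
    echo "Processing {len(files_list)} files ..."
    """
    print(script)
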

View File

@@ -60,7 +60,7 @@ def find_and_check_patch(patch_bin_path=None):
raise ValueError('Could not find patch from PATCH_BIN env var or "which patch"')
if not patch_bin_path.exists():
- raise ValueError('Could not find the patch binary: {}'.format(patch_bin_path))
+ raise ValueError(f'Could not find the patch binary: {patch_bin_path}')
# Ensure patch actually runs
cmd = [str(patch_bin_path), '--version']
@@ -73,7 +73,7 @@ def find_and_check_patch(patch_bin_path=None):
get_logger().error('"%s" returned non-zero exit code', ' '.join(cmd))
get_logger().error('stdout:\n%s', result.stdout)
get_logger().error('stderr:\n%s', result.stderr)
- raise RuntimeError('Got non-zero exit code running "{}"'.format(' '.join(cmd)))
+ raise RuntimeError(f"Got non-zero exit code running \"{' '.join(cmd)}\"")
return patch_bin_path
@@ -167,18 +167,16 @@ def merge_patches(source_iter, destination, prepend=False):
if prepend:
if not (destination / 'series').exists():
raise FileNotFoundError(
- 'Could not find series file in existing destination: {}'.format(destination /
- 'series'))
+ f"Could not find series file in existing destination: {destination / 'series'}")
known_paths.update(generate_patches_from_series(destination))
else:
- raise FileExistsError('destination already exists: {}'.format(destination))
+ raise FileExistsError(f'destination already exists: {destination}')
for source_dir in source_iter:
patch_paths = tuple(generate_patches_from_series(source_dir))
patch_intersection = known_paths.intersection(patch_paths)
if patch_intersection:
- raise FileExistsError(
- 'Patches from {} have conflicting paths with other sources: {}'.format(
- source_dir, patch_intersection))
+ raise FileExistsError(f'Patches from {source_dir} have conflicting paths '
+ f'with other sources: {patch_intersection}')
series.extend(patch_paths)
_copy_files(patch_paths, source_dir, destination)
if prepend and (destination / 'series').exists():