Merge pull request #28743 from mrc0mmand/lint-python-scripts

tools: lint Python scripts
This commit is contained in:
Luca Boccassi
2023-08-10 19:13:45 +01:00
committed by GitHub
24 changed files with 205 additions and 166 deletions

23
.pylintrc Normal file
View File

@@ -0,0 +1,23 @@
[MAIN]
extension-pkg-allow-list=lxml
[MESSAGES CONTROL]
disable=fixme,
invalid-name,
line-too-long,
missing-class-docstring,
missing-docstring,
missing-function-docstring,
missing-module-docstring,
too-few-public-methods,
too-many-arguments,
too-many-boolean-expressions,
too-many-branches,
too-many-instance-attributes,
too-many-lines,
too-many-locals,
too-many-public-methods,
too-many-return-statements,
too-many-statements,
unspecified-encoding,

View File

@@ -17,10 +17,7 @@
# You should have received a copy of the GNU Lesser General Public License
# along with systemd; If not, see <https://www.gnu.org/licenses/>.
# pylint: disable=missing-docstring,invalid-name,import-outside-toplevel
# pylint: disable=consider-using-with,unspecified-encoding,line-too-long
# pylint: disable=too-many-locals,too-many-statements,too-many-return-statements
# pylint: disable=too-many-branches,redefined-builtin,fixme
# pylint: disable=import-outside-toplevel,consider-using-with,redefined-builtin
import argparse
import os

View File

@@ -1,10 +1,9 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=missing-docstring,redefined-outer-name,invalid-name
# pylint: disable=unused-import,import-outside-toplevel,useless-else-on-loop
# pylint: disable=consider-using-with,wrong-import-position,unspecified-encoding
# pylint: disable=protected-access
# pylint: disable=protected-access,redefined-outer-name
import base64
import json

View File

@@ -16,12 +16,8 @@
# You should have received a copy of the GNU Lesser General Public License
# along with systemd; If not, see <https://www.gnu.org/licenses/>.
# pylint: disable=missing-docstring,invalid-name,import-outside-toplevel
# pylint: disable=consider-using-with,unspecified-encoding,line-too-long
# pylint: disable=too-many-locals,too-many-statements,too-many-return-statements
# pylint: disable=too-many-branches,too-many-lines,too-many-instance-attributes
# pylint: disable=too-many-arguments,unnecessary-lambda-assignment,fixme
# pylint: disable=unused-argument
# pylint: disable=import-outside-toplevel,consider-using-with,unused-argument
# pylint: disable=unnecessary-lambda-assignment
import argparse
import configparser

View File

@@ -1,8 +1,5 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=line-too-long,too-many-lines,too-many-branches,too-many-statements,too-many-arguments
# pylint: disable=too-many-public-methods,too-many-boolean-expressions,invalid-name,no-self-use
# pylint: disable=missing-function-docstring,missing-class-docstring,missing-module-docstring
#
# Copyright © 2017 Michal Sekletar <msekleta@redhat.com>

View File

@@ -1,8 +1,5 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=line-too-long,too-many-lines,too-many-branches,too-many-statements,too-many-arguments
# pylint: disable=too-many-public-methods,too-many-boolean-expressions,invalid-name
# pylint: disable=missing-function-docstring,missing-class-docstring,missing-module-docstring
# systemd-networkd tests
# These tests can be executed in the systemd mkosi image when booted in QEMU. After booting the QEMU VM,

View File

@@ -1,6 +1,6 @@
#!/usr/bin/python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=line-too-long,invalid-name,missing-module-docstring,missing-function-docstring,too-many-statements,broad-except
# pylint: disable=broad-except
import argparse
import logging

View File

@@ -1,10 +1,8 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=missing-docstring,redefined-outer-name,invalid-name
# pylint: disable=unspecified-encoding,no-else-return,line-too-long,too-many-lines
# pylint: disable=multiple-imports,too-many-instance-attributes,consider-using-with
# pylint: disable=global-statement
# pylint: disable=redefined-outer-name,no-else-return,multiple-imports
# pylint: disable=consider-using-with,global-statement
# udev test
#

View File

@@ -1,5 +1,6 @@
#!/usr/bin/python
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=consider-using-with
"""
A helper to compare 'systemd-analyze dump' outputs.
@@ -13,8 +14,9 @@ tools/analyze-dump-sort.py /var/tmp/{dump1,dump2} → this does a diff from dump
"""
import argparse
import tempfile
import subprocess
import tempfile
def sort_dump(sourcefile, destfile=None):
if destfile is None:

View File

@@ -28,57 +28,57 @@ Prints out journal entries with no or bad catalog explanations.
"""
import re
from systemd import journal, id128
j = journal.Reader()
from systemd import id128, journal
logged = set()
pattern = re.compile('@[A-Z0-9_]+@')
mids = {v:k for k,v in id128.__dict__.items()
if k.startswith('SD_MESSAGE')}
freq = 1000
def log_entry(x):
if 'CODE_FILE' in x:
# pylint: disable=consider-using-f-string
def log_entry(entry):
if 'CODE_FILE' in entry:
# some of our code was using 'CODE_FUNCTION' instead of 'CODE_FUNC'
print('{}:{} {}'.format(x.get('CODE_FILE', '???'),
x.get('CODE_LINE', '???'),
x.get('CODE_FUNC', None) or x.get('CODE_FUNCTION', '???')))
print(' {}'.format(x.get('MESSAGE', 'no message!')))
for k, v in x.items():
print('{}:{} {}'.format(entry.get('CODE_FILE', '???'),
entry.get('CODE_LINE', '???'),
entry.get('CODE_FUNC', None) or entry.get('CODE_FUNCTION', '???')))
print(' {}'.format(entry.get('MESSAGE', 'no message!')))
for k, v in entry.items():
if k.startswith('CODE_') or k in {'MESSAGE_ID', 'MESSAGE'}:
continue
print(' {}={}'.format(k, v))
print(f' {k}={v}')
print()
for i, x in enumerate(j):
if i % freq == 0:
print(i, end='\r')
if __name__ == '__main__':
j = journal.Reader()
logged = set()
pattern = re.compile('@[A-Z0-9_]+@')
try:
mid = x['MESSAGE_ID']
except KeyError:
continue
name = mids.get(mid, 'unknown')
mids = { v:k for k,v in id128.__dict__.items() if k.startswith('SD_MESSAGE') }
try:
desc = journal.get_catalog(mid)
except FileNotFoundError:
if mid in logged:
for i, x in enumerate(j):
if i % 1000 == 0:
print(i, end='\r')
try:
mid = x['MESSAGE_ID']
except KeyError:
continue
name = mids.get(mid, 'unknown')
try:
desc = journal.get_catalog(mid)
except FileNotFoundError:
if mid in logged:
continue
print(f'{name} {mid.hex}: no catalog entry')
log_entry(x)
logged.add(mid)
continue
print('{} {.hex}: no catalog entry'.format(name, mid))
log_entry(x)
logged.add(mid)
continue
fields = [field[1:-1] for field in pattern.findall(desc)]
for field in fields:
index = (mid, field)
if field in x or index in logged:
continue
print('{} {.hex}: no field {}'.format(name, mid, field))
log_entry(x)
logged.add(index)
fields = [field[1:-1] for field in pattern.findall(desc)]
for field in fields:
index = (mid, field)
if field in x or index in logged:
continue
print(f'{name} {mid.hex}: no field {field}')
log_entry(x)
logged.add(index)

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=missing-docstring,invalid-name,unspecified-encoding,consider-using-with
# pylint: disable=consider-using-with
import os
import pathlib
@@ -28,5 +28,5 @@ def check_file(filename):
return good
if __name__ == '__main__':
good = all(check_file(name) for name in sys.argv[1:])
sys.exit(0 if good else 1)
all_good = all(check_file(name) for name in sys.argv[1:])
sys.exit(0 if all_good else 1)

View File

@@ -2,7 +2,8 @@
# SPDX-License-Identifier: LGPL-2.1-or-later
from argparse import ArgumentParser
from pathlib import Path
from subprocess import run, PIPE
from subprocess import PIPE, run
def extract_interfaces_xml(output_dir, executable):
proc = run(

View File

@@ -1,5 +1,9 @@
#!/usr/bin/python
# SPDX-License-Identifier: LGPL-2.1-or-later
#
# Note: the no-value-for-parameter here is expected, as the click module
# decorators modify function arguments which pylint doesn't know
# pylint: disable=no-value-for-parameter
"""
A program to parse auxv (e.g. /proc/self/auxv).
@@ -109,12 +113,12 @@ def dump(endian, field_width, file):
width = {32:'II', 64:'QQ'}[field_width]
format = f'{endian}{width}'
print(f'# {format=}')
format_str = f'{endian}{width}'
print(f'# {format_str=}')
seen_null = False
for item in struct.iter_unpack(format, data):
for item in struct.iter_unpack(format_str, data):
key, val = item
name = AT_AUXV_NAMES.get(key, f'unknown ({key})')
if name.endswith(('UID', 'GID')):
@@ -123,7 +127,7 @@ def dump(endian, field_width, file):
pref, fmt = '0x', 'x'
if seen_null:
print('# trailing garbarbage after AT_NULL')
print('# trailing garbage after AT_NULL')
print(f'{name:18} = {pref}{val:{fmt}}')

View File

@@ -19,7 +19,7 @@
# the resulting binary useless. gnu-efi relies on this method and contains a stub that performs the
# ELF dynamic relocations at runtime.
# pylint: disable=missing-docstring,invalid-name,attribute-defined-outside-init
# pylint: disable=attribute-defined-outside-init
import argparse
import hashlib

View File

@@ -1,5 +1,6 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=consider-using-f-string
import gdb
@@ -9,7 +10,7 @@ class sd_dump_hashmaps(gdb.Command):
def __init__(self):
super().__init__("sd_dump_hashmaps", gdb.COMMAND_DATA, gdb.COMPLETE_NONE)
def invoke(self, arg, from_tty):
def invoke(self, arg, _from_tty):
d = gdb.parse_and_eval("hashmap_debug_list")
hashmap_type_info = gdb.parse_and_eval("hashmap_type_info")
uchar_t = gdb.lookup_type("unsigned char")

View File

@@ -1,5 +1,6 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=unbalanced-tuple-unpacking,consider-using-f-string,consider-using-with
"""
Generate %-from-name.gperf from %-list.txt
@@ -7,18 +8,22 @@ Generate %-from-name.gperf from %-list.txt
import sys
name, prefix, input = sys.argv[1:]
if __name__ == '__main__':
if len(sys.argv) != 4:
sys.exit(f'Usage: {sys.argv[0]} name prefix file')
print("""\
name, prefix, file = sys.argv[1:]
print("""\
%{
#if __GNUC__ >= 7
_Pragma("GCC diagnostic ignored \\"-Wimplicit-fallthrough\\"")
#endif
%}""")
print("""\
struct {}_name {{ const char* name; int id; }};
print(f"""\
struct {name}_name {{ const char* name; int id; }};
%null-strings
%%""".format(name))
%%""")
for line in open(input):
print("{0}, {1}{0}".format(line.rstrip(), prefix))
for line in open(file):
print("{0}, {1}{0}".format(line.rstrip(), prefix))

View File

@@ -5,7 +5,7 @@ import re
import sys
import uuid
HEADER = f'''\
HEADER = '''\
| Name | Partition Type UUID | Allowed File Systems | Explanation |
|------|---------------------|----------------------|-------------|
'''
@@ -149,21 +149,21 @@ def extract(file):
name = line.split()[1]
if m2 := re.match(r'^(ROOT|USR)_([A-Z0-9]+|X86_64|PPC64_LE|MIPS_LE|MIPS64_LE)(|_VERITY|_VERITY_SIG)\s+SD_ID128_MAKE\((.*)\)', m.group(1)):
type, arch, suffix, u = m2.groups()
ptype, arch, suffix, u = m2.groups()
u = uuid.UUID(u.replace(',', ''))
assert arch in ARCHITECTURES, f'{arch} not in f{ARCHITECTURES}'
type = f'{type}{suffix}'
assert type in TYPES
ptype = f'{type}{suffix}'
assert ptype in TYPES
yield name, type, arch, u
yield name, ptype, arch, u
elif m2 := re.match(r'(\w+)\s+SD_ID128_MAKE\((.*)\)', m.group(1)):
type, u = m2.groups()
ptype, u = m2.groups()
u = uuid.UUID(u.replace(',', ''))
yield name, type, None, u
yield name, ptype, None, u
else:
raise Exception(f'Failed to match: {m.group(1)}')
raise ValueError(f'Failed to match: {m.group(1)}')
def generate(defines):
prevtype = None
@@ -172,21 +172,21 @@ def generate(defines):
uuids = set()
for name, type, arch, uuid in defines:
tdesc = TYPES[type]
for name, ptype, arch, puuid in defines:
tdesc = TYPES[ptype]
adesc = '' if arch is None else f' ({ARCHITECTURES[arch]})'
# Let's make sure that we didn't select&paste the same value twice
assert uuid not in uuids
uuids.add(uuid)
assert puuid not in uuids
uuids.add(puuid)
if type != prevtype:
prevtype = type
morea, moreb = DESCRIPTIONS[type]
if ptype != prevtype:
prevtype = ptype
morea, moreb = DESCRIPTIONS[ptype]
else:
morea = moreb = 'ditto'
print(f'| _{tdesc}{adesc}_ | `{uuid}` `{name}` | {morea} | {moreb} |')
print(f'| _{tdesc}{adesc}_ | `{puuid}` `{name}` | {morea} | {moreb} |')
if __name__ == '__main__':
known = extract(sys.stdin)

View File

@@ -12,13 +12,13 @@ for entry in chromiumos.gen_autosuspend_rules.PCI_IDS:
vendor, device = entry.split(':')
vendor = int(vendor, 16)
device = int(device, 16)
print('pci:v{:08X}d{:08X}*'.format(vendor, device))
print(f'pci:v{vendor:08X}d{device:08X}*')
print('# usb:v<VEND>p<PROD> (4 uppercase hexadecimal digits twice)')
for entry in chromiumos.gen_autosuspend_rules.USB_IDS:
vendor, product = entry.split(':')
vendor = int(vendor, 16)
product = int(product, 16)
print('usb:v{:04X}p{:04X}*'.format(vendor, product))
print(f'usb:v{vendor:04X}p{product:04X}*')
print(' ID_AUTOSUSPEND=1')

View File

@@ -1,12 +1,13 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
import sys
import collections
import re
from xml_helper import xml_parse, xml_print, tree
import sys
from copy import deepcopy
from xml_helper import tree, xml_parse, xml_print
COLOPHON = '''\
This index contains {count} entries in {sections} sections,
referring to {pages} individual manual pages.
@@ -101,7 +102,7 @@ def _extract_directives(directive_groups, formatting, page):
formatting[name.text] = name
def _make_section(template, name, directives, formatting):
varlist = template.find(".//*[@id='{}']".format(name))
varlist = template.find(f".//*[@id='{name}']")
for varname, manpages in sorted(directives.items()):
entry = tree.SubElement(varlist, 'varlistentry')
term = tree.SubElement(entry, 'term')
@@ -161,14 +162,14 @@ def make_page(template_path, xml_files):
for page in xml_files:
try:
_extract_directives(directive_groups, formatting, page)
except Exception:
raise ValueError("failed to process " + page)
except Exception as e:
raise ValueError("failed to process " + page) from e
return _make_page(template, directive_groups, formatting)
if __name__ == '__main__':
with open(sys.argv[1], 'wb') as f:
template_path = sys.argv[2]
xml_files = sys.argv[3:]
xml = make_page(template_path, xml_files)
f.write(xml_print(xml))
_template_path = sys.argv[2]
_xml_files = sys.argv[3:]
_xml = make_page(_template_path, _xml_files)
f.write(xml_print(_xml))

View File

@@ -2,9 +2,10 @@
# SPDX-License-Identifier: LGPL-2.1-or-later
import collections
import sys
import re
from xml_helper import xml_parse, xml_print, tree
import sys
from xml_helper import tree, xml_parse, xml_print
MDASH = '' if sys.version_info.major >= 3 else ' -- '
@@ -44,9 +45,9 @@ This index contains {count} entries, referring to {pages} individual manual page
def check_id(page, t):
id = t.getroot().get('id')
if not re.search('/' + id + '[.]', page):
raise ValueError("id='{}' is not the same as page name '{}'".format(id, page))
page_id = t.getroot().get('id')
if not re.search('/' + page_id + '[.]', page):
raise ValueError(f"id='{page_id}' is not the same as page name '{page}'")
def make_index(pages):
index = collections.defaultdict(list)
@@ -68,7 +69,7 @@ def add_letter(template, letter, pages):
title.text = letter
para = tree.SubElement(refsect1, 'para')
for info in sorted(pages, key=lambda info: str.lower(info[0])):
refname, section, purpose, realname = info
refname, section, purpose, _realname = info
b = tree.SubElement(para, 'citerefentry')
c = tree.SubElement(b, 'refentrytitle')
@@ -86,7 +87,7 @@ def add_summary(template, indexpages):
for group in indexpages:
count += len(group)
for info in group:
refname, section, purpose, realname = info
_refname, section, _purpose, realname = info
pages.add((realname, section))
refsect1 = tree.fromstring(SUMMARY)
@@ -107,5 +108,5 @@ def make_page(*xml_files):
return template
if __name__ == '__main__':
with open(sys.argv[1], 'wb') as f:
f.write(xml_print(make_page(*sys.argv[2:])))
with open(sys.argv[1], 'wb') as file:
file.write(xml_print(make_page(*sys.argv[2:])))

View File

@@ -1,5 +1,6 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=consider-using-with
import ast
import os
@@ -30,7 +31,7 @@ def render(filename, defines):
undefined=jinja2.StrictUndefined)
return template.render(defines)
if __name__ == '__main__':
def main():
defines = parse_config_h(sys.argv[1])
defines.update(parse_config_h(sys.argv[2]))
output = render(sys.argv[3], defines)
@@ -38,3 +39,6 @@ if __name__ == '__main__':
f.write(output)
info = os.stat(sys.argv[3])
os.chmod(sys.argv[4], info.st_mode)
if __name__ == '__main__':
main()

View File

@@ -1,5 +1,6 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=superfluous-parens,consider-using-with
import argparse
import collections
@@ -36,6 +37,8 @@ GREEN = '\x1b[32m'
YELLOW = '\x1b[33m'
RESET = '\x1b[39m'
arguments = None
def xml_parser():
return etree.XMLParser(no_network=True,
remove_comments=False,
@@ -62,38 +65,38 @@ def print_method(declarations, elem, *, prefix, file, is_signal=False):
argname = arg.get('name')
if argname is None:
if opts.print_errors:
if arguments.print_errors:
print(f'method {name}: argument {num+1} has no name', file=sys.stderr)
argname = 'UNNAMED'
type = arg.get('type')
argtype = arg.get('type')
if not is_signal:
direction = arg.get('direction')
print(f'''{lead if num > 0 else ''}{direction:3} {type} {argname}''', file=file, end='')
print(f'''{lead if num > 0 else ''}{direction:3} {argtype} {argname}''', file=file, end='')
else:
print(f'''{lead if num > 0 else ''}{type} {argname}''', file=file, end='')
print(f'''{lead if num > 0 else ''}{argtype} {argname}''', file=file, end='')
print(f');', file=file)
print(');', file=file)
ACCESS_MAP = {
'read' : 'readonly',
'write' : 'readwrite',
}
def value_ellipsis(type):
if type == 's':
return "'...'";
if type[0] == 'a':
inner = value_ellipsis(type[1:])
return f"[{inner}{', ...' if inner != '...' else ''}]";
def value_ellipsis(prop_type):
    """Return a placeholder example value for a D-Bus type signature.

    Strings become a quoted ellipsis, arrays recurse on their element
    type and are wrapped in brackets, and anything else is rendered as
    a bare ellipsis.
    """
    if prop_type == 's':
        return "'...'"
    if prop_type[0] == 'a':
        # Recurse on the element type; add ", ..." only when the element
        # placeholder is more specific than a bare ellipsis.
        element = value_ellipsis(prop_type[1:])
        suffix = '' if element == '...' else ', ...'
        return f"[{element}{suffix}]"
    return '...'
def print_property(declarations, elem, *, prefix, file):
name = elem.get('name')
type = elem.get('type')
access = elem.get('access')
prop_name = elem.get('name')
prop_type = elem.get('type')
prop_access = elem.get('access')
declarations['property'].append(name)
declarations['property'].append(prop_name)
# @org.freedesktop.DBus.Property.EmitsChangedSignal("false")
# @org.freedesktop.systemd1.Privileged("true")
@@ -104,8 +107,8 @@ def print_property(declarations, elem, *, prefix, file):
anno_value = anno.get('value')
print(f'''{prefix}@{anno_name}("{anno_value}")''', file=file)
access = ACCESS_MAP.get(access, access)
print(f'''{prefix}{access} {type} {name} = {value_ellipsis(type)};''', file=file)
prop_access = ACCESS_MAP.get(prop_access, prop_access)
print(f'''{prefix}{prop_access} {prop_type} {prop_name} = {value_ellipsis(prop_type)};''', file=file)
def print_interface(iface, *, prefix, file, print_boring, only_interface, declarations):
name = iface.get('name')
@@ -163,7 +166,7 @@ def check_documented(document, declarations, stats):
assert False, (klass, item)
if not document_has_elem_with_text(document, elem, item_repr):
if opts.print_errors:
if arguments.print_errors:
print(f'{klass} {item} is not documented :(')
missing.append((klass, item))
@@ -189,7 +192,7 @@ def xml_to_text(destination, xml, *, only_interface=None):
if not name in BORING_INTERFACES:
interfaces.append(name)
print(f'''}};''', file=file)
print('''};''', file=file)
return file.getvalue(), declarations, interfaces
@@ -202,7 +205,7 @@ def subst_output(document, programlisting, stats):
node = programlisting.get('node')
interface = programlisting.get('interface')
argv = [f'{opts.build_dir}/{executable}', f'--bus-introspect={interface}']
argv = [f'{arguments.build_dir}/{executable}', f'--bus-introspect={interface}']
if isinstance(shlex_join, Exception):
print(f'COMMAND: {" ".join(shlex_quote(arg) for arg in argv)}')
else:
@@ -225,14 +228,11 @@ def subst_output(document, programlisting, stats):
# delete old comments
for child in parent:
if (child.tag == etree.Comment
and 'Autogenerated' in child.text):
if child.tag is etree.Comment and 'Autogenerated' in child.text:
parent.remove(child)
if (child.tag == etree.Comment
and 'not documented' in child.text):
if child.tag is etree.Comment and 'not documented' in child.text:
parent.remove(child)
if (child.tag == "variablelist"
and child.attrib.get("generated",False) == "True"):
if child.tag == "variablelist" and child.attrib.get("generated", False) == "True":
parent.remove(child)
# insert pointer for systemd-directives generation
@@ -282,7 +282,7 @@ def process(page):
# print('parsing {}'.format(name), file=sys.stderr)
if xml.tag != 'refentry':
return
return None
stats = collections.Counter()
@@ -297,11 +297,11 @@ def process(page):
out_text[out_text.find('<refentryinfo'):] +
'\n')
if not opts.test:
if not arguments.test:
with open(page, 'w') as out:
out.write(out_text)
return dict(stats=stats, modified=(out_text != src))
return { "stats" : stats, "modified" : out_text != src }
def parse_args():
p = argparse.ArgumentParser()
@@ -313,25 +313,27 @@ def parse_args():
opts.print_errors = not opts.test
return opts
if __name__ == '__main__':
opts = parse_args()
def main():
# pylint: disable=global-statement
global arguments
arguments = parse_args()
for item in (etree, shlex_quote):
if isinstance(item, Exception):
print(item, file=sys.stderr)
exit(77 if opts.test else 1)
sys.exit(77 if arguments.test else 1)
if not os.path.exists(f'{opts.build_dir}/systemd'):
exit(f"{opts.build_dir}/systemd doesn't exist. Use --build-dir=.")
if not os.path.exists(f'{arguments.build_dir}/systemd'):
sys.exit(f"{arguments.build_dir}/systemd doesn't exist. Use --build-dir=.")
stats = {page.split('/')[-1] : process(page) for page in opts.pages}
stats = {page.split('/')[-1] : process(page) for page in arguments.pages}
# Let's print all statistics at the end
mlen = max(len(page) for page in stats)
total = sum((item['stats'] for item in stats.values()), collections.Counter())
total = 'total', dict(stats=total, modified=False)
total = 'total', { "stats" : total, "modified" : False }
modified = []
classification = 'OUTDATED' if opts.test else 'MODIFIED'
classification = 'OUTDATED' if arguments.test else 'MODIFIED'
for page, info in sorted(stats.items()) + [total]:
m = info['stats']['missing']
t = info['stats']['total']
@@ -342,6 +344,9 @@ if __name__ == '__main__':
color = RED if m > t/2 else (YELLOW if m else GREEN)
print(f'{color}{p:{mlen + 1}} {t - m}/{t} {c}{RESET}')
if opts.test and modified:
exit(f'Outdated pages: {", ".join(modified)}\n'
f'Hint: ninja -C {opts.build_dir} update-dbus-docs')
if arguments.test and modified:
sys.exit(f'Outdated pages: {", ".join(modified)}\n'
f'Hint: ninja -C {arguments.build_dir} update-dbus-docs')
if __name__ == '__main__':
main()

View File

@@ -1,16 +1,17 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
from __future__ import print_function
import collections
import glob
import pprint
import sys
from pathlib import Path
import pprint
from xml_helper import xml_parse
def man(page, number):
    """Return the conventional man-page filename for *page* in section *number*,
    e.g. ('systemd', 1) -> 'systemd.1'."""
    # The span contained two consecutive return statements (stale .format()
    # line left above its f-string replacement); keep only the final version —
    # the second return was unreachable.
    return f'{page}.{number}'
def add_rules(rules, name):
xml = xml_parse(name)
@@ -60,7 +61,7 @@ MESON_FOOTER = '''\
# Really, do not edit.
'''
def make_mesonfile(rules, dist_files):
def make_mesonfile(rules, _dist_files):
# reformat rules as
# grouped = [ [name, section, [alias...], condition], ...]
#
@@ -77,7 +78,7 @@ def make_mesonfile(rules, dist_files):
for p, aliases in sorted(grouped.items()) ]
return '\n'.join((MESON_HEADER, pprint.pformat(lines)[1:-1], MESON_FOOTER))
if __name__ == '__main__':
def main():
source_glob = sys.argv[1]
target = Path(sys.argv[2])
@@ -95,3 +96,6 @@ if __name__ == '__main__':
tmp = target.with_suffix('.tmp')
tmp.write_text(text)
tmp.rename(target)
if __name__ == '__main__':
main()

View File

@@ -3,14 +3,18 @@
from lxml import etree as tree
class CustomResolver(tree.Resolver):
def resolve(self, url, id, context):
def resolve(self, url, _id, context):
if 'custom-entities.ent' in url:
return self.resolve_filename('man/custom-entities.ent', context)
if 'ethtool-link-mode' in url:
return self.resolve_filename('src/shared/ethtool-link-mode.xml', context)
return None
_parser = tree.XMLParser()
# pylint: disable=no-member
_parser.resolvers.add(CustomResolver())
def xml_parse(page):