mirror of
https://github.com/morgan9e/helium
synced 2026-04-14 00:14:20 +09:00
buildkit: Initial commit
buildkit will succeed utilikit. See #248 This is still a WIP. This commit contains a mostly-completed common.py module.
This commit is contained in:
17
buildkit-launcher.py
Executable file
17
buildkit-launcher.py
Executable file
@@ -0,0 +1,17 @@
|
||||
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-

# Copyright (c) 2017 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Simple buildkit launcher for running from anywhere"""

import sys
import pathlib

# Temporarily prepend this script's directory so the sibling ``buildkit``
# package is importable regardless of the current working directory.
sys.path.insert(0, str(pathlib.Path(__file__).resolve().parent))
import buildkit.cli
# Remove the entry again so user code does not accidentally shadow or
# re-import modules from this directory.
sys.path.pop(0)

buildkit.cli.main()
|
||||
0
buildkit/__init__.py
Normal file
0
buildkit/__init__.py
Normal file
12
buildkit/__main__.py
Normal file
12
buildkit/__main__.py
Normal file
@@ -0,0 +1,12 @@
|
||||
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-

# Copyright (c) 2017 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""CLI entry point for invoking module directly"""

from . import cli

# Delegate straight to the CLI; runs when invoked as ``python -m buildkit``.
cli.main()
|
||||
8
buildkit/_lib/README.md
Normal file
8
buildkit/_lib/README.md
Normal file
@@ -0,0 +1,8 @@
|
||||
This directory contains third-party libraries used by buildkit.
|
||||
|
||||
Contents:
|
||||
|
||||
* [python-quilt](//github.com/bjoernricks/python-quilt)
|
||||
* This is used if `quilt` is not provided by the user or environment.
|
||||
* [python-unidiff](//github.com/matiasb/python-unidiff)
|
||||
* For parsing and modifying unified diffs.
|
||||
41
buildkit/_lib/unidiff/__init__.py
Normal file
41
buildkit/_lib/unidiff/__init__.py
Normal file
@@ -0,0 +1,41 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# The MIT License (MIT)
|
||||
# Copyright (c) 2014-2017 Matias Bordese
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
|
||||
# OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
|
||||
"""Unidiff parsing library."""
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from unidiff import __version__
|
||||
from unidiff.patch import (
|
||||
DEFAULT_ENCODING,
|
||||
LINE_TYPE_ADDED,
|
||||
LINE_TYPE_CONTEXT,
|
||||
LINE_TYPE_REMOVED,
|
||||
Hunk,
|
||||
PatchedFile,
|
||||
PatchSet,
|
||||
UnidiffParseError,
|
||||
)
|
||||
|
||||
VERSION = __version__.__version__
|
||||
24
buildkit/_lib/unidiff/__version__.py
Normal file
24
buildkit/_lib/unidiff/__version__.py
Normal file
@@ -0,0 +1,24 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# The MIT License (MIT)
|
||||
# Copyright (c) 2014-2017 Matias Bordese
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
|
||||
# OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
__version__ = '0.5.5'
|
||||
60
buildkit/_lib/unidiff/constants.py
Normal file
60
buildkit/_lib/unidiff/constants.py
Normal file
@@ -0,0 +1,60 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# The MIT License (MIT)
|
||||
# Copyright (c) 2014-2017 Matias Bordese
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
|
||||
# OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
|
||||
"""Useful constants and regexes used by the package."""
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import re
|
||||
|
||||
|
||||
# "--- a/path\ttimestamp" line introducing the source file of a diff
RE_SOURCE_FILENAME = re.compile(
    r'^--- (?P<filename>[^\t\n]+)(?:\t(?P<timestamp>[^\n]+))?')
# "+++ b/path\ttimestamp" line introducing the target file of a diff
RE_TARGET_FILENAME = re.compile(
    r'^\+\+\+ (?P<filename>[^\t\n]+)(?:\t(?P<timestamp>[^\n]+))?')

# @@ (source offset, length) (target offset, length) @@ (section header)
# The ",length" parts are optional; a missing length means 1.
RE_HUNK_HEADER = re.compile(
    r"^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))?\ @@[ ]?(.*)")

#    kept line (context)
# \n empty line (treat like context)
# +  added line
# -  deleted line
# \  No newline case
RE_HUNK_BODY_LINE = re.compile(
    r'^(?P<line_type>[- \+\\])(?P<value>.*)', re.DOTALL)
# Variant matching a line that is only a newline sequence (optionally
# preceded by a type marker); checked before RE_HUNK_BODY_LINE by the parser.
RE_HUNK_EMPTY_BODY_LINE = re.compile(
    r'^(?P<line_type>[- \+\\]?)(?P<value>[\r\n]{1,2})', re.DOTALL)

# Marker emitted by diff tools when a file lacks a trailing newline
RE_NO_NEWLINE_MARKER = re.compile(r'^\\ No newline at end of file')

DEFAULT_ENCODING = 'UTF-8'

# Single-character prefixes identifying each kind of hunk body line
LINE_TYPE_ADDED = '+'
LINE_TYPE_REMOVED = '-'
LINE_TYPE_CONTEXT = ' '
LINE_TYPE_EMPTY = ''
LINE_TYPE_NO_NEWLINE = '\\'
LINE_VALUE_NO_NEWLINE = ' No newline at end of file'
|
||||
31
buildkit/_lib/unidiff/errors.py
Normal file
31
buildkit/_lib/unidiff/errors.py
Normal file
@@ -0,0 +1,31 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# The MIT License (MIT)
|
||||
# Copyright (c) 2014-2017 Matias Bordese
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
|
||||
# OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
|
||||
"""Errors and exceptions raised by the package."""
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
|
||||
# Single exception type raised by this package for any malformed diff input.
class UnidiffParseError(Exception):
    """Exception when parsing the unified diff data."""
|
||||
463
buildkit/_lib/unidiff/patch.py
Normal file
463
buildkit/_lib/unidiff/patch.py
Normal file
@@ -0,0 +1,463 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# The MIT License (MIT)
|
||||
# Copyright (c) 2014-2017 Matias Bordese
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
|
||||
# OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
|
||||
"""Classes used by the unified diff parser to keep the diff data."""
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import codecs
|
||||
import sys
|
||||
|
||||
from unidiff.constants import (
|
||||
DEFAULT_ENCODING,
|
||||
LINE_TYPE_ADDED,
|
||||
LINE_TYPE_CONTEXT,
|
||||
LINE_TYPE_EMPTY,
|
||||
LINE_TYPE_REMOVED,
|
||||
LINE_TYPE_NO_NEWLINE,
|
||||
LINE_VALUE_NO_NEWLINE,
|
||||
RE_HUNK_BODY_LINE,
|
||||
RE_HUNK_EMPTY_BODY_LINE,
|
||||
RE_HUNK_HEADER,
|
||||
RE_SOURCE_FILENAME,
|
||||
RE_TARGET_FILENAME,
|
||||
RE_NO_NEWLINE_MARKER,
|
||||
)
|
||||
from unidiff.errors import UnidiffParseError
|
||||
|
||||
|
||||
# Python 2/3 compatibility shim: all names below are used unconditionally by
# the rest of this module, so both branches must define the same set.
PY2 = sys.version_info[0] == 2

if PY2:
    from StringIO import StringIO
    open_file = codecs.open
    # On PY2, produce encoded byte strings for __repr__/__str__ output
    make_str = lambda x: x.encode(DEFAULT_ENCODING)

    def implements_to_string(cls):
        # Class decorator: move the unicode-producing __str__ to __unicode__
        # and make __str__ return encoded bytes, per the PY2 convention.
        cls.__unicode__ = cls.__str__
        cls.__str__ = lambda x: x.__unicode__().encode(DEFAULT_ENCODING)
        return cls
else:
    from io import StringIO
    open_file = open
    make_str = str
    # No-op on PY3: str already returns text
    implements_to_string = lambda x: x
    # Alias the PY2-only builtins so shared code can reference them
    unicode = str
    basestring = str
|
||||
|
||||
|
||||
@implements_to_string
class Line(object):
    """One line of a hunk body: context, added, removed, or newline marker."""

    def __init__(self, value, line_type,
                 source_line_no=None, target_line_no=None, diff_line_no=None):
        super(Line, self).__init__()
        self.value = value
        self.line_type = line_type
        # Positions are optional; the parser fills them in as it walks a hunk.
        self.source_line_no = source_line_no
        self.target_line_no = target_line_no
        self.diff_line_no = diff_line_no

    def __repr__(self):
        return make_str("<Line: %s%s>") % (self.line_type, self.value)

    def __str__(self):
        return "%s%s" % (self.line_type, self.value)

    def __eq__(self, other):
        # Compare the full identity of both lines, positions included.
        mine = (self.source_line_no, self.target_line_no, self.diff_line_no,
                self.line_type, self.value)
        theirs = (other.source_line_no, other.target_line_no,
                  other.diff_line_no, other.line_type, other.value)
        return mine == theirs

    @property
    def is_added(self):
        """True when this line was introduced by the patch."""
        return self.line_type == LINE_TYPE_ADDED

    @property
    def is_removed(self):
        """True when this line was deleted by the patch."""
        return self.line_type == LINE_TYPE_REMOVED

    @property
    def is_context(self):
        """True when this line is unchanged context."""
        return self.line_type == LINE_TYPE_CONTEXT
|
||||
|
||||
@implements_to_string
class PatchInfo(list):
    """Lines with extended patch info.

    Format of this info is not documented and it very much depends on
    patch producer.

    """

    def __repr__(self):
        # Use the first stored line (stripped) as a short identifier
        value = "<PatchInfo: %s>" % self[0].strip()
        return make_str(value)

    def __str__(self):
        # Reproduce the info block verbatim; lines keep their own newlines
        return ''.join(unicode(line) for line in self)
|
||||
|
||||
|
||||
@implements_to_string
class Hunk(list):
    """Each of the modified blocks of a file.

    A Hunk is a list of Line objects plus the header bookkeeping
    (start offsets, lengths, optional section header).
    """

    def __init__(self, src_start=0, src_len=0, tgt_start=0, tgt_len=0,
                 section_header=''):
        # A length omitted from the hunk header (regex group is None)
        # means a length of 1 per the unified diff format.
        if src_len is None:
            src_len = 1
        if tgt_len is None:
            tgt_len = 1
        self.added = 0  # number of added lines
        self.removed = 0  # number of removed lines
        self.source = []  # rendered lines belonging to the source file
        self.source_start = int(src_start)
        self.source_length = int(src_len)
        self.target = []  # rendered lines belonging to the target file
        self.target_start = int(tgt_start)
        self.target_length = int(tgt_len)
        self.section_header = section_header

    def __repr__(self):
        value = "<Hunk: @@ %d,%d %d,%d @@ %s>" % (self.source_start,
                                                  self.source_length,
                                                  self.target_start,
                                                  self.target_length,
                                                  self.section_header)
        return make_str(value)

    def __str__(self):
        # section header is optional and thus we output it only if it's present
        head = "@@ -%d,%d +%d,%d @@%s\n" % (
            self.source_start, self.source_length,
            self.target_start, self.target_length,
            ' ' + self.section_header if self.section_header else '')
        content = ''.join(unicode(line) for line in self)
        return head + content

    def append(self, line):
        """Append the line to hunk, and keep track of source/target lines."""
        super(Hunk, self).append(line)
        s = str(line)
        if line.is_added:
            self.added += 1
            self.target.append(s)
        elif line.is_removed:
            self.removed += 1
            self.source.append(s)
        elif line.is_context:
            # Context lines belong to both sides of the diff
            self.target.append(s)
            self.source.append(s)

    def is_valid(self):
        """Check hunk header data matches entered lines info."""
        return (len(self.source) == self.source_length and
                len(self.target) == self.target_length)

    def source_lines(self):
        """Hunk lines from source file (generator)."""
        return (l for l in self if l.is_context or l.is_removed)

    def target_lines(self):
        """Hunk lines from target file (generator)."""
        return (l for l in self if l.is_context or l.is_added)
|
||||
|
||||
|
||||
class PatchedFile(list):
    """Patch updated file, it is a list of Hunks."""

    def __init__(self, patch_info=None, source='', target='',
                 source_timestamp=None, target_timestamp=None):
        super(PatchedFile, self).__init__()
        self.patch_info = patch_info
        self.source_file = source
        self.source_timestamp = source_timestamp
        self.target_file = target
        self.target_timestamp = target_timestamp

    def __repr__(self):
        return make_str("<PatchedFile: %s>") % make_str(self.path)

    def __str__(self):
        # patch info is optional
        info = '' if self.patch_info is None else str(self.patch_info)
        source = "--- %s%s\n" % (
            self.source_file,
            '\t' + self.source_timestamp if self.source_timestamp else '')
        target = "+++ %s%s\n" % (
            self.target_file,
            '\t' + self.target_timestamp if self.target_timestamp else '')
        hunks = ''.join(unicode(hunk) for hunk in self)
        return info + source + target + hunks

    def _parse_hunk(self, header, diff, encoding):
        """Parse hunk details.

        ``diff`` is the shared enumerate iterator from PatchSet._parse, so
        consuming lines here advances the caller's position as well.
        Raises UnidiffParseError on malformed or wrongly-sized hunks.
        """
        header_info = RE_HUNK_HEADER.match(header)
        hunk_info = header_info.groups()
        hunk = Hunk(*hunk_info)

        source_line_no = hunk.source_start
        target_line_no = hunk.target_start
        # Line numbers just past the last line each side should reach
        expected_source_end = source_line_no + hunk.source_length
        expected_target_end = target_line_no + hunk.target_length

        for diff_line_no, line in diff:
            if encoding is not None:
                line = line.decode(encoding)

            # Try the "only a newline" form first so a bare "\n" is
            # classified as an (empty) context line, not rejected.
            valid_line = RE_HUNK_EMPTY_BODY_LINE.match(line)
            if not valid_line:
                valid_line = RE_HUNK_BODY_LINE.match(line)

            if not valid_line:
                raise UnidiffParseError('Hunk diff line expected: %s' % line)

            line_type = valid_line.group('line_type')
            if line_type == LINE_TYPE_EMPTY:
                line_type = LINE_TYPE_CONTEXT
            value = valid_line.group('value')
            original_line = Line(value, line_type=line_type)
            # Assign positions and advance the per-side counters
            if line_type == LINE_TYPE_ADDED:
                original_line.target_line_no = target_line_no
                target_line_no += 1
            elif line_type == LINE_TYPE_REMOVED:
                original_line.source_line_no = source_line_no
                source_line_no += 1
            elif line_type == LINE_TYPE_CONTEXT:
                original_line.target_line_no = target_line_no
                target_line_no += 1
                original_line.source_line_no = source_line_no
                source_line_no += 1
            elif line_type == LINE_TYPE_NO_NEWLINE:
                pass
            else:
                original_line = None

            # stop parsing if we got past expected number of lines
            if (source_line_no > expected_source_end or
                    target_line_no > expected_target_end):
                raise UnidiffParseError('Hunk is longer than expected')

            if original_line:
                original_line.diff_line_no = diff_line_no
                hunk.append(original_line)

            # if hunk source/target lengths are ok, hunk is complete
            if (source_line_no == expected_source_end and
                    target_line_no == expected_target_end):
                break

        # report an error if we haven't got expected number of lines
        if (source_line_no < expected_source_end or
                target_line_no < expected_target_end):
            raise UnidiffParseError('Hunk is shorter than expected')

        self.append(hunk)

    def _add_no_newline_marker_to_last_hunk(self):
        # Attach the "\ No newline at end of file" marker to the most
        # recently parsed hunk; a marker before any hunk is malformed input.
        if not self:
            raise UnidiffParseError(
                'Unexpected marker:' + LINE_VALUE_NO_NEWLINE)
        last_hunk = self[-1]
        last_hunk.append(
            Line(LINE_VALUE_NO_NEWLINE + '\n', line_type=LINE_TYPE_NO_NEWLINE))

    def _append_trailing_empty_line(self):
        # Record a blank line that follows a hunk so output round-trips
        if not self:
            raise UnidiffParseError('Unexpected trailing newline character')
        last_hunk = self[-1]
        last_hunk.append(Line('\n', line_type=LINE_TYPE_EMPTY))

    @property
    def path(self):
        """Return the file path abstracted from VCS."""
        # Strip git-style "a/"/"b/" prefixes; for add/remove diffs one
        # side is /dev/null, so use the other side's path.
        if (self.source_file.startswith('a/') and
                self.target_file.startswith('b/')):
            filepath = self.source_file[2:]
        elif (self.source_file.startswith('a/') and
                self.target_file == '/dev/null'):
            filepath = self.source_file[2:]
        elif (self.target_file.startswith('b/') and
                self.source_file == '/dev/null'):
            filepath = self.target_file[2:]
        else:
            filepath = self.source_file
        return filepath

    @property
    def added(self):
        """Return the file total added lines."""
        return sum([hunk.added for hunk in self])

    @property
    def removed(self):
        """Return the file total removed lines."""
        return sum([hunk.removed for hunk in self])

    @property
    def is_added_file(self):
        """Return True if this patch adds the file."""
        # A file addition is exactly one hunk whose source side is empty
        return (len(self) == 1 and self[0].source_start == 0 and
                self[0].source_length == 0)

    @property
    def is_removed_file(self):
        """Return True if this patch removes the file."""
        return (len(self) == 1 and self[0].target_start == 0 and
                self[0].target_length == 0)

    @property
    def is_modified_file(self):
        """Return True if this patch modifies the file."""
        return not (self.is_added_file or self.is_removed_file)
|
||||
|
||||
|
||||
@implements_to_string
class PatchSet(list):
    """A list of PatchedFiles."""

    def __init__(self, f, encoding=None):
        """Parse ``f`` (a string or an iterable of lines) into PatchedFiles.

        If ``encoding`` is given, lines are assumed to be bytes and are
        decoded with it; otherwise they are assumed to be text already.
        """
        super(PatchSet, self).__init__()

        # convert string inputs to StringIO objects
        if isinstance(f, basestring):
            f = self._convert_string(f, encoding)

        # make sure we pass an iterator object to parse
        data = iter(f)
        # if encoding is None, assume we are reading unicode data
        self._parse(data, encoding=encoding)

    def __repr__(self):
        return make_str('<PatchSet: %s>') % super(PatchSet, self).__repr__()

    def __str__(self):
        return ''.join(unicode(patched_file) for patched_file in self)

    def _parse(self, diff, encoding):
        """State-machine over diff lines; builds PatchedFile entries in place."""
        current_file = None
        patch_info = None

        # Share one enumerate iterator with _parse_hunk so hunk parsing
        # consumes lines from the same stream (1-based diff line numbers).
        diff = enumerate(diff, 1)
        for unused_diff_line_no, line in diff:
            if encoding is not None:
                line = line.decode(encoding)

            # check for source file header
            is_source_filename = RE_SOURCE_FILENAME.match(line)
            if is_source_filename:
                source_file = is_source_filename.group('filename')
                source_timestamp = is_source_filename.group('timestamp')
                # reset current file
                current_file = None
                continue

            # check for target file header
            is_target_filename = RE_TARGET_FILENAME.match(line)
            if is_target_filename:
                if current_file is not None:
                    raise UnidiffParseError('Target without source: %s' % line)
                target_file = is_target_filename.group('filename')
                target_timestamp = is_target_filename.group('timestamp')
                # add current file to PatchSet
                current_file = PatchedFile(
                    patch_info, source_file, target_file,
                    source_timestamp, target_timestamp)
                self.append(current_file)
                patch_info = None
                continue

            # check for hunk header
            is_hunk_header = RE_HUNK_HEADER.match(line)
            if is_hunk_header:
                if current_file is None:
                    raise UnidiffParseError('Unexpected hunk found: %s' % line)
                current_file._parse_hunk(line, diff, encoding)
                continue

            # check for no newline marker
            is_no_newline = RE_NO_NEWLINE_MARKER.match(line)
            if is_no_newline:
                if current_file is None:
                    raise UnidiffParseError('Unexpected marker: %s' % line)
                current_file._add_no_newline_marker_to_last_hunk()
                continue

            # sometimes hunks can be followed by empty lines
            if line == '\n' and current_file is not None:
                current_file._append_trailing_empty_line()
                continue

            # if nothing has matched above then this line is a patch info
            if patch_info is None:
                current_file = None
                patch_info = PatchInfo()
            patch_info.append(line)

    @classmethod
    def from_filename(cls, filename, encoding=DEFAULT_ENCODING, errors=None):
        """Return a PatchSet instance given a diff filename."""
        with open_file(filename, 'r', encoding=encoding, errors=errors) as f:
            instance = cls(f)
        return instance

    @staticmethod
    def _convert_string(data, encoding=None, errors='strict'):
        # Wrap plain text (decoding bytes first if needed) in a StringIO
        # so the parser always works with a line iterator.
        if encoding is not None:
            # if encoding is given, assume bytes and decode
            data = unicode(data, encoding=encoding, errors=errors)
        return StringIO(data)

    @classmethod
    def from_string(cls, data, encoding=None, errors='strict'):
        """Return a PatchSet instance given a diff string."""
        return cls(cls._convert_string(data, encoding, errors))

    @property
    def added_files(self):
        """Return patch added files as a list."""
        return [f for f in self if f.is_added_file]

    @property
    def removed_files(self):
        """Return patch removed files as a list."""
        return [f for f in self if f.is_removed_file]

    @property
    def modified_files(self):
        """Return patch modified files as a list."""
        return [f for f in self if f.is_modified_file]

    @property
    def added(self):
        """Return the patch total added lines."""
        return sum([f.added for f in self])

    @property
    def removed(self):
        """Return the patch total removed lines."""
        return sum([f.removed for f in self])
|
||||
16
buildkit/cli.py
Normal file
16
buildkit/cli.py
Normal file
@@ -0,0 +1,16 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: UTF-8 -*-
|
||||
|
||||
# Copyright (c) 2017 The ungoogled-chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""CLI implementation"""
|
||||
|
||||
import pathlib
|
||||
|
||||
from . import common
|
||||
|
||||
def main():
    """CLI entry point; not yet implemented (WIP)."""
|
||||
378
buildkit/common.py
Normal file
378
buildkit/common.py
Normal file
@@ -0,0 +1,378 @@
|
||||
# -*- coding: UTF-8 -*-
|
||||
|
||||
# Copyright (c) 2017 The ungoogled-chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Common code"""
|
||||
|
||||
import os
|
||||
import pathlib
|
||||
import abc
|
||||
import configparser
|
||||
import collections
|
||||
import logging
|
||||
import itertools
|
||||
|
||||
# Constants
|
||||
|
||||
# Names of directories inside the resources tree
CONFIGS_DIR = "configs"
PACKAGING_DIR = "packaging"
PATCHES_DIR = "patches"

# File names of the individual configuration pieces within a config bundle
CLEANING_LIST = "cleaning_list"
DOMAIN_REGEX_LIST = "domain_regex_list"
DOMAIN_SUBSTITUTION_LIST = "domain_substitution_list"
EXTRA_DEPS_INI = "extra_deps.ini"
GN_FLAGS = "gn_flags"
METADATA_INI = "metadata.ini"
PATCH_ORDER = "patch_order"
VERSION_INI = "version.ini"

# Template for environment-variable overrides. NOTE(review): still uses the
# old "UTILIKIT_" prefix even though this module belongs to buildkit —
# presumably kept for compatibility; confirm whether it should be renamed.
_ENV_FORMAT = "UTILIKIT_{}"
|
||||
|
||||
# Module-wide methods
|
||||
|
||||
def get_logger(name=__package__, level=logging.DEBUG):
    '''Gets the named logger

    Creates and attaches a console handler the first time a given logger
    is requested; subsequent calls return the same configured logger.
    '''

    logger = logging.getLogger(name)
    logger.setLevel(level)

    # Only configure once: hasHandlers() is True after the first call
    # (or if an ancestor logger already has a handler)
    if not logger.hasHandlers():
        console_handler = logging.StreamHandler()
        console_handler.setLevel(level)

        formatter = logging.Formatter("%(asctime)s - %(levelname)s: %(message)s")
        console_handler.setFormatter(formatter)

        logger.addHandler(console_handler)
        if name is None:
            logger.info("Initialized root logger")
        else:
            logger.info("Initialized logger '%s'", name)
    return logger
|
||||
|
||||
def get_resources_dir():
    """
    Returns the path to the root of the resources directory

    The UTILIKIT_RESOURCES environment variable takes precedence;
    otherwise the "resources" directory next to this package is used.

    Raises NotADirectoryError if the directory is not found.
    """
    configured = os.environ.get(_ENV_FORMAT.format("RESOURCES"))
    if configured:
        resources_path = pathlib.Path(configured)
    else:
        # Assume that this resides in the repository
        resources_path = pathlib.Path(__file__).absolute().parent.parent / "resources"
    if resources_path.is_dir():
        return resources_path
    raise NotADirectoryError(str(resources_path))
|
||||
|
||||
# Classes
|
||||
|
||||
class _ConfigABC(abc.ABC):
    """Abstract base class for assemblable configuration files or directories

    Subclasses implement _parse() and write(); this class tracks the chain
    of ancestor config paths that are merged together during parsing.
    """

    def __init__(self, path, name=None):
        # path: pathlib.Path of this config; name defaults to the file name
        self.path = path
        if name:
            self.name = name
        else:
            self.name = path.name
        # List of paths to inherit from ordered by decreasing distance from left to right
        self._family_order = collections.deque()
        self._family_order.appendleft(path)

    def add_older_ancestor(self, path):
        """
        Associates a config as the oldest known ancestor if it is not already known.

        Returns True if the ancestor was added,
        False if the ancestor is already known.

        Raises FileNotFoundError if path does not exist
        """
        if path in self._family_order:
            return False
        if not path.exists():
            get_logger().error('Unable to add ancestor for "%s"', self.name)
            raise FileNotFoundError(str(path))
        # Oldest ancestor goes leftmost so later entries override it on merge
        self._family_order.appendleft(path)
        return True

    @abc.abstractmethod
    def _parse(self):
        """Reads and returns the parsed consolidated config"""
        pass

    def _get_config(self):
        """Returns the parsed consolidated config

        Raises TypeError if the subclass parser returned None.
        """
        parsed = self._parse()
        if parsed is None:
            # Assuming no parser intentionally returns None
            get_logger().error('Got None from parser of "%s"', self.name)
            raise TypeError('Got None from parser')
        return parsed

    @abc.abstractmethod
    def write(self, path):
        """Writes the consolidated config to path"""
        pass
|
||||
|
||||
class _CacheConfigMixin: #pylint: disable=too-few-public-methods
|
||||
"""Mixin for _ConfigABC to cache parse output"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
self._read_cache = None
|
||||
|
||||
def _get_config(self):
|
||||
"""Returns the cached consolidated config"""
|
||||
if self._read_cache:
|
||||
return self._read_cache
|
||||
self._read_cache = self._parse()
|
||||
if self._read_cache is None:
|
||||
# Assuming no parser intentionally returns None
|
||||
get_logger().error('Got None from parser of "%s"', self.name)
|
||||
raise TypeError('Got None from parser')
|
||||
return self._read_cache
|
||||
|
||||
class IniConfigFile(_CacheConfigMixin, _ConfigABC):
    """Represents an INI file"""

    def __getitem__(self, key):
        """
        Returns a section from the INI

        Raises KeyError if the section does not exist
        """
        return self._get_config()[key]

    def __contains__(self, item):
        """
        Returns True if item is a name of a section; False otherwise.
        """
        return item in self._get_config()

    def __iter__(self):
        """Returns an iterator over the section names"""
        # configparser always exposes a 'DEFAULT' section; hide it here
        return filter(lambda x: x != 'DEFAULT', iter(self._get_config()))

    def _parse(self):
        """Returns a parsed INI file

        Merges the INI files from oldest to newest ancestor.
        """
        parsed_ini = None
        for ini_path in self._family_order:
            config = configparser.ConfigParser()
            config.read(str(ini_path))
            if not parsed_ini:
                parsed_ini = config
                continue
            # NOTE(review): ConfigParser.update() appears to replace whole
            # sections from the newer file rather than merging individual
            # options — confirm this is the intended inheritance semantic.
            parsed_ini.update(config)
        return parsed_ini

    def write(self, path):
        """Writes the consolidated INI (minus 'DEFAULT') to path"""
        config = configparser.ConfigParser()
        for section in self:
            config.add_section(section)
            for option, value in self[section].items():
                config.set(section, option, value)
        with path.open("w") as output_file:
            config.write(output_file)
|
||||
|
||||
class ListConfigFile(_ConfigABC):
    """Represents a simple newline-delimited list"""
    # NOTE: no _CacheConfigMixin here — _parse returns a generator, so each
    # _get_config() call produces a fresh, unconsumed iterator.

    def __contains__(self, item):
        """Returns True if item is in the list; False otherwise"""
        for entry in self._get_config():
            if entry == item:
                return True
        return False

    def _line_generator(self):
        # Yields non-empty lines from every family member, in family order.
        for list_path in self._family_order:
            with list_path.open() as list_file:
                for line in list_file.read().splitlines():
                    if line:
                        yield line

    def __iter__(self):
        """Returns an iterator over the list items"""
        return iter(self._get_config())

    def _parse(self):
        """Returns a generator over the list's values"""
        return self._line_generator()

    def write(self, path):
        """Writes the list, one entry per line, to the given path"""
        with path.open('w') as output_file:
            for entry in self._get_config():
                output_file.write('%s\n' % entry)
|
||||
|
||||
class MappingConfigFile(_CacheConfigMixin, _ConfigABC):
    """Represents a simple string-keyed and string-valued dictionary"""

    def __contains__(self, item):
        """Returns True if item is a key in the mapping; False otherwise"""
        return item in self._get_config()

    def __getitem__(self, key):
        """
        Returns the value associated with the key

        Raises KeyError if the key is not in the mapping
        """
        return self._get_config()[key]

    def __iter__(self):
        """Returns an iterator over the keys"""
        return iter(self._get_config())

    def _parse(self):
        """
        Return a dictionary of the mapping of keys and values

        Later family members override keys from earlier ones.
        """
        new_dict = dict()
        for mapping_path in self._family_order:
            with mapping_path.open() as mapping_file:
                for line in filter(len, mapping_file.read().splitlines()):
                    # Split on the first '=' only, so values may themselves
                    # contain '=' (a bare split('=') raised ValueError here).
                    key, value = line.split('=', 1)
                    new_dict[key] = value
        return new_dict

    def write(self, path):
        """Writes the mapping as key=value lines to the given path"""
        with path.open('w') as output_file:
            for item in self._get_config().items():
                output_file.write('%s=%s\n' % item)
|
||||
|
||||
class ConfigSet(_CacheConfigMixin, _ConfigABC):
    """Represents a configuration type"""

    @classmethod
    def new_from_resources(cls, name):
        """
        Return a new ConfigSet for a configuration directory in resources/configs

        Raises NotADirectoryError if resources/ could not be found.
        """
        configs_dir = get_resources_dir() / CONFIGS_DIR
        new_config_dir = cls(configs_dir / name)
        # Breadth-first walk of the parent graph declared in metadata.ini files
        pending_explore = collections.deque((name,))
        while pending_explore:
            config_name = pending_explore.pop()
            metadata = MetadataIni(configs_dir / config_name / METADATA_INI)
            for parent_name in metadata.parents:
                # add_older_ancestor is truthy only for newly-added ancestors,
                # which keeps shared parents from being explored twice.
                if new_config_dir.add_older_ancestor(configs_dir / parent_name):
                    pending_explore.appendleft(parent_name)
        return new_config_dir

    def __getitem__(self, key):
        """
        Returns the config file object for the given configuration file name

        Raises KeyError if the file is not found.
        Raises ValueError if the configuration directory is malformed.
        """
        return self._get_config()[key]

    def __contains__(self, item):
        """
        Checks if a configuration file name exists

        Raises ValueError if the configuration directory is malformed.
        """
        return item in self._get_config()

    def _parse(self):
        """
        Returns a dictionary of file names to their representing objects

        Raises ValueError if a configuration directory contains unknown files.
        """
        file_dict = dict()
        for directory in self._family_order:
            for config_path in directory.iterdir():
                if config_path.name in file_dict:
                    # Already seen in a newer family member; this older copy
                    # becomes an ancestor of the existing config object.
                    file_dict[config_path.name].add_older_ancestor(config_path)
                    continue
                try:
                    config_class = _FILE_DEF[config_path.name]
                except KeyError:
                    logger = get_logger()
                    logger.error('Unknown file type at "%s"', config_path)
                    logger.error('Config directory "%s" has unknown files', directory.name)
                    raise ValueError(
                        'Unknown files in configuration directory: {}'.format(directory))
                if config_class:
                    file_dict[config_path.name] = config_class(config_path)
        return file_dict

    def write(self, path):
        """
        Writes the consolidated configuration directory to the specified path.

        Raises FileExistsError if the directory already exists.
        Raises ValueError if the configuration is malformed.
        """
        path.mkdir()
        for config_file in self._get_config().values():
            config_file.write(path / config_file.name)
|
||||
|
||||
class MetadataIni(IniConfigFile):
    """Represents metadata.ini files"""

    @property
    def parents(self):
        """
        Returns an iterable of the parents defined in the metadata.
        Parents are ordered in increasing precedence.

        Returns an empty tuple when no parents are declared.
        Raises KeyError if the required [config] section is missing.
        """
        try:
            config_section = self['config']
        except KeyError:
            # Previously this logging sat in an unreachable except clause
            # (membership was checked before access); log the malformed
            # file before propagating so the failure is diagnosable.
            get_logger().error('Malformed configuration metadata file: %s', self.path)
            raise
        if 'parents' in config_section:
            return [x.strip() for x in config_section['parents'].split(',')]
        return tuple()
|
||||
|
||||
class DomainRegexList(ListConfigFile):
    """Representation of a domain_regex_list file"""
    # TODO: domain-regex-specific behavior is not yet implemented; for now
    # this inherits plain newline-delimited list handling from ListConfigFile.
|
||||
class ExtraDepsIni(IniConfigFile):
    """Representation of an extra_deps.ini file"""

    _VERSION = 'version'
    # All keys permitted in a section; 'version' is also substituted into
    # the other values via str.format.
    _extra_deps_properties = (_VERSION, 'url', 'download_name', 'strip_leading_dirs')
    _extra_deps_tuple = collections.namedtuple(
        'ExtraDepsProperties', _extra_deps_properties)

    @staticmethod
    def _process_key(key, section_dict, version):
        """Returns the version-substituted value for key, or None if absent."""
        try:
            return section_dict[key].format(version=version)
        except KeyError:
            return None

    def _parse(self):
        """Parses the INI and validates that only known keys are present."""
        parsed = super()._parse()
        for section in parsed:
            for key in parsed[section]:
                if key in self._extra_deps_properties:
                    continue
                get_logger().error('Malformed extra_deps.ini file at: %s', self.path)
                raise NameError('Unknown key "{}" in section "{}"'.format(key, section))
        return parsed

    def __getitem__(self, section):
        """Returns a named tuple with values already pre-processed"""
        config = self._get_config()
        section_dict = config[section]
        version = section_dict[self._VERSION]
        processed_values = (
            self._process_key(key, section_dict, version)
            for key in self._extra_deps_properties)
        return self._extra_deps_tuple(*processed_values)
|
||||
|
||||
# Maps every recognized configuration file name to the _ConfigABC subclass
# that represents it. A value of None marks a file that is recognized but
# handled specially (ConfigSet._parse skips instantiating it).
_FILE_DEF = {
    METADATA_INI: None, # This file has special handling, so ignore it
    CLEANING_LIST: ListConfigFile,
    DOMAIN_REGEX_LIST: DomainRegexList,
    DOMAIN_SUBSTITUTION_LIST: ListConfigFile,
    EXTRA_DEPS_INI: ExtraDepsIni,
    GN_FLAGS: MappingConfigFile,
    PATCH_ORDER: ListConfigFile,
}
|
||||
11
buildkit/config_generation.py
Normal file
11
buildkit/config_generation.py
Normal file
@@ -0,0 +1,11 @@
|
||||
# -*- coding: UTF-8 -*-
|
||||
|
||||
# Copyright (c) 2017 The ungoogled-chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""
|
||||
Build configuration generation implementation
|
||||
"""
|
||||
|
||||
# TODO
|
||||
@@ -8,6 +8,5 @@ if __name__ == "__main__":
|
||||
from pylint import epylint as lint
|
||||
import pathlib
|
||||
|
||||
lint.lint(filename=str(pathlib.Path(__file__).parent.parent / "utilikit"),
|
||||
options=["--disable=logging-format-interpolation",
|
||||
"--disable=locally-disabled"])
|
||||
lint.lint(filename=str(pathlib.Path(__file__).parent.parent / "buildkit"),
|
||||
options=["--disable=locally-disabled"])
|
||||
|
||||
Reference in New Issue
Block a user