Просмотр исходного кода

Draft for new cmake generator
f6ec7e0899aeffe4c6164718e3f54e93f18f4bd1

dimdim11 10 месяцев назад
Родитель
Commit
008968f9d8

+ 200 - 0
build/export_generators/cmake/build/scripts/create_recursive_library_for_cmake.py

@@ -0,0 +1,200 @@
+# Custom script is necessary because CMake does not yet support creating static libraries combined with dependencies
+# https://gitlab.kitware.com/cmake/cmake/-/issues/22975
+#
+# This script is intended to be used set as a CXX_LINKER_LAUNCHER property for recursive library targets.
+# It parses the linking command and transforms it to archiving commands combining static libraries from dependencies.
+
+import argparse
+import os
+import re
+import shlex
+import subprocess
+import sys
+import tempfile
+
+
class Opts(object):
    """Parsed representation of the original linking command line.

    Splits the linker arguments into:
      * global_libs_and_objects_input -- object files and whole-archive libraries,
      * non_global_libs_input -- ordinary static libraries,
      * preserved_options -- linker options to forward to the archiver (MSVC only),
      * output -- the requested output path.
    """

    def __init__(self, args):
        argparser = argparse.ArgumentParser(allow_abbrev=False)
        argparser.add_argument('--project-binary-dir', required=True)
        argparser.add_argument('--cmake-ar', required=True)
        argparser.add_argument('--cmake-ranlib', required=True)
        argparser.add_argument('--cmake-host-system-name', required=True)
        argparser.add_argument('--cmake-cxx-standard-libraries')
        argparser.add_argument('--global-part-suffix', required=True)
        self.parsed_args, other_args = argparser.parse_known_args(args=args)

        if len(other_args) < 2:
            # must contain at least '--linking-cmdline' and original linking tool name
            raise Exception('not enough arguments')
        if other_args[0] != '--linking-cmdline':
            raise Exception("expected '--linking-cmdline' arg, got {}".format(other_args[0]))

        # The linker executable name tells us whether MSVC-style options follow.
        self.is_msvc_compatible_linker = other_args[1].endswith('\\link.exe') or other_args[1].endswith('\\lld-link.exe')

        is_host_system_windows = self.parsed_args.cmake_host_system_name == 'Windows'
        # Implicitly-linked C++ standard libraries must not be merged into the archive.
        std_libraries_to_exclude_from_input = (
            set(self.parsed_args.cmake_cxx_standard_libraries.split())
            if self.parsed_args.cmake_cxx_standard_libraries is not None
            else set()
        )
        # MSVC link options (lower-cased, without leading '/' or '-') that lib.exe also accepts.
        msvc_preserved_option_prefixes = [
            'machine:',
            'nodefaultlib',
            'nologo',
        ]

        self.preserved_options = []

        # these variables can contain paths absolute or relative to project_binary_dir
        self.global_libs_and_objects_input = []
        self.non_global_libs_input = []
        self.output = None

        def is_external_library(path):
            """
            Check whether this library has been built in this CMake project or came from Conan-provided dependencies
            (these use absolute paths).
            If it is a library that is added from some other path (like CUDA) return True
            """
            return not (os.path.exists(path) or os.path.exists(os.path.join(self.parsed_args.project_binary_dir, path)))

        def process_input(args):
            # Walk the linker arguments, sorting inputs into global/non-global buckets.
            i = 0
            is_in_whole_archive = False

            while i < len(args):
                arg = args[i]
                if is_host_system_windows and ((arg[0] == '/') or (arg[0] == '-')):
                    arg_wo_specifier_lower = arg[1:].lower()
                    if arg_wo_specifier_lower.startswith('out:'):
                        self.output = arg[len('/out:') :]
                    elif arg_wo_specifier_lower.startswith('wholearchive:'):
                        lib_path = arg[len('/wholearchive:') :]
                        if not is_external_library(lib_path):
                            self.global_libs_and_objects_input.append(lib_path)
                    else:
                        for preserved_option_prefix in msvc_preserved_option_prefixes:
                            if arg_wo_specifier_lower.startswith(preserved_option_prefix):
                                self.preserved_options.append(arg)
                                break
                    # other flags are non-linking related and just ignored
                elif arg[0] == '-':
                    if arg == '-o':
                        if (i + 1) >= len(args):
                            raise Exception('-o flag without an argument')
                        self.output = args[i + 1]
                        i += 1
                    elif arg == '-Wl,--whole-archive':
                        is_in_whole_archive = True
                    elif arg == '-Wl,--no-whole-archive':
                        is_in_whole_archive = False
                    elif arg.startswith('-Wl,-force_load,'):
                        # Darwin analogue of --whole-archive for a single library.
                        lib_path = arg[len('-Wl,-force_load,') :]
                        if not is_external_library(lib_path):
                            self.global_libs_and_objects_input.append(lib_path)
                    elif arg == '-isysroot':
                        # skip the flag's value as well
                        i += 1
                    # other flags are non-linking related and just ignored
                elif arg[0] == '@':
                    # response file with args
                    with open(arg[1:]) as response_file:
                        parsed_args = shlex.shlex(response_file, posix=False, punctuation_chars=False)
                        parsed_args.whitespace_split = True
                        args_in_response_file = list(arg.strip('"') for arg in parsed_args)
                        process_input(args_in_response_file)
                elif not is_external_library(arg):
                    if is_in_whole_archive or arg.endswith('.o') or arg.endswith('.obj'):
                        self.global_libs_and_objects_input.append(arg)
                    elif arg not in std_libraries_to_exclude_from_input:
                        self.non_global_libs_input.append(arg)
                i += 1

        process_input(other_args[2:])

        if self.output is None:
            raise Exception("No output specified")

        if (len(self.global_libs_and_objects_input) == 0) and (len(self.non_global_libs_input) == 0):
            raise Exception("List of input objects and libraries is empty")
+
+
class FilesCombiner(object):
    """Builds and runs archiving commands that merge the collected objects/libraries
    into a single static library via the helper script link_lib.py."""

    def __init__(self, opts):
        self.opts = opts

        archiver_tool_path = opts.parsed_args.cmake_ar
        if sys.platform.startswith('darwin'):
            # force LIBTOOL even if CMAKE_AR is defined because 'ar' under Darwin does not contain the necessary options
            arch_type = 'LIBTOOL'
            archiver_tool_path = 'libtool'
        elif opts.is_msvc_compatible_linker:
            arch_type = 'LIB'
        elif re.match(r'^(|.*/)llvm\-ar(\-[\d])?', opts.parsed_args.cmake_ar):
            arch_type = 'LLVM_AR'
        elif re.match(r'^(|.*/)(gcc\-)?ar(\-[\d])?', opts.parsed_args.cmake_ar):
            arch_type = 'GNU_AR'
        else:
            raise Exception('Unsupported arch type for CMAKE_AR={}'.format(opts.parsed_args.cmake_ar))

        self.archiving_cmd_prefix = [
            sys.executable,
            os.path.join(os.path.dirname(os.path.abspath(__file__)), 'link_lib.py'),
            archiver_tool_path,
            arch_type,
            'gnu',  # llvm_ar_format, used only if arch_type == 'LLVM_AR'
            opts.parsed_args.project_binary_dir,
            'None',  # plugin. Unused for now
        ]
        # the remaining archiving cmd args are [output, .. input .. ]

    def do(self, output, input_list):
        """Archive every path in input_list into output; on MSVC toolchains the
        inputs are passed via a response file to dodge cmdline length limits."""
        input_file_path = None
        try:
            if self.opts.is_msvc_compatible_linker:
                # use response file for input (because of Windows cmdline length limitations)

                # can't use NamedTemporaryFile because of permissions issues on Windows
                input_file_fd, input_file_path = tempfile.mkstemp()
                try:
                    input_file = os.fdopen(input_file_fd, 'w')
                    # renamed loop variable: the original shadowed the builtin 'input'
                    for input_path in input_list:
                        if ' ' in input_path:
                            input_file.write('"{}" '.format(input_path))
                        else:
                            input_file.write('{} '.format(input_path))
                    input_file.flush()
                finally:
                    os.close(input_file_fd)
                input_args = ['@' + input_file_path]
            else:
                input_args = input_list

            cmd = self.archiving_cmd_prefix + [output] + self.opts.preserved_options + input_args
            subprocess.check_call(cmd)
        finally:
            if input_file_path is not None:
                os.remove(input_file_path)

        if not self.opts.is_msvc_compatible_linker:
            # lib.exe writes a ready archive; other archivers need ranlib to build the symbol index.
            subprocess.check_call([self.opts.parsed_args.cmake_ranlib, output])
+
+
if __name__ == "__main__":
    opts = Opts(sys.argv[1:])

    # The "global" (whole-archive) part goes into a sibling archive named
    # <output><global_part_suffix><ext>.
    prefix, ext = os.path.splitext(opts.output)
    globals_output = prefix + opts.parsed_args.global_part_suffix + ext

    # Drop stale outputs first so a failed archiving step cannot leave old artifacts.
    for stale in (globals_output, opts.output):
        if os.path.exists(stale):
            os.remove(stale)

    combiner = FilesCombiner(opts)

    if opts.global_libs_and_objects_input:
        combiner.do(globals_output, opts.global_libs_and_objects_input)

    if opts.non_global_libs_input:
        combiner.do(opts.output, opts.non_global_libs_input)

+ 132 - 0
build/export_generators/cmake/build/scripts/export_script_gen.py

@@ -0,0 +1,132 @@
+import argparse
+import collections
+import sys
+
+
def parse_export_file(src):
    """Yield one record per meaningful line of the platform-independent export file.

    Records are {'linux_version': v}, {'lang': l, 'sym': s} (two tokens), or
    {'lang': 'C', 'sym': s} (single token). Blank lines and any line containing
    '#' are skipped entirely.
    """
    for raw in src:
        stripped = raw.strip()
        if not stripped or '#' in stripped:
            continue
        tokens = stripped.split()
        if len(tokens) == 2:
            if tokens[0] == 'linux_version':
                yield {'linux_version': tokens[1]}
            else:
                yield {'lang': tokens[0], 'sym': tokens[1]}
        elif len(tokens) == 1:
            yield {'lang': 'C', 'sym': tokens[0]}
        else:
            raise Exception('unsupported exports line: "{}"'.format(stripped))
+
+
def to_c(sym):
    """Encode a C++ qualified name ('a::b' or 'a::*') into Itanium-mangled
    glob patterns, one per relevant mangling prefix."""
    prefixes = [  # demangle prefixes for c++ symbols
        '_ZN',  # namespace
        '_ZTIN',  # typeinfo for
        '_ZTSN',  # typeinfo name for
        '_ZTTN',  # VTT for
        '_ZTVN',  # vtable for
        '_ZNK',  # const methods
    ]
    parts = sym.split('::')
    encoded = ''
    for position, part in enumerate(parts):
        if part == '*':
            encoded += '*'
            # A wildcard is only supported as the trailing component.
            if position != len(parts) - 1:
                raise Exception('Unsupported format: ' + sym)
            break
        if '*' in part and len(part) > 1:
            raise Exception('Unsupported format, cannot guess length of symbol: ' + part)
        # Itanium mangling: each name component is <length><name>.
        encoded += str(len(part)) + part
    if not encoded.endswith('*'):
        encoded += 'E*'
    return ['{prefix}{sym}'.format(prefix=prefix, sym=encoded) for prefix in prefixes]
+
+
def to_gnu(src, dest):
    """Write a GNU ld version script exporting the symbols listed in src."""
    grouped = collections.defaultdict(list)
    version = None
    for item in parse_export_file(src):
        if item.get('linux_version'):
            if version:
                raise Exception('More than one linux_version defined')
            version = item.get('linux_version')
        elif item['lang'] == 'C++':
            # C++ symbols are exported via their mangled-name patterns.
            grouped['C'].extend(to_c(item['sym']))
        else:
            grouped[item['lang']].append(item['sym'])

    header = '{} {{\nglobal:\n'.format(version) if version else '{\nglobal:\n'
    dest.write(header)

    for lang, syms in grouped.items():
        dest.write('    extern "' + lang + '" {\n')
        for sym in syms:
            dest.write('        ' + sym + ';\n')
        dest.write('    };\n')

    # Everything not listed above stays local to the shared object.
    dest.write('local: *;\n};\n')
+
+
def to_msvc(src, dest):
    """Write an MSVC .def EXPORTS section; only plain C symbols are emitted."""
    dest.write('EXPORTS\n')
    exported = (
        item.get('sym')
        for item in parse_export_file(src)
        if not item.get('linux_version') and item.get('lang') == 'C'
    )
    for sym in exported:
        dest.write('    {}\n'.format(sym))
+
+
def to_darwin(src, dest):
    """Write space-separated '-Wl,-exported_symbol,_<sym>' flags for every exported symbol.

    Bug fix: previously the separator was promoted to ' ' only after a whole
    export-file item had been processed, so when the FIRST item was a C++ symbol
    all of its mangled patterns were written glued together without spaces.
    The separator is now updated after every written flag.
    """
    pre = ''
    for item in parse_export_file(src):
        if item.get('linux_version'):
            continue  # version scripts have no Darwin analogue

        if item['lang'] == 'C':
            symbols = [item['sym']]
        elif item['lang'] == 'C++':
            symbols = to_c(item['sym'])
        else:
            raise Exception('unsupported lang: ' + item['lang'])

        for sym in symbols:
            dest.write(pre + '-Wl,-exported_symbol,_' + sym)
            pre = ' '
+
+
def main():
    """CLI entry point: convert the platform-independent export file into the
    format required by the selected linker (--format gnu|msvc|darwin)."""
    parser = argparse.ArgumentParser(
        description='Convert self-invented platform independent export file format to the format required by specific linker'
    )
    parser.add_argument(
        'src', type=argparse.FileType('r', encoding='UTF-8'), help='platform independent export file path'
    )
    parser.add_argument(
        'dest', type=argparse.FileType('w', encoding='UTF-8'), help='destination export file for required linker'
    )
    parser.add_argument('--format', help='destination file type format: gnu, msvc or darwin')

    args = parser.parse_args()

    # Dispatch table instead of an if/elif chain.
    converters = {'gnu': to_gnu, 'msvc': to_msvc, 'darwin': to_darwin}
    converter = converters.get(args.format)
    if converter is None:
        print('Unknown destination file format: {}'.format(args.format), file=sys.stderr)
        sys.exit(1)
    converter(args.src, args.dest)

    args.src.close()
    args.dest.close()


if __name__ == '__main__':
    main()

+ 3 - 0
build/export_generators/cmake/build/scripts/gather_swig_java.cmake

@@ -0,0 +1,3 @@
# Collect every .java source under JAVA_SRC_DIR and write the list of paths,
# one per line, into the file named by JAVA_LST (consumed by the swig/java build step).
file(GLOB JAVA_FILES ${JAVA_SRC_DIR}/*.java)
list(JOIN JAVA_FILES "\n" JAVA_LST_CONTENT)
file(WRITE ${JAVA_LST} ${JAVA_LST_CONTENT})

+ 319 - 0
build/export_generators/cmake/build/scripts/generate_vcs_info.py

@@ -0,0 +1,319 @@
+# coding: utf-8
+import json
+import locale
+import re
+import os
+import subprocess
+import sys
+import time
+
+
+INDENT = " " * 4
+
+
+def _get_vcs_dictionary(vcs_type, *arg):
+    if vcs_type == 'git':
+        return _GitVersion.parse(*arg)
+    else:
+        raise Exception("Unknown VCS type {}".format(str(vcs_type)))
+
+
+def _get_user_locale():
+    try:
+        return [locale.getencoding()]
+    except Exception:
+        return []
+
+
class _GitVersion:
    """Extraction and formatting of git metadata: commit, author, branch/tag,
    embedded git-svn coordinates, and the commit depth ("patch number")."""

    @classmethod
    def parse(cls, commit_hash, author_info, summary_info, body_info, tag_info, branch_info, depth=None):
        r"""Parses output of
        git rev-parse HEAD
        git log -1 --format='format:%an <%ae>'
        git log -1 --format='format:%s'
        git log -1 --grep='^git-svn-id: ' --format='format:%b' or
        git log -1 --grep='^Revision: r?\d*' --format='format:%b'
        git describe --exact-match --tags HEAD
        git describe --exact-match --all HEAD
        and depth as computed by _get_git_depth
        """

        info = {}
        info['hash'] = commit_hash
        info['commit_author'] = _SystemInfo._to_text(author_info)
        info['summary'] = _SystemInfo._to_text(summary_info)

        # git-svn commits carry 'git-svn-id: <url>@<revision> <uuid>' in the body.
        if 'svn_commit_revision' not in info:
            url = re.search("git?-svn?-id: (.*)@(\\d*).*", body_info)
            if url:
                info['svn_url'] = url.group(1)
                info['svn_commit_revision'] = int(url.group(2))

        # Fallback: commits imported with an explicit 'Revision: rNNN' trailer.
        if 'svn_commit_revision' not in info:
            rev = re.search('Revision: r?(\\d*).*', body_info)
            if rev:
                info['svn_commit_revision'] = int(rev.group(1))

        info['tag'] = tag_info
        info['branch'] = branch_info
        info['scm_text'] = cls._format_scm_data(info)
        info['vcs'] = 'git'

        if depth:
            info['patch_number'] = int(depth)
        return info

    @staticmethod
    def _format_scm_data(info):
        # Human-readable block embedded into PROGRAM_VERSION / SCM_DATA.
        scm_data = "Git info:\n"
        scm_data += INDENT + "Commit: " + info['hash'] + "\n"
        scm_data += INDENT + "Branch: " + info['branch'] + "\n"
        scm_data += INDENT + "Author: " + info['commit_author'] + "\n"
        scm_data += INDENT + "Summary: " + info['summary'] + "\n"
        if 'svn_commit_revision' in info or 'svn_url' in info:
            scm_data += INDENT + "git-svn info:\n"
        if 'svn_url' in info:
            scm_data += INDENT + "URL: " + info['svn_url'] + "\n"
        if 'svn_commit_revision' in info:
            scm_data += INDENT + "Last Changed Rev: " + str(info['svn_commit_revision']) + "\n"
        return scm_data

    @staticmethod
    def external_data(arc_root):
        """Run the git commands described in parse() in *arc_root* and return
        their raw byte outputs in parse()'s argument order."""
        env = os.environ.copy()
        env['TZ'] = ''

        hash_args = ['rev-parse', 'HEAD']
        author_args = ['log', '-1', '--format=format:%an <%ae>']
        summary_args = ['log', '-1', '--format=format:%s']
        svn_args = ['log', '-1', '--grep=^git-svn-id: ', '--format=format:%b']
        svn_args_alt = ['log', '-1', '--grep=^Revision: r\\?\\d*', '--format=format:%b']
        tag_args = ['describe', '--exact-match', '--tags', 'HEAD']
        branch_args = ['describe', '--exact-match', '--all', 'HEAD']

        # using local 'Popen' wrapper
        # (fix: the original issued the hash and author queries twice; each runs once now)
        commit = _SystemInfo._system_command_call(['git'] + hash_args, env=env, cwd=arc_root).rstrip()
        author = _SystemInfo._system_command_call(['git'] + author_args, env=env, cwd=arc_root)
        summary = _SystemInfo._system_command_call(['git'] + summary_args, env=env, cwd=arc_root)
        svn_id = _SystemInfo._system_command_call(['git'] + svn_args, env=env, cwd=arc_root)
        if not svn_id:
            svn_id = _SystemInfo._system_command_call(['git'] + svn_args_alt, env=env, cwd=arc_root)

        # 'git describe --exact-match' fails unless HEAD is exactly tagged; fall back to empty.
        try:
            tag_info = _SystemInfo._system_command_call(['git'] + tag_args, env=env, cwd=arc_root).splitlines()
        except Exception:
            tag_info = [''.encode('utf-8')]

        try:
            branch_info = _SystemInfo._system_command_call(['git'] + branch_args, env=env, cwd=arc_root).splitlines()
        except Exception:
            branch_info = [''.encode('utf-8')]

        depth = str(_GitVersion._get_git_depth(env, arc_root)).encode('utf-8')

        # logger.debug('Git info commit:{}, author:{}, summary:{}, svn_id:{}'.format(commit, author, summary, svn_id))
        return [commit, author, summary, svn_id, tag_info[0], branch_info[0], depth]

    # YT's patch number.
    @staticmethod
    def _get_git_depth(env, arc_root):
        """Length of the longest parent chain from HEAD, via iterative DFS
        (avoids Python's recursion limit on deep histories)."""
        graph = {}
        full_history_args = ["log", "--full-history", "--format=%H %P", "HEAD"]
        history = _SystemInfo._system_command_call(['git'] + full_history_args, env=env, cwd=arc_root).decode('utf-8')

        head = None
        for line in history.splitlines():
            values = line.split()
            if values:
                if head is None:
                    head = values[0]
                graph[values[0]] = values[1:]

        assert head
        cache = {}
        stack = [(head, None, False)]
        while stack:
            commit, child, calculated = stack.pop()
            if commit in cache:
                calculated = True
            if calculated:
                if child is not None:
                    cache[child] = max(cache.get(child, 0), cache[commit] + 1)
            else:
                # Revisit this commit after all of its parents are resolved.
                stack.append((commit, child, True))
                parents = graph[commit]
                if not parents:
                    cache[commit] = 0
                else:
                    for parent in parents:
                        stack.append((parent, commit, False))
        return cache[head]
+
+
class _SystemInfo:
    """Helpers for collecting build-host information and running external commands."""

    # Candidate encodings for decoding subprocess output, in priority order.
    LOCALE_LIST = _get_user_locale() + [sys.getfilesystemencoding(), 'utf-8']

    @classmethod
    def get_locale(cls):
        # Return the first candidate encoding the codecs registry recognizes.
        import codecs

        for i in cls.LOCALE_LIST:
            if not i:
                continue
            try:
                codecs.lookup(i)
                return i
            except LookupError:
                continue

    @staticmethod
    def _to_text(s):
        # Decode bytes with the detected locale; pass anything else through unchanged.
        if isinstance(s, bytes):
            return s.decode(_SystemInfo.get_locale(), errors='replace')
        return s

    @staticmethod
    def get_user():
        # Try the common user-name environment variables across POSIX and Windows.
        sys_user = os.environ.get("USER")
        if not sys_user:
            sys_user = os.environ.get("USERNAME")
        if not sys_user:
            sys_user = os.environ.get("LOGNAME")
        if not sys_user:
            sys_user = "Unknown user"
        return sys_user

    @staticmethod
    def get_date(stamp=None):
        # Format compatible with SVN-xml format.
        return time.strftime("%Y-%m-%dT%H:%M:%S.000000Z", time.gmtime(stamp))

    @staticmethod
    def get_timestamp():
        # Unix timestamp.
        return int(time.time())

    @staticmethod
    def get_other_data(src_dir, data_file='local.ymake'):
        # NOTE(review): data_file is currently unused; kept for interface compatibility.
        other_data = "Other info:\n"
        other_data += INDENT + "Build by: " + _SystemInfo.get_user() + "\n"
        other_data += INDENT + "Top src dir: {}\n".format(src_dir)

        # logger.debug("Other data: %s", other_data)

        return other_data

    @staticmethod
    def _get_host_info(fake_build_info=False):
        # NOTE(review): on_win() is not defined anywhere in this file — presumably
        # provided by the embedding environment; confirm before standalone use.
        if fake_build_info:
            host_info = '*sys localhost 1.0.0 #dummy information '
        elif not on_win():
            host_info = ' '.join(os.uname())
        else:
            host_info = _SystemInfo._system_command_call("VER")  # XXX: check shell from cygwin to call VER this way!
        return INDENT + INDENT + host_info.strip() + "\n" if host_info else ""

    @staticmethod
    def _system_command_call(command, **kwargs):
        # Run a command through the shell and return raw stdout bytes, or None on OSError.
        # NOTE(review): shell=True with a joined command line — callers only pass fixed
        # git invocations here; do not route untrusted data through this helper.
        if isinstance(command, list):
            command = subprocess.list2cmdline(command)
        try:
            process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, **kwargs)
            stdout, stderr = process.communicate()
            if process.returncode != 0:
                # logger.debug('{}\nRunning {} failed with exit code {}\n'.format(stderr, command, process.returncode))
                # NOTE(review): get_svn_exception() is not defined in this file — confirm
                # it is supplied by the embedding environment, else this raises NameError.
                raise get_svn_exception()(stdout=stdout, stderr=stderr, rc=process.returncode, cmd=[command])
            return stdout
        except OSError as e:
            msg = e.strerror
            errcodes = 'error {}'.format(e.errno)
            if on_win() and isinstance(e, WindowsError):
                errcodes += ', win-error {}'.format(e.winerror)
                try:
                    import ctypes

                    # NOTE(review): str(x, encoding) requires bytes but FormatError returns str —
                    # verify this branch actually works on Windows.
                    msg = str(ctypes.FormatError(e.winerror), _SystemInfo.get_locale()).encode('utf-8')
                except ImportError:
                    pass
            # logger.debug('System command call {} failed [{}]: {}\n'.format(command, errcodes, msg))
            return None
+
+
+def _get_raw_data(vcs_type, vcs_root):
+    lines = []
+    if vcs_type == 'git':
+        lines = _GitVersion.external_data(vcs_root)
+
+    return [l.decode('utf-8') for l in lines]
+
+
+def _get_json(vcs_root):
+    try:
+        vcs_type = "git"
+        info = _get_vcs_dictionary(vcs_type, *_get_raw_data(vcs_type, vcs_root))
+        return info, vcs_root
+    except Exception:
+        return None, ""
+
+
def _dump_json(
    arc_root,
    info,
    other_data=None,
    build_user=None,
    build_date=None,
    build_timestamp=0,
    custom_version='',
):
    """Serialize the collected VCS/build info into the JSON document consumed
    by the build (keys sorted, 4-space indent)."""
    payload = {
        'PROGRAM_VERSION': info['scm_text'] + "\n" + _SystemInfo._to_text(other_data),
        'CUSTOM_VERSION': str(_SystemInfo._to_text(custom_version)),
        'SCM_DATA': info['scm_text'],
        'ARCADIA_SOURCE_PATH': _SystemInfo._to_text(arc_root),
        'ARCADIA_SOURCE_URL': info.get('url', info.get('svn_url', '')),
        'ARCADIA_SOURCE_REVISION': info.get('revision', -1),
        'ARCADIA_SOURCE_HG_HASH': info.get('hash', ''),
        'ARCADIA_SOURCE_LAST_CHANGE': info.get('commit_revision', info.get('svn_commit_revision', -1)),
        'ARCADIA_SOURCE_LAST_AUTHOR': info.get('commit_author', ''),
        'ARCADIA_PATCH_NUMBER': info.get('patch_number', 0),
        'BUILD_USER': _SystemInfo._to_text(build_user),
        'VCS': info.get('vcs', ''),
        'BRANCH': info.get('branch', ''),
        'ARCADIA_TAG': info.get('tag', ''),
        'DIRTY': info.get('dirty', ''),
    }

    # git-svn checkouts additionally report their SVN coordinates.
    if 'url' in info or 'svn_url' in info:
        payload['SVN_REVISION'] = info.get('svn_commit_revision', info.get('revision', -1))
        payload['SVN_ARCROOT'] = info.get('url', info.get('svn_url', ''))
        payload['SVN_TIME'] = info.get('commit_date', info.get('svn_commit_date', ''))

    payload['BUILD_DATE'] = build_date
    payload['BUILD_TIMESTAMP'] = build_timestamp

    return json.dumps(payload, sort_keys=True, indent=4, separators=(',', ': '))
+
+
def get_version_info(arc_root, custom_version=""):
    """Return the version-info JSON for arc_root, or '' when no VCS data is available."""
    info, vcs_root = _get_json(arc_root)
    if info is None:
        return ""

    other = _SystemInfo.get_other_data(src_dir=vcs_root)
    return _dump_json(
        vcs_root,
        info,
        other_data=other,
        build_user=_SystemInfo.get_user(),
        build_date=_SystemInfo.get_date(None),
        build_timestamp=_SystemInfo.get_timestamp(),
        custom_version=custom_version,
    )
+
+
if __name__ == '__main__':
    # argv[1]: output JSON path, argv[2]: repository root.
    output_path, source_root = sys.argv[1], sys.argv[2]
    with open(output_path, 'w') as out:
        out.write(get_version_info(source_root))

+ 47 - 0
build/export_generators/cmake/build/scripts/re_replace.py

@@ -0,0 +1,47 @@
+import sys
+from typing import List
+import argparse
+import re
+
+# Usage: re_replace.py --from-re <REGEXP> --to-re <REGEXP_REPLACE> FILE [FILE ...]
+
+
def patch_line(line: str, from_re: re.Pattern, to_re: str) -> str:
    """Return line with every match of from_re replaced by to_re (backreferences allowed)."""
    return from_re.sub(to_re, line)
+
+
def main(args: List[str]):
    """Apply the --from-re/--to-re substitution to every listed file, rewriting
    only files whose content actually changed; report both groups on stdout."""
    argparser = argparse.ArgumentParser(allow_abbrev=False)
    argparser.add_argument('--from-re', required=True)
    argparser.add_argument('--to-re', required=True)
    parsed_args, files = argparser.parse_known_args(args=args)
    compiled = re.compile(parsed_args.from_re)
    if not files:
        raise Exception('No input files')

    patched_files = []
    skipped_files = []
    for path in files:
        with open(path, 'r') as handle:
            original_lines = handle.readlines()
        new_lines = [patch_line(text, compiled, parsed_args.to_re) for text in original_lines]
        if new_lines != original_lines:
            # Write back only when something changed, keeping untouched files' mtimes intact.
            with open(path, 'w') as handle:
                handle.writelines(new_lines)
            patched_files.append(path)
        else:
            skipped_files.append(path)

    if patched_files:
        print("Patched by re_replace: " + ", ".join(patched_files))
    if skipped_files:
        print("Skipped by re_replace: " + ", ".join(skipped_files))


if __name__ == '__main__':
    main(sys.argv[1:])

+ 80 - 0
build/export_generators/cmake/build/scripts/split_unittest.py

@@ -0,0 +1,80 @@
+import argparse
+import tempfile
+import shlex
+import subprocess
+
+
def parse_args():
    """Parse sharding options; everything after the options is the test command line."""
    argparser = argparse.ArgumentParser()
    argparser.add_argument("--split-factor", type=int, default=0)
    argparser.add_argument("--shard", type=int, default=0)
    argparser.add_argument("--fork-mode", type=str, default="SEQUENTIAL")
    # REMAINDER: the test binary and all of its own arguments, verbatim.
    argparser.add_argument("command", nargs=argparse.REMAINDER)
    return argparser.parse_args()
+
+
def get_sequential_chunk(tests, modulo, modulo_index):
    """Contiguous partition: split tests into `modulo` consecutive chunks and
    return chunk number modulo_index; earlier chunks absorb the remainder."""
    base, remainder = divmod(len(tests), modulo)
    start = base * modulo_index + min(modulo_index, remainder)
    end = start + base + (1 if modulo_index < remainder else 0)
    # Out-of-range shard indices select nothing.
    if end > len(tests):
        return []
    return tests[start:end]
+
+
def get_shuffled_chunk(tests, modulo, modulo_index):
    """Round-robin partition: element i belongs to shard (i % modulo)."""
    return [test for position, test in enumerate(tests) if position % modulo == modulo_index]
+
+
def list_tests(binary):
    """Ask the test binary for its test names via --list-verbose and return the
    non-empty, stripped lines of the produced listing file."""
    with tempfile.NamedTemporaryFile() as tmpfile:
        # NOTE(review): reopening a NamedTemporaryFile by name while it is held open
        # may fail on Windows — confirm the target platforms for this script.
        subprocess.check_call([binary, "--list-verbose", "--list-path", tmpfile.name])

        with open(tmpfile.name) as listing:
            names = [line.strip() for line in listing.read().strip().split("\n")]
        return [name for name in names if name]
+
+
def get_shard_tests(args):
    """Return the sorted test names belonging to this shard, chosen by fork mode."""
    names = sorted(list_tests(args.command[0]))

    mode = args.fork_mode
    if mode == "MODULO":
        return get_shuffled_chunk(names, args.split_factor, args.shard)
    if mode == "SEQUENTIAL":
        return get_sequential_chunk(names, args.split_factor, args.shard)
    raise ValueError("detected unknown partition mode: {}".format(mode))
+
+
def get_shard_cmd_args(args):
    """Prefix each selected test name with '+' (the unittest binary's selection syntax)."""
    return ["+{}".format(name) for name in get_shard_tests(args)]
+
+
def main():
    """Run the test binary, optionally restricted to this shard's tests."""
    args = parse_args()

    cmd = args.command
    if args.split_factor:
        extra = get_shard_cmd_args(args)
        if not extra:
            # Empty shard: nothing to run, report success.
            print("No tests for {} shard".format(args.shard))
            return 0
        cmd = args.command + extra

    rc = subprocess.call(cmd)
    if rc:
        # shlex.join requires Python 3.8+.
        print("Some tests failed. To reproduce run: {}".format(shlex.join(cmd)))
    return rc


if __name__ == "__main__":
    exit(main())

+ 26 - 0
build/export_generators/cmake/cmake/FindAIO.cmake

@@ -0,0 +1,26 @@
# - Find AIO (Linux kernel asynchronous I/O library, libaio)
#
# AIO_INCLUDE_DIR - Where to find libaio.h
# AIO_LIBRARIES   - Library to link when using AIO.
# AIO_FOUND       - True if AIO was found.
#
# The AIO_ROOT environment variable may point at a non-standard install prefix.
# On success the imported target AIO::aio is defined.

find_path(AIO_INCLUDE_DIR
  libaio.h
  HINTS $ENV{AIO_ROOT}/include)

find_library(AIO_LIBRARIES
  aio
  HINTS $ENV{AIO_ROOT}/lib)

# Sets AIO_FOUND and honours QUIET/REQUIRED from the find_package() call.
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(AIO DEFAULT_MSG AIO_LIBRARIES AIO_INCLUDE_DIR)

mark_as_advanced(AIO_INCLUDE_DIR AIO_LIBRARIES)

if (AIO_FOUND AND NOT TARGET AIO::aio)
  add_library(AIO::aio UNKNOWN IMPORTED)
  set_target_properties(AIO::aio PROPERTIES
    IMPORTED_LOCATION ${AIO_LIBRARIES}
    INTERFACE_INCLUDE_DIRECTORIES ${AIO_INCLUDE_DIR}
  )
endif()

+ 26 - 0
build/export_generators/cmake/cmake/FindIDN.cmake

@@ -0,0 +1,26 @@
+# - Find IDN
+#
+# IDN_INCLUDE - Where to find LibIDN public headers
+# IDN_LIBS - List of libraries when using LibIDN.
+# IDN_FOUND - True if LibIDN found.
+
+find_path(IDN_INCLUDE_DIR
+  idna.h
+  HINTS $ENV{IDN_ROOT}/include)
+
+find_library(IDN_LIBRARIES
+  idn
+  HINTS $ENV{IDN_ROOT}/lib)
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(IDN DEFAULT_MSG IDN_LIBRARIES IDN_INCLUDE_DIR)
+
+mark_as_advanced(IDN_INCLUDE_DIR IDN_LIBRARIES)
+
+if (IDN_FOUND AND NOT TARGET IDN::IDN)
+  add_library(IDN::IDN UNKNOWN IMPORTED)
+  set_target_properties(IDN::IDN PROPERTIES
+    IMPORTED_LOCATION ${IDN_LIBRARIES}
+    INTERFACE_INCLUDE_DIRECTORIES ${IDN_INCLUDE_DIR}
+  )
+endif()

+ 33 - 0
build/export_generators/cmake/cmake/FindJNITarget.cmake

@@ -0,0 +1,33 @@
# Wrapper around CMake's stock FindJNI module that re-exports its results as
# imported targets JNITarget::jni and JNITarget::jni_awt.
# Forward the QUIET/REQUIRED mode of this find_package() call to FindJNI.
if(JNITarget_FIND_QUIETLY)
  find_package(JNI QUIET)
elseif(JNITarget_FIND_REQUIRED)
  find_package(JNI REQUIRED)
else()
  find_package(JNI)
endif()

set(JNI_TARGET_INCLUDE_DIRS ${JNI_INCLUDE_DIRS})
set(JNI_TARGET_LIBRARIES ${JNI_LIBRARIES})

if (JNI_FOUND)
  # JVM library plus the JNI header directories (jni.h and platform jni_md.h).
  add_library(JNITarget::jni IMPORTED UNKNOWN)
  set_property(TARGET JNITarget::jni PROPERTY
    IMPORTED_LOCATION ${JAVA_JVM_LIBRARY}
  )
  set_property(TARGET JNITarget::jni PROPERTY
    INTERFACE_INCLUDE_DIRECTORIES ${JAVA_INCLUDE_PATH} ${JAVA_INCLUDE_PATH2}
  )

  # AWT native interface library and its headers.
  add_library(JNITarget::jni_awt IMPORTED UNKNOWN)
  set_property(TARGET JNITarget::jni_awt PROPERTY
    IMPORTED_LOCATION ${JAVA_AWT_LIBRARY}
  )
  set_property(TARGET JNITarget::jni_awt PROPERTY
    INTERFACE_INCLUDE_DIRECTORIES ${JAVA_AWT_INCLUDE_PATH}
  )
endif()

# Sets JNITarget_FOUND based on the variables filled in by FindJNI.
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(JNITarget DEFAULT_MSG JNI_TARGET_LIBRARIES JNI_TARGET_INCLUDE_DIRS)

mark_as_advanced(JNI_TARGET_INCLUDE_DIRS JNI_TARGET_LIBRARIES)

+ 31 - 0
build/export_generators/cmake/cmake/antlr.cmake

@@ -0,0 +1,31 @@
# Locate the antlr3 executable once and cache it in ANTLR3_EXECUTABLE;
# aborts the configure step if it cannot be found.
function(ensure_antlr)
    if(NOT ANTLR3_EXECUTABLE)
        find_program(ANTLR3_EXECUTABLE
                     NAMES antlr3)
        if (NOT ANTLR3_EXECUTABLE)
            message(FATAL_ERROR "Unable to find antlr3 program. Please install antlr3 and make sure executable file present in the $PATH env.")
        endif()
    endif()
endfunction()

# Register a custom command that runs antlr3 to generate OUTPUT files.
# Usage: run_antlr(OUTPUT <files> DEPENDS <files> WORKING_DIRECTORY <dir> ANTLER_ARGS <args...>)
# NOTE(review): the keyword is spelled ANTLER_ARGS (sic); kept as-is because it is
# part of the function's public interface used by generated CMakeLists.
function(run_antlr)
    ensure_antlr()
    set(options "")
    set(oneValueArgs WORKING_DIRECTORY)
    set(multiValueArgs OUTPUT DEPENDS ANTLER_ARGS)
    cmake_parse_arguments(
        RUN_ANTLR
         "${options}"
         "${oneValueArgs}"
         "${multiValueArgs}"
         ${ARGN}
    )

    add_custom_command(
        OUTPUT ${RUN_ANTLR_OUTPUT}
        COMMAND ${ANTLR3_EXECUTABLE} ${RUN_ANTLR_ANTLER_ARGS}
        WORKING_DIRECTORY ${RUN_ANTLR_WORKING_DIRECTORY}
        DEPENDS ${RUN_ANTLR_DEPENDS}
    )

endfunction()

Некоторые файлы не были показаны из-за большого количества измененных файлов