diff --git a/.clang-tidy b/.clang-tidy index 7f632b63c..4c945a053 100644 --- a/.clang-tidy +++ b/.clang-tidy @@ -1,8 +1,70 @@ +# Here is an explanation for why some of the checks are disabled: + +# bugprone-easily-swappable-parameters: This check would require significant +# refactoring effort. + +# bugprone-unchecked-optional-access: Too many false positives. For example, +# an explicit comparison with std::nullopt isn't considered a check. + +# cppcoreguidelines-avoid-const-or-ref-data-members: We believe, ref data +# members are a good way to express ownership, and const data members improve +# readability. + +# cppcoreguidelines-rvalue-reference-param-not-moved: Too many false positives, +# especially with partial moves from STL containers. + +# misc-const-correctness: Too many false positives, especially with STL +# containers. + +# misc-include-cleaner: There is no way for symbol mapping. For example, this +# check requires to delete the "gsl/gsl" header, but at the same time asks to +# include one for gsl::not_null. + +# misc-use-anonymous-namespace: This check would require significant refactoring +# effort, but wouldn't improve readability equally. + +# misc-no-recursion: There are legitimate uses for us: we use recursion for +# trees a lot. + +# modernize-return-braced-init-list: We think removing typenames and using only +# braced-init can hurt readability. + +# performance-avoid-endl: There are too many legitimate uses of std::endl for +# us. + +# readability-identifier-length: We would like to enable this check, but it +# would require significant refactoring effort. 
+ FormatStyle: Google -Checks: '*,-abseil-*,-altera-*,-android-*,-boost-*,-cert-*,-darwin-*,-fuchsia-*,-linuxkernel-*,-llvm-*,-llvmlibc-*,-mpi-*,-objc-*,-zircon-*' -CheckOptions: [ - { - key: misc-non-private-member-variables-in-classes.IgnoreClassesWithAllMemberVariablesBeingPublic, - value: '1' - } -] \ No newline at end of file +Checks: >- + *,-abseil-*,-altera-*,-android-*,-boost-*,-cert-*,-darwin-*,-fuchsia-*,-linuxkernel-*,-llvm-*,-llvmlibc-*,-mpi-*,-objc-*,-zircon-*, + -bugprone-easily-swappable-parameters,-bugprone-unchecked-optional-access, + -clang-analyzer-cplusplus.NewDeleteLeaks, + -clang-diagnostic-unused-command-line-argument, + -concurrency-mt-unsafe, + -cppcoreguidelines-avoid-const-or-ref-data-members,-cppcoreguidelines-rvalue-reference-param-not-moved, + -misc-const-correctness,-misc-include-cleaner,-misc-use-anonymous-namespace,-misc-no-recursion, + -modernize-return-braced-init-list, + -performance-avoid-endl, + -readability-function-cognitive-complexity,-readability-identifier-length +WarningsAsErrors: '*' +CheckOptions: + - { key: misc-non-private-member-variables-in-classes.IgnoreClassesWithAllMemberVariablesBeingPublic, value: '1' } + - { key: readability-identifier-naming.ClassCase, value: CamelCase } + - { key: readability-identifier-naming.EnumCase, value: CamelCase } + - { key: readability-identifier-naming.StructCase, value: CamelCase } + - { key: readability-identifier-naming.TypeTemplateParameterCase, value: CamelCase } + - { key: readability-identifier-naming.ConstexprVariableCase, value: CamelCase } + - { key: readability-identifier-naming.ConstexprVariablePrefix, value: k } + - { key: readability-identifier-naming.GlobalConstantCase, value: CamelCase } + - { key: readability-identifier-naming.GlobalConstantPrefix, value: k } + - { key: readability-identifier-naming.MemberConstantCase, value: CamelCase } + - { key: readability-identifier-naming.MemberConstantPrefix, value: k } + - { key: 
readability-identifier-naming.StaticConstantCase, value: CamelCase } + - { key: readability-identifier-naming.StaticConstantPrefix, value: k } + - { key: readability-identifier-naming.ValueTemplateParameterCase, value: CamelCase } + - { key: readability-identifier-naming.ValueTemplateParameterPrefix, value: k } + - { key: readability-identifier-naming.VariableCase, value: lower_case } + - { key: readability-identifier-naming.ClassMemberCase, value: lower_case } + - { key: readability-identifier-naming.PrivateMemberSuffix, value: _ } + - { key: readability-identifier-naming.ProtectedMemberSuffix, value: _ } diff --git a/CHANGELOG.md b/CHANGELOG.md index 6cb40fbd6..db1f6672e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,9 +1,16 @@ -## Release `1.4.0` (UNRELEASED) +## Release `1.4.0` (2024-11-04) -A feature release on top of `1.3.0`, backwards compatible. +A feature release on top of `1.3.0`, backwards compatible with +respect to rule language, build description, repository description, +and wire protocols. However, the internal representation in local +build root has changed; it is therefore recommended to remove the +local build root on upgrade. ### New features +- `just serve` now also works together with a compatible remote-execution + endpoint. This uses an extended version of the serve protocol, so + both, `just-mr` and `just serve` need to be at the new version. - User-defined rules, as well as the built-in rule `"generic"` can now specify a subdirectory in which an action is to be executed. - `just-mr` now supports garbage collection for repository roots @@ -20,8 +27,8 @@ A feature release on top of `1.3.0`, backwards compatible. will not lead to an inconsistent state; however, the directories at the old location will not be used anymore while still using disk space. 
-- The expression language has been extended to contain quote and - quasi-quote expressions, as well as new built-in functions +- The expression language has been extended to contain quote + and quasi-quote expressions, as well as new built-in functions `"from_subdir"`, `"nub_left"`. ### Fixes @@ -29,6 +36,10 @@ A feature release on top of `1.3.0`, backwards compatible. - The built-in rule `"generic"` now properly enforces that the obtained outputs form a well-formed artifact stage; a conflicting arrangement of artifacts was possilbe beforehand. +- The built-in expression functions `"join"` and `"join_cmd"` + now properly enforce that the argument is a list of strings. + So far, they used to accept a single string, treating it as a + singleton list. - A bug was fixed that cased `just serve` to fail with an internal error when building against ignore-special roots. - `just` now accurately reports internal errors that occurred on @@ -37,13 +48,13 @@ A feature release on top of `1.3.0`, backwards compatible. depended upon are also written to or found in cache; previously, it was assumed that all export targets not analysed locally were local cache hits, an assumption that no longer holds in - the presence of serve end points. This fixes a cache consistency + the presence of serve endpoints. This fixes a cache consistency problem if the same remote-execution endpoint is used both, with and without a serve endpoint. -- A race condition in reconstructing executables from large CAS has - been removed that could lead to an open file descriptor being kept - alive for too long, resulting EBUSY failures of actions using this - binary. +- A race condition in reconstructing executables from large CAS + has been removed that could lead to an open file descriptor being + kept alive for too long, resulting in EBUSY failures of actions + using this binary. - Internal code clean up, reducing memory footprint, in particular for simultaneous upload of a large number of blobs. 
- Avoidence of duplicate requests and performance improvements when @@ -51,20 +62,27 @@ A feature release on top of `1.3.0`, backwards compatible. - Dependencies have been updated to also build with gcc 14. - Portability improvements of the code by not relying on implementation details of the compiler. -- Local execution no longer has the requirement that there exist no - more files with identical content than the hardlink limit of the - underlying file system. -- The size of large object entries has been reduced. The cache and - CAS must be cleaned up since stable versions before `1.4.0` cannot use - the new format. -- The way of storing intermediate keys of the action cache has been changed. - The cache must be cleaned up since stable versions before `1.4.0` cannot - use the new format. +- Local execution no longer has the requirement that there exist + no more files with identical content than the hardlink limit of + the underlying file system. +- Inside action descriptions, paths are always normalized; this improves + compatibility with existing remote-execution implementations. +- The size of large object entries has been reduced. The cache + and CAS must be cleaned up since stable versions before `1.4.0` + cannot use the new format. +- The way of storing intermediate keys of the action cache has + been changed. The cache must be cleaned up since stable versions + before `1.4.0` cannot use the new format. - Various improvements to the tests: dispatching of the summary action is now possible, tests are independent of a .just-mrrc file the user might have in their home directory - Various improvements of the documentation. +## Release `1.4.0~beta1` (2024-10-30) + +First beta release for the upcoming `1.4.0` release; see release +notes there. + ## Release `1.3.0` (2024-05-08) A feature release on top of `1.2.0`, backwards compatible. 
diff --git a/README.md b/README.md index 012e3395a..aceffeb70 100644 --- a/README.md +++ b/README.md @@ -25,6 +25,7 @@ taken from user-defined rules described by functional expressions. - [Targets versus `FILE`, `GLOB`, and `TREE`](doc/tutorial/target-file-glob-tree.md) - [Ensuring reproducibility](doc/tutorial/rebuild.md) - [Using protobuf](doc/tutorial/proto.md) + - [Running linters](doc/tutorial/lint.md) - [How to create a single-node remote execution service](doc/tutorial/just-execute.org) - [Dependency management using Target-level Cache as a Service](doc/tutorial/just-serve.md) - [Cross compiling and testing cross-compiled targets](doc/tutorial/cross-compiling.md) diff --git a/bin/bootstrap-traverser.py b/bin/bootstrap-traverser.py index 46388cde0..301870785 100755 --- a/bin/bootstrap-traverser.py +++ b/bin/bootstrap-traverser.py @@ -16,6 +16,7 @@ import hashlib import json import os +import shutil import subprocess import sys from typing import Any, Dict, List, Optional, cast @@ -79,12 +80,14 @@ def build_tree(desc: Json, *, config: Json, root: str, graph: Json) -> str: tree_dir = os.path.normpath(os.path.join(root, "TREE", tree_id)) if os.path.isdir(tree_dir): return tree_dir + tree_dir_tmp = tree_dir + ".tmp" tree_desc = graph["trees"][tree_id] for location, desc in tree_desc.items(): link(cast(str, build(desc, config=config, root=root, graph=graph)), - os.path.join(tree_dir, location)) + os.path.join(tree_dir_tmp, location)) # correctly handle the empty tree - os.makedirs(tree_dir, exist_ok=True) + os.makedirs(tree_dir_tmp, exist_ok=True) + shutil.copytree(tree_dir_tmp, tree_dir) return tree_dir diff --git a/bin/parallel-bootstrap-traverser.py b/bin/parallel-bootstrap-traverser.py index 86f50e6bd..a6c64643f 100755 --- a/bin/parallel-bootstrap-traverser.py +++ b/bin/parallel-bootstrap-traverser.py @@ -17,6 +17,7 @@ import json import multiprocessing import os +import shutil import subprocess import sys import threading @@ -254,6 +255,7 @@ def 
build_tree(desc: Json, *, config: Json, root: str, graph: Json, if state != AtomicListMap.Entry.INSERTED: # tree ready, run callback callback(tree_dir) return + tree_dir_tmp = tree_dir + ".tmp" tree_desc: Json = graph["trees"][tree_id] num_entries = AtomicInt(len(tree_desc.items())) @@ -261,7 +263,8 @@ def build_tree(desc: Json, *, config: Json, root: str, graph: Json, def run_callbacks() -> None: if num_entries.fetch_dec() <= 1: # correctly handle the empty tree - os.makedirs(tree_dir, exist_ok=True) + os.makedirs(tree_dir_tmp, exist_ok=True) + shutil.copytree(tree_dir_tmp, tree_dir) vals = g_CALLBACKS_PER_ID.fetch_clear(f"TREE/{tree_id}") if vals: for cb in vals: # mark ready @@ -274,7 +277,7 @@ def run_callbacks() -> None: def create_link(location: str) -> Callable[..., None]: def do_link(path: str) -> None: - link(path, os.path.join(tree_dir, location)) + link(path, os.path.join(tree_dir_tmp, location)) run_callbacks() return do_link diff --git a/doc/concepts/expressions.md b/doc/concepts/expressions.md index 4db7c8cbc..79b114f3b 100644 --- a/doc/concepts/expressions.md +++ b/doc/concepts/expressions.md @@ -248,7 +248,7 @@ those) argument(s) to obtain the final result. result is the input list, except that for all duplicate values, all but the rightmost occurrence is removed. - - `"nub_list"` The argument has to be a list. It is an error + - `"nub_left"` The argument has to be a list. It is an error if that list contains (directly or indirectly) a name. The result is the input list, except that for all duplicate values, all but the leftmost occurrence is removed. 
diff --git a/doc/tutorial/hello-world.md b/doc/tutorial/hello-world.md index 317c0a63d..b700252de 100644 --- a/doc/tutorial/hello-world.md +++ b/doc/tutorial/hello-world.md @@ -30,7 +30,7 @@ Second, we also need to create the multi-repository configuration { "repository": { "type": "git" , "branch": "master" - , "commit": "0e9d13a761c878a647fde5a543946a02bf2c693d" + , "commit": "b8ae7e38c0c51467ead55361362a0fd0da3666d5" , "repository": "https://github.com/just-buildsystem/rules-cc.git" , "subdir": "rules" } @@ -46,7 +46,7 @@ Second, we also need to create the multi-repository configuration In that configuration, two repositories are defined: 1. The `"rules-cc"` repository located in the subdirectory `rules` of - [just-buildsystem/rules-cc:0e9d13a761c878a647fde5a543946a02bf2c693d](https://github.com/just-buildsystem/rules-cc/tree/0e9d13a761c878a647fde5a543946a02bf2c693d), + [just-buildsystem/rules-cc:b8ae7e38c0c51467ead55361362a0fd0da3666d5](https://github.com/just-buildsystem/rules-cc/tree/b8ae7e38c0c51467ead55361362a0fd0da3666d5), which contains the high-level concepts for building C/C++ binaries and libraries. @@ -228,7 +228,7 @@ the following content: { "repository": { "type": "git" , "branch": "master" - , "commit": "0e9d13a761c878a647fde5a543946a02bf2c693d" + , "commit": "b8ae7e38c0c51467ead55361362a0fd0da3666d5" , "repository": "https://github.com/just-buildsystem/rules-cc.git" , "subdir": "rules" } diff --git a/doc/tutorial/just-serve.md b/doc/tutorial/just-serve.md index 37f6419ab..aefce3d86 100644 --- a/doc/tutorial/just-serve.md +++ b/doc/tutorial/just-serve.md @@ -293,7 +293,7 @@ Our `repos.json` at this stage reads: { "repository": { "type": "git" , "branch": "master" - , "commit": "0e9d13a761c878a647fde5a543946a02bf2c693d" + , "commit": "b8ae7e38c0c51467ead55361362a0fd0da3666d5" , "repository": "https://github.com/just-buildsystem/rules-cc.git" , "subdir": "rules" } @@ -504,7 +504,7 @@ We are now ready to see how this setup works. 
At this point the `repos.json` is { "repository": { "type": "git" , "branch": "master" - , "commit": "0e9d13a761c878a647fde5a543946a02bf2c693d" + , "commit": "b8ae7e38c0c51467ead55361362a0fd0da3666d5" , "repository": "https://github.com/just-buildsystem/rules-cc.git" , "subdir": "rules" } @@ -643,7 +643,7 @@ removing any `"absent"` pragma fields { "repository": { "type": "git" , "branch": "master" - , "commit": "0e9d13a761c878a647fde5a543946a02bf2c693d" + , "commit": "b8ae7e38c0c51467ead55361362a0fd0da3666d5" , "repository": "https://github.com/just-buildsystem/rules-cc.git" , "subdir": "rules" } diff --git a/doc/tutorial/lint.md b/doc/tutorial/lint.md new file mode 100644 index 000000000..7b4884051 --- /dev/null +++ b/doc/tutorial/lint.md @@ -0,0 +1,400 @@ +# Running Linters + +It is common to run some form of linters over the code base. It is +desirable to also use our build tool for this to have the benefit +of parallel (or even remote) build and sound caching. Additionally, +this also allows the lint tools to see the file layout as it occurs +in the actual compile action, including generated files. Remember +that even for source files this layout does not have to coincide +with the layout of files in the source repository. + +Conveniently, our build rules have support for collecting the +relevant information needed for linting built in. If a target is +built with the configuration variable `LINT` set to a true value, lint +information is provided for the transitive sources; +as [third-party dependencies](third-party-software.md) are typically +exported without `LINT` among the flexible variables, that naturally +forms a boundary of the "own" code (to be linted, as opposed to +third-party code). So, continuing the +[third-party tutorial](third-party-software.md), we can obtain +abstract nodes for our sources (`main.cpp`, `greet/greet.hpp`, +`greet/greet.cpp`). 
+ +``` sh +$ just-mr analyse -D '{"LINT": true}' --dump-nodes - +INFO: Performing repositories setup +INFO: Found 5 repositories to set up +INFO: Setup finished, exec ["just","analyse","-C","...","-D","{\"LINT\": true}","--dump-nodes","-"] +INFO: Requested target is [["@","tutorial","","helloworld"],{"LINT":true}] +INFO: Analysed target [["@","tutorial","","helloworld"],{"LINT":true}] +INFO: Export targets found: 0 cached, 1 uncached, 0 not eligible for caching +INFO: Result of target [["@","tutorial","","helloworld"],{"LINT":true}]: { + "artifacts": { + "helloworld": {"data":{"id":"edf0113a4dff26d1d2453947fe0c7ae11a6cabb125a5ddb2f15e10106e01781c","path":"work/helloworld"},"type":"ACTION"} + }, + "provides": { + "debug-hdrs": { + }, + "debug-srcs": { + }, + "lint": [ + {"id":"28df1af04041db0c150bbdef440fc3265a57e1163258fd15a4373b7279e4b91a","type":"NODE"}, + {"id":"f76b7acba64fc35a5c67f81d52a4aa47e0b0c8966eaf19c1f51477a4e0b8fc79","type":"NODE"}, + {"id":"bd8ee55d88fade7ebc8121ab7e230aed3888b27f9e87841482b2b08ecf47acb0","type":"NODE"} + ], + "package": { + "to_bin": true + }, + "run-libs": { + } + }, + "runfiles": { + } + } +INFO: Target nodes of target [["@","tutorial","","helloworld"],{"LINT":true}]: +{ + "28df1af04041db0c150bbdef440fc3265a57e1163258fd15a4373b7279e4b91a": { + "result": { + "artifact_stage": { + "include": { + "data": { + "id": "ac340b9e4bcdf82d972ff9286bbda4cd7219d6d3487867875418aeb2b03012b5" + }, + "type": "TREE" + }, + "work/main.cpp": { + "data": { + "path": "main.cpp", + "repository": "tutorial" + }, + "type": "LOCAL" + } + }, + "provides": { + "cmd": ["c++","-O2","-Wall","-I","work","-isystem","include","-c","work/main.cpp","-o","work/main.o"], + "src": "work/main.cpp" + }, + "runfiles": { + } + }, + "type": "VALUE_NODE" + }, + "bd8ee55d88fade7ebc8121ab7e230aed3888b27f9e87841482b2b08ecf47acb0": { + "result": { + "artifact_stage": { + "include": { + "data": { + "id": "124bb6d1afd5839463acf1f602109c4229ea303dc5dbfc63d2d4ce21fa590d24" + }, + 
"type": "TREE" + }, + "work/greet/greet.cpp": { + "data": { + "path": "greet/greet.cpp", + "repository": "tutorial" + }, + "type": "LOCAL" + }, + "work/greet/greet.hpp": { + "data": { + "path": "greet/greet.hpp", + "repository": "tutorial" + }, + "type": "LOCAL" + } + }, + "provides": { + "cmd": ["c++","-O2","-Wall","-I","work","-isystem","include","-c","work/greet/greet.cpp","-o","work/greet/greet.o"], + "src": "work/greet/greet.cpp" + }, + "runfiles": { + } + }, + "type": "VALUE_NODE" + }, + "f76b7acba64fc35a5c67f81d52a4aa47e0b0c8966eaf19c1f51477a4e0b8fc79": { + "result": { + "artifact_stage": { + "include": { + "data": { + "id": "124bb6d1afd5839463acf1f602109c4229ea303dc5dbfc63d2d4ce21fa590d24" + }, + "type": "TREE" + }, + "work/greet/greet.hpp": { + "data": { + "path": "greet/greet.hpp", + "repository": "tutorial" + }, + "type": "LOCAL" + } + }, + "provides": { + "cmd": ["c++","-O2","-Wall","-I","work","-isystem","include","-E","work/greet/greet.hpp"], + "src": "work/greet/greet.hpp" + }, + "runfiles": { + } + }, + "type": "VALUE_NODE" + } +} +``` + +We find the sources in correct staging, together with the respective +compile command (or preprocessing, in case of headers) provided. +The latter is important, to find the correct include files and to +know the correct defines to be used. + +Of course, those abstract nodes are just an implementation detail +and there is a rule to define linting for the collected sources. +It takes two programs (targets consisting of a single artifact), +- the `linter` for running the lint task on a single file, and +- the `summarizer` for summarizing the lint results; +additionally, arbitrary `config` data can be given to have config +files available, but also to use a linter built from source. + +As for every rule, the details can be obtained with the `describe` +subcommand. 
+ +``` sh +$ just-mr --main rules-cc describe --rule lint targets +INFO: Performing repositories setup +INFO: Found 2 repositories to set up +INFO: Setup finished, exec ["just","describe","-C","...","--rule","lint","targets"] + | Run a given linter on the lint information provided by the given targets. + Target fields + - "linter" + | Single artifact running the lint checks. + | + | This program is invoked with +... +``` + +Let's go through these programs we have to provide one by one. The +first one is supposed to call the actual linter; as many linters, +including `clang-tidy` which we use as an example, prefer to obtain +the command information through a +[compilation database](https://clang.llvm.org/docs/JSONCompilationDatabase.html) +there is actually some work to do, especially as the directory entry +has to be an absolute path. We also move the configuration file +`.clang-tidy` from the configuration directory (located in a directory given +to us through the environment variable `CONFIG`) to the position +expected by `clang-tidy`. + +``` {.python srcname="run_clang_tidy.py"} +#!/usr/bin/env python3 + +import json +import os +import shutil +import subprocess +import sys + +def dump_meta(src, cmd): + OUT = os.environ.get("OUT") + with open(os.path.join(OUT, "config.json"), "w") as f: + json.dump({"src": src, "cmd": cmd}, f) + +def run_lint(src, cmd): + dump_meta(src, cmd) + config = os.environ.get("CONFIG") + shutil.copyfile(os.path.join(config, ".clang-tidy"), + ".clang-tidy") + db = [ {"directory": os.getcwd(), + "arguments": cmd, + "file": src}] + with open("compile_commands.json", "w") as f: + json.dump(db,f) + new_cmd = [ "clang-tidy", src ] + return subprocess.run(new_cmd).returncode + +if __name__ == "__main__": + sys.exit(run_lint(sys.argv[1], sys.argv[2:])) +``` + +The actual information on success or failure is provided through +the exit code and information on the problems discovered (if any) +is reported on stdout or stderr. 
Additionally, our launcher also +writes the meta data in a file `config.json` in the directory for +additional (usually machine-readable) diagnostic output; the location +of this directory is given to us by the environment variable `OUT`. + +We use a pretty simple `.clang-tidy` for demonstration purposes. + +``` {.md srcname=".clang-tidy"} +Checks: 'clang-analyzer-*,misc-*,-misc-include-*' +WarningsAsErrors: 'clang-analyzer-*,misc-*,-misc-include-*' +``` + +Computing a summary of the individual lint results (given to the +summarizer as subdirectories of the current working directory) is +straightforward: the overall linting passed if all individual checks +passed and for the failed tests we format stdout and stderr in some +easy-to-read way; additionally, we also provide a machine-readable +summary of the failures. + +``` {.py srcname="summary.py"} +#!/usr/bin/env python3 + +import json +import os +import sys + +FAILED = {} + +for lint in sorted(os.listdir()): + if os.path.isdir(lint): + with open(os.path.join(lint, "result")) as f: + result = f.read().strip() + if result != "PASS": + record = {} + with open(os.path.join(lint, "out/config.json")) as f: + record["config"] = json.load(f) + with open(os.path.join(lint, "stdout")) as f: + log = f.read() + with open(os.path.join(lint, "stderr")) as f: + log += f.read() + record["log"] = log + FAILED[lint] = record + +with open(os.path.join(os.environ.get("OUT"), "failures.json"), "w") as f: + json.dump(FAILED, f) + +failures = list(FAILED.keys()) + +for f in failures: + src = FAILED[f]["config"]["src"] + log = FAILED[f]["log"] + + print("%s %s" % (f, src)) + print("".join([" " + line + "\n" + for line in log.splitlines()])) + + +if failures: + sys.exit(1) +``` + +Of course, our launcher and summarizer have to be executable. + +``` sh +$ chmod 755 run_clang_tidy.py summary.py +``` + +Now we can define our lint target. + +``` {.jsonc srcname="TARGETS"} +... 
+, "lint": + { "type": ["@", "rules", "lint", "targets"] + , "targets": ["helloworld"] + , "linter": ["run_clang_tidy.py"] + , "summarizer": ["summary.py"] + , "config": [".clang-tidy"] + } +... +``` + +As most rules, the lint rules also have a `"defaults"` target, +which allows to set `PATH` appropriately for all lint actions. +This can be useful if the linters are installed in a non-standard +directory. + +``` sh +$ mkdir -p tutorial-defaults/lint +$ echo '{"defaults": {"type": "defaults", "PATH": ["'"${TOOLCHAIN_PATH}"'"]}}' > tutorial-defaults/lint/TARGETS +$ git add tutorial-defaults +$ git commit -m 'add lint defaults' +``` + +We now can build our lint report in the same way as any test report. + +``` sh +$ just-mr build lint -P report +INFO: Performing repositories setup +INFO: Found 5 repositories to set up +INFO: Setup finished, exec ["just","build","-C","...","lint","-P","report"] +INFO: Requested target is [["@","tutorial","","lint"],{}] +INFO: Analysed target [["@","tutorial","","lint"],{}] +INFO: Export targets found: 0 cached, 1 uncached, 0 not eligible for caching +INFO: Target tainted ["lint"]. +INFO: Discovered 11 actions, 7 trees, 0 blobs +INFO: Building [["@","tutorial","","lint"],{}]. +INFO: Processed 7 actions, 3 cache hits. +INFO: Artifacts built, logical paths are: + out [a90a9e3a8ac23526eb31ae46c80434cfd5810ed5:41:t] + report [e69de29bb2d1d6434b8b29ae775ad8c2e48c5391:0:f] + result [7ef22e9a431ad0272713b71fdc8794016c8ef12f:5:f] + work [52b9cfc07b53c59fb066bc95329f4ca6457e7338:111:t] +INFO: Backing up artifacts of 1 export targets +INFO: Target tainted ["lint"]. +``` + +To see that some real linting is going on, let's modify one +of our source files. Say, we'll make the greeting independent +of the recipient. + +``` {.cpp srcname="greet/greet.cpp"} +#include "greet.hpp" +#include + +void greet(std::string const& s) { + fmt::print("Hello!\n"); +} +``` + +Building succeeds without any warning. 
+ +``` sh +$ just-mr build helloworld +INFO: Performing repositories setup +INFO: Found 5 repositories to set up +INFO: Setup finished, exec ["just","build","-C","...","helloworld"] +INFO: Requested target is [["@","tutorial","","helloworld"],{}] +INFO: Analysed target [["@","tutorial","","helloworld"],{}] +INFO: Export targets found: 1 cached, 0 uncached, 0 not eligible for caching +INFO: Discovered 4 actions, 2 trees, 0 blobs +INFO: Building [["@","tutorial","","helloworld"],{}]. +INFO: Processed 4 actions, 1 cache hits. +INFO: Artifacts built, logical paths are: + helloworld [2cb87c743e9fd3d18543732945df3ef9ca084be6:132736:x] +``` + +However, the linter reports it. +``` sh +$ just-mr build lint -P report || : +INFO: Performing repositories setup +INFO: Found 5 repositories to set up +INFO: Setup finished, exec ["just","build","-C","...","lint","-P","report"] +INFO: Requested target is [["@","tutorial","","lint"],{}] +INFO: Analysed target [["@","tutorial","","lint"],{}] +INFO: Export targets found: 1 cached, 0 uncached, 0 not eligible for caching +INFO: Target tainted ["lint"]. +INFO: Discovered 8 actions, 6 trees, 0 blobs +INFO: Building [["@","tutorial","","lint"],{}]. +WARN (action:b9abc2d5c9766644da1f9db5ec6586f6ced35f36670046b14f73ad532ce12ba4): lint failed for work/greet/greet.cpp (exit code 1) +INFO: Processed 4 actions, 2 cache hits. 
+INFO: Artifacts built, logical paths are: + out [c298959107421711f8d87a2b96e95858c065b9b9:41:t] FAILED + report [0b0ab9eb90c28ece0f14a13a6ae5c97da4a32170:531:f] FAILED + result [94e1707e853c36f514de3876408c09a0e0ca6fc4:5:f] FAILED + work [007eec6bad8b691c067dd2c54165ac2912711474:111:t] FAILED +INFO: Failed artifacts: + out [c298959107421711f8d87a2b96e95858c065b9b9:41:t] FAILED + report [0b0ab9eb90c28ece0f14a13a6ae5c97da4a32170:531:f] FAILED + result [94e1707e853c36f514de3876408c09a0e0ca6fc4:5:f] FAILED + work [007eec6bad8b691c067dd2c54165ac2912711474:111:t] FAILED +0000000002 work/greet/greet.cpp + work/greet/greet.cpp:4:31: error: parameter 's' is unused [misc-unused-parameters,-warnings-as-errors] + 4 | void greet(std::string const& s) { + | ^ + | /*s*/ + 287 warnings generated. + Suppressed 286 warnings (286 in non-user code). + Use -header-filter=.* to display errors from all non-system headers. Use -system-headers to display errors from system headers as well. + 1 warning treated as error + +INFO: Target tainted ["lint"]. +WARN: Build result contains failed artifacts. +``` diff --git a/doc/tutorial/rebuild.md b/doc/tutorial/rebuild.md index 023d47fd3..d4a217229 100644 --- a/doc/tutorial/rebuild.md +++ b/doc/tutorial/rebuild.md @@ -108,7 +108,7 @@ the current working directory { "repository": { "type": "git" , "branch": "master" - , "commit": "0e9d13a761c878a647fde5a543946a02bf2c693d" + , "commit": "b8ae7e38c0c51467ead55361362a0fd0da3666d5" , "repository": "https://github.com/just-buildsystem/rules-cc.git" , "subdir": "rules" } diff --git a/doc/tutorial/target-file-glob-tree.md b/doc/tutorial/target-file-glob-tree.md index 8810a8bd4..3ae2ef637 100644 --- a/doc/tutorial/target-file-glob-tree.md +++ b/doc/tutorial/target-file-glob-tree.md @@ -36,7 +36,7 @@ following content. 
{ "repository": { "type": "git" , "branch": "master" - , "commit": "0e9d13a761c878a647fde5a543946a02bf2c693d" + , "commit": "b8ae7e38c0c51467ead55361362a0fd0da3666d5" , "repository": "https://github.com/just-buildsystem/rules-cc.git" , "subdir": "rules" } diff --git a/doc/tutorial/third-party-software.md b/doc/tutorial/third-party-software.md index 0ba2203eb..d8ac97c2f 100644 --- a/doc/tutorial/third-party-software.md +++ b/doc/tutorial/third-party-software.md @@ -140,7 +140,7 @@ additional binding `"format"` for it: { "repository": { "type": "git" , "branch": "master" - , "commit": "0e9d13a761c878a647fde5a543946a02bf2c693d" + , "commit": "b8ae7e38c0c51467ead55361362a0fd0da3666d5" , "repository": "https://github.com/just-buildsystem/rules-cc.git" , "subdir": "rules" } @@ -273,7 +273,7 @@ be set for them in `repos.json`: { "repository": { "type": "git" , "branch": "master" - , "commit": "0e9d13a761c878a647fde5a543946a02bf2c693d" + , "commit": "b8ae7e38c0c51467ead55361362a0fd0da3666d5" , "repository": "https://github.com/just-buildsystem/rules-cc.git" , "subdir": "rules" } @@ -396,7 +396,7 @@ example, the following `repos.json` defines the overlay { "repository": { "type": "git" , "branch": "master" - , "commit": "0e9d13a761c878a647fde5a543946a02bf2c693d" + , "commit": "b8ae7e38c0c51467ead55361362a0fd0da3666d5" , "repository": "https://github.com/just-buildsystem/rules-cc.git" , "subdir": "rules" } diff --git a/etc/defaults/CC/TARGETS.boringssl b/etc/defaults/CC/TARGETS.boringssl index f65a0ce0e..fa4e7b131 100644 --- a/etc/defaults/CC/TARGETS.boringssl +++ b/etc/defaults/CC/TARGETS.boringssl @@ -183,7 +183,7 @@ , "$1": {"type": "var", "name": "PLATFORM"} , "$2": "windows_x86_64" } - , ["-DWIN32_LEAN_AND_MEAN", "-DOPENSSL_NO_ASM"] + , ["-DWIN32_LEAN_AND_MEAN", "-utf-8", "-DOPENSSL_NO_ASM"] ] ] , "default": ["-DOPENSSL_NO_ASM"] diff --git a/etc/import.pkgconfig/TARGETS.google_apis b/etc/import.pkgconfig/TARGETS.google_apis index 20ff6876e..66e670c5c 100644 --- 
a/etc/import.pkgconfig/TARGETS.google_apis +++ b/etc/import.pkgconfig/TARGETS.google_apis @@ -18,6 +18,12 @@ { "type": ["@", "rules", "proto", "library"] , "name": ["google_api_client_proto"] , "srcs": ["google/api/client.proto"] + , "deps": ["google_api_launch_stage_proto"] + } +, "google_api_launch_stage_proto": + { "type": ["@", "rules", "proto", "library"] + , "name": ["google_api_launch_stage_proto"] + , "srcs": ["google/api/launch_stage.proto"] } , "google_api_expr_v1alpha1_checked_proto": { "type": ["@", "rules", "proto", "library"] diff --git a/etc/import/TARGETS.boringssl b/etc/import/TARGETS.boringssl index 36a32b51b..59314c8e5 100644 --- a/etc/import/TARGETS.boringssl +++ b/etc/import/TARGETS.boringssl @@ -56,8 +56,9 @@ , "pkg-name": ["libcrypto"] , "arguments_config": ["OS", "ARCH", "TARGET_ARCH"] , "hdrs": [["./", "src/include/openssl", "crypto_headers"]] - , "private-hdrs": ["fips_fragments", "crypto_internal_headers"] - , "srcs": ["crypto_sources", "asm_sources"] + , "private-hdrs": ["bcm_internal_headers", "crypto_internal_headers"] + , "srcs": + ["bcm_sources", "crypto_sources", "bcm_sources_asm", "crypto_sources_asm"] , "pure C": ["YES"] , "private-ldflags": { "type": "++" @@ -65,7 +66,7 @@ [ { "type": "if" , "cond": {"type": "==", "$1": {"type": "var", "name": "OS"}, "$2": "windows"} - , "then": ["-defaultlib:advapi32.lib"] + , "then": ["-defaultlib:advapi32.lib", "-defaultlib:ws2_32.lib"] } , ["-pthread", "-Wl,--whole-archive,-lpthread,--no-whole-archive"] ] @@ -96,6 +97,16 @@ } } } + , "deps": ["crypto-mlkem"] + } +, "crypto-mlkem": + { "type": ["@", "rules", "CC", "library"] + , "name": ["mlkem"] + , "srcs": ["src/crypto/mlkem/mlkem.cc"] + , "private-hdrs": + [ ["./", "src/include/openssl", "crypto_headers"] + , "crypto_internal_headers" + ] } , "ssl-lib": { "type": ["@", "rules", "CC", "library"] @@ -106,218 +117,194 @@ , "srcs": ["ssl_sources"] , "deps": ["crypto"] } -, "fips_fragments": - { "type": "install" - , "deps": - [ 
"src/crypto/fipsmodule/aes/aes.c" - , "src/crypto/fipsmodule/aes/aes_nohw.c" - , "src/crypto/fipsmodule/aes/key_wrap.c" - , "src/crypto/fipsmodule/aes/mode_wrappers.c" - , "src/crypto/fipsmodule/bn/add.c" - , "src/crypto/fipsmodule/bn/asm/x86_64-gcc.c" - , "src/crypto/fipsmodule/bn/bn.c" - , "src/crypto/fipsmodule/bn/bytes.c" - , "src/crypto/fipsmodule/bn/cmp.c" - , "src/crypto/fipsmodule/bn/ctx.c" - , "src/crypto/fipsmodule/bn/div.c" - , "src/crypto/fipsmodule/bn/div_extra.c" - , "src/crypto/fipsmodule/bn/exponentiation.c" - , "src/crypto/fipsmodule/bn/gcd.c" - , "src/crypto/fipsmodule/bn/gcd_extra.c" - , "src/crypto/fipsmodule/bn/generic.c" - , "src/crypto/fipsmodule/bn/jacobi.c" - , "src/crypto/fipsmodule/bn/montgomery.c" - , "src/crypto/fipsmodule/bn/montgomery_inv.c" - , "src/crypto/fipsmodule/bn/mul.c" - , "src/crypto/fipsmodule/bn/prime.c" - , "src/crypto/fipsmodule/bn/random.c" - , "src/crypto/fipsmodule/bn/rsaz_exp.c" - , "src/crypto/fipsmodule/bn/shift.c" - , "src/crypto/fipsmodule/bn/sqrt.c" - , "src/crypto/fipsmodule/cipher/aead.c" - , "src/crypto/fipsmodule/cipher/cipher.c" - , "src/crypto/fipsmodule/cipher/e_aes.c" - , "src/crypto/fipsmodule/cipher/e_aesccm.c" - , "src/crypto/fipsmodule/cmac/cmac.c" - , "src/crypto/fipsmodule/dh/check.c" - , "src/crypto/fipsmodule/dh/dh.c" - , "src/crypto/fipsmodule/digest/digest.c" - , "src/crypto/fipsmodule/digest/digests.c" - , "src/crypto/fipsmodule/digestsign/digestsign.c" - , "src/crypto/fipsmodule/ec/ec.c" - , "src/crypto/fipsmodule/ec/ec_key.c" - , "src/crypto/fipsmodule/ec/ec_montgomery.c" - , "src/crypto/fipsmodule/ec/felem.c" - , "src/crypto/fipsmodule/ec/oct.c" - , "src/crypto/fipsmodule/ec/p224-64.c" - , "src/crypto/fipsmodule/ec/p256-nistz.c" - , "src/crypto/fipsmodule/ec/p256.c" - , "src/crypto/fipsmodule/ec/scalar.c" - , "src/crypto/fipsmodule/ec/simple.c" - , "src/crypto/fipsmodule/ec/simple_mul.c" - , "src/crypto/fipsmodule/ec/util.c" - , "src/crypto/fipsmodule/ec/wnaf.c" - , 
"src/crypto/fipsmodule/ecdh/ecdh.c" - , "src/crypto/fipsmodule/ecdsa/ecdsa.c" - , "src/crypto/fipsmodule/hkdf/hkdf.c" - , "src/crypto/fipsmodule/hmac/hmac.c" - , "src/crypto/fipsmodule/md4/md4.c" - , "src/crypto/fipsmodule/md5/md5.c" - , "src/crypto/fipsmodule/modes/cbc.c" - , "src/crypto/fipsmodule/modes/cfb.c" - , "src/crypto/fipsmodule/modes/ctr.c" - , "src/crypto/fipsmodule/modes/gcm.c" - , "src/crypto/fipsmodule/modes/gcm_nohw.c" - , "src/crypto/fipsmodule/modes/ofb.c" - , "src/crypto/fipsmodule/modes/polyval.c" - , "src/crypto/fipsmodule/rand/ctrdrbg.c" - , "src/crypto/fipsmodule/rand/fork_detect.c" - , "src/crypto/fipsmodule/rand/rand.c" - , "src/crypto/fipsmodule/rand/urandom.c" - , "src/crypto/fipsmodule/rsa/blinding.c" - , "src/crypto/fipsmodule/rsa/padding.c" - , "src/crypto/fipsmodule/rsa/rsa.c" - , "src/crypto/fipsmodule/rsa/rsa_impl.c" - , "src/crypto/fipsmodule/self_check/fips.c" - , "src/crypto/fipsmodule/self_check/self_check.c" - , "src/crypto/fipsmodule/service_indicator/service_indicator.c" - , "src/crypto/fipsmodule/sha/sha1.c" - , "src/crypto/fipsmodule/sha/sha256.c" - , "src/crypto/fipsmodule/sha/sha512.c" - , "src/crypto/fipsmodule/tls/kdf.c" - ] - } -, "ssl_internal_headers": {"type": "install", "deps": ["src/ssl/internal.h"]} -, "ssl_sources": +, "bcm_sources": {"type": "install", "deps": ["src/crypto/fipsmodule/bcm.c"]} +, "bcm_internal_headers": { "type": "install" , "deps": - [ "src/ssl/bio_ssl.cc" - , "src/ssl/d1_both.cc" - , "src/ssl/d1_lib.cc" - , "src/ssl/d1_pkt.cc" - , "src/ssl/d1_srtp.cc" - , "src/ssl/dtls_method.cc" - , "src/ssl/dtls_record.cc" - , "src/ssl/encrypted_client_hello.cc" - , "src/ssl/extensions.cc" - , "src/ssl/handoff.cc" - , "src/ssl/handshake.cc" - , "src/ssl/handshake_client.cc" - , "src/ssl/handshake_server.cc" - , "src/ssl/s3_both.cc" - , "src/ssl/s3_lib.cc" - , "src/ssl/s3_pkt.cc" - , "src/ssl/ssl_aead_ctx.cc" - , "src/ssl/ssl_asn1.cc" - , "src/ssl/ssl_buffer.cc" - , "src/ssl/ssl_cert.cc" - , 
"src/ssl/ssl_cipher.cc" - , "src/ssl/ssl_file.cc" - , "src/ssl/ssl_key_share.cc" - , "src/ssl/ssl_lib.cc" - , "src/ssl/ssl_privkey.cc" - , "src/ssl/ssl_session.cc" - , "src/ssl/ssl_stat.cc" - , "src/ssl/ssl_transcript.cc" - , "src/ssl/ssl_versions.cc" - , "src/ssl/ssl_x509.cc" - , "src/ssl/t1_enc.cc" - , "src/ssl/tls13_both.cc" - , "src/ssl/tls13_client.cc" - , "src/ssl/tls13_enc.cc" - , "src/ssl/tls13_server.cc" - , "src/ssl/tls_method.cc" - , "src/ssl/tls_record.cc" + [ "src/crypto/fipsmodule/aes/aes.c.inc" + , "src/crypto/fipsmodule/aes/aes_nohw.c.inc" + , "src/crypto/fipsmodule/aes/key_wrap.c.inc" + , "src/crypto/fipsmodule/aes/mode_wrappers.c.inc" + , "src/crypto/fipsmodule/bn/add.c.inc" + , "src/crypto/fipsmodule/bn/asm/x86_64-gcc.c.inc" + , "src/crypto/fipsmodule/bn/bn.c.inc" + , "src/crypto/fipsmodule/bn/bytes.c.inc" + , "src/crypto/fipsmodule/bn/cmp.c.inc" + , "src/crypto/fipsmodule/bn/ctx.c.inc" + , "src/crypto/fipsmodule/bn/div.c.inc" + , "src/crypto/fipsmodule/bn/div_extra.c.inc" + , "src/crypto/fipsmodule/bn/exponentiation.c.inc" + , "src/crypto/fipsmodule/bn/gcd.c.inc" + , "src/crypto/fipsmodule/bn/gcd_extra.c.inc" + , "src/crypto/fipsmodule/bn/generic.c.inc" + , "src/crypto/fipsmodule/bn/jacobi.c.inc" + , "src/crypto/fipsmodule/bn/montgomery.c.inc" + , "src/crypto/fipsmodule/bn/montgomery_inv.c.inc" + , "src/crypto/fipsmodule/bn/mul.c.inc" + , "src/crypto/fipsmodule/bn/prime.c.inc" + , "src/crypto/fipsmodule/bn/random.c.inc" + , "src/crypto/fipsmodule/bn/rsaz_exp.c.inc" + , "src/crypto/fipsmodule/bn/shift.c.inc" + , "src/crypto/fipsmodule/bn/sqrt.c.inc" + , "src/crypto/fipsmodule/cipher/aead.c.inc" + , "src/crypto/fipsmodule/cipher/cipher.c.inc" + , "src/crypto/fipsmodule/cipher/e_aes.c.inc" + , "src/crypto/fipsmodule/cipher/e_aesccm.c.inc" + , "src/crypto/fipsmodule/cmac/cmac.c.inc" + , "src/crypto/fipsmodule/dh/check.c.inc" + , "src/crypto/fipsmodule/dh/dh.c.inc" + , "src/crypto/fipsmodule/digest/digest.c.inc" + , 
"src/crypto/fipsmodule/digest/digests.c.inc" + , "src/crypto/fipsmodule/digestsign/digestsign.c.inc" + , "src/crypto/fipsmodule/ec/ec.c.inc" + , "src/crypto/fipsmodule/ec/ec_key.c.inc" + , "src/crypto/fipsmodule/ec/ec_montgomery.c.inc" + , "src/crypto/fipsmodule/ec/felem.c.inc" + , "src/crypto/fipsmodule/ec/oct.c.inc" + , "src/crypto/fipsmodule/ec/p224-64.c.inc" + , "src/crypto/fipsmodule/ec/p256-nistz.c.inc" + , "src/crypto/fipsmodule/ec/p256.c.inc" + , "src/crypto/fipsmodule/ec/scalar.c.inc" + , "src/crypto/fipsmodule/ec/simple.c.inc" + , "src/crypto/fipsmodule/ec/simple_mul.c.inc" + , "src/crypto/fipsmodule/ec/util.c.inc" + , "src/crypto/fipsmodule/ec/wnaf.c.inc" + , "src/crypto/fipsmodule/ecdh/ecdh.c.inc" + , "src/crypto/fipsmodule/ecdsa/ecdsa.c.inc" + , "src/crypto/fipsmodule/hkdf/hkdf.c.inc" + , "src/crypto/fipsmodule/hmac/hmac.c.inc" + , "src/crypto/fipsmodule/md4/md4.c.inc" + , "src/crypto/fipsmodule/md5/md5.c.inc" + , "src/crypto/fipsmodule/modes/cbc.c.inc" + , "src/crypto/fipsmodule/modes/cfb.c.inc" + , "src/crypto/fipsmodule/modes/ctr.c.inc" + , "src/crypto/fipsmodule/modes/gcm.c.inc" + , "src/crypto/fipsmodule/modes/gcm_nohw.c.inc" + , "src/crypto/fipsmodule/modes/ofb.c.inc" + , "src/crypto/fipsmodule/modes/polyval.c.inc" + , "src/crypto/fipsmodule/rand/ctrdrbg.c.inc" + , "src/crypto/fipsmodule/rand/rand.c.inc" + , "src/crypto/fipsmodule/rsa/blinding.c.inc" + , "src/crypto/fipsmodule/rsa/padding.c.inc" + , "src/crypto/fipsmodule/rsa/rsa.c.inc" + , "src/crypto/fipsmodule/rsa/rsa_impl.c.inc" + , "src/crypto/fipsmodule/self_check/fips.c.inc" + , "src/crypto/fipsmodule/self_check/self_check.c.inc" + , "src/crypto/fipsmodule/service_indicator/service_indicator.c.inc" + , "src/crypto/fipsmodule/sha/sha1.c.inc" + , "src/crypto/fipsmodule/sha/sha256.c.inc" + , "src/crypto/fipsmodule/sha/sha512.c.inc" + , "src/crypto/fipsmodule/tls/kdf.c.inc" ] } -, "crypto_internal_headers": +, "bcm_sources_asm": { "type": "install" , "deps": - [ "src/crypto/asn1/internal.h" - 
, "src/crypto/bio/internal.h" - , "src/crypto/bytestring/internal.h" - , "src/crypto/chacha/internal.h" - , "src/crypto/cipher_extra/internal.h" - , "src/crypto/conf/conf_def.h" - , "src/crypto/conf/internal.h" - , "src/crypto/cpu_arm_linux.h" - , "src/crypto/curve25519/curve25519_tables.h" - , "src/crypto/curve25519/internal.h" - , "src/crypto/des/internal.h" - , "src/crypto/dsa/internal.h" - , "src/crypto/ec_extra/internal.h" - , "src/crypto/err/internal.h" - , "src/crypto/evp/internal.h" - , "src/crypto/fipsmodule/aes/internal.h" - , "src/crypto/fipsmodule/bn/internal.h" - , "src/crypto/fipsmodule/bn/rsaz_exp.h" - , "src/crypto/fipsmodule/cipher/internal.h" - , "src/crypto/fipsmodule/delocate.h" - , "src/crypto/fipsmodule/dh/internal.h" - , "src/crypto/fipsmodule/digest/internal.h" - , "src/crypto/fipsmodule/digest/md32_common.h" - , "src/crypto/fipsmodule/ec/builtin_curves.h" - , "src/crypto/fipsmodule/ec/internal.h" - , "src/crypto/fipsmodule/ec/p256-nistz-table.h" - , "src/crypto/fipsmodule/ec/p256-nistz.h" - , "src/crypto/fipsmodule/ec/p256_table.h" - , "src/crypto/fipsmodule/ecdsa/internal.h" - , "src/crypto/fipsmodule/md5/internal.h" - , "src/crypto/fipsmodule/modes/internal.h" - , "src/crypto/fipsmodule/rand/fork_detect.h" - , "src/crypto/fipsmodule/rand/getrandom_fillin.h" - , "src/crypto/fipsmodule/rand/internal.h" - , "src/crypto/fipsmodule/rsa/internal.h" - , "src/crypto/fipsmodule/service_indicator/internal.h" - , "src/crypto/fipsmodule/sha/internal.h" - , "src/crypto/fipsmodule/tls/internal.h" - , "src/crypto/hrss/internal.h" - , "src/crypto/internal.h" - , "src/crypto/keccak/internal.h" - , "src/crypto/kyber/internal.h" - , "src/crypto/lhash/internal.h" - , "src/crypto/obj/obj_dat.h" - , "src/crypto/pkcs7/internal.h" - , "src/crypto/pkcs8/internal.h" - , "src/crypto/poly1305/internal.h" - , "src/crypto/pool/internal.h" - , "src/crypto/rsa_extra/internal.h" - , "src/crypto/spx/address.h" - , "src/crypto/spx/fors.h" - , "src/crypto/spx/internal.h" - 
, "src/crypto/spx/merkle.h" - , "src/crypto/spx/params.h" - , "src/crypto/spx/spx_util.h" - , "src/crypto/spx/thash.h" - , "src/crypto/spx/wots.h" - , "src/crypto/trust_token/internal.h" - , "src/crypto/x509/ext_dat.h" - , "src/crypto/x509/internal.h" - , "src/third_party/fiat/curve25519_32.h" - , "src/third_party/fiat/curve25519_64.h" - , "src/third_party/fiat/curve25519_64_adx.h" - , "src/third_party/fiat/curve25519_64_msvc.h" - , "src/third_party/fiat/p256_32.h" - , "src/third_party/fiat/p256_64.h" - , "src/third_party/fiat/p256_64_msvc.h" - ] - } -, "src/crypto/internal.h": - { "type": ["@", "rules", "patch", "file"] - , "src": [["FILE", null, "src/crypto/internal.h"]] - , "patch": - [ [ "@" - , "patches" - , "" - , "crypto-use-_Generic-only-if-defined-__cplusplus.patch" - ] + [ "src/gen/bcm/aesni-gcm-x86_64-apple.S" + , "src/gen/bcm/aesni-gcm-x86_64-linux.S" + , "src/gen/bcm/aesni-x86-apple.S" + , "src/gen/bcm/aesni-x86-linux.S" + , "src/gen/bcm/aesni-x86_64-apple.S" + , "src/gen/bcm/aesni-x86_64-linux.S" + , "src/gen/bcm/aesv8-armv7-linux.S" + , "src/gen/bcm/aesv8-armv8-apple.S" + , "src/gen/bcm/aesv8-armv8-linux.S" + , "src/gen/bcm/aesv8-armv8-win.S" + , "src/gen/bcm/aesv8-gcm-armv8-apple.S" + , "src/gen/bcm/aesv8-gcm-armv8-linux.S" + , "src/gen/bcm/aesv8-gcm-armv8-win.S" + , "src/gen/bcm/armv4-mont-linux.S" + , "src/gen/bcm/armv8-mont-apple.S" + , "src/gen/bcm/armv8-mont-linux.S" + , "src/gen/bcm/armv8-mont-win.S" + , "src/gen/bcm/bn-586-apple.S" + , "src/gen/bcm/bn-586-linux.S" + , "src/gen/bcm/bn-armv8-apple.S" + , "src/gen/bcm/bn-armv8-linux.S" + , "src/gen/bcm/bn-armv8-win.S" + , "src/gen/bcm/bsaes-armv7-linux.S" + , "src/gen/bcm/co-586-apple.S" + , "src/gen/bcm/co-586-linux.S" + , "src/gen/bcm/ghash-armv4-linux.S" + , "src/gen/bcm/ghash-neon-armv8-apple.S" + , "src/gen/bcm/ghash-neon-armv8-linux.S" + , "src/gen/bcm/ghash-neon-armv8-win.S" + , "src/gen/bcm/ghash-ssse3-x86-apple.S" + , "src/gen/bcm/ghash-ssse3-x86-linux.S" + , 
"src/gen/bcm/ghash-ssse3-x86_64-apple.S" + , "src/gen/bcm/ghash-ssse3-x86_64-linux.S" + , "src/gen/bcm/ghash-x86-apple.S" + , "src/gen/bcm/ghash-x86-linux.S" + , "src/gen/bcm/ghash-x86_64-apple.S" + , "src/gen/bcm/ghash-x86_64-linux.S" + , "src/gen/bcm/ghashv8-armv7-linux.S" + , "src/gen/bcm/ghashv8-armv8-apple.S" + , "src/gen/bcm/ghashv8-armv8-linux.S" + , "src/gen/bcm/ghashv8-armv8-win.S" + , "src/gen/bcm/md5-586-apple.S" + , "src/gen/bcm/md5-586-linux.S" + , "src/gen/bcm/md5-x86_64-apple.S" + , "src/gen/bcm/md5-x86_64-linux.S" + , "src/gen/bcm/p256-armv8-asm-apple.S" + , "src/gen/bcm/p256-armv8-asm-linux.S" + , "src/gen/bcm/p256-armv8-asm-win.S" + , "src/gen/bcm/p256-x86_64-asm-apple.S" + , "src/gen/bcm/p256-x86_64-asm-linux.S" + , "src/gen/bcm/p256_beeu-armv8-asm-apple.S" + , "src/gen/bcm/p256_beeu-armv8-asm-linux.S" + , "src/gen/bcm/p256_beeu-armv8-asm-win.S" + , "src/gen/bcm/p256_beeu-x86_64-asm-apple.S" + , "src/gen/bcm/p256_beeu-x86_64-asm-linux.S" + , "src/gen/bcm/rdrand-x86_64-apple.S" + , "src/gen/bcm/rdrand-x86_64-linux.S" + , "src/gen/bcm/rsaz-avx2-apple.S" + , "src/gen/bcm/rsaz-avx2-linux.S" + , "src/gen/bcm/sha1-586-apple.S" + , "src/gen/bcm/sha1-586-linux.S" + , "src/gen/bcm/sha1-armv4-large-linux.S" + , "src/gen/bcm/sha1-armv8-apple.S" + , "src/gen/bcm/sha1-armv8-linux.S" + , "src/gen/bcm/sha1-armv8-win.S" + , "src/gen/bcm/sha1-x86_64-apple.S" + , "src/gen/bcm/sha1-x86_64-linux.S" + , "src/gen/bcm/sha256-586-apple.S" + , "src/gen/bcm/sha256-586-linux.S" + , "src/gen/bcm/sha256-armv4-linux.S" + , "src/gen/bcm/sha256-armv8-apple.S" + , "src/gen/bcm/sha256-armv8-linux.S" + , "src/gen/bcm/sha256-armv8-win.S" + , "src/gen/bcm/sha256-x86_64-apple.S" + , "src/gen/bcm/sha256-x86_64-linux.S" + , "src/gen/bcm/sha512-586-apple.S" + , "src/gen/bcm/sha512-586-linux.S" + , "src/gen/bcm/sha512-armv4-linux.S" + , "src/gen/bcm/sha512-armv8-apple.S" + , "src/gen/bcm/sha512-armv8-linux.S" + , "src/gen/bcm/sha512-armv8-win.S" + , "src/gen/bcm/sha512-x86_64-apple.S" + 
, "src/gen/bcm/sha512-x86_64-linux.S" + , "src/gen/bcm/vpaes-armv7-linux.S" + , "src/gen/bcm/vpaes-armv8-apple.S" + , "src/gen/bcm/vpaes-armv8-linux.S" + , "src/gen/bcm/vpaes-armv8-win.S" + , "src/gen/bcm/vpaes-x86-apple.S" + , "src/gen/bcm/vpaes-x86-linux.S" + , "src/gen/bcm/vpaes-x86_64-apple.S" + , "src/gen/bcm/vpaes-x86_64-linux.S" + , "src/gen/bcm/x86-mont-apple.S" + , "src/gen/bcm/x86-mont-linux.S" + , "src/gen/bcm/x86_64-mont-apple.S" + , "src/gen/bcm/x86_64-mont-linux.S" + , "src/gen/bcm/x86_64-mont5-apple.S" + , "src/gen/bcm/x86_64-mont5-linux.S" + , "src/third_party/fiat/asm/fiat_p256_adx_mul.S" + , "src/third_party/fiat/asm/fiat_p256_adx_sqr.S" ] } , "crypto_sources": { "type": "install" , "deps": - [ "err_data.c" - , "src/crypto/asn1/a_bitstr.c" + [ "src/crypto/asn1/a_bitstr.c" , "src/crypto/asn1/a_bool.c" , "src/crypto/asn1/a_d2i_fp.c" , "src/crypto/asn1/a_dup.c" @@ -395,6 +382,7 @@ , "src/crypto/dh_extra/dh_asn1.c" , "src/crypto/dh_extra/params.c" , "src/crypto/digest_extra/digest_extra.c" + , "src/crypto/dilithium/dilithium.c" , "src/crypto/dsa/dsa.c" , "src/crypto/dsa/dsa_asn1.c" , "src/crypto/ec_extra/ec_asn1.c" @@ -407,6 +395,8 @@ , "src/crypto/evp/evp.c" , "src/crypto/evp/evp_asn1.c" , "src/crypto/evp/evp_ctx.c" + , "src/crypto/evp/p_dh.c" + , "src/crypto/evp/p_dh_asn1.c" , "src/crypto/evp/p_dsa_asn1.c" , "src/crypto/evp/p_ec.c" , "src/crypto/evp/p_ec_asn1.c" @@ -422,7 +412,6 @@ , "src/crypto/evp/scrypt.c" , "src/crypto/evp/sign.c" , "src/crypto/ex_data.c" - , "src/crypto/fipsmodule/bcm.c" , "src/crypto/fipsmodule/fips_shared_support.c" , "src/crypto/hpke/hpke.c" , "src/crypto/hrss/hrss.c" @@ -430,6 +419,7 @@ , "src/crypto/kyber/kyber.c" , "src/crypto/lhash/lhash.c" , "src/crypto/mem.c" + , "src/crypto/mldsa/mldsa.c" , "src/crypto/obj/obj.c" , "src/crypto/obj/obj_xref.c" , "src/crypto/pem/pem_all.c" @@ -450,12 +440,14 @@ , "src/crypto/poly1305/poly1305_vec.c" , "src/crypto/pool/pool.c" , "src/crypto/rand_extra/deterministic.c" + , 
"src/crypto/rand_extra/fork_detect.c" , "src/crypto/rand_extra/forkunsafe.c" , "src/crypto/rand_extra/getentropy.c" , "src/crypto/rand_extra/ios.c" , "src/crypto/rand_extra/passive.c" , "src/crypto/rand_extra/rand_extra.c" , "src/crypto/rand_extra/trusty.c" + , "src/crypto/rand_extra/urandom.c" , "src/crypto/rand_extra/windows.c" , "src/crypto/rc4/rc4.c" , "src/crypto/refcount.c" @@ -548,256 +540,151 @@ , "src/crypto/x509/x_val.c" , "src/crypto/x509/x_x509.c" , "src/crypto/x509/x_x509a.c" + , "src/gen/crypto/err_data.c" ] } -, "crypto_sources_apple_aarch64": - { "type": "install" - , "deps": - [ "apple-aarch64/crypto/chacha/chacha-armv8-apple.S" - , "apple-aarch64/crypto/cipher_extra/chacha20_poly1305_armv8-apple.S" - , "apple-aarch64/crypto/fipsmodule/aesv8-armv8-apple.S" - , "apple-aarch64/crypto/fipsmodule/aesv8-gcm-armv8-apple.S" - , "apple-aarch64/crypto/fipsmodule/armv8-mont-apple.S" - , "apple-aarch64/crypto/fipsmodule/bn-armv8-apple.S" - , "apple-aarch64/crypto/fipsmodule/ghash-neon-armv8-apple.S" - , "apple-aarch64/crypto/fipsmodule/ghashv8-armv8-apple.S" - , "apple-aarch64/crypto/fipsmodule/p256-armv8-asm-apple.S" - , "apple-aarch64/crypto/fipsmodule/p256_beeu-armv8-asm-apple.S" - , "apple-aarch64/crypto/fipsmodule/sha1-armv8-apple.S" - , "apple-aarch64/crypto/fipsmodule/sha256-armv8-apple.S" - , "apple-aarch64/crypto/fipsmodule/sha512-armv8-apple.S" - , "apple-aarch64/crypto/fipsmodule/vpaes-armv8-apple.S" - , "apple-aarch64/crypto/test/trampoline-armv8-apple.S" - ] - } -, "crypto_sources_apple_x86": - { "type": "install" - , "deps": - [ "apple-x86/crypto/chacha/chacha-x86-apple.S" - , "apple-x86/crypto/fipsmodule/aesni-x86-apple.S" - , "apple-x86/crypto/fipsmodule/bn-586-apple.S" - , "apple-x86/crypto/fipsmodule/co-586-apple.S" - , "apple-x86/crypto/fipsmodule/ghash-ssse3-x86-apple.S" - , "apple-x86/crypto/fipsmodule/ghash-x86-apple.S" - , "apple-x86/crypto/fipsmodule/md5-586-apple.S" - , "apple-x86/crypto/fipsmodule/sha1-586-apple.S" - , 
"apple-x86/crypto/fipsmodule/sha256-586-apple.S" - , "apple-x86/crypto/fipsmodule/sha512-586-apple.S" - , "apple-x86/crypto/fipsmodule/vpaes-x86-apple.S" - , "apple-x86/crypto/fipsmodule/x86-mont-apple.S" - , "apple-x86/crypto/test/trampoline-x86-apple.S" - ] - } -, "crypto_sources_apple_x86_64": - { "type": "install" - , "deps": - [ "apple-x86_64/crypto/chacha/chacha-x86_64-apple.S" - , "apple-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64-apple.S" - , "apple-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64-apple.S" - , "apple-x86_64/crypto/fipsmodule/aesni-gcm-x86_64-apple.S" - , "apple-x86_64/crypto/fipsmodule/aesni-x86_64-apple.S" - , "apple-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64-apple.S" - , "apple-x86_64/crypto/fipsmodule/ghash-x86_64-apple.S" - , "apple-x86_64/crypto/fipsmodule/md5-x86_64-apple.S" - , "apple-x86_64/crypto/fipsmodule/p256-x86_64-asm-apple.S" - , "apple-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm-apple.S" - , "apple-x86_64/crypto/fipsmodule/rdrand-x86_64-apple.S" - , "apple-x86_64/crypto/fipsmodule/rsaz-avx2-apple.S" - , "apple-x86_64/crypto/fipsmodule/sha1-x86_64-apple.S" - , "apple-x86_64/crypto/fipsmodule/sha256-x86_64-apple.S" - , "apple-x86_64/crypto/fipsmodule/sha512-x86_64-apple.S" - , "apple-x86_64/crypto/fipsmodule/vpaes-x86_64-apple.S" - , "apple-x86_64/crypto/fipsmodule/x86_64-mont-apple.S" - , "apple-x86_64/crypto/fipsmodule/x86_64-mont5-apple.S" - , "apple-x86_64/crypto/test/trampoline-x86_64-apple.S" - , "src/third_party/fiat/asm/fiat_curve25519_adx_mul.S" - , "src/third_party/fiat/asm/fiat_curve25519_adx_square.S" - , "src/third_party/fiat/asm/fiat_p256_adx_mul.S" - , "src/third_party/fiat/asm/fiat_p256_adx_sqr.S" - ] - } -, "crypto_sources_linux_aarch64": - { "type": "install" - , "deps": - [ "linux-aarch64/crypto/chacha/chacha-armv8-linux.S" - , "linux-aarch64/crypto/cipher_extra/chacha20_poly1305_armv8-linux.S" - , "linux-aarch64/crypto/fipsmodule/aesv8-armv8-linux.S" - , 
"linux-aarch64/crypto/fipsmodule/aesv8-gcm-armv8-linux.S" - , "linux-aarch64/crypto/fipsmodule/armv8-mont-linux.S" - , "linux-aarch64/crypto/fipsmodule/bn-armv8-linux.S" - , "linux-aarch64/crypto/fipsmodule/ghash-neon-armv8-linux.S" - , "linux-aarch64/crypto/fipsmodule/ghashv8-armv8-linux.S" - , "linux-aarch64/crypto/fipsmodule/p256-armv8-asm-linux.S" - , "linux-aarch64/crypto/fipsmodule/p256_beeu-armv8-asm-linux.S" - , "linux-aarch64/crypto/fipsmodule/sha1-armv8-linux.S" - , "linux-aarch64/crypto/fipsmodule/sha256-armv8-linux.S" - , "linux-aarch64/crypto/fipsmodule/sha512-armv8-linux.S" - , "linux-aarch64/crypto/fipsmodule/vpaes-armv8-linux.S" - , "linux-aarch64/crypto/test/trampoline-armv8-linux.S" - ] - } -, "crypto_sources_linux_arm": - { "type": "install" - , "deps": - [ "linux-arm/crypto/chacha/chacha-armv4-linux.S" - , "linux-arm/crypto/fipsmodule/aesv8-armv7-linux.S" - , "linux-arm/crypto/fipsmodule/armv4-mont-linux.S" - , "linux-arm/crypto/fipsmodule/bsaes-armv7-linux.S" - , "linux-arm/crypto/fipsmodule/ghash-armv4-linux.S" - , "linux-arm/crypto/fipsmodule/ghashv8-armv7-linux.S" - , "linux-arm/crypto/fipsmodule/sha1-armv4-large-linux.S" - , "linux-arm/crypto/fipsmodule/sha256-armv4-linux.S" - , "linux-arm/crypto/fipsmodule/sha512-armv4-linux.S" - , "linux-arm/crypto/fipsmodule/vpaes-armv7-linux.S" - , "linux-arm/crypto/test/trampoline-armv4-linux.S" - , "src/crypto/curve25519/asm/x25519-asm-arm.S" - , "src/crypto/poly1305/poly1305_arm_asm.S" - ] - } -, "crypto_sources_linux_x86": +, "crypto_internal_headers": { "type": "install" , "deps": - [ "linux-x86/crypto/chacha/chacha-x86-linux.S" - , "linux-x86/crypto/fipsmodule/aesni-x86-linux.S" - , "linux-x86/crypto/fipsmodule/bn-586-linux.S" - , "linux-x86/crypto/fipsmodule/co-586-linux.S" - , "linux-x86/crypto/fipsmodule/ghash-ssse3-x86-linux.S" - , "linux-x86/crypto/fipsmodule/ghash-x86-linux.S" - , "linux-x86/crypto/fipsmodule/md5-586-linux.S" - , "linux-x86/crypto/fipsmodule/sha1-586-linux.S" - , 
"linux-x86/crypto/fipsmodule/sha256-586-linux.S" - , "linux-x86/crypto/fipsmodule/sha512-586-linux.S" - , "linux-x86/crypto/fipsmodule/vpaes-x86-linux.S" - , "linux-x86/crypto/fipsmodule/x86-mont-linux.S" - , "linux-x86/crypto/test/trampoline-x86-linux.S" + [ "src/crypto/asn1/internal.h" + , "src/crypto/bcm_support.h" + , "src/crypto/bio/internal.h" + , "src/crypto/bytestring/internal.h" + , "src/crypto/chacha/internal.h" + , "src/crypto/cipher_extra/internal.h" + , "src/crypto/conf/internal.h" + , "src/crypto/cpu_arm_linux.h" + , "src/crypto/curve25519/curve25519_tables.h" + , "src/crypto/curve25519/internal.h" + , "src/crypto/des/internal.h" + , "src/crypto/dilithium/internal.h" + , "src/crypto/dsa/internal.h" + , "src/crypto/ec_extra/internal.h" + , "src/crypto/err/internal.h" + , "src/crypto/evp/internal.h" + , "src/crypto/fipsmodule/aes/internal.h" + , "src/crypto/fipsmodule/bcm_interface.h" + , "src/crypto/fipsmodule/bn/internal.h" + , "src/crypto/fipsmodule/bn/rsaz_exp.h" + , "src/crypto/fipsmodule/cipher/internal.h" + , "src/crypto/fipsmodule/delocate.h" + , "src/crypto/fipsmodule/dh/internal.h" + , "src/crypto/fipsmodule/digest/internal.h" + , "src/crypto/fipsmodule/digest/md32_common.h" + , "src/crypto/fipsmodule/ec/builtin_curves.h" + , "src/crypto/fipsmodule/ec/internal.h" + , "src/crypto/fipsmodule/ec/p256-nistz-table.h" + , "src/crypto/fipsmodule/ec/p256-nistz.h" + , "src/crypto/fipsmodule/ec/p256_table.h" + , "src/crypto/fipsmodule/ecdsa/internal.h" + , "src/crypto/fipsmodule/md5/internal.h" + , "src/crypto/fipsmodule/modes/internal.h" + , "src/crypto/fipsmodule/rand/internal.h" + , "src/crypto/fipsmodule/rsa/internal.h" + , "src/crypto/fipsmodule/service_indicator/internal.h" + , "src/crypto/fipsmodule/sha/internal.h" + , "src/crypto/fipsmodule/tls/internal.h" + , "src/crypto/hrss/internal.h" + , "src/crypto/internal.h" + , "src/crypto/keccak/internal.h" + , "src/crypto/kyber/internal.h" + , "src/crypto/lhash/internal.h" + , 
"src/crypto/mldsa/internal.h" + , "src/crypto/mlkem/internal.h" + , "src/crypto/obj/obj_dat.h" + , "src/crypto/pkcs7/internal.h" + , "src/crypto/pkcs8/internal.h" + , "src/crypto/poly1305/internal.h" + , "src/crypto/pool/internal.h" + , "src/crypto/rand_extra/getrandom_fillin.h" + , "src/crypto/rand_extra/sysrand_internal.h" + , "src/crypto/rsa_extra/internal.h" + , "src/crypto/spx/address.h" + , "src/crypto/spx/fors.h" + , "src/crypto/spx/merkle.h" + , "src/crypto/spx/params.h" + , "src/crypto/spx/spx_util.h" + , "src/crypto/spx/thash.h" + , "src/crypto/spx/wots.h" + , "src/crypto/trust_token/internal.h" + , "src/crypto/x509/ext_dat.h" + , "src/crypto/x509/internal.h" + , "src/third_party/fiat/curve25519_32.h" + , "src/third_party/fiat/curve25519_64.h" + , "src/third_party/fiat/curve25519_64_adx.h" + , "src/third_party/fiat/curve25519_64_msvc.h" + , "src/third_party/fiat/p256_32.h" + , "src/third_party/fiat/p256_64.h" + , "src/third_party/fiat/p256_64_msvc.h" ] } -, "crypto_sources_linux_x86_64": +, "crypto_sources_asm": { "type": "install" , "deps": - [ "linux-x86_64/crypto/chacha/chacha-x86_64-linux.S" - , "linux-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64-linux.S" - , "linux-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64-linux.S" - , "linux-x86_64/crypto/fipsmodule/aesni-gcm-x86_64-linux.S" - , "linux-x86_64/crypto/fipsmodule/aesni-x86_64-linux.S" - , "linux-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64-linux.S" - , "linux-x86_64/crypto/fipsmodule/ghash-x86_64-linux.S" - , "linux-x86_64/crypto/fipsmodule/md5-x86_64-linux.S" - , "linux-x86_64/crypto/fipsmodule/p256-x86_64-asm-linux.S" - , "linux-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm-linux.S" - , "linux-x86_64/crypto/fipsmodule/rdrand-x86_64-linux.S" - , "linux-x86_64/crypto/fipsmodule/rsaz-avx2-linux.S" - , "linux-x86_64/crypto/fipsmodule/sha1-x86_64-linux.S" - , "linux-x86_64/crypto/fipsmodule/sha256-x86_64-linux.S" - , "linux-x86_64/crypto/fipsmodule/sha512-x86_64-linux.S" - , 
"linux-x86_64/crypto/fipsmodule/vpaes-x86_64-linux.S" - , "linux-x86_64/crypto/fipsmodule/x86_64-mont-linux.S" - , "linux-x86_64/crypto/fipsmodule/x86_64-mont5-linux.S" - , "linux-x86_64/crypto/test/trampoline-x86_64-linux.S" + [ "src/crypto/curve25519/asm/x25519-asm-arm.S" , "src/crypto/hrss/asm/poly_rq_mul.S" + , "src/crypto/poly1305/poly1305_arm_asm.S" + , "src/gen/crypto/aes128gcmsiv-x86_64-apple.S" + , "src/gen/crypto/aes128gcmsiv-x86_64-linux.S" + , "src/gen/crypto/chacha-armv4-linux.S" + , "src/gen/crypto/chacha-armv8-apple.S" + , "src/gen/crypto/chacha-armv8-linux.S" + , "src/gen/crypto/chacha-armv8-win.S" + , "src/gen/crypto/chacha-x86-apple.S" + , "src/gen/crypto/chacha-x86-linux.S" + , "src/gen/crypto/chacha-x86_64-apple.S" + , "src/gen/crypto/chacha-x86_64-linux.S" + , "src/gen/crypto/chacha20_poly1305_armv8-apple.S" + , "src/gen/crypto/chacha20_poly1305_armv8-linux.S" + , "src/gen/crypto/chacha20_poly1305_armv8-win.S" + , "src/gen/crypto/chacha20_poly1305_x86_64-apple.S" + , "src/gen/crypto/chacha20_poly1305_x86_64-linux.S" , "src/third_party/fiat/asm/fiat_curve25519_adx_mul.S" , "src/third_party/fiat/asm/fiat_curve25519_adx_square.S" - , "src/third_party/fiat/asm/fiat_p256_adx_mul.S" - , "src/third_party/fiat/asm/fiat_p256_adx_sqr.S" - ] - } -, "crypto_sources_win_aarch64": - { "type": "install" - , "deps": - [ "win-aarch64/crypto/chacha/chacha-armv8-win.S" - , "win-aarch64/crypto/cipher_extra/chacha20_poly1305_armv8-win.S" - , "win-aarch64/crypto/fipsmodule/aesv8-armv8-win.S" - , "win-aarch64/crypto/fipsmodule/aesv8-gcm-armv8-win.S" - , "win-aarch64/crypto/fipsmodule/armv8-mont-win.S" - , "win-aarch64/crypto/fipsmodule/bn-armv8-win.S" - , "win-aarch64/crypto/fipsmodule/ghash-neon-armv8-win.S" - , "win-aarch64/crypto/fipsmodule/ghashv8-armv8-win.S" - , "win-aarch64/crypto/fipsmodule/p256-armv8-asm-win.S" - , "win-aarch64/crypto/fipsmodule/p256_beeu-armv8-asm-win.S" - , "win-aarch64/crypto/fipsmodule/sha1-armv8-win.S" - , 
"win-aarch64/crypto/fipsmodule/sha256-armv8-win.S" - , "win-aarch64/crypto/fipsmodule/sha512-armv8-win.S" - , "win-aarch64/crypto/fipsmodule/vpaes-armv8-win.S" - , "win-aarch64/crypto/test/trampoline-armv8-win.S" - ] - } -, "crypto_sources_win_x86": - { "type": "install" - , "deps": - [ "win-x86/crypto/chacha/chacha-x86-win.asm" - , "win-x86/crypto/fipsmodule/aesni-x86-win.asm" - , "win-x86/crypto/fipsmodule/bn-586-win.asm" - , "win-x86/crypto/fipsmodule/co-586-win.asm" - , "win-x86/crypto/fipsmodule/ghash-ssse3-x86-win.asm" - , "win-x86/crypto/fipsmodule/ghash-x86-win.asm" - , "win-x86/crypto/fipsmodule/md5-586-win.asm" - , "win-x86/crypto/fipsmodule/sha1-586-win.asm" - , "win-x86/crypto/fipsmodule/sha256-586-win.asm" - , "win-x86/crypto/fipsmodule/sha512-586-win.asm" - , "win-x86/crypto/fipsmodule/vpaes-x86-win.asm" - , "win-x86/crypto/fipsmodule/x86-mont-win.asm" - , "win-x86/crypto/test/trampoline-x86-win.asm" ] } -, "crypto_sources_win_x86_64": +, "ssl_sources": { "type": "install" , "deps": - [ "win-x86_64/crypto/chacha/chacha-x86_64-win.asm" - , "win-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64-win.asm" - , "win-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64-win.asm" - , "win-x86_64/crypto/fipsmodule/aesni-gcm-x86_64-win.asm" - , "win-x86_64/crypto/fipsmodule/aesni-x86_64-win.asm" - , "win-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64-win.asm" - , "win-x86_64/crypto/fipsmodule/ghash-x86_64-win.asm" - , "win-x86_64/crypto/fipsmodule/md5-x86_64-win.asm" - , "win-x86_64/crypto/fipsmodule/p256-x86_64-asm-win.asm" - , "win-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm-win.asm" - , "win-x86_64/crypto/fipsmodule/rdrand-x86_64-win.asm" - , "win-x86_64/crypto/fipsmodule/rsaz-avx2-win.asm" - , "win-x86_64/crypto/fipsmodule/sha1-x86_64-win.asm" - , "win-x86_64/crypto/fipsmodule/sha256-x86_64-win.asm" - , "win-x86_64/crypto/fipsmodule/sha512-x86_64-win.asm" - , "win-x86_64/crypto/fipsmodule/vpaes-x86_64-win.asm" - , 
"win-x86_64/crypto/fipsmodule/x86_64-mont-win.asm" - , "win-x86_64/crypto/fipsmodule/x86_64-mont5-win.asm" - , "win-x86_64/crypto/test/trampoline-x86_64-win.asm" + [ "src/ssl/bio_ssl.cc" + , "src/ssl/d1_both.cc" + , "src/ssl/d1_lib.cc" + , "src/ssl/d1_pkt.cc" + , "src/ssl/d1_srtp.cc" + , "src/ssl/dtls_method.cc" + , "src/ssl/dtls_record.cc" + , "src/ssl/encrypted_client_hello.cc" + , "src/ssl/extensions.cc" + , "src/ssl/handoff.cc" + , "src/ssl/handshake.cc" + , "src/ssl/handshake_client.cc" + , "src/ssl/handshake_server.cc" + , "src/ssl/s3_both.cc" + , "src/ssl/s3_lib.cc" + , "src/ssl/s3_pkt.cc" + , "src/ssl/ssl_aead_ctx.cc" + , "src/ssl/ssl_asn1.cc" + , "src/ssl/ssl_buffer.cc" + , "src/ssl/ssl_cert.cc" + , "src/ssl/ssl_cipher.cc" + , "src/ssl/ssl_credential.cc" + , "src/ssl/ssl_file.cc" + , "src/ssl/ssl_key_share.cc" + , "src/ssl/ssl_lib.cc" + , "src/ssl/ssl_privkey.cc" + , "src/ssl/ssl_session.cc" + , "src/ssl/ssl_stat.cc" + , "src/ssl/ssl_transcript.cc" + , "src/ssl/ssl_versions.cc" + , "src/ssl/ssl_x509.cc" + , "src/ssl/t1_enc.cc" + , "src/ssl/tls13_both.cc" + , "src/ssl/tls13_client.cc" + , "src/ssl/tls13_enc.cc" + , "src/ssl/tls13_server.cc" + , "src/ssl/tls_method.cc" + , "src/ssl/tls_record.cc" ] } -, "asm_sources": - { "type": "install" - , "arguments_config": ["OS", "ARCH", "TARGET_ARCH"] - , "deps": - { "type": "let*" - , "bindings": - [ [ "PLATFORM" - , { "type": "join" - , "separator": "_" - , "$1": - [ {"type": "var", "name": "OS"} - , { "type": "var" - , "name": "TARGET_ARCH" - , "default": {"type": "var", "name": "ARCH"} - } - ] - } - ] - ] - , "body": - { "type": "case" - , "expr": {"type": "var", "name": "PLATFORM"} - , "case": - { "mac_arm64": ["crypto_sources_apple_aarch64"] - , "mac_x86": ["crypto_sources_apple_x86"] - , "mac_x86_64": ["crypto_sources_apple_x86_64"] - , "linux_arm64": ["crypto_sources_linux_aarch64"] - , "linux_arm": ["crypto_sources_linux_arm"] - , "linux_x86": ["crypto_sources_linux_x86"] - , "linux_x86_64": 
["crypto_sources_linux_x86_64"] - , "windows_arm64": ["crypto_sources_win_aarch64"] - , "windows_x86": ["crypto_sources_win_x86"] - , "windows_x86_64": ["crypto_sources_win_x86_64"] - } - } - } - } +, "ssl_internal_headers": {"type": "install", "deps": ["src/ssl/internal.h"]} } diff --git a/etc/import/TARGETS.google_apis b/etc/import/TARGETS.google_apis index e07f15be3..89cb8ac01 100644 --- a/etc/import/TARGETS.google_apis +++ b/etc/import/TARGETS.google_apis @@ -26,6 +26,14 @@ { "type": ["@", "rules", "proto", "library"] , "name": ["google_api_client_proto"] , "srcs": ["google/api/client.proto"] + , "deps": ["google_api_launch_stage_proto"] + } +, "google_api_launch_stage_proto": + {"type": "export", "target": "google_api_launch_stage_proto (unexported)"} +, "google_api_launch_stage_proto (unexported)": + { "type": ["@", "rules", "proto", "library"] + , "name": ["google_api_launch_stage_proto"] + , "srcs": ["google/api/launch_stage.proto"] } , "google_api_expr_v1alpha1_checked_proto": { "type": "export" @@ -46,11 +54,6 @@ , "name": ["google_api_expr_v1alpha1_syntax_proto"] , "srcs": ["google/api/expr/v1alpha1/syntax.proto"] } -, "google/bytestream/bytestream.proto": - { "type": ["@", "rules", "patch", "file"] - , "src": [["FILE", null, "google/bytestream/bytestream.proto"]] - , "patch": [["@", "patches", "", "bytestream.proto.diff"]] - } , "google_bytestream_proto": { "type": "export" , "target": "google_bytestream_proto (unexported)" diff --git a/etc/import/TARGETS.grpc b/etc/import/TARGETS.grpc index 300eff2bf..1d8dc19d4 100644 --- a/etc/import/TARGETS.grpc +++ b/etc/import/TARGETS.grpc @@ -1,7 +1,9 @@ { "grpcxx_sources": { "type": ["@", "rules", "data", "staged"] , "srcs": - [ "src/cpp/client/channel_cc.cc" + [ "src/cpp/client/call_credentials.cc" + , "src/cpp/client/channel_cc.cc" + , "src/cpp/client/channel_credentials.cc" , "src/cpp/client/client_callback.cc" , "src/cpp/client/client_context.cc" , "src/cpp/client/client_interceptor.cc" @@ -27,6 +29,7 @@ , 
"src/cpp/server/server_callback.cc" , "src/cpp/server/server_cc.cc" , "src/cpp/server/server_context.cc" + , "src/cpp/server/server_credentials.cc" , "src/cpp/server/server_posix.cc" , "src/cpp/thread_manager/thread_manager.cc" , "src/cpp/util/byte_buffer_cc.cc" @@ -69,18 +72,25 @@ , "grpc_trace" , "http_connect_handshaker" , "iomgr_timer" + , "server" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "log"] , ["src/core", "channel_args"] , ["src/core", "channel_init"] , ["src/core", "channel_stack_type"] + , ["src/core", "client_channel_backup_poller"] , ["src/core", "default_event_engine"] + , ["src/core", "endpoint_info_handshaker"] , ["src/core", "experiments"] , ["src/core", "forkable"] , ["src/core", "grpc_authorization_base"] + , ["src/core", "http_proxy_mapper"] , ["src/core", "init_internally"] , ["src/core", "posix_event_engine_timer_manager"] + , ["src/core", "server_call_tracer_filter"] + , ["src/core", "service_config_channel_arg_filter"] , ["src/core", "slice"] , ["src/core", "tcp_connect_handshaker"] - , ["@", "absl", "absl/base", "core_headers"] ] } , "grpc": @@ -134,14 +144,19 @@ , "iomgr_timer" , "promise" , "ref_counted_ptr" + , "server" , "sockaddr_utils" , "tsi_base" , "uri_parser" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "log"] , ["src/core", "channel_args"] - , ["src/core", "channel_init"] , ["src/core", "channel_creds_registry_init"] + , ["src/core", "channel_init"] , ["src/core", "channel_stack_type"] + , ["src/core", "client_channel_backup_poller"] , ["src/core", "default_event_engine"] + , ["src/core", "endpoint_info_handshaker"] , ["src/core", "experiments"] , ["src/core", "forkable"] , ["src/core", "grpc_authorization_base"] @@ -150,34 +165,36 @@ , ["src/core", "grpc_google_default_credentials"] , ["src/core", "grpc_iam_credentials"] , ["src/core", "grpc_insecure_credentials"] - , ["src/core", "grpc_local_credentials"] - , ["src/core", "grpc_oauth2_credentials"] + , 
["src/core", "grpc_lb_policy_cds"] + , ["src/core", "grpc_lb_policy_ring_hash"] , ["src/core", "grpc_lb_policy_rls"] - , ["src/core", "grpc_lb_policy_xds_cluster_manager"] , ["src/core", "grpc_lb_policy_xds_cluster_impl"] + , ["src/core", "grpc_lb_policy_xds_cluster_manager"] , ["src/core", "grpc_lb_policy_xds_override_host"] - , ["src/core", "grpc_lb_policy_cds"] - , ["src/core", "grpc_lb_policy_ring_hash"] , ["src/core", "grpc_lb_policy_xds_wrr_locality"] + , ["src/core", "grpc_local_credentials"] + , ["src/core", "grpc_oauth2_credentials"] , ["src/core", "grpc_rbac_filter"] , ["src/core", "grpc_resolver_c2p"] , ["src/core", "grpc_resolver_xds"] , ["src/core", "grpc_ssl_credentials"] , ["src/core", "grpc_stateful_session_filter"] - , ["src/core", "grpc_xds_channel_stack_modifier"] - , ["src/core", "grpc_xds_client"] , ["src/core", "grpc_tls_credentials"] , ["src/core", "grpc_transport_chttp2_alpn"] + , ["src/core", "grpc_xds_channel_stack_modifier"] + , ["src/core", "grpc_xds_client"] + , ["src/core", "http_proxy_mapper"] , ["src/core", "httpcli_ssl_credentials"] , ["src/core", "init_internally"] , ["src/core", "json"] , ["src/core", "posix_event_engine_timer_manager"] , ["src/core", "ref_counted"] + , ["src/core", "server_call_tracer_filter"] + , ["src/core", "service_config_channel_arg_filter"] , ["src/core", "slice"] , ["src/core", "slice_refcount"] , ["src/core", "tcp_connect_handshaker"] , ["src/core", "useful"] - , ["@", "absl", "absl/base", "core_headers"] ] } , "gpr": @@ -205,33 +222,7 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["gpr"] , "srcs": - [ "src/core/lib/gpr/alloc.cc" - , "src/core/lib/gpr/android/log.cc" - , "src/core/lib/gpr/iphone/cpu.cc" - , "src/core/lib/gpr/linux/cpu.cc" - , "src/core/lib/gpr/linux/log.cc" - , "src/core/lib/gpr/log.cc" - , "src/core/lib/gpr/msys/tmpfile.cc" - , "src/core/lib/gpr/posix/cpu.cc" - , "src/core/lib/gpr/posix/log.cc" - , "src/core/lib/gpr/posix/string.cc" - , "src/core/lib/gpr/posix/sync.cc" - , 
"src/core/lib/gpr/posix/time.cc" - , "src/core/lib/gpr/posix/tmpfile.cc" - , "src/core/lib/gpr/string.cc" - , "src/core/lib/gpr/sync.cc" - , "src/core/lib/gpr/sync_abseil.cc" - , "src/core/lib/gpr/time.cc" - , "src/core/lib/gpr/time_precise.cc" - , "src/core/lib/gpr/windows/cpu.cc" - , "src/core/lib/gpr/windows/log.cc" - , "src/core/lib/gpr/windows/string.cc" - , "src/core/lib/gpr/windows/string_util.cc" - , "src/core/lib/gpr/windows/sync.cc" - , "src/core/lib/gpr/windows/time.cc" - , "src/core/lib/gpr/windows/tmpfile.cc" - , "src/core/lib/gpr/wrap_memcpy.cc" - , "src/core/lib/gprpp/crash.cc" + [ "src/core/lib/gprpp/crash.cc" , "src/core/lib/gprpp/fork.cc" , "src/core/lib/gprpp/host_port.cc" , "src/core/lib/gprpp/mpscq.cc" @@ -240,13 +231,30 @@ , "src/core/lib/gprpp/time_util.cc" , "src/core/lib/gprpp/windows/stat.cc" , "src/core/lib/gprpp/windows/thd.cc" + , "src/core/util/alloc.cc" + , "src/core/util/iphone/cpu.cc" + , "src/core/util/linux/cpu.cc" + , "src/core/util/log.cc" + , "src/core/util/msys/tmpfile.cc" + , "src/core/util/posix/cpu.cc" + , "src/core/util/posix/string.cc" + , "src/core/util/posix/sync.cc" + , "src/core/util/posix/time.cc" + , "src/core/util/posix/tmpfile.cc" + , "src/core/util/string.cc" + , "src/core/util/sync.cc" + , "src/core/util/sync_abseil.cc" + , "src/core/util/time.cc" + , "src/core/util/time_precise.cc" + , "src/core/util/windows/cpu.cc" + , "src/core/util/windows/string.cc" + , "src/core/util/windows/string_util.cc" + , "src/core/util/windows/sync.cc" + , "src/core/util/windows/time.cc" + , "src/core/util/windows/tmpfile.cc" ] , "hdrs": - [ "src/core/lib/gpr/alloc.h" - , "src/core/lib/gpr/string.h" - , "src/core/lib/gpr/time_precise.h" - , "src/core/lib/gpr/tmpfile.h" - , "src/core/lib/gprpp/crash.h" + [ "src/core/lib/gprpp/crash.h" , "src/core/lib/gprpp/fork.h" , "src/core/lib/gprpp/host_port.h" , "src/core/lib/gprpp/memory.h" @@ -255,23 +263,22 @@ , "src/core/lib/gprpp/sync.h" , "src/core/lib/gprpp/thd.h" , 
"src/core/lib/gprpp/time_util.h" + , "src/core/util/alloc.h" + , "src/core/util/string.h" + , "src/core/util/time_precise.h" + , "src/core/util/tmpfile.h" , ["include/grpc", "gpr_public_headers"] ] , "deps": [ "config_vars" , "debug_location" - , ["src/core", "construct_destruct"] - , ["src/core", "env"] - , ["src/core", "event_engine_thread_local"] - , ["src/core", "examine_stack"] - , ["src/core", "gpr_atm"] - , ["src/core", "no_destruct"] - , ["src/core", "strerror"] - , ["src/core", "tchar"] - , ["src/core", "useful"] , ["@", "absl", "absl/base", "base"] , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/base", "log_severity"] , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "globals"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/memory", "memory"] , ["@", "absl", "absl/random", "random"] , ["@", "absl", "absl/status", "status"] @@ -281,6 +288,15 @@ , ["@", "absl", "absl/synchronization", "synchronization"] , ["@", "absl", "absl/time", "time"] , ["@", "absl", "absl/types", "optional"] + , ["src/core", "construct_destruct"] + , ["src/core", "env"] + , ["src/core", "event_engine_thread_local"] + , ["src/core", "examine_stack"] + , ["src/core", "gpr_atm"] + , ["src/core", "no_destruct"] + , ["src/core", "strerror"] + , ["src/core", "tchar"] + , ["src/core", "useful"] ] } , "gpr_public_hdrs": @@ -308,7 +324,6 @@ , ["src/core", "grpc_backend_metric_filter"] , ["src/core", "grpc_channel_idle_filter"] , ["src/core", "grpc_client_authority_filter"] - , ["src/core", "grpc_deadline_filter"] , ["src/core", "grpc_fault_injection_filter"] , ["src/core", "grpc_lb_policy_grpclb"] , ["src/core", "grpc_lb_policy_outlier_detection"] @@ -330,7 +345,10 @@ , "grpc_public_hdrs": { "type": ["@", "rules", "CC", "library"] , "name": ["grpc_public_hdrs"] - , "hdrs": [["include/grpc", "grpc_public_headers"]] + , "hdrs": + [ ["include/grpc", "grpc_public_headers"] + , ["include/grpc", 
"grpc_public_event_engine_headers"] + ] , "deps": [ "channel_arg_names" , "gpr_public_hdrs" @@ -346,11 +364,16 @@ , ["include/grpcpp", "grpcpp_public_headers"] ] , "deps": - [ "grpc_public_hdrs" - , ["src/core", "gpr_atm"] + [ "global_callback_hook" + , "grpc_public_hdrs" + , ["@", "absl", "absl/log", "absl_check"] + , ["@", "absl", "absl/log", "absl_log"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/strings", "cord"] , ["@", "absl", "absl/synchronization", "synchronization"] , ["@", "protobuf", "", "libprotobuf"] + , ["src/core", "gpr_atm"] ] } , "channel_arg_names": @@ -390,10 +413,15 @@ , ["include/grpcpp", "grpcpp_public_headers"] ] , "deps": - [ "grpc++_base" + [ "global_callback_hook" + , "grpc++_base" + , ["@", "absl", "absl/log", "absl_check"] + , ["@", "absl", "absl/log", "absl_log"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "cord"] , ["src/core", "gpr_atm"] , ["src/core", "slice"] - , ["@", "absl", "absl/strings", "cord"] ] } , "grpc_cronet_hdrs": @@ -426,6 +454,14 @@ , "grpc_public_hdrs" , "grpc_trace" , "ref_counted_ptr" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["src/core", "error"] , ["src/core", "grpc_audit_logging"] , ["src/core", "grpc_authorization_base"] @@ -438,12 +474,6 @@ , ["src/core", "slice_refcount"] , ["src/core", "status_helper"] , ["src/core", "useful"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", 
"optional"] ] } , "grpc++_authorization_provider": @@ -467,12 +497,13 @@ , "deps": [ "gpr" , "grpc_mock_cel" - , ["src/core", "grpc_authorization_base"] - , ["src/core/ext/upb-gen", "upb-gen-lib"] , ["@", "absl", "absl/container", "flat_hash_set"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] , ["@", "absl", "absl/types", "span"] + , ["src/core", "grpc_authorization_base"] + , ["src/core/ext/upb-gen", "upb-gen-lib"] , ["third_party/upb", "base"] , ["third_party/upb", "mem"] , ["third_party/upb", "message"] @@ -527,7 +558,9 @@ , "include/grpcpp/security/binder_security_policy.h" ] , "deps": - [ "config" + [ "channel" + , "channel_create" + , "config" , "debug_location" , "exec_ctx" , "gpr" @@ -539,6 +572,22 @@ , "grpc_public_hdrs" , "orphanable" , "ref_counted_ptr" + , "server" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/cleanup", "cleanup"] + , ["@", "absl", "absl/container", "flat_hash_map"] + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/hash", "hash"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/memory", "memory"] + , ["@", "absl", "absl/meta", "type_traits"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/synchronization", "synchronization"] + , ["@", "absl", "absl/time", "time"] + , ["@", "absl", "absl/types", "variant"] , ["src/core", "arena"] , ["src/core", "channel_args"] , ["src/core", "channel_args_preconditioning"] @@ -552,20 +601,8 @@ , ["src/core", "slice"] , ["src/core", "slice_refcount"] , ["src/core", "status_helper"] + , ["src/core", "subchannel_connector"] , ["src/core", "transport_fwd"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/cleanup", "cleanup"] - , ["@", "absl", "absl/container", "flat_hash_map"] - , ["@", "absl", 
"absl/functional", "any_invocable"] - , ["@", "absl", "absl/hash", "hash"] - , ["@", "absl", "absl/memory", "memory"] - , ["@", "absl", "absl/meta", "type_traits"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/synchronization", "synchronization"] - , ["@", "absl", "absl/time", "time"] - , ["@", "absl", "absl/types", "variant"] ] } , "grpc++_xds_client": @@ -581,6 +618,7 @@ , "grpc_base" , "grpc_public_hdrs" , "grpc_security_base" + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/strings", "strings"] ] } @@ -600,6 +638,7 @@ , "gpr" , "grpc" , "grpc++_base" + , ["@", "absl", "absl/log", "check"] , ["src/core", "xds_enabled_server"] ] } @@ -611,12 +650,28 @@ , "src/cpp/common/insecure_create_auth_context.cc" , "src/cpp/server/insecure_server_credentials.cc" ] + , "hdrs": + [ ["include/grpc++", "grpc++_public_headers"] + , ["include/grpcpp", "grpcpp_public_headers"] + ] , "deps": - [ "gpr" + [ "channel_arg_names" + , "generic_stub_internal" + , "global_callback_hook" + , "gpr" , "grpc++_base_unsecure" , "grpc++_codegen_proto" + , "grpc_core_credentials_header" , "grpc_public_hdrs" + , "grpc_security_base" , "grpc_unsecure" + , ["@", "absl", "absl/log", "absl_check"] + , ["@", "absl", "absl/log", "absl_log"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/synchronization", "synchronization"] + , ["src/core", "gpr_atm"] , ["src/core", "grpc_insecure_credentials"] ] } @@ -639,11 +694,11 @@ , "include/grpcpp/security/alts_util.h" ] , "deps": - [ "alts_upb" - , "gpr" + [ "gpr" , "grpc++" , "grpc_base" , "tsi_alts_credentials" + , ["@", "absl", "absl/log", "log"] , ["third_party/upb", "base"] , ["third_party/upb", "mem"] , ["third_party/upb", "message"] @@ -655,7 +710,12 @@ , "srcs": ["src/core/ext/filters/census/grpc_context.cc"] , "hdrs": [["include/grpc", 
"census_headers"]] , "deps": - ["gpr", "grpc_base", "grpc_public_hdrs", "grpc_trace", "legacy_context"] + [ "gpr" + , "grpc_base" + , "grpc_public_hdrs" + , "grpc_trace" + , ["src/core", "arena"] + ] } , "gpr_platform": { "type": ["@", "rules", "CC", "library"] @@ -683,24 +743,112 @@ , ["@", "absl", "absl/types", "optional"] ] } -, "grpc_base": +, "channelz": { "type": ["@", "rules", "CC", "library"] - , "name": ["grpc_base"] + , "name": ["channelz"] , "srcs": - [ "src/core/lib/channel/call_tracer.cc" - , "src/core/lib/channel/channel_stack.cc" - , "src/core/lib/channel/channel_stack_builder_impl.cc" - , "src/core/lib/channel/channel_trace.cc" - , "src/core/lib/channel/channelz.cc" - , "src/core/lib/channel/channelz_registry.cc" - , "src/core/lib/channel/connected_channel.cc" - , "src/core/lib/channel/promise_based_filter.cc" - , "src/core/lib/channel/server_call_tracer_filter.cc" - , "src/core/lib/channel/status_util.cc" - , "src/core/lib/compression/compression.cc" - , "src/core/lib/compression/message_compress.cc" - , "src/core/lib/iomgr/call_combiner.cc" - , "src/core/lib/iomgr/cfstream_handle.cc" + [ "src/core/channelz/channel_trace.cc" + , "src/core/channelz/channelz.cc" + , "src/core/channelz/channelz_registry.cc" + ] + , "hdrs": + [ "src/core/channelz/channel_trace.h" + , "src/core/channelz/channelz.h" + , "src/core/channelz/channelz_registry.h" + ] + , "deps": + [ "exec_ctx" + , "gpr" + , "grpc_public_hdrs" + , "parse_address" + , "ref_counted_ptr" + , "sockaddr_utils" + , "uri_parser" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["src/core", "channel_args"] + , ["src/core", "connectivity_state"] + , ["src/core", "json"] + , ["src/core", "json_writer"] + , ["src/core", "per_cpu"] + , ["src/core", "ref_counted"] + , ["src/core", 
"resolved_address"] + , ["src/core", "slice"] + , ["src/core", "time"] + , ["src/core", "useful"] + ] + } +, "dynamic_annotations": + { "type": ["@", "rules", "CC", "library"] + , "name": ["dynamic_annotations"] + , "hdrs": ["src/core/lib/iomgr/dynamic_annotations.h"] + , "deps": ["gpr_public_hdrs"] + } +, "call_combiner": + { "type": ["@", "rules", "CC", "library"] + , "name": ["call_combiner"] + , "srcs": ["src/core/lib/iomgr/call_combiner.cc"] + , "hdrs": ["src/core/lib/iomgr/call_combiner.h"] + , "deps": + [ "dynamic_annotations" + , "exec_ctx" + , "gpr" + , "ref_counted_ptr" + , "stats" + , ["@", "absl", "absl/container", "inlined_vector"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["src/core", "closure"] + , ["src/core", "gpr_atm"] + , ["src/core", "ref_counted"] + , ["src/core", "stats_data"] + ] + } +, "resource_quota_api": + { "type": ["@", "rules", "CC", "library"] + , "name": ["resource_quota_api"] + , "srcs": ["src/core/lib/resource_quota/api.cc"] + , "hdrs": ["src/core/lib/resource_quota/api.h"] + , "deps": + [ "channel_arg_names" + , "config" + , "event_engine_base_hdrs" + , "exec_ctx" + , "gpr_public_hdrs" + , "grpc_public_hdrs" + , "ref_counted_ptr" + , ["@", "absl", "absl/strings", "strings"] + , ["src/core", "channel_args"] + , ["src/core", "memory_quota"] + , ["src/core", "resource_quota"] + , ["src/core", "thread_quota"] + ] + } +, "byte_buffer": + { "type": ["@", "rules", "CC", "library"] + , "name": ["byte_buffer"] + , "srcs": + [ "src/core/lib/surface/byte_buffer.cc" + , "src/core/lib/surface/byte_buffer_reader.cc" + ] + , "deps": + [ "exec_ctx" + , "gpr_public_hdrs" + , "grpc_public_hdrs" + , ["@", "absl", "absl/log", "check"] + , ["src/core", "compression"] + , ["src/core", "slice"] + ] + } +, "iomgr": + { "type": ["@", "rules", "CC", "library"] + , "name": ["iomgr"] + , "srcs": + [ "src/core/lib/iomgr/cfstream_handle.cc" , "src/core/lib/iomgr/dualstack_socket_posix.cc" , "src/core/lib/iomgr/endpoint.cc" 
, "src/core/lib/iomgr/endpoint_cfstream.cc" @@ -711,10 +859,6 @@ , "src/core/lib/iomgr/ev_epoll1_linux.cc" , "src/core/lib/iomgr/ev_poll_posix.cc" , "src/core/lib/iomgr/ev_posix.cc" - , "src/core/lib/iomgr/ev_windows.cc" - , "src/core/lib/iomgr/event_engine_shims/closure.cc" - , "src/core/lib/iomgr/event_engine_shims/endpoint.cc" - , "src/core/lib/iomgr/event_engine_shims/tcp_client.cc" , "src/core/lib/iomgr/fork_posix.cc" , "src/core/lib/iomgr/fork_windows.cc" , "src/core/lib/iomgr/gethostname_fallback.cc" @@ -758,47 +902,13 @@ , "src/core/lib/iomgr/wakeup_fd_nospecial.cc" , "src/core/lib/iomgr/wakeup_fd_pipe.cc" , "src/core/lib/iomgr/wakeup_fd_posix.cc" - , "src/core/lib/resource_quota/api.cc" - , "src/core/lib/slice/b64.cc" - , "src/core/lib/surface/api_trace.cc" - , "src/core/lib/surface/builtins.cc" - , "src/core/lib/surface/byte_buffer.cc" - , "src/core/lib/surface/byte_buffer_reader.cc" - , "src/core/lib/surface/call.cc" - , "src/core/lib/surface/call_details.cc" - , "src/core/lib/surface/call_log_batch.cc" - , "src/core/lib/surface/call_trace.cc" - , "src/core/lib/surface/channel.cc" - , "src/core/lib/surface/channel_ping.cc" - , "src/core/lib/surface/completion_queue.cc" - , "src/core/lib/surface/completion_queue_factory.cc" - , "src/core/lib/surface/event_string.cc" - , "src/core/lib/surface/lame_client.cc" - , "src/core/lib/surface/metadata_array.cc" - , "src/core/lib/surface/server.cc" - , "src/core/lib/surface/validate_metadata.cc" - , "src/core/lib/surface/version.cc" - , "src/core/lib/surface/wait_for_cq_end_op.cc" - , "src/core/lib/transport/batch_builder.cc" - , "src/core/lib/transport/transport.cc" - , "src/core/lib/transport/transport_op_string.cc" + , "src/core/lib/iomgr/event_engine_shims/closure.cc" + , "src/core/lib/iomgr/event_engine_shims/endpoint.cc" + , "src/core/lib/iomgr/event_engine_shims/tcp_client.cc" ] , "hdrs": - [ "src/core/lib/channel/call_finalization.h" - , "src/core/lib/channel/call_tracer.h" - , 
"src/core/lib/channel/channel_stack.h" - , "src/core/lib/channel/channel_stack_builder_impl.h" - , "src/core/lib/channel/channel_trace.h" - , "src/core/lib/channel/channelz.h" - , "src/core/lib/channel/channelz_registry.h" - , "src/core/lib/channel/connected_channel.h" - , "src/core/lib/channel/promise_based_filter.h" - , "src/core/lib/channel/status_util.h" - , "src/core/lib/compression/message_compress.h" - , "src/core/lib/iomgr/block_annotate.h" - , "src/core/lib/iomgr/call_combiner.h" + [ "src/core/lib/iomgr/block_annotate.h" , "src/core/lib/iomgr/cfstream_handle.h" - , "src/core/lib/iomgr/dynamic_annotations.h" , "src/core/lib/iomgr/endpoint.h" , "src/core/lib/iomgr/endpoint_cfstream.h" , "src/core/lib/iomgr/endpoint_pair.h" @@ -807,9 +917,6 @@ , "src/core/lib/iomgr/ev_epoll1_linux.h" , "src/core/lib/iomgr/ev_poll_posix.h" , "src/core/lib/iomgr/ev_posix.h" - , "src/core/lib/iomgr/event_engine_shims/closure.h" - , "src/core/lib/iomgr/event_engine_shims/endpoint.h" - , "src/core/lib/iomgr/event_engine_shims/tcp_client.h" , "src/core/lib/iomgr/gethostname.h" , "src/core/lib/iomgr/iocp_windows.h" , "src/core/lib/iomgr/iomgr.h" @@ -841,33 +948,16 @@ , "src/core/lib/iomgr/vsock.h" , "src/core/lib/iomgr/wakeup_fd_pipe.h" , "src/core/lib/iomgr/wakeup_fd_posix.h" - , "src/core/lib/resource_quota/api.h" - , "src/core/lib/slice/b64.h" - , "src/core/lib/surface/api_trace.h" - , "src/core/lib/surface/builtins.h" - , "src/core/lib/surface/call.h" - , "src/core/lib/surface/call_test_only.h" - , "src/core/lib/surface/call_trace.h" - , "src/core/lib/surface/channel.h" - , "src/core/lib/surface/completion_queue.h" - , "src/core/lib/surface/completion_queue_factory.h" - , "src/core/lib/surface/event_string.h" - , "src/core/lib/surface/init.h" - , "src/core/lib/surface/lame_client.h" - , "src/core/lib/surface/server.h" - , "src/core/lib/surface/validate_metadata.h" - , "src/core/lib/surface/wait_for_cq_end_op.h" - , "src/core/lib/transport/batch_builder.h" - , 
"src/core/lib/transport/transport.h" + , "src/core/lib/iomgr/event_engine_shims/closure.h" + , "src/core/lib/iomgr/event_engine_shims/endpoint.h" + , "src/core/lib/iomgr/event_engine_shims/tcp_client.h" , ["include/grpc", "grpc_public_event_engine_headers"] , ["include/grpc", "grpc_public_headers"] ] , "deps": - [ "channel_arg_names" - , "channel_stack_builder" - , "config" + [ "byte_buffer" + , "channel_arg_names" , "config_vars" - , "cpp_impl_of" , "debug_location" , "exec_ctx" , "gpr" @@ -876,42 +966,31 @@ , "iomgr_buffer_list" , "iomgr_internal_errqueue" , "iomgr_timer" - , "legacy_context" , "orphanable" , "parse_address" - , "promise" - , "ref_counted_ptr" + , "resource_quota_api" , "sockaddr_utils" , "stats" - , "tcp_tracer" - , "uri_parser" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/container", "flat_hash_map"] + , ["@", "absl", "absl/container", "flat_hash_set"] + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/time", "time"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/utility", "utility"] , ["src/core", "1999"] - , ["src/core", "activity"] - , ["src/core", "all_ok"] - , ["src/core", "arena"] - , ["src/core", "arena_promise"] - , ["src/core", "atomic_utils"] - , ["src/core", "bitset"] - , ["src/core", "call_factory"] - , ["src/core", "call_filters"] - , ["src/core", "call_final_info"] - , ["src/core", "call_spine"] - , ["src/core", "cancel_callback"] , ["src/core", "channel_args"] , ["src/core", "channel_args_endpoint_config"] - , ["src/core", "channel_args_preconditioning"] - , ["src/core", "channel_fwd"] - , ["src/core", "channel_init"] - , ["src/core", "channel_stack_trace"] - , ["src/core", "channel_stack_type"] - , 
["src/core", "chunked_vector"] , ["src/core", "closure"] - , ["src/core", "compression_internal"] - , ["src/core", "connectivity_state"] , ["src/core", "construct_destruct"] , ["src/core", "context"] , ["src/core", "default_event_engine"] - , ["src/core", "dual_ref_counted"] , ["src/core", "error"] , ["src/core", "error_utils"] , ["src/core", "event_engine_common"] @@ -920,87 +999,388 @@ , ["src/core", "event_engine_query_extensions"] , ["src/core", "event_engine_shim"] , ["src/core", "event_engine_tcp_socket_utils"] - , ["src/core", "event_engine_trace"] , ["src/core", "event_log"] + , ["src/core", "examine_stack"] , ["src/core", "experiments"] - , ["src/core", "for_each"] , ["src/core", "gpr_atm"] , ["src/core", "gpr_manual_constructor"] - , ["src/core", "gpr_spinlock"] , ["src/core", "grpc_sockaddr"] - , ["src/core", "if"] , ["src/core", "init_internally"] , ["src/core", "iomgr_fwd"] , ["src/core", "iomgr_port"] - , ["src/core", "json"] - , ["src/core", "json_writer"] - , ["src/core", "latch"] - , ["src/core", "loop"] - , ["src/core", "map"] - , ["src/core", "match"] , ["src/core", "memory_quota"] - , ["src/core", "message"] - , ["src/core", "metadata"] - , ["src/core", "metadata_batch"] , ["src/core", "no_destruct"] - , ["src/core", "per_cpu"] - , ["src/core", "pipe"] - , ["src/core", "poll"] , ["src/core", "pollset_set"] , ["src/core", "posix_event_engine_base_hdrs"] , ["src/core", "posix_event_engine_endpoint"] - , ["src/core", "promise_status"] - , ["src/core", "promise_trace"] - , ["src/core", "race"] - , ["src/core", "random_early_detection"] - , ["src/core", "ref_counted"] , ["src/core", "resolved_address"] , ["src/core", "resource_quota"] - , ["src/core", "resource_quota_trace"] - , ["src/core", "seq"] , ["src/core", "slice"] , ["src/core", "slice_buffer"] , ["src/core", "slice_cast"] , ["src/core", "slice_refcount"] , ["src/core", "socket_mutator"] , ["src/core", "stats_data"] - , ["src/core", "status_flag"] - , ["src/core", "status_helper"] , 
["src/core", "strerror"] - , ["src/core", "thread_quota"] , ["src/core", "time"] - , ["src/core", "transport_fwd"] - , ["src/core", "try_join"] - , ["src/core", "try_seq"] , ["src/core", "useful"] , ["src/core", "windows_event_engine"] , ["src/core", "windows_event_engine_listener"] + ] + } +, "call_tracer": + { "type": ["@", "rules", "CC", "library"] + , "name": ["call_tracer"] + , "srcs": ["src/core/telemetry/call_tracer.cc"] + , "hdrs": ["src/core/telemetry/call_tracer.h"] + , "deps": + [ "gpr" + , "tcp_tracer" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["src/core", "arena"] + , ["src/core", "call_final_info"] + , ["src/core", "channel_args"] + , ["src/core", "context"] + , ["src/core", "error"] + , ["src/core", "metadata_batch"] + , ["src/core", "ref_counted_string"] + , ["src/core", "slice_buffer"] + ] + } +, "channel": + { "type": ["@", "rules", "CC", "library"] + , "name": ["channel"] + , "srcs": ["src/core/lib/surface/channel.cc"] + , "hdrs": ["src/core/lib/surface/channel.h"] + , "deps": + [ "channel_arg_names" + , "channelz" + , "cpp_impl_of" + , "event_engine_base_hdrs" + , "exec_ctx" + , "gpr" + , "grpc_public_hdrs" + , "grpc_trace" + , "ref_counted_ptr" + , "stats" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["src/core", "arena"] + , ["src/core", "call_arena_allocator"] + , ["src/core", "call_destination"] + , ["src/core", "channel_args"] + , ["src/core", "channel_stack_type"] + , ["src/core", "compression"] + , ["src/core", "connectivity_state"] + , ["src/core", "iomgr_fwd"] + , ["src/core", "ref_counted"] + , ["src/core", "resource_quota"] + , ["src/core", "slice"] + , ["src/core", "stats_data"] + , ["src/core", "time"] + ] + } +, 
"legacy_channel": + { "type": ["@", "rules", "CC", "library"] + , "name": ["legacy_channel"] + , "srcs": ["src/core/lib/surface/legacy_channel.cc"] + , "hdrs": ["src/core/lib/surface/legacy_channel.h"] + , "deps": + [ "channel" + , "channelz" + , "config" + , "exec_ctx" + , "gpr" + , "grpc_base" + , "grpc_client_channel" + , "ref_counted_ptr" + , "stats" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/types", "optional"] + , ["src/core", "arena"] + , ["src/core", "call_arena_allocator"] + , ["src/core", "channel_args"] + , ["src/core", "channel_args_endpoint_config"] + , ["src/core", "channel_fwd"] + , ["src/core", "channel_init"] + , ["src/core", "channel_stack_type"] + , ["src/core", "closure"] + , ["src/core", "dual_ref_counted"] + , ["src/core", "error"] + , ["src/core", "init_internally"] + , ["src/core", "iomgr_fwd"] + , ["src/core", "metrics"] + , ["src/core", "resource_quota"] + , ["src/core", "slice"] + , ["src/core", "stats_data"] + , ["src/core", "time"] + ] + } +, "channel_create": + { "type": ["@", "rules", "CC", "library"] + , "name": ["channel_create"] + , "srcs": ["src/core/lib/surface/channel_create.cc"] + , "hdrs": ["src/core/lib/surface/channel_create.h"] + , "deps": + [ "channel" + , "channel_arg_names" + , "channelz" + , "config" + , "gpr" + , "grpc_base" + , "grpc_client_channel" + , "grpc_public_hdrs" + , "legacy_channel" + , "ref_counted_ptr" + , "stats" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["src/core", "arena"] + , ["src/core", "channel_args"] + , ["src/core", "channel_stack_type"] + , ["src/core", "direct_channel"] + , ["src/core", "experiments"] + , ["src/core", 
"iomgr_fwd"] + , ["src/core", "ref_counted"] + , ["src/core", "slice"] + , ["src/core", "stats_data"] + ] + } +, "server": + { "type": ["@", "rules", "CC", "library"] + , "name": ["server"] + , "srcs": ["src/core/server/server.cc"] + , "hdrs": ["src/core/server/server.h"] + , "deps": + [ "call_combiner" + , "call_tracer" + , "channel" + , "channel_arg_names" + , "channelz" + , "config" + , "cpp_impl_of" + , "debug_location" + , "exec_ctx" + , "gpr" + , "grpc_base" + , "grpc_public_hdrs" + , "grpc_trace" + , "iomgr" + , "legacy_channel" + , "orphanable" + , "promise" + , "ref_counted_ptr" + , "stats" , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/cleanup", "cleanup"] , ["@", "absl", "absl/container", "flat_hash_map"] , ["@", "absl", "absl/container", "flat_hash_set"] + , ["@", "absl", "absl/hash", "hash"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/random", "random"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["src/core", "activity"] + , ["src/core", "arena_promise"] + , ["src/core", "cancel_callback"] + , ["src/core", "channel_args"] + , ["src/core", "channel_args_preconditioning"] + , ["src/core", "channel_fwd"] + , ["src/core", "channel_stack_type"] + , ["src/core", "closure"] + , ["src/core", "connectivity_state"] + , ["src/core", "context"] + , ["src/core", "dual_ref_counted"] + , ["src/core", "error"] + , ["src/core", "error_utils"] + , ["src/core", "experiments"] + , ["src/core", "interception_chain"] + , ["src/core", "iomgr_fwd"] + , ["src/core", "map"] + , ["src/core", "metadata_batch"] + , ["src/core", "pipe"] + , ["src/core", "poll"] + , ["src/core", "pollset_set"] + , ["src/core", "random_early_detection"] + , ["src/core", "seq"] + , ["src/core", "server_interface"] + , ["src/core", "slice"] + , ["src/core", "slice_buffer"] + , ["src/core", 
"status_helper"] + , ["src/core", "time"] + , ["src/core", "try_join"] + , ["src/core", "try_seq"] + , ["src/core", "useful"] + ] + } +, "grpc_base": + { "type": ["@", "rules", "CC", "library"] + , "name": ["grpc_base"] + , "srcs": + [ "src/core/lib/channel/channel_stack.cc" + , "src/core/lib/channel/channel_stack_builder_impl.cc" + , "src/core/lib/channel/connected_channel.cc" + , "src/core/lib/channel/promise_based_filter.cc" + , "src/core/lib/channel/status_util.cc" + , "src/core/lib/compression/message_compress.cc" + , "src/core/lib/surface/call.cc" + , "src/core/lib/surface/call_details.cc" + , "src/core/lib/surface/call_log_batch.cc" + , "src/core/lib/surface/call_utils.cc" + , "src/core/lib/surface/client_call.cc" + , "src/core/lib/surface/completion_queue.cc" + , "src/core/lib/surface/completion_queue_factory.cc" + , "src/core/lib/surface/event_string.cc" + , "src/core/lib/surface/filter_stack_call.cc" + , "src/core/lib/surface/lame_client.cc" + , "src/core/lib/surface/metadata_array.cc" + , "src/core/lib/surface/server_call.cc" + , "src/core/lib/surface/validate_metadata.cc" + , "src/core/lib/surface/version.cc" + , "src/core/lib/transport/transport.cc" + , "src/core/lib/transport/transport_op_string.cc" + ] + , "hdrs": + [ "src/core/lib/channel/channel_stack.h" + , "src/core/lib/channel/channel_stack_builder_impl.h" + , "src/core/lib/channel/connected_channel.h" + , "src/core/lib/channel/promise_based_filter.h" + , "src/core/lib/channel/status_util.h" + , "src/core/lib/compression/message_compress.h" + , "src/core/lib/surface/call.h" + , "src/core/lib/surface/call_test_only.h" + , "src/core/lib/surface/call_utils.h" + , "src/core/lib/surface/client_call.h" + , "src/core/lib/surface/completion_queue.h" + , "src/core/lib/surface/completion_queue_factory.h" + , "src/core/lib/surface/event_string.h" + , "src/core/lib/surface/filter_stack_call.h" + , "src/core/lib/surface/init.h" + , "src/core/lib/surface/lame_client.h" + , "src/core/lib/surface/server_call.h" 
+ , "src/core/lib/surface/validate_metadata.h" + , "src/core/lib/transport/transport.h" + , ["include/grpc", "grpc_public_event_engine_headers"] + , ["include/grpc", "grpc_public_headers"] + ] + , "deps": + [ "call_combiner" + , "call_tracer" + , "channel" + , "channel_arg_names" + , "channel_stack_builder" + , "channelz" + , "config" + , "cpp_impl_of" + , "debug_location" + , "exec_ctx" + , "gpr" + , "grpc_core_credentials_header" + , "grpc_public_hdrs" + , "grpc_trace" + , "iomgr" + , "iomgr_timer" + , "orphanable" + , "promise" + , "ref_counted_ptr" + , "stats" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/container", "flat_hash_map"] , ["@", "absl", "absl/container", "inlined_vector"] , ["@", "absl", "absl/functional", "any_invocable"] , ["@", "absl", "absl/functional", "function_ref"] - , ["@", "absl", "absl/hash", "hash"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/meta", "type_traits"] - , ["@", "absl", "absl/random", "random"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/time", "time"] , ["@", "absl", "absl/types", "optional"] , ["@", "absl", "absl/utility", "utility"] , ["@", "zlib", "", "zlib"] + , ["src/core", "1999"] + , ["src/core", "activity"] + , ["src/core", "all_ok"] + , ["src/core", "arena"] + , ["src/core", "arena_promise"] + , ["src/core", "atomic_utils"] + , ["src/core", "bitset"] + , ["src/core", "call_destination"] + , ["src/core", "call_filters"] + , ["src/core", "call_final_info"] + , ["src/core", "call_finalization"] + , ["src/core", "call_spine"] + , ["src/core", "cancel_callback"] + , ["src/core", "channel_args"] + , ["src/core", "channel_args_preconditioning"] + , ["src/core", "channel_fwd"] + , ["src/core", "channel_init"] + , ["src/core", "channel_stack_type"] + , ["src/core", "closure"] + , ["src/core", 
"compression"] + , ["src/core", "connectivity_state"] + , ["src/core", "context"] + , ["src/core", "default_event_engine"] + , ["src/core", "error"] + , ["src/core", "error_utils"] + , ["src/core", "event_engine_common"] + , ["src/core", "event_engine_context"] + , ["src/core", "experiments"] + , ["src/core", "for_each"] + , ["src/core", "gpr_atm"] + , ["src/core", "gpr_manual_constructor"] + , ["src/core", "gpr_spinlock"] + , ["src/core", "if"] + , ["src/core", "iomgr_fwd"] + , ["src/core", "latch"] + , ["src/core", "latent_see"] + , ["src/core", "loop"] + , ["src/core", "map"] + , ["src/core", "match"] + , ["src/core", "message"] + , ["src/core", "metadata"] + , ["src/core", "metadata_batch"] + , ["src/core", "metrics"] + , ["src/core", "no_destruct"] + , ["src/core", "pipe"] + , ["src/core", "poll"] + , ["src/core", "promise_status"] + , ["src/core", "race"] + , ["src/core", "ref_counted"] + , ["src/core", "seq"] + , ["src/core", "server_interface"] + , ["src/core", "single_set_ptr"] + , ["src/core", "slice"] + , ["src/core", "slice_buffer"] + , ["src/core", "slice_cast"] + , ["src/core", "slice_refcount"] + , ["src/core", "stats_data"] + , ["src/core", "status_flag"] + , ["src/core", "status_helper"] + , ["src/core", "time"] + , ["src/core", "transport_fwd"] + , ["src/core", "try_seq"] + , ["src/core", "unique_type_name"] + , ["src/core", "useful"] ] } -, "legacy_context": - { "type": ["@", "rules", "CC", "library"] - , "name": ["legacy_context"] - , "hdrs": ["src/core/lib/channel/context.h"] - , "deps": ["gpr_platform", ["src/core", "context"]] - } , "lb_load_data_store": { "type": ["@", "rules", "CC", "library"] , "name": ["lb_load_data_store"] @@ -1009,7 +1389,14 @@ [ "src/cpp/server/load_reporter/constants.h" , "src/cpp/server/load_reporter/load_data_store.h" ] - , "deps": ["gpr", "gpr_platform", "grpc++", ["src/core", "grpc_sockaddr"]] + , "deps": + [ "gpr" + , "gpr_platform" + , "grpc++" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", 
"log"] + , ["src/core", "grpc_sockaddr"] + ] } , "lb_server_load_reporting_service_server_builder_plugin": { "type": ["@", "rules", "CC", "library"] @@ -1039,6 +1426,7 @@ , "grpc++_public_hdrs" , "grpc_public_hdrs" , "lb_server_load_reporting_service_server_builder_plugin" + , ["@", "absl", "absl/log", "log"] , ["src/core", "lb_server_load_reporting_filter"] ] } @@ -1052,9 +1440,11 @@ [ "gpr" , "grpc++" , "lb_load_reporter" - , ["src/proto/grpc/lb/v1", "load_reporter_proto"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/memory", "memory"] , ["@", "protobuf", "", "libprotobuf"] + , ["src/proto/grpc/lb/v1", "load_reporter_proto"] ] } , "lb_get_cpu_stats": @@ -1067,7 +1457,7 @@ , "src/cpp/server/load_reporter/get_cpu_stats_windows.cc" ] , "hdrs": ["src/cpp/server/load_reporter/get_cpu_stats.h"] - , "deps": ["gpr", "gpr_platform"] + , "deps": ["gpr", "gpr_platform", ["@", "absl", "absl/log", "log"]] } , "lb_load_reporter": { "type": ["@", "rules", "CC", "library"] @@ -1081,57 +1471,69 @@ [ "gpr" , "lb_get_cpu_stats" , "lb_load_data_store" - , ["src/proto/grpc/lb/v1", "load_reporter_proto"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "opencensus-stats", "", ""] , ["@", "opencensus-tags", "", ""] , ["@", "protobuf", "", "libprotobuf"] + , ["src/proto/grpc/lb/v1", "load_reporter_proto"] ] } , "grpc_security_base": { "type": ["@", "rules", "CC", "library"] , "name": ["grpc_security_base"] , "srcs": - [ "src/core/lib/security/context/security_context.cc" + [ "src/core/handshaker/security/secure_endpoint.cc" + , "src/core/handshaker/security/security_handshaker.cc" + , "src/core/lib/security/context/security_context.cc" , "src/core/lib/security/credentials/call_creds_util.cc" , "src/core/lib/security/credentials/composite/composite_credentials.cc" , "src/core/lib/security/credentials/credentials.cc" , "src/core/lib/security/credentials/plugin/plugin_credentials.cc" , 
"src/core/lib/security/security_connector/security_connector.cc" , "src/core/lib/security/transport/client_auth_filter.cc" - , "src/core/lib/security/transport/legacy_server_auth_filter.cc" - , "src/core/lib/security/transport/secure_endpoint.cc" - , "src/core/lib/security/transport/security_handshaker.cc" , "src/core/lib/security/transport/server_auth_filter.cc" - , "src/core/lib/security/transport/tsi_error.cc" ] , "hdrs": - [ "src/core/lib/security/context/security_context.h" + [ "src/core/handshaker/security/secure_endpoint.h" + , "src/core/handshaker/security/security_handshaker.h" + , "src/core/lib/security/context/security_context.h" , "src/core/lib/security/credentials/call_creds_util.h" , "src/core/lib/security/credentials/composite/composite_credentials.h" , "src/core/lib/security/credentials/credentials.h" , "src/core/lib/security/credentials/plugin/plugin_credentials.h" , "src/core/lib/security/security_connector/security_connector.h" , "src/core/lib/security/transport/auth_filters.h" - , "src/core/lib/security/transport/secure_endpoint.h" - , "src/core/lib/security/transport/security_handshaker.h" - , "src/core/lib/security/transport/tsi_error.h" , ["include/grpc", "grpc_public_headers"] ] , "deps": [ "channel_arg_names" + , "channelz" , "config" , "debug_location" , "exec_ctx" , "gpr" , "grpc_base" + , "grpc_core_credentials_header" , "grpc_public_hdrs" , "grpc_trace" , "handshaker" - , "legacy_context" + , "iomgr" + , "orphanable" , "promise" , "ref_counted_ptr" + , "resource_quota_api" , "stats" , "tsi_base" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/container", "inlined_vector"] + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["src/core", "activity"] , ["src/core", 
"arena"] , ["src/core", "arena_promise"] @@ -1150,21 +1552,14 @@ , ["src/core", "poll"] , ["src/core", "ref_counted"] , ["src/core", "resource_quota"] - , ["src/core", "resource_quota_trace"] , ["src/core", "seq"] , ["src/core", "slice"] , ["src/core", "slice_refcount"] , ["src/core", "stats_data"] , ["src/core", "status_helper"] - , ["src/core", "try_seq"] - , ["src/core", "unique_type_name"] - , ["src/core", "useful"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/container", "inlined_vector"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] + , ["src/core", "try_seq"] + , ["src/core", "unique_type_name"] + , ["src/core", "useful"] ] } , "tsi_base": @@ -1181,6 +1576,11 @@ ] , "deps": ["gpr", "grpc_public_hdrs", "grpc_trace"] } +, "grpc_core_credentials_header": + { "type": ["@", "rules", "CC", "library"] + , "name": ["grpc_core_credentials_header"] + , "hdrs": [["include/grpc", "grpc_core_credentials_header"]] + } , "alts_util": { "type": ["@", "rules", "CC", "library"] , "name": ["alts_util"] @@ -1202,10 +1602,12 @@ ] , "deps": [ "gpr" + , "grpc_core_credentials_header" , "grpc_public_hdrs" + , ["@", "absl", "absl/log", "log"] + , ["src/core/ext/upb-gen", "upb-gen-lib"] , ["third_party/upb", "base"] , ["third_party/upb", "mem"] - , ["src/core/ext/upb-gen", "upb-gen-lib"] ] } , "tsi": @@ -1216,11 +1618,11 @@ , "tsi_alts_frame_protector" , "tsi_base" , "tsi_fake_credentials" - , ["src/core", "tsi_local_credentials"] - , ["src/core", "useful"] , ["@", "absl", "absl/strings", "strings"] , ["@", "ssl", "", "crypto"] , ["@", "ssl", "", "ssl"] + , ["src/core", "tsi_local_credentials"] + , ["src/core", "useful"] , ["third_party/upb", "base"] , ["third_party/upb", "mem"] ] @@ -1233,7 +1635,6 @@ , "src/cpp/client/secure_credentials.cc" , "src/cpp/common/auth_property_iterator.cc" , "src/cpp/common/secure_auth_context.cc" - 
, "src/cpp/common/secure_channel_arguments.cc" , "src/cpp/common/secure_create_auth_context.cc" , "src/cpp/common/tls_certificate_provider.cc" , "src/cpp/common/tls_certificate_verifier.cc" @@ -1255,10 +1656,13 @@ , "channel_stack_builder" , "config" , "exec_ctx" + , "generic_stub_internal" + , "global_callback_hook" , "gpr" , "grpc" , "grpc++_codegen_proto" , "grpc_base" + , "grpc_core_credentials_header" , "grpc_credentials_util" , "grpc_public_hdrs" , "grpc_security_base" @@ -1267,9 +1671,26 @@ , "grpcpp_backend_metric_recorder" , "grpcpp_call_metric_recorder" , "grpcpp_status" + , "iomgr" , "iomgr_timer" - , "legacy_context" , "ref_counted_ptr" + , "resource_quota_api" + , "server" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/container", "inlined_vector"] + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "absl_check"] + , ["@", "absl", "absl/log", "absl_log"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/memory", "memory"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/synchronization", "synchronization"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "protobuf", "", "libprotobuf"] , ["src/core", "arena"] , ["src/core", "channel_args"] , ["src/core", "channel_fwd"] @@ -1285,6 +1706,7 @@ , ["src/core", "grpc_backend_metric_provider"] , ["src/core", "grpc_crl_provider"] , ["src/core", "grpc_service_config"] + , ["src/core", "grpc_transport_chttp2_server"] , ["src/core", "grpc_transport_inproc"] , ["src/core", "json"] , ["src/core", "json_reader"] @@ -1299,19 +1721,9 @@ , ["src/core", "thread_quota"] , ["src/core", "time"] , ["src/core", "useful"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/container", "inlined_vector"] - , ["@", "absl", "absl/memory", "memory"] - 
, ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/synchronization", "synchronization"] - , ["@", "absl", "absl/types", "optional"] + , ["src/core/ext/upb-gen", "upb-gen-lib"] , ["third_party/upb", "base"] , ["third_party/upb", "mem"] - , ["@", "protobuf", "", "libprotobuf"] - , ["src/core/ext/upb-gen", "upb-gen-lib"] ] } , "grpc++_base_unsecure": @@ -1328,19 +1740,40 @@ , "channel_stack_builder" , "config" , "exec_ctx" + , "generic_stub_internal" + , "global_callback_hook" , "gpr" , "grpc_base" + , "grpc_core_credentials_header" , "grpc_health_upb" , "grpc_public_hdrs" + , "grpc_security_base" , "grpc_service_config_impl" , "grpc_trace" + , "grpc_transport_chttp2" , "grpc_unsecure" , "grpcpp_backend_metric_recorder" , "grpcpp_call_metric_recorder" , "grpcpp_status" + , "iomgr" , "iomgr_timer" - , "legacy_context" , "ref_counted_ptr" + , "resource_quota_api" + , "server" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "absl_check"] + , ["@", "absl", "absl/log", "absl_log"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/memory", "memory"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/synchronization", "synchronization"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "protobuf", "", "libprotobuf"] , ["src/core", "arena"] , ["src/core", "channel_args"] , ["src/core", "channel_init"] @@ -1352,6 +1785,7 @@ , ["src/core", "grpc_backend_metric_provider"] , ["src/core", "grpc_insecure_credentials"] , ["src/core", "grpc_service_config"] + , ["src/core", "grpc_transport_chttp2_server"] , ["src/core", "grpc_transport_inproc"] , 
["src/core", "ref_counted"] , ["src/core", "resource_quota"] @@ -1360,15 +1794,6 @@ , ["src/core", "thread_quota"] , ["src/core", "time"] , ["src/core", "useful"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/memory", "memory"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/synchronization", "synchronization"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "protobuf", "", "libprotobuf"] , ["third_party/upb", "base"] , ["third_party/upb", "mem"] ] @@ -1409,11 +1834,12 @@ , "include/grpcpp/ext/proto_server_reflection_plugin.h" ] , "deps": - [ "grpc++" + [ "config_vars" + , "grpc++" , "grpc++_config_proto" + , ["@", "protobuf", "", "libprotobuf"] , ["src/proto/grpc/reflection/v1", "reflection_proto"] , ["src/proto/grpc/reflection/v1alpha", "reflection_proto"] - , ["@", "protobuf", "", "libprotobuf"] ] } , "grpcpp_call_metric_recorder": @@ -1439,10 +1865,11 @@ , "grpc++_public_hdrs" , "grpc_trace" , "grpcpp_call_metric_recorder" - , ["src/core", "grpc_backend_metric_data"] - , ["src/core", "grpc_backend_metric_provider"] , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/strings", "strings"] + , ["src/core", "grpc_backend_metric_data"] + , ["src/core", "grpc_backend_metric_provider"] ] } , "grpcpp_orca_service": @@ -1458,16 +1885,17 @@ , "grpc_base" , "grpcpp_backend_metric_recorder" , "ref_counted_ptr" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/time", "time"] + , ["@", "absl", "absl/types", "optional"] , ["@", "protobuf", "", "libprotobuf"] , ["src/core", "default_event_engine"] , ["src/core", "grpc_backend_metric_data"] , ["src/core", "ref_counted"] , ["src/core", "time"] , ["src/core/ext/upb-gen", "upb-gen-lib"] - , 
["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/time", "time"] - , ["@", "absl", "absl/types", "optional"] , ["third_party/upb", "base"] , ["third_party/upb", "mem"] ] @@ -1488,8 +1916,8 @@ , "grpc" , "grpc++" , "grpc++_config_proto" - , ["src/proto/grpc/channelz", "channelz_proto"] , ["@", "protobuf", "", "libprotobuf"] + , ["src/proto/grpc/channelz", "channelz_proto"] ] } , "grpcpp_csds": @@ -1501,9 +1929,9 @@ [ "gpr" , "grpc" , "grpc++_base" - , ["src/proto/grpc/testing/xds/v3", "csds_proto"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] + , ["src/proto/grpc/testing/xds/v3", "csds_proto"] ] } , "grpcpp_admin": @@ -1531,7 +1959,7 @@ , "include/grpcpp/test/mock_stream.h" , "include/grpcpp/test/server_context_test_spouse.h" ] - , "deps": ["grpc++", "grpc_base", ["@", "gtest", "", ""]] + , "deps": ["channel", "grpc++", "grpc_base", ["@", "gtest", "", ""]] } , "grpc_opencensus_plugin": { "type": ["@", "rules", "CC", "library"] @@ -1556,27 +1984,16 @@ , "src/cpp/ext/filters/census/server_call_tracer.h" ] , "deps": - [ "config" + [ "call_tracer" + , "config" , "gpr" , "grpc++_base" , "grpc_base" , "grpc_public_hdrs" - , "legacy_context" , "tcp_tracer" - , ["src/core", "arena"] - , ["src/core", "arena_promise"] - , ["src/core", "channel_args"] - , ["src/core", "channel_fwd"] - , ["src/core", "channel_stack_type"] - , ["src/core", "context"] - , ["src/core", "error"] - , ["src/core", "logging_filter"] - , ["src/core", "metadata_batch"] - , ["src/core", "slice"] - , ["src/core", "slice_buffer"] - , ["src/core", "slice_refcount"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/base", "endian"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "str_format"] @@ -1590,6 +2007,19 @@ , ["@", "opencensus-trace-context_util", "", ""] , ["@", 
"opencensus-trace-propagation", "", ""] , ["@", "opencensus-trace-span_context", "", ""] + , ["src/core", "arena"] + , ["src/core", "arena_promise"] + , ["src/core", "channel_args"] + , ["src/core", "channel_fwd"] + , ["src/core", "channel_stack_type"] + , ["src/core", "context"] + , ["src/core", "error"] + , ["src/core", "experiments"] + , ["src/core", "logging_filter"] + , ["src/core", "metadata_batch"] + , ["src/core", "slice"] + , ["src/core", "slice_buffer"] + , ["src/core", "slice_refcount"] ] } , "grpcpp_gcp_observability": @@ -1608,7 +2038,25 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["grpcpp_otel_plugin"] , "hdrs": ["include/grpcpp/ext/otel_plugin.h"] - , "deps": [["src/cpp/ext/otel", "otel_plugin"]] + , "deps": ["grpc++", ["src/cpp/ext/otel", "otel_plugin"]] + } +, "generic_stub_internal": + { "type": ["@", "rules", "CC", "library"] + , "name": ["generic_stub_internal"] + , "hdrs": ["include/grpcpp/impl/generic_stub_internal.h"] + , "deps": ["grpc++_public_hdrs"] + } +, "generic_stub_callback": + { "type": ["@", "rules", "CC", "library"] + , "name": ["generic_stub_callback"] + , "hdrs": ["include/grpcpp/generic/generic_stub_callback.h"] + , "deps": ["generic_stub_internal"] + } +, "callback_generic_service": + { "type": ["@", "rules", "CC", "library"] + , "name": ["callback_generic_service"] + , "hdrs": ["include/grpcpp/generic/callback_generic_service.h"] + , "deps": ["grpc++_public_hdrs"] } , "work_serializer": { "type": ["@", "rules", "CC", "library"] @@ -1623,22 +2071,32 @@ , "grpc_trace" , "orphanable" , "stats" - , ["src/core", "experiments"] - , ["src/core", "stats_data"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/container", "inlined_vector"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["src/core", "experiments"] + , ["src/core", "latent_see"] + , ["src/core", "stats_data"] ] } , "grpc_trace": { "type": ["@", "rules", "CC", "library"] , "name": ["grpc_trace"] - , "srcs": 
["src/core/lib/debug/trace.cc"] - , "hdrs": ["src/core/lib/debug/trace.h"] + , "srcs": + ["src/core/lib/debug/trace.cc", "src/core/lib/debug/trace_flags.cc"] + , "hdrs": + [ "src/core/lib/debug/trace.h" + , "src/core/lib/debug/trace_flags.h" + , "src/core/lib/debug/trace_impl.h" + ] , "deps": [ "config_vars" , "gpr" , "grpc_public_hdrs" , ["@", "absl", "absl/strings", "strings"] + , ["src/core", "glob"] + , ["src/core", "no_destruct"] ] } , "load_config": @@ -1648,12 +2106,12 @@ , "hdrs": ["src/core/lib/config/load_config.h"] , "deps": [ "gpr_platform" - , ["src/core", "env"] - , ["src/core", "gpr_log_internal"] , ["@", "absl", "absl/flags", "flag"] , ["@", "absl", "absl/flags", "marshalling"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["src/core", "env"] ] } , "config_vars": @@ -1680,6 +2138,8 @@ , "deps": [ "gpr" , "grpc_resolver" + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] , ["src/core", "certificate_provider_registry"] , ["src/core", "channel_args_preconditioning"] , ["src/core", "channel_creds_registry"] @@ -1688,14 +2148,13 @@ , ["src/core", "lb_policy_registry"] , ["src/core", "proxy_mapper_registry"] , ["src/core", "service_config_parser"] - , ["@", "absl", "absl/functional", "any_invocable"] ] } , "debug_location": { "type": ["@", "rules", "CC", "library"] , "name": ["debug_location"] , "hdrs": ["src/core/lib/gprpp/debug_location.h"] - , "deps": ["gpr_platform"] + , "deps": ["gpr_platform", ["@", "absl", "absl/strings", "strings"]] } , "orphanable": { "type": ["@", "rules", "CC", "library"] @@ -1705,6 +2164,7 @@ [ "debug_location" , "gpr_platform" , "ref_counted_ptr" + , ["src/core", "down_cast"] , ["src/core", "ref_counted"] ] } @@ -1714,11 +2174,11 @@ , "hdrs": ["src/core/lib/promise/promise.h"] , "deps": [ "gpr_platform" - , ["src/core", "poll"] - , ["src/core", "promise_like"] , ["@", "absl", "absl/functional", 
"any_invocable"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/types", "optional"] + , ["src/core", "poll"] + , ["src/core", "promise_like"] ] } , "ref_counted_ptr": @@ -1726,13 +2186,17 @@ , "name": ["ref_counted_ptr"] , "hdrs": ["src/core/lib/gprpp/ref_counted_ptr.h"] , "deps": - ["debug_location", "gpr_platform", ["@", "absl", "absl/hash", "hash"]] + [ "debug_location" + , "gpr_platform" + , ["@", "absl", "absl/hash", "hash"] + , ["src/core", "down_cast"] + ] } , "handshaker": { "type": ["@", "rules", "CC", "library"] , "name": ["handshaker"] - , "srcs": ["src/core/lib/transport/handshaker.cc"] - , "hdrs": ["src/core/lib/transport/handshaker.h"] + , "srcs": ["src/core/handshaker/handshaker.cc"] + , "hdrs": ["src/core/handshaker/handshaker.h"] , "deps": [ "debug_location" , "event_engine_base_hdrs" @@ -1741,7 +2205,17 @@ , "grpc_base" , "grpc_public_hdrs" , "grpc_trace" + , "iomgr" + , "orphanable" , "ref_counted_ptr" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/container", "inlined_vector"] + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] , ["src/core", "channel_args"] , ["src/core", "closure"] , ["src/core", "error"] @@ -1750,17 +2224,13 @@ , ["src/core", "slice_buffer"] , ["src/core", "status_helper"] , ["src/core", "time"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/container", "inlined_vector"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/strings", "str_format"] ] } , "http_connect_handshaker": { "type": ["@", "rules", "CC", "library"] , "name": ["http_connect_handshaker"] - , "srcs": ["src/core/lib/transport/http_connect_handshaker.cc"] - , "hdrs": ["src/core/lib/transport/http_connect_handshaker.h"] + , "srcs": 
["src/core/handshaker/http_connect/http_connect_handshaker.cc"] + , "hdrs": ["src/core/handshaker/http_connect/http_connect_handshaker.h"] , "deps": [ "config" , "debug_location" @@ -1769,7 +2239,13 @@ , "grpc_base" , "handshaker" , "httpcli" + , "iomgr" , "ref_counted_ptr" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["src/core", "channel_args"] , ["src/core", "closure"] , ["src/core", "error"] @@ -1778,10 +2254,6 @@ , ["src/core", "iomgr_fwd"] , ["src/core", "slice"] , ["src/core", "slice_buffer"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "exec_ctx": @@ -1804,14 +2276,17 @@ , "gpr" , "grpc_public_hdrs" , "grpc_trace" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "str_format"] , ["src/core", "closure"] , ["src/core", "error"] , ["src/core", "experiments"] , ["src/core", "gpr_atm"] , ["src/core", "gpr_spinlock"] + , ["src/core", "latent_see"] , ["src/core", "time"] , ["src/core", "useful"] - , ["@", "absl", "absl/strings", "str_format"] ] } , "sockaddr_utils": @@ -1822,13 +2297,15 @@ , "deps": [ "gpr" , "uri_parser" - , ["src/core", "grpc_sockaddr"] - , ["src/core", "iomgr_port"] - , ["src/core", "resolved_address"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] + , ["src/core", "grpc_sockaddr"] + , ["src/core", "iomgr_port"] + , ["src/core", "resolved_address"] ] } , "iomgr_timer": @@ -1853,6 +2330,10 @@ , "gpr" , "gpr_platform" , "grpc_trace" + , ["@", "absl", "absl/log", "check"] + , ["@", 
"absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] , ["src/core", "closure"] , ["src/core", "gpr_manual_constructor"] , ["src/core", "gpr_spinlock"] @@ -1860,8 +2341,6 @@ , ["src/core", "time"] , ["src/core", "time_averaged_stats"] , ["src/core", "useful"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] ] } , "iomgr_internal_errqueue": @@ -1869,7 +2348,12 @@ , "name": ["iomgr_internal_errqueue"] , "srcs": ["src/core/lib/iomgr/internal_errqueue.cc"] , "hdrs": ["src/core/lib/iomgr/internal_errqueue.h"] - , "deps": ["gpr", ["src/core", "iomgr_port"], ["src/core", "strerror"]] + , "deps": + [ "gpr" + , ["@", "absl", "absl/log", "log"] + , ["src/core", "iomgr_port"] + , ["src/core", "strerror"] + ] } , "iomgr_buffer_list": { "type": ["@", "rules", "CC", "library"] @@ -1879,11 +2363,12 @@ , "deps": [ "gpr" , "iomgr_internal_errqueue" - , ["src/core", "error"] - , ["src/core", "iomgr_port"] - , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["src/core", "error"] + , ["src/core", "iomgr_port"] ] } , "uri_parser": @@ -1893,6 +2378,7 @@ , "hdrs": ["src/core/lib/uri/uri_parser.h"] , "deps": [ "gpr" + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "str_format"] @@ -1914,14 +2400,16 @@ , "deps": [ "gpr" , "uri_parser" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] , ["src/core", "error"] , ["src/core", "grpc_sockaddr"] , ["src/core", "iomgr_port"] , ["src/core", "resolved_address"] , ["src/core", "status_helper"] - , ["@", "absl", 
"absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] ] } , "backoff": @@ -1931,22 +2419,22 @@ , "hdrs": ["src/core/lib/backoff/backoff.h"] , "deps": [ "gpr_platform" - , ["src/core", "time"] , ["@", "absl", "absl/random", "random"] + , ["src/core", "time"] ] } , "stats": { "type": ["@", "rules", "CC", "library"] , "name": ["stats"] - , "srcs": ["src/core/lib/debug/stats.cc"] - , "hdrs": ["src/core/lib/debug/stats.h"] + , "srcs": ["src/core/telemetry/stats.cc"] + , "hdrs": ["src/core/telemetry/stats.h"] , "deps": [ "gpr" + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "span"] , ["src/core", "histogram_view"] , ["src/core", "no_destruct"] , ["src/core", "stats_data"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "span"] ] } , "channel_stack_builder": @@ -1957,11 +2445,11 @@ , "deps": [ "gpr" , "ref_counted_ptr" + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] , ["src/core", "channel_args"] , ["src/core", "channel_fwd"] , ["src/core", "channel_stack_type"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] ] } , "grpc_service_config_impl": @@ -1973,6 +2461,11 @@ [ "config" , "gpr" , "ref_counted_ptr" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["src/core", "channel_args"] , ["src/core", "grpc_service_config"] , ["src/core", "json"] @@ -1984,10 +2477,6 @@ , ["src/core", "slice"] , ["src/core", "slice_refcount"] , ["src/core", "validation_errors"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "endpoint_addresses": @@ -1999,14 +2488,15 @@ [ "gpr" , "gpr_platform" , 
"sockaddr_utils" - , ["src/core", "channel_args"] - , ["src/core", "resolved_address"] - , ["src/core", "useful"] , ["@", "absl", "absl/functional", "function_ref"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] + , ["src/core", "channel_args"] + , ["src/core", "resolved_address"] + , ["src/core", "useful"] ] } , "server_address": @@ -2033,74 +2523,120 @@ , "ref_counted_ptr" , "server_address" , "uri_parser" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] , ["src/core", "channel_args"] , ["src/core", "grpc_service_config"] , ["src/core", "iomgr_fwd"] + ] + } +, "oob_backend_metric": + { "type": ["@", "rules", "CC", "library"] + , "name": ["oob_backend_metric"] + , "srcs": ["src/core/load_balancing/oob_backend_metric.cc"] + , "hdrs": + [ "src/core/load_balancing/oob_backend_metric.h" + , "src/core/load_balancing/oob_backend_metric_internal.h" + ] + , "deps": + [ "channelz" + , "debug_location" + , "exec_ctx" + , "gpr" + , "grpc_client_channel" + , "grpc_public_hdrs" + , "grpc_trace" + , "orphanable" + , "ref_counted_ptr" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "strings"] + , ["src/core", "backend_metric_parser"] + , ["src/core", "closure"] + , ["src/core", "error"] + , ["src/core", "grpc_backend_metric_data"] + , ["src/core", "iomgr_fwd"] + , ["src/core", "pollset_set"] + , ["src/core", "slice"] + , ["src/core", "subchannel_interface"] + , ["src/core", "time"] + , ["src/core", "unique_type_name"] + , ["third_party/upb", "base"] + , 
["third_party/upb", "mem"] + ] + } +, "lb_child_policy_handler": + { "type": ["@", "rules", "CC", "library"] + , "name": ["lb_child_policy_handler"] + , "srcs": ["src/core/load_balancing/child_policy_handler.cc"] + , "hdrs": ["src/core/load_balancing/child_policy_handler.h"] + , "deps": + [ "config" + , "debug_location" + , "gpr_public_hdrs" + , "grpc_public_hdrs" + , "grpc_trace" + , "orphanable" + , "ref_counted_ptr" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] + , ["src/core", "channel_args"] + , ["src/core", "connectivity_state"] + , ["src/core", "delegating_helper"] + , ["src/core", "lb_policy"] + , ["src/core", "lb_policy_registry"] + , ["src/core", "pollset_set"] + , ["src/core", "resolved_address"] + , ["src/core", "subchannel_interface"] ] } , "grpc_client_channel": { "type": ["@", "rules", "CC", "library"] , "name": ["grpc_client_channel"] , "srcs": - [ "src/core/client_channel/backend_metric.cc" - , "src/core/client_channel/backup_poller.cc" - , "src/core/client_channel/channel_connectivity.cc" - , "src/core/client_channel/client_channel_channelz.cc" + [ "src/core/client_channel/client_channel.cc" , "src/core/client_channel/client_channel_factory.cc" , "src/core/client_channel/client_channel_filter.cc" , "src/core/client_channel/client_channel_plugin.cc" - , "src/core/client_channel/client_channel_service_config.cc" - , "src/core/client_channel/config_selector.cc" , "src/core/client_channel/dynamic_filters.cc" , "src/core/client_channel/global_subchannel_pool.cc" - , "src/core/client_channel/http_proxy_mapper.cc" + , "src/core/client_channel/load_balanced_call_destination.cc" , "src/core/client_channel/local_subchannel_pool.cc" , "src/core/client_channel/retry_filter.cc" , "src/core/client_channel/retry_filter_legacy_call_data.cc" - , 
"src/core/client_channel/retry_service_config.cc" - , "src/core/client_channel/retry_throttle.cc" - , "src/core/client_channel/service_config_channel_arg_filter.cc" , "src/core/client_channel/subchannel.cc" - , "src/core/client_channel/subchannel_pool_interface.cc" , "src/core/client_channel/subchannel_stream_client.cc" - , "src/core/load_balancing/child_policy_handler.cc" - , "src/core/load_balancing/oob_backend_metric.cc" ] , "hdrs": - [ "src/core/client_channel/backend_metric.h" - , "src/core/client_channel/backup_poller.h" - , "src/core/client_channel/client_channel_channelz.h" + [ "src/core/client_channel/client_channel.h" , "src/core/client_channel/client_channel_factory.h" , "src/core/client_channel/client_channel_filter.h" - , "src/core/client_channel/client_channel_internal.h" - , "src/core/client_channel/client_channel_service_config.h" - , "src/core/client_channel/config_selector.h" - , "src/core/client_channel/connector.h" , "src/core/client_channel/dynamic_filters.h" , "src/core/client_channel/global_subchannel_pool.h" - , "src/core/client_channel/http_proxy_mapper.h" + , "src/core/client_channel/load_balanced_call_destination.h" , "src/core/client_channel/local_subchannel_pool.h" , "src/core/client_channel/retry_filter.h" , "src/core/client_channel/retry_filter_legacy_call_data.h" - , "src/core/client_channel/retry_service_config.h" - , "src/core/client_channel/retry_throttle.h" , "src/core/client_channel/subchannel.h" , "src/core/client_channel/subchannel_interface_internal.h" - , "src/core/client_channel/subchannel_pool_interface.h" , "src/core/client_channel/subchannel_stream_client.h" - , "src/core/load_balancing/child_policy_handler.h" - , "src/core/load_balancing/oob_backend_metric.h" - , "src/core/load_balancing/oob_backend_metric_internal.h" ] , "deps": [ "backoff" + , "call_combiner" + , "call_tracer" + , "channel" , "channel_arg_names" + , "channelz" , "config" - , "config_vars" , "debug_location" , "endpoint_addresses" , "exec_ctx" @@ 
-2111,90 +2647,99 @@ , "grpc_security_base" , "grpc_service_config_impl" , "grpc_trace" - , "http_connect_handshaker" - , "iomgr_timer" - , "legacy_context" + , "iomgr" + , "lb_child_policy_handler" , "orphanable" - , "parse_address" , "promise" , "ref_counted_ptr" , "sockaddr_utils" , "stats" , "uri_parser" , "work_serializer" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/cleanup", "cleanup"] + , ["@", "absl", "absl/container", "flat_hash_set"] + , ["@", "absl", "absl/container", "inlined_vector"] + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "cord"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "variant"] , ["@", "protobuf", "", "libprotobuf"] - , ["src/core", "activity"] , ["src/core", "arena"] , ["src/core", "arena_promise"] + , ["src/core", "backend_metric_parser"] + , ["src/core", "call_destination"] + , ["src/core", "call_filters"] + , ["src/core", "call_spine"] , ["src/core", "cancel_callback"] , ["src/core", "channel_args"] + , ["src/core", "channel_args_endpoint_config"] , ["src/core", "channel_fwd"] , ["src/core", "channel_init"] , ["src/core", "channel_stack_type"] + , ["src/core", "client_channel_backup_poller"] + , ["src/core", "client_channel_internal_header"] + , ["src/core", "client_channel_service_config"] , ["src/core", "closure"] + , ["src/core", "config_selector"] , ["src/core", "connectivity_state"] , ["src/core", "construct_destruct"] , ["src/core", "context"] - , ["src/core", "delegating_helper"] , ["src/core", "dual_ref_counted"] - , ["src/core", "env"] , ["src/core", "error"] , ["src/core", "error_utils"] + , ["src/core", "exec_ctx_wakeup_scheduler"] , ["src/core", "experiments"] - , ["src/core", "gpr_atm"] , ["src/core", 
"gpr_manual_constructor"] , ["src/core", "grpc_backend_metric_data"] - , ["src/core", "grpc_deadline_filter"] - , ["src/core", "grpc_message_size_filter"] + , ["src/core", "grpc_channel_idle_filter"] , ["src/core", "grpc_service_config"] + , ["src/core", "idle_filter_state"] , ["src/core", "init_internally"] + , ["src/core", "interception_chain"] , ["src/core", "iomgr_fwd"] , ["src/core", "json"] - , ["src/core", "json_args"] - , ["src/core", "json_channel_args"] - , ["src/core", "json_object_loader"] , ["src/core", "latch"] + , ["src/core", "lb_metadata"] , ["src/core", "lb_policy"] , ["src/core", "lb_policy_registry"] + , ["src/core", "loop"] , ["src/core", "map"] , ["src/core", "memory_quota"] + , ["src/core", "metadata"] , ["src/core", "metadata_batch"] + , ["src/core", "metrics"] + , ["src/core", "observable"] , ["src/core", "pipe"] , ["src/core", "poll"] , ["src/core", "pollset_set"] - , ["src/core", "proxy_mapper"] , ["src/core", "proxy_mapper_registry"] , ["src/core", "ref_counted"] , ["src/core", "resolved_address"] , ["src/core", "resource_quota"] + , ["src/core", "retry_service_config"] + , ["src/core", "retry_throttle"] , ["src/core", "seq"] - , ["src/core", "service_config_parser"] + , ["src/core", "single_set_ptr"] + , ["src/core", "sleep"] , ["src/core", "slice"] , ["src/core", "slice_buffer"] , ["src/core", "slice_refcount"] , ["src/core", "stats_data"] , ["src/core", "status_helper"] + , ["src/core", "subchannel_connector"] , ["src/core", "subchannel_interface"] + , ["src/core", "subchannel_pool_interface"] , ["src/core", "time"] , ["src/core", "try_seq"] , ["src/core", "unique_type_name"] , ["src/core", "useful"] - , ["src/core", "validation_errors"] , ["src/core/ext/upb-gen", "upb-gen-lib"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/cleanup", "cleanup"] - , ["@", "absl", "absl/container", "flat_hash_set"] - , ["@", "absl", "absl/container", "inlined_vector"] - , ["@", "absl", "absl/functional", "any_invocable"] - , 
["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "cord"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "absl", "absl/types", "variant"] - , ["third_party/upb", "base"] - , ["third_party/upb", "mem"] - , ["third_party/upb", "message"] ] } , "grpc_resolver_dns_ares": @@ -2227,12 +2772,23 @@ , "grpc_resolver" , "grpc_service_config_impl" , "grpc_trace" + , "iomgr" , "iomgr_timer" , "orphanable" , "parse_address" , "ref_counted_ptr" , "sockaddr_utils" , "uri_parser" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "cares", "", "ares"] , ["src/core", "channel_args"] , ["src/core", "closure"] , ["src/core", "error"] @@ -2248,14 +2804,6 @@ , ["src/core", "slice"] , ["src/core", "status_helper"] , ["src/core", "time"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/functional", "any_invocable"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "cares", "", "ares"] , ["third_party/address_sorting", "address_sorting"] ] } @@ -2263,14 +2811,14 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["httpcli"] , "srcs": - [ "src/core/lib/http/format_request.cc" - , "src/core/lib/http/httpcli.cc" - , "src/core/lib/http/parser.cc" + [ "src/core/util/http_client/format_request.cc" + , "src/core/util/http_client/httpcli.cc" + , "src/core/util/http_client/parser.cc" ] , "hdrs": - [ 
"src/core/lib/http/format_request.h" - , "src/core/lib/http/httpcli.h" - , "src/core/lib/http/parser.h" + [ "src/core/util/http_client/format_request.h" + , "src/core/util/http_client/httpcli.h" + , "src/core/util/http_client/parser.h" ] , "deps": [ "config" @@ -2282,10 +2830,21 @@ , "grpc_security_base" , "grpc_trace" , "handshaker" + , "iomgr" , "orphanable" , "ref_counted_ptr" + , "resource_quota_api" , "sockaddr_utils" , "uri_parser" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/functional", "bind_front"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["src/core", "channel_args"] , ["src/core", "channel_args_preconditioning"] , ["src/core", "closure"] @@ -2301,13 +2860,6 @@ , ["src/core", "status_helper"] , ["src/core", "tcp_connect_handshaker"] , ["src/core", "time"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/functional", "bind_front"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_alts_credentials": @@ -2328,13 +2880,20 @@ , "exec_ctx" , "gpr" , "grpc_base" + , "grpc_core_credentials_header" , "grpc_public_hdrs" , "grpc_security_base" , "handshaker" + , "iomgr" , "promise" , "ref_counted_ptr" , "tsi_alts_credentials" , "tsi_base" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["src/core", "arena_promise"] , ["src/core", "channel_args"] , ["src/core", "closure"] @@ -2344,9 +2903,6 @@ , ["src/core", 
"slice_refcount"] , ["src/core", "unique_type_name"] , ["src/core", "useful"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "tsi_fake_credentials": @@ -2354,7 +2910,15 @@ , "name": ["tsi_fake_credentials"] , "srcs": ["src/core/tsi/fake_transport_security.cc"] , "hdrs": ["src/core/tsi/fake_transport_security.h"] - , "deps": ["gpr", "tsi_base", ["src/core", "slice"], ["src/core", "useful"]] + , "deps": + [ "gpr" + , "tsi_base" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["src/core", "dump_args"] + , ["src/core", "slice"] + , ["src/core", "useful"] + ] } , "grpc_jwt_credentials": { "type": ["@", "rules", "CC", "library"] @@ -2373,14 +2937,26 @@ [ "exec_ctx" , "gpr" , "grpc_base" + , "grpc_core_credentials_header" , "grpc_credentials_util" , "grpc_security_base" , "grpc_trace" , "httpcli" + , "iomgr" , "orphanable" , "promise" , "ref_counted_ptr" , "uri_parser" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/time", "time"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "ssl", "", "crypto"] + , ["@", "ssl", "", "ssl"] , ["src/core", "arena_promise"] , ["src/core", "closure"] , ["src/core", "error"] @@ -2397,14 +2973,6 @@ , ["src/core", "tsi_ssl_types"] , ["src/core", "unique_type_name"] , ["src/core", "useful"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/time", "time"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "ssl", "", "crypto"] - , ["@", "ssl", "", "ssl"] ] } , "grpc_credentials_util": @@ -2414,6 +2982,7 @@ [ 
"src/core/lib/security/credentials/tls/tls_utils.cc" , "src/core/lib/security/security_connector/load_system_roots_fallback.cc" , "src/core/lib/security/security_connector/load_system_roots_supported.cc" + , "src/core/lib/security/security_connector/load_system_roots_windows.cc" , "src/core/lib/security/util/json_util.cc" ] , "hdrs": @@ -2427,11 +2996,13 @@ , "gpr" , "grpc_base" , "grpc_security_base" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "strings"] , ["src/core", "error"] , ["src/core", "json"] , ["src/core", "load_file"] , ["src/core", "useful"] - , ["@", "absl", "absl/strings", "strings"] ] } , "tsi_alts_credentials": @@ -2452,20 +3023,26 @@ ] , "deps": [ "alts_util" + , "channel" + , "channel_create" + , "exec_ctx" , "gpr" , "grpc_base" + , "grpc_core_credentials_header" , "grpc_security_base" , "tsi_alts_frame_protector" , "tsi_base" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "strings"] , ["src/core", "channel_args"] , ["src/core", "closure"] , ["src/core", "env"] , ["src/core", "pollset_set"] , ["src/core", "slice"] - , ["@", "absl", "absl/strings", "strings"] + , ["src/core/ext/upb-gen", "upb-gen-lib"] , ["third_party/upb", "base"] , ["third_party/upb", "mem"] - , ["src/core/ext/upb-gen", "upb-gen-lib"] ] } , "tsi_alts_frame_protector": @@ -2507,12 +3084,14 @@ , "gpr" , "gpr_platform" , "tsi_base" - , ["src/core", "slice"] - , ["src/core", "slice_buffer"] - , ["src/core", "useful"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/types", "span"] , ["@", "ssl", "", "crypto"] , ["@", "ssl", "", "ssl"] + , ["src/core", "slice"] + , ["src/core", "slice_buffer"] + , ["src/core", "useful"] ] } , "tsi_ssl_session_cache": @@ -2531,10 +3110,12 @@ [ "cpp_impl_of" , "gpr" , "grpc_public_hdrs" - , ["src/core", "ref_counted"] - , ["src/core", "slice"] + , ["@", "absl", "absl/log", 
"check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/memory", "memory"] , ["@", "ssl", "", "ssl"] + , ["src/core", "ref_counted"] + , ["src/core", "slice"] ] } , "tsi_ssl_credentials": @@ -2557,12 +3138,21 @@ , "config_vars" , "gpr" , "grpc_base" + , "grpc_core_credentials_header" , "grpc_credentials_util" , "grpc_public_hdrs" , "grpc_security_base" , "ref_counted_ptr" , "tsi_base" , "tsi_ssl_session_cache" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "ssl", "", "crypto"] + , ["@", "ssl", "", "ssl"] , ["src/core", "channel_args"] , ["src/core", "error"] , ["src/core", "grpc_crl_provider"] @@ -2572,11 +3162,6 @@ , ["src/core", "slice"] , ["src/core", "tsi_ssl_types"] , ["src/core", "useful"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "ssl", "", "crypto"] - , ["@", "ssl", "", "ssl"] ] } , "grpc_http_filters": @@ -2586,31 +3171,37 @@ [ "src/core/ext/filters/http/client/http_client_filter.cc" , "src/core/ext/filters/http/http_filters_plugin.cc" , "src/core/ext/filters/http/message_compress/compression_filter.cc" - , "src/core/ext/filters/http/message_compress/legacy_compression_filter.cc" , "src/core/ext/filters/http/server/http_server_filter.cc" ] , "hdrs": [ "src/core/ext/filters/http/client/http_client_filter.h" , "src/core/ext/filters/http/message_compress/compression_filter.h" - , "src/core/ext/filters/http/message_compress/legacy_compression_filter.h" , "src/core/ext/filters/http/server/http_server_filter.h" ] , "deps": - [ "channel_arg_names" + [ "call_tracer" + , "channel_arg_names" , "config" , "gpr" , "grpc_base" , "grpc_public_hdrs" , "grpc_trace" - , "legacy_context" , "promise" + , ["@", "absl", "absl/base", "core_headers"] 
+ , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["src/core", "activity"] , ["src/core", "arena"] , ["src/core", "arena_promise"] , ["src/core", "channel_args"] , ["src/core", "channel_fwd"] , ["src/core", "channel_stack_type"] - , ["src/core", "compression_internal"] + , ["src/core", "compression"] , ["src/core", "context"] , ["src/core", "experiments"] , ["src/core", "grpc_message_size_filter"] @@ -2625,12 +3216,6 @@ , ["src/core", "slice"] , ["src/core", "slice_buffer"] , ["src/core", "status_conversion"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_grpclb_balancer_addresses": @@ -2650,23 +3235,25 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["xds_client"] , "srcs": - [ "src/core/ext/xds/xds_api.cc" - , "src/core/ext/xds/xds_bootstrap.cc" - , "src/core/ext/xds/xds_client.cc" - , "src/core/ext/xds/xds_client_stats.cc" + [ "src/core/xds/xds_client/xds_api.cc" + , "src/core/xds/xds_client/xds_bootstrap.cc" + , "src/core/xds/xds_client/xds_client.cc" + , "src/core/xds/xds_client/xds_client_stats.cc" ] , "hdrs": - [ "src/core/ext/xds/xds_api.h" - , "src/core/ext/xds/xds_bootstrap.h" - , "src/core/ext/xds/xds_channel_args.h" - , "src/core/ext/xds/xds_client.h" - , "src/core/ext/xds/xds_client_stats.h" - , "src/core/ext/xds/xds_resource_type.h" - , "src/core/ext/xds/xds_resource_type_impl.h" - , "src/core/ext/xds/xds_transport.h" + [ "src/core/xds/xds_client/xds_api.h" + , "src/core/xds/xds_client/xds_bootstrap.h" + , "src/core/xds/xds_client/xds_channel_args.h" + , 
"src/core/xds/xds_client/xds_client.h" + , "src/core/xds/xds_client/xds_client_stats.h" + , "src/core/xds/xds_client/xds_metrics.h" + , "src/core/xds/xds_client/xds_resource_type.h" + , "src/core/xds/xds_client/xds_resource_type_impl.h" + , "src/core/xds/xds_client/xds_transport.h" ] , "deps": [ "backoff" + , "call_tracer" , "debug_location" , "endpoint_addresses" , "event_engine_base_hdrs" @@ -2677,6 +3264,16 @@ , "ref_counted_ptr" , "uri_parser" , "work_serializer" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/cleanup", "cleanup"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/memory", "memory"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["@", "protobuf", "", "libprotobuf"] , ["src/core", "default_event_engine"] , ["src/core", "dual_ref_counted"] @@ -2689,17 +3286,9 @@ , ["src/core", "useful"] , ["src/core/ext/upb-gen", "upb-gen-lib"] , ["src/core/ext/upbdefs-gen", "upbdefs-gen-lib"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/cleanup", "cleanup"] - , ["@", "absl", "absl/memory", "memory"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] , ["third_party/upb", "base"] - , ["third_party/upb", "mem"] , ["third_party/upb", "json"] + , ["third_party/upb", "mem"] , ["third_party/upb", "reflection"] , ["third_party/upb", "text"] ] @@ -2717,11 +3306,11 @@ ] , "deps": [ "gpr_public_hdrs" - , ["src/core/ext/upb-gen", "upb-gen-lib"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "span"] + , 
["src/core/ext/upb-gen", "upb-gen-lib"] ] } , "grpc_resolver_fake": @@ -2740,12 +3329,13 @@ , "server_address" , "uri_parser" , "work_serializer" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/strings", "strings"] , ["src/core", "channel_args"] , ["src/core", "notification"] , ["src/core", "ref_counted"] , ["src/core", "useful"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/strings", "strings"] ] } , "chttp2_frame": @@ -2755,13 +3345,14 @@ , "hdrs": ["src/core/ext/transport/chttp2/transport/frame.h"] , "deps": [ "gpr" - , ["src/core", "slice"] - , ["src/core", "slice_buffer"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "span"] , ["@", "absl", "absl/types", "variant"] + , ["src/core", "slice"] + , ["src/core", "slice_buffer"] ] } , "chttp2_legacy_frame": @@ -2770,13 +3361,6 @@ , "hdrs": ["src/core/ext/transport/chttp2/transport/legacy_frame.h"] , "deps": ["gpr"] } -, "http_trace": - { "type": ["@", "rules", "CC", "library"] - , "name": ["http_trace"] - , "srcs": ["src/core/ext/transport/chttp2/transport/http_trace.cc"] - , "hdrs": ["src/core/ext/transport/chttp2/transport/http_trace.h"] - , "deps": ["gpr_platform", "grpc_trace"] - } , "hpack_parser_table": { "type": ["@", "rules", "CC", "library"] , "name": ["hpack_parser_table"] @@ -2787,15 +3371,18 @@ , "gpr_platform" , "grpc_trace" , "hpack_parse_result" - , "http_trace" + , "stats" + , ["@", "absl", "absl/functional", "function_ref"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "strings"] , ["src/core", "hpack_constants"] , ["src/core", "metadata_batch"] , ["src/core", "no_destruct"] , ["src/core", "parsed_metadata"] , ["src/core", "slice"] - , ["@", "absl", 
"absl/functional", "function_ref"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/strings", "strings"] + , ["src/core", "unique_ptr_with_bitset"] ] } , "hpack_parse_result": @@ -2807,15 +3394,16 @@ [ "gpr" , "grpc_base" , "ref_counted_ptr" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["src/core", "error"] , ["src/core", "hpack_constants"] , ["src/core", "ref_counted"] , ["src/core", "slice"] , ["src/core", "status_helper"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "hpack_parser": @@ -2824,7 +3412,8 @@ , "srcs": ["src/core/ext/transport/chttp2/transport/hpack_parser.cc"] , "hdrs": ["src/core/ext/transport/chttp2/transport/hpack_parser.h"] , "deps": - [ "chttp2_legacy_frame" + [ "call_tracer" + , "chttp2_legacy_frame" , "gpr" , "gpr_platform" , "grpc_base" @@ -2833,23 +3422,26 @@ , "hpack_parse_result" , "hpack_parser_table" , "stats" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/random", "bit_gen_ref"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "span"] + , ["@", "absl", "absl/types", "variant"] , ["src/core", "decode_huff"] , ["src/core", "error"] , ["src/core", "hpack_constants"] , ["src/core", "match"] , ["src/core", "metadata_batch"] + , ["src/core", "metadata_info"] , ["src/core", "parsed_metadata"] , ["src/core", "random_early_detection"] , ["src/core", "slice"] , ["src/core", "slice_refcount"] , ["src/core", "stats_data"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/random", 
"bit_gen_ref"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "absl", "absl/types", "span"] - , ["@", "absl", "absl/types", "variant"] ] } , "hpack_encoder": @@ -2858,7 +3450,8 @@ , "srcs": ["src/core/ext/transport/chttp2/transport/hpack_encoder.cc"] , "hdrs": ["src/core/ext/transport/chttp2/transport/hpack_encoder.h"] , "deps": - [ "chttp2_bin_encoder" + [ "call_tracer" + , "chttp2_bin_encoder" , "chttp2_legacy_frame" , "chttp2_varint" , "gpr" @@ -2866,7 +3459,9 @@ , "grpc_base" , "grpc_public_hdrs" , "grpc_trace" - , "http_trace" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "strings"] , ["src/core", "hpack_constants"] , ["src/core", "hpack_encoder_table"] , ["src/core", "metadata_batch"] @@ -2875,7 +3470,6 @@ , ["src/core", "slice_buffer"] , ["src/core", "time"] , ["src/core", "timeout_encoding"] - , ["@", "absl", "absl/strings", "strings"] ] } , "chttp2_bin_encoder": @@ -2884,14 +3478,23 @@ , "srcs": ["src/core/ext/transport/chttp2/transport/bin_encoder.cc"] , "hdrs": ["src/core/ext/transport/chttp2/transport/bin_encoder.h"] , "deps": - ["gpr", "gpr_platform", ["src/core", "huffsyms"], ["src/core", "slice"]] + [ "gpr" + , "gpr_platform" + , ["@", "absl", "absl/log", "check"] + , ["src/core", "huffsyms"] + , ["src/core", "slice"] + ] } , "chttp2_varint": { "type": ["@", "rules", "CC", "library"] , "name": ["chttp2_varint"] , "srcs": ["src/core/ext/transport/chttp2/transport/varint.cc"] , "hdrs": ["src/core/ext/transport/chttp2/transport/varint.h"] - , "deps": ["gpr", ["@", "absl", "absl/base", "core_headers"]] + , "deps": + [ "gpr" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + ] } , "chttp2_context_list_entry": { "type": ["@", "rules", "CC", "library"] @@ -2902,7 +3505,7 @@ , "tcp_tracer": { "type": ["@", "rules", "CC", "library"] , "name": ["tcp_tracer"] - , 
"hdrs": ["src/core/lib/channel/tcp_tracer.h"] + , "hdrs": ["src/core/telemetry/tcp_tracer.h"] , "deps": [ "gpr" , ["@", "absl", "absl/time", "time"] @@ -2937,10 +3540,13 @@ , "src/core/ext/transport/chttp2/transport/internal.h" ] , "deps": - [ "channel_arg_names" + [ "call_tracer" + , "channel_arg_names" + , "channelz" , "chttp2_context_list_entry" , "chttp2_legacy_frame" , "chttp2_varint" + , "config_vars" , "debug_location" , "exec_ctx" , "gpr" @@ -2950,13 +3556,27 @@ , "hpack_encoder" , "hpack_parser" , "hpack_parser_table" - , "http_trace" , "httpcli" + , "iomgr" , "iomgr_buffer_list" - , "legacy_context" , "ref_counted_ptr" , "stats" , "tcp_tracer" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/container", "flat_hash_map"] + , ["@", "absl", "absl/hash", "hash"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/meta", "type_traits"] + , ["@", "absl", "absl/random", "bit_gen_ref"] + , ["@", "absl", "absl/random", "distributions"] + , ["@", "absl", "absl/random", "random"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "cord"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "variant"] , ["src/core", "arena"] , ["src/core", "bdp_estimator"] , ["src/core", "bitset"] @@ -2966,6 +3586,8 @@ , ["src/core", "connectivity_state"] , ["src/core", "error"] , ["src/core", "error_utils"] + , ["src/core", "event_engine_extensions"] + , ["src/core", "event_engine_query_extensions"] , ["src/core", "experiments"] , ["src/core", "gpr_manual_constructor"] , ["src/core", "http2_errors"] @@ -2974,9 +3596,9 @@ , ["src/core", "iomgr_fwd"] , ["src/core", "iomgr_port"] , ["src/core", "match"] - , ["src/core", "max_concurrent_streams_policy"] , ["src/core", "memory_quota"] , ["src/core", "metadata_batch"] + , ["src/core", "metadata_info"] , ["src/core", 
"ping_abuse_policy"] , ["src/core", "ping_callbacks"] , ["src/core", "ping_rate_policy"] @@ -2984,7 +3606,6 @@ , ["src/core", "random_early_detection"] , ["src/core", "ref_counted"] , ["src/core", "resource_quota"] - , ["src/core", "resource_quota_trace"] , ["src/core", "slice"] , ["src/core", "slice_buffer"] , ["src/core", "slice_refcount"] @@ -2994,19 +3615,6 @@ , ["src/core", "time"] , ["src/core", "useful"] , ["src/core", "write_size_policy"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/container", "flat_hash_map"] - , ["@", "absl", "absl/hash", "hash"] - , ["@", "absl", "absl/meta", "type_traits"] - , ["@", "absl", "absl/random", "random"] - , ["@", "absl", "absl/random", "bit_gen_ref"] - , ["@", "absl", "absl/random", "distributions"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/strings", "cord"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "absl", "absl/types", "variant"] ] } , "grpcpp_status": @@ -3029,7 +3637,7 @@ , "hdrs": ["src/cpp/ext/chaotic_good.h"] , "deps": [ "gpr" - , "grpc++_public_hdrs" + , "grpc++_base" , "grpc_public_hdrs" , ["src/core", "chaotic_good_connector"] , ["src/core", "chaotic_good_server"] @@ -3039,16 +3647,29 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["subprocess"] , "srcs": - [ "src/core/lib/gpr/subprocess_poxis.cc" - , "src/core/lib/gpr/subprocess_windows.cc" + [ "src/core/util/subprocess_poxis.cc" + , "src/core/util/subprocess_windows.cc" ] - , "hdrs": ["src/core/lib/gpr/subprocess.h"] + , "hdrs": ["src/core/util/subprocess.h"] , "deps": [ "gpr" - , ["src/core", "strerror"] - , ["src/core", "tchar"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "span"] + , ["src/core", "strerror"] + , ["src/core", "tchar"] + ] + } +, "global_callback_hook": + { "type": ["@", "rules", 
"CC", "library"] + , "name": ["global_callback_hook"] + , "srcs": ["src/cpp/client/global_callback_hook.cc"] + , "hdrs": [["include/grpcpp", "global_callback_hook_headers"]] + , "deps": + [ ["@", "absl", "absl/base", "no_destructor"] + , ["@", "absl", "absl/functional", "function_ref"] + , ["@", "absl", "absl/log", "check"] ] } , "grpc_cpp_plugin": diff --git a/etc/import/TARGETS.lzma b/etc/import/TARGETS.lzma index 8c25dc3d7..bfd471a8b 100644 --- a/etc/import/TARGETS.lzma +++ b/etc/import/TARGETS.lzma @@ -340,16 +340,16 @@ , "extra_sources": { "type": "install" , "deps": - [ "src/liblzma/lzma/lzma_encoder_presets.c" - , "src/liblzma/delta/delta_common.c" - , "src/liblzma/simple/simple_coder.c" - , "src/liblzma/simple/x86.c" + [ "src/liblzma/delta/delta_common.c" + , "src/liblzma/lzma/lzma_encoder_presets.c" , "src/liblzma/simple/arm.c" - , "src/liblzma/simple/armthumb.c" , "src/liblzma/simple/arm64.c" - , "src/liblzma/simple/powerpc.c" + , "src/liblzma/simple/armthumb.c" , "src/liblzma/simple/ia64.c" + , "src/liblzma/simple/powerpc.c" + , "src/liblzma/simple/simple_coder.c" , "src/liblzma/simple/sparc.c" + , "src/liblzma/simple/x86.c" ] } } diff --git a/etc/import/absl/container/TARGETS.absl b/etc/import/absl/container/TARGETS.absl index 5cce491f5..348b189ca 100644 --- a/etc/import/absl/container/TARGETS.absl +++ b/etc/import/absl/container/TARGETS.absl @@ -237,8 +237,8 @@ , "deps": [ ["absl/base", "config"] , ["absl/hash", "hash"] - , ["absl/strings", "strings"] , ["absl/strings", "cord"] + , ["absl/strings", "strings"] ] } , "raw_hash_map": @@ -453,8 +453,8 @@ , ["absl/base", "throw_delegate"] , ["absl/memory", "memory"] , ["absl/meta", "type_traits"] - , ["absl/strings", "strings"] , ["absl/strings", "cord"] + , ["absl/strings", "strings"] , ["absl/types", "compare"] , ["absl/utility", "utility"] ] diff --git a/etc/import/absl/crc/TARGETS.absl b/etc/import/absl/crc/TARGETS.absl index fe2c4185e..91b864abc 100644 --- a/etc/import/absl/crc/TARGETS.absl +++ 
b/etc/import/absl/crc/TARGETS.absl @@ -75,8 +75,8 @@ , ["absl/base", "core_headers"] , ["absl/base", "endian"] , ["absl/base", "prefetch"] - , ["absl/strings", "strings"] , ["absl/strings", "str_format"] + , ["absl/strings", "strings"] ] } , "cpu_detect": diff --git a/etc/import/absl/flags/TARGETS.absl b/etc/import/absl/flags/TARGETS.absl index 899bb6213..a7edb02d5 100644 --- a/etc/import/absl/flags/TARGETS.absl +++ b/etc/import/absl/flags/TARGETS.absl @@ -65,8 +65,8 @@ , ["absl/base", "core_headers"] , ["absl/base", "log_severity"] , ["absl/numeric", "int128"] - , ["absl/strings", "strings"] , ["absl/strings", "str_format"] + , ["absl/strings", "strings"] , ["absl/types", "optional"] ] } diff --git a/etc/import/absl/log/TARGETS.absl b/etc/import/absl/log/TARGETS.absl index 83c49707b..c836523fa 100644 --- a/etc/import/absl/log/TARGETS.absl +++ b/etc/import/absl/log/TARGETS.absl @@ -52,6 +52,39 @@ , "hdrs": ["absl_log.h"] , "deps": [["absl/log/internal", "log_impl"]] } +, "check": + { "type": "export" + , "target": "check (unexported)" + , "flexible_config": + [ "ADD_CFLAGS" + , "ADD_CXXFLAGS" + , "AR" + , "ARCH" + , "CC" + , "CFLAGS" + , "CXX" + , "CXXFLAGS" + , "DEBUG" + , "ENV" + , "HOST_ARCH" + , "OS" + , "TARGET_ARCH" + , "TOOLCHAIN_CONFIG" + ] + } +, "check (unexported)": + { "type": ["@", "rules", "CC", "library"] + , "name": ["check"] + , "stage": ["absl", "log"] + , "hdrs": ["check.h"] + , "deps": + [ ["absl/log/internal", "check_impl"] + , ["absl/log/internal", "check_op"] + , ["absl/log/internal", "conditions"] + , ["absl/log/internal", "log_message"] + , ["absl/log/internal", "strip"] + ] + } , "die_if_null": { "type": "export" , "target": "die_if_null (unexported)" @@ -86,6 +119,26 @@ ] } , "globals": + { "type": "export" + , "target": "globals (unexported)" + , "flexible_config": + [ "ADD_CFLAGS" + , "ADD_CXXFLAGS" + , "AR" + , "ARCH" + , "CC" + , "CFLAGS" + , "CXX" + , "CXXFLAGS" + , "DEBUG" + , "ENV" + , "HOST_ARCH" + , "OS" + , "TARGET_ARCH" + , 
"TOOLCHAIN_CONFIG" + ] + } +, "globals (unexported)": { "type": ["@", "rules", "CC", "library"] , "name": ["globals"] , "stage": ["absl", "log"] diff --git a/etc/import/absl/log/internal/TARGETS.absl b/etc/import/absl/log/internal/TARGETS.absl index 4911d138a..037ddfa44 100644 --- a/etc/import/absl/log/internal/TARGETS.absl +++ b/etc/import/absl/log/internal/TARGETS.absl @@ -139,8 +139,8 @@ , ["absl/base", "config"] , ["absl/base", "core_headers"] , ["absl/base", "log_severity"] - , ["absl/strings", "strings"] , ["absl/strings", "str_format"] + , ["absl/strings", "strings"] , ["absl/time", "time"] , ["absl/types", "span"] ] @@ -478,11 +478,11 @@ , "hdrs": ["vlog_config.h"] , "srcs": ["vlog_config.cc"] , "deps": - [ ["absl/base", "base"] + [ "fnmatch" + , ["absl/base", "base"] , ["absl/base", "config"] , ["absl/base", "core_headers"] , ["absl/base", "no_destructor"] - , "fnmatch" , ["absl/memory", "memory"] , ["absl/strings", "strings"] , ["absl/synchronization", "synchronization"] diff --git a/etc/import/absl/random/TARGETS.absl b/etc/import/absl/random/TARGETS.absl index 9035e1d36..e349555db 100644 --- a/etc/import/absl/random/TARGETS.absl +++ b/etc/import/absl/random/TARGETS.absl @@ -26,11 +26,11 @@ , "hdrs": ["random.h"] , "deps": [ "distributions" - , "seed_sequences" , "internal_nonsecure_base" , "internal_pcg_engine" , "internal_pool_urbg" , "internal_randen_engine" + , "seed_sequences" ] } , "distributions": @@ -73,12 +73,7 @@ ] , "srcs": ["discrete_distribution.cc", "gaussian_distribution.cc"] , "deps": - [ ["absl/base", "base_internal"] - , ["absl/base", "config"] - , ["absl/base", "core_headers"] - , ["absl/meta", "type_traits"] - , ["absl/numeric", "bits"] - , "internal_distribution_caller" + [ "internal_distribution_caller" , "internal_fast_uniform_bits" , "internal_fastmath" , "internal_generate_real" @@ -86,6 +81,11 @@ , "internal_traits" , "internal_uniform_helper" , "internal_wide_multiply" + , ["absl/base", "base_internal"] + , ["absl/base", 
"config"] + , ["absl/base", "core_headers"] + , ["absl/meta", "type_traits"] + , ["absl/numeric", "bits"] , ["absl/strings", "strings"] ] } @@ -115,12 +115,12 @@ , "stage": ["absl", "random"] , "hdrs": ["bit_gen_ref.h"] , "deps": - [ "random" - , ["absl/base", "core_headers"] - , ["absl/base", "fast_type_id"] - , "internal_pool_urbg" + [ "internal_pool_urbg" , "internal_salted_seed_seq" , "internal_seed_material" + , "random" + , ["absl/base", "core_headers"] + , ["absl/base", "fast_type_id"] , ["absl/types", "span"] ] } @@ -145,11 +145,11 @@ , "stage": ["absl", "random"] , "hdrs": ["internal/pcg_engine.h"] , "deps": - [ ["absl/base", "config"] - , ["absl/numeric", "int128"] - , "internal_fastmath" + [ "internal_fastmath" , "internal_iostream_state_saver" + , ["absl/base", "config"] , ["absl/meta", "type_traits"] + , ["absl/numeric", "int128"] ] } , "internal_pool_urbg": @@ -184,12 +184,12 @@ [ "internal_randen" , "internal_seed_material" , "internal_traits" + , "seed_gen_exception" , ["absl/base", "base"] , ["absl/base", "config"] , ["absl/base", "core_headers"] , ["absl/base", "endian"] , ["absl/base", "raw_logging_internal"] - , "seed_gen_exception" , ["absl/types", "span"] ] } @@ -212,11 +212,11 @@ , "hdrs": ["seed_sequences.h"] , "srcs": ["seed_sequences.cc"] , "deps": - [ "seed_gen_exception" - , ["absl/base", "config"] - , "internal_pool_urbg" + [ "internal_pool_urbg" , "internal_salted_seed_seq" , "internal_seed_material" + , "seed_gen_exception" + , ["absl/base", "config"] , ["absl/types", "span"] ] } diff --git a/etc/import/absl/status/TARGETS.absl b/etc/import/absl/status/TARGETS.absl index b6b785d1a..bc9eca317 100644 --- a/etc/import/absl/status/TARGETS.absl +++ b/etc/import/absl/status/TARGETS.absl @@ -39,9 +39,9 @@ , ["absl/debugging", "symbolize"] , ["absl/functional", "function_ref"] , ["absl/memory", "memory"] - , ["absl/strings", "strings"] , ["absl/strings", "cord"] , ["absl/strings", "str_format"] + , ["absl/strings", "strings"] , ["absl/types", 
"optional"] , ["absl/types", "span"] ] @@ -80,9 +80,9 @@ , ["absl/base", "nullability"] , ["absl/base", "raw_logging_internal"] , ["absl/meta", "type_traits"] - , ["absl/strings", "strings"] , ["absl/strings", "has_ostream_operator"] , ["absl/strings", "str_format"] + , ["absl/strings", "strings"] , ["absl/types", "variant"] , ["absl/utility", "utility"] ] diff --git a/etc/import/absl/synchronization/TARGETS.absl b/etc/import/absl/synchronization/TARGETS.absl index 7657d3678..1b5b296fe 100644 --- a/etc/import/absl/synchronization/TARGETS.absl +++ b/etc/import/absl/synchronization/TARGETS.absl @@ -70,8 +70,8 @@ , "deps": [ "graphcycles_internal" , "kernel_timeout_internal" - , ["absl/base", "base"] , ["absl/base", "atomic_hook"] + , ["absl/base", "base"] , ["absl/base", "base_internal"] , ["absl/base", "config"] , ["absl/base", "core_headers"] diff --git a/etc/import/absl/time/TARGETS.absl b/etc/import/absl/time/TARGETS.absl index d8c46b859..22b2e8566 100644 --- a/etc/import/absl/time/TARGETS.absl +++ b/etc/import/absl/time/TARGETS.absl @@ -30,14 +30,14 @@ , "internal/get_current_time_posix.inc" ] , "deps": - [ ["absl/base", "base"] + [ "civil_time" + , "time_zone" + , ["absl/base", "base"] , ["absl/base", "config"] , ["absl/base", "core_headers"] , ["absl/base", "raw_logging_internal"] , ["absl/numeric", "int128"] , ["absl/strings", "strings"] - , "civil_time" - , "time_zone" , ["absl/types", "optional"] ] } diff --git a/etc/import/absl/utility/TARGETS.absl b/etc/import/absl/utility/TARGETS.absl index 55d783885..868555ede 100644 --- a/etc/import/absl/utility/TARGETS.absl +++ b/etc/import/absl/utility/TARGETS.absl @@ -29,4 +29,31 @@ , ["absl/meta", "type_traits"] ] } +, "if_constexpr": + { "type": "export" + , "target": "if_constexpr (unexported)" + , "flexible_config": + [ "ADD_CFLAGS" + , "ADD_CXXFLAGS" + , "AR" + , "ARCH" + , "CC" + , "CFLAGS" + , "CXX" + , "CXXFLAGS" + , "DEBUG" + , "ENV" + , "HOST_ARCH" + , "OS" + , "TARGET_ARCH" + , "TOOLCHAIN_CONFIG" + ] + } 
+, "if_constexpr (unexported)": + { "type": ["@", "rules", "CC", "library"] + , "name": ["if_constexpr"] + , "stage": ["absl", "utility"] + , "hdrs": ["internal/if_constexpr.h"] + , "deps": [["absl/base", "config"]] + } } diff --git a/etc/import/include/git2/TARGETS.git2 b/etc/import/include/git2/TARGETS.git2 index 5f16be192..f02de9888 100644 --- a/etc/import/include/git2/TARGETS.git2 +++ b/etc/import/include/git2/TARGETS.git2 @@ -16,9 +16,9 @@ , "commit.h" , "common.h" , "config.h" + , "cred_helpers.h" , "credential.h" , "credential_helpers.h" - , "cred_helpers.h" , "deprecated.h" , "describe.h" , "diff.h" @@ -28,18 +28,18 @@ , "global.h" , "graph.h" , "ignore.h" - , "indexer.h" , "index.h" + , "indexer.h" , "mailmap.h" , "merge.h" , "message.h" , "net.h" , "notes.h" , "object.h" - , "odb_backend.h" , "odb.h" - , "oidarray.h" + , "odb_backend.h" , "oid.h" + , "oidarray.h" , "pack.h" , "patch.h" , "pathspec.h" @@ -61,20 +61,12 @@ , "stdint.h" , "strarray.h" , "submodule.h" - , "tag.h" - , "trace.h" - , "transaction.h" - , "transport.h" - , "tree.h" - , "types.h" - , "version.h" - , "worktree.h" , "sys/alloc.h" , "sys/commit.h" , "sys/commit_graph.h" , "sys/config.h" - , "sys/credential.h" , "sys/cred.h" + , "sys/credential.h" , "sys/diff.h" , "sys/email.h" , "sys/filter.h" @@ -93,6 +85,14 @@ , "sys/repository.h" , "sys/stream.h" , "sys/transport.h" + , "tag.h" + , "trace.h" + , "transaction.h" + , "transport.h" + , "tree.h" + , "types.h" + , "version.h" + , "worktree.h" , ["src/libgit2", "experimental.h"] ] } diff --git a/etc/import/include/grpc/TARGETS.grpc b/etc/import/include/grpc/TARGETS.grpc index 2da10193e..2912a259b 100644 --- a/etc/import/include/grpc/TARGETS.grpc +++ b/etc/import/include/grpc/TARGETS.grpc @@ -10,6 +10,7 @@ , "support/json.h" , "support/log.h" , "support/log_windows.h" + , "support/metrics.h" , "support/port_platform.h" , "support/string_util.h" , "support/sync.h" @@ -51,6 +52,7 @@ , "grpc_posix.h" , "grpc_security.h" , 
"grpc_security_constants.h" + , "passive_listener.h" , "slice.h" , "slice_buffer.h" , "status.h" @@ -141,4 +143,9 @@ , "srcs": ["impl/channel_arg_names.h"] , "stage": ["grpc"] } +, "grpc_core_credentials_header": + { "type": ["@", "rules", "data", "staged"] + , "srcs": ["credentials.h"] + , "stage": ["grpc"] + } } diff --git a/etc/import/include/grpcpp/TARGETS.grpc b/etc/import/include/grpcpp/TARGETS.grpc index 3aae18775..a8a254034 100644 --- a/etc/import/include/grpcpp/TARGETS.grpc +++ b/etc/import/include/grpcpp/TARGETS.grpc @@ -9,7 +9,9 @@ , "create_channel_posix.h" , "ext/health_check_service_server_builder_option.h" , "generic/async_generic_service.h" + , "generic/callback_generic_service.h" , "generic/generic_stub.h" + , "generic/generic_stub_callback.h" , "grpcpp.h" , "health_check_service_interface.h" , "impl/call_hook.h" @@ -37,6 +39,7 @@ , "impl/service_type.h" , "impl/status.h" , "impl/sync.h" + , "passive_listener.h" , "resource_quota.h" , "security/audit_logging.h" , "security/tls_crl_provider.h" @@ -158,4 +161,9 @@ , "srcs": ["ext/server_metric_recorder.h"] , "stage": ["grpcpp"] } +, "global_callback_hook_headers": + { "type": ["@", "rules", "data", "staged"] + , "srcs": ["support/global_callback_hook.h"] + , "stage": ["grpcpp"] + } } diff --git a/etc/import/lib/TARGETS.curl b/etc/import/lib/TARGETS.curl index 9f2036967..96d9c40a9 100644 --- a/etc/import/lib/TARGETS.curl +++ b/etc/import/lib/TARGETS.curl @@ -1078,23 +1078,23 @@ , "guard": ["INCLUDE_curl_config_h__"] , "deps": [ ["src", "curl_ares"] - , ["src", "curl_ipv6"] , ["src", "curl_async_dns"] - , ["src", "curl_ssl"] + , ["src", "curl_brotli"] + , ["src", "curl_crypt_win32"] + , ["src", "curl_gssapi"] + , ["src", "curl_hidden_symbols"] + , ["src", "curl_idn2"] + , ["src", "curl_ipv6"] + , ["src", "curl_msh3"] , ["src", "curl_nghttp2"] , ["src", "curl_ngtcp2"] + , ["src", "curl_psl"] , ["src", "curl_quiche"] - , ["src", "curl_msh3"] - , ["src", "curl_idn2"] + , ["src", "curl_ssh"] + , ["src", 
"curl_ssh2"] + , ["src", "curl_ssl"] , ["src", "curl_zlib"] - , ["src", "curl_brotli"] , ["src", "curl_zstd"] - , ["src", "curl_psl"] - , ["src", "curl_ssh2"] - , ["src", "curl_ssh"] - , ["src", "curl_gssapi"] - , ["src", "curl_crypt_win32"] - , ["src", "curl_hidden_symbols"] ] } , "libcurl_csources": @@ -1214,23 +1214,23 @@ ] , "deps": [ ["src", "curl_ares"] - , ["src", "curl_ipv6"] , ["src", "curl_async_dns"] - , ["src", "curl_ssl"] + , ["src", "curl_brotli"] + , ["src", "curl_crypt_win32"] + , ["src", "curl_gssapi"] + , ["src", "curl_hidden_symbols"] + , ["src", "curl_idn2"] + , ["src", "curl_ipv6"] + , ["src", "curl_msh3"] , ["src", "curl_nghttp2"] , ["src", "curl_ngtcp2"] + , ["src", "curl_psl"] , ["src", "curl_quiche"] - , ["src", "curl_msh3"] - , ["src", "curl_idn2"] + , ["src", "curl_ssh"] + , ["src", "curl_ssh2"] + , ["src", "curl_ssl"] , ["src", "curl_zlib"] - , ["src", "curl_brotli"] , ["src", "curl_zstd"] - , ["src", "curl_psl"] - , ["src", "curl_ssh2"] - , ["src", "curl_ssh"] - , ["src", "curl_gssapi"] - , ["src", "curl_crypt_win32"] - , ["src", "curl_hidden_symbols"] ] } } diff --git a/etc/import/libarchive/TARGETS.archive b/etc/import/libarchive/TARGETS.archive index 35a373f80..c688ddaa9 100644 --- a/etc/import/libarchive/TARGETS.archive +++ b/etc/import/libarchive/TARGETS.archive @@ -1431,29 +1431,29 @@ , "name": ["config.h"] , "guard": ["INCLUDE_archive_config_h__"] , "deps": - [ ["src", "archive_pthread"] - , ["src", "archive_crypt"] - , ["src", "archive_zlib"] + [ ["src", "archive_acl"] , ["src", "archive_bzip2"] - , ["src", "archive_lzma"] - , ["src", "archive_lzo"] + , ["src", "archive_crypt"] + , ["src", "archive_crypto"] , ["src", "archive_libb2"] + , ["src", "archive_libgcc"] + , ["src", "archive_libmd"] + , ["src", "archive_libxml2"] , ["src", "archive_lz4"] + , ["src", "archive_lzma"] + , ["src", "archive_lzo"] , ["src", "archive_lzstd"] , ["src", "archive_mbedtls"] , ["src", "archive_nettle"] - , ["src", "archive_libmd"] - , ["src", 
"archive_crypto"] - , ["src", "archive_libxml2"] - , ["src", "archive_pcreposix"] - , ["src", "archive_pcre2posix"] , ["src", "archive_pcre"] , ["src", "archive_pcre2"] + , ["src", "archive_pcre2posix"] + , ["src", "archive_pcreposix"] + , ["src", "archive_pthread"] , ["src", "archive_regex"] - , ["src", "archive_libgcc"] - , ["src", "archive_acl"] - , ["src", "archive_xattr"] , ["src", "archive_richacl"] + , ["src", "archive_xattr"] + , ["src", "archive_zlib"] ] } , "libarchive_csources": @@ -1646,28 +1646,28 @@ , "private-hdrs": ["config.h", "libarchive_public_hheaders", "libarchive_private_hheaders"] , "deps": - [ ["src", "archive_pthread"] - , ["src", "archive_crypt"] - , ["src", "archive_zlib"] + [ ["src", "archive_acl"] , ["src", "archive_bzip2"] - , ["src", "archive_lzma"] - , ["src", "archive_lzo"] + , ["src", "archive_crypt"] + , ["src", "archive_crypto"] + , ["src", "archive_hidden_symbols"] , ["src", "archive_libb2"] + , ["src", "archive_libgcc"] + , ["src", "archive_libmd"] + , ["src", "archive_libxml2"] , ["src", "archive_lz4"] + , ["src", "archive_lzma"] + , ["src", "archive_lzo"] , ["src", "archive_lzstd"] , ["src", "archive_mbedtls"] , ["src", "archive_nettle"] - , ["src", "archive_libmd"] - , ["src", "archive_crypto"] - , ["src", "archive_libxml2"] - , ["src", "archive_pcreposix"] , ["src", "archive_pcre"] + , ["src", "archive_pcreposix"] + , ["src", "archive_pthread"] , ["src", "archive_regex"] - , ["src", "archive_libgcc"] - , ["src", "archive_acl"] - , ["src", "archive_xattr"] , ["src", "archive_richacl"] - , ["src", "archive_hidden_symbols"] + , ["src", "archive_xattr"] + , ["src", "archive_zlib"] ] } } diff --git a/etc/import/src/TARGETS.git2 b/etc/import/src/TARGETS.git2 index 96369d8b3..02e77e89c 100644 --- a/etc/import/src/TARGETS.git2 +++ b/etc/import/src/TARGETS.git2 @@ -27,15 +27,15 @@ , "hdrs": [["include", "git2_public_headers"]] , "private-hdrs": [["src/libgit2", "libgit2_private_headers"]] , "deps": - [ ["src/util", "util_os"] + [ 
["src/util", "util_compress"] + , ["src/util", "util_gssapi"] + , ["src/util", "util_http_parser"] + , ["src/util", "util_https"] + , ["src/util", "util_os"] + , ["src/util", "util_regex"] , ["src/util", "util_sha1"] , ["src/util", "util_sha256"] - , ["src/util", "util_regex"] - , ["src/util", "util_compress"] , ["src/util", "util_ssh"] - , ["src/util", "util_http_parser"] - , ["src/util", "util_gssapi"] - , ["src/util", "util_https"] ] } } diff --git a/etc/import/src/compiler/TARGETS.grpc b/etc/import/src/compiler/TARGETS.grpc index d9d5fa57c..be5527b60 100644 --- a/etc/import/src/compiler/TARGETS.grpc +++ b/etc/import/src/compiler/TARGETS.grpc @@ -45,8 +45,8 @@ ] , "deps": [ "proto_parser_helper" - , ["", "grpc++_config_proto"] , ["@", "protobuf", "", "libprotoc"] + , ["", "grpc++_config_proto"] ] } , "grpc_cpp_plugin": diff --git a/etc/import/src/core/TARGETS.grpc b/etc/import/src/core/TARGETS.grpc index 83729c089..cb7baecec 100644 --- a/etc/import/src/core/TARGETS.grpc +++ b/etc/import/src/core/TARGETS.grpc @@ -4,6 +4,20 @@ , "stage": ["src", "core"] , "hdrs": ["lib/channel/channel_fwd.h"] } +, "dump_args": + { "type": ["@", "rules", "CC", "library"] + , "name": ["dump_args"] + , "stage": ["src", "core"] + , "srcs": ["lib/gprpp/dump_args.cc"] + , "hdrs": ["lib/gprpp/dump_args.h"] + , "deps": + [ ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr_platform"] + ] + } , "slice_cast": { "type": ["@", "rules", "CC", "library"] , "name": ["slice_cast"] @@ -16,14 +30,18 @@ , "stage": ["src", "core"] , "hdrs": [ "lib/event_engine/extensions/can_track_errors.h" + , "lib/event_engine/extensions/chaotic_good_extension.h" , "lib/event_engine/extensions/supports_fd.h" + , "lib/event_engine/extensions/tcp_trace.h" ] , "deps": - [ ["", "event_engine_base_hdrs"] - , ["", "gpr_platform"] - , ["@", "absl", "absl/status", "statusor"] + [ 
"memory_quota" , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr_platform"] + , ["", "tcp_tracer"] ] } , "event_engine_common": @@ -48,21 +66,70 @@ , "slice_buffer" , "slice_cast" , "slice_refcount" - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] - , ["", "gpr_platform"] , ["@", "absl", "absl/container", "flat_hash_set"] , ["@", "absl", "absl/hash", "hash"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/utility", "utility"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] + , ["", "gpr_platform"] + ] + } +, "latent_see": + { "type": ["@", "rules", "CC", "library"] + , "name": ["latent_see"] + , "stage": ["src", "core"] + , "srcs": ["util/latent_see.cc"] + , "hdrs": ["util/latent_see.h"] + , "deps": + [ "per_cpu" + , "ring_buffer" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] ] } +, "ring_buffer": + { "type": ["@", "rules", "CC", "library"] + , "name": ["ring_buffer"] + , "stage": ["src", "core"] + , "hdrs": ["util/ring_buffer.h"] + , "deps": [["@", "absl", "absl/types", "optional"], ["", "gpr_platform"]] + } , "transport_fwd": { "type": ["@", "rules", "CC", "library"] , "name": ["transport_fwd"] , "stage": ["src", "core"] , "hdrs": ["lib/transport/transport_fwd.h"] } +, "server_call_tracer_filter": + { "type": ["@", "rules", "CC", "library"] + , "name": ["server_call_tracer_filter"] + , "stage": ["src", "core"] + , "srcs": ["server/server_call_tracer_filter.cc"] + , "hdrs": ["server/server_call_tracer_filter.h"] + , "deps": + [ "arena_promise" + , "call_finalization" + , "cancel_callback" + , "channel_args" + , "channel_fwd" + , "channel_stack_type" + , "context" + , "map" + 
, "pipe" + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["", "call_tracer"] + , ["", "config"] + , ["", "gpr_platform"] + , ["", "grpc_base"] + ] + } , "atomic_utils": { "type": ["@", "rules", "CC", "library"] , "name": ["atomic_utils"] @@ -77,6 +144,22 @@ , "hdrs": ["lib/transport/metadata_compression_traits.h"] , "deps": [["", "gpr_platform"]] } +, "metadata_info": + { "type": ["@", "rules", "CC", "library"] + , "name": ["metadata_info"] + , "stage": ["src", "core"] + , "srcs": ["lib/transport/metadata_info.cc"] + , "hdrs": ["lib/transport/metadata_info.h"] + , "deps": + [ "channel_args" + , "hpack_constants" + , "metadata_batch" + , "slice" + , ["", "call_tracer"] + , ["", "gpr_platform"] + , ["", "grpc_base"] + ] + } , "experiments": { "type": ["@", "rules", "CC", "library"] , "name": ["experiments"] @@ -85,10 +168,12 @@ , "hdrs": ["lib/experiments/config.h", "lib/experiments/experiments.h"] , "deps": [ "no_destruct" - , ["", "config_vars"] - , ["", "gpr"] , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/strings", "strings"] + , ["", "config_vars"] + , ["", "gpr"] ] } , "init_internally": @@ -103,11 +188,24 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["useful"] , "stage": ["src", "core"] - , "hdrs": ["lib/gpr/useful.h"] + , "hdrs": ["util/useful.h"] , "deps": - [ ["", "gpr_platform"] + [ ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/numeric", "bits"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "variant"] + , ["", "gpr_platform"] + ] + } +, "unique_ptr_with_bitset": + { "type": ["@", "rules", "CC", "library"] + , "name": ["unique_ptr_with_bitset"] + , "stage": ["src", "core"] + , "hdrs": ["util/unique_ptr_with_bitset.h"] + , "deps": + [ ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/numeric", "bits"] + , ["", "gpr_platform"] ] } , "examine_stack": @@ 
-116,13 +214,13 @@ , "stage": ["src", "core"] , "srcs": ["lib/gprpp/examine_stack.cc"] , "hdrs": ["lib/gprpp/examine_stack.h"] - , "deps": [["", "gpr_platform"], ["@", "absl", "absl/types", "optional"]] + , "deps": [["@", "absl", "absl/types", "optional"], ["", "gpr_platform"]] } , "gpr_atm": { "type": ["@", "rules", "CC", "library"] , "name": ["gpr_atm"] , "stage": ["src", "core"] - , "srcs": ["lib/gpr/atm.cc"] + , "srcs": ["util/atm.cc"] , "hdrs": [["include/grpc", "gpr_atm_headers"]] , "deps": ["useful", ["", "gpr_platform"]] } @@ -137,16 +235,9 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["gpr_spinlock"] , "stage": ["src", "core"] - , "hdrs": ["lib/gpr/spinlock.h"] + , "hdrs": ["util/spinlock.h"] , "deps": ["gpr_atm", ["", "gpr_platform"]] } -, "gpr_log_internal": - { "type": ["@", "rules", "CC", "library"] - , "name": ["gpr_log_internal"] - , "stage": ["src", "core"] - , "hdrs": ["lib/gpr/log_internal.h"] - , "deps": [["", "gpr_platform"]] - } , "env": { "type": ["@", "rules", "CC", "library"] , "name": ["env"] @@ -158,7 +249,7 @@ ] , "hdrs": ["lib/gprpp/env.h"] , "deps": - ["tchar", ["", "gpr_platform"], ["@", "absl", "absl/types", "optional"]] + ["tchar", ["@", "absl", "absl/types", "optional"], ["", "gpr_platform"]] } , "directory_reader": { "type": ["@", "rules", "CC", "library"] @@ -170,12 +261,12 @@ ] , "hdrs": ["lib/gprpp/directory_reader.h"] , "deps": - [ ["", "gpr"] - , ["", "gpr_platform"] - , ["@", "absl", "absl/functional", "function_ref"] + [ ["@", "absl", "absl/functional", "function_ref"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr"] + , ["", "gpr_platform"] ] } , "chunked_vector": @@ -183,7 +274,12 @@ , "name": ["chunked_vector"] , "stage": ["src", "core"] , "hdrs": ["lib/gprpp/chunked_vector.h"] - , "deps": ["arena", "gpr_manual_constructor", ["", "gpr"]] + , "deps": + [ "arena" + , "gpr_manual_constructor" + , ["@", "absl", "absl/log", 
"check"] + , ["", "gpr"] + ] } , "construct_destruct": { "type": ["@", "rules", "CC", "library"] @@ -211,16 +307,17 @@ , "deps": [ "percent_encoding" , "slice" - , ["", "debug_location"] - , ["", "gpr"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/strings", "cord"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/time", "time"] , ["@", "absl", "absl/types", "optional"] + , ["", "debug_location"] + , ["", "gpr"] + , ["src/core/ext/upb-gen", "upb-gen-lib"] , ["third_party/upb", "base"] , ["third_party/upb", "mem"] - , ["src/core/ext/upb-gen", "upb-gen-lib"] ] , "private-deps": ["status_helper_private_proto_deps"] } @@ -230,7 +327,7 @@ , "stage": ["src", "core"] , "hdrs": ["lib/gprpp/unique_type_name.h"] , "deps": - ["useful", ["", "gpr_platform"], ["@", "absl", "absl/strings", "strings"]] + ["useful", ["@", "absl", "absl/strings", "strings"], ["", "gpr_platform"]] } , "validation_errors": { "type": ["@", "rules", "CC", "library"] @@ -239,9 +336,10 @@ , "srcs": ["lib/gprpp/validation_errors.cc"] , "hdrs": ["lib/gprpp/validation_errors.h"] , "deps": - [ ["", "gpr_platform"] + [ ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr"] ] } , "overload": @@ -257,7 +355,7 @@ , "stage": ["src", "core"] , "hdrs": ["lib/gprpp/match.h"] , "deps": - ["overload", ["", "gpr_platform"], ["@", "absl", "absl/types", "variant"]] + ["overload", ["@", "absl", "absl/types", "variant"], ["", "gpr_platform"]] } , "table": { "type": ["@", "rules", "CC", "library"] @@ -266,9 +364,9 @@ , "hdrs": ["lib/gprpp/table.h"] , "deps": [ "bitset" - , ["", "gpr_platform"] , ["@", "absl", "absl/meta", "type_traits"] , ["@", "absl", "absl/utility", "utility"] + , ["", "gpr_platform"] ] } , "packed_table": @@ -305,7 +403,14 @@ , "name": ["poll"] , "stage": ["src", "core"] , "hdrs": ["lib/promise/poll.h"] - , "deps": ["construct_destruct", ["", "gpr"], ["", 
"gpr_platform"]] + , "deps": + [ "construct_destruct" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] + , ["", "gpr_platform"] + ] } , "status_flag": { "type": ["@", "rules", "CC", "library"] @@ -314,11 +419,13 @@ , "hdrs": ["lib/promise/status_flag.h"] , "deps": [ "promise_status" - , ["", "gpr"] - , ["", "gpr_platform"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] + , ["", "gpr_platform"] ] } , "map_pipe": @@ -332,11 +439,11 @@ , "pipe" , "poll" , "promise_factory" - , "promise_trace" , "try_seq" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] , ["", "gpr"] , ["", "gpr_platform"] - , ["@", "absl", "absl/status", "status"] ] } , "1999": @@ -350,17 +457,22 @@ , "arena" , "construct_destruct" , "context" + , "event_engine_context" + , "latent_see" , "poll" , "promise_factory" - , "promise_trace" , "ref_counted" + , "useful" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] , ["", "event_engine_base_hdrs"] , ["", "exec_ctx"] , ["", "gpr"] + , ["", "grpc_trace"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] ] } , "context": @@ -369,7 +481,11 @@ , "stage": ["src", "core"] , "hdrs": ["lib/promise/context.h"] , "deps": - ["down_cast", ["", "gpr"], ["@", "absl", "absl/meta", "type_traits"]] + [ "down_cast" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/meta", "type_traits"] + , ["", "gpr"] + ] } , "map": { "type": ["@", "rules", "CC", "library"] @@ -387,13 +503,13 @@ , 
"deps": [ "activity" , "context" - , "default_event_engine" + , "event_engine_context" , "poll" , "time" + , ["@", "absl", "absl/status", "status"] , ["", "event_engine_base_hdrs"] , ["", "exec_ctx"] , ["", "gpr"] - , ["@", "absl", "absl/status", "status"] ] } , "wait_for_callback": @@ -404,8 +520,8 @@ , "deps": [ "activity" , "poll" - , ["", "gpr"] , ["@", "absl", "absl/base", "core_headers"] + , ["", "gpr"] ] } , "arena_promise": @@ -418,8 +534,8 @@ , "construct_destruct" , "context" , "poll" - , ["", "gpr_platform"] , ["@", "absl", "absl/meta", "type_traits"] + , ["", "gpr_platform"] ] } , "promise_like": @@ -428,14 +544,18 @@ , "stage": ["src", "core"] , "hdrs": ["lib/promise/detail/promise_like.h"] , "deps": - ["poll", ["", "gpr_platform"], ["@", "absl", "absl/meta", "type_traits"]] + [ "poll" + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/meta", "type_traits"] + , ["", "gpr_platform"] + ] } , "cancel_callback": { "type": ["@", "rules", "CC", "library"] , "name": ["cancel_callback"] , "stage": ["src", "core"] , "hdrs": ["lib/promise/cancel_callback.h"] - , "deps": ["promise_like", ["", "gpr_platform"]] + , "deps": ["arena", "context", "promise_like", ["", "gpr_platform"]] } , "promise_factory": { "type": ["@", "rules", "CC", "library"] @@ -444,8 +564,8 @@ , "hdrs": ["lib/promise/detail/promise_factory.h"] , "deps": [ "promise_like" - , ["", "gpr_platform"] , ["@", "absl", "absl/meta", "type_traits"] + , ["", "gpr_platform"] ] } , "if": @@ -458,9 +578,9 @@ , "poll" , "promise_factory" , "promise_like" - , ["", "gpr_platform"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/types", "variant"] + , ["", "gpr_platform"] ] } , "switch": @@ -476,9 +596,10 @@ , "stage": ["src", "core"] , "hdrs": ["lib/promise/detail/status.h"] , "deps": - [ ["", "gpr_platform"] + [ ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] + , ["", "gpr_platform"] ] } , "race": 
@@ -504,10 +625,10 @@ [ "construct_destruct" , "poll" , "promise_factory" - , ["", "gpr_platform"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/types", "variant"] + , ["", "gpr_platform"] ] } , "join_state": @@ -520,10 +641,10 @@ , "construct_destruct" , "poll" , "promise_like" - , "promise_trace" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["", "gpr"] - , ["", "gpr_platform"] - , ["@", "absl", "absl/utility", "utility"] + , ["", "grpc_trace"] ] } , "join": @@ -534,8 +655,8 @@ , "deps": [ "join_state" , "map" - , ["", "gpr_platform"] , ["@", "absl", "absl/meta", "type_traits"] + , ["", "gpr_platform"] ] } , "try_join": @@ -548,10 +669,11 @@ , "map" , "poll" , "status_flag" - , ["", "gpr_platform"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/meta", "type_traits"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] + , ["", "gpr_platform"] ] } , "all_ok": @@ -564,10 +686,10 @@ , "map" , "poll" , "status_flag" - , ["", "gpr_platform"] , ["@", "absl", "absl/meta", "type_traits"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] + , ["", "gpr_platform"] ] } , "basic_seq": @@ -587,11 +709,12 @@ , "poll" , "promise_factory" , "promise_like" - , "promise_trace" - , ["", "debug_location"] - , ["", "gpr"] , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/strings", "strings"] + , ["", "debug_location"] + , ["", "grpc_trace"] ] } , "seq": @@ -604,6 +727,7 @@ , "poll" , "promise_like" , "seq_state" + , ["@", "absl", "absl/log", "log"] , ["", "debug_location"] , ["", "gpr_platform"] ] @@ -620,11 +744,12 @@ , "promise_status" , "seq_state" , "status_flag" - , ["", "debug_location"] - , ["", "gpr_platform"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/meta", "type_traits"] , ["@", "absl", 
"absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] + , ["", "debug_location"] + , ["", "gpr_platform"] ] } , "activity": @@ -637,17 +762,21 @@ [ "atomic_utils" , "construct_destruct" , "context" + , "dump_args" + , "latent_see" , "no_destruct" , "poll" , "promise_factory" , "promise_status" - , ["", "gpr"] - , ["", "orphanable"] , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] + , ["", "grpc_trace"] + , ["", "orphanable"] ] } , "exec_ctx_wakeup_scheduler": @@ -658,10 +787,10 @@ , "deps": [ "closure" , "error" + , ["@", "absl", "absl/status", "status"] , ["", "debug_location"] , ["", "exec_ctx"] , ["", "gpr_platform"] - , ["@", "absl", "absl/status", "status"] ] } , "event_engine_wakeup_scheduler": @@ -670,7 +799,11 @@ , "stage": ["src", "core"] , "hdrs": ["lib/promise/event_engine_wakeup_scheduler.h"] , "deps": - [["", "event_engine_base_hdrs"], ["", "exec_ctx"], ["", "gpr_platform"]] + [ ["@", "absl", "absl/log", "check"] + , ["", "event_engine_base_hdrs"] + , ["", "exec_ctx"] + , ["", "gpr_platform"] + ] } , "wait_set": { "type": ["@", "rules", "CC", "library"] @@ -680,9 +813,9 @@ , "deps": [ "activity" , "poll" - , ["", "gpr_platform"] , ["@", "absl", "absl/container", "flat_hash_set"] , ["@", "absl", "absl/hash", "hash"] + , ["", "gpr_platform"] ] } , "latch": @@ -693,9 +826,11 @@ , "deps": [ "activity" , "poll" - , "promise_trace" - , ["", "gpr"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr"] + , ["", "grpc_trace"] ] } , "inter_activity_latch": @@ -706,11 +841,12 @@ , "deps": [ "activity" , "poll" - , "promise_trace" , "wait_set" - , ["", "gpr"] , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "log"] , ["@", 
"absl", "absl/strings", "strings"] + , ["", "gpr"] + , ["", "grpc_trace"] ] } , "interceptor_list": @@ -724,12 +860,13 @@ , "context" , "poll" , "promise_factory" - , "promise_trace" - , ["", "debug_location"] - , ["", "gpr"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "debug_location"] + , ["", "gpr"] ] } , "pipe": @@ -745,22 +882,24 @@ , "interceptor_list" , "map" , "poll" - , "promise_trace" , "seq" - , ["", "debug_location"] - , ["", "gpr"] - , ["", "ref_counted_ptr"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] , ["@", "absl", "absl/types", "variant"] + , ["", "debug_location"] + , ["", "gpr"] + , ["", "ref_counted_ptr"] ] } -, "promise_mutes": +, "promise_mutex": { "type": ["@", "rules", "CC", "library"] , "name": ["promise_mutex"] , "stage": ["src", "core"] , "hdrs": ["lib/promise/promise_mutex.h"] - , "deps": ["activity", "poll", ["", "gpr"]] + , "deps": + ["activity", "poll", ["@", "absl", "absl/log", "check"], ["", "gpr"]] } , "inter_activity_pipe": { "type": ["@", "rules", "CC", "library"] @@ -771,21 +910,13 @@ [ "activity" , "poll" , "ref_counted" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/types", "optional"] , ["", "debug_location"] , ["", "gpr"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/types", "optional"] ] } -, "promise_trace": - { "type": ["@", "rules", "CC", "library"] - , "name": ["promise_trace"] - , "stage": ["src", "core"] - , "srcs": ["lib/promise/trace.cc"] - , "hdrs": ["lib/promise/trace.h"] - , "deps": [["", "gpr_platform"], ["", "grpc_trace"]] - } , "mpsc": { "type": ["@", "rules", "CC", "library"] , "name": ["mpsc"] @@ -796,9 +927,10 @@ , "poll" , "ref_counted" , "wait_set" 
+ , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] , ["", "gpr"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/base", "core_headers"] ] } , "observable": @@ -809,9 +941,10 @@ , "deps": [ "activity" , "poll" - , ["", "gpr"] , ["@", "absl", "absl/container", "flat_hash_set"] , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["", "gpr"] ] } , "for_each": @@ -824,11 +957,13 @@ , "construct_destruct" , "poll" , "promise_factory" - , "promise_trace" - , ["", "gpr"] - , ["", "gpr_platform"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr"] + , ["", "gpr_platform"] + , ["", "grpc_trace"] ] } , "ref_counted": @@ -838,6 +973,9 @@ , "hdrs": ["lib/gprpp/ref_counted.h"] , "deps": [ "atomic_utils" + , "down_cast" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["", "debug_location"] , ["", "gpr"] , ["", "ref_counted_ptr"] @@ -863,9 +1001,9 @@ , "hdrs": ["lib/gprpp/ref_counted_string.h"] , "deps": [ "ref_counted" + , ["@", "absl", "absl/strings", "strings"] , ["", "gpr"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/strings", "strings"] ] } , "uuid_v4": @@ -874,21 +1012,21 @@ , "stage": ["src", "core"] , "srcs": ["lib/gprpp/uuid_v4.cc"] , "hdrs": ["lib/gprpp/uuid_v4.h"] - , "deps": [["", "gpr"], ["@", "absl", "absl/strings", "str_format"]] + , "deps": [["@", "absl", "absl/strings", "str_format"], ["", "gpr"]] } , "handshaker_factory": { "type": ["@", "rules", "CC", "library"] , "name": ["handshaker_factory"] , "stage": ["src", "core"] - , "hdrs": ["lib/transport/handshaker_factory.h"] + , "hdrs": ["handshaker/handshaker_factory.h"] , "deps": ["channel_args", "iomgr_fwd", ["", "gpr_platform"]] } , "handshaker_registry": { "type": ["@", "rules", "CC", "library"] , "name": ["handshaker_registry"] , "stage": ["src", "core"] - , "srcs": 
["lib/transport/handshaker_registry.cc"] - , "hdrs": ["lib/transport/handshaker_registry.h"] + , "srcs": ["handshaker/handshaker_registry.cc"] + , "hdrs": ["handshaker/handshaker_registry.h"] , "deps": ["channel_args", "handshaker_factory", "iomgr_fwd", ["", "gpr_platform"]] } @@ -896,8 +1034,8 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["tcp_connect_handshaker"] , "stage": ["src", "core"] - , "srcs": ["lib/transport/tcp_connect_handshaker.cc"] - , "hdrs": ["lib/transport/tcp_connect_handshaker.h"] + , "srcs": ["handshaker/tcp_connect/tcp_connect_handshaker.cc"] + , "hdrs": ["handshaker/tcp_connect/tcp_connect_handshaker.h"] , "deps": [ "channel_args" , "channel_args_endpoint_config" @@ -909,19 +1047,44 @@ , "pollset_set" , "resolved_address" , "slice" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/types", "optional"] , ["", "config"] , ["", "debug_location"] , ["", "exec_ctx"] , ["", "gpr"] , ["", "grpc_base"] , ["", "handshaker"] + , ["", "iomgr"] , ["", "parse_address"] , ["", "ref_counted_ptr"] , ["", "uri_parser"] - , ["@", "absl", "absl/base", "core_headers"] + ] + } +, "endpoint_info_handshaker": + { "type": ["@", "rules", "CC", "library"] + , "name": ["endpoint_info_handshaker"] + , "stage": ["src", "core"] + , "srcs": ["handshaker/endpoint_info/endpoint_info_handshaker.cc"] + , "hdrs": ["handshaker/endpoint_info/endpoint_info_handshaker.h"] + , "deps": + [ "channel_args" + , "closure" + , "handshaker_factory" + , "handshaker_registry" + , ["@", "absl", "absl/functional", "any_invocable"] , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/types", "optional"] + , ["", "config"] + , ["", "debug_location"] + , ["", "exec_ctx"] + , ["", "gpr"] + , ["", "handshaker"] + , ["", "iomgr"] + , 
["", "ref_counted_ptr"] ] } , "channel_creds_registry": @@ -934,9 +1097,9 @@ , "json_args" , "ref_counted" , "validation_errors" + , ["@", "absl", "absl/strings", "strings"] , ["", "gpr_platform"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/strings", "strings"] ] } , "event_engine_memory_allocator": @@ -945,7 +1108,7 @@ , "stage": ["src", "core"] , "hdrs": [["include/grpc", "event_engine_memory_allocator_headers"]] , "deps": - ["slice", ["", "gpr_platform"], ["@", "absl", "absl/strings", "strings"]] + ["slice", ["@", "absl", "absl/strings", "strings"], ["", "gpr_platform"]] } , "event_engine_memory_allocator_factory": { "type": ["@", "rules", "CC", "library"] @@ -955,8 +1118,8 @@ , "deps": [ "event_engine_memory_allocator" , "memory_quota" - , ["", "gpr_platform"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr_platform"] ] } , "memory_quota": @@ -975,20 +1138,21 @@ , "periodic_update" , "poll" , "race" - , "resource_quota_trace" , "seq" , "slice_refcount" , "time" , "useful" - , ["", "gpr"] - , ["", "grpc_trace"] - , ["", "orphanable"] - , ["", "ref_counted_ptr"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/container", "flat_hash_set"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] + , ["", "grpc_trace"] + , ["", "orphanable"] + , ["", "ref_counted_ptr"] ] } , "periodic_update": @@ -1000,8 +1164,8 @@ , "deps": [ "time" , "useful" - , ["", "gpr_platform"] , ["@", "absl", "absl/functional", "function_ref"] + , ["", "gpr_platform"] ] } , "arena": @@ -1015,6 +1179,8 @@ , "context" , "event_engine_memory_allocator" , "memory_quota" + , "resource_quota" + , ["@", "absl", "absl/log", "log"] , ["", "gpr"] ] } @@ -1026,18 +1192,25 @@ , "hdrs": ["lib/resource_quota/thread_quota.h"] , "deps": [ "ref_counted" + , ["@", "absl", "absl/base", "core_headers"] , ["", "gpr"] , 
["", "ref_counted_ptr"] - , ["@", "absl", "absl/base", "core_headers"] ] } -, "resource_quota_trace": +, "connection_quota": { "type": ["@", "rules", "CC", "library"] - , "name": ["resource_quota_trace"] + , "name": ["connection_quota"] , "stage": ["src", "core"] - , "srcs": ["lib/resource_quota/trace.cc"] - , "hdrs": ["lib/resource_quota/trace.h"] - , "deps": [["", "gpr_platform"], ["", "grpc_trace"]] + , "srcs": ["lib/resource_quota/connection_quota.cc"] + , "hdrs": ["lib/resource_quota/connection_quota.h"] + , "deps": + [ "memory_quota" + , "ref_counted" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["", "gpr"] + , ["", "ref_counted_ptr"] + ] } , "resource_quota": { "type": ["@", "rules", "CC", "library"] @@ -1046,26 +1219,27 @@ , "srcs": ["lib/resource_quota/resource_quota.cc"] , "hdrs": ["lib/resource_quota/resource_quota.h"] , "deps": - [ "memory_quota" + [ "connection_quota" + , "memory_quota" , "ref_counted" , "thread_quota" , "useful" + , ["@", "absl", "absl/strings", "strings"] , ["", "channel_arg_names"] , ["", "cpp_impl_of"] , ["", "event_engine_base_hdrs"] , ["", "gpr_platform"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/strings", "strings"] ] } , "slice_refcount": { "type": ["@", "rules", "CC", "library"] , "name": ["slice_refcount"] , "stage": ["src", "core"] - , "srcs": ["lib/slice/slice_refcount.cc"] , "hdrs": ["lib/slice/slice_refcount.h", ["include/grpc", "slice.h"]] , "deps": - [ ["", "debug_location"] + [ ["@", "absl", "absl/log", "log"] + , ["", "debug_location"] , ["", "event_engine_base_hdrs"] , ["", "gpr"] , ["", "grpc_trace"] @@ -1085,11 +1259,12 @@ , "deps": [ "slice_cast" , "slice_refcount" + , ["@", "absl", "absl/hash", "hash"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/strings", "strings"] , ["", "debug_location"] , ["", "event_engine_base_hdrs"] , ["", "gpr"] - , ["@", "absl", "absl/hash", "hash"] - , ["@", "absl", "absl/strings", "strings"] ] } , "slice_buffer": 
@@ -1098,7 +1273,12 @@ , "stage": ["src", "core"] , "srcs": ["lib/slice/slice_buffer.cc"] , "hdrs": ["lib/slice/slice_buffer.h", ["include/grpc", "slice_buffer.h"]] - , "deps": ["slice", "slice_refcount", ["", "gpr"]] + , "deps": + [ "slice" + , "slice_refcount" + , ["@", "absl", "absl/log", "check"] + , ["", "gpr"] + ] } , "error": { "type": ["@", "rules", "CC", "library"] @@ -1113,11 +1293,14 @@ , "status_helper" , "strerror" , "useful" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] , ["", "gpr"] , ["", "grpc_public_hdrs"] , ["", "grpc_trace"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/strings", "str_format"] ] } , "closure": @@ -1129,9 +1312,11 @@ , "deps": [ "error" , "gpr_manual_constructor" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "str_format"] , ["", "debug_location"] , ["", "gpr"] - , ["@", "absl", "absl/strings", "str_format"] ] } , "time": @@ -1143,10 +1328,12 @@ , "deps": [ "no_destruct" , "useful" - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/types", "optional"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] ] } , "iomgr_port": @@ -1175,7 +1362,7 @@ , "lib/iomgr/sockaddr_windows.h" , "lib/iomgr/socket_utils.h" ] - , "deps": ["iomgr_port", ["", "gpr"]] + , "deps": ["iomgr_port", ["@", "absl", "absl/log", "check"], ["", "gpr"]] } , "avl": { "type": ["@", "rules", "CC", "library"] @@ -1200,7 +1387,9 @@ , "srcs": ["lib/event_engine/forkable.cc"] , "hdrs": ["lib/event_engine/forkable.h"] , "deps": - [ ["", "config_vars"] + [ ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["", "config_vars"] , ["", "gpr"] , ["", "gpr_platform"] 
, ["", "grpc_trace"] @@ -1212,9 +1401,9 @@ , "stage": ["src", "core"] , "hdrs": ["lib/event_engine/poller.h"] , "deps": - [ ["", "event_engine_base_hdrs"] + [ ["@", "absl", "absl/functional", "function_ref"] + , ["", "event_engine_base_hdrs"] , ["", "gpr_platform"] - , ["@", "absl", "absl/functional", "function_ref"] ] } , "event_engine_time_util": @@ -1238,9 +1427,9 @@ , "stage": ["src", "core"] , "hdrs": ["lib/event_engine/work_queue/work_queue.h"] , "deps": - [ ["", "event_engine_base_hdrs"] + [ ["@", "absl", "absl/functional", "any_invocable"] + , ["", "event_engine_base_hdrs"] , ["", "gpr"] - , ["@", "absl", "absl/functional", "any_invocable"] ] } , "event_engine_basic_work_queue": @@ -1252,10 +1441,10 @@ , "deps": [ "common_event_engine_closures" , "event_engine_work_queue" - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/functional", "any_invocable"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] ] } , "common_event_engine_closures": @@ -1264,9 +1453,9 @@ , "stage": ["src", "core"] , "hdrs": ["lib/event_engine/common_closures.h"] , "deps": - [ ["", "event_engine_base_hdrs"] + [ ["@", "absl", "absl/functional", "any_invocable"] + , ["", "event_engine_base_hdrs"] , ["", "gpr_platform"] - , ["@", "absl", "absl/functional", "any_invocable"] ] } , "posix_event_engine_timer": @@ -1285,10 +1474,10 @@ [ "time" , "time_averaged_stats" , "useful" - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/types", "optional"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] ] } , "event_engine_thread_local": @@ -1308,11 +1497,12 @@ , "deps": [ "time" , "useful" - , ["", "gpr"] , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/time", "time"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/time", "time"] + , ["", "gpr"] ] } 
, "event_engine_thread_pool": @@ -1333,21 +1523,23 @@ , "event_engine_basic_work_queue" , "event_engine_thread_count" , "event_engine_thread_local" - , "event_engine_trace" , "event_engine_work_queue" , "examine_stack" , "forkable" , "no_destruct" , "notification" , "time" - , ["", "backoff"] - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/container", "flat_hash_set"] , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/time", "time"] , ["@", "absl", "absl/types", "optional"] + , ["", "backoff"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] + , ["", "grpc_trace"] ] } , "posix_event_engine_base_hdrs": @@ -1358,11 +1550,11 @@ , "deps": [ "event_engine_extensions" , "event_engine_query_extensions" - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] , ["@", "absl", "absl/functional", "any_invocable"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] ] } , "posix_event_engine_timer_manager": @@ -1377,12 +1569,14 @@ , "notification" , "posix_event_engine_timer" , "time" - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] - , ["", "grpc_trace"] , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/time", "time"] , ["@", "absl", "absl/types", "optional"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] + , ["", "grpc_trace"] ] } , "posix_event_engine_event_poller": @@ -1394,11 +1588,11 @@ [ "event_engine_poller" , "forkable" , "posix_event_engine_closure" - , ["", "event_engine_base_hdrs"] - , ["", "gpr_platform"] , ["@", "absl", "absl/functional", "any_invocable"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/strings", "strings"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr_platform"] ] } , 
"posix_event_engine_closure": @@ -1407,10 +1601,10 @@ , "stage": ["src", "core"] , "hdrs": ["lib/event_engine/posix_engine/posix_engine_closure.h"] , "deps": - [ ["", "event_engine_base_hdrs"] - , ["", "gpr_platform"] - , ["@", "absl", "absl/functional", "any_invocable"] + [ ["@", "absl", "absl/functional", "any_invocable"] , ["@", "absl", "absl/status", "status"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr_platform"] ] } , "posix_event_engine_lockfree_event": @@ -1424,8 +1618,9 @@ , "posix_event_engine_closure" , "posix_event_engine_event_poller" , "status_helper" - , ["", "gpr"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/status", "status"] + , ["", "gpr"] ] } , "posix_event_engine_wakeup_fd_posix": @@ -1433,7 +1628,7 @@ , "name": ["posix_event_engine_wakeup_fd_posix"] , "stage": ["src", "core"] , "hdrs": ["lib/event_engine/posix_engine/wakeup_fd_posix.h"] - , "deps": [["", "gpr_platform"], ["@", "absl", "absl/status", "status"]] + , "deps": [["@", "absl", "absl/status", "status"], ["", "gpr_platform"]] } , "posix_event_engine_wakeup_fd_posix_pipe": { "type": ["@", "rules", "CC", "library"] @@ -1445,10 +1640,10 @@ [ "iomgr_port" , "posix_event_engine_wakeup_fd_posix" , "strerror" - , ["", "gpr"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr"] ] } , "posix_event_engine_wakeup_fd_posix_eventfd": @@ -1461,10 +1656,10 @@ [ "iomgr_port" , "posix_event_engine_wakeup_fd_posix" , "strerror" - , ["", "gpr"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr"] ] } , "posix_event_engine_wakeup_fd_posix_default": @@ -1478,9 +1673,9 @@ , "posix_event_engine_wakeup_fd_posix" , "posix_event_engine_wakeup_fd_posix_eventfd" , "posix_event_engine_wakeup_fd_posix_pipe" - , ["", "gpr_platform"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] + 
, ["", "gpr_platform"] ] } , "posix_event_engine_poller_posix_epoll1": @@ -1501,16 +1696,18 @@ , "posix_event_engine_wakeup_fd_posix_default" , "status_helper" , "strerror" - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] - , ["", "grpc_public_hdrs"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/container", "inlined_vector"] , ["@", "absl", "absl/functional", "function_ref"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] + , ["", "grpc_public_hdrs"] ] } , "posix_event_engine_poller_posix_poll": @@ -1531,17 +1728,18 @@ , "status_helper" , "strerror" , "time" - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] - , ["", "grpc_public_hdrs"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/container", "inlined_vector"] , ["@", "absl", "absl/functional", "any_invocable"] , ["@", "absl", "absl/functional", "function_ref"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] + , ["", "grpc_public_hdrs"] ] } , "posix_event_engine_poller_posix_default": @@ -1557,9 +1755,9 @@ , "posix_event_engine_event_poller" , "posix_event_engine_poller_posix_epoll1" , "posix_event_engine_poller_posix_poll" + , ["@", "absl", "absl/strings", "strings"] , ["", "config_vars"] , ["", "gpr"] - , ["@", "absl", "absl/strings", "strings"] ] } , "posix_event_engine_internal_errqueue": @@ -1568,7 +1766,8 @@ , "stage": ["src", "core"] , "srcs": ["lib/event_engine/posix_engine/internal_errqueue.cc"] , "hdrs": ["lib/event_engine/posix_engine/internal_errqueue.h"] - , "deps": ["iomgr_port", "strerror", ["", 
"gpr"]] + , "deps": + ["iomgr_port", "strerror", ["@", "absl", "absl/log", "log"], ["", "gpr"]] } , "posix_event_engine_traced_buffer_list": { "type": ["@", "rules", "CC", "library"] @@ -1579,10 +1778,11 @@ , "deps": [ "iomgr_port" , "posix_event_engine_internal_errqueue" - , ["", "gpr"] , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] ] } , "posix_event_engine_endpoint": @@ -1595,7 +1795,6 @@ [ "event_engine_common" , "event_engine_extensions" , "event_engine_tcp_socket_utils" - , "event_engine_trace" , "experiments" , "iomgr_port" , "load_file" @@ -1612,20 +1811,23 @@ , "status_helper" , "strerror" , "time" - , ["", "debug_location"] - , ["", "event_engine_base_hdrs"] - , ["", "exec_ctx"] - , ["", "gpr"] - , ["", "grpc_public_hdrs"] - , ["", "ref_counted_ptr"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/container", "flat_hash_map"] , ["@", "absl", "absl/functional", "any_invocable"] , ["@", "absl", "absl/hash", "hash"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "debug_location"] + , ["", "event_engine_base_hdrs"] + , ["", "exec_ctx"] + , ["", "gpr"] + , ["", "grpc_public_hdrs"] + , ["", "grpc_trace"] + , ["", "ref_counted_ptr"] ] } , "event_engine_utils": @@ -1636,9 +1838,10 @@ , "hdrs": ["lib/event_engine/utils.h"] , "deps": [ "time" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/strings", "strings"] , ["", "event_engine_base_hdrs"] , ["", "gpr_platform"] - , ["@", "absl", "absl/strings", "strings"] ] } , "posix_event_engine_tcp_socket_utils": @@ -1656,15 +1859,17 @@ , "strerror" , "time" , "useful" - , ["", "channel_arg_names"] - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] - 
, ["", "ref_counted_ptr"] , ["@", "absl", "absl/cleanup", "cleanup"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "channel_arg_names"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] + , ["", "ref_counted_ptr"] ] } , "posix_event_engine_listener_utils": @@ -1679,12 +1884,14 @@ , "posix_event_engine_tcp_socket_utils" , "socket_mutator" , "status_helper" - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] , ["@", "absl", "absl/cleanup", "cleanup"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] ] } , "posix_event_engine_listener": @@ -1695,7 +1902,6 @@ , "hdrs": ["lib/event_engine/posix_engine/posix_engine_listener.h"] , "deps": [ "event_engine_tcp_socket_utils" - , "event_engine_trace" , "iomgr_port" , "posix_event_engine_base_hdrs" , "posix_event_engine_closure" @@ -1707,15 +1913,18 @@ , "status_helper" , "strerror" , "time" - , ["", "event_engine_base_hdrs"] - , ["", "exec_ctx"] - , ["", "gpr"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "event_engine_base_hdrs"] + , ["", "exec_ctx"] + , ["", "gpr"] + , ["", "grpc_trace"] ] } , "posix_event_engine": @@ -1730,7 +1939,6 @@ , "event_engine_poller" , "event_engine_tcp_socket_utils" , "event_engine_thread_pool" - , "event_engine_trace" , "event_engine_utils" , "forkable" , "init_internally" @@ -1748,19 
+1956,20 @@ , "posix_event_engine_timer_manager" , "ref_counted_dns_resolver_interface" , "useful" - , ["", "config_vars"] - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] - , ["", "grpc_trace"] - , ["", "orphanable"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/cleanup", "cleanup"] , ["@", "absl", "absl/container", "flat_hash_map"] , ["@", "absl", "absl/functional", "any_invocable"] , ["@", "absl", "absl/hash", "hash"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] + , ["", "grpc_trace"] + , ["", "orphanable"] ] } , "windows_event_engine": @@ -1773,11 +1982,11 @@ [ "ares_resolver" , "channel_args_endpoint_config" , "common_event_engine_closures" + , "dump_args" , "error" , "event_engine_common" , "event_engine_tcp_socket_utils" , "event_engine_thread_pool" - , "event_engine_trace" , "event_engine_utils" , "init_internally" , "iomgr_port" @@ -1787,11 +1996,13 @@ , "windows_event_engine_listener" , "windows_iocp" , "windows_native_resolver" - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] ] } , "windows_native_resolver": @@ -1803,10 +2014,10 @@ , "deps": [ "error" , "status_helper" + , ["@", "absl", "absl/strings", "str_format"] , ["", "event_engine_base_hdrs"] , ["", "gpr"] , ["", "gpr_platform"] - , ["@", "absl", "absl/strings", "str_format"] ] } , "windows_iocp": @@ -1827,15 +2038,16 @@ , "event_engine_tcp_socket_utils" , "event_engine_thread_pool" , "event_engine_time_util" - , "event_engine_trace" - , ["", "debug_location"] - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] - , ["", 
"gpr_platform"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/strings", "str_format"] + , ["", "debug_location"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] + , ["", "gpr_platform"] ] } , "windows_endpoint": @@ -1848,17 +2060,18 @@ [ "error" , "event_engine_tcp_socket_utils" , "event_engine_thread_pool" - , "event_engine_trace" , "status_helper" , "windows_iocp" - , ["", "debug_location"] - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] - , ["", "gpr_platform"] , ["@", "absl", "absl/cleanup", "cleanup"] , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/strings", "str_format"] + , ["", "debug_location"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] + , ["", "gpr_platform"] ] } , "windows_event_engine_listener": @@ -1872,16 +2085,17 @@ , "error" , "event_engine_tcp_socket_utils" , "event_engine_thread_pool" - , "event_engine_trace" , "windows_endpoint" , "windows_iocp" - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] - , ["", "gpr_platform"] , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "str_format"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] + , ["", "gpr_platform"] ] } , "cf_event_engine": @@ -1903,7 +2117,6 @@ [ "event_engine_common" , "event_engine_tcp_socket_utils" , "event_engine_thread_pool" - , "event_engine_trace" , "event_engine_utils" , "init_internally" , "posix_event_engine_closure" @@ -1912,14 +2125,16 @@ , "posix_event_engine_timer_manager" , "ref_counted" , "strerror" + , ["@", "absl", "absl/container", 
"flat_hash_map"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] , ["", "event_engine_base_hdrs"] , ["", "gpr"] , ["", "parse_address"] , ["", "ref_counted_ptr"] , ["", "sockaddr_utils"] - , ["@", "absl", "absl/container", "flat_hash_map"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/strings", "str_format"] ] } , "event_engine_tcp_socket_utils": @@ -1932,26 +2147,20 @@ [ "iomgr_port" , "resolved_address" , "status_helper" - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] - , ["", "gpr_platform"] - , ["", "parse_address"] - , ["", "uri_parser"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] + , ["", "gpr_platform"] + , ["", "parse_address"] + , ["", "uri_parser"] ] } -, "event_engine_trace": - { "type": ["@", "rules", "CC", "library"] - , "name": ["event_engine_trace"] - , "stage": ["src", "core"] - , "srcs": ["lib/event_engine/trace.cc"] - , "hdrs": ["lib/event_engine/trace.h"] - , "deps": [["", "gpr"], ["", "gpr_platform"], ["", "grpc_trace"]] - } , "event_engine_shim": { "type": ["@", "rules", "CC", "library"] , "name": ["event_engine_shim"] @@ -1967,10 +2176,10 @@ , "srcs": ["lib/event_engine/default_event_engine_factory.cc"] , "hdrs": ["lib/event_engine/default_event_engine_factory.h"] , "deps": - [ ["", "event_engine_base_hdrs"] - , ["", "gpr_platform"] + [ "posix_event_engine" , ["@", "absl", "absl/memory", "memory"] - , "posix_event_engine" + , ["", "event_engine_base_hdrs"] + , ["", "gpr_platform"] ] } , "channel_args_endpoint_config": @@ -1981,10 +2190,10 @@ , "hdrs": ["lib/event_engine/channel_args_endpoint_config.h"] , 
"deps": [ "channel_args" - , ["", "event_engine_base_hdrs"] - , ["", "gpr_platform"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr_platform"] ] } , "thready_event_engine": @@ -1994,14 +2203,21 @@ , "srcs": ["lib/event_engine/thready_event_engine/thready_event_engine.cc"] , "hdrs": ["lib/event_engine/thready_event_engine/thready_event_engine.h"] , "deps": - [ ["", "event_engine_base_hdrs"] - , ["", "gpr"] - , ["@", "absl", "absl/functional", "any_invocable"] + [ ["@", "absl", "absl/functional", "any_invocable"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] ] } +, "event_engine_context": + { "type": ["@", "rules", "CC", "library"] + , "name": ["event_engine_context"] + , "stage": ["src", "core"] + , "hdrs": ["lib/event_engine/event_engine_context.h"] + , "deps": ["arena", ["", "event_engine_base_hdrs"], ["", "gpr"]] + } , "default_event_engine": { "type": ["@", "rules", "CC", "library"] , "name": ["default_event_engine"] @@ -2010,17 +2226,15 @@ , "hdrs": ["lib/event_engine/default_event_engine.h"] , "deps": [ "channel_args" - , "context" , "default_event_engine_factory" - , "event_engine_trace" , "no_destruct" , "thready_event_engine" + , ["@", "absl", "absl/functional", "any_invocable"] , ["", "config"] , ["", "debug_location"] , ["", "event_engine_base_hdrs"] , ["", "gpr"] , ["", "grpc_trace"] - , ["@", "absl", "absl/functional", "any_invocable"] ] } , "ref_counted_dns_resolver_interface": @@ -2029,10 +2243,10 @@ , "stage": ["src", "core"] , "hdrs": ["lib/event_engine/ref_counted_dns_resolver_interface.h"] , "deps": - [ ["", "event_engine_base_hdrs"] + [ ["@", "absl", "absl/strings", "strings"] + , ["", "event_engine_base_hdrs"] , ["", "gpr_platform"] , ["", "orphanable"] - , ["@", "absl", "absl/strings", "strings"] ] } , 
"native_posix_dns_resolver": @@ -2044,13 +2258,13 @@ , "deps": [ "iomgr_port" , "useful" - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] , ["@", "absl", "absl/functional", "any_invocable"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] ] } , "ares_resolver": @@ -2081,25 +2295,28 @@ , "resolved_address" , "slice" , "windows_iocp" - , ["", "debug_location"] - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] - , ["", "grpc_trace"] - , ["", "orphanable"] - , ["", "parse_address"] - , ["", "ref_counted_ptr"] - , ["", "sockaddr_utils"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/container", "flat_hash_map"] , ["@", "absl", "absl/functional", "any_invocable"] , ["@", "absl", "absl/hash", "hash"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] , ["@", "absl", "absl/types", "variant"] , ["@", "cares", "", "ares"] + , ["", "config_vars"] + , ["", "debug_location"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] + , ["", "grpc_trace"] + , ["", "orphanable"] + , ["", "parse_address"] + , ["", "ref_counted_ptr"] + , ["", "sockaddr_utils"] , ["third_party/address_sorting", "address_sorting"] ] } @@ -2120,9 +2337,11 @@ , "hdrs": ["lib/transport/bdp_estimator.h"] , "deps": [ "time" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "strings"] , ["", "gpr"] , ["", "grpc_trace"] - , ["@", "absl", "absl/strings", "strings"] ] } , "percent_encoding": @@ -2131,7 +2350,8 @@ , "stage": ["src", 
"core"] , "srcs": ["lib/slice/percent_encoding.cc"] , "hdrs": ["lib/slice/percent_encoding.h"] - , "deps": ["bitset", "slice", ["", "gpr"]] + , "deps": + ["bitset", "slice", ["@", "absl", "absl/log", "check"], ["", "gpr"]] } , "socket_mutator": { "type": ["@", "rules", "CC", "library"] @@ -2154,21 +2374,21 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["histogram_view"] , "stage": ["src", "core"] - , "srcs": ["lib/debug/histogram_view.cc"] - , "hdrs": ["lib/debug/histogram_view.h"] + , "srcs": ["telemetry/histogram_view.cc"] + , "hdrs": ["telemetry/histogram_view.h"] , "deps": [["", "gpr"]] } , "stats_data": { "type": ["@", "rules", "CC", "library"] , "name": ["stats_data"] , "stage": ["src", "core"] - , "srcs": ["lib/debug/stats_data.cc"] - , "hdrs": ["lib/debug/stats_data.h"] + , "srcs": ["telemetry/stats_data.cc"] + , "hdrs": ["telemetry/stats_data.h"] , "deps": [ "histogram_view" , "per_cpu" - , ["", "gpr_platform"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr_platform"] ] } , "per_cpu": @@ -2177,7 +2397,7 @@ , "stage": ["src", "core"] , "srcs": ["lib/gprpp/per_cpu.cc"] , "hdrs": ["lib/gprpp/per_cpu.h"] - , "deps": ["useful", ["", "exec_ctx"], ["", "gpr"]] + , "deps": ["useful", ["", "gpr"]] } , "event_log": { "type": ["@", "rules", "CC", "library"] @@ -2187,10 +2407,11 @@ , "hdrs": ["lib/debug/event_log.h"] , "deps": [ "per_cpu" - , ["", "gpr"] , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "span"] + , ["", "gpr"] ] } , "load_file": @@ -2201,11 +2422,11 @@ , "hdrs": ["lib/gprpp/load_file.h"] , "deps": [ "slice" - , ["", "gpr"] , ["@", "absl", "absl/cleanup", "cleanup"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr"] ] } , "http2_errors": @@ -2222,14 +2443,6 @@ , "hdrs": ["lib/surface/channel_stack_type.h"] , "deps": [["", "gpr_platform"]] 
} -, "channel_stack_trace": - { "type": ["@", "rules", "CC", "library"] - , "name": ["channel_stack_trace"] - , "stage": ["src", "core"] - , "srcs": ["lib/channel/channel_stack_trace.cc"] - , "hdrs": ["lib/channel/channel_stack_trace.h"] - , "deps": [["", "gpr_platform"], ["", "grpc_trace"]] - } , "channel_init": { "type": ["@", "rules", "CC", "library"] , "name": ["channel_init"] @@ -2240,16 +2453,31 @@ [ "call_filters" , "channel_args" , "channel_fwd" - , "channel_stack_trace" , "channel_stack_type" + , "interception_chain" + , "unique_type_name" + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "channel_stack_builder"] , ["", "debug_location"] , ["", "gpr"] , ["", "gpr_platform"] , ["", "grpc_trace"] - , ["@", "absl", "absl/functional", "any_invocable"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] + ] + } +, "server_interface": + { "type": ["@", "rules", "CC", "library"] + , "name": ["server_interface"] + , "stage": ["src", "core"] + , "hdrs": ["server/server_interface.h"] + , "deps": + [ "channel_args" + , ["", "channelz"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr_platform"] ] } , "single_set_ptr": @@ -2257,7 +2485,7 @@ , "name": ["single_set_ptr"] , "stage": ["src", "core"] , "hdrs": ["lib/gprpp/single_set_ptr.h"] - , "deps": [["", "gpr"]] + , "deps": [["@", "absl", "absl/log", "check"], ["", "gpr"]] } , "grpc_service_config": { "type": ["@", "rules", "CC", "library"] @@ -2270,15 +2498,16 @@ , "deps": [ "arena" , "chunked_vector" + , "down_cast" , "ref_counted" , "service_config_parser" , "slice_refcount" , "unique_type_name" , "useful" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/strings", "strings"] , ["", "gpr_platform"] - , ["", "legacy_context"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/strings", "strings"] 
] } , "service_config_parser": @@ -2291,8 +2520,9 @@ [ "channel_args" , "json" , "validation_errors" - , ["", "gpr"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr"] ] } , "notification": @@ -2300,7 +2530,7 @@ , "name": ["notification"] , "stage": ["src", "core"] , "hdrs": ["lib/gprpp/notification.h"] - , "deps": [["", "gpr"], ["@", "absl", "absl/time", "time"]] + , "deps": [["@", "absl", "absl/time", "time"], ["", "gpr"]] } , "channel_args": { "type": ["@", "rules", "CC", "library"] @@ -2316,16 +2546,18 @@ , "ref_counted_string" , "time" , "useful" - , ["", "channel_arg_names"] - , ["", "debug_location"] - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] - , ["", "ref_counted_ptr"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/meta", "type_traits"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] , ["@", "absl", "absl/types", "variant"] + , ["", "channel_arg_names"] + , ["", "debug_location"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] + , ["", "ref_counted_ptr"] ] } , "resolved_address": @@ -2335,6 +2567,203 @@ , "hdrs": ["lib/iomgr/resolved_address.h"] , "deps": ["iomgr_port", ["", "gpr_platform"]] } +, "client_channel_internal_header": + { "type": ["@", "rules", "CC", "library"] + , "name": ["client_channel_internal_header"] + , "stage": ["src", "core"] + , "hdrs": ["client_channel/client_channel_internal.h"] + , "deps": + [ "arena" + , "call_destination" + , "down_cast" + , "grpc_service_config" + , "lb_policy" + , "unique_type_name" + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["", "call_tracer"] + , ["", "gpr"] + ] + } +, "subchannel_connector": + { "type": ["@", "rules", "CC", "library"] + , "name": ["subchannel_connector"] + , "stage": ["src", "core"] + , "hdrs": ["client_channel/connector.h"] + , "deps": + [ 
"channel_args" + , "closure" + , "error" + , "iomgr_fwd" + , "resolved_address" + , "time" + , ["", "channelz"] + , ["", "gpr_platform"] + , ["", "grpc_base"] + , ["", "iomgr"] + , ["", "orphanable"] + , ["", "ref_counted_ptr"] + ] + } +, "subchannel_pool_interface": + { "type": ["@", "rules", "CC", "library"] + , "name": ["subchannel_pool_interface"] + , "stage": ["src", "core"] + , "srcs": ["client_channel/subchannel_pool_interface.cc"] + , "hdrs": ["client_channel/subchannel_pool_interface.h"] + , "deps": + [ "channel_args" + , "ref_counted" + , "resolved_address" + , "useful" + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr_platform"] + , ["", "grpc_trace"] + , ["", "ref_counted_ptr"] + , ["", "sockaddr_utils"] + ] + } +, "config_selector": + { "type": ["@", "rules", "CC", "library"] + , "name": ["config_selector"] + , "stage": ["src", "core"] + , "hdrs": ["client_channel/config_selector.h"] + , "deps": + [ "arena" + , "channel_args" + , "channel_fwd" + , "client_channel_internal_header" + , "grpc_service_config" + , "interception_chain" + , "metadata_batch" + , "ref_counted" + , "slice" + , "unique_type_name" + , "useful" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr_public_hdrs"] + , ["", "grpc_public_hdrs"] + , ["", "ref_counted_ptr"] + ] + } +, "client_channel_service_config": + { "type": ["@", "rules", "CC", "library"] + , "name": ["client_channel_service_config"] + , "stage": ["src", "core"] + , "srcs": ["client_channel/client_channel_service_config.cc"] + , "hdrs": ["client_channel/client_channel_service_config.h"] + , "deps": + [ "channel_args" + , "json" + , "json_args" + , "json_object_loader" + , "lb_policy" + , "lb_policy_registry" + , "service_config_parser" + , "time" + , "validation_errors" + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", 
"absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["", "config"] + , ["", "gpr_platform"] + , ["", "ref_counted_ptr"] + ] + } +, "retry_service_config": + { "type": ["@", "rules", "CC", "library"] + , "name": ["retry_service_config"] + , "stage": ["src", "core"] + , "srcs": ["client_channel/retry_service_config.cc"] + , "hdrs": ["client_channel/retry_service_config.h"] + , "deps": + [ "channel_args" + , "json" + , "json_args" + , "json_channel_args" + , "json_object_loader" + , "service_config_parser" + , "time" + , "validation_errors" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["", "channel_arg_names"] + , ["", "config"] + , ["", "gpr_public_hdrs"] + , ["", "grpc_base"] + , ["", "grpc_public_hdrs"] + ] + } +, "retry_throttle": + { "type": ["@", "rules", "CC", "library"] + , "name": ["retry_throttle"] + , "stage": ["src", "core"] + , "srcs": ["client_channel/retry_throttle.cc"] + , "hdrs": ["client_channel/retry_throttle.h"] + , "deps": + [ "gpr_atm" + , "ref_counted" + , ["@", "absl", "absl/base", "core_headers"] + , ["", "gpr"] + , ["", "ref_counted_ptr"] + ] + } +, "client_channel_backup_poller": + { "type": ["@", "rules", "CC", "library"] + , "name": ["client_channel_backup_poller"] + , "stage": ["src", "core"] + , "srcs": ["client_channel/backup_poller.cc"] + , "hdrs": ["client_channel/backup_poller.h"] + , "deps": + [ "closure" + , "error" + , "iomgr_fwd" + , "pollset_set" + , "time" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["", "config_vars"] + , ["", "gpr"] + , ["", "gpr_platform"] + , ["", "iomgr"] + , ["", "iomgr_timer"] + ] + } +, "service_config_channel_arg_filter": + { "type": ["@", "rules", "CC", "library"] + , "name": ["service_config_channel_arg_filter"] + , "stage": ["src", "core"] + , "srcs": ["service_config/service_config_channel_arg_filter.cc"] + , 
"deps": + [ "arena" + , "arena_promise" + , "channel_args" + , "channel_fwd" + , "channel_stack_type" + , "context" + , "grpc_message_size_filter" + , "grpc_service_config" + , "metadata_batch" + , "service_config_parser" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/types", "optional"] + , ["", "channel_arg_names"] + , ["", "config"] + , ["", "gpr_platform"] + , ["", "gpr_public_hdrs"] + , ["", "grpc_base"] + , ["", "grpc_service_config_impl"] + , ["", "ref_counted_ptr"] + ] + } , "lb_policy": { "type": ["@", "rules", "CC", "library"] , "name": ["lb_policy"] @@ -2348,10 +2777,18 @@ , "error" , "grpc_backend_metric_data" , "iomgr_fwd" + , "metrics" , "pollset_set" , "ref_counted" , "resolved_address" , "subchannel_interface" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/container", "inlined_vector"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "variant"] , ["", "debug_location"] , ["", "endpoint_addresses"] , ["", "event_engine_base_hdrs"] @@ -2362,12 +2799,6 @@ , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "work_serializer"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "absl", "absl/types", "variant"] ] } , "lb_policy_factory": @@ -2378,11 +2809,11 @@ , "deps": [ "json" , "lb_policy" + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] , ["", "gpr_platform"] , ["", "orphanable"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] ] } , "lb_policy_registry": @@ -2395,13 +2826,30 @@ [ 
"json" , "lb_policy" , "lb_policy_factory" - , ["", "gpr"] - , ["", "orphanable"] - , ["", "ref_counted_ptr"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr"] + , ["", "orphanable"] + , ["", "ref_counted_ptr"] + ] + } +, "lb_metadata": + { "type": ["@", "rules", "CC", "library"] + , "name": ["lb_metadata"] + , "stage": ["src", "core"] + , "srcs": ["client_channel/lb_metadata.cc"] + , "hdrs": ["client_channel/lb_metadata.h"] + , "deps": + [ "event_engine_common" + , "lb_policy" + , "metadata_batch" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] ] } , "subchannel_interface": @@ -2412,10 +2860,10 @@ , "deps": [ "dual_ref_counted" , "iomgr_fwd" + , ["@", "absl", "absl/status", "status"] , ["", "event_engine_base_hdrs"] , ["", "gpr_platform"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/status", "status"] ] } , "delegating_helper": @@ -2428,48 +2876,93 @@ , "lb_policy" , "resolved_address" , "subchannel_interface" + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "strings"] , ["", "debug_location"] , ["", "event_engine_base_hdrs"] , ["", "gpr_platform"] , ["", "grpc_security_base"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/status", "status"] + ] + } +, "backend_metric_parser": + { "type": ["@", "rules", "CC", "library"] + , "name": ["backend_metric_parser"] + , "stage": ["src", "core"] + , "srcs": ["load_balancing/backend_metric_parser.cc"] + , "hdrs": ["load_balancing/backend_metric_parser.h"] + , "deps": + [ "grpc_backend_metric_data" , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr_platform"] + , ["src/core/ext/upb-gen", "upb-gen-lib"] + , ["third_party/upb", "base"] + , ["third_party/upb", "mem"] + , ["third_party/upb", 
"message"] ] } , "proxy_mapper": { "type": ["@", "rules", "CC", "library"] , "name": ["proxy_mapper"] , "stage": ["src", "core"] - , "hdrs": ["lib/handshaker/proxy_mapper.h"] + , "hdrs": ["handshaker/proxy_mapper.h"] , "deps": [ "channel_args" , "resolved_address" - , ["", "gpr_platform"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "gpr_platform"] ] } , "proxy_mapper_registry": { "type": ["@", "rules", "CC", "library"] , "name": ["proxy_mapper_registry"] , "stage": ["src", "core"] - , "srcs": ["lib/handshaker/proxy_mapper_registry.cc"] - , "hdrs": ["lib/handshaker/proxy_mapper_registry.h"] + , "srcs": ["handshaker/proxy_mapper_registry.cc"] + , "hdrs": ["handshaker/proxy_mapper_registry.h"] , "deps": [ "channel_args" , "proxy_mapper" , "resolved_address" + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "gpr_platform"] + ] + } +, "http_proxy_mapper": + { "type": ["@", "rules", "CC", "library"] + , "name": ["http_proxy_mapper"] + , "stage": ["src", "core"] + , "srcs": ["handshaker/http_connect/http_proxy_mapper.cc"] + , "hdrs": ["handshaker/http_connect/http_proxy_mapper.h"] + , "deps": + [ "channel_args" + , "env" + , "experiments" + , "proxy_mapper" + , "resolved_address" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "channel_arg_names"] + , ["", "config"] + , ["", "gpr"] + , ["", "grpc_base"] + , ["", "http_connect_handshaker"] + , ["", "iomgr"] + , ["", "parse_address"] + , ["", "sockaddr_utils"] + , ["", "uri_parser"] ] } , "grpc_server_config_selector": { "type": ["@", "rules", "CC", "library"] , "name": ["grpc_server_config_selector"] , "stage": ["src", "core"] - , "hdrs": ["ext/filters/server_config_selector/server_config_selector.h"] + , "hdrs": 
["server/server_config_selector.h"] , "deps": [ "dual_ref_counted" , "grpc_service_config" @@ -2477,40 +2970,39 @@ , "ref_counted" , "service_config_parser" , "useful" + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] , ["", "gpr_platform"] , ["", "grpc_base"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] ] } , "grpc_server_config_selector_filter": { "type": ["@", "rules", "CC", "library"] , "name": ["grpc_server_config_selector_filter"] , "stage": ["src", "core"] - , "srcs": - ["ext/filters/server_config_selector/server_config_selector_filter.cc"] - , "hdrs": - ["ext/filters/server_config_selector/server_config_selector_filter.h"] + , "srcs": ["server/server_config_selector_filter.cc"] + , "hdrs": ["server/server_config_selector_filter.h"] , "deps": [ "arena" , "arena_promise" , "channel_args" , "channel_fwd" , "context" + , "event_engine_context" , "grpc_server_config_selector" , "grpc_service_config" , "metadata_batch" , "status_helper" - , ["", "gpr"] - , ["", "grpc_base"] - , ["", "legacy_context"] - , ["", "promise"] - , ["", "ref_counted_ptr"] , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] + , ["", "grpc_base"] + , ["", "promise"] + , ["", "ref_counted_ptr"] ] } , "sorted_pack": @@ -2544,10 +3036,11 @@ , "json_args" , "ref_counted" , "validation_errors" + , ["@", "absl", "absl/strings", "strings"] , ["", "alts_util"] , ["", "gpr"] + , ["", "grpc_core_credentials_header"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/strings", "strings"] ] } , "certificate_provider_registry": @@ -2560,8 +3053,10 @@ ["lib/security/certificate_provider/certificate_provider_registry.h"] , "deps": [ "certificate_provider_factory" - , ["", "gpr"] + , ["@", "absl", "absl/log", "check"] + , 
["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr"] ] } , "grpc_audit_logging": @@ -2577,14 +3072,15 @@ , "lib/security/authorization/stdout_logger.h" ] , "deps": - [ ["", "gpr"] - , ["", "grpc_base"] - , ["@", "absl", "absl/base", "core_headers"] + [ ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/time", "time"] + , ["", "gpr"] + , ["", "grpc_base"] ] } , "grpc_authorization_base": @@ -2607,12 +3103,18 @@ , "channel_args" , "channel_fwd" , "dual_ref_counted" + , "endpoint_info_handshaker" , "load_file" , "metadata_batch" , "ref_counted" , "resolved_address" , "slice" , "useful" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "channel_arg_names"] , ["", "gpr"] , ["", "grpc_base"] @@ -2623,10 +3125,6 @@ , ["", "promise"] , ["", "ref_counted_ptr"] , ["", "uri_parser"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_crl_provider": @@ -2638,9 +3136,6 @@ , "deps": [ "default_event_engine" , "directory_reader" - , ["", "exec_ctx"] - , ["", "gpr"] - , ["", "grpc_base"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/container", "flat_hash_map"] , ["@", "absl", "absl/status", "status"] @@ -2650,6 +3145,9 @@ , ["@", "absl", "absl/types", "span"] , ["@", "ssl", "", "crypto"] , ["@", "ssl", "", "ssl"] + , ["", "exec_ctx"] + , ["", "gpr"] + , ["", "grpc_base"] ] } , "grpc_fake_credentials": @@ -2675,22 +3173,27 @@ , "slice" , "unique_type_name" , "useful" + , ["@", "absl", "absl/log", 
"check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "channel_arg_names"] , ["", "debug_location"] , ["", "exec_ctx"] , ["", "gpr"] , ["", "grpc_base"] + , ["", "grpc_core_credentials_header"] , ["", "grpc_security_base"] , ["", "handshaker"] + , ["", "iomgr"] , ["", "promise"] , ["", "ref_counted_ptr"] + , ["", "resource_quota_api"] , ["", "tsi_base"] , ["", "tsi_fake_credentials"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_insecure_credentials": @@ -2713,17 +3216,20 @@ , "iomgr_fwd" , "tsi_local_credentials" , "unique_type_name" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "strings"] , ["", "debug_location"] , ["", "exec_ctx"] , ["", "gpr"] , ["", "grpc_base"] + , ["", "grpc_core_credentials_header"] , ["", "grpc_security_base"] , ["", "handshaker"] + , ["", "iomgr"] , ["", "promise"] , ["", "ref_counted_ptr"] , ["", "tsi_base"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/strings", "strings"] ] } , "tsi_local_credentials": @@ -2733,7 +3239,8 @@ , "srcs": ["tsi/local_transport_security.cc"] , "hdrs": ["tsi/local_transport_security.h"] , "deps": - [ ["", "event_engine_base_hdrs"] + [ ["@", "absl", "absl/log", "log"] + , ["", "event_engine_base_hdrs"] , ["", "exec_ctx"] , ["", "gpr"] , ["", "tsi_base"] @@ -2762,23 +3269,27 @@ , "tsi_local_credentials" , "unique_type_name" , "useful" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", 
"absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "debug_location"] , ["", "exec_ctx"] , ["", "gpr"] , ["", "grpc_base"] , ["", "grpc_client_channel"] + , ["", "grpc_core_credentials_header"] , ["", "grpc_security_base"] , ["", "handshaker"] + , ["", "iomgr"] , ["", "parse_address"] , ["", "promise"] , ["", "ref_counted_ptr"] , ["", "sockaddr_utils"] , ["", "tsi_base"] , ["", "uri_parser"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_ssl_credentials": @@ -2801,23 +3312,27 @@ , "iomgr_fwd" , "unique_type_name" , "useful" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "channel_arg_names"] , ["", "debug_location"] , ["", "exec_ctx"] , ["", "gpr"] , ["", "grpc_base"] + , ["", "grpc_core_credentials_header"] , ["", "grpc_security_base"] , ["", "grpc_trace"] , ["", "handshaker"] + , ["", "iomgr"] , ["", "promise"] , ["", "ref_counted_ptr"] , ["", "tsi_base"] , ["", "tsi_ssl_credentials"] , ["", "tsi_ssl_session_cache"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_google_default_credentials": @@ -2852,23 +3367,27 @@ , "time" , "unique_type_name" , "useful" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "alts_util"] , ["", "channel_arg_names"] , ["", "exec_ctx"] , ["", "gpr"] , ["", "grpc_alts_credentials"] , ["", "grpc_base"] + , ["", "grpc_core_credentials_header"] , ["", 
"grpc_jwt_credentials"] , ["", "grpc_public_hdrs"] , ["", "grpc_security_base"] , ["", "grpc_trace"] , ["", "httpcli"] + , ["", "iomgr"] , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "uri_parser"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "strerror": @@ -2877,7 +3396,7 @@ , "stage": ["src", "core"] , "srcs": ["lib/gprpp/strerror.cc"] , "hdrs": ["lib/gprpp/strerror.h"] - , "deps": [["", "gpr_platform"], ["@", "absl", "absl/strings", "str_format"]] + , "deps": [["@", "absl", "absl/strings", "str_format"], ["", "gpr_platform"]] } , "grpc_tls_credentials": { "type": ["@", "rules", "CC", "library"] @@ -2913,30 +3432,34 @@ , "status_helper" , "unique_type_name" , "useful" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/container", "inlined_vector"] + , ["@", "absl", "absl/functional", "bind_front"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "ssl", "", "crypto"] + , ["@", "ssl", "", "ssl"] , ["", "channel_arg_names"] , ["", "debug_location"] , ["", "exec_ctx"] , ["", "gpr"] , ["", "grpc_base"] + , ["", "grpc_core_credentials_header"] , ["", "grpc_credentials_util"] , ["", "grpc_public_hdrs"] , ["", "grpc_security_base"] , ["", "grpc_trace"] , ["", "handshaker"] + , ["", "iomgr"] , ["", "promise"] , ["", "ref_counted_ptr"] , ["", "tsi_base"] , ["", "tsi_ssl_credentials"] , ["", "tsi_ssl_session_cache"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/container", "inlined_vector"] - , ["@", "absl", "absl/functional", "bind_front"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] - , 
["@", "ssl", "", "crypto"] - , ["@", "ssl", "", "ssl"] ] } , "grpc_iam_credentials": @@ -2951,17 +3474,96 @@ , "slice" , "unique_type_name" , "useful" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "exec_ctx"] , ["", "gpr"] , ["", "grpc_base"] + , ["", "grpc_core_credentials_header"] , ["", "grpc_security_base"] , ["", "grpc_trace"] , ["", "promise"] , ["", "ref_counted_ptr"] + ] + } +, "token_fetcher_credentials": + { "type": ["@", "rules", "CC", "library"] + , "name": ["token_fetcher_credentials"] + , "stage": ["src", "core"] + , "srcs": + ["lib/security/credentials/token_fetcher/token_fetcher_credentials.cc"] + , "hdrs": + ["lib/security/credentials/token_fetcher/token_fetcher_credentials.h"] + , "deps": + [ "arena_promise" + , "context" + , "default_event_engine" + , "metadata" + , "poll" + , "pollset_set" + , "ref_counted" + , "time" + , "useful" + , ["@", "absl", "absl/container", "flat_hash_set"] + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/types", "variant"] + , ["", "backoff"] + , ["", "gpr"] + , ["", "grpc_security_base"] + , ["", "grpc_trace"] + , ["", "httpcli"] + , ["", "iomgr"] + , ["", "orphanable"] + , ["", "promise"] + , ["", "ref_counted_ptr"] + ] + } +, "gcp_service_account_identity_credentials": + { "type": ["@", "rules", "CC", "library"] + , "name": ["gcp_service_account_identity_credentials"] + , "stage": ["src", "core"] + , "srcs": + [ "lib/security/credentials/gcp_service_account_identity/gcp_service_account_identity_credentials.cc" + ] + , "hdrs": + [ "lib/security/credentials/gcp_service_account_identity/gcp_service_account_identity_credentials.h" + ] + , "deps": + [ "activity" + , "arena_promise" + , "closure" + , "error" + , "json" + , "json_args" + , "json_object_loader" + , 
"json_reader" + , "metadata" + , "pollset_set" + , "ref_counted" + , "slice" + , "status_conversion" + , "status_helper" + , "time" + , "token_fetcher_credentials" + , "unique_type_name" + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] + , ["", "grpc_base"] + , ["", "grpc_core_credentials_header"] + , ["", "grpc_security_base"] + , ["", "httpcli"] + , ["", "iomgr"] + , ["", "orphanable"] + , ["", "promise"] + , ["", "ref_counted_ptr"] + , ["", "uri_parser"] ] } , "grpc_oauth2_credentials": @@ -2989,23 +3591,28 @@ , "slice_refcount" , "status_helper" , "time" + , "token_fetcher_credentials" , "unique_type_name" , "useful" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "gpr"] , ["", "grpc_base"] + , ["", "grpc_core_credentials_header"] , ["", "grpc_credentials_util"] , ["", "grpc_security_base"] , ["", "grpc_trace"] , ["", "httpcli"] + , ["", "iomgr"] , ["", "orphanable"] , ["", "promise"] , ["", "ref_counted_ptr"] , ["", "uri_parser"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_external_account_credentials": @@ -3041,29 +3648,34 @@ , "slice_refcount" , "status_helper" , "time" + , "token_fetcher_credentials" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", 
"absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/time", "time"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "ssl", "", "crypto"] , ["", "gpr"] , ["", "grpc_base"] + , ["", "grpc_core_credentials_header"] , ["", "grpc_credentials_util"] , ["", "grpc_security_base"] , ["", "httpcli"] + , ["", "iomgr"] , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "uri_parser"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/time", "time"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "ssl", "", "crypto"] ] } , "httpcli_ssl_credentials": { "type": ["@", "rules", "CC", "library"] , "name": ["httpcli_ssl_credentials"] , "stage": ["src", "core"] - , "srcs": ["lib/http/httpcli_security_connector.cc"] - , "hdrs": ["lib/http/httpcli_ssl_credentials.h"] + , "srcs": ["util/http_client/httpcli_security_connector.cc"] + , "hdrs": ["util/http_client/httpcli_ssl_credentials.h"] , "deps": [ "arena_promise" , "channel_args" @@ -3071,20 +3683,23 @@ , "error" , "iomgr_fwd" , "unique_type_name" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "channel_arg_names"] , ["", "debug_location"] , ["", "exec_ctx"] , ["", "gpr"] , ["", "grpc_base"] + , ["", "grpc_core_credentials_header"] , ["", "grpc_security_base"] , ["", "handshaker"] + , ["", "iomgr"] , ["", "promise"] , ["", "ref_counted_ptr"] , ["", "tsi_base"] , ["", "tsi_ssl_credentials"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "tsi_ssl_types": @@ -3092,7 +3707,7 @@ , "name": ["tsi_ssl_types"] , "stage": ["src", "core"] , "hdrs": ["tsi/ssl_types.h"] - , "deps": [["", "gpr_platform"], ["@", 
"ssl", "", "ssl"]] + , "deps": [["@", "ssl", "", "ssl"], ["", "gpr_platform"]] } , "grpc_matchers": { "type": ["@", "rules", "CC", "library"] @@ -3101,13 +3716,13 @@ , "srcs": ["lib/matchers/matchers.cc"] , "hdrs": ["lib/matchers/matchers.h"] , "deps": - [ ["", "gpr"] - , ["@", "absl", "absl/status", "status"] + [ ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] , ["@", "re2", "", "re2"] + , ["", "gpr"] ] } , "grpc_rbac_engine": @@ -3129,56 +3744,58 @@ , "grpc_authorization_base" , "grpc_matchers" , "resolved_address" - , ["", "gpr"] - , ["", "grpc_base"] - , ["", "parse_address"] - , ["", "sockaddr_utils"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] + , ["", "grpc_base"] + , ["", "parse_address"] + , ["", "sockaddr_utils"] ] } , "json": { "type": ["@", "rules", "CC", "library"] , "name": ["json"] , "stage": ["src", "core"] - , "hdrs": ["lib/json/json.h"] + , "hdrs": ["util/json/json.h"] , "deps": [["", "gpr"]] } , "json_reader": { "type": ["@", "rules", "CC", "library"] , "name": ["json_reader"] , "stage": ["src", "core"] - , "srcs": ["lib/json/json_reader.cc"] - , "hdrs": ["lib/json/json_reader.h"] + , "srcs": ["util/json/json_reader.cc"] + , "hdrs": ["util/json/json_reader.h"] , "deps": [ "json" , "match" - , ["", "gpr"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "variant"] + , ["", "gpr"] ] } , "json_writer": { "type": ["@", "rules", "CC", 
"library"] , "name": ["json_writer"] , "stage": ["src", "core"] - , "srcs": ["lib/json/json_writer.cc"] - , "hdrs": ["lib/json/json_writer.h"] - , "deps": ["json", ["", "gpr"], ["@", "absl", "absl/strings", "strings"]] + , "srcs": ["util/json/json_writer.cc"] + , "hdrs": ["util/json/json_writer.h"] + , "deps": ["json", ["@", "absl", "absl/strings", "strings"], ["", "gpr"]] } , "json_util": { "type": ["@", "rules", "CC", "library"] , "name": ["json_util"] , "stage": ["src", "core"] - , "srcs": ["lib/json/json_util.cc"] - , "hdrs": ["lib/json/json_util.h"] + , "srcs": ["util/json/json_util.cc"] + , "hdrs": ["util/json/json_util.h"] , "deps": [ "error" , "json" @@ -3187,49 +3804,49 @@ , "no_destruct" , "time" , "validation_errors" - , ["", "gpr"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr"] ] } , "json_args": { "type": ["@", "rules", "CC", "library"] , "name": ["json_args"] , "stage": ["src", "core"] - , "hdrs": ["lib/json/json_args.h"] - , "deps": [["", "gpr"], ["@", "absl", "absl/strings", "strings"]] + , "hdrs": ["util/json/json_args.h"] + , "deps": [["@", "absl", "absl/strings", "strings"], ["", "gpr"]] } , "json_object_loader": { "type": ["@", "rules", "CC", "library"] , "name": ["json_object_loader"] , "stage": ["src", "core"] - , "srcs": ["lib/json/json_object_loader.cc"] - , "hdrs": ["lib/json/json_object_loader.h"] + , "srcs": ["util/json/json_object_loader.cc"] + , "hdrs": ["util/json/json_object_loader.h"] , "deps": [ "json" , "json_args" , "no_destruct" , "time" , "validation_errors" - , ["", "gpr"] - , ["", "ref_counted_ptr"] , ["@", "absl", "absl/meta", "type_traits"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] + , ["", "ref_counted_ptr"] ] } , "json_channel_args": { "type": ["@", "rules", "CC", "library"] , "name": ["json_channel_args"] , "stage": ["src", "core"] - , "hdrs": 
["lib/json/json_channel_args.h"] + , "hdrs": ["util/json/json_channel_args.h"] , "deps": [ "channel_args" , "json_args" - , ["", "gpr"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] ] } , "idle_filter_state": @@ -3244,16 +3861,11 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["grpc_channel_idle_filter"] , "stage": ["src", "core"] - , "srcs": - [ "ext/filters/channel_idle/channel_idle_filter.cc" - , "ext/filters/channel_idle/legacy_channel_idle_filter.cc" - ] - , "hdrs": - [ "ext/filters/channel_idle/channel_idle_filter.h" - , "ext/filters/channel_idle/legacy_channel_idle_filter.h" - ] + , "srcs": ["ext/filters/channel_idle/legacy_channel_idle_filter.cc"] + , "hdrs": ["ext/filters/channel_idle/legacy_channel_idle_filter.h"] , "deps": [ "activity" + , "arena" , "arena_promise" , "channel_args" , "channel_fwd" @@ -3275,51 +3887,22 @@ , "status_helper" , "time" , "try_seq" - , ["", "channel_arg_names"] - , ["", "config"] - , ["", "debug_location"] - , ["", "exec_ctx"] - , ["", "gpr"] - , ["", "grpc_base"] - , ["", "grpc_trace"] - , ["", "orphanable"] - , ["", "promise"] - , ["", "ref_counted_ptr"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/meta", "type_traits"] , ["@", "absl", "absl/random", "random"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/types", "optional"] - ] - } -, "grpc_deadline_filter": - { "type": ["@", "rules", "CC", "library"] - , "name": ["grpc_deadline_filter"] - , "stage": ["src", "core"] - , "srcs": ["ext/filters/deadline/deadline_filter.cc"] - , "hdrs": ["ext/filters/deadline/deadline_filter.h"] - , "deps": - [ "arena" - , "arena_promise" - , "channel_fwd" - , "channel_stack_type" - , "closure" - , "context" - , "error" - , "metadata_batch" - , "status_helper" - , "time" , ["", "channel_arg_names"] , ["", "config"] , ["", "debug_location"] , ["", "exec_ctx"] , ["", "gpr"] , ["", "grpc_base"] - , ["", 
"grpc_public_hdrs"] - , ["", "iomgr_timer"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/types", "optional"] + , ["", "grpc_trace"] + , ["", "orphanable"] + , ["", "promise"] + , ["", "ref_counted_ptr"] ] } , "grpc_client_authority_filter": @@ -3335,15 +3918,15 @@ , "channel_stack_type" , "metadata_batch" , "slice" + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "channel_arg_names"] , ["", "config"] , ["", "gpr_platform"] , ["", "grpc_base"] , ["", "grpc_security_base"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_message_size_filter": @@ -3360,7 +3943,6 @@ , "channel_fwd" , "channel_stack_type" , "context" - , "grpc_deadline_filter" , "grpc_service_config" , "json" , "json_args" @@ -3373,17 +3955,17 @@ , "slice" , "slice_buffer" , "validation_errors" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "channel_arg_names"] , ["", "config"] , ["", "gpr"] , ["", "grpc_base"] , ["", "grpc_public_hdrs"] , ["", "grpc_trace"] - , ["", "legacy_context"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_fault_injection_filter": @@ -3413,19 +3995,19 @@ , "time" , "try_seq" , "validation_errors" - , ["", "config"] - , ["", "gpr"] - , ["", "grpc_base"] - , ["", "grpc_public_hdrs"] - , ["", "grpc_trace"] - , ["", "legacy_context"] , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/meta", "type_traits"] , ["@", 
"absl", "absl/random", "random"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "config"] + , ["", "gpr"] + , ["", "grpc_base"] + , ["", "grpc_public_hdrs"] + , ["", "grpc_trace"] ] } , "grpc_rbac_filter": @@ -3457,16 +4039,15 @@ , "metadata_batch" , "service_config_parser" , "validation_errors" + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "config"] , ["", "gpr"] , ["", "grpc_base"] , ["", "grpc_security_base"] - , ["", "legacy_context"] , ["", "promise"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_stateful_session_filter": @@ -3501,14 +4082,54 @@ , "time" , "unique_type_name" , "validation_errors" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "config"] , ["", "gpr"] , ["", "grpc_base"] , ["", "grpc_trace"] - , ["", "legacy_context"] + ] + } +, "gcp_authentication_filter": + { "type": ["@", "rules", "CC", "library"] + , "name": ["gcp_authentication_filter"] + , "stage": ["src", "core"] + , "srcs": + [ "ext/filters/gcp_authentication/gcp_authentication_filter.cc" + , "ext/filters/gcp_authentication/gcp_authentication_service_config_parser.cc" + ] + , "hdrs": + [ "ext/filters/gcp_authentication/gcp_authentication_filter.h" + , "ext/filters/gcp_authentication/gcp_authentication_service_config_parser.h" + ] + , "deps": + [ "arena" + , "channel_args" + , "channel_fwd" + , "context" + , "gcp_service_account_identity_credentials" + , "grpc_resolver_xds_attributes" + , "grpc_service_config" + , "json" + , "json_args" + , 
"json_object_loader" + , "lru_cache" + , "service_config_parser" + , "validation_errors" + , "xds_config" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "config"] + , ["", "gpr"] + , ["", "grpc_base"] + , ["", "grpc_security_base"] + , ["", "grpc_trace"] + , ["", "ref_counted_ptr"] ] } , "grpc_lb_policy_grpclb": @@ -3560,8 +4181,23 @@ , "time" , "useful" , "validation_errors" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/container", "inlined_vector"] + , ["@", "absl", "absl/functional", "function_ref"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "globals"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "variant"] , ["", "backoff"] + , ["", "channel"] , ["", "channel_arg_names"] + , ["", "channel_create"] + , ["", "channelz"] , ["", "config"] , ["", "debug_location"] , ["", "endpoint_addresses"] @@ -3575,176 +4211,454 @@ , ["", "grpc_resolver_fake"] , ["", "grpc_security_base"] , ["", "grpc_trace"] + , ["", "lb_child_policy_handler"] , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "sockaddr_utils"] , ["", "work_serializer"] + , ["src/core/ext/upb-gen", "upb-gen-lib"] + , ["third_party/upb", "base"] + , ["third_party/upb", "mem"] + ] + } +, "random_early_detection": + { "type": ["@", "rules", "CC", "library"] + , "name": ["random_early_detection"] + , "stage": ["src", "core"] + , "srcs": ["lib/backoff/random_early_detection.cc"] + , "hdrs": ["lib/backoff/random_early_detection.h"] + , "deps": + [ ["@", "absl", "absl/random", "bit_gen_ref"] + , ["@", "absl", "absl/random", "distributions"] + , ["", 
"gpr_platform"] + ] + } +, "grpc_backend_metric_data": + { "type": ["@", "rules", "CC", "library"] + , "name": ["grpc_backend_metric_data"] + , "stage": ["src", "core"] + , "hdrs": ["load_balancing/backend_metric_data.h"] + , "deps": [["@", "absl", "absl/strings", "strings"], ["", "gpr_platform"]] + } +, "grpc_backend_metric_provider": + { "type": ["@", "rules", "CC", "library"] + , "name": ["grpc_backend_metric_provider"] + , "stage": ["src", "core"] + , "hdrs": ["ext/filters/backend_metrics/backend_metric_provider.h"] + , "deps": ["arena"] + } +, "grpc_lb_policy_rls": + { "type": ["@", "rules", "CC", "library"] + , "name": ["grpc_lb_policy_rls"] + , "stage": ["src", "core"] + , "srcs": ["load_balancing/rls/rls.cc"] + , "hdrs": ["load_balancing/rls/rls.h"] + , "deps": + [ "channel_args" + , "closure" + , "connectivity_state" + , "delegating_helper" + , "dual_ref_counted" + , "error" + , "error_utils" + , "grpc_fake_credentials" + , "json" + , "json_args" + , "json_object_loader" + , "json_writer" + , "lb_policy" + , "lb_policy_factory" + , "lb_policy_registry" + , "match" + , "metrics" + , "pollset_set" + , "slice" + , "slice_refcount" + , "status_helper" + , "time" + , "upb_utils" + , "uuid_v4" + , "validation_errors" , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/container", "inlined_vector"] - , ["@", "absl", "absl/functional", "function_ref"] + , ["@", "absl", "absl/hash", "hash"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/random", "random"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["", "backoff"] + , ["", "channel"] + , ["", "channel_arg_names"] + , ["", "channel_create"] + , ["", "channelz"] + , ["", "config"] + , ["", "debug_location"] + , ["", 
"endpoint_addresses"] + , ["", "exec_ctx"] + , ["", "gpr"] + , ["", "grpc_base"] + , ["", "grpc_client_channel"] + , ["", "grpc_public_hdrs"] + , ["", "grpc_resolver"] + , ["", "grpc_security_base"] + , ["", "grpc_service_config_impl"] + , ["", "grpc_trace"] + , ["", "lb_child_policy_handler"] + , ["", "orphanable"] + , ["", "ref_counted_ptr"] + , ["", "work_serializer"] + , ["src/core/ext/upb-gen", "upb-gen-lib"] + , ["third_party/upb", "base"] + , ["third_party/upb", "mem"] + ] + } +, "lru_cache": + { "type": ["@", "rules", "CC", "library"] + , "name": ["lru_cache"] + , "stage": ["src", "core"] + , "hdrs": ["util/lru_cache.h"] + , "deps": + [ ["@", "absl", "absl/container", "flat_hash_map"] + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/types", "optional"] + , ["", "grpc_public_hdrs"] + ] + } +, "upb_utils": + { "type": ["@", "rules", "CC", "library"] + , "name": ["upb_utils"] + , "stage": ["src", "core"] + , "hdrs": ["util/upb_utils.h"] + , "deps": + [["@", "absl", "absl/strings", "strings"], ["third_party/upb", "base"]] + } +, "xds_enabled_server": + { "type": ["@", "rules", "CC", "library"] + , "name": ["xds_enabled_server"] + , "stage": ["src", "core"] + , "hdrs": ["xds/grpc/xds_enabled_server.h"] + } +, "xds_certificate_provider": + { "type": ["@", "rules", "CC", "library"] + , "name": ["xds_certificate_provider"] + , "stage": ["src", "core"] + , "srcs": ["xds/grpc/xds_certificate_provider.cc"] + , "hdrs": ["xds/grpc/xds_certificate_provider.h"] + , "deps": + [ "channel_args" + , "error" + , "grpc_matchers" + , "grpc_tls_credentials" + , "unique_type_name" + , "useful" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/functional", "bind_front"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] + , ["", "grpc_base"] + , ["", 
"ref_counted_ptr"] + , ["", "tsi_ssl_credentials"] + ] + } +, "xds_certificate_provider_store": + { "type": ["@", "rules", "CC", "library"] + , "name": ["xds_certificate_provider_store"] + , "stage": ["src", "core"] + , "srcs": ["xds/grpc/certificate_provider_store.cc"] + , "hdrs": ["xds/grpc/certificate_provider_store.h"] + , "deps": + [ "certificate_provider_factory" + , "certificate_provider_registry" + , "grpc_tls_credentials" + , "json" + , "json_args" + , "json_object_loader" + , "unique_type_name" + , "useful" + , "validation_errors" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "strings"] + , ["", "config"] + , ["", "gpr"] + , ["", "grpc_base"] + , ["", "orphanable"] + , ["", "ref_counted_ptr"] + ] + } +, "xds_credentials": + { "type": ["@", "rules", "CC", "library"] + , "name": ["xds_credentials"] + , "stage": ["src", "core"] + , "srcs": ["lib/security/credentials/xds/xds_credentials.cc"] + , "hdrs": ["lib/security/credentials/xds/xds_credentials.h"] + , "deps": + [ "channel_args" + , "grpc_lb_xds_channel_args" + , "grpc_matchers" + , "grpc_tls_credentials" + , "unique_type_name" + , "useful" + , "xds_certificate_provider" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/types", "optional"] + , ["", "channel_arg_names"] + , ["", "gpr"] + , ["", "grpc_base"] + , ["", "grpc_core_credentials_header"] + , ["", "grpc_credentials_util"] + , ["", "grpc_security_base"] + , ["", "ref_counted_ptr"] + ] + } +, "xds_file_watcher_certificate_provider_factory": + { "type": ["@", "rules", "CC", "library"] + , "name": ["xds_file_watcher_certificate_provider_factory"] + , "stage": ["src", "core"] + , "srcs": ["xds/grpc/file_watcher_certificate_provider_factory.cc"] + , "hdrs": ["xds/grpc/file_watcher_certificate_provider_factory.h"] + , "deps": + [ "certificate_provider_factory" + , "grpc_tls_credentials" + , 
"json" + , "json_args" + , "json_object_loader" + , "time" + , "validation_errors" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["", "config"] + , ["", "gpr"] + , ["", "grpc_base"] + , ["", "ref_counted_ptr"] + ] + } +, "xds_common_types": + { "type": ["@", "rules", "CC", "library"] + , "name": ["xds_common_types"] + , "stage": ["src", "core"] + , "srcs": ["xds/grpc/xds_common_types.cc"] + , "hdrs": ["xds/grpc/xds_common_types.h"] + , "deps": + [ "grpc_matchers" + , "json" + , "match" + , "validation_errors" + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "variant"] + ] + } +, "xds_http_filter": + { "type": ["@", "rules", "CC", "library"] + , "name": ["xds_http_filter"] + , "stage": ["src", "core"] + , "hdrs": ["xds/grpc/xds_http_filter.h"] + , "deps": + [ "channel_args" + , "channel_fwd" + , "interception_chain" + , "json" + , "json_writer" + , "validation_errors" + , "xds_common_types" + , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] - , ["@", "absl", "absl/types", "variant"] - , ["third_party/upb", "base"] - , ["third_party/upb", "mem"] - , ["src/core/ext/upb-gen", "upb-gen-lib"] + , ["", "xds_client"] + , ["third_party/upb", "reflection"] ] } -, "random_early_detection": +, "xds_route_config": { "type": ["@", "rules", "CC", "library"] - , "name": ["random_early_detection"] + , "name": ["xds_route_config"] , "stage": ["src", "core"] - , "srcs": ["lib/backoff/random_early_detection.cc"] - , "hdrs": ["lib/backoff/random_early_detection.h"] + , "srcs": ["xds/grpc/xds_route_config.cc"] + , "hdrs": ["xds/grpc/xds_route_config.h"] , "deps": - [ ["", "gpr_platform"] - , ["@", "absl", "absl/random", "bit_gen_ref"] - , ["@", "absl", "absl/random", "distributions"] + [ "grpc_matchers" + , "match" + , "time" + , "xds_http_filter" 
+ , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "variant"] + , ["@", "re2", "", "re2"] + , ["", "grpc_base"] + , ["", "xds_client"] ] } -, "grpc_backend_metric_data": +, "xds_listener": { "type": ["@", "rules", "CC", "library"] - , "name": ["grpc_backend_metric_data"] + , "name": ["xds_listener"] , "stage": ["src", "core"] - , "hdrs": ["load_balancing/backend_metric_data.h"] - , "deps": [["", "gpr_platform"], ["@", "absl", "absl/strings", "strings"]] + , "srcs": ["xds/grpc/xds_listener.cc"] + , "hdrs": ["xds/grpc/xds_listener.h"] + , "deps": + [ "match" + , "resolved_address" + , "time" + , "xds_common_types" + , "xds_http_filter" + , "xds_route_config" + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "variant"] + , ["", "sockaddr_utils"] + , ["", "xds_client"] + ] } -, "grpc_backend_metric_provider": +, "xds_health_status": { "type": ["@", "rules", "CC", "library"] - , "name": ["grpc_backend_metric_provider"] + , "name": ["xds_health_status"] , "stage": ["src", "core"] - , "hdrs": ["ext/filters/backend_metrics/backend_metric_provider.h"] + , "srcs": ["xds/grpc/xds_health_status.cc"] + , "hdrs": ["xds/grpc/xds_health_status.h"] + , "deps": + [ ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "span"] + , ["", "endpoint_addresses"] + , ["src/core/ext/upb-gen", "upb-gen-lib"] + ] } -, "grpc_lb_policy_rls": +, "xds_server_grpc": { "type": ["@", "rules", "CC", "library"] - , "name": ["grpc_lb_policy_rls"] + , "name": ["xds_server_grpc"] , "stage": ["src", "core"] - , "srcs": ["load_balancing/rls/rls.cc"] + , "srcs": ["xds/grpc/xds_server_grpc.cc"] + , "hdrs": ["xds/grpc/xds_server_grpc.h"] , "deps": - [ "channel_args" - , "closure" - , "connectivity_state" - , 
"delegating_helper" - , "dual_ref_counted" - , "error" - , "error_utils" - , "grpc_fake_credentials" + [ "channel_creds_registry" , "json" , "json_args" , "json_object_loader" + , "json_reader" , "json_writer" - , "lb_policy" - , "lb_policy_factory" - , "lb_policy_registry" - , "pollset_set" - , "slice" - , "slice_refcount" - , "status_helper" - , "time" , "validation_errors" - , ["", "backoff"] - , ["", "channel_arg_names"] + , ["@", "absl", "absl/strings", "strings"] , ["", "config"] - , ["", "debug_location"] - , ["", "endpoint_addresses"] - , ["", "exec_ctx"] - , ["", "gpr"] - , ["", "grpc_base"] - , ["", "grpc_client_channel"] - , ["", "grpc_public_hdrs"] - , ["", "grpc_resolver"] - , ["", "grpc_security_base"] - , ["", "grpc_service_config_impl"] - , ["", "grpc_trace"] - , ["", "orphanable"] , ["", "ref_counted_ptr"] - , ["", "work_serializer"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/hash", "hash"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] + , ["", "xds_client"] + ] + } +, "xds_metadata": + { "type": ["@", "rules", "CC", "library"] + , "name": ["xds_metadata"] + , "stage": ["src", "core"] + , "srcs": ["xds/grpc/xds_metadata.cc"] + , "hdrs": ["xds/grpc/xds_metadata.h"] + , "deps": + [ "down_cast" + , "json" + , "json_writer" + , "validation_errors" + , ["@", "absl", "absl/container", "flat_hash_map"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] - , ["third_party/upb", "base"] - , ["third_party/upb", "mem"] - , ["src/core/ext/upb-gen", "upb-gen-lib"] ] } -, "upb_utils": +, "xds_cluster": { "type": ["@", "rules", "CC", "library"] - , "name": ["upb_utils"] + , "name": ["xds_cluster"] , "stage": ["src", "core"] - , "hdrs": ["ext/xds/upb_utils.h"] + , "srcs": ["xds/grpc/xds_cluster.cc"] + , "hdrs": ["xds/grpc/xds_cluster.h"] , "deps": - [ ["", "gpr_platform"] + [ 
"grpc_outlier_detection_header" + , "json" + , "json_writer" + , "match" + , "time" + , "xds_common_types" + , "xds_health_status" + , "xds_metadata" + , "xds_server_grpc" + , ["@", "absl", "absl/container", "flat_hash_map"] , ["@", "absl", "absl/strings", "strings"] - , ["third_party/upb", "base"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "variant"] + , ["", "xds_client"] ] } -, "xds_enabled_server": +, "xds_endpoint": { "type": ["@", "rules", "CC", "library"] - , "name": ["xds_enabled_server"] + , "name": ["xds_endpoint"] , "stage": ["src", "core"] - , "hdrs": ["ext/xds/xds_enabled_server.h"] + , "srcs": ["xds/grpc/xds_endpoint.cc"] + , "hdrs": ["xds/grpc/xds_endpoint.h"] + , "deps": + [ "ref_counted" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/random", "random"] + , ["@", "absl", "absl/strings", "strings"] + , ["", "endpoint_addresses"] + , ["", "gpr"] + , ["", "ref_counted_ptr"] + , ["", "xds_client"] + ] } , "grpc_xds_client": { "type": ["@", "rules", "CC", "library"] , "name": ["grpc_xds_client"] , "stage": ["src", "core"] , "srcs": - [ "ext/xds/certificate_provider_store.cc" - , "ext/xds/file_watcher_certificate_provider_factory.cc" - , "ext/xds/xds_audit_logger_registry.cc" - , "ext/xds/xds_bootstrap_grpc.cc" - , "ext/xds/xds_certificate_provider.cc" - , "ext/xds/xds_client_grpc.cc" - , "ext/xds/xds_cluster.cc" - , "ext/xds/xds_cluster_specifier_plugin.cc" - , "ext/xds/xds_common_types.cc" - , "ext/xds/xds_endpoint.cc" - , "ext/xds/xds_health_status.cc" - , "ext/xds/xds_http_fault_filter.cc" - , "ext/xds/xds_http_filters.cc" - , "ext/xds/xds_http_rbac_filter.cc" - , "ext/xds/xds_http_stateful_session_filter.cc" - , "ext/xds/xds_lb_policy_registry.cc" - , "ext/xds/xds_listener.cc" - , "ext/xds/xds_route_config.cc" - , "ext/xds/xds_routing.cc" - , "ext/xds/xds_transport_grpc.cc" - , "lib/security/credentials/xds/xds_credentials.cc" + [ "xds/grpc/xds_audit_logger_registry.cc" + , 
"xds/grpc/xds_bootstrap_grpc.cc" + , "xds/grpc/xds_client_grpc.cc" + , "xds/grpc/xds_cluster_parser.cc" + , "xds/grpc/xds_cluster_specifier_plugin.cc" + , "xds/grpc/xds_common_types_parser.cc" + , "xds/grpc/xds_endpoint_parser.cc" + , "xds/grpc/xds_http_fault_filter.cc" + , "xds/grpc/xds_http_filter_registry.cc" + , "xds/grpc/xds_http_gcp_authn_filter.cc" + , "xds/grpc/xds_http_rbac_filter.cc" + , "xds/grpc/xds_http_stateful_session_filter.cc" + , "xds/grpc/xds_lb_policy_registry.cc" + , "xds/grpc/xds_listener_parser.cc" + , "xds/grpc/xds_metadata_parser.cc" + , "xds/grpc/xds_route_config_parser.cc" + , "xds/grpc/xds_routing.cc" + , "xds/grpc/xds_transport_grpc.cc" ] , "hdrs": - [ "ext/xds/certificate_provider_store.h" - , "ext/xds/file_watcher_certificate_provider_factory.h" - , "ext/xds/xds_audit_logger_registry.h" - , "ext/xds/xds_bootstrap_grpc.h" - , "ext/xds/xds_certificate_provider.h" - , "ext/xds/xds_client_grpc.h" - , "ext/xds/xds_cluster.h" - , "ext/xds/xds_cluster_specifier_plugin.h" - , "ext/xds/xds_common_types.h" - , "ext/xds/xds_endpoint.h" - , "ext/xds/xds_health_status.h" - , "ext/xds/xds_http_fault_filter.h" - , "ext/xds/xds_http_filters.h" - , "ext/xds/xds_http_rbac_filter.h" - , "ext/xds/xds_http_stateful_session_filter.h" - , "ext/xds/xds_lb_policy_registry.h" - , "ext/xds/xds_listener.h" - , "ext/xds/xds_route_config.h" - , "ext/xds/xds_routing.h" - , "ext/xds/xds_transport_grpc.h" - , "lib/security/credentials/xds/xds_credentials.h" + [ "xds/grpc/xds_audit_logger_registry.h" + , "xds/grpc/xds_bootstrap_grpc.h" + , "xds/grpc/xds_client_grpc.h" + , "xds/grpc/xds_cluster_parser.h" + , "xds/grpc/xds_cluster_specifier_plugin.h" + , "xds/grpc/xds_common_types_parser.h" + , "xds/grpc/xds_endpoint_parser.h" + , "xds/grpc/xds_http_fault_filter.h" + , "xds/grpc/xds_http_filter_registry.h" + , "xds/grpc/xds_http_gcp_authn_filter.h" + , "xds/grpc/xds_http_rbac_filter.h" + , "xds/grpc/xds_http_stateful_session_filter.h" + , 
"xds/grpc/xds_lb_policy_registry.h" + , "xds/grpc/xds_listener_parser.h" + , "xds/grpc/xds_metadata_parser.h" + , "xds/grpc/xds_route_config_parser.h" + , "xds/grpc/xds_routing.h" + , "xds/grpc/xds_transport_grpc.h" ] , "deps": [ "certificate_provider_factory" , "certificate_provider_registry" , "channel_args" + , "channel_args_endpoint_config" , "channel_creds_registry" , "channel_fwd" , "closure" @@ -3753,6 +4667,7 @@ , "env" , "error" , "error_utils" + , "gcp_authentication_filter" , "grpc_audit_logging" , "grpc_fake_credentials" , "grpc_fault_injection_filter" @@ -3766,6 +4681,7 @@ , "grpc_tls_credentials" , "grpc_transport_chttp2_client_connector" , "init_internally" + , "interception_chain" , "iomgr_fwd" , "json" , "json_args" @@ -3777,6 +4693,7 @@ , "load_file" , "match" , "metadata_batch" + , "metrics" , "pollset_set" , "ref_counted" , "resolved_address" @@ -3789,7 +4706,39 @@ , "upb_utils" , "useful" , "validation_errors" + , "xds_certificate_provider" + , "xds_certificate_provider_store" + , "xds_cluster" + , "xds_common_types" + , "xds_credentials" + , "xds_endpoint" + , "xds_file_watcher_certificate_provider_factory" + , "xds_health_status" + , "xds_http_filter" + , "xds_listener" + , "xds_metadata" + , "xds_route_config" + , "xds_server_grpc" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/cleanup", "cleanup"] + , ["@", "absl", "absl/functional", "bind_front"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/memory", "memory"] + , ["@", "absl", "absl/random", "random"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/synchronization", "synchronization"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "span"] + , ["@", "absl", "absl/types", "variant"] + , ["@", "protobuf", "", "libprotobuf"] + , 
["@", "re2", "", "re2"] + , ["", "channel"] , ["", "channel_arg_names"] + , ["", "channel_create"] , ["", "config"] , ["", "debug_location"] , ["", "endpoint_addresses"] @@ -3797,10 +4746,12 @@ , ["", "gpr"] , ["", "grpc_base"] , ["", "grpc_client_channel"] + , ["", "grpc_core_credentials_header"] , ["", "grpc_credentials_util"] , ["", "grpc_public_hdrs"] , ["", "grpc_security_base"] , ["", "grpc_trace"] + , ["", "iomgr"] , ["", "iomgr_timer"] , ["", "orphanable"] , ["", "parse_address"] @@ -3810,39 +4761,22 @@ , ["", "uri_parser"] , ["", "work_serializer"] , ["", "xds_client"] - , ["@", "protobuf", "", "libprotobuf"] , ["src/core/ext/upb-gen", "upb-gen-lib"] , ["src/core/ext/upbdefs-gen", "upbdefs-gen-lib"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/cleanup", "cleanup"] - , ["@", "absl", "absl/functional", "bind_front"] - , ["@", "absl", "absl/memory", "memory"] - , ["@", "absl", "absl/random", "random"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/synchronization", "synchronization"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "absl", "absl/types", "span"] - , ["@", "absl", "absl/types", "variant"] - , ["@", "re2", "", "re2"] - , ["third_party/upb", "message"] + , ["third_party/upb", "base"] , ["third_party/upb", "json"] + , ["third_party/upb", "mem"] + , ["third_party/upb", "message"] , ["third_party/upb", "reflection"] , ["third_party/upb", "text"] - , ["third_party/upb", "base"] - , ["third_party/upb", "mem"] - , ["src/core/ext/upb-gen", "upb-gen-lib"] - , ["src/core/ext/upbdefs-gen", "upbdefs-gen-lib"] ] } , "grpc_xds_channel_stack_modifier": { "type": ["@", "rules", "CC", "library"] , "name": ["grpc_xds_channel_stack_modifier"] , "stage": ["src", "core"] - , "srcs": ["ext/xds/xds_channel_stack_modifier.cc"] - , "hdrs": ["ext/xds/xds_channel_stack_modifier.h"] + , 
"srcs": ["server/xds_channel_stack_modifier.cc"] + , "hdrs": ["server/xds_channel_stack_modifier.h"] , "deps": [ "channel_args" , "channel_fwd" @@ -3850,19 +4784,19 @@ , "channel_stack_type" , "ref_counted" , "useful" + , ["@", "absl", "absl/strings", "strings"] , ["", "channel_stack_builder"] , ["", "config"] , ["", "gpr_platform"] , ["", "grpc_base"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/strings", "strings"] ] } , "grpc_xds_server_config_fetcher": { "type": ["@", "rules", "CC", "library"] , "name": ["grpc_xds_server_config_fetcher"] , "stage": ["src", "core"] - , "srcs": ["ext/xds/xds_server_config_fetcher.cc"] + , "srcs": ["server/xds_server_config_fetcher.cc"] , "deps": [ "channel_args" , "channel_args_preconditioning" @@ -3880,26 +4814,38 @@ , "resolved_address" , "slice_refcount" , "unique_type_name" + , "xds_certificate_provider" + , "xds_certificate_provider_store" + , "xds_common_types" + , "xds_credentials" + , "xds_http_filter" + , "xds_listener" + , "xds_route_config" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "variant"] , ["", "config"] , ["", "debug_location"] , ["", "exec_ctx"] , ["", "gpr"] , ["", "grpc_base"] + , ["", "grpc_core_credentials_header"] , ["", "grpc_public_hdrs"] , ["", "grpc_security_base"] , ["", "grpc_service_config_impl"] , ["", "grpc_trace"] + , ["", "iomgr"] , ["", "parse_address"] , ["", "ref_counted_ptr"] + , ["", "server"] , ["", "sockaddr_utils"] , ["", "uri_parser"] , ["", "xds_client"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "absl", 
"absl/types", "variant"] ] } , "channel_creds_registry_init": @@ -3917,12 +4863,13 @@ , "json_object_loader" , "time" , "validation_errors" + , ["@", "absl", "absl/strings", "strings"] , ["", "config"] , ["", "gpr"] , ["", "gpr_platform"] + , ["", "grpc_core_credentials_header"] , ["", "grpc_security_base"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/strings", "strings"] ] } , "grpc_lb_policy_cds": @@ -3949,7 +4896,18 @@ , "pollset_set" , "time" , "unique_type_name" + , "xds_cluster" + , "xds_common_types" + , "xds_config" , "xds_dependency_manager" + , "xds_health_status" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "variant"] , ["", "config"] , ["", "debug_location"] , ["", "gpr"] @@ -3960,11 +4918,6 @@ , ["", "ref_counted_ptr"] , ["", "work_serializer"] , ["", "xds_client"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "absl", "absl/types", "variant"] ] } , "grpc_lb_xds_channel_args": @@ -3981,10 +4934,12 @@ , "srcs": ["load_balancing/xds/xds_cluster_impl.cc"] , "deps": [ "channel_args" + , "client_channel_internal_header" , "connectivity_state" , "delegating_helper" , "grpc_backend_metric_data" , "grpc_lb_xds_channel_args" + , "grpc_resolver_xds_attributes" , "grpc_xds_client" , "json" , "json_args" @@ -3992,12 +4947,25 @@ , "lb_policy" , "lb_policy_factory" , "lb_policy_registry" + , "match" , "pollset_set" , "ref_counted" + , "ref_counted_string" , "resolved_address" , "subchannel_interface" , "validation_errors" - , "xds_dependency_manager" + , "xds_config" + , "xds_credentials" + , "xds_endpoint" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", 
"check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "variant"] + , ["", "call_tracer"] , ["", "config"] , ["", "debug_location"] , ["", "endpoint_addresses"] @@ -4005,15 +4973,10 @@ , ["", "grpc_base"] , ["", "grpc_client_channel"] , ["", "grpc_trace"] + , ["", "lb_child_policy_handler"] , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "xds_client"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "absl", "absl/types", "variant"] ] } , "grpc_lb_policy_xds_cluster_manager": @@ -4023,6 +4986,7 @@ , "srcs": ["load_balancing/xds/xds_cluster_manager.cc"] , "deps": [ "channel_args" + , "client_channel_internal_header" , "connectivity_state" , "delegating_helper" , "grpc_resolver_xds_attributes" @@ -4035,6 +4999,11 @@ , "pollset_set" , "time" , "validation_errors" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "config"] , ["", "debug_location"] , ["", "endpoint_addresses"] @@ -4044,13 +5013,10 @@ , ["", "grpc_base"] , ["", "grpc_client_channel"] , ["", "grpc_trace"] + , ["", "lb_child_policy_handler"] , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "work_serializer"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_lb_policy_xds_wrr_locality": @@ -4070,7 +5036,12 @@ , "lb_policy_factory" , "lb_policy_registry" , "pollset_set" + , "ref_counted_string" , "validation_errors" 
+ , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] , ["", "config"] , ["", "debug_location"] , ["", "endpoint_addresses"] @@ -4080,9 +5051,6 @@ , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "xds_client"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] ] } , "grpc_lb_address_filtering": @@ -4096,12 +5064,12 @@ , "ref_counted" , "ref_counted_string" , "resolved_address" - , ["", "endpoint_addresses"] - , ["", "gpr_platform"] - , ["", "ref_counted_ptr"] , ["@", "absl", "absl/functional", "function_ref"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] + , ["", "endpoint_addresses"] + , ["", "gpr_platform"] + , ["", "ref_counted_ptr"] ] } , "health_check_client": @@ -4115,6 +5083,7 @@ ] , "deps": [ "channel_args" + , "client_channel_internal_header" , "closure" , "connectivity_state" , "error" @@ -4123,51 +5092,29 @@ , "slice" , "subchannel_interface" , "unique_type_name" - , ["", "channel_arg_names"] - , ["", "debug_location"] - , ["", "exec_ctx"] - , ["", "gpr"] - , ["", "grpc_base"] - , ["", "grpc_client_channel"] - , ["", "grpc_public_hdrs"] - , ["", "grpc_trace"] - , ["", "orphanable"] - , ["", "ref_counted_ptr"] - , ["", "sockaddr_utils"] - , ["", "work_serializer"] , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] - , ["third_party/upb", "base"] - , ["third_party/upb", "mem"] - , ["src/core/ext/upb-gen", "upb-gen-lib"] - ] - } -, "grpc_lb_subchannel_list": - { "type": ["@", "rules", "CC", "library"] - , "name": ["grpc_lb_subchannel_list"] - , "stage": ["src", "core"] - , "hdrs": 
["load_balancing/subchannel_list.h"] - , "deps": - [ "channel_args" - , "connectivity_state" - , "dual_ref_counted" - , "gpr_manual_constructor" - , "health_check_client" - , "iomgr_fwd" - , "lb_policy" - , "subchannel_interface" + , ["", "channel_arg_names"] + , ["", "channelz"] , ["", "debug_location"] - , ["", "endpoint_addresses"] + , ["", "exec_ctx"] , ["", "gpr"] , ["", "grpc_base"] + , ["", "grpc_client_channel"] + , ["", "grpc_public_hdrs"] + , ["", "grpc_trace"] + , ["", "orphanable"] , ["", "ref_counted_ptr"] - , ["", "server_address"] + , ["", "sockaddr_utils"] , ["", "work_serializer"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/types", "optional"] + , ["src/core/ext/upb-gen", "upb-gen-lib"] + , ["third_party/upb", "base"] + , ["third_party/upb", "mem"] ] } , "lb_endpoint_list": @@ -4186,6 +5133,12 @@ , "pollset_set" , "resolved_address" , "subchannel_interface" + , ["@", "absl", "absl/functional", "function_ref"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/types", "optional"] , ["", "config"] , ["", "debug_location"] , ["", "endpoint_addresses"] @@ -4194,10 +5147,6 @@ , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "work_serializer"] - , ["@", "absl", "absl/functional", "function_ref"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_lb_policy_pick_first": @@ -4210,18 +5159,27 @@ [ "channel_args" , "connectivity_state" , "experiments" + , "grpc_outlier_detection_header" , "health_check_client" , "iomgr_fwd" - , "grpc_outlier_detection_header" , "json" , "json_args" , "json_object_loader" , "lb_policy" , "lb_policy_factory" + , "metrics" , "resolved_address" , "subchannel_interface" , "time" , "useful" + , ["@", "absl", "absl/algorithm", "container"] + , ["@", "absl", "absl/log", "check"] + 
, ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/random", "random"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "channel_arg_names"] , ["", "config"] , ["", "debug_location"] @@ -4234,12 +5192,6 @@ , ["", "ref_counted_ptr"] , ["", "sockaddr_utils"] , ["", "work_serializer"] - , ["@", "absl", "absl/algorithm", "container"] - , ["@", "absl", "absl/random", "random"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "down_cast": @@ -4247,7 +5199,19 @@ , "name": ["down_cast"] , "stage": ["src", "core"] , "hdrs": ["lib/gprpp/down_cast.h"] - , "deps": [["", "gpr"], ["@", "absl", "absl/base", "config"]] + , "deps": + [ ["@", "absl", "absl/base", "config"] + , ["@", "absl", "absl/log", "check"] + , ["", "gpr"] + ] + } +, "glob": + { "type": ["@", "rules", "CC", "library"] + , "name": ["glob"] + , "stage": ["src", "core"] + , "srcs": ["lib/gprpp/glob.cc"] + , "hdrs": ["lib/gprpp/glob.h"] + , "deps": [["@", "absl", "absl/strings", "strings"]] } , "status_conversion": { "type": ["@", "rules", "CC", "library"] @@ -4270,9 +5234,9 @@ , "status_conversion" , "status_helper" , "time" + , ["@", "absl", "absl/status", "status"] , ["", "gpr_platform"] , ["", "grpc_public_hdrs"] - , ["@", "absl", "absl/status", "status"] ] } , "connectivity_state": @@ -4284,17 +5248,17 @@ , "deps": [ "closure" , "error" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] , ["", "debug_location"] , ["", "exec_ctx"] , ["", "gpr"] - , ["", "grpc_public_hdrs"] , ["", "gpr_platform"] + , ["", "grpc_public_hdrs"] , ["", "grpc_trace"] , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "work_serializer"] - , ["", "grpc_public_hdrs"] - , ["@", "absl", "absl/status", "status"] ] } , "xxhash_inline": @@ 
-4312,11 +5276,12 @@ , "hdrs": ["load_balancing/ring_hash/ring_hash.h"] , "deps": [ "channel_args" + , "client_channel_internal_header" , "closure" - , "error" - , "grpc_lb_policy_pick_first" , "connectivity_state" , "delegating_helper" + , "error" + , "grpc_lb_policy_pick_first" , "grpc_service_config" , "json" , "json_args" @@ -4331,6 +5296,14 @@ , "unique_type_name" , "validation_errors" , "xxhash_inline" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/container", "inlined_vector"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "channel_arg_names"] , ["", "config"] , ["", "debug_location"] @@ -4344,12 +5317,6 @@ , ["", "ref_counted_ptr"] , ["", "sockaddr_utils"] , ["", "work_serializer"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/container", "inlined_vector"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] , ["third_party/xxhash", "xxhash"] ] } @@ -4361,13 +5328,18 @@ , "deps": [ "channel_args" , "connectivity_state" - , "experiments" - , "grpc_lb_subchannel_list" , "json" , "lb_endpoint_list" , "lb_policy" , "lb_policy_factory" - , "subchannel_interface" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/meta", "type_traits"] + , ["@", "absl", "absl/random", "random"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "config"] , ["", "debug_location"] , ["", "endpoint_addresses"] @@ -4376,14 +5348,7 @@ , ["", "grpc_trace"] , ["", "orphanable"] , ["", "ref_counted_ptr"] - , ["", "server_address"] 
, ["", "work_serializer"] - , ["@", "absl", "absl/meta", "type_traits"] - , ["@", "absl", "absl/random", "random"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "static_stride_scheduler": @@ -4393,10 +5358,11 @@ , "srcs": ["load_balancing/weighted_round_robin/static_stride_scheduler.cc"] , "hdrs": ["load_balancing/weighted_round_robin/static_stride_scheduler.h"] , "deps": - [ ["", "gpr"] - , ["@", "absl", "absl/functional", "any_invocable"] + [ ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/types", "optional"] , ["@", "absl", "absl/types", "span"] + , ["", "gpr"] ] } , "grpc_lb_policy_weighted_round_robin": @@ -4409,13 +5375,14 @@ , "connectivity_state" , "experiments" , "grpc_backend_metric_data" - , "grpc_lb_subchannel_list" + , "grpc_lb_policy_weighted_target" , "json" , "json_args" , "json_object_loader" , "lb_endpoint_list" , "lb_policy" , "lb_policy_factory" + , "metrics" , "ref_counted" , "resolved_address" , "static_stride_scheduler" @@ -4423,6 +5390,16 @@ , "subchannel_interface" , "time" , "validation_errors" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/meta", "type_traits"] + , ["@", "absl", "absl/random", "random"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "variant"] , ["", "config"] , ["", "debug_location"] , ["", "endpoint_addresses"] @@ -4431,20 +5408,11 @@ , ["", "grpc_base"] , ["", "grpc_client_channel"] , ["", "grpc_trace"] + , ["", "oob_backend_metric"] , ["", "orphanable"] , ["", "ref_counted_ptr"] - , ["", "server_address"] - , ["", "sockaddr_utils"] , ["", "stats"] , ["", 
"work_serializer"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/meta", "type_traits"] - , ["@", "absl", "absl/random", "random"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "absl", "absl/types", "variant"] ] } , "grpc_outlier_detection_header": @@ -4458,9 +5426,9 @@ , "json_object_loader" , "time" , "validation_errors" + , ["@", "absl", "absl/types", "optional"] , ["", "gpr_platform"] , ["", "server_address"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_lb_policy_outlier_detection": @@ -4486,6 +5454,15 @@ , "subchannel_interface" , "unique_type_name" , "validation_errors" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/meta", "type_traits"] + , ["@", "absl", "absl/random", "random"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "variant"] , ["", "config"] , ["", "debug_location"] , ["", "exec_ctx"] @@ -4493,17 +5470,11 @@ , ["", "grpc_base"] , ["", "grpc_client_channel"] , ["", "grpc_trace"] + , ["", "lb_child_policy_handler"] , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "sockaddr_utils"] , ["", "work_serializer"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/meta", "type_traits"] - , ["@", "absl", "absl/random", "random"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "variant"] ] } , "grpc_lb_policy_priority": @@ -4526,6 +5497,12 @@ , "ref_counted_string" , "time" , "validation_errors" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", 
"absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "channel_arg_names"] , ["", "config"] , ["", "debug_location"] @@ -4535,13 +5512,10 @@ , ["", "grpc_base"] , ["", "grpc_client_channel"] , ["", "grpc_trace"] + , ["", "lb_child_policy_handler"] , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "work_serializer"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_lb_policy_weighted_target": @@ -4549,6 +5523,7 @@ , "name": ["grpc_lb_policy_weighted_target"] , "stage": ["src", "core"] , "srcs": ["load_balancing/weighted_target/weighted_target.cc"] + , "hdrs": ["load_balancing/weighted_target/weighted_target.h"] , "deps": [ "channel_args" , "connectivity_state" @@ -4563,6 +5538,14 @@ , "pollset_set" , "time" , "validation_errors" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/random", "random"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "config"] , ["", "debug_location"] , ["", "endpoint_addresses"] @@ -4571,15 +5554,10 @@ , ["", "grpc_base"] , ["", "grpc_client_channel"] , ["", "grpc_trace"] + , ["", "lb_child_policy_handler"] , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "work_serializer"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/random", "random"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_lb_policy_xds_override_host": @@ -4590,6 +5568,7 @@ , "hdrs": ["load_balancing/xds/xds_override_host.h"] , "deps": [ "channel_args" + , 
"client_channel_internal_header" , "closure" , "connectivity_state" , "delegating_helper" @@ -4610,7 +5589,17 @@ , "resolved_address" , "subchannel_interface" , "validation_errors" - , "xds_dependency_manager" + , "xds_config" + , "xds_health_status" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/functional", "function_ref"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "variant"] , ["", "config"] , ["", "debug_location"] , ["", "endpoint_addresses"] @@ -4619,18 +5608,13 @@ , ["", "grpc_base"] , ["", "grpc_client_channel"] , ["", "grpc_trace"] + , ["", "lb_child_policy_handler"] , ["", "orphanable"] , ["", "parse_address"] , ["", "ref_counted_ptr"] , ["", "server_address"] , ["", "sockaddr_utils"] , ["", "work_serializer"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "absl", "absl/types", "variant"] ] } , "lb_server_load_reporting_filter": @@ -4645,6 +5629,7 @@ ] , "deps": [ "arena_promise" + , "call_finalization" , "channel_args" , "channel_fwd" , "channel_stack_type" @@ -4654,6 +5639,15 @@ , "resolved_address" , "seq" , "slice" + , ["@", "absl", "absl/container", "vector"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "opencensus-stats", "", ""] + , ["@", "opencensus-tags", "", ""] , ["", "channel_arg_names"] , ["", "config"] , ["", "gpr"] @@ -4664,14 +5658,6 @@ , ["", "parse_address"] , ["", 
"promise"] , ["", "uri_parser"] - , ["@", "absl", "absl/container", "vector"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "opencensus-stats", "", ""] - , ["@", "opencensus-tags", "", ""] ] } , "grpc_backend_metric_filter": @@ -4692,19 +5678,19 @@ , "map" , "metadata_batch" , "slice" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "channel_arg_names"] , ["", "config"] , ["", "gpr"] , ["", "gpr_platform"] , ["", "grpc_base"] , ["", "grpc_trace"] - , ["", "legacy_context"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] + , ["src/core/ext/upb-gen", "upb-gen-lib"] , ["third_party/upb", "base"] , ["third_party/upb", "mem"] - , ["src/core/ext/upb-gen", "upb-gen-lib"] ] } , "polling_resolver": @@ -4718,6 +5704,12 @@ , "grpc_service_config" , "iomgr_fwd" , "time" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "backoff"] , ["", "debug_location"] , ["", "endpoint_addresses"] @@ -4730,10 +5722,6 @@ , ["", "ref_counted_ptr"] , ["", "uri_parser"] , ["", "work_serializer"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "service_config_helper": @@ -4749,10 +5737,11 @@ , "json_reader" , "json_writer" , "status_helper" - , ["", "gpr_platform"] - , ["", "grpc_base"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", 
"strings"] + , ["", "gpr_platform"] + , ["", "grpc_base"] + , ["", "iomgr"] ] } , "grpc_resolver_dns_event_engine": @@ -4771,6 +5760,14 @@ , "service_config_helper" , "time" , "validation_errors" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/cleanup", "cleanup"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "backoff"] , ["", "channel_arg_names"] , ["", "debug_location"] @@ -4783,15 +5780,10 @@ , ["", "grpc_resolver"] , ["", "grpc_service_config_impl"] , ["", "grpc_trace"] + , ["", "iomgr"] , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "uri_parser"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/cleanup", "cleanup"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_resolver_dns_plugin": @@ -4804,12 +5796,13 @@ [ "experiments" , "grpc_resolver_dns_event_engine" , "grpc_resolver_dns_native" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "strings"] , ["", "config"] , ["", "config_vars"] , ["", "gpr"] , ["", "grpc_resolver"] , ["", "grpc_resolver_dns_ares"] - , ["@", "absl", "absl/strings", "strings"] ] } , "grpc_resolver_dns_native": @@ -4823,6 +5816,12 @@ , "polling_resolver" , "resolved_address" , "time" + , ["@", "absl", "absl/functional", "bind_front"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "backoff"] , ["", "channel_arg_names"] , ["", "config"] @@ -4832,14 +5831,10 @@ , ["", "grpc_base"] , ["", "grpc_resolver"] , ["", "grpc_trace"] + , ["", "iomgr"] , ["", 
"orphanable"] , ["", "ref_counted_ptr"] , ["", "uri_parser"] - , ["@", "absl", "absl/functional", "bind_front"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_resolver_sockaddr": @@ -4851,6 +5846,9 @@ [ "channel_args" , "iomgr_port" , "resolved_address" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] , ["", "config"] , ["", "endpoint_addresses"] , ["", "gpr"] @@ -4858,8 +5856,6 @@ , ["", "orphanable"] , ["", "parse_address"] , ["", "uri_parser"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] ] } , "grpc_resolver_binder": @@ -4873,15 +5869,16 @@ , "iomgr_port" , "resolved_address" , "status_helper" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] , ["", "config"] , ["", "endpoint_addresses"] , ["", "gpr"] , ["", "grpc_resolver"] , ["", "orphanable"] , ["", "uri_parser"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] ] } , "grpc_resolver_xds_attributes": @@ -4892,17 +5889,29 @@ , "deps": [ "grpc_service_config" , "unique_type_name" - , ["", "gpr_platform"] + , "xds_route_config" , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr_platform"] ] } -, "grpc_resolver_xds_trace": +, "xds_config": { "type": ["@", "rules", "CC", "library"] - , "name": ["grpc_resolver_xds_trace"] + , "name": ["xds_config"] , "stage": ["src", "core"] - , "srcs": ["resolver/xds/xds_resolver_trace.cc"] - , "hdrs": ["resolver/xds/xds_resolver_trace.h"] - , "deps": [["", "gpr_platform"], ["", "grpc_trace"]] + , "srcs": ["resolver/xds/xds_config.cc"] + , "hdrs": ["resolver/xds/xds_config.h"] + , "deps": + [ "match" + , 
"ref_counted" + , "xds_cluster" + , "xds_endpoint" + , "xds_listener" + , "xds_route_config" + , ["@", "absl", "absl/container", "flat_hash_map"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "variant"] + ] } , "xds_dependency_manager": { "type": ["@", "rules", "CC", "library"] @@ -4912,17 +5921,23 @@ , "hdrs": ["resolver/xds/xds_dependency_manager.h"] , "deps": [ "grpc_lb_xds_channel_args" - , "grpc_resolver_xds_trace" , "grpc_xds_client" , "match" , "ref_counted" + , "xds_cluster" + , "xds_config" + , "xds_endpoint" + , "xds_listener" + , "xds_route_config" + , ["@", "absl", "absl/container", "flat_hash_map"] + , ["@", "absl", "absl/container", "flat_hash_set"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/strings", "strings"] , ["", "config"] , ["", "gpr"] , ["", "grpc_resolver"] , ["", "grpc_resolver_fake"] - , ["@", "absl", "absl/container", "flat_hash_map"] - , ["@", "absl", "absl/container", "flat_hash_set"] - , ["@", "absl", "absl/strings", "strings"] ] } , "grpc_resolver_xds": @@ -4935,11 +5950,12 @@ , "arena_promise" , "channel_args" , "channel_fwd" + , "client_channel_internal_header" + , "config_selector" , "context" , "dual_ref_counted" , "grpc_lb_policy_ring_hash" , "grpc_resolver_xds_attributes" - , "grpc_resolver_xds_trace" , "grpc_service_config" , "grpc_xds_client" , "iomgr_fwd" @@ -4949,25 +5965,14 @@ , "ref_counted" , "slice" , "time" + , "xds_config" , "xds_dependency_manager" + , "xds_http_filter" + , "xds_listener" + , "xds_route_config" , "xxhash_inline" - , ["", "channel_arg_names"] - , ["", "config"] - , ["", "debug_location"] - , ["", "endpoint_addresses"] - , ["", "gpr"] - , ["", "grpc_base"] - , ["", "grpc_client_channel"] - , ["", "grpc_public_hdrs"] - , ["", "grpc_resolver"] - , ["", "grpc_service_config_impl"] - , ["", "grpc_trace"] - , ["", "legacy_context"] - , ["", "orphanable"] - , ["", 
"ref_counted_ptr"] - , ["", "uri_parser"] - , ["", "work_serializer"] - , ["", "xds_client"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/meta", "type_traits"] , ["@", "absl", "absl/random", "random"] , ["@", "absl", "absl/status", "status"] @@ -4977,6 +5982,21 @@ , ["@", "absl", "absl/types", "optional"] , ["@", "absl", "absl/types", "variant"] , ["@", "re2", "", "re2"] + , ["", "channel_arg_names"] + , ["", "config"] + , ["", "debug_location"] + , ["", "endpoint_addresses"] + , ["", "gpr"] + , ["", "grpc_base"] + , ["", "grpc_public_hdrs"] + , ["", "grpc_resolver"] + , ["", "grpc_service_config_impl"] + , ["", "grpc_trace"] + , ["", "orphanable"] + , ["", "ref_counted_ptr"] + , ["", "uri_parser"] + , ["", "work_serializer"] + , ["", "xds_client"] ] } , "grpc_resolver_c2p": @@ -4993,20 +6013,23 @@ , "json_writer" , "resource_quota" , "time" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "alts_util"] , ["", "config"] , ["", "debug_location"] , ["", "gpr"] , ["", "grpc_base"] , ["", "grpc_resolver"] + , ["", "iomgr"] , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "uri_parser"] , ["", "work_serializer"] , ["", "xds_client"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "hpack_constants": @@ -5022,7 +6045,8 @@ , "stage": ["src", "core"] , "srcs": ["ext/transport/chttp2/transport/hpack_encoder_table.cc"] , "hdrs": ["ext/transport/chttp2/transport/hpack_encoder_table.h"] - , "deps": ["hpack_constants", ["", "gpr"]] + , "deps": + ["hpack_constants", ["@", "absl", "absl/log", "check"], ["", "gpr"]] } , "chttp2_flow_control": { "type": ["@", "rules", "CC", "library"] @@ -5037,13 +6061,15 @@ , "memory_quota" , "time" , "useful" - , ["", "gpr"] - , ["", 
"grpc_trace"] , ["@", "absl", "absl/functional", "function_ref"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/strings", "str_format"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] + , ["", "grpc_trace"] ] } , "ping_abuse_policy": @@ -5055,10 +6081,10 @@ , "deps": [ "channel_args" , "time" - , ["", "channel_arg_names"] - , ["", "gpr_platform"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "channel_arg_names"] + , ["", "gpr_platform"] ] } , "ping_callbacks": @@ -5069,17 +6095,18 @@ , "hdrs": ["ext/transport/chttp2/transport/ping_callbacks.h"] , "deps": [ "time" - , ["", "event_engine_base_hdrs"] - , ["", "gpr"] - , ["", "gpr_platform"] - , ["", "grpc_trace"] , ["@", "absl", "absl/container", "flat_hash_map"] , ["@", "absl", "absl/functional", "any_invocable"] , ["@", "absl", "absl/hash", "hash"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/meta", "type_traits"] , ["@", "absl", "absl/random", "bit_gen_ref"] , ["@", "absl", "absl/random", "distributions"] , ["@", "absl", "absl/types", "optional"] + , ["", "event_engine_base_hdrs"] + , ["", "gpr"] + , ["", "gpr_platform"] + , ["", "grpc_trace"] ] } , "write_size_policy": @@ -5088,7 +6115,12 @@ , "stage": ["src", "core"] , "srcs": ["ext/transport/chttp2/transport/write_size_policy.cc"] , "hdrs": ["ext/transport/chttp2/transport/write_size_policy.h"] - , "deps": ["time", ["", "gpr"], ["", "gpr_platform"]] + , "deps": + [ "time" + , ["@", "absl", "absl/log", "check"] + , ["", "gpr"] + , ["", "gpr_platform"] + ] } , "ping_rate_policy": { "type": ["@", "rules", "CC", "library"] @@ -5101,21 +6133,13 @@ , "experiments" , "match" , "time" - , ["", "channel_arg_names"] - , ["", "gpr_platform"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] , ["@", "absl", "absl/types", "variant"] + 
, ["", "channel_arg_names"] + , ["", "gpr_platform"] ] } -, "max_concurrent_streams_policy": - { "type": ["@", "rules", "CC", "library"] - , "name": ["max_concurrent_streams_policy"] - , "stage": ["src", "core"] - , "srcs": ["ext/transport/chttp2/transport/max_concurrent_streams_policy.cc"] - , "hdrs": ["ext/transport/chttp2/transport/max_concurrent_streams_policy.h"] - , "deps": [["", "gpr"], ["", "gpr_platform"]] - } , "huffsyms": { "type": ["@", "rules", "CC", "library"] , "name": ["huffsyms"] @@ -5141,11 +6165,11 @@ , "deps": [ "http2_errors" , "useful" - , ["", "chttp2_frame"] - , ["", "gpr_platform"] , ["@", "absl", "absl/functional", "function_ref"] , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "chttp2_frame"] + , ["", "gpr_platform"] ] } , "grpc_transport_chttp2_alpn": @@ -5154,7 +6178,7 @@ , "stage": ["src", "core"] , "srcs": ["ext/transport/chttp2/alpn/alpn.cc"] , "hdrs": ["ext/transport/chttp2/alpn/alpn.h"] - , "deps": ["useful", ["", "gpr"]] + , "deps": ["useful", ["@", "absl", "absl/log", "check"], ["", "gpr"]] } , "grpc_transport_chttp2_client_connector": { "type": ["@", "rules", "CC", "library"] @@ -5174,10 +6198,21 @@ , "handshaker_registry" , "resolved_address" , "status_helper" + , "subchannel_connector" , "tcp_connect_handshaker" , "time" , "unique_type_name" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/types", "optional"] + , ["", "channel"] , ["", "channel_arg_names"] + , ["", "channel_create"] + , ["", "channelz"] , ["", "config"] , ["", "debug_location"] , ["", "exec_ctx"] @@ -5190,14 +6225,10 @@ , ["", "grpc_trace"] , ["", "grpc_transport_chttp2"] , ["", "handshaker"] + , ["", "iomgr"] , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "sockaddr_utils"] - , 
["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_transport_chttp2_server": @@ -5210,8 +6241,11 @@ [ "channel_args" , "channel_args_endpoint_config" , "closure" + , "connection_quota" , "error" , "error_utils" + , "event_engine_extensions" + , "event_engine_query_extensions" , "grpc_insecure_credentials" , "handshaker_registry" , "iomgr_fwd" @@ -5222,7 +6256,16 @@ , "status_helper" , "time" , "unique_type_name" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "channel_arg_names"] + , ["", "channelz"] , ["", "chttp2_legacy_frame"] , ["", "config"] , ["", "debug_location"] @@ -5233,16 +6276,12 @@ , ["", "grpc_trace"] , ["", "grpc_transport_chttp2"] , ["", "handshaker"] + , ["", "iomgr"] , ["", "orphanable"] , ["", "ref_counted_ptr"] + , ["", "server"] , ["", "sockaddr_utils"] , ["", "uri_parser"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_transport_inproc": @@ -5250,8 +6289,7 @@ , "name": ["grpc_transport_inproc"] , "stage": ["src", "core"] , "srcs": - [ "ext/transport/inproc/inproc_plugin.cc" - , "ext/transport/inproc/inproc_transport.cc" + [ "ext/transport/inproc/inproc_transport.cc" , "ext/transport/inproc/legacy_inproc_transport.cc" ] , "hdrs": @@ -5266,15 +6304,26 @@ , "closure" , "connectivity_state" , "error" + , "event_engine_context" , 
"experiments" , "iomgr_fwd" + , "metadata" , "metadata_batch" , "slice" , "slice_buffer" , "status_helper" , "time" , "try_seq" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["", "channel"] , ["", "channel_arg_names"] + , ["", "channel_create"] + , ["", "channelz"] , ["", "config"] , ["", "debug_location"] , ["", "exec_ctx"] @@ -5282,12 +6331,10 @@ , ["", "grpc_base"] , ["", "grpc_public_hdrs"] , ["", "grpc_trace"] + , ["", "iomgr"] , ["", "promise"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] + , ["", "server"] ] } , "chaotic_good_frame": @@ -5307,15 +6354,16 @@ , "slice" , "slice_buffer" , "status_helper" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/random", "bit_gen_ref"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/types", "variant"] , ["", "gpr"] , ["", "gpr_platform"] , ["", "grpc_base"] , ["", "hpack_encoder"] , ["", "hpack_parser"] - , ["@", "absl", "absl/random", "bit_gen_ref"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/types", "variant"] ] } , "chaotic_good_settings_metadata": @@ -5327,9 +6375,9 @@ , "deps": [ "arena" , "metadata_batch" - , ["", "gpr"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] ] } , "chaotic_good_frame_header": @@ -5340,38 +6388,42 @@ , "hdrs": ["ext/transport/chaotic_good/frame_header.h"] , "deps": [ "bitset" - , ["", "gpr"] - , ["", "gpr_platform"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/strings", 
"strings"] + , ["", "gpr"] + , ["", "gpr_platform"] ] } , "gcp_metadata_query": { "type": ["@", "rules", "CC", "library"] , "name": ["gcp_metadata_query"] , "stage": ["src", "core"] - , "srcs": ["ext/gcp/metadata_query.cc"] - , "hdrs": ["ext/gcp/metadata_query.h"] + , "srcs": ["util/gcp_metadata_query.cc"] + , "hdrs": ["util/gcp_metadata_query.h"] , "deps": [ "closure" , "error" , "status_helper" , "time" + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] , ["", "gpr"] , ["", "gpr_platform"] , ["", "grpc_base"] + , ["", "grpc_core_credentials_header"] , ["", "grpc_security_base"] , ["", "grpc_trace"] , ["", "httpcli"] + , ["", "iomgr"] , ["", "orphanable"] , ["", "ref_counted_ptr"] , ["", "uri_parser"] - , ["@", "absl", "absl/functional", "any_invocable"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/strings", "strings"] ] } , "logging_sink": @@ -5381,9 +6433,9 @@ , "hdrs": ["ext/filters/logging/logging_sink.h"] , "deps": [ "time" - , ["", "gpr_platform"] , ["@", "absl", "absl/numeric", "int128"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr_platform"] ] } , "logging_filter": @@ -5408,6 +6460,14 @@ , "slice" , "slice_buffer" , "time" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/numeric", "int128"] + , ["@", "absl", "absl/random", "distributions"] + , ["@", "absl", "absl/random", "random"] + , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["", "call_tracer"] , ["", "channel_arg_names"] , ["", "config"] , ["", "gpr"] @@ -5416,14 +6476,7 @@ , ["", "grpc_client_channel"] , ["", 
"grpc_public_hdrs"] , ["", "grpc_resolver"] - , ["", "legacy_context"] , ["", "uri_parser"] - , ["@", "absl", "absl/numeric", "int128"] - , ["@", "absl", "absl/random", "distributions"] - , ["@", "absl", "absl/random", "random"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/types", "optional"] ] } , "grpc_promise_endpoint": @@ -5434,26 +6487,29 @@ , "hdrs": ["lib/transport/promise_endpoint.h"] , "deps": [ "activity" + , "cancel_callback" , "event_engine_common" + , "event_engine_extensions" + , "event_engine_query_extensions" , "if" , "map" , "poll" , "slice" , "slice_buffer" - , ["", "event_engine_base_hdrs"] - , ["", "exec_ctx"] - , ["", "gpr"] , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/types", "optional"] + , ["", "event_engine_base_hdrs"] + , ["", "exec_ctx"] + , ["", "gpr"] ] } , "chaotic_good_transport": { "type": ["@", "rules", "CC", "library"] , "name": ["chaotic_good_transport"] , "stage": ["src", "core"] - , "srcs": ["ext/transport/chaotic_good/chaotic_good_transport.cc"] , "hdrs": ["ext/transport/chaotic_good/chaotic_good_transport.h"] , "deps": [ "chaotic_good_frame" @@ -5463,11 +6519,12 @@ , "if" , "try_join" , "try_seq" + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/random", "random"] , ["", "gpr_platform"] , ["", "grpc_trace"] , ["", "hpack_encoder"] , ["", "promise"] - , ["@", "absl", "absl/random", "random"] ] } , "chaotic_good_client_transport": @@ -5502,6 +6559,16 @@ , "slice_buffer" , "try_join" , "try_seq" + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/container", "flat_hash_map"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/random", "bit_gen_ref"] + , ["@", "absl", "absl/random", "random"] + , ["@", "absl", "absl/status", "status"] + , ["@", 
"absl", "absl/status", "statusor"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "variant"] , ["", "exec_ctx"] , ["", "gpr"] , ["", "gpr_platform"] @@ -5510,14 +6577,6 @@ , ["", "hpack_parser"] , ["", "promise"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/base", "core_headers"] - , ["@", "absl", "absl/container", "flat_hash_map"] - , ["@", "absl", "absl/random", "random"] - , ["@", "absl", "absl/random", "bit_gen_ref"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] - , ["@", "absl", "absl/types", "optional"] - , ["@", "absl", "absl/types", "variant"] ] } , "chaotic_good_server_transport": @@ -5535,6 +6594,7 @@ , "chaotic_good_transport" , "context" , "default_event_engine" + , "event_engine_context" , "event_engine_wakeup_scheduler" , "for_each" , "grpc_promise_endpoint" @@ -5554,22 +6614,24 @@ , "switch" , "try_join" , "try_seq" - , ["", "exec_ctx"] - , ["", "gpr"] - , ["", "gpr_platform"] - , ["", "grpc_base"] - , ["", "hpack_encoder"] - , ["", "hpack_parser"] - , ["", "ref_counted_ptr"] , ["@", "absl", "absl/base", "core_headers"] , ["@", "absl", "absl/container", "flat_hash_map"] , ["@", "absl", "absl/functional", "any_invocable"] - , ["@", "absl", "absl/random", "random"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["@", "absl", "absl/random", "bit_gen_ref"] + , ["@", "absl", "absl/random", "random"] , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] , ["@", "absl", "absl/types", "optional"] , ["@", "absl", "absl/types", "variant"] + , ["", "exec_ctx"] + , ["", "gpr"] + , ["", "gpr_platform"] + , ["", "grpc_base"] + , ["", "hpack_encoder"] + , ["", "hpack_parser"] + , ["", "ref_counted_ptr"] ] } , "call_final_info": @@ -5580,6 +6642,22 @@ , "hdrs": ["lib/transport/call_final_info.h"] , "deps": [["", "gpr"], ["", "grpc_public_hdrs"]] } +, "call_finalization": + { "type": ["@", "rules", "CC", "library"] + , "name": 
["call_finalization"] + , "stage": ["src", "core"] + , "hdrs": ["lib/channel/call_finalization.h"] + , "deps": ["arena", "call_final_info", "context", ["", "gpr_platform"]] + } +, "call_state": + { "type": ["@", "rules", "CC", "library"] + , "name": ["call_state"] + , "stage": ["src", "core"] + , "srcs": ["lib/transport/call_state.cc"] + , "hdrs": ["lib/transport/call_state.h"] + , "deps": + ["activity", "poll", "status_flag", ["", "gpr"], ["", "grpc_trace"]] + } , "call_filters": { "type": ["@", "rules", "CC", "library"] , "name": ["call_filters"] @@ -5588,30 +6666,38 @@ , "hdrs": ["lib/transport/call_filters.h"] , "deps": [ "call_final_info" + , "call_state" + , "dump_args" + , "if" + , "latch" + , "map" , "message" , "metadata" , "ref_counted" + , "seq" , "status_flag" + , "try_seq" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] , ["", "gpr"] , ["", "promise"] , ["", "ref_counted_ptr"] ] } -, "call_factory": +, "interception_chain": { "type": ["@", "rules", "CC", "library"] - , "name": ["call_factory"] + , "name": ["interception_chain"] , "stage": ["src", "core"] - , "srcs": ["lib/transport/call_factory.cc"] - , "hdrs": ["lib/transport/call_factory.h"] + , "srcs": ["lib/transport/interception_chain.cc"] + , "hdrs": ["lib/transport/interception_chain.h"] , "deps": - [ "arena" - , "call_size_estimator" + [ "call_destination" + , "call_filters" , "call_spine" - , "channel_args" + , "match" + , "metadata" , "ref_counted" - , "resource_quota" , ["", "gpr_platform"] - , ["", "stats"] ] } , "call_destination": @@ -5630,10 +6716,10 @@ , "deps": [ "slice" , "time" - , ["", "gpr_platform"] , ["@", "absl", "absl/functional", "function_ref"] , ["@", "absl", "absl/meta", "type_traits"] , ["@", "absl", "absl/strings", "strings"] + , ["", "gpr_platform"] ] } , "metadata": @@ -5653,9 +6739,9 @@ , "deps": [ "arena" , "slice_buffer" + , ["@", "absl", "absl/strings", "strings"] , ["", "gpr_platform"] , ["", "grpc_public_hdrs"] - , ["@", "absl", 
"absl/strings", "strings"] ] } , "call_spine": @@ -5666,6 +6752,10 @@ , "hdrs": ["lib/transport/call_spine.h"] , "deps": [ "1999" + , "call_arena_allocator" + , "call_filters" + , "dual_ref_counted" + , "event_engine_context" , "for_each" , "if" , "latch" @@ -5676,7 +6766,26 @@ , "promise_status" , "status_flag" , "try_seq" + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/log", "check"] , ["", "gpr"] + , ["", "promise"] + ] + } +, "direct_channel": + { "type": ["@", "rules", "CC", "library"] + , "name": ["direct_channel"] + , "stage": ["src", "core"] + , "srcs": ["client_channel/direct_channel.cc"] + , "hdrs": ["client_channel/direct_channel.h"] + , "deps": + [ "channel_stack_type" + , "event_engine_context" + , "interception_chain" + , ["", "channel"] + , ["", "config"] + , ["", "grpc_base"] + , ["", "orphanable"] ] } , "metadata_batch": @@ -5692,7 +6801,7 @@ , "deps": [ "arena" , "chunked_vector" - , "compression_internal" + , "compression" , "experiments" , "if_list" , "metadata_compression_traits" @@ -5703,14 +6812,17 @@ , "time" , "timeout_encoding" , "type_list" - , ["", "gpr"] - , ["", "grpc_public_hdrs"] + , ["@", "absl", "absl/base", "no_destructor"] + , ["@", "absl", "absl/container", "flat_hash_set"] , ["@", "absl", "absl/container", "inlined_vector"] , ["@", "absl", "absl/functional", "function_ref"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/meta", "type_traits"] - , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] + , ["", "grpc_public_hdrs"] ] } , "timeout_encoding": @@ -5722,38 +6834,44 @@ , "deps": [ "slice" , "time" - , ["", "gpr"] , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/log", "check"] , ["@", "absl", "absl/types", "optional"] + , ["", "gpr"] ] } -, "call_size_estimator": +, "call_arena_allocator": { "type": ["@", "rules", "CC", 
"library"] - , "name": ["call_size_estimator"] + , "name": ["call_arena_allocator"] , "stage": ["src", "core"] - , "srcs": ["lib/transport/call_size_estimator.cc"] - , "hdrs": ["lib/transport/call_size_estimator.h"] - , "deps": [["", "gpr_platform"]] + , "srcs": ["lib/transport/call_arena_allocator.cc"] + , "hdrs": ["lib/transport/call_arena_allocator.h"] + , "deps": ["arena", "memory_quota", "ref_counted", ["", "gpr_platform"]] } -, "compression_internal": +, "compression": { "type": ["@", "rules", "CC", "library"] - , "name": ["compression_internal"] + , "name": ["compression"] , "stage": ["src", "core"] - , "srcs": ["lib/compression/compression_internal.cc"] + , "srcs": + [ "lib/compression/compression.cc" + , "lib/compression/compression_internal.cc" + ] , "hdrs": ["lib/compression/compression_internal.h"] , "deps": [ "bitset" , "channel_args" , "ref_counted_string" , "slice" + , "useful" + , ["@", "absl", "absl/container", "inlined_vector"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] , ["", "gpr"] , ["", "grpc_public_hdrs"] , ["", "grpc_trace"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/container", "inlined_vector"] - , ["@", "absl", "absl/strings", "strings"] - , ["@", "absl", "absl/strings", "str_format"] - , ["@", "absl", "absl/types", "optional"] ] } , "chaotic_good_server": @@ -5777,6 +6895,9 @@ , "error" , "error_utils" , "event_engine_common" + , "event_engine_context" + , "event_engine_extensions" + , "event_engine_query_extensions" , "event_engine_tcp_socket_utils" , "event_engine_wakeup_scheduler" , "grpc_promise_endpoint" @@ -5795,19 +6916,24 @@ , "status_helper" , "time" , "try_seq" + , ["@", "absl", "absl/container", "flat_hash_map"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/random", "bit_gen_ref"] + , ["@", "absl", "absl/random", "random"] + , ["@", 
"absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["", "channelz"] , ["", "gpr"] , ["", "gpr_platform"] , ["", "grpc_base"] , ["", "handshaker"] , ["", "hpack_encoder"] , ["", "hpack_parser"] + , ["", "iomgr"] , ["", "orphanable"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/container", "flat_hash_map"] - , ["@", "absl", "absl/random", "random"] - , ["@", "absl", "absl/random", "bit_gen_ref"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] + , ["", "server"] ] } , "chaotic_good_connector": @@ -5830,6 +6956,9 @@ , "default_event_engine" , "error" , "error_utils" + , "event_engine_context" + , "event_engine_extensions" + , "event_engine_query_extensions" , "event_engine_tcp_socket_utils" , "event_engine_wakeup_scheduler" , "grpc_promise_endpoint" @@ -5843,9 +6972,19 @@ , "sleep" , "slice" , "slice_buffer" + , "subchannel_connector" , "time" , "try_seq" , "wait_for_callback" + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/log", "log"] + , ["@", "absl", "absl/random", "bit_gen_ref"] + , ["@", "absl", "absl/random", "random"] + , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/status", "statusor"] + , ["", "channel"] + , ["", "channel_create"] + , ["", "config"] , ["", "debug_location"] , ["", "exec_ctx"] , ["", "gpr"] @@ -5855,11 +6994,31 @@ , ["", "handshaker"] , ["", "hpack_encoder"] , ["", "hpack_parser"] + , ["", "iomgr"] , ["", "ref_counted_ptr"] - , ["@", "absl", "absl/random", "random"] - , ["@", "absl", "absl/random", "bit_gen_ref"] - , ["@", "absl", "absl/status", "status"] - , ["@", "absl", "absl/status", "statusor"] + ] + } +, "metrics": + { "type": ["@", "rules", "CC", "library"] + , "name": ["metrics"] + , "stage": ["src", "core"] + , "srcs": ["telemetry/metrics.cc"] + , "hdrs": ["telemetry/metrics.h"] + , "deps": + [ "arena" + , "channel_args" + , "no_destruct" + , "slice" + , "time" + , ["@", "absl", "absl/container", "flat_hash_map"] + , ["@", 
"absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/functional", "function_ref"] + , ["@", "absl", "absl/log", "check"] + , ["@", "absl", "absl/strings", "strings"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "span"] + , ["", "call_tracer"] + , ["", "gpr"] ] } } diff --git a/etc/import/src/core/ext/upb-gen/TARGETS.grpc b/etc/import/src/core/ext/upb-gen/TARGETS.grpc index dd45be4da..49bf3e106 100644 --- a/etc/import/src/core/ext/upb-gen/TARGETS.grpc +++ b/etc/import/src/core/ext/upb-gen/TARGETS.grpc @@ -71,6 +71,7 @@ , "envoy/extensions/clusters/aggregate/v3/cluster.upb_minitable.c" , "envoy/extensions/filters/common/fault/v3/fault.upb_minitable.c" , "envoy/extensions/filters/http/fault/v3/fault.upb_minitable.c" + , "envoy/extensions/filters/http/gcp_authn/v3/gcp_authn.upb_minitable.c" , "envoy/extensions/filters/http/rbac/v3/rbac.upb_minitable.c" , "envoy/extensions/filters/http/router/v3/router.upb_minitable.c" , "envoy/extensions/filters/http/stateful_session/v3/stateful_session.upb_minitable.c" @@ -306,6 +307,8 @@ , "envoy/extensions/filters/common/fault/v3/fault.upb_minitable.h" , "envoy/extensions/filters/http/fault/v3/fault.upb.h" , "envoy/extensions/filters/http/fault/v3/fault.upb_minitable.h" + , "envoy/extensions/filters/http/gcp_authn/v3/gcp_authn.upb.h" + , "envoy/extensions/filters/http/gcp_authn/v3/gcp_authn.upb_minitable.h" , "envoy/extensions/filters/http/rbac/v3/rbac.upb.h" , "envoy/extensions/filters/http/rbac/v3/rbac.upb_minitable.h" , "envoy/extensions/filters/http/router/v3/router.upb.h" diff --git a/etc/import/src/core/ext/upbdefs-gen/TARGETS.grpc b/etc/import/src/core/ext/upbdefs-gen/TARGETS.grpc index 789021cb8..ce9a53a61 100644 --- a/etc/import/src/core/ext/upbdefs-gen/TARGETS.grpc +++ b/etc/import/src/core/ext/upbdefs-gen/TARGETS.grpc @@ -71,6 +71,7 @@ , "envoy/extensions/clusters/aggregate/v3/cluster.upbdefs.h" , "envoy/extensions/filters/common/fault/v3/fault.upbdefs.h" , 
"envoy/extensions/filters/http/fault/v3/fault.upbdefs.h" + , "envoy/extensions/filters/http/gcp_authn/v3/gcp_authn.upbdefs.h" , "envoy/extensions/filters/http/rbac/v3/rbac.upbdefs.h" , "envoy/extensions/filters/http/router/v3/router.upbdefs.h" , "envoy/extensions/filters/http/stateful_session/v3/stateful_session.upbdefs.h" @@ -225,6 +226,7 @@ , "envoy/extensions/clusters/aggregate/v3/cluster.upbdefs.c" , "envoy/extensions/filters/common/fault/v3/fault.upbdefs.c" , "envoy/extensions/filters/http/fault/v3/fault.upbdefs.c" + , "envoy/extensions/filters/http/gcp_authn/v3/gcp_authn.upbdefs.c" , "envoy/extensions/filters/http/rbac/v3/rbac.upbdefs.c" , "envoy/extensions/filters/http/router/v3/router.upbdefs.c" , "envoy/extensions/filters/http/stateful_session/v3/stateful_session.upbdefs.c" diff --git a/etc/import/src/google/protobuf/TARGETS.protobuf b/etc/import/src/google/protobuf/TARGETS.protobuf index 9ce0cc607..26288441e 100644 --- a/etc/import/src/google/protobuf/TARGETS.protobuf +++ b/etc/import/src/google/protobuf/TARGETS.protobuf @@ -22,154 +22,40 @@ { "type": ["@", "rules", "data", "staged"] , "srcs": [ "well_known_protos" - , "any.h" , "any.pb.h" , "api.pb.h" + , "duration.pb.h" + , "empty.pb.h" + , "field_mask.pb.h" + , "source_context.pb.h" + , "struct.pb.h" + , "timestamp.pb.h" + , "type.pb.h" + , "wrappers.pb.h" + , "any.h" , "arena.h" - , "arenastring.h" - , "arenaz_sampler.h" , "arena_align.h" , "arena_allocation_policy.h" , "arena_cleanup.h" - , "arena_test_util.h" - , "compiler/allowlists/allowlist.h" - , "compiler/allowlists/allowlists.h" - , "compiler/annotation_test_util.h" - , "compiler/code_generator.h" - , "compiler/command_line_interface.h" - , "compiler/command_line_interface_tester.h" - , "compiler/cpp/cpp_generator.h" - , "compiler/cpp/enum.h" - , "compiler/cpp/extension.h" - , "compiler/cpp/field.h" - , "compiler/cpp/field_generators/generators.h" - , "compiler/cpp/file.h" - , "compiler/cpp/generator.h" - , "compiler/cpp/helpers.h" - , 
"compiler/cpp/message.h" - , "compiler/cpp/message_layout_helper.h" - , "compiler/cpp/names.h" - , "compiler/cpp/options.h" - , "compiler/cpp/padding_optimizer.h" - , "compiler/cpp/parse_function_generator.h" - , "compiler/cpp/service.h" - , "compiler/cpp/tracker.h" - , "compiler/cpp/unittest.h" - , "compiler/cpp/unittest.inc" - , "compiler/csharp/csharp_doc_comment.h" - , "compiler/csharp/csharp_enum.h" - , "compiler/csharp/csharp_enum_field.h" - , "compiler/csharp/csharp_field_base.h" - , "compiler/csharp/csharp_generator.h" - , "compiler/csharp/csharp_helpers.h" - , "compiler/csharp/csharp_map_field.h" - , "compiler/csharp/csharp_message.h" - , "compiler/csharp/csharp_message_field.h" - , "compiler/csharp/csharp_options.h" - , "compiler/csharp/csharp_primitive_field.h" - , "compiler/csharp/csharp_reflection_class.h" - , "compiler/csharp/csharp_repeated_enum_field.h" - , "compiler/csharp/csharp_repeated_message_field.h" - , "compiler/csharp/csharp_repeated_primitive_field.h" - , "compiler/csharp/csharp_source_generator_base.h" - , "compiler/csharp/csharp_wrapper_field.h" - , "compiler/csharp/names.h" + , "arenastring.h" + , "arenaz_sampler.h" , "compiler/importer.h" - , "compiler/java/context.h" - , "compiler/java/doc_comment.h" - , "compiler/java/enum.h" - , "compiler/java/enum_field.h" - , "compiler/java/enum_field_lite.h" - , "compiler/java/enum_lite.h" - , "compiler/java/extension.h" - , "compiler/java/extension_lite.h" - , "compiler/java/field.h" - , "compiler/java/file.h" - , "compiler/java/generator.h" - , "compiler/java/generator_factory.h" - , "compiler/java/helpers.h" - , "compiler/java/java_features.pb.h" - , "compiler/java/java_generator.h" - , "compiler/java/kotlin_generator.h" - , "compiler/java/map_field.h" - , "compiler/java/map_field_lite.h" - , "compiler/java/message.h" - , "compiler/java/message_builder.h" - , "compiler/java/message_builder_lite.h" - , "compiler/java/message_field.h" - , "compiler/java/message_field_lite.h" - , 
"compiler/java/message_lite.h" - , "compiler/java/message_serialization.h" - , "compiler/java/names.h" - , "compiler/java/name_resolver.h" - , "compiler/java/options.h" - , "compiler/java/primitive_field.h" - , "compiler/java/primitive_field_lite.h" - , "compiler/java/service.h" - , "compiler/java/shared_code_generator.h" - , "compiler/java/string_field.h" - , "compiler/java/string_field_lite.h" - , "compiler/mock_code_generator.h" - , "compiler/objectivec/enum.h" - , "compiler/objectivec/enum_field.h" - , "compiler/objectivec/extension.h" - , "compiler/objectivec/field.h" - , "compiler/objectivec/file.h" - , "compiler/objectivec/generator.h" - , "compiler/objectivec/helpers.h" - , "compiler/objectivec/import_writer.h" - , "compiler/objectivec/line_consumer.h" - , "compiler/objectivec/map_field.h" - , "compiler/objectivec/message.h" - , "compiler/objectivec/message_field.h" - , "compiler/objectivec/names.h" - , "compiler/objectivec/nsobject_methods.h" - , "compiler/objectivec/oneof.h" - , "compiler/objectivec/options.h" - , "compiler/objectivec/primitive_field.h" - , "compiler/objectivec/text_format_decode_data.h" - , "compiler/package_info.h" , "compiler/parser.h" - , "compiler/php/names.h" - , "compiler/php/php_generator.h" - , "compiler/plugin.h" - , "compiler/plugin.pb.h" - , "compiler/python/generator.h" - , "compiler/python/helpers.h" - , "compiler/python/pyi_generator.h" - , "compiler/python/python_generator.h" - , "compiler/retention.h" - , "compiler/ruby/ruby_generator.h" - , "compiler/rust/accessors/accessors.h" - , "compiler/rust/accessors/accessor_generator.h" - , "compiler/rust/context.h" - , "compiler/rust/generator.h" - , "compiler/rust/message.h" - , "compiler/rust/naming.h" - , "compiler/rust/oneof.h" - , "compiler/rust/relative_path.h" - , "compiler/scc.h" - , "compiler/subprocess.h" - , "compiler/versions.h" - , "compiler/versions_suffix.h" - , "compiler/zip_writer.h" , "cpp_edition_defaults.h" , "cpp_features.pb.h" , "descriptor.h" , 
"descriptor.pb.h" , "descriptor_database.h" , "descriptor_legacy.h" + , "descriptor_lite.h" , "descriptor_visitor.h" - , "duration.pb.h" , "dynamic_message.h" - , "empty.pb.h" , "endian.h" , "explicitly_constructed.h" , "extension_set.h" , "extension_set_inl.h" , "feature_resolver.h" , "field_access_listener.h" - , "field_mask.pb.h" , "generated_enum_reflection.h" , "generated_enum_util.h" , "generated_message_bases.h" @@ -181,16 +67,12 @@ , "has_bits.h" , "implicit_weak_message.h" , "inlined_string_field.h" - , "internal_message_util.h" , "internal_visibility.h" - , "internal_visibility_for_testing.h" , "io/coded_stream.h" , "io/gzip_stream.h" , "io/io_win32.h" - , "io/package_info.h" , "io/printer.h" , "io/strtod.h" - , "io/test_zero_copy_stream.h" , "io/tokenizer.h" , "io/zero_copy_sink.h" , "io/zero_copy_stream.h" @@ -212,68 +94,231 @@ , "map_field.h" , "map_field_inl.h" , "map_field_lite.h" - , "map_lite_test_util.h" - , "map_test.inc" - , "map_test_util.h" - , "map_test_util.inc" - , "map_test_util_impl.h" , "map_type_handler.h" , "message.h" , "message_lite.h" - , "message_unittest.inc" , "metadata.h" , "metadata_lite.h" - , "package_info.h" , "parse_context.h" , "port.h" , "port_def.inc" , "port_undef.inc" - , "proto3_lite_unittest.inc" , "raw_ptr.h" , "reflection.h" , "reflection_internal.h" , "reflection_mode.h" , "reflection_ops.h" - , "reflection_tester.h" + , "reflection_visit_field_info.h" + , "reflection_visit_fields.h" , "repeated_field.h" , "repeated_ptr_field.h" + , "runtime_version.h" , "serial_arena.h" , "service.h" - , "source_context.pb.h" , "string_block.h" - , "string_member_robber.h" - , "struct.pb.h" , "stubs/callback.h" , "stubs/common.h" , "stubs/platform_macros.h" , "stubs/port.h" , "stubs/status_macros.h" - , "testing/file.h" - , "testing/googletest.h" - , "test_textproto.h" - , "test_util.h" - , "test_util.inc" - , "test_util2.h" - , "test_util_lite.h" , "text_format.h" , "thread_safe_arena.h" - , "timestamp.pb.h" - , "type.pb.h" , 
"unknown_field_set.h" , "util/delimited_message_util.h" , "util/field_comparator.h" , "util/field_mask_util.h" , "util/json_util.h" , "util/message_differencer.h" - , "util/package_info.h" , "util/time_util.h" , "util/type_resolver.h" , "util/type_resolver_util.h" , "varint_shuffle.h" , "wire_format.h" , "wire_format_lite.h" - , "wire_format_unittest.inc" - , "wrappers.pb.h" + ] + , "stage": ["google", "protobuf"] + } +, "protobuf_lite_headers": + { "type": ["@", "rules", "data", "staged"] + , "srcs": + [ "any.h" + , "arena.h" + , "arena_align.h" + , "arena_allocation_policy.h" + , "arena_cleanup.h" + , "arenastring.h" + , "arenaz_sampler.h" + , "descriptor_lite.h" + , "endian.h" + , "explicitly_constructed.h" + , "extension_set.h" + , "extension_set_inl.h" + , "generated_enum_util.h" + , "generated_message_tctable_decl.h" + , "generated_message_tctable_impl.h" + , "generated_message_util.h" + , "has_bits.h" + , "implicit_weak_message.h" + , "inlined_string_field.h" + , "internal_visibility.h" + , "io/coded_stream.h" + , "io/io_win32.h" + , "io/zero_copy_stream.h" + , "io/zero_copy_stream_impl.h" + , "io/zero_copy_stream_impl_lite.h" + , "map.h" + , "map_field_lite.h" + , "map_type_handler.h" + , "message_lite.h" + , "metadata_lite.h" + , "parse_context.h" + , "port.h" + , "port_def.inc" + , "port_undef.inc" + , "raw_ptr.h" + , "repeated_field.h" + , "repeated_ptr_field.h" + , "runtime_version.h" + , "serial_arena.h" + , "string_block.h" + , "stubs/callback.h" + , "stubs/common.h" + , "stubs/platform_macros.h" + , "stubs/port.h" + , "stubs/status_macros.h" + , "thread_safe_arena.h" + , "varint_shuffle.h" + , "wire_format_lite.h" + ] + , "stage": ["google", "protobuf"] + } +, "protoc_headers": + { "type": ["@", "rules", "data", "staged"] + , "srcs": + [ "compiler/code_generator.h" + , "compiler/command_line_interface.h" + , "compiler/cpp/enum.h" + , "compiler/cpp/extension.h" + , "compiler/cpp/field.h" + , "compiler/cpp/field_generators/generators.h" + , 
"compiler/cpp/file.h" + , "compiler/cpp/generator.h" + , "compiler/cpp/helpers.h" + , "compiler/cpp/ifndef_guard.h" + , "compiler/cpp/message.h" + , "compiler/cpp/message_layout_helper.h" + , "compiler/cpp/names.h" + , "compiler/cpp/namespace_printer.h" + , "compiler/cpp/options.h" + , "compiler/cpp/padding_optimizer.h" + , "compiler/cpp/parse_function_generator.h" + , "compiler/cpp/service.h" + , "compiler/cpp/tracker.h" + , "compiler/csharp/csharp_doc_comment.h" + , "compiler/csharp/csharp_enum.h" + , "compiler/csharp/csharp_enum_field.h" + , "compiler/csharp/csharp_field_base.h" + , "compiler/csharp/csharp_generator.h" + , "compiler/csharp/csharp_helpers.h" + , "compiler/csharp/csharp_map_field.h" + , "compiler/csharp/csharp_message.h" + , "compiler/csharp/csharp_message_field.h" + , "compiler/csharp/csharp_options.h" + , "compiler/csharp/csharp_primitive_field.h" + , "compiler/csharp/csharp_reflection_class.h" + , "compiler/csharp/csharp_repeated_enum_field.h" + , "compiler/csharp/csharp_repeated_message_field.h" + , "compiler/csharp/csharp_repeated_primitive_field.h" + , "compiler/csharp/csharp_source_generator_base.h" + , "compiler/csharp/csharp_wrapper_field.h" + , "compiler/csharp/names.h" + , "compiler/java/context.h" + , "compiler/java/doc_comment.h" + , "compiler/java/field_common.h" + , "compiler/java/file.h" + , "compiler/java/full/enum.h" + , "compiler/java/full/enum_field.h" + , "compiler/java/full/extension.h" + , "compiler/java/full/field_generator.h" + , "compiler/java/full/generator_factory.h" + , "compiler/java/full/make_field_gens.h" + , "compiler/java/full/map_field.h" + , "compiler/java/full/message.h" + , "compiler/java/full/message_builder.h" + , "compiler/java/full/message_field.h" + , "compiler/java/full/primitive_field.h" + , "compiler/java/full/service.h" + , "compiler/java/full/string_field.h" + , "compiler/java/generator.h" + , "compiler/java/generator_common.h" + , "compiler/java/generator_factory.h" + , "compiler/java/helpers.h" + , 
"compiler/java/internal_helpers.h" + , "compiler/java/java_features.pb.h" + , "compiler/java/kotlin_generator.h" + , "compiler/java/lite/enum.h" + , "compiler/java/lite/enum_field.h" + , "compiler/java/lite/extension.h" + , "compiler/java/lite/field_generator.h" + , "compiler/java/lite/generator_factory.h" + , "compiler/java/lite/make_field_gens.h" + , "compiler/java/lite/map_field.h" + , "compiler/java/lite/message.h" + , "compiler/java/lite/message_builder.h" + , "compiler/java/lite/message_field.h" + , "compiler/java/lite/primitive_field.h" + , "compiler/java/lite/string_field.h" + , "compiler/java/message_serialization.h" + , "compiler/java/name_resolver.h" + , "compiler/java/names.h" + , "compiler/java/options.h" + , "compiler/java/shared_code_generator.h" + , "compiler/objectivec/enum.h" + , "compiler/objectivec/enum_field.h" + , "compiler/objectivec/extension.h" + , "compiler/objectivec/field.h" + , "compiler/objectivec/file.h" + , "compiler/objectivec/generator.h" + , "compiler/objectivec/helpers.h" + , "compiler/objectivec/import_writer.h" + , "compiler/objectivec/line_consumer.h" + , "compiler/objectivec/map_field.h" + , "compiler/objectivec/message.h" + , "compiler/objectivec/message_field.h" + , "compiler/objectivec/names.h" + , "compiler/objectivec/nsobject_methods.h" + , "compiler/objectivec/oneof.h" + , "compiler/objectivec/options.h" + , "compiler/objectivec/primitive_field.h" + , "compiler/objectivec/tf_decode_data.h" + , "compiler/php/names.h" + , "compiler/php/php_generator.h" + , "compiler/plugin.h" + , "compiler/plugin.pb.h" + , "compiler/python/generator.h" + , "compiler/python/helpers.h" + , "compiler/python/pyi_generator.h" + , "compiler/retention.h" + , "compiler/ruby/ruby_generator.h" + , "compiler/rust/accessors/accessor_case.h" + , "compiler/rust/accessors/accessors.h" + , "compiler/rust/accessors/generator.h" + , "compiler/rust/accessors/helpers.h" + , "compiler/rust/context.h" + , "compiler/rust/crate_mapping.h" + , 
"compiler/rust/enum.h" + , "compiler/rust/generator.h" + , "compiler/rust/message.h" + , "compiler/rust/naming.h" + , "compiler/rust/oneof.h" + , "compiler/rust/relative_path.h" + , "compiler/rust/rust_field_type.h" + , "compiler/rust/rust_keywords.h" + , "compiler/scc.h" + , "compiler/subprocess.h" + , "compiler/versions.h" + , "compiler/zip_writer.h" + , "testing/file.h" ] , "stage": ["google", "protobuf"] } @@ -310,7 +355,7 @@ , "-Wno-invalid-noreturn" ] } - , "hdrs": ["protobuf_headers"] + , "hdrs": ["protobuf_lite_headers"] , "srcs": [ "any_lite.cc" , "arena.cc" @@ -340,6 +385,10 @@ ] , "deps": [ ["@", "absl", "absl/base", "base"] + , ["@", "absl", "absl/base", "config"] + , ["@", "absl", "absl/base", "core_headers"] + , ["@", "absl", "absl/base", "dynamic_annotations"] + , ["@", "absl", "absl/base", "prefetch"] , ["@", "absl", "absl/container", "btree"] , ["@", "absl", "absl/container", "flat_hash_set"] , ["@", "absl", "absl/hash", "hash"] @@ -348,9 +397,15 @@ , ["@", "absl", "absl/meta", "type_traits"] , ["@", "absl", "absl/numeric", "bits"] , ["@", "absl", "absl/status", "status"] + , ["@", "absl", "absl/strings", "cord"] , ["@", "absl", "absl/strings", "internal"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/synchronization", "synchronization"] , ["@", "absl", "absl/time", "time"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "span"] + , ["@", "absl", "absl/utility", "if_constexpr"] , ["third_party/utf8_range", "utf8_validity"] ] } @@ -394,7 +449,6 @@ , "generated_message_util.cc" , "implicit_weak_message.cc" , "inlined_string_field.cc" - , "internal_message_util.cc" , "io/coded_stream.cc" , "io/gzip_stream.cc" , "io/io_win32.cc" @@ -438,34 +492,45 @@ , "wire_format_lite.cc" ] , "deps": - [ ["", "libprotobuf_lite"] + [ ["@", "absl", "absl/algorithm", "container"] , ["@", "absl", "absl/base", "base"] + , ["@", "absl", "absl/base", "core_headers"] , ["@", 
"absl", "absl/base", "dynamic_annotations"] , ["@", "absl", "absl/container", "btree"] + , ["@", "absl", "absl/container", "fixed_array"] , ["@", "absl", "absl/container", "flat_hash_map"] , ["@", "absl", "absl/container", "flat_hash_set"] + , ["@", "absl", "absl/functional", "any_invocable"] + , ["@", "absl", "absl/functional", "function_ref"] , ["@", "absl", "absl/hash", "hash"] , ["@", "absl", "absl/log", "absl_check"] , ["@", "absl", "absl/log", "absl_log"] , ["@", "absl", "absl/log", "die_if_null"] + , ["@", "absl", "absl/memory", "memory"] + , ["@", "absl", "absl/numeric", "bits"] + , ["@", "absl", "absl/status", "status"] , ["@", "absl", "absl/status", "statusor"] + , ["@", "absl", "absl/strings", "cord"] , ["@", "absl", "absl/strings", "internal"] + , ["@", "absl", "absl/strings", "str_format"] + , ["@", "absl", "absl/strings", "strings"] , ["@", "absl", "absl/synchronization", "synchronization"] , ["@", "absl", "absl/time", "time"] - , ["third_party/utf8_range", "utf8_validity"] + , ["@", "absl", "absl/types", "optional"] + , ["@", "absl", "absl/types", "span"] + , ["@", "absl", "absl/types", "variant"] + , ["@", "absl", "absl/utility", "if_constexpr"] , ["@", "zlib", "", "zlib"] + , ["", "libprotobuf_lite"] + , ["third_party/utf8_range", "utf8_validity"] ] } , "libprotoc": { "type": ["@", "rules", "CC", "library"] , "name": ["libprotoc"] + , "hdrs": ["protoc_headers"] , "srcs": [ "compiler/main.cc" - , "compiler/allowlists/editions.cc" - , "compiler/allowlists/empty_package.cc" - , "compiler/allowlists/open_enum.cc" - , "compiler/allowlists/unused_imports.cc" - , "compiler/allowlists/weak_imports.cc" , "compiler/code_generator.cc" , "compiler/command_line_interface.cc" , "compiler/cpp/enum.cc" @@ -477,10 +542,13 @@ , "compiler/cpp/field_generators/message_field.cc" , "compiler/cpp/field_generators/primitive_field.cc" , "compiler/cpp/field_generators/string_field.cc" + , "compiler/cpp/field_generators/string_view_field.cc" , "compiler/cpp/file.cc" , 
"compiler/cpp/generator.cc" , "compiler/cpp/helpers.cc" + , "compiler/cpp/ifndef_guard.cc" , "compiler/cpp/message.cc" + , "compiler/cpp/namespace_printer.cc" , "compiler/cpp/padding_optimizer.cc" , "compiler/cpp/parse_function_generator.cc" , "compiler/cpp/service.cc" @@ -504,36 +572,40 @@ , "compiler/csharp/names.cc" , "compiler/java/context.cc" , "compiler/java/doc_comment.cc" - , "compiler/java/enum.cc" - , "compiler/java/enum_field.cc" - , "compiler/java/enum_field_lite.cc" - , "compiler/java/enum_lite.cc" - , "compiler/java/extension.cc" - , "compiler/java/extension_lite.cc" - , "compiler/java/field.cc" + , "compiler/java/field_common.cc" , "compiler/java/file.cc" + , "compiler/java/full/enum.cc" + , "compiler/java/full/enum_field.cc" + , "compiler/java/full/extension.cc" + , "compiler/java/full/generator_factory.cc" + , "compiler/java/full/make_field_gens.cc" + , "compiler/java/full/map_field.cc" + , "compiler/java/full/message.cc" + , "compiler/java/full/message_builder.cc" + , "compiler/java/full/message_field.cc" + , "compiler/java/full/primitive_field.cc" + , "compiler/java/full/service.cc" + , "compiler/java/full/string_field.cc" , "compiler/java/generator.cc" - , "compiler/java/generator_factory.cc" , "compiler/java/helpers.cc" + , "compiler/java/internal_helpers.cc" , "compiler/java/java_features.pb.cc" , "compiler/java/kotlin_generator.cc" - , "compiler/java/map_field.cc" - , "compiler/java/map_field_lite.cc" - , "compiler/java/message.cc" - , "compiler/java/message_builder.cc" - , "compiler/java/message_builder_lite.cc" - , "compiler/java/message_field.cc" - , "compiler/java/message_field_lite.cc" - , "compiler/java/message_lite.cc" + , "compiler/java/lite/enum.cc" + , "compiler/java/lite/enum_field.cc" + , "compiler/java/lite/extension.cc" + , "compiler/java/lite/generator_factory.cc" + , "compiler/java/lite/make_field_gens.cc" + , "compiler/java/lite/map_field.cc" + , "compiler/java/lite/message.cc" + , "compiler/java/lite/message_builder.cc" + , 
"compiler/java/lite/message_field.cc" + , "compiler/java/lite/primitive_field.cc" + , "compiler/java/lite/string_field.cc" , "compiler/java/message_serialization.cc" , "compiler/java/name_resolver.cc" , "compiler/java/names.cc" - , "compiler/java/primitive_field.cc" - , "compiler/java/primitive_field_lite.cc" - , "compiler/java/service.cc" , "compiler/java/shared_code_generator.cc" - , "compiler/java/string_field.cc" - , "compiler/java/string_field_lite.cc" , "compiler/objectivec/enum.cc" , "compiler/objectivec/enum_field.cc" , "compiler/objectivec/extension.cc" @@ -549,7 +621,7 @@ , "compiler/objectivec/names.cc" , "compiler/objectivec/oneof.cc" , "compiler/objectivec/primitive_field.cc" - , "compiler/objectivec/text_format_decode_data.cc" + , "compiler/objectivec/tf_decode_data.cc" , "compiler/php/names.cc" , "compiler/php/php_generator.cc" , "compiler/plugin.cc" @@ -559,21 +631,35 @@ , "compiler/python/pyi_generator.cc" , "compiler/retention.cc" , "compiler/ruby/ruby_generator.cc" + , "compiler/rust/accessors/accessor_case.cc" , "compiler/rust/accessors/accessors.cc" + , "compiler/rust/accessors/helpers.cc" + , "compiler/rust/accessors/map.cc" + , "compiler/rust/accessors/repeated_field.cc" , "compiler/rust/accessors/singular_message.cc" , "compiler/rust/accessors/singular_scalar.cc" , "compiler/rust/accessors/singular_string.cc" , "compiler/rust/accessors/unsupported_field.cc" , "compiler/rust/context.cc" + , "compiler/rust/crate_mapping.cc" + , "compiler/rust/enum.cc" , "compiler/rust/generator.cc" , "compiler/rust/message.cc" , "compiler/rust/naming.cc" , "compiler/rust/oneof.cc" , "compiler/rust/relative_path.cc" + , "compiler/rust/rust_field_type.cc" + , "compiler/rust/rust_keywords.cc" , "compiler/subprocess.cc" + , "compiler/versions.cc" , "compiler/zip_writer.cc" + , "testing/file.cc" + ] + , "deps": + [ ["@", "absl", "absl/log", "initialize"] + , ["", "libprotobuf"] + , ["upb_generator", "mangle"] ] - , "deps": [["", "libprotobuf"], ["@", "absl", 
"absl/log", "initialize"]] } , "protoc": { "type": ["@", "rules", "CC", "binary"] diff --git a/etc/import/src/include/openssl/TARGETS.boringssl b/etc/import/src/include/openssl/TARGETS.boringssl index 0d4a9495e..df6e9328f 100644 --- a/etc/import/src/include/openssl/TARGETS.boringssl +++ b/etc/import/src/include/openssl/TARGETS.boringssl @@ -1,9 +1,4 @@ -{ "ssl_headers": - { "type": ["@", "rules", "data", "staged"] - , "srcs": ["dtls1.h", "srtp.h", "ssl.h", "ssl3.h", "tls1.h"] - , "stage": ["openssl"] - } -, "crypto_headers": +{ "crypto_headers": { "type": ["@", "rules", "data", "staged"] , "srcs": [ "aead.h" @@ -45,17 +40,21 @@ , "evp.h" , "evp_errors.h" , "ex_data.h" + , "experimental/dilithium.h" + , "experimental/kyber.h" + , "experimental/spx.h" , "hkdf.h" , "hmac.h" , "hpke.h" , "hrss.h" , "is_boringssl.h" , "kdf.h" - , "kyber.h" , "lhash.h" , "md4.h" , "md5.h" , "mem.h" + , "mldsa.h" + , "mlkem.h" , "nid.h" , "obj.h" , "obj_mac.h" @@ -92,4 +91,9 @@ ] , "stage": ["openssl"] } +, "ssl_headers": + { "type": ["@", "rules", "data", "staged"] + , "srcs": ["dtls1.h", "srtp.h", "ssl.h", "ssl3.h", "tls1.h"] + , "stage": ["openssl"] + } } diff --git a/etc/import/src/liblzma/check/TARGETS.lzma b/etc/import/src/liblzma/check/TARGETS.lzma index da930c24b..3a13c7934 100644 --- a/etc/import/src/liblzma/check/TARGETS.lzma +++ b/etc/import/src/liblzma/check/TARGETS.lzma @@ -4,14 +4,14 @@ , "hdrs": { "type": "++" , "$1": - [ ["check.h", "crc_macros.h"] + [ ["check.h", "crc_common.h"] , { "type": "if" , "cond": {"type": "var", "name": "ENABLE_SMALL"} , "then": [] , "else": { "type": "++" , "$1": - [ ["crc32_table_be.h", "crc32_table_le.h"] + [ ["crc32_table_be.h", "crc32_table_le.h", "crc_x86_clmul.h"] , { "type": "if" , "cond": {"type": "var", "name": "ADDITIONAL_CHECK_TYPES"} , "then": ["crc64_table_be.h", "crc64_table_le.h"] diff --git a/etc/import/src/liblzma/common/TARGETS.lzma b/etc/import/src/liblzma/common/TARGETS.lzma index cc3176020..3c1b9a9c3 100644 --- 
a/etc/import/src/liblzma/common/TARGETS.lzma +++ b/etc/import/src/liblzma/common/TARGETS.lzma @@ -1,6 +1,7 @@ { "headers": { "type": ["@", "rules", "CC", "library"] - , "arguments_config": ["ENABLE_THREADS", "ENCODERS", "DECODERS"] + , "arguments_config": + ["ENABLE_THREADS", "ENCODERS", "DECODERS", "LZIP_DECODER"] , "hdrs": { "type": "++" , "$1": @@ -15,6 +16,10 @@ , "cond": {"type": "var", "name": "ENABLE_THREADS"} , "then": ["outqueue.h"] } + , { "type": "if" + , "cond": {"type": "var", "name": "LZIP_DECODER"} + , "then": ["lzip_decoder.h"] + } ] } , "deps": @@ -40,9 +45,9 @@ , "index_encoder.h" ] , "deps": - [ ["src/liblzma/lzma", "lzma1_headers"] + [ ["src/liblzma/delta", "headers"] + , ["src/liblzma/lzma", "lzma1_headers"] , ["src/liblzma/lzma", "lzma2_headers"] - , ["src/liblzma/delta", "headers"] , ["src/liblzma/simple", "headers"] ] } @@ -50,24 +55,16 @@ { "type": ["@", "rules", "CC", "library"] , "arguments_config": ["LZIP_DECODER"] , "hdrs": - { "type": "++" - , "$1": - [ [ "alone_decoder.h" - , "block_decoder.h" - , "filter_decoder.h" - , "index_decoder.h" - , "stream_decoder.h" - ] - , { "type": "if" - , "cond": {"type": "var", "name": "LZIP_DECODER"} - , "then": ["lzip_decoder.h"] - } - ] - } + [ "alone_decoder.h" + , "block_decoder.h" + , "filter_decoder.h" + , "index_decoder.h" + , "stream_decoder.h" + ] , "deps": - [ ["src/liblzma/lzma", "lzma1_headers"] + [ ["src/liblzma/delta", "headers"] + , ["src/liblzma/lzma", "lzma1_headers"] , ["src/liblzma/lzma", "lzma2_headers"] - , ["src/liblzma/delta", "headers"] , ["src/liblzma/simple", "headers"] ] } diff --git a/etc/import/third_party/upb/TARGETS.grpc b/etc/import/third_party/upb/TARGETS.grpc index 14e97dbaf..4fe3c8bbf 100644 --- a/etc/import/third_party/upb/TARGETS.grpc +++ b/etc/import/third_party/upb/TARGETS.grpc @@ -1,11 +1,4 @@ -{ "upb": - { "type": ["@", "rules", "CC", "library"] - , "name": ["upb"] - , "hdrs": ["upb/upb.h"] - , "deps": ["base", "mem"] - , "pure C": ["YES"] - } -, 
"generated_code_support": +{ "generated_code_support": { "type": ["@", "rules", "CC", "library"] , "name": ["generated_code_support"] , "hdrs": @@ -14,13 +7,10 @@ [ "base" , "mem" , "message" - , "message_accessors" - , "message_accessors_internal" , "message_internal" , "mini_descriptor" , "mini_table" , "wire" - , "wire_internal" ] } , "reflection": @@ -33,13 +23,14 @@ , "upb/reflection/message.hpp" ] , "deps": - [ "reflection_internal" - , "base" + [ "base" , "mem" , "message" - , "message_types" - , "message_value" + , "message_internal" + , "mini_descriptor" + , "mini_table" , "port" + , "reflection_internal" ] , "pure C": ["YES"] } @@ -90,6 +81,7 @@ , "upb/reflection/internal/method_def.h" , "upb/reflection/internal/oneof_def.h" , "upb/reflection/internal/service_def.h" + , "upb/reflection/internal/upb_edition_defaults.h" , "upb/reflection/message.h" , "upb/reflection/message.hpp" , "upb/reflection/message_def.h" @@ -104,16 +96,18 @@ ] , "deps": [ "base" + , "base_internal" , "hash" , "mem" , "message" - , "message_accessors" - , "message_types" - , "message_value" + , "message_copy" + , "message_internal" , "mini_descriptor" , "mini_descriptor_internal" , "mini_table" + , "mini_table_internal" , "port" + , "wire" , ["src/core/ext/upb-gen", "upb-gen-lib"] ] , "pure C": ["YES"] @@ -127,6 +121,7 @@ , "upb/base/status.h" , "upb/base/status.hpp" , "upb/base/string_view.h" + , "upb/base/upcast.h" ] , "deps": ["port"] , "pure C": ["YES"] @@ -134,17 +129,10 @@ , "base_internal": { "type": ["@", "rules", "CC", "library"] , "name": ["base_internal"] - , "hdrs": ["upb/base/internal/log2.h"] + , "hdrs": ["upb/base/internal/endian.h", "upb/base/internal/log2.h"] , "deps": ["port"] , "pure C": ["YES"] } -, "collections": - { "type": ["@", "rules", "CC", "library"] - , "name": ["collections"] - , "hdrs": ["upb/collections/array.h", "upb/collections/map.h"] - , "deps": ["message"] - , "pure C": ["YES"] - } , "hash": { "type": ["@", "rules", "CC", "library"] , "name": 
["hash"] @@ -166,7 +154,7 @@ , "name": ["tokenizer"] , "srcs": ["upb/io/tokenizer.c"] , "hdrs": ["upb/io/tokenizer.h"] - , "deps": ["string", "zero_copy_stream", "base", "lex", "mem", "port"] + , "deps": ["base", "lex", "mem", "port", "string", "zero_copy_stream"] , "pure C": ["YES"] } , "zero_copy_stream": @@ -182,7 +170,16 @@ , "name": ["json"] , "srcs": ["upb/json/decode.c", "upb/json/encode.c"] , "hdrs": ["upb/json/decode.h", "upb/json/encode.h"] - , "deps": ["lex", "message", "port", "reflection", "wire"] + , "deps": + [ "base" + , "lex" + , "mem" + , "message" + , "mini_table" + , "port" + , "reflection" + , "wire" + ] , "pure C": ["YES"] } , "lex": @@ -206,6 +203,7 @@ , "mem": { "type": ["@", "rules", "CC", "library"] , "name": ["mem"] + , "srcs": ["upb/mem/alloc.c", "upb/mem/arena.c"] , "hdrs": ["upb/mem/alloc.h", "upb/mem/arena.h", "upb/mem/arena.hpp"] , "deps": ["mem_internal", "port"] , "pure C": ["YES"] @@ -213,50 +211,38 @@ , "mem_internal": { "type": ["@", "rules", "CC", "library"] , "name": ["mem_internal"] - , "srcs": ["upb/mem/alloc.c", "upb/mem/arena.c"] , "hdrs": ["upb/mem/internal/arena.h"] - , "private-hdrs": ["upb/mem/alloc.h", "upb/mem/arena.h"] , "deps": ["port"] , "pure C": ["YES"] } -, "message_accessors": +, "message": { "type": ["@", "rules", "CC", "library"] - , "name": ["message_accessors"] - , "srcs": ["upb/message/accessors.c"] - , "hdrs": ["upb/message/accessors.h"] - , "private-hdrs": ["upb/message/internal/accessors.h"] - , "deps": - [ "message_internal" - , "message" - , "base" - , "eps_copy_input_stream" - , "mini_table" - , "mini_table_internal" - , "port" - , "wire" - , "wire_reader" + , "name": ["message"] + , "srcs": + [ "upb/message/accessors.c" + , "upb/message/array.c" + , "upb/message/compat.c" + , "upb/message/map.c" + , "upb/message/map_sorter.c" + , "upb/message/message.c" + ] + , "hdrs": + [ "upb/message/accessors.h" + , "upb/message/array.h" + , "upb/message/compat.h" + , "upb/message/map.h" + , 
"upb/message/map_gencode_util.h" + , "upb/message/message.h" + , "upb/message/tagged_ptr.h" + , "upb/message/value.h" ] - , "pure C": ["YES"] - } -, "message_accessors_internal": - { "type": ["@", "rules", "CC", "library"] - , "name": ["message_accessors_internal"] - , "hdrs": ["upb/message/internal/accessors.h"] - , "deps": ["message_internal", "mini_table_internal", "port"] - , "pure C": ["YES"] - } -, "message_copy": - { "type": ["@", "rules", "CC", "library"] - , "name": ["message_copy"] - , "srcs": ["upb/message/copy.c"] - , "hdrs": ["upb/message/copy.h"] , "deps": - [ "message_accessors" + [ "base" + , "base_internal" + , "hash" + , "mem" , "message_internal" - , "message" , "message_types" - , "base" - , "mem" , "mini_table" , "mini_table_internal" , "port" @@ -267,53 +253,46 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["message_internal"] , "srcs": - [ "upb/message/array.c" - , "upb/message/map.c" - , "upb/message/map_sorter.c" - , "upb/message/message.c" + [ "upb/message/internal/compare_unknown.c" + , "upb/message/internal/extension.c" + , "upb/message/internal/message.c" ] , "hdrs": - [ "upb/message/internal/array.h" + [ "upb/message/internal/accessors.h" + , "upb/message/internal/array.h" + , "upb/message/internal/compare_unknown.h" , "upb/message/internal/extension.h" , "upb/message/internal/map.h" - , "upb/message/internal/map_entry.h" , "upb/message/internal/map_sorter.h" , "upb/message/internal/message.h" - , "upb/message/map_gencode_util.h" + , "upb/message/internal/tagged_ptr.h" ] - , "private-hdrs": - ["upb/message/array.h", "upb/message/map.h", "upb/message/message.h"] + , "private-hdrs": ["upb/message/value.h"] , "deps": - [ "message_internal_types" - , "message_types" - , "message_value" - , "base" + [ "base" , "base_internal" + , "eps_copy_input_stream" , "hash" , "mem" + , "message_types" , "mini_table" + , "mini_table_internal" , "port" + , "wire_reader" ] , "pure C": ["YES"] } -, "message_internal_types": +, "message_compare": { 
"type": ["@", "rules", "CC", "library"] - , "name": ["message_internal_types"] - , "hdrs": ["upb/message/internal/types.h"] - , "pure C": ["YES"] - } -, "message": - { "type": ["@", "rules", "CC", "library"] - , "name": ["message"] - , "hdrs": - ["upb/message/array.h", "upb/message/map.h", "upb/message/message.h"] + , "name": ["message_compare"] + , "srcs": ["upb/message/compare.c"] + , "hdrs": ["upb/message/compare.h"] , "deps": - [ "message_internal" - , "message_types" - , "message_value" - , "base" - , "mem" + [ "base" + , "message" + , "message_internal" , "mini_table" + , "mini_table_internal" , "port" ] , "pure C": ["YES"] @@ -324,49 +303,55 @@ , "srcs": ["upb/message/promote.c"] , "hdrs": ["upb/message/promote.h"] , "deps": - [ "message_accessors" - , "message_accessors_internal" - , "message_internal" - , "message" - , "message_tagged_ptr" - , "message_types" - , "base" + [ "base" , "eps_copy_input_stream" , "mem" + , "message" + , "message_internal" , "mini_table" - , "mini_table_internal" , "port" , "wire" - , "wire_internal" , "wire_reader" ] , "pure C": ["YES"] } -, "message_split64": +, "message_copy": { "type": ["@", "rules", "CC", "library"] - , "name": ["message_split64"] - , "hdrs": ["upb/message/accessors_split64.h", "upb/message/array_split64.h"] - , "deps": ["message_accessors", "message", "port"] + , "name": ["message_copy"] + , "srcs": ["upb/message/copy.c"] + , "hdrs": ["upb/message/copy.h"] + , "deps": + [ "base" + , "base_internal" + , "mem" + , "message" + , "message_internal" + , "mini_table" + , "mini_table_internal" + , "port" + ] , "pure C": ["YES"] } -, "message_tagged_ptr": +, "message_split64": { "type": ["@", "rules", "CC", "library"] - , "name": ["message_tagged_ptr"] - , "hdrs": ["upb/message/tagged_ptr.h"] - , "deps": ["message_types", "port"] + , "name": ["message_split64"] + , "hdrs": ["upb/message/accessors_split64.h"] + , "deps": ["message", "port"] , "pure C": ["YES"] } , "message_types": { "type": ["@", "rules", "CC", 
"library"] , "name": ["message_types"] - , "hdrs": ["upb/message/types.h"] + , "hdrs": + ["upb/message/internal/map_entry.h", "upb/message/internal/types.h"] + , "deps": ["base", "hash", "port"] , "pure C": ["YES"] } , "message_value": { "type": ["@", "rules", "CC", "library"] , "name": ["message_value"] , "hdrs": ["upb/message/value.h"] - , "deps": ["message_tagged_ptr", "message_types", "base"] + , "deps": ["base", "message_tagged_ptr", "message_types"] , "pure C": ["YES"] } , "mini_descriptor": @@ -383,9 +368,11 @@ , "upb/mini_descriptor/link.h" ] , "deps": - [ "mini_descriptor_internal" - , "base" + [ "base" + , "base_internal" , "mem" + , "message_types" + , "mini_descriptor_internal" , "mini_table" , "mini_table_internal" , "port" @@ -415,7 +402,7 @@ , "name": ["mini_table_compat"] , "srcs": ["upb/mini_table/compat.c"] , "hdrs": ["upb/mini_table/compat.h"] - , "deps": ["mini_table", "base", "hash", "mem", "port"] + , "deps": ["base", "hash", "mem", "mini_table", "port"] , "pure C": ["YES"] } , "mini_table": @@ -431,8 +418,7 @@ , "upb/mini_table/message.h" , "upb/mini_table/sub.h" ] - , "deps": - ["mini_table_internal", "base", "hash", "mem", "message_types", "port"] + , "deps": ["base", "hash", "mem", "mini_table_internal", "port"] , "pure C": ["YES"] } , "mini_table_internal": @@ -445,9 +431,10 @@ , "upb/mini_table/internal/field.h" , "upb/mini_table/internal/file.h" , "upb/mini_table/internal/message.h" + , "upb/mini_table/internal/size_log2.h" , "upb/mini_table/internal/sub.h" ] - , "deps": ["base", "message_types", "port"] + , "deps": ["base", "hash", "mem", "message_types", "port"] , "pure C": ["YES"] } , "port": @@ -467,55 +454,49 @@ , "srcs": ["upb/text/encode.c"] , "hdrs": ["upb/text/encode.h"] , "deps": - [ "eps_copy_input_stream" + [ "base" + , "eps_copy_input_stream" , "lex" , "message" , "message_internal" + , "message_types" , "port" , "reflection" - , "wire" , "wire_reader" - , "wire_types" + , ["@", "protobuf", "third_party/utf8_range", 
"utf8_range"] ] , "pure C": ["YES"] } , "wire": { "type": ["@", "rules", "CC", "library"] , "name": ["wire"] - , "hdrs": ["upb/wire/decode.h", "upb/wire/encode.h"] - , "deps": - ["wire_internal", "wire_types", "mem", "message", "mini_table", "port"] - , "pure C": ["YES"] - } -, "wire_internal": - { "type": ["@", "rules", "CC", "library"] - , "name": ["wire_internal"] , "srcs": - ["upb/wire/decode.c", "upb/wire/decode_fast.c", "upb/wire/encode.c"] + [ "upb/wire/decode.c" + , "upb/wire/encode.c" + , "upb/wire/internal/decode_fast.c" + ] , "hdrs": - [ "upb/wire/decode_fast.h" - , "upb/wire/internal/constants.h" - , "upb/wire/internal/decode.h" - , "upb/wire/internal/swap.h" + [ "upb/wire/decode.h" + , "upb/wire/encode.h" + , "upb/wire/internal/decode_fast.h" ] - , "private-hdrs": ["upb/wire/decode.h", "upb/wire/encode.h"] + , "private-hdrs": + ["upb/wire/internal/constants.h", "upb/wire/internal/decoder.h"] , "deps": - [ "eps_copy_input_stream" - , "wire_reader" - , "wire_types" - , "base" + [ "base" + , "base_internal" + , "eps_copy_input_stream" , "hash" , "mem" , "mem_internal" , "message" - , "message_accessors_internal" , "message_internal" - , "message_internal_types" - , "message_tagged_ptr" + , "message_types" , "mini_table" , "mini_table_internal" , "port" - , ["third_party/utf8_range", ""] + , "wire_reader" + , ["@", "protobuf", "third_party/utf8_range", "utf8_range"] ] , "pure C": ["YES"] } @@ -523,15 +504,9 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["wire_reader"] , "srcs": ["upb/wire/reader.c"] - , "hdrs": ["upb/wire/reader.h"] - , "private-hdrs": ["upb/wire/internal/swap.h"] - , "deps": ["eps_copy_input_stream", "wire_types", "port"] - , "pure C": ["YES"] - } -, "wire_types": - { "type": ["@", "rules", "CC", "library"] - , "name": ["wire_types"] - , "hdrs": ["upb/wire/types.h"] + , "hdrs": + ["upb/wire/internal/reader.h", "upb/wire/reader.h", "upb/wire/types.h"] + , "deps": ["base_internal", "eps_copy_input_stream", "port"] , "pure C": 
["YES"] } , "eps_copy_input_stream": diff --git a/etc/import/third_party/utf8_range/TARGETS.grpc b/etc/import/third_party/utf8_range/TARGETS.grpc deleted file mode 100644 index b8d48617f..000000000 --- a/etc/import/third_party/utf8_range/TARGETS.grpc +++ /dev/null @@ -1,8 +0,0 @@ -{ "": - { "type": ["@", "rules", "CC", "library"] - , "name": ["utf8_range"] - , "srcs": ["naive.c", "range2-neon.c", "range2-sse.c"] - , "hdrs": ["utf8_range.h"] - , "pure C": ["YES"] - } -} diff --git a/etc/import/third_party/utf8_range/TARGETS.protobuf b/etc/import/third_party/utf8_range/TARGETS.protobuf index bcd4d3dd2..64713dcbd 100644 --- a/etc/import/third_party/utf8_range/TARGETS.protobuf +++ b/etc/import/third_party/utf8_range/TARGETS.protobuf @@ -24,6 +24,34 @@ , "name": ["utf8_validity"] , "hdrs": ["utf8_validity.h"] , "srcs": ["utf8_validity.cc"] - , "deps": [["@", "absl", "absl/strings", "strings"]] + , "deps": ["utf8_range", ["@", "absl", "absl/strings", "strings"]] + } +, "utf8_range": + { "type": "export" + , "target": "utf8_range_internal" + , "flexible_config": + [ "OS" + , "ARCH" + , "HOST_ARCH" + , "TARGET_ARCH" + , "TOOLCHAIN_CONFIG" + , "DEBUG" + , "CXX" + , "CXXFLAGS" + , "ADD_CXXFLAGS" + , "AR" + , "ENV" + , "CC" + , "CFLAGS" + , "ADD_CFLAGS" + , "PKG_CONFIG_ARGS" + ] + } +, "utf8_range_internal": + { "type": ["@", "rules", "CC", "library"] + , "name": ["utf8_range"] + , "hdrs": ["utf8_range.h"] + , "srcs": ["utf8_range.c"] + , "pure C": ["YES"] } } diff --git a/etc/import/upb_generator/TARGETS.protobuf b/etc/import/upb_generator/TARGETS.protobuf new file mode 100644 index 000000000..e86bda2fb --- /dev/null +++ b/etc/import/upb_generator/TARGETS.protobuf @@ -0,0 +1,9 @@ +{ "mangle": + { "type": ["@", "rules", "CC", "library"] + , "name": ["mangle"] + , "stage": ["upb_generator"] + , "srcs": ["mangle.cc"] + , "hdrs": ["mangle.h"] + , "deps": [["@", "absl", "absl/strings", "strings"]] + } +} diff --git a/etc/patches/bytestream.proto.diff 
b/etc/patches/bytestream.proto.diff deleted file mode 100644 index 0ded6d5bd..000000000 --- a/etc/patches/bytestream.proto.diff +++ /dev/null @@ -1,13 +0,0 @@ ---- bytestream.proto.orig 2022-03-04 15:34:51.771366591 +0100 -+++ bytestream.proto 2022-03-04 15:36:46.721674292 +0100 -@@ -16,8 +16,8 @@ - - package google.bytestream; - --import "google/api/annotations.proto"; --import "google/protobuf/wrappers.proto"; -+// import "google/api/annotations.proto"; -+// import "google/protobuf/wrappers.proto"; - - option go_package = "google.golang.org/genproto/googleapis/bytestream;bytestream"; - option java_outer_classname = "ByteStreamProto"; diff --git a/etc/patches/crypto-use-_Generic-only-if-defined-__cplusplus.patch b/etc/patches/crypto-use-_Generic-only-if-defined-__cplusplus.patch deleted file mode 100644 index fea564c8f..000000000 --- a/etc/patches/crypto-use-_Generic-only-if-defined-__cplusplus.patch +++ /dev/null @@ -1,74 +0,0 @@ -From 3359a87a71307336100b84e66b69bad385cd3cfc Mon Sep 17 00:00:00 2001 -From: Martin Jansa -Date: Mon, 6 May 2024 01:36:39 +0200 -Subject: [PATCH] crypto: use _Generic only if !defined(__cplusplus) - -* fixes build with gcc-14 which has __builtin_addc and __builtin_subc - with gcc-13 it was already using the #else branch because of missing builtins - -* fixes - https://github.com/grpc/grpc/issues/35945 - -* _Generic was introduced in boringssl with: - https://boringssl.googlesource.com/boringssl/+/70ca6bc24be103dabd68e448cd3af29b929b771d%5E%21/#F4 - -* but e.g. 
third_party/boringssl-with-bazel/src/ssl/d1_both.cc includes - this internal.h and from the .cc extension gcc will process it as C++ - where _Generic isn't available, causing: - -In file included from third_party/boringssl-with-bazel/src/ssl/d1_both.cc:125: -third_party/boringssl-with-bazel/src/ssl/../crypto/internal.h: In function 'uint32_t CRYPTO_addc_u32(uint32_t, uint32_t, uint32_t, uint32_t*)': -third_party/boringssl-with-bazel/src/ssl/../crypto/internal.h:1159:7: error: expected primary-expression before 'unsigned' - 1159 | unsigned: __builtin_addc, \ - | ^~~~~~~~ -third_party/boringssl-with-bazel/src/ssl/../crypto/internal.h:1166:10: note: in expansion of macro 'CRYPTO_GENERIC_ADDC' - 1166 | return CRYPTO_GENERIC_ADDC(x, y, carry, out_carry); - | ^~~~~~~~~~~~~~~~~~~ -third_party/boringssl-with-bazel/src/ssl/../crypto/internal.h:1160:7: error: expected primary-expression before 'unsigned' - 1160 | unsigned long: __builtin_addcl, \ - | ^~~~~~~~ -third_party/boringssl-with-bazel/src/ssl/../crypto/internal.h:1166:10: note: in expansion of macro 'CRYPTO_GENERIC_ADDC' - 1166 | return CRYPTO_GENERIC_ADDC(x, y, carry, out_carry); - | ^~~~~~~~~~~~~~~~~~~ -third_party/boringssl-with-bazel/src/ssl/../crypto/internal.h:1161:7: error: expected primary-expression before 'unsigned' - 1161 | unsigned long long: __builtin_addcll))((x), (y), (carry), (out_carry)) - | ^~~~~~~~ -third_party/boringssl-with-bazel/src/ssl/../crypto/internal.h:1166:10: note: in expansion of macro 'CRYPTO_GENERIC_ADDC' - 1166 | return CRYPTO_GENERIC_ADDC(x, y, carry, out_carry); - | ^~~~~~~~~~~~~~~~~~~ -third_party/boringssl-with-bazel/src/ssl/../crypto/internal.h:1158:4: error: '_Generic' was not declared in this scope - 1158 | (_Generic((x), \ - | ^~~~~~~~ -third_party/boringssl-with-bazel/src/ssl/../crypto/internal.h:1166:10: note: in expansion of macro 'CRYPTO_GENERIC_ADDC' - 1166 | return CRYPTO_GENERIC_ADDC(x, y, carry, out_carry); - | ^~~~~~~~~~~~~~~~~~~ - -Signed-off-by: Martin Jansa ---- 
-Upstream-Status: Submitted [https://boringssl-review.googlesource.com/c/boringssl/+/68227 crypto: use _Generic only if !defined(__cplusplus)] - - crypto/internal.h | 4 ++-- - 1 file changed, 2 insertions(+), 2 deletions(-) - -diff --git a/crypto/internal.h b/crypto/internal.h -index a77102d76..30d6826dd 100644 ---- a/crypto/internal.h -+++ b/crypto/internal.h -@@ -1152,7 +1152,7 @@ static inline uint64_t CRYPTO_rotr_u64(uint64_t value, int shift) { - - // CRYPTO_addc_* returns |x + y + carry|, and sets |*out_carry| to the carry - // bit. |carry| must be zero or one. --#if OPENSSL_HAS_BUILTIN(__builtin_addc) -+#if OPENSSL_HAS_BUILTIN(__builtin_addc) && !defined(__cplusplus) - - #define CRYPTO_GENERIC_ADDC(x, y, carry, out_carry) \ - (_Generic((x), \ -@@ -1204,7 +1204,7 @@ static inline uint64_t CRYPTO_addc_u64(uint64_t x, uint64_t y, uint64_t carry, - - // CRYPTO_subc_* returns |x - y - borrow|, and sets |*out_borrow| to the borrow - // bit. |borrow| must be zero or one. --#if OPENSSL_HAS_BUILTIN(__builtin_subc) -+#if OPENSSL_HAS_BUILTIN(__builtin_subc) && !defined(__cplusplus) - - #define CRYPTO_GENERIC_SUBC(x, y, borrow, out_borrow) \ - (_Generic((x), \ diff --git a/etc/repos.json b/etc/repos.json index d4e287c70..f3e6cefd7 100644 --- a/etc/repos.json +++ b/etc/repos.json @@ -187,11 +187,11 @@ , "cli11": { "repository": { "type": "archive" - , "content": "eb82119c62808a65bb2d15561f1968259ed50c95" - , "fetch": "https://github.com/CLIUtils/CLI11/archive/refs/tags/v2.4.1.tar.gz" - , "sha256": "73b7ec52261ce8fe980a29df6b4ceb66243bb0b779451dbd3d014cfec9fdbb58" - , "sha512": "965290d09977672d3bc3c57ca4b89a88c2c09461da6e866b18217d702d4d5a0977588fcb8fef1a3c3804e61ad80d276029f47469cc83dbfdc7021ee35f6b7269" - , "subdir": "CLI11-2.4.1" + , "content": "624cd17664daa964e192c176c98ea2fb919afd3d" + , "fetch": "https://github.com/CLIUtils/CLI11/archive/refs/tags/v2.4.2.tar.gz" + , "sha256": "f2d893a65c3b1324c50d4e682c0cdc021dd0477ae2c048544f39eed6654b699a" + , "sha512": 
"fdb61c430f5b99a9495fda7f94bfc8d0fb5360c99beeccbcb3b8918713579aac97fa0dcbce296065d9043f141a538c505919c9810fd1d192661e8b48b6a2637a" + , "subdir": "CLI11-2.4.2" } , "target_root": "import targets" , "target_file_name": "TARGETS.cli11" @@ -215,11 +215,11 @@ , "fmt": { "repository": { "type": "zip" - , "content": "c54a3ec3cba9177c0be51051b762a74449ea1466" - , "fetch": "https://github.com/fmtlib/fmt/releases/download/10.2.1/fmt-10.2.1.zip" - , "sha256": "312151a2d13c8327f5c9c586ac6cf7cddc1658e8f53edae0ec56509c8fa516c9" - , "sha512": "1cf0e3dd09c7d87e0890b8743559159d3be2a8f33c135516962d17c4eeb7b00659e6acd74518bd5566ee4e83ddaba155fecb4c229f90cd258b3b832e72ad82cd" - , "subdir": "fmt-10.2.1" + , "content": "2ec6acef7f8340d6d48eee098079995a163cc388" + , "fetch": "https://github.com/fmtlib/fmt/releases/download/11.0.2/fmt-11.0.2.zip" + , "sha256": "40fc58bebcf38c759e11a7bd8fdc163507d2423ef5058bba7f26280c5b9c5465" + , "sha512": "06eba9a2a8d1c2269801e10a00ed26a9344b79bca0391a6b10f35e9716682f8345125fceb96e9ca36ffbd4c0558b0e63e4c45a9dff09a8ee186458ec68e34198" + , "subdir": "fmt-11.0.2" } , "target_root": "import targets" , "target_file_name": "TARGETS.fmt" @@ -234,13 +234,13 @@ , "ssl": { "repository": { "type": "archive" - , "content": "7b7deaf147baf61c8efdc8a4ac79a16ba70b216e" - , "fetch": "https://github.com/google/boringssl/archive/ae72a4514c7afd150596b0a80947f3ca9b8363b5.tar.gz" - , "sha256": "057f662b0e85931a84945b2e89ba201fd44b0583da827c948fe443593690fb83" - , "sha512": "757a2727a2ab0e45656c22d807ea1217c74c276f51fa721a87f92b59a4d040d9931586e912b4558f57330340cc3af6361f1694b1479b764635c6a111b356afc4" - , "subdir": "boringssl-ae72a4514c7afd150596b0a80947f3ca9b8363b5" + , "content": "2142a2813a0d387f5f86dd90635d29a88b5b6c66" + , "fetch": "https://github.com/google/boringssl/archive/b8b3e6e11166719a8ebfa43c0cde9ad7d57a84f6.tar.gz" + , "sha256": "c70d519e4ee709b7a74410a5e3a937428b8198d793a3d771be3dd2086ae167c8" + , "sha512": 
"3a35107ce0b891911608b97e24a3437fb849bcb738f699eab74e55a2e0fd911f4a448ac1f2b11f444bcf7ae5794b7fa109009043a0f5dbeec5d73cf3909651c0" + , "subdir": "boringssl-b8b3e6e11166719a8ebfa43c0cde9ad7d57a84f6" , "mirrors": - [ "https://storage.googleapis.com/grpc-bazel-mirror/github.com/google/boringssl/archive/ae72a4514c7afd150596b0a80947f3ca9b8363b5.tar.gz" + [ "https://storage.googleapis.com/grpc-bazel-mirror/github.com/google/boringssl/archive/b8b3e6e11166719a8ebfa43c0cde9ad7d57a84f6.tar.gz" ] } , "target_root": "import targets" @@ -248,7 +248,7 @@ , "bindings": {"rules": "rules-boringssl", "patches": "patches"} , "bootstrap": { "arch_map": {"arm64": "aarch64"} - , "build": "{cc} {cflags} -I . -I src/include -c *.c src/crypto/*.c src/crypto/*/*.c src/crypto/*/*.S src/third_party/fiat/asm/*.S {os}-{arch}/crypto/fipsmodule/*.S && {ar} cqs libcrypto.a *.o" + , "build": "{cc} {cflags} -I . -I src/include -c src/crypto/*.c src/crypto/*/*.c src/gen/crypto/*.c src/crypto/*/*.S src/crypto/*/*/*.S src/gen/bcm/*.S src/gen/crypto/*.S src/third_party/fiat/asm/*.S && {ar} cqs libcrypto.a *.o" , "link": ["-lcrypto", "-pthread"] , "include_dir": "src/include/openssl" , "include_name": "openssl" @@ -258,11 +258,11 @@ , "protobuf": { "repository": { "type": "archive" - , "content": "989aa1fec35687ab84229fa84099582dd8f0246a" - , "fetch": "https://github.com/protocolbuffers/protobuf/releases/download/v25.1/protobuf-25.1.tar.gz" - , "sha256": "9bd87b8280ef720d3240514f884e56a712f2218f0d693b48050c836028940a42" - , "sha512": "d2fad2188118ced2cd951bdb472d72cc9e9b2158c88eeca652c76332a884b5b5b4b58628f7777272fa693140753823584ea9c7924f1655b1d5a363f59bdf7a4c" - , "subdir": "protobuf-25.1" + , "content": "eef52bb7f76e8c87ab741d6d9139a837f4b399d4" + , "fetch": "https://github.com/protocolbuffers/protobuf/releases/download/v27.2/protobuf-27.2.tar.gz" + , "sha256": "e4ff2aeb767da6f4f52485c2e72468960ddfe5262483879ef6ad552e52757a77" + , "sha512": 
"664c66b62cf1ed0c65d9b910d8e67d4d5d471113697f1b8edf1573cd5c0fc8e850ac53ce984e48e6c6b9cbbefa12f8530058384e7388e65a59c1e46d03772397" + , "subdir": "protobuf-27.2" } , "target_root": "import targets" , "target_file_name": "TARGETS.protobuf" @@ -291,13 +291,13 @@ , "google_apis": { "repository": { "type": "archive" - , "content": "5d9001f3dff8dec1880c06c0de100a0c2fc65094" - , "fetch": "https://github.com/googleapis/googleapis/archive/2f9af297c84c55c8b871ba4495e01ade42476c92.tar.gz" - , "sha256": "5bb6b0253ccf64b53d6c7249625a7e3f6c3bc6402abd52d3778bfa48258703a0" - , "sha512": "cdeefae807df7097174b4bb28c0900b06a68d424c00ebba4ff5add260c9c651351d5e429bfc5de42f95ebb75dadec313f7bd3991c2fa476c9104f9ea656acad4" - , "subdir": "googleapis-2f9af297c84c55c8b871ba4495e01ade42476c92" + , "content": "db3c51a8fd9c923a4e4908d8bcd7dd4642cc4664" + , "fetch": "https://github.com/googleapis/googleapis/archive/fe8ba054ad4f7eca946c2d14a63c3f07c0b586a0.tar.gz" + , "sha256": "0513f0f40af63bd05dc789cacc334ab6cec27cc89db596557cb2dfe8919463e4" + , "sha512": "d77ea83f8e68e3c0b667e7de43c2cd28b0ca7b969b2cf127b3873fc19f330ad85afb314bef4174a4e11ed68b620e43853d8b44eb833c5eca7e820ca21c1c3e15" + , "subdir": "googleapis-fe8ba054ad4f7eca946c2d14a63c3f07c0b586a0" , "mirrors": - [ "https://storage.googleapis.com/grpc-bazel-mirror/github.com/googleapis/googleapis/archive/2f9af297c84c55c8b871ba4495e01ade42476c92.tar.gz" + [ "https://storage.googleapis.com/grpc-bazel-mirror/github.com/googleapis/googleapis/archive/fe8ba054ad4f7eca946c2d14a63c3f07c0b586a0.tar.gz" ] } , "target_root": "import targets" @@ -324,11 +324,11 @@ , "zlib": { "repository": { "type": "archive" - , "content": "88faf0fca21e0d82de44366fdd52aaadbab2e6b6" - , "fetch": "https://github.com/madler/zlib/releases/download/v1.3/zlib-1.3.tar.gz" - , "sha256": "ff0ba4c292013dbc27530b3a81e1f9a813cd39de01ca5e0f8bf355702efa593e" - , "sha512": 
"185795044461cd78a5545250e06f6efdb0556e8d1bfe44e657b509dd6f00ba8892c8eb3febe65f79ee0b192d6af857f0e0055326d33a881449f3833f92e5f8fb" - , "subdir": "zlib-1.3" + , "content": "53fa48bf97f0ee0f42c62743b018507a6583ec3e" + , "fetch": "https://github.com/madler/zlib/releases/download/v1.3.1/zlib-1.3.1.tar.gz" + , "sha256": "9a93b2b7dfdac77ceba5a558a580e74667dd6fede4585b91eefb60f03b72df23" + , "sha512": "580677aad97093829090d4b605ac81c50327e74a6c2de0b85dd2e8525553f3ddde17556ea46f8f007f89e435493c9a20bc997d1ef1c1c2c23274528e3c46b94f" + , "subdir": "zlib-1.3.1" } , "target_root": "import targets" , "target_file_name": "TARGETS.zlib" @@ -350,11 +350,11 @@ , "lzma": { "repository": { "type": "archive" - , "content": "6dae9322046ff66b7074c33b24be9a98f5cee42a" - , "fetch": "https://github.com/xz-mirror/xz/archive/refs/tags/v5.4.0.tar.gz" - , "sha256": "26ec069c41e5dfae221825b9f28a4934cb6ce3bedd3f55bc0a08073f7c2e42fd" - , "sha512": "60aba20197d329a1e0d86b32de8a9727a98e5ff6f1eda6111c5ddf85086edeefcf9e2cc3998d237e248bf4af7cf746923b45d236e6b47d88f63cf3d57cae9e8f" - , "subdir": "xz-5.4.0" + , "content": "80e67abd2e08a54ec21f195b3e9e4abfc64ba7e1" + , "fetch": "https://github.com/tukaani-project/xz/releases/download/v5.6.3/xz-5.6.3.tar.gz" + , "sha256": "b1d45295d3f71f25a4c9101bd7c8d16cb56348bbef3bbc738da0351e17c73317" + , "sha512": "b07b45e18615d1946e9d12157af99b54700d757832a638fccab70549574dcd7f28e69e71cc4c0b9c808959f818e79b668a5ccf108429ea0f40b6125bfd55d274" + , "subdir": "xz-5.6.3" } , "target_root": "import targets" , "target_file_name": "TARGETS.lzma" @@ -392,11 +392,11 @@ , "com_github_grpc_grpc": { "repository": { "type": "archive" - , "content": "d89bbef10ae30b511e062e6aa36520a2805e0f97" - , "fetch": "https://github.com/grpc/grpc/archive/refs/tags/v1.62.1.tar.gz" - , "sha256": "c9f9ae6e4d6f40464ee9958be4068087881ed6aa37e30d0e64d40ed7be39dd01" - , "sha512": 
"3224ad2617c18156f90c54c1ebf1f2015e405a6f12546e8709e0c905f52508c9f1a13b4d5a6cc7a35abf58b429985b5b504c9062f50c0d3d6aa163180a61047a" - , "subdir": "grpc-1.62.1" + , "content": "11573055daa6b91b4873e8e6118d0535ddc05418" + , "fetch": "https://github.com/grpc/grpc/archive/refs/tags/v1.67.1.tar.gz" + , "sha256": "d74f8e99a433982a12d7899f6773e285c9824e1d9a173ea1d1fb26c9bd089299" + , "sha512": "4f9b63278590be282b5578ec6c5f607f11046fdac3e62b5bf9054f664971312efee621e1fd3a086eaf978d41f4cd83bab7f2947c9e6189dc30bcb69263f481dc" + , "subdir": "grpc-1.67.1" , "pragma": {"special": "ignore"} } , "target_root": "import targets" @@ -471,11 +471,11 @@ , "com_github_libarchive_libarchive": { "repository": { "type": "archive" - , "content": "50abf77b7226df46a33013f278464c1588b0ceea" - , "fetch": "https://github.com/libarchive/libarchive/releases/download/v3.7.4/libarchive-3.7.4.tar.gz" - , "sha256": "7875d49596286055b52439ed42f044bd8ad426aa4cc5aabd96bfe7abb971d5e8" - , "sha512": "a37006350c2a61a35ecbe638c3168661bef5cbcb7dd3e5ec9a14af1a9aa89ec1be23902cdd17c60bf15859ba1e41bad8456dee9df686bc2d825b30d453cb2e44" - , "subdir": "libarchive-3.7.4" + , "content": "994435922d1ce63b52f6420f90b1b2a9f6670c39" + , "fetch": "https://github.com/libarchive/libarchive/releases/download/v3.7.7/libarchive-3.7.7.tar.gz" + , "sha256": "4cc540a3e9a1eebdefa1045d2e4184831100667e6d7d5b315bb1cbc951f8ddff" + , "sha512": "cce6eecfcd33d228bd1b1162a90bad63750adb53ac4edcaed34e2fdc30b6ba211cf1fd25d4b8761373949ceec266478b09bd70ffa4e374803a29e8573d6d149e" + , "subdir": "libarchive-3.7.7" } , "target_root": "import targets" , "target_file_name": "TARGETS.archive" diff --git a/etc/toolchain/CC/TARGETS b/etc/toolchain/CC/TARGETS index 060369186..2f2b58e65 100644 --- a/etc/toolchain/CC/TARGETS +++ b/etc/toolchain/CC/TARGETS @@ -168,7 +168,7 @@ } , "clang": { "type": ["CC", "defaults"] - , "arguments_config": ["OS", "ARCH", "TARGET_ARCH"] + , "arguments_config": ["OS", "ARCH", "TARGET_ARCH", "DEBUG"] , "CC": ["clang"] , 
"CXX": ["clang++"] , "AR": @@ -271,6 +271,11 @@ } } } + , "ADD_COMPILE_FLAGS": + { "type": "if" + , "cond": {"type": "var", "name": "DEBUG"} + , "then": ["-fdebug-compilation-dir=."] + } , "PATH": ["/bin", "/sbin", "/usr/bin", "/usr/sbin"] } } diff --git a/rules/CC/EXPRESSIONS b/rules/CC/EXPRESSIONS index 2413f98c1..0330e7197 100644 --- a/rules/CC/EXPRESSIONS +++ b/rules/CC/EXPRESSIONS @@ -320,18 +320,21 @@ } , "imports": {"list_provider": ["./", "..", "field_list_provider"]} , "expression": - { "type": "++" + { "type": "nub_right" , "$1": - { "type": "foreach" - , "var": "fieldname" - , "range": {"type": "var", "name": "deps-fieldnames"} - , "body": - { "type": "let*" - , "bindings": - [ ["provider", "compile-args"] - , ["transition", {"type": "var", "name": "deps-transition"}] - ] - , "body": {"type": "CALL_EXPRESSION", "name": "list_provider"} + { "type": "++" + , "$1": + { "type": "foreach" + , "var": "fieldname" + , "range": {"type": "var", "name": "deps-fieldnames"} + , "body": + { "type": "let*" + , "bindings": + [ ["provider", "compile-args"] + , ["transition", {"type": "var", "name": "deps-transition"}] + ] + , "body": {"type": "CALL_EXPRESSION", "name": "list_provider"} + } } } } @@ -387,18 +390,21 @@ } , "imports": {"list_provider": ["./", "..", "field_list_provider"]} , "expression": - { "type": "++" + { "type": "nub_right" , "$1": - { "type": "foreach" - , "var": "fieldname" - , "range": {"type": "var", "name": "deps-fieldnames"} - , "body": - { "type": "let*" - , "bindings": - [ ["provider", "link-args"] - , ["transition", {"type": "var", "name": "deps-transition"}] - ] - , "body": {"type": "CALL_EXPRESSION", "name": "list_provider"} + { "type": "++" + , "$1": + { "type": "foreach" + , "var": "fieldname" + , "range": {"type": "var", "name": "deps-fieldnames"} + , "body": + { "type": "let*" + , "bindings": + [ ["provider", "link-args"] + , ["transition", {"type": "var", "name": "deps-transition"}] + ] + , "body": {"type": "CALL_EXPRESSION", "name": 
"list_provider"} + } } } } @@ -814,12 +820,14 @@ , "cflags-files" , "lint-deps fieldnames" , "deps-transition" + , "deps-fieldnames" , "compile-args" , "defaults-transition" ] , "imports": { "objects": "objects" , "list_provider": ["./", "..", "field_list_provider"] + , "runfiles_list": ["./", "..", "field_runfiles_list"] , "default-TOOLCHAIN": "default-TOOLCHAIN" , "default-NON_SYSTEM_TOOLS": "default-NON_SYSTEM_TOOLS" , "compiler": "compiler" @@ -864,6 +872,52 @@ ] } ] + , [ "direct-deps hdrs" + , { "type": "to_subdir" + , "subdir": "include" + , "$1": + { "type": "let*" + , "bindings": + [["transition", {"type": "var", "name": "deps-transition"}]] + , "body": + { "type": "map_union" + , "$1": + { "type": "++" + , "$1": + { "type": "foreach" + , "var": "fieldname" + , "range": {"type": "var", "name": "deps-fieldnames"} + , "body": + {"type": "CALL_EXPRESSION", "name": "runfiles_list"} + } + } + } + } + } + ] + , [ "own headers" + , { "type": "to_subdir" + , "subdir": "work" + , "$1": + { "type": "map_union" + , "$1": + [ {"type": "var", "name": "hdrs"} + , {"type": "var", "name": "private-hdrs"} + ] + } + } + ] + , [ "direct hdrs" + , { "type": "map_union" + , "$1": + [ {"type": "var", "name": "own headers"} + , {"type": "var", "name": "direct-deps hdrs"} + ] + } + ] + , [ "direct deps artifact names" + , {"type": "keys", "$1": {"type": "var", "name": "direct hdrs"}} + ] , [ "hdr lint" , { "type": "foreach" , "range": @@ -902,7 +956,10 @@ } ] ] - , "body": {"type": "env", "vars": ["cmd", "src"]} + , "body": + { "type": "env" + , "vars": ["cmd", "src", "direct deps artifact names"] + } } } } @@ -974,7 +1031,10 @@ { "type": "let*" , "bindings": [["src", {"type": "var", "name": "work src_name"}]] - , "body": {"type": "env", "vars": ["cmd", "src"]} + , "body": + { "type": "env" + , "vars": ["cmd", "src", "direct deps artifact names"] + } } } } diff --git a/rules/CC/auto/EXPRESSIONS b/rules/CC/auto/EXPRESSIONS deleted file mode 100644 index 95209758a..000000000 --- 
a/rules/CC/auto/EXPRESSIONS +++ /dev/null @@ -1,12 +0,0 @@ -{ "last_list_entry": - { "vars": ["list"] - , "expression": - {"type": "[]", "index": -1, "list": {"type": "var", "name": "list"}} - } -, "first_list_entry": - { "vars": ["list"] - , "imports": {"last": "last_list_entry"} - , "expression": - {"type": "[]", "index": 0, "list": {"type": "var", "name": "list"}} - } -} diff --git a/rules/CC/auto/RULES b/rules/CC/auto/RULES index debae9c17..6ad967f0f 100644 --- a/rules/CC/auto/RULES +++ b/rules/CC/auto/RULES @@ -149,9 +149,7 @@ , "size_cxxtype" ] , "imports": - { "first": "first_list_entry" - , "last": "last_list_entry" - , "artifacts": ["./", "../..", "field_artifacts"] + { "artifacts": ["./", "../..", "field_artifacts"] , "compile-deps": ["./", "..", "compile-deps"] , "compiler-cc": ["./", "..", "compiler-cc"] , "compiler-cxx": ["./", "..", "compiler-cxx"] @@ -490,14 +488,22 @@ , "body": { "type": "let*" , "bindings": - [ ["list", {"type": "var", "name": "pair"}] - , [ "def" + [ [ "def" , { "type": "assert_non_empty" , "msg": "Define name in 'defines' may not be empty" - , "$1": {"type": "CALL_EXPRESSION", "name": "first"} + , "$1": + { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "pair"} + } + } + ] + , [ "val" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "pair"} } ] - , ["val", {"type": "CALL_EXPRESSION", "name": "last"}] ] , "body": { "type": "join" @@ -538,14 +544,22 @@ , "body": { "type": "let*" , "bindings": - [ ["list", {"type": "var", "name": "pair"}] - , [ "def" + [ [ "def" , { "type": "assert_non_empty" , "msg": "Define name in 'defines1' may not be empty" - , "$1": {"type": "CALL_EXPRESSION", "name": "first"} + , "$1": + { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "pair"} + } + } + ] + , [ "val" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "pair"} } ] - , ["val", {"type": "CALL_EXPRESSION", "name": "last"}] ] , "body": { "type": "join" @@ -575,14 +589,22 @@ 
, "body": { "type": "let*" , "bindings": - [ ["list", {"type": "var", "name": "pair"}] - , [ "def" + [ [ "def" , { "type": "assert_non_empty" , "msg": "Define name in 'defines01' may not be empty" - , "$1": {"type": "CALL_EXPRESSION", "name": "first"} + , "$1": + { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "pair"} + } + } + ] + , [ "val" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "pair"} } ] - , ["val", {"type": "CALL_EXPRESSION", "name": "last"}] ] , "body": { "type": "join" @@ -609,14 +631,22 @@ , "body": { "type": "let*" , "bindings": - [ ["list", {"type": "var", "name": "pair"}] - , [ "def" + [ [ "def" , { "type": "assert_non_empty" , "msg": "Define name in 'have_cfile' may not be empty" - , "$1": {"type": "CALL_EXPRESSION", "name": "first"} + , "$1": + { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "pair"} + } + } + ] + , [ "val" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "pair"} } ] - , ["val", {"type": "CALL_EXPRESSION", "name": "last"}] ] , "body": { "type": "lookup" @@ -653,14 +683,22 @@ , "body": { "type": "let*" , "bindings": - [ ["list", {"type": "var", "name": "pair"}] - , [ "def" + [ [ "def" , { "type": "assert_non_empty" , "msg": "Define name in 'have_cxxfile' may not be empty" - , "$1": {"type": "CALL_EXPRESSION", "name": "first"} + , "$1": + { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "pair"} + } + } + ] + , [ "val" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "pair"} } ] - , ["val", {"type": "CALL_EXPRESSION", "name": "last"}] ] , "body": { "type": "lookup" @@ -697,14 +735,22 @@ , "body": { "type": "let*" , "bindings": - [ ["list", {"type": "var", "name": "pair"}] - , [ "def" + [ [ "def" , { "type": "assert_non_empty" , "msg": "Define name in 'have_ctype' may not be empty" - , "$1": {"type": "CALL_EXPRESSION", "name": "first"} + , "$1": + { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": 
"pair"} + } + } + ] + , [ "type" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "pair"} } ] - , ["type", {"type": "CALL_EXPRESSION", "name": "last"}] ] , "body": { "type": "lookup" @@ -741,14 +787,22 @@ , "body": { "type": "let*" , "bindings": - [ ["list", {"type": "var", "name": "pair"}] - , [ "def" + [ [ "def" , { "type": "assert_non_empty" , "msg": "Define name in 'have_cxxtype' may not be empty" - , "$1": {"type": "CALL_EXPRESSION", "name": "first"} + , "$1": + { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "pair"} + } + } + ] + , [ "type" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "pair"} } ] - , ["type", {"type": "CALL_EXPRESSION", "name": "last"}] ] , "body": { "type": "lookup" @@ -785,16 +839,34 @@ , "body": { "type": "let*" , "bindings": - [ ["list", {"type": "var", "name": "pair"}] - , [ "def" + [ [ "def" , { "type": "assert_non_empty" , "msg": "Define name in 'have_csymbol' may not be empty" - , "$1": {"type": "CALL_EXPRESSION", "name": "first"} + , "$1": + { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "pair"} + } + } + ] + , [ "sym, hdrs" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "pair"} + } + ] + , [ "sym" + , { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "sym, hdrs"} + } + ] + , [ "hdrs" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "sym, hdrs"} } ] - , ["list", {"type": "CALL_EXPRESSION", "name": "last"}] - , ["sym", {"type": "CALL_EXPRESSION", "name": "first"}] - , ["hdrs", {"type": "CALL_EXPRESSION", "name": "last"}] ] , "body": { "type": "lookup" @@ -836,16 +908,34 @@ , "body": { "type": "let*" , "bindings": - [ ["list", {"type": "var", "name": "pair"}] - , [ "def" + [ [ "def" , { "type": "assert_non_empty" - , "msg": "Define name in 'have_cxxsymbol' may not be empty" - , "$1": {"type": "CALL_EXPRESSION", "name": "first"} + , "msg": "Define name in 'have_csymbol' may not be empty" + , 
"$1": + { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "pair"} + } + } + ] + , [ "sym, hdrs" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "pair"} + } + ] + , [ "sym" + , { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "sym, hdrs"} + } + ] + , [ "hdrs" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "sym, hdrs"} } ] - , ["list", {"type": "CALL_EXPRESSION", "name": "last"}] - , ["sym", {"type": "CALL_EXPRESSION", "name": "first"}] - , ["hdrs", {"type": "CALL_EXPRESSION", "name": "last"}] ] , "body": { "type": "lookup" @@ -887,16 +977,34 @@ , "body": { "type": "let*" , "bindings": - [ ["list", {"type": "var", "name": "pair"}] - , [ "def" + [ [ "def" , { "type": "assert_non_empty" - , "msg": "Define name in 'size_ctype' may not be empty" - , "$1": {"type": "CALL_EXPRESSION", "name": "first"} + , "msg": "Define name in 'have_csymbol' may not be empty" + , "$1": + { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "pair"} + } + } + ] + , [ "type, sizes" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "pair"} + } + ] + , [ "type" + , { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "type, sizes"} + } + ] + , [ "sizes" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "type, sizes"} } ] - , ["list", {"type": "CALL_EXPRESSION", "name": "last"}] - , ["type", {"type": "CALL_EXPRESSION", "name": "first"}] - , ["sizes", {"type": "CALL_EXPRESSION", "name": "last"}] ] , "body": { "type": "lookup" @@ -945,16 +1053,34 @@ , "body": { "type": "let*" , "bindings": - [ ["list", {"type": "var", "name": "pair"}] - , [ "def" + [ [ "def" , { "type": "assert_non_empty" - , "msg": "Define name in 'size_cxxtype' may not be empty" - , "$1": {"type": "CALL_EXPRESSION", "name": "first"} + , "msg": "Define name in 'have_csymbol' may not be empty" + , "$1": + { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "pair"} + } + 
} + ] + , [ "type, sizes" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "pair"} + } + ] + , [ "type" + , { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "type, sizes"} + } + ] + , [ "sizes" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "type, sizes"} } ] - , ["list", {"type": "CALL_EXPRESSION", "name": "last"}] - , ["type", {"type": "CALL_EXPRESSION", "name": "first"}] - , ["sizes", {"type": "CALL_EXPRESSION", "name": "last"}] ] , "body": { "type": "lookup" @@ -1112,11 +1238,7 @@ , "string_fields": ["magic_string", "@only", "output"] , "target_fields": ["input"] , "config_vars": ["defines"] - , "imports": - { "first": "first_list_entry" - , "last": "last_list_entry" - , "stage_singleton_field": ["", "stage_singleton_field"] - } + , "imports": {"stage_singleton_field": ["", "stage_singleton_field"]} , "implicit": {"runner": ["runner"]} , "expression": { "type": "let*" @@ -1137,9 +1259,18 @@ , "body": { "type": "let*" , "bindings": - [ ["list", {"type": "var", "name": "pair"}] - , ["key", {"type": "CALL_EXPRESSION", "name": "first"}] - , ["val", {"type": "CALL_EXPRESSION", "name": "last"}] + [ [ "key" + , { "type": "[]" + , "index": 0 + , "list": {"type": "var", "name": "pair"} + } + ] + , [ "val" + , { "type": "[]" + , "index": -1 + , "list": {"type": "var", "name": "pair"} + } + ] ] , "body": { "type": "singleton_map" diff --git a/rules/CC/pkgconfig/EXPRESSIONS b/rules/CC/pkgconfig/EXPRESSIONS index d4b923b94..d6bdf0872 100644 --- a/rules/CC/pkgconfig/EXPRESSIONS +++ b/rules/CC/pkgconfig/EXPRESSIONS @@ -107,7 +107,7 @@ ] , [">"] , [ { "type": "join_cmd" - , "$1": {"type": "var", "name": "cflags-filename"} + , "$1": [{"type": "var", "name": "cflags-filename"}] } ] ] @@ -189,7 +189,7 @@ [ "./add_rpath $(cat ldflags.raw)" , ">" , { "type": "join_cmd" - , "$1": {"type": "var", "name": "ldflags-filename"} + , "$1": [{"type": "var", "name": "ldflags-filename"}] } ] } diff --git a/rules/lint/RULES 
b/rules/lint/RULES index 1d251028f..e1000d9e2 100644 --- a/rules/lint/RULES +++ b/rules/lint/RULES @@ -55,16 +55,22 @@ { "linter": [ "Single artifact running the lint checks." , "" - , "This artifact with" + , "This program is invoked with" , "- argv[1] the file to lint, and" , "- argv[2:] the original command line." , "This invocation happens in an environment with" , "- CONFIG pointing to the directory with all the artifacts given" - , " by the field \"config\"." + , " by the field \"config\", and" , "- OUT pointing to a directory to which files with the lint result" , " can be written." - , "The linter is supposed to indicate by the exit code whether the" - , "indicated file complies with the given linting policy, with 0 meaning" + , "- META pointing to a json file containing" + , " - at key \"direct deps artifact names\" a list of all input" + , " artifacts that come from the target itself or are runfiles of a" + , " direct dependency." + , "- TMPDIR pointing to a directory location that can be used to" + , " create additional temporary files." + , "It is supposed to indicate by the exit code whether the file to lint" + , "complies with the given linting policy, with 0 meaning" , "compliant." , "Stdout and stderr, as well as the directory ${OUT} can be used to" , "provide additional information." @@ -72,14 +78,14 @@ , "config": ["Any configuration or other files needed by the linter."] , "summarizer": [ "Single artifact generating a summary of the individual lint results." - , "It will be called in a directory where all subdirectories with names" - , "consisting entirely of digits are the results of the individual lint" + , "It will be called in a directory where all subdirectories" + , "except . and .. represent the results of the individual lint" , "actions. 
Those are given as" - , " - a file result with content PASS if and only if the lint action" - , " exited 0," - , " - files stdout and stderr with stdout and stderr of the lint" + , " - a file \"result\" with content \"PASS\" if and only if the lint" + , " action exited 0," + , " - files \"stdout\" and \"stderr\" with stdout and stderr of the lint" , " action, and" - , " - a directory out with the additional information provided by the" + , " - a directory \"out\" with the additional information provided by the" , " lint action." , "The summarizer is required to indicate the overall result by the exit" , "code, produce a human-readable summary on stdout, and optionally" @@ -152,6 +158,12 @@ , "dep": {"type": "var", "name": "_"} } ] + , [ "direct deps artifact names" + , { "type": "DEP_PROVIDES" + , "dep": {"type": "var", "name": "_"} + , "provider": "direct deps artifact names" + } + ] ] , "body": { "type": "TREE" @@ -170,6 +182,19 @@ , "subdir": "config" , "$1": {"type": "var", "name": "config"} } + , { "type": "singleton_map" + , "key": "meta.json" + , "value": + { "type": "BLOB" + , "data": + { "type": "json_encode" + , "$1": + { "type": "env" + , "vars": ["direct deps artifact names"] + } + } + } + } ] } , "cmd": diff --git a/rules/lint/call_lint b/rules/lint/call_lint index ab9c2b8f6..fd2a24454 100755 --- a/rules/lint/call_lint +++ b/rules/lint/call_lint @@ -23,6 +23,9 @@ echo "${RESULT}" > result export OUT="$(pwd)/out" mkdir -p config export CONFIG="$(pwd)/config" +export META="$(pwd)/meta.json" +mkdir scratch +export TMPDIR=$(realpath scratch) cd work diff --git a/rules/shell/RULES b/rules/shell/RULES index 2ce078ef7..5ac07e686 100644 --- a/rules/shell/RULES +++ b/rules/shell/RULES @@ -137,14 +137,24 @@ } ] , [ "bin dirs" - , { "type": "++" + , { "type": "reverse" , "$1": - [ {"type": "FIELD", "name": "bin dirs"} - , { "type": "let*" - , "bindings": [["provider", "bin dirs"]] - , "body": {"type": "CALL_EXPRESSION", "name": "base-provides-++"} + { "type": 
"nub_right" + , "$1": + { "type": "reverse" + , "$1": + { "type": "++" + , "$1": + [ {"type": "FIELD", "name": "bin dirs"} + , { "type": "let*" + , "bindings": [["provider", "bin dirs"]] + , "body": + {"type": "CALL_EXPRESSION", "name": "base-provides-++"} + } + ] + } } - ] + } } ] ] diff --git a/share/just_complete.bash b/share/just_complete.bash index 7a7c00c00..8fc8176da 100644 --- a/share/just_complete.bash +++ b/share/just_complete.bash @@ -78,7 +78,7 @@ EOF } _just_completion(){ - local readonly SUBCOMMANDS=(build analyse describe install-cas install rebuild gc execute -h --help version) + local readonly SUBCOMMANDS=(build analyse describe install-cas install rebuild gc execute -h --help version add-to-cas serve) local word=${COMP_WORDS[$COMP_CWORD]} local prev=${COMP_WORDS[$((COMP_CWORD-1))]} local cmd=${COMP_WORDS[1]} @@ -111,7 +111,7 @@ _just_completion(){ done # if $conf is empty and this function is invoked by just-mr # we use the auto-generated conf file - if [ -z "$conf" ]; then conf="${justmrconf}"; + if [ -z "$conf" ]; then conf="${justmrconf}"; fi local _targets=($(_just_targets "$conf" "$main" "$prev" 2>/dev/null)) COMPREPLY=($(compgen -f -W "${_opts[*]} ${_targets[*]}" -- $word )) @@ -162,7 +162,7 @@ EOF } _just-mr_completion(){ - local readonly SUBCOMMANDS=(setup setup-env fetch update "do" version build analyse describe install-cas install rebuild gc execute) + local readonly SUBCOMMANDS=(mrversion setup setup-env fetch update "do" gc-repo add-to-cas analyse build describe gc install install-cas rebuild version -h --help) local word=${COMP_WORDS[$COMP_CWORD]} local prev=${COMP_WORDS[$((COMP_CWORD-1))]} local cmd=$(_just-mr_parse_subcommand "${COMP_WORDS[@]}") diff --git a/share/man/just.1.md b/share/man/just.1.md index dd5859206..e981d6330 100644 --- a/share/man/just.1.md +++ b/share/man/just.1.md @@ -439,6 +439,11 @@ File path for writing the action graph description to. See **`just-graph-file`**(5) for more details. 
Supported by: analyse|build|install|rebuild. +**`--dump-plain-graph`** *`PATH`* +File path for writing the action graph description to, however without +the additional `"origins"` key. See **`just-graph-file`**(5) for more details. +Supported by: analyse|build|install|rebuild. + **`-f`**, **`--log-file`** *`PATH`* Path to local log file. **`just`** will store the information printed on stderr in the log file along with the thread id and timestamp when the diff --git a/src/buildtool/auth/authentication.hpp b/src/buildtool/auth/authentication.hpp index 80fe58864..ace2e60dd 100644 --- a/src/buildtool/auth/authentication.hpp +++ b/src/buildtool/auth/authentication.hpp @@ -35,18 +35,18 @@ struct Auth final { class Builder; // CA certificate bundle - std::string const ca_cert = {}; + std::string const ca_cert; // Client-side signed certificate - std::string const client_cert = {}; + std::string const client_cert; // Client-side private key - std::string const client_key = {}; + std::string const client_key; // Server-side signed certificate - std::string const server_cert = {}; + std::string const server_cert; // Server-side private key - std::string const server_key = {}; + std::string const server_key; }; - std::variant method = {}; + std::variant method; }; class Auth::TLS::Builder final { diff --git a/src/buildtool/build_engine/analysed_target/TARGETS b/src/buildtool/build_engine/analysed_target/TARGETS index 829e269dd..3fff08b8b 100644 --- a/src/buildtool/build_engine/analysed_target/TARGETS +++ b/src/buildtool/build_engine/analysed_target/TARGETS @@ -4,12 +4,12 @@ , "hdrs": ["analysed_target.hpp"] , "srcs": ["analysed_target.cpp"] , "deps": - [ ["src/buildtool/build_engine/expression", "expression"] + [ "graph_information" + , ["src/buildtool/build_engine/expression", "expression"] + , ["src/buildtool/build_engine/expression", "expression_ptr_interface"] , ["src/buildtool/common", "action_description"] , ["src/buildtool/common", "artifact_description"] , 
["src/buildtool/common", "tree"] - , "graph_information" - , ["src/buildtool/build_engine/expression", "expression_ptr_interface"] ] , "stage": ["src", "buildtool", "build_engine", "analysed_target"] , "private-deps": [["@", "gsl", "", "gsl"]] @@ -20,8 +20,8 @@ , "hdrs": ["target_graph_information.hpp"] , "srcs": ["target_graph_information.cpp"] , "deps": - [ ["src/buildtool/build_engine/target_map", "configured_target"] - , ["@", "json", "", "json"] + [ ["@", "json", "", "json"] + , ["src/buildtool/build_engine/target_map", "configured_target"] ] , "stage": ["src", "buildtool", "build_engine", "analysed_target"] } diff --git a/src/buildtool/build_engine/analysed_target/analysed_target.cpp b/src/buildtool/build_engine/analysed_target/analysed_target.cpp index 045add0d5..b9a5f5598 100644 --- a/src/buildtool/build_engine/analysed_target/analysed_target.cpp +++ b/src/buildtool/build_engine/analysed_target/analysed_target.cpp @@ -18,7 +18,6 @@ namespace { -// NOLINTNEXTLINE(misc-no-recursion) void CollectNonKnownArtifacts( ExpressionPtr const& expr, gsl::not_null*> const& artifacts, diff --git a/src/buildtool/build_engine/analysed_target/target_graph_information.cpp b/src/buildtool/build_engine/analysed_target/target_graph_information.cpp index dfd333ac7..24ba3090e 100644 --- a/src/buildtool/build_engine/analysed_target/target_graph_information.cpp +++ b/src/buildtool/build_engine/analysed_target/target_graph_information.cpp @@ -37,7 +37,7 @@ auto NodesToString(std::vector const& } // namespace -auto TargetGraphInformation::DepsToJson() const noexcept -> nlohmann::json { +auto TargetGraphInformation::DepsToJson() const -> nlohmann::json { auto result = nlohmann::json::object(); result["declared"] = NodesToString(direct_); result["implicit"] = NodesToString(implicit_); diff --git a/src/buildtool/build_engine/analysed_target/target_graph_information.hpp b/src/buildtool/build_engine/analysed_target/target_graph_information.hpp index 672aeddc5..f07dfd006 100644 --- 
a/src/buildtool/build_engine/analysed_target/target_graph_information.hpp +++ b/src/buildtool/build_engine/analysed_target/target_graph_information.hpp @@ -44,7 +44,7 @@ class TargetGraphInformation { [[nodiscard]] auto NodeString() const noexcept -> std::optional; - [[nodiscard]] auto DepsToJson() const noexcept -> nlohmann::json; + [[nodiscard]] auto DepsToJson() const -> nlohmann::json; private: BuildMaps::Target::ConfiguredTargetPtr node_; @@ -55,4 +55,4 @@ class TargetGraphInformation { inline const TargetGraphInformation TargetGraphInformation::kSource = TargetGraphInformation{nullptr, {}, {}, {}}; -#endif +#endif // INCLUDED_SRC_BUILDTOOL_BUILDENGINE_ANALYSED_TARGET_TARGET_GRAPH_INFORMATION_HPP diff --git a/src/buildtool/build_engine/base_maps/TARGETS b/src/buildtool/build_engine/base_maps/TARGETS index 5a54ea54e..3a3107bee 100644 --- a/src/buildtool/build_engine/base_maps/TARGETS +++ b/src/buildtool/build_engine/base_maps/TARGETS @@ -11,10 +11,11 @@ , "hdrs": ["directory_map.hpp"] , "srcs": ["directory_map.cpp"] , "deps": - [ ["src/buildtool/multithreading", "async_map_consumer"] - , "module_name" - , ["src/buildtool/file_system", "file_root"] + [ "module_name" + , ["@", "gsl", "", "gsl"] , ["src/buildtool/common", "config"] + , ["src/buildtool/file_system", "file_root"] + , ["src/buildtool/multithreading", "async_map_consumer"] ] , "stage": ["src", "buildtool", "build_engine", "base_maps"] , "private-deps": [["src/utils/cpp", "path"]] @@ -24,12 +25,12 @@ , "name": ["json_file_map"] , "hdrs": ["json_file_map.hpp"] , "deps": - [ ["@", "fmt", "", "fmt"] - , ["@", "json", "", "json"] + [ "module_name" + , ["@", "fmt", "", "fmt"] , ["@", "gsl", "", "gsl"] + , ["@", "json", "", "json"] , ["src/buildtool/common", "config"] , ["src/buildtool/multithreading", "async_map_consumer"] - , "module_name" ] , "stage": ["src", "buildtool", "build_engine", "base_maps"] } @@ -39,9 +40,9 @@ , "hdrs": ["targets_file_map.hpp"] , "deps": [ "json_file_map" + , "module_name" , 
["@", "json", "", "json"] , ["src/buildtool/multithreading", "async_map_consumer"] - , "module_name" ] , "stage": ["src", "buildtool", "build_engine", "base_maps"] } @@ -51,11 +52,11 @@ , "hdrs": ["entity_name_data.hpp"] , "srcs": ["entity_name_data.cpp"] , "deps": - [ ["@", "json", "", "json"] + [ "module_name" + , ["@", "json", "", "json"] , ["src/buildtool/build_engine/expression", "expression_ptr_interface"] , ["src/utils/cpp", "hash_combine"] , ["src/utils/cpp", "hex_string"] - , "module_name" ] , "stage": ["src", "buildtool", "build_engine", "base_maps"] } @@ -65,10 +66,10 @@ , "hdrs": ["entity_name.hpp"] , "deps": [ "entity_name_data" - , ["@", "json", "", "json"] , ["@", "gsl", "", "gsl"] - , ["src/buildtool/common", "config"] + , ["@", "json", "", "json"] , ["src/buildtool/build_engine/expression", "expression"] + , ["src/buildtool/common", "config"] , ["src/utils/cpp", "hash_combine"] ] , "stage": ["src", "buildtool", "build_engine", "base_maps"] @@ -81,16 +82,21 @@ , "deps": [ "directory_map" , "entity_name" - , ["@", "json", "", "json"] , ["@", "gsl", "", "gsl"] + , ["@", "json", "", "json"] , ["src/buildtool/build_engine/analysed_target", "target"] , ["src/buildtool/build_engine/expression", "expression"] , ["src/buildtool/common", "config"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/multithreading", "async_map_consumer"] , ["src/buildtool/multithreading", "task_system"] ] , "stage": ["src", "buildtool", "build_engine", "base_maps"] - , "private-deps": [["src/utils/cpp", "json"]] + , "private-deps": + [ ["src/buildtool/common", "common"] + , ["src/buildtool/file_system", "object_type"] + , ["src/utils/cpp", "json"] + ] } , "field_reader": { "type": ["@", "rules", "CC", "library"] @@ -99,11 +105,11 @@ , "deps": [ "entity_name" , ["@", "fmt", "", "fmt"] - , ["@", "json", "", "json"] , ["@", "gsl", "", "gsl"] + , ["@", "json", "", "json"] + , ["src/buildtool/build_engine/expression", "expression"] , ["src/buildtool/common", "config"] 
, ["src/buildtool/multithreading", "async_map_consumer"] - , ["src/buildtool/build_engine/expression", "expression"] ] , "stage": ["src", "buildtool", "build_engine", "base_maps"] } @@ -112,14 +118,14 @@ , "name": ["expression_function"] , "hdrs": ["expression_function.hpp"] , "deps": - [ ["src/buildtool/build_engine/expression", "expression"] + [ ["@", "fmt", "", "fmt"] + , ["@", "gsl", "", "gsl"] + , ["src/buildtool/build_engine/expression", "expression"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] , ["src/utils/cpp", "gsl"] - , ["@", "gsl", "", "gsl"] ] , "stage": ["src", "buildtool", "build_engine", "base_maps"] - , "private-deps": [["src/utils/cpp", "hash_combine"]] } , "expression_map": { "type": ["@", "rules", "CC", "library"] @@ -127,19 +133,18 @@ , "hdrs": ["expression_map.hpp"] , "srcs": ["expression_map.cpp"] , "deps": - [ "json_file_map" - , "entity_name" + [ "entity_name" , "expression_function" - , ["@", "gsl", "", "gsl"] + , "json_file_map" + , "module_name" , ["@", "fmt", "", "fmt"] + , ["@", "gsl", "", "gsl"] , ["@", "json", "", "json"] , ["src/buildtool/common", "config"] , ["src/buildtool/multithreading", "async_map_consumer"] - , "module_name" ] , "stage": ["src", "buildtool", "build_engine", "base_maps"] - , "private-deps": - ["field_reader", ["src/buildtool/build_engine/expression", "expression"]] + , "private-deps": ["field_reader"] } , "user_rule": { "type": ["@", "rules", "CC", "library"] @@ -148,8 +153,8 @@ , "deps": [ "entity_name" , "expression_function" - , ["@", "gsl", "", "gsl"] , ["@", "fmt", "", "fmt"] + , ["@", "gsl", "", "gsl"] , ["src/buildtool/build_engine/expression", "expression"] , ["src/utils/cpp", "concepts"] , ["src/utils/cpp", "gsl"] @@ -162,19 +167,18 @@ , "hdrs": ["rule_map.hpp"] , "srcs": ["rule_map.cpp"] , "deps": - [ "json_file_map" - , "entity_name" - , "user_rule" + [ "entity_name" , "expression_map" - , ["@", "gsl", "", "gsl"] + , "json_file_map" + , "module_name" + , 
"user_rule" , ["@", "fmt", "", "fmt"] + , ["@", "gsl", "", "gsl"] , ["@", "json", "", "json"] , ["src/buildtool/common", "config"] , ["src/buildtool/multithreading", "async_map_consumer"] - , "module_name" ] , "stage": ["src", "buildtool", "build_engine", "base_maps"] - , "private-deps": - ["field_reader", ["src/buildtool/build_engine/expression", "expression"]] + , "private-deps": ["field_reader"] } } diff --git a/src/buildtool/build_engine/base_maps/directory_map.hpp b/src/buildtool/build_engine/base_maps/directory_map.hpp index 59f17da1d..463b273d3 100644 --- a/src/buildtool/build_engine/base_maps/directory_map.hpp +++ b/src/buildtool/build_engine/base_maps/directory_map.hpp @@ -37,4 +37,4 @@ auto CreateDirectoryEntriesMap( } // namespace BuildMaps::Base -#endif +#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_DIRECTORY_MAP_HPP diff --git a/src/buildtool/build_engine/base_maps/entity_name.hpp b/src/buildtool/build_engine/base_maps/entity_name.hpp index 397a82f03..ad2b368d2 100644 --- a/src/buildtool/build_engine/base_maps/entity_name.hpp +++ b/src/buildtool/build_engine/base_maps/entity_name.hpp @@ -96,10 +96,10 @@ template GetString(list[0]), GetString(list[1])}; } - + return std::nullopt; } catch (...) { + return std::nullopt; } - return std::nullopt; } template @@ -112,14 +112,19 @@ template std::optional> logger = std::nullopt) noexcept -> std::optional { try { - bool const is_file = s0 == EntityName::kFileLocationMarker; - bool const is_glob = s0 == EntityName::kGlobMarker; - bool const is_symlink = s0 == EntityName::kSymlinkLocationMarker; - auto const ref_type = - is_file ? ReferenceType::kFile - : (is_glob ? ReferenceType::kGlob - : (is_symlink ? 
ReferenceType::kSymlink - : ReferenceType::kTree)); + auto get_ref_type = [](std::string const& s) -> ReferenceType { + if (s == EntityName::kFileLocationMarker) { + return ReferenceType::kFile; + } + if (s == EntityName::kGlobMarker) { + return ReferenceType::kGlob; + } + if (s == EntityName::kSymlinkLocationMarker) { + return ReferenceType::kSymlink; + } + return ReferenceType::kTree; + }; + auto const ref_type = get_ref_type(s0); if (list_size == 3) { if (IsString(list[2])) { auto const& name = GetString(list[2]); @@ -141,9 +146,10 @@ template } } } + return std::nullopt; } catch (...) { + return std::nullopt; } - return std::nullopt; } template // IsList(list) == true @@ -171,9 +177,10 @@ template relmodule)); } } + return std::nullopt; } catch (...) { + return std::nullopt; } - return std::nullopt; } template @@ -202,10 +209,10 @@ template local_repo_name)); } } - + return std::nullopt; } catch (...) { + return std::nullopt; } - return std::nullopt; } template @@ -243,9 +250,10 @@ template s0, list, list_size, current, logger); } } + return std::nullopt; } catch (...) { + return std::nullopt; } - return std::nullopt; } template @@ -278,8 +286,8 @@ template } return res; } catch (...) 
{ + return std::nullopt; } - return std::nullopt; } [[nodiscard]] inline auto ParseEntityNameFromJson( @@ -302,4 +310,4 @@ template } // namespace BuildMaps::Base -#endif +#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_ENTITY_NAME_HPP diff --git a/src/buildtool/build_engine/base_maps/entity_name_data.hpp b/src/buildtool/build_engine/base_maps/entity_name_data.hpp index ba227bc4d..52ac1490d 100644 --- a/src/buildtool/build_engine/base_maps/entity_name_data.hpp +++ b/src/buildtool/build_engine/base_maps/entity_name_data.hpp @@ -48,9 +48,9 @@ enum class ReferenceType : std::int8_t { }; struct NamedTarget { - std::string repository{}; - std::string module{}; - std::string name{}; + std::string repository; + std::string module; + std::string name; ReferenceType reference_t{ReferenceType::kTarget}; NamedTarget() = default; NamedTarget(std::string repository, @@ -211,4 +211,4 @@ struct hash { } // namespace std -#endif +#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_ENTITY_NAME_DATA_HPP diff --git a/src/buildtool/build_engine/base_maps/expression_function.hpp b/src/buildtool/build_engine/base_maps/expression_function.hpp index a5235807e..379da7c28 100644 --- a/src/buildtool/build_engine/base_maps/expression_function.hpp +++ b/src/buildtool/build_engine/base_maps/expression_function.hpp @@ -112,9 +112,9 @@ class ExpressionFunction { Expression::FromJson(R"([{"type": "empty_map"}])"_json)); private: - std::vector vars_{}; - imports_t imports_{}; - ExpressionPtr expr_{}; + std::vector vars_; + imports_t imports_; + ExpressionPtr expr_; }; using ExpressionFunctionPtr = ExpressionFunction::Ptr; diff --git a/src/buildtool/build_engine/base_maps/expression_map.cpp b/src/buildtool/build_engine/base_maps/expression_map.cpp index 02a875cc9..30722bf3f 100644 --- a/src/buildtool/build_engine/base_maps/expression_map.cpp +++ b/src/buildtool/build_engine/base_maps/expression_map.cpp @@ -46,11 +46,11 @@ auto CreateExpressionMap( logger, subcaller = 
std::move(subcaller), id](auto json_values) { - auto const& target_ = id.GetNamedTarget(); - auto func_it = json_values[0]->find(target_.name); + auto const& target = id.GetNamedTarget(); + auto func_it = json_values[0]->find(target.name); if (func_it == json_values[0]->end()) { (*logger)(fmt::format("Cannot find expression {}", - EntityName(target_).ToString()), + EntityName(target).ToString()), true); return; } diff --git a/src/buildtool/build_engine/base_maps/expression_map.hpp b/src/buildtool/build_engine/base_maps/expression_map.hpp index ceb2cf2fc..3acec5312 100644 --- a/src/buildtool/build_engine/base_maps/expression_map.hpp +++ b/src/buildtool/build_engine/base_maps/expression_map.hpp @@ -33,10 +33,13 @@ namespace BuildMaps::Base { using ExpressionFileMap = AsyncMapConsumer; -constexpr auto CreateExpressionFileMap = - CreateJsonFileMap<&RepositoryConfig::ExpressionRoot, - &RepositoryConfig::ExpressionFileName, - /*kMandatory=*/true>; +[[nodiscard]] static inline auto CreateExpressionFileMap( + gsl::not_null const& repo_config, + std::size_t jobs) -> JsonFileMap { + return CreateJsonFileMap<&RepositoryConfig::ExpressionRoot, + &RepositoryConfig::ExpressionFileName, + /*kMandatory=*/true>(repo_config, jobs); +} using ExpressionFunctionMap = AsyncMapConsumer; diff --git a/src/buildtool/build_engine/base_maps/field_reader.hpp b/src/buildtool/build_engine/base_maps/field_reader.hpp index f66c0b1b9..a28999ab9 100644 --- a/src/buildtool/build_engine/base_maps/field_reader.hpp +++ b/src/buildtool/build_engine/base_maps/field_reader.hpp @@ -58,10 +58,10 @@ class FieldReader { names_.reserve(size); ids_.reserve(size); } - template - auto emplace_back(T_Name&& name, T_Id&& id) -> void { - names_.emplace_back(std::forward(name)); - ids_.emplace_back(std::forward(id)); + template + auto emplace_back(TName&& name, TId&& id) -> void { + names_.emplace_back(std::forward(name)); + ids_.emplace_back(std::forward(id)); } private: diff --git 
a/src/buildtool/build_engine/base_maps/json_file_map.hpp b/src/buildtool/build_engine/base_maps/json_file_map.hpp index 52ab89abc..61813c7bb 100644 --- a/src/buildtool/build_engine/base_maps/json_file_map.hpp +++ b/src/buildtool/build_engine/base_maps/json_file_map.hpp @@ -40,7 +40,7 @@ using RootGetter = auto (RepositoryConfig::*)(std::string const&) const using FileNameGetter = auto (RepositoryConfig::*)(std::string const&) const -> std::string const*; -template +template auto CreateJsonFileMap( gsl::not_null const& repo_config, std::size_t jobs) -> JsonFileMap { @@ -49,9 +49,9 @@ auto CreateJsonFileMap( auto logger, auto /* unused */, auto const& key) { - auto const* root = ((*repo_config).*get_root)(key.repository); + auto const* root = ((*repo_config).*kGetRoot)(key.repository); - auto const* json_file_name = ((*repo_config).*get_name)(key.repository); + auto const* json_file_name = ((*repo_config).*kGetName)(key.repository); if (root == nullptr or json_file_name == nullptr) { (*logger)(fmt::format("Cannot determine root or JSON file name for " "repository {}.", diff --git a/src/buildtool/build_engine/base_maps/module_name.hpp b/src/buildtool/build_engine/base_maps/module_name.hpp index 8ff582c5e..584982403 100644 --- a/src/buildtool/build_engine/base_maps/module_name.hpp +++ b/src/buildtool/build_engine/base_maps/module_name.hpp @@ -24,8 +24,8 @@ namespace BuildMaps::Base { struct ModuleName { - std::string repository{}; - std::string module{}; + std::string repository; + std::string module; ModuleName(std::string repository, std::string module) : repository{std::move(repository)}, module{std::move(module)} {} @@ -51,4 +51,4 @@ struct hash { } // namespace std -#endif +#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_MODULE_NAME_HPP diff --git a/src/buildtool/build_engine/base_maps/rule_map.cpp b/src/buildtool/build_engine/base_maps/rule_map.cpp index 15999d013..3e90749ea 100644 --- a/src/buildtool/build_engine/base_maps/rule_map.cpp +++ 
b/src/buildtool/build_engine/base_maps/rule_map.cpp @@ -260,14 +260,13 @@ auto CreateRuleMap(gsl::not_null const& rule_file_map, {id.ToModule()}, [ts, expr_map, repo_config, setter = std::move(setter), logger, id]( auto json_values) { - const auto& target_ = id.GetNamedTarget(); - auto rule_it = json_values[0]->find(target_.name); + const auto& target = id.GetNamedTarget(); + auto rule_it = json_values[0]->find(target.name); if (rule_it == json_values[0]->end()) { - (*logger)( - fmt::format("Cannot find rule {} in {}", - nlohmann::json(target_.name).dump(), - nlohmann::json(target_.module).dump()), - true); + (*logger)(fmt::format("Cannot find rule {} in {}", + nlohmann::json(target.name).dump(), + nlohmann::json(target.module).dump()), + true); return; } @@ -365,7 +364,7 @@ auto CreateRuleMap(gsl::not_null const& rule_file_map, tainted, std::move(*config_transitions), std::make_shared( - std::move(config_vars), + config_vars, std::move(imports), std::move(expr)), [&logger](auto const& msg) { diff --git a/src/buildtool/build_engine/base_maps/rule_map.hpp b/src/buildtool/build_engine/base_maps/rule_map.hpp index 867f4c38f..d911317cf 100644 --- a/src/buildtool/build_engine/base_maps/rule_map.hpp +++ b/src/buildtool/build_engine/base_maps/rule_map.hpp @@ -33,10 +33,13 @@ namespace BuildMaps::Base { using RuleFileMap = AsyncMapConsumer; -constexpr auto CreateRuleFileMap = - CreateJsonFileMap<&RepositoryConfig::RuleRoot, - &RepositoryConfig::RuleFileName, - /*kMandatory=*/true>; +[[nodiscard]] static inline auto CreateRuleFileMap( + gsl::not_null const& repo_config, + std::size_t jobs) -> JsonFileMap { + return CreateJsonFileMap<&RepositoryConfig::RuleRoot, + &RepositoryConfig::RuleFileName, + /*kMandatory=*/true>(repo_config, jobs); +} using UserRuleMap = AsyncMapConsumer; diff --git a/src/buildtool/build_engine/base_maps/source_map.cpp b/src/buildtool/build_engine/base_maps/source_map.cpp index e47c59b1f..8c716ac73 100644 --- 
a/src/buildtool/build_engine/base_maps/source_map.cpp +++ b/src/buildtool/build_engine/base_maps/source_map.cpp @@ -49,12 +49,13 @@ auto as_target(const BuildMaps::Base::EntityName& key, auto CreateSourceTargetMap( const gsl::not_null& dirs, gsl::not_null const& repo_config, + HashFunction::Type hash_type, std::size_t jobs) -> SourceTargetMap { - auto src_target_reader = [dirs, repo_config](auto ts, - auto setter, - auto logger, - auto /* unused */, - auto const& key) { + auto src_target_reader = [dirs, repo_config, hash_type](auto ts, + auto setter, + auto logger, + auto /* unused */, + auto const& key) { using std::filesystem::path; const auto& target = key.GetNamedTarget(); auto name = path(target.name).lexically_normal(); @@ -68,27 +69,30 @@ auto CreateSourceTargetMap( auto dir = (path(target.module) / name).parent_path(); auto const* ws_root = repo_config->WorkspaceRoot(target.repository); - auto src_file_reader = [key, name, setter, logger, dir, ws_root]( - bool exists_in_ws_root) { - if (ws_root != nullptr and exists_in_ws_root) { - if (auto desc = ws_root->ToArtifactDescription( - path(key.GetNamedTarget().module) / name, - key.GetNamedTarget().repository)) { - (*setter)(as_target(key, ExpressionPtr{std::move(*desc)})); - return; + auto src_file_reader = + [key, name, setter, logger, dir, ws_root, hash_type]( + bool exists_in_ws_root) { + if (ws_root != nullptr and exists_in_ws_root) { + if (auto desc = ws_root->ToArtifactDescription( + hash_type, + path(key.GetNamedTarget().module) / name, + key.GetNamedTarget().repository)) { + (*setter)( + as_target(key, ExpressionPtr{std::move(*desc)})); + return; + } } - } - (*logger)( - fmt::format( - "Cannot determine source file {} in directory {} of " - "repository {}", - nlohmann::json( - path(key.GetNamedTarget().name).filename().string()) - .dump(), - nlohmann::json(dir.string()).dump(), - nlohmann::json(key.GetNamedTarget().repository).dump()), - true); - }; + (*logger)( + fmt::format( + "Cannot determine 
source file {} in directory {} of " + "repository {}", + nlohmann::json( + path(key.GetNamedTarget().name).filename().string()) + .dump(), + nlohmann::json(dir.string()).dump(), + nlohmann::json(key.GetNamedTarget().repository).dump()), + true); + }; if (ws_root != nullptr and ws_root->HasFastDirectoryLookup()) { // by-pass directory map and directly attempt to read from ws_root diff --git a/src/buildtool/build_engine/base_maps/source_map.hpp b/src/buildtool/build_engine/base_maps/source_map.hpp index 97088d9d3..a65de6928 100644 --- a/src/buildtool/build_engine/base_maps/source_map.hpp +++ b/src/buildtool/build_engine/base_maps/source_map.hpp @@ -25,6 +25,7 @@ #include "src/buildtool/build_engine/base_maps/entity_name.hpp" #include "src/buildtool/build_engine/expression/expression.hpp" #include "src/buildtool/common/repository_config.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/multithreading/async_map_consumer.hpp" #include "src/buildtool/multithreading/task_system.hpp" @@ -35,8 +36,9 @@ using SourceTargetMap = AsyncMapConsumer; auto CreateSourceTargetMap( const gsl::not_null& dirs, gsl::not_null const& repo_config, + HashFunction::Type hash_type, std::size_t jobs = 0) -> SourceTargetMap; } // namespace BuildMaps::Base -#endif +#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_SOURCE_MAP_HPP diff --git a/src/buildtool/build_engine/base_maps/targets_file_map.hpp b/src/buildtool/build_engine/base_maps/targets_file_map.hpp index 167f12f86..8ff5d76cd 100644 --- a/src/buildtool/build_engine/base_maps/targets_file_map.hpp +++ b/src/buildtool/build_engine/base_maps/targets_file_map.hpp @@ -27,11 +27,13 @@ namespace BuildMaps::Base { using TargetsFileMap = AsyncMapConsumer; -constexpr auto CreateTargetsFileMap = - CreateJsonFileMap<&RepositoryConfig::TargetRoot, - &RepositoryConfig::TargetFileName, - /*kMandatory=*/true>; - +[[nodiscard]] static inline auto CreateTargetsFileMap( + gsl::not_null const& repo_config, + std::size_t 
jobs) -> JsonFileMap { + return CreateJsonFileMap<&RepositoryConfig::TargetRoot, + &RepositoryConfig::TargetFileName, + /*kMandatory=*/true>(repo_config, jobs); +} } // namespace BuildMaps::Base -#endif +#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_TARGETS_FILE_MAP_HPP diff --git a/src/buildtool/build_engine/base_maps/user_rule.hpp b/src/buildtool/build_engine/base_maps/user_rule.hpp index 8ea0d6cc0..c2d46420a 100644 --- a/src/buildtool/build_engine/base_maps/user_rule.hpp +++ b/src/buildtool/build_engine/base_maps/user_rule.hpp @@ -41,16 +41,15 @@ namespace BuildMaps::Base { // kTriangular=true Performs triangular compare, everyone with everyone. // kTriangular=false Performs linear compare, first with each of the rest. template > -[[nodiscard]] static inline auto GetDuplicates(T_Container const& first, - T_Rest const&... rest) - -> T_Result; - -template -[[nodiscard]] static inline auto JoinContainer(T_Container const& c, + InputIterableContainer TContainer, + InputIterableContainer... TRest, + OutputIterableContainer TResult = + std::unordered_set> +[[nodiscard]] static inline auto GetDuplicates(TContainer const& first, + TRest const&... 
rest) -> TResult; + +template +[[nodiscard]] static inline auto JoinContainer(TContainer const& c, std::string const& sep) -> std::string; @@ -169,7 +168,7 @@ class UserRule { std::vector target_exps; target_exps.reserve(target_entity_vec.size()); for (auto const& target_entity : target_entity_vec) { - target_exps.emplace_back(ExpressionPtr{target_entity}); + target_exps.emplace_back(target_entity); } implicit_target_exp.emplace(target_name, target_exps); } @@ -324,16 +323,16 @@ class UserRule { return expected_entries; } - std::vector target_fields_{}; - std::vector string_fields_{}; - std::vector config_fields_{}; - implicit_t implicit_targets_{}; - implicit_exp_t implicit_target_exp_{}; - anonymous_defs_t anonymous_defs_{}; - std::vector config_vars_{}; - std::set tainted_{}; - config_trans_t config_transitions_{}; - ExpressionFunctionPtr expr_{}; + std::vector target_fields_; + std::vector string_fields_; + std::vector config_fields_; + implicit_t implicit_targets_; + implicit_exp_t implicit_target_exp_; + anonymous_defs_t anonymous_defs_; + std::vector config_vars_; + std::set tainted_; + config_trans_t config_transitions_; + ExpressionFunctionPtr expr_; std::unordered_set expected_entries_{ ComputeExpectedEntries(target_fields_, string_fields_, config_fields_)}; }; @@ -342,9 +341,9 @@ using UserRulePtr = UserRule::Ptr; namespace detail { -template -[[nodiscard]] static inline auto MaxSize(T_Container const& first, - T_Rest const&... rest) -> std::size_t { +template +[[nodiscard]] static inline auto MaxSize(TContainer const& first, + TRest const&... rest) -> std::size_t { if constexpr (sizeof...(rest) > 0) { return std::max(first.size(), MaxSize(rest...)); } @@ -352,14 +351,14 @@ template } template -static auto inline FindDuplicates(gsl::not_null const& dups, - T_First const& first, - T_Second const& second, - T_Rest const&... 
rest) -> void { + OutputIterableContainer TResult, + InputIterableContainer TFirst, + InputIterableContainer TSecond, + InputIterableContainer... TRest> +static auto inline FindDuplicates(gsl::not_null const& dups, + TFirst const& first, + TSecond const& second, + TRest const&... rest) -> void { ExpectsAudit(std::is_sorted(first.begin(), first.end()) and std::is_sorted(second.begin(), second.end())); std::set_intersection(first.begin(), @@ -372,6 +371,8 @@ static auto inline FindDuplicates(gsl::not_null const& dups, FindDuplicates(dups, first, rest...); if constexpr (kTriangular) { // do triangular compare of second with rest + + // NOLINTNEXTLINE(readability-suspicious-call-argument) FindDuplicates(dups, second, rest...); } } @@ -380,13 +381,13 @@ static auto inline FindDuplicates(gsl::not_null const& dups, } // namespace detail template -[[nodiscard]] static inline auto GetDuplicates(T_Container const& first, - T_Rest const&... rest) - -> T_Result { - auto dups = T_Result{}; + InputIterableContainer TContainer, + InputIterableContainer... TRest, + OutputIterableContainer TResult> +[[nodiscard]] static inline auto GetDuplicates(TContainer const& first, + TRest const&... 
rest) + -> TResult { + auto dups = TResult{}; constexpr auto kNumContainers = 1 + sizeof...(rest); if constexpr (kNumContainers > 1) { std::size_t size{}; @@ -398,13 +399,13 @@ template (&dups, first, rest...); + detail::FindDuplicates(&dups, first, rest...); } return dups; } -template -[[nodiscard]] static inline auto JoinContainer(T_Container const& c, +template +[[nodiscard]] static inline auto JoinContainer(TContainer const& c, std::string const& sep) -> std::string { std::ostringstream oss{}; diff --git a/src/buildtool/build_engine/expression/TARGETS b/src/buildtool/build_engine/expression/TARGETS index 30e64466d..ff615020f 100644 --- a/src/buildtool/build_engine/expression/TARGETS +++ b/src/buildtool/build_engine/expression/TARGETS @@ -5,8 +5,8 @@ , "deps": [ ["@", "fmt", "", "fmt"] , ["src/buildtool/multithreading", "atomic_value"] - , ["src/utils/cpp", "hash_combine"] , ["src/utils/cpp", "atomic"] + , ["src/utils/cpp", "hash_combine"] ] , "stage": ["src", "buildtool", "build_engine", "expression"] } @@ -42,24 +42,25 @@ , "deps": [ "expression_ptr_interface" , "linked_map" + , ["@", "gsl", "", "gsl"] + , ["@", "json", "", "json"] , ["src/buildtool/build_engine/base_maps", "entity_name_data"] , ["src/buildtool/common", "artifact_description"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/multithreading", "atomic_value"] - , ["src/utils/cpp", "json"] - , ["src/utils/cpp", "hash_combine"] - , ["src/utils/cpp", "hex_string"] , ["src/utils/cpp", "concepts"] , ["src/utils/cpp", "gsl"] - , ["@", "gsl", "", "gsl"] + , ["src/utils/cpp", "hash_combine"] + , ["src/utils/cpp", "hex_string"] + , ["src/utils/cpp", "json"] ] , "stage": ["src", "buildtool", "build_engine", "expression"] , "private-deps": - [ ["src/buildtool/logging", "log_level"] + [ ["@", "fmt", "", "fmt"] + , ["src/buildtool/crypto", "hasher"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["src/utils/cpp", "type_safe_arithmetic"] , 
["src/utils/cpp", "path"] - , ["src/buildtool/crypto", "hasher"] - , ["src/buildtool/crypto", "hash_function"] ] } } diff --git a/src/buildtool/build_engine/expression/evaluator.cpp b/src/buildtool/build_engine/expression/evaluator.cpp index 66eb8cfe5..dfae8499e 100644 --- a/src/buildtool/build_engine/expression/evaluator.cpp +++ b/src/buildtool/build_engine/expression/evaluator.cpp @@ -319,7 +319,7 @@ auto Range(ExpressionPtr const& expr) -> ExpressionPtr { auto result = Expression::list_t{}; result.reserve(len); for (std::size_t i = 0; i < len; i++) { - result.emplace_back(ExpressionPtr{fmt::format("{}", i)}); + result.emplace_back(fmt::format("{}", i)); } return ExpressionPtr{result}; } @@ -347,14 +347,16 @@ auto ShellQuote(std::string arg) -> std::string { return fmt::format("'{}'", arg); } -template +template auto Join(ExpressionPtr const& expr, std::string const& sep) -> ExpressionPtr { - if (expr->IsString()) { - auto string = expr->String(); - if constexpr (kDoQuote) { - string = ShellQuote(std::move(string)); + if constexpr (kAllowString) { + if (expr->IsString()) { + auto string = expr->String(); + if constexpr (kDoQuote) { + string = ShellQuote(std::move(string)); + } + return ExpressionPtr{std::move(string)}; } - return ExpressionPtr{std::move(string)}; } if (expr->IsList()) { auto const& list = expr->List(); @@ -370,12 +372,13 @@ auto Join(ExpressionPtr const& expr, std::string const& sep) -> ExpressionPtr { }); return ExpressionPtr{ss.str()}; } - throw Evaluator::EvaluationError{fmt::format( - "Join expects string or list but got: {}.", expr->ToString())}; + throw Evaluator::EvaluationError{ + fmt::format("Join expects a list of strings{}, but got: {}.", + kAllowString ? 
" or a single string" : "", + expr->ToString())}; } template -// NOLINTNEXTLINE(misc-no-recursion) auto Union(Expression::list_t const& dicts, std::size_t from, std::size_t to) -> ExpressionPtr { @@ -540,7 +543,6 @@ auto ExpandQuasiQuote(const SubExprEvaluator& eval, ExpressionPtr const& expr, Configuration const& env) -> ExpressionPtr; -// NOLINTNEXTLINE(misc-no-recursion) auto ExpandQuasiQuoteListEntry(const SubExprEvaluator& eval, ExpressionPtr const& expr, Configuration const& env) -> ExpressionPtr { @@ -595,7 +597,6 @@ auto ExpandQuasiQuoteListEntry(const SubExprEvaluator& eval, return ExpressionPtr{Expression::list_t{expr}}; } -// NOLINTNEXTLINE(misc-no-recursion) auto ExpandQuasiQuote(const SubExprEvaluator& eval, ExpressionPtr const& expr, Configuration const& env) -> ExpressionPtr { @@ -790,7 +791,7 @@ auto JoinCmdExpr(SubExprEvaluator&& eval, ExpressionPtr const& expr, Configuration const& env) -> ExpressionPtr { auto const& list = eval(expr->Get("$1", list_t{}), env); - return Join(list, " "); + return Join(list, " "); } auto JsonEncodeExpr(SubExprEvaluator&& eval, @@ -1123,7 +1124,8 @@ auto ConcatTargetNameExpr(SubExprEvaluator&& eval, Configuration const& env) -> ExpressionPtr { auto p1 = eval(expr->Get("$1", ""s), env); auto p2 = eval(expr->Get("$2", ""s), env); - return ConcatTargetName(p1, Join(p2, "")); + return ConcatTargetName( + p1, Join(p2, "")); } auto ContextExpr(SubExprEvaluator&& eval, @@ -1399,17 +1401,11 @@ auto Evaluator::EvaluateExpression( FunctionMap::MakePtr(kBuiltInFunctions, provider_functions)); } catch (EvaluationError const& ex) { if (ex.UserContext()) { - try { - note_user_context(); - } catch (...) 
{ - // should not throw - } + note_user_context(); } - else { - if (ex.WhileEvaluation()) { - ss << "Expression evaluation traceback (most recent call last):" - << std::endl; - } + else if (ex.WhileEvaluation()) { + ss << "Expression evaluation traceback (most recent call last):" + << std::endl; } ss << ex.what(); for (auto const& object : ex.InvolvedObjects()) { @@ -1418,15 +1414,10 @@ auto Evaluator::EvaluateExpression( } catch (std::exception const& ex) { ss << ex.what(); } - try { - logger(ss.str()); - } catch (...) { - // should not throw - } + logger(ss.str()); return ExpressionPtr{nullptr}; } -// NOLINTNEXTLINE(misc-no-recursion) auto Evaluator::Evaluate(ExpressionPtr const& expr, Configuration const& env, FunctionMapPtr const& functions) -> ExpressionPtr { @@ -1440,7 +1431,6 @@ auto Evaluator::Evaluate(ExpressionPtr const& expr, expr->List().cbegin(), expr->List().cend(), std::back_inserter(list), - // NOLINTNEXTLINE(misc-no-recursion) [&](auto const& e) { return Evaluate(e, env, functions); }); return ExpressionPtr{list}; } diff --git a/src/buildtool/build_engine/expression/evaluator.hpp b/src/buildtool/build_engine/expression/evaluator.hpp index 726df568f..ba9bd2ef5 100644 --- a/src/buildtool/build_engine/expression/evaluator.hpp +++ b/src/buildtool/build_engine/expression/evaluator.hpp @@ -44,18 +44,20 @@ class Evaluator { class EvaluationError : public std::exception { public: - explicit EvaluationError(std::string const& msg, + explicit EvaluationError(std::string msg, bool while_eval = false, bool user_context = false, std::vector involved_objetcs = std::vector{}) noexcept - : msg_{(while_eval ? "" - : (user_context ? "UserError: " - : "EvaluationError: ")) + - msg}, + : msg_{std::move(msg)}, while_eval_{while_eval}, user_context_{user_context}, - involved_objects_{std::move(std::move(involved_objetcs))} {} + involved_objects_{std::move(std::move(involved_objetcs))} { + if (not while_eval_) { + msg_ = (user_context_ ? 
"UserError: " : "EvaluationError: ") + + msg_; + } + } [[nodiscard]] auto what() const noexcept -> char const* final { return msg_.c_str(); } diff --git a/src/buildtool/build_engine/expression/expression.cpp b/src/buildtool/build_engine/expression/expression.cpp index 7d0f55b55..4dd61a2d5 100644 --- a/src/buildtool/build_engine/expression/expression.cpp +++ b/src/buildtool/build_engine/expression/expression.cpp @@ -75,7 +75,6 @@ auto Expression::operator[](std::size_t pos) && -> ExpressionPtr { fmt::format("List pos '{}' is out of bounds.", pos)}; } -// NOLINTNEXTLINE(misc-no-recursion) auto Expression::ToJson(Expression::JsonMode mode) const -> nlohmann::json { if (IsBool()) { return Bool(); @@ -112,14 +111,12 @@ auto Expression::ToJson(Expression::JsonMode mode) const -> nlohmann::json { std::transform(list.begin(), list.end(), std::back_inserter(json), - // NOLINTNEXTLINE(misc-no-recursion) [mode](auto const& e) { return e->ToJson(mode); }); return json; } if (IsMap()) { auto json = nlohmann::json::object(); auto const& map = Value()->get(); - // NOLINTNEXTLINE(misc-no-recursion) std::for_each(map.begin(), map.end(), [&](auto const& p) { json.emplace(p.first, p.second->ToJson(mode)); }); @@ -131,7 +128,6 @@ auto Expression::ToJson(Expression::JsonMode mode) const -> nlohmann::json { return nlohmann::json{}; } -// NOLINTNEXTLINE(misc-no-recursion) auto Expression::ComputeIsCacheable() const -> bool { // Must be updated whenever we add a new non-cacheable value if (IsName()) { @@ -160,7 +156,6 @@ auto Expression::ComputeIsCacheable() const -> bool { return true; } -// NOLINTNEXTLINE(misc-no-recursion) auto Expression::ToString() const -> std::string { return ToJson().dump(); } @@ -169,17 +164,15 @@ auto Expression::ToString() const -> std::string { -> std::string { return AbbreviateJson(ToJson(), len); } -// NOLINTNEXTLINE(misc-no-recursion) + auto Expression::ToHash() const noexcept -> std::string { return hash_.SetOnceAndGet([this] { return ComputeHash(); }); } 
-// NOLINTNEXTLINE(misc-no-recursion) auto Expression::IsCacheable() const -> bool { return is_cachable_.SetOnceAndGet([this] { return ComputeIsCacheable(); }); } -// NOLINTNEXTLINE(misc-no-recursion) auto Expression::FromJson(nlohmann::json const& json) noexcept -> ExpressionPtr { if (json.is_null()) { @@ -201,7 +194,6 @@ auto Expression::FromJson(nlohmann::json const& json) noexcept std::transform(json.begin(), json.end(), std::back_inserter(l), - // NOLINTNEXTLINE(misc-no-recursion) [](auto const& j) { return FromJson(j); }); return ExpressionPtr{l}; } @@ -224,18 +216,17 @@ auto Expression::TypeStringForIndex() const noexcept -> std::string { if (kIndex == data_.index()) { return TypeToString>(); } - constexpr auto size = std::variant_size_v; - if constexpr (kIndex < size - 1) { + constexpr auto kSize = std::variant_size_v; + if constexpr (kIndex < kSize - 1) { return TypeStringForIndex(); } - return TypeToString>(); + return TypeToString>(); } auto Expression::TypeString() const noexcept -> std::string { return TypeStringForIndex(); } -// NOLINTNEXTLINE(misc-no-recursion) auto Expression::ComputeHash() const noexcept -> std::string { auto hash = std::string{}; @@ -246,11 +237,19 @@ auto Expression::ComputeHash() const noexcept -> std::string { IsResult() or IsNode() or IsName()) { // just hash the JSON representation, but prepend "@" for artifact, // "=" for result, "#" for node, and "$" for name. - std::string prefix{IsArtifact() ? "@" - : IsResult() ? "=" - : IsNode() ? "#" - : IsName() ? 
"$" - : ""}; + std::string prefix; + if (IsArtifact()) { + prefix = "@"; + } + else if (IsResult()) { + prefix = "="; + } + else if (IsNode()) { + prefix = "#"; + } + else if (IsName()) { + prefix = "$"; + } hash = hash_function.PlainHashData(prefix + ToString()).Bytes(); } else { diff --git a/src/buildtool/build_engine/expression/expression.hpp b/src/buildtool/build_engine/expression/expression.hpp index cbfb2f24d..b28fec512 100644 --- a/src/buildtool/build_engine/expression/expression.hpp +++ b/src/buildtool/build_engine/expression/expression.hpp @@ -16,6 +16,7 @@ #define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_EXPRESSION_HPP #include +#include #include #include #include @@ -152,11 +153,7 @@ class Expression { [[nodiscard]] auto At( std::string const& key) && -> std::optional { - auto value = std::move(*this).Map().Find(key); - if (value) { - return std::move(*value); - } - return std::nullopt; + return std::move(*this).Map().Find(key); } template @@ -222,7 +219,11 @@ class Expression { std::size_t pos) const& -> ExpressionPtr const&; [[nodiscard]] auto operator[](std::size_t pos) && -> ExpressionPtr; - enum class JsonMode { SerializeAll, SerializeAllButNodes, NullForNonJson }; + enum class JsonMode : std::uint8_t { + SerializeAll, + SerializeAllButNodes, + NullForNonJson + }; [[nodiscard]] auto ToJson(JsonMode mode = JsonMode::SerializeAll) const -> nlohmann::json; @@ -264,8 +265,8 @@ class Expression { map_t> data_{none_t{}}; - AtomicValue hash_{}; - AtomicValue is_cachable_{}; + AtomicValue hash_; + AtomicValue is_cachable_; template requires(IsValidType()) diff --git a/src/buildtool/build_engine/expression/expression_ptr.cpp b/src/buildtool/build_engine/expression/expression_ptr.cpp index 68b45c6f4..0d3ce649e 100644 --- a/src/buildtool/build_engine/expression/expression_ptr.cpp +++ b/src/buildtool/build_engine/expression/expression_ptr.cpp @@ -76,7 +76,7 @@ auto ExpressionPtr::ToIdentifier() const noexcept -> std::string { return ptr_ ? 
ptr_->ToIdentifier() : std::string{}; } -auto ExpressionPtr::ToJson() const noexcept -> nlohmann::json { +auto ExpressionPtr::ToJson() const -> nlohmann::json { return ptr_ ? ptr_->ToJson() : nlohmann::json::object(); } diff --git a/src/buildtool/build_engine/expression/expression_ptr.hpp b/src/buildtool/build_engine/expression/expression_ptr.hpp index b1f698c6f..c1ab60663 100644 --- a/src/buildtool/build_engine/expression/expression_ptr.hpp +++ b/src/buildtool/build_engine/expression/expression_ptr.hpp @@ -90,7 +90,7 @@ class ExpressionPtr { [[nodiscard]] auto IsCacheable() const noexcept -> bool; [[nodiscard]] auto ToIdentifier() const noexcept -> std::string; - [[nodiscard]] auto ToJson() const noexcept -> nlohmann::json; + [[nodiscard]] auto ToJson() const -> nlohmann::json; using linked_map_t = LinkedMap; [[nodiscard]] auto IsNotNull() const noexcept -> bool; diff --git a/src/buildtool/build_engine/expression/target_node.hpp b/src/buildtool/build_engine/expression/target_node.hpp index feed94d6d..1e7cbd2ea 100644 --- a/src/buildtool/build_engine/expression/target_node.hpp +++ b/src/buildtool/build_engine/expression/target_node.hpp @@ -62,21 +62,23 @@ class TargetNode { [[nodiscard]] auto operator==(TargetNode const& other) const noexcept -> bool { - if (data_.index() == other.data_.index()) { - try { - if (IsValue()) { - return GetValue() == other.GetValue(); - } - auto const& abs_l = GetAbstract(); - auto const& abs_r = other.GetAbstract(); - return abs_l.node_type == abs_r.node_type and - abs_l.string_fields == abs_r.string_fields and - abs_l.target_fields == abs_r.string_fields; - } catch (...) 
{ - // should never happen + if (data_.index() != other.data_.index()) { + return false; + } + + try { + if (IsValue()) { + return GetValue() == other.GetValue(); } + auto const& abs_l = GetAbstract(); + auto const& abs_r = other.GetAbstract(); + return abs_l.node_type == abs_r.node_type and + abs_l.string_fields == abs_r.string_fields and + abs_l.target_fields == abs_r.target_fields; + } catch (...) { + // should never happen + return false; } - return false; } [[nodiscard]] auto ToString() const noexcept -> std::string { @@ -84,8 +86,8 @@ class TargetNode { return ToJson().dump(); } catch (...) { // should never happen + return {}; } - return {}; } [[nodiscard]] auto ToJson() const -> nlohmann::json; diff --git a/src/buildtool/build_engine/expression/target_result.cpp b/src/buildtool/build_engine/expression/target_result.cpp index c4f43f898..c95a803f2 100644 --- a/src/buildtool/build_engine/expression/target_result.cpp +++ b/src/buildtool/build_engine/expression/target_result.cpp @@ -65,7 +65,6 @@ auto SerializeTargetResultWithReplacement( // If replacements is set, replace any contained // non-known artifact by known artifact from replacement. Throws runtime_error // if no replacement is found. 
-// NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] auto SerializeExpression( gsl::not_null*> const& nodes, @@ -178,8 +177,8 @@ auto SerializeTargetResultWithReplacement( return id; } -// NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] auto DeserializeExpression( + HashFunction::Type hash_type, nlohmann::json const& entry, nlohmann::json const& nodes, std::unordered_set const& provided_artifacts, @@ -198,7 +197,8 @@ auto SerializeTargetResultWithReplacement( auto const& json = nodes.at(id); if (json.is_object()) { if (provided_artifacts.contains(id)) { - if (auto artifact = ArtifactDescription::FromJson(json)) { + if (auto artifact = + ArtifactDescription::FromJson(hash_type, json)) { auto result = ExpressionPtr{*artifact}; sofar->emplace(id, result); return result; @@ -209,14 +209,16 @@ auto SerializeTargetResultWithReplacement( if (json["type"] == "ABSTRACT_NODE") { auto node_type = json["node_type"].get(); auto target_fields = - DeserializeExpression(json["target_fields"], + DeserializeExpression(hash_type, + json["target_fields"], nodes, provided_artifacts, provided_nodes, provided_results, sofar); auto string_fields = - DeserializeExpression(json["string_fields"], + DeserializeExpression(hash_type, + json["string_fields"], nodes, provided_artifacts, provided_nodes, @@ -230,7 +232,8 @@ auto SerializeTargetResultWithReplacement( return result; } if (json["type"] == "VALUE_NODE") { - auto value = DeserializeExpression(json["result"], + auto value = DeserializeExpression(hash_type, + json["result"], nodes, provided_artifacts, provided_nodes, @@ -243,19 +246,22 @@ auto SerializeTargetResultWithReplacement( return ExpressionPtr{nullptr}; } if (provided_results.contains(id)) { - auto artifact_stage = DeserializeExpression(json["artifact_stage"], + auto artifact_stage = DeserializeExpression(hash_type, + json["artifact_stage"], nodes, provided_artifacts, provided_nodes, provided_results, sofar); - auto runfiles = DeserializeExpression(json["runfiles"], + auto runfiles 
= DeserializeExpression(hash_type, + json["runfiles"], nodes, provided_artifacts, provided_nodes, provided_results, sofar); - auto provides = DeserializeExpression(json["provides"], + auto provides = DeserializeExpression(hash_type, + json["provides"], nodes, provided_artifacts, provided_nodes, @@ -273,7 +279,8 @@ auto SerializeTargetResultWithReplacement( Expression::map_t::underlying_map_t map{}; for (auto const& [key, val] : json.items()) { - auto new_val = DeserializeExpression(val.get(), + auto new_val = DeserializeExpression(hash_type, + val.get(), nodes, provided_artifacts, provided_nodes, @@ -293,7 +300,8 @@ auto SerializeTargetResultWithReplacement( Expression::list_t list{}; list.reserve(json.size()); for (auto const& val : json) { - auto new_val = DeserializeExpression(val.get(), + auto new_val = DeserializeExpression(hash_type, + val.get(), nodes, provided_artifacts, provided_nodes, @@ -317,7 +325,6 @@ auto SerializeTargetResultWithReplacement( // Serialize artifact map to JSON. If replacements is set, replace // non-known artifacts by known artifacts from replacement. Throws runtime_error // if no replacement is found. -// NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] auto SerializeArtifactMap( ExpressionPtr const& expr, std::unordered_map const& @@ -335,12 +342,13 @@ auto SerializeTargetResultWithReplacement( return artifacts; } -[[nodiscard]] auto DeserializeArtifactMap(nlohmann::json const& json) +[[nodiscard]] auto DeserializeArtifactMap(HashFunction::Type hash_type, + nlohmann::json const& json) -> ExpressionPtr { if (json.is_object()) { Expression::map_t::underlying_map_t map{}; for (auto const& [key, val] : json.items()) { - auto artifact = ArtifactDescription::FromJson(val); + auto artifact = ArtifactDescription::FromJson(hash_type, val); if (not artifact) { return ExpressionPtr{nullptr}; } @@ -354,7 +362,6 @@ auto SerializeTargetResultWithReplacement( // Serialize provides map to JSON. 
If replacements is set, replace // non-known artifacts by known artifacts from replacement. Throws runtime_error // if no replacement is found. -// NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] auto SerializeProvidesMap( ExpressionPtr const& expr, std::unordered_map const& @@ -387,11 +394,12 @@ auto JsonSet(nlohmann::json const& j) -> std::unordered_set { return result; } -// NOLINTNEXTLINE(misc-no-recursion) -[[nodiscard]] auto DeserializeProvidesMap(nlohmann::json const& json) +[[nodiscard]] auto DeserializeProvidesMap(HashFunction::Type hash_type, + nlohmann::json const& json) -> ExpressionPtr { std::unordered_map sofar{}; - return DeserializeExpression(json["entry"], + return DeserializeExpression(hash_type, + json["entry"], json["nodes"], JsonSet(json["provided_artifacts"]), JsonSet(json["provided_nodes"]), @@ -402,7 +410,6 @@ auto JsonSet(nlohmann::json const& j) -> std::unordered_set { // Serialize TargetResult to JSON. If replacements is set, replace non-known // artifacts by known artifacts from replacement. Throws runtime_error if no // replacement is found. 
-// NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] auto SerializeTargetResultWithReplacement( TargetResult const& result, std::unordered_map const& @@ -433,13 +440,13 @@ auto TargetResult::ReplaceNonKnownAndToJson( return std::nullopt; } -// NOLINTNEXTLINE(misc-no-recursion) -auto TargetResult::FromJson(nlohmann::json const& json) noexcept +auto TargetResult::FromJson(HashFunction::Type hash_type, + nlohmann::json const& json) noexcept -> std::optional { try { - auto artifacts = DeserializeArtifactMap(json["artifacts"]); - auto runfiles = DeserializeArtifactMap(json["runfiles"]); - auto provides = DeserializeProvidesMap(json["provides"]); + auto artifacts = DeserializeArtifactMap(hash_type, json["artifacts"]); + auto runfiles = DeserializeArtifactMap(hash_type, json["runfiles"]); + auto provides = DeserializeProvidesMap(hash_type, json["provides"]); if (artifacts and runfiles and provides) { return TargetResult{artifacts, provides, runfiles}; } diff --git a/src/buildtool/build_engine/expression/target_result.hpp b/src/buildtool/build_engine/expression/target_result.hpp index dcf201ee4..4a698fc4a 100644 --- a/src/buildtool/build_engine/expression/target_result.hpp +++ b/src/buildtool/build_engine/expression/target_result.hpp @@ -22,15 +22,17 @@ #include "nlohmann/json.hpp" #include "src/buildtool/build_engine/expression/expression_ptr.hpp" #include "src/buildtool/common/artifact_description.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/utils/cpp/hash_combine.hpp" struct TargetResult { - ExpressionPtr artifact_stage{}; - ExpressionPtr provides{}; - ExpressionPtr runfiles{}; + ExpressionPtr artifact_stage; + ExpressionPtr provides; + ExpressionPtr runfiles; bool is_cacheable{provides.IsCacheable()}; - [[nodiscard]] static auto FromJson(nlohmann::json const& json) noexcept + [[nodiscard]] static auto FromJson(HashFunction::Type hash_type, + nlohmann::json const& json) noexcept -> std::optional; [[nodiscard]] auto ToJson() const -> 
nlohmann::json; diff --git a/src/buildtool/build_engine/target_map/TARGETS b/src/buildtool/build_engine/target_map/TARGETS index ae56d784b..17b794762 100644 --- a/src/buildtool/build_engine/target_map/TARGETS +++ b/src/buildtool/build_engine/target_map/TARGETS @@ -16,20 +16,21 @@ , "name": ["result_map"] , "hdrs": ["result_map.hpp"] , "deps": - [ ["src/buildtool/common", "tree"] - , ["src/buildtool/storage", "storage"] + [ ["@", "gsl", "", "gsl"] + , ["@", "json", "", "json"] , ["src/buildtool/build_engine/analysed_target", "target"] - , ["src/buildtool/build_engine/target_map", "configured_target"] + , ["src/buildtool/build_engine/base_maps", "entity_name"] , ["src/buildtool/build_engine/expression", "expression"] + , ["src/buildtool/build_engine/target_map", "configured_target"] + , ["src/buildtool/common", "common"] + , ["src/buildtool/common", "tree"] + , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] , ["src/buildtool/multithreading", "task"] , ["src/buildtool/multithreading", "task_system"] , ["src/buildtool/progress_reporting", "progress"] - , ["@", "gsl", "", "gsl"] + , ["src/buildtool/storage", "storage"] , ["src/utils/cpp", "hash_combine"] - , ["src/buildtool/logging", "logging"] - , ["src/buildtool/logging", "log_level"] - , ["src/buildtool/common", "common"] - , ["src/buildtool/build_engine/base_maps", "entity_name"] ] , "stage": ["src", "buildtool", "build_engine", "target_map"] } @@ -40,42 +41,42 @@ , "srcs": ["utils.cpp", "built_in_rules.cpp", "export.cpp", "target_map.cpp"] , "private-hdrs": ["built_in_rules.hpp", "export.hpp", "utils.hpp"] , "deps": - [ "configured_target" + [ "absent_target_map" + , "configured_target" , "result_map" - , "absent_target_map" + , ["@", "gsl", "", "gsl"] + , ["@", "json", "", "json"] , ["src/buildtool/build_engine/analysed_target", "target"] , ["src/buildtool/build_engine/base_maps", "rule_map"] , ["src/buildtool/build_engine/base_maps", "source_map"] , 
["src/buildtool/build_engine/base_maps", "targets_file_map"] - , ["src/buildtool/multithreading", "async_map_consumer"] - , ["src/buildtool/storage", "storage"] , ["src/buildtool/main", "analyse_context"] - , ["@", "gsl", "", "gsl"] - , ["@", "json", "", "json"] + , ["src/buildtool/multithreading", "async_map_consumer"] ] , "stage": ["src", "buildtool", "build_engine", "target_map"] , "private-deps": [ ["@", "fmt", "", "fmt"] - , ["src/utils/cpp", "gsl"] - , ["src/buildtool/storage", "storage"] , ["src/buildtool/build_engine/base_maps", "entity_name"] , ["src/buildtool/build_engine/base_maps", "field_reader"] , ["src/buildtool/build_engine/expression", "expression"] - , ["src/buildtool/execution_api/local", "local"] + , ["src/buildtool/build_engine/expression", "expression_ptr_interface"] + , ["src/buildtool/common", "artifact_description"] + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common", "common"] + , ["src/buildtool/common", "config"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/crypto", "hasher"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["src/utils/cpp", "hash_combine"] + , ["src/buildtool/progress_reporting", "progress"] + , ["src/buildtool/serve_api/remote", "config"] + , ["src/buildtool/serve_api/remote", "serve_api"] + , ["src/buildtool/storage", "storage"] + , ["src/utils/cpp", "gsl"] , ["src/utils/cpp", "json"] , ["src/utils/cpp", "path"] , ["src/utils/cpp", "path_hash"] , ["src/utils/cpp", "vector"] - , ["src/buildtool/serve_api/remote", "serve_api"] - , ["src/buildtool/serve_api/remote", "config"] - , ["src/buildtool/common", "common"] - , ["src/buildtool/common", "config"] - , ["src/buildtool/progress_reporting", "progress"] - , ["src/buildtool/crypto", "hasher"] - , ["src/buildtool/crypto", "hash_function"] ] } , "target_map_testable_internals": @@ -84,13 +85,14 @@ , "name": ["target_map_testable_internals"] , "hdrs": ["utils.hpp"] , "deps": - [ 
"target_map" - , ["src/buildtool/build_engine/expression", "expression_ptr_interface"] - , "configured_target" + [ "configured_target" + , "target_map" + , ["@", "gsl", "", "gsl"] + , ["src/buildtool/build_engine/analysed_target", "target"] , ["src/buildtool/build_engine/base_maps", "entity_name"] , ["src/buildtool/build_engine/base_maps", "field_reader"] , ["src/buildtool/build_engine/expression", "expression"] - , ["src/buildtool/build_engine/analysed_target", "target"] + , ["src/buildtool/build_engine/expression", "expression_ptr_interface"] , ["src/buildtool/common", "config"] ] , "stage": ["src", "buildtool", "build_engine", "target_map"] @@ -103,23 +105,23 @@ , "deps": [ "configured_target" , "result_map" + , ["@", "gsl", "", "gsl"] , ["src/buildtool/build_engine/analysed_target", "target"] + , ["src/buildtool/main", "analyse_context"] , ["src/buildtool/multithreading", "async_map_consumer"] , ["src/utils/cpp", "hash_combine"] - , ["src/buildtool/main", "analyse_context"] - , ["@", "gsl", "", "gsl"] ] , "stage": ["src", "buildtool", "build_engine", "target_map"] , "private-deps": - [ ["src/buildtool/storage", "storage"] + [ ["@", "json", "", "json"] + , ["src/buildtool/common", "common"] + , ["src/buildtool/common", "config"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] + , ["src/buildtool/progress_reporting", "progress"] , ["src/buildtool/serve_api/remote", "serve_api"] + , ["src/buildtool/storage", "storage"] , ["src/utils/cpp", "json"] - , ["@", "json", "", "json"] - , ["src/buildtool/common", "common"] - , ["src/buildtool/common", "config"] - , ["src/buildtool/progress_reporting", "progress"] ] } } diff --git a/src/buildtool/build_engine/target_map/absent_target_map.hpp b/src/buildtool/build_engine/target_map/absent_target_map.hpp index 6dc36a7d4..874204a77 100644 --- a/src/buildtool/build_engine/target_map/absent_target_map.hpp +++ b/src/buildtool/build_engine/target_map/absent_target_map.hpp @@ -76,4 +76,4 @@ struct 
hash { }; } // namespace std -#endif +#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_ABSENT_TARGET_MAP_HPP diff --git a/src/buildtool/build_engine/target_map/built_in_rules.cpp b/src/buildtool/build_engine/target_map/built_in_rules.cpp index 0237e450b..9623f8163 100644 --- a/src/buildtool/build_engine/target_map/built_in_rules.cpp +++ b/src/buildtool/build_engine/target_map/built_in_rules.cpp @@ -28,11 +28,13 @@ #include "fmt/core.h" #include "src/buildtool/build_engine/base_maps/field_reader.hpp" +#include "src/buildtool/build_engine/expression/evaluator.hpp" #include "src/buildtool/build_engine/expression/expression.hpp" #include "src/buildtool/build_engine/expression/expression_ptr.hpp" #include "src/buildtool/build_engine/target_map/export.hpp" #include "src/buildtool/build_engine/target_map/utils.hpp" #include "src/buildtool/common/artifact_description.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/common/repository_config.hpp" #include "src/utils/cpp/path.hpp" #include "src/utils/cpp/vector.hpp" @@ -107,18 +109,18 @@ void ReportArtifactWithDependencyOrigin( void ReportStagingConflict( const std::string& location, - const ExpressionPtr& stage_A, - const ExpressionPtr& stage_B, + const ExpressionPtr& stage_a, + const ExpressionPtr& stage_b, const std::unordered_map& deps_by_target, const BuildMaps::Target::TargetMap::LoggerPtr& logger) { std::stringstream msg{}; - auto artifact_A = stage_A->Get(location, Expression::kNone); - auto artifact_B = stage_B->Get(location, Expression::kNone); + auto artifact_a = stage_a->Get(location, Expression::kNone); + auto artifact_b = stage_b->Get(location, Expression::kNone); msg << "Staging conflict on path " << nlohmann::json(location).dump() << " between\n"; - ReportArtifactWithDependencyOrigin(artifact_A, deps_by_target, &msg); - ReportArtifactWithDependencyOrigin(artifact_B, deps_by_target, &msg); + ReportArtifactWithDependencyOrigin(artifact_a, deps_by_target, &msg); + 
ReportArtifactWithDependencyOrigin(artifact_b, deps_by_target, &msg); (*logger)(msg.str(), true); } @@ -271,7 +273,7 @@ void BlobGenRuleWithDeps( auto stage = ExpressionPtr{Expression::map_t{ name_val->String(), ExpressionPtr{ArtifactDescription::CreateKnown( - ArtifactDigest::Create( + ArtifactDigestFactory::HashDataAs( context->storage->GetHashFunction(), data_val->String()), blob_type)}}}; @@ -867,12 +869,11 @@ void InstallRule( auto files = std::unordered_map{}; files.reserve(files_exp->Map().size()); for (auto const& [path, dep_exp] : files_exp->Map()) { - std::string path_ = path; // Have a variable to capture auto dep_name = dep_exp.Evaluate( - param_config, {}, [&logger, &path_](auto const& msg) { + param_config, {}, [&logger, &path = path](auto const& msg) { (*logger)( fmt::format( - "While evaluating files entry for {}:\n{}", path_, msg), + "While evaluating files entry for {}:\n{}", path, msg), true); }); if (not dep_name) { @@ -921,8 +922,8 @@ void InstallRule( std::vector>{}; dirs.reserve(dirs_value->List().size()); for (auto const& entry : dirs_value->List()) { - if (not(entry->IsList() and entry->List().size() == 2 and - entry->List()[1]->IsString())) { + if (not entry->IsList() or entry->List().size() != 2 or + not entry->List()[1]->IsString()) { (*logger)(fmt::format("Expected dirs to evaluate to a list of " "target-path pairs, but found entry {}", entry->ToString()), @@ -946,8 +947,7 @@ void InstallRule( } dependency_keys.emplace_back( BuildMaps::Target::ConfiguredTarget{*dep_target, key.config}); - dirs.emplace_back(std::pair{ - *dep_target, entry->List()[1]->String()}); + dirs.emplace_back(*dep_target, entry->List()[1]->String()); } (*subcaller)( @@ -1680,10 +1680,11 @@ auto HandleBuiltin(const gsl::not_null& context, } auto target_logger = std::make_shared( [logger, rule_name, key](auto msg, auto fatal) { - (*logger)(fmt::format("While evaluating {} target {}:\n{}", - rule_name, - key.ToShortString(), - msg), + (*logger)(fmt::format( + "While 
evaluating {} target {}:\n{}", + rule_name, + key.ToShortString(Evaluator::GetExpressionLogLimit()), + msg), fatal); }); (it->second)( diff --git a/src/buildtool/build_engine/target_map/built_in_rules.hpp b/src/buildtool/build_engine/target_map/built_in_rules.hpp index 653344f2c..242b68c34 100644 --- a/src/buildtool/build_engine/target_map/built_in_rules.hpp +++ b/src/buildtool/build_engine/target_map/built_in_rules.hpp @@ -33,4 +33,4 @@ auto HandleBuiltin(const gsl::not_null& context, const gsl::not_null& result_map) -> bool; } // namespace BuildMaps::Target -#endif +#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_BUILT_IN_RULES_HPP diff --git a/src/buildtool/build_engine/target_map/configured_target.hpp b/src/buildtool/build_engine/target_map/configured_target.hpp index 88c59cf2c..dc1f981ef 100644 --- a/src/buildtool/build_engine/target_map/configured_target.hpp +++ b/src/buildtool/build_engine/target_map/configured_target.hpp @@ -30,8 +30,6 @@ struct ConfiguredTarget { BuildMaps::Base::EntityName target; Configuration config; - static constexpr std::size_t kConfigLength = 320; - [[nodiscard]] auto operator==(BuildMaps::Target::ConfiguredTarget const& other) const noexcept -> bool { return target == other.target and config == other.config; @@ -41,11 +39,12 @@ struct ConfiguredTarget { return fmt::format("[{},{}]", target.ToString(), config.ToString()); } - [[nodiscard]] auto ToShortString() const noexcept -> std::string { + [[nodiscard]] auto ToShortString(std::size_t config_length) const noexcept + -> std::string { return fmt::format( "[{},{}]", target.ToString(), - AbbreviateJson(PruneJson(config.ToJson()), kConfigLength)); + AbbreviateJson(PruneJson(config.ToJson()), config_length)); } }; @@ -66,4 +65,4 @@ struct hash { }; } // namespace std -#endif +#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_CONFIGURED_TARGET_HPP diff --git a/src/buildtool/build_engine/target_map/export.hpp b/src/buildtool/build_engine/target_map/export.hpp index 
e0e6e5685..5845514ef 100644 --- a/src/buildtool/build_engine/target_map/export.hpp +++ b/src/buildtool/build_engine/target_map/export.hpp @@ -31,4 +31,4 @@ void ExportRule( const BuildMaps::Target::TargetMap::LoggerPtr& logger, const gsl::not_null& result_map); -#endif +#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_EXPORT_HPP diff --git a/src/buildtool/build_engine/target_map/result_map.hpp b/src/buildtool/build_engine/target_map/result_map.hpp index 9d7787bc4..3e144628d 100644 --- a/src/buildtool/build_engine/target_map/result_map.hpp +++ b/src/buildtool/build_engine/target_map/result_map.hpp @@ -55,9 +55,9 @@ class ResultTargetMap { template struct ResultType { - std::vector actions{}; - std::vector blobs{}; - std::vector trees{}; + std::vector actions; + std::vector blobs; + std::vector trees; }; explicit ResultTargetMap(std::size_t jobs) : width_{ComputeWidth(jobs)} {} @@ -131,8 +131,7 @@ class ResultTargetMap { return all_exports; } - [[nodiscard]] auto ConfiguredTargetsGraph() const noexcept - -> nlohmann::json { + [[nodiscard]] auto ConfiguredTargetsGraph() const -> nlohmann::json { auto result = nlohmann::json::object(); for (auto const& i : targets_) { for (auto const& it : i) { @@ -385,7 +384,6 @@ class ResultTargetMap { std::vector num_blobs_{std::vector(width_)}; std::vector num_trees_{std::vector(width_)}; - // NOLINTNEXTLINE(misc-no-recursion) constexpr static auto ComputeWidth(std::size_t jobs) -> std::size_t { if (jobs <= 0) { // Non-positive indicates to use the default value @@ -399,9 +397,9 @@ class ResultTargetMap { template <> struct ResultTargetMap::ResultType { - std::vector actions{}; - std::vector blobs{}; - std::vector trees{}; + std::vector actions; + std::vector blobs; + std::vector trees; }; } // namespace BuildMaps::Target diff --git a/src/buildtool/build_engine/target_map/target_map.cpp b/src/buildtool/build_engine/target_map/target_map.cpp index 26935f099..961cdc73a 100644 --- 
a/src/buildtool/build_engine/target_map/target_map.cpp +++ b/src/buildtool/build_engine/target_map/target_map.cpp @@ -15,6 +15,7 @@ #include "src/buildtool/build_engine/target_map/target_map.hpp" #include +#include #include #include #include @@ -38,10 +39,13 @@ #include "src/buildtool/build_engine/target_map/built_in_rules.hpp" #include "src/buildtool/build_engine/target_map/utils.hpp" #include "src/buildtool/common/artifact_description.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/common/repository_config.hpp" #include "src/buildtool/common/statistics.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" +#include "src/buildtool/storage/storage.hpp" #include "src/utils/cpp/gsl.hpp" #include "src/utils/cpp/path.hpp" #include "src/utils/cpp/vector.hpp" @@ -206,9 +210,9 @@ struct TargetData { Expression::list_t targets{}; targets.reserve(nodes.size()); for (auto const& node_expr : nodes) { - targets.emplace_back(ExpressionPtr{BuildMaps::Base::EntityName{ + targets.emplace_back(BuildMaps::Base::EntityName{ BuildMaps::Base::AnonymousTarget{ - .rule_map = rule_map, .target_node = node_expr}}}); + .rule_map = rule_map, .target_node = node_expr}}); } target_exprs.emplace(field_name, targets); } @@ -229,12 +233,11 @@ auto NameTransitionedDeps( auto conf = effective_conf.Update(transitioned_target.config.Expr()) .Prune(analysis->Vars()); return BuildMaps::Target::ConfiguredTarget{transitioned_target.target, conf} - .ToShortString(); + .ToShortString(Evaluator::GetExpressionLogLimit()); } // Check if an object is contained an expression; to avoid tree-unfolding // the expression, we need to cache the values already computed. 
-// NOLINTNEXTLINE(misc-no-recursion) auto ExpressionContainsObject(std::unordered_map* map, const ExpressionPtr& object, const ExpressionPtr& exp) { @@ -313,7 +316,7 @@ void withDependencies( const BuildMaps::Base::UserRulePtr& rule, const TargetData::Ptr& data, const BuildMaps::Target::ConfiguredTarget& key, - std::unordered_map params, + std::unordered_map params, // NOLINT const BuildMaps::Target::TargetMap::SetterPtr& setter, const BuildMaps::Target::TargetMap::LoggerPtr& logger, const gsl::not_null& result_map) { @@ -349,8 +352,8 @@ void withDependencies( std::size_t const b, auto* deps) { std::transform( - dependency_values.begin() + a, - dependency_values.begin() + b, + dependency_values.begin() + static_cast(a), + dependency_values.begin() + static_cast(b), std::back_inserter(*deps), [](auto dep) { return (*(dep))->GraphInformation().Node(); }); }; @@ -760,7 +763,7 @@ void withDependencies( } blobs.emplace_back(data->String()); return ExpressionPtr{ArtifactDescription::CreateKnown( - ArtifactDigest::Create( + ArtifactDigestFactory::HashDataAs( context->storage->GetHashFunction(), data->String()), ObjectType::File)}; }}, @@ -781,7 +784,7 @@ void withDependencies( blobs.emplace_back(data->String()); return ExpressionPtr{ArtifactDescription::CreateKnown( - ArtifactDigest::Create( + ArtifactDigestFactory::HashDataAs( context->storage->GetHashFunction(), data->String()), ObjectType::Symlink)}; }}, @@ -1063,7 +1066,7 @@ void withRuleDefinition( std::unordered_map params; params.reserve(rule->ConfigFields().size() + rule->TargetFields().size() + rule->ImplicitTargetExps().size()); - for (auto field_name : rule->ConfigFields()) { + for (auto const& field_name : rule->ConfigFields()) { auto const& field_expression = data->config_exprs[field_name]; auto field_value = field_expression.Evaluate( param_config, {}, [&logger, &field_name](auto const& msg) { @@ -1194,10 +1197,10 @@ void withRuleDefinition( std::vector dependency_keys; std::vector transition_keys; - for 
(auto target_field_name : rule->TargetFields()) { + for (auto const& target_field_name : rule->TargetFields()) { auto const& deps_expression = data->target_exprs[target_field_name]; auto deps_names = deps_expression.Evaluate( - param_config, {}, [logger, target_field_name](auto const& msg) { + param_config, {}, [&logger, &target_field_name](auto const& msg) { (*logger)( fmt::format("While evaluating target parameter {}:\n{}", target_field_name, @@ -1235,7 +1238,7 @@ void withRuleDefinition( if (not target) { return; } - dep_target_exps.emplace_back(ExpressionPtr{*target}); + dep_target_exps.emplace_back(*target); } } else { @@ -1508,10 +1511,12 @@ void withTargetsFile( std::make_shared( [logger, key, rn](auto const& msg, auto fatal) { (*logger)( - fmt::format("While analysing {} target {}:\n{}", - rn.ToString(), - key.ToShortString(), - msg), + fmt::format( + "While analysing {} target {}:\n{}", + rn.ToString(), + key.ToShortString( + Evaluator::GetExpressionLogLimit()), + msg), fatal); }), result_map); @@ -1618,6 +1623,7 @@ void withTargetNode( } void TreeTarget( + const gsl::not_null& context, const BuildMaps::Target::ConfiguredTarget& key, const gsl::not_null& ts, const BuildMaps::Target::TargetMap::SubCallerPtr& subcaller, @@ -1625,20 +1631,22 @@ void TreeTarget( const BuildMaps::Target::TargetMap::LoggerPtr& logger, const gsl::not_null& result_map, const gsl::not_null& - directory_entries, - const gsl::not_null& stats) { + directory_entries) { const auto& target = key.target.GetNamedTarget(); const auto dir_name = std::filesystem::path{target.module} / target.name; - auto module_ = BuildMaps::Base::ModuleName{target.repository, dir_name}; + auto target_module = + BuildMaps::Base::ModuleName{target.repository, dir_name}; directory_entries->ConsumeAfterKeysReady( ts, - {module_}, - [setter, subcaller, target, key, result_map, logger, dir_name, stats]( + {target_module}, + [context, setter, subcaller, target, key, result_map, logger, dir_name]( auto values) { // 
expected values.size() == 1 const auto& dir_entries = *values[0]; - auto known_tree = dir_entries.AsKnownTree(target.repository); + auto known_tree = dir_entries.AsKnownTree( + context->storage->GetHashFunction().GetType(), + target.repository); if (known_tree) { auto tree = ExpressionPtr{ Expression::map_t{target.name, ExpressionPtr{*known_tree}}}; @@ -1664,7 +1672,7 @@ void TreeTarget( "Source tree reference for non-known tree {}", key.target.ToString()); }); - stats->IncrementTreesAnalysedCounter(); + context->statistics->IncrementTreesAnalysedCounter(); using BuildMaps::Target::ConfiguredTarget; @@ -1791,20 +1799,18 @@ void GlobTargetWithDirEntry( std::vector matches; for (auto const& x : dir.FilesIterator()) { if (fnmatch(pattern.c_str(), x.c_str(), 0) == 0) { - matches.emplace_back(BuildMaps::Base::EntityName{ - target.repository, - target.module, - x, - BuildMaps::Base::ReferenceType::kFile}); + matches.emplace_back(target.repository, + target.module, + x, + BuildMaps::Base::ReferenceType::kFile); } } for (auto const& x : dir.SymlinksIterator()) { if (fnmatch(pattern.c_str(), x.c_str(), 0) == 0) { - matches.emplace_back(BuildMaps::Base::EntityName{ - target.repository, - target.module, - x, - BuildMaps::Base::ReferenceType::kSymlink}); + matches.emplace_back(target.repository, + target.module, + x, + BuildMaps::Base::ReferenceType::kSymlink); } } source_target_map->ConsumeAfterKeysReady( @@ -1863,14 +1869,14 @@ auto CreateTargetMap( msg), fatal); }); - TreeTarget(key, + TreeTarget(context, + key, ts, subcaller, setter, wrapped_logger, result_map, - directory_entries_map, - context->statistics); + directory_entries_map); } else if (key.target.GetNamedTarget().reference_t == BuildMaps::Base::ReferenceType::kFile) { @@ -1966,7 +1972,8 @@ auto CreateTargetMap( [logger, key](auto msg, auto fatal) { (*logger)( fmt::format("While processing absent target {}:\n{}", - key.ToShortString(), + key.ToShortString( + Evaluator::GetExpressionLogLimit()), msg), fatal); }); 
diff --git a/src/buildtool/build_engine/target_map/target_map.hpp b/src/buildtool/build_engine/target_map/target_map.hpp index 11273ad0a..d411da1c1 100644 --- a/src/buildtool/build_engine/target_map/target_map.hpp +++ b/src/buildtool/build_engine/target_map/target_map.hpp @@ -54,4 +54,4 @@ auto IsBuiltInRule(nlohmann::json const& rule_type) -> bool; } // namespace BuildMaps::Target -#endif +#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_TARGET_MAP_HPP diff --git a/src/buildtool/build_engine/target_map/utils.hpp b/src/buildtool/build_engine/target_map/utils.hpp index 8c469bbee..9ffd6374d 100644 --- a/src/buildtool/build_engine/target_map/utils.hpp +++ b/src/buildtool/build_engine/target_map/utils.hpp @@ -77,4 +77,4 @@ auto createAction(const ActionDescription::outputs_t& output_files, const ExpressionPtr& inputs_exp) -> ActionDescription::Ptr; } // namespace BuildMaps::Target::Utils -#endif +#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_UTILS_HPP diff --git a/src/buildtool/common/TARGETS b/src/buildtool/common/TARGETS index df3c0e339..878d27875 100644 --- a/src/buildtool/common/TARGETS +++ b/src/buildtool/common/TARGETS @@ -19,16 +19,15 @@ , "deps": [ "clidefaults" , "retry_cli" + , ["@", "cli11", "", "cli11"] + , ["@", "fmt", "", "fmt"] + , ["@", "gsl", "", "gsl"] + , ["@", "json", "", "json"] , ["src/buildtool/build_engine/expression", "expression"] - , ["src/buildtool/compatibility", "compatibility"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/logging", "log_level"] - , ["src/buildtool/logging", "logging"] , ["src/buildtool/main", "build_utils"] , ["src/utils/cpp", "path"] - , ["@", "cli11", "", "cli11"] - , ["@", "json", "", "json"] - , ["@", "fmt", "", "fmt"] - , ["@", "gsl", "", "gsl"] ] , "stage": ["src", "buildtool", "common"] } @@ -39,6 +38,45 @@ , "proto": [["@", "bazel_remote_apis", "", "remote_execution_proto"]] , "stage": ["src", "buildtool", "common"] } +, "bazel_digest_factory": + { "type": ["@", "rules", "CC", 
"library"] + , "name": ["bazel_digest_factory"] + , "hdrs": ["bazel_digest_factory.hpp"] + , "srcs": ["bazel_digest_factory.cpp"] + , "deps": + [ "bazel_types" + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/crypto", "hash_info"] + , ["src/buildtool/file_system", "object_type"] + , ["src/utils/cpp", "expected"] + ] + , "private-deps": + [ "protocol_traits" + , ["@", "gsl", "", "gsl"] + , ["src/buildtool/crypto", "hasher"] + ] + , "stage": ["src", "buildtool", "common"] + } +, "artifact_digest_factory": + { "type": ["@", "rules", "CC", "library"] + , "name": ["artifact_digest_factory"] + , "hdrs": ["artifact_digest_factory.hpp"] + , "srcs": ["artifact_digest_factory.cpp"] + , "deps": + [ "common" + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/crypto", "hash_info"] + , ["src/buildtool/file_system", "object_type"] + , ["src/utils/cpp", "expected"] + ] + , "private-deps": + [ "bazel_digest_factory" + , "bazel_types" + , "protocol_traits" + , ["@", "gsl", "", "gsl"] + ] + , "stage": ["src", "buildtool", "common"] + } , "common": { "type": ["@", "rules", "CC", "library"] , "name": ["common"] @@ -50,15 +88,14 @@ , "statistics.hpp" ] , "deps": - [ "bazel_types" + [ "protocol_traits" + , ["@", "json", "", "json"] , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/crypto", "hash_info"] , ["src/buildtool/file_system", "object_type"] - , ["src/buildtool/compatibility", "compatibility"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] , ["src/utils/cpp", "hash_combine"] - , ["src/utils/cpp", "gsl"] - , ["@", "json", "", "json"] ] , "stage": ["src", "buildtool", "common"] } @@ -70,11 +107,15 @@ , "deps": [ "common" , ["@", "json", "", "json"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/file_system", "object_type"] - , ["src/buildtool/logging", "logging"] ] , "private-deps": - [["src/utils/cpp", "json"], ["src/buildtool/crypto", "hash_function"]] + [ "artifact_digest_factory" + , 
["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] + , ["src/utils/cpp", "json"] + ] , "stage": ["src", "buildtool", "common"] } , "action_description": @@ -82,9 +123,10 @@ , "name": ["action_description"] , "hdrs": ["action_description.hpp"] , "deps": - [ "common" - , "artifact_description" + [ "artifact_description" + , "common" , ["@", "json", "", "json"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] , ["src/utils/cpp", "json"] @@ -101,8 +143,6 @@ , "common" , ["@", "json", "", "json"] , ["src/buildtool/crypto", "hash_function"] - , ["src/buildtool/logging", "log_level"] - , ["src/buildtool/logging", "logging"] ] , "stage": ["src", "buildtool", "common"] } @@ -114,11 +154,12 @@ , "deps": [ ["@", "gsl", "", "gsl"] , ["@", "json", "", "json"] + , ["src/buildtool/common", "common"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/file_system", "file_root"] , ["src/buildtool/file_system", "git_cas"] , ["src/buildtool/multithreading", "atomic_value"] , ["src/buildtool/storage", "storage"] - , ["src/buildtool/crypto", "hash_function"] ] , "stage": ["src", "buildtool", "common"] , "private-deps": [["src/utils/automata", "dfa_minimizer"]] @@ -148,4 +189,22 @@ , ["src/buildtool/logging", "logging"] ] } +, "git_hashes_converter": + { "type": ["@", "rules", "CC", "library"] + , "name": ["git_hashes_converter"] + , "hdrs": ["git_hashes_converter.hpp"] + , "deps": + [ ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] + ] + , "stage": ["src", "buildtool", "common"] + } +, "protocol_traits": + { "type": ["@", "rules", "CC", "library"] + , "name": ["protocol_traits"] + , "hdrs": ["protocol_traits.hpp"] + , "deps": [["src/buildtool/crypto", "hash_function"]] + , "stage": ["src", "buildtool", "common"] + } } diff --git a/src/buildtool/common/action.hpp 
b/src/buildtool/common/action.hpp index 350a1d2be..4a4be1839 100644 --- a/src/buildtool/common/action.hpp +++ b/src/buildtool/common/action.hpp @@ -56,7 +56,9 @@ class Action { 1.0, std::map{}) {} - [[nodiscard]] auto Id() const noexcept -> ActionIdentifier { return id_; } + [[nodiscard]] auto Id() const noexcept -> ActionIdentifier const& { + return id_; + } [[nodiscard]] auto Command() && noexcept -> std::vector { return std::move(command_); @@ -67,10 +69,12 @@ class Action { return command_; } - [[nodiscard]] auto Cwd() const -> std::string { return cwd_; } + [[nodiscard]] auto Cwd() const noexcept -> std::string const& { + return cwd_; + } [[nodiscard]] auto Env() const& noexcept - -> std::map { + -> std::map const& { return env_; } @@ -78,15 +82,20 @@ class Action { return std::move(env_); } - [[nodiscard]] auto IsTreeAction() const -> bool { return is_tree_; } - [[nodiscard]] auto MayFail() const -> std::optional { + [[nodiscard]] auto IsTreeAction() const noexcept -> bool { + return is_tree_; + } + [[nodiscard]] auto MayFail() const noexcept + -> std::optional const& { return may_fail_; } - [[nodiscard]] auto NoCache() const -> bool { return no_cache_; } - [[nodiscard]] auto TimeoutScale() const -> double { return timeout_scale_; } + [[nodiscard]] auto NoCache() const noexcept -> bool { return no_cache_; } + [[nodiscard]] auto TimeoutScale() const noexcept -> double { + return timeout_scale_; + } [[nodiscard]] auto ExecutionProperties() const& noexcept - -> std::map { + -> std::map const& { return execution_properties_; } @@ -95,23 +104,24 @@ class Action { return std::move(execution_properties_); } - [[nodiscard]] static auto CreateTreeAction(ActionIdentifier const& id) - -> Action { + [[nodiscard]] static auto CreateTreeAction( + ActionIdentifier const& id) noexcept -> Action { return Action{id}; } private: - ActionIdentifier id_{}; - std::vector command_{}; - std::string cwd_{}; - std::map env_{}; + ActionIdentifier id_; + std::vector command_; + 
std::string cwd_; + std::map env_; bool is_tree_{}; - std::optional may_fail_{}; + std::optional may_fail_; bool no_cache_{}; double timeout_scale_{}; - std::map execution_properties_{}; + std::map execution_properties_; - explicit Action(ActionIdentifier id) : id_{std::move(id)}, is_tree_{true} {} + explicit Action(ActionIdentifier id) noexcept + : id_{std::move(id)}, is_tree_{true} {} }; #endif // INCLUDED_SRC_BUILDTOOL_COMMON_ACTION_HPP diff --git a/src/buildtool/common/action_description.hpp b/src/buildtool/common/action_description.hpp index 2ae7ad8ae..e51552ad7 100644 --- a/src/buildtool/common/action_description.hpp +++ b/src/buildtool/common/action_description.hpp @@ -26,6 +26,7 @@ #include "nlohmann/json.hpp" #include "src/buildtool/common/action.hpp" #include "src/buildtool/common/artifact_description.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" #include "src/utils/cpp/json.hpp" @@ -45,7 +46,8 @@ class ActionDescription { action_{std::move(action)}, inputs_{std::move(inputs)} {} - [[nodiscard]] static auto FromJson(std::string const& id, + [[nodiscard]] static auto FromJson(HashFunction::Type hash_type, + std::string const& id, nlohmann::json const& desc) noexcept -> std::optional { try { @@ -119,7 +121,8 @@ class ActionDescription { inputs_t inputs{}; for (auto const& [path, input_desc] : input.items()) { - auto artifact = ArtifactDescription::FromJson(input_desc); + auto artifact = + ArtifactDescription::FromJson(hash_type, input_desc); if (not artifact) { return std::nullopt; } @@ -193,7 +196,7 @@ class ActionDescription { return action_.Id(); } - [[nodiscard]] auto ToJson() const noexcept -> nlohmann::json { + [[nodiscard]] auto ToJson() const -> nlohmann::json { auto json = nlohmann::json{{"command", action_.Command()}}; if (not output_files_.empty()) { json["output"] = output_files_; diff --git a/src/buildtool/common/artifact.hpp 
b/src/buildtool/common/artifact.hpp index 56c05e2f6..403071a26 100644 --- a/src/buildtool/common/artifact.hpp +++ b/src/buildtool/common/artifact.hpp @@ -25,6 +25,9 @@ #include "nlohmann/json.hpp" #include "src/buildtool/common/artifact_digest.hpp" #include "src/buildtool/common/identifier.hpp" +#include "src/buildtool/common/protocol_traits.hpp" +#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/crypto/hash_info.hpp" #include "src/buildtool/file_system/object_type.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" @@ -35,7 +38,7 @@ class Artifact { public: struct ObjectInfo { - ArtifactDigest digest{}; + ArtifactDigest digest; ObjectType type{}; bool failed{}; @@ -48,14 +51,6 @@ class Artifact { return not(*this == other); } - [[nodiscard]] auto operator<(ObjectInfo const& other) const -> bool { - return (digest < other.digest) or - ((digest == other.digest) and (type < other.type)) or - ((digest == other.digest) and (type == other.type) and - (static_cast(failed) < - static_cast(other.failed))); - } - // Create string of the form '[hash:size:type]' [[nodiscard]] auto ToString(bool size_unknown = false) const noexcept -> std::string { @@ -70,13 +65,14 @@ class Artifact { // Create JSON of the form '{"id": "hash", "size": x, "file_type": "f"}' // As the failed property is only internal to a run, discard it. 
- [[nodiscard]] auto ToJson() const noexcept -> nlohmann::json { + [[nodiscard]] auto ToJson() const -> nlohmann::json { return {{"id", digest.hash()}, {"size", digest.size()}, {"file_type", std::string{ToChar(type)}}}; } - [[nodiscard]] static auto FromString(std::string const& s) noexcept + [[nodiscard]] static auto FromString(HashFunction::Type hash_type, + std::string const& s) noexcept -> std::optional { std::istringstream iss(s); std::string id{}; @@ -89,36 +85,42 @@ class Artifact { "failed parsing object info from string."); return std::nullopt; } + + std::size_t size = 0; try { - std::size_t size = std::stoul(size_str); - auto const& object_type = FromChar(*type.c_str()); - return ObjectInfo{ - .digest = - ArtifactDigest{id, size, IsTreeObject(object_type)}, - .type = object_type}; + size = std::stoul(size_str); } catch (std::out_of_range const& e) { Logger::Log(LogLevel::Debug, "size raised out_of_range exception."); + return std::nullopt; } catch (std::invalid_argument const& e) { Logger::Log(LogLevel::Debug, "size raised invalid_argument exception."); + return std::nullopt; } - return std::nullopt; + + auto const object_type = FromChar(*type.c_str()); + // TODO(design): The logic of ArtifactDigestFactory::Create is + // duplicated here to avoid a cyclic dependency. A better solution + // is advisable. 
+ auto hash_info = + HashInfo::Create(hash_type, + id, + ProtocolTraits::IsTreeAllowed(hash_type) and + IsTreeObject(object_type)); + if (not hash_info) { + Logger::Log( + LogLevel::Debug, "{}", std::move(hash_info).error()); + return std::nullopt; + } + return ObjectInfo{ + .digest = ArtifactDigest{*std::move(hash_info), size}, + .type = object_type}; } }; explicit Artifact(ArtifactIdentifier id) noexcept : id_{std::move(id)} {} - Artifact(Artifact const& other) noexcept - : id_{other.id_}, file_path_{other.file_path_}, repo_{other.repo_} { - object_info_ = other.object_info_; - } - - Artifact(Artifact&&) noexcept = default; - ~Artifact() noexcept = default; - auto operator=(Artifact const&) noexcept -> Artifact& = delete; - auto operator=(Artifact&&) noexcept -> Artifact& = default; - [[nodiscard]] auto Id() const& noexcept -> ArtifactIdentifier const& { return id_; } @@ -182,12 +184,10 @@ class Artifact { [[nodiscard]] static auto CreateKnownArtifact( std::string const& id, - std::string const& hash, - std::size_t size, + ArtifactDigest const& digest, ObjectType type, std::optional const& repo) noexcept -> Artifact { - return Artifact{ - id, {hash, size, IsTreeObject(type)}, type, false, repo}; + return Artifact{id, digest, type, false, repo}; } [[nodiscard]] static auto CreateActionArtifact( @@ -196,10 +196,10 @@ class Artifact { } private: - ArtifactIdentifier id_{}; - std::optional file_path_{}; - std::string repo_{}; - mutable std::optional object_info_{}; + ArtifactIdentifier id_; + std::optional file_path_; + std::string repo_; + mutable std::optional object_info_; Artifact(ArtifactIdentifier id, std::filesystem::path const& file_path, diff --git a/src/buildtool/common/artifact_description.cpp b/src/buildtool/common/artifact_description.cpp index 6a0bf7de1..b14514913 100644 --- a/src/buildtool/common/artifact_description.cpp +++ b/src/buildtool/common/artifact_description.cpp @@ -17,32 +17,33 @@ #include #include "nlohmann/json.hpp" -#include 
"src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" #include "src/utils/cpp/json.hpp" namespace { [[nodiscard]] auto DescribeLocalArtifact(std::filesystem::path const& src_path, - std::string const& repository) noexcept + std::string const& repository) -> nlohmann::json; -[[nodiscard]] auto DescribeKnownArtifact( - std::string const& blob_id, - std::size_t size, - ObjectType type = ObjectType::File) noexcept -> nlohmann::json; +[[nodiscard]] auto DescribeKnownArtifact(std::string const& blob_id, + std::size_t size, + ObjectType type = ObjectType::File) + -> nlohmann::json; [[nodiscard]] auto DescribeActionArtifact(std::string const& action_id, - std::string const& out_path) noexcept + std::string const& out_path) -> nlohmann::json; -[[nodiscard]] auto DescribeTreeArtifact(std::string const& tree_id) noexcept +[[nodiscard]] auto DescribeTreeArtifact(std::string const& tree_id) -> nlohmann::json; [[nodiscard]] auto CreateLocalArtifactDescription(nlohmann::json const& data) -> std::optional; -[[nodiscard]] auto CreateKnownArtifactDescription(nlohmann::json const& data) +[[nodiscard]] auto CreateKnownArtifactDescription(HashFunction::Type hash_type, + nlohmann::json const& data) -> std::optional; [[nodiscard]] auto CreateActionArtifactDescription(nlohmann::json const& data) @@ -79,7 +80,8 @@ auto ArtifactDescription::CreateTree(std::string tree_id) noexcept return ArtifactDescription{std::move(tree_id)}; } -auto ArtifactDescription::FromJson(nlohmann::json const& json) noexcept +auto ArtifactDescription::FromJson(HashFunction::Type hash_type, + nlohmann::json const& json) noexcept -> std::optional { try { auto const type = ExtractValueAs( @@ -107,7 +109,7 @@ auto ArtifactDescription::FromJson(nlohmann::json const& json) noexcept return CreateLocalArtifactDescription(*data); } if (*type == "KNOWN") { - return 
CreateKnownArtifactDescription(*data); + return CreateKnownArtifactDescription(hash_type, *data); } if (*type == "ACTION") { return CreateActionArtifactDescription(*data); @@ -127,30 +129,23 @@ auto ArtifactDescription::FromJson(nlohmann::json const& json) noexcept return std::nullopt; } -auto ArtifactDescription::ToJson() const noexcept -> nlohmann::json { - try { - if (std::holds_alternative(data_)) { - auto const& [path, repo] = std::get(data_); - return DescribeLocalArtifact(path.string(), repo); - } - if (std::holds_alternative(data_)) { - auto const& [digest, file_type, _] = std::get(data_); - return DescribeKnownArtifact( - digest.hash(), digest.size(), file_type); - } - if (std::holds_alternative(data_)) { - auto const& [action_id, path] = std::get(data_); - return DescribeActionArtifact(action_id, path); - } - if (std::holds_alternative(data_)) { - return DescribeTreeArtifact(std::get(data_)); - } - Logger::Log(LogLevel::Error, "Internal error, unknown artifact type"); - } catch (std::exception const& ex) { - Logger::Log(LogLevel::Error, - "Serializing to JSON failed with error:\n{}", - ex.what()); +auto ArtifactDescription::ToJson() const -> nlohmann::json { + if (std::holds_alternative(data_)) { + auto const& [path, repo] = std::get(data_); + return DescribeLocalArtifact(path.string(), repo); + } + if (std::holds_alternative(data_)) { + auto const& [digest, file_type, _] = std::get(data_); + return DescribeKnownArtifact(digest.hash(), digest.size(), file_type); + } + if (std::holds_alternative(data_)) { + auto const& [action_id, path] = std::get(data_); + return DescribeActionArtifact(action_id, path); } + if (std::holds_alternative(data_)) { + return DescribeTreeArtifact(std::get(data_)); + } + Logger::Log(LogLevel::Error, "Internal error, unknown artifact type"); Ensures(false); // unreachable return {}; } @@ -163,8 +158,7 @@ auto ArtifactDescription::ToArtifact() const noexcept -> Artifact { } if (std::holds_alternative(data_)) { auto const& [digest, 
file_type, repo] = std::get(data_); - return Artifact::CreateKnownArtifact( - id_, digest.hash(), digest.size(), file_type, repo); + return Artifact::CreateKnownArtifact(id_, digest, file_type, repo); } if (std::holds_alternative(data_) or std::holds_alternative(data_)) { @@ -209,8 +203,7 @@ auto ArtifactDescription::ComputeId(nlohmann::json const& desc) noexcept namespace { auto DescribeLocalArtifact(std::filesystem::path const& src_path, - std::string const& repository) noexcept - -> nlohmann::json { + std::string const& repository) -> nlohmann::json { return { {"type", "LOCAL"}, {"data", {{"path", src_path.string()}, {"repository", repository}}}}; @@ -218,7 +211,7 @@ auto DescribeLocalArtifact(std::filesystem::path const& src_path, auto DescribeKnownArtifact(std::string const& blob_id, std::size_t size, - ObjectType type) noexcept -> nlohmann::json { + ObjectType type) -> nlohmann::json { std::string const typestr{ToChar(type)}; return { {"type", "KNOWN"}, @@ -226,14 +219,12 @@ auto DescribeKnownArtifact(std::string const& blob_id, } auto DescribeActionArtifact(std::string const& action_id, - std::string const& out_path) noexcept - -> nlohmann::json { + std::string const& out_path) -> nlohmann::json { return {{"type", "ACTION"}, {"data", {{"id", action_id}, {"path", out_path}}}}; } -auto DescribeTreeArtifact(std::string const& tree_id) noexcept - -> nlohmann::json { +auto DescribeTreeArtifact(std::string const& tree_id) -> nlohmann::json { return {{"type", "TREE"}, {"data", {{"id", tree_id}}}}; } @@ -260,7 +251,8 @@ auto CreateLocalArtifactDescription(nlohmann::json const& data) return std::nullopt; } -auto CreateKnownArtifactDescription(nlohmann::json const& data) +auto CreateKnownArtifactDescription(HashFunction::Type hash_type, + nlohmann::json const& data) -> std::optional { auto const blob_id = ExtractValueAs(data, "id", [](std::string const& error) { @@ -285,9 +277,15 @@ auto CreateKnownArtifactDescription(nlohmann::json const& data) }); if 
(blob_id.has_value() and size.has_value() and file_type.has_value() and file_type->size() == 1) { - auto const& object_type = FromChar((*file_type)[0]); - ArtifactDigest digest{*blob_id, *size, IsTreeObject(object_type)}; - return ArtifactDescription::CreateKnown(std::move(digest), object_type); + auto const object_type = FromChar((*file_type)[0]); + + auto digest = ArtifactDigestFactory::Create( + hash_type, *blob_id, *size, IsTreeObject(object_type)); + if (not digest) { + return std::nullopt; + } + return ArtifactDescription::CreateKnown(*std::move(digest), + object_type); } return std::nullopt; } diff --git a/src/buildtool/common/artifact_description.hpp b/src/buildtool/common/artifact_description.hpp index 53d3adcc6..1858e41ee 100644 --- a/src/buildtool/common/artifact_description.hpp +++ b/src/buildtool/common/artifact_description.hpp @@ -23,6 +23,7 @@ #include "src/buildtool/common/artifact.hpp" #include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/object_type.hpp" class ArtifactDescription final { @@ -65,10 +66,11 @@ class ArtifactDescription final { return std::holds_alternative(data_); } - [[nodiscard]] static auto FromJson(nlohmann::json const& json) noexcept + [[nodiscard]] static auto FromJson(HashFunction::Type hash_type, + nlohmann::json const& json) noexcept -> std::optional; - [[nodiscard]] auto ToJson() const noexcept -> nlohmann::json; + [[nodiscard]] auto ToJson() const -> nlohmann::json; [[nodiscard]] auto ToArtifact() const noexcept -> Artifact; diff --git a/src/buildtool/common/artifact_digest.hpp b/src/buildtool/common/artifact_digest.hpp index 6b9ecb885..5e88232a2 100644 --- a/src/buildtool/common/artifact_digest.hpp +++ b/src/buildtool/common/artifact_digest.hpp @@ -16,116 +16,48 @@ #define INCLUDED_SRC_COMMON_ARTIFACT_DIGEST_HPP #include -#include #include #include // std::move -#include "gsl/gsl" -#include "src/buildtool/common/bazel_types.hpp" -#include 
"src/buildtool/compatibility/native_support.hpp" #include "src/buildtool/crypto/hash_function.hpp" -#include "src/buildtool/file_system/object_type.hpp" -#include "src/utils/cpp/gsl.hpp" +#include "src/buildtool/crypto/hash_info.hpp" #include "src/utils/cpp/hash_combine.hpp" -// Provides getter for size with convenient non-protobuf type. Contains a -// unprefixed hex string as hash. For communication with the execution API it -// can be cast to bazel_re::Digest which is the wire format that contains -// prefixed hashes in native mode. -class ArtifactDigest { - friend struct std::hash; +// Provides getter for size with convenient non-protobuf type. Contains an +// unprefixed hex string as hash. +class ArtifactDigest final { + friend class ArtifactDigestFactory; public: ArtifactDigest() noexcept = default; - explicit ArtifactDigest(bazel_re::Digest const& digest) noexcept - : size_{gsl::narrow(digest.size_bytes())}, - hash_{NativeSupport::Unprefix(digest.hash())}, - // Tree information is only stored in a digest in native mode and - // false in compatible mode. - is_tree_{NativeSupport::IsTree(digest.hash())} {} - - ArtifactDigest(std::string hash, std::size_t size, bool is_tree) noexcept - : size_{size}, - hash_{std::move(hash)}, - // Tree information is only stored in a digest in native mode and - // false in compatible mode. 
- is_tree_{not Compatibility::IsCompatible() and is_tree} { - ExpectsAudit(not NativeSupport::IsPrefixed(hash_)); - } + explicit ArtifactDigest(HashInfo hash_info, std::size_t size) noexcept + : hash_info_{std::move(hash_info)}, size_{size} {} [[nodiscard]] auto hash() const& noexcept -> std::string const& { - return hash_; + return hash_info_.Hash(); } [[nodiscard]] auto hash() && noexcept -> std::string { - return std::move(hash_); + return std::move(hash_info_).Hash(); } [[nodiscard]] auto size() const noexcept -> std::size_t { return size_; } - - // NOLINTNEXTLINE allow implicit casts - [[nodiscard]] operator bazel_re::Digest() const { - return CreateBazelDigest(hash_, size_, is_tree_); + [[nodiscard]] auto IsTree() const noexcept -> bool { + return hash_info_.IsTree(); } [[nodiscard]] auto operator==(ArtifactDigest const& other) const -> bool { - return std::equal_to{}(*this, other); - } - - template - [[nodiscard]] static auto Create(HashFunction hash_function, - std::string const& content) noexcept - -> ArtifactDigest { - if constexpr (kType == ObjectType::Tree) { - return ArtifactDigest{ - hash_function.HashTreeData(content).HexString(), - content.size(), - /*is_tree=*/true}; - } - else { - return ArtifactDigest{ - hash_function.HashBlobData(content).HexString(), - content.size(), - /*is_tree=*/false}; - } + return hash_info_ == other.hash_info_; } - template - [[nodiscard]] static auto CreateFromFile( - HashFunction hash_function, - std::filesystem::path const& path) noexcept - -> std::optional { - static constexpr bool kIsTree = IsTreeObject(kType); - auto const hash = kIsTree ? 
hash_function.HashTreeFile(path) - : hash_function.HashBlobFile(path); - if (hash) { - return ArtifactDigest{ - hash->first.HexString(), hash->second, kIsTree}; - } - return std::nullopt; - } - - [[nodiscard]] auto operator<(ArtifactDigest const& other) const -> bool { - return (hash_ < other.hash_) or - ((hash_ == other.hash_) and (static_cast(is_tree_) < - static_cast(other.is_tree_))); + [[nodiscard]] auto GetHashType() const& noexcept -> HashFunction::Type { + return hash_info_.HashType(); } private: - std::size_t size_{}; - std::string hash_{}; - bool is_tree_{}; - - [[nodiscard]] static auto CreateBazelDigest(std::string const& hash, - std::size_t size, - bool is_tree) - -> bazel_re::Digest { - bazel_re::Digest d; - d.set_hash(NativeSupport::Prefix(hash, is_tree)); - d.set_size_bytes(gsl::narrow(size)); - return d; - } + HashInfo hash_info_; + std::size_t size_ = 0; }; namespace std { @@ -134,8 +66,8 @@ struct hash { [[nodiscard]] auto operator()(ArtifactDigest const& digest) const noexcept -> std::size_t { std::size_t seed{}; - hash_combine(&seed, digest.hash_); - hash_combine(&seed, digest.is_tree_); + hash_combine(&seed, digest.hash()); + hash_combine(&seed, digest.IsTree()); return seed; } }; diff --git a/src/buildtool/common/artifact_digest_factory.cpp b/src/buildtool/common/artifact_digest_factory.cpp new file mode 100644 index 000000000..f3ea30c12 --- /dev/null +++ b/src/buildtool/common/artifact_digest_factory.cpp @@ -0,0 +1,52 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +#include "src/buildtool/common/artifact_digest_factory.hpp" + +#include "gsl/gsl" +#include "src/buildtool/common/bazel_digest_factory.hpp" +#include "src/buildtool/common/bazel_types.hpp" +#include "src/buildtool/common/protocol_traits.hpp" + +auto ArtifactDigestFactory::Create(HashFunction::Type hash_type, + std::string hash, + std::size_t size, + bool is_tree) noexcept + -> expected { + auto hash_info = + HashInfo::Create(hash_type, + std::move(hash), + ProtocolTraits::IsTreeAllowed(hash_type) and is_tree); + if (not hash_info) { + return unexpected{std::move(hash_info).error()}; + } + return ArtifactDigest{*std::move(hash_info), size}; +} + +auto ArtifactDigestFactory::FromBazel(HashFunction::Type hash_type, + bazel_re::Digest const& digest) noexcept + -> expected { + auto hash_info = BazelDigestFactory::ToHashInfo(hash_type, digest); + if (not hash_info) { + return unexpected{std::move(hash_info).error()}; + } + return ArtifactDigest{*std::move(hash_info), + static_cast(digest.size_bytes())}; +} + +auto ArtifactDigestFactory::ToBazel(ArtifactDigest const& digest) + -> bazel_re::Digest { + return BazelDigestFactory::Create(digest.hash_info_, + gsl::narrow(digest.size_)); +} diff --git a/src/buildtool/common/artifact_digest_factory.hpp b/src/buildtool/common/artifact_digest_factory.hpp new file mode 100644 index 000000000..51425eabf --- /dev/null +++ b/src/buildtool/common/artifact_digest_factory.hpp @@ -0,0 +1,102 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef INCLUDED_SRC_BUILDTOOL_COMMON_ARTIFACT_DIGEST_FACTORY_HPP +#define INCLUDED_SRC_BUILDTOOL_COMMON_ARTIFACT_DIGEST_FACTORY_HPP + +#include +#include +#include +#include +#include + +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/crypto/hash_info.hpp" +#include "src/buildtool/file_system/object_type.hpp" +#include "src/utils/cpp/expected.hpp" + +namespace build::bazel::remote::execution::v2 { +class Digest; +} +namespace bazel_re = build::bazel::remote::execution::v2; + +class ArtifactDigestFactory final { + public: + /// \brief Create ArtifactDigest from plain hash. + /// \param hash_type Type of the hash function that was used for creation + /// of the hash + /// \param hash Hexadecimal plain hash + /// \param size Size of the content + /// \return A valid ArtifactDigest on success or an error message if + /// validation fails. + [[nodiscard]] static auto Create(HashFunction::Type hash_type, + std::string hash, + std::size_t size, + bool is_tree) noexcept + -> expected; + + /// \brief Create ArtifactDigest from bazel_re::Digest + /// \param hash_type Type of the hash function that was used for creation of + /// the hash + /// \param digest Digest to be converted + /// \return A valid ArtifactDigest on success or an error message if + /// validation fails. + [[nodiscard]] static auto FromBazel(HashFunction::Type hash_type, + bazel_re::Digest const& digest) noexcept + -> expected; + + /// \brief Convert ArtifactDigest to bazel_re::Digest. 
Throws an exception + /// on a narrow conversion error. + /// \param digest Digest to be converted. + /// \return A valid bazel_re::Digest + [[nodiscard]] static auto ToBazel(ArtifactDigest const& digest) + -> bazel_re::Digest; + + /// \brief Hash content using hash function and return a valid + /// ArtifactDigest + /// \tparam kType Type of the hashing algorithm to be used + /// \param hash_function Hash function to be used for hashing + /// \param content Content to be hashed + /// \return The digest of the content + template + [[nodiscard]] static auto HashDataAs(HashFunction hash_function, + std::string const& content) noexcept + -> ArtifactDigest { + auto hash_info = + HashInfo::HashData(hash_function, content, IsTreeObject(kType)); + return ArtifactDigest{std::move(hash_info), content.size()}; + } + + /// \brief Hash file using hash function and return a valid ArtifactDigest + /// \tparam kType Type of the hashing algorithm to be used + /// \param hash_function Hash function to be used for hashing + /// \param content Content to be hashed + /// \return The digest of the file + template + [[nodiscard]] static auto HashFileAs( + HashFunction hash_function, + std::filesystem::path const& path) noexcept + -> std::optional { + auto hash_info = + HashInfo::HashFile(hash_function, path, IsTreeObject(kType)); + if (not hash_info) { + return std::nullopt; + } + return ArtifactDigest{std::move(hash_info->first), + static_cast(hash_info->second)}; + } +}; + +#endif // INCLUDED_SRC_BUILDTOOL_COMMON_ARTIFACT_DIGEST_FACTORY_HPP diff --git a/src/buildtool/common/bazel_digest_factory.cpp b/src/buildtool/common/bazel_digest_factory.cpp new file mode 100644 index 000000000..3a1f8519c --- /dev/null +++ b/src/buildtool/common/bazel_digest_factory.cpp @@ -0,0 +1,50 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "src/buildtool/common/bazel_digest_factory.hpp" + +#include + +#include "src/buildtool/common/protocol_traits.hpp" +#include "src/buildtool/crypto/hasher.hpp" + +auto BazelDigestFactory::Create(HashInfo const& hash_info, + std::int64_t size) noexcept + -> bazel_re::Digest { + auto hash = ProtocolTraits::IsNative(hash_info.HashType()) + ? Prefix(hash_info.Hash(), hash_info.IsTree()) + : hash_info.Hash(); + + bazel_re::Digest digest{}; + digest.set_hash(std::move(hash)); + digest.set_size_bytes(size); + return digest; +} + +auto BazelDigestFactory::ToHashInfo(HashFunction::Type hash_type, + bazel_re::Digest const& digest) noexcept + -> expected { + bool const is_prefixed = IsPrefixed(hash_type, digest.hash()); + + auto hash = is_prefixed ? Unprefix(digest.hash()) : digest.hash(); + auto const is_tree = is_prefixed and digest.hash().starts_with(kTreeTag); + return HashInfo::Create(hash_type, std::move(hash), is_tree); +} + +auto BazelDigestFactory::IsPrefixed(HashFunction::Type hash_type, + std::string const& hash) noexcept -> bool { + auto const tagged_length = + HashFunction{hash_type}.MakeHasher().GetHashLength() + kTagLength; + return hash.size() == tagged_length; +} diff --git a/src/buildtool/common/bazel_digest_factory.hpp b/src/buildtool/common/bazel_digest_factory.hpp new file mode 100644 index 000000000..67ee35cb9 --- /dev/null +++ b/src/buildtool/common/bazel_digest_factory.hpp @@ -0,0 +1,83 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef INCLUDED_SRC_BUILDTOOL_COMMON_BAZEL_DIGEST_FACTORY_HPP +#define INCLUDED_SRC_BUILDTOOL_COMMON_BAZEL_DIGEST_FACTORY_HPP + +#include +#include +#include + +#include "gsl/gsl" +#include "src/buildtool/common/bazel_types.hpp" +#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/crypto/hash_info.hpp" +#include "src/buildtool/file_system/object_type.hpp" +#include "src/utils/cpp/expected.hpp" + +class BazelDigestFactory final { + static constexpr auto kBlobTag = "62"; + static constexpr auto kTreeTag = "74"; + static constexpr std::size_t kTagLength = 2; + + public: + /// \brief Create bazel_re::Digest from preliminarily validated data. + /// \param hash_data Validated hash + /// \param size Size of the content + [[nodiscard]] static auto Create(HashInfo const& hash_info, + std::int64_t size) noexcept + -> bazel_re::Digest; + + /// \brief Validate bazel_re::Digest + /// \param hash_type Type of the hash function that was used for creation + /// of the hash + /// \param digest Digest to be validated + /// \return Validated hash on success or an error message on failure. 
+ [[nodiscard]] static auto ToHashInfo( + HashFunction::Type hash_type, + bazel_re::Digest const& digest) noexcept + -> expected; + + /// \brief Hash content using hash function and return a valid + /// bazel_re::Digest + /// \tparam kType Type of the hashing algorithm to be used + /// \param hash_function Hash function to be used for hashing + /// \param content Content to be hashed + /// \return The digest of the content + template + [[nodiscard]] static auto HashDataAs(HashFunction hash_function, + std::string const& content) + -> bazel_re::Digest { + auto const hash_info = + HashInfo::HashData(hash_function, content, IsTreeObject(kType)); + return Create(hash_info, gsl::narrow(content.size())); + } + + private: + [[nodiscard]] static auto Prefix(std::string const& hash, + bool is_tree) noexcept -> std::string { + return (is_tree ? kTreeTag : kBlobTag) + hash; + } + + [[nodiscard]] static auto Unprefix(std::string const& hash) noexcept + -> std::string { + return hash.substr(kTagLength); + } + + [[nodiscard]] static auto IsPrefixed(HashFunction::Type hash_type, + std::string const& hash) noexcept + -> bool; +}; + +#endif // INCLUDED_SRC_BUILDTOOL_COMMON_BAZEL_DIGEST_FACTORY_HPP diff --git a/src/buildtool/common/cli.hpp b/src/buildtool/common/cli.hpp index 79beaadbb..7f9353649 100644 --- a/src/buildtool/common/cli.hpp +++ b/src/buildtool/common/cli.hpp @@ -33,7 +33,7 @@ #include "src/buildtool/build_engine/expression/evaluator.hpp" #include "src/buildtool/common/clidefaults.hpp" #include "src/buildtool/common/retry_cli.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/main/build_utils.hpp" #include "src/utils/cpp/path.hpp" @@ -43,36 +43,37 @@ inline constexpr auto kMaxOpCacheExponent = std::uint8_t{63}; /// \brief Arguments common to all commands. 
struct CommonArguments { - std::optional workspace_root{}; - std::optional repository_config{}; - std::optional main{}; + std::optional workspace_root; + std::optional repository_config; + std::optional main; std::size_t jobs{std::max(1U, std::thread::hardware_concurrency())}; }; struct LogArguments { - std::vector log_files{}; + std::vector log_files; LogLevel log_limit{kDefaultLogLevel}; - std::optional restrict_stderr_log_limit{}; + std::optional restrict_stderr_log_limit; bool plain_log{false}; bool log_append{false}; }; /// \brief Arguments required for analysing targets. struct AnalysisArguments { - std::optional expression_log_limit{}; - std::vector defines{}; - std::filesystem::path config_file{}; - std::optional target{}; - std::optional request_action_input{}; - std::optional target_file_name{}; - std::optional rule_file_name{}; - std::optional expression_file_name{}; - std::optional target_root{}; - std::optional rule_root{}; - std::optional expression_root{}; - std::optional graph_file{}; - std::optional artifacts_to_build_file{}; - std::optional serve_errors_file{}; + std::optional expression_log_limit; + std::vector defines; + std::filesystem::path config_file; + std::optional target; + std::optional request_action_input; + std::optional target_file_name; + std::optional rule_file_name; + std::optional expression_file_name; + std::optional target_root; + std::optional rule_root; + std::optional expression_root; + std::optional graph_file; + std::optional graph_file_plain; + std::optional artifacts_to_build_file; + std::optional serve_errors_file; }; /// \brief Arguments required for describing targets/rules. @@ -98,10 +99,10 @@ struct DiagnosticArguments { /// \brief Arguments required for specifying build endpoint. 
struct EndpointArguments { - std::optional local_root{}; + std::optional local_root; std::optional remote_execution_address; std::vector platform_properties; - std::optional remote_execution_dispatch_file{}; + std::optional remote_execution_dispatch_file; }; /// \brief Arguments required for building. @@ -122,21 +123,21 @@ struct TCArguments { /// \brief Arguments required for staging. struct StageArguments { - std::filesystem::path output_dir{}; + std::filesystem::path output_dir; bool remember{false}; }; /// \brief Arguments required for rebuilding. struct RebuildArguments { - std::optional cache_endpoint{}; - std::optional dump_flaky{}; + std::optional cache_endpoint; + std::optional dump_flaky; }; /// \brief Arguments for fetching artifacts from CAS. struct FetchArguments { - std::string object_id{}; - std::optional output_path{}; - std::optional sub_path{}; + std::string object_id; + std::optional output_path; + std::optional sub_path; bool remember{false}; bool raw_tree{}; bool archive{}; @@ -144,9 +145,9 @@ struct FetchArguments { /// \brief Arguments required for running from graph file. struct GraphArguments { - nlohmann::json artifacts{}; - std::filesystem::path graph_file{}; - std::optional git_cas{}; + nlohmann::json artifacts; + std::filesystem::path graph_file; + std::optional git_cas; }; // Arguments for authentication methods. 
@@ -177,10 +178,10 @@ struct ServiceArguments { }; struct ServeArguments { - std::filesystem::path config{}; - std::optional remote_serve_address{}; + std::filesystem::path config; + std::optional remote_serve_address; // repositories populated from just-serve config file - std::vector repositories{}; + std::vector repositories; }; struct GcArguments { @@ -188,10 +189,14 @@ struct GcArguments { }; struct ToAddArguments { - std::filesystem::path location{}; + std::filesystem::path location; bool follow_symlinks{}; }; +struct ProtocolArguments final { + HashFunction::Type hash_type = HashFunction::Type::GitSHA1; +}; + static inline auto SetupCommonArguments( gsl::not_null const& app, gsl::not_null const& clargs) { @@ -339,6 +344,11 @@ static inline auto SetupAnalysisArguments( clargs->graph_file, "File path for writing the action graph description to.") ->type_name("PATH"); + app->add_option("--dump-plain-graph", + clargs->graph_file_plain, + "File path for writing the action graph description " + "(without origins) to.") + ->type_name("PATH"); app->add_option("--dump-artifacts-to-build", clargs->artifacts_to_build_file, "File path for writing the artifacts to build to.") @@ -682,11 +692,14 @@ static inline auto SetupGraphArguments( "missing KNOWN artifacts."); } -static inline auto SetupCompatibilityArguments( - gsl::not_null const& app) { +static inline auto SetupProtocolArguments( + gsl::not_null const& app, + gsl::not_null const& protocol) { app->add_flag_function( "--compatible", - [](auto /*unused*/) { Compatibility::SetCompatible(); }, + [protocol](auto /*unused*/) { + protocol->hash_type = HashFunction::Type::PlainSHA256; + }, "At increased computational effort, be compatible with the original " "remote build execution protocol. 
As the change affects identifiers, " "the flag must be used consistently for all related invocations."); diff --git a/src/buildtool/common/git_hashes_converter.hpp b/src/buildtool/common/git_hashes_converter.hpp new file mode 100644 index 000000000..52fd12838 --- /dev/null +++ b/src/buildtool/common/git_hashes_converter.hpp @@ -0,0 +1,84 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef INCLUDED_SRC_BUILDTOOL_COMMON_GIT_HASHES_CONVERTER_HPP +#define INCLUDED_SRC_BUILDTOOL_COMMON_GIT_HASHES_CONVERTER_HPP + +#include //std::unique_lock +#include +#include +#include +#include +#include + +#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/logging/log_level.hpp" +#include "src/buildtool/logging/logger.hpp" + +class GitHashesConverter final { + using git_hash = std::string; + using compat_hash = std::string; + using git_repo = std::string; + using GitToCompatibleMap = std::unordered_map; + using CompatibleToGitMap = + std::unordered_map>; + + public: + [[nodiscard]] static auto Instance() noexcept -> GitHashesConverter& { + static GitHashesConverter instance; + return instance; + } + + [[nodiscard]] auto RegisterGitEntry(std::string const& git_hash, + std::string const& data, + std::string const& repo) + -> compat_hash { + { + std::shared_lock lock{mutex_}; + auto it = git_to_compatible_.find(git_hash); + if (it != git_to_compatible_.end()) { + return it->second; + 
} + } + // This is only used in compatible mode. + HashFunction const hash_function{HashFunction::Type::PlainSHA256}; + auto compatible_hash = hash_function.PlainHashData(data).HexString(); + std::unique_lock lock{mutex_}; + git_to_compatible_[git_hash] = compatible_hash; + compatible_to_git_[compatible_hash] = {git_hash, repo}; + return compatible_hash; + } + + [[nodiscard]] auto GetGitEntry(std::string const& compatible_hash) + -> std::optional> { + std::shared_lock lock{mutex_}; + auto it = compatible_to_git_.find(compatible_hash); + if (it != compatible_to_git_.end()) { + return it->second; + } + Logger::Log(LogLevel::Warning, + "Unable to get the git-sha1 code associated to {}", + compatible_hash); + return std::nullopt; + } + + private: + explicit GitHashesConverter() noexcept = default; + + GitToCompatibleMap git_to_compatible_; + CompatibleToGitMap compatible_to_git_; + std::shared_mutex mutex_; +}; + +#endif // INCLUDED_SRC_BUILDTOOL_COMMON_GIT_HASHES_CONVERTER_HPP \ No newline at end of file diff --git a/src/buildtool/execution_api/common/bytestream_common.hpp b/src/buildtool/common/protocol_traits.hpp similarity index 52% rename from src/buildtool/execution_api/common/bytestream_common.hpp rename to src/buildtool/common/protocol_traits.hpp index 5c7cf3cb4..eca2a807b 100644 --- a/src/buildtool/execution_api/common/bytestream_common.hpp +++ b/src/buildtool/common/protocol_traits.hpp @@ -12,12 +12,21 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-// settings common for server and client -#ifndef BYTESTREAM_COMMON_HPP -#define BYTESTREAM_COMMON_HPP -#include +#ifndef INCLUDED_SRC_BUILDTOOL_COMMON_PROTOCOL_TRAITS_HPP +#define INCLUDED_SRC_BUILDTOOL_COMMON_PROTOCOL_TRAITS_HPP -// Chunk size for uploads (default size used by BuildBarn) -constexpr static std::size_t kChunkSize = 64 * 1024; +#include "src/buildtool/crypto/hash_function.hpp" -#endif +class ProtocolTraits final { + public: + static constexpr auto IsNative(HashFunction::Type hash_type) noexcept + -> bool { + return hash_type == HashFunction::Type::GitSHA1; + } + + static constexpr auto IsTreeAllowed(HashFunction::Type hash_type) noexcept + -> bool { + return IsNative(hash_type); + } +}; +#endif // INCLUDED_SRC_BUILDTOOL_COMMON_PROTOCOL_TRAITS_HPP diff --git a/src/buildtool/common/remote/TARGETS b/src/buildtool/common/remote/TARGETS index 5056a848c..9bb840b77 100644 --- a/src/buildtool/common/remote/TARGETS +++ b/src/buildtool/common/remote/TARGETS @@ -3,12 +3,12 @@ , "name": ["client_common"] , "hdrs": ["client_common.hpp"] , "deps": - [ ["@", "fmt", "", "fmt"] + [ "port" + , ["@", "fmt", "", "fmt"] , ["@", "grpc", "", "grpc++"] , ["@", "gsl", "", "gsl"] , ["src/buildtool/auth", "auth"] - , ["src/buildtool/common", "common"] - , "port" + , ["src/buildtool/common", "bazel_types"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] ] @@ -31,10 +31,10 @@ , "name": ["remote_common"] , "hdrs": ["remote_common.hpp"] , "deps": - [ ["@", "fmt", "", "fmt"] + [ "port" + , ["@", "fmt", "", "fmt"] , ["@", "json", "", "json"] , ["src/utils/cpp", "expected"] - , "port" ] , "stage": ["src", "buildtool", "common", "remote"] } @@ -57,9 +57,9 @@ , "deps": [ "retry_config" , ["@", "grpc", "", "grpc++"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] ] , "stage": ["src", "buildtool", "common", "remote"] - , "private-deps": [["src/buildtool/logging", "log_level"]] } } diff --git 
a/src/buildtool/common/remote/client_common.hpp b/src/buildtool/common/remote/client_common.hpp index 58c194587..aecbf02f8 100644 --- a/src/buildtool/common/remote/client_common.hpp +++ b/src/buildtool/common/remote/client_common.hpp @@ -72,11 +72,11 @@ s.message()); } -template +template [[maybe_unused]] static inline void LogStatus( Logger const* logger, LogLevel level, - T_Status const& s, + TStatus const& s, std::optional const& prefix = std::nullopt) noexcept { auto msg = [&s, &prefix]() { return StatusString(s, prefix); }; if (logger == nullptr) { diff --git a/src/buildtool/common/remote/port.hpp b/src/buildtool/common/remote/port.hpp index 161d8c3b7..01d28e8dc 100644 --- a/src/buildtool/common/remote/port.hpp +++ b/src/buildtool/common/remote/port.hpp @@ -27,8 +27,8 @@ #include "src/utils/cpp/type_safe_arithmetic.hpp" // Port -struct PortTag : type_safe_arithmetic_tag {}; -using Port = type_safe_arithmetic; +struct PortTag : TypeSafeArithmeticTag {}; +using Port = TypeSafeArithmetic; [[nodiscard]] static auto ParsePort(int const port_num) noexcept -> std::optional { diff --git a/src/buildtool/common/remote/remote_common.hpp b/src/buildtool/common/remote/remote_common.hpp index 9df0db5c4..b9bc926a1 100644 --- a/src/buildtool/common/remote/remote_common.hpp +++ b/src/buildtool/common/remote/remote_common.hpp @@ -28,8 +28,8 @@ #include "src/utils/cpp/expected.hpp" struct ServerAddress { - std::string host{}; - Port port{}; + std::string host; + Port port; [[nodiscard]] auto ToJson() const noexcept -> nlohmann::json { return nlohmann::json( @@ -86,7 +86,7 @@ using DispatchEndpoint = std::pair; dispatch.dump())}; } for (auto const& entry : dispatch) { - if (not(entry.is_array() and entry.size() == 2)) { + if (not entry.is_array() or entry.size() != 2) { return unexpected{ fmt::format("Endpoint configuration has to be a list of " "pairs, but found entry {}", diff --git a/src/buildtool/common/remote/retry.cpp b/src/buildtool/common/remote/retry.cpp index 
00d275cd6..8c97bf38b 100644 --- a/src/buildtool/common/remote/retry.cpp +++ b/src/buildtool/common/remote/retry.cpp @@ -15,6 +15,7 @@ #include "src/buildtool/common/remote/retry.hpp" #ifndef BOOTSTRAP_BUILD_TOOL +#include #include #include @@ -22,7 +23,8 @@ auto WithRetry(CallableReturningRetryResponse const& f, RetryConfig const& retry_config, - Logger const& logger) noexcept -> bool { + Logger const& logger, + LogLevel fatal_log_level) noexcept -> bool { try { auto const& attempts = retry_config.GetMaxAttempts(); for (auto attempt = 1U; attempt <= attempts; ++attempt) { @@ -32,7 +34,7 @@ auto WithRetry(CallableReturningRetryResponse const& f, } if (fatal) { if (error_msg) { - logger.Emit(LogLevel::Error, "{}", *error_msg); + logger.Emit(fatal_log_level, "{}", *error_msg); } return false; } @@ -51,7 +53,7 @@ auto WithRetry(CallableReturningRetryResponse const& f, } else { if (error_msg) { - logger.Emit(LogLevel::Error, + logger.Emit(fatal_log_level, "After {} attempts: {}", attempt, *error_msg); @@ -59,7 +61,8 @@ auto WithRetry(CallableReturningRetryResponse const& f, } } } catch (...) { - logger.Emit(LogLevel::Error, "WithRetry: caught unknown exception"); + logger.Emit(std::min(fatal_log_level, LogLevel::Warning), + "WithRetry: caught unknown exception"); } return false; } diff --git a/src/buildtool/common/remote/retry.hpp b/src/buildtool/common/remote/retry.hpp index 6b8278a40..9387ef2a8 100644 --- a/src/buildtool/common/remote/retry.hpp +++ b/src/buildtool/common/remote/retry.hpp @@ -24,6 +24,7 @@ #include "grpcpp/grpcpp.h" #include "src/buildtool/common/remote/retry_config.hpp" +#include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" // Utility class to help detecting when exit the retry loop. This class can be @@ -51,9 +52,11 @@ using CallableReturningRetryResponse = std::function; /// \brief Calls a function with a retry strategy using a backoff algorithm. 
/// Retry loop interrupts when one of the two members of the function's returned /// RetryResponse object is set to true. -[[nodiscard]] auto WithRetry(CallableReturningRetryResponse const& f, - RetryConfig const& retry_config, - Logger const& logger) noexcept -> bool; +[[nodiscard]] auto WithRetry( + CallableReturningRetryResponse const& f, + RetryConfig const& retry_config, + Logger const& logger, + LogLevel fatal_log_level = LogLevel::Error) noexcept -> bool; using CallableReturningGrpcStatus = std::function; diff --git a/src/buildtool/common/remote/retry_config.hpp b/src/buildtool/common/remote/retry_config.hpp index 91c8b75e6..ed4b01084 100644 --- a/src/buildtool/common/remote/retry_config.hpp +++ b/src/buildtool/common/remote/retry_config.hpp @@ -78,7 +78,7 @@ class RetryConfig final { static std::mutex mutex; static std::mt19937 rng{std::random_device{}()}; try { - dist_type dist{0, backoff * 3}; + dist_type dist{0, 3UL * backoff}; std::unique_lock lock(mutex); return dist(rng); } catch (...) 
{ diff --git a/src/buildtool/common/repository_config.cpp b/src/buildtool/common/repository_config.cpp index caf0d2bfc..78f7b8a47 100644 --- a/src/buildtool/common/repository_config.cpp +++ b/src/buildtool/common/repository_config.cpp @@ -36,18 +36,16 @@ auto RepositoryConfig::RepositoryInfo::BaseContentDescription() const auto RepositoryConfig::RepositoryKey(Storage const& storage, std::string const& repo) const noexcept - -> std::optional { + -> std::optional { auto const unique = DeduplicateRepo(repo, storage.GetHashFunction()); if (auto const* data = Data(unique)) { // compute key only once (thread-safe) return data->key.SetOnceAndGet( - [this, &storage, &unique]() -> std::optional { + [this, &storage, &unique]() -> std::optional { if (auto graph = BuildGraphForRepository( unique, storage.GetHashFunction())) { auto const& cas = storage.CAS(); - if (auto digest = cas.StoreBlob(graph->dump(2))) { - return ArtifactDigest{*digest}.hash(); - } + return cas.StoreBlob(graph->dump(2)); } return std::nullopt; }); @@ -113,7 +111,6 @@ auto RepositoryConfig::BuildGraphForRepository(std::string const& repo, // unique id if it and all its dependencies are content-fixed or return // std::nullopt otherwise. Recursion immediately aborts on traversing the first // non-content-fixed repository. 
-// NOLINTNEXTLINE(misc-no-recursion) auto RepositoryConfig::AddToGraphAndGetId( gsl::not_null const& graph, gsl::not_null const& id_counter, diff --git a/src/buildtool/common/repository_config.hpp b/src/buildtool/common/repository_config.hpp index 34918d517..25d5ce4f7 100644 --- a/src/buildtool/common/repository_config.hpp +++ b/src/buildtool/common/repository_config.hpp @@ -25,6 +25,7 @@ #include "gsl/gsl" #include "nlohmann/json.hpp" +#include "src/buildtool/common/artifact_digest.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/file_root.hpp" #include "src/buildtool/file_system/git_cas.hpp" @@ -39,7 +40,7 @@ class RepositoryConfig { FileRoot target_root{workspace_root}; FileRoot rule_root{target_root}; FileRoot expression_root{rule_root}; - std::map name_mapping{}; + std::map name_mapping; std::string target_file_name{"TARGETS"}; std::string rule_file_name{"RULES"}; std::string expression_file_name{"EXPRESSIONS"}; @@ -140,7 +141,7 @@ class RepositoryConfig { // std::nullopt otherwise. [[nodiscard]] auto RepositoryKey(Storage const& storage, std::string const& repo) const noexcept - -> std::optional; + -> std::optional; // used for testing void Reset() { @@ -157,24 +158,21 @@ class RepositoryConfig { // Info structure (roots, file names, bindings) RepositoryInfo info{}; // Base description if content-fixed - std::optional base_desc{}; + std::optional base_desc; // Cache key if content-fixed - AtomicValue> key{}; + AtomicValue> key; }; std::unordered_map repos_; GitCASPtr git_cas_; - AtomicValue duplicates_{}; + AtomicValue duplicates_; template [[nodiscard]] auto Get(std::string const& repo, std::function const& getter) const noexcept -> T const* { if (auto const* info = Info(repo)) { - try { // satisfy clang-tidy's bugprone-exception-escape - return getter(*info); - } catch (...) 
{ - } + return getter(*info); } return nullptr; } diff --git a/src/buildtool/common/retry_cli.hpp b/src/buildtool/common/retry_cli.hpp index 53076252e..5e6417ee6 100644 --- a/src/buildtool/common/retry_cli.hpp +++ b/src/buildtool/common/retry_cli.hpp @@ -22,9 +22,9 @@ /// \brief Arguments required for tuning the retry strategy. struct RetryArguments { - std::optional max_attempts{}; - std::optional initial_backoff_seconds{}; - std::optional max_backoff_seconds{}; + std::optional max_attempts; + std::optional initial_backoff_seconds; + std::optional max_backoff_seconds; }; static inline void SetupRetryArguments( @@ -48,4 +48,4 @@ static inline void SetupRetryArguments( "the resources that survived the outage. (Default: 60)"); } -#endif +#endif // INCLUDED_SRC_BUILDTOOL_COMMON_RETRY_CLI_HPP diff --git a/src/buildtool/common/statistics.hpp b/src/buildtool/common/statistics.hpp index ed2c3afb7..7bbd8d8ba 100644 --- a/src/buildtool/common/statistics.hpp +++ b/src/buildtool/common/statistics.hpp @@ -91,19 +91,19 @@ class Statistics { } private: - std::atomic num_actions_queued_{}; - std::atomic num_actions_executed_{}; - std::atomic num_actions_cached_{}; - std::atomic num_actions_flaky_{}; - std::atomic num_actions_flaky_tainted_{}; - std::atomic num_rebuilt_actions_missing_{}; - std::atomic num_rebuilt_actions_compared_{}; - std::atomic num_exports_cached_{}; - std::atomic num_exports_uncached_{}; - std::atomic num_exports_not_eligible_{}; - std::atomic num_exports_found_{}; - std::atomic num_exports_served_{}; - std::atomic num_trees_analysed_{}; + std::atomic num_actions_queued_; + std::atomic num_actions_executed_; + std::atomic num_actions_cached_; + std::atomic num_actions_flaky_; + std::atomic num_actions_flaky_tainted_; + std::atomic num_rebuilt_actions_missing_; + std::atomic num_rebuilt_actions_compared_; + std::atomic num_exports_cached_; + std::atomic num_exports_uncached_; + std::atomic num_exports_not_eligible_; + std::atomic num_exports_found_; + 
std::atomic num_exports_served_; + std::atomic num_trees_analysed_; }; #endif // INCLUDED_SRC_BUILDTOOL_COMMON_STATISTICS_HPP diff --git a/src/buildtool/common/tree.hpp b/src/buildtool/common/tree.hpp index 53333901c..55922346f 100644 --- a/src/buildtool/common/tree.hpp +++ b/src/buildtool/common/tree.hpp @@ -54,13 +54,15 @@ class Tree { return ArtifactDescription::CreateTree(id_); } - [[nodiscard]] static auto FromJson(std::string const& id, + [[nodiscard]] static auto FromJson(HashFunction::Type hash_type, + std::string const& id, nlohmann::json const& json) -> std::optional { auto inputs = inputs_t{}; inputs.reserve(json.size()); for (auto const& [path, artifact] : json.items()) { - auto artifact_desc = ArtifactDescription::FromJson(artifact); + auto artifact_desc = + ArtifactDescription::FromJson(hash_type, artifact); if (not artifact_desc) { return std::nullopt; } diff --git a/src/buildtool/common/user_structs.hpp b/src/buildtool/common/user_structs.hpp index bebd8fab5..019d5925c 100644 --- a/src/buildtool/common/user_structs.hpp +++ b/src/buildtool/common/user_structs.hpp @@ -57,8 +57,8 @@ struct LocalPaths { path = path.parent_path(); } }()}; - nlohmann::json git_checkout_locations{}; - std::vector distdirs{}; + nlohmann::json git_checkout_locations; + std::vector distdirs; }; struct CAInfo { diff --git a/src/buildtool/compatibility/TARGETS b/src/buildtool/compatibility/TARGETS deleted file mode 100644 index 6c26fb486..000000000 --- a/src/buildtool/compatibility/TARGETS +++ /dev/null @@ -1,14 +0,0 @@ -{ "compatibility": - { "type": ["@", "rules", "CC", "library"] - , "name": ["compatibility"] - , "hdrs": ["compatibility.hpp", "native_support.hpp"] - , "deps": - [ ["src/buildtool/crypto", "hash_function"] - , ["@", "gsl", "", "gsl"] - , ["src/buildtool/logging", "log_level"] - , ["src/buildtool/logging", "logging"] - , ["src/utils/cpp", "gsl"] - ] - , "stage": ["src", "buildtool", "compatibility"] - } -} diff --git 
a/src/buildtool/compatibility/compatibility.hpp b/src/buildtool/compatibility/compatibility.hpp deleted file mode 100644 index e276c5626..000000000 --- a/src/buildtool/compatibility/compatibility.hpp +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright 2022 Huawei Cloud Computing Technology Co., Ltd. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef INCLUDED_SRC_BUILDTOOL_COMPATIBILITY_COMPATIBILITY_HPP -#define INCLUDED_SRC_BUILDTOOL_COMPATIBILITY_COMPATIBILITY_HPP -#include -#include -#include - -#include "src/buildtool/crypto/hash_function.hpp" -#include "src/buildtool/logging/log_level.hpp" -#include "src/buildtool/logging/logger.hpp" -class Compatibility { - using git_hash = std::string; - using compat_hash = std::string; - using git_repo = std::string; - using GitToCompatibleMap = std::unordered_map; - using CompatibleToGitMap = - std::unordered_map>; - - public: - [[nodiscard]] static auto Instance() noexcept -> Compatibility& { - static Compatibility instance{}; - return instance; - } - [[nodiscard]] static auto IsCompatible() noexcept -> bool { - return Instance().compatible_; - } - static void SetCompatible(bool value = true) noexcept { - Instance().compatible_ = value; - } - - [[nodiscard]] static auto RegisterGitEntry(std::string const& git_hash, - std::string const& data, - std::string const& repo) - -> compat_hash { - - { - auto& git_to_compatible = Instance().git_to_compatible_; - std::shared_lock lock_{Instance().mutex_}; - auto it = 
git_to_compatible.find(git_hash); - if (it != git_to_compatible.end()) { - return it->second; - } - } - // This is only used in compatible mode. - HashFunction const hash_function{HashFunction::Type::PlainSHA256}; - auto compatible_hash = hash_function.PlainHashData(data).HexString(); - std::unique_lock lock_{Instance().mutex_}; - Instance().git_to_compatible_[git_hash] = compatible_hash; - Instance().compatible_to_git_[compatible_hash] = {git_hash, repo}; - return compatible_hash; - } - - [[nodiscard]] static auto GetGitEntry(std::string const& compatible_hash) - -> std::optional> { - auto const& compatible_to_git = Instance().compatible_to_git_; - std::shared_lock lock_{Instance().mutex_}; - auto it = compatible_to_git.find(compatible_hash); - if (it != compatible_to_git.end()) { - return it->second; - } - Logger::Log( - LogLevel::Warning, - fmt::format("Unable to get the git-sha1 code associated to {}", - compatible_hash)); - return std::nullopt; - } - - private: - GitToCompatibleMap git_to_compatible_{}; - CompatibleToGitMap compatible_to_git_{}; - bool compatible_{false}; - std::shared_mutex mutex_; -}; -#endif // INCLUDED_SRC_BUILDTOOL_COMPATIBILITY_COMPATIBILITY_HPP diff --git a/src/buildtool/compatibility/native_support.hpp b/src/buildtool/compatibility/native_support.hpp deleted file mode 100644 index 9890742e1..000000000 --- a/src/buildtool/compatibility/native_support.hpp +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2022 Huawei Cloud Computing Technology Co., Ltd. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef INCLUDED_SRC_BUILDTOOL_COMPATIBILITY_NATIVE_SUPPORT_HPP -#define INCLUDED_SRC_BUILDTOOL_COMPATIBILITY_NATIVE_SUPPORT_HPP - -#include -#include - -#include "gsl/gsl" -#include "src/buildtool/compatibility/compatibility.hpp" -#include "src/utils/cpp/gsl.hpp" - -/// \brief Helper functions to support the native remote-execution protocol. -class NativeSupport { - static constexpr std::size_t kTagLength = 2; - static constexpr std::size_t kTaggedLength = 42; - static constexpr auto kBlobTag = "62"; - static constexpr auto kTreeTag = "74"; - - public: - [[nodiscard]] static auto IsPrefixed(std::string const& hash) noexcept - -> bool { - if (Compatibility::IsCompatible()) { - return false; - } - return hash.length() == kTaggedLength; - } - - /// \brief Returns a prefixed hash in case of native remote-execution - /// protocol (0x62 in case of a blob, 0x74 in case of a tree). - [[nodiscard]] static auto Prefix(std::string const& hash, - bool is_tree) noexcept -> std::string { - if (Compatibility::IsCompatible()) { - return hash; - } - ExpectsAudit(not IsPrefixed(hash)); - return (is_tree ? 
kTreeTag : kBlobTag) + hash; - } - - [[nodiscard]] static auto Unprefix(std::string const& hash) noexcept - -> std::string { - if (Compatibility::IsCompatible()) { - return hash; - } - ExpectsAudit(IsPrefixed(hash)); - return hash.substr(kTagLength); - } - - [[nodiscard]] static auto IsTree(std::string const& hash) noexcept -> bool { - return IsPrefixed(hash) and hash.starts_with(kTreeTag); - } -}; -#endif // INCLUDED_SRC_BUILDTOOL_COMPATIBILITY_NATIVE_SUPPORT_HPP diff --git a/src/buildtool/crypto/TARGETS b/src/buildtool/crypto/TARGETS index fba8ba84d..e2f010f5f 100644 --- a/src/buildtool/crypto/TARGETS +++ b/src/buildtool/crypto/TARGETS @@ -6,8 +6,8 @@ , "stage": ["src", "buildtool", "crypto"] , "deps": [["src/utils/cpp", "hex_string"]] , "private-deps": - [ ["@", "ssl", "", "crypto"] - , ["@", "gsl", "", "gsl"] + [ ["@", "gsl", "", "gsl"] + , ["@", "ssl", "", "crypto"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] ] @@ -19,8 +19,23 @@ , "srcs": ["hash_function.cpp"] , "deps": ["hasher", ["@", "gsl", "", "gsl"]] , "private-deps": - [ ["src/buildtool/logging", "logging"] - , ["src/buildtool/logging", "log_level"] + [ ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] + ] + , "stage": ["src", "buildtool", "crypto"] + } +, "hash_info": + { "type": ["@", "rules", "CC", "library"] + , "name": ["hash_info"] + , "hdrs": ["hash_info.hpp"] + , "srcs": ["hash_info.cpp"] + , "deps": ["hash_function", ["src/utils/cpp", "expected"]] + , "private-deps": + [ "hasher" + , ["@", "fmt", "", "fmt"] + , ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "protocol_traits"] + , ["src/utils/cpp", "hex_string"] ] , "stage": ["src", "buildtool", "crypto"] } diff --git a/src/buildtool/crypto/hash_function.hpp b/src/buildtool/crypto/hash_function.hpp index 9e5a9c0b7..530e0b2b4 100644 --- a/src/buildtool/crypto/hash_function.hpp +++ b/src/buildtool/crypto/hash_function.hpp @@ -95,4 +95,15 @@ class HashFunction { -> 
std::optional>; }; +[[nodiscard]] constexpr auto ToString(HashFunction::Type type) noexcept -> const + char* { + switch (type) { + case HashFunction::Type::GitSHA1: + return "git-SHA1"; + case HashFunction::Type::PlainSHA256: + return "plain-SHA256"; + } + Ensures(false); // unreachable +} + #endif // INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_FUNCTION_HPP diff --git a/src/buildtool/crypto/hash_info.cpp b/src/buildtool/crypto/hash_info.cpp new file mode 100644 index 000000000..e185d214e --- /dev/null +++ b/src/buildtool/crypto/hash_info.cpp @@ -0,0 +1,102 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "src/buildtool/crypto/hash_info.hpp" + +#include "fmt/core.h" +#include "gsl/gsl" // Ensures +#include "src/buildtool/common/protocol_traits.hpp" +#include "src/buildtool/crypto/hasher.hpp" +#include "src/utils/cpp/hex_string.hpp" + +namespace { + +inline constexpr auto kSHA1EmptyGitBlobHash = + "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"; +} // namespace + +HashInfo::HashInfo() noexcept + : hash_{kSHA1EmptyGitBlobHash}, + hash_type_{HashFunction::Type::GitSHA1}, + is_tree_{false} {} + +auto HashInfo::Create(HashFunction::Type type, + std::string hash, + bool is_tree) noexcept + -> expected { + if (auto error = HashInfo::ValidateInput(type, hash, is_tree)) { + return unexpected{*std::move(error)}; + } + return HashInfo(std::move(hash), type, is_tree); +} + +auto HashInfo::HashData(HashFunction hash_function, + std::string const& content, + bool is_tree) noexcept -> HashInfo { + auto const hash_digest = is_tree ? hash_function.HashTreeData(content) + : hash_function.HashBlobData(content); + return HashInfo{ + hash_digest.HexString(), + hash_function.GetType(), + is_tree and ProtocolTraits::IsTreeAllowed(hash_function.GetType())}; +} + +auto HashInfo::HashFile(HashFunction hash_function, + std::filesystem::path const& path, + bool is_tree) noexcept + -> std::optional> { + auto const hash_digest = is_tree ? 
hash_function.HashTreeFile(path) + : hash_function.HashBlobFile(path); + if (not hash_digest) { + return std::nullopt; + } + return std::pair{HashInfo{hash_digest->first.HexString(), + hash_function.GetType(), + is_tree and hash_function.GetType() == + HashFunction::Type::GitSHA1}, + hash_digest->second}; +} + +auto HashInfo::operator==(HashInfo const& other) const noexcept -> bool { + return hash_ == other.hash_ and is_tree_ == other.is_tree_; +} + +auto HashInfo::ValidateInput(HashFunction::Type type, + std::string const& hash, + bool is_tree) noexcept + -> std::optional { + if (is_tree and not ProtocolTraits::IsTreeAllowed(type)) { + return fmt::format( + "HashInfo: hash {} is expected to be {}.\nTrees are " + "not allowed in this mode.", + hash, + ToString(type)); + } + + if (auto const exp_size = HashFunction{type}.MakeHasher().GetHashLength(); + hash.size() != exp_size) { + return fmt::format( + "HashInfo: hash {} is expected to be {}.\n It must have a length " + "of {}, but its length is {}.", + hash, + ToString(type), + exp_size, + hash.size()); + } + + if (not IsHexString(hash)) { + return fmt::format("HashInfo: Invalid hash {}", hash); + } + return std::nullopt; +} diff --git a/src/buildtool/crypto/hash_info.hpp b/src/buildtool/crypto/hash_info.hpp new file mode 100644 index 000000000..dc40e7ca1 --- /dev/null +++ b/src/buildtool/crypto/hash_info.hpp @@ -0,0 +1,102 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_INFO_HPP +#define INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_INFO_HPP + +#include +#include +#include +#include + +#include "src/buildtool/crypto/hash_function.hpp" +#include "src/utils/cpp/expected.hpp" + +/// \brief A collection of data related to a specific hash. Once it is +/// constructed, it holds a valid hexadecimal (always unprefixed) hash with some +/// additional information about the method of hashing. +class HashInfo final { + public: + explicit HashInfo() noexcept; + + /// \brief Build HashInfo based on 'external' data that cannot be trusted. A + /// number of validation checks is happening + /// \param type Type of the hash function used to create the hash + /// \param hash A hexadecimal hash + /// \param is_tree Tree or blob. Note that trees are not allowed in the + /// compatible mode. + /// \return Validated HashInfo on success or an error message on failure. + [[nodiscard]] static auto Create(HashFunction::Type type, + std::string hash, + bool is_tree) noexcept + -> expected; + + /// \brief Hash content and build HashInfo + /// \param hash_function Hash function to be used + /// \param content Content to be hashed + /// \param is_tree Tree or blob, the type of the algorithm to be used for + /// hashing. Note that HashInfo may return another value from IsTree in + /// compatible mode. + [[nodiscard]] static auto HashData(HashFunction hash_function, + std::string const& content, + bool is_tree) noexcept -> HashInfo; + + /// \brief Hash file and build HashInfo + /// \param hash_function Hash function to be use + /// \param path File to be hashed + /// \param is_tree Tree or blob, the type of the algorithm to be used for + /// hashing. Note that HashInfo may return another value from IsTree in + /// compatible mode. 
+ /// \return A combination of the hash of the file and file's size or + /// std::nullopt on IO failure. + [[nodiscard]] static auto HashFile(HashFunction hash_function, + std::filesystem::path const& path, + bool is_tree) noexcept + -> std::optional>; + + [[nodiscard]] auto Hash() const& noexcept -> std::string const& { + return hash_; + } + + [[nodiscard]] auto Hash() && -> std::string { return std::move(hash_); } + + [[nodiscard]] auto HashType() const noexcept -> HashFunction::Type { + return hash_type_; + } + + [[nodiscard]] auto IsTree() const noexcept -> bool { return is_tree_; } + + [[nodiscard]] auto operator==(HashInfo const& other) const noexcept -> bool; + + private: + std::string hash_; + HashFunction::Type hash_type_; + + /// \brief Tree or blob algorithm was used for hashing. is_tree_ can be true + /// in the native mode only, in compatible it falls back to false during + /// hashing via HashData/HashFile or an error occurs during validation. + bool is_tree_; + + explicit HashInfo(std::string hash, + HashFunction::Type type, + bool is_tree) noexcept + : hash_{std::move(hash)}, hash_type_{type}, is_tree_{is_tree} {} + + [[nodiscard]] static auto ValidateInput(HashFunction::Type type, + std::string const& hash, + bool is_tree) noexcept + -> std::optional; +}; + +#endif // INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_INFO_HPP diff --git a/src/buildtool/crypto/hasher.cpp b/src/buildtool/crypto/hasher.cpp index 41182559b..7c5d1d9c4 100644 --- a/src/buildtool/crypto/hasher.cpp +++ b/src/buildtool/crypto/hasher.cpp @@ -66,15 +66,15 @@ struct InitializeVisitor final { static constexpr std::string_view kLogInfo = "Initialize"; // NOLINTNEXTLINE(google-runtime-references) - [[nodiscard]] inline auto operator()(SHA_CTX& ctx) const -> bool { + [[nodiscard]] auto operator()(SHA_CTX& ctx) const -> bool { return SHA1_Init(&ctx) == kOpenSslTrue; } // NOLINTNEXTLINE(google-runtime-references) - [[nodiscard]] inline auto operator()(SHA256_CTX& ctx) const -> bool { + 
[[nodiscard]] auto operator()(SHA256_CTX& ctx) const -> bool { return SHA256_Init(&ctx) == kOpenSslTrue; } // NOLINTNEXTLINE(google-runtime-references) - [[nodiscard]] inline auto operator()(SHA512_CTX& ctx) const -> bool { + [[nodiscard]] auto operator()(SHA512_CTX& ctx) const -> bool { return SHA512_Init(&ctx) == kOpenSslTrue; } }; @@ -86,15 +86,15 @@ struct UpdateVisitor final { : data_{*data} {} // NOLINTNEXTLINE(google-runtime-references) - [[nodiscard]] inline auto operator()(SHA_CTX& ctx) const -> bool { + [[nodiscard]] auto operator()(SHA_CTX& ctx) const -> bool { return SHA1_Update(&ctx, data_.data(), data_.size()) == kOpenSslTrue; } // NOLINTNEXTLINE(google-runtime-references) - [[nodiscard]] inline auto operator()(SHA256_CTX& ctx) const -> bool { + [[nodiscard]] auto operator()(SHA256_CTX& ctx) const -> bool { return SHA256_Update(&ctx, data_.data(), data_.size()) == kOpenSslTrue; } // NOLINTNEXTLINE(google-runtime-references) - [[nodiscard]] inline auto operator()(SHA512_CTX& ctx) const -> bool { + [[nodiscard]] auto operator()(SHA512_CTX& ctx) const -> bool { return SHA512_Update(&ctx, data_.data(), data_.size()) == kOpenSslTrue; } diff --git a/src/buildtool/crypto/hasher.hpp b/src/buildtool/crypto/hasher.hpp index 4987f7bbe..49e9eeec3 100644 --- a/src/buildtool/crypto/hasher.hpp +++ b/src/buildtool/crypto/hasher.hpp @@ -61,7 +61,7 @@ class Hasher final { } private: - std::string bytes_{}; + std::string bytes_; explicit HashDigest(std::string bytes) : bytes_{std::move(bytes)} {} }; diff --git a/src/buildtool/execution_api/bazel_msg/TARGETS b/src/buildtool/execution_api/bazel_msg/TARGETS index ddb408995..3adeaa9e2 100644 --- a/src/buildtool/execution_api/bazel_msg/TARGETS +++ b/src/buildtool/execution_api/bazel_msg/TARGETS @@ -3,17 +3,12 @@ , "name": ["bazel_msg"] , "hdrs": ["bazel_blob_container.hpp", "bazel_common.hpp"] , "deps": - [ ["src/buildtool/common", "common"] - , ["src/buildtool/file_system", "file_system_manager"] - , 
["src/buildtool/common", "bazel_types"] + [ ["src/buildtool/common", "bazel_types"] + , ["src/buildtool/common", "common"] , ["src/buildtool/execution_api/common", "content_blob_container"] + , ["src/buildtool/file_system", "file_system_manager"] ] , "stage": ["src", "buildtool", "execution_api", "bazel_msg"] - , "private-deps": - [ ["src/buildtool/crypto", "hash_function"] - , ["src/buildtool/file_system", "git_cas"] - , ["src/utils/cpp", "gsl"] - ] } , "bazel_msg_factory": { "type": ["@", "rules", "CC", "library"] @@ -23,19 +18,24 @@ , "deps": [ "bazel_msg" , "directory_tree" + , ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "bazel_types"] , ["src/buildtool/common", "common"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/execution_api/common", "artifact_blob_container"] , ["src/buildtool/execution_engine/dag", "dag"] - , ["src/buildtool/common", "bazel_types"] + , ["src/buildtool/file_system", "object_type"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["@", "gsl", "", "gsl"] - , ["src/buildtool/crypto", "hash_function"] + , ["src/utils/cpp", "expected"] ] , "private-deps": - [ ["src/buildtool/compatibility", "compatibility"] - , ["src/utils/cpp", "hex_string"] + [ ["@", "fmt", "", "fmt"] + , ["src/buildtool/common", "artifact_digest_factory"] , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/file_system", "git_repo"] + , ["src/utils/cpp", "hex_string"] + , ["src/utils/cpp", "path"] ] , "stage": ["src", "buildtool", "execution_api", "bazel_msg"] } @@ -53,6 +53,7 @@ [ ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] + , ["src/utils/cpp", "gsl"] ] , "stage": ["src", "buildtool", "execution_api", "bazel_msg"] } diff --git a/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.cpp b/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.cpp index 15353a429..0d5aa8b05 100644 --- 
a/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.cpp +++ b/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.cpp @@ -25,18 +25,14 @@ #include // std::move #include -#include "src/buildtool/common/bazel_types.hpp" -#include "src/buildtool/compatibility/native_support.hpp" +#include "fmt/core.h" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/file_system/git_repo.hpp" #include "src/utils/cpp/hex_string.hpp" +#include "src/utils/cpp/path.hpp" namespace { -struct DirectoryNodeBundle final { - bazel_re::DirectoryNode const message; - BazelBlob const bazel_blob; -}; - /// \brief Serialize protobuf message to string. template [[nodiscard]] auto SerializeMessage(T const& message) noexcept @@ -47,8 +43,8 @@ template gsl::narrow(content.size())); return content; } catch (...) { + return std::nullopt; } - return std::nullopt; } /// \brief Create protobuf message 'Platform'. @@ -89,22 +85,22 @@ template /// \brief Create protobuf message 'FileNode'. [[nodiscard]] auto CreateFileNode(std::string const& file_name, ObjectType type, - bazel_re::Digest const& digest) noexcept + ArtifactDigest const& digest) noexcept -> bazel_re::FileNode { bazel_re::FileNode node; node.set_name(file_name); node.set_is_executable(IsExecutableObject(type)); - (*node.mutable_digest()) = digest; + (*node.mutable_digest()) = ArtifactDigestFactory::ToBazel(digest); return node; } /// \brief Create protobuf message 'DirectoryNode'. 
[[nodiscard]] auto CreateDirectoryNode(std::string const& dir_name, - bazel_re::Digest const& digest) noexcept + ArtifactDigest const& digest) noexcept -> bazel_re::DirectoryNode { bazel_re::DirectoryNode node; node.set_name(dir_name); - (*node.mutable_digest()) = digest; + (*node.mutable_digest()) = ArtifactDigestFactory::ToBazel(digest); return node; } @@ -122,7 +118,7 @@ template /// instances at once [[nodiscard]] auto CreateSymlinkNodesFromDigests( std::vector const& symlink_names, - std::vector const& symlink_digests, + std::vector const& symlink_digests, BazelMsgFactory::LinkDigestResolveFunc const& resolve_links) -> std::vector { std::vector symlink_targets; @@ -137,6 +133,11 @@ template return symlink_nodes; } +struct DirectoryNodeBundle final { + bazel_re::DirectoryNode message; + ArtifactBlob blob; +}; + /// \brief Create bundle for protobuf message DirectoryNode from Directory. [[nodiscard]] auto CreateDirectoryNodeBundle(std::string const& dir_name, bazel_re::Directory const& dir) @@ -148,19 +149,19 @@ template // SHA256 is used since bazel types are processed here. HashFunction const hash_function{HashFunction::Type::PlainSHA256}; - auto digest = - ArtifactDigest::Create(hash_function, *content); + auto digest = ArtifactDigestFactory::HashDataAs( + hash_function, *content); return DirectoryNodeBundle{ .message = CreateDirectoryNode(dir_name, digest), - .bazel_blob = BazelBlob{ + .blob = ArtifactBlob{ std::move(digest), std::move(*content), /*is_exec=*/false}}; } /// \brief Create bundle for protobuf message Command from args strings. [[nodiscard]] auto CreateCommandBundle( BazelMsgFactory::ActionDigestRequest const& request) - -> std::optional { + -> std::optional { bazel_re::Command msg; // DEPRECATED as of v2.2: platform properties are now specified // directly in the action. 
See documentation note in the @@ -185,16 +186,18 @@ template if (not content) { return std::nullopt; } - auto digest = ArtifactDigest::Create( + auto digest = ArtifactDigestFactory::HashDataAs( request.hash_function, *content); - return BazelBlob{digest, std::move(*content), /*is_exec=*/false}; + return ArtifactBlob{std::move(digest), + std::move(*content), + /*is_exec=*/false}; } /// \brief Create bundle for protobuf message Action from Command. [[nodiscard]] auto CreateActionBundle( - bazel_re::Digest const& command, + ArtifactDigest const& command, BazelMsgFactory::ActionDigestRequest const& request) - -> std::optional { + -> std::optional { using seconds = std::chrono::seconds; using nanoseconds = std::chrono::nanoseconds; auto sec = std::chrono::duration_cast(request.timeout); @@ -202,15 +205,15 @@ template auto duration = std::make_unique(); duration->set_seconds(sec.count()); - duration->set_nanos(nanos.count()); + duration->set_nanos(static_cast(nanos.count())); bazel_re::Action msg; msg.set_do_not_cache(request.skip_action_cache); msg.set_allocated_timeout(duration.release()); - msg.set_allocated_command_digest( - gsl::owner{new bazel_re::Digest{command}}); - msg.set_allocated_input_root_digest( - gsl::owner{new bazel_re::Digest{*request.exec_dir}}); + *msg.mutable_command_digest() = ArtifactDigestFactory::ToBazel(command); + *msg.mutable_input_root_digest() = + ArtifactDigestFactory::ToBazel(*request.exec_dir); + // New in version 2.2: clients SHOULD set these platform properties // as well as those in the // [Command][build.bazel.remote.execution.v2.Command]. 
Servers @@ -222,13 +225,14 @@ template if (not content) { return std::nullopt; } - auto digest = ArtifactDigest::Create( + auto digest = ArtifactDigestFactory::HashDataAs( request.hash_function, *content); - return BazelBlob{digest, std::move(*content), /*is_exec=*/false}; + return ArtifactBlob{std::move(digest), + std::move(*content), + /*is_exec=*/false}; } /// \brief Convert `DirectoryTree` to `DirectoryNodeBundle`. -/// NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] auto DirectoryTreeToBundle( std::string const& root_name, DirectoryTreePtr const& tree, @@ -239,18 +243,18 @@ template std::vector file_nodes{}; std::vector dir_nodes{}; std::vector symlink_names{}; - std::vector symlink_digests{}; + std::vector symlink_digests{}; try { for (auto const& [name, node] : *tree) { if (std::holds_alternative(node)) { auto const& dir = std::get(node); - auto const dir_bundle = DirectoryTreeToBundle( + auto dir_bundle = DirectoryTreeToBundle( name, dir, resolve_links, process_blob, parent / name); if (not dir_bundle) { return std::nullopt; } - dir_nodes.emplace_back(dir_bundle->message); - if (not process_blob(BazelBlob{dir_bundle->bazel_blob})) { + dir_nodes.emplace_back(std::move(dir_bundle->message)); + if (not process_blob(std::move(dir_bundle->blob))) { return std::nullopt; } } @@ -289,25 +293,229 @@ template return std::nullopt; } +[[nodiscard]] auto GetContentFromGitEntry( + BazelMsgFactory::GitReadFunc const& read_git, + ArtifactDigest const& digest, + ObjectType entry_type) -> expected { + auto read_git_res = read_git(digest, entry_type); + if (not read_git_res) { + return unexpected{ + fmt::format("failed reading Git entry {}", digest.hash())}; + } + if (std::holds_alternative(read_git_res.value())) { + return std::get(std::move(read_git_res).value()); + } + if (std::holds_alternative(read_git_res.value())) { + auto content = FileSystemManager::ReadFile( + std::get(std::move(read_git_res).value())); + if (not content) { + return unexpected{fmt::format("failed 
reading content of tree {}", + digest.hash())}; + } + return *std::move(content); + } + return unexpected{ + fmt::format("unexpected failure reading Git entry {}", digest.hash())}; +} + } // namespace auto BazelMsgFactory::CreateDirectoryDigestFromTree( DirectoryTreePtr const& tree, LinkDigestResolveFunc const& resolve_links, BlobProcessFunc const& process_blob) noexcept - -> std::optional { - if (auto bundle = - DirectoryTreeToBundle("", tree, resolve_links, process_blob)) { - try { - if (not process_blob(BazelBlob{bundle->bazel_blob})) { - return std::nullopt; - } - } catch (...) { + -> std::optional { + auto bundle = DirectoryTreeToBundle("", tree, resolve_links, process_blob); + if (not bundle) { + return std::nullopt; + } + + auto digest = bundle->blob.digest; + try { + if (not process_blob(std::move(bundle->blob))) { return std::nullopt; } - return bundle->bazel_blob.digest; + } catch (...) { + return std::nullopt; + } + return digest; +} + +auto BazelMsgFactory::CreateDirectoryDigestFromGitTree( + ArtifactDigest const& digest, + GitReadFunc const& read_git, + BlobStoreFunc const& store_file, + TreeStoreFunc const& store_dir, + SymlinkStoreFunc const& store_symlink, + RehashedDigestReadFunc const& read_rehashed, + RehashedDigestStoreFunc const& store_rehashed) noexcept + -> expected { + std::vector files{}; + std::vector dirs{}; + std::vector symlinks{}; + + try { + // read tree object + auto const tree_content = + GetContentFromGitEntry(read_git, digest, ObjectType::Tree); + if (not tree_content) { + return unexpected{tree_content.error()}; + } + auto const check_symlinks = + [&read_git](std::vector const& ids) { + return std::all_of(ids.begin(), + ids.end(), + [&read_git](auto const& id) -> bool { + auto content = GetContentFromGitEntry( + read_git, id, ObjectType::Symlink); + return content and + PathIsNonUpwards(*content); + }); + }; + + // Git-SHA1 hashing is used for reading from git + HashFunction const hash_function{HashFunction::Type::GitSHA1}; + // 
the tree digest is in native mode, so no need for rehashing content + auto const entries = GitRepo::ReadTreeData( + *tree_content, digest.hash(), check_symlinks, /*is_hex_id=*/true); + if (not entries) { + return unexpected{fmt::format("failed reading entries of tree {}", + digest.hash())}; + } + + // handle tree entries + for (auto const& [raw_id, es] : *entries) { + auto const hex_id = ToHexString(raw_id); + for (auto const& entry : es) { + // get native digest of entry + auto const git_digest = + ArtifactDigestFactory::Create(HashFunction::Type::GitSHA1, + hex_id, + /*size is unknown*/ 0, + IsTreeObject(entry.type)); + if (not git_digest) { + return unexpected{git_digest.error()}; + } + // get any cached digest mapping, to avoid unnecessary work + auto const cached_obj = read_rehashed(*git_digest); + if (not cached_obj) { + return unexpected{cached_obj.error()}; + } + // create and store the directory entry + switch (entry.type) { + case ObjectType::Tree: { + if (cached_obj.value()) { + // no work to be done if we already know the digest + dirs.emplace_back(CreateDirectoryNode( + entry.name, cached_obj.value()->digest)); + } + else { + // create and store sub directory + auto const dir_digest = + CreateDirectoryDigestFromGitTree( + *git_digest, + read_git, + store_file, + store_dir, + store_symlink, + read_rehashed, + store_rehashed); + if (not dir_digest) { + return unexpected{dir_digest.error()}; + } + dirs.emplace_back( + CreateDirectoryNode(entry.name, *dir_digest)); + // no need to cache the digest mapping, as this was + // done in the recursive call + } + } break; + case ObjectType::Symlink: { + // create and store symlink; for this entry type the + // cached digest is ignored because we always need the + // target (i.e., the symlink content) + auto const sym_target = GetContentFromGitEntry( + read_git, *git_digest, ObjectType::Symlink); + if (not sym_target) { + return unexpected{sym_target.error()}; + } + auto const sym_digest = 
store_symlink(*sym_target); + if (not sym_digest) { + return unexpected{fmt::format( + "failed storing symlink {}", hex_id)}; + } + symlinks.emplace_back( + CreateSymlinkNode(entry.name, *sym_target)); + // while useless for future symlinks, cache digest + // mapping for file-type blobs with same content + if (auto error_msg = + store_rehashed(*git_digest, + *sym_digest, + ObjectType::Symlink)) { + return unexpected{*std::move(error_msg)}; + } + } break; + default: { + if (cached_obj.value()) { + // no work to be done if we already know the digest + files.emplace_back( + CreateFileNode(entry.name, + entry.type, + cached_obj.value()->digest)); + } + else { + // create and store file; here we want to NOT read + // the content if from CAS, where we can rehash via + // streams! + auto const read_git_file = + read_git(*git_digest, entry.type); + if (not read_git_file) { + return unexpected{ + fmt::format("failed reading Git entry ")}; + } + auto const file_digest = store_file( + *read_git_file, IsExecutableObject(entry.type)); + if (not file_digest) { + return unexpected{fmt::format( + "failed storing file {}", hex_id)}; + } + files.emplace_back(CreateFileNode( + entry.name, entry.type, *file_digest)); + // cache digest mapping + if (auto error_msg = store_rehashed( + *git_digest, *file_digest, entry.type)) { + return unexpected{*std::move(error_msg)}; + } + } + } + } + } + } + + // create and store tree + auto const bytes = + SerializeMessage(CreateDirectory(files, dirs, symlinks)); + if (not bytes) { + return unexpected{ + fmt::format("failed serializing bazel Directory for tree {}", + digest.hash())}; + } + auto const tree_digest = store_dir(*bytes); + if (not tree_digest) { + return unexpected{fmt::format( + "failed storing bazel Directory for tree {}", digest.hash())}; + } + // cache digest mapping + if (auto error_msg = + store_rehashed(digest, *tree_digest, ObjectType::Tree)) { + return unexpected{*std::move(error_msg)}; + } + // return digest + return 
*tree_digest; + } catch (std::exception const& ex) { + return unexpected{fmt::format( + "creating bazel Directory digest unexpectedly failed with:\n{}", + ex.what())}; } - return std::nullopt; } auto BazelMsgFactory::CreateDirectoryDigestFromLocalTree( @@ -315,7 +523,7 @@ auto BazelMsgFactory::CreateDirectoryDigestFromLocalTree( FileStoreFunc const& store_file, TreeStoreFunc const& store_dir, SymlinkStoreFunc const& store_symlink) noexcept - -> std::optional { + -> std::optional { std::vector files{}; std::vector dirs{}; std::vector symlinks{}; @@ -377,9 +585,7 @@ auto BazelMsgFactory::CreateDirectoryDigestFromLocalTree( auto dir = CreateDirectory(files, dirs, symlinks); if (auto bytes = SerializeMessage(dir)) { try { - if (auto digest = store_dir(*bytes)) { - return *digest; - } + return store_dir(*bytes); } catch (std::exception const& ex) { Logger::Log(LogLevel::Error, "storing directory failed with:\n{}", @@ -396,7 +602,7 @@ auto BazelMsgFactory::CreateGitTreeDigestFromLocalTree( FileStoreFunc const& store_file, TreeStoreFunc const& store_tree, SymlinkStoreFunc const& store_symlink) noexcept - -> std::optional { + -> std::optional { GitRepo::tree_entries_t entries{}; auto dir_reader = [&entries, &root, @@ -408,8 +614,7 @@ auto BazelMsgFactory::CreateGitTreeDigestFromLocalTree( // create and store sub directory if (auto digest = CreateGitTreeDigestFromLocalTree( full_name, store_file, store_tree, store_symlink)) { - if (auto raw_id = FromHexString( - NativeSupport::Unprefix(digest->hash()))) { + if (auto raw_id = FromHexString(digest->hash())) { entries[std::move(*raw_id)].emplace_back(name.string(), ObjectType::Tree); return true; @@ -423,10 +628,9 @@ auto BazelMsgFactory::CreateGitTreeDigestFromLocalTree( try { if (IsSymlinkObject(type)) { auto content = FileSystemManager::ReadSymlink(full_name); - if (content) { + if (content and PathIsNonUpwards(*content)) { if (auto digest = store_symlink(*content)) { - if (auto raw_id = FromHexString( - 
NativeSupport::Unprefix(digest->hash()))) { + if (auto raw_id = FromHexString(digest->hash())) { entries[std::move(*raw_id)].emplace_back( name.string(), type); return true; @@ -445,8 +649,7 @@ auto BazelMsgFactory::CreateGitTreeDigestFromLocalTree( } // create and store file if (auto digest = store_file(full_name, IsExecutableObject(type))) { - if (auto raw_id = FromHexString( - NativeSupport::Unprefix(digest->hash()))) { + if (auto raw_id = FromHexString(digest->hash())) { entries[std::move(*raw_id)].emplace_back(name.string(), type); return true; @@ -465,9 +668,7 @@ auto BazelMsgFactory::CreateGitTreeDigestFromLocalTree( root, dir_reader, /*allow_upwards=*/true)) { if (auto tree = GitRepo::CreateShallowTree(entries)) { try { - if (auto digest = store_tree(tree->second)) { - return *digest; - } + return store_tree(tree->second); } catch (std::exception const& ex) { Logger::Log(LogLevel::Error, "storing tree failed with:\n{}", @@ -480,7 +681,7 @@ auto BazelMsgFactory::CreateGitTreeDigestFromLocalTree( } auto BazelMsgFactory::CreateActionDigestFromCommandLine( - ActionDigestRequest const& request) -> std::optional { + ActionDigestRequest const& request) -> std::optional { auto cmd = CreateCommandBundle(request); if (not cmd) { return std::nullopt; @@ -491,12 +692,15 @@ auto BazelMsgFactory::CreateActionDigestFromCommandLine( return std::nullopt; } - if (not request.store_blob) { - return action->digest; + if (request.store_blob) { + std::invoke(*request.store_blob, + BazelBlob{ArtifactDigestFactory::ToBazel(cmd->digest), + cmd->data, + cmd->is_exec}); + std::invoke(*request.store_blob, + BazelBlob{ArtifactDigestFactory::ToBazel(action->digest), + action->data, + action->is_exec}); } - - auto digest = action->digest; - std::invoke(*request.store_blob, std::move(*cmd)); - std::invoke(*request.store_blob, std::move(*action)); - return digest; + return action->digest; } diff --git a/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp 
b/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp index 0b006273d..540aaa395 100644 --- a/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp +++ b/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp @@ -21,6 +21,7 @@ #include #include #include +#include #include #include "gsl/gsl" @@ -29,11 +30,13 @@ #include "src/buildtool/common/bazel_types.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp" -#include "src/buildtool/execution_api/bazel_msg/bazel_common.hpp" #include "src/buildtool/execution_api/bazel_msg/directory_tree.hpp" +#include "src/buildtool/execution_api/common/artifact_blob_container.hpp" #include "src/buildtool/execution_engine/dag/dag.hpp" +#include "src/buildtool/file_system/object_type.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" +#include "src/utils/cpp/expected.hpp" /// \brief Factory for creating Bazel API protobuf messages. /// Responsible for creating protobuf messages necessary for Bazel API server @@ -41,16 +44,29 @@ class BazelMsgFactory { public: /// \brief Store or otherwise process a blob. Returns success flag. 
- using BlobProcessFunc = std::function; + using BlobProcessFunc = std::function; using LinkDigestResolveFunc = - std::function const&, - std::vector*)>; + std::function const&, + gsl::not_null*> const&)>; + using GitReadFunc = std::function>(ArtifactDigest const&, + ObjectType)>; + using BlobStoreFunc = std::function( + std::variant const&, + bool)>; using FileStoreFunc = std::function< - std::optional(std::filesystem::path const&, bool)>; + std::optional(std::filesystem::path const&, bool)>; using SymlinkStoreFunc = - std::function(std::string const&)>; + std::function(std::string const&)>; using TreeStoreFunc = - std::function(std::string const&)>; + std::function(std::string const&)>; + using RehashedDigestReadFunc = + std::function, + std::string>(ArtifactDigest const&)>; + using RehashedDigestStoreFunc = + std::function(ArtifactDigest const&, + ArtifactDigest const&, + ObjectType)>; /// \brief Create Directory digest from artifact tree structure. Uses /// compatible HashFunction for hashing. Recursively traverse entire tree @@ -63,7 +79,32 @@ class BazelMsgFactory { DirectoryTreePtr const& tree, LinkDigestResolveFunc const& resolve_links, BlobProcessFunc const& process_blob) noexcept - -> std::optional; + -> std::optional; + + /// \brief Create Directory digest from an owned Git tree. + /// Recursively traverse entire tree and store files and directories. + /// Used to convert from native to compatible representation of trees. + /// \param digest Digest of a Git tree. + /// \param read_git Function for reading Git tree entries. Reading from + /// CAS returns the CAS path, while reading from Git CAS + /// returns content directly. This differentiation is + /// made to avoid unnecessary storing blobs in memory. + /// \param store_file Function for storing file via path or content. + /// \param store_dir Function for storing Directory blobs. + /// \param store_symlink Function for storing symlink via content. 
+ /// \param read_rehashed Function to read mapping between digests. + /// \param store_rehashed Function to store mapping between digests. + /// \returns Digest representing the entire tree directory, or error string + /// on failure. + [[nodiscard]] static auto CreateDirectoryDigestFromGitTree( + ArtifactDigest const& digest, + GitReadFunc const& read_git, + BlobStoreFunc const& store_file, + TreeStoreFunc const& store_dir, + SymlinkStoreFunc const& store_symlink, + RehashedDigestReadFunc const& read_rehashed, + RehashedDigestStoreFunc const& store_rehashed) noexcept + -> expected; /// \brief Create Directory digest from local file root. /// Recursively traverse entire root and store files and directories. @@ -77,7 +118,7 @@ class BazelMsgFactory { FileStoreFunc const& store_file, TreeStoreFunc const& store_dir, SymlinkStoreFunc const& store_symlink) noexcept - -> std::optional; + -> std::optional; /// \brief Create Git tree digest from local file root. /// Recursively traverse entire root and store files and directories. @@ -91,7 +132,7 @@ class BazelMsgFactory { FileStoreFunc const& store_file, TreeStoreFunc const& store_tree, SymlinkStoreFunc const& store_symlink) noexcept - -> std::optional; + -> std::optional; struct ActionDigestRequest; /// \brief Creates Action digest from command line. @@ -99,7 +140,7 @@ class BazelMsgFactory { /// CommandBundle that can be captured via BlobStoreFunc. /// \returns Digest representing the action. [[nodiscard]] static auto CreateActionDigestFromCommandLine( - ActionDigestRequest const& request) -> std::optional; + ActionDigestRequest const& request) -> std::optional; /// \brief Create message vector from std::map. /// \param[in] input map @@ -160,7 +201,7 @@ struct BazelMsgFactory::ActionDigestRequest final { VectorPtr const properties; /// \brief The Digest of the execution directory. - gsl::not_null const exec_dir; + gsl::not_null const exec_dir; /// \brief Hash function to be used. 
HashFunction const hash_function; diff --git a/src/buildtool/execution_api/bazel_msg/directory_tree.cpp b/src/buildtool/execution_api/bazel_msg/directory_tree.cpp index 5b3b20e2c..c7ddfdef3 100644 --- a/src/buildtool/execution_api/bazel_msg/directory_tree.cpp +++ b/src/buildtool/execution_api/bazel_msg/directory_tree.cpp @@ -51,7 +51,6 @@ auto DirectoryTree::FromNamedArtifacts( return dir_tree; } -// NOLINTNEXTLINE(misc-no-recursion) auto DirectoryTree::AddArtifact(std::filesystem::path::iterator* begin, std::filesystem::path::iterator const& end, Artifact const* artifact) -> bool { diff --git a/src/buildtool/execution_api/common/TARGETS b/src/buildtool/execution_api/common/TARGETS index 7d1b5ebea..0de326447 100644 --- a/src/buildtool/execution_api/common/TARGETS +++ b/src/buildtool/execution_api/common/TARGETS @@ -9,29 +9,37 @@ , "tree_reader.hpp" , "tree_reader_utils.hpp" , "stream_dumper.hpp" - , "artifact_blob_container.hpp" ] , "srcs": ["tree_reader_utils.cpp"] , "deps": - [ "content_blob_container" + [ "artifact_blob_container" , ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "bazel_types"] , ["src/buildtool/common", "common"] + , ["src/buildtool/common", "protocol_traits"] , ["src/buildtool/crypto", "hash_function"] - , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"] - , ["src/buildtool/file_system", "file_system_manager"] + , ["src/buildtool/execution_engine/dag", "dag"] + , ["src/buildtool/file_system", "git_repo"] + , ["src/buildtool/file_system", "object_type"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] + , ["src/utils/cpp", "expected"] , ["src/utils/cpp", "gsl"] , ["src/utils/cpp", "hex_string"] - , ["src/buildtool/file_system", "git_repo"] - , ["src/buildtool/execution_engine/dag", "dag"] + ] + , "private-deps": + [ ["@", "json", "", "json"] + , ["src/buildtool/common", "artifact_digest_factory"] ] , "stage": ["src", "buildtool", "execution_api", "common"] } -, "bytestream-common": +, 
"bytestream_utils": { "type": ["@", "rules", "CC", "library"] - , "name": ["bytestream-common"] - , "hdrs": ["bytestream_common.hpp"] + , "name": ["bytestream_utils"] + , "hdrs": ["bytestream_utils.hpp"] + , "srcs": ["bytestream_utils.cpp"] + , "private-deps": + [["@", "fmt", "", "fmt"], ["src/buildtool/common", "bazel_types"]] , "stage": ["src", "buildtool", "execution_api", "common"] } , "api_bundle": @@ -42,10 +50,11 @@ , "stage": ["src", "buildtool", "execution_api", "common"] , "deps": [ "common" + , ["@", "gsl", "", "gsl"] , ["src/buildtool/common", "config"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/execution_api/local", "context"] , ["src/buildtool/execution_api/remote", "context"] - , ["src/buildtool/crypto", "hash_function"] ] , "private-deps": [ ["src/buildtool/execution_api/bazel_msg", "bazel_msg"] @@ -57,7 +66,7 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["message_limits"] , "hdrs": ["message_limits.hpp"] - , "deps": [["@", "grpc", "", "grpc++"]] + , "deps": [["@", "grpc", "", "grpc"]] , "stage": ["src", "buildtool", "execution_api", "common"] } , "common_api": @@ -66,16 +75,16 @@ , "hdrs": ["common_api.hpp"] , "srcs": ["common_api.cpp"] , "deps": - [ "common" - , "blob_tree" + [ "blob_tree" + , "common" , "content_blob_container" , "message_limits" , ["@", "gsl", "", "gsl"] , ["src/buildtool/common", "common"] , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"] , ["src/buildtool/execution_api/bazel_msg", "directory_tree"] - , ["src/buildtool/logging", "logging"] , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] ] , "stage": ["src", "buildtool", "execution_api", "common"] , "private-deps": [["@", "fmt", "", "fmt"]] @@ -86,13 +95,15 @@ , "hdrs": ["blob_tree.hpp"] , "srcs": ["blob_tree.cpp"] , "deps": - [ "common" - , ["src/buildtool/execution_api/bazel_msg", "directory_tree"] + [ "artifact_blob_container" , ["@", "gsl", "", "gsl"] - , ["src/buildtool/compatibility", 
"compatibility"] + , ["src/buildtool/execution_api/bazel_msg", "directory_tree"] ] , "private-deps": - [ ["src/buildtool/file_system", "git_repo"] + [ ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common", "common"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/file_system", "git_repo"] , ["src/buildtool/file_system", "object_type"] , ["src/utils/cpp", "hex_string"] ] @@ -102,7 +113,14 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["content_blob_container"] , "hdrs": ["content_blob_container.hpp"] + , "deps": [["@", "gsl", "", "gsl"], ["src/utils/cpp", "transformed_range"]] + , "stage": ["src", "buildtool", "execution_api", "common"] + } +, "artifact_blob_container": + { "type": ["@", "rules", "CC", "library"] + , "name": ["artifact_blob_container"] + , "hdrs": ["artifact_blob_container.hpp"] + , "deps": ["content_blob_container", ["src/buildtool/common", "common"]] , "stage": ["src", "buildtool", "execution_api", "common"] - , "deps": [["src/utils/cpp", "transformed_range"], ["@", "gsl", "", "gsl"]] } } diff --git a/src/buildtool/execution_api/common/api_bundle.cpp b/src/buildtool/execution_api/common/api_bundle.cpp index 8808e5d86..2106f5556 100644 --- a/src/buildtool/execution_api/common/api_bundle.cpp +++ b/src/buildtool/execution_api/common/api_bundle.cpp @@ -24,7 +24,7 @@ auto ApiBundle::Create( gsl::not_null const& local_context, gsl::not_null const& remote_context, RepositoryConfig const* repo_config) -> ApiBundle { - auto const hash_fct = local_context->storage_config->hash_function; + auto const& hash_fct = local_context->storage_config->hash_function; IExecutionApi::Ptr local_api = std::make_shared(local_context, repo_config); IExecutionApi::Ptr remote_api = local_api; @@ -37,7 +37,7 @@ auto ApiBundle::Create( remote_context->auth, remote_context->retry_config, config, - hash_fct); + &hash_fct); } return ApiBundle{.hash_function = hash_fct, .local = std::move(local_api), @@ -58,7 +58,7 @@ auto 
ApiBundle::MakeRemote( authentication, retry_config, config, - hash_function); + &hash_function); } return local; } diff --git a/src/buildtool/execution_api/common/api_bundle.hpp b/src/buildtool/execution_api/common/api_bundle.hpp index 28595010d..e342ce555 100644 --- a/src/buildtool/execution_api/common/api_bundle.hpp +++ b/src/buildtool/execution_api/common/api_bundle.hpp @@ -49,9 +49,7 @@ struct ApiBundle final { gsl::not_null const& retry_config) const -> gsl::not_null; - HashFunction const hash_function; - // 7 bytes of alignment. - + HashFunction const& hash_function; gsl::not_null const local; gsl::not_null const remote; }; diff --git a/src/buildtool/execution_api/common/blob_tree.cpp b/src/buildtool/execution_api/common/blob_tree.cpp index 6e04d55f6..6bb503b7a 100644 --- a/src/buildtool/execution_api/common/blob_tree.cpp +++ b/src/buildtool/execution_api/common/blob_tree.cpp @@ -18,11 +18,13 @@ #include #include "src/buildtool/common/artifact.hpp" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/git_repo.hpp" #include "src/buildtool/file_system/object_type.hpp" #include "src/utils/cpp/hex_string.hpp" -/// NOLINTNEXTLINE(misc-no-recursion) auto BlobTree::FromDirectoryTree(DirectoryTreePtr const& tree, std::filesystem::path const& parent) noexcept -> std::optional { @@ -61,13 +63,18 @@ auto BlobTree::FromDirectoryTree(DirectoryTreePtr const& tree, } } if (auto git_tree = GitRepo::CreateShallowTree(entries)) { - ArtifactDigest digest{ToHexString(git_tree->first), - git_tree->second.size(), - /*is_tree=*/true}; - return std::make_shared(ArtifactBlob{std::move(digest), - git_tree->second, - /*is_exec=*/false}, - nodes); + auto digest = + ArtifactDigestFactory::Create(HashFunction::Type::GitSHA1, + ToHexString(git_tree->first), + git_tree->second.size(), + /*is_tree=*/true); + if (digest) { + return 
std::make_shared( + ArtifactBlob{*std::move(digest), + git_tree->second, + /*is_exec=*/false}, + nodes); + } } } catch (...) { return std::nullopt; diff --git a/src/buildtool/execution_api/common/blob_tree.hpp b/src/buildtool/execution_api/common/blob_tree.hpp index 25d0f560a..051b0fe2f 100644 --- a/src/buildtool/execution_api/common/blob_tree.hpp +++ b/src/buildtool/execution_api/common/blob_tree.hpp @@ -22,7 +22,6 @@ #include #include "gsl/gsl" -#include "src/buildtool/compatibility/native_support.hpp" #include "src/buildtool/execution_api/bazel_msg/directory_tree.hpp" #include "src/buildtool/execution_api/common/artifact_blob_container.hpp" @@ -38,8 +37,7 @@ class BlobTree { [[nodiscard]] auto Blob() const noexcept -> ArtifactBlob { return blob_; } [[nodiscard]] auto IsTree() const noexcept -> bool { - return NativeSupport::IsTree( - static_cast(blob_.digest).hash()); + return blob_.digest.IsTree(); } /// \brief Create a `BlobTree` from a `DirectoryTree`. diff --git a/src/buildtool/execution_api/common/bytestream_utils.cpp b/src/buildtool/execution_api/common/bytestream_utils.cpp new file mode 100644 index 000000000..de66d58e2 --- /dev/null +++ b/src/buildtool/execution_api/common/bytestream_utils.cpp @@ -0,0 +1,157 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "src/buildtool/execution_api/common/bytestream_utils.hpp" + +#include +#include +#include + +#include "fmt/core.h" +#include "src/buildtool/common/bazel_types.hpp" + +namespace { +/// \brief Split a string into parts with '/' delimiter +/// \param request String to be split +/// \return A vector of parts on success or an empty vector on failure. +[[nodiscard]] auto SplitRequest(std::string const& request) noexcept + -> std::vector { + std::vector parts; + try { + std::size_t shift = 0; + for (std::size_t length = 0; shift + length < request.size(); + ++length) { + if (request.at(shift + length) == '/') { + parts.emplace_back(&request.at(shift), length); + shift += length + 1; + length = 0; + } + } + + if (shift < request.size()) { + parts.emplace_back(&request.at(shift), request.size() - shift); + } + } catch (...) { + return {}; + } + return parts; +} + +[[nodiscard]] inline auto ToBazelDigest(std::string hash, + std::int64_t size) noexcept + -> bazel_re::Digest { + bazel_re::Digest digest{}; + digest.set_hash(std::move(hash)); + digest.set_size_bytes(size); + return digest; +} +} // namespace + +ByteStreamUtils::ReadRequest::ReadRequest( + std::string instance_name, + bazel_re::Digest const& digest) noexcept + : instance_name_{std::move(instance_name)}, + hash_{digest.hash()}, + size_{digest.size_bytes()} {} + +auto ByteStreamUtils::ReadRequest::ToString() && noexcept -> std::string { + return fmt::format("{}/{}/{}/{}", + std::move(instance_name_), + ByteStreamUtils::kBlobs, + std::move(hash_), + size_); +} + +auto ByteStreamUtils::ReadRequest::FromString( + std::string const& request) noexcept -> std::optional { + static constexpr std::size_t kInstanceNameIndex = 0U; + static constexpr std::size_t kBlobsIndex = 1U; + static constexpr std::size_t kHashIndex = 2U; + static constexpr std::size_t kSizeIndex = 3U; + static constexpr std::size_t kReadRequestPartsCount = 4U; + + auto const parts = ::SplitRequest(request); + if (parts.size() != 
kReadRequestPartsCount or + parts.at(kBlobsIndex).compare(ByteStreamUtils::kBlobs) != 0) { + return std::nullopt; + } + + ReadRequest result; + result.instance_name_ = std::string(parts.at(kInstanceNameIndex)); + result.hash_ = std::string(parts.at(kHashIndex)); + try { + result.size_ = std::stoi(std::string(parts.at(kSizeIndex))); + } catch (...) { + return std::nullopt; + } + return result; +} + +auto ByteStreamUtils::ReadRequest::GetDigest() const noexcept + -> bazel_re::Digest { + return ToBazelDigest(hash_, size_); +} + +ByteStreamUtils::WriteRequest::WriteRequest( + std::string instance_name, + std::string uuid, + bazel_re::Digest const& digest) noexcept + : instance_name_{std::move(instance_name)}, + uuid_{std::move(uuid)}, + hash_{digest.hash()}, + size_{digest.size_bytes()} {} + +auto ByteStreamUtils::WriteRequest::ToString() && noexcept -> std::string { + return fmt::format("{}/{}/{}/{}/{}/{}", + std::move(instance_name_), + ByteStreamUtils::kUploads, + std::move(uuid_), + ByteStreamUtils::kBlobs, + std::move(hash_), + size_); +} + +auto ByteStreamUtils::WriteRequest::FromString( + std::string const& request) noexcept -> std::optional { + static constexpr std::size_t kInstanceNameIndex = 0U; + static constexpr std::size_t kUploadsIndex = 1U; + static constexpr std::size_t kUUIDIndex = 2U; + static constexpr std::size_t kBlobsIndex = 3U; + static constexpr std::size_t kHashIndex = 4U; + static constexpr std::size_t kSizeIndex = 5U; + static constexpr std::size_t kWriteRequestPartsCount = 6U; + + auto const parts = ::SplitRequest(request); + if (parts.size() != kWriteRequestPartsCount or + parts.at(kUploadsIndex).compare(ByteStreamUtils::kUploads) != 0 or + parts.at(kBlobsIndex).compare(ByteStreamUtils::kBlobs) != 0) { + return std::nullopt; + } + + WriteRequest result; + result.instance_name_ = std::string(parts.at(kInstanceNameIndex)); + result.uuid_ = std::string(parts.at(kUUIDIndex)); + result.hash_ = std::string(parts.at(kHashIndex)); + try { + 
result.size_ = std::stoi(std::string(parts.at(kSizeIndex))); + } catch (...) { + return std::nullopt; + } + return result; +} + +auto ByteStreamUtils::WriteRequest::GetDigest() const noexcept + -> bazel_re::Digest { + return ToBazelDigest(hash_, size_); +} diff --git a/src/buildtool/execution_api/common/bytestream_utils.hpp b/src/buildtool/execution_api/common/bytestream_utils.hpp new file mode 100644 index 000000000..232c67284 --- /dev/null +++ b/src/buildtool/execution_api/common/bytestream_utils.hpp @@ -0,0 +1,103 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_BYTESTREAM_UTILS_HPP +#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_BYTESTREAM_UTILS_HPP + +#include +#include +#include +#include + +namespace build::bazel::remote::execution::v2 { +class Digest; +} +namespace bazel_re = build::bazel::remote::execution::v2; + +class ByteStreamUtils final { + static constexpr auto* kBlobs = "blobs"; + static constexpr auto* kUploads = "uploads"; + + public: + // Chunk size for uploads (default size used by BuildBarn) + static constexpr std::size_t kChunkSize = 64UL * 1024; + + /// \brief Create a read request for the bytestream service to be + /// transferred over the net. Handles serialization/deserialization on its + /// own. The pattern is: + /// "{instance_name}/{kBlobs}/{digest.hash()}/{digest.size_bytes()}". 
+ /// "instance_name_example/blobs/62183d7a696acf7e69e218efc82c93135f8c85f895/4424712" + class ReadRequest final { + public: + explicit ReadRequest(std::string instance_name, + bazel_re::Digest const& digest) noexcept; + + [[nodiscard]] auto ToString() && noexcept -> std::string; + + [[nodiscard]] static auto FromString( + std::string const& request) noexcept -> std::optional; + + [[nodiscard]] auto GetInstanceName() const noexcept + -> std::string const& { + return instance_name_; + } + + [[nodiscard]] auto GetDigest() const noexcept -> bazel_re::Digest; + + private: + std::string instance_name_; + std::string hash_; + std::int64_t size_ = 0; + + ReadRequest() = default; + }; + + /// \brief Create a write request for the bytestream service to be + /// transferred over the net. Handles serialization/deserialization on its + /// own. The pattern is: + /// "{instance_name}/{kUploads}/{uuid}/{kBlobs}/{digest.hash()}/{digest.size_bytes()}". + /// "instance_name_example/uploads/c4f03510-7d56-4490-8934-01bce1b1288e/blobs/62183d7a696acf7e69e218efc82c93135f8c85f895/4424712" + class WriteRequest final { + public: + explicit WriteRequest(std::string instance_name, + std::string uuid, + bazel_re::Digest const& digest) noexcept; + + [[nodiscard]] auto ToString() && noexcept -> std::string; + + [[nodiscard]] static auto FromString( + std::string const& request) noexcept -> std::optional; + + [[nodiscard]] auto GetInstanceName() const noexcept + -> std::string const& { + return instance_name_; + } + + [[nodiscard]] auto GetUUID() const noexcept -> std::string const& { + return uuid_; + } + + [[nodiscard]] auto GetDigest() const noexcept -> bazel_re::Digest; + + private: + std::string instance_name_; + std::string uuid_; + std::string hash_; + std::int64_t size_ = 0; + + WriteRequest() = default; + }; +}; + +#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_BYTESTREAM_UTILS_HPP diff --git a/src/buildtool/execution_api/common/common_api.cpp 
b/src/buildtool/execution_api/common/common_api.cpp index 74e52c191..c21d14a45 100644 --- a/src/buildtool/execution_api/common/common_api.cpp +++ b/src/buildtool/execution_api/common/common_api.cpp @@ -81,13 +81,12 @@ auto CommonRetrieveToFds( return true; } -/// NOLINTNEXTLINE(misc-no-recursion) auto CommonUploadBlobTree(BlobTreePtr const& blob_tree, IExecutionApi const& api) noexcept -> bool { // Create digest list from blobs for batch availability check. auto missing_blobs_info = GetMissingArtifactsInfo( api, blob_tree->begin(), blob_tree->end(), [](BlobTreePtr const& node) { - return ArtifactDigest{node->Blob().digest}; + return node->Blob().digest; }); if (not missing_blobs_info) { Logger::Log(LogLevel::Error, @@ -133,11 +132,10 @@ auto CommonUploadTreeCompatible( ArtifactBlobContainer blobs{}; // Store and upload blobs, taking into account the maximum transfer size. auto digest = BazelMsgFactory::CreateDirectoryDigestFromTree( - build_root, resolve_links, [&blobs, &api](BazelBlob&& blob) { + build_root, resolve_links, [&blobs, &api](ArtifactBlob&& blob) { return UpdateContainerAndUpload( &blobs, - std::move(ArtifactBlob{ - ArtifactDigest{blob.digest}, blob.data, blob.is_exec}), + std::move(blob), /*exception_is_fatal=*/false, [&api](ArtifactBlobContainer&& container) -> bool { return api.Upload(std::move(container), @@ -159,7 +157,7 @@ auto CommonUploadTreeCompatible( Logger::Log(LogLevel::Debug, "failed to upload blobs for build root."); return std::nullopt; } - return ArtifactDigest{*digest}; + return digest; } auto CommonUploadTreeNative(IExecutionApi const& api, diff --git a/src/buildtool/execution_api/common/common_api.hpp b/src/buildtool/execution_api/common/common_api.hpp index c11180da4..16931832d 100644 --- a/src/buildtool/execution_api/common/common_api.hpp +++ b/src/buildtool/execution_api/common/common_api.hpp @@ -18,7 +18,9 @@ #include #include #include +#include #include +#include #include #include #include @@ -59,21 +61,26 @@ struct 
MissingArtifactsInfo { /// be uploaded. /// \returns A struct storing the missing artifacts and a back-mapping to the /// original given type, or nullopt in case of exceptions. -template +template + requires(std::is_same_v< + TValue, + typename std::iterator_traits::value_type>) [[nodiscard]] auto GetMissingArtifactsInfo( IExecutionApi const& api, - typename std::vector::const_iterator const& begin, - typename std::vector::const_iterator const& end, - typename std::function const& converter) noexcept - -> std::optional> { + TIterator const& begin, + TIterator const& end, + typename std::function const& + converter) noexcept -> std::optional> { std::vector digests; - digests.reserve(end - begin); - MissingArtifactsInfo res{}; + digests.reserve(std::distance(begin, end)); + MissingArtifactsInfo res{}; for (auto it = begin; it != end; ++it) { try { - auto dgst = converter(*it); // can't enforce it to be noexcept - digests.emplace_back(dgst); - res.back_map.emplace(std::move(dgst), *it); + auto const inserted = + res.back_map.insert({std::invoke(converter, *it), *it}); + if (inserted.second) { + digests.emplace_back(inserted.first->first); + } } catch (...) 
{ return std::nullopt; } diff --git a/src/buildtool/execution_api/common/content_blob_container.hpp b/src/buildtool/execution_api/common/content_blob_container.hpp index f15187f1f..1788d2296 100644 --- a/src/buildtool/execution_api/common/content_blob_container.hpp +++ b/src/buildtool/execution_api/common/content_blob_container.hpp @@ -37,9 +37,9 @@ struct ContentBlob final { bool is_exec) noexcept : digest{std::move(mydigest)}, data(mydata), is_exec{is_exec} {} - TDigest digest{}; - std::shared_ptr data{}; - bool is_exec{}; + TDigest digest; + std::shared_ptr data; + bool is_exec = false; }; template diff --git a/src/buildtool/execution_api/common/execution_action.hpp b/src/buildtool/execution_api/common/execution_action.hpp index 649c634c4..a5284ca15 100644 --- a/src/buildtool/execution_api/common/execution_action.hpp +++ b/src/buildtool/execution_api/common/execution_action.hpp @@ -16,6 +16,7 @@ #define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_REMOTE_EXECUTION_ACTION_HPP #include +#include #include #include "gsl/gsl" @@ -31,7 +32,7 @@ class IExecutionAction { public: using Ptr = std::unique_ptr; - enum class CacheFlag { + enum class CacheFlag : std::uint8_t { CacheOutput, ///< run and cache, or serve from cache DoNotCacheOutput, ///< run and do not cache, never served from cached FromCacheOnly, ///< do not run, only serve from cache diff --git a/src/buildtool/execution_api/common/execution_response.hpp b/src/buildtool/execution_api/common/execution_response.hpp index 57e1f0876..90c673177 100644 --- a/src/buildtool/execution_api/common/execution_response.hpp +++ b/src/buildtool/execution_api/common/execution_response.hpp @@ -15,13 +15,16 @@ #ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_REMOTE_EXECUTION_RESPONSE_HPP #define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_REMOTE_EXECUTION_RESPONSE_HPP +#include #include #include #include +#include #include #include "gsl/gsl" #include "src/buildtool/common/artifact.hpp" +#include "src/utils/cpp/expected.hpp" 
/// \brief Abstract response. /// Response of an action execution. Contains outputs from multiple commands and @@ -33,7 +36,7 @@ class IExecutionResponse { // set of paths found in output_directory_symlinks list of the action result using DirSymlinks = std::unordered_set; - enum class StatusCode { Failed, Success }; + enum class StatusCode : std::uint8_t { Failed, Success }; IExecutionResponse() = default; IExecutionResponse(IExecutionResponse const&) = delete; @@ -59,9 +62,10 @@ class IExecutionResponse { [[nodiscard]] virtual auto ActionDigest() const noexcept -> std::string const& = 0; - [[nodiscard]] virtual auto Artifacts() noexcept -> ArtifactInfos const& = 0; + [[nodiscard]] virtual auto Artifacts() noexcept + -> expected, std::string> = 0; [[nodiscard]] virtual auto DirectorySymlinks() noexcept - -> DirSymlinks const& = 0; + -> expected, std::string> = 0; }; #endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_REMOTE_EXECUTION_RESPONSE_HPP diff --git a/src/buildtool/execution_api/common/message_limits.hpp b/src/buildtool/execution_api/common/message_limits.hpp index 47b3b8b17..5ad1818a2 100644 --- a/src/buildtool/execution_api/common/message_limits.hpp +++ b/src/buildtool/execution_api/common/message_limits.hpp @@ -17,6 +17,6 @@ #include // Max size for batch transfers -static constexpr std::size_t kMaxBatchTransferSize = 3 * 1024 * 1024; +static constexpr std::size_t kMaxBatchTransferSize = 3UL * 1024 * 1024; static_assert(kMaxBatchTransferSize < GRPC_DEFAULT_MAX_RECV_MESSAGE_LENGTH, "Max batch transfer size too large."); diff --git a/src/buildtool/execution_api/common/stream_dumper.hpp b/src/buildtool/execution_api/common/stream_dumper.hpp index 81d3bf35c..c1d995884 100644 --- a/src/buildtool/execution_api/common/stream_dumper.hpp +++ b/src/buildtool/execution_api/common/stream_dumper.hpp @@ -22,7 +22,7 @@ #include "gsl/gsl" #include "src/buildtool/common/artifact.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" +#include 
"src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/execution_api/common/tree_reader_utils.hpp" #include "src/buildtool/file_system/object_type.hpp" @@ -65,7 +65,7 @@ class StreamDumper final { [[nodiscard]] auto DumpTree( Artifact::ObjectInfo const& info, gsl::not_null const& stream) const noexcept -> bool { - if (Compatibility::IsCompatible()) { + if (not impl_.IsNativeProtocol()) { auto directory = impl_.ReadDirectory(info.digest); auto data = directory ? TreeReaderUtils::DirectoryToString(*directory) diff --git a/src/buildtool/execution_api/common/tree_reader.hpp b/src/buildtool/execution_api/common/tree_reader.hpp index d9048e1f0..9c1b295e7 100644 --- a/src/buildtool/execution_api/common/tree_reader.hpp +++ b/src/buildtool/execution_api/common/tree_reader.hpp @@ -23,6 +23,7 @@ #include "src/buildtool/common/artifact.hpp" #include "src/buildtool/common/bazel_types.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/execution_api/common/tree_reader_utils.hpp" #include "src/buildtool/file_system/git_repo.hpp" #include "src/buildtool/file_system/object_type.hpp" @@ -52,13 +53,13 @@ class TreeReader final { TreeReaderUtils::InfoStoreFunc store_info = [&result, &parent](std::filesystem::path const& path, - Artifact::ObjectInfo const& info) { + Artifact::ObjectInfo&& info) { result.paths.emplace_back(parent / path); - result.infos.emplace_back(info); + result.infos.emplace_back(std::move(info)); return true; }; - if (Compatibility::IsCompatible()) { + if (not impl_.IsNativeProtocol()) { auto tree = impl_.ReadDirectory(digest); if (tree and not TreeReaderUtils::ReadObjectInfos(*tree, store_info)) { @@ -101,16 +102,16 @@ class TreeReader final { store, parent, digest, include_trees)) { return result; } + return std::nullopt; } catch (...) 
{ - // fallthrough + return std::nullopt; } - return std::nullopt; } private: TImpl impl_; - [[nodiscard]] static inline auto IsDirectoryEmpty( + [[nodiscard]] static auto IsDirectoryEmpty( bazel_re::Directory const& dir) noexcept -> bool { return dir.files().empty() and dir.directories().empty() and dir.symlinks().empty(); @@ -124,14 +125,14 @@ class TreeReader final { TreeReaderUtils::InfoStoreFunc internal_store = [this, &store, &parent, include_trees]( std::filesystem::path const& path, - Artifact::ObjectInfo const& info) -> bool { + Artifact::ObjectInfo&& info) -> bool { return IsTreeObject(info.type) ? ReadObjectInfosRecursively( store, parent / path, info.digest, include_trees) - : store(parent / path, info); + : store(parent / path, std::move(info)); }; - if (Compatibility::IsCompatible()) { + if (not impl_.IsNativeProtocol()) { if (auto tree = impl_.ReadDirectory(digest)) { if (include_trees and IsDirectoryEmpty(*tree)) { if (not store(parent, {digest, ObjectType::Tree})) { diff --git a/src/buildtool/execution_api/common/tree_reader_utils.cpp b/src/buildtool/execution_api/common/tree_reader_utils.cpp index 0573d89e2..5a55c0596 100644 --- a/src/buildtool/execution_api/common/tree_reader_utils.cpp +++ b/src/buildtool/execution_api/common/tree_reader_utils.cpp @@ -16,9 +16,11 @@ #include #include +#include #include "nlohmann/json.hpp" #include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/object_type.hpp" #include "src/buildtool/logging/log_level.hpp" @@ -26,53 +28,57 @@ #include "src/utils/cpp/hex_string.hpp" namespace { -[[nodiscard]] auto CreateObjectInfo(bazel_re::DirectoryNode const& node) - -> Artifact::ObjectInfo { - return Artifact::ObjectInfo{.digest = ArtifactDigest{node.digest()}, +[[nodiscard]] auto CreateObjectInfo(HashFunction hash_function, + bazel_re::DirectoryNode const& node) + -> std::optional { + 
auto digest = ArtifactDigestFactory::FromBazel(hash_function.GetType(), + node.digest()); + if (not digest) { + return std::nullopt; + } + return Artifact::ObjectInfo{.digest = *std::move(digest), .type = ObjectType::Tree}; } -[[nodiscard]] auto CreateObjectInfo(bazel_re::FileNode const& node) - -> Artifact::ObjectInfo { - return Artifact::ObjectInfo{.digest = ArtifactDigest{node.digest()}, +[[nodiscard]] auto CreateObjectInfo(HashFunction hash_function, + bazel_re::FileNode const& node) + -> std::optional { + auto digest = ArtifactDigestFactory::FromBazel(hash_function.GetType(), + node.digest()); + if (not digest) { + return std::nullopt; + } + return Artifact::ObjectInfo{.digest = *std::move(digest), .type = node.is_executable() ? ObjectType::Executable : ObjectType::File}; } -[[nodiscard]] auto CreateObjectInfo(bazel_re::SymlinkNode const& node, - HashFunction hash_function) +[[nodiscard]] auto CreateObjectInfo(HashFunction hash_function, + bazel_re::SymlinkNode const& node) -> Artifact::ObjectInfo { - return Artifact::ObjectInfo{ - .digest = ArtifactDigest::Create(hash_function, - node.target()), + .digest = ArtifactDigestFactory::HashDataAs( + hash_function, node.target()), .type = ObjectType::Symlink}; } template -[[nodiscard]] auto TreeToString(TTree const& entries) noexcept +[[nodiscard]] auto TreeToString(TTree const& entries) -> std::optional { auto json = nlohmann::json::object(); TreeReaderUtils::InfoStoreFunc store_infos = [&json](std::filesystem::path const& path, - Artifact::ObjectInfo const& info) -> bool { + Artifact::ObjectInfo&& info) -> bool { static constexpr bool kSizeUnknown = std::is_same_v; - json[path.string()] = info.ToString(kSizeUnknown); + json[path.string()] = std::move(info).ToString(kSizeUnknown); return true; }; if (TreeReaderUtils::ReadObjectInfos(entries, store_infos)) { - try { - return json.dump(2) + "\n"; - } catch (std::exception const& ex) { - Logger::Log(LogLevel::Error, - "dumping Directory to string failed with:\n{}", - 
ex.what()); - return std::nullopt; - } + return json.dump(2) + "\n"; } Logger::Log(LogLevel::Error, "reading object infos from Directory failed"); return std::nullopt; @@ -83,22 +89,30 @@ template auto TreeReaderUtils::ReadObjectInfos(bazel_re::Directory const& dir, InfoStoreFunc const& store_info) noexcept -> bool { + // SHA256 is used since bazel types are processed here. + HashFunction const hash_function{HashFunction::Type::PlainSHA256}; try { for (auto const& f : dir.files()) { - if (not store_info(f.name(), CreateObjectInfo(f))) { + auto info = CreateObjectInfo(hash_function, f); + if (not info or not store_info(f.name(), *std::move(info))) { return false; } } - // SHA256 is used since bazel types are processed here. - HashFunction const hash_function{HashFunction::Type::PlainSHA256}; for (auto const& l : dir.symlinks()) { - if (not store_info(l.name(), CreateObjectInfo(l, hash_function))) { + // check validity of symlinks + if (not PathIsNonUpwards(l.target())) { + Logger::Log( + LogLevel::Error, "found invalid symlink at {}", l.name()); + return false; + } + if (not store_info(l.name(), CreateObjectInfo(hash_function, l))) { return false; } } for (auto const& d : dir.directories()) { - if (not store_info(d.name(), CreateObjectInfo(d))) { + auto info = CreateObjectInfo(hash_function, d); + if (not info or not store_info(d.name(), *std::move(info))) { return false; } } @@ -118,13 +132,16 @@ auto TreeReaderUtils::ReadObjectInfos(GitRepo::tree_entries_t const& entries, for (auto const& [raw_id, es] : entries) { auto const hex_id = ToHexString(raw_id); for (auto const& entry : es) { - if (not store_info( + auto digest = + ArtifactDigestFactory::Create(HashFunction::Type::GitSHA1, + hex_id, + /*size is unknown*/ 0, + IsTreeObject(entry.type)); + if (not digest or + not store_info( entry.name, - Artifact::ObjectInfo{ - .digest = ArtifactDigest{hex_id, - /*size is unknown*/ 0, - IsTreeObject(entry.type)}, - .type = entry.type})) { + Artifact::ObjectInfo{.digest = 
*std::move(digest), + .type = entry.type})) { return false; } } @@ -140,11 +157,25 @@ auto TreeReaderUtils::ReadObjectInfos(GitRepo::tree_entries_t const& entries, auto TreeReaderUtils::DirectoryToString(bazel_re::Directory const& dir) noexcept -> std::optional { - return TreeToString(dir); + try { + return TreeToString(dir); + } catch (const std::exception& e) { + Logger::Log(LogLevel::Error, + "An error occurred while reading bazel:re::Directory:\n", + e.what()); + return std::nullopt; + } } auto TreeReaderUtils::GitTreeToString( GitRepo::tree_entries_t const& entries) noexcept -> std::optional { - return TreeToString(entries); + try { + return TreeToString(entries); + } catch (const std::exception& e) { + Logger::Log(LogLevel::Error, + "An error occurred while reading git tree:\n{}", + e.what()); + return std::nullopt; + } } diff --git a/src/buildtool/execution_api/common/tree_reader_utils.hpp b/src/buildtool/execution_api/common/tree_reader_utils.hpp index ed1518993..ab0aab5d7 100644 --- a/src/buildtool/execution_api/common/tree_reader_utils.hpp +++ b/src/buildtool/execution_api/common/tree_reader_utils.hpp @@ -27,7 +27,7 @@ class TreeReaderUtils final { public: using InfoStoreFunc = std::function; + Artifact::ObjectInfo&&)>; /// \brief Read object infos from directory. /// \returns true on success. 
diff --git a/src/buildtool/execution_api/execution_service/TARGETS b/src/buildtool/execution_api/execution_service/TARGETS index bf5c10d74..18a66818b 100644 --- a/src/buildtool/execution_api/execution_service/TARGETS +++ b/src/buildtool/execution_api/execution_service/TARGETS @@ -8,21 +8,24 @@ , "deps": [ "operation_cache" , ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "bazel_types"] , ["src/buildtool/execution_api/common", "common"] , ["src/buildtool/execution_api/local", "context"] , ["src/buildtool/logging", "logging"] - , ["src/buildtool/common", "bazel_types"] - , ["src/buildtool/storage", "storage"] , ["src/buildtool/storage", "config"] + , ["src/buildtool/storage", "storage"] , ["src/utils/cpp", "expected"] ] , "private-deps": [ ["@", "fmt", "", "fmt"] + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common", "common"] + , ["src/buildtool/common", "protocol_traits"] + , ["src/buildtool/execution_api/local", "local"] , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/logging", "log_level"] - , "operation_cache" - , ["src/utils/cpp", "verify_hash"] - , ["src/buildtool/execution_api/local", "local"] + , ["src/utils/cpp", "hex_string"] + , ["src/utils/cpp", "path"] ] , "private-ldflags": ["-pthread", "-Wl,--whole-archive,-lpthread,--no-whole-archive"] @@ -36,14 +39,18 @@ , "stage": ["src", "buildtool", "execution_api", "execution_service"] , "deps": [ ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "bazel_types"] , ["src/buildtool/execution_api/local", "context"] , ["src/buildtool/logging", "logging"] - , ["src/buildtool/common", "bazel_types"] - , ["src/buildtool/storage", "storage"] , ["src/buildtool/storage", "config"] + , ["src/buildtool/storage", "storage"] ] , "private-deps": - [["src/buildtool/logging", "log_level"], ["src/utils/cpp", "verify_hash"]] + [ ["@", "fmt", "", "fmt"] + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common", "common"] + , 
["src/buildtool/logging", "log_level"] + ] } , "cas_server": { "type": ["@", "rules", "CC", "library"] @@ -54,18 +61,19 @@ , "stage": ["src", "buildtool", "execution_api", "execution_service"] , "deps": [ ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "bazel_types"] , ["src/buildtool/execution_api/local", "context"] , ["src/buildtool/logging", "logging"] - , ["src/buildtool/common", "bazel_types"] - , ["src/buildtool/storage", "storage"] , ["src/buildtool/storage", "config"] + , ["src/buildtool/storage", "storage"] ] , "private-deps": - [ ["src/buildtool/compatibility", "compatibility"] - , ["src/buildtool/logging", "log_level"] + [ "cas_utils" , ["@", "fmt", "", "fmt"] - , ["src/utils/cpp", "verify_hash"] - , "cas_utils" + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common", "common"] + , ["src/buildtool/common", "protocol_traits"] + , ["src/buildtool/logging", "log_level"] ] } , "server_implementation": @@ -75,24 +83,25 @@ , "srcs": ["server_implementation.cpp"] , "stage": ["src", "buildtool", "execution_api", "execution_service"] , "deps": - [ ["src/buildtool/execution_api/common", "api_bundle"] + [ ["@", "gsl", "", "gsl"] + , ["src/buildtool/execution_api/common", "api_bundle"] , ["src/buildtool/execution_api/local", "context"] , ["src/buildtool/execution_api/remote", "context"] ] , "private-deps": - [ "execution_server" - , "ac_server" - , "cas_server" + [ "ac_server" , "bytestream_server" , "capabilities_server" + , "cas_server" + , "execution_server" , "operations_server" - , ["src/buildtool/logging", "log_level"] - , ["src/buildtool/logging", "logging"] - , ["@", "json", "", "json"] - , ["@", "grpc", "", "grpc++"] , ["@", "fmt", "", "fmt"] - , ["src/buildtool/compatibility", "compatibility"] + , ["@", "grpc", "", "grpc++"] + , ["@", "json", "", "json"] + , ["src/buildtool/common", "protocol_traits"] , ["src/buildtool/common/remote", "port"] + , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", 
"logging"] ] } , "bytestream_server": @@ -106,18 +115,19 @@ [ ["@", "gsl", "", "gsl"] , ["src/buildtool/execution_api/local", "context"] , ["src/buildtool/logging", "logging"] - , ["src/buildtool/storage", "storage"] , ["src/buildtool/storage", "config"] + , ["src/buildtool/storage", "storage"] ] , "private-deps": - [ ["src/buildtool/compatibility", "compatibility"] - , ["src/buildtool/execution_api/common", "bytestream-common"] + [ "cas_utils" + , ["@", "fmt", "", "fmt"] + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common", "bazel_types"] + , ["src/buildtool/common", "common"] + , ["src/buildtool/execution_api/common", "bytestream_utils"] , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/logging", "log_level"] , ["src/utils/cpp", "tmp_dir"] - , ["@", "fmt", "", "fmt"] - , ["src/utils/cpp", "verify_hash"] - , "cas_utils" ] } , "capabilities_server": @@ -127,11 +137,14 @@ , "srcs": ["capabilities_server.cpp"] , "proto": [["@", "bazel_remote_apis", "", "remote_execution_proto"]] , "stage": ["src", "buildtool", "execution_api", "execution_service"] - , "deps": [["src/buildtool/common", "bazel_types"]] + , "deps": + [ ["src/buildtool/common", "bazel_types"] + , ["src/buildtool/crypto", "hash_function"] + ] , "private-deps": - [ ["src/buildtool/logging", "log_level"] + [ ["src/buildtool/common", "protocol_traits"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["src/buildtool/compatibility", "compatibility"] ] } , "operation_cache": @@ -141,7 +154,7 @@ , "srcs": ["operation_cache.cpp"] , "stage": ["src", "buildtool", "execution_api", "execution_service"] , "proto": [["@", "googleapis", "", "google_longrunning_operations_proto"]] - , "private-deps": [["src/buildtool/logging", "logging"]] + , "deps": [["@", "protoc", "", "libprotobuf"]] } , "operations_server": { "type": ["@", "rules", "CC", "library"] @@ -151,14 +164,15 @@ , "deps": [ "operation_cache" , ["@", "gsl", "", "gsl"] + 
, ["@", "protoc", "", "libprotobuf"] , ["src/buildtool/logging", "logging"] ] , "proto": [["@", "googleapis", "", "google_longrunning_operations_proto"]] , "stage": ["src", "buildtool", "execution_api", "execution_service"] , "private-deps": - [ "operation_cache" + [ ["@", "fmt", "", "fmt"] , ["src/buildtool/logging", "log_level"] - , ["src/utils/cpp", "verify_hash"] + , ["src/utils/cpp", "hex_string"] ] } , "cas_utils": @@ -169,13 +183,14 @@ , "stage": ["src", "buildtool", "execution_api", "execution_service"] , "deps": [ ["@", "grpc", "", "grpc++"] - , ["src/buildtool/common", "bazel_types"] + , ["src/buildtool/common", "common"] , ["src/buildtool/storage", "storage"] , ["src/utils/cpp", "expected"] ] , "private-deps": [ ["@", "fmt", "", "fmt"] - , ["src/buildtool/compatibility", "compatibility"] + , ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "protocol_traits"] , ["src/buildtool/file_system", "file_system_manager"] ] } diff --git a/src/buildtool/execution_api/execution_service/ac_server.cpp b/src/buildtool/execution_api/execution_service/ac_server.cpp index 0d85596e0..d0c1a74c5 100644 --- a/src/buildtool/execution_api/execution_service/ac_server.cpp +++ b/src/buildtool/execution_api/execution_service/ac_server.cpp @@ -15,34 +15,37 @@ #include "src/buildtool/execution_api/execution_service/ac_server.hpp" #include "fmt/core.h" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/storage/garbage_collector.hpp" -#include "src/utils/cpp/verify_hash.hpp" auto ActionCacheServiceImpl::GetActionResult( ::grpc::ServerContext* /*context*/, const ::bazel_re::GetActionResultRequest* request, ::bazel_re::ActionResult* response) -> ::grpc::Status { - if (auto error_msg = IsAHash(request->action_digest().hash()); error_msg) { - logger_.Emit(LogLevel::Debug, "{}", *error_msg); - return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, 
*error_msg}; + auto action_digest = ArtifactDigestFactory::FromBazel( + storage_config_.hash_function.GetType(), request->action_digest()); + if (not action_digest) { + logger_.Emit(LogLevel::Debug, "{}", action_digest.error()); + return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, + std::move(action_digest).error()}; } - logger_.Emit(LogLevel::Trace, - "GetActionResult: {}", - request->action_digest().hash()); - auto lock = GarbageCollector::SharedLock(storage_config_); + logger_.Emit(LogLevel::Trace, "GetActionResult: {}", action_digest->hash()); + auto const lock = GarbageCollector::SharedLock(storage_config_); if (not lock) { - auto str = fmt::format("Could not acquire SharedLock"); - logger_.Emit(LogLevel::Error, str); - return grpc::Status{grpc::StatusCode::INTERNAL, str}; + static constexpr auto kStr = "Could not acquire SharedLock"; + logger_.Emit(LogLevel::Error, kStr); + return grpc::Status{grpc::StatusCode::INTERNAL, kStr}; } - auto x = storage_.ActionCache().CachedResult(request->action_digest()); - if (not x) { + + auto action_result = storage_.ActionCache().CachedResult(*action_digest); + if (not action_result) { return grpc::Status{ grpc::StatusCode::NOT_FOUND, - fmt::format("{} missing from AC", request->action_digest().hash())}; + fmt::format("{} missing from AC", action_digest->hash())}; } - *response = *x; + *response = *std::move(action_result); return ::grpc::Status::OK; } @@ -51,7 +54,7 @@ auto ActionCacheServiceImpl::UpdateActionResult( const ::bazel_re::UpdateActionResultRequest* /*request*/, ::bazel_re::ActionResult* /*response*/) -> ::grpc::Status { - auto const* str = "UpdateActionResult not implemented"; - logger_.Emit(LogLevel::Error, str); - return ::grpc::Status{grpc::StatusCode::UNIMPLEMENTED, str}; + static auto constexpr kStr = "UpdateActionResult not implemented"; + logger_.Emit(LogLevel::Error, kStr); + return ::grpc::Status{grpc::StatusCode::UNIMPLEMENTED, kStr}; } diff --git 
a/src/buildtool/execution_api/execution_service/ac_server.hpp b/src/buildtool/execution_api/execution_service/ac_server.hpp index 59b8297f6..268bbf9a0 100644 --- a/src/buildtool/execution_api/execution_service/ac_server.hpp +++ b/src/buildtool/execution_api/execution_service/ac_server.hpp @@ -73,4 +73,4 @@ class ActionCacheServiceImpl final : public bazel_re::ActionCache::Service { Logger logger_{"execution-service"}; }; -#endif +#endif // AC_SERVER_HPP diff --git a/src/buildtool/execution_api/execution_service/bytestream_server.cpp b/src/buildtool/execution_api/execution_service/bytestream_server.cpp index 32adba912..642347cca 100644 --- a/src/buildtool/execution_api/execution_service/bytestream_server.cpp +++ b/src/buildtool/execution_api/execution_service/bytestream_server.cpp @@ -20,27 +20,15 @@ #include #include "fmt/core.h" -#include "src/buildtool/compatibility/native_support.hpp" -#include "src/buildtool/execution_api/common/bytestream_common.hpp" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/bazel_types.hpp" +#include "src/buildtool/execution_api/common/bytestream_utils.hpp" #include "src/buildtool/execution_api/execution_service/cas_utils.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/storage/garbage_collector.hpp" #include "src/utils/cpp/tmp_dir.hpp" -#include "src/utils/cpp/verify_hash.hpp" - -namespace { -auto ParseResourceName(std::string const& x) -> std::optional { - // resource name is like this - // remote-execution/uploads/c4f03510-7d56-4490-8934-01bce1b1288e/blobs/62183d7a696acf7e69e218efc82c93135f8c85f895/4424712 - if (auto end = x.rfind('/'); end != std::string::npos) { - if (auto start = x.rfind('/', end - 1); start != std::string::npos) { - return x.substr(start + 1, end - start - 1); - } - } - return std::nullopt; -} -} // namespace auto 
BytestreamServiceImpl::Read( ::grpc::ServerContext* /*context*/, @@ -48,40 +36,36 @@ auto BytestreamServiceImpl::Read( ::grpc::ServerWriter<::google::bytestream::ReadResponse>* writer) -> ::grpc::Status { logger_.Emit(LogLevel::Trace, "Read {}", request->resource_name()); - // resource_name is of type - // remote-execution/blobs/62f408d64bca5de775c4b1dbc3288fc03afd6b19eb/0 - auto hash = ParseResourceName(request->resource_name()); - if (not hash) { - auto str = fmt::format("could not parse {}", request->resource_name()); + auto const read_request = + ByteStreamUtils::ReadRequest::FromString(request->resource_name()); + if (not read_request) { + auto const str = + fmt::format("could not parse {}", request->resource_name()); logger_.Emit(LogLevel::Error, "{}", str); return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, str}; } - - if (auto error_msg = IsAHash(*hash); error_msg) { - logger_.Emit(LogLevel::Debug, "{}", *error_msg); - return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, *error_msg}; + auto const read_digest = ArtifactDigestFactory::FromBazel( + storage_config_.hash_function.GetType(), read_request->GetDigest()); + if (not read_digest) { + logger_.Emit(LogLevel::Debug, "{}", read_digest.error()); + return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, + read_digest.error()}; } - auto lock = GarbageCollector::SharedLock(storage_config_); + auto const lock = GarbageCollector::SharedLock(storage_config_); if (not lock) { - auto str = fmt::format("Could not acquire SharedLock"); - logger_.Emit(LogLevel::Error, str); - return grpc::Status{grpc::StatusCode::INTERNAL, str}; + static constexpr auto kStr = "Could not acquire SharedLock"; + logger_.Emit(LogLevel::Error, "{}", kStr); + return grpc::Status{grpc::StatusCode::INTERNAL, kStr}; } - std::optional path{}; + auto const path = + read_digest->IsTree() + ? 
storage_.CAS().TreePath(*read_digest) + : storage_.CAS().BlobPath(*read_digest, /*is_executable=*/false); - if (NativeSupport::IsTree(*hash)) { - ArtifactDigest dgst{NativeSupport::Unprefix(*hash), 0, true}; - path = storage_.CAS().TreePath(static_cast(dgst)); - } - else { - ArtifactDigest dgst{NativeSupport::Unprefix(*hash), 0, false}; - path = - storage_.CAS().BlobPath(static_cast(dgst), false); - } if (not path) { - auto str = fmt::format("could not find {}", *hash); + auto const str = fmt::format("could not find {}", read_digest->hash()); logger_.Emit(LogLevel::Error, "{}", str); return ::grpc::Status{::grpc::StatusCode::NOT_FOUND, str}; } @@ -91,12 +75,13 @@ auto BytestreamServiceImpl::Read( ::google::bytestream::ReadResponse response; std::string& buffer = *response.mutable_data(); - buffer.resize(kChunkSize); + buffer.resize(ByteStreamUtils::kChunkSize); while (not stream.eof()) { - stream.read(buffer.data(), kChunkSize); + stream.read(buffer.data(), ByteStreamUtils::kChunkSize); if (stream.bad()) { - auto const str = fmt::format("Failed to read data for {}", *hash); + auto const str = + fmt::format("Failed to read data for {}", read_digest->hash()); logger_.Emit(LogLevel::Error, str); return grpc::Status{grpc::StatusCode::INTERNAL, str}; } @@ -117,40 +102,46 @@ auto BytestreamServiceImpl::Write( ::google::bytestream::WriteRequest request; reader->Read(&request); logger_.Emit(LogLevel::Debug, "write {}", request.resource_name()); - auto hash = ParseResourceName(request.resource_name()); - if (not hash) { - auto str = fmt::format("could not parse {}", request.resource_name()); + auto const write_request = + ByteStreamUtils::WriteRequest::FromString(request.resource_name()); + if (not write_request) { + auto const str = + fmt::format("could not parse {}", request.resource_name()); logger_.Emit(LogLevel::Error, "{}", str); return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, str}; } - if (auto error_msg = IsAHash(*hash); error_msg) { - 
logger_.Emit(LogLevel::Debug, "{}", *error_msg); - return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, *error_msg}; + + auto const write_digest = ArtifactDigestFactory::FromBazel( + storage_config_.hash_function.GetType(), write_request->GetDigest()); + if (not write_digest) { + logger_.Emit(LogLevel::Debug, "{}", write_digest.error()); + return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, + write_digest.error()}; } logger_.Emit(LogLevel::Trace, "Write: {}, offset {}, finish write {}", - *hash, + write_digest->hash(), request.write_offset(), request.finish_write()); - auto lock = GarbageCollector::SharedLock(storage_config_); + auto const lock = GarbageCollector::SharedLock(storage_config_); if (not lock) { - auto str = fmt::format("Could not acquire SharedLock"); - logger_.Emit(LogLevel::Error, str); - return grpc::Status{grpc::StatusCode::INTERNAL, str}; + static constexpr auto kStr = "Could not acquire SharedLock"; + logger_.Emit(LogLevel::Error, "{}", kStr); + return grpc::Status{grpc::StatusCode::INTERNAL, kStr}; } - auto tmp_dir = storage_config_.CreateTypedTmpDir("execution-service"); + auto const tmp_dir = storage_config_.CreateTypedTmpDir("execution-service"); if (not tmp_dir) { return ::grpc::Status{::grpc::StatusCode::INTERNAL, "could not create TmpDir"}; } - auto tmp = tmp_dir->GetPath() / *hash; + auto tmp = tmp_dir->GetPath() / write_digest->hash(); { std::ofstream stream{tmp, std::ios::binary}; - do { + do { // NOLINT(cppcoreguidelines-avoid-do-while) if (not stream.good()) { - auto const str = - fmt::format("Failed to write data for {}", *hash); + auto const str = fmt::format("Failed to write data for {}", + write_digest->hash()); logger_.Emit(LogLevel::Error, "{}", str); return ::grpc::Status{::grpc::StatusCode::INTERNAL, str}; } @@ -159,55 +150,12 @@ auto BytestreamServiceImpl::Write( } while (not request.finish_write() and reader->Read(&request)); } - // Before storing a tree, we have to verify that its parts are present - bool 
const is_tree = NativeSupport::IsTree(*hash); - if (is_tree) { - // ... unfortunately, this requires us to read the whole tree object - // into memory - auto content = FileSystemManager::ReadFile(tmp); - if (not content) { - auto const msg = fmt::format( - "Failed to read temporary file {} for {}", tmp.string(), *hash); - logger_.Emit(LogLevel::Error, "{}", msg); - return ::grpc::Status{::grpc::StatusCode::INTERNAL, msg}; - } - - ArtifactDigest dgst{NativeSupport::Unprefix(*hash), 0, true}; - if (auto err = CASUtils::EnsureTreeInvariant( - static_cast(dgst), *content, storage_)) { - auto const str = fmt::format("Write: {}", *std::move(err)); - logger_.Emit(LogLevel::Error, "{}", str); - return ::grpc::Status{grpc::StatusCode::FAILED_PRECONDITION, str}; - } - } - - // Store blob and verify hash - std::optional stored; - if (is_tree) { - stored = storage_.CAS().StoreTree(tmp); - } - else { - stored = storage_.CAS().StoreBlob( - tmp, /*is_executable=*/false); - } - - if (not stored) { - // This is a serious problem: we have a sequence of bytes, but cannot - // write them to CAS. 
- auto str = fmt::format("Failed to store object {}", *hash); + auto const status = CASUtils::AddFileToCAS(*write_digest, tmp, storage_); + if (not status.ok()) { + auto const str = fmt::format("Write: {}", status.error_message()); logger_.Emit(LogLevel::Error, "{}", str); - return ::grpc::Status{::grpc::StatusCode::INTERNAL, str}; + return ::grpc::Status{status.error_code(), str}; } - - if (stored->hash() != *hash) { - // User error: did not get a file with the announced hash - auto str = fmt::format("In upload for {} received object with hash {}", - *hash, - stored->hash()); - logger_.Emit(LogLevel::Error, "{}", str); - return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, str}; - } - response->set_committed_size( static_cast(std::filesystem::file_size(tmp))); return ::grpc::Status::OK; @@ -218,7 +166,7 @@ auto BytestreamServiceImpl::QueryWriteStatus( const ::google::bytestream::QueryWriteStatusRequest* /*request*/, ::google::bytestream::QueryWriteStatusResponse* /*response*/) -> ::grpc::Status { - auto const* str = "QueryWriteStatus not implemented"; - logger_.Emit(LogLevel::Error, "{}", str); - return ::grpc::Status{grpc::StatusCode::UNIMPLEMENTED, str}; + static constexpr auto kStr = "QueryWriteStatus not implemented"; + logger_.Emit(LogLevel::Error, "{}", kStr); + return ::grpc::Status{grpc::StatusCode::UNIMPLEMENTED, kStr}; } diff --git a/src/buildtool/execution_api/execution_service/bytestream_server.hpp b/src/buildtool/execution_api/execution_service/bytestream_server.hpp index e246da0c8..27ea19723 100644 --- a/src/buildtool/execution_api/execution_service/bytestream_server.hpp +++ b/src/buildtool/execution_api/execution_service/bytestream_server.hpp @@ -88,4 +88,4 @@ class BytestreamServiceImpl : public ::google::bytestream::ByteStream::Service { Logger logger_{"execution-service:bytestream"}; }; -#endif +#endif // BYTESTREAM_SERVER_HPP diff --git a/src/buildtool/execution_api/execution_service/capabilities_server.cpp 
b/src/buildtool/execution_api/execution_service/capabilities_server.cpp index d8ba4279d..dc1532e1e 100644 --- a/src/buildtool/execution_api/execution_service/capabilities_server.cpp +++ b/src/buildtool/execution_api/execution_service/capabilities_server.cpp @@ -16,7 +16,7 @@ #include -#include "src/buildtool/compatibility/compatibility.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" @@ -25,7 +25,7 @@ auto CapabilitiesServiceImpl::GetCapabilities( const ::bazel_re::GetCapabilitiesRequest* /*request*/, ::bazel_re::ServerCapabilities* response) -> ::grpc::Status { - if (not Compatibility::IsCompatible()) { + if (ProtocolTraits::IsNative(hash_type_)) { auto const* str = "GetCapabilities not implemented"; Logger::Log(LogLevel::Error, str); return ::grpc::Status{grpc::StatusCode::UNIMPLEMENTED, str}; @@ -36,7 +36,7 @@ auto CapabilitiesServiceImpl::GetCapabilities( cache.add_digest_functions( ::bazel_re::DigestFunction_Value::DigestFunction_Value_SHA256); cache.mutable_action_cache_update_capabilities()->set_update_enabled(false); - static constexpr std::size_t kMaxBatchTransferSize = 1024 * 1024; + static constexpr std::size_t kMaxBatchTransferSize = 1024UL * 1024; cache.set_max_batch_total_size_bytes(kMaxBatchTransferSize); static_assert(kMaxBatchTransferSize < GRPC_DEFAULT_MAX_RECV_MESSAGE_LENGTH, "Max batch transfer size too large."); diff --git a/src/buildtool/execution_api/execution_service/capabilities_server.hpp b/src/buildtool/execution_api/execution_service/capabilities_server.hpp index 57c49fc90..d17065848 100644 --- a/src/buildtool/execution_api/execution_service/capabilities_server.hpp +++ b/src/buildtool/execution_api/execution_service/capabilities_server.hpp @@ -17,9 +17,13 @@ #include "build/bazel/remote/execution/v2/remote_execution.grpc.pb.h" #include "src/buildtool/common/bazel_types.hpp" +#include "src/buildtool/crypto/hash_function.hpp" class 
CapabilitiesServiceImpl final : public bazel_re::Capabilities::Service { public: + explicit CapabilitiesServiceImpl(HashFunction::Type hash_type) noexcept + : hash_type_{hash_type} {} + // GetCapabilities returns the server capabilities configuration of the // remote endpoint. // Only the capabilities of the services supported by the endpoint will @@ -32,5 +36,8 @@ class CapabilitiesServiceImpl final : public bazel_re::Capabilities::Service { const ::bazel_re::GetCapabilitiesRequest* request, ::bazel_re::ServerCapabilities* response) -> ::grpc::Status override; + + private: + HashFunction::Type const hash_type_; }; -#endif +#endif // CAPABILITIES_SERVER_HPP diff --git a/src/buildtool/execution_api/execution_service/cas_server.cpp b/src/buildtool/execution_api/execution_service/cas_server.cpp index 9960dc963..f32b2754d 100644 --- a/src/buildtool/execution_api/execution_service/cas_server.cpp +++ b/src/buildtool/execution_api/execution_service/cas_server.cpp @@ -23,25 +23,14 @@ #include #include "fmt/core.h" -#include "src/buildtool/compatibility/compatibility.hpp" -#include "src/buildtool/compatibility/native_support.hpp" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/execution_api/execution_service/cas_utils.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/storage/garbage_collector.hpp" -#include "src/utils/cpp/verify_hash.hpp" namespace { -inline constexpr std::size_t kGitSHA1Length = 42; -inline constexpr std::size_t kSHA256Length = 64; - -[[nodiscard]] auto IsValidHash(std::string const& x) -> bool { - auto error_msg = IsAHash(x); - auto const& length = x.size(); - return not error_msg and - ((Compatibility::IsCompatible() and length == kSHA256Length) or - length == kGitSHA1Length); -} - [[nodiscard]] auto ChunkingAlgorithmToString( ::bazel_re::ChunkingAlgorithm_Value type) -> std::string { 
switch (type) { @@ -58,64 +47,40 @@ inline constexpr std::size_t kSHA256Length = 64; return "[Unknown Chunking Algorithm Type]"; } } - -[[nodiscard]] auto CheckDigestConsistency( - bazel_re::Digest const& ref, - bazel_re::Digest const& computed) noexcept -> std::optional { - bool valid = ref.hash() == computed.hash(); - if (valid) { - bool const check_sizes = - Compatibility::IsCompatible() or ref.size_bytes() != 0; - if (check_sizes) { - valid = ref.size_bytes() == computed.size_bytes(); - } - } - if (not valid) { - return fmt::format( - "Blob {} is corrupted: provided digest {}:{} and digest computed " - "from data {}:{} do not correspond.", - ref.hash(), - ref.hash(), - ref.size_bytes(), - computed.hash(), - computed.size_bytes()); - } - return std::nullopt; -} } // namespace auto CASServiceImpl::FindMissingBlobs( ::grpc::ServerContext* /*context*/, const ::bazel_re::FindMissingBlobsRequest* request, ::bazel_re::FindMissingBlobsResponse* response) -> ::grpc::Status { - auto lock = GarbageCollector::SharedLock(storage_config_); + auto const lock = GarbageCollector::SharedLock(storage_config_); if (not lock) { - auto str = - fmt::format("FindMissingBlobs: could not acquire SharedLock"); - logger_.Emit(LogLevel::Error, str); - return grpc::Status{grpc::StatusCode::INTERNAL, str}; + static constexpr auto kStr = + "FindMissingBlobs: could not acquire SharedLock"; + logger_.Emit(LogLevel::Error, "{}", kStr); + return grpc::Status{grpc::StatusCode::INTERNAL, kStr}; } for (auto const& x : request->blob_digests()) { - auto const& hash = x.hash(); + auto const digest = ArtifactDigestFactory::FromBazel( + storage_config_.hash_function.GetType(), x); bool is_in_cas = false; - if (IsValidHash(hash)) { - logger_.Emit(LogLevel::Trace, "FindMissingBlobs: {}", hash); - ArtifactDigest const digest(x); + if (digest) { + logger_.Emit( + LogLevel::Trace, "FindMissingBlobs: {}", digest->hash()); is_in_cas = - NativeSupport::IsTree(hash) - ? 
storage_.CAS().TreePath(digest).has_value() - : storage_.CAS().BlobPath(digest, false).has_value(); + digest->IsTree() + ? storage_.CAS().TreePath(*digest).has_value() + : storage_.CAS().BlobPath(*digest, false).has_value(); } else { logger_.Emit(LogLevel::Error, "FindMissingBlobs: unsupported digest {}", - hash); + x.hash()); } if (not is_in_cas) { - auto* d = response->add_missing_blob_digests(); - d->CopyFrom(x); + *response->add_missing_blob_digests() = x; } } return ::grpc::Status::OK; @@ -125,59 +90,35 @@ auto CASServiceImpl::BatchUpdateBlobs( ::grpc::ServerContext* /*context*/, const ::bazel_re::BatchUpdateBlobsRequest* request, ::bazel_re::BatchUpdateBlobsResponse* response) -> ::grpc::Status { - auto lock = GarbageCollector::SharedLock(storage_config_); + auto const lock = GarbageCollector::SharedLock(storage_config_); if (not lock) { - auto str = - fmt::format("BatchUpdateBlobs: could not acquire SharedLock"); - logger_.Emit(LogLevel::Error, str); - return grpc::Status{grpc::StatusCode::INTERNAL, str}; + static constexpr auto kStr = + "BatchUpdateBlobs: could not acquire SharedLock"; + logger_.Emit(LogLevel::Error, "{}", kStr); + return grpc::Status{grpc::StatusCode::INTERNAL, kStr}; } + auto const hash_type = storage_config_.hash_function.GetType(); for (auto const& x : request->requests()) { auto const& hash = x.digest().hash(); logger_.Emit(LogLevel::Trace, "BatchUpdateBlobs: {}", hash); - if (not IsValidHash(hash)) { + auto const digest = + ArtifactDigestFactory::FromBazel(hash_type, x.digest()); + if (not digest) { auto const str = fmt::format("BatchUpdateBlobs: unsupported digest {}", hash); logger_.Emit(LogLevel::Error, "{}", str); return ::grpc::Status{grpc::StatusCode::INVALID_ARGUMENT, str}; } - logger_.Emit(LogLevel::Trace, "BatchUpdateBlobs: {}", hash); + logger_.Emit(LogLevel::Trace, "BatchUpdateBlobs: {}", digest->hash()); auto* r = response->add_responses(); r->mutable_digest()->CopyFrom(x.digest()); - bool const is_tree = 
NativeSupport::IsTree(hash); - if (is_tree) { - // In native mode: for trees, check whether the tree invariant holds - // before storing the actual tree object. - if (auto err = CASUtils::EnsureTreeInvariant( - x.digest(), x.data(), storage_)) { - auto const str = - fmt::format("BatchUpdateBlobs: {}", *std::move(err)); - logger_.Emit(LogLevel::Error, "{}", str); - return ::grpc::Status{grpc::StatusCode::FAILED_PRECONDITION, - str}; - } - } - - auto const cas_digest = - is_tree - ? storage_.CAS().StoreTree(x.data()) - : storage_.CAS().StoreBlob(x.data(), /*is_executable=*/false); - - if (not cas_digest) { + auto const status = CASUtils::AddDataToCAS(*digest, x.data(), storage_); + if (not status.ok()) { auto const str = - fmt::format("BatchUpdateBlobs: could not upload {} {}", - is_tree ? "tree" : "blob", - hash); + fmt::format("BatchUpdateBlobs: {}", status.error_message()); logger_.Emit(LogLevel::Error, "{}", str); - return ::grpc::Status{grpc::StatusCode::INTERNAL, str}; - } - - if (auto err = CheckDigestConsistency(x.digest(), *cas_digest)) { - auto const str = - fmt::format("BatchUpdateBlobs: {}", *std::move(err)); - logger_.Emit(LogLevel::Error, "{}", str); - return ::grpc::Status{grpc::StatusCode::INVALID_ARGUMENT, str}; + return ::grpc::Status{status.error_code(), str}; } } return ::grpc::Status::OK; @@ -187,22 +128,29 @@ auto CASServiceImpl::BatchReadBlobs( ::grpc::ServerContext* /*context*/, const ::bazel_re::BatchReadBlobsRequest* request, ::bazel_re::BatchReadBlobsResponse* response) -> ::grpc::Status { - auto lock = GarbageCollector::SharedLock(storage_config_); + auto const lock = GarbageCollector::SharedLock(storage_config_); if (not lock) { - auto const str = - fmt::format("BatchReadBlobs: Could not acquire SharedLock"); - logger_.Emit(LogLevel::Error, "{}", str); - return grpc::Status{grpc::StatusCode::INTERNAL, str}; + static constexpr auto kStr = + "BatchReadBlobs: Could not acquire SharedLock"; + logger_.Emit(LogLevel::Error, "{}", kStr); + 
return grpc::Status{grpc::StatusCode::INTERNAL, kStr}; } for (auto const& x : request->digests()) { auto* r = response->add_responses(); r->mutable_digest()->CopyFrom(x); - ArtifactDigest const digest(x); + auto const digest = ArtifactDigestFactory::FromBazel( + storage_config_.hash_function.GetType(), x); + if (not digest) { + auto const str = + fmt::format("BatchReadBlobs: unsupported digest {}", x.hash()); + logger_.Emit(LogLevel::Error, "{}", str); + return ::grpc::Status{grpc::StatusCode::INVALID_ARGUMENT, str}; + } auto const path = - NativeSupport::IsTree(x.hash()) - ? storage_.CAS().TreePath(digest) - : storage_.CAS().BlobPath(digest, /*is_executable=*/false); + digest->IsTree() + ? storage_.CAS().TreePath(*digest) + : storage_.CAS().BlobPath(*digest, /*is_executable=*/false); if (not path) { google::rpc::Status status; @@ -225,9 +173,9 @@ auto CASServiceImpl::GetTree( const ::bazel_re::GetTreeRequest* /*request*/, ::grpc::ServerWriter<::bazel_re::GetTreeResponse>* /*writer*/) -> ::grpc::Status { - auto const* str = "GetTree not implemented"; - logger_.Emit(LogLevel::Error, str); - return ::grpc::Status{grpc::StatusCode::UNIMPLEMENTED, str}; + static constexpr auto kStr = "GetTree not implemented"; + logger_.Emit(LogLevel::Error, "{}", kStr); + return ::grpc::Status{grpc::StatusCode::UNIMPLEMENTED, kStr}; } auto CASServiceImpl::SplitBlob(::grpc::ServerContext* /*context*/, @@ -235,23 +183,24 @@ auto CASServiceImpl::SplitBlob(::grpc::ServerContext* /*context*/, ::bazel_re::SplitBlobResponse* response) -> ::grpc::Status { if (not request->has_blob_digest()) { - auto str = fmt::format("SplitBlob: no blob digest provided"); - logger_.Emit(LogLevel::Error, str); - return ::grpc::Status{grpc::StatusCode::INVALID_ARGUMENT, str}; + static constexpr auto kStr = "SplitBlob: no blob digest provided"; + logger_.Emit(LogLevel::Error, "{}", kStr); + return ::grpc::Status{grpc::StatusCode::INVALID_ARGUMENT, kStr}; } - auto const& blob_digest = request->blob_digest(); - if 
(not IsValidHash(blob_digest.hash())) { - auto str = - fmt::format("SplitBlob: unsupported digest {}", blob_digest.hash()); + auto const blob_digest = ArtifactDigestFactory::FromBazel( + storage_config_.hash_function.GetType(), request->blob_digest()); + if (not blob_digest) { + auto const str = fmt::format("SplitBlob: unsupported digest {}", + request->blob_digest().hash()); logger_.Emit(LogLevel::Error, "{}", str); return ::grpc::Status{grpc::StatusCode::INVALID_ARGUMENT, str}; } - auto chunking_algorithm = request->chunking_algorithm(); + auto const chunking_algorithm = request->chunking_algorithm(); logger_.Emit(LogLevel::Debug, "SplitBlob({}, {})", - blob_digest.hash(), + blob_digest->hash(), ChunkingAlgorithmToString(chunking_algorithm)); // Print warning if unsupported chunking algorithm was requested. @@ -269,24 +218,24 @@ auto CASServiceImpl::SplitBlob(::grpc::ServerContext* /*context*/, } // Acquire garbage collection lock. - auto lock = GarbageCollector::SharedLock(storage_config_); + auto const lock = GarbageCollector::SharedLock(storage_config_); if (not lock) { - auto str = - fmt::format("SplitBlob: could not acquire garbage collection lock"); - logger_.Emit(LogLevel::Error, str); - return ::grpc::Status{grpc::StatusCode::INTERNAL, str}; + static constexpr auto kStr = + "SplitBlob: could not acquire garbage collection lock"; + logger_.Emit(LogLevel::Error, "{}", kStr); + return ::grpc::Status{grpc::StatusCode::INTERNAL, kStr}; } // Split blob into chunks. - auto split_result = chunking_algorithm == - ::bazel_re::ChunkingAlgorithm_Value:: - ChunkingAlgorithm_Value_IDENTITY - ? CASUtils::SplitBlobIdentity(blob_digest, storage_) - : CASUtils::SplitBlobFastCDC(blob_digest, storage_); + auto const split_result = + chunking_algorithm == ::bazel_re::ChunkingAlgorithm_Value:: + ChunkingAlgorithm_Value_IDENTITY + ? 
CASUtils::SplitBlobIdentity(*blob_digest, storage_) + : CASUtils::SplitBlobFastCDC(*blob_digest, storage_); if (not split_result) { auto const& status = split_result.error(); - auto str = fmt::format("SplitBlob: {}", status.error_message()); + auto const str = fmt::format("SplitBlob: {}", status.error_message()); logger_.Emit(LogLevel::Error, "{}", str); return ::grpc::Status{status.error_code(), str}; } @@ -294,20 +243,21 @@ auto CASServiceImpl::SplitBlob(::grpc::ServerContext* /*context*/, auto const& chunk_digests = *split_result; logger_.Emit(LogLevel::Debug, [&blob_digest, &chunk_digests]() { std::stringstream ss{}; - ss << "Split blob " << blob_digest.hash() << ":" - << blob_digest.size_bytes() << " into " << chunk_digests.size() - << " chunks: [ "; + ss << "Split blob " << blob_digest->hash() << ":" << blob_digest->size() + << " into " << chunk_digests.size() << " chunks: [ "; for (auto const& chunk_digest : chunk_digests) { - ss << chunk_digest.hash() << ":" << chunk_digest.size_bytes() - << " "; + ss << chunk_digest.hash() << ":" << chunk_digest.size() << " "; } ss << "]"; return ss.str(); }); - std::copy(chunk_digests.cbegin(), - chunk_digests.cend(), - pb::back_inserter(response->mutable_chunk_digests())); + std::transform(chunk_digests.cbegin(), + chunk_digests.cend(), + pb::back_inserter(response->mutable_chunk_digests()), + [](ArtifactDigest const& digest) { + return ArtifactDigestFactory::ToBazel(digest); + }); return ::grpc::Status::OK; } @@ -316,60 +266,59 @@ auto CASServiceImpl::SpliceBlob(::grpc::ServerContext* /*context*/, ::bazel_re::SpliceBlobResponse* response) -> ::grpc::Status { if (not request->has_blob_digest()) { - auto str = fmt::format("SpliceBlob: no blob digest provided"); - logger_.Emit(LogLevel::Error, str); - return ::grpc::Status{grpc::StatusCode::INVALID_ARGUMENT, str}; + static constexpr auto kStr = "SpliceBlob: no blob digest provided"; + logger_.Emit(LogLevel::Error, "{}", kStr); + return 
::grpc::Status{grpc::StatusCode::INVALID_ARGUMENT, kStr}; } - auto const& blob_digest = request->blob_digest(); - if (not IsValidHash(blob_digest.hash())) { - auto str = fmt::format("SpliceBlob: unsupported digest {}", - blob_digest.hash()); + auto const hash_type = storage_config_.hash_function.GetType(); + auto const blob_digest = + ArtifactDigestFactory::FromBazel(hash_type, request->blob_digest()); + if (not blob_digest) { + auto const str = fmt::format("SpliceBlob: unsupported digest {}", + request->blob_digest().hash()); logger_.Emit(LogLevel::Error, "{}", str); return ::grpc::Status{grpc::StatusCode::INVALID_ARGUMENT, str}; } logger_.Emit(LogLevel::Debug, "SpliceBlob({}, {} chunks)", - blob_digest.hash(), + blob_digest->hash(), request->chunk_digests().size()); - // Acquire garbage collection lock. - auto lock = GarbageCollector::SharedLock(storage_config_); - if (not lock) { - auto str = fmt::format( - "SpliceBlob: could not acquire garbage collection lock"); - logger_.Emit(LogLevel::Error, str); - return ::grpc::Status{grpc::StatusCode::INTERNAL, str}; - } - - auto chunk_digests = std::vector{}; + auto chunk_digests = std::vector{}; chunk_digests.reserve(request->chunk_digests().size()); for (auto const& x : request->chunk_digests()) { - if (not IsValidHash(x.hash())) { + auto chunk = ArtifactDigestFactory::FromBazel(hash_type, x); + if (not chunk) { auto const str = fmt::format("SpliceBlob: unsupported digest {}", x.hash()); logger_.Emit(LogLevel::Error, "{}", str); return ::grpc::Status{grpc::StatusCode::INVALID_ARGUMENT, str}; } - chunk_digests.push_back(x); + chunk_digests.emplace_back(*std::move(chunk)); + } + + // Acquire garbage collection lock. + auto const lock = GarbageCollector::SharedLock(storage_config_); + if (not lock) { + static constexpr auto kStr = + "SpliceBlob: could not acquire garbage collection lock"; + logger_.Emit(LogLevel::Error, "{}", kStr); + return ::grpc::Status{grpc::StatusCode::INTERNAL, kStr}; } // Splice blob from chunks. 
- auto splice_result = - CASUtils::SpliceBlob(blob_digest, chunk_digests, storage_); + auto const splice_result = + CASUtils::SpliceBlob(*blob_digest, chunk_digests, storage_); if (not splice_result) { auto const& status = splice_result.error(); auto const str = fmt::format("SpliceBlob: {}", status.error_message()); logger_.Emit(LogLevel::Error, "{}", str); return ::grpc::Status{status.error_code(), str}; } - if (auto err = CheckDigestConsistency(blob_digest, *splice_result)) { - auto const str = fmt::format("SpliceBlob: {}", *err); - logger_.Emit(LogLevel::Error, "{}", str); - return ::grpc::Status{grpc::StatusCode::INVALID_ARGUMENT, str}; - } - response->mutable_blob_digest()->CopyFrom(*splice_result); + (*response->mutable_blob_digest()) = + ArtifactDigestFactory::ToBazel(*splice_result); return ::grpc::Status::OK; } diff --git a/src/buildtool/execution_api/execution_service/cas_server.hpp b/src/buildtool/execution_api/execution_service/cas_server.hpp index 30f8697bb..2e139560a 100644 --- a/src/buildtool/execution_api/execution_service/cas_server.hpp +++ b/src/buildtool/execution_api/execution_service/cas_server.hpp @@ -222,4 +222,4 @@ class CASServiceImpl final Storage const& storage_; Logger logger_{"execution-service"}; }; -#endif +#endif // CAS_SERVER_HPP diff --git a/src/buildtool/execution_api/execution_service/cas_utils.cpp b/src/buildtool/execution_api/execution_service/cas_utils.cpp index 592cd6ce3..ecd75c4fb 100644 --- a/src/buildtool/execution_api/execution_service/cas_utils.cpp +++ b/src/buildtool/execution_api/execution_service/cas_utils.cpp @@ -14,11 +14,18 @@ #include "src/buildtool/execution_api/execution_service/cas_utils.hpp" +#include +#include + #include "fmt/core.h" -#include "src/buildtool/compatibility/native_support.hpp" +#include "gsl/gsl" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" +#include "src/buildtool/storage/large_object_cas.hpp" +#include 
"src/buildtool/storage/local_cas.hpp" -static auto ToGrpc(LargeObjectError&& error) noexcept -> grpc::Status { +namespace { +[[nodiscard]] auto ToGrpc(LargeObjectError&& error) noexcept -> grpc::Status { switch (error.Code()) { case LargeObjectErrorCode::Internal: return grpc::Status{grpc::StatusCode::INTERNAL, @@ -34,23 +41,94 @@ static auto ToGrpc(LargeObjectError&& error) noexcept -> grpc::Status { return grpc::Status{grpc::StatusCode::INTERNAL, "an unknown error"}; } -auto CASUtils::EnsureTreeInvariant(bazel_re::Digest const& digest, - std::string const& tree_data, - Storage const& storage) noexcept - -> std::optional { - auto error = storage.CAS().CheckTreeInvariant(digest, tree_data); - if (error) { - return std::move(*error).Message(); +class CASContentValidator final { + public: + explicit CASContentValidator(gsl::not_null const& storage, + bool is_owner = true) noexcept; + + template + [[nodiscard]] auto Add(ArtifactDigest const& digest, + TData const& data) const noexcept -> grpc::Status { + if (digest.IsTree()) { + // For trees, check whether the tree invariant holds before storing + // the actual tree object. + if (auto err = storage_.CAS().CheckTreeInvariant(digest, data)) { + return ToGrpc(std::move(*err)); + } + } + + auto const cas_digest = + digest.IsTree() ? StoreTree(data) : StoreBlob(data); + if (not cas_digest) { + // This is a serious problem: we have a sequence of bytes, but + // cannot write them to CAS. + return ::grpc::Status{grpc::StatusCode::INTERNAL, + fmt::format("Could not upload {} {}", + digest.IsTree() ? 
"tree" : "blob", + digest.hash())}; + } + + if (auto err = CheckDigestConsistency(digest, *cas_digest)) { + // User error: did not get a file with the announced hash + return ::grpc::Status{grpc::StatusCode::INVALID_ARGUMENT, + *std::move(err)}; + } + return ::grpc::Status::OK; } - return std::nullopt; + + private: + Storage const& storage_; + bool const is_owner_; + + template + [[nodiscard]] auto StoreTree(TData const& data) const noexcept + -> std::optional { + if constexpr (std::is_same_v) { + return storage_.CAS().StoreTree(data); + } + else { + return is_owner_ ? storage_.CAS().StoreTree(data) + : storage_.CAS().StoreTree(data); + } + } + + template + [[nodiscard]] auto StoreBlob(TData const& data) const noexcept + -> std::optional { + static constexpr bool kIsExec = false; + if constexpr (std::is_same_v) { + return storage_.CAS().StoreBlob(data, kIsExec); + } + else { + return is_owner_ ? storage_.CAS().StoreBlob(data, kIsExec) + : storage_.CAS().StoreBlob(data, kIsExec); + } + } + + [[nodiscard]] auto CheckDigestConsistency(ArtifactDigest const& ref, + ArtifactDigest const& computed) + const noexcept -> std::optional; +}; +} // namespace + +auto CASUtils::AddDataToCAS(ArtifactDigest const& digest, + std::string const& content, + Storage const& storage) noexcept -> grpc::Status { + return CASContentValidator{&storage}.Add(digest, content); } -auto CASUtils::SplitBlobIdentity(bazel_re::Digest const& blob_digest, - Storage const& storage) noexcept - -> expected, grpc::Status> { +auto CASUtils::AddFileToCAS(ArtifactDigest const& digest, + std::filesystem::path const& file, + Storage const& storage, + bool is_owner) noexcept -> grpc::Status { + return CASContentValidator{&storage, is_owner}.Add(digest, file); +} +auto CASUtils::SplitBlobIdentity(ArtifactDigest const& blob_digest, + Storage const& storage) noexcept + -> expected, grpc::Status> { // Check blob existence. - auto path = NativeSupport::IsTree(blob_digest.hash()) + auto path = blob_digest.IsTree() ? 
storage.CAS().TreePath(blob_digest) : storage.CAS().BlobPath(blob_digest, false); if (not path) { @@ -63,8 +141,8 @@ auto CASUtils::SplitBlobIdentity(bazel_re::Digest const& blob_digest, // operation is stored in (file) CAS. This means for the native mode, if we // return the identity of a tree, we need to put the tree data in file CAS // and return the resulting digest. - auto chunk_digests = std::vector{}; - if (NativeSupport::IsTree(blob_digest.hash())) { + auto chunk_digests = std::vector{}; + if (blob_digest.IsTree()) { auto tree_data = FileSystemManager::ReadFile(*path); if (not tree_data) { return unexpected{grpc::Status{ @@ -85,35 +163,61 @@ auto CASUtils::SplitBlobIdentity(bazel_re::Digest const& blob_digest, return chunk_digests; } -auto CASUtils::SplitBlobFastCDC(bazel_re::Digest const& blob_digest, +auto CASUtils::SplitBlobFastCDC(ArtifactDigest const& blob_digest, Storage const& storage) noexcept - -> expected, grpc::Status> { + -> expected, grpc::Status> { // Split blob into chunks: - auto split = NativeSupport::IsTree(blob_digest.hash()) - ? storage.CAS().SplitTree(blob_digest) - : storage.CAS().SplitBlob(blob_digest); + auto split = blob_digest.IsTree() ? storage.CAS().SplitTree(blob_digest) + : storage.CAS().SplitBlob(blob_digest); - // Process result: - if (split) { - return *std::move(split); + if (not split) { + return unexpected{ToGrpc(std::move(split).error())}; } - // Process errors - return unexpected{ToGrpc(std::move(split).error())}; + return *std::move(split); } -auto CASUtils::SpliceBlob(bazel_re::Digest const& blob_digest, - std::vector const& chunk_digests, +auto CASUtils::SpliceBlob(ArtifactDigest const& blob_digest, + std::vector const& chunk_digests, Storage const& storage) noexcept - -> expected { + -> expected { // Splice blob from chunks: auto splice = - NativeSupport::IsTree(blob_digest.hash()) + blob_digest.IsTree() ? 
storage.CAS().SpliceTree(blob_digest, chunk_digests) : storage.CAS().SpliceBlob(blob_digest, chunk_digests, false); - // Process result: - if (splice) { - return *std::move(splice); + if (not splice) { + return unexpected{ToGrpc(std::move(splice).error())}; } - return unexpected{ToGrpc(std::move(splice).error())}; + return *std::move(splice); +} + +namespace { +CASContentValidator::CASContentValidator( + gsl::not_null const& storage, + bool is_owner) noexcept + : storage_{*storage}, is_owner_{is_owner} {} + +auto CASContentValidator::CheckDigestConsistency(ArtifactDigest const& ref, + ArtifactDigest const& computed) + const noexcept -> std::optional { + bool valid = ref == computed; + if (valid) { + bool const check_sizes = not ProtocolTraits::IsNative( + storage_.GetHashFunction().GetType()) or + ref.size() != 0; + if (check_sizes) { + valid = ref.size() == computed.size(); + } + } + if (not valid) { + return fmt::format( + "Expected digest {}:{} and computed digest {}:{} do not match.", + ref.hash(), + ref.size(), + computed.hash(), + computed.size()); + } + return std::nullopt; } +} // namespace diff --git a/src/buildtool/execution_api/execution_service/cas_utils.hpp b/src/buildtool/execution_api/execution_service/cas_utils.hpp index a7aec9ed9..b98e32235 100644 --- a/src/buildtool/execution_api/execution_service/cas_utils.hpp +++ b/src/buildtool/execution_api/execution_service/cas_utils.hpp @@ -15,37 +15,44 @@ #ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_EXECUTION_SERVICE_CAS_UTILS_HPP #define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_EXECUTION_SERVICE_CAS_UTILS_HPP +#include #include #include #include #include "grpcpp/support/status.h" -#include "src/buildtool/common/bazel_types.hpp" +#include "src/buildtool/common/artifact_digest.hpp" #include "src/buildtool/storage/storage.hpp" #include "src/utils/cpp/expected.hpp" class CASUtils { public: - [[nodiscard]] static auto EnsureTreeInvariant( - bazel_re::Digest const& digest, - std::string const& tree_data, - Storage 
const& storage) noexcept -> std::optional; + [[nodiscard]] static auto AddDataToCAS(ArtifactDigest const& digest, + std::string const& content, + Storage const& storage) noexcept + -> grpc::Status; + + [[nodiscard]] static auto AddFileToCAS(ArtifactDigest const& digest, + std::filesystem::path const& file, + Storage const& storage, + bool is_owner = true) noexcept + -> grpc::Status; [[nodiscard]] static auto SplitBlobIdentity( - bazel_re::Digest const& blob_digest, + ArtifactDigest const& blob_digest, Storage const& storage) noexcept - -> expected, grpc::Status>; + -> expected, grpc::Status>; [[nodiscard]] static auto SplitBlobFastCDC( - bazel_re::Digest const& blob_digest, + ArtifactDigest const& blob_digest, Storage const& storage) noexcept - -> expected, grpc::Status>; + -> expected, grpc::Status>; [[nodiscard]] static auto SpliceBlob( - bazel_re::Digest const& blob_digest, - std::vector const& chunk_digests, + ArtifactDigest const& blob_digest, + std::vector const& chunk_digests, Storage const& storage) noexcept - -> expected; + -> expected; }; #endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_EXECUTION_SERVICE_CAS_UTILS_HPP diff --git a/src/buildtool/execution_api/execution_service/execution_server.cpp b/src/buildtool/execution_api/execution_service/execution_server.cpp index 162029752..1b416ec8f 100644 --- a/src/buildtool/execution_api/execution_service/execution_server.cpp +++ b/src/buildtool/execution_api/execution_service/execution_server.cpp @@ -19,14 +19,17 @@ #include #include -#include "execution_server.hpp" #include "fmt/core.h" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/execution_api/execution_service/operation_cache.hpp" #include "src/buildtool/execution_api/local/local_cas_reader.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/logging/log_level.hpp" #include 
"src/buildtool/storage/garbage_collector.hpp" -#include "src/utils/cpp/verify_hash.hpp" +#include "src/utils/cpp/hex_string.hpp" +#include "src/utils/cpp/path.hpp" namespace { void UpdateTimeStamp( @@ -45,7 +48,7 @@ void UpdateTimeStamp( Storage const& storage) noexcept -> expected; -[[nodiscard]] auto ToBazelAction(::bazel_re::ExecuteRequest const& request, +[[nodiscard]] auto ToBazelAction(ArtifactDigest const& action_digest, Storage const& storage) noexcept -> expected<::bazel_re::Action, std::string>; @@ -58,8 +61,11 @@ auto ExecutionServiceImpl::ToIExecutionAction( ::bazel_re::Action const& action, ::bazel_re::Command const& command) const noexcept -> std::optional { - auto const root_digest = - static_cast(action.input_root_digest()); + auto const root_digest = ArtifactDigestFactory::FromBazel( + storage_config_.hash_function.GetType(), action.input_root_digest()); + if (not root_digest) { + return std::nullopt; + } std::vector const args(command.arguments().begin(), command.arguments().end()); std::vector const files(command.output_files().begin(), @@ -70,7 +76,7 @@ auto ExecutionServiceImpl::ToIExecutionAction( for (auto const& x : command.environment_variables()) { env_vars.insert_or_assign(x.name(), x.value()); } - auto execution_action = api_.CreateAction(root_digest, + auto execution_action = api_.CreateAction(*root_digest, args, command.working_directory(), files, @@ -90,8 +96,16 @@ auto ExecutionServiceImpl::ToIExecutionAction( auto ExecutionServiceImpl::ToBazelExecuteResponse( IExecutionResponse::Ptr const& i_execution_response) const noexcept -> expected<::bazel_re::ExecuteResponse, std::string> { - auto result = ToBazelActionResult(i_execution_response->Artifacts(), - i_execution_response->DirectorySymlinks(), + auto artifacts = i_execution_response->Artifacts(); + if (not artifacts) { + return unexpected{std::move(artifacts).error()}; + } + auto dir_symlinks = i_execution_response->DirectorySymlinks(); + if (not dir_symlinks) { + return 
unexpected{std::move(dir_symlinks).error()}; + } + auto result = ToBazelActionResult(*std::move(artifacts).value(), + *std::move(dir_symlinks).value(), storage_); if (not result) { return unexpected{std::move(result).error()}; @@ -101,7 +115,7 @@ auto ExecutionServiceImpl::ToBazelExecuteResponse( action_result.set_exit_code(i_execution_response->ExitCode()); if (i_execution_response->HasStdErr()) { - auto cas_digest = + auto const cas_digest = storage_.CAS().StoreBlob(i_execution_response->StdErr(), /*is_executable=*/false); if (not cas_digest) { @@ -109,11 +123,12 @@ auto ExecutionServiceImpl::ToBazelExecuteResponse( fmt::format("Could not store stderr of action {}", i_execution_response->ActionDigest())}; } - action_result.mutable_stderr_digest()->CopyFrom(*cas_digest); + (*action_result.mutable_stderr_digest()) = + ArtifactDigestFactory::ToBazel(*cas_digest); } if (i_execution_response->HasStdOut()) { - auto cas_digest = + auto const cas_digest = storage_.CAS().StoreBlob(i_execution_response->StdOut(), /*is_executable=*/false); if (not cas_digest) { @@ -121,7 +136,8 @@ auto ExecutionServiceImpl::ToBazelExecuteResponse( fmt::format("Could not store stdout of action {}", i_execution_response->ActionDigest())}; } - action_result.mutable_stdout_digest()->CopyFrom(*cas_digest); + (*action_result.mutable_stdout_digest()) = + ArtifactDigestFactory::ToBazel(*cas_digest); } ::bazel_re::ExecuteResponse bazel_response{}; @@ -151,34 +167,41 @@ auto ExecutionServiceImpl::Execute( const ::bazel_re::ExecuteRequest* request, ::grpc::ServerWriter<::google::longrunning::Operation>* writer) -> ::grpc::Status { - auto lock = GarbageCollector::SharedLock(storage_config_); + auto const action_digest = ArtifactDigestFactory::FromBazel( + storage_config_.hash_function.GetType(), request->action_digest()); + if (not action_digest) { + logger_.Emit(LogLevel::Error, "{}", action_digest.error()); + return grpc::Status{grpc::StatusCode::INTERNAL, action_digest.error()}; + } + + auto const 
lock = GarbageCollector::SharedLock(storage_config_); if (not lock) { - auto str = fmt::format("Could not acquire SharedLock"); - logger_.Emit(LogLevel::Error, str); - return grpc::Status{grpc::StatusCode::INTERNAL, str}; + static constexpr auto kStr = "Could not acquire SharedLock"; + logger_.Emit(LogLevel::Error, "{}", kStr); + return grpc::Status{grpc::StatusCode::INTERNAL, kStr}; } - auto action = ToBazelAction(*request, storage_); + auto action = ToBazelAction(*action_digest, storage_); if (not action) { - logger_.Emit(LogLevel::Error, action.error()); + logger_.Emit(LogLevel::Error, "{}", action.error()); return ::grpc::Status{grpc::StatusCode::INTERNAL, std::move(action).error()}; } auto command = ToBazelCommand(*action, storage_); if (not command) { - logger_.Emit(LogLevel::Error, command.error()); + logger_.Emit(LogLevel::Error, "{}", command.error()); return ::grpc::Status{grpc::StatusCode::INTERNAL, std::move(command).error()}; } auto i_execution_action = ToIExecutionAction(*action, *command); if (not i_execution_action) { auto const str = fmt::format("Could not create action from {}", - request->action_digest().hash()); - logger_.Emit(LogLevel::Error, str); + action_digest->hash()); + logger_.Emit(LogLevel::Error, "{}", str); return ::grpc::Status{grpc::StatusCode::INTERNAL, str}; } - logger_.Emit(LogLevel::Info, "Execute {}", request->action_digest().hash()); + logger_.Emit(LogLevel::Info, "Execute {}", action_digest->hash()); // send initial response to the client auto op = ::google::longrunning::Operation{}; auto const& op_name = request->action_digest().hash(); @@ -193,19 +216,19 @@ auto ExecutionServiceImpl::Execute( logger_.Emit( LogLevel::Trace, "Finished execution of {} in {} seconds", - request->action_digest().hash(), + action_digest->hash(), std::chrono::duration_cast(t1 - t0).count()); if (i_execution_response == nullptr) { - auto const str = fmt::format("Failed to execute action {}", - request->action_digest().hash()); - 
logger_.Emit(LogLevel::Error, str); + auto const str = + fmt::format("Failed to execute action {}", action_digest->hash()); + logger_.Emit(LogLevel::Error, "{}", str); return ::grpc::Status{grpc::StatusCode::INTERNAL, str}; } auto execute_response = ToBazelExecuteResponse(i_execution_response); if (not execute_response) { - logger_.Emit(LogLevel::Error, execute_response.error()); + logger_.Emit(LogLevel::Error, "{}", execute_response.error()); return ::grpc::Status{grpc::StatusCode::INTERNAL, std::move(execute_response).error()}; } @@ -213,12 +236,12 @@ auto ExecutionServiceImpl::Execute( // Store the result in action cache if (i_execution_response->ExitCode() == 0 and not action->do_not_cache()) { if (not storage_.ActionCache().StoreResult( - request->action_digest(), execute_response->result())) { + *action_digest, execute_response->result())) { auto const str = fmt::format("Could not store action result for action {}", - request->action_digest().hash()); + action_digest->hash()); - logger_.Emit(LogLevel::Error, str); + logger_.Emit(LogLevel::Error, "{}", str); return ::grpc::Status{grpc::StatusCode::INTERNAL, str}; } } @@ -233,22 +256,23 @@ auto ExecutionServiceImpl::WaitExecution( ::grpc::ServerWriter<::google::longrunning::Operation>* writer) -> ::grpc::Status { auto const& hash = request->name(); - if (auto error_msg = IsAHash(hash)) { - logger_.Emit(LogLevel::Error, "{}", *error_msg); - return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, *error_msg}; + if (not IsHexString(hash)) { + auto const str = fmt::format("Invalid hash {}", hash); + logger_.Emit(LogLevel::Error, "{}", str); + return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, str}; } logger_.Emit(LogLevel::Trace, "WaitExecution: {}", hash); - std::optional<::google::longrunning::Operation> op; - do { - op = op_cache_.Query(hash); - if (not op) { - auto const& str = fmt::format( - "Executing action {} not found in internal cache.", hash); - logger_.Emit(LogLevel::Error, "{}", str); - 
return ::grpc::Status{grpc::StatusCode::INTERNAL, str}; - } + auto op = op_cache_.Query(hash); + while (op and not op->done()) { std::this_thread::sleep_for(std::chrono::seconds(1)); - } while (not op->done()); + op = op_cache_.Query(hash); + } + if (not op) { + auto const str = fmt::format( + "Executing action {} not found in internal cache.", hash); + logger_.Emit(LogLevel::Error, "{}", str); + return ::grpc::Status{grpc::StatusCode::INTERNAL, str}; + } writer->Write(*op); logger_.Emit(LogLevel::Trace, "Finished WaitExecution {}", hash); return ::grpc::Status::OK; @@ -262,30 +286,37 @@ namespace { ::bazel_re::OutputDirectory out_dir{}; *(out_dir.mutable_path()) = std::move(path); - if (Compatibility::IsCompatible()) { + LocalCasReader reader(&storage.CAS()); + if (ProtocolTraits::IsNative(storage.GetHashFunction().GetType())) { + // In native mode: Check validity of tree entries, otherwise set the + // digest directly. + if (not reader.ReadGitTree(digest)) { + auto const error = fmt::format( + "Found invalid entry in the Git Tree {}", digest.hash()); + return unexpected{error}; + } + (*out_dir.mutable_tree_digest()) = + ArtifactDigestFactory::ToBazel(digest); + } + else { // In compatible mode: Create a tree digest from directory // digest on the fly and set tree digest. 
- LocalCasReader reader(&storage.CAS()); - auto tree = reader.MakeTree(digest); + auto const tree = reader.MakeTree(digest); if (not tree) { - auto error = - fmt::format("Failed to build bazel Tree for {}", digest.hash()); - return unexpected{std::move(error)}; + return unexpected{fmt::format("Failed to build bazel Tree for {}", + digest.hash())}; } - auto cas_digest = storage.CAS().StoreBlob(tree->SerializeAsString(), - /*is_executable=*/false); + auto const cas_digest = + storage.CAS().StoreBlob(tree->SerializeAsString(), + /*is_executable=*/false); if (not cas_digest) { - auto error = fmt::format( + return unexpected{fmt::format( "Failed to add to the storage the bazel Tree for {}", - digest.hash()); - return unexpected{std::move(error)}; + digest.hash())}; } - *(out_dir.mutable_tree_digest()) = *std::move(cas_digest); - } - else { - // In native mode: Set the directory digest directly. - *(out_dir.mutable_tree_digest()) = digest; + (*out_dir.mutable_tree_digest()) = + ArtifactDigestFactory::ToBazel(*cas_digest); } return std::move(out_dir); } @@ -297,18 +328,24 @@ namespace { ::bazel_re::OutputSymlink out_link{}; *(out_link.mutable_path()) = std::move(path); // recover the target of the symlink - auto cas_path = storage.CAS().BlobPath(digest, /*is_executable=*/false); + auto const cas_path = + storage.CAS().BlobPath(digest, /*is_executable=*/false); if (not cas_path) { - auto error = - fmt::format("Failed to recover the symlink for {}", digest.hash()); - return unexpected{std::move(error)}; + return unexpected{ + fmt::format("Failed to recover the symlink for {}", digest.hash())}; } auto content = FileSystemManager::ReadFile(*cas_path); if (not content) { - auto error = fmt::format("Failed to read the symlink content for {}", - digest.hash()); - return unexpected{std::move(error)}; + return unexpected{fmt::format( + "Failed to read the symlink content for {}", digest.hash())}; + } + + // in native mode, check that we do not pass invalid symlinks + if 
(ProtocolTraits::IsNative(storage.GetHashFunction().GetType()) and + not PathIsNonUpwards(*content)) { + auto const error = fmt::format("Invalid symlink for {}", digest.hash()); + return unexpected{error}; } *(out_link.mutable_target()) = *std::move(content); @@ -319,8 +356,8 @@ namespace { Artifact::ObjectInfo const& info) noexcept -> ::bazel_re::OutputFile { ::bazel_re::OutputFile out_file{}; - *(out_file.mutable_path()) = std::move(path); - *(out_file.mutable_digest()) = info.digest; + (*out_file.mutable_path()) = std::move(path); + (*out_file.mutable_digest()) = ArtifactDigestFactory::ToBazel(info.digest); out_file.set_is_executable(IsExecutableObject(info.type)); return out_file; } @@ -371,37 +408,37 @@ namespace { return std::move(result); } -[[nodiscard]] auto ToBazelAction(::bazel_re::ExecuteRequest const& request, +[[nodiscard]] auto ToBazelAction(ArtifactDigest const& action_digest, Storage const& storage) noexcept -> expected<::bazel_re::Action, std::string> { - // get action description - if (auto error_msg = IsAHash(request.action_digest().hash())) { - return unexpected{std::move(*error_msg)}; - } - auto const action_path = - storage.CAS().BlobPath(request.action_digest(), false); + auto const action_path = storage.CAS().BlobPath(action_digest, false); if (not action_path) { return unexpected{fmt::format("could not retrieve blob {} from cas", - request.action_digest().hash())}; + action_digest.hash())}; } ::bazel_re::Action action{}; if (std::ifstream f(*action_path); not action.ParseFromIstream(&f)) { return unexpected{fmt::format("failed to parse action from blob {}", - request.action_digest().hash())}; + action_digest.hash())}; } - if (auto error_msg = IsAHash(action.input_root_digest().hash())) { - return unexpected{*std::move(error_msg)}; + + auto const hash_type = storage.GetHashFunction().GetType(); + auto const input_root_digest = + ArtifactDigestFactory::FromBazel(hash_type, action.input_root_digest()); + if (not input_root_digest) { + return 
unexpected{input_root_digest.error()}; } auto const input_root_path = - Compatibility::IsCompatible() - ? storage.CAS().BlobPath(action.input_root_digest(), false) - : storage.CAS().TreePath(action.input_root_digest()); + ProtocolTraits::IsNative(hash_type) + ? storage.CAS().TreePath(*input_root_digest) + : storage.CAS().BlobPath(*input_root_digest, + /*is_executable=*/false); if (not input_root_path) { return unexpected{ fmt::format("could not retrieve input root {} from cas", - action.input_root_digest().hash())}; + input_root_digest->hash())}; } return std::move(action); } @@ -409,19 +446,22 @@ namespace { [[nodiscard]] auto ToBazelCommand(bazel_re::Action const& action, Storage const& storage) noexcept -> expected { - if (auto error_msg = IsAHash(action.command_digest().hash())) { - return unexpected{*std::move(error_msg)}; + auto const command_digest = ArtifactDigestFactory::FromBazel( + storage.GetHashFunction().GetType(), action.command_digest()); + if (not command_digest) { + return unexpected{command_digest.error()}; } - auto path = storage.CAS().BlobPath(action.command_digest(), false); + auto const path = + storage.CAS().BlobPath(*command_digest, /*is_executable=*/false); if (not path) { return unexpected{fmt::format("Could not retrieve blob {} from cas", - action.command_digest().hash())}; + command_digest->hash())}; } ::bazel_re::Command c{}; if (std::ifstream f(*path); not c.ParseFromIstream(&f)) { return unexpected{fmt::format("Failed to parse command from blob {}", - action.command_digest().hash())}; + command_digest->hash())}; } return std::move(c); } diff --git a/src/buildtool/execution_api/execution_service/execution_server.hpp b/src/buildtool/execution_api/execution_service/execution_server.hpp index 01ede03ed..65709cf27 100644 --- a/src/buildtool/execution_api/execution_service/execution_server.hpp +++ b/src/buildtool/execution_api/execution_service/execution_server.hpp @@ -150,4 +150,4 @@ class ExecutionServiceImpl final : public 
bazel_re::Execution::Service { ::google::longrunning::Operation&& op) noexcept; }; -#endif +#endif // EXECUTION_SERVER_HPP diff --git a/src/buildtool/execution_api/execution_service/operation_cache.hpp b/src/buildtool/execution_api/execution_service/operation_cache.hpp index a53aff98d..1d51854e8 100644 --- a/src/buildtool/execution_api/execution_service/operation_cache.hpp +++ b/src/buildtool/execution_api/execution_service/operation_cache.hpp @@ -82,4 +82,4 @@ class OperationCache final { void GarbageCollection(); }; -#endif +#endif // OPERATION_CACHE_HPP diff --git a/src/buildtool/execution_api/execution_service/operations_server.cpp b/src/buildtool/execution_api/execution_service/operations_server.cpp index 0654a5ff8..33fd61b42 100644 --- a/src/buildtool/execution_api/execution_service/operations_server.cpp +++ b/src/buildtool/execution_api/execution_service/operations_server.cpp @@ -14,56 +14,57 @@ #include "src/buildtool/execution_api/execution_service/operations_server.hpp" +#include "fmt/core.h" #include "src/buildtool/execution_api/execution_service/operation_cache.hpp" #include "src/buildtool/logging/log_level.hpp" -#include "src/utils/cpp/verify_hash.hpp" +#include "src/utils/cpp/hex_string.hpp" -auto OperarationsServiceImpl::GetOperation( +auto OperationsServiceImpl::GetOperation( ::grpc::ServerContext* /*context*/, const ::google::longrunning::GetOperationRequest* request, ::google::longrunning::Operation* response) -> ::grpc::Status { auto const& hash = request->name(); - if (auto error_msg = IsAHash(hash); error_msg) { - logger_.Emit(LogLevel::Debug, "{}", *error_msg); - return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, *error_msg}; + if (not IsHexString(hash)) { + auto const str = fmt::format("Invalid hash {}", hash); + logger_.Emit(LogLevel::Debug, "{}", str); + return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, str}; } logger_.Emit(LogLevel::Trace, "GetOperation: {}", hash); - std::optional<::google::longrunning::Operation> op; - 
op = op_cache_.Query(hash); + auto op = op_cache_.Query(hash); if (not op) { - auto const& str = fmt::format( + auto const str = fmt::format( "Executing action {} not found in internal cache.", hash); logger_.Emit(LogLevel::Error, "{}", str); return ::grpc::Status{grpc::StatusCode::INTERNAL, str}; } - response->CopyFrom(*op); + *response = *std::move(op); return ::grpc::Status::OK; } -auto OperarationsServiceImpl::ListOperations( +auto OperationsServiceImpl::ListOperations( ::grpc::ServerContext* /*context*/, const ::google::longrunning::ListOperationsRequest* /*request*/, ::google::longrunning::ListOperationsResponse* /*response*/) -> ::grpc::Status { - auto const* str = "ListOperations not implemented"; - logger_.Emit(LogLevel::Error, str); - return ::grpc::Status{grpc::StatusCode::UNIMPLEMENTED, str}; + static constexpr auto kStr = "ListOperations not implemented"; + logger_.Emit(LogLevel::Error, kStr); + return ::grpc::Status{grpc::StatusCode::UNIMPLEMENTED, kStr}; } -auto OperarationsServiceImpl::DeleteOperation( +auto OperationsServiceImpl::DeleteOperation( ::grpc::ServerContext* /*context*/, const ::google::longrunning::DeleteOperationRequest* /*request*/, ::google::protobuf::Empty* /*response*/) -> ::grpc::Status { - auto const* str = "DeleteOperation not implemented"; - logger_.Emit(LogLevel::Error, str); - return ::grpc::Status{grpc::StatusCode::UNIMPLEMENTED, str}; + static constexpr auto kStr = "DeleteOperation not implemented"; + logger_.Emit(LogLevel::Error, kStr); + return ::grpc::Status{grpc::StatusCode::UNIMPLEMENTED, kStr}; } -auto OperarationsServiceImpl::CancelOperation( +auto OperationsServiceImpl::CancelOperation( ::grpc::ServerContext* /*context*/, const ::google::longrunning::CancelOperationRequest* /*request*/, ::google::protobuf::Empty* /*response*/) -> ::grpc::Status { - auto const* str = "CancelOperation not implemented"; - logger_.Emit(LogLevel::Error, str); - return ::grpc::Status{grpc::StatusCode::UNIMPLEMENTED, str}; + static 
constexpr auto kStr = "CancelOperation not implemented"; + logger_.Emit(LogLevel::Error, kStr); + return ::grpc::Status{grpc::StatusCode::UNIMPLEMENTED, kStr}; } diff --git a/src/buildtool/execution_api/execution_service/operations_server.hpp b/src/buildtool/execution_api/execution_service/operations_server.hpp index faa876024..5947eeb7a 100644 --- a/src/buildtool/execution_api/execution_service/operations_server.hpp +++ b/src/buildtool/execution_api/execution_service/operations_server.hpp @@ -20,10 +20,10 @@ #include "src/buildtool/execution_api/execution_service/operation_cache.hpp" #include "src/buildtool/logging/logger.hpp" -class OperarationsServiceImpl final +class OperationsServiceImpl final : public ::google::longrunning::Operations::Service { public: - explicit OperarationsServiceImpl( + explicit OperationsServiceImpl( gsl::not_null const& op_cache) : op_cache_{*op_cache} {}; @@ -74,4 +74,4 @@ class OperarationsServiceImpl final Logger logger_{"execution-service:operations"}; }; -#endif +#endif // OPERATIONS_SERVER_HPP diff --git a/src/buildtool/execution_api/execution_service/server_implementation.cpp b/src/buildtool/execution_api/execution_service/server_implementation.cpp index 988d22a74..0b921d6ba 100644 --- a/src/buildtool/execution_api/execution_service/server_implementation.cpp +++ b/src/buildtool/execution_api/execution_service/server_implementation.cpp @@ -26,8 +26,8 @@ #include "fmt/core.h" #include "grpcpp/grpcpp.h" #include "nlohmann/json.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/common/remote/port.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" #include "src/buildtool/execution_api/execution_service/ac_server.hpp" #include "src/buildtool/execution_api/execution_service/bytestream_server.hpp" #include "src/buildtool/execution_api/execution_service/capabilities_server.hpp" @@ -84,12 +84,14 @@ auto ServerImpl::Run(gsl::not_null const& local_context, gsl::not_null const& remote_context, 
ApiBundle const& apis, std::optional op_exponent) -> bool { + auto const hash_type = + local_context->storage_config->hash_function.GetType(); ExecutionServiceImpl es{local_context, &*apis.local, op_exponent}; ActionCacheServiceImpl ac{local_context}; CASServiceImpl cas{local_context}; BytestreamServiceImpl b{local_context}; - CapabilitiesServiceImpl cap{}; - OperarationsServiceImpl op{&es.GetOpCache()}; + CapabilitiesServiceImpl cap{hash_type}; + OperationsServiceImpl op{&es.GetOpCache()}; grpc::ServerBuilder builder; @@ -140,11 +142,11 @@ auto ServerImpl::Run(gsl::not_null const& local_context, } } - auto const& info_str = nlohmann::to_string(info); + auto const info_str = nlohmann::to_string(info); Logger::Log(LogLevel::Info, - fmt::format("{}execution service started: {}", - Compatibility::IsCompatible() ? "compatible " : "", - info_str)); + "{}execution service started: {}", + ProtocolTraits::IsNative(hash_type) ? "" : "compatible ", + info_str); if (not info_file_.empty()) { if (not TryWrite(info_file_, info_str)) { diff --git a/src/buildtool/execution_api/execution_service/server_implementation.hpp b/src/buildtool/execution_api/execution_service/server_implementation.hpp index 932011c01..1a5db5087 100644 --- a/src/buildtool/execution_api/execution_service/server_implementation.hpp +++ b/src/buildtool/execution_api/execution_service/server_implementation.hpp @@ -58,8 +58,8 @@ class ServerImpl final { std::string interface_{"127.0.0.1"}; int port_{0}; - std::string info_file_{}; - std::string pid_file_{}; + std::string info_file_; + std::string pid_file_; }; -#endif +#endif // SERVER_IMPLEMENATION_HPP diff --git a/src/buildtool/execution_api/git/TARGETS b/src/buildtool/execution_api/git/TARGETS index 2c614f085..860a93880 100644 --- a/src/buildtool/execution_api/git/TARGETS +++ b/src/buildtool/execution_api/git/TARGETS @@ -4,14 +4,16 @@ , "hdrs": ["git_api.hpp"] , "deps": [ ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "artifact_digest_factory"] , 
["src/buildtool/common", "common"] , ["src/buildtool/common", "config"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"] + , ["src/buildtool/execution_api/common", "artifact_blob_container"] , ["src/buildtool/execution_api/common", "common"] , ["src/buildtool/execution_api/common", "common_api"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["src/buildtool/crypto", "hash_function"] ] , "stage": ["src", "buildtool", "execution_api", "git"] } diff --git a/src/buildtool/execution_api/git/git_api.hpp b/src/buildtool/execution_api/git/git_api.hpp index 43afc14a0..aab488b3c 100644 --- a/src/buildtool/execution_api/git/git_api.hpp +++ b/src/buildtool/execution_api/git/git_api.hpp @@ -24,6 +24,7 @@ #include "gsl/gsl" #include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/common/repository_config.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp" @@ -52,7 +53,7 @@ class GitApi final : public IExecutionApi { return nullptr; } - // NOLINTNEXTLINE(misc-no-recursion,google-default-arguments) + // NOLINTNEXTLINE(google-default-arguments) [[nodiscard]] auto RetrieveToPaths( std::vector const& artifacts_info, std::vector const& output_paths, @@ -72,13 +73,12 @@ class GitApi final : public IExecutionApi { return false; } for (auto const& [path, entry] : *tree) { - if (not RetrieveToPaths( - {Artifact::ObjectInfo{ - .digest = ArtifactDigest{entry->Hash(), - /*size*/ 0, - entry->IsTree()}, - .type = entry->Type(), - .failed = false}}, + auto digest = ToArtifactDigest(*entry); + if (not digest or + not RetrieveToPaths( + {Artifact::ObjectInfo{.digest = *std::move(digest), + .type = entry->Type(), + .failed = false}}, {output_paths[i] / path})) { return false; } @@ -115,8 +115,9 @@ class GitApi final : public IExecutionApi { return 
false; } for (std::size_t i{}; i < artifacts_info.size(); ++i) { - auto fd = fds[i]; auto const& info = artifacts_info[i]; + + std::string content; if (IsTreeObject(info.type) and not raw_tree) { auto tree = repo_config_->ReadTreeFromGitCAS(info.digest.hash()); @@ -126,26 +127,22 @@ class GitApi final : public IExecutionApi { info.digest.hash()); return false; } - auto json = nlohmann::json::object(); - for (auto const& [path, entry] : *tree) { - json[path] = - Artifact::ObjectInfo{ - .digest = ArtifactDigest{entry->Hash(), - /*size*/ 0, - entry->IsTree()}, - .type = entry->Type(), - .failed = false} - .ToString(/*size_unknown*/ true); - } - auto msg = json.dump(2) + "\n"; - if (gsl::owner out = fdopen(fd, "wb")) { // NOLINT - std::fwrite(msg.data(), 1, msg.size(), out); - std::fclose(out); - } - else { - Logger::Log(LogLevel::Error, - "dumping to file descriptor {} failed.", - fd); + + try { + auto json = nlohmann::json::object(); + for (auto const& [path, entry] : *tree) { + auto digest = ToArtifactDigest(*entry); + if (not digest) { + return false; + } + json[path] = + Artifact::ObjectInfo{.digest = *std::move(digest), + .type = entry->Type(), + .failed = false} + .ToString(/*size_unknown*/ true); + } + content = json.dump(2) + "\n"; + } catch (...) 
{ return false; } } @@ -158,23 +155,23 @@ class GitApi final : public IExecutionApi { info.digest.hash()); return false; } - auto msg = *blob; - if (gsl::owner out = fdopen(fd, "wb")) { // NOLINT - std::fwrite(msg.data(), 1, msg.size(), out); - std::fclose(out); - } - else { - Logger::Log(LogLevel::Error, - "dumping to file descriptor {} failed.", - fd); - return false; - } + content = *std::move(blob); + } + + if (gsl::owner out = fdopen(fds[i], "wb")) { // NOLINT + std::fwrite(content.data(), 1, content.size(), out); + std::fclose(out); + } + else { + Logger::Log(LogLevel::Error, + "dumping to file descriptor {} failed.", + fds[i]); + return false; } } return true; } - // NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] auto RetrieveToCas( std::vector const& artifacts_info, IExecutionApi const& api) const noexcept -> bool override { @@ -215,14 +212,13 @@ class GitApi final : public IExecutionApi { ArtifactBlobContainer tree_deps_only_blobs{}; for (auto const& [path, entry] : *tree) { if (entry->IsTree()) { - if (not RetrieveToCas( - {Artifact::ObjectInfo{ - .digest = ArtifactDigest{entry->Hash(), - /*size*/ 0, - entry->IsTree()}, - .type = entry->Type(), - .failed = false}}, - api)) { + auto digest = ToArtifactDigest(*entry); + if (not digest or + not RetrieveToCas({Artifact::ObjectInfo{ + .digest = *std::move(digest), + .type = entry->Type(), + .failed = false}}, + api)) { return false; } } @@ -231,8 +227,9 @@ class GitApi final : public IExecutionApi { if (not entry_content) { return false; } - auto digest = ArtifactDigest::Create( - hash_function, *entry_content); + auto digest = + ArtifactDigestFactory::HashDataAs( + hash_function, *entry_content); // Collect blob and upload to remote CAS if transfer // size reached. if (not UpdateContainerAndUpload( @@ -263,10 +260,10 @@ class GitApi final : public IExecutionApi { ArtifactDigest digest = IsTreeObject(info.type) - ? 
ArtifactDigest::Create(hash_function, - *content) - : ArtifactDigest::Create(hash_function, - *content); + ? ArtifactDigestFactory::HashDataAs( + hash_function, *content) + : ArtifactDigestFactory::HashDataAs( + hash_function, *content); // Collect blob and upload to remote CAS if transfer size reached. if (not UpdateContainerAndUpload( @@ -327,6 +324,18 @@ class GitApi final : public IExecutionApi { private: gsl::not_null repo_config_; + + [[nodiscard]] static auto ToArtifactDigest( + GitTreeEntry const& entry) noexcept -> std::optional { + auto digest = ArtifactDigestFactory::Create(HashFunction::Type::GitSHA1, + entry.Hash(), + /*size=*/0, + entry.IsTree()); + if (not digest) { + return std::nullopt; + } + return *std::move(digest); + } }; -#endif +#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_GIT_GIT_API_HPP diff --git a/src/buildtool/execution_api/local/TARGETS b/src/buildtool/execution_api/local/TARGETS index f3949310c..66a27034f 100644 --- a/src/buildtool/execution_api/local/TARGETS +++ b/src/buildtool/execution_api/local/TARGETS @@ -22,36 +22,38 @@ ] , "srcs": ["local_action.cpp", "local_cas_reader.cpp"] , "deps": - [ ["@", "fmt", "", "fmt"] - , ["@", "gsl", "", "gsl"] + [ "context" + , ["@", "fmt", "", "fmt"] , ["@", "grpc", "", "grpc++"] - , "context" + , ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common", "bazel_types"] , ["src/buildtool/common", "common"] , ["src/buildtool/common", "config"] - , ["src/buildtool/storage", "storage"] + , ["src/buildtool/common", "protocol_traits"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/execution_api/bazel_msg", "bazel_msg"] + , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"] + , ["src/buildtool/execution_api/common", "artifact_blob_container"] + , ["src/buildtool/execution_api/common", "blob_tree"] , ["src/buildtool/execution_api/common", "common"] , ["src/buildtool/execution_api/common", "common_api"] - , 
["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"] + , ["src/buildtool/execution_api/execution_service", "cas_utils"] , ["src/buildtool/execution_api/git", "git"] , ["src/buildtool/file_system", "file_system_manager"] - , ["src/buildtool/compatibility", "compatibility"] - , ["src/buildtool/execution_api/bazel_msg", "bazel_msg"] + , ["src/buildtool/file_system", "git_repo"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["src/buildtool/execution_api/execution_service", "cas_utils"] - , ["src/buildtool/file_system", "git_repo"] + , ["src/buildtool/storage", "storage"] + , ["src/utils/cpp", "expected"] + , ["src/utils/cpp", "path"] , ["src/utils/cpp", "tmp_dir"] - , ["src/buildtool/crypto", "hash_function"] ] , "stage": ["src", "buildtool", "execution_api", "local"] , "private-deps": - [ ["src/buildtool/file_system", "object_type"] + [ ["src/buildtool/execution_api/utils", "outputscheck"] + , ["src/buildtool/file_system", "object_type"] , ["src/buildtool/system", "system_command"] - , ["src/buildtool/common", "bazel_types"] - , ["src/buildtool/file_system", "file_system_manager"] - , ["src/buildtool/execution_api/utils", "outputscheck"] - , ["src/buildtool/crypto", "hash_function"] - , ["src/utils/cpp", "path"] ] } , "context": @@ -59,8 +61,8 @@ , "name": ["context"] , "hdrs": ["context.hpp"] , "deps": - [ ["@", "gsl", "", "gsl"] - , "config" + [ "config" + , ["@", "gsl", "", "gsl"] , ["src/buildtool/storage", "config"] , ["src/buildtool/storage", "storage"] ] diff --git a/src/buildtool/execution_api/local/local_action.cpp b/src/buildtool/execution_api/local/local_action.cpp index 8cc1d242d..c500afba4 100644 --- a/src/buildtool/execution_api/local/local_action.cpp +++ b/src/buildtool/execution_api/local/local_action.cpp @@ -21,8 +21,8 @@ #include #include -#include "src/buildtool/common/bazel_types.hpp" -#include "src/buildtool/compatibility/native_support.hpp" +#include 
"src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/execution_api/common/tree_reader.hpp" #include "src/buildtool/execution_api/local/local_cas_reader.hpp" #include "src/buildtool/execution_api/local/local_response.hpp" @@ -31,6 +31,7 @@ #include "src/buildtool/file_system/object_type.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/system/system_command.hpp" +#include "src/utils/cpp/path.hpp" namespace { @@ -38,43 +39,43 @@ namespace { class BuildCleanupAnchor { public: explicit BuildCleanupAnchor(std::filesystem::path build_path) noexcept - : build_path{std::move(build_path)} {} + : build_path_{std::move(build_path)} {} BuildCleanupAnchor(BuildCleanupAnchor const&) = delete; BuildCleanupAnchor(BuildCleanupAnchor&&) = delete; auto operator=(BuildCleanupAnchor const&) -> BuildCleanupAnchor& = delete; auto operator=(BuildCleanupAnchor&&) -> BuildCleanupAnchor& = delete; ~BuildCleanupAnchor() { - if (not FileSystemManager::RemoveDirectory(build_path, true)) { + if (not FileSystemManager::RemoveDirectory(build_path_, true)) { Logger::Log(LogLevel::Error, "Could not cleanup build directory {}", - build_path.string()); + build_path_.string()); } } private: - std::filesystem::path const build_path{}; + std::filesystem::path const build_path_; }; [[nodiscard]] auto CreateDigestFromLocalOwnedTree( Storage const& storage, - std::filesystem::path const& dir_path) -> std::optional { + std::filesystem::path const& dir_path) -> std::optional { auto const& cas = storage.CAS(); auto store_blob = [&cas](std::filesystem::path const& path, - auto is_exec) -> std::optional { + auto is_exec) -> std::optional { return cas.StoreBlob(path, is_exec); }; auto store_tree = - [&cas](std::string const& content) -> std::optional { + [&cas](std::string const& content) -> std::optional { return cas.StoreTree(content); }; auto store_symlink = - [&cas](std::string const& content) -> std::optional 
{ + [&cas](std::string const& content) -> std::optional { return cas.StoreBlob(content); }; - return Compatibility::IsCompatible() - ? BazelMsgFactory::CreateDirectoryDigestFromLocalTree( + return ProtocolTraits::IsNative(storage.GetHashFunction().GetType()) + ? BazelMsgFactory::CreateGitTreeDigestFromLocalTree( dir_path, store_blob, store_tree, store_symlink) - : BazelMsgFactory::CreateGitTreeDigestFromLocalTree( + : BazelMsgFactory::CreateDirectoryDigestFromLocalTree( dir_path, store_blob, store_tree, store_symlink); } @@ -84,7 +85,7 @@ auto LocalAction::Execute(Logger const* logger) noexcept -> IExecutionResponse::Ptr { auto do_cache = CacheEnabled(cache_flag_); - auto action = CreateActionDigest(root_digest_, not do_cache); + auto const action = CreateActionDigest(root_digest_, not do_cache); if (not action) { if (logger != nullptr) { logger->Emit(LogLevel::Error, @@ -100,19 +101,36 @@ auto LocalAction::Execute(Logger const* logger) noexcept " - exec_dir digest: {}\n" " - action digest: {}", root_digest_.hash(), - NativeSupport::Unprefix(action->hash())); + action->hash()); } + auto create_response = [](Logger const* logger, + std::string const& action_hash, + auto&&... args) -> IExecutionResponse::Ptr { + try { + return IExecutionResponse::Ptr{new LocalResponse{ + action_hash, std::forward(args)...}}; + } catch (...) 
{ + if (logger != nullptr) { + logger->Emit(LogLevel::Error, + "failed to create a response for {}", + action_hash); + } + } + return nullptr; + }; + if (do_cache) { if (auto result = local_context_.storage->ActionCache().CachedResult(*action)) { if (result->exit_code() == 0 and ActionResultContainsExpectedOutputs( *result, output_files_, output_dirs_)) { - return IExecutionResponse::Ptr{ - new LocalResponse{action->hash(), - {std::move(*result), /*is_cached=*/true}, - local_context_.storage}}; + return create_response( + logger, + action->hash(), + LocalAction::Output{*std::move(result), /*is_cached=*/true}, + local_context_.storage); } } } @@ -121,7 +139,8 @@ auto LocalAction::Execute(Logger const* logger) noexcept if (auto output = Run(*action)) { if (cache_flag_ == CacheFlag::PretendCached) { // ensure the same id is created as if caching were enabled - auto action_cached = CreateActionDigest(root_digest_, false); + auto const action_cached = + CreateActionDigest(root_digest_, false); if (not action_cached) { if (logger != nullptr) { logger->Emit( @@ -133,24 +152,25 @@ auto LocalAction::Execute(Logger const* logger) noexcept } output->is_cached = true; - return IExecutionResponse::Ptr{ - new LocalResponse{action_cached->hash(), - std::move(*output), - local_context_.storage}}; + return create_response(logger, + action_cached->hash(), + *std::move(output), + local_context_.storage); } - return IExecutionResponse::Ptr{new LocalResponse{ - action->hash(), std::move(*output), local_context_.storage}}; + return create_response(logger, + action->hash(), + *std::move(output), + local_context_.storage); } } return nullptr; } -auto LocalAction::Run(bazel_re::Digest const& action_id) const noexcept +auto LocalAction::Run(ArtifactDigest const& action_id) const noexcept -> std::optional { - auto exec_path = - CreateUniquePath(local_context_.storage_config->ExecutionRoot() / - NativeSupport::Unprefix(action_id.hash())); + auto const exec_path = CreateUniquePath( + 
local_context_.storage_config->ExecutionRoot() / action_id.hash()); if (not exec_path) { return std::nullopt; @@ -179,13 +199,13 @@ auto LocalAction::Run(bazel_re::Digest const& action_id) const noexcept if (exit_code.has_value()) { Output result{}; result.action.set_exit_code(*exit_code); - if (gsl::owner digest_ptr = - DigestFromOwnedFile(*exec_path / "stdout")) { - result.action.set_allocated_stdout_digest(digest_ptr); + if (auto const digest = DigestFromOwnedFile(*exec_path / "stdout")) { + *result.action.mutable_stdout_digest() = + ArtifactDigestFactory::ToBazel(*digest); } - if (gsl::owner digest_ptr = - DigestFromOwnedFile(*exec_path / "stderr")) { - result.action.set_allocated_stderr_digest(digest_ptr); + if (auto const digest = DigestFromOwnedFile(*exec_path / "stderr")) { + *result.action.mutable_stderr_digest() = + ArtifactDigestFactory::ToBazel(*digest); } if (CollectAndStoreOutputs(&result.action, build_root / cwd_)) { @@ -368,12 +388,21 @@ auto LocalAction::CollectOutputFileOrSymlink( return std::nullopt; } if (IsSymlinkObject(*type)) { - auto content = FileSystemManager::ReadSymlink(file_path); - if (content and local_context_.storage->CAS().StoreBlob(*content)) { - auto out_symlink = bazel_re::OutputSymlink{}; - out_symlink.set_path(local_path); - out_symlink.set_target(*content); - return out_symlink; + if (auto content = FileSystemManager::ReadSymlink(file_path)) { + // in native mode: check validity of symlink + if (ProtocolTraits::IsNative( + local_context_.storage->GetHashFunction().GetType()) and + not PathIsNonUpwards(*content)) { + Logger::Log( + LogLevel::Error, "found invalid symlink at {}", local_path); + return std::nullopt; + } + if (local_context_.storage->CAS().StoreBlob(*content)) { + auto out_symlink = bazel_re::OutputSymlink{}; + out_symlink.set_path(local_path); + out_symlink.set_target(*content); + return out_symlink; + } } } else if (IsFileObject(*type)) { @@ -383,8 +412,8 @@ auto LocalAction::CollectOutputFileOrSymlink( if 
(digest) { auto out_file = bazel_re::OutputFile{}; out_file.set_path(local_path); - out_file.set_allocated_digest( - gsl::owner{new bazel_re::Digest{*digest}}); + *out_file.mutable_digest() = + ArtifactDigestFactory::ToBazel(*digest); out_file.set_is_executable(is_executable); return out_file; } @@ -407,12 +436,21 @@ auto LocalAction::CollectOutputDirOrSymlink( return std::nullopt; } if (IsSymlinkObject(*type)) { - auto content = FileSystemManager::ReadSymlink(dir_path); - if (content and local_context_.storage->CAS().StoreBlob(*content)) { - auto out_symlink = bazel_re::OutputSymlink{}; - out_symlink.set_path(local_path); - out_symlink.set_target(*content); - return out_symlink; + if (auto content = FileSystemManager::ReadSymlink(dir_path)) { + // in native mode: check validity of symlink + if (ProtocolTraits::IsNative( + local_context_.storage->GetHashFunction().GetType()) and + not PathIsNonUpwards(*content)) { + Logger::Log( + LogLevel::Error, "found invalid symlink at {}", local_path); + return std::nullopt; + } + if (local_context_.storage->CAS().StoreBlob(*content)) { + auto out_symlink = bazel_re::OutputSymlink{}; + out_symlink.set_path(local_path); + out_symlink.set_target(*content); + return out_symlink; + } } } else if (IsTreeObject(*type)) { @@ -420,10 +458,13 @@ auto LocalAction::CollectOutputDirOrSymlink( *local_context_.storage, dir_path)) { auto out_dir = bazel_re::OutputDirectory{}; out_dir.set_path(local_path); - out_dir.set_allocated_tree_digest( - gsl::owner{new bazel_re::Digest{*digest}}); + (*out_dir.mutable_tree_digest()) = + ArtifactDigestFactory::ToBazel(*digest); return out_dir; } + Logger::Log(LogLevel::Error, + "found invalid entries in directory at {}", + local_path); } else { Logger::Log( @@ -495,10 +536,7 @@ auto LocalAction::CollectAndStoreOutputs( } auto LocalAction::DigestFromOwnedFile(std::filesystem::path const& file_path) - const noexcept -> gsl::owner { - if (auto digest = local_context_.storage->CAS().StoreBlob( - file_path, 
/*is_executable=*/false)) { - return new bazel_re::Digest{std::move(*digest)}; - } - return nullptr; + const noexcept -> std::optional { + return local_context_.storage->CAS().StoreBlob( + file_path, /*is_executable=*/false); } diff --git a/src/buildtool/execution_api/local/local_action.hpp b/src/buildtool/execution_api/local/local_action.hpp index 4d256302d..96541f58a 100644 --- a/src/buildtool/execution_api/local/local_action.hpp +++ b/src/buildtool/execution_api/local/local_action.hpp @@ -25,6 +25,8 @@ #include #include "gsl/gsl" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/bazel_types.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp" #include "src/buildtool/execution_api/common/execution_action.hpp" #include "src/buildtool/execution_api/common/execution_response.hpp" @@ -40,7 +42,7 @@ class LocalAction final : public IExecutionAction { public: struct Output { - bazel_re::ActionResult action{}; + bazel_re::ActionResult action; bool is_cached{}; }; @@ -63,12 +65,12 @@ class LocalAction final : public IExecutionAction { private: Logger logger_{"LocalExecution"}; LocalContext const& local_context_; - ArtifactDigest const root_digest_{}; - std::vector const cmdline_{}; - std::string const cwd_{}; - std::vector output_files_{}; - std::vector output_dirs_{}; - std::map const env_vars_{}; + ArtifactDigest const root_digest_; + std::vector const cmdline_; + std::string const cwd_; + std::vector output_files_; + std::vector output_dirs_; + std::map const env_vars_; std::vector const properties_; std::chrono::milliseconds timeout_{kDefaultTimeout}; CacheFlag cache_flag_{CacheFlag::CacheOutput}; @@ -95,9 +97,9 @@ class LocalAction final : public IExecutionAction { std::sort(output_dirs_.begin(), output_dirs_.end()); } - [[nodiscard]] auto CreateActionDigest(bazel_re::Digest const& exec_dir, + [[nodiscard]] auto CreateActionDigest(ArtifactDigest const& exec_dir, bool do_not_cache) - -> std::optional { + -> 
std::optional { auto const env_vars = BazelMsgFactory::CreateMessageVectorFromMap< bazel_re::Command_EnvironmentVariable>(env_vars_); @@ -115,7 +117,7 @@ class LocalAction final : public IExecutionAction { return BazelMsgFactory::CreateActionDigestFromCommandLine(request); } - [[nodiscard]] auto Run(bazel_re::Digest const& action_id) const noexcept + [[nodiscard]] auto Run(ArtifactDigest const& action_id) const noexcept -> std::optional; [[nodiscard]] auto StageInput( @@ -152,7 +154,7 @@ class LocalAction final : public IExecutionAction { /// \brief Store file from path in file CAS and return pointer to digest. [[nodiscard]] auto DigestFromOwnedFile( std::filesystem::path const& file_path) const noexcept - -> gsl::owner; + -> std::optional; }; #endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_ACTION_HPP diff --git a/src/buildtool/execution_api/local/local_api.hpp b/src/buildtool/execution_api/local/local_api.hpp index eadf45b7b..2302cb5cd 100644 --- a/src/buildtool/execution_api/local/local_api.hpp +++ b/src/buildtool/execution_api/local/local_api.hpp @@ -19,6 +19,7 @@ #include #include #include +#include // std::nothrow #include #include #include @@ -31,9 +32,9 @@ #include "grpcpp/support/status.h" #include "gsl/gsl" #include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/common/repository_config.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" -#include "src/buildtool/compatibility/native_support.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp" #include "src/buildtool/execution_api/common/artifact_blob_container.hpp" #include "src/buildtool/execution_api/common/blob_tree.hpp" @@ -56,7 +57,7 @@ class LocalApi final : public IExecutionApi { explicit LocalApi(gsl::not_null const& local_context, RepositoryConfig const* repo_config = nullptr) noexcept : local_context_{*local_context}, 
- git_api_{CreateFallbackApi(repo_config)} {} + git_api_{CreateFallbackApi(*local_context->storage, repo_config)} {} [[nodiscard]] auto CreateAction( ArtifactDigest const& root_digest, @@ -67,7 +68,8 @@ class LocalApi final : public IExecutionApi { std::map const& env_vars, std::map const& properties) const noexcept -> IExecutionAction::Ptr final { - return IExecutionAction::Ptr{new LocalAction{&local_context_, + return IExecutionAction::Ptr{new (std::nothrow) + LocalAction{&local_context_, root_digest, command, cwd, @@ -77,7 +79,7 @@ class LocalApi final : public IExecutionApi { properties}}; } - // NOLINTNEXTLINE(misc-no-recursion,google-default-arguments) + // NOLINTNEXTLINE(google-default-arguments) [[nodiscard]] auto RetrieveToPaths( std::vector const& artifacts_info, std::vector const& output_paths, @@ -152,7 +154,6 @@ class LocalApi final : public IExecutionApi { }); } - // NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] auto RetrieveToCas( std::vector const& artifacts_info, IExecutionApi const& api) const noexcept -> bool final { @@ -210,10 +211,10 @@ class LocalApi final : public IExecutionApi { // storage_.ReadTreeInfos() will contain 0 as size. ArtifactDigest digest = IsTreeObject(info.type) - ? ArtifactDigest::Create( + ? ArtifactDigestFactory::HashDataAs( local_context_.storage_config->hash_function, *content) - : ArtifactDigest::Create( + : ArtifactDigestFactory::HashDataAs( local_context_.storage_config->hash_function, *content); @@ -261,19 +262,17 @@ class LocalApi final : public IExecutionApi { [[nodiscard]] auto Upload(ArtifactBlobContainer&& blobs, bool /*skip_find_missing*/) const noexcept -> bool final { - for (auto const& blob : blobs.Blobs()) { - auto const is_tree = NativeSupport::IsTree( - static_cast(blob.digest).hash()); - auto cas_digest = - is_tree ? 
local_context_.storage->CAS().StoreTree(*blob.data) - : local_context_.storage->CAS().StoreBlob(*blob.data, - blob.is_exec); - if (not cas_digest or not std::equal_to{}( - *cas_digest, blob.digest)) { - return false; - } - } - return true; + auto const range = blobs.Blobs(); + return std::all_of( + range.begin(), + range.end(), + [&cas = local_context_.storage->CAS()](ArtifactBlob const& blob) { + auto const cas_digest = + blob.digest.IsTree() + ? cas.StoreTree(*blob.data) + : cas.StoreBlob(*blob.data, blob.is_exec); + return cas_digest and *cas_digest == blob.digest; + }); } [[nodiscard]] auto UploadTree( @@ -286,29 +285,29 @@ class LocalApi final : public IExecutionApi { return std::nullopt; } - if (Compatibility::IsCompatible()) { - return CommonUploadTreeCompatible( - *this, - *build_root, - [&cas = local_context_.storage->CAS()]( - std::vector const& digests, - std::vector* targets) { - targets->reserve(digests.size()); - for (auto const& digest : digests) { - auto p = cas.BlobPath(digest, /*is_executable=*/false); - auto content = FileSystemManager::ReadFile(*p); - targets->emplace_back(*content); - } - }); + auto const& cas = local_context_.storage->CAS(); + if (ProtocolTraits::IsNative(cas.GetHashFunction().GetType())) { + return CommonUploadTreeNative(*this, *build_root); } - - return CommonUploadTreeNative(*this, *build_root); + return CommonUploadTreeCompatible( + *this, + *build_root, + [&cas](std::vector const& digests, + gsl::not_null*> const& targets) { + targets->reserve(digests.size()); + for (auto const& digest : digests) { + auto p = cas.BlobPath(digest, + /*is_executable=*/false); + auto content = FileSystemManager::ReadFile(*p); + targets->emplace_back(*content); + } + }); } [[nodiscard]] auto IsAvailable(ArtifactDigest const& digest) const noexcept -> bool final { return static_cast( - NativeSupport::IsTree(static_cast(digest).hash()) + digest.IsTree() ? 
local_context_.storage->CAS().TreePath(digest) : local_context_.storage->CAS().BlobPath(digest, false)); } @@ -317,9 +316,8 @@ class LocalApi final : public IExecutionApi { const noexcept -> std::vector final { std::vector result; for (auto const& digest : digests) { - auto const& path = - NativeSupport::IsTree( - static_cast(digest).hash()) + auto const path = + digest.IsTree() ? local_context_.storage->CAS().TreePath(digest) : local_context_.storage->CAS().BlobPath(digest, false); if (not path) { @@ -332,9 +330,8 @@ class LocalApi final : public IExecutionApi { [[nodiscard]] auto SplitBlob(ArtifactDigest const& blob_digest) const noexcept -> std::optional> final { Logger::Log(LogLevel::Debug, "SplitBlob({})", blob_digest.hash()); - auto split_result = CASUtils::SplitBlobFastCDC( - static_cast(blob_digest), - *local_context_.storage); + auto split_result = + CASUtils::SplitBlobFastCDC(blob_digest, *local_context_.storage); if (not split_result) { Logger::Log(LogLevel::Error, split_result.error().error_message()); return std::nullopt; @@ -346,20 +343,12 @@ class LocalApi final : public IExecutionApi { << blob_digest.size() << " into " << chunk_digests.size() << " chunks: [ "; for (auto const& chunk_digest : chunk_digests) { - ss << chunk_digest.hash() << ":" << chunk_digest.size_bytes() - << " "; + ss << chunk_digest.hash() << ":" << chunk_digest.size() << " "; } ss << "]"; return ss.str(); }); - auto artifact_digests = std::vector{}; - artifact_digests.reserve(chunk_digests.size()); - std::transform( - chunk_digests.cbegin(), - chunk_digests.cend(), - std::back_inserter(artifact_digests), - [](auto const& digest) { return ArtifactDigest{digest}; }); - return artifact_digests; + return *std::move(split_result); } [[nodiscard]] auto BlobSplitSupport() const noexcept -> bool final { @@ -374,24 +363,14 @@ class LocalApi final : public IExecutionApi { "SpliceBlob({}, {} chunks)", blob_digest.hash(), chunk_digests.size()); - auto digests = std::vector{}; - 
digests.reserve(chunk_digests.size()); - std::transform( - chunk_digests.cbegin(), - chunk_digests.cend(), - std::back_inserter(digests), - [](auto const& artifact_digest) { - return static_cast(artifact_digest); - }); - auto splice_result = - CASUtils::SpliceBlob(static_cast(blob_digest), - digests, - *local_context_.storage); + + auto splice_result = CASUtils::SpliceBlob( + blob_digest, chunk_digests, *local_context_.storage); if (not splice_result) { Logger::Log(LogLevel::Error, splice_result.error().error_message()); return std::nullopt; } - return ArtifactDigest{*std::move(splice_result)}; + return *std::move(splice_result); } [[nodiscard]] auto BlobSpliceSupport() const noexcept -> bool final { @@ -403,8 +382,10 @@ class LocalApi final : public IExecutionApi { std::optional const git_api_; [[nodiscard]] static auto CreateFallbackApi( + Storage const& storage, RepositoryConfig const* repo_config) noexcept -> std::optional { - if (repo_config == nullptr or Compatibility::IsCompatible()) { + if (repo_config == nullptr or + not ProtocolTraits::IsNative(storage.GetHashFunction().GetType())) { return std::nullopt; } return GitApi{repo_config}; diff --git a/src/buildtool/execution_api/local/local_cas_reader.cpp b/src/buildtool/execution_api/local/local_cas_reader.cpp index 56915fa9c..bac7ee711 100644 --- a/src/buildtool/execution_api/local/local_cas_reader.cpp +++ b/src/buildtool/execution_api/local/local_cas_reader.cpp @@ -23,6 +23,7 @@ #include #include "gsl/gsl" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" @@ -52,6 +53,7 @@ auto LocalCasReader::ReadDirectory(ArtifactDigest const& digest) const noexcept auto LocalCasReader::MakeTree(ArtifactDigest const& root) const noexcept -> std::optional { + auto const hash_type = cas_.GetHashFunction().GetType(); try { 
std::unordered_map directories; @@ -70,7 +72,12 @@ auto LocalCasReader::MakeTree(ArtifactDigest const& root) const noexcept return std::nullopt; } for (auto const& node : read_dir->directories()) { - to_check.push(ArtifactDigest{node.digest()}); + auto digest = + ArtifactDigestFactory::FromBazel(hash_type, node.digest()); + if (not digest) { + return std::nullopt; + } + to_check.push(*std::move(digest)); } directories.insert_or_assign(std::move(current), *std::move(read_dir)); @@ -87,10 +94,10 @@ auto LocalCasReader::ReadGitTree(ArtifactDigest const& digest) const noexcept if (auto const path = cas_.TreePath(digest)) { if (auto const content = FileSystemManager::ReadFile(*path)) { auto check_symlinks = - [this](std::vector const& ids) { + [&cas = cas_](std::vector const& ids) { for (auto const& id : ids) { - auto link_path = cas_.BlobPath(id, - /*is_executable=*/false); + auto link_path = cas.BlobPath(id, + /*is_executable=*/false); if (not link_path) { return false; } @@ -157,6 +164,10 @@ auto LocalCasReader::DumpRaw(std::filesystem::path const& path, return true; } +auto LocalCasReader::IsNativeProtocol() const noexcept -> bool { + return ProtocolTraits::IsNative(cas_.GetHashFunction().GetType()); +} + namespace { [[nodiscard]] auto AssembleTree( bazel_re::Directory root, diff --git a/src/buildtool/execution_api/local/local_cas_reader.hpp b/src/buildtool/execution_api/local/local_cas_reader.hpp index b989ec094..8fe9afd4e 100644 --- a/src/buildtool/execution_api/local/local_cas_reader.hpp +++ b/src/buildtool/execution_api/local/local_cas_reader.hpp @@ -52,6 +52,8 @@ class LocalCasReader final { DumpCallback const& dumper) const noexcept -> bool; + [[nodiscard]] auto IsNativeProtocol() const noexcept -> bool; + private: LocalCAS const& cas_; diff --git a/src/buildtool/execution_api/local/local_response.hpp b/src/buildtool/execution_api/local/local_response.hpp index 21dd59520..fbb58b288 100644 --- a/src/buildtool/execution_api/local/local_response.hpp +++ 
b/src/buildtool/execution_api/local/local_response.hpp @@ -16,17 +16,25 @@ #define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_RESPONSE_HPP #include +#include #include #include +#include "fmt/core.h" #include "gsl/gsl" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/common/execution_response.hpp" +#include "src/buildtool/execution_api/common/tree_reader.hpp" #include "src/buildtool/execution_api/local/local_action.hpp" +#include "src/buildtool/execution_api/local/local_cas_reader.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" #include "src/buildtool/storage/storage.hpp" +#include "src/utils/cpp/expected.hpp" +#include "src/utils/cpp/path.hpp" /// \brief Response of a LocalAction. class LocalResponse final : public IExecutionResponse { @@ -43,21 +51,15 @@ class LocalResponse final : public IExecutionResponse { return (output_.action.stdout_digest().size_bytes() != 0); } auto StdErr() noexcept -> std::string final { - if (auto path = storage_.CAS().BlobPath(output_.action.stderr_digest(), - /*is_executable=*/false)) { - if (auto content = FileSystemManager::ReadFile(*path)) { - return std::move(*content); - } + if (auto content = ReadContent(output_.action.stderr_digest())) { + return *std::move(content); } Logger::Log(LogLevel::Debug, "reading stderr failed"); return {}; } auto StdOut() noexcept -> std::string final { - if (auto path = storage_.CAS().BlobPath(output_.action.stdout_digest(), - /*is_executable=*/false)) { - if (auto content = FileSystemManager::ReadFile(*path)) { - return std::move(*content); - } + if (auto content = ReadContent(output_.action.stdout_digest())) { + return *std::move(content); } Logger::Log(LogLevel::Debug, "reading stdout failed"); return {}; @@ -71,18 +73,26 @@ class 
LocalResponse final : public IExecutionResponse { return action_id_; } - auto Artifacts() noexcept -> ArtifactInfos const& final { - Populate(); - return artifacts_; + auto Artifacts() noexcept + -> expected, std::string> final { + if (auto error_msg = Populate()) { + return unexpected{*std::move(error_msg)}; + } + return gsl::not_null( + &artifacts_); // explicit type needed for expected } - auto DirectorySymlinks() noexcept -> DirSymlinks const& final { - Populate(); - return dir_symlinks_; + auto DirectorySymlinks() noexcept + -> expected, std::string> final { + if (auto error_msg = Populate()) { + return unexpected{*std::move(error_msg)}; + } + return gsl::not_null( + &dir_symlinks_); // explicit type needed for expected } private: - std::string action_id_{}; + std::string action_id_; LocalAction::Output output_{}; Storage const& storage_; ArtifactInfos artifacts_; @@ -97,10 +107,12 @@ class LocalResponse final : public IExecutionResponse { output_{std::move(output)}, storage_{*storage} {} - void Populate() noexcept { + /// \brief Populates the stored data, once. + /// \returns Error message on failure, nullopt on success. + [[nodiscard]] auto Populate() noexcept -> std::optional { // Initialized only once lazily if (populated_) { - return; + return std::nullopt; } populated_ = true; @@ -118,61 +130,138 @@ class LocalResponse final : public IExecutionResponse { dir_symlinks.reserve(static_cast( action_result.output_directory_symlinks_size())); + auto const hash_type = storage_.GetHashFunction().GetType(); // collect files and store them for (auto const& file : action_result.output_files()) { + auto digest = + ArtifactDigestFactory::FromBazel(hash_type, file.digest()); + if (not digest) { + return fmt::format( + "LocalResponse: failed to create artifact digest for {}", + file.path()); + } try { artifacts.emplace( file.path(), - Artifact::ObjectInfo{ - .digest = ArtifactDigest{file.digest()}, - .type = file.is_executable() ? 
ObjectType::Executable + Artifact::ObjectInfo{.digest = *std::move(digest), + .type = file.is_executable() + ? ObjectType::Executable : ObjectType::File}); - } catch (...) { - return; + } catch (std::exception const& ex) { + return fmt::format( + "LocalResponse: unexpected failure gathering digest for " + "{}:\n{}", + file.path(), + ex.what()); } } // collect all symlinks and store them for (auto const& link : action_result.output_file_symlinks()) { try { + // in compatible mode: check symlink validity + if (not ProtocolTraits::IsNative( + storage_.GetHashFunction().GetType()) and + not PathIsNonUpwards(link.target())) { + return fmt::format( + "LocalResponse: found invalid symlink at {}", + link.path()); + } artifacts.emplace( link.path(), Artifact::ObjectInfo{ - .digest = ArtifactDigest::Create( - storage_.GetHashFunction(), link.target()), + .digest = + ArtifactDigestFactory::HashDataAs( + storage_.GetHashFunction(), link.target()), .type = ObjectType::Symlink}); - } catch (...) { - return; + } catch (std::exception const& ex) { + return fmt::format( + "LocalResponse: unexpected failure gathering digest for " + "{}:\n{}", + link.path(), + ex.what()); } } for (auto const& link : action_result.output_directory_symlinks()) { try { + // in compatible mode: check symlink validity + if (not ProtocolTraits::IsNative( + storage_.GetHashFunction().GetType()) and + not PathIsNonUpwards(link.target())) { + return fmt::format( + "LocalResponse: found invalid symlink at {}", + link.path()); + } artifacts.emplace( link.path(), Artifact::ObjectInfo{ - .digest = ArtifactDigest::Create( - storage_.GetHashFunction(), link.target()), + .digest = + ArtifactDigestFactory::HashDataAs( + storage_.GetHashFunction(), link.target()), .type = ObjectType::Symlink}); dir_symlinks.emplace(link.path()); // add it to set - } catch (...) 
{ - return; + } catch (std::exception const& ex) { + return fmt::format( + "LocalResponse: unexpected failure gathering digest for " + "{}:\n{}", + link.path(), + ex.what()); } } // collect directories and store them for (auto const& dir : action_result.output_directories()) { + auto digest = + ArtifactDigestFactory::FromBazel(hash_type, dir.tree_digest()); + if (not digest) { + return fmt::format( + "LocalResponse: failed to create artifact digest for {}", + dir.path()); + } try { + // in compatible mode: check validity of symlinks in dir + if (not ProtocolTraits::IsNative( + storage_.GetHashFunction().GetType())) { + auto reader = TreeReader{&storage_.CAS()}; + auto result = reader.RecursivelyReadTreeLeafs( + *digest, "", /*include_trees=*/true); + if (not result) { + return fmt::format( + "LocalResponse: found invalid entries in directory " + "{}", + dir.path()); + } + } artifacts.emplace( dir.path(), - Artifact::ObjectInfo{ - .digest = ArtifactDigest{dir.tree_digest()}, - .type = ObjectType::Tree}); - } catch (...) 
{ - return; + Artifact::ObjectInfo{.digest = *std::move(digest), + .type = ObjectType::Tree}); + } catch (std::exception const& ex) { + return fmt::format( + "LocalResponse: unexpected failure gathering digest for " + "{}:\n{}", + dir.path(), + ex.what()); } } artifacts_ = std::move(artifacts); dir_symlinks_ = std::move(dir_symlinks); + return std::nullopt; + } + + [[nodiscard]] auto ReadContent(bazel_re::Digest const& digest) + const noexcept -> std::optional { + auto const a_digest = ArtifactDigestFactory::FromBazel( + storage_.GetHashFunction().GetType(), digest); + if (not a_digest) { + return std::nullopt; + } + auto const path = + storage_.CAS().BlobPath(*a_digest, /*is_executable=*/false); + if (not path) { + return std::nullopt; + } + return FileSystemManager::ReadFile(*path); } }; diff --git a/src/buildtool/execution_api/remote/TARGETS b/src/buildtool/execution_api/remote/TARGETS index 7c45ec8d3..0256ea050 100644 --- a/src/buildtool/execution_api/remote/TARGETS +++ b/src/buildtool/execution_api/remote/TARGETS @@ -22,40 +22,45 @@ ] , "deps": [ "config" - , ["src/buildtool/auth", "auth"] - , ["src/buildtool/logging", "log_level"] - , ["src/buildtool/logging", "logging"] - , ["src/buildtool/execution_api/common", "common"] , ["@", "gsl", "", "gsl"] - , ["src/buildtool/common", "bazel_types"] - , ["src/buildtool/execution_api/bazel_msg", "bazel_msg"] - , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"] , ["src/buildtool/auth", "auth"] - , ["src/buildtool/execution_api/common", "bytestream-common"] - , ["src/utils/cpp", "gsl"] + , ["src/buildtool/common", "bazel_types"] + , ["src/buildtool/common", "common"] , ["src/buildtool/common/remote", "client_common"] , ["src/buildtool/common/remote", "port"] , ["src/buildtool/common/remote", "retry_config"] - , ["src/buildtool/file_system", "git_repo"] , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/crypto", "hash_info"] + , ["src/buildtool/execution_api/bazel_msg", "bazel_msg"] + , 
["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"] + , ["src/buildtool/execution_api/common", "artifact_blob_container"] + , ["src/buildtool/execution_api/common", "bytestream_utils"] + , ["src/buildtool/execution_api/common", "common"] + , ["src/buildtool/file_system", "git_repo"] + , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] + , ["src/utils/cpp", "expected"] ] , "proto": [ ["@", "bazel_remote_apis", "", "remote_execution_proto"] , ["@", "googleapis", "", "google_bytestream_proto"] + , ["@", "googleapis", "", "google_longrunning_operations_proto"] ] , "stage": ["src", "buildtool", "execution_api", "remote"] , "private-deps": - [ ["src/buildtool/common", "common"] - , ["src/buildtool/file_system", "file_system_manager"] - , ["src/buildtool/file_system", "object_type"] - , ["src/buildtool/compatibility", "compatibility"] - , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"] - , ["src/buildtool/execution_api/common", "common_api"] - , ["src/buildtool/execution_api/utils", "outputscheck"] - , ["src/buildtool/compatibility", "compatibility"] + [ ["@", "fmt", "", "fmt"] , ["@", "grpc", "", "grpc++"] + , ["@", "protoc", "", "libprotobuf"] + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common", "bazel_digest_factory"] + , ["src/buildtool/common", "protocol_traits"] , ["src/buildtool/common/remote", "retry"] + , ["src/buildtool/execution_api/common", "common_api"] , ["src/buildtool/execution_api/common", "message_limits"] + , ["src/buildtool/execution_api/utils", "outputscheck"] + , ["src/buildtool/file_system", "file_system_manager"] + , ["src/buildtool/file_system", "object_type"] + , ["src/utils/cpp", "gsl"] , ["src/utils/cpp", "path"] ] } @@ -71,23 +76,27 @@ , ["src/buildtool/common", "common"] , ["src/buildtool/common/remote", "port"] , ["src/buildtool/common/remote", "retry_config"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/execution_api/bazel_msg", 
"bazel_msg"] + , ["src/buildtool/execution_api/common", "blob_tree"] , ["src/buildtool/execution_api/common", "common"] - , ["src/buildtool/execution_api/common", "common_api"] ] , "stage": ["src", "buildtool", "execution_api", "remote"] , "private-deps": [ "bazel_network" , ["@", "fmt", "", "fmt"] - , ["src/buildtool/auth", "auth"] - , ["src/buildtool/compatibility", "compatibility"] - , ["src/buildtool/multithreading", "task_system"] - , ["src/buildtool/execution_api/common", "common"] + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common", "bazel_types"] + , ["src/buildtool/common", "protocol_traits"] + , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"] + , ["src/buildtool/execution_api/common", "artifact_blob_container"] + , ["src/buildtool/execution_api/common", "common_api"] , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/file_system", "object_type"] , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] + , ["src/buildtool/multithreading", "task_system"] , ["src/buildtool/storage", "fs_utils"] - , ["src/buildtool/crypto", "hash_function"] ] } , "config": @@ -113,8 +122,8 @@ , "name": ["context"] , "hdrs": ["context.hpp"] , "deps": - [ ["@", "gsl", "", "gsl"] - , "config" + [ "config" + , ["@", "gsl", "", "gsl"] , ["src/buildtool/auth", "auth"] , ["src/buildtool/common/remote", "retry_config"] ] diff --git a/src/buildtool/execution_api/remote/bazel/bazel_ac_client.cpp b/src/buildtool/execution_api/remote/bazel/bazel_ac_client.cpp index c50253582..51ccfeebf 100644 --- a/src/buildtool/execution_api/remote/bazel/bazel_ac_client.cpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_ac_client.cpp @@ -39,8 +39,7 @@ auto BazelAcClient::GetActionResult( -> std::optional { bazel_re::GetActionResultRequest request{}; request.set_instance_name(instance_name); - request.set_allocated_action_digest( - gsl::owner{new bazel_re::Digest{action_digest}}); + 
(*request.mutable_action_digest()) = action_digest; request.set_inline_stdout(inline_stdout); request.set_inline_stderr(inline_stderr); std::copy(inline_output_files.begin(), diff --git a/src/buildtool/execution_api/remote/bazel/bazel_action.cpp b/src/buildtool/execution_api/remote/bazel/bazel_action.cpp index dcbddb8d7..f805c6fcd 100644 --- a/src/buildtool/execution_api/remote/bazel/bazel_action.cpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_action.cpp @@ -16,6 +16,7 @@ #include // std::move +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp" #include "src/buildtool/execution_api/remote/bazel/bazel_response.hpp" @@ -24,7 +25,7 @@ BazelAction::BazelAction( std::shared_ptr network, - bazel_re::Digest root_digest, + ArtifactDigest root_digest, std::vector command, std::string cwd, std::vector output_files, @@ -68,6 +69,22 @@ auto BazelAction::Execute(Logger const* logger) noexcept action->hash()); } + auto create_response = [](Logger const* logger, + std::string const& action_hash, + auto&&... args) -> IExecutionResponse::Ptr { + try { + return IExecutionResponse::Ptr{new BazelResponse{ + action_hash, std::forward(args)...}}; + } catch (...) 
{ + if (logger != nullptr) { + logger->Emit(LogLevel::Error, + "failed to create a response for {}", + action_hash); + } + } + return nullptr; + }; + if (do_cache) { if (auto result = network_->GetCachedActionResult(*action, output_files_)) { @@ -76,8 +93,11 @@ auto BazelAction::Execute(Logger const* logger) noexcept *result, output_files_, output_dirs_) ) { - return IExecutionResponse::Ptr{new BazelResponse{ - action->hash(), network_, {*result, true}}}; + return create_response( + logger, + action->hash(), + network_, + BazelExecutionClient::ExecutionOutput{*result, true}); } } } @@ -100,11 +120,13 @@ auto BazelAction::Execute(Logger const* logger) noexcept } output->cached_result = true; - return IExecutionResponse::Ptr{new BazelResponse{ - action_cached->hash(), network_, std::move(*output)}}; + return create_response(logger, + action_cached->hash(), + network_, + *std::move(output)); } - return IExecutionResponse::Ptr{new BazelResponse{ - action->hash(), network_, std::move(*output)}}; + return create_response( + logger, action->hash(), network_, *std::move(output)); } } @@ -112,7 +134,7 @@ auto BazelAction::Execute(Logger const* logger) noexcept } auto BazelAction::CreateBundlesForAction(BazelBlobContainer* blobs, - bazel_re::Digest const& exec_dir, + ArtifactDigest const& exec_dir, bool do_not_cache) const noexcept -> std::optional { using StoreFunc = BazelMsgFactory::ActionDigestRequest::BlobStoreFunc; @@ -134,5 +156,10 @@ auto BazelAction::CreateBundlesForAction(BazelBlobContainer* blobs, .timeout = timeout_, .skip_action_cache = do_not_cache, .store_blob = std::move(store_blob)}; - return BazelMsgFactory::CreateActionDigestFromCommandLine(request); + auto const action_digest = + BazelMsgFactory::CreateActionDigestFromCommandLine(request); + if (not action_digest) { + return std::nullopt; + } + return ArtifactDigestFactory::ToBazel(*action_digest); } diff --git a/src/buildtool/execution_api/remote/bazel/bazel_action.hpp 
b/src/buildtool/execution_api/remote/bazel/bazel_action.hpp index 7bf62a45a..c990cc8ea 100644 --- a/src/buildtool/execution_api/remote/bazel/bazel_action.hpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_action.hpp @@ -21,6 +21,7 @@ #include #include +#include "src/buildtool/common/artifact_digest.hpp" #include "src/buildtool/common/bazel_types.hpp" #include "src/buildtool/execution_api/common/execution_action.hpp" #include "src/buildtool/execution_api/remote/bazel/bazel_network.hpp" @@ -42,7 +43,7 @@ class BazelAction final : public IExecutionAction { private: std::shared_ptr const network_; - bazel_re::Digest const root_digest_; + ArtifactDigest const root_digest_; std::vector const cmdline_; std::string const cwd_; std::vector output_files_; @@ -54,7 +55,7 @@ class BazelAction final : public IExecutionAction { explicit BazelAction( std::shared_ptr network, - bazel_re::Digest root_digest, + ArtifactDigest root_digest, std::vector command, std::string cwd, std::vector output_files, @@ -63,7 +64,7 @@ class BazelAction final : public IExecutionAction { std::map const& properties) noexcept; [[nodiscard]] auto CreateBundlesForAction(BazelBlobContainer* blobs, - bazel_re::Digest const& exec_dir, + ArtifactDigest const& exec_dir, bool do_not_cache) const noexcept -> std::optional; }; diff --git a/src/buildtool/execution_api/remote/bazel/bazel_api.cpp b/src/buildtool/execution_api/remote/bazel/bazel_api.cpp index b76c3877d..c9206d1b8 100644 --- a/src/buildtool/execution_api/remote/bazel/bazel_api.cpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_api.cpp @@ -19,6 +19,7 @@ #include #include #include +#include #include #include #include @@ -26,8 +27,10 @@ #include "fmt/core.h" #include "src/buildtool/auth/authentication.hpp" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/common/bazel_types.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" +#include 
"src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_common.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp" @@ -149,14 +152,16 @@ namespace { transmitted_bytes += chunk_digest.size(); } } - double transmission_factor = - (total_size > 0) ? 100.0 * transmitted_bytes / total_size - : 100.0; + double transmission_factor = 0.; + if (total_size > 0) { + transmission_factor = static_cast(transmitted_bytes) / + static_cast(total_size); + } return fmt::format( "Blob splitting saved {} bytes ({:.2f}%) of network traffic " "when fetching {}.\n", total_size - transmitted_bytes, - 100.0 - transmission_factor, + transmission_factor, artifact_info.ToString()); }); @@ -170,7 +175,9 @@ namespace { try { blobs.reserve(container.Size()); for (const auto& blob : container.Blobs()) { - blobs.emplace_back(blob.digest, blob.data, blob.is_exec); + blobs.emplace_back(ArtifactDigestFactory::ToBazel(blob.digest), + blob.data, + blob.is_exec); } } catch (...) 
{ return std::nullopt; @@ -180,13 +187,14 @@ namespace { } // namespace -BazelApi::BazelApi(std::string const& instance_name, - std::string const& host, - Port port, - gsl::not_null const& auth, - gsl::not_null const& retry_config, - ExecutionConfiguration const& exec_config, - HashFunction hash_function) noexcept { +BazelApi::BazelApi( + std::string const& instance_name, + std::string const& host, + Port port, + gsl::not_null const& auth, + gsl::not_null const& retry_config, + ExecutionConfiguration const& exec_config, + gsl::not_null const& hash_function) noexcept { network_ = std::make_shared(instance_name, host, port, @@ -211,7 +219,8 @@ auto BazelApi::CreateAction( std::map const& env_vars, std::map const& properties) const noexcept -> IExecutionAction::Ptr { - return std::unique_ptr{new BazelAction{network_, + return std::unique_ptr{new (std::nothrow) + BazelAction{network_, root_digest, command, cwd, @@ -221,7 +230,7 @@ auto BazelApi::CreateAction( properties}}; } -// NOLINTNEXTLINE(misc-no-recursion, google-default-arguments) +// NOLINTNEXTLINE(google-default-arguments) [[nodiscard]] auto BazelApi::RetrieveToPaths( std::vector const& artifacts_info, std::vector const& output_paths, @@ -314,7 +323,6 @@ auto BazelApi::CreateAction( ); } -// NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] auto BazelApi::RetrieveToCas( std::vector const& artifacts_info, IExecutionApi const& api) const noexcept -> bool { @@ -373,7 +381,6 @@ auto BazelApi::CreateAction( artifacts_info, api, jobs, use_blob_splitting, &done); } -/// NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] auto BazelApi::ParallelRetrieveToCasWithCache( std::vector const& all_artifacts_info, IExecutionApi const& api, @@ -381,20 +388,24 @@ auto BazelApi::CreateAction( bool use_blob_splitting, gsl::not_null*> done) const noexcept -> bool { - - std::vector artifacts_info{}; - artifacts_info.reserve(all_artifacts_info.size()); - for (auto const& info : all_artifacts_info) { - if (not done->contains(info)) { - 
artifacts_info.emplace_back(info); + std::unordered_set artifacts_info; + try { + artifacts_info.reserve(all_artifacts_info.size()); + for (auto const& info : all_artifacts_info) { + if (not done->contains(info)) { + artifacts_info.emplace(info); + } } + } catch (std::exception const& ex) { + Logger::Log( + LogLevel::Error, + "BazelApi: Collecting the set of artifacts failed with:\n{}", + ex.what()); + return false; } if (artifacts_info.empty()) { return true; // Nothing to do } - std::sort(artifacts_info.begin(), artifacts_info.end()); - auto last_info = std::unique(artifacts_info.begin(), artifacts_info.end()); - artifacts_info.erase(last_info, artifacts_info.end()); // Determine missing artifacts in other CAS. auto missing_artifacts_info = GetMissingArtifactsInfo( @@ -527,28 +538,28 @@ auto BazelApi::CreateAction( return std::nullopt; } - if (Compatibility::IsCompatible()) { - return CommonUploadTreeCompatible( - *this, - *build_root, - [&network = network_](std::vector const& digests, - std::vector* targets) { - auto reader = network->CreateReader(); - targets->reserve(digests.size()); - for (auto blobs : reader.ReadIncrementally(digests)) { - for (auto const& blob : blobs) { - targets->emplace_back(*blob.data); - } - } - }); + if (ProtocolTraits::IsNative(network_->GetHashFunction().GetType())) { + return CommonUploadTreeNative(*this, *build_root); } - - return CommonUploadTreeNative(*this, *build_root); + return CommonUploadTreeCompatible( + *this, + *build_root, + [&network = network_]( + std::vector const& digests, + gsl::not_null*> const& targets) { + auto reader = network->CreateReader(); + targets->reserve(digests.size()); + for (auto blobs : reader.ReadIncrementally(digests)) { + for (auto const& blob : blobs) { + targets->emplace_back(*blob.data); + } + } + }); } [[nodiscard]] auto BazelApi::IsAvailable( ArtifactDigest const& digest) const noexcept -> bool { - return network_->IsAvailable(digest); + return 
network_->IsAvailable(ArtifactDigestFactory::ToBazel(digest)); } [[nodiscard]] auto BazelApi::IsAvailable( @@ -558,8 +569,8 @@ auto BazelApi::CreateAction( bazel_digests.reserve(digests.size()); std::unordered_map digest_map; for (auto const& digest : digests) { - auto const& bazel_digest = static_cast(digest); - bazel_digests.push_back(bazel_digest); + auto const& bazel_digest = + bazel_digests.emplace_back(ArtifactDigestFactory::ToBazel(digest)); digest_map[bazel_digest] = digest; } auto bazel_result = network_->IsAvailable(bazel_digests); @@ -573,17 +584,21 @@ auto BazelApi::CreateAction( [[nodiscard]] auto BazelApi::SplitBlob(ArtifactDigest const& blob_digest) const noexcept -> std::optional> { - auto chunk_digests = - network_->SplitBlob(static_cast(blob_digest)); + auto const chunk_digests = + network_->SplitBlob(ArtifactDigestFactory::ToBazel(blob_digest)); if (not chunk_digests) { return std::nullopt; } auto artifact_digests = std::vector{}; artifact_digests.reserve(chunk_digests->size()); - std::transform(chunk_digests->cbegin(), - chunk_digests->cend(), - std::back_inserter(artifact_digests), - [](auto const& digest) { return ArtifactDigest{digest}; }); + for (auto const& chunk : *chunk_digests) { + auto part = ArtifactDigestFactory::FromBazel( + network_->GetHashFunction().GetType(), chunk); + if (not part) { + return std::nullopt; + } + artifact_digests.emplace_back(*std::move(part)); + } return artifact_digests; } @@ -601,14 +616,19 @@ auto BazelApi::CreateAction( chunk_digests.cend(), std::back_inserter(digests), [](auto const& artifact_digest) { - return static_cast(artifact_digest); + return ArtifactDigestFactory::ToBazel(artifact_digest); }); - auto digest = network_->SpliceBlob( - static_cast(blob_digest), digests); + auto const digest = network_->SpliceBlob( + ArtifactDigestFactory::ToBazel(blob_digest), digests); if (not digest) { return std::nullopt; } - return ArtifactDigest{*digest}; + auto result = ArtifactDigestFactory::FromBazel( + 
network_->GetHashFunction().GetType(), *digest); + if (not result) { + return std::nullopt; + } + return *std::move(result); } [[nodiscard]] auto BazelApi::BlobSpliceSupport() const noexcept -> bool { diff --git a/src/buildtool/execution_api/remote/bazel/bazel_api.hpp b/src/buildtool/execution_api/remote/bazel/bazel_api.hpp index 6b2a6f176..b829529cd 100644 --- a/src/buildtool/execution_api/remote/bazel/bazel_api.hpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_api.hpp @@ -48,7 +48,7 @@ class BazelApi final : public IExecutionApi { gsl::not_null const& auth, gsl::not_null const& retry_config, ExecutionConfiguration const& exec_config, - HashFunction hash_function) noexcept; + gsl::not_null const& hash_function) noexcept; BazelApi(BazelApi const&) = delete; BazelApi(BazelApi&& other) noexcept; auto operator=(BazelApi const&) -> BazelApi& = delete; diff --git a/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp b/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp index aac7f2a51..fa2991873 100644 --- a/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp @@ -22,44 +22,34 @@ #include "grpcpp/grpcpp.h" #include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/bazel_digest_factory.hpp" #include "src/buildtool/common/bazel_types.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/common/remote/client_common.hpp" #include "src/buildtool/common/remote/retry.hpp" #include "src/buildtool/common/remote/retry_config.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" -#include "src/buildtool/compatibility/native_support.hpp" -#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/execution_api/common/bytestream_utils.hpp" #include "src/buildtool/execution_api/common/execution_common.hpp" #include "src/buildtool/execution_api/common/message_limits.hpp" +#include 
"src/buildtool/file_system/object_type.hpp" #include "src/buildtool/logging/log_level.hpp" namespace { -[[nodiscard]] auto ToResourceName(std::string const& instance_name, - bazel_re::Digest const& digest) noexcept - -> std::string { - return fmt::format( - "{}/blobs/{}/{}", instance_name, digest.hash(), digest.size_bytes()); -} - // In order to determine whether blob splitting is supported at the remote, a // trial request to the remote CAS service is issued. This is just a workaround // until the blob split API extension is accepted as part of the official remote // execution protocol. Then, the ordinary way to determine server capabilities // can be employed by using the capabilities service. [[nodiscard]] auto BlobSplitSupport( + HashFunction hash_function, std::string const& instance_name, std::unique_ptr const& stub) noexcept -> bool { // Create empty blob. std::string empty_str{}; - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; - std::string hash = hash_function.HashBlobData(empty_str).HexString(); - bazel_re::Digest digest{}; - digest.set_hash(NativeSupport::Prefix(hash, false)); - digest.set_size_bytes(empty_str.size()); + auto const digest = BazelDigestFactory::HashDataAs( + hash_function, empty_str); // Upload empty blob. grpc::ClientContext update_context{}; @@ -88,6 +78,7 @@ namespace { // Cached version of blob-split support request. [[nodiscard]] auto BlobSplitSupportCached( + HashFunction hash_function, std::string const& instance_name, std::unique_ptr const& stub, Logger const* logger) noexcept -> bool { @@ -100,7 +91,7 @@ namespace { return blob_split_support_map[instance_name]; } } - auto supported = ::BlobSplitSupport(instance_name, stub); + auto supported = ::BlobSplitSupport(hash_function, instance_name, stub); logger->Emit(LogLevel::Debug, "Blob split support for \"{}\": {}", instance_name, @@ -116,18 +107,14 @@ namespace { // remote execution protocol. 
Then, the ordinary way to determine server // capabilities can be employed by using the capabilities service. [[nodiscard]] auto BlobSpliceSupport( + HashFunction hash_function, std::string const& instance_name, std::unique_ptr const& stub) noexcept -> bool { // Create empty blob. std::string empty_str{}; - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; - std::string hash = hash_function.HashBlobData(empty_str).HexString(); - bazel_re::Digest digest{}; - digest.set_hash(NativeSupport::Prefix(hash, false)); - digest.set_size_bytes(empty_str.size()); + auto const digest = BazelDigestFactory::HashDataAs( + hash_function, empty_str); // Upload empty blob. grpc::ClientContext update_context{}; @@ -157,6 +144,7 @@ namespace { // Cached version of blob-splice support request. [[nodiscard]] auto BlobSpliceSupportCached( + HashFunction hash_function, std::string const& instance_name, std::unique_ptr const& stub, Logger const* logger) noexcept -> bool { @@ -169,7 +157,7 @@ namespace { return blob_splice_support_map[instance_name]; } } - auto supported = ::BlobSpliceSupport(instance_name, stub); + auto supported = ::BlobSpliceSupport(hash_function, instance_name, stub); logger->Emit(LogLevel::Debug, "Blob splice support for \"{}\": {}", instance_name, @@ -275,7 +263,6 @@ auto BazelCasClient::BatchReadBlobs( return result; } -// NOLINTNEXTLINE(misc-no-recursion) auto BazelCasClient::GetTree(std::string const& instance_name, bazel_re::Digest const& root_digest, std::int32_t page_size, @@ -329,12 +316,9 @@ auto BazelCasClient::UpdateSingleBlob(std::string const& instance_name, } uuid = CreateUUIDVersion4(*id); } - auto ok = stream_->Write(fmt::format("{}/uploads/{}/blobs/{}/{}", - instance_name, - uuid, - blob.digest.hash(), - blob.digest.size_bytes()), - *blob.data); + auto ok = stream_->Write( + ByteStreamUtils::WriteRequest{instance_name, uuid, blob.digest}, + *blob.data); if (not ok) { 
logger_.Emit(LogLevel::Error, "Failed to write {}:{}", @@ -347,22 +331,26 @@ auto BazelCasClient::UpdateSingleBlob(std::string const& instance_name, auto BazelCasClient::IncrementalReadSingleBlob(std::string const& instance_name, bazel_re::Digest const& digest) const noexcept -> ByteStreamClient::IncrementalReader { - return stream_->IncrementalRead(ToResourceName(instance_name, digest)); + return stream_->IncrementalRead( + ByteStreamUtils::ReadRequest{instance_name, digest}); } auto BazelCasClient::ReadSingleBlob( std::string const& instance_name, bazel_re::Digest const& digest) const noexcept -> std::optional { - if (auto data = stream_->Read(ToResourceName(instance_name, digest))) { + if (auto data = stream_->Read( + ByteStreamUtils::ReadRequest{instance_name, digest})) { return BazelBlob{digest, std::move(*data), /*is_exec=*/false}; } return std::nullopt; } -auto BazelCasClient::SplitBlob(std::string const& instance_name, +auto BazelCasClient::SplitBlob(HashFunction hash_function, + std::string const& instance_name, bazel_re::Digest const& blob_digest) const noexcept -> std::optional> { - if (not BlobSplitSupportCached(instance_name, stub_, &logger_)) { + if (not BlobSplitSupportCached( + hash_function, instance_name, stub_, &logger_)) { return std::nullopt; } bazel_re::SplitBlobRequest request{}; @@ -386,11 +374,13 @@ auto BazelCasClient::SplitBlob(std::string const& instance_name, } auto BazelCasClient::SpliceBlob( + HashFunction hash_function, std::string const& instance_name, bazel_re::Digest const& blob_digest, std::vector const& chunk_digests) const noexcept -> std::optional { - if (not BlobSpliceSupportCached(instance_name, stub_, &logger_)) { + if (not BlobSpliceSupportCached( + hash_function, instance_name, stub_, &logger_)) { return std::nullopt; } bazel_re::SpliceBlobRequest request{}; @@ -418,19 +408,23 @@ auto BazelCasClient::SpliceBlob( } auto BazelCasClient::BlobSplitSupport( + HashFunction hash_function, std::string const& instance_name) const 
noexcept -> bool { - return ::BlobSplitSupportCached(instance_name, stub_, &logger_); + return ::BlobSplitSupportCached( + hash_function, instance_name, stub_, &logger_); } auto BazelCasClient::BlobSpliceSupport( + HashFunction hash_function, std::string const& instance_name) const noexcept -> bool { - return ::BlobSpliceSupportCached(instance_name, stub_, &logger_); + return ::BlobSpliceSupportCached( + hash_function, instance_name, stub_, &logger_); } -template +template auto BazelCasClient::FindMissingBlobs(std::string const& instance_name, - T_ForwardIter const& start, - T_ForwardIter const& end) const noexcept + TForwardIter const& start, + TForwardIter const& end) const noexcept -> std::vector { std::vector result; if (start == end) { @@ -489,7 +483,6 @@ auto BazelCasClient::FindMissingBlobs(std::string const& instance_name, return result; } -// NOLINTNEXTLINE(misc-no-recursion) auto BazelCasClient::BatchUpdateBlobs( std::string const& instance_name, std::vector>::const_iterator const& begin, @@ -549,12 +542,14 @@ auto BazelCasClient::BatchUpdateBlobs( return batch_update_blobs(request); }, retry_config_, - logger_); + logger_, + LogLevel::Performance); })) { - logger_.Emit(LogLevel::Error, "Failed to BatchUpdateBlobs."); + logger_.Emit(LogLevel::Performance, "Failed to BatchUpdateBlobs."); } } catch (...) 
{ - logger_.Emit(LogLevel::Error, "Caught exception in DoBatchUpdateBlobs"); + logger_.Emit(LogLevel::Warning, + "Caught exception in DoBatchUpdateBlobs"); } logger_.Emit(LogLevel::Trace, [begin, end, &result]() { std::ostringstream oss{}; @@ -613,9 +608,9 @@ auto BazelCasClient::BatchUpdateBlobs( namespace detail { // Getter for response contents (needs specialization, never implemented) -template -static auto GetResponseContents(T_Response const&) noexcept - -> pb::RepeatedPtrField const&; +template +static auto GetResponseContents(TResponse const&) noexcept + -> pb::RepeatedPtrField const&; // Specialization of GetResponseContents for 'FindMissingBlobsResponse' template <> @@ -643,26 +638,26 @@ auto GetResponseContents( } // namespace detail -template +template auto BazelCasClient::CreateBatchRequestsMaxSize( std::string const& instance_name, - T_ForwardIter const& first, - T_ForwardIter const& last, + TForwardIter const& first, + TForwardIter const& last, std::string const& heading, - std::function const& - request_builder) const noexcept -> std::vector { + std::function const& + request_builder) const noexcept -> std::vector { if (first == last) { return {}; } - std::vector result; - T_Request accumulating_request; + std::vector result; + TRequest accumulating_request; std::for_each( first, last, [&instance_name, &accumulating_request, &result, &request_builder]( auto const& blob) { - T_Request request; + TRequest request; request.set_instance_name(instance_name); request_builder(&request, blob); if (accumulating_request.ByteSizeLong() + request.ByteSizeLong() > @@ -693,8 +688,7 @@ auto BazelCasClient::CreateBatchRequestsMaxSize( auto BazelCasClient::CreateUpdateBlobsSingleRequest(BazelBlob const& b) noexcept -> bazel_re::BatchUpdateBlobsRequest_Request { bazel_re::BatchUpdateBlobsRequest_Request r{}; - r.set_allocated_digest( - gsl::owner{new bazel_re::Digest{b.digest}}); + (*r.mutable_digest()) = b.digest; r.set_data(*b.data); return r; } @@ -706,18 +700,17 
@@ auto BazelCasClient::CreateGetTreeRequest( std::string const& page_token) noexcept -> bazel_re::GetTreeRequest { bazel_re::GetTreeRequest request; request.set_instance_name(instance_name); - request.set_allocated_root_digest( - gsl::owner{new bazel_re::Digest{root_digest}}); + (*request.mutable_root_digest()) = root_digest; request.set_page_size(page_size); request.set_page_token(page_token); return request; } -template +template auto BazelCasClient::ProcessResponseContents( - T_Response const& response) const noexcept -> std::vector { - std::vector output; - auto const& contents = detail::GetResponseContents(response); + TResponse const& response) const noexcept -> std::vector { + std::vector output; + auto const& contents = detail::GetResponseContents(response); std::copy(contents.begin(), contents.end(), std::back_inserter(output)); return output; } diff --git a/src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp b/src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp index 225ae05d7..e2ddc5617 100644 --- a/src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp @@ -29,6 +29,7 @@ #include "src/buildtool/common/bazel_types.hpp" #include "src/buildtool/common/remote/port.hpp" #include "src/buildtool/common/remote/retry_config.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_common.hpp" #include "src/buildtool/execution_api/remote/bazel/bytestream_client.hpp" @@ -118,51 +119,59 @@ class BazelCasClient { const noexcept -> std::optional; /// @brief Split single blob into chunks + /// @param[in] hash_function Hash function to be used for creation of + /// an empty blob. 
/// @param[in] instance_name Name of the CAS instance /// @param[in] blob_digest Blob digest to be splitted /// @return The chunk digests of the splitted blob - [[nodiscard]] auto SplitBlob(std::string const& instance_name, + [[nodiscard]] auto SplitBlob(HashFunction hash_function, + std::string const& instance_name, bazel_re::Digest const& blob_digest) const noexcept -> std::optional>; /// @brief Splice blob from chunks at the remote side + /// @param[in] hash_function Hash function to be used for creation of + /// an empty blob. /// @param[in] instance_name Name of the CAS instance /// @param[in] blob_digest Expected digest of the spliced blob /// @param[in] chunk_digests The chunk digests of the splitted blob /// @return Whether the splice call was successful [[nodiscard]] auto SpliceBlob( + HashFunction hash_function, std::string const& instance_name, bazel_re::Digest const& blob_digest, std::vector const& chunk_digests) const noexcept -> std::optional; [[nodiscard]] auto BlobSplitSupport( + HashFunction hash_function, std::string const& instance_name) const noexcept -> bool; [[nodiscard]] auto BlobSpliceSupport( + HashFunction hash_function, std::string const& instance_name) const noexcept -> bool; private: - std::unique_ptr stream_{}; + std::unique_ptr stream_; RetryConfig const& retry_config_; std::unique_ptr stub_; Logger logger_{"RemoteCasClient"}; - template + template [[nodiscard]] auto FindMissingBlobs(std::string const& instance_name, - T_OutputIter const& start, - T_OutputIter const& end) const noexcept + TOutputIter const& start, + TOutputIter const& end) const noexcept -> std::vector; - template + template [[nodiscard]] auto CreateBatchRequestsMaxSize( std::string const& instance_name, - T_ForwardIter const& first, - T_ForwardIter const& last, + TForwardIter const& first, + TForwardIter const& last, std::string const& heading, - std::function const& - request_builder) const noexcept -> std::vector; + std::function const& + request_builder) const 
noexcept -> std::vector; [[nodiscard]] static auto CreateUpdateBlobsSingleRequest( BazelBlob const& b) noexcept @@ -176,22 +185,22 @@ class BazelCasClient { /// \brief Utility class for supporting the Retry strategy while parsing a /// BatchResponse - template + template struct RetryProcessBatchResponse { bool ok{false}; - std::vector result{}; + std::vector result; bool exit_retry_loop{false}; - std::optional error_msg{}; + std::optional error_msg; }; // If this function is defined in the .cpp file, clang raises an error // while linking - template + template [[nodiscard]] auto ProcessBatchResponse( - T_Response const& response, - std::function*, T_Inner const&)> const& - inserter) const noexcept -> RetryProcessBatchResponse { - std::vector output; + TResponse const& response, + std::function*, TInner const&)> const& + inserter) const noexcept -> RetryProcessBatchResponse { + std::vector output; for (auto const& res : response.responses()) { auto const& res_status = res.status(); if (res_status.code() == static_cast(grpc::StatusCode::OK)) { @@ -211,9 +220,9 @@ class BazelCasClient { return {.ok = true, .result = std::move(output)}; } - template - auto ProcessResponseContents(T_Response const& response) const noexcept - -> std::vector; + template + auto ProcessResponseContents(TResponse const& response) const noexcept + -> std::vector; }; #endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_CAS_CLIENT_HPP diff --git a/src/buildtool/execution_api/remote/bazel/bazel_execution_client.cpp b/src/buildtool/execution_api/remote/bazel/bazel_execution_client.cpp index 15ff2c34d..09f16b8ff 100644 --- a/src/buildtool/execution_api/remote/bazel/bazel_execution_client.cpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_execution_client.cpp @@ -16,6 +16,8 @@ #include // std::move +#include "fmt/core.h" +#include "google/protobuf/text_format.h" #include "grpcpp/grpcpp.h" #include "src/buildtool/common/remote/client_common.hpp" #include 
"src/buildtool/common/remote/retry.hpp" @@ -40,6 +42,17 @@ void LogExecutionStatus(gsl::not_null const& logger, "Execution could not be started.\n{}", s.ShortDebugString()); break; + case grpc::StatusCode::FAILED_PRECONDITION: + // quote from remote_execution.proto: + // One or more errors occurred in setting up the + // action requested, such as a missing input or command or no worker + // being available. The client may be able to fix the errors and + // retry. + logger->Emit(LogLevel::Progress, + "Some precondition for the action failed.\n{}", + s.message()); + break; + default: // fallback to default status logging LogStatus(logger, LogLevel::Error, s); @@ -80,8 +93,7 @@ auto BazelExecutionClient::Execute(std::string const& instance_name, bazel_re::ExecuteRequest request; request.set_instance_name(instance_name); request.set_skip_cache_lookup(config.skip_cache_lookup); - request.set_allocated_action_digest( - gsl::owner{new bazel_re::Digest(action_digest)}); + (*request.mutable_action_digest()) = action_digest; request.set_allocated_execution_policy(execution_policy.release()); request.set_allocated_results_cache_policy(results_cache_policy.release()); BazelExecutionClient::ExecutionResponse response; @@ -90,20 +102,22 @@ auto BazelExecutionClient::Execute(std::string const& instance_name, std::unique_ptr> reader(stub_->Execute(&context, request)); - auto [op, fatal, error_msg] = ReadExecution(reader.get(), wait); + auto [op, fatal, _] = ReadExecution(reader.get(), wait); if (not op.has_value()) { - return { - .ok = false, .exit_retry_loop = fatal, .error_msg = error_msg}; + return {.ok = false, .exit_retry_loop = fatal}; } auto contents = ExtractContents(std::move(op)); response = contents.response; + if (response.state == ExecutionResponse::State::Ongoing) { + return {.ok = true, .exit_retry_loop = true}; + } if (response.state == ExecutionResponse::State::Finished) { return {.ok = true}; } + auto const is_fatal = response.state != 
ExecutionResponse::State::Retry; return {.ok = false, - .exit_retry_loop = - response.state != ExecutionResponse::State::Retry, - .error_msg = contents.error_msg}; + .exit_retry_loop = is_fatal, + .error_msg = is_fatal ? std::nullopt : contents.error_msg}; }; if (not WithRetry(execute, retry_config_, logger_)) { logger_.Emit(LogLevel::Error, @@ -125,21 +139,19 @@ auto BazelExecutionClient::WaitExecution(std::string const& execution_handle) std::unique_ptr> reader(stub_->WaitExecution(&context, request)); - auto [op, fatal, error_msg] = - ReadExecution(reader.get(), /*wait=*/true); + auto [op, fatal, _] = ReadExecution(reader.get(), /*wait=*/true); if (not op.has_value()) { - return { - .ok = false, .exit_retry_loop = fatal, .error_msg = error_msg}; + return {.ok = false, .exit_retry_loop = fatal}; } auto contents = ExtractContents(std::move(op)); response = contents.response; if (response.state == ExecutionResponse::State::Finished) { return {.ok = true}; } + auto const is_fatal = response.state != ExecutionResponse::State::Retry; return {.ok = false, - .exit_retry_loop = - response.state != ExecutionResponse::State::Retry, - .error_msg = contents.error_msg}; + .exit_retry_loop = is_fatal, + .error_msg = is_fatal ? std::nullopt : contents.error_msg}; }; if (not WithRetry(wait_execution, retry_config_, logger_)) { logger_.Emit( @@ -163,7 +175,8 @@ auto BazelExecutionClient::ReadExecution( if (not reader->Read(&operation)) { grpc::Status status = reader->Finish(); auto exit_retry_loop = - status.error_code() != grpc::StatusCode::UNAVAILABLE; + (status.error_code() != grpc::StatusCode::UNAVAILABLE) && + (status.error_code() != grpc::StatusCode::DEADLINE_EXCEEDED); LogStatus(&logger_, (exit_retry_loop ? 
LogLevel::Error : LogLevel::Debug), status); @@ -177,7 +190,8 @@ auto BazelExecutionClient::ReadExecution( grpc::Status status = reader->Finish(); if (not status.ok()) { auto exit_retry_loop = - status.error_code() != grpc::StatusCode::UNAVAILABLE; + (status.error_code() != grpc::StatusCode::UNAVAILABLE) && + (status.error_code() != grpc::StatusCode::DEADLINE_EXCEEDED); LogStatus(&logger_, (exit_retry_loop ? LogLevel::Error : LogLevel::Debug), status); @@ -230,6 +244,17 @@ auto BazelExecutionClient::ExtractContents( if (status_code == grpc::StatusCode::UNAVAILABLE) { response.state = ExecutionResponse::State::Retry; } + else if (status_code == grpc::StatusCode::FAILED_PRECONDITION) { + logger_.Emit(LogLevel::Debug, [&exec_response] { + std::string text_repr; + google::protobuf::TextFormat::PrintToString(exec_response, + &text_repr); + return fmt::format( + "Full exec_response of precondition failure\n{}", + text_repr); + }); + response.state = ExecutionResponse::State::Retry; + } else { response.state = ExecutionResponse::State::Failed; } diff --git a/src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp b/src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp index b78ac36c5..fd1ff5c1e 100644 --- a/src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp @@ -15,6 +15,7 @@ #ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_EXECUTION_CLIENT_HPP #define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_EXECUTION_CLIENT_HPP +#include #include #include #include @@ -36,17 +37,23 @@ class BazelExecutionClient { public: struct ExecutionOutput { - bazel_re::ActionResult action_result{}; + bazel_re::ActionResult action_result; bool cached_result{}; - grpc::Status status{}; - std::unordered_map server_logs{}; - std::string message{}; + grpc::Status status; + std::unordered_map server_logs; + std::string message; }; struct ExecutionResponse { - 
enum class State { Failed, Ongoing, Finished, Unknown, Retry }; + enum class State : std::uint8_t { + Failed, + Ongoing, + Finished, + Unknown, + Retry + }; - std::string execution_handle{}; + std::string execution_handle; State state{State::Unknown}; std::optional output{std::nullopt}; diff --git a/src/buildtool/execution_api/remote/bazel/bazel_network.cpp b/src/buildtool/execution_api/remote/bazel/bazel_network.cpp index 5fff6305a..34ebb418c 100644 --- a/src/buildtool/execution_api/remote/bazel/bazel_network.cpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_network.cpp @@ -28,7 +28,7 @@ BazelNetwork::BazelNetwork( gsl::not_null const& auth, gsl::not_null const& retry_config, ExecutionConfiguration const& exec_config, - HashFunction hash_function) noexcept + gsl::not_null const& hash_function) noexcept : instance_name_{std::move(instance_name)}, cas_{std::make_unique(host, port, auth, retry_config)}, ac_{std::make_unique(host, port, auth, retry_config)}, @@ -37,7 +37,7 @@ BazelNetwork::BazelNetwork( auth, retry_config)}, exec_config_{exec_config}, - hash_function_{hash_function} {} + hash_function_{*hash_function} {} auto BazelNetwork::IsAvailable(bazel_re::Digest const& digest) const noexcept -> bool { @@ -54,27 +54,28 @@ auto BazelNetwork::IsAvailable(std::vector const& digests) auto BazelNetwork::SplitBlob(bazel_re::Digest const& blob_digest) const noexcept -> std::optional> { - return cas_->SplitBlob(instance_name_, blob_digest); + return cas_->SplitBlob(hash_function_, instance_name_, blob_digest); } auto BazelNetwork::SpliceBlob( bazel_re::Digest const& blob_digest, std::vector const& chunk_digests) const noexcept -> std::optional { - return cas_->SpliceBlob(instance_name_, blob_digest, chunk_digests); + return cas_->SpliceBlob( + hash_function_, instance_name_, blob_digest, chunk_digests); } auto BazelNetwork::BlobSplitSupport() const noexcept -> bool { - return cas_->BlobSplitSupport(instance_name_); + return cas_->BlobSplitSupport(hash_function_, 
instance_name_); } auto BazelNetwork::BlobSpliceSupport() const noexcept -> bool { - return cas_->BlobSpliceSupport(instance_name_); + return cas_->BlobSpliceSupport(hash_function_, instance_name_); } -template -auto BazelNetwork::DoUploadBlobs(T_Iter const& first, - T_Iter const& last) noexcept -> bool { +template +auto BazelNetwork::DoUploadBlobs(TIter const& first, + TIter const& last) noexcept -> bool { try { // Partition the blobs according to their size. The first group collects // all the blobs that can be uploaded in batch, the second group gathers @@ -130,6 +131,12 @@ auto BazelNetwork::ExecuteBazelActionSync( auto response = exec_->Execute(instance_name_, action, exec_config_, true /*wait*/); + if (response.state == + BazelExecutionClient::ExecutionResponse::State::Ongoing) { + Logger::Log( + LogLevel::Trace, "Waiting for {}", response.execution_handle); + response = exec_->WaitExecution(response.execution_handle); + } if (response.state != BazelExecutionClient::ExecutionResponse::State::Finished or not response.output) { @@ -143,7 +150,7 @@ auto BazelNetwork::ExecuteBazelActionSync( } auto BazelNetwork::CreateReader() const noexcept -> BazelNetworkReader { - return BazelNetworkReader{instance_name_, cas_.get(), hash_function_}; + return BazelNetworkReader{instance_name_, cas_.get(), &hash_function_}; } auto BazelNetwork::GetCachedActionResult( diff --git a/src/buildtool/execution_api/remote/bazel/bazel_network.hpp b/src/buildtool/execution_api/remote/bazel/bazel_network.hpp index 645b403d9..0dce2fbdb 100644 --- a/src/buildtool/execution_api/remote/bazel/bazel_network.hpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_network.hpp @@ -39,13 +39,14 @@ /// \brief Contains all network clients and is responsible for all network IO. 
class BazelNetwork { public: - explicit BazelNetwork(std::string instance_name, - std::string const& host, - Port port, - gsl::not_null const& auth, - gsl::not_null const& retry_config, - ExecutionConfiguration const& exec_config, - HashFunction hash_function) noexcept; + explicit BazelNetwork( + std::string instance_name, + std::string const& host, + Port port, + gsl::not_null const& auth, + gsl::not_null const& retry_config, + ExecutionConfiguration const& exec_config, + gsl::not_null const& hash_function) noexcept; /// \brief Check if digest exists in CAS /// \param[in] digest The digest to look up @@ -91,16 +92,16 @@ class BazelNetwork { -> std::optional; private: - std::string const instance_name_{}; - std::unique_ptr cas_{}; - std::unique_ptr ac_{}; - std::unique_ptr exec_{}; + std::string const instance_name_; + std::unique_ptr cas_; + std::unique_ptr ac_; + std::unique_ptr exec_; ExecutionConfiguration exec_config_{}; - HashFunction const hash_function_; + HashFunction const& hash_function_; - template - [[nodiscard]] auto DoUploadBlobs(T_Iter const& first, - T_Iter const& last) noexcept -> bool; + template + [[nodiscard]] auto DoUploadBlobs(TIter const& first, + TIter const& last) noexcept -> bool; }; #endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_NETWORK_HPP diff --git a/src/buildtool/execution_api/remote/bazel/bazel_network_reader.cpp b/src/buildtool/execution_api/remote/bazel/bazel_network_reader.cpp index bb21c5e0a..0a8d56388 100644 --- a/src/buildtool/execution_api/remote/bazel/bazel_network_reader.cpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_network_reader.cpp @@ -16,6 +16,9 @@ #include +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/bazel_digest_factory.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp" #include "src/buildtool/execution_api/common/message_limits.hpp" #include 
"src/buildtool/file_system/file_system_manager.hpp" @@ -27,10 +30,10 @@ BazelNetworkReader::BazelNetworkReader( std::string instance_name, gsl::not_null const& cas, - HashFunction hash_function) noexcept + gsl::not_null const& hash_function) noexcept : instance_name_{std::move(instance_name)}, cas_{*cas}, - hash_function_{hash_function} {} + hash_function_{*hash_function} {} BazelNetworkReader::BazelNetworkReader( BazelNetworkReader&& other, @@ -38,11 +41,13 @@ BazelNetworkReader::BazelNetworkReader( : instance_name_{other.instance_name_}, cas_{other.cas_}, hash_function_{other.hash_function_} { - if (Compatibility::IsCompatible() and request_remote_tree) { + if (not IsNativeProtocol() and request_remote_tree) { // Query full tree from remote CAS. Note that this is currently not // supported by Buildbarn revision c3c06bbe2a. - auto full_tree = cas_.GetTree( - instance_name_, *request_remote_tree, kMaxBatchTransferSize); + auto full_tree = + cas_.GetTree(instance_name_, + ArtifactDigestFactory::ToBazel(*request_remote_tree), + kMaxBatchTransferSize); auxiliary_map_ = MakeAuxiliaryMap(std::move(full_tree)); } } @@ -67,14 +72,14 @@ auto BazelNetworkReader::ReadDirectory(ArtifactDigest const& digest) auto BazelNetworkReader::ReadGitTree(ArtifactDigest const& digest) const noexcept -> std::optional { - ExpectsAudit(hash_function_.GetType() == HashFunction::Type::GitSHA1); + ExpectsAudit(IsNativeProtocol()); auto read_blob = ReadSingleBlob(digest); if (not read_blob) { Logger::Log(LogLevel::Debug, "Tree {} not found in CAS", digest.hash()); return std::nullopt; } - auto check_symlinks = [this](std::vector const& ids) { + auto check_symlinks = [this](std::vector const& ids) { size_t const size = ids.size(); size_t count = 0; for (auto blobs : ReadIncrementally(ids)) { @@ -122,7 +127,8 @@ auto BazelNetworkReader::DumpRawTree(Artifact::ObjectInfo const& info, auto BazelNetworkReader::DumpBlob(Artifact::ObjectInfo const& info, DumpCallback const& dumper) const noexcept -> 
bool { - auto reader = cas_.IncrementalReadSingleBlob(instance_name_, info.digest); + auto reader = cas_.IncrementalReadSingleBlob( + instance_name_, ArtifactDigestFactory::ToBazel(info.digest)); auto data = reader.Next(); while (data and not data->empty()) { try { @@ -137,16 +143,20 @@ auto BazelNetworkReader::DumpBlob(Artifact::ObjectInfo const& info, return data.has_value(); } +auto BazelNetworkReader::IsNativeProtocol() const noexcept -> bool { + return ProtocolTraits::IsNative(hash_function_.GetType()); +} + auto BazelNetworkReader::MakeAuxiliaryMap( std::vector&& full_tree) const noexcept -> std::optional { - ExpectsAudit(hash_function_.GetType() == HashFunction::Type::PlainSHA256); + ExpectsAudit(not IsNativeProtocol()); DirectoryMap result; result.reserve(full_tree.size()); for (auto& dir : full_tree) { try { - result.emplace(ArtifactDigest::Create( + result.emplace(ArtifactDigestFactory::HashDataAs( hash_function_, dir.SerializeAsString()), std::move(dir)); } catch (...) { @@ -159,16 +169,22 @@ auto BazelNetworkReader::MakeAuxiliaryMap( auto BazelNetworkReader::ReadSingleBlob(bazel_re::Digest const& digest) const noexcept -> std::optional { auto blob = cas_.ReadSingleBlob(instance_name_, digest); - if (blob and Validate(*blob)) { - return ArtifactBlob{ - ArtifactDigest{blob->digest}, blob->data, blob->is_exec}; + if (not blob) { + return std::nullopt; } - return std::nullopt; + auto hash_info = Validate(*blob); + if (not hash_info) { + return std::nullopt; + } + return ArtifactBlob{ + ArtifactDigest{*std::move(hash_info), blob->data->size()}, + blob->data, + blob->is_exec}; } auto BazelNetworkReader::ReadSingleBlob(ArtifactDigest const& digest) const noexcept -> std::optional { - return ReadSingleBlob(static_cast(digest)); + return ReadSingleBlob(ArtifactDigestFactory::ToBazel(digest)); } auto BazelNetworkReader::ReadIncrementally( @@ -180,7 +196,7 @@ auto BazelNetworkReader::ReadIncrementally( digests.end(), std::back_inserter(bazel_digests), 
[](ArtifactDigest const& d) { - return static_cast(d); + return ArtifactDigestFactory::ToBazel(d); }); return ReadIncrementally(std::move(bazel_digests)); } @@ -193,44 +209,47 @@ auto BazelNetworkReader::ReadIncrementally( auto BazelNetworkReader::BatchReadBlobs( std::vector const& blobs) const noexcept -> std::vector { - std::vector result = + std::vector const result = cas_.BatchReadBlobs(instance_name_, blobs.begin(), blobs.end()); - auto it = std::remove_if( - result.begin(), result.end(), [this](BazelBlob const& blob) { - return not Validate(blob); - }); - result.erase(it, result.end()); - std::vector artifacts; artifacts.reserve(result.size()); - std::transform(result.begin(), - result.end(), - std::back_inserter(artifacts), - [](BazelBlob const& blob) { - return ArtifactBlob{ArtifactDigest{blob.digest}, - blob.data, - blob.is_exec}; - }); + for (auto const& blob : result) { + if (auto hash_info = Validate(blob)) { + artifacts.emplace_back( + ArtifactDigest{*std::move(hash_info), blob.data->size()}, + blob.data, + blob.is_exec); + } + } return artifacts; } auto BazelNetworkReader::Validate(BazelBlob const& blob) const noexcept - -> bool { - ArtifactDigest const rehashed_digest = - NativeSupport::IsTree(blob.digest.hash()) - ? 
ArtifactDigest::Create(hash_function_, - *blob.data) - : ArtifactDigest::Create(hash_function_, - *blob.data); - if (rehashed_digest == ArtifactDigest{blob.digest}) { - return true; + -> std::optional { + // validate digest + auto requested_hash_info = + BazelDigestFactory::ToHashInfo(hash_function_.GetType(), blob.digest); + if (not requested_hash_info) { + Logger::Log(LogLevel::Warning, + "BazelNetworkReader: {}", + std::move(requested_hash_info).error()); + return std::nullopt; + } + + // rehash data + auto rehashed_info = HashInfo::HashData( + hash_function_, *blob.data, requested_hash_info->IsTree()); + + // ensure rehashed data produce the same hash + if (*requested_hash_info != rehashed_info) { + Logger::Log(LogLevel::Warning, + "Requested {}, but received {}", + requested_hash_info->Hash(), + rehashed_info.Hash()); + return std::nullopt; } - Logger::Log(LogLevel::Warning, - "Requested {}, but received {}", - ArtifactDigest{blob.digest}.hash(), - rehashed_digest.hash()); - return false; + return rehashed_info; } namespace { @@ -259,7 +278,7 @@ namespace { } } // namespace -BazelNetworkReader::IncrementalReader::iterator::iterator( +BazelNetworkReader::IncrementalReader::Iterator::Iterator( BazelNetworkReader const& owner, std::vector::const_iterator begin, std::vector::const_iterator end) noexcept @@ -267,7 +286,7 @@ BazelNetworkReader::IncrementalReader::iterator::iterator( current_ = FindCurrentIterator(begin_, end_); } -auto BazelNetworkReader::IncrementalReader::iterator::operator*() const noexcept +auto BazelNetworkReader::IncrementalReader::Iterator::operator*() const noexcept -> value_type { if (begin_ != current_) { if (std::distance(begin_, current_) > 1) { @@ -281,8 +300,8 @@ auto BazelNetworkReader::IncrementalReader::iterator::operator*() const noexcept return {}; } -auto BazelNetworkReader::IncrementalReader::iterator::operator++() noexcept - -> iterator& { +auto BazelNetworkReader::IncrementalReader::Iterator::operator++() noexcept + -> 
Iterator& { begin_ = current_; current_ = FindCurrentIterator(begin_, end_); return *this; diff --git a/src/buildtool/execution_api/remote/bazel/bazel_network_reader.hpp b/src/buildtool/execution_api/remote/bazel/bazel_network_reader.hpp index f009d968f..95f641263 100644 --- a/src/buildtool/execution_api/remote/bazel/bazel_network_reader.hpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_network_reader.hpp @@ -29,6 +29,7 @@ #include "src/buildtool/common/artifact_digest.hpp" #include "src/buildtool/common/bazel_types.hpp" #include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/crypto/hash_info.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp" #include "src/buildtool/execution_api/common/artifact_blob_container.hpp" #include "src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp" @@ -40,9 +41,10 @@ class BazelNetworkReader final { public: using DumpCallback = std::function; - explicit BazelNetworkReader(std::string instance_name, - gsl::not_null const& cas, - HashFunction hash_function) noexcept; + explicit BazelNetworkReader( + std::string instance_name, + gsl::not_null const& cas, + gsl::not_null const& hash_function) noexcept; BazelNetworkReader( BazelNetworkReader&& other, @@ -62,6 +64,8 @@ class BazelNetworkReader final { DumpCallback const& dumper) const noexcept -> bool; + [[nodiscard]] auto IsNativeProtocol() const noexcept -> bool; + [[nodiscard]] auto ReadSingleBlob(bazel_re::Digest const& digest) const noexcept -> std::optional; @@ -81,7 +85,7 @@ class BazelNetworkReader final { std::string const instance_name_; BazelCasClient const& cas_; - HashFunction const hash_function_; + HashFunction const& hash_function_; std::optional auxiliary_map_; [[nodiscard]] auto MakeAuxiliaryMap( @@ -92,7 +96,8 @@ class BazelNetworkReader final { std::vector const& blobs) const noexcept -> std::vector; - [[nodiscard]] auto Validate(BazelBlob const& blob) const noexcept -> bool; + [[nodiscard]] auto 
Validate(BazelBlob const& blob) const noexcept + -> std::optional; }; class BazelNetworkReader::IncrementalReader final { @@ -101,7 +106,7 @@ class BazelNetworkReader::IncrementalReader final { std::vector digests) noexcept : owner_(owner), digests_(std::move(digests)) {} - class iterator final { + class Iterator final { public: using value_type = std::vector; using pointer = value_type*; @@ -109,22 +114,22 @@ class BazelNetworkReader::IncrementalReader final { using difference_type = std::ptrdiff_t; using iterator_category = std::forward_iterator_tag; - iterator(BazelNetworkReader const& owner, + Iterator(BazelNetworkReader const& owner, std::vector::const_iterator begin, std::vector::const_iterator end) noexcept; auto operator*() const noexcept -> value_type; - auto operator++() noexcept -> iterator&; + auto operator++() noexcept -> Iterator&; - [[nodiscard]] friend auto operator==(iterator const& lhs, - iterator const& rhs) noexcept + [[nodiscard]] friend auto operator==(Iterator const& lhs, + Iterator const& rhs) noexcept -> bool { return lhs.begin_ == rhs.begin_ and lhs.end_ == rhs.end_ and lhs.current_ == rhs.current_; } - [[nodiscard]] friend auto operator!=(iterator const& lhs, - iterator const& rhs) noexcept + [[nodiscard]] friend auto operator!=(Iterator const& lhs, + Iterator const& rhs) noexcept -> bool { return not(lhs == rhs); } @@ -137,11 +142,11 @@ class BazelNetworkReader::IncrementalReader final { }; [[nodiscard]] auto begin() const noexcept { - return iterator{owner_, digests_.begin(), digests_.end()}; + return Iterator{owner_, digests_.begin(), digests_.end()}; } [[nodiscard]] auto end() const noexcept { - return iterator{owner_, digests_.end(), digests_.end()}; + return Iterator{owner_, digests_.end(), digests_.end()}; } private: diff --git a/src/buildtool/execution_api/remote/bazel/bazel_response.cpp b/src/buildtool/execution_api/remote/bazel/bazel_response.cpp index f56f99928..638747cb9 100644 --- 
a/src/buildtool/execution_api/remote/bazel/bazel_response.cpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_response.cpp @@ -15,9 +15,13 @@ #include "src/buildtool/execution_api/remote/bazel/bazel_response.hpp" #include +#include -#include "gsl/gsl" -#include "src/buildtool/compatibility/native_support.hpp" +#include "fmt/core.h" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/bazel_digest_factory.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp" #include "src/buildtool/execution_api/common/common_api.hpp" @@ -30,10 +34,20 @@ namespace { auto ProcessDirectoryMessage(HashFunction hash_function, bazel_re::Directory const& dir) noexcept - -> std::optional { + -> expected { + // in compatible mode: check validity of all symlinks + for (auto const& link : dir.symlinks()) { + if (not PathIsNonUpwards(link.target())) { + return unexpected{ + fmt::format("found invalid symlink at {}", link.name())}; + } + } auto data = dir.SerializeAsString(); - auto digest = ArtifactDigest::Create(hash_function, data); - return BazelBlob{std::move(digest), std::move(data), /*is_exec=*/false}; + auto digest = + BazelDigestFactory::HashDataAs(hash_function, data); + return BazelBlob{std::move(digest), + std::move(data), + /*is_exec=*/false}; } } // namespace @@ -50,20 +64,28 @@ auto BazelResponse::ReadStringBlob(bazel_re::Digest const& id) noexcept return std::string{}; } -auto BazelResponse::Artifacts() noexcept -> ArtifactInfos const& { - Populate(); - return artifacts_; +auto BazelResponse::Artifacts() noexcept + -> expected, std::string> { + if (auto error_msg = Populate()) { + return unexpected{*std::move(error_msg)}; + } + return gsl::not_null( + &artifacts_); // explicit type needed for expected } -auto BazelResponse::DirectorySymlinks() noexcept -> 
DirSymlinks const& { - Populate(); - return dir_symlinks_; +auto BazelResponse::DirectorySymlinks() noexcept + -> expected, std::string> { + if (auto error_msg = Populate()) { + return unexpected{*std::move(error_msg)}; + } + return gsl::not_null( + &dir_symlinks_); // explicit type needed for expected } -void BazelResponse::Populate() noexcept { +auto BazelResponse::Populate() noexcept -> std::optional { // Initialized only once lazily if (populated_) { - return; + return std::nullopt; } populated_ = true; @@ -80,64 +102,110 @@ void BazelResponse::Populate() noexcept { dir_symlinks.reserve(static_cast( action_result.output_directory_symlinks_size())); + auto const hash_type = network_->GetHashFunction().GetType(); // collect files and store them for (auto const& file : action_result.output_files()) { + auto digest = + ArtifactDigestFactory::FromBazel(hash_type, file.digest()); + if (not digest) { + return fmt::format( + "BazelResponse: failed to create artifact digest for {}", + file.path()); + } try { artifacts.emplace( file.path(), - Artifact::ObjectInfo{.digest = ArtifactDigest{file.digest()}, + Artifact::ObjectInfo{.digest = *std::move(digest), .type = file.is_executable() ? ObjectType::Executable : ObjectType::File}); - } catch (...) 
{ - return; + } catch (std::exception const& ex) { + return fmt::format( + "BazelResponse: unexpected failure gathering digest for " + "{}:\n{}", + file.path(), + ex.what()); } } // collect all symlinks and store them for (auto const& link : action_result.output_file_symlinks()) { try { + // in compatible mode: check symlink validity + if (not ProtocolTraits::IsNative( + network_->GetHashFunction().GetType()) and + not PathIsNonUpwards(link.target())) { + return fmt::format("BazelResponse: found invalid symlink at {}", + link.path()); + } artifacts.emplace( link.path(), Artifact::ObjectInfo{ - .digest = ArtifactDigest::Create( - network_->GetHashFunction(), link.target()), + .digest = + ArtifactDigestFactory::HashDataAs( + network_->GetHashFunction(), link.target()), .type = ObjectType::Symlink}); - } catch (...) { - return; + } catch (std::exception const& ex) { + return fmt::format( + "BazelResponse: unexpected failure gathering digest for " + "{}:\n{}", + link.path(), + ex.what()); } } for (auto const& link : action_result.output_directory_symlinks()) { try { + // in compatible mode: check symlink validity + if (not ProtocolTraits::IsNative( + network_->GetHashFunction().GetType()) and + not PathIsNonUpwards(link.target())) { + return fmt::format("BazelResponse: found invalid symlink at {}", + link.path()); + } artifacts.emplace( link.path(), Artifact::ObjectInfo{ - .digest = ArtifactDigest::Create( - network_->GetHashFunction(), link.target()), + .digest = + ArtifactDigestFactory::HashDataAs( + network_->GetHashFunction(), link.target()), .type = ObjectType::Symlink}); dir_symlinks.emplace(link.path()); // add it to set - } catch (...) 
{ - return; + } catch (std::exception const& ex) { + return fmt::format( + "BazelResponse: unexpected failure gathering digest for " + "{}:\n{}", + link.path(), + ex.what()); } } - if (not Compatibility::IsCompatible()) { + if (ProtocolTraits::IsNative(hash_type)) { // in native mode: just collect and store tree digests for (auto const& tree : action_result.output_directories()) { - ExpectsAudit(NativeSupport::IsTree(tree.tree_digest().hash())); + auto digest = + ArtifactDigestFactory::FromBazel(hash_type, tree.tree_digest()); + if (not digest) { + return fmt::format( + "BazelResponse: failed to create artifact digest for {}", + tree.path()); + } + ExpectsAudit(digest->IsTree()); try { artifacts.emplace( tree.path(), - Artifact::ObjectInfo{ - .digest = ArtifactDigest{tree.tree_digest()}, - .type = ObjectType::Tree}); - } catch (...) { - return; + Artifact::ObjectInfo{.digest = *std::move(digest), + .type = ObjectType::Tree}); + } catch (std::exception const& ex) { + return fmt::format( + "BazelResponse: unexpected failure gathering digest for " + "{}:\n{}", + tree.path(), + ex.what()); } } artifacts_ = std::move(artifacts); dir_symlinks_ = std::move(dir_symlinks); - return; + return std::nullopt; } // obtain tree digests for output directories @@ -158,7 +226,9 @@ void BazelResponse::Populate() noexcept { auto tree = BazelMsgFactory::MessageFromString( *tree_blob.data); if (not tree) { - return; + return fmt::format( + "BazelResponse: failed to create Tree for {}", + tree_blob.digest.hash()); } // The server does not store the Directory messages it just @@ -167,70 +237,79 @@ void BazelResponse::Populate() noexcept { // have to upload them manually. 
auto root_digest = UploadTreeMessageDirectories(*tree); if (not root_digest) { - return; + auto error = + fmt::format("BazelResponse: {}", root_digest.error()); + Logger::Log(LogLevel::Trace, error); + return error; } artifacts.emplace( action_result.output_directories(pos).path(), Artifact::ObjectInfo{.digest = *root_digest, .type = ObjectType::Tree}); - } catch (...) { - return; + } catch (std::exception const& ex) { + return fmt::format( + "BazelResponse: unexpected failure gathering digest for " + "{}:\n{}", + tree_blob.digest.hash(), + ex.what()); } ++pos; } } artifacts_ = std::move(artifacts); dir_symlinks_ = std::move(dir_symlinks); + return std::nullopt; } auto BazelResponse::UploadTreeMessageDirectories( - bazel_re::Tree const& tree) const -> std::optional { + bazel_re::Tree const& tree) const -> expected { + auto const upload_callback = + [&network = *network_](BazelBlobContainer&& blobs) -> bool { + return network.UploadBlobs(std::move(blobs)); + }; + auto const hash_function = network_->GetHashFunction(); BazelBlobContainer dir_blobs{}; - auto rootdir_blob = - ProcessDirectoryMessage(network_->GetHashFunction(), tree.root()); + auto rootdir_blob = ProcessDirectoryMessage(hash_function, tree.root()); if (not rootdir_blob) { - return std::nullopt; + return unexpected{std::move(rootdir_blob).error()}; } - auto root_digest = rootdir_blob->digest; + auto const root_digest = rootdir_blob->digest; // store or upload rootdir blob, taking maximum transfer size into account if (not UpdateContainerAndUpload( &dir_blobs, - std::move(*rootdir_blob), + *std::move(rootdir_blob), /*exception_is_fatal=*/false, - [&network = network_](BazelBlobContainer&& blobs) { - return network->UploadBlobs(std::move(blobs)); - })) { - Logger::Log(LogLevel::Error, - "uploading Tree's Directory messages failed"); - return std::nullopt; + upload_callback)) { + return unexpected{fmt::format( + "failed to upload Tree with root digest {}", root_digest.hash())}; } for (auto const& subdir : 
tree.children()) { - auto subdir_blob = - ProcessDirectoryMessage(network_->GetHashFunction(), subdir); - if (not subdir_blob) { - return std::nullopt; - } // store or upload blob, taking maximum transfer size into account + auto blob = ProcessDirectoryMessage(hash_function, subdir); + if (not blob) { + return unexpected{std::move(blob).error()}; + } + auto const blob_digest = blob->digest; if (not UpdateContainerAndUpload( &dir_blobs, - std::move(*subdir_blob), + *std::move(blob), /*exception_is_fatal=*/false, - [&network = network_](BazelBlobContainer&& blobs) { - return network->UploadBlobs(std::move(blobs)); - })) { - Logger::Log(LogLevel::Error, - "uploading Tree's Directory messages failed"); - return std::nullopt; + upload_callback)) { + return unexpected{ + fmt::format("failed to upload Tree subdir with digest {}", + blob_digest.hash())}; } } // upload any remaining blob - if (not network_->UploadBlobs(std::move(dir_blobs))) { - Logger::Log(LogLevel::Error, - "uploading Tree's Directory messages failed"); - return std::nullopt; + if (not std::invoke(upload_callback, std::move(dir_blobs))) { + return unexpected{ + fmt::format("failed to upload blobs for Tree with root digest {}", + root_digest.hash())}; } - return ArtifactDigest{root_digest}; + return ArtifactDigestFactory::FromBazel(hash_function.GetType(), + root_digest) + .value(); // must succeed all the time } diff --git a/src/buildtool/execution_api/remote/bazel/bazel_response.hpp b/src/buildtool/execution_api/remote/bazel/bazel_response.hpp index 6e37d3443..86e7491c4 100644 --- a/src/buildtool/execution_api/remote/bazel/bazel_response.hpp +++ b/src/buildtool/execution_api/remote/bazel/bazel_response.hpp @@ -20,9 +20,11 @@ #include // std::move #include +#include "gsl/gsl" #include "src/buildtool/execution_api/common/execution_api.hpp" #include "src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp" #include "src/buildtool/execution_api/remote/bazel/bazel_network.hpp" +#include 
"src/utils/cpp/expected.hpp" class BazelAction; @@ -58,12 +60,14 @@ class BazelResponse final : public IExecutionResponse { return action_id_; } - auto Artifacts() noexcept -> ArtifactInfos const& final; - auto DirectorySymlinks() noexcept -> DirSymlinks const& final; + auto Artifacts() noexcept + -> expected, std::string> final; + auto DirectorySymlinks() noexcept + -> expected, std::string> final; private: - std::string action_id_{}; - std::shared_ptr const network_{}; + std::string action_id_; + std::shared_ptr const network_; BazelExecutionClient::ExecutionOutput output_{}; ArtifactInfos artifacts_; DirSymlinks dir_symlinks_; @@ -84,10 +88,12 @@ class BazelResponse final : public IExecutionResponse { return id.size_bytes() != 0; } - void Populate() noexcept; + /// \brief Populates the stored data, once. + /// \returns Error message on failure, nullopt on success. + [[nodiscard]] auto Populate() noexcept -> std::optional; - [[nodiscard]] auto UploadTreeMessageDirectories( - bazel_re::Tree const& tree) const -> std::optional; + [[nodiscard]] auto UploadTreeMessageDirectories(bazel_re::Tree const& tree) + const -> expected; }; #endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_RESPONSE_HPP diff --git a/src/buildtool/execution_api/remote/bazel/bytestream_client.hpp b/src/buildtool/execution_api/remote/bazel/bytestream_client.hpp index 88abe6fd3..dca8d59a9 100644 --- a/src/buildtool/execution_api/remote/bazel/bytestream_client.hpp +++ b/src/buildtool/execution_api/remote/bazel/bytestream_client.hpp @@ -17,19 +17,17 @@ #include #include -#include #include #include #include #include // std::move -#include #include "google/bytestream/bytestream.grpc.pb.h" #include "gsl/gsl" #include "src/buildtool/auth/authentication.hpp" #include "src/buildtool/common/remote/client_common.hpp" #include "src/buildtool/common/remote/port.hpp" -#include "src/buildtool/execution_api/common/bytestream_common.hpp" +#include 
"src/buildtool/execution_api/common/bytestream_utils.hpp" #include "src/buildtool/execution_api/remote/config.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" @@ -72,11 +70,11 @@ class ByteStreamClient { IncrementalReader( gsl::not_null const& stub, - Logger const* logger, - std::string const& resource_name) + ByteStreamUtils::ReadRequest&& read_request, + Logger const* logger) : logger_{logger} { google::bytestream::ReadRequest request{}; - request.set_resource_name(resource_name); + request.set_resource_name(std::move(read_request).ToString()); reader_ = stub->Read(&ctx_, request); } }; @@ -88,14 +86,14 @@ class ByteStreamClient { CreateChannelWithCredentials(server, port, auth)); } - [[nodiscard]] auto IncrementalRead( - std::string const& resource_name) const noexcept -> IncrementalReader { - return IncrementalReader{stub_.get(), &logger_, resource_name}; + [[nodiscard]] auto IncrementalRead(ByteStreamUtils::ReadRequest&& request) + const noexcept -> IncrementalReader { + return IncrementalReader{stub_.get(), std::move(request), &logger_}; } - [[nodiscard]] auto Read(std::string const& resource_name) const noexcept - -> std::optional { - auto reader = IncrementalRead(resource_name); + [[nodiscard]] auto Read(ByteStreamUtils::ReadRequest&& request) + const noexcept -> std::optional { + auto reader = IncrementalRead(std::move(request)); std::string output{}; auto data = reader.Next(); while (data and not data->empty()) { @@ -108,22 +106,21 @@ class ByteStreamClient { return output; } - [[nodiscard]] auto Write(std::string const& resource_name, + [[nodiscard]] auto Write(ByteStreamUtils::WriteRequest&& write_request, std::string const& data) const noexcept -> bool { try { grpc::ClientContext ctx; google::bytestream::WriteResponse response{}; auto writer = stub_->Write(&ctx, &response); - auto* allocated_data = - std::make_unique(kChunkSize, '\0').release(); google::bytestream::WriteRequest request{}; - 
request.set_resource_name(resource_name); - request.set_allocated_data(allocated_data); + request.set_resource_name(std::move(write_request).ToString()); + request.mutable_data()->resize(ByteStreamUtils::kChunkSize, '\0'); - std::size_t pos{}; - do { - auto const size = std::min(data.size() - pos, kChunkSize); + std::size_t pos = 0; + do { // NOLINT(cppcoreguidelines-avoid-do-while) + auto const size = + std::min(data.size() - pos, ByteStreamUtils::kChunkSize); request.mutable_data()->resize(size); data.copy(request.mutable_data()->data(), size, pos); request.set_write_offset(static_cast(pos)); @@ -134,24 +131,25 @@ class ByteStreamClient { // the `Write()`, the client should check the status of the // `Write()` by calling `QueryWriteStatus()` and continue // writing from the returned `committed_size`. - auto const committed_size = QueryWriteStatus(resource_name); + auto const committed_size = + QueryWriteStatus(request.resource_name()); if (committed_size <= 0) { logger_.Emit( LogLevel::Warning, "broken stream for upload to resource name {}", - resource_name); + request.resource_name()); return false; } pos = gsl::narrow(committed_size); } else { - pos += kChunkSize; + pos += ByteStreamUtils::kChunkSize; } } while (pos < data.size()); if (not writer->WritesDone()) { logger_.Emit(LogLevel::Warning, "broken stream for upload to resource name {}", - resource_name); + request.resource_name()); return false; } @@ -176,32 +174,6 @@ class ByteStreamClient { } } - template - void ReadMany( - std::vector const& inputs, - std::function const& to_resource_name, - std::function const& parse_data) const noexcept { - for (auto const& i : inputs) { - auto data = Read(to_resource_name(i)); - if (data) { - parse_data(std::move(*data)); - } - } - } - - template - [[nodiscard]] auto WriteMany( - std::vector const& inputs, - std::function const& to_resource_name, - std::function const& to_data) - const noexcept -> bool { - return std::all_of(inputs.begin(), - inputs.end(), - [this, 
&to_resource_name, &to_data](auto const& i) { - return Write(to_resource_name(i), to_data(i)); - }); - } - private: std::unique_ptr stub_; Logger logger_{"ByteStreamClient"}; diff --git a/src/buildtool/execution_api/remote/config.cpp b/src/buildtool/execution_api/remote/config.cpp index 7f065296d..7a73eaa10 100644 --- a/src/buildtool/execution_api/remote/config.cpp +++ b/src/buildtool/execution_api/remote/config.cpp @@ -27,12 +27,13 @@ auto RemoteExecutionConfig::Builder::Build() const noexcept -> expected { // To not duplicate default arguments in builder, create a default config // and copy arguments from there. - RemoteExecutionConfig const default_config; + RemoteExecutionConfig const default_config{}; // Set remote endpoint. auto remote_address = default_config.remote_address; if (remote_address_raw_.has_value()) { - if (not(remote_address = ParseAddress(*remote_address_raw_))) { + remote_address = ParseAddress(*remote_address_raw_); + if (not remote_address) { return unexpected{ fmt::format("Failed to set remote endpoint address {}", nlohmann::json(*remote_address_raw_).dump())}; diff --git a/src/buildtool/execution_api/remote/config.hpp b/src/buildtool/execution_api/remote/config.hpp index 45420c80a..36e83fdd4 100644 --- a/src/buildtool/execution_api/remote/config.hpp +++ b/src/buildtool/execution_api/remote/config.hpp @@ -29,16 +29,16 @@ struct RemoteExecutionConfig final { class Builder; // Server address of remote execution. - std::optional const remote_address = {}; + std::optional const remote_address; // Server dispatch data - std::vector const dispatch = {}; + std::vector const dispatch; // Server address of cache endpoint for rebuild. - std::optional const cache_address = {}; + std::optional const cache_address; // Platform properties for execution. 
- ExecutionProperties const platform_properties = {}; + ExecutionProperties const platform_properties; }; class RemoteExecutionConfig::Builder final { @@ -80,16 +80,16 @@ class RemoteExecutionConfig::Builder final { private: // Server address of remote execution; needs parsing. - std::optional remote_address_raw_{}; + std::optional remote_address_raw_; // Server dispatch data file; needs parsing. - std::optional dispatch_file_{}; + std::optional dispatch_file_; // Server address of cache endpoint for rebuild; needs parsing. - std::optional cache_address_raw_{}; + std::optional cache_address_raw_; // Platform properties for execution; needs parsing. - std::vector platform_properties_raw_{}; + std::vector platform_properties_raw_; }; #endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_CONFIG_HPP diff --git a/src/buildtool/execution_api/serve/TARGETS b/src/buildtool/execution_api/serve/TARGETS new file mode 100644 index 000000000..2d45ed14d --- /dev/null +++ b/src/buildtool/execution_api/serve/TARGETS @@ -0,0 +1,71 @@ +{ "utils": + { "type": ["@", "rules", "CC", "library"] + , "name": ["utils"] + , "hdrs": ["utils.hpp"] + , "srcs": ["utils.cpp"] + , "deps": + [ ["src/buildtool/common", "common"] + , ["src/buildtool/file_system", "object_type"] + , ["src/buildtool/storage", "config"] + , ["src/utils/cpp", "expected"] + ] + , "stage": ["src", "buildtool", "execution_api", "serve"] + , "private-deps": + [ ["@", "fmt", "", "fmt"] + , ["src/buildtool/file_system", "file_system_manager"] + , ["src/buildtool/storage", "fs_utils"] + ] + } +, "mr_git_api": + { "type": ["@", "rules", "CC", "library"] + , "name": ["mr_git_api"] + , "hdrs": ["mr_git_api.hpp"] + , "srcs": ["mr_git_api.cpp"] + , "deps": + [ ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "common"] + , ["src/buildtool/common", "config"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/execution_api/common", "artifact_blob_container"] + , ["src/buildtool/execution_api/common", "common"] + , 
["src/buildtool/execution_engine/dag", "dag"] + , ["src/buildtool/storage", "config"] + , ["src/buildtool/storage", "storage"] + ] + , "stage": ["src", "buildtool", "execution_api", "serve"] + , "private-deps": + [ "utils" + , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"] + , ["src/buildtool/execution_api/git", "git"] + , ["src/buildtool/file_system", "object_type"] + , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] + , ["src/utils/cpp", "expected"] + ] + } +, "mr_local_api": + { "type": ["@", "rules", "CC", "library"] + , "name": ["mr_local_api"] + , "hdrs": ["mr_local_api.hpp"] + , "srcs": ["mr_local_api.cpp"] + , "deps": + [ ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "common"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/execution_api/common", "artifact_blob_container"] + , ["src/buildtool/execution_api/common", "common"] + , ["src/buildtool/execution_api/local", "context"] + , ["src/buildtool/execution_engine/dag", "dag"] + ] + , "stage": ["src", "buildtool", "execution_api", "serve"] + , "private-deps": + [ "utils" + , ["src/buildtool/common", "protocol_traits"] + , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"] + , ["src/buildtool/file_system", "object_type"] + , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] + , ["src/utils/cpp", "expected"] + ] + } +} diff --git a/src/buildtool/execution_api/serve/mr_git_api.cpp b/src/buildtool/execution_api/serve/mr_git_api.cpp new file mode 100644 index 000000000..1e433fff7 --- /dev/null +++ b/src/buildtool/execution_api/serve/mr_git_api.cpp @@ -0,0 +1,178 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "src/buildtool/execution_api/serve/mr_git_api.hpp" + +#include + +#include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp" +#include "src/buildtool/execution_api/git/git_api.hpp" +#include "src/buildtool/execution_api/serve/utils.hpp" +#include "src/buildtool/file_system/object_type.hpp" +#include "src/buildtool/logging/log_level.hpp" +#include "src/buildtool/logging/logger.hpp" +#include "src/utils/cpp/expected.hpp" + +MRGitApi::MRGitApi( + gsl::not_null const& repo_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compatible_storage_config, + Storage const* compatible_storage, + IExecutionApi const* compatible_local_api) noexcept + : repo_config_{repo_config}, + native_storage_config_{native_storage_config}, + compat_storage_config_{compatible_storage_config}, + compat_storage_{compatible_storage}, + compat_local_api_{compatible_local_api} {} + +auto MRGitApi::RetrieveToCas( + std::vector const& artifacts_info, + IExecutionApi const& api) const noexcept -> bool { + // Return immediately if target CAS is this CAS + if (this == &api) { + return true; + } + + // in native mode: dispatch to regular GitApi + if (compat_storage_config_ == nullptr) { + GitApi const git_api{repo_config_}; + return git_api.RetrieveToCas(artifacts_info, api); + } + + // in compatible mode: set up needed callbacks for caching digest mappings + auto read_rehashed = + [native_sc = native_storage_config_, + compat_sc = compat_storage_config_](ArtifactDigest const& digest) + -> expected, std::string> { + return 
MRApiUtils::ReadRehashedDigest( + digest, *native_sc, *compat_sc, /*from_git=*/true); + }; + auto store_rehashed = + [native_sc = native_storage_config_, + compat_sc = compat_storage_config_]( + ArtifactDigest const& source_digest, + ArtifactDigest const& target_digest, + ObjectType obj_type) -> std::optional { + return MRApiUtils::StoreRehashedDigest(source_digest, + target_digest, + obj_type, + *native_sc, + *compat_sc, + /*from_git=*/true); + }; + + // collect the native blobs and rehash them as compatible to be able to + // check what is missing in the other api + std::vector compat_artifacts; + compat_artifacts.reserve(artifacts_info.size()); + for (auto const& native_obj : artifacts_info) { + // check if we know already the compatible digest + auto cached_obj = read_rehashed(native_obj.digest); + if (not cached_obj) { + Logger::Log( + LogLevel::Error, "MRGitApi: {}", std::move(cached_obj).error()); + return false; + } + if (*cached_obj) { + // add object to the vector of compatible artifacts + compat_artifacts.emplace_back(std::move(cached_obj)->value()); + } + else { + // process object; trees need to be handled appropriately + if (IsTreeObject(native_obj.type)) { + // set up all the callbacks needed + auto read_git = [repo_config = repo_config_]( + ArtifactDigest const& digest, + ObjectType /*type*/) + -> std::optional< + std::variant> { + return repo_config->ReadBlobFromGitCAS(digest.hash()); + }; + auto store_file = + [cas = &compat_storage_->CAS()]( + std::variant const& + data, + bool is_exec) -> std::optional { + if (not std::holds_alternative(data)) { + return std::nullopt; + } + return cas->StoreBlob(std::get(data), is_exec); + }; + BazelMsgFactory::TreeStoreFunc store_dir = + [cas = &compat_storage_->CAS()](std::string const& content) + -> std::optional { + return cas->StoreTree(content); + }; + BazelMsgFactory::SymlinkStoreFunc store_symlink = + [cas = &compat_storage_->CAS()](std::string const& content) + -> std::optional { + return 
cas->StoreBlob(content); + }; + // get the directory digest + auto tree_digest = + BazelMsgFactory::CreateDirectoryDigestFromGitTree( + native_obj.digest, + read_git, + store_file, + store_dir, + store_symlink, + read_rehashed, + store_rehashed); + if (not tree_digest) { + Logger::Log(LogLevel::Error, + "MRGitApi: {}", + std::move(tree_digest).error()); + return false; + } + // add object to the vector of compatible artifacts + compat_artifacts.emplace_back( + Artifact::ObjectInfo{.digest = *std::move(tree_digest), + .type = ObjectType::Tree}); + } + else { + // blobs are read from repo and added to compatible CAS + auto const blob_content = + repo_config_->ReadBlobFromGitCAS(native_obj.digest.hash()); + if (not blob_content) { + Logger::Log(LogLevel::Error, + "MRGitApi: failed reading Git entry {}", + native_obj.digest.hash()); + return false; + } + auto blob_digest = compat_storage_->CAS().StoreBlob( + *blob_content, IsExecutableObject(native_obj.type)); + if (not blob_digest) { + Logger::Log(LogLevel::Error, + "MRGitApi: failed to rehash Git entry {}", + native_obj.digest.hash()); + return false; + } + // cache the digest association + if (auto error_msg = store_rehashed( + native_obj.digest, *blob_digest, native_obj.type)) { + Logger::Log( + LogLevel::Error, "MRGitApi: {}", *std::move(error_msg)); + return false; + } + // add object to the vector of compatible artifacts + compat_artifacts.emplace_back( + Artifact::ObjectInfo{.digest = *std::move(blob_digest), + .type = native_obj.type}); + } + } + } + // now that we have gathered all the compatible object infos, simply pass + // them to a local api that can interact with the remote + return compat_local_api_->RetrieveToCas(compat_artifacts, api); +} diff --git a/src/buildtool/execution_api/serve/mr_git_api.hpp b/src/buildtool/execution_api/serve/mr_git_api.hpp new file mode 100644 index 000000000..74864218b --- /dev/null +++ b/src/buildtool/execution_api/serve/mr_git_api.hpp @@ -0,0 +1,143 @@ +// Copyright 2024 
Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_SERVE_MR_GIT_API_HPP +#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_SERVE_MR_GIT_API_HPP + +#include +#include +#include +#include +#include + +#include "gsl/gsl" +#include "src/buildtool/common/artifact.hpp" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/repository_config.hpp" +#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/execution_api/common/artifact_blob_container.hpp" +#include "src/buildtool/execution_api/common/execution_api.hpp" +#include "src/buildtool/execution_engine/dag/dag.hpp" +#include "src/buildtool/storage/config.hpp" +#include "src/buildtool/storage/storage.hpp" + +/// \brief Multi-repo-specific implementation of the abstract Execution API. +/// Handles interaction between the Git CAS and another api, irrespective of the +/// remote-execution protocol used. This instance cannot create actions or store +/// anything to the Git CAS, but has access to local storages. +class MRGitApi final : public IExecutionApi { + public: + MRGitApi(gsl::not_null const& repo_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compatible_storage_config = nullptr, + Storage const* compatible_storage = nullptr, + IExecutionApi const* compatible_local_api = nullptr) noexcept; + + /// \brief Not supported. 
+ [[nodiscard]] auto CreateAction( + ArtifactDigest const& /*root_digest*/, + std::vector const& /*command*/, + std::string const& /*cwd*/, + std::vector const& /*output_files*/, + std::vector const& /*output_dirs*/, + std::map const& /*env_vars*/, + std::map const& /*properties*/) const noexcept + -> IExecutionAction::Ptr final { + // Execution not supported. + return nullptr; + } + + /// \brief Not supported. + // NOLINTNEXTLINE(google-default-arguments) + [[nodiscard]] auto RetrieveToPaths( + std::vector const& /*artifacts_info*/, + std::vector const& /*output_paths*/, + IExecutionApi const* /*alternative*/ = nullptr) const noexcept + -> bool final { + // Retrieval to paths not suported. + return false; + } + + /// \brief Not supported. + [[nodiscard]] auto RetrieveToFds( + std::vector const& /*artifacts_info*/, + std::vector const& /*fds*/, + bool /*raw_tree*/) const noexcept -> bool final { + // Retrieval to file descriptors not supported. + return false; + } + + /// \brief Passes artifacts from Git CAS to specified (remote) api. In + /// compatible mode, it must rehash the native digests to be able to upload + /// to a compatible remote. Expects native digests. + /// \note Caller is responsible for passing vectors with artifacts of the + /// same digest type. + [[nodiscard]] auto RetrieveToCas( + std::vector const& artifacts_info, + IExecutionApi const& api) const noexcept -> bool final; + + /// \brief Not supported. + [[nodiscard]] auto RetrieveToMemory( + Artifact::ObjectInfo const& /*artifact_info*/) const noexcept + -> std::optional final { + // Retrieval to memory not supported. + return std::nullopt; + } + + /// \brief Not supported. + // NOLINTNEXTLINE(google-default-arguments) + [[nodiscard]] auto Upload(ArtifactBlobContainer&& /*blobs*/, + bool /*skip_find_missing*/ = false) const noexcept + -> bool final { + // Upload not suppoorted. + return false; + } + + /// \brief Not supported. 
+ [[nodiscard]] auto UploadTree( + std::vector const& /*artifacts*/) + const noexcept -> std::optional final { + // Upload tree not supported. + return std::nullopt; + } + + /// \brief Not supported. + [[nodiscard]] auto IsAvailable( + ArtifactDigest const& /*digest*/) const noexcept -> bool final { + // Not supported. + return false; + } + + /// \brief Not implemented. + [[nodiscard]] auto IsAvailable( + std::vector const& /*digests*/) const noexcept + -> std::vector final { + // Not supported. + return {}; + } + + private: + gsl::not_null repo_config_; + + // retain references to needed storages and configs + gsl::not_null native_storage_config_; + StorageConfig const* compat_storage_config_; + Storage const* compat_storage_; + + // an api accessing compatible storage, used purely to communicate with a + // compatible remote; only instantiated if in compatible mode + IExecutionApi const* compat_local_api_; +}; + +#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_SERVE_MR_GIT_API_HPP diff --git a/src/buildtool/execution_api/serve/mr_local_api.cpp b/src/buildtool/execution_api/serve/mr_local_api.cpp new file mode 100644 index 000000000..78e960c22 --- /dev/null +++ b/src/buildtool/execution_api/serve/mr_local_api.cpp @@ -0,0 +1,274 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "src/buildtool/execution_api/serve/mr_local_api.hpp" + +#include +#include +#include + +#include "src/buildtool/common/protocol_traits.hpp" +#include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp" +#include "src/buildtool/execution_api/serve/utils.hpp" +#include "src/buildtool/file_system/object_type.hpp" +#include "src/buildtool/logging/log_level.hpp" +#include "src/buildtool/logging/logger.hpp" +#include "src/utils/cpp/expected.hpp" + +MRLocalApi::MRLocalApi( + gsl::not_null const& native_context, + gsl::not_null const& native_local_api, + LocalContext const* compatible_context, + IExecutionApi const* compatible_local_api) noexcept + : native_context_{native_context}, + compat_context_{compatible_context}, + native_local_api_{native_local_api}, + compat_local_api_{compatible_local_api} {} + +// NOLINTNEXTLINE(google-default-arguments) +auto MRLocalApi::RetrieveToPaths( + std::vector const& artifacts_info, + std::vector const& output_paths, + IExecutionApi const* /*alternative*/) const noexcept -> bool { + // This method can legitimately be called with both native and + // compatible digests when in compatible mode, therefore we need to + // interrogate the hash type of the input. 
+ + // we need at least one digest to interrogate the hash type + if (artifacts_info.empty()) { + return true; // nothing to do + } + // native artifacts get dispatched to native local api + if (ProtocolTraits::IsNative(artifacts_info[0].digest.GetHashType())) { + return native_local_api_->RetrieveToPaths(artifacts_info, output_paths); + } + // compatible digests get dispatched to compatible local api + if (compat_local_api_ == nullptr) { + Logger::Log(LogLevel::Error, + "MRLocalApi: Unexpected digest type provided"); + return false; + } + return compat_local_api_->RetrieveToPaths(artifacts_info, output_paths); +} + +auto MRLocalApi::RetrieveToCas( + std::vector const& artifacts_info, + IExecutionApi const& api) const noexcept -> bool { + // return immediately if being passed the same api + if (this == &api) { + return true; + } + + // in native mode: dispatch directly to native local api + if (compat_local_api_ == nullptr) { + return native_local_api_->RetrieveToCas(artifacts_info, api); + } + + // in compatible mode: if compatible hashes passed, dispatch them to + // compatible local api + if (not artifacts_info.empty() and + not ProtocolTraits::IsNative(artifacts_info[0].digest.GetHashType())) { + return compat_local_api_->RetrieveToCas(artifacts_info, api); + } + + // in compatible mode: if passed native digests, one must rehash them; + // first, set up needed callbacks for caching digest mappings + auto read_rehashed = [native_storage = native_context_->storage_config, + compat_storage = compat_context_->storage_config]( + ArtifactDigest const& digest) + -> expected, std::string> { + return MRApiUtils::ReadRehashedDigest( + digest, *native_storage, *compat_storage); + }; + auto store_rehashed = + [native_storage = native_context_->storage_config, + compat_storage = compat_context_->storage_config]( + ArtifactDigest const& source_digest, + ArtifactDigest const& target_digest, + ObjectType obj_type) -> std::optional { + return 
MRApiUtils::StoreRehashedDigest(source_digest, + target_digest, + obj_type, + *native_storage, + *compat_storage); + }; + + // collect the native blobs and rehash them as compatible to be able to + // check what is missing in the other api + std::vector compat_artifacts; + compat_artifacts.reserve(artifacts_info.size()); + for (auto const& native_obj : artifacts_info) { + // check if we know already the compatible digest + auto cached_obj = read_rehashed(native_obj.digest); + if (not cached_obj) { + Logger::Log(LogLevel::Error, + "MRLocalApi: {}", + std::move(cached_obj).error()); + return false; + } + if (*cached_obj) { + // add object to the vector of compatible artifacts + compat_artifacts.emplace_back(std::move(cached_obj)->value()); + } + else { + // process object; trees need to be handled appropriately + if (IsTreeObject(native_obj.type)) { + // set up all the callbacks needed + auto read_git = [cas = &native_context_->storage->CAS()]( + ArtifactDigest const& digest, + ObjectType type) + -> std::optional< + std::variant> { + return IsTreeObject(type) + ? 
cas->TreePath(digest) + : cas->BlobPath(digest, + IsExecutableObject(type)); + }; + auto store_file = + [cas = &compat_context_->storage->CAS()]( + std::variant const& + data, + bool is_exec) -> std::optional { + if (not std::holds_alternative( + data)) { + return std::nullopt; + } + return cas->StoreBlob(std::get(data), + is_exec); + }; + BazelMsgFactory::TreeStoreFunc store_dir = + [cas = &compat_context_->storage->CAS()]( + std::string const& content) + -> std::optional { + return cas->StoreTree(content); + }; + BazelMsgFactory::SymlinkStoreFunc store_symlink = + [cas = &compat_context_->storage->CAS()]( + std::string const& content) + -> std::optional { + return cas->StoreBlob(content); + }; + // get the directory digest + auto tree_digest = + BazelMsgFactory::CreateDirectoryDigestFromGitTree( + native_obj.digest, + read_git, + store_file, + store_dir, + store_symlink, + read_rehashed, + store_rehashed); + if (not tree_digest) { + Logger::Log(LogLevel::Error, + "MRLocalApi: {}", + std::move(tree_digest).error()); + return false; + } + // add object to the vector of compatible artifacts + compat_artifacts.emplace_back( + Artifact::ObjectInfo{.digest = *std::move(tree_digest), + .type = ObjectType::Tree}); + } + else { + // blobs can be directly rehashed + auto const is_exec = IsExecutableObject(native_obj.type); + auto path = native_context_->storage->CAS().BlobPath( + native_obj.digest, is_exec); + if (not path) { + Logger::Log( + LogLevel::Error, + "MRLocalApi: failed to get path of CAS entry {}", + native_obj.digest.hash()); + return false; + } + auto blob_digest = + compat_context_->storage->CAS().StoreBlob(*path, is_exec); + if (not blob_digest) { + Logger::Log(LogLevel::Error, + "MRLocalApi: failed to rehash CAS entry {}", + native_obj.digest.hash()); + return false; + } + // cache the digest association + if (auto error_msg = store_rehashed( + native_obj.digest, *blob_digest, native_obj.type)) { + Logger::Log(LogLevel::Error, + "MRLocalApi: {}", + 
*std::move(error_msg)); + return false; + } + // add object to the vector of compatible artifacts + compat_artifacts.emplace_back( + Artifact::ObjectInfo{.digest = *std::move(blob_digest), + .type = native_obj.type}); + } + } + } + // now that we have gathered all the compatible object infos, simply pass + // them to the compatible local api + return compat_local_api_->RetrieveToCas(compat_artifacts, api); +} + +// NOLINTNEXTLINE(google-default-arguments) +auto MRLocalApi::Upload(ArtifactBlobContainer&& blobs, + bool skip_find_missing) const noexcept -> bool { + // in native mode, dispatch to native local api + if (compat_local_api_ == nullptr) { + return native_local_api_->Upload(std::move(blobs), skip_find_missing); + } + // in compatible mode, dispatch to compatible local api + return compat_local_api_->Upload(std::move(blobs), skip_find_missing); +} + +auto MRLocalApi::IsAvailable(ArtifactDigest const& digest) const noexcept + -> bool { + // This method can legitimately be called with both native and + // compatible digests when in compatible mode, therefore we need to + // interrogate the hash type of the input. + + // a native digest gets dispatched to native local api + if (ProtocolTraits::IsNative(digest.GetHashType())) { + return native_local_api_->IsAvailable(digest); + } + // compatible digests get dispatched to compatible local api + if (compat_local_api_ == nullptr) { + Logger::Log(LogLevel::Warning, + "MRLocalApi: unexpected digest type provided"); + return false; + } + return compat_local_api_->IsAvailable(digest); +} + +auto MRLocalApi::IsAvailable(std::vector const& digests) + const noexcept -> std::vector { + // This method can legitimately be called with both native and + // compatible digests when in compatible mode, therefore we need to + // interrogate the hash type of the input. 
+ + // we need at least one digest to interrogate the hash type + if (digests.empty()) { + return {}; // nothing to do + } + // native digests get dispatched to native local api + if (ProtocolTraits::IsNative(digests[0].GetHashType())) { + return native_local_api_->IsAvailable(digests); + } + // compatible digests get dispatched to compatible local api + if (compat_local_api_ == nullptr) { + Logger::Log(LogLevel::Warning, + "MRLocalApi: Unexpected digest type provided"); + return {}; + } + return compat_local_api_->IsAvailable(digests); +} diff --git a/src/buildtool/execution_api/serve/mr_local_api.hpp b/src/buildtool/execution_api/serve/mr_local_api.hpp new file mode 100644 index 000000000..e86efb2f5 --- /dev/null +++ b/src/buildtool/execution_api/serve/mr_local_api.hpp @@ -0,0 +1,140 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_SERVE_MR_LOCAL_API_HPP +#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_SERVE_MR_LOCAL_API_HPP + +#include +#include +#include +#include +#include + +#include "gsl/gsl" +#include "src/buildtool/common/artifact.hpp" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/execution_api/common/artifact_blob_container.hpp" +#include "src/buildtool/execution_api/common/execution_api.hpp" +#include "src/buildtool/execution_api/local/context.hpp" +#include "src/buildtool/execution_engine/dag/dag.hpp" + +/// \brief Multi-repo-specific implementation of the abstract Execution API. +/// Handles interaction between a native storage and a remote, irrespective of +/// the remote protocol used. In compatible mode, both native and compatible +/// storages are available. +class MRLocalApi final : public IExecutionApi { + public: + /// \brief Construct a new MRLocalApi object. In native mode only the native + /// storage in instantiated (hence behaving like regular LocalApi), while in + /// compatible mode both storages are instantiated. + MRLocalApi(gsl::not_null const& native_context, + gsl::not_null const& native_local_api, + LocalContext const* compatible_context = nullptr, + IExecutionApi const* compatible_local_api = nullptr) noexcept; + + /// \brief Not supported. + [[nodiscard]] auto CreateAction( + ArtifactDigest const& /*root_digest*/, + std::vector const& /*command*/, + std::string const& /*cwd*/, + std::vector const& /*output_files*/, + std::vector const& /*output_dirs*/, + std::map const& /*env_vars*/, + std::map const& /*properties*/) const noexcept + -> IExecutionAction::Ptr final { + // Execution not supported + return nullptr; + } + + /// \brief Stages artifacts from CAS to the file system. + /// Handles both native and compatible artifacts. Dispatches to appropriate + /// local api instance based on digest hash type. 
Alternative api is never + /// used. + // NOLINTNEXTLINE(google-default-arguments) + [[nodiscard]] auto RetrieveToPaths( + std::vector const& artifacts_info, + std::vector const& output_paths, + IExecutionApi const* /*alternative*/ = nullptr) const noexcept + -> bool final; + + [[nodiscard]] auto RetrieveToFds( + std::vector const& /*artifacts_info*/, + std::vector const& /*fds*/, + bool /*raw_tree*/) const noexcept -> bool final { + // Retrieval to file descriptors not supported + return false; + } + + /// \brief Passes artifacts from native CAS to specified api. Handles both + /// native and compatible digests. In compatible mode, if passed native + /// digests it must rehash them to be able to upload to a compatible remote. + /// \note Caller is responsible for passing vectors with artifacts of the + /// same digest type. For simplicity, this method takes the first digest of + /// the vector as representative for figuring out hash function type. + [[nodiscard]] auto RetrieveToCas( + std::vector const& artifacts_info, + IExecutionApi const& api) const noexcept -> bool final; + + [[nodiscard]] auto RetrieveToMemory( + Artifact::ObjectInfo const& /*artifact_info*/) const noexcept + -> std::optional final { + // Retrieval to memory not supported + return std::nullopt; + } + + /// \brief Uploads artifacts from local CAS into specified api. Dispatches + /// the blobs to the appropriate local api instance based on used protocol. + /// \note Caller is responsible for passing vectors with artifacts of the + /// same digest type. + // NOLINTNEXTLINE(google-default-arguments) + [[nodiscard]] auto Upload(ArtifactBlobContainer&& blobs, + bool skip_find_missing = false) const noexcept + -> bool final; + + [[nodiscard]] auto UploadTree( + std::vector const& /*artifacts*/) + const noexcept -> std::optional final { + // Upload tree not supported -- only used in execution + return std::nullopt; + } + + /// \brief Check availability of an artifact in CAS. 
Handles both native and + /// compatible digests. Dispatches to appropriate local api instance based + /// on digest hash type. + [[nodiscard]] auto IsAvailable(ArtifactDigest const& digest) const noexcept + -> bool final; + + /// \brief Check availability of artifacts in CAS. Handles both native and + /// compatible digests. Dispatches to appropriate local api instance based + /// on hash type of digests. + /// \note The caller is responsible for passing vectors with digests of the + /// same type. For simplicity, this method takes the first digest of the + /// vector as representative for figuring out hash function type. + [[nodiscard]] auto IsAvailable(std::vector const& digests) + const noexcept -> std::vector final; + + private: + // retain local context references to have direct access to storages + gsl::not_null native_context_; + LocalContext const* compat_context_; + + // local api accessing native storage; all artifacts must pass through it + gsl::not_null native_local_api_; + // local api accessing compatible storage, used purely to communicate with + // a compatible remote; only instantiated if in compatible mode + IExecutionApi const* compat_local_api_; +}; + +#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_SERVE_MR_LOCAL_API_HPP diff --git a/src/buildtool/execution_api/serve/utils.cpp b/src/buildtool/execution_api/serve/utils.cpp new file mode 100644 index 000000000..2796d3b11 --- /dev/null +++ b/src/buildtool/execution_api/serve/utils.cpp @@ -0,0 +1,117 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "src/buildtool/execution_api/serve/utils.hpp" + +#include // std::size_t +#include +#include +#include // std::move + +#include "fmt/core.h" +#include "src/buildtool/file_system/file_system_manager.hpp" +#include "src/buildtool/storage/fs_utils.hpp" + +namespace MRApiUtils { + +auto ReadRehashedDigest(ArtifactDigest const& digest, + StorageConfig const& source_config, + StorageConfig const& target_config, + bool from_git) noexcept + -> expected, std::string> { + // check for mapping file in all generations + std::size_t generation = 0; + std::optional rehash_id_file = std::nullopt; + auto const compat_hash_type = target_config.hash_function.GetType(); + for (; generation < source_config.num_generations; ++generation) { + auto path = StorageUtils::GetRehashIDFile(source_config, + compat_hash_type, + digest.hash(), + from_git, + generation); + if (FileSystemManager::Exists(path)) { + rehash_id_file = std::move(path); + break; // found the generation + } + } + if (rehash_id_file) { + // read id file + auto compat_obj_str = FileSystemManager::ReadFile(*rehash_id_file); + if (not compat_obj_str) { + return unexpected{fmt::format("failed to read rehash id file {}", + rehash_id_file->string())}; + } + // get artifact object from content + auto compat_obj = + Artifact::ObjectInfo::FromString(compat_hash_type, *compat_obj_str); + if (not compat_obj) { + // handle nullopt value explicitly + return unexpected{ + fmt::format("failed to read rehashed artifact from id file {}", + rehash_id_file->string())}; + } + // ensure the id file is in 
generation 0 for future calls + if (generation != 0) { + auto dest_id_file = StorageUtils::GetRehashIDFile( + source_config, compat_hash_type, digest.hash(), from_git); + auto ok = FileSystemManager::CreateFileHardlink(*rehash_id_file, + dest_id_file); + if (not ok) { + auto const& err = ok.error(); + if (err != std::errc::too_many_links) { + return unexpected{ + fmt::format("failed to link rehash id file {}:\n{} {}", + dest_id_file.string(), + err.value(), + err.message())}; + } + // if too many links reported, write id file ourselves + if (not StorageUtils::WriteTreeIDFile(dest_id_file, + *compat_obj_str)) { + return unexpected{ + fmt::format("failed to write rehash id file {}", + dest_id_file.string())}; + } + } + } + return std::move(compat_obj); // not dereferenced to assist type + // deduction in variant + } + // no mapping file found + return std::optional{std::nullopt}; +} + +auto StoreRehashedDigest(ArtifactDigest const& source_digest, + ArtifactDigest const& target_digest, + ObjectType obj_type, + StorageConfig const& source_config, + StorageConfig const& target_config, + bool from_git) noexcept -> std::optional { + // write mapping + auto const rehash_id_file = + StorageUtils::GetRehashIDFile(source_config, + target_config.hash_function.GetType(), + source_digest.hash(), + from_git); + if (not StorageUtils::WriteTreeIDFile( + rehash_id_file, + Artifact::ObjectInfo{.digest = target_digest, .type = obj_type} + .ToString())) { + return fmt::format("failed to write rehash id to file {}", + rehash_id_file.string()); + } + return std::nullopt; // a-ok +} + +} // namespace MRApiUtils diff --git a/src/buildtool/execution_api/serve/utils.hpp b/src/buildtool/execution_api/serve/utils.hpp new file mode 100644 index 000000000..3b588cf36 --- /dev/null +++ b/src/buildtool/execution_api/serve/utils.hpp @@ -0,0 +1,64 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_SERVE_UTILS_HPP +#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_SERVE_UTILS_HPP + +#include +#include + +#include "src/buildtool/common/artifact.hpp" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/file_system/object_type.hpp" +#include "src/buildtool/storage/config.hpp" +#include "src/utils/cpp/expected.hpp" + +namespace MRApiUtils { + +/// \brief Get a corresponding known object from a different local CAS, as +/// stored in a mapping file, if exists. +/// \param digest Source digest. +/// \param source_config Storage config corresponding to source digest. +/// \param target_config Storage config corresponding to target digest. +/// \param from_git Specify if source digest comes from a Git location instead +/// of CAS. +/// \returns The target artifact info on successfully reading an existing +/// mapping file, nullopt if no mapping file exists, or the error message on +/// failure. +[[nodiscard]] auto ReadRehashedDigest(ArtifactDigest const& digest, + StorageConfig const& source_config, + StorageConfig const& target_config, + bool from_git = false) noexcept + -> expected, std::string>; + +/// \brief Write the mapping file linking two digests hashing the same content. +/// \param source_digest Source digest. +/// \param target_digest Target digest. +/// \param obj_type Object type of the content represented by the two digests. 
+/// \param source_config Storage config corresponding to source digest. +/// \param target_config Storage config corresponding to target digest. +/// \param from_git Specify if source digest comes from a Git location instead +/// of CAS. +/// \returns nullopt on success, error message on failure. +[[nodiscard]] auto StoreRehashedDigest(ArtifactDigest const& source_digest, + ArtifactDigest const& target_digest, + ObjectType obj_type, + StorageConfig const& source_config, + StorageConfig const& target_config, + bool from_git = false) noexcept + -> std::optional; + +} // namespace MRApiUtils + +#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_SERVE_UTILS_HPP diff --git a/src/buildtool/execution_api/utils/TARGETS b/src/buildtool/execution_api/utils/TARGETS index b202a9ea6..f4033ee54 100644 --- a/src/buildtool/execution_api/utils/TARGETS +++ b/src/buildtool/execution_api/utils/TARGETS @@ -5,13 +5,15 @@ , "srcs": ["subobject.cpp"] , "deps": [ ["src/buildtool/common", "common"] - , ["src/buildtool/execution_api/common", "common"] + , ["src/buildtool/execution_api/common", "api_bundle"] ] , "private-deps": - [ ["src/buildtool/compatibility", "compatibility"] + [ ["src/buildtool/common", "protocol_traits"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"] + , ["src/buildtool/execution_api/common", "common"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["src/buildtool/crypto", "hash_function"] ] , "stage": ["src", "buildtool", "execution_api", "utils"] } @@ -19,7 +21,7 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["outputscheck"] , "hdrs": ["outputscheck.hpp"] - , "deps": [["src/buildtool/common", "common"]] + , "deps": [["src/buildtool/common", "bazel_types"]] , "stage": ["src", "buildtool", "execution_api", "utils"] } } diff --git a/src/buildtool/execution_api/utils/outputscheck.hpp b/src/buildtool/execution_api/utils/outputscheck.hpp index 431a53285..4719d2d63 
100644 --- a/src/buildtool/execution_api/utils/outputscheck.hpp +++ b/src/buildtool/execution_api/utils/outputscheck.hpp @@ -63,4 +63,4 @@ #endif -#endif +#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_UTILS_OUTPUTSCHECK_HPP diff --git a/src/buildtool/execution_api/utils/subobject.cpp b/src/buildtool/execution_api/utils/subobject.cpp index 380fe65f5..912e2d4f9 100644 --- a/src/buildtool/execution_api/utils/subobject.cpp +++ b/src/buildtool/execution_api/utils/subobject.cpp @@ -15,15 +15,18 @@ #include "src/buildtool/execution_api/utils/subobject.hpp" #ifndef BOOTSTRAP_BUILD_TOOL -#include "src/buildtool/compatibility/compatibility.hpp" +#include + +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp" +#include "src/buildtool/execution_api/common/execution_api.hpp" #include "src/buildtool/execution_api/common/tree_reader_utils.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" auto RetrieveSubPathId(Artifact::ObjectInfo object_info, - IExecutionApi const& api, + ApiBundle const& apis, const std::filesystem::path& sub_path) -> std::optional { std::filesystem::path sofar{}; @@ -35,7 +38,7 @@ auto RetrieveSubPathId(Artifact::ObjectInfo object_info, segment.string()); break; } - auto data = api.RetrieveToMemory(object_info); + auto data = apis.remote->RetrieveToMemory(object_info); if (not data) { Logger::Log(LogLevel::Error, "Failed to retrieve artifact {} at path '{}'", @@ -43,7 +46,7 @@ auto RetrieveSubPathId(Artifact::ObjectInfo object_info, sofar.string()); return std::nullopt; } - if (Compatibility::IsCompatible()) { + if (not ProtocolTraits::IsNative(apis.hash_function.GetType())) { auto directory = BazelMsgFactory::MessageFromString(*data); if (not directory) { @@ -55,9 +58,11 @@ auto RetrieveSubPathId(Artifact::ObjectInfo object_info, std::optional new_object_info{}; if (not 
TreeReaderUtils::ReadObjectInfos( *directory, - [&new_object_info, &segment](auto path, auto info) { + [&new_object_info, &segment]( + std::filesystem::path const& path, + Artifact::ObjectInfo&& info) { if (path == segment) { - new_object_info = info; + new_object_info = std::move(info); } return true; })) { @@ -94,9 +99,11 @@ auto RetrieveSubPathId(Artifact::ObjectInfo object_info, std::optional new_object_info{}; if (not TreeReaderUtils::ReadObjectInfos( *entries, - [&new_object_info, &segment](auto path, auto info) { + [&new_object_info, &segment]( + std::filesystem::path const& path, + Artifact::ObjectInfo&& info) { if (path == segment) { - new_object_info = info; + new_object_info = std::move(info); } return true; })) { @@ -121,4 +128,4 @@ auto RetrieveSubPathId(Artifact::ObjectInfo object_info, return object_info; } -#endif +#endif // BOOTSTRAP_BUILD_TOOL diff --git a/src/buildtool/execution_api/utils/subobject.hpp b/src/buildtool/execution_api/utils/subobject.hpp index c5cee9350..e2a8f46b4 100644 --- a/src/buildtool/execution_api/utils/subobject.hpp +++ b/src/buildtool/execution_api/utils/subobject.hpp @@ -21,13 +21,13 @@ #include #include "src/buildtool/common/artifact.hpp" -#include "src/buildtool/execution_api/common/execution_api.hpp" +#include "src/buildtool/execution_api/common/api_bundle.hpp" auto RetrieveSubPathId(Artifact::ObjectInfo object_info, - IExecutionApi const& api, + ApiBundle const& apis, const std::filesystem::path& sub_path) -> std::optional; -#endif +#endif // BOOTSTRAP_BUILD_TOOL -#endif +#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_UTILS_SUBOBJECT_HPP diff --git a/src/buildtool/execution_engine/dag/TARGETS b/src/buildtool/execution_engine/dag/TARGETS index e6d581b16..dfcab9dfd 100644 --- a/src/buildtool/execution_engine/dag/TARGETS +++ b/src/buildtool/execution_engine/dag/TARGETS @@ -4,15 +4,16 @@ , "hdrs": ["dag.hpp"] , "srcs": ["dag.cpp"] , "deps": - [ ["src/utils/cpp", "hex_string"] - , ["src/utils/cpp", "type_safe_arithmetic"] - 
, ["src/buildtool/common", "common"] + [ ["@", "gsl", "", "gsl"] , ["src/buildtool/common", "action_description"] , ["src/buildtool/common", "artifact_description"] + , ["src/buildtool/common", "common"] , ["src/buildtool/file_system", "object_type"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["@", "gsl", "", "gsl"] + , ["src/utils/cpp", "hex_string"] + , ["src/utils/cpp", "transformed_range"] + , ["src/utils/cpp", "type_safe_arithmetic"] ] , "stage": ["src", "buildtool", "execution_engine", "dag"] } diff --git a/src/buildtool/execution_engine/dag/dag.cpp b/src/buildtool/execution_engine/dag/dag.cpp index 17f4b3afb..cb82ec55d 100644 --- a/src/buildtool/execution_engine/dag/dag.cpp +++ b/src/buildtool/execution_engine/dag/dag.cpp @@ -148,7 +148,7 @@ auto DependencyGraph::AddAction(ActionDescription const& description) -> bool { auto DependencyGraph::AddAction(Action const& a) noexcept -> DependencyGraph::ActionNodeIdentifier { - auto id = a.Id(); + auto const& id = a.Id(); auto const action_it = action_ids_.find(id); if (action_it != action_ids_.end()) { return action_it->second; diff --git a/src/buildtool/execution_engine/dag/dag.hpp b/src/buildtool/execution_engine/dag/dag.hpp index 99a35f4b3..ac205da98 100644 --- a/src/buildtool/execution_engine/dag/dag.hpp +++ b/src/buildtool/execution_engine/dag/dag.hpp @@ -37,6 +37,7 @@ #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" #include "src/utils/cpp/hex_string.hpp" +#include "src/utils/cpp/transformed_range.hpp" #include "src/utils/cpp/type_safe_arithmetic.hpp" /// \brief Plain DirectedAcyclicGraph. @@ -46,23 +47,23 @@ class DirectedAcyclicGraph { public: /// \brief Abstract class for DAG nodes. - /// \tparam T_Content Type of content. - /// \tparam T_Other Type of neighboring nodes. + /// \tparam TContent Type of content. + /// \tparam TOther Type of neighboring nodes. 
/// TODO: once we have hashes, require sub classes to implement generating /// IDs depending on its unique content. - template + template class Node { public: - using OtherNode = T_Other; + using OtherNode = TOther; using OtherNodePtr = gsl::not_null; - explicit Node(T_Content&& content) noexcept + explicit Node(TContent&& content) noexcept : content_{std::move(content)} {} // NOLINTNEXTLINE(modernize-pass-by-value) - explicit Node(T_Content const& content) noexcept : content_{content} {} + explicit Node(TContent const& content) noexcept : content_{content} {} - Node(T_Content const& content, + Node(TContent const& content, std::vector const& parents, std::vector const& children) noexcept : content_{content}, parents_{parents}, children_{children} {} @@ -75,11 +76,11 @@ class DirectedAcyclicGraph { auto operator=(Node&&) -> Node& = delete; ~Node() = default; - [[nodiscard]] auto Content() const& noexcept -> T_Content const& { + [[nodiscard]] auto Content() const& noexcept -> TContent const& { return content_; } - [[nodiscard]] auto Content() && noexcept -> T_Content { + [[nodiscard]] auto Content() && noexcept -> TContent { return std::move(content_); } @@ -114,7 +115,7 @@ class DirectedAcyclicGraph { } private: - T_Content content_{}; + TContent content_{}; std::vector parents_{}; std::vector children_{}; }; @@ -177,13 +178,13 @@ class DependencyGraph : DirectedAcyclicGraph { class ArtifactNode; // Node identifier for actions - struct ActionNodeIdentifierTag : type_safe_arithmetic_tag {}; - using ActionNodeIdentifier = type_safe_arithmetic; + struct ActionNodeIdentifierTag : TypeSafeArithmeticTag {}; + using ActionNodeIdentifier = TypeSafeArithmetic; // Node identifier for artifacts - struct ArtifactNodeIdentifierTag : type_safe_arithmetic_tag {}; + struct ArtifactNodeIdentifierTag : TypeSafeArithmeticTag {}; using ArtifactNodeIdentifier = - type_safe_arithmetic; + TypeSafeArithmetic; /// \brief Class for traversal state data specific for ActionNode's /// Provides 
the following atomic operations (listed on the public methods): @@ -262,6 +263,9 @@ class DependencyGraph : DirectedAcyclicGraph { Action::LocalPath path; base::OtherNodePtr node; }; + using LocalPaths = + TransformedRange::const_iterator, + Action::LocalPath>; [[nodiscard]] static auto Create(Action const& content) noexcept -> Ptr { @@ -314,59 +318,59 @@ class DependencyGraph : DirectedAcyclicGraph { return true; } - [[nodiscard]] auto OutputFiles() - const& -> std::vector const& { + [[nodiscard]] auto OutputFiles() const& noexcept + -> std::vector const& { return output_files_; } - [[nodiscard]] auto OutputDirs() - const& -> std::vector const& { + [[nodiscard]] auto OutputDirs() const& noexcept + -> std::vector const& { return output_dirs_; } - [[nodiscard]] auto Dependencies() - const& -> std::vector const& { + [[nodiscard]] auto Dependencies() const& noexcept + -> std::vector const& { return dependencies_; } - [[nodiscard]] auto Command() const -> std::vector { + [[nodiscard]] auto Command() const noexcept + -> std::vector const& { return Content().Command(); } - [[nodiscard]] auto Env() const -> std::map { + [[nodiscard]] auto Env() const noexcept + -> std::map const& { return Content().Env(); } - [[nodiscard]] auto MayFail() const -> std::optional { + [[nodiscard]] auto MayFail() const noexcept + -> std::optional const& { return Content().MayFail(); } - [[nodiscard]] auto TimeoutScale() const -> double { + [[nodiscard]] auto TimeoutScale() const noexcept -> double { return Content().TimeoutScale(); } - [[nodiscard]] auto ExecutionProperties() const - -> std::map { + [[nodiscard]] auto ExecutionProperties() const noexcept + -> std::map const& { return Content().ExecutionProperties(); } - [[nodiscard]] auto NoCache() const -> bool { + [[nodiscard]] auto NoCache() const noexcept -> bool { return Content().NoCache(); } - [[nodiscard]] auto OutputFilePaths() const - -> std::vector { - return NodePaths(output_files_); + [[nodiscard]] auto OutputFilePaths() const& 
noexcept -> LocalPaths { + return NodePaths(&output_files_); } - [[nodiscard]] auto OutputDirPaths() const - -> std::vector { - return NodePaths(output_dirs_); + [[nodiscard]] auto OutputDirPaths() const& noexcept -> LocalPaths { + return NodePaths(&output_dirs_); } - [[nodiscard]] auto DependencyPaths() const - -> std::vector { - return NodePaths(dependencies_); + [[nodiscard]] auto DependencyPaths() const& noexcept -> LocalPaths { + return NodePaths(&dependencies_); } // To initialise the action traversal specific data before traversing @@ -387,19 +391,13 @@ class DependencyGraph : DirectedAcyclicGraph { std::unique_ptr traversal_state_{ std::make_unique()}; - // Collect paths from named nodes. - // TODO(oreiche): This could be potentially speed up by using a wrapper - // iterator to provide a read-only view (similar to BazelBlobContainer) [[nodiscard]] static auto NodePaths( - std::vector const& nodes) - -> std::vector { - std::vector paths{nodes.size()}; - std::transform( - nodes.cbegin(), - nodes.cend(), - paths.begin(), - [](auto const& named_node) { return named_node.path; }); - return paths; + gsl::not_null const*> const& nodes) + -> LocalPaths { + return TransformedRange{ + nodes->begin(), + nodes->end(), + [](NamedOtherNodePtr const& node) { return node.path; }}; } }; @@ -492,17 +490,17 @@ class DependencyGraph : DirectedAcyclicGraph { private: // List of action nodes we already created - std::vector action_nodes_{}; + std::vector action_nodes_; // List of artifact nodes we already created - std::vector artifact_nodes_{}; + std::vector artifact_nodes_; // Associates global action identifier to local node id - std::unordered_map action_ids_{}; + std::unordered_map action_ids_; // Associates global artifact identifier to local node id std::unordered_map - artifact_ids_{}; + artifact_ids_; [[nodiscard]] auto CreateOutputArtifactNodes( std::string const& action_id, diff --git a/src/buildtool/execution_engine/executor/TARGETS 
b/src/buildtool/execution_engine/executor/TARGETS index 15ca24418..3b798b491 100644 --- a/src/buildtool/execution_engine/executor/TARGETS +++ b/src/buildtool/execution_engine/executor/TARGETS @@ -4,26 +4,29 @@ , "hdrs": ["executor.hpp"] , "deps": [ "context" - , ["src/buildtool/logging", "log_level"] - , ["src/buildtool/logging", "logging"] + , ["@", "gsl", "", "gsl"] + , ["src/buildtool/build_engine/expression", "expression"] + , ["src/buildtool/common", "artifact_digest_factory"] , ["src/buildtool/common", "common"] + , ["src/buildtool/common", "git_hashes_converter"] + , ["src/buildtool/common", "protocol_traits"] , ["src/buildtool/common", "tree"] - , ["src/buildtool/compatibility", "compatibility"] - , ["src/buildtool/file_system", "file_system_manager"] - , ["src/buildtool/execution_engine/dag", "dag"] - , ["src/buildtool/execution_api/common", "api_bundle"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/execution_api/common", "artifact_blob_container"] , ["src/buildtool/execution_api/common", "common"] , ["src/buildtool/execution_api/common", "common_api"] + , ["src/buildtool/execution_api/remote", "bazel"] , ["src/buildtool/execution_api/remote", "config"] , ["src/buildtool/execution_api/remote", "context"] - , ["src/buildtool/execution_api/remote", "bazel"] + , ["src/buildtool/execution_engine/dag", "dag"] + , ["src/buildtool/file_system", "file_system_manager"] + , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] , ["src/buildtool/progress_reporting", "progress"] , ["src/utils/cpp", "hex_string"] , ["src/utils/cpp", "path_rebase"] , ["src/utils/cpp", "prefix"] - , ["@", "gsl", "", "gsl"] - , ["src/buildtool/common", "common"] - , ["src/buildtool/crypto", "hash_function"] + , ["src/utils/cpp", "transformed_range"] ] , "stage": ["src", "buildtool", "execution_engine", "executor"] } diff --git a/src/buildtool/execution_engine/executor/executor.hpp b/src/buildtool/execution_engine/executor/executor.hpp index 
26e415b16..a27f2215e 100644 --- a/src/buildtool/execution_engine/executor/executor.hpp +++ b/src/buildtool/execution_engine/executor/executor.hpp @@ -28,10 +28,13 @@ #include #include "gsl/gsl" +#include "src/buildtool/build_engine/expression/evaluator.hpp" #include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/git_hashes_converter.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/common/statistics.hpp" #include "src/buildtool/common/tree.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/common/artifact_blob_container.hpp" #include "src/buildtool/execution_api/common/common_api.hpp" @@ -48,6 +51,7 @@ #include "src/utils/cpp/hex_string.hpp" #include "src/utils/cpp/path_rebase.hpp" #include "src/utils/cpp/prefix.hpp" +#include "src/utils/cpp/transformed_range.hpp" /// \brief Implementations for executing actions and uploading artifacts. class ExecutorImpl { @@ -61,7 +65,7 @@ class ExecutorImpl { IExecutionApi const& api, ExecutionProperties const& merged_properties, gsl::not_null const& remote_context, - HashFunction hash_function, + gsl::not_null const& hash_function, std::chrono::milliseconds const& timeout, IExecutionAction::CacheFlag cache_flag, gsl::not_null const& stats, @@ -127,10 +131,10 @@ class ExecutorImpl { } auto base = action->Content().Cwd(); - auto cwd_relative_output_files = - RebasePathStringsRelativeTo(base, action->OutputFilePaths()); - auto cwd_relative_output_dirs = - RebasePathStringsRelativeTo(base, action->OutputDirPaths()); + auto cwd_relative_output_files = RebasePathStringsRelativeTo( + base, action->OutputFilePaths().ToVector()); + auto cwd_relative_output_dirs = RebasePathStringsRelativeTo( + base, action->OutputDirPaths().ToVector()); auto remote_action = (alternative_api ? 
*alternative_api : api) .CreateAction(*root_digest, action->Command(), @@ -152,10 +156,14 @@ class ExecutorImpl { auto result = remote_action->Execute(&logger); if (alternative_api) { if (result) { - auto const& artifacts = result->Artifacts(); + auto const artifacts = result->Artifacts(); + if (not artifacts) { + logger.Emit(LogLevel::Error, artifacts.error()); + return nullptr; + } std::vector object_infos{}; - object_infos.reserve(artifacts.size()); - for (auto const& [path, info] : artifacts) { + object_infos.reserve(artifacts.value()->size()); + for (auto const& [path, info] : *artifacts.value()) { object_infos.emplace_back(info); } if (not alternative_api->RetrieveToCas(object_infos, api)) { @@ -211,6 +219,7 @@ class ExecutorImpl { } if (not VerifyOrUploadKnownArtifact( + apis.hash_function.GetType(), *apis.remote, artifact->Content().Repository(), repo_config, @@ -263,7 +272,6 @@ class ExecutorImpl { /// \param[in] api The remote execution API of the CAS. /// \param[in] tree The git tree to be uploaded. /// \returns True if the upload was successful, False in case of any error. - // NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] static auto VerifyOrUploadTree(IExecutionApi const& api, GitTree const& tree) noexcept -> bool { @@ -272,11 +280,19 @@ class ExecutorImpl { std::unordered_map> entry_map; for (auto const& [path, entry] : tree) { + // Since GitTrees are processed here, HashFunction::Type::GitSHA1 is + // used auto digest = - ArtifactDigest{entry->Hash(), *entry->Size(), entry->IsTree()}; - digests.emplace_back(digest); + ArtifactDigestFactory::Create(HashFunction::Type::GitSHA1, + entry->Hash(), + *entry->Size(), + entry->IsTree()); + if (not digest) { + return false; + } + digests.emplace_back(*digest); try { - entry_map.emplace(std::move(digest), entry); + entry_map.emplace(*std::move(digest), entry); } catch (...) 
{ return false; } @@ -351,8 +367,7 @@ class ExecutorImpl { Artifact::ObjectInfo const& info, std::string const& hash) noexcept -> bool { std::optional content; - if (NativeSupport::IsTree( - static_cast(info.digest).hash())) { + if (info.digest.IsTree()) { // if known tree is not available, recursively upload its content auto tree = ReadGitTree(repo, repo_config, hash); if (not tree) { @@ -424,12 +439,14 @@ class ExecutorImpl { /// \param info The info of the object /// \returns true on success [[nodiscard]] static auto VerifyOrUploadKnownArtifact( + HashFunction::Type hash_type, IExecutionApi const& api, std::string const& repo, gsl::not_null const& repo_config, Artifact::ObjectInfo const& info) noexcept -> bool { - if (Compatibility::IsCompatible()) { - auto opt = Compatibility::GetGitEntry(info.digest.hash()); + if (not ProtocolTraits::IsNative(hash_type)) { + auto opt = + GitHashesConverter::Instance().GetGitEntry(info.digest.hash()); if (opt) { auto const& [git_sha1_hash, comp_repo] = *opt; return VerifyOrUploadGitArtifact( @@ -466,8 +483,8 @@ class ExecutorImpl { if (not content.has_value()) { return std::nullopt; } - auto digest = - ArtifactDigest::Create(hash_function, *content); + auto digest = ArtifactDigestFactory::HashDataAs( + hash_function, *content); if (not api.Upload(ArtifactBlobContainer{ {ArtifactBlob{digest, std::move(*content), @@ -519,14 +536,15 @@ class ExecutorImpl { /// are present in the artifacts map [[nodiscard]] static auto CheckOutputsExist( IExecutionResponse::ArtifactInfos const& artifacts, - std::vector const& outputs, + DependencyGraph::ActionNode::LocalPaths const& outputs, std::string base) noexcept -> bool { - return std::all_of(outputs.begin(), - outputs.end(), - [&artifacts, &base](auto const& output) { - return artifacts.contains( - RebasePathStringRelativeTo(base, output)); - }); + return std::all_of( + outputs.begin(), + outputs.end(), + [&artifacts, &base](Action::LocalPath const& output) { + return artifacts.contains( + 
RebasePathStringRelativeTo(base, output)); + }); } /// \brief Parse response and write object info to DAG's artifact nodes. @@ -576,23 +594,27 @@ class ExecutorImpl { } } - auto artifacts = response->Artifacts(); - auto output_files = action->OutputFilePaths(); - auto output_dirs = action->OutputDirPaths(); + auto const artifacts = response->Artifacts(); + if (not artifacts) { + logger.Emit(LogLevel::Error, artifacts.error()); + return false; + } - if (artifacts.empty() or - not CheckOutputsExist( - artifacts, output_files, action->Content().Cwd()) or - not CheckOutputsExist( - artifacts, output_dirs, action->Content().Cwd())) { + if (artifacts.value()->empty() or + not CheckOutputsExist(*artifacts.value(), + action->OutputFilePaths(), + action->Content().Cwd()) or + not CheckOutputsExist(*artifacts.value(), + action->OutputDirPaths(), + action->Content().Cwd())) { logger.Emit(LogLevel::Error, [&] { std::string message{ "action executed with missing outputs.\n" " Action outputs should be the following artifacts:"}; - for (auto const& output : output_files) { + for (auto const& output : action->OutputFilePaths()) { message += "\n - file: " + output; } - for (auto const& output : output_dirs) { + for (auto const& output : action->OutputDirPaths()) { message += "\n - dir: " + output; } return message; @@ -601,7 +623,7 @@ class ExecutorImpl { return false; } - SaveObjectInfo(artifacts, action, should_fail_outputs); + SaveObjectInfo(*artifacts.value(), action, should_fail_outputs); return true; } @@ -623,10 +645,13 @@ class ExecutorImpl { auto message = ""s; bool has_both = has_err and has_out; if (has_err or has_out) { - message += (has_both ? "Output"s - : has_out ? "Stdout"s - : "Stderr"s) + - " of command "; + if (has_both) { + message += "Output"s; + } + else { + message += has_out ? 
"Stdout"s : "Stderr"s; + } + message += " of command "; } message += nlohmann::json(action->Command()).dump() + " in environment " + @@ -664,7 +689,8 @@ class ExecutorImpl { msg << "\nrequested by"; for (auto const& origin : origins->second) { msg << "\n - "; - msg << origin.first.ToShortString(); + msg << origin.first.ToShortString( + Evaluator::GetExpressionLogLimit()); msg << "#"; msg << origin.second; } @@ -672,13 +698,13 @@ class ExecutorImpl { logger.Emit(LogLevel::Error, "{}", msg.str()); } - [[nodiscard]] static inline auto ScaleTime(std::chrono::milliseconds t, - double f) - -> std::chrono::milliseconds { - return std::chrono::milliseconds(std::lround(t.count() * f)); + [[nodiscard]] static auto ScaleTime(std::chrono::milliseconds t, + double f) -> std::chrono::milliseconds { + return std::chrono::milliseconds( + std::lround(static_cast(t.count()) * f)); } - [[nodiscard]] static inline auto MergeProperties( + [[nodiscard]] static auto MergeProperties( const ExecutionProperties& base, const ExecutionProperties& overlay) { ExecutionProperties result = base; @@ -692,10 +718,11 @@ class ExecutorImpl { /// \brief Get the alternative endpoint based on a specified set of platform /// properties. These are checked against the dispatch list of an existing /// remote context. - [[nodiscard]] static inline auto GetAlternativeEndpoint( + [[nodiscard]] static auto GetAlternativeEndpoint( const ExecutionProperties& properties, const gsl::not_null& remote_context, - HashFunction hash_function) -> std::unique_ptr { + const gsl::not_null& hash_function) + -> std::unique_ptr { for (auto const& [pred, endpoint] : remote_context->exec_config->dispatch) { bool match = true; @@ -761,7 +788,7 @@ class Executor { context_.remote_context->exec_config->platform_properties, action->ExecutionProperties()), context_.remote_context, - context_.apis->hash_function, + &context_.apis->hash_function, Impl::ScaleTime(timeout_, action->TimeoutScale()), action->NoCache() ? 
CF::DoNotCacheOutput : CF::CacheOutput, context_.statistics, @@ -784,7 +811,7 @@ class Executor { context_.remote_context->exec_config->platform_properties, action->ExecutionProperties()), context_.remote_context, - context_.apis->hash_function, + &context_.apis->hash_function, Impl::ScaleTime(timeout_, action->TimeoutScale()), action->NoCache() ? CF::DoNotCacheOutput : CF::CacheOutput, context_.statistics, @@ -858,7 +885,7 @@ class Rebuilder { context_.remote_context->exec_config->platform_properties, action->ExecutionProperties()), context_.remote_context, - context_.apis->hash_function, + &context_.apis->hash_function, Impl::ScaleTime(timeout_, action->TimeoutScale()), CF::PretendCached, context_.statistics, @@ -877,7 +904,7 @@ class Rebuilder { context_.remote_context->exec_config->platform_properties, action->ExecutionProperties()), context_.remote_context, - context_.apis->hash_function, + &context_.apis->hash_function, Impl::ScaleTime(timeout_, action->TimeoutScale()), CF::FromCacheOnly, context_.statistics, @@ -889,7 +916,11 @@ class Rebuilder { return false; } - DetectFlakyAction(*response, *response_cached, action->Content()); + if (auto error = DetectFlakyAction( + *response, *response_cached, action->Content())) { + logger_cached.Emit(LogLevel::Error, *error); + return false; + } return Impl::ParseResponse(logger, *response, action, @@ -906,7 +937,7 @@ class Rebuilder { logger, artifact, context_.repo_config, *context_.apis); } - [[nodiscard]] auto DumpFlakyActions() const noexcept -> nlohmann::json { + [[nodiscard]] auto DumpFlakyActions() const -> nlohmann::json { std::unique_lock lock{m_}; auto actions = nlohmann::json::object(); for (auto const& [action_id, outputs] : flaky_actions_) { @@ -923,29 +954,42 @@ class Rebuilder { gsl::not_null const api_cached_; std::chrono::milliseconds timeout_; mutable std::mutex m_; - mutable std::vector cache_misses_{}; + mutable std::vector cache_misses_; mutable std::unordered_map< std::string, std::unordered_map< 
std::string, std::pair>> - flaky_actions_{}; + flaky_actions_; - void DetectFlakyAction(IExecutionResponse::Ptr const& response, - IExecutionResponse::Ptr const& response_cached, - Action const& action) const noexcept { + [[nodiscard]] auto DetectFlakyAction( + IExecutionResponse::Ptr const& response, + IExecutionResponse::Ptr const& response_cached, + Action const& action) const noexcept -> std::optional { auto& stats = *context_.statistics; if (response and response_cached and response_cached->ActionDigest() == response->ActionDigest()) { stats.IncrementRebuiltActionComparedCounter(); - auto artifacts = response->Artifacts(); - auto artifacts_cached = response_cached->Artifacts(); + auto const artifacts = response->Artifacts(); + if (not artifacts) { + return artifacts.error(); + } + auto const artifacts_cached = response_cached->Artifacts(); + if (not artifacts_cached) { + return artifacts_cached.error(); + } std::ostringstream msg{}; - for (auto const& [path, info] : artifacts) { - auto const& info_cached = artifacts_cached[path]; - if (info != info_cached) { - RecordFlakyAction(&msg, action, path, info, info_cached); + try { + for (auto const& [path, info] : *artifacts.value()) { + auto const& info_cached = + artifacts_cached.value()->at(path); + if (info != info_cached) { + RecordFlakyAction( + &msg, action, path, info, info_cached); + } } + } catch (std::exception const& ex) { + return ex.what(); } if (msg.tellp() > 0) { stats.IncrementActionsFlakyCounter(); @@ -963,6 +1007,7 @@ class Rebuilder { std::unique_lock lock{m_}; cache_misses_.emplace_back(action.Id()); } + return std::nullopt; // ok } void RecordFlakyAction(gsl::not_null const& msg, diff --git a/src/buildtool/execution_engine/traverser/TARGETS b/src/buildtool/execution_engine/traverser/TARGETS index 5f6f6a9f2..37859e8f4 100644 --- a/src/buildtool/execution_engine/traverser/TARGETS +++ b/src/buildtool/execution_engine/traverser/TARGETS @@ -3,12 +3,12 @@ , "name": ["traverser"] , "hdrs": 
["traverser.hpp"] , "deps": - [ ["src/buildtool/execution_engine/dag", "dag"] - , ["src/buildtool/multithreading", "task_system"] + [ ["@", "gsl", "", "gsl"] + , ["src/buildtool/execution_engine/dag", "dag"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] + , ["src/buildtool/multithreading", "task_system"] , ["src/utils/cpp", "concepts"] - , ["@", "gsl", "", "gsl"] ] , "stage": ["src", "buildtool", "execution_engine", "traverser"] } diff --git a/src/buildtool/execution_engine/traverser/traverser.hpp b/src/buildtool/execution_engine/traverser/traverser.hpp index 8cfb991f7..17ab7ac6b 100644 --- a/src/buildtool/execution_engine/traverser/traverser.hpp +++ b/src/buildtool/execution_engine/traverser/traverser.hpp @@ -72,7 +72,7 @@ class Traverser { Executor const& runner_{}; DependencyGraph const& graph_; gsl::not_null*> failed_; - TaskSystem tasker_{}; // THIS SHOULD BE THE LAST MEMBER VARIABLE + TaskSystem tasker_; // THIS SHOULD BE THE LAST MEMBER VARIABLE // Visits discover nodes and queue visits to their children nodes. 
void Visit(gsl::not_null diff --git a/src/buildtool/file_system/TARGETS b/src/buildtool/file_system/TARGETS index 6eb9cc706..e9ee489d9 100644 --- a/src/buildtool/file_system/TARGETS +++ b/src/buildtool/file_system/TARGETS @@ -24,12 +24,13 @@ , "hdrs": ["object_cas.hpp"] , "deps": [ "file_storage" - , ["src/buildtool/file_system", "file_system_manager"] + , ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "artifact_digest_factory"] , ["src/buildtool/common", "common"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["src/buildtool/crypto", "hash_function"] - , ["@", "gsl", "", "gsl"] ] , "stage": ["src", "buildtool", "file_system"] } @@ -39,13 +40,13 @@ , "hdrs": ["file_system_manager.hpp"] , "deps": [ "object_type" + , ["@", "gsl", "", "gsl"] + , ["@", "json", "", "json"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] , ["src/buildtool/system", "system"] , ["src/utils/cpp", "expected"] , ["src/utils/cpp", "path"] - , ["@", "gsl", "", "gsl"] - , ["@", "json", "", "json"] ] , "stage": ["src", "buildtool", "file_system"] } @@ -54,8 +55,8 @@ , "name": ["jsonfs"] , "hdrs": ["jsonfs.hpp"] , "deps": - [ "object_type" - , "file_system_manager" + [ "file_system_manager" + , "object_type" , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] , ["src/utils/cpp", "json"] @@ -70,11 +71,11 @@ , "deps": ["git_context", "git_utils", ["@", "gsl", "", "gsl"]] , "stage": ["src", "buildtool", "file_system"] , "private-deps": - [ ["src/buildtool/logging", "log_level"] + [ ["", "libgit2"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] , ["src/utils/cpp", "hex_string"] , ["src/utils/cpp", "path"] - , ["", "libgit2"] ] } , "git_tree": @@ -85,18 +86,17 @@ , "deps": [ "git_repo" , "object_type" + , ["@", "gsl", "", "gsl"] , ["src/buildtool/multithreading", 
"atomic_value"] , ["src/utils/cpp", "hex_string"] - , ["@", "gsl", "", "gsl"] ] , "stage": ["src", "buildtool", "file_system"] , "private-deps": [ ["", "libgit2"] - , "file_system_manager" + , ["src/buildtool/common", "common"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] , ["src/utils/cpp", "path"] - , ["src/buildtool/common", "common"] ] } , "git_context": @@ -106,9 +106,9 @@ , "srcs": ["git_context.cpp"] , "stage": ["src", "buildtool", "file_system"] , "private-deps": - [ ["src/buildtool/logging", "log_level"] + [ ["", "libgit2"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["", "libgit2"] , ["src/utils/cpp", "gsl"] ] } @@ -120,21 +120,22 @@ , "deps": [ "git_cas" , "git_types" - , ["src/buildtool/common", "bazel_types"] - , ["src/utils/cpp", "expected"] - , ["src/buildtool/storage", "config"] , ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "common"] + , ["src/buildtool/storage", "config"] + , ["src/utils/cpp", "expected"] ] , "stage": ["src", "buildtool", "file_system"] , "private-deps": - [ ["src/buildtool/logging", "log_level"] + [ ["", "libgit2"] + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/file_system", "file_system_manager"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["", "libgit2"] - , ["src/utils/cpp", "path"] - , ["src/utils/cpp", "hex_string"] , ["src/utils/cpp", "gsl"] - , ["src/buildtool/file_system", "file_system_manager"] - , ["src/buildtool/common", "common"] + , ["src/utils/cpp", "hex_string"] + , ["src/utils/cpp", "path"] ] , "cflags": ["-pthread"] } @@ -143,11 +144,11 @@ , "name": ["git_utils"] , "hdrs": ["git_utils.hpp"] , "srcs": ["git_utils.cpp"] - , "deps": [["@", "gsl", "", "gsl"], "object_type"] + , "deps": ["object_type", ["@", "gsl", "", "gsl"]] , "stage": ["src", "buildtool", "file_system"] , "private-deps": - [ ["", "libgit2"] - , ["@", 
"fmt", "", "fmt"] + [ ["@", "fmt", "", "fmt"] + , ["", "libgit2"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] , ["src/utils/cpp", "hex_string"] @@ -164,14 +165,18 @@ , "name": ["file_root"] , "hdrs": ["file_root.hpp"] , "deps": - [ "git_tree" - , "file_system_manager" + [ "file_system_manager" + , "git_tree" + , ["@", "gsl", "", "gsl"] + , ["@", "json", "", "json"] , ["src/buildtool/common", "artifact_description"] - , ["src/buildtool/compatibility", "compatibility"] + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common", "common"] + , ["src/buildtool/common", "git_hashes_converter"] + , ["src/buildtool/common", "protocol_traits"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["@", "gsl", "", "gsl"] - , ["@", "json", "", "json"] , ["src/utils/cpp", "concepts"] , ["src/utils/cpp", "json"] ] diff --git a/src/buildtool/file_system/file_root.hpp b/src/buildtool/file_system/file_root.hpp index 8b85f5a9b..f9085f523 100644 --- a/src/buildtool/file_system/file_root.hpp +++ b/src/buildtool/file_system/file_root.hpp @@ -26,7 +26,11 @@ #include "gsl/gsl" #include "nlohmann/json.hpp" #include "src/buildtool/common/artifact_description.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/git_hashes_converter.hpp" +#include "src/buildtool/common/protocol_traits.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/file_system/git_tree.hpp" #include "src/buildtool/logging/log_level.hpp" @@ -57,19 +61,19 @@ class FilteredIterator { FilteredIterator(I first, I last, predicate_t p) noexcept : iterator_{std::find_if(first, last, p)}, end_{std::move(last)}, - p{std::move(p)} {} + p_{std::move(p)} {} 
auto operator*() const noexcept -> reference { return iterator_->first; } auto operator++() noexcept -> FilteredIterator& { ++iterator_; - iterator_ = std::find_if(iterator_, end_, p); + iterator_ = std::find_if(iterator_, end_, p_); return *this; } [[nodiscard]] auto begin() noexcept -> FilteredIterator& { return *this; } [[nodiscard]] auto end() const noexcept -> FilteredIterator { - return FilteredIterator{end_, end_, p}; + return FilteredIterator{end_, end_, p_}; } [[nodiscard]] friend auto operator==(FilteredIterator const& x, @@ -87,18 +91,18 @@ class FilteredIterator { private: I iterator_{}; const I end_{}; - predicate_t p{}; + predicate_t p_{}; }; class FileRoot { using fs_root_t = std::filesystem::path; - struct git_root_t { + struct RootGit { gsl::not_null cas; gsl::not_null tree; }; // absent roots are defined by a tree hash with no witnessing repository using absent_root_t = std::string; - using root_t = std::variant; + using root_t = std::variant; public: static constexpr auto kGitTreeMarker = "git tree"; @@ -181,23 +185,14 @@ class FileRoot { [[nodiscard]] auto ContainsBlob(std::string const& name) const noexcept -> bool { - try { - if (std::holds_alternative(data_)) { - auto const& data = std::get(data_); - auto ptr = data->LookupEntryByName(name); - if (static_cast(ptr)) { - return IsBlobObject(ptr->Type()); - } - return false; - } - if (std::holds_alternative(data_)) { - auto const& data = std::get(data_); - auto it = data.find(name); - return (it != data.end() and IsBlobObject(it->second)); - } - } catch (...) { + if (auto const* const data = std::get_if(&data_)) { + auto const ptr = (*data)->LookupEntryByName(name); + return ptr != nullptr and IsBlobObject(ptr->Type()); + } + if (auto const* const data = std::get_if(&data_)) { + auto const it = data->find(name); + return it != data->end() and IsBlobObject(it->second); } - return false; } @@ -218,9 +213,10 @@ class FileRoot { /// \brief Retrieve a root tree as a KNOWN artifact. 
/// Only succeeds if no entries have to be ignored. - [[nodiscard]] auto AsKnownTree(std::string const& repository) + [[nodiscard]] auto AsKnownTree(HashFunction::Type hash_type, + std::string const& repository) const noexcept -> std::optional { - if (Compatibility::IsCompatible()) { + if (not ProtocolTraits::IsNative(hash_type)) { return std::nullopt; } if (std::holds_alternative(data_)) { @@ -230,8 +226,16 @@ class FileRoot { if (auto id = data->Hash()) { auto const& size = data->Size(); if (size) { + auto digest = ArtifactDigestFactory::Create( + HashFunction::Type::GitSHA1, + *id, + *size, + /*is_tree=*/true); + if (not digest) { + return std::nullopt; + } return ArtifactDescription::CreateKnown( - ArtifactDigest{*id, *size, /*is_tree=*/true}, + *std::move(digest), ObjectType::Tree, repository); } @@ -320,24 +324,27 @@ class FileRoot { FileRoot(gsl::not_null const& cas, gsl::not_null const& tree, bool ignore_special = false) noexcept - : root_{git_root_t{cas, tree}}, ignore_special_{ignore_special} {} + : root_{RootGit{cas, tree}}, ignore_special_{ignore_special} {} [[nodiscard]] static auto FromGit(std::filesystem::path const& repo_path, std::string const& git_tree_id, bool ignore_special = false) noexcept -> std::optional { - if (auto cas = GitCAS::Open(repo_path)) { - if (auto tree = GitTree::Read(cas, git_tree_id, ignore_special)) { - try { - return FileRoot{ - cas, - std::make_shared(std::move(*tree)), - ignore_special}; - } catch (...) { - } - } + auto cas = GitCAS::Open(repo_path); + if (not cas) { + return std::nullopt; + } + auto tree = GitTree::Read(cas, git_tree_id, ignore_special); + if (not tree) { + return std::nullopt; + } + try { + return FileRoot{cas, + std::make_shared(std::move(*tree)), + ignore_special}; + } catch (...) 
{ + return std::nullopt; } - return std::nullopt; } /// \brief Return a complete description of the content of this root, if @@ -346,12 +353,12 @@ class FileRoot { [[nodiscard]] auto ContentDescription() const noexcept -> std::optional { try { - if (std::holds_alternative(root_)) { + if (std::holds_alternative(root_)) { nlohmann::json j; j.push_back(ignore_special_ ? kGitTreeIgnoreSpecialMarker : kGitTreeMarker); // we need the root tree id, irrespective of ignore_special flag - j.push_back(std::get(root_).tree->FileRootHash()); + j.push_back(std::get(root_).tree->FileRootHash()); return j; } if (std::holds_alternative(root_)) { @@ -374,17 +381,17 @@ class FileRoot { // `IsDirectory()`, and `BlobType()` on contents of the same directory will // be served without any additional file system lookups. [[nodiscard]] auto HasFastDirectoryLookup() const noexcept -> bool { - return std::holds_alternative(root_); + return std::holds_alternative(root_); } [[nodiscard]] auto Exists(std::filesystem::path const& path) const noexcept -> bool { - if (std::holds_alternative(root_)) { + if (std::holds_alternative(root_)) { if (path == ".") { return true; } return static_cast( - std::get(root_).tree->LookupEntryByPath(path)); + std::get(root_).tree->LookupEntryByPath(path)); } if (std::holds_alternative(root_)) { auto root_path = std::get(root_) / path; @@ -400,10 +407,9 @@ class FileRoot { [[nodiscard]] auto IsFile( std::filesystem::path const& file_path) const noexcept -> bool { - if (std::holds_alternative(root_)) { - if (auto entry = - std::get(root_).tree->LookupEntryByPath( - file_path)) { + if (std::holds_alternative(root_)) { + if (auto entry = std::get(root_).tree->LookupEntryByPath( + file_path)) { return IsFileObject(entry->Type()); } } @@ -416,10 +422,9 @@ class FileRoot { [[nodiscard]] auto IsSymlink( std::filesystem::path const& file_path) const noexcept -> bool { - if (std::holds_alternative(root_)) { - if (auto entry = - std::get(root_).tree->LookupEntryByPath( - 
file_path)) { + if (std::holds_alternative(root_)) { + if (auto entry = std::get(root_).tree->LookupEntryByPath( + file_path)) { return IsSymlinkObject(entry->Type()); } } @@ -437,13 +442,12 @@ class FileRoot { [[nodiscard]] auto IsDirectory( std::filesystem::path const& dir_path) const noexcept -> bool { - if (std::holds_alternative(root_)) { + if (std::holds_alternative(root_)) { if (dir_path == ".") { return true; } - if (auto entry = - std::get(root_).tree->LookupEntryByPath( - dir_path)) { + if (auto entry = std::get(root_).tree->LookupEntryByPath( + dir_path)) { return entry->IsTree(); } } @@ -457,10 +461,9 @@ class FileRoot { /// \brief Read content of file or symlink. [[nodiscard]] auto ReadContent(std::filesystem::path const& file_path) const noexcept -> std::optional { - if (std::holds_alternative(root_)) { - if (auto entry = - std::get(root_).tree->LookupEntryByPath( - file_path)) { + if (std::holds_alternative(root_)) { + if (auto entry = std::get(root_).tree->LookupEntryByPath( + file_path)) { if (IsBlobObject(entry->Type())) { return entry->Blob(); } @@ -481,8 +484,8 @@ class FileRoot { [[nodiscard]] auto ReadDirectory(std::filesystem::path const& dir_path) const noexcept -> DirectoryEntries { try { - if (std::holds_alternative(root_)) { - auto const& tree = std::get(root_).tree; + if (std::holds_alternative(root_)) { + auto const& tree = std::get(root_).tree; if (dir_path == ".") { return DirectoryEntries{&(*tree)}; } @@ -516,10 +519,9 @@ class FileRoot { [[nodiscard]] auto BlobType(std::filesystem::path const& file_path) const noexcept -> std::optional { - if (std::holds_alternative(root_)) { - if (auto entry = - std::get(root_).tree->LookupEntryByPath( - file_path)) { + if (std::holds_alternative(root_)) { + if (auto entry = std::get(root_).tree->LookupEntryByPath( + file_path)) { if (IsBlobObject(entry->Type())) { return entry->Type(); } @@ -539,9 +541,9 @@ class FileRoot { /// \brief Read a blob from the root based on its ID. 
[[nodiscard]] auto ReadBlob(std::string const& blob_id) const noexcept -> std::optional { - if (std::holds_alternative(root_)) { - return std::get(root_).cas->ReadObject( - blob_id, /*is_hex_id=*/true); + if (std::holds_alternative(root_)) { + return std::get(root_).cas->ReadObject(blob_id, + /*is_hex_id=*/true); } return std::nullopt; } @@ -550,9 +552,9 @@ class FileRoot { /// This should include all valid entry types. [[nodiscard]] auto ReadTree(std::string const& tree_id) const noexcept -> std::optional { - if (std::holds_alternative(root_)) { + if (std::holds_alternative(root_)) { try { - auto const& cas = std::get(root_).cas; + auto const& cas = std::get(root_).cas; return GitTree::Read(cas, tree_id); } catch (...) { return std::nullopt; @@ -564,28 +566,39 @@ class FileRoot { // Create LOCAL or KNOWN artifact. Does not check existence for LOCAL. // `file_path` must reference a blob. [[nodiscard]] auto ToArtifactDescription( + HashFunction::Type hash_type, std::filesystem::path const& file_path, std::string const& repository) const noexcept -> std::optional { - if (std::holds_alternative(root_)) { - if (auto entry = - std::get(root_).tree->LookupEntryByPath( - file_path)) { + if (std::holds_alternative(root_)) { + if (auto entry = std::get(root_).tree->LookupEntryByPath( + file_path)) { if (entry->IsBlob()) { - if (Compatibility::IsCompatible()) { - auto compatible_hash = Compatibility::RegisterGitEntry( - entry->Hash(), *entry->Blob(), repository); + if (not ProtocolTraits::IsNative(hash_type)) { + auto compatible_hash = + GitHashesConverter::Instance().RegisterGitEntry( + entry->Hash(), *entry->Blob(), repository); + auto digest = + ArtifactDigestFactory::Create(hash_type, + compatible_hash, + *entry->Size(), + /*is_tree=*/false); + if (not digest) { + return std::nullopt; + } return ArtifactDescription::CreateKnown( - ArtifactDigest{compatible_hash, - *entry->Size(), - /*is_tree=*/false}, - entry->Type()); + *std::move(digest), entry->Type()); + } + auto 
digest = + ArtifactDigestFactory::Create(hash_type, + entry->Hash(), + *entry->Size(), + /*is_tree=*/false); + if (not digest) { + return std::nullopt; } return ArtifactDescription::CreateKnown( - ArtifactDigest{ - entry->Hash(), *entry->Size(), /*is_tree=*/false}, - entry->Type(), - repository); + *std::move(digest), entry->Type(), repository); } } return std::nullopt; @@ -648,9 +661,11 @@ class FileRoot { return std::pair(FileRoot{path}, std::move(path)); } if (root[0] == FileRoot::kGitTreeMarker) { - if (not(root.size() == 3 and root[1].is_string() and - root[2].is_string()) and - not(root.size() == 2 and root[1].is_string())) { + bool const has_one_arg = root.size() == 2 and root[1].is_string(); + bool const has_two_args = root.size() == 3 and + root[1].is_string() and + root[2].is_string(); + if (not has_one_arg and not has_two_args) { *error_msg = fmt::format( "\"git tree\" scheme expects one or two string " "arguments, but found {} for {} of repository {}", @@ -690,9 +705,11 @@ class FileRoot { std::move(path)); } if (root[0] == FileRoot::kGitTreeIgnoreSpecialMarker) { - if (not(root.size() == 3 and root[1].is_string() and - root[2].is_string()) and - not(root.size() == 2 and root[1].is_string())) { + bool const has_one_arg = root.size() == 2 and root[1].is_string(); + bool const has_two_args = root.size() == 3 and + root[1].is_string() and + root[2].is_string(); + if (not has_one_arg and not has_two_args) { *error_msg = fmt::format( "\"git tree ignore-special\" scheme expects one or two " "string arguments, but found {} for {} of repository {}", diff --git a/src/buildtool/file_system/file_storage.hpp b/src/buildtool/file_system/file_storage.hpp index fefd22a7f..80aac0e06 100644 --- a/src/buildtool/file_system/file_storage.hpp +++ b/src/buildtool/file_system/file_storage.hpp @@ -15,6 +15,7 @@ #ifndef INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_FILE_STORAGE_HPP #define INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_FILE_STORAGE_HPP +#include #include #include #include @@ -25,7 
+26,7 @@ #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" -enum class StoreMode { +enum class StoreMode : std::uint8_t { // First thread to write conflicting file wins. FirstWins, // Last thread to write conflicting file wins, effectively overwriting @@ -89,7 +90,7 @@ class FileStorage { private: static constexpr bool kFdLess{kType == ObjectType::Executable}; - std::filesystem::path storage_root_{}; + std::filesystem::path storage_root_; /// \brief Add file to storage from file path via link or copy and rename. /// If a race-condition occurs, the winning thread will be the one diff --git a/src/buildtool/file_system/file_system_manager.hpp b/src/buildtool/file_system/file_system_manager.hpp index 514fadd27..f7bcaee0f 100644 --- a/src/buildtool/file_system/file_system_manager.hpp +++ b/src/buildtool/file_system/file_system_manager.hpp @@ -19,6 +19,7 @@ #include // for errno #include #include +#include #include // for std::fopen #include // std::exit, std::getenv #include @@ -51,7 +52,7 @@ #include "src/utils/cpp/path.hpp" namespace detail { -static inline consteval auto BitWidth(int max_val) -> int { +static consteval auto BitWidth(int max_val) -> int { constexpr int kBitsPerByte = 8; int i = sizeof(max_val) * kBitsPerByte; while ((i-- > 0) and (((max_val >> i) & 0x01) == 0x00)) { // NOLINT @@ -78,9 +79,9 @@ class FileSystemManager { auto operator=(DirectoryAnchor const&) -> DirectoryAnchor& = delete; auto operator=(DirectoryAnchor&&) -> DirectoryAnchor& = delete; ~DirectoryAnchor() noexcept { - if (not kRestorePath.empty()) { + if (not restore_path_.empty()) { try { - std::filesystem::current_path(kRestorePath); + std::filesystem::current_path(restore_path_); } catch (std::exception const& e) { Logger::Log(LogLevel::Error, e.what()); } @@ -88,14 +89,14 @@ class FileSystemManager { } [[nodiscard]] auto GetRestorePath() const noexcept -> std::filesystem::path const& { - return kRestorePath; + return restore_path_; } private: - 
std::filesystem::path const kRestorePath{}; + std::filesystem::path const restore_path_; DirectoryAnchor() - : kRestorePath{FileSystemManager::GetCurrentDirectory()} {} + : restore_path_{FileSystemManager::GetCurrentDirectory()} {} DirectoryAnchor(DirectoryAnchor&&) = default; }; @@ -702,7 +703,7 @@ class FileSystemManager { std::ifstream file_reader(file.string(), std::ios::binary); if (file_reader.is_open()) { auto ssize = gsl::narrow(chunk.size()); - do { + while (file_reader.good()) { file_reader.read(chunk.data(), ssize); auto count = file_reader.gcount(); if (count == ssize) { @@ -712,7 +713,7 @@ class FileSystemManager { content += chunk.substr(0, gsl::narrow(count)); } - } while (file_reader.good()); + } file_reader.close(); return content; } @@ -984,7 +985,7 @@ class FileSystemManager { } private: - enum class CreationStatus { Created, Exists, Failed }; + enum class CreationStatus : std::uint8_t { Created, Exists, Failed }; static constexpr std::size_t kChunkSize{256}; @@ -1165,10 +1166,10 @@ class FileSystemManager { std::filesystem::file_status const& status) noexcept -> bool { try { namespace fs = std::filesystem; - static constexpr auto exec_flags = fs::perms::owner_exec bitor + static constexpr auto kExecFlags = fs::perms::owner_exec bitor fs::perms::group_exec bitor fs::perms::others_exec; - auto exec_perms = status.permissions() bitand exec_flags; + auto exec_perms = status.permissions() bitand kExecFlags; return exec_perms != fs::perms::none; } catch (std::exception const& e) { Logger::Log(LogLevel::Error, @@ -1184,14 +1185,14 @@ class FileSystemManager { /// Non-zero return values indicate errors, which can be decoded using /// \ref ErrorToString. 
class LowLevel { - static constexpr ssize_t kDefaultChunkSize = 1024 * 32; + static constexpr std::size_t kDefaultChunkSize = 1024UL * 32; static constexpr int kWriteFlags = O_WRONLY | O_CREAT | O_TRUNC; // NOLINT static constexpr int kWritePerms = // 644 S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH; // NOLINT public: - template + template [[nodiscard]] static auto CopyFile(char const* src, char const* dst, bool skip_existing) noexcept -> int { @@ -1237,18 +1238,19 @@ class FileSystemManager { return 0; } - template + template [[nodiscard]] static auto WriteFile(char const* content, - ssize_t size, + std::size_t size, char const* file) noexcept -> int { auto out = FdOpener{file, kWriteFlags, kWritePerms}; if (out.fd == -1) { return PackError(ERROR_OPEN_OUTPUT, errno); } - ssize_t pos{}; + std::size_t pos = 0; while (pos < size) { auto const write_len = std::min(kChunkSize, size - pos); - auto len = write(out.fd, content + pos, write_len); // NOLINT + auto const len = + write(out.fd, content + pos, write_len); // NOLINT if (len < 0) { return PackError(ERROR_WRITE_OUTPUT, errno); } @@ -1287,7 +1289,7 @@ class FileSystemManager { } private: - enum ErrorCodes { + enum ErrorCodes : std::uint8_t { ERROR_READ_INPUT, // read() input file failed ERROR_OPEN_INPUT, // open() input file failed ERROR_OPEN_OUTPUT, // open() output file failed diff --git a/src/buildtool/file_system/git_cas.hpp b/src/buildtool/file_system/git_cas.hpp index c1223411c..e6eed6649 100644 --- a/src/buildtool/file_system/git_cas.hpp +++ b/src/buildtool/file_system/git_cas.hpp @@ -64,12 +64,12 @@ class GitCAS { private: std::unique_ptr odb_{nullptr, odb_closer}; // git folder path of repo - std::filesystem::path git_path_{}; + std::filesystem::path git_path_; // mutex to guard odb while setting up a "fake" repository; it needs to be // uniquely owned while wrapping the odb, but then git operations are free // to share it. 
- mutable std::shared_mutex mutex_{}; + mutable std::shared_mutex mutex_; [[nodiscard]] auto OpenODB(std::filesystem::path const& repo_path) noexcept -> bool; diff --git a/src/buildtool/file_system/git_context.cpp b/src/buildtool/file_system/git_context.cpp index 94fb3cefa..f6f83c0d9 100644 --- a/src/buildtool/file_system/git_context.cpp +++ b/src/buildtool/file_system/git_context.cpp @@ -24,7 +24,9 @@ extern "C" { GitContext::GitContext() noexcept { #ifndef BOOTSTRAP_BUILD_TOOL - if (not(initialized_ = (git_libgit2_init() >= 0))) { + // NOLINTNEXTLINE(cppcoreguidelines-prefer-member-initializer) + initialized_ = git_libgit2_init() >= 0; + if (not initialized_) { Logger::Log(LogLevel::Error, "initializing libgit2 failed"); } #endif diff --git a/src/buildtool/file_system/git_repo.cpp b/src/buildtool/file_system/git_repo.cpp index c75029eec..176535b3d 100644 --- a/src/buildtool/file_system/git_repo.cpp +++ b/src/buildtool/file_system/git_repo.cpp @@ -22,7 +22,8 @@ #include #include -#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" @@ -204,8 +205,8 @@ std::unordered_set const kNonSpecialGitFileModes{ struct InMemoryODBBackend { git_odb_backend parent; - GitRepo::tree_entries_t const* entries{nullptr}; // object headers - std::unordered_map trees{}; // solid tree objects + GitRepo::tree_entries_t const* entries{nullptr}; // object headers + std::unordered_map trees; // solid tree objects }; [[nodiscard]] auto backend_read_header(size_t* len_p, @@ -357,12 +358,10 @@ void fetch_backend_free(git_odb_backend* /*_backend*/) {} auto const kFetchIntoODBParent = CreateFetchIntoODBParent(); // callback to remote fetch without an SSL certificate check -const auto certificate_passthrough_cb = [](git_cert* /*cert*/, - 
int /*valid*/, - const char* /*host*/, - void* /*payload*/) -> int { - return 0; -}; +const auto kCertificatePassthrough = [](git_cert* /*cert*/, + int /*valid*/, + const char* /*host*/, + void* /*payload*/) -> int { return 0; }; } // namespace #endif // BOOTSTRAP_BUILD_TOOL @@ -503,13 +502,10 @@ GitRepo::GitRepo(GitRepo&& other) noexcept } auto GitRepo::operator=(GitRepo&& other) noexcept -> GitRepo& { - try { - git_cas_ = std::move(other.git_cas_); - repo_ = std::move(other.repo_); - is_repo_fake_ = other.is_repo_fake_; - other.git_cas_ = nullptr; - } catch (...) { - } + git_cas_ = std::move(other.git_cas_); + repo_ = std::move(other.repo_); + is_repo_fake_ = other.is_repo_fake_; + other.git_cas_ = nullptr; return *this; } @@ -876,8 +872,6 @@ auto GitRepo::FetchFromPath(std::shared_ptr cfg, // wrap remote object auto remote = std::unique_ptr( remote_ptr, remote_closer); - // get the canonical url - auto canonical_url = std::string(git_remote_url(remote.get())); // get a well-defined config file if (not cfg) { @@ -901,25 +895,21 @@ auto GitRepo::FetchFromPath(std::shared_ptr cfg, // no proxy fetch_opts.proxy_opts.type = GIT_PROXY_NONE; // no SSL verification - fetch_opts.callbacks.certificate_check = certificate_passthrough_cb; + fetch_opts.callbacks.certificate_check = kCertificatePassthrough; // disable update of the FETCH_HEAD pointer fetch_opts.update_fetchhead = 0; // setup fetch refspecs array - git_strarray refspecs_array_obj{}; + GitStrArray refspecs_array_obj; if (branch) { // make sure we check for tags as well - std::string tag = fmt::format("+refs/tags/{}", *branch); - std::string head = fmt::format("+refs/heads/{}", *branch); - PopulateStrarray(&refspecs_array_obj, {tag, head}); + refspecs_array_obj.AddEntry(fmt::format("+refs/tags/{}", *branch)); + refspecs_array_obj.AddEntry(fmt::format("+refs/heads/{}", *branch)); } - auto refspecs_array = - std::unique_ptr( - &refspecs_array_obj, strarray_deleter); + auto const refspecs_array = 
refspecs_array_obj.Get(); if (git_remote_fetch( - remote.get(), refspecs_array.get(), &fetch_opts, nullptr) != - 0) { + remote.get(), &refspecs_array, &fetch_opts, nullptr) != 0) { (*logger)(fmt::format( "Fetching {} in local repository {} failed with:\n{}", branch ? fmt::format("branch {}", *branch) : "all", @@ -1687,7 +1677,7 @@ auto GitRepo::GetObjectByPathFromTree(std::string const& tree_id, GetGitCAS()->ReadObject(entry_id, /*is_hex_id=*/true)) { return TreeEntryInfo{.id = entry_id, .type = entry_type, - .symlink_content = *target}; + .symlink_content = std::move(target)}; } Logger::Log( LogLevel::Trace, @@ -1843,20 +1833,30 @@ auto GitRepo::ReadTree(std::string const& id, // ignore_special==false. if (not ignore_special) { // we first gather all symlink candidates - std::vector symlinks{}; + // to check symlinks in bulk, optimized for network-backed repos + std::vector symlinks{}; symlinks.reserve(entries.size()); // at most one symlink per entry for (auto const& entry : entries) { - for (auto const& item : entry.second) { - if (IsSymlinkObject(item.type)) { - symlinks.emplace_back(bazel_re::Digest( - ArtifactDigest(ToHexString(entry.first), - /*size=*/0, - /*is_tree=*/false))); - break; // no need to check other items with same hash + if (std::any_of(entry.second.begin(), + entry.second.end(), + [](TreeEntry const& item) { + return IsSymlinkObject(item.type); + })) { + auto digest = ArtifactDigestFactory::Create( + HashFunction::Type::GitSHA1, + ToHexString(entry.first), + /*size=*/0, + /*is_tree=*/false); + if (not digest) { + Logger::Log(LogLevel::Debug, + "Conversion error in GitRepo:\n {}", + std::move(digest).error()); + return std::nullopt; } + symlinks.emplace_back(*std::move(digest)); } } - // we check symlinks in bulk, optimized for network-backed repos + if (not symlinks.empty() and not std::invoke(check_symlinks.get(), symlinks)) { Logger::Log(LogLevel::Error, @@ -1939,11 +1939,7 @@ auto GitRepo::CreateTree(tree_entries_t const& entries) const 
noexcept GitLastError()); return std::nullopt; } - auto raw_id = ToRawString(oid); - if (not raw_id) { - return std::nullopt; - } - return std::move(*raw_id); + return ToRawString(oid); } catch (std::exception const& ex) { Logger::Log( LogLevel::Error, "creating tree failed with:\n{}", ex.what()); @@ -2095,25 +2091,13 @@ auto GitRepo::CreateTreeFromDirectory(std::filesystem::path const& dir, }; if (ReadDirectory(dir, dir_read_and_store, logger)) { - if (auto raw_id = CreateTree(entries)) { - return *raw_id; - } + return CreateTree(entries); } return std::nullopt; #endif // BOOTSTRAP_BUILD_TOOL } -void GitRepo::PopulateStrarray( - git_strarray* array, - std::vector const& string_list) noexcept { - array->count = string_list.size(); - array->strings = gsl::owner(new char*[string_list.size()]); - for (auto const& elem : string_list) { - auto i = - static_cast(&elem - &string_list[0]); // get index - // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic) - array->strings[i] = gsl::owner(new char[elem.size() + 1]); - // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic) - strncpy(array->strings[i], elem.c_str(), elem.size() + 1); - } +auto GitRepo::GitStrArray::Get() & noexcept -> git_strarray { + return git_strarray{.strings = entry_pointers_.data(), + .count = entry_pointers_.size()}; } diff --git a/src/buildtool/file_system/git_repo.hpp b/src/buildtool/file_system/git_repo.hpp index 4d1be53f3..df100b35b 100644 --- a/src/buildtool/file_system/git_repo.hpp +++ b/src/buildtool/file_system/git_repo.hpp @@ -24,7 +24,7 @@ #include #include "gsl/gsl" -#include "src/buildtool/common/bazel_types.hpp" +#include "src/buildtool/common/artifact_digest.hpp" #include "src/buildtool/file_system/git_cas.hpp" #include "src/buildtool/file_system/git_types.hpp" #include "src/buildtool/storage/config.hpp" @@ -33,6 +33,7 @@ extern "C" { struct git_repository; struct git_config; +struct git_strarray; } /// \brief Git repository logic. 
@@ -43,12 +44,11 @@ class GitRepo { public: // Stores the data for defining a single Git tree entry, which consists of // a name (flat basename) and an object type (file/executable/tree). - struct tree_entry_t { - tree_entry_t(std::string n, ObjectType t) - : name{std::move(n)}, type{t} {} + struct TreeEntry { + TreeEntry(std::string n, ObjectType t) : name{std::move(n)}, type{t} {} std::string name; ObjectType type; - [[nodiscard]] auto operator==(tree_entry_t const& other) const noexcept + [[nodiscard]] auto operator==(TreeEntry const& other) const noexcept -> bool { return name == other.name and type == other.type; } @@ -58,11 +58,11 @@ class GitRepo { // Note that sharding by id is used as this format enables a more efficient // internal implementation for creating trees. using tree_entries_t = - std::unordered_map>; + std::unordered_map>; // Stores the info of an object read by its path. struct TreeEntryInfo { - std::string id{}; + std::string id; ObjectType type; // if type is symlink, read it in advance std::optional symlink_content{std::nullopt}; @@ -71,7 +71,7 @@ class GitRepo { // Checks whether a list of symlinks given by their hashes are // non-upwards, based on content read from an actual backend. using SymlinksCheckFunc = - std::function const&)>; + std::function const&)>; GitRepo() = delete; // no default ctor ~GitRepo() noexcept = default; @@ -378,13 +378,19 @@ class GitRepo { std::filesystem::path const& dir, anon_logger_ptr const& logger) noexcept -> std::optional; - /// \brief Helper function to allocate and populate the char** pointer of a - /// git_strarray from a vector of standard strings. User MUST use - /// git_strarray_dispose to deallocate the inner pointer when the strarray - /// is not needed anymore! 
- static void PopulateStrarray( - git_strarray* array, - std::vector const& string_list) noexcept; + class GitStrArray final { + public: + void AddEntry(std::string entry) { + char* const entry_ptr = + entries_.emplace_back(std::move(entry)).data(); + entry_pointers_.push_back(entry_ptr); + } + [[nodiscard]] auto Get() & noexcept -> git_strarray; + + private: + std::vector entries_; + std::vector entry_pointers_; + }; }; #endif // INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_GIT_REPO_HPP diff --git a/src/buildtool/file_system/git_tree.cpp b/src/buildtool/file_system/git_tree.cpp index e7a2f41db..1f916dc15 100644 --- a/src/buildtool/file_system/git_tree.cpp +++ b/src/buildtool/file_system/git_tree.cpp @@ -14,7 +14,9 @@ #include "src/buildtool/file_system/git_tree.hpp" +#include #include +#include #include "src/buildtool/common/artifact_digest.hpp" #include "src/buildtool/logging/log_level.hpp" @@ -34,7 +36,6 @@ namespace { return (normalized / "").parent_path(); // strip trailing slash } -// NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] auto LookupEntryPyPath( GitTree const& tree, std::filesystem::path::const_iterator it, @@ -55,6 +56,24 @@ namespace { return entry; } +class SymlinksChecker final { + public: + explicit SymlinksChecker(gsl::not_null const& cas) noexcept + : cas_{*cas} {} + + [[nodiscard]] auto operator()( + std::vector const& ids) const noexcept -> bool { + return std::all_of( + ids.begin(), ids.end(), [&cas = cas_](ArtifactDigest const& id) { + auto content = cas.ReadObject(id.hash(), /*is_hex_id=*/true); + return content.has_value() and PathIsNonUpwards(*content); + }); + }; + + private: + GitCAS const& cas_; +}; + } // namespace auto GitTree::Read(std::filesystem::path const& repo_path, @@ -70,22 +89,11 @@ auto GitTree::Read(std::filesystem::path const& repo_path, auto GitTree::Read(gsl::not_null const& cas, std::string const& tree_id, bool ignore_special) noexcept -> std::optional { - // create symlinks checker - auto check_symlinks = [&cas](std::vector 
const& ids) { - for (auto const& id : ids) { - auto content = - cas->ReadObject(ArtifactDigest(id).hash(), /*is_hex_id=*/true); - if (not content or not PathIsNonUpwards(*content)) { - return false; - } - } - return true; - }; if (auto raw_id = FromHexString(tree_id)) { auto repo = GitRepo::Open(cas); if (repo != std::nullopt) { if (auto entries = repo->ReadTree(*raw_id, - check_symlinks, + SymlinksChecker{cas}, /*is_hex_id=*/false, ignore_special)) { // NOTE: the raw_id value is NOT recomputed when @@ -146,20 +154,9 @@ auto GitTreeEntry::Tree(bool ignore_special) const& noexcept if (repo == std::nullopt) { return std::nullopt; } - // create symlinks checker - auto check_symlinks = - [cas = cas_](std::vector const& ids) { - for (auto const& id : ids) { - auto content = cas->ReadObject( - ArtifactDigest(id).hash(), /*is_hex_id=*/true); - if (not content or not PathIsNonUpwards(*content)) { - return false; - } - } - return true; - }; + if (auto entries = repo->ReadTree(raw_id_, - check_symlinks, + SymlinksChecker{cas_}, /*is_hex_id=*/false, ignore_special)) { return GitTree::FromEntries( diff --git a/src/buildtool/file_system/git_tree.hpp b/src/buildtool/file_system/git_tree.hpp index f928b7d77..ac14e239e 100644 --- a/src/buildtool/file_system/git_tree.hpp +++ b/src/buildtool/file_system/git_tree.hpp @@ -158,7 +158,7 @@ class GitTreeEntry { gsl::not_null cas_; std::string raw_id_; ObjectType type_; - AtomicValue> tree_cached_{}; + AtomicValue> tree_cached_; }; using GitTreePtr = std::shared_ptr; diff --git a/src/buildtool/file_system/git_types.hpp b/src/buildtool/file_system/git_types.hpp index 529743974..2189c218f 100644 --- a/src/buildtool/file_system/git_types.hpp +++ b/src/buildtool/file_system/git_types.hpp @@ -15,7 +15,9 @@ #ifndef INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_GIT_TYPES_HPP #define INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_GIT_TYPES_HPP -enum class GitLookupError { +#include + +enum class GitLookupError : std::uint8_t { Fatal = 0, NotFound = 1, }; diff --git 
a/src/buildtool/file_system/git_utils.cpp b/src/buildtool/file_system/git_utils.cpp index 8fed7d7b4..8612d039f 100644 --- a/src/buildtool/file_system/git_utils.cpp +++ b/src/buildtool/file_system/git_utils.cpp @@ -32,8 +32,8 @@ constexpr std::size_t kOIDHexSize{GIT_OID_HEXSZ}; auto GitLastError() noexcept -> std::string { #ifndef BOOTSTRAP_BUILD_TOOL - git_error const* err{nullptr}; - if ((err = git_error_last()) != nullptr and err->message != nullptr) { + git_error const* const err = git_error_last(); + if (err != nullptr and err->message != nullptr) { return fmt::format("error code {}: {}", err->klass, err->message); } #endif // BOOTSTRAP_BUILD_TOOL @@ -82,38 +82,6 @@ void tree_closer(gsl::owner tree) { #endif } -void treebuilder_closer(gsl::owner builder) { -#ifndef BOOTSTRAP_BUILD_TOOL - git_treebuilder_free(builder); -#endif -} - -void index_closer(gsl::owner index) { -#ifndef BOOTSTRAP_BUILD_TOOL - git_index_free(index); -#endif -} - -void strarray_closer(gsl::owner strarray) { -#ifndef BOOTSTRAP_BUILD_TOOL - git_strarray_dispose(strarray); -#endif -} - -void strarray_deleter(gsl::owner strarray) { -#ifndef BOOTSTRAP_BUILD_TOOL - if (strarray->strings != nullptr) { - for (std::size_t i = 0; i < strarray->count; ++i) { - // NOLINTNEXTLINE(cppcoreguidelines-owning-memory,cppcoreguidelines-pro-bounds-pointer-arithmetic) - delete[] strarray->strings[i]; - } - delete[] strarray->strings; - strarray->strings = nullptr; - strarray->count = 0; - } -#endif -} - void signature_closer(gsl::owner signature) { #ifndef BOOTSTRAP_BUILD_TOOL git_signature_free(signature); diff --git a/src/buildtool/file_system/git_utils.hpp b/src/buildtool/file_system/git_utils.hpp index b2c5173f6..916e9fbee 100644 --- a/src/buildtool/file_system/git_utils.hpp +++ b/src/buildtool/file_system/git_utils.hpp @@ -25,9 +25,6 @@ extern "C" { struct git_oid; struct git_odb; struct git_tree; -struct git_treebuilder; -struct git_index; -struct git_strarray; struct git_signature; struct git_object; 
struct git_remote; @@ -52,16 +49,6 @@ void odb_closer(gsl::owner odb); void tree_closer(gsl::owner tree); -void treebuilder_closer(gsl::owner builder); - -void index_closer(gsl::owner index); - -// to be used for strarrays allocated by libgit2 -void strarray_closer(gsl::owner strarray); - -// to be used for strarrays allocated manually -void strarray_deleter(gsl::owner strarray); - void signature_closer(gsl::owner signature); void object_closer(gsl::owner object); diff --git a/src/buildtool/file_system/object_cas.hpp b/src/buildtool/file_system/object_cas.hpp index 95091773b..2616814ca 100644 --- a/src/buildtool/file_system/object_cas.hpp +++ b/src/buildtool/file_system/object_cas.hpp @@ -23,7 +23,7 @@ #include "gsl/gsl" #include "src/buildtool/common/artifact_digest.hpp" -#include "src/buildtool/common/bazel_types.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/file_storage.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" @@ -43,7 +43,7 @@ class ObjectCAS { public: /// \brief Callback type for checking blob existence. /// \returns true if a blob for the given digest exists at the given path. - using ExistsFunc = std::function; /// \brief Create new object CAS in store_path directory. @@ -53,13 +53,13 @@ class ObjectCAS { /// \param store_path The path to use for storing blobs. /// \param exists (optional) Function for checking blob existence. explicit ObjectCAS( - HashFunction hash_function, + gsl::not_null const& hash_function, std::filesystem::path const& store_path, std::optional> exists = std::nullopt) : file_store_{store_path}, exists_{exists.has_value() ? std::move(exists)->get() : kDefaultExists}, - hash_function_{hash_function} {} + hash_function_{*hash_function} {} ObjectCAS(ObjectCAS const&) = delete; ObjectCAS(ObjectCAS&&) = delete; @@ -77,7 +77,7 @@ class ObjectCAS { /// \param bytes The bytes do create the blob from. 
/// \returns Digest of the stored blob or nullopt in case of error. [[nodiscard]] auto StoreBlobFromBytes(std::string const& bytes) - const noexcept -> std::optional { + const noexcept -> std::optional { return StoreBlob(bytes, /*is_owner=*/true); } @@ -87,16 +87,16 @@ class ObjectCAS { /// \returns Digest of the stored blob or nullopt in case of error. [[nodiscard]] auto StoreBlobFromFile(std::filesystem::path const& file_path, bool is_owner = false) const noexcept - -> std::optional { + -> std::optional { return StoreBlob(file_path, is_owner); } /// \brief Get path to blob. /// \param digest Digest of the blob to lookup. /// \returns Path to blob if found or nullopt otherwise. - [[nodiscard]] auto BlobPath(bazel_re::Digest const& digest) const noexcept + [[nodiscard]] auto BlobPath(ArtifactDigest const& digest) const noexcept -> std::optional { - auto id = NativeSupport::Unprefix(digest.hash()); + auto const& id = digest.hash(); auto blob_path = file_store_.GetPath(id); if (not IsAvailable(digest, blob_path)) { logger_.Emit(LogLevel::Debug, "Blob not found {}", id); @@ -115,7 +115,7 @@ class ObjectCAS { FileStorage file_store_; gsl::not_null exists_; - HashFunction const hash_function_; + HashFunction const& hash_function_; /// Default callback for checking blob existence. 
static inline ExistsFunc const kDefaultExists = [](auto const& /*digest*/, @@ -124,17 +124,18 @@ class ObjectCAS { }; [[nodiscard]] auto CreateDigest(std::string const& bytes) const noexcept - -> std::optional { - return ArtifactDigest::Create(hash_function_, bytes); + -> std::optional { + return ArtifactDigestFactory::HashDataAs(hash_function_, bytes); } [[nodiscard]] auto CreateDigest(std::filesystem::path const& file_path) - const noexcept -> std::optional { - return ArtifactDigest::CreateFromFile(hash_function_, file_path); + const noexcept -> std::optional { + return ArtifactDigestFactory::HashFileAs(hash_function_, + file_path); } [[nodiscard]] auto IsAvailable( - bazel_re::Digest const& digest, + ArtifactDigest const& digest, std::filesystem::path const& path) const noexcept -> bool { try { return std::invoke(exists_.get(), digest, path); @@ -160,9 +161,9 @@ class ObjectCAS { /// \brief Store blob from unspecified data to storage. template [[nodiscard]] auto StoreBlob(T const& data, bool is_owner) const noexcept - -> std::optional { + -> std::optional { if (auto digest = CreateDigest(data)) { - auto id = NativeSupport::Unprefix(digest->hash()); + auto const& id = digest->hash(); if (IsAvailable(*digest, file_store_.GetPath(id))) { return digest; } diff --git a/src/buildtool/file_system/symlinks_map/TARGETS b/src/buildtool/file_system/symlinks_map/TARGETS index 0d958168a..aa1b400b0 100644 --- a/src/buildtool/file_system/symlinks_map/TARGETS +++ b/src/buildtool/file_system/symlinks_map/TARGETS @@ -16,7 +16,6 @@ , "stage": ["src", "buildtool", "file_system", "symlinks_map"] , "private-deps": [ ["@", "fmt", "", "fmt"] - , ["src/buildtool/file_system", "object_type"] , ["src/buildtool/storage", "config"] , ["src/utils/cpp", "gsl"] ] diff --git a/src/buildtool/file_system/symlinks_map/resolve_symlinks_map.cpp b/src/buildtool/file_system/symlinks_map/resolve_symlinks_map.cpp index 198629bbe..f130df718 100644 --- 
a/src/buildtool/file_system/symlinks_map/resolve_symlinks_map.cpp +++ b/src/buildtool/file_system/symlinks_map/resolve_symlinks_map.cpp @@ -129,9 +129,7 @@ void ResolveKnownEntry(GitObjectToResolve const& obj, } auto children = source_git_repo->ReadTree( entry_info.id, - [](std::vector const& /*unused*/) { - return true; - }, + [](auto const& /*unused*/) { return true; }, /*is_hex_id=*/true); if (not children) { (*logger)(fmt::format("ResolveSymlinks: failed to read entries of " @@ -159,7 +157,7 @@ void ResolveKnownEntry(GitObjectToResolve const& obj, std::make_optional(GitRepo::TreeEntryInfo{ .id = ToHexString(raw_id), .type = e.type, - .symlink_content = *target}), + .symlink_content = std::move(target)}), obj.source_cas, obj.target_cas); } diff --git a/src/buildtool/file_system/symlinks_map/resolve_symlinks_map.hpp b/src/buildtool/file_system/symlinks_map/resolve_symlinks_map.hpp index fc7062687..8145097d4 100644 --- a/src/buildtool/file_system/symlinks_map/resolve_symlinks_map.hpp +++ b/src/buildtool/file_system/symlinks_map/resolve_symlinks_map.hpp @@ -37,7 +37,7 @@ /// resolved entries being made available in the target Git repository. 
struct GitObjectToResolve { // hash of the root tree - std::string root_tree_id{}; /* key */ + std::string root_tree_id; /* key */ // path of this object relative to root tree, in normal form std::filesystem::path rel_path{"."}; /* key */ // how the tree should be resolved @@ -47,11 +47,11 @@ struct GitObjectToResolve { std::optional known_info{std::nullopt}; // object db to use as source of unresolved entries; it is guaranteed that // this repository is treated as read-only if it differs from target_cas - GitCASPtr source_cas{}; + GitCASPtr source_cas; // object db to use as target for resolved entries; can be the same as // source_cas and usually it is the Git cache; as the caller has access to // such a pointer, it reduces the overhead from opening the Git cache often - GitCASPtr target_cas{}; + GitCASPtr target_cas; GitObjectToResolve() = default; // needed for cycle detection only! diff --git a/src/buildtool/graph_traverser/TARGETS b/src/buildtool/graph_traverser/TARGETS index 3c64489b4..6ea498c27 100644 --- a/src/buildtool/graph_traverser/TARGETS +++ b/src/buildtool/graph_traverser/TARGETS @@ -3,26 +3,29 @@ , "name": ["graph_traverser"] , "hdrs": ["graph_traverser.hpp"] , "deps": - [ ["src/buildtool/common", "cli"] + [ ["@", "fmt", "", "fmt"] + , ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common", "cli"] , ["src/buildtool/common", "common"] , ["src/buildtool/common", "tree"] - , ["src/buildtool/execution_engine/dag", "dag"] - , ["src/buildtool/execution_engine/executor", "context"] - , ["src/buildtool/execution_engine/executor", "executor"] - , ["src/buildtool/execution_engine/traverser", "traverser"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/execution_api/common", "artifact_blob_container"] , ["src/buildtool/execution_api/common", "common"] , ["src/buildtool/execution_api/common", "common_api"] , ["src/buildtool/execution_api/remote", "config"] , 
["src/buildtool/execution_api/utils", "subobject"] + , ["src/buildtool/execution_engine/dag", "dag"] + , ["src/buildtool/execution_engine/executor", "context"] + , ["src/buildtool/execution_engine/executor", "executor"] + , ["src/buildtool/execution_engine/traverser", "traverser"] , ["src/buildtool/file_system", "file_system_manager"] - , ["src/buildtool/file_system", "object_type"] , ["src/buildtool/file_system", "jsonfs"] - , ["src/utils/cpp", "json"] - , ["@", "fmt", "", "fmt"] - , ["src/buildtool/common", "common"] + , ["src/buildtool/file_system", "object_type"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] , ["src/buildtool/progress_reporting", "base_progress_reporter"] + , ["src/utils/cpp", "json"] ] , "stage": ["src", "buildtool", "graph_traverser"] } diff --git a/src/buildtool/graph_traverser/graph_traverser.hpp b/src/buildtool/graph_traverser/graph_traverser.hpp index b4268111d..4821e6042 100644 --- a/src/buildtool/graph_traverser/graph_traverser.hpp +++ b/src/buildtool/graph_traverser/graph_traverser.hpp @@ -17,6 +17,7 @@ #include #include +#include #include #include #include @@ -32,8 +33,10 @@ #include "fmt/core.h" #include "gsl/gsl" #include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/common/cli.hpp" #include "src/buildtool/common/tree.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/common/artifact_blob_container.hpp" #include "src/buildtool/execution_api/common/common_api.hpp" #include "src/buildtool/execution_api/common/execution_api.hpp" @@ -178,10 +181,13 @@ class GraphTraverser { } auto const [blobs, tree_descs, actions] = *desc; + HashFunction::Type const hash_type = + context_.apis->hash_function.GetType(); std::vector action_descriptions{}; action_descriptions.reserve(actions.size()); for (auto const& [id, description] : actions.items()) { - auto action = 
ActionDescription::FromJson(id, description); + auto action = + ActionDescription::FromJson(hash_type, id, description); if (not action) { return std::nullopt; // Error already logged } @@ -190,7 +196,7 @@ class GraphTraverser { std::vector trees{}; for (auto const& [id, description] : tree_descs.items()) { - auto tree = Tree::FromJson(id, description); + auto tree = Tree::FromJson(hash_type, id, description); if (not tree) { return std::nullopt; } @@ -199,7 +205,8 @@ class GraphTraverser { std::map artifact_descriptions{}; for (auto const& [rel_path, description] : artifacts.items()) { - auto artifact = ArtifactDescription::FromJson(description); + auto artifact = + ArtifactDescription::FromJson(hash_type, description); if (not artifact) { return std::nullopt; // Error already logged } @@ -274,11 +281,11 @@ class GraphTraverser { std::vector const& blobs) const noexcept -> bool { ArtifactBlobContainer container; for (auto const& blob : blobs) { - auto digest = ArtifactDigest::Create( + auto digest = ArtifactDigestFactory::HashDataAs( context_.apis->hash_function, blob); Logger::Log(logger_, LogLevel::Trace, [&]() { return fmt::format( - "Uploaded blob {}, its digest has id {} and size {}.", + "Will upload blob {}, its digest has id {} and size {}.", nlohmann::json(blob).dump(), digest.hash(), digest.size()); @@ -298,7 +305,16 @@ class GraphTraverser { } } // Upload remaining blobs. - return context_.apis->remote->Upload(std::move(container)); + auto result = context_.apis->remote->Upload(std::move(container)); + Logger::Log(logger_, LogLevel::Trace, [&]() { + std::stringstream msg{}; + msg << (result ? 
"Finished" : "Failed") << " upload of\n"; + for (auto const& blob : blobs) { + msg << " - " << nlohmann::json(blob).dump() << "\n"; + } + return msg.str(); + }); + return result; } /// \brief Adds the artifacts to be retrieved to the graph @@ -500,12 +516,13 @@ class GraphTraverser { } // split extra artifacts' nodes from artifact nodes + auto const it_extra = + std::next(artifact_nodes->begin(), + static_cast(output_paths.size())); auto extra_nodes = std::vector{ - std::make_move_iterator(artifact_nodes->begin() + - output_paths.size()), + std::make_move_iterator(it_extra), std::make_move_iterator(artifact_nodes->end())}; - artifact_nodes->erase(artifact_nodes->begin() + output_paths.size(), - artifact_nodes->end()); + artifact_nodes->erase(it_extra, artifact_nodes->end()); return std::make_tuple(std::move(output_paths), std::move(*artifact_nodes), @@ -568,7 +585,7 @@ class GraphTraverser { return std::nullopt; } - return std::move(*output_paths); + return output_paths; } void PrintOutputs( @@ -681,7 +698,7 @@ class GraphTraverser { auto info = artifacts[i]->Content().Info(); if (info) { auto new_info = - RetrieveSubPathId(*info, remote, relpath); + RetrieveSubPathId(*info, *context_.apis, relpath); if (new_info) { if (not remote.RetrieveToFds({*new_info}, {dup(fileno(stdout))}, diff --git a/src/buildtool/logging/log_config.hpp b/src/buildtool/logging/log_config.hpp index c37fc535c..341189ce6 100644 --- a/src/buildtool/logging/log_config.hpp +++ b/src/buildtool/logging/log_config.hpp @@ -26,10 +26,10 @@ /// The entire class is thread-safe. 
class LogConfig { struct ConfigData { - std::mutex mutex{}; + std::mutex mutex; LogLevel log_limit{LogLevel::Info}; - std::vector sinks{}; - std::vector factories{}; + std::vector sinks; + std::vector factories; }; public: diff --git a/src/buildtool/logging/log_level.hpp b/src/buildtool/logging/log_level.hpp index 65b7b7c25..12e4af7f0 100644 --- a/src/buildtool/logging/log_level.hpp +++ b/src/buildtool/logging/log_level.hpp @@ -17,12 +17,13 @@ #include #include +#include #include #include #include "gsl/gsl" -enum class LogLevel { +enum class LogLevel : std::uint8_t { Error, ///< Error messages, fatal errors Warning, ///< Warning messages, recoverable situations that shouldn't occur Info, ///< Informative messages, such as reporting status or statistics diff --git a/src/buildtool/logging/log_sink_cmdline.hpp b/src/buildtool/logging/log_sink_cmdline.hpp index a30aee562..3740eb160 100644 --- a/src/buildtool/logging/log_sink_cmdline.hpp +++ b/src/buildtool/logging/log_sink_cmdline.hpp @@ -91,7 +91,7 @@ class LogSinkCmdLine final : public ILogSink { private: bool colored_{}; - std::optional restrict_level_{}; + std::optional restrict_level_; [[nodiscard]] auto FormatPrefix(LogLevel level, std::string const& prefix) const noexcept -> std::string { diff --git a/src/buildtool/logging/log_sink_file.hpp b/src/buildtool/logging/log_sink_file.hpp index f99ce7697..c6f0126c7 100644 --- a/src/buildtool/logging/log_sink_file.hpp +++ b/src/buildtool/logging/log_sink_file.hpp @@ -15,6 +15,7 @@ #ifndef INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_SINK_FILE_HPP #define INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_SINK_FILE_HPP +#include #include #include #include @@ -39,12 +40,12 @@ #include "src/buildtool/logging/logger.hpp" /// \brief Thread-safe map of mutexes. -template +template class MutexMap { public: /// \brief Create mutex for key and run callback if successfully created. /// Callback is executed while the internal map is still held exclusively. 
- void Create(T_Key const& key, std::function const& callback) { + void Create(TKey const& key, std::function const& callback) { std::lock_guard lock(mutex_); if (not map_.contains(key)) { [[maybe_unused]] auto& mutex = map_[key]; @@ -52,19 +53,19 @@ class MutexMap { } } /// \brief Get mutex for key, creates mutex if key does not exist. - [[nodiscard]] auto Get(T_Key const& key) noexcept -> std::mutex& { + [[nodiscard]] auto Get(TKey const& key) noexcept -> std::mutex& { std::lock_guard lock(mutex_); return map_[key]; } private: - std::mutex mutex_{}; - std::unordered_map map_{}; + std::mutex mutex_; + std::unordered_map map_; }; class LogSinkFile final : public ILogSink { public: - enum class Mode { + enum class Mode : std::uint8_t { Append, ///< Append if log file already exists. Overwrite ///< Overwrite log file with each new program instantiation. }; @@ -139,7 +140,7 @@ class LogSinkFile final : public ILogSink { } private: - std::string file_path_{}; + std::string file_path_; [[nodiscard]] static auto FileMutexes() noexcept -> MutexMap& { static MutexMap instance{}; diff --git a/src/buildtool/logging/logger.hpp b/src/buildtool/logging/logger.hpp index bc380e6a6..613f2b257 100644 --- a/src/buildtool/logging/logger.hpp +++ b/src/buildtool/logging/logger.hpp @@ -67,13 +67,13 @@ class Logger { void SetLogLimit(LogLevel level) noexcept { log_limit_ = level; } /// \brief Emit log message from string via this logger instance. - template + template void Emit(LogLevel level, std::string const& msg, - T_Args&&... args) const noexcept { + TArgs&&... args) const noexcept { if (static_cast(level) <= static_cast(log_limit_)) { FormatAndForward( - this, sinks_, level, msg, std::forward(args)...); + this, sinks_, level, msg, std::forward(args)...); } } @@ -86,17 +86,17 @@ class Logger { } /// \brief Log message from string via LogConfig's sinks and log limit. - template + template static void Log(LogLevel level, std::string const& msg, - T_Args&&... 
args) noexcept { + TArgs&&... args) noexcept { if (static_cast(level) <= static_cast(LogConfig::LogLimit())) { FormatAndForward(nullptr, LogConfig::Sinks(), level, msg, - std::forward(args)...); + std::forward(args)...); } } @@ -112,11 +112,11 @@ class Logger { /// \brief Generic logging method. Provides a common interface between the /// global logger and named instances, hidden from the outside caller. /// For named instances no global configuration is used. - template + template static void Log(Logger const* logger, LogLevel level, std::string const& msg, - T_Args&&... args) noexcept { + TArgs&&... args) noexcept { if (static_cast(level) <= static_cast(logger != nullptr ? logger->log_limit_ : LogConfig::LogLimit())) { @@ -125,7 +125,7 @@ class Logger { logger != nullptr ? logger->sinks_ : LogConfig::Sinks(), level, msg, - std::forward(args)...); + std::forward(args)...); } } @@ -148,18 +148,20 @@ class Logger { } private: - std::string name_{}; + std::string name_; LogLevel log_limit_{}; - std::vector sinks_{}; + std::vector sinks_; /// \brief Format message and forward to sinks. - template - static void FormatAndForward(Logger const* logger, - std::vector const& sinks, - LogLevel level, - std::string const& msg, - T_Args&&... args) noexcept { - if constexpr (sizeof...(T_Args) == 0) { + template + static void FormatAndForward( + Logger const* logger, + std::vector const& sinks, + LogLevel level, + std::string const& msg, + // NOLINTNEXTLINE(cppcoreguidelines-missing-std-forward) + TArgs&&... 
args) noexcept { + if constexpr (sizeof...(TArgs) == 0) { // forward to sinks std::for_each(sinks.cbegin(), sinks.cend(), [&](auto& sink) { sink->Emit(logger, level, msg); diff --git a/src/buildtool/main/TARGETS b/src/buildtool/main/TARGETS index b3c893912..e25e16776 100644 --- a/src/buildtool/main/TARGETS +++ b/src/buildtool/main/TARGETS @@ -4,54 +4,61 @@ , "name": ["just"] , "srcs": ["main.cpp"] , "private-deps": - [ ["@", "fmt", "", "fmt"] + [ "add_to_cas" + , "analyse" + , "analyse_context" + , "build_utils" + , "cli" + , "common" + , "constants" + , "describe" + , "diagnose" + , "install_cas" + , "retry" + , "serve" + , "version" + , ["@", "fmt", "", "fmt"] , ["@", "gsl", "", "gsl"] , ["@", "json", "", "json"] + , ["src/buildtool/auth", "auth"] + , ["src/buildtool/build_engine/base_maps", "entity_name"] + , ["src/buildtool/build_engine/expression", "expression"] + , ["src/buildtool/build_engine/target_map", "result_map"] + , ["src/buildtool/build_engine/target_map", "target_map"] + , ["src/buildtool/common", "artifact_description"] , ["src/buildtool/common", "common"] , ["src/buildtool/common", "config"] + , ["src/buildtool/common", "protocol_traits"] , ["src/buildtool/common/remote", "remote_common"] - , ["src/buildtool/storage", "storage"] - , ["src/buildtool/compatibility", "compatibility"] - , ["src/buildtool/graph_traverser", "graph_traverser"] - , ["src/buildtool/logging", "log_level"] - , ["src/buildtool/logging", "logging"] - , ["src/buildtool/progress_reporting", "progress"] - , ["src/buildtool/progress_reporting", "progress_reporter"] - , ["src/buildtool/build_engine/target_map", "result_map"] - , ["src/buildtool/build_engine/target_map", "target_map"] - , ["src/buildtool/multithreading", "task_system"] - , ["src/utils/cpp", "concepts"] - , ["src/utils/cpp", "json"] - , ["src/buildtool/auth", "auth"] - , ["src/buildtool/execution_engine/executor", "context"] + , ["src/buildtool/common/remote", "retry_config"] + , ["src/buildtool/crypto", 
"hash_function"] + , ["src/buildtool/execution_api/common", "api_bundle"] , [ "src/buildtool/execution_api/execution_service" , "server_implementation" ] , ["src/buildtool/execution_api/local", "config"] , ["src/buildtool/execution_api/local", "context"] , ["src/buildtool/execution_api/remote", "config"] + , ["src/buildtool/execution_api/remote", "context"] + , ["src/buildtool/execution_engine/executor", "context"] , ["src/buildtool/file_system", "file_root"] + , ["src/buildtool/graph_traverser", "graph_traverser"] + , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] + , ["src/buildtool/multithreading", "async_map_consumer"] + , ["src/buildtool/multithreading", "task_system"] + , ["src/buildtool/progress_reporting", "progress"] + , ["src/buildtool/progress_reporting", "progress_reporter"] , ["src/buildtool/serve_api/remote", "config"] + , ["src/buildtool/serve_api/remote", "serve_api"] , ["src/buildtool/serve_api/serve_service", "serve_server_implementation"] - , ["src/buildtool/storage", "file_chunker"] , ["src/buildtool/storage", "backend_description"] - , ["src/buildtool/serve_api/remote", "serve_api"] - , ["src/buildtool/execution_api/common", "api_bundle"] + , ["src/buildtool/storage", "config"] + , ["src/buildtool/storage", "file_chunker"] + , ["src/buildtool/storage", "storage"] + , ["src/utils/cpp", "concepts"] , ["src/utils/cpp", "gsl"] - , ["src/buildtool/crypto", "hash_function"] - , "common" - , "cli" - , "version" - , "analyse" - , "analyse_context" - , "add_to_cas" - , "install_cas" - , "describe" - , "diagnose" - , "constants" - , "serve" - , "build_utils" - , "retry" + , ["src/utils/cpp", "json"] ] , "stage": ["src", "buildtool", "main"] , "private-ldflags": @@ -69,8 +76,8 @@ , "srcs": ["retry.cpp"] , "stage": ["src", "buildtool", "main"] , "deps": - [ ["src/buildtool/common/remote", "retry_config"] - , ["src/buildtool/common", "retry_cli"] + [ ["src/buildtool/common", "retry_cli"] + , ["src/buildtool/common/remote", 
"retry_config"] ] , "private-deps": [ ["src/buildtool/logging", "log_level"] @@ -87,18 +94,18 @@ , ["src/buildtool/build_engine/base_maps", "entity_name"] , ["src/buildtool/build_engine/target_map", "configured_target"] , ["src/buildtool/common", "config"] - , ["src/buildtool/serve_api/remote", "serve_api"] , ["src/buildtool/execution_api/common", "api_bundle"] + , ["src/buildtool/serve_api/remote", "serve_api"] ] , "stage": ["src", "buildtool", "main"] , "private-deps": - [ ["@", "json", "", "json"] + [ "common" + , ["@", "json", "", "json"] , ["src/buildtool/build_engine/base_maps", "rule_map"] , ["src/buildtool/build_engine/base_maps", "targets_file_map"] , ["src/buildtool/build_engine/target_map", "target_map"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , "common" ] } , "common": @@ -115,8 +122,8 @@ , "stage": ["src", "buildtool", "main"] , "deps": [["src/buildtool/common", "cli"]] , "private-deps": - [ ["@", "gsl", "", "gsl"] - , "common" + [ "common" + , ["@", "gsl", "", "gsl"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] ] @@ -129,18 +136,18 @@ , "deps": [ ["src/buildtool/common", "cli"] , ["src/buildtool/common", "common"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/execution_api/common", "api_bundle"] , ["src/buildtool/execution_api/remote", "context"] ] , "stage": ["src", "buildtool", "main"] , "private-deps": - [ ["src/buildtool/compatibility", "compatibility"] - , ["src/buildtool/crypto", "hash_function"] + [ "archive" + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/execution_api/common", "common"] + , ["src/buildtool/execution_api/utils", "subobject"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["src/buildtool/execution_api/utils", "subobject"] - , ["src/buildtool/execution_api/common", "common"] - , "archive" ] } , "add_to_cas": @@ -154,7 +161,9 @@ , ["src/buildtool/storage", "storage"] ] , 
"private-deps": - [ ["src/buildtool/compatibility", "compatibility"] + [ ["src/buildtool/common", "common"] + , ["src/buildtool/common", "protocol_traits"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"] , ["src/buildtool/execution_api/common", "common"] , ["src/buildtool/file_system", "file_system_manager"] @@ -171,31 +180,29 @@ , "deps": [ "analyse_context" , ["@", "gsl", "", "gsl"] - , ["src/buildtool/common", "cli"] + , ["src/buildtool/build_engine/analysed_target", "target"] , ["src/buildtool/build_engine/target_map", "absent_target_map"] , ["src/buildtool/build_engine/target_map", "configured_target"] , ["src/buildtool/build_engine/target_map", "result_map"] - , ["src/buildtool/build_engine/analysed_target", "target"] + , ["src/buildtool/common", "cli"] , ["src/buildtool/logging", "logging"] ] , "stage": ["src", "buildtool", "main"] , "private-deps": - [ ["src/buildtool/multithreading", "async_map_consumer"] - , ["src/buildtool/multithreading", "async_map_utils"] - , ["src/buildtool/multithreading", "task_system"] + [ ["src/buildtool/build_engine/base_maps", "directory_map"] , ["src/buildtool/build_engine/base_maps", "entity_name"] , ["src/buildtool/build_engine/base_maps", "expression_map"] - , ["src/buildtool/build_engine/base_maps", "directory_map"] , ["src/buildtool/build_engine/base_maps", "rule_map"] , ["src/buildtool/build_engine/base_maps", "source_map"] , ["src/buildtool/build_engine/base_maps", "targets_file_map"] , ["src/buildtool/build_engine/target_map", "target_map"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/multithreading", "async_map_consumer"] + , ["src/buildtool/multithreading", "async_map_utils"] + , ["src/buildtool/multithreading", "task_system"] , ["src/buildtool/progress_reporting", "exports_progress_reporter"] - , ["src/buildtool/progress_reporting", "progress"] , ["src/buildtool/serve_api/remote", "config"] - 
, ["src/buildtool/common", "common"] - , ["src/buildtool/common", "config"] , ["src/buildtool/storage", "storage"] ] } @@ -207,10 +214,9 @@ [ ["@", "gsl", "", "gsl"] , ["src/buildtool/common", "common"] , ["src/buildtool/common", "config"] - , ["src/buildtool/progress_reporting", "exports_progress_reporter"] , ["src/buildtool/progress_reporting", "progress"] - , ["src/buildtool/storage", "storage"] , ["src/buildtool/serve_api/remote", "serve_api"] + , ["src/buildtool/storage", "storage"] ] , "stage": ["src", "buildtool", "main"] } @@ -222,15 +228,15 @@ , "stage": ["src", "buildtool", "main"] , "deps": [ "analyse" - , ["src/buildtool/common", "cli"] , ["src/buildtool/build_engine/target_map", "result_map"] + , ["src/buildtool/common", "cli"] ] , "private-deps": - [ ["src/utils/cpp", "json"] - , ["@", "gsl", "", "gsl"] + [ ["@", "gsl", "", "gsl"] , ["@", "json", "", "json"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] + , ["src/utils/cpp", "json"] ] } , "version": @@ -284,16 +290,17 @@ , "name": ["serve"] , "hdrs": ["serve.hpp"] , "srcs": ["serve.cpp"] - , "deps": [["@", "gsl", "", "gsl"], "cli"] + , "deps": ["cli", ["@", "gsl", "", "gsl"]] , "stage": ["src", "buildtool", "main"] , "private-deps": - [ ["@", "json", "", "json"] + [ "common" + , ["@", "json", "", "json"] , ["src/buildtool/build_engine/expression", "expression"] , ["src/buildtool/common", "location"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , "common" ] } , "build_utils": @@ -307,15 +314,16 @@ , ["src/buildtool/common", "common"] , ["src/buildtool/common", "config"] , ["src/buildtool/execution_api/common", "api_bundle"] + , ["src/buildtool/execution_api/common", "common"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] , ["src/buildtool/multithreading", "async_map_consumer"] , 
["src/buildtool/storage", "storage"] ] , "stage": ["src", "buildtool", "main"] , "private-deps": - [ ["src/buildtool/multithreading", "async_map_utils"] - , ["src/buildtool/execution_api/common", "common"] - , ["src/buildtool/logging", "log_level"] + [ ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/multithreading", "async_map_utils"] ] } , "archive": @@ -325,12 +333,18 @@ , "srcs": ["archive.cpp"] , "deps": [ ["src/buildtool/common", "common"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/execution_api/common", "common"] ] , "stage": ["src", "buildtool", "main"] , "private-deps": [ ["", "libarchive"] + , ["src/buildtool/common", "artifact_digest_factory"] , ["src/buildtool/file_system", "git_repo"] + , ["src/buildtool/file_system", "object_type"] + , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] + , ["src/utils/cpp", "expected"] , ["src/utils/cpp", "hex_string"] ] } diff --git a/src/buildtool/main/add_to_cas.cpp b/src/buildtool/main/add_to_cas.cpp index 7a20af656..819fa5327 100644 --- a/src/buildtool/main/add_to_cas.cpp +++ b/src/buildtool/main/add_to_cas.cpp @@ -21,7 +21,9 @@ #include #include -#include "src/buildtool/compatibility/native_support.hpp" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/protocol_traits.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp" #include "src/buildtool/execution_api/common/execution_api.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" @@ -31,10 +33,7 @@ auto AddArtifactsToCas(ToAddArguments const& clargs, Storage const& storage, ApiBundle const& apis) -> bool { - auto const& cas = storage.CAS(); - std::optional digest{}; auto object_location = clargs.location; - if (clargs.follow_symlinks) { if (not FileSystemManager::ResolveSymlinks(&object_location)) { Logger::Log(LogLevel::Error, @@ -53,6 +52,8 @@ auto 
AddArtifactsToCas(ToAddArguments const& clargs, return false; } + auto const& cas = storage.CAS(); + std::optional digest{}; switch (*object_type) { case ObjectType::File: digest = cas.StoreBlob(object_location, /*is_executable=*/false); @@ -71,22 +72,23 @@ auto AddArtifactsToCas(ToAddArguments const& clargs, digest = cas.StoreBlob(*content, /*is_executable=*/false); } break; case ObjectType::Tree: { - if (Compatibility::IsCompatible()) { + if (not ProtocolTraits::IsTreeAllowed( + cas.GetHashFunction().GetType())) { Logger::Log(LogLevel::Error, "Storing of trees only supported in native mode"); return false; } auto store_blob = [&cas](std::filesystem::path const& path, - auto is_exec) -> std::optional { + auto is_exec) -> std::optional { return cas.StoreBlob(path, is_exec); }; auto store_tree = [&cas](std::string const& content) - -> std::optional { + -> std::optional { return cas.StoreTree(content); }; auto store_symlink = [&cas](std::string const& content) - -> std::optional { + -> std::optional { return cas.StoreBlob(content); }; digest = BazelMsgFactory::CreateGitTreeDigestFromLocalTree( @@ -101,10 +103,10 @@ auto AddArtifactsToCas(ToAddArguments const& clargs, return false; } - std::cout << NativeSupport::Unprefix(digest->hash()) << std::endl; + std::cout << digest->hash() << std::endl; - auto object = std::vector{ - Artifact::ObjectInfo{ArtifactDigest(*digest), *object_type, false}}; + auto const object = std::vector{ + Artifact::ObjectInfo{*digest, *object_type, false}}; if (not apis.local->RetrieveToCas(object, *apis.remote)) { Logger::Log(LogLevel::Error, @@ -115,4 +117,4 @@ auto AddArtifactsToCas(ToAddArguments const& clargs, return true; } -#endif +#endif // BOOTSTRAP_BUILD_TOOL diff --git a/src/buildtool/main/add_to_cas.hpp b/src/buildtool/main/add_to_cas.hpp index b7b25bb74..564caa7e1 100644 --- a/src/buildtool/main/add_to_cas.hpp +++ b/src/buildtool/main/add_to_cas.hpp @@ -25,4 +25,4 @@ ApiBundle const& apis) -> bool; #endif -#endif +#endif // 
INCLUDED_SRC_BUILDTOOL_MAIN_ADD_TO_CAS_HPP diff --git a/src/buildtool/main/analyse.cpp b/src/buildtool/main/analyse.cpp index c9d618728..fa332b9bf 100644 --- a/src/buildtool/main/analyse.cpp +++ b/src/buildtool/main/analyse.cpp @@ -26,11 +26,13 @@ #include "src/buildtool/build_engine/base_maps/targets_file_map.hpp" #include "src/buildtool/build_engine/target_map/absent_target_map.hpp" #include "src/buildtool/build_engine/target_map/target_map.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/multithreading/async_map_consumer.hpp" #include "src/buildtool/multithreading/async_map_utils.hpp" #include "src/buildtool/multithreading/task_system.hpp" #include "src/buildtool/progress_reporting/exports_progress_reporter.hpp" +#include "src/buildtool/storage/storage.hpp" #ifndef BOOTSTRAP_BUILD_TOOL #include "src/buildtool/serve_api/remote/config.hpp" #endif // BOOTSTRAP_BUILD_TOOL @@ -127,7 +129,10 @@ namespace Target = BuildMaps::Target; auto rule_map = Base::CreateRuleMap( &rule_file_map, &expr_map, context->repo_config, jobs); auto source_targets = Base::CreateSourceTargetMap( - &directory_entries, context->repo_config, jobs); + &directory_entries, + context->repo_config, + context->storage->GetHashFunction().GetType(), + jobs); auto absent_target_variables_map = Target::CreateAbsentTargetVariablesMap(context, jobs); diff --git a/src/buildtool/main/analyse.hpp b/src/buildtool/main/analyse.hpp index 0c637a0e9..4e75c26a9 100644 --- a/src/buildtool/main/analyse.hpp +++ b/src/buildtool/main/analyse.hpp @@ -43,4 +43,4 @@ struct AnalysisResult { Logger const* logger = nullptr, BuildMaps::Target::ServeFailureLogReporter* = nullptr) -> std::optional; -#endif +#endif // INCLUDED_SRC_BUILDOOL_MAIN_ANALYSE_HPP diff --git a/src/buildtool/main/archive.cpp b/src/buildtool/main/archive.cpp index 08c6b9a94..e49da2249 100644 --- a/src/buildtool/main/archive.cpp +++ b/src/buildtool/main/archive.cpp @@ -22,9 
+22,13 @@ #error "Non-unix is not supported yet" #endif +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/file_system/git_repo.hpp" +#include "src/buildtool/file_system/object_type.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" +#include "src/utils/cpp/expected.hpp" #include "src/utils/cpp/hex_string.hpp" extern "C" { @@ -47,8 +51,8 @@ void archive_entry_cleanup(archive_entry* entry) { } } -// NOLINTNEXTLINE(misc-no-recursion) -auto add_to_archive(archive* archive, +auto add_to_archive(HashFunction::Type hash_type, + archive* archive, IExecutionApi const& api, const Artifact::ObjectInfo& artifact, const std::filesystem::path& location) -> bool { @@ -69,7 +73,8 @@ auto add_to_archive(archive* archive, std::unique_ptr entry{archive_entry_new(), archive_entry_cleanup}; archive_entry_set_pathname(entry.get(), location.string().c_str()); - archive_entry_set_size(entry.get(), payload->size()); + archive_entry_set_size(entry.get(), + static_cast(payload->size())); archive_entry_set_filetype(entry.get(), AE_IFREG); archive_entry_set_perm(entry.get(), artifact.type == ObjectType::Executable @@ -83,7 +88,8 @@ auto add_to_archive(archive* archive, std::unique_ptr entry{archive_entry_new(), archive_entry_cleanup}; archive_entry_set_pathname(entry.get(), location.string().c_str()); - archive_entry_set_size(entry.get(), payload->size()); + archive_entry_set_size(entry.get(), + static_cast(payload->size())); archive_entry_set_filetype(entry.get(), AE_IFLNK); archive_entry_set_symlink(entry.get(), payload->c_str()); archive_entry_set_perm(entry.get(), kDefaultPerm); @@ -120,15 +126,20 @@ auto add_to_archive(archive* archive, for (auto const& [hash, entries] : *git_tree) { auto hex_hash = ToHexString(hash); for (auto const& entry : entries) { - tree[entry.name] = Artifact::ObjectInfo{ - .digest = ArtifactDigest( - hex_hash, 0, entry.type == 
ObjectType::Tree), - .type = entry.type, - .failed = false}; + auto digest = ArtifactDigestFactory::Create( + hash_type, hex_hash, 0, IsTreeObject(entry.type)); + if (not digest) { + return false; + } + tree[entry.name] = + Artifact::ObjectInfo{.digest = *std::move(digest), + .type = entry.type, + .failed = false}; } } for (auto const& [name, obj] : tree) { - if (not add_to_archive(archive, api, obj, location / name)) { + if (not add_to_archive( + hash_type, archive, api, obj, location / name)) { return false; } } @@ -141,6 +152,7 @@ auto add_to_archive(archive* archive, } // namespace [[nodiscard]] auto GenerateArchive( + HashFunction::Type hash_type, IExecutionApi const& api, const Artifact::ObjectInfo& artifact, const std::optional& output_path) -> bool { @@ -184,8 +196,11 @@ auto add_to_archive(archive* archive, } } - if (not add_to_archive( - archive.get(), api, artifact, std::filesystem::path{""})) { + if (not add_to_archive(hash_type, + archive.get(), + api, + artifact, + std::filesystem::path{""})) { return false; } if (output_path) { diff --git a/src/buildtool/main/archive.hpp b/src/buildtool/main/archive.hpp index 209675d6f..da61a5c3b 100644 --- a/src/buildtool/main/archive.hpp +++ b/src/buildtool/main/archive.hpp @@ -21,13 +21,15 @@ #include #include "src/buildtool/common/artifact.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/common/execution_api.hpp" [[nodiscard]] auto GenerateArchive( + HashFunction::Type hash_type, IExecutionApi const& api, const Artifact::ObjectInfo& artifact, const std::optional& output_path) -> bool; #endif -#endif +#endif // INCLUDED_SRC_BUILDTOOL_MAIN_ARCHIVE_HPP diff --git a/src/buildtool/main/build_utils.cpp b/src/buildtool/main/build_utils.cpp index 97b9e1b7d..cbd5873e1 100644 --- a/src/buildtool/main/build_utils.cpp +++ b/src/buildtool/main/build_utils.cpp @@ -14,6 +14,7 @@ #include "src/buildtool/main/build_utils.hpp" #ifndef BOOTSTRAP_BUILD_TOOL +#include 
"src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/multithreading/async_map_utils.hpp" #include "src/buildtool/storage/target_cache_entry.hpp" @@ -94,7 +95,8 @@ auto CreateTargetCacheWriterMap( return; } auto const& target = cache_targets.at(tc_key); - auto entry = TargetCacheEntry::FromTarget(target, extra_infos); + auto entry = TargetCacheEntry::FromTarget( + apis->hash_function.GetType(), target, extra_infos); if (not entry) { (*logger)( fmt::format("Failed creating target cache entry for key {}", diff --git a/src/buildtool/main/build_utils.hpp b/src/buildtool/main/build_utils.hpp index 2ea690256..f8f0a4705 100644 --- a/src/buildtool/main/build_utils.hpp +++ b/src/buildtool/main/build_utils.hpp @@ -16,6 +16,7 @@ #define INCLUDED_SRC_BUILDOOL_MAIN_BUILD_UTILS_HPP #include +#include #include #include #include @@ -47,7 +48,7 @@ std::unordered_map const& cache_targets) -> std::vector; -enum class TargetCacheWriteStrategy { +enum class TargetCacheWriteStrategy : std::uint8_t { Disable, ///< Do not create target-level cache entries Sync, ///< Create target-level cache entries after syncing the artifacts Split ///< Create target-level cache entries after syncing the artifacts; diff --git a/src/buildtool/main/cli.cpp b/src/buildtool/main/cli.cpp index 759c71241..8163abd49 100644 --- a/src/buildtool/main/cli.cpp +++ b/src/buildtool/main/cli.cpp @@ -33,7 +33,7 @@ auto SetupDescribeCommandArguments( SetupCommonAuthArguments(app, &clargs->auth); SetupClientAuthArguments(app, &clargs->cauth); SetupExecutionEndpointArguments(app, &clargs->endpoint); - SetupCompatibilityArguments(app); + SetupProtocolArguments(app, &clargs->protocol); SetupDescribeArguments(app, &clargs->describe); SetupRetryArguments(app, &clargs->retry); } @@ -52,7 +52,7 @@ auto SetupAnalyseCommandArguments( SetupCommonAuthArguments(app, &clargs->auth); SetupClientAuthArguments(app, &clargs->cauth); SetupDiagnosticArguments(app, &clargs->diagnose); 
- SetupCompatibilityArguments(app); + SetupProtocolArguments(app, &clargs->protocol); SetupRetryArguments(app, &clargs->retry); } @@ -72,7 +72,7 @@ auto SetupBuildCommandArguments( SetupCommonBuildArguments(app, &clargs->build); SetupBuildArguments(app, &clargs->build); SetupTCArguments(app, &clargs->tc); - SetupCompatibilityArguments(app); + SetupProtocolArguments(app, &clargs->protocol); SetupRetryArguments(app, &clargs->retry); } @@ -96,7 +96,7 @@ auto SetupRebuildCommandArguments( auto SetupInstallCasCommandArguments( gsl::not_null const& app, gsl::not_null const& clargs) { - SetupCompatibilityArguments(app); + SetupProtocolArguments(app, &clargs->protocol); SetupCacheArguments(app, &clargs->endpoint); SetupExecutionEndpointArguments(app, &clargs->endpoint); SetupCommonAuthArguments(app, &clargs->auth); @@ -110,7 +110,7 @@ auto SetupInstallCasCommandArguments( auto SetupAddToCasCommandArguments( gsl::not_null const& app, gsl::not_null const& clargs) { - SetupCompatibilityArguments(app); + SetupProtocolArguments(app, &clargs->protocol); SetupCacheArguments(app, &clargs->endpoint); SetupExecutionEndpointArguments(app, &clargs->endpoint); SetupCommonAuthArguments(app, &clargs->auth); @@ -135,7 +135,7 @@ auto SetupTraverseCommandArguments( SetupCommonBuildArguments(app, &clargs->build); SetupBuildArguments(app, &clargs->build); SetupStageArguments(app, &clargs->stage); - SetupCompatibilityArguments(app); + SetupProtocolArguments(app, &clargs->protocol); } /// \brief Setup arguments for sub command "just gc". 
@@ -151,7 +151,7 @@ auto SetupGcCommandArguments( auto SetupExecutionServiceCommandArguments( gsl::not_null const& app, gsl::not_null const& clargs) { - SetupCompatibilityArguments(app); + SetupProtocolArguments(app, &clargs->protocol); SetupCommonBuildArguments(app, &clargs->build); SetupCacheArguments(app, &clargs->endpoint); SetupServiceArguments(app, &clargs->service); diff --git a/src/buildtool/main/cli.hpp b/src/buildtool/main/cli.hpp index 4c28b1f65..7717d16cf 100644 --- a/src/buildtool/main/cli.hpp +++ b/src/buildtool/main/cli.hpp @@ -15,9 +15,11 @@ #ifndef INCLUDED_SRC_BUILDTOOL_MAIN_CLI #define INCLUDED_SRC_BUILDTOOL_MAIN_CLI +#include + #include "src/buildtool/common/cli.hpp" -enum class SubCommand { +enum class SubCommand : std::uint8_t { kUnknown, kVersion, kDescribe, @@ -34,7 +36,6 @@ enum class SubCommand { }; struct CommandLineArguments { - SubCommand cmd{SubCommand::kUnknown}; CommonArguments common; LogArguments log; AnalysisArguments analysis; @@ -55,9 +56,11 @@ struct CommandLineArguments { RetryArguments retry; GcArguments gc; ToAddArguments to_add; + ProtocolArguments protocol; + SubCommand cmd{SubCommand::kUnknown}; }; auto ParseCommandLineArguments(int argc, char const* const* argv) -> CommandLineArguments; -#endif +#endif // INCLUDED_SRC_BUILDTOOL_MAIN_CLI diff --git a/src/buildtool/main/diagnose.cpp b/src/buildtool/main/diagnose.cpp index 49aefc865..9171869ef 100644 --- a/src/buildtool/main/diagnose.cpp +++ b/src/buildtool/main/diagnose.cpp @@ -215,7 +215,6 @@ auto DumpExpressionToMap(gsl::not_null const& map, return false; } -// NOLINTNEXTLINE(misc-no-recursion) void DumpNodesInExpressionToMap(gsl::not_null const& map, ExpressionPtr const& expr) { if (expr->IsNode()) { diff --git a/src/buildtool/main/diagnose.hpp b/src/buildtool/main/diagnose.hpp index 79a150b8c..cb3d57a64 100644 --- a/src/buildtool/main/diagnose.hpp +++ b/src/buildtool/main/diagnose.hpp @@ -22,4 +22,4 @@ void DiagnoseResults(AnalysisResult const& result, 
BuildMaps::Target::ResultTargetMap const& result_map, DiagnosticArguments const& clargs); -#endif +#endif // INCLUDED_SRC_BUILDTOOL_MAIN_DIAGNOSE_HPP diff --git a/src/buildtool/main/exit_codes.hpp b/src/buildtool/main/exit_codes.hpp index d668e1545..c690c22c4 100644 --- a/src/buildtool/main/exit_codes.hpp +++ b/src/buildtool/main/exit_codes.hpp @@ -15,10 +15,12 @@ #ifndef INCLUDED_SRC_BUILDTOOL_MAIN_EXIT_CODES_HPP #define INCLUDED_SRC_BUILDTOOL_MAIN_EXIT_CODES_HPP -enum ExitCodes { +#include + +enum ExitCodes : std::uint8_t { kExitSuccess = 0, kExitFailure = 1, kExitSuccessFailedArtifacts = 2 }; -#endif +#endif // INCLUDED_SRC_BUILDTOOL_MAIN_EXIT_CODES_HPP diff --git a/src/buildtool/main/install_cas.cpp b/src/buildtool/main/install_cas.cpp index 1f5873e12..bac3556be 100644 --- a/src/buildtool/main/install_cas.cpp +++ b/src/buildtool/main/install_cas.cpp @@ -16,8 +16,7 @@ #include -#include "src/buildtool/compatibility/compatibility.hpp" -#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" #ifndef BOOTSTRAP_BUILD_TOOL @@ -28,14 +27,16 @@ namespace { -[[nodiscard]] auto InvalidSizeString(std::string const& size_str, +[[nodiscard]] auto InvalidSizeString(HashFunction::Type hash_type, + std::string const& size_str, std::string const& hash, bool has_remote) noexcept -> bool { // Only in compatible mode the size is checked, so an empty SHA256 hash is // needed. 
static auto const kEmptyHash = HashFunction{HashFunction::Type::PlainSHA256}.HashBlobData(""); - return Compatibility::IsCompatible() and // native mode is fine + return hash_type == + HashFunction::Type::PlainSHA256 and // native mode is fine (size_str == "0" or size_str.empty()) and // not "0" or "" is fine kEmptyHash.HexString() != hash and // empty hash is fine has_remote; // local is fine @@ -43,9 +44,10 @@ namespace { } // namespace -[[nodiscard]] auto ObjectInfoFromLiberalString(std::string const& s, +[[nodiscard]] auto ObjectInfoFromLiberalString(HashFunction::Type hash_type, + std::string const& s, bool has_remote) noexcept - -> Artifact::ObjectInfo { + -> std::optional { std::istringstream iss(s); std::string id{}; std::string size_str{}; @@ -60,7 +62,7 @@ namespace { if (not iss.eof()) { std::getline(iss, type, ']'); } - if (InvalidSizeString(size_str, id, has_remote)) { + if (InvalidSizeString(hash_type, size_str, id, has_remote)) { Logger::Log( LogLevel::Warning, "{} size in object-id is not supported in compatiblity mode.", @@ -69,9 +71,13 @@ namespace { auto size = static_cast( size_str.empty() ? 
0 : std::atol(size_str.c_str())); auto const& object_type = FromChar(*type.c_str()); - return Artifact::ObjectInfo{ - .digest = ArtifactDigest{id, size, IsTreeObject(object_type)}, - .type = object_type}; + auto digest = ArtifactDigestFactory::Create( + hash_type, id, size, IsTreeObject(object_type)); + if (not digest) { + return std::nullopt; + } + return Artifact::ObjectInfo{.digest = *std::move(digest), + .type = object_type}; } #ifndef BOOTSTRAP_BUILD_TOOL @@ -79,24 +85,27 @@ auto FetchAndInstallArtifacts(ApiBundle const& apis, FetchArguments const& clargs, RemoteContext const& remote_context) -> bool { auto object_info = ObjectInfoFromLiberalString( + apis.hash_function.GetType(), clargs.object_id, remote_context.exec_config->remote_address.has_value()); + if (not object_info) { + return false; + } if (clargs.remember) { if (not apis.remote->ParallelRetrieveToCas( - {object_info}, *apis.local, 1, true)) { + {*object_info}, *apis.local, 1, true)) { Logger::Log(LogLevel::Warning, "Failed to copy artifact {} to local CAS", - object_info.ToString()); + object_info->ToString()); } } if (clargs.sub_path) { - std::filesystem::path sofar{}; auto new_object_info = - RetrieveSubPathId(object_info, *apis.remote, *clargs.sub_path); + RetrieveSubPathId(*object_info, apis, *clargs.sub_path); if (new_object_info) { - object_info = *new_object_info; + object_info = new_object_info; } else { return false; @@ -108,7 +117,7 @@ auto FetchAndInstallArtifacts(ApiBundle const& apis, // Compute output location and create parent directories auto output_path = (*clargs.output_path / "").parent_path(); if (FileSystemManager::IsDirectory(output_path)) { - output_path /= object_info.digest.hash(); + output_path /= object_info->digest.hash(); } if (not FileSystemManager::CreateDirectory(output_path.parent_path())) { @@ -121,30 +130,31 @@ auto FetchAndInstallArtifacts(ApiBundle const& apis, } if (clargs.archive) { - if (object_info.type != ObjectType::Tree) { + if (object_info->type != 
ObjectType::Tree) { Logger::Log(LogLevel::Error, "Archive requested on non-tree {}", - object_info.ToString()); + object_info->ToString()); return false; } - return GenerateArchive(*apis.remote, object_info, out); + return GenerateArchive( + apis.hash_function.GetType(), *apis.remote, *object_info, out); } if (out) { if (not apis.remote->RetrieveToPaths( - {object_info}, {*out}, &*apis.local)) { + {*object_info}, {*out}, &*apis.local)) { Logger::Log(LogLevel::Error, "failed to retrieve artifact."); return false; } Logger::Log(LogLevel::Info, "artifact {} was installed to {}", - object_info.ToString(), + object_info->ToString(), out->string()); } else { // dump to stdout if (not apis.remote->RetrieveToFds( - {object_info}, {dup(fileno(stdout))}, clargs.raw_tree)) { + {*object_info}, {dup(fileno(stdout))}, clargs.raw_tree)) { Logger::Log(LogLevel::Error, "failed to dump artifact."); return false; } diff --git a/src/buildtool/main/install_cas.hpp b/src/buildtool/main/install_cas.hpp index 973245693..e7a42ae4c 100644 --- a/src/buildtool/main/install_cas.hpp +++ b/src/buildtool/main/install_cas.hpp @@ -15,9 +15,11 @@ #ifndef INCLUDED_SRC_BUILDTOOL_MAIN_INSTALL_CAS_HPP #define INCLUDED_SRC_BUILDTOOL_MAIN_INSTALL_CAS_HPP +#include #include #include "src/buildtool/common/artifact.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #ifndef BOOTSTRAP_BUILD_TOOL #include "src/buildtool/common/cli.hpp" #include "src/buildtool/execution_api/common/api_bundle.hpp" @@ -25,9 +27,10 @@ #endif /// \note Method is public for use also in tests. 
-[[nodiscard]] auto ObjectInfoFromLiberalString(std::string const& s, +[[nodiscard]] auto ObjectInfoFromLiberalString(HashFunction::Type hash_type, + std::string const& s, bool has_remote) noexcept - -> Artifact::ObjectInfo; + -> std::optional; #ifndef BOOTSTRAP_BUILD_TOOL [[nodiscard]] auto FetchAndInstallArtifacts(ApiBundle const& apis, diff --git a/src/buildtool/main/main.cpp b/src/buildtool/main/main.cpp index 1cf9901b2..a2bdb0559 100644 --- a/src/buildtool/main/main.cpp +++ b/src/buildtool/main/main.cpp @@ -34,10 +34,10 @@ #include "src/buildtool/build_engine/expression/expression.hpp" #include "src/buildtool/build_engine/target_map/target_map.hpp" #include "src/buildtool/common/artifact_description.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/common/remote/remote_common.hpp" #include "src/buildtool/common/repository_config.hpp" #include "src/buildtool/common/statistics.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/file_root.hpp" #include "src/buildtool/logging/log_config.hpp" @@ -61,7 +61,6 @@ #include "src/buildtool/serve_api/remote/serve_api.hpp" #include "src/buildtool/storage/config.hpp" #include "src/buildtool/storage/file_chunker.hpp" -#include "src/buildtool/storage/garbage_collector.hpp" #include "src/buildtool/storage/storage.hpp" #include "src/buildtool/storage/target_cache.hpp" #include "src/utils/cpp/concepts.hpp" @@ -113,7 +112,7 @@ void SetupLogging(LogArguments const& clargs) { [[nodiscard]] auto CreateStorageConfig( EndpointArguments const& eargs, - bool is_compatible, + HashFunction::Type hash_type, std::optional const& remote_address = std::nullopt, ExecutionProperties const& remote_platform_properties = {}, std::vector const& remote_dispatch = {}) noexcept @@ -124,9 +123,7 @@ void SetupLogging(LogArguments const& clargs) { } auto config = - builder - .SetHashType(is_compatible ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1) + builder.SetHashType(hash_type) .SetRemoteExecutionArgs( remote_address, remote_platform_properties, remote_dispatch) .Build(); @@ -236,7 +233,7 @@ void SetupFileChunker() { /// \brief Write backend description (which determines the target cache shard) /// to CAS. void StoreTargetCacheShard( - StorageConfig const& storage_config, + StorageConfig const& storage_config, // NOLINT(misc-unused-parameters) Storage const& storage, RemoteExecutionConfig const& remote_exec_config) noexcept { auto backend_description = @@ -248,8 +245,7 @@ void StoreTargetCacheShard( std::exit(kExitFailure); } [[maybe_unused]] auto id = storage.CAS().StoreBlob(*backend_description); - EnsuresAudit(id and ArtifactDigest{*id}.hash() == - storage_config.backend_description_id); + EnsuresAudit(id and id->hash() == storage_config.backend_description_id); } #endif // BOOTSTRAP_BUILD_TOOL @@ -766,7 +762,7 @@ auto main(int argc, char* argv[]) -> int { if (arguments.cmd == SubCommand::kGc) { // Set up storage for GC, as we have all the config args we need. auto const storage_config = CreateStorageConfig( - arguments.endpoint, Compatibility::IsCompatible()); + arguments.endpoint, arguments.protocol.hash_type); if (not storage_config) { return kExitFailure; } @@ -804,7 +800,7 @@ auto main(int argc, char* argv[]) -> int { // Set up storage for local execution. auto const storage_config = CreateStorageConfig( - arguments.endpoint, Compatibility::IsCompatible()); + arguments.endpoint, arguments.protocol.hash_type); if (not storage_config) { return kExitFailure; } @@ -868,7 +864,7 @@ auto main(int argc, char* argv[]) -> int { // Set up storage for serve operation. 
auto const storage_config = CreateStorageConfig(arguments.endpoint, - Compatibility::IsCompatible(), + arguments.protocol.hash_type, remote_exec_config->remote_address, remote_exec_config->platform_properties, remote_exec_config->dispatch); @@ -943,7 +939,7 @@ auto main(int argc, char* argv[]) -> int { // correctly-sharded target cache. auto const storage_config = CreateStorageConfig(arguments.endpoint, - Compatibility::IsCompatible(), + arguments.protocol.hash_type, remote_exec_config->remote_address, remote_exec_config->platform_properties, remote_exec_config->dispatch); @@ -951,7 +947,7 @@ auto main(int argc, char* argv[]) -> int { // For bootstrapping the TargetCache sharding is not needed, so we can // default all execution arguments. auto const storage_config = CreateStorageConfig( - arguments.endpoint, Compatibility::IsCompatible()); + arguments.endpoint, arguments.protocol.hash_type); #endif // BOOTSTRAP_BUILD_TOOL if (not storage_config) { return kExitFailure; @@ -1042,7 +1038,8 @@ auto main(int argc, char* argv[]) -> int { if (arguments.cmd == SubCommand::kTraverse) { if (arguments.graph.git_cas) { - if (Compatibility::IsCompatible()) { + if (not ProtocolTraits::IsNative( + arguments.protocol.hash_type)) { Logger::Log(LogLevel::Error, "Command line options {} and {} cannot be used " "together.", @@ -1128,7 +1125,8 @@ auto main(int argc, char* argv[]) -> int { if (result) { Logger::Log(LogLevel::Info, "Analysed target {}", - result->id.ToShortString()); + result->id.ToShortString( + Evaluator::GetExpressionLogLimit())); { auto cached = stats.ExportsCachedCounter(); @@ -1151,6 +1149,12 @@ auto main(int argc, char* argv[]) -> int { result_map.ToFile( *arguments.analysis.graph_file, &stats, &progress); } + if (arguments.analysis.graph_file_plain) { + result_map.ToFile( + *arguments.analysis.graph_file_plain, + &stats, + &progress); + } auto const [artifacts, runfiles] = ReadOutputArtifacts(result->target); if (arguments.analysis.artifacts_to_build_file) { @@ 
-1191,7 +1195,8 @@ auto main(int argc, char* argv[]) -> int { result->modified ? fmt::format(" input of action {} of", *(result->modified)) : "", - result->id.ToShortString()); + result->id.ToShortString( + Evaluator::GetExpressionLogLimit())); auto build_result = traverser.BuildAndStage(artifacts, diff --git a/src/buildtool/main/serve.cpp b/src/buildtool/main/serve.cpp index 622df15ef..1ee803494 100644 --- a/src/buildtool/main/serve.cpp +++ b/src/buildtool/main/serve.cpp @@ -31,6 +31,7 @@ #include "src/buildtool/build_engine/expression/configuration.hpp" #include "src/buildtool/build_engine/expression/expression.hpp" #include "src/buildtool/common/location.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" @@ -390,7 +391,7 @@ void ReadJustServeConfig(gsl::not_null const& clargs) { } // compatibility is set immediately if flag is true if (compatible->Bool()) { - Compatibility::SetCompatible(); + clargs->protocol.hash_type = HashFunction::Type::PlainSHA256; } } // read the address @@ -422,7 +423,7 @@ void ReadJustServeConfig(gsl::not_null const& clargs) { jobs->ToString()); std::exit(kExitFailure); } - clargs->common.jobs = jobs->Number(); + clargs->common.jobs = static_cast(jobs->Number()); } // read build options auto build_args = serve_config["build"]; @@ -447,7 +448,8 @@ void ReadJustServeConfig(gsl::not_null const& clargs) { build_jobs->ToString()); std::exit(kExitFailure); } - clargs->build.build_jobs = build_jobs->Number(); + clargs->build.build_jobs = + static_cast(build_jobs->Number()); } else { clargs->build.build_jobs = clargs->common.jobs; @@ -465,7 +467,8 @@ void ReadJustServeConfig(gsl::not_null const& clargs) { std::exit(kExitFailure); } clargs->build.timeout = - std::size_t(timeout->Number()) * std::chrono::seconds{1}; + static_cast(timeout->Number()) * + std::chrono::seconds{1}; } // read 
target-cache writing strategy auto strategy = build_args->Get("target-cache write strategy", diff --git a/src/buildtool/main/version.cpp b/src/buildtool/main/version.cpp index 4297fa600..d00fe6310 100644 --- a/src/buildtool/main/version.cpp +++ b/src/buildtool/main/version.cpp @@ -23,7 +23,7 @@ auto version() -> std::string { std::size_t major = 1; std::size_t minor = 4; std::size_t revision = 0; - std::string suffix = "~alpha"; + std::string suffix = std::string{}; #ifdef VERSION_EXTRA_SUFFIX suffix += VERSION_EXTRA_SUFFIX; #endif diff --git a/src/buildtool/main/version.hpp b/src/buildtool/main/version.hpp index af72a12e7..30597f58e 100644 --- a/src/buildtool/main/version.hpp +++ b/src/buildtool/main/version.hpp @@ -19,4 +19,4 @@ auto version() -> std::string; -#endif +#endif // INCLUDED_SRC_BUILDOOL_MAIN_VERSION_HPP diff --git a/src/buildtool/multithreading/TARGETS b/src/buildtool/multithreading/TARGETS index 35db852d2..fa76b3d4b 100644 --- a/src/buildtool/multithreading/TARGETS +++ b/src/buildtool/multithreading/TARGETS @@ -10,7 +10,7 @@ { "type": ["@", "rules", "CC", "library"] , "name": ["notification_queue"] , "hdrs": ["notification_queue.hpp"] - , "deps": ["task", ["src/utils/cpp", "atomic"], ["@", "gsl", "", "gsl"]] + , "deps": ["task", ["@", "gsl", "", "gsl"], ["src/utils/cpp", "atomic"]] , "stage": ["src", "buildtool", "multithreading"] } , "task_system": @@ -27,14 +27,14 @@ , "name": ["async_map_node"] , "hdrs": ["async_map_node.hpp"] , "deps": - ["task", "task_system", ["src/utils/cpp", "gsl"], ["@", "gsl", "", "gsl"]] + ["task", "task_system", ["@", "gsl", "", "gsl"], ["src/utils/cpp", "gsl"]] , "stage": ["src", "buildtool", "multithreading"] } , "async_map": { "type": ["@", "rules", "CC", "library"] , "name": ["async_map"] , "hdrs": ["async_map.hpp"] - , "deps": ["task", "task_system", "async_map_node", ["@", "gsl", "", "gsl"]] + , "deps": ["async_map_node", "task", "task_system", ["@", "gsl", "", "gsl"]] , "stage": ["src", "buildtool", 
"multithreading"] } , "async_map_consumer": @@ -42,10 +42,10 @@ , "name": ["async_map_consumer"] , "hdrs": ["async_map_consumer.hpp"] , "deps": - [ "task" - , "task_system" + [ "async_map" , "async_map_node" - , "async_map" + , "task" + , "task_system" , ["@", "gsl", "", "gsl"] ] , "stage": ["src", "buildtool", "multithreading"] @@ -64,6 +64,7 @@ , "deps": [ "async_map_consumer" , ["@", "fmt", "", "fmt"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] ] , "stage": ["src", "buildtool", "multithreading"] diff --git a/src/buildtool/multithreading/async_map_consumer.hpp b/src/buildtool/multithreading/async_map_consumer.hpp index bc7f33acc..a523214ed 100644 --- a/src/buildtool/multithreading/async_map_consumer.hpp +++ b/src/buildtool/multithreading/async_map_consumer.hpp @@ -142,10 +142,10 @@ class AsyncMapConsumer { private: using NodeRequests = std::unordered_map>; - std::shared_ptr value_creator_{}; - Map map_{}; - mutable std::shared_mutex requests_m_{}; - std::unordered_map requests_by_thread_{}; + std::shared_ptr value_creator_; + Map map_; + mutable std::shared_mutex requests_m_; + std::unordered_map requests_by_thread_; // Similar to previous methods, but in this case the logger and failure // function are already std::shared_ptr type. 
@@ -223,7 +223,7 @@ class AsyncMapConsumer { std::move(logger), FailureFunctionPtr{failptr}); }); - auto wrappedLogger = + auto wrapped_logger = std::make_shared([logger, node, ts](auto msg, auto fatal) { if (fatal) { node->Fail(ts); @@ -236,9 +236,9 @@ class AsyncMapConsumer { ts, key, setterptr = std::move(setterptr), - wrappedLogger = std::move(wrappedLogger), + wrapped_logger = std::move(wrapped_logger), subcallerptr = std::move(subcallerptr)]() { - (*vc)(ts, setterptr, wrappedLogger, subcallerptr, key); + (*vc)(ts, setterptr, wrapped_logger, subcallerptr, key); }); return node; } diff --git a/src/buildtool/multithreading/async_map_node.hpp b/src/buildtool/multithreading/async_map_node.hpp index 15687344f..4cef35e9b 100644 --- a/src/buildtool/multithreading/async_map_node.hpp +++ b/src/buildtool/multithreading/async_map_node.hpp @@ -170,10 +170,10 @@ class AsyncMapNode { private: Key key_; - std::optional value_{}; - std::vector awaiting_tasks_{}; - std::vector failure_tasks_{}; - std::mutex m_{}; + std::optional value_; + std::vector awaiting_tasks_; + std::vector failure_tasks_; + std::mutex m_; std::atomic is_queued_to_be_processed_{false}; bool failed_{false}; diff --git a/src/buildtool/multithreading/async_map_utils.hpp b/src/buildtool/multithreading/async_map_utils.hpp index 47e5b5b4f..2b2defeb2 100644 --- a/src/buildtool/multithreading/async_map_utils.hpp +++ b/src/buildtool/multithreading/async_map_utils.hpp @@ -46,9 +46,13 @@ template oss << fmt::format("Cycle detected in {}:", name) << std::endl; for (auto const& k : *cycle) { auto match = (k == cycle->back()); - auto prefix{match ? found ? "`-- "s : ".-> "s - : found ? "| "s - : " "s}; + std::string prefix; + if (match) { + prefix = found ? "`-- "s : ".-> "s; + } + else { + prefix = found ? 
"| "s : " "s; + } oss << prefix << key_printer(k) << std::endl; found = found or match; } diff --git a/src/buildtool/multithreading/notification_queue.hpp b/src/buildtool/multithreading/notification_queue.hpp index f1c803e9e..4483b5be9 100644 --- a/src/buildtool/multithreading/notification_queue.hpp +++ b/src/buildtool/multithreading/notification_queue.hpp @@ -56,10 +56,10 @@ class WaitableZeroCounter { } private: - std::shared_mutex mutex_{}; - std::condition_variable_any cv_{}; - std::atomic count_{}; - std::atomic done_{}; + std::shared_mutex mutex_; + std::condition_variable_any cv_; + std::atomic count_; + std::atomic done_; [[nodiscard]] auto IsZero() noexcept -> bool { return count_ == 0 or done_; @@ -162,10 +162,10 @@ class NotificationQueue { } private: - std::deque queue_{}; + std::deque queue_; bool done_{false}; - std::mutex mutex_{}; - std::condition_variable ready_{}; + std::mutex mutex_; + std::condition_variable ready_; gsl::not_null total_workload_; }; diff --git a/src/buildtool/multithreading/task.hpp b/src/buildtool/multithreading/task.hpp index a8c4591ed..05331d751 100644 --- a/src/buildtool/multithreading/task.hpp +++ b/src/buildtool/multithreading/task.hpp @@ -47,7 +47,7 @@ class Task { explicit operator bool() const noexcept { return f_.operator bool(); } private: - TaskFunc f_{}; + TaskFunc f_; }; #endif // INCLUDED_SRC_BUILDTOOL_MULTITHREADING_TASK_HPP diff --git a/src/buildtool/multithreading/task_system.hpp b/src/buildtool/multithreading/task_system.hpp index 089dc3d87..95b4dedf8 100644 --- a/src/buildtool/multithreading/task_system.hpp +++ b/src/buildtool/multithreading/task_system.hpp @@ -73,11 +73,11 @@ class TaskSystem { private: std::size_t const thread_count_{ std::max(1U, std::thread::hardware_concurrency())}; - std::vector threads_{}; - std::vector queues_{}; + std::vector threads_; + std::vector queues_; std::atomic index_{0}; - std::atomic shutdown_{}; - WaitableZeroCounter total_workload_{}; + std::atomic shutdown_; + 
WaitableZeroCounter total_workload_; static constexpr std::size_t kNumberOfAttempts = 5; diff --git a/src/buildtool/progress_reporting/TARGETS b/src/buildtool/progress_reporting/TARGETS index adc6f8c5c..11dd87eb8 100644 --- a/src/buildtool/progress_reporting/TARGETS +++ b/src/buildtool/progress_reporting/TARGETS @@ -14,8 +14,8 @@ , "hdrs": ["task_tracker.hpp"] , "stage": ["src", "buildtool", "progress_reporting"] , "deps": - [ ["src/buildtool/logging", "logging"] - , ["src/buildtool/logging", "log_level"] + [ ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] ] } , "progress_reporter": @@ -40,7 +40,10 @@ , "hdrs": ["base_progress_reporter.hpp"] , "srcs": ["base_progress_reporter.cpp"] , "stage": ["src", "buildtool", "progress_reporting"] - , "private-deps": [["src/buildtool/logging", "logging"]] + , "private-deps": + [ ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] + ] } , "exports_progress_reporter": { "type": ["@", "rules", "CC", "library"] diff --git a/src/buildtool/progress_reporting/progress.hpp b/src/buildtool/progress_reporting/progress.hpp index f04a46572..21bd439b4 100644 --- a/src/buildtool/progress_reporting/progress.hpp +++ b/src/buildtool/progress_reporting/progress.hpp @@ -47,7 +47,7 @@ class Progress { std::string, std::vector< std::pair>> - origin_map_{}; + origin_map_; }; #endif // INCLUDED_SRC_BUILDTOOL_PROGRESS_REPORTING_PROGRESS_HPP diff --git a/src/buildtool/progress_reporting/task_tracker.hpp b/src/buildtool/progress_reporting/task_tracker.hpp index 7fdfce319..b02852d3f 100644 --- a/src/buildtool/progress_reporting/task_tracker.hpp +++ b/src/buildtool/progress_reporting/task_tracker.hpp @@ -63,8 +63,8 @@ class TaskTracker { private: std::uint64_t prio_{}; - std::mutex m_{}; - std::unordered_map running_{}; + std::mutex m_; + std::unordered_map running_; }; #endif // INCLUDED_SRC_BUILDTOOL_PROGRESS_REPORTING_TASK_TRACKER_HPP diff --git a/src/buildtool/serve_api/remote/TARGETS 
b/src/buildtool/serve_api/remote/TARGETS index fe8bfcdaf..baf2b68cc 100644 --- a/src/buildtool/serve_api/remote/TARGETS +++ b/src/buildtool/serve_api/remote/TARGETS @@ -3,10 +3,10 @@ , "name": ["config"] , "hdrs": ["config.hpp"] , "deps": - [ ["src/buildtool/common/remote", "remote_common"] + [ ["@", "fmt", "", "fmt"] + , ["src/buildtool/common/remote", "remote_common"] , ["src/buildtool/main", "build_utils"] , ["src/utils/cpp", "expected"] - , ["@", "fmt", "", "fmt"] ] , "stage": ["src", "buildtool", "serve_api", "remote"] } @@ -17,16 +17,20 @@ , "srcs": ["source_tree_client.cpp"] , "deps": [ ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "common"] , ["src/buildtool/common/remote", "remote_common"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/execution_api/remote", "context"] , ["src/buildtool/file_system", "git_types"] , ["src/buildtool/file_system/symlinks_map", "pragma_special"] , ["src/buildtool/logging", "logging"] + , ["src/utils/cpp", "expected"] ] , "proto": [["src/buildtool/serve_api/serve_service", "just_serve_proto"]] , "stage": ["src", "buildtool", "serve_api", "remote"] , "private-deps": - [ ["src/buildtool/common/remote", "client_common"] + [ ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common/remote", "client_common"] , ["src/buildtool/logging", "log_level"] ] } @@ -35,20 +39,20 @@ , "name": ["serve_api"] , "hdrs": ["serve_api.hpp"] , "deps": - [ ["src/buildtool/auth", "auth"] + [ "config" + , "configuration_client" + , "source_tree_client" + , "target_client" + , ["src/buildtool/auth", "auth"] , ["src/buildtool/common", "common"] , ["src/buildtool/common/remote", "port"] , ["src/buildtool/common/remote", "remote_common"] - , ["src/buildtool/file_system", "git_types"] - , ["src/buildtool/file_system/symlinks_map", "pragma_special"] , ["src/buildtool/execution_api/common", "api_bundle"] , ["src/buildtool/execution_api/local", "context"] , ["src/buildtool/execution_api/remote", "context"] + , 
["src/buildtool/file_system", "git_types"] + , ["src/buildtool/file_system/symlinks_map", "pragma_special"] , ["src/utils/cpp", "expected"] - , "source_tree_client" - , "target_client" - , "configuration_client" - , "config" ] , "stage": ["src", "buildtool", "serve_api", "remote"] } @@ -59,22 +63,25 @@ , "srcs": ["target_client.cpp"] , "deps": [ ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "common"] , ["src/buildtool/common/remote", "port"] , ["src/buildtool/common/remote", "remote_common"] - , ["src/buildtool/common", "common"] - , ["src/buildtool/logging", "logging"] , ["src/buildtool/execution_api/common", "api_bundle"] , ["src/buildtool/execution_api/remote", "config"] , ["src/buildtool/execution_api/remote", "context"] + , ["src/buildtool/logging", "logging"] , ["src/buildtool/storage", "storage"] ] , "proto": [["src/buildtool/serve_api/serve_service", "just_serve_proto"]] , "stage": ["src", "buildtool", "serve_api", "remote"] , "private-deps": - [ ["src/buildtool/common/remote", "client_common"] - , ["src/buildtool/common", "bazel_types"] - , ["@", "fmt", "", "fmt"] + [ ["@", "fmt", "", "fmt"] , ["@", "json", "", "json"] + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common", "bazel_types"] + , ["src/buildtool/common/remote", "client_common"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/logging", "log_level"] ] } , "configuration_client": @@ -84,17 +91,17 @@ , "srcs": ["configuration_client.cpp"] , "deps": [ ["@", "gsl", "", "gsl"] + , ["src/buildtool/common/remote", "remote_common"] , ["src/buildtool/execution_api/remote", "config"] , ["src/buildtool/execution_api/remote", "context"] , ["src/buildtool/logging", "logging"] - , ["src/buildtool/common/remote", "remote_common"] ] , "proto": [["src/buildtool/serve_api/serve_service", "just_serve_proto"]] , "stage": ["src", "buildtool", "serve_api", "remote"] , "private-deps": [ ["@", "json", "", "json"] - , ["src/buildtool/logging", "log_level"] , 
["src/buildtool/common/remote", "client_common"] + , ["src/buildtool/logging", "log_level"] ] } } diff --git a/src/buildtool/serve_api/remote/config.hpp b/src/buildtool/serve_api/remote/config.hpp index 005d56af3..699f285ab 100644 --- a/src/buildtool/serve_api/remote/config.hpp +++ b/src/buildtool/serve_api/remote/config.hpp @@ -34,10 +34,10 @@ struct RemoteServeConfig final { class Builder; // Server address of remote execution. - std::optional const remote_address{}; + std::optional const remote_address; // Known Git repositories to serve server. - std::vector const known_repositories{}; + std::vector const known_repositories; // Number of jobs std::size_t const jobs = 0; @@ -98,7 +98,7 @@ class RemoteServeConfig::Builder final { -> expected { // To not duplicate default arguments of RemoteServeConfig in builder, // create a default config and copy default arguments from there. - RemoteServeConfig const default_config; + RemoteServeConfig const default_config{}; auto remote_address = default_config.remote_address; if (remote_address_.has_value()) { diff --git a/src/buildtool/serve_api/remote/serve_api.hpp b/src/buildtool/serve_api/remote/serve_api.hpp index f3861eb76..3a9443faa 100644 --- a/src/buildtool/serve_api/remote/serve_api.hpp +++ b/src/buildtool/serve_api/remote/serve_api.hpp @@ -46,7 +46,9 @@ class ServeApi final { gsl::not_null const& local_context, gsl::not_null const& remote_context, gsl::not_null const& apis) noexcept - : stc_{address, remote_context}, + : stc_{address, + &local_context->storage_config->hash_function, + remote_context}, tc_{address, local_context->storage, remote_context, apis}, cc_{address, remote_context} {} @@ -71,10 +73,10 @@ class ServeApi final { return std::nullopt; } - [[nodiscard]] auto RetrieveTreeFromCommit(std::string const& commit, - std::string const& subdir = ".", - bool sync_tree = false) - const noexcept -> expected { + [[nodiscard]] auto RetrieveTreeFromCommit( + std::string const& commit, + std::string const& 
subdir = ".", + bool sync_tree = false) const noexcept -> SourceTreeClient::result_t { return stc_.ServeCommitTree(commit, subdir, sync_tree); } @@ -83,8 +85,7 @@ class ServeApi final { std::string const& archive_type = "archive", std::string const& subdir = ".", std::optional const& resolve_symlinks = std::nullopt, - bool sync_tree = false) const noexcept - -> expected { + bool sync_tree = false) const noexcept -> SourceTreeClient::result_t { return stc_.ServeArchiveTree( content, archive_type, subdir, resolve_symlinks, sync_tree); } @@ -92,25 +93,24 @@ class ServeApi final { [[nodiscard]] auto RetrieveTreeFromDistdir( std::shared_ptr> const& distfiles, - bool sync_tree = false) const noexcept - -> expected { + bool sync_tree = false) const noexcept -> SourceTreeClient::result_t { return stc_.ServeDistdirTree(distfiles, sync_tree); } - [[nodiscard]] auto RetrieveTreeFromForeignFile(const std::string& content, - const std::string& name, - bool executable) - const noexcept -> expected { + [[nodiscard]] auto RetrieveTreeFromForeignFile( + const std::string& content, + const std::string& name, + bool executable) const noexcept -> SourceTreeClient::result_t { return stc_.ServeForeignFileTree(content, name, executable); } - [[nodiscard]] auto ContentInRemoteCAS( - std::string const& content) const noexcept -> bool { + [[nodiscard]] auto ContentInRemoteCAS(std::string const& content) + const noexcept -> expected { return stc_.ServeContent(content); } - [[nodiscard]] auto TreeInRemoteCAS( - std::string const& tree_id) const noexcept -> bool { + [[nodiscard]] auto TreeInRemoteCAS(std::string const& tree_id) + const noexcept -> expected { return stc_.ServeTree(tree_id); } @@ -120,8 +120,8 @@ class ServeApi final { } [[nodiscard]] auto GetTreeFromRemote( - std::string const& tree_id) const noexcept -> bool { - return stc_.GetRemoteTree(tree_id); + ArtifactDigest const& digest) const noexcept -> bool { + return stc_.GetRemoteTree(digest); } [[nodiscard]] auto 
ServeTargetVariables(std::string const& target_root_id, @@ -139,8 +139,8 @@ class ServeApi final { } [[nodiscard]] auto ServeTarget(const TargetCacheKey& key, - const std::string& repo_key) const noexcept - -> std::optional { + const ArtifactDigest& repo_key) + const noexcept -> std::optional { return tc_.ServeTarget(key, repo_key); } diff --git a/src/buildtool/serve_api/remote/source_tree_client.cpp b/src/buildtool/serve_api/remote/source_tree_client.cpp index 994fcc21a..d0053d47b 100644 --- a/src/buildtool/serve_api/remote/source_tree_client.cpp +++ b/src/buildtool/serve_api/remote/source_tree_client.cpp @@ -16,6 +16,7 @@ #include "src/buildtool/serve_api/remote/source_tree_client.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/common/remote/client_common.hpp" #include "src/buildtool/logging/log_level.hpp" @@ -61,7 +62,9 @@ auto PragmaSpecialToSymlinksResolve( SourceTreeClient::SourceTreeClient( ServerAddress const& address, - gsl::not_null const& remote_context) noexcept { + gsl::not_null const& hash_function, + gsl::not_null const& remote_context) noexcept + : hash_function_{*hash_function} { stub_ = justbuild::just_serve::SourceTree::NewStub(CreateChannelWithCredentials( address.host, address.port, remote_context->auth)); @@ -95,7 +98,18 @@ auto SourceTreeClient::ServeCommitTree(std::string const& commit_id, ? GitLookupError::Fatal : GitLookupError::NotFound}; } - return response.tree(); // success + TreeResult result = {response.tree(), std::nullopt}; + // if asked to sync, get digest from response + if (sync_tree) { + auto digest = ArtifactDigestFactory::FromBazel(hash_function_.GetType(), + response.digest()); + if (not digest) { + logger_.Emit(LogLevel::Debug, std::move(digest).error()); + return unexpected{GitLookupError::Fatal}; + } + result.digest = *std::move(digest); + } + return result; // success } auto SourceTreeClient::ServeArchiveTree( @@ -131,7 +145,18 @@ auto SourceTreeClient::ServeArchiveTree( ? 
GitLookupError::Fatal : GitLookupError::NotFound}; } - return response.tree(); // success + TreeResult result = {response.tree(), std::nullopt}; + // if asked to sync, get digest from response + if (sync_tree) { + auto digest = ArtifactDigestFactory::FromBazel(hash_function_.GetType(), + response.digest()); + if (not digest) { + logger_.Emit(LogLevel::Debug, std::move(digest).error()); + return unexpected{GitLookupError::Fatal}; + } + result.digest = *std::move(digest); + } + return result; // success } auto SourceTreeClient::ServeDistdirTree( @@ -166,7 +191,18 @@ auto SourceTreeClient::ServeDistdirTree( ? GitLookupError::Fatal : GitLookupError::NotFound}; } - return response.tree(); // success + TreeResult result = {response.tree(), std::nullopt}; + // if asked to sync, get digest from response + if (sync_tree) { + auto digest = ArtifactDigestFactory::FromBazel(hash_function_.GetType(), + response.digest()); + if (not digest) { + logger_.Emit(LogLevel::Debug, std::move(digest).error()); + return unexpected{GitLookupError::Fatal}; + } + result.digest = *std::move(digest); + } + return result; // success } auto SourceTreeClient::ServeForeignFileTree(const std::string& content, @@ -199,11 +235,11 @@ auto SourceTreeClient::ServeForeignFileTree(const std::string& content, ? 
GitLookupError::Fatal : GitLookupError::NotFound}; } - return response.tree(); // success + return TreeResult{response.tree(), std::nullopt}; // success } auto SourceTreeClient::ServeContent(std::string const& content) const noexcept - -> bool { + -> expected { justbuild::just_serve::ServeContentRequest request{}; request.set_content(content); @@ -213,20 +249,30 @@ auto SourceTreeClient::ServeContent(std::string const& content) const noexcept if (not status.ok()) { LogStatus(&logger_, LogLevel::Debug, status); - return false; + return unexpected{GitLookupError::Fatal}; } if (response.status() != ::justbuild::just_serve::ServeContentResponse::OK) { logger_.Emit(LogLevel::Debug, "ServeContent response returned with {}", static_cast(response.status())); - return false; + return unexpected{ + response.status() != + ::justbuild::just_serve::ServeContentResponse::NOT_FOUND + ? GitLookupError::Fatal + : GitLookupError::NotFound}; } - return true; + auto digest = ArtifactDigestFactory::FromBazel(hash_function_.GetType(), + response.digest()); + if (not digest) { + logger_.Emit(LogLevel::Debug, std::move(digest).error()); + return unexpected{GitLookupError::Fatal}; + } + return *std::move(digest); // success } auto SourceTreeClient::ServeTree(std::string const& tree_id) const noexcept - -> bool { + -> expected { justbuild::just_serve::ServeTreeRequest request{}; request.set_tree(tree_id); @@ -236,15 +282,25 @@ auto SourceTreeClient::ServeTree(std::string const& tree_id) const noexcept if (not status.ok()) { LogStatus(&logger_, LogLevel::Debug, status); - return false; + return unexpected{GitLookupError::Fatal}; } if (response.status() != ::justbuild::just_serve::ServeTreeResponse::OK) { logger_.Emit(LogLevel::Debug, "ServeTree response returned with {}", static_cast(response.status())); - return false; + return unexpected{ + response.status() != + ::justbuild::just_serve::ServeTreeResponse::NOT_FOUND + ? 
GitLookupError::Fatal + : GitLookupError::NotFound}; } - return true; + auto digest = ArtifactDigestFactory::FromBazel(hash_function_.GetType(), + response.digest()); + if (not digest) { + logger_.Emit(LogLevel::Debug, std::move(digest).error()); + return unexpected{GitLookupError::Fatal}; + } + return *std::move(digest); // success } auto SourceTreeClient::CheckRootTree(std::string const& tree_id) const noexcept @@ -274,10 +330,10 @@ auto SourceTreeClient::CheckRootTree(std::string const& tree_id) const noexcept return true; // tree found } -auto SourceTreeClient::GetRemoteTree(std::string const& tree_id) const noexcept - -> bool { +auto SourceTreeClient::GetRemoteTree( + ArtifactDigest const& digest) const noexcept -> bool { justbuild::just_serve::GetRemoteTreeRequest request{}; - request.set_tree(tree_id); + (*request.mutable_digest()) = ArtifactDigestFactory::ToBazel(digest); grpc::ClientContext context; justbuild::just_serve::GetRemoteTreeResponse response; diff --git a/src/buildtool/serve_api/remote/source_tree_client.hpp b/src/buildtool/serve_api/remote/source_tree_client.hpp index c849eeb38..32fb910c1 100644 --- a/src/buildtool/serve_api/remote/source_tree_client.hpp +++ b/src/buildtool/serve_api/remote/source_tree_client.hpp @@ -16,12 +16,15 @@ #define INCLUDED_SRC_BUILDTOOL_SERVE_API_SOURCE_TREE_CLIENT_HPP #include +#include #include #include #include "gsl/gsl" #include "justbuild/just_serve/just_serve.grpc.pb.h" +#include "src/buildtool/common/artifact_digest.hpp" #include "src/buildtool/common/remote/remote_common.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/remote/context.hpp" #include "src/buildtool/file_system/git_types.hpp" #include "src/buildtool/file_system/symlinks_map/pragma_special.hpp" @@ -34,19 +37,23 @@ class SourceTreeClient { public: explicit SourceTreeClient( ServerAddress const& address, + gsl::not_null const& hash_function, gsl::not_null const& remote_context) noexcept; - // An error + 
data union type - using result_t = expected; + struct TreeResult { + std::string tree; + std::optional digest; + }; + using result_t = expected; /// \brief Retrieve the Git tree of a given commit, if known by the /// endpoint. It is a fatal error if the commit is known to the endpoint but - /// no tree is able to be returned. + /// no result is able to be returned. /// \param[in] commit_id Hash of the Git commit to look up. /// \param[in] subdir Relative path of the tree inside commit. /// \param[in] sync_tree Sync tree to the remote-execution endpoint. - /// \returns The tree identifier on success or an unexpected error (fatal or - /// commit or subtree not found). + /// \returns The optional tree digest on success or an unexpected error + /// (fatal or commit or subtree not found). [[nodiscard]] auto ServeCommitTree(std::string const& commit_id, std::string const& subdir, bool sync_tree) const noexcept @@ -54,15 +61,15 @@ class SourceTreeClient { /// \brief Retrieve the Git tree of an archive content, if known by the /// endpoint. It is a fatal error if the content blob is known to the - /// endpoint but no tree is able to be returned. + /// endpoint but no result is able to be returned. /// \param[in] content Hash of the archive content to look up. /// \param[in] archive_type Type of archive ("archive"|"zip"). /// \param[in] subdir Relative path of the tree inside archive. /// \param[in] resolve_symlinks Optional enum to state how symlinks in the /// archive should be handled if the tree has to be actually computed. /// \param[in] sync_tree Sync tree to the remote-execution endpoint. - /// \returns The tree identifier on success or an unexpected error (fatal or - /// content blob not found). + /// \returns The optional tree digest on success or an unexpected error + /// (fatal or content blob not found). 
[[nodiscard]] auto ServeArchiveTree( std::string const& content, std::string const& archive_type, @@ -72,20 +79,25 @@ class SourceTreeClient { /// \brief Retrieve the Git tree of a directory of distfiles, if all the /// content blobs are known by the endpoint. It is a fatal error if all - /// content blobs are known but no tree is able to be returned. + /// content blobs are known but no result is able to be returned. /// \param[in] distfiles Mapping from distfile names to content blob ids. - /// \param[in] sync_tree Sync tree and all ditfile blobs to the + /// \param[in] sync_tree Sync tree and all distfile blobs to the /// remote-execution endpoint. - /// \returns The tree identifier on success or an unexpected error (fatal or - /// at least one distfile blob missing). + /// \returns The optional tree digest on success or an unexpected error + /// (fatal or at least one distfile blob missing). [[nodiscard]] auto ServeDistdirTree( std::shared_ptr> const& distfiles, bool sync_tree) const noexcept -> result_t; /// \brief Retrieve the Git tree of a foreign-file directory, if all content - /// blobs are known to the end point and, as a side effect, make that tree + /// blobs are known to the endpoint and, as a side-effect, make that tree /// known to the serve endpoint. + /// \param[in] content Hash of the foreign-file content. + /// \param[in] name Name of the foreign-file. + /// \param[in] executable Executable flag of foreign-file. + /// \returns The optional tree digest on success or an unexpected error + /// (fatal or content not found). [[nodiscard]] auto ServeForeignFileTree(const std::string& content, const std::string& name, bool executable) const noexcept @@ -96,14 +108,14 @@ class SourceTreeClient { /// \param[in] content Hash of the archive content to look up. /// \returns Flag to state whether content is in remote CAS. 
[[nodiscard]] auto ServeContent(std::string const& content) const noexcept - -> bool; + -> expected; /// \brief Make a given tree available in remote CAS, if known by serve /// remote. /// \param[in] tree_id Identifier of the Git tree to look up. /// \returns Flag to state whether tree is in remote CAS. [[nodiscard]] auto ServeTree(std::string const& tree_id) const noexcept - -> bool; + -> expected; /// \brief Checks if the serve endpoint has a given tree locally available /// and makes it available for a serve-orchestrated build. @@ -115,13 +127,14 @@ class SourceTreeClient { /// \brief Retrieve tree from the CAS of the associated remote-execution /// endpoint and makes it available for a serve-orchestrated build. - /// \param[in] tree_id Identifier of the Git tree to retrieve. + /// \param[in] digest Tree to retrieve. /// \returns Flag to state whether tree was successfully imported into the /// local Git storage or not. - [[nodiscard]] auto GetRemoteTree(std::string const& tree_id) const noexcept - -> bool; + [[nodiscard]] auto GetRemoteTree( + ArtifactDigest const& digest) const noexcept -> bool; private: + HashFunction const& hash_function_; // hash function of the remote std::unique_ptr stub_; Logger logger_{"RemoteSourceTreeClient"}; }; diff --git a/src/buildtool/serve_api/remote/target_client.cpp b/src/buildtool/serve_api/remote/target_client.cpp index 4d11fe840..972b97084 100644 --- a/src/buildtool/serve_api/remote/target_client.cpp +++ b/src/buildtool/serve_api/remote/target_client.cpp @@ -21,10 +21,29 @@ #include "fmt/core.h" #include "nlohmann/json.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/common/bazel_types.hpp" #include "src/buildtool/common/remote/client_common.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/logging/log_level.hpp" +namespace { +[[nodiscard]] auto GetTargetValue( + HashFunction::Type hash_type, + justbuild::just_serve::ServeTargetResponse const& response) 
noexcept + -> std::optional { + if (not response.has_target_value()) { + return std::nullopt; + } + auto result = + ArtifactDigestFactory::FromBazel(hash_type, response.target_value()); + if (not result) { + return std::nullopt; + } + return *std::move(result); +} +} // namespace + TargetClient::TargetClient( ServerAddress const& address, gsl::not_null const& storage, @@ -38,7 +57,7 @@ TargetClient::TargetClient( } auto TargetClient::ServeTarget(const TargetCacheKey& key, - const std::string& repo_key) const noexcept + const ArtifactDigest& repo_key) const noexcept -> std::optional { // make sure the blob containing the key is in the remote cas if (not apis_.local->RetrieveToCas({key.Id()}, *apis_.remote)) { @@ -49,18 +68,19 @@ auto TargetClient::ServeTarget(const TargetCacheKey& key, } // make sure the repository configuration blob is in the remote cas if (not apis_.local->RetrieveToCas( - {Artifact::ObjectInfo{.digest = ArtifactDigest{repo_key, 0, false}, + {Artifact::ObjectInfo{.digest = repo_key, .type = ObjectType::File}}, *apis_.remote)) { return serve_target_result_t{ std::in_place_index<1>, - fmt::format("Failed to retrieve to remote cas blob {}", repo_key)}; + fmt::format("Failed to retrieve to remote cas blob {}", + repo_key.hash())}; } // add target cache key to request - bazel_re::Digest key_dgst{key.Id().digest}; justbuild::just_serve::ServeTargetRequest request{}; - request.mutable_target_cache_key_id()->CopyFrom(key_dgst); + *request.mutable_target_cache_key_id() = + ArtifactDigestFactory::ToBazel(key.Id().digest); // add execution properties to request for (auto const& [k, v] : exec_config_.platform_properties) { @@ -71,14 +91,16 @@ auto TargetClient::ServeTarget(const TargetCacheKey& key, // add dispatch information to request, while ensuring blob is uploaded // to remote cas - auto dispatch_list = nlohmann::json::array(); + std::optional dispatch_digest; try { + auto dispatch_list = nlohmann::json::array(); for (auto const& [props, endpoint] : 
exec_config_.dispatch) { auto entry = nlohmann::json::array(); entry.push_back(nlohmann::json(props)); entry.push_back(endpoint.ToJson()); dispatch_list.push_back(entry); } + dispatch_digest = storage_.CAS().StoreBlob(dispatch_list.dump(2)); } catch (std::exception const& ex) { return serve_target_result_t{ std::in_place_index<1>, @@ -86,22 +108,21 @@ auto TargetClient::ServeTarget(const TargetCacheKey& key, ex.what())}; } - auto dispatch_dgst = storage_.CAS().StoreBlob(dispatch_list.dump(2)); - if (not dispatch_dgst) { + if (not dispatch_digest) { return serve_target_result_t{ - std::in_place_index<1>, - fmt::format("Failed to store blob {} to local cas", - dispatch_list.dump(2))}; + std::in_place_index<1>, "Failed to add dispatch info to local cas"}; } - auto const& dispatch_info = Artifact::ObjectInfo{ - .digest = ArtifactDigest{*dispatch_dgst}, .type = ObjectType::File}; + + auto const dispatch_info = Artifact::ObjectInfo{.digest = *dispatch_digest, + .type = ObjectType::File}; if (not apis_.local->RetrieveToCas({dispatch_info}, *apis_.remote)) { return serve_target_result_t{ std::in_place_index<1>, fmt::format("Failed to upload blob {} to remote cas", dispatch_info.ToString())}; } - request.mutable_dispatch_info()->CopyFrom(*dispatch_dgst); + (*request.mutable_dispatch_info()) = + ArtifactDigestFactory::ToBazel(*dispatch_digest); // call rpc grpc::ClientContext context; @@ -113,21 +134,29 @@ auto TargetClient::ServeTarget(const TargetCacheKey& key, case grpc::StatusCode::OK: { // if log has been set, pass it along as index 0 if (response.has_log()) { - return serve_target_result_t{ - std::in_place_index<0>, - ArtifactDigest(response.log()).hash()}; + auto log_digest = ArtifactDigestFactory::FromBazel( + storage_.GetHashFunction().GetType(), response.log()); + if (not log_digest) { + return serve_target_result_t{ + std::in_place_index<1>, + fmt::format("Failed to convert log digest: {}", + std::move(log_digest).error())}; + } + return 
serve_target_result_t{std::in_place_index<0>, + log_digest->hash()}; } // if no log has been set, it must have the target cache value - if (not response.has_target_value()) { + auto const target_value_dgst = + GetTargetValue(storage_.GetHashFunction().GetType(), response); + if (not target_value_dgst) { return serve_target_result_t{ std::in_place_index<1>, "Serve endpoint failed to set expected response field"}; } - auto const& target_value_dgst = - ArtifactDigest{response.target_value()}; - auto const& obj_info = Artifact::ObjectInfo{ - .digest = target_value_dgst, .type = ObjectType::File}; - if (not apis_.local->IsAvailable(target_value_dgst)) { + + auto const obj_info = Artifact::ObjectInfo{ + .digest = *target_value_dgst, .type = ObjectType::File}; + if (not apis_.local->IsAvailable(*target_value_dgst)) { if (not apis_.remote->RetrieveToCas({obj_info}, *apis_.local)) { return serve_target_result_t{ std::in_place_index<1>, @@ -145,7 +174,8 @@ auto TargetClient::ServeTarget(const TargetCacheKey& key, obj_info.ToString())}; } try { - auto const& result = TargetCacheEntry::FromJson( + auto const result = TargetCacheEntry::FromJson( + storage_.GetHashFunction().GetType(), nlohmann::json::parse(*target_value_str)); // return the target cache value information return serve_target_result_t{std::in_place_index<3>, @@ -222,7 +252,14 @@ auto TargetClient::ServeTargetDescription( LogStatus(&logger_, LogLevel::Error, status); return std::nullopt; } - return ArtifactDigest{response.description_id()}; + + auto result = ArtifactDigestFactory::FromBazel( + storage_.GetHashFunction().GetType(), response.description_id()); + if (not result) { + logger_.Emit(LogLevel::Error, "{}", std::move(result).error()); + return std::nullopt; + } + return *std::move(result); } #endif // BOOTSTRAP_BUILD_TOOL diff --git a/src/buildtool/serve_api/remote/target_client.hpp b/src/buildtool/serve_api/remote/target_client.hpp index e6ca07ff2..7e0fb726c 100644 --- 
a/src/buildtool/serve_api/remote/target_client.hpp +++ b/src/buildtool/serve_api/remote/target_client.hpp @@ -68,8 +68,8 @@ class TargetClient { /// \returns A correspondingly populated result union, or nullopt if remote /// reported that the target was not found. [[nodiscard]] auto ServeTarget(const TargetCacheKey& key, - const std::string& repo_key) const noexcept - -> std::optional; + const ArtifactDigest& repo_key) + const noexcept -> std::optional; /// \brief Retrieve the flexible config variables of an export target. /// \param[in] target_root_id Hash of target-level root tree. diff --git a/src/buildtool/serve_api/serve_service/TARGETS b/src/buildtool/serve_api/serve_service/TARGETS index 1a080b66b..4e9e3909a 100644 --- a/src/buildtool/serve_api/serve_service/TARGETS +++ b/src/buildtool/serve_api/serve_service/TARGETS @@ -14,33 +14,35 @@ , "proto": ["just_serve_proto"] , "deps": [ ["@", "gsl", "", "gsl"] - , ["src/buildtool/logging", "logging"] , ["src/buildtool/common/remote", "remote_common"] - , ["src/buildtool/execution_api/common", "common"] - , ["src/buildtool/execution_api/remote", "config"] , ["src/buildtool/execution_api/common", "api_bundle"] + , ["src/buildtool/execution_api/common", "common"] , ["src/buildtool/execution_api/local", "context"] + , ["src/buildtool/execution_api/remote", "config"] , ["src/buildtool/file_system", "git_types"] , ["src/buildtool/file_system/symlinks_map", "pragma_special"] , ["src/buildtool/file_system/symlinks_map", "resolve_symlinks_map"] + , ["src/buildtool/logging", "logging"] , ["src/buildtool/serve_api/remote", "config"] , ["src/utils/cpp", "expected"] - , ["src/buildtool/storage", "config"] - , ["src/buildtool/storage", "storage"] - , ["src/buildtool/storage", "repository_garbage_collector"] ] , "stage": ["src", "buildtool", "serve_api", "serve_service"] , "private-deps": [ ["@", "fmt", "", "fmt"] + , ["src/buildtool/common", "artifact_digest_factory"] , ["src/buildtool/common", "common"] - , 
["src/buildtool/compatibility", "compatibility"] + , ["src/buildtool/common", "protocol_traits"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/execution_api/serve", "mr_git_api"] + , ["src/buildtool/execution_api/serve", "utils"] + , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/file_system", "git_repo"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/multithreading", "async_map_utils"] , ["src/buildtool/storage", "fs_utils"] + , ["src/buildtool/storage", "repository_garbage_collector"] + , ["src/buildtool/storage", "storage"] , ["src/utils/archive", "archive_ops"] - , ["src/buildtool/execution_api/git", "git"] - , ["src/buildtool/crypto", "hash_function"] ] } , "serve_server_implementation": @@ -50,31 +52,36 @@ , "srcs": ["serve_server_implementation.cpp"] , "deps": [ ["@", "gsl", "", "gsl"] - , ["src/buildtool/logging", "logging"] - , ["src/buildtool/serve_api/remote", "config"] - , ["src/buildtool/serve_api/remote", "serve_api"] , ["src/buildtool/execution_api/common", "api_bundle"] - , ["src/buildtool/execution_api/execution_service", "operation_cache"] , ["src/buildtool/execution_api/local", "context"] , ["src/buildtool/execution_api/remote", "context"] + , ["src/buildtool/logging", "logging"] + , ["src/buildtool/serve_api/remote", "config"] + , ["src/buildtool/serve_api/remote", "serve_api"] ] , "stage": ["src", "buildtool", "serve_api", "serve_service"] , "private-deps": - [ "source_tree" + [ "configuration_service" + , "source_tree" , "target_service" - , "configuration_service" , ["@", "fmt", "", "fmt"] , ["@", "grpc", "", "grpc++"] , ["@", "json", "", "json"] + , ["src/buildtool/common", "protocol_traits"] , ["src/buildtool/common/remote", "port"] - , ["src/buildtool/compatibility", "compatibility"] - , ["src/buildtool/logging", "log_level"] - , ["src/buildtool/execution_api/execution_service", "execution_server"] , ["src/buildtool/execution_api/execution_service", "ac_server"] - , 
["src/buildtool/execution_api/execution_service", "cas_server"] , ["src/buildtool/execution_api/execution_service", "bytestream_server"] , ["src/buildtool/execution_api/execution_service", "capabilities_server"] + , ["src/buildtool/execution_api/execution_service", "cas_server"] + , ["src/buildtool/execution_api/execution_service", "execution_server"] , ["src/buildtool/execution_api/execution_service", "operations_server"] + , ["src/buildtool/execution_api/local", "local"] + , ["src/buildtool/execution_api/serve", "mr_local_api"] + , ["src/buildtool/file_system", "file_system_manager"] + , ["src/buildtool/file_system", "git_repo"] + , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/storage", "config"] + , ["src/buildtool/storage", "storage"] ] } , "target_service": @@ -84,47 +91,48 @@ , "srcs": ["target.cpp"] , "proto": ["just_serve_proto"] , "deps": - [ ["src/buildtool/logging", "logging"] - , ["@", "gsl", "", "gsl"] + [ ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "common"] , ["src/buildtool/common/remote", "remote_common"] , ["src/buildtool/execution_api/common", "api_bundle"] , ["src/buildtool/execution_api/common", "common"] , ["src/buildtool/execution_api/local", "context"] , ["src/buildtool/execution_api/remote", "config"] , ["src/buildtool/execution_api/remote", "context"] + , ["src/buildtool/logging", "logging"] , ["src/buildtool/serve_api/remote", "config"] + , ["src/buildtool/serve_api/remote", "serve_api"] , ["src/utils/cpp", "expected"] ] , "stage": ["src", "buildtool", "serve_api", "serve_service"] , "private-deps": - [ ["@", "fmt", "", "fmt"] + [ "target_utils" + , ["@", "fmt", "", "fmt"] , ["@", "json", "", "json"] - , "target_utils" , ["src/buildtool/build_engine/base_maps", "entity_name"] , ["src/buildtool/build_engine/base_maps", "entity_name_data"] , ["src/buildtool/build_engine/expression", "expression"] , ["src/buildtool/build_engine/expression", "expression_ptr_interface"] , ["src/buildtool/build_engine/target_map", 
"configured_target"] , ["src/buildtool/build_engine/target_map", "result_map"] - , ["src/buildtool/common/remote", "remote_common"] + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common", "config"] , ["src/buildtool/common/remote", "retry_config"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/execution_engine/executor", "context"] , ["src/buildtool/file_system", "file_system_manager"] + , ["src/buildtool/file_system", "object_type"] , ["src/buildtool/graph_traverser", "graph_traverser"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/main", "analyse"] + , ["src/buildtool/main", "analyse_context"] , ["src/buildtool/main", "build_utils"] , ["src/buildtool/multithreading", "task_system"] - , ["src/buildtool/progress_reporting", "progress_reporter"] - , ["src/buildtool/common", "common"] - , ["src/buildtool/file_system", "object_type"] - , ["src/utils/cpp", "verify_hash"] , ["src/buildtool/progress_reporting", "progress"] - , ["src/buildtool/main", "analyse_context"] - , ["src/buildtool/common", "config"] - , ["src/buildtool/serve_api/remote", "serve_api"] + , ["src/buildtool/progress_reporting", "progress_reporter"] , ["src/buildtool/storage", "backend_description"] , ["src/buildtool/storage", "repository_garbage_collector"] + , ["src/buildtool/storage", "storage"] ] } , "configuration_service": @@ -134,9 +142,12 @@ , "srcs": ["configuration.cpp"] , "proto": ["just_serve_proto"] , "deps": - [["@", "gsl", "", "gsl"], ["src/buildtool/execution_api/remote", "config"]] + [ ["@", "gsl", "", "gsl"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/execution_api/remote", "config"] + ] , "stage": ["src", "buildtool", "serve_api", "serve_service"] - , "private-deps": [["src/buildtool/compatibility", "compatibility"]] + , "private-deps": [["src/buildtool/common", "protocol_traits"]] } , "target_utils": { "type": ["@", "rules", "CC", "library"] @@ -145,8 +156,8 @@ , "srcs": ["target_utils.cpp"] , "deps": [ 
["@", "gsl", "", "gsl"] - , ["src/buildtool/common/remote", "remote_common"] , ["src/buildtool/common", "config"] + , ["src/buildtool/common/remote", "remote_common"] , ["src/buildtool/logging", "logging"] , ["src/buildtool/serve_api/remote", "config"] , ["src/buildtool/storage", "config"] diff --git a/src/buildtool/serve_api/serve_service/configuration.cpp b/src/buildtool/serve_api/serve_service/configuration.cpp index a5bd76557..86cf14a70 100644 --- a/src/buildtool/serve_api/serve_service/configuration.cpp +++ b/src/buildtool/serve_api/serve_service/configuration.cpp @@ -16,7 +16,7 @@ #include "src/buildtool/serve_api/serve_service/configuration.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" +#include "src/buildtool/common/protocol_traits.hpp" auto ConfigurationService::RemoteExecutionEndpoint( ::grpc::ServerContext* /*context*/, @@ -33,7 +33,7 @@ auto ConfigurationService::Compatibility( const ::justbuild::just_serve::CompatibilityRequest* /*request*/, ::justbuild::just_serve::CompatibilityResponse* response) -> ::grpc::Status { - response->set_compatible(Compatibility::IsCompatible()); + response->set_compatible(not ProtocolTraits::IsNative(hash_type_)); return ::grpc::Status::OK; } diff --git a/src/buildtool/serve_api/serve_service/configuration.hpp b/src/buildtool/serve_api/serve_service/configuration.hpp index b374148af..90dceab25 100644 --- a/src/buildtool/serve_api/serve_service/configuration.hpp +++ b/src/buildtool/serve_api/serve_service/configuration.hpp @@ -17,6 +17,7 @@ #include "gsl/gsl" #include "justbuild/just_serve/just_serve.grpc.pb.h" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/remote/config.hpp" // This service can be used by the client to double-check the server @@ -25,9 +26,10 @@ class ConfigurationService final : public justbuild::just_serve::Configuration::Service { public: explicit ConfigurationService( + HashFunction::Type hash_type, gsl::not_null const& remote_config) noexcept 
- : remote_config_{*remote_config} {}; + : hash_type_{hash_type}, remote_config_{*remote_config} {}; // Returns the address of the associated remote endpoint, if set, // or an empty string signaling that the serve endpoint acts also @@ -51,6 +53,7 @@ class ConfigurationService final -> ::grpc::Status override; private: + HashFunction::Type hash_type_; RemoteExecutionConfig const& remote_config_; }; diff --git a/src/buildtool/serve_api/serve_service/just_serve.proto b/src/buildtool/serve_api/serve_service/just_serve.proto index 24b5cd981..5e52ce032 100644 --- a/src/buildtool/serve_api/serve_service/just_serve.proto +++ b/src/buildtool/serve_api/serve_service/just_serve.proto @@ -56,12 +56,17 @@ message ServeCommitTreeResponse { // If the status has a code `OK` or `SYNC_ERROR`, the tree is correct. // For any other value, the `tree` field is not set. ServeCommitTreeStatus status = 2; + + // The digest of the requested tree, which can be used to retrieve it from + // the associated remote-execution endpoint CAS, if tree was uploaded to the + // remote-execution endpoint. + build.bazel.remote.execution.v2.Digest digest = 3; } // A request message for // [SourceTree.ServeArchiveTree][justbuild.just_serve.SourceTree.ServeArchiveTree]. message ServeArchiveTreeRequest { - // The git blob identifier of the archive. + // The Git blob identifier of the archive. string content = 1; enum ArchiveType { @@ -126,6 +131,11 @@ message ServeArchiveTreeResponse { // If the status has a code `OK` or `SYNC_ERROR`, the tree is correct. // For any other value, the `tree` field is not set. ServeArchiveTreeStatus status = 2; + + // The digest of the requested tree, which can be used to retrieve it from + // the associated remote-execution endpoint CAS, if tree was uploaded to the + // remote-execution endpoint. + build.bazel.remote.execution.v2.Digest digest = 3; } // A request message for @@ -136,7 +146,7 @@ message ServeDistdirTreeRequest { // The name of the distfile. 
string name = 1; - // The git blob identifier of the distfile content. + // The Git blob identifier of the distfile content. string content = 2; // Whether the blob should occur executable in the resulting @@ -176,12 +186,17 @@ message ServeDistdirTreeResponse { // If the status has a code `OK` or `SYNC_ERROR`, the tree is correct. // For any other value, the `tree` field is not set. ServeDistdirTreeStatus status = 2; + + // The digest of the requested tree, which can be used to retrieve it from + // the associated remote-execution endpoint CAS, if tree was uploaded to the + // remote-execution endpoint. + build.bazel.remote.execution.v2.Digest digest = 3; } // A request message for // [SourceTree.ServeContent][justbuild.just_serve.SourceTree.ServeContent]. message ServeContentRequest { - // The content git blob identifier. + // The content Git blob identifier. string content = 1; } @@ -204,12 +219,16 @@ message ServeContentResponse { // If the status has a code `OK`, the content blob is in the remote CAS. ServeContentStatus status = 1; + + // The digest of the requested blob, which can be used to retrieve it from + // the associated remote-execution endpoint CAS. + build.bazel.remote.execution.v2.Digest digest = 2; } // A request message for // [SourceTree.ServeTree][justbuild.just_serve.SourceTree.ServeTree]. message ServeTreeRequest { - // The git tree identifier. + // The Git tree identifier. string tree = 1; } @@ -232,12 +251,16 @@ message ServeTreeResponse { // If the status has a code `OK`, the tree is in the remote CAS. ServeTreeStatus status = 1; + + // The digest of the requested tree, which can be used to retrieve it from + // the associated remote-execution endpoint CAS. + build.bazel.remote.execution.v2.Digest digest = 2; } // A request message for // [SourceTree.CheckRootTree][justbuild.just_serve.SourceTree.CheckRootTree]. message CheckRootTreeRequest { - // The git tree identifier. + // The Git tree identifier. 
string tree = 1; } @@ -263,8 +286,11 @@ message CheckRootTreeResponse { // A request message for // [SourceTree.GetRemoteTree][justbuild.just_serve.SourceTree.GetRemoteTree]. message GetRemoteTreeRequest { - // The git tree identifier. - string tree = 1; + reserved 1; // The Git tree identifier in an earlier version of the API. + + // The tree digest, which can be used to retrieve it from the associated + // remote-execution endpoint. + build.bazel.remote.execution.v2.Digest digest = 2; } // A response message for @@ -324,9 +350,9 @@ service SourceTree { // There are no method-specific errors. rpc CheckRootTree(CheckRootTreeRequest) returns (CheckRootTreeResponse) {} - // Retrieve a given Git-tree from the CAS of the associated - // remote-execution endpoint and make it available in a location where this - // serve instance can build against. + // Retrieve a given tree from the CAS of the associated remote-execution + // endpoint and make it available in a location where this serve instance + // can build against. // // There are no method-specific errors. 
rpc GetRemoteTree(GetRemoteTreeRequest) returns (GetRemoteTreeResponse) {} diff --git a/src/buildtool/serve_api/serve_service/serve_server_implementation.cpp b/src/buildtool/serve_api/serve_service/serve_server_implementation.cpp index 3edf04e03..88fad61ab 100644 --- a/src/buildtool/serve_api/serve_service/serve_server_implementation.cpp +++ b/src/buildtool/serve_api/serve_service/serve_server_implementation.cpp @@ -29,20 +29,24 @@ #include "fmt/core.h" #include "grpcpp/grpcpp.h" #include "nlohmann/json.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/common/remote/port.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" #include "src/buildtool/execution_api/execution_service/ac_server.hpp" #include "src/buildtool/execution_api/execution_service/bytestream_server.hpp" #include "src/buildtool/execution_api/execution_service/capabilities_server.hpp" #include "src/buildtool/execution_api/execution_service/cas_server.hpp" #include "src/buildtool/execution_api/execution_service/execution_server.hpp" #include "src/buildtool/execution_api/execution_service/operations_server.hpp" +#include "src/buildtool/execution_api/local/local_api.hpp" +#include "src/buildtool/execution_api/serve/mr_local_api.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/file_system/git_repo.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/serve_api/serve_service/configuration.hpp" #include "src/buildtool/serve_api/serve_service/source_tree.hpp" #include "src/buildtool/serve_api/serve_service/target.hpp" +#include "src/buildtool/storage/config.hpp" +#include "src/buildtool/storage/storage.hpp" namespace { template @@ -112,14 +116,68 @@ auto ServeServerImpl::Run( return false; } - SourceTreeService sts{&serve_config, local_context, &apis}; + auto const hash_type = + local_context->storage_config->hash_function.GetType(); + + // TargetService and ConfigurationService use the default apis, 
which know + // how to dispatch builds. TargetService ts{&serve_config, local_context, remote_context, &apis, serve ? &*serve : nullptr}; - ConfigurationService cs{remote_context->exec_config}; + ConfigurationService cs{hash_type, remote_context->exec_config}; + + // For the SourceTreeService we need to always have access to a native + // storage. In compatible mode, this requires creating a second local + // context, as the default one is compatible. + std::unique_ptr secondary_storage_config = nullptr; + std::unique_ptr secondary_storage = nullptr; + std::unique_ptr secondary_local_context = nullptr; + IExecutionApi::Ptr secondary_local_api = nullptr; + auto const is_compat = not ProtocolTraits::IsNative(hash_type); + if (is_compat) { + auto config = + StorageConfig::Builder{} + .SetBuildRoot(local_context->storage_config->build_root) + .SetHashType(HashFunction::Type::GitSHA1) + .Build(); + if (not config) { + Logger::Log(LogLevel::Error, config.error()); + return false; + } + secondary_storage_config = + std::make_unique(*std::move(config)); + secondary_storage = std::make_unique( + Storage::Create(&*secondary_storage_config)); + secondary_local_context = std::make_unique( + LocalContext{.exec_config = local_context->exec_config, + .storage_config = &*secondary_storage_config, + .storage = &*secondary_storage}); + secondary_local_api = + std::make_shared(&*secondary_local_context); + } + + // setup the overall local api, aware of compatibility + IExecutionApi::Ptr mr_local_api = std::make_shared( + is_compat ? &*secondary_local_context : local_context, + is_compat ? &*secondary_local_api : &*apis.local, + is_compat ? &*local_context : nullptr, + is_compat ? &*apis.local : nullptr); + // setup the apis to pass to SourceTreeService + auto const mr_apis = ApiBundle{.hash_function = apis.hash_function, + .local = mr_local_api, + .remote = apis.remote}; + + SourceTreeService sts{ + &serve_config, + &mr_apis, + is_compat ? 
&*secondary_local_context + : local_context, // native_context + is_compat ? &*local_context : nullptr // compat_context + }; + // set up the server grpc::ServerBuilder builder; builder.RegisterService(&sts); @@ -133,8 +191,8 @@ auto ServeServerImpl::Run( [[maybe_unused]] ActionCacheServiceImpl ac{local_context}; [[maybe_unused]] CASServiceImpl cas{local_context}; [[maybe_unused]] BytestreamServiceImpl b{local_context}; - [[maybe_unused]] CapabilitiesServiceImpl cap{}; - [[maybe_unused]] OperarationsServiceImpl op{&es.GetOpCache()}; + [[maybe_unused]] CapabilitiesServiceImpl cap{hash_type}; + [[maybe_unused]] OperationsServiceImpl op{&es.GetOpCache()}; if (with_execute) { builder.RegisterService(&es) .RegisterService(&ac) @@ -186,11 +244,11 @@ auto ServeServerImpl::Run( auto const& info_str = nlohmann::to_string(info); Logger::Log(LogLevel::Info, - fmt::format("{}serve{} service{} started: {}", - Compatibility::IsCompatible() ? "compatible " : "", - with_execute ? " and execute" : "", - with_execute ? "s" : "", - info_str)); + "{}serve{} service{} started: {}", + ProtocolTraits::IsNative(hash_type) ? "" : "compatible ", + with_execute ? " and execute" : "", + with_execute ? 
"s" : "", + info_str); if (not info_file_.empty()) { if (not TryWrite(info_file_, info_str)) { diff --git a/src/buildtool/serve_api/serve_service/serve_server_implementation.hpp b/src/buildtool/serve_api/serve_service/serve_server_implementation.hpp index 4e169696e..b272628a9 100644 --- a/src/buildtool/serve_api/serve_service/serve_server_implementation.hpp +++ b/src/buildtool/serve_api/serve_service/serve_server_implementation.hpp @@ -63,8 +63,8 @@ class ServeServerImpl final { std::string interface_{"127.0.0.1"}; int port_{0}; - std::string info_file_{}; - std::string pid_file_{}; + std::string info_file_; + std::string pid_file_; }; #endif // SERVE_SERVER_IMPLEMENTATION_HPP diff --git a/src/buildtool/serve_api/serve_service/source_tree.cpp b/src/buildtool/serve_api/serve_service/source_tree.cpp index 8b049691f..560d96d34 100644 --- a/src/buildtool/serve_api/serve_service/source_tree.cpp +++ b/src/buildtool/serve_api/serve_service/source_tree.cpp @@ -23,10 +23,11 @@ #include "fmt/core.h" #include "src/buildtool/common/artifact.hpp" #include "src/buildtool/common/artifact_digest.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" -#include "src/buildtool/compatibility/native_support.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/crypto/hash_function.hpp" -#include "src/buildtool/execution_api/git/git_api.hpp" +#include "src/buildtool/execution_api/serve/mr_git_api.hpp" +#include "src/buildtool/execution_api/serve/utils.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/file_system/git_repo.hpp" #include "src/buildtool/logging/log_level.hpp" @@ -35,6 +36,7 @@ #include "src/buildtool/storage/garbage_collector.hpp" #include "src/buildtool/storage/repository_garbage_collector.hpp" #include "src/utils/archive/archive_ops.hpp" +#include "src/utils/cpp/expected.hpp" namespace { @@ -199,7 +201,8 @@ auto 
SourceTreeService::ServeCommitTree( ::grpc::ServerContext* /* context */, const ::justbuild::just_serve::ServeCommitTreeRequest* request, ServeCommitTreeResponse* response) -> ::grpc::Status { - auto repo_lock = RepositoryGarbageCollector::SharedLock(storage_config_); + auto repo_lock = RepositoryGarbageCollector::SharedLock( + *native_context_->storage_config); if (not repo_lock) { logger_->Emit(LogLevel::Error, "Could not acquire repo gc SharedLock"); response->set_status(ServeCommitTreeResponse::INTERNAL_ERROR); @@ -210,16 +213,20 @@ auto SourceTreeService::ServeCommitTree( auto const& subdir{request->subdir()}; // try in local build root Git cache auto res = GetSubtreeFromCommit( - storage_config_.GitRoot(), commit, subdir, logger_); + native_context_->storage_config->GitRoot(), commit, subdir, logger_); if (res) { - auto tree_id = *std::move(res); + auto const tree_id = *std::move(res); auto status = ServeCommitTreeResponse::OK; if (request->sync_tree()) { status = SyncGitEntryToCas( - tree_id, storage_config_.GitRoot()); + tree_id, native_context_->storage_config->GitRoot()); + if (status == ServeCommitTreeResponse::OK) { + status = SetDigestInResponse( + response, tree_id, /*is_tree=*/true, /*from_git=*/true); + } } - *(response->mutable_tree()) = std::move(tree_id); + *(response->mutable_tree()) = tree_id; response->set_status(status); return ::grpc::Status::OK; } @@ -230,7 +237,7 @@ auto SourceTreeService::ServeCommitTree( "repository {}", subdir, commit, - storage_config_.GitRoot().string()); + native_context_->storage_config->GitRoot().string()); response->set_status(ServeCommitTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } @@ -238,14 +245,18 @@ auto SourceTreeService::ServeCommitTree( for (auto const& path : serve_config_.known_repositories) { auto res = GetSubtreeFromCommit(path, commit, subdir, logger_); if (res) { - auto tree_id = *std::move(res); + auto const tree_id = *std::move(res); auto status = ServeCommitTreeResponse::OK; if 
(request->sync_tree()) { status = SyncGitEntryToCas(tree_id, path); + if (status == ServeCommitTreeResponse::OK) { + status = SetDigestInResponse( + response, tree_id, /*is_tree=*/true, /*from_git=*/true); + } } - *(response->mutable_tree()) = std::move(tree_id); + *(response->mutable_tree()) = tree_id; response->set_status(status); return ::grpc::Status::OK; } @@ -275,6 +286,10 @@ auto SourceTreeService::SyncArchive(std::string const& tree_id, if (sync_tree) { status = SyncGitEntryToCas( tree_id, repo_path); + if (status == ServeArchiveTreeResponse::OK) { + status = SetDigestInResponse( + response, tree_id, /*is_tree=*/true, /*from_git=*/true); + } } *(response->mutable_tree()) = tree_id; response->set_status(status); @@ -286,7 +301,26 @@ auto SourceTreeService::SyncGitEntryToCas( std::string const& object_hash, std::filesystem::path const& repo_path) const noexcept -> std::remove_cvref_t { - if (IsTreeObject(kType) and Compatibility::IsCompatible()) { + // get gc locks for the local storages + auto native_lock = + GarbageCollector::SharedLock(*native_context_->storage_config); + if (not native_lock) { + logger_->Emit(LogLevel::Error, "Could not acquire gc SharedLock"); + return TResponse::INTERNAL_ERROR; + } + std::optional compat_lock = std::nullopt; + if (compat_context_ != nullptr) { + compat_lock = + GarbageCollector::SharedLock(*compat_context_->storage_config); + if (not compat_lock) { + logger_->Emit(LogLevel::Error, "Could not acquire gc SharedLock"); + return TResponse::INTERNAL_ERROR; + } + } + + auto const hash_type = + native_context_->storage_config->hash_function.GetType(); + if (IsTreeObject(kType) and not ProtocolTraits::IsTreeAllowed(hash_type)) { logger_->Emit(LogLevel::Error, "Cannot sync tree {} from repository {} with " "the remote in compatible mode", @@ -295,17 +329,28 @@ auto SourceTreeService::SyncGitEntryToCas( return TResponse::SYNC_ERROR; } - auto digest = ArtifactDigest{object_hash, 0, IsTreeObject(kType)}; auto repo = 
RepositoryConfig{}; if (not repo.SetGitCAS(repo_path)) { logger_->Emit( LogLevel::Error, "Failed to SetGitCAS at {}", repo_path.string()); return TResponse::INTERNAL_ERROR; } + auto const digest = ArtifactDigestFactory::Create( + hash_type, object_hash, 0, IsTreeObject(kType)); + if (not digest) { + logger_->Emit(LogLevel::Error, "SyncGitEntryToCas: {}", digest.error()); + return TResponse::INTERNAL_ERROR; + } - auto git_api = GitApi{&repo}; + auto const is_compat = compat_context_ != nullptr; + auto git_api = + MRGitApi{&repo, + native_context_->storage_config, + is_compat ? &*compat_context_->storage_config : nullptr, + is_compat ? &*compat_context_->storage : nullptr, + is_compat ? &*apis_.local : nullptr}; if (not git_api.RetrieveToCas( - {Artifact::ObjectInfo{.digest = digest, .type = kType}}, + {Artifact::ObjectInfo{.digest = *digest, .type = kType}}, *apis_.remote)) { logger_->Emit(LogLevel::Error, "Failed to sync object {} from repository {}", @@ -316,6 +361,79 @@ auto SourceTreeService::SyncGitEntryToCas( return TResponse::OK; } +template +auto SourceTreeService::SetDigestInResponse( + gsl::not_null const& response, + std::string const& object_hash, + bool is_tree, + bool from_git) const noexcept + -> std::remove_cvref_t { + // set digest in response + auto native_digest = ArtifactDigestFactory::Create( + native_context_->storage_config->hash_function.GetType(), + object_hash, + /*size is unknown*/ 0, + is_tree); + if (not native_digest) { + logger_->Emit(LogLevel::Error, + "SetDigestInResponse: {}", + std::move(native_digest).error()); + return TResponse::INTERNAL_ERROR; + } + // in native mode, set the native digest in response + if (ProtocolTraits::IsNative(apis_.hash_function.GetType())) { + *(response->mutable_digest()) = + ArtifactDigestFactory::ToBazel(*std::move(native_digest)); + } + else { + // in compatible mode, we need to respond with a compatible digest + if (compat_context_ == nullptr) { + // sanity check + logger_->Emit(LogLevel::Error, + 
"Compatible storage not available as required"); + return TResponse::INTERNAL_ERROR; + } + + // get gc locks for the local storages + auto native_lock = + GarbageCollector::SharedLock(*native_context_->storage_config); + if (not native_lock) { + logger_->Emit(LogLevel::Error, "Could not acquire gc SharedLock"); + return TResponse::INTERNAL_ERROR; + } + auto compat_lock = + GarbageCollector::SharedLock(*compat_context_->storage_config); + if (not compat_lock) { + logger_->Emit(LogLevel::Error, "Could not acquire gc SharedLock"); + return TResponse::INTERNAL_ERROR; + } + + // get the compatible digest from the mapping that was created during + // upload from Git cache + auto const cached_obj = + MRApiUtils::ReadRehashedDigest(*native_digest, + *native_context_->storage_config, + *compat_context_->storage_config, + from_git); + if (not cached_obj) { + logger_->Emit( + LogLevel::Error, "SetDigestInResponse: {}", cached_obj.error()); + return TResponse::INTERNAL_ERROR; + } + if (not *cached_obj) { + logger_->Emit( + LogLevel::Error, + "Cached compatible object for native digest {} not found", + native_digest->hash()); + return TResponse::INTERNAL_ERROR; + } + // set compatible digest in response + *(response->mutable_digest()) = + ArtifactDigestFactory::ToBazel(cached_obj->value().digest); + } + return TResponse::OK; +} + auto SourceTreeService::ResolveContentTree( std::string const& tree_id, std::filesystem::path const& repo_path, @@ -326,7 +444,7 @@ auto SourceTreeService::ResolveContentTree( if (resolve_special) { // get the resolved tree auto tree_id_file = StorageUtils::GetResolvedTreeIDFile( - storage_config_, tree_id, *resolve_special); + *native_context_->storage_config, tree_id, *resolve_special); if (FileSystemManager::Exists(tree_id_file)) { // read resolved tree id auto resolved_tree_id = FileSystemManager::ReadFile(tree_id_file); @@ -341,11 +459,12 @@ auto SourceTreeService::ResolveContentTree( *resolved_tree_id, repo_path, sync_tree, response); } // 
resolve tree; target repository is always the Git cache - auto target_cas = GitCAS::Open(storage_config_.GitRoot()); + auto target_cas = + GitCAS::Open(native_context_->storage_config->GitRoot()); if (not target_cas) { logger_->Emit(LogLevel::Error, "Failed to open Git ODB at {}", - storage_config_.GitRoot().string()); + native_context_->storage_config->GitRoot().string()); response->set_status(ServeArchiveTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } @@ -408,7 +527,7 @@ auto SourceTreeService::ResolveContentTree( // keep tree alive in the Git cache via a tagged commit auto wrapped_logger = std::make_shared( [logger = logger_, - storage_config = &storage_config_, + storage_config = native_context_->storage_config, resolved_tree](auto const& msg, bool fatal) { if (fatal) { logger->Emit(LogLevel::Error, @@ -422,11 +541,13 @@ auto SourceTreeService::ResolveContentTree( // this is a non-thread-safe Git operation, so it must be guarded! std::shared_lock slock{mutex_}; // open real repository at Git CAS location - auto git_repo = GitRepo::Open(storage_config_.GitRoot()); + auto git_repo = + GitRepo::Open(native_context_->storage_config->GitRoot()); if (not git_repo) { - logger_->Emit(LogLevel::Error, - "Failed to open Git CAS repository {}", - storage_config_.GitRoot().string()); + logger_->Emit( + LogLevel::Error, + "Failed to open Git CAS repository {}", + native_context_->storage_config->GitRoot().string()); response->set_status(ServeArchiveTreeResponse::RESOLVE_ERROR); return ::grpc::Status::OK; } @@ -454,11 +575,12 @@ auto SourceTreeService::ResolveContentTree( } auto SourceTreeService::CommonImportToGit( - std::filesystem::path const& content_path, + std::filesystem::path const& root_path, std::string const& commit_message) -> expected { // the repository path that imports the content must be separate from the // content path, to avoid polluting the entries - auto tmp_dir = storage_config_.CreateTypedTmpDir("import-repo"); + auto tmp_dir = + 
native_context_->storage_config->CreateTypedTmpDir("import-repo"); if (not tmp_dir) { return unexpected{ std::string("Failed to create tmp path for import repository")}; @@ -474,36 +596,40 @@ auto SourceTreeService::CommonImportToGit( // wrap logger for GitRepo call std::string err; auto wrapped_logger = std::make_shared( - [content_path, repo_path, &err](auto const& msg, bool fatal) { + [&root_path, &repo_path, &err](auto const& msg, bool fatal) { if (fatal) { err = fmt::format( "While committing directory {} in repository {}:\n{}", - content_path.string(), + root_path.string(), repo_path.string(), msg); } }); // stage and commit all auto commit_hash = - git_repo->CommitDirectory(content_path, commit_message, wrapped_logger); + git_repo->CommitDirectory(root_path, commit_message, wrapped_logger); if (not commit_hash) { return unexpected{err}; } // open the Git CAS repo - auto just_git_cas = GitCAS::Open(storage_config_.GitRoot()); + auto just_git_cas = + GitCAS::Open(native_context_->storage_config->GitRoot()); if (not just_git_cas) { - return unexpected{fmt::format("Failed to open Git ODB at {}", - storage_config_.GitRoot().string())}; + return unexpected{ + fmt::format("Failed to open Git ODB at {}", + native_context_->storage_config->GitRoot().string())}; } auto just_git_repo = GitRepo::Open(just_git_cas); if (not just_git_repo) { - return unexpected{fmt::format("Failed to open Git repository {}", - storage_config_.GitRoot().string())}; + return unexpected{ + fmt::format("Failed to open Git repository {}", + native_context_->storage_config->GitRoot().string())}; } // wrap logger for GitRepo call err.clear(); wrapped_logger = std::make_shared( - [&err, storage_config = &storage_config_](auto const& msg, bool fatal) { + [&err, storage_config = native_context_->storage_config]( + auto const& msg, bool fatal) { if (fatal) { err = fmt::format("While fetching in repository {}:\n{}", storage_config->GitRoot().string(), @@ -512,17 +638,18 @@ auto 
SourceTreeService::CommonImportToGit( }); // fetch the new commit into the Git CAS via tmp directory; the call is // thread-safe, so it needs no guarding - if (not just_git_repo->LocalFetchViaTmpRepo(storage_config_, - repo_path.string(), - /*branch=*/std::nullopt, - wrapped_logger)) { + if (not just_git_repo->LocalFetchViaTmpRepo( + *native_context_->storage_config, + repo_path.string(), + /*branch=*/std::nullopt, + wrapped_logger)) { return unexpected{err}; } // wrap logger for GitRepo call err.clear(); wrapped_logger = std::make_shared( - [commit_hash, storage_config = &storage_config_, &err](auto const& msg, - bool fatal) { + [commit_hash, storage_config = native_context_->storage_config, &err]( + auto const& msg, bool fatal) { if (fatal) { err = fmt::format("While tagging commit {} in repository {}:\n{}", @@ -536,11 +663,12 @@ auto SourceTreeService::CommonImportToGit( // this is a non-thread-safe Git operation, so it must be guarded! std::shared_lock slock{mutex_}; // open real repository at Git CAS location - auto git_repo = GitRepo::Open(storage_config_.GitRoot()); + auto git_repo = + GitRepo::Open(native_context_->storage_config->GitRoot()); if (not git_repo) { - return unexpected{ - fmt::format("Failed to open Git CAS repository {}", - storage_config_.GitRoot().string())}; + return unexpected{fmt::format( + "Failed to open Git CAS repository {}", + native_context_->storage_config->GitRoot().string())}; } // Important: message must be consistent with just-mr! 
if (not git_repo->KeepTag(*commit_hash, @@ -598,11 +726,12 @@ auto SourceTreeService::ArchiveImportToGit( return ::grpc::Status::OK; } // open the Git CAS repo - auto just_git_cas = GitCAS::Open(storage_config_.GitRoot()); + auto just_git_cas = + GitCAS::Open(native_context_->storage_config->GitRoot()); if (not just_git_cas) { logger_->Emit(LogLevel::Error, "Failed to open Git ODB at {}", - storage_config_.GitRoot().string()); + native_context_->storage_config->GitRoot().string()); response->set_status(ServeArchiveTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } @@ -610,7 +739,7 @@ auto SourceTreeService::ArchiveImportToGit( if (not just_git_repo) { logger_->Emit(LogLevel::Error, "Failed to open Git repository {}", - storage_config_.GitRoot().string()); + native_context_->storage_config->GitRoot().string()); response->set_status(ServeArchiveTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } @@ -634,7 +763,7 @@ auto SourceTreeService::ArchiveImportToGit( return ::grpc::Status::OK; } return ResolveContentTree(*subtree_id, - storage_config_.GitRoot(), + native_context_->storage_config->GitRoot(), /*repo_is_git_cache=*/true, resolve_special, sync_tree, @@ -672,7 +801,8 @@ auto SourceTreeService::ServeArchiveTree( ::grpc::ServerContext* /* context */, const ::justbuild::just_serve::ServeArchiveTreeRequest* request, ServeArchiveTreeResponse* response) -> ::grpc::Status { - auto repo_lock = RepositoryGarbageCollector::SharedLock(storage_config_); + auto repo_lock = RepositoryGarbageCollector::SharedLock( + *native_context_->storage_config); if (not repo_lock) { logger_->Emit(LogLevel::Error, "Could not acquire repo gc SharedLock"); response->set_status(ServeArchiveTreeResponse::INTERNAL_ERROR); @@ -687,7 +817,7 @@ auto SourceTreeService::ServeArchiveTree( // check for archive_tree_id_file auto archive_tree_id_file = StorageUtils::GetArchiveTreeIDFile( - storage_config_, archive_type, content); + *native_context_->storage_config, archive_type, content); 
if (FileSystemManager::Exists(archive_tree_id_file)) { // read archive_tree_id from file tree_id_file auto archive_tree_id = @@ -700,21 +830,25 @@ auto SourceTreeService::ServeArchiveTree( return ::grpc::Status::OK; } // check local build root Git cache - auto res = GetSubtreeFromTree( - storage_config_.GitRoot(), *archive_tree_id, subdir, logger_); + auto res = + GetSubtreeFromTree(native_context_->storage_config->GitRoot(), + *archive_tree_id, + subdir, + logger_); if (res) { - return ResolveContentTree(*res, // tree_id - storage_config_.GitRoot(), - /*repo_is_git_cache=*/true, - resolve_special, - request->sync_tree(), - response); + return ResolveContentTree( + *res, // tree_id + native_context_->storage_config->GitRoot(), + /*repo_is_git_cache=*/true, + resolve_special, + request->sync_tree(), + response); } // check for fatal error if (res.error() == GitLookupError::Fatal) { logger_->Emit(LogLevel::Error, "Failed to open repository {}", - storage_config_.GitRoot().string()); + native_context_->storage_config->GitRoot().string()); response->set_status(ServeArchiveTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } @@ -747,30 +881,38 @@ auto SourceTreeService::ServeArchiveTree( response->set_status(ServeArchiveTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } - // acquire lock for CAS - auto lock = GarbageCollector::SharedLock(storage_config_); + // acquire lock for native CAS + auto lock = GarbageCollector::SharedLock(*native_context_->storage_config); if (not lock) { logger_->Emit(LogLevel::Error, "Could not acquire gc SharedLock"); response->set_status(ServeArchiveTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } - // check if content is in local CAS already - auto digest = ArtifactDigest(content, 0, false); - auto const& cas = storage_.CAS(); - auto content_cas_path = cas.BlobPath(digest, /*is_executable=*/false); + // check if content is in native local CAS already + auto const digest = ArtifactDigestFactory::Create( + 
native_context_->storage_config->hash_function.GetType(), + content, + 0, + /*is_tree=*/false); + auto const& native_cas = native_context_->storage->CAS(); + auto content_cas_path = + digest ? native_cas.BlobPath(*digest, /*is_executable=*/false) + : std::nullopt; if (not content_cas_path) { // check if content blob is in Git cache - auto res = GetBlobFromRepo(storage_config_.GitRoot(), content, logger_); + auto res = GetBlobFromRepo( + native_context_->storage_config->GitRoot(), content, logger_); if (res) { - // add to CAS - content_cas_path = StorageUtils::AddToCAS(storage_, *res); + // add to native CAS + content_cas_path = + StorageUtils::AddToCAS(*native_context_->storage, *res); } if (res.error() == GitLookupError::Fatal) { logger_->Emit( LogLevel::Error, "Failed while trying to retrieve content {} from repository {}", content, - storage_config_.GitRoot().string()); + native_context_->storage_config->GitRoot().string()); response->set_status(ServeArchiveTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } @@ -780,8 +922,9 @@ auto SourceTreeService::ServeArchiveTree( for (auto const& path : serve_config_.known_repositories) { auto res = GetBlobFromRepo(path, content, logger_); if (res) { - // add to CAS - content_cas_path = StorageUtils::AddToCAS(storage_, *res); + // add to native CAS + content_cas_path = + StorageUtils::AddToCAS(*native_context_->storage, *res); if (content_cas_path) { break; } @@ -797,29 +940,32 @@ auto SourceTreeService::ServeArchiveTree( } } } - if (not content_cas_path) { + if (digest and not content_cas_path) { // try to retrieve it from remote CAS - if (not(apis_.remote->IsAvailable(digest) and + if (not(apis_.remote->IsAvailable(*digest) and apis_.remote->RetrieveToCas( - {Artifact::ObjectInfo{.digest = digest, + {Artifact::ObjectInfo{.digest = *digest, .type = ObjectType::File}}, *apis_.local))) { // content could not be found response->set_status(ServeArchiveTreeResponse::NOT_FOUND); return ::grpc::Status::OK; } - // content 
should now be in CAS - content_cas_path = cas.BlobPath(digest, /*is_executable=*/false); + // content should now be in native CAS + content_cas_path = + native_cas.BlobPath(*digest, /*is_executable=*/false); if (not content_cas_path) { - logger_->Emit(LogLevel::Error, - "Retrieving content {} from CAS failed unexpectedly", - content); + logger_->Emit( + LogLevel::Error, + "Retrieving content {} from native CAS failed unexpectedly", + content); response->set_status(ServeArchiveTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } } // extract archive - auto tmp_dir = storage_config_.CreateTypedTmpDir(archive_type); + auto tmp_dir = + native_context_->storage_config->CreateTypedTmpDir(archive_type); if (not tmp_dir) { logger_->Emit( LogLevel::Error, @@ -833,7 +979,7 @@ auto SourceTreeService::ServeArchiveTree( ExtractArchive(*content_cas_path, archive_type, tmp_dir->GetPath()); if (res != std::nullopt) { logger_->Emit(LogLevel::Error, - "Failed to extract archive {} from CAS:\n{}", + "Failed to extract archive {} from native CAS:\n{}", content_cas_path->string(), *res); response->set_status(ServeArchiveTreeResponse::UNPACK_ERROR); @@ -857,7 +1003,8 @@ auto SourceTreeService::DistdirImportToGit( content_list, bool sync_tree, ServeDistdirTreeResponse* response) -> ::grpc::Status { - auto repo_lock = RepositoryGarbageCollector::SharedLock(storage_config_); + auto repo_lock = RepositoryGarbageCollector::SharedLock( + *native_context_->storage_config); if (not repo_lock) { logger_->Emit(LogLevel::Error, "Could not acquire repo gc SharedLock"); response->set_status(ServeDistdirTreeResponse::INTERNAL_ERROR); @@ -865,7 +1012,8 @@ auto SourceTreeService::DistdirImportToGit( } // create tmp directory for the distdir - auto distdir_tmp_dir = storage_config_.CreateTypedTmpDir("distdir"); + auto distdir_tmp_dir = + native_context_->storage_config->CreateTypedTmpDir("distdir"); if (not distdir_tmp_dir) { logger_->Emit(LogLevel::Error, "Failed to create tmp path for distdir 
target {}", @@ -874,15 +1022,22 @@ auto SourceTreeService::DistdirImportToGit( return ::grpc::Status::OK; } auto const& tmp_path = distdir_tmp_dir->GetPath(); - // link the CAS blobs into the tmp dir - auto const& cas = storage_.CAS(); + // link the native CAS blobs into the tmp dir + auto const& native_cas = native_context_->storage->CAS(); if (not std::all_of( content_list.begin(), content_list.end(), - [&cas, tmp_path](auto const& kv) { - auto content_path = cas.BlobPath( - ArtifactDigest(kv.second.first, 0, /*is_tree=*/false), - kv.second.second); + [&native_cas, tmp_path](auto const& kv) { + auto const digest = ArtifactDigestFactory::Create( + native_cas.GetHashFunction().GetType(), + kv.second.first, + 0, + /*is_tree=*/false); + if (not digest) { + return false; + } + auto content_path = + native_cas.BlobPath(*digest, kv.second.second); if (content_path) { return FileSystemManager::CreateFileHardlink( *content_path, // from: cas_path/content_id @@ -892,7 +1047,7 @@ auto SourceTreeService::DistdirImportToGit( return false; })) { logger_->Emit(LogLevel::Error, - "Failed to create links to CAS content {}", + "Failed to create links to native CAS content {}", content_id); response->set_status(ServeDistdirTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; @@ -922,7 +1077,11 @@ auto SourceTreeService::DistdirImportToGit( auto status = ServeDistdirTreeResponse::OK; if (sync_tree) { status = SyncGitEntryToCas( - tree_id, storage_config_.GitRoot()); + tree_id, native_context_->storage_config->GitRoot()); + if (status == ServeDistdirTreeResponse::OK) { + status = SetDigestInResponse( + response, tree_id, /*is_tree=*/true, /*from_git=*/true); + } } // set response on success *(response->mutable_tree()) = std::move(tree_id); @@ -934,8 +1093,8 @@ auto SourceTreeService::ServeDistdirTree( ::grpc::ServerContext* /* context */, const ::justbuild::just_serve::ServeDistdirTreeRequest* request, ServeDistdirTreeResponse* response) -> ::grpc::Status { - // acquire lock for 
CAS - auto lock = GarbageCollector::SharedLock(storage_config_); + // acquire lock for native CAS + auto lock = GarbageCollector::SharedLock(*native_context_->storage_config); if (not lock) { logger_->Emit(LogLevel::Error, "Could not acquire gc SharedLock"); response->set_status(ServeDistdirTreeResponse::INTERNAL_ERROR); @@ -945,56 +1104,64 @@ auto SourceTreeService::ServeDistdirTree( GitRepo::tree_entries_t entries{}; entries.reserve(request->distfiles().size()); - auto const& cas = storage_.CAS(); + auto const& native_cas = native_context_->storage->CAS(); std::unordered_map> content_list{}; content_list.reserve(request->distfiles().size()); + bool const is_native = + ProtocolTraits::IsNative(apis_.hash_function.GetType()); for (auto const& kv : request->distfiles()) { bool blob_found{}; std::string blob_digest; // The digest of the requested distfile, taken // by the hash applicable for our CAS; this // might be different from content, if our CAS - // ist not based on git blob identifiers + // is not based on git blob identifiers // (i.e., if we're not in native mode). 
auto const& content = kv.content(); // check content blob is known - // first check the local CAS itself, provided it uses the same type - // of identifier - if (not Compatibility::IsCompatible()) { - auto digest = ArtifactDigest(content, 0, /*is_tree=*/false); + // first check the native local CAS itself, provided it uses the same + // type of identifier + auto const digest = ArtifactDigestFactory::Create( + native_context_->storage_config->hash_function.GetType(), + content, + 0, + /*is_tree=*/false); + + if (is_native) { blob_found = - static_cast(cas.BlobPath(digest, kv.executable())); + digest and native_cas.BlobPath(*digest, kv.executable()); } if (blob_found) { blob_digest = content; } else { // check local Git cache - auto res = - GetBlobFromRepo(storage_config_.GitRoot(), content, logger_); + auto res = GetBlobFromRepo( + native_context_->storage_config->GitRoot(), content, logger_); if (res) { - // add content to local CAS - auto stored_blob = cas.StoreBlob(*res, kv.executable()); + // add content to native local CAS + auto stored_blob = native_cas.StoreBlob(*res, kv.executable()); if (not stored_blob) { logger_->Emit(LogLevel::Error, "Failed to store content {} from local Git " - "cache to local CAS", + "cache to native local CAS", content); response->set_status( ServeDistdirTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } blob_found = true; - blob_digest = NativeSupport::Unprefix(stored_blob->hash()); + blob_digest = stored_blob->hash(); } else { if (res.error() == GitLookupError::Fatal) { - logger_->Emit(LogLevel::Error, - "Failed while trying to retrieve content {} " - "from repository {}", - content, - storage_config_.GitRoot().string()); + logger_->Emit( + LogLevel::Error, + "Failed while trying to retrieve content {} from " + "repository {}", + content, + native_context_->storage_config->GitRoot().string()); response->set_status( ServeDistdirTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; @@ -1003,21 +1170,22 @@ auto 
SourceTreeService::ServeDistdirTree( for (auto const& path : serve_config_.known_repositories) { auto res = GetBlobFromRepo(path, content, logger_); if (res) { - // add content to local CAS - auto stored_blob = cas.StoreBlob(*res, kv.executable()); + // add content to native local CAS + auto stored_blob = + native_cas.StoreBlob(*res, kv.executable()); if (not stored_blob) { - logger_->Emit(LogLevel::Error, - "Failed to store content {} from " - "known repository {} to local CAS", - path.string(), - content); + logger_->Emit( + LogLevel::Error, + "Failed to store content {} from known " + "repository {} to native local CAS", + path.string(), + content); response->set_status( ServeDistdirTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } blob_found = true; - blob_digest = - NativeSupport::Unprefix(stored_blob->hash()); + blob_digest = stored_blob->hash(); break; } if (res.error() == GitLookupError::Fatal) { @@ -1033,26 +1201,20 @@ auto SourceTreeService::ServeDistdirTree( } } if (not blob_found) { - // Explanation: clang-tidy gets confused by the break in the - // for-loop above into falsely believing that it can reach - // this point with the variable "digest" already moved, so - // we work around this by creating a new variable here - auto digest_clone = - ArtifactDigest(content, 0, /*is_tree=*/false); // check remote CAS - if ((not Compatibility::IsCompatible()) and - apis_.remote->IsAvailable(digest_clone)) { - // retrieve content to local CAS + if (is_native and digest and + apis_.remote->IsAvailable(*digest)) { + // retrieve content to native local CAS if (not apis_.remote->RetrieveToCas( {Artifact::ObjectInfo{ - .digest = digest_clone, + .digest = *digest, .type = kv.executable() ? 
ObjectType::Executable : ObjectType::File}}, *apis_.local)) { logger_->Emit(LogLevel::Error, "Failed to retrieve content {} from " - "remote to local CAS", + "remote to native local CAS", content); response->set_status( ServeDistdirTreeResponse::INTERNAL_ERROR); @@ -1102,22 +1264,22 @@ auto SourceTreeService::ServeDistdirTree( } // get hash from raw_id auto tree_id = ToHexString(tree->first); - // add tree to local CAS - auto tree_digest = cas.StoreTree(tree->second); - if (not tree_digest) { + // add tree to native local CAS + if (not native_cas.StoreTree(tree->second)) { logger_->Emit(LogLevel::Error, - "Failed to store distdir tree {} to local CAS", + "Failed to store distdir tree {} to native local CAS", tree_id); response->set_status(ServeDistdirTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } // check if tree is already in Git cache - auto has_tree = IsTreeInRepo(tree_id, storage_config_.GitRoot(), logger_); + auto has_tree = IsTreeInRepo( + tree_id, native_context_->storage_config->GitRoot(), logger_); if (not has_tree) { logger_->Emit(LogLevel::Error, "Failed while checking for tree {} in repository {}", tree_id, - storage_config_.GitRoot().string()); + native_context_->storage_config->GitRoot().string()); response->set_status(ServeDistdirTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } @@ -1127,7 +1289,11 @@ auto SourceTreeService::ServeDistdirTree( if (request->sync_tree()) { status = SyncGitEntryToCas( - tree_id, storage_config_.GitRoot()); + tree_id, native_context_->storage_config->GitRoot()); + if (status == ServeDistdirTreeResponse::OK) { + status = SetDigestInResponse( + response, tree_id, /*is_tree=*/true, /*from_git=*/true); + } } // set response on success *(response->mutable_tree()) = std::move(tree_id); @@ -1152,6 +1318,10 @@ auto SourceTreeService::ServeDistdirTree( status = SyncGitEntryToCas(tree_id, path); + if (status == ServeDistdirTreeResponse::OK) { + status = SetDigestInResponse( + response, tree_id, 
/*is_tree=*/true, /*from_git=*/true); + } } // set response on success *(response->mutable_tree()) = std::move(tree_id); @@ -1159,7 +1329,7 @@ auto SourceTreeService::ServeDistdirTree( return ::grpc::Status::OK; } } - // otherwise, we import the tree from CAS ourselves + // otherwise, we import the tree from native local CAS ourselves return DistdirImportToGit( tree_id, content_id, content_list, request->sync_tree(), response); } @@ -1169,15 +1339,17 @@ auto SourceTreeService::ServeContent( const ::justbuild::just_serve::ServeContentRequest* request, ServeContentResponse* response) -> ::grpc::Status { auto const& content{request->content()}; + // acquire locks - auto repo_lock = RepositoryGarbageCollector::SharedLock(storage_config_); + auto repo_lock = RepositoryGarbageCollector::SharedLock( + *native_context_->storage_config); if (not repo_lock) { logger_->Emit(LogLevel::Error, "Could not acquire repo gc SharedLock"); response->set_status(ServeContentResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } - auto lock = GarbageCollector::SharedLock(storage_config_); + auto lock = GarbageCollector::SharedLock(*native_context_->storage_config); if (not lock) { logger_->Emit(LogLevel::Error, "Could not acquire gc SharedLock"); response->set_status(ServeContentResponse::INTERNAL_ERROR); @@ -1185,11 +1357,15 @@ auto SourceTreeService::ServeContent( } // check if content blob is in Git cache - auto res = GetBlobFromRepo(storage_config_.GitRoot(), content, logger_); + auto res = GetBlobFromRepo( + native_context_->storage_config->GitRoot(), content, logger_); if (res) { - auto const status = - SyncGitEntryToCas( - content, storage_config_.GitRoot()); + auto status = SyncGitEntryToCas( + content, native_context_->storage_config->GitRoot()); + if (status == ServeContentResponse::OK) { + status = SetDigestInResponse( + response, content, /*is_tree=*/false, /*from_git=*/true); + } response->set_status(status); return ::grpc::Status::OK; } @@ -1197,7 +1373,7 @@ auto 
SourceTreeService::ServeContent( logger_->Emit(LogLevel::Error, "Failed while checking for content {} in repository {}", content, - storage_config_.GitRoot().string()); + native_context_->storage_config->GitRoot().string()); response->set_status(ServeContentResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } @@ -1206,9 +1382,13 @@ auto SourceTreeService::ServeContent( auto res = GetBlobFromRepo(path, content, logger_); if (res) { // upload blob to remote CAS - auto const status = + auto status = SyncGitEntryToCas( content, path); + if (status == ServeContentResponse::OK) { + status = SetDigestInResponse( + response, content, /*is_tree=*/false, /*from_git=*/true); + } response->set_status(status); return ::grpc::Status::OK; } @@ -1223,21 +1403,32 @@ auto SourceTreeService::ServeContent( } } - // check also in the local CAS - auto const digest = ArtifactDigest{content, 0, /*is_tree=*/false}; - if (apis_.local->IsAvailable(digest)) { + // check also in the native local CAS + auto const native_digest = ArtifactDigestFactory::Create( + native_context_->storage_config->hash_function.GetType(), + content, + /*size is unknown*/ 0, + /*is_tree=*/false); + if (not native_digest) { + logger_->Emit(LogLevel::Error, "Failed to create digest object"); + response->set_status(ServeContentResponse::INTERNAL_ERROR); + return ::grpc::Status::OK; + } + if (apis_.local->IsAvailable(*native_digest)) { + // upload blob to remote CAS if (not apis_.local->RetrieveToCas( - {Artifact::ObjectInfo{.digest = digest, + {Artifact::ObjectInfo{.digest = *native_digest, .type = ObjectType::File}}, *apis_.remote)) { logger_->Emit(LogLevel::Error, - "Failed to sync content {} from local CAS", + "Failed to sync content {} from local native CAS", content); response->set_status(ServeContentResponse::SYNC_ERROR); return ::grpc::Status::OK; } - // success! 
- response->set_status(ServeContentResponse::OK); + auto const status = SetDigestInResponse( + response, content, /*is_tree=*/false, /*from_git=*/false); + response->set_status(status); return ::grpc::Status::OK; } // content blob not known @@ -1250,15 +1441,17 @@ auto SourceTreeService::ServeTree( const ::justbuild::just_serve::ServeTreeRequest* request, ServeTreeResponse* response) -> ::grpc::Status { auto const& tree_id{request->tree()}; + // acquire locks - auto repo_lock = RepositoryGarbageCollector::SharedLock(storage_config_); + auto repo_lock = RepositoryGarbageCollector::SharedLock( + *native_context_->storage_config); if (not repo_lock) { logger_->Emit(LogLevel::Error, "Could not acquire repo gc SharedLock"); response->set_status(ServeTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } - auto lock = GarbageCollector::SharedLock(storage_config_); + auto lock = GarbageCollector::SharedLock(*native_context_->storage_config); if (not lock) { logger_->Emit(LogLevel::Error, "Could not acquire gc SharedLock"); response->set_status(ServeTreeResponse::INTERNAL_ERROR); @@ -1266,19 +1459,24 @@ auto SourceTreeService::ServeTree( } // check if tree is in Git cache - auto has_tree = IsTreeInRepo(tree_id, storage_config_.GitRoot(), logger_); + auto has_tree = IsTreeInRepo( + tree_id, native_context_->storage_config->GitRoot(), logger_); if (not has_tree) { logger_->Emit(LogLevel::Error, "Failed while checking for tree {} in repository {}", tree_id, - storage_config_.GitRoot().string()); + native_context_->storage_config->GitRoot().string()); response->set_status(ServeTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } if (*has_tree) { - auto const status = - SyncGitEntryToCas( - tree_id, storage_config_.GitRoot()); + // upload tree to remote CAS + auto status = SyncGitEntryToCas( + tree_id, native_context_->storage_config->GitRoot()); + if (status == ServeTreeResponse::OK) { + status = SetDigestInResponse( + response, tree_id, /*is_tree=*/true, 
/*from_git=*/true); + } response->set_status(status); return ::grpc::Status::OK; } @@ -1294,37 +1492,45 @@ auto SourceTreeService::ServeTree( return ::grpc::Status::OK; } if (*has_tree) { - auto const status = + // upload tree to remote CAS + auto status = SyncGitEntryToCas(tree_id, path); + if (status == ServeTreeResponse::OK) { + status = SetDigestInResponse( + response, tree_id, /*is_tree=*/true, /*from_git=*/true); + } response->set_status(status); return ::grpc::Status::OK; } } - // check also in the local CAS - auto digest = ArtifactDigest{tree_id, 0, /*is_tree=*/true}; - if (apis_.local->IsAvailable(digest)) { - // upload tree to remote CAS; only possible in native mode - if (Compatibility::IsCompatible()) { - logger_->Emit(LogLevel::Error, - "Cannot sync tree {} from local CAS with the remote " - "in compatible mode", - tree_id); - response->set_status(ServeTreeResponse::SYNC_ERROR); - return ::grpc::Status::OK; - } + + // check also in the native local CAS + auto const native_digest = ArtifactDigestFactory::Create( + native_context_->storage_config->hash_function.GetType(), + tree_id, + /*size is unknown*/ 0, + /*is_tree=*/true); + if (not native_digest) { + logger_->Emit(LogLevel::Error, "Failed to create digest object"); + response->set_status(ServeTreeResponse::INTERNAL_ERROR); + return ::grpc::Status::OK; + } + if (apis_.local->IsAvailable(*native_digest)) { + // upload tree to remote CAS + if (not apis_.local->RetrieveToCas( - {Artifact::ObjectInfo{.digest = digest, + {Artifact::ObjectInfo{.digest = *native_digest, .type = ObjectType::Tree}}, *apis_.remote)) { logger_->Emit(LogLevel::Error, - "Failed to sync tree {} from local CAS", + "Failed to sync tree {} from native local CAS", tree_id); response->set_status(ServeTreeResponse::SYNC_ERROR); return ::grpc::Status::OK; } - // success! 
- response->set_status(ServeTreeResponse::OK); + auto const status = SetDigestInResponse( + response, tree_id, /*is_tree=*/true, /*from_git=*/false); + response->set_status(status); return ::grpc::Status::OK; } // tree not known @@ -1338,26 +1544,29 @@ auto SourceTreeService::CheckRootTree( CheckRootTreeResponse* response) -> ::grpc::Status { auto const& tree_id{request->tree()}; // acquire locks - auto repo_lock = RepositoryGarbageCollector::SharedLock(storage_config_); + auto repo_lock = RepositoryGarbageCollector::SharedLock( + *native_context_->storage_config); if (not repo_lock) { logger_->Emit(LogLevel::Error, "Could not acquire repo gc SharedLock"); response->set_status(CheckRootTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } - auto lock = GarbageCollector::SharedLock(storage_config_); + auto lock = GarbageCollector::SharedLock(*native_context_->storage_config); if (not lock) { logger_->Emit(LogLevel::Error, "Could not acquire gc SharedLock"); response->set_status(CheckRootTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } + // check first in the Git cache - auto has_tree = IsTreeInRepo(tree_id, storage_config_.GitRoot(), logger_); + auto has_tree = IsTreeInRepo( + tree_id, native_context_->storage_config->GitRoot(), logger_); if (not has_tree) { logger_->Emit(LogLevel::Error, "Failed while checking for tree {} in repository {}", tree_id, - storage_config_.GitRoot().string()); + native_context_->storage_config->GitRoot().string()); response->set_status(CheckRootTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } @@ -1383,23 +1592,33 @@ auto SourceTreeService::CheckRootTree( return ::grpc::Status::OK; } } - // now check in the local CAS - auto digest = ArtifactDigest{tree_id, 0, /*is_tree=*/true}; - if (auto path = storage_.CAS().TreePath(digest)) { + + // now check in the native local CAS + auto const native_digest = ArtifactDigestFactory::Create( + native_context_->storage_config->hash_function.GetType(), + tree_id, + 0, + 
/*is_tree=*/true); + if (not native_digest) { + logger_->Emit(LogLevel::Error, "Failed to create digest object"); + response->set_status(CheckRootTreeResponse::INTERNAL_ERROR); + return ::grpc::Status::OK; + } + if (native_context_->storage->CAS().TreePath(*native_digest)) { // As we currently build only against roots in Git repositories, we need // to move the tree from CAS to local Git storage - auto tmp_dir = - storage_config_.CreateTypedTmpDir("source-tree-check-root-tree"); + auto tmp_dir = native_context_->storage_config->CreateTypedTmpDir( + "source-tree-check-root-tree"); if (not tmp_dir) { logger_->Emit(LogLevel::Error, "Failed to create tmp directory for copying git-tree " "{} from remote CAS", - digest.hash()); + tree_id); response->set_status(CheckRootTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } if (not apis_.local->RetrieveToPaths( - {Artifact::ObjectInfo{.digest = digest, + {Artifact::ObjectInfo{.digest = *native_digest, .type = ObjectType::Tree}}, {tmp_dir->GetPath()})) { logger_->Emit(LogLevel::Error, @@ -1444,9 +1663,8 @@ auto SourceTreeService::GetRemoteTree( ::grpc::ServerContext* /* context */, const ::justbuild::just_serve::GetRemoteTreeRequest* request, GetRemoteTreeResponse* response) -> ::grpc::Status { - auto const& tree_id{request->tree()}; // acquire locks - auto lock = GarbageCollector::SharedLock(storage_config_); + auto lock = GarbageCollector::SharedLock(*native_context_->storage_config); if (not lock) { logger_->Emit(LogLevel::Error, "Could not acquire gc SharedLock"); response->set_status(GetRemoteTreeResponse::INTERNAL_ERROR); @@ -1454,56 +1672,51 @@ auto SourceTreeService::GetRemoteTree( } // get tree from remote CAS into tmp dir - auto digest = ArtifactDigest{tree_id, 0, /*is_tree=*/true}; - if (not apis_.remote->IsAvailable(digest)) { + auto const remote_digest = ArtifactDigestFactory::FromBazel( + apis_.hash_function.GetType(), request->digest()); + if (not remote_digest or not 
apis_.remote->IsAvailable(*remote_digest)) { logger_->Emit(LogLevel::Error, "Remote CAS does not contain expected tree {}", - tree_id); + request->digest().hash()); response->set_status(GetRemoteTreeResponse::FAILED_PRECONDITION); return ::grpc::Status::OK; } - auto tmp_dir = - storage_config_.CreateTypedTmpDir("source-tree-get-remote-tree"); + auto tmp_dir = native_context_->storage_config->CreateTypedTmpDir( + "source-tree-get-remote-tree"); if (not tmp_dir) { logger_->Emit(LogLevel::Error, "Failed to create tmp directory for copying git-tree {} " "from remote CAS", - digest.hash()); + remote_digest->hash()); response->set_status(GetRemoteTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } if (not apis_.remote->RetrieveToPaths( - {Artifact::ObjectInfo{.digest = digest, .type = ObjectType::Tree}}, + {Artifact::ObjectInfo{.digest = *remote_digest, + .type = ObjectType::Tree}}, {tmp_dir->GetPath()}, &(*apis_.local))) { logger_->Emit(LogLevel::Error, "Failed to retrieve tree {} from remote CAS", - tree_id); + remote_digest->hash()); response->set_status(GetRemoteTreeResponse::FAILED_PRECONDITION); return ::grpc::Status::OK; } // Import from tmp dir to Git cache - auto res = - CommonImportToGit(tmp_dir->GetPath(), - fmt::format("Content of tree {}", tree_id) // message - ); + auto res = CommonImportToGit( + tmp_dir->GetPath(), + fmt::format("Content of tree {}", remote_digest->hash()) // message + ); if (not res) { // report the error logger_->Emit(LogLevel::Error, "{}", res.error()); response->set_status(GetRemoteTreeResponse::INTERNAL_ERROR); return ::grpc::Status::OK; } - auto const& imported_tree_id = *res; - // sanity check - if (imported_tree_id != tree_id) { - logger_->Emit( - LogLevel::Error, - "Unexpected mismatch in imported tree:\nexpected {}, but got {}", - tree_id, - imported_tree_id); - response->set_status(GetRemoteTreeResponse::INTERNAL_ERROR); - return ::grpc::Status::OK; - } + logger_->Emit(LogLevel::Debug, + "GetRemoteTree: imported tree {} 
to Git as {}", + remote_digest->hash(), + *res); // success! response->set_status(GetRemoteTreeResponse::OK); return ::grpc::Status::OK; diff --git a/src/buildtool/serve_api/serve_service/source_tree.hpp b/src/buildtool/serve_api/serve_service/source_tree.hpp index fb0259f93..bf3cd9ef0 100644 --- a/src/buildtool/serve_api/serve_service/source_tree.hpp +++ b/src/buildtool/serve_api/serve_service/source_tree.hpp @@ -37,8 +37,6 @@ #include "src/buildtool/file_system/symlinks_map/resolve_symlinks_map.hpp" #include "src/buildtool/logging/logger.hpp" #include "src/buildtool/serve_api/remote/config.hpp" -#include "src/buildtool/storage/config.hpp" -#include "src/buildtool/storage/storage.hpp" #include "src/utils/cpp/expected.hpp" // Service for improved interaction with the target-level cache. @@ -63,12 +61,13 @@ class SourceTreeService final explicit SourceTreeService( gsl::not_null const& serve_config, - gsl::not_null const& local_context, - gsl::not_null const& apis) noexcept + gsl::not_null const& apis, + gsl::not_null const& native_context, + LocalContext const* compat_context = nullptr) noexcept : serve_config_{*serve_config}, - storage_{*local_context->storage}, - storage_config_{*local_context->storage_config}, - apis_{*apis} {} + apis_{*apis}, + native_context_{native_context}, + compat_context_{compat_context} {} // Retrieve the Git-subtree identifier from a given Git commit. // @@ -123,9 +122,9 @@ class SourceTreeService final const ::justbuild::just_serve::CheckRootTreeRequest* request, CheckRootTreeResponse* response) -> ::grpc::Status override; - // Retrieve a given Git-tree from the CAS of the associated - // remote-execution endpoint and make it available in a location where this - // serve instance can build against. + // Retrieve a given tree from the CAS of the associated remote-execution + // endpoint and make it available in a location where this serve instance + // can build against. // // There are no method-specific errors. 
auto GetRemoteTree( @@ -135,9 +134,9 @@ class SourceTreeService final private: RemoteServeConfig const& serve_config_; - StorageConfig const& storage_config_; - Storage const& storage_; ApiBundle const& apis_; + gsl::not_null native_context_; + LocalContext const* compat_context_; mutable std::shared_mutex mutex_; std::shared_ptr logger_{std::make_shared("serve-service")}; // symlinks resolver map @@ -183,6 +182,18 @@ class SourceTreeService final std::filesystem::path const& repo_path) const noexcept -> std::remove_cvref_t; + /// \brief Set the digest field of a serve response. + /// In compatible mode, this handles also the interaction with the storages + /// to recover the corresponding compatible digest from a native digest, as + /// stored in file mappings. + template + [[nodiscard]] auto SetDigestInResponse( + gsl::not_null const& response, + std::string const& object_hash, + bool is_tree, + bool from_git) const noexcept + -> std::remove_cvref_t; + /// \brief Resolves a tree from given repository with respect to symlinks. /// The resolved tree will always be placed in the Git cache. 
[[nodiscard]] auto ResolveContentTree( diff --git a/src/buildtool/serve_api/serve_service/target.cpp b/src/buildtool/serve_api/serve_service/target.cpp index ea0c36c92..f96f70da2 100644 --- a/src/buildtool/serve_api/serve_service/target.cpp +++ b/src/buildtool/serve_api/serve_service/target.cpp @@ -25,9 +25,12 @@ #include "src/buildtool/build_engine/target_map/configured_target.hpp" #include "src/buildtool/build_engine/target_map/result_map.hpp" #include "src/buildtool/common/artifact.hpp" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/common/remote/retry_config.hpp" #include "src/buildtool/common/repository_config.hpp" #include "src/buildtool/common/statistics.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_engine/executor/context.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/file_system/object_type.hpp" @@ -45,7 +48,6 @@ #include "src/buildtool/storage/garbage_collector.hpp" #include "src/buildtool/storage/repository_garbage_collector.hpp" #include "src/buildtool/storage/target_cache_key.hpp" -#include "src/utils/cpp/verify_hash.hpp" auto TargetService::GetDispatchList( ArtifactDigest const& dispatch_digest) noexcept @@ -104,8 +106,7 @@ auto TargetService::HandleFailureLog( } // upload log blob to remote if (not apis_.local->RetrieveToCas( - {Artifact::ObjectInfo{.digest = ArtifactDigest{*digest}, - .type = ObjectType::File}}, + {Artifact::ObjectInfo{.digest = *digest, .type = ObjectType::File}}, *apis_.remote)) { auto msg = fmt::format("Failed to upload to remote CAS the failed {} log {}", @@ -115,7 +116,7 @@ auto TargetService::HandleFailureLog( return ::grpc::Status{::grpc::StatusCode::UNAVAILABLE, msg}; } // set response with log digest - response->mutable_log()->CopyFrom(*digest); + (*response->mutable_log()) = ArtifactDigestFactory::ToBazel(*digest); return ::grpc::Status::OK; } @@ 
-128,15 +129,18 @@ auto TargetService::CreateRemoteExecutionConfig( platform_properties[p.name()] = p.value(); } // read in the dispatch list - if (auto msg = IsAHash(request->dispatch_info().hash()); msg) { - logger_->Emit(LogLevel::Error, "{}", *msg); - return unexpected{ - ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, *msg}}; + auto const dispatch_info_digest = ArtifactDigestFactory::FromBazel( + local_context_.storage_config->hash_function.GetType(), + request->dispatch_info()); + if (not dispatch_info_digest) { + logger_->Emit(LogLevel::Error, "{}", dispatch_info_digest.error()); + return unexpected{::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, + dispatch_info_digest.error()}}; } - auto const& dispatch_info_digest = ArtifactDigest{request->dispatch_info()}; - auto res = GetDispatchList(dispatch_info_digest); + + auto res = GetDispatchList(*dispatch_info_digest); if (not res) { - auto err = move(res).error(); + auto err = std::move(res).error(); logger_->Emit(LogLevel::Error, "{}", err.error_message()); return unexpected{std::move(err)}; } @@ -153,12 +157,14 @@ auto TargetService::ServeTarget( const ::justbuild::just_serve::ServeTargetRequest* request, ::justbuild::just_serve::ServeTargetResponse* response) -> ::grpc::Status { // check target cache key hash for validity - if (auto msg = IsAHash(request->target_cache_key_id().hash()); msg) { - logger_->Emit(LogLevel::Error, "{}", *msg); - return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, *msg}; + auto const target_cache_key_digest = ArtifactDigestFactory::FromBazel( + local_context_.storage_config->hash_function.GetType(), + request->target_cache_key_id()); + if (not target_cache_key_digest) { + logger_->Emit(LogLevel::Error, "{}", target_cache_key_digest.error()); + return ::grpc::Status{::grpc::StatusCode::INVALID_ARGUMENT, + target_cache_key_digest.error()}; } - auto const& target_cache_key_digest = - ArtifactDigest{request->target_cache_key_id()}; // acquire locks auto repo_lock = @@ 
-203,13 +209,12 @@ auto TargetService::ServeTarget( } // get a target cache instance with the correct computed shard - auto shard = - remote_config->remote_address - ? std::make_optional(ArtifactDigest(*execution_backend_dgst).hash()) - : std::nullopt; + auto shard = remote_config->remote_address + ? std::make_optional(execution_backend_dgst->hash()) + : std::nullopt; auto const& tc = local_context_.storage->TargetCache().WithShard(shard); auto const& tc_key = - TargetCacheKey{{target_cache_key_digest, ObjectType::File}}; + TargetCacheKey{{*target_cache_key_digest, ObjectType::File}}; // check if target-level cache entry has already been computed if (auto target_entry = tc.Read(tc_key); target_entry) { @@ -234,15 +239,16 @@ auto TargetService::ServeTarget( return ::grpc::Status{::grpc::StatusCode::UNAVAILABLE, msg}; } // populate response with the target cache value - response->mutable_target_value()->CopyFrom(target_entry->second.digest); + (*response->mutable_target_value()) = + ArtifactDigestFactory::ToBazel(target_entry->second.digest); return ::grpc::Status::OK; } // get target description from remote cas auto const& target_cache_key_info = Artifact::ObjectInfo{ - .digest = target_cache_key_digest, .type = ObjectType::File}; + .digest = *target_cache_key_digest, .type = ObjectType::File}; - if (not apis_.local->IsAvailable(target_cache_key_digest) and + if (not apis_.local->IsAvailable(*target_cache_key_digest) and not apis_.remote->RetrieveToCas({target_cache_key_info}, *apis_.local)) { auto msg = fmt::format( @@ -269,7 +275,7 @@ auto TargetService::ServeTarget( nlohmann::json::parse(*target_description_str)); } catch (std::exception const& ex) { auto msg = fmt::format("Parsing TargetCacheKey {} failed with:\n{}", - target_cache_key_digest.hash(), + target_cache_key_digest->hash(), ex.what()); logger_->Emit(LogLevel::Error, "{}", msg); return ::grpc::Status{::grpc::StatusCode::INTERNAL, msg}; @@ -278,7 +284,7 @@ auto TargetService::ServeTarget( not 
target_description_dict->IsMap()) { auto msg = fmt::format("TargetCacheKey {} should contain a map, but found {}", - target_cache_key_digest.hash(), + target_cache_key_digest->hash(), target_description_dict.ToJson().dump()); logger_->Emit(LogLevel::Error, "{}", msg); return ::grpc::Status{::grpc::StatusCode::NOT_FOUND, msg}; @@ -294,7 +300,7 @@ auto TargetService::ServeTarget( if (not target_description_dict->At(key)) { error_msg = fmt::format("TargetCacheKey {} does not contain key \"{}\"", - target_cache_key_digest.hash(), + target_cache_key_digest->hash(), key); logger_->Emit(LogLevel::Error, "{}", error_msg); return false; @@ -314,15 +320,24 @@ auto TargetService::ServeTarget( auto msg = fmt::format( "TargetCacheKey {}: \"repo_key\" value should be a string, but " "found {}", - target_cache_key_digest.hash(), + target_cache_key_digest->hash(), repo_key.ToJson().dump()); logger_->Emit(LogLevel::Error, "{}", msg); return ::grpc::Status{::grpc::StatusCode::NOT_FOUND, msg}; } - ArtifactDigest repo_key_dgst{repo_key->String(), 0, /*is_tree=*/false}; - if (not apis_.local->IsAvailable(repo_key_dgst) and + auto const repo_key_dgst = + ArtifactDigestFactory::Create(apis_.hash_function.GetType(), + repo_key->String(), + 0, + /*is_tree=*/false); + if (not repo_key_dgst) { + logger_->Emit(LogLevel::Error, "{}", repo_key_dgst.error()); + return ::grpc::Status{::grpc::StatusCode::INTERNAL, + repo_key_dgst.error()}; + } + if (not apis_.local->IsAvailable(*repo_key_dgst) and not apis_.remote->RetrieveToCas( - {Artifact::ObjectInfo{.digest = repo_key_dgst, + {Artifact::ObjectInfo{.digest = *repo_key_dgst, .type = ObjectType::File}}, *apis_.local)) { auto msg = fmt::format( @@ -332,7 +347,7 @@ auto TargetService::ServeTarget( return ::grpc::Status{::grpc::StatusCode::FAILED_PRECONDITION, msg}; } auto repo_config_path = local_context_.storage->CAS().BlobPath( - repo_key_dgst, /*is_executable=*/false); + *repo_key_dgst, /*is_executable=*/false); if (not repo_config_path) { // 
This should not fail unless something went really bad... auto msg = fmt::format( @@ -362,7 +377,7 @@ auto TargetService::ServeTarget( auto msg = fmt::format( "TargetCacheKey {}: \"target_name\" value should be a string, but" " found {}", - target_cache_key_digest.hash(), + target_cache_key_digest->hash(), target_expr.ToJson().dump()); logger_->Emit(LogLevel::Error, "{}", msg); return ::grpc::Status{::grpc::StatusCode::FAILED_PRECONDITION, msg}; @@ -373,7 +388,7 @@ auto TargetService::ServeTarget( } catch (std::exception const& ex) { auto msg = fmt::format( "TargetCacheKey {}: parsing \"target_name\" failed with:\n{}", - target_cache_key_digest.hash(), + target_cache_key_digest->hash(), ex.what()); logger_->Emit(LogLevel::Error, "{}", msg); return ::grpc::Status{::grpc::StatusCode::FAILED_PRECONDITION, msg}; @@ -386,7 +401,7 @@ auto TargetService::ServeTarget( auto msg = fmt::format( "TargetCacheKey {}: \"effective_config\" value should be a string," " but found {}", - target_cache_key_digest.hash(), + target_cache_key_digest->hash(), config_expr.ToJson().dump()); logger_->Emit(LogLevel::Error, "{}", msg); return ::grpc::Status{::grpc::StatusCode::FAILED_PRECONDITION, msg}; @@ -398,7 +413,7 @@ auto TargetService::ServeTarget( } catch (std::exception const& ex) { auto msg = fmt::format( "TargetCacheKey {}: parsing \"effective_config\" failed with:\n{}", - target_cache_key_digest.hash(), + target_cache_key_digest->hash(), ex.what()); logger_->Emit(LogLevel::Error, "{}", msg); return ::grpc::Status{::grpc::StatusCode::FAILED_PRECONDITION, msg}; @@ -574,13 +589,14 @@ auto TargetService::ServeTarget( return ::grpc::Status{::grpc::StatusCode::UNAVAILABLE, msg}; } // populate response with the target cache value - response->mutable_target_value()->CopyFrom(target_entry->second.digest); + (*response->mutable_target_value()) = + ArtifactDigestFactory::ToBazel(target_entry->second.digest); return ::grpc::Status::OK; } // target cache value missing -- internally something is 
very wrong auto msg = fmt::format("Failed to read TargetCacheKey {} after store", - target_cache_key_digest.hash()); + target_cache_key_digest->hash()); logger_->Emit(LogLevel::Error, "{}", msg); return ::grpc::Status{::grpc::StatusCode::INTERNAL, msg}; } @@ -918,20 +934,20 @@ auto TargetService::ServeTargetDescription( if (auto dgst = local_context_.storage->CAS().StoreBlob(description_str, /*is_executable=*/false)) { - auto const& artifact_dgst = ArtifactDigest{*dgst}; if (not apis_.local->RetrieveToCas( - {Artifact::ObjectInfo{.digest = artifact_dgst, + {Artifact::ObjectInfo{.digest = *dgst, .type = ObjectType::File}}, *apis_.remote)) { auto error_msg = fmt::format( "Failed to upload to remote cas the description blob {}", - artifact_dgst.hash()); + dgst->hash()); logger_->Emit(LogLevel::Error, "{}", error_msg); return ::grpc::Status{::grpc::StatusCode::UNAVAILABLE, error_msg}; } // populate response - response->mutable_description_id()->CopyFrom(*dgst); + (*response->mutable_description_id()) = + ArtifactDigestFactory::ToBazel(*dgst); return ::grpc::Status::OK; } // failed to store blob diff --git a/src/buildtool/storage/TARGETS b/src/buildtool/storage/TARGETS index c43775812..1bd1fbb27 100644 --- a/src/buildtool/storage/TARGETS +++ b/src/buildtool/storage/TARGETS @@ -5,17 +5,18 @@ , "deps": [ "backend_description" , ["@", "gsl", "", "gsl"] + , ["src/buildtool/common", "artifact_digest_factory"] , ["src/buildtool/common", "common"] + , ["src/buildtool/common", "protocol_traits"] , ["src/buildtool/common/remote", "remote_common"] - , ["src/buildtool/compatibility", "compatibility"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/file_system", "object_type"] - , ["src/buildtool/logging", "logging"] , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] + , ["src/utils/cpp", "expected"] , ["src/utils/cpp", "gsl"] , ["src/utils/cpp", "tmp_dir"] - , ["src/utils/cpp", 
"expected"] - , ["src/buildtool/crypto", "hash_function"] ] , "stage": ["src", "buildtool", "storage"] } @@ -28,6 +29,7 @@ , "private-deps": [ ["src/buildtool/execution_api/common", "common"] , ["src/buildtool/file_system", "file_system_manager"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] ] , "stage": ["src", "buildtool", "storage"] @@ -62,33 +64,37 @@ , "deps": [ "config" , "file_chunker" - , ["src/buildtool/common", "common"] - , ["src/buildtool/file_system", "file_storage"] - , ["src/buildtool/file_system", "object_cas"] - , ["src/buildtool/execution_api/common", "common"] + , ["@", "fmt", "", "fmt"] + , ["@", "gsl", "", "gsl"] + , ["@", "json", "", "json"] , ["src/buildtool/build_engine/analysed_target", "target"] , ["src/buildtool/build_engine/base_maps", "entity_name_data"] , ["src/buildtool/build_engine/expression", "expression"] - , ["src/utils/cpp", "file_locking"] - , ["src/utils/cpp", "gsl"] - , ["src/utils/cpp", "expected"] - , ["@", "gsl", "", "gsl"] - , ["@", "json", "", "json"] - , ["@", "fmt", "", "fmt"] - , ["src/buildtool/file_system", "object_type"] - , ["src/buildtool/file_system", "file_system_manager"] - , ["src/buildtool/logging", "logging"] - , ["src/buildtool/common", "bazel_types"] - , ["src/buildtool/file_system", "git_repo"] , ["src/buildtool/common", "artifact_description"] - , ["src/buildtool/compatibility", "compatibility"] + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/common", "bazel_types"] + , ["src/buildtool/common", "common"] + , ["src/buildtool/common", "protocol_traits"] , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/execution_api/common", "common"] + , ["src/buildtool/file_system", "file_storage"] + , ["src/buildtool/file_system", "file_system_manager"] + , ["src/buildtool/file_system", "git_repo"] + , ["src/buildtool/file_system", "object_cas"] + , ["src/buildtool/file_system", "object_type"] + , ["src/buildtool/logging", "log_level"] + , 
["src/buildtool/logging", "logging"] + , ["src/utils/cpp", "expected"] + , ["src/utils/cpp", "file_locking"] + , ["src/utils/cpp", "tmp_dir"] ] , "stage": ["src", "buildtool", "storage"] , "private-deps": - [ ["src/buildtool/logging", "log_level"] + [ ["src/buildtool/crypto", "hasher"] , ["src/buildtool/execution_api/common", "message_limits"] , ["src/buildtool/multithreading", "task_system"] + , ["src/utils/cpp", "gsl"] + , ["src/utils/cpp", "hex_string"] , ["src/utils/cpp", "path_hash"] ] } @@ -98,10 +104,11 @@ , "hdrs": ["fs_utils.hpp"] , "srcs": ["fs_utils.cpp"] , "deps": - [ ["src/buildtool/common", "user_structs"] - , ["src/buildtool/file_system/symlinks_map", "pragma_special"] - , "config" + [ "config" , "storage" + , ["src/buildtool/common", "user_structs"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/file_system/symlinks_map", "pragma_special"] ] , "stage": ["src", "buildtool", "storage"] , "private-deps": diff --git a/src/buildtool/storage/backend_description.cpp b/src/buildtool/storage/backend_description.cpp index 456f79c68..c5f790aa7 100644 --- a/src/buildtool/storage/backend_description.cpp +++ b/src/buildtool/storage/backend_description.cpp @@ -23,9 +23,18 @@ auto DescribeBackend(std::optional const& address, ExecutionProperties const& properties, std::vector const& dispatch) noexcept -> expected { - auto description = nlohmann::json{ - {"remote_address", address ? address->ToJson() : nlohmann::json{}}, - {"platform_properties", properties}}; + nlohmann::json description; + try { + description["remote_address"] = + address ? 
address->ToJson() : nlohmann::json{}; + description["platform_properties"] = properties; + } catch (std::exception const& e) { + return unexpected{ + fmt::format("Failed to serialize remote address and " + "platform_properties:\n{}", + e.what())}; + } + if (not dispatch.empty()) { try { // only add the dispatch list, if not empty, so that keys remain diff --git a/src/buildtool/storage/compactification_task.hpp b/src/buildtool/storage/compactification_task.hpp index e0f7e492a..dee2ee2c2 100644 --- a/src/buildtool/storage/compactification_task.hpp +++ b/src/buildtool/storage/compactification_task.hpp @@ -76,9 +76,11 @@ struct CompactificationTask final { CompactificationTask const& task) noexcept -> bool; template -void CompactificationTask::Log(LogLevel level, - std::string const& message, - Args&&... args) const noexcept { +void CompactificationTask::Log( + LogLevel level, + std::string const& message, + // NOLINTNEXTLINE(cppcoreguidelines-missing-std-forward) + Args&&... args) const noexcept { if (not logger) { ::Logger::Log(LogLevel::Error, "Logger is missing."); return; diff --git a/src/buildtool/storage/compactifier.cpp b/src/buildtool/storage/compactifier.cpp index 1e6033edc..3bd393240 100644 --- a/src/buildtool/storage/compactifier.cpp +++ b/src/buildtool/storage/compactifier.cpp @@ -22,6 +22,7 @@ #include #include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/common/bazel_types.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/crypto/hasher.hpp" @@ -156,10 +157,10 @@ template } // Calculate reference hash size: - auto const kHashSize = + auto const hash_size = task.cas.GetHashFunction().MakeHasher().GetHashLength(); - auto const kFileNameSize = - kHashSize - FileStorageData::kDirectoryNameLength; + auto const file_name_size = + hash_size - FileStorageData::kDirectoryNameLength; // Check the directory itself is valid: std::string const d_name = 
directory.filename(); @@ -177,8 +178,8 @@ template } FileSystemManager::ReadDirEntryFunc callback = - [&task, &directory, kFileNameSize](std::filesystem::path const& file, - ObjectType type) -> bool { + [&task, &directory, file_name_size](std::filesystem::path const& file, + ObjectType type) -> bool { // Directories are unexpected in storage subdirectories if (IsTreeObject(type)) { task.Log(LogLevel::Error, @@ -187,9 +188,9 @@ template return false; } - // Check file has a hexadecimal name of length kFileNameSize: + // Check file has a hexadecimal name of length file_name_size: std::string const f_name = file.filename(); - if (f_name.size() == kFileNameSize and FromHexString(f_name)) { + if (f_name.size() == file_name_size and FromHexString(f_name)) { return true; } auto const path = directory / file; @@ -272,8 +273,8 @@ template } // Calculate the digest for the entry: - auto const digest = - ArtifactDigest::CreateFromFile(task.cas.GetHashFunction(), path); + auto const digest = ArtifactDigestFactory::HashFileAs( + task.cas.GetHashFunction(), path); if (not digest) { task.Log(LogLevel::Error, "Failed to calculate digest for {}", diff --git a/src/buildtool/storage/config.hpp b/src/buildtool/storage/config.hpp index 0841f9d51..598d93506 100644 --- a/src/buildtool/storage/config.hpp +++ b/src/buildtool/storage/config.hpp @@ -25,8 +25,9 @@ #include "gsl/gsl" #include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/common/remote/remote_common.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/file_system/object_type.hpp" @@ -131,19 +132,17 @@ struct StorageConfig final { [[nodiscard]] auto CreateGenerationConfig( std::size_t generation) const noexcept -> GenerationConfig { - bool const 
compatible = Compatibility::IsCompatible(); + bool const native = ProtocolTraits::IsNative(hash_function.GetType()); auto const cache_root = GenerationCacheRoot(generation); - auto const cache_dir = - UpdatePathForCompatibility(cache_root, compatible); + auto const cache_dir = UpdatePathForCompatibility(cache_root, native); return GenerationConfig{ .storage_config = this, .cas_f = cache_dir / "casf", .cas_x = cache_dir / "casx", - .cas_t = cache_dir / (compatible ? "casf" : "cast"), + .cas_t = cache_dir / (native ? "cast" : "casf"), .cas_large_f = cache_dir / "cas-large-f", - .cas_large_t = - cache_dir / (compatible ? "cas-large-f" : "cas-large-t"), + .cas_large_t = cache_dir / (native ? "cas-large-t" : "cas-large-f"), .action_cache = cache_dir / "ac", .target_cache = cache_dir / "tc"}; }; @@ -152,13 +151,13 @@ struct StorageConfig final { // different folder for different caching protocol [[nodiscard]] static auto UpdatePathForCompatibility( std::filesystem::path const& dir, - bool is_compatible) -> std::filesystem::path { - return dir / (is_compatible ? "compatible-sha256" : "git-sha1"); + bool is_native) -> std::filesystem::path { + return dir / (is_native ? 
"git-sha1" : "compatible-sha256"); }; [[nodiscard]] auto DefaultBackendDescriptionId() noexcept -> std::string { try { - return ArtifactDigest::Create( + return ArtifactDigestFactory::HashDataAs( hash_function, DescribeBackend(std::nullopt, {}, {}).value()) .hash(); @@ -232,7 +231,8 @@ class StorageConfig::Builder final { remote_address_, remote_platform_properties_, remote_dispatch_); if (desc) { backend_description_id = - ArtifactDigest::Create(hash_function, *desc) + ArtifactDigestFactory::HashDataAs( + hash_function, *desc) .hash(); } else { diff --git a/src/buildtool/storage/file_chunker.cpp b/src/buildtool/storage/file_chunker.cpp index 08f0dc11a..2a25cbf82 100644 --- a/src/buildtool/storage/file_chunker.cpp +++ b/src/buildtool/storage/file_chunker.cpp @@ -59,7 +59,7 @@ auto FileChunker::NextChunk() noexcept -> std::optional { auto remaining = size_ - pos_; if (remaining < max_chunk_size_ and not stream_.eof()) { // Move the remaining bytes of the buffer to the front. - buffer_.copy(&buffer_[0], remaining, pos_); + buffer_.copy(buffer_.data(), remaining, pos_); auto ssize = static_cast(buffer_.size() - remaining); // Fill the buffer with stream content. stream_.read(&buffer_[remaining], ssize); diff --git a/src/buildtool/storage/file_chunker.hpp b/src/buildtool/storage/file_chunker.hpp index 914de3f05..4a7e99e41 100644 --- a/src/buildtool/storage/file_chunker.hpp +++ b/src/buildtool/storage/file_chunker.hpp @@ -83,10 +83,10 @@ class FileChunker { const std::uint32_t min_chunk_size_{}; const std::uint32_t average_chunk_size_{}; const std::uint32_t max_chunk_size_{}; - std::ifstream stream_{}; // File stream to be splitted. - std::string buffer_{}; // Buffer for the file content. - std::size_t size_{0}; // Current size of the buffer. - std::size_t pos_{0}; // Current read position within the buffer. + std::ifstream stream_; // File stream to be splitted. + std::string buffer_; // Buffer for the file content. 
+ std::size_t size_{0}; // Current size of the buffer. + std::size_t pos_{0}; // Current read position within the buffer. /// @brief Find the next chunk boundary from the current read position /// within the buffer. diff --git a/src/buildtool/storage/fs_utils.cpp b/src/buildtool/storage/fs_utils.cpp index 62adf1e98..058838143 100644 --- a/src/buildtool/storage/fs_utils.cpp +++ b/src/buildtool/storage/fs_utils.cpp @@ -104,6 +104,16 @@ auto GetResolvedTreeIDFile(StorageConfig const& storage_config, tree_hash; } +auto GetRehashIDFile(StorageConfig const& storage_config, + HashFunction::Type target_hash_type, + std::string const& hash, + bool from_git, + std::size_t generation) noexcept -> std::filesystem::path { + return storage_config.GenerationCacheRoot(generation) / + fmt::format("to-{}", ToString(target_hash_type)) / + (from_git ? "from-git" : "from-cas") / hash; +} + auto WriteTreeIDFile(std::filesystem::path const& tree_id_file, std::string const& tree_id) noexcept -> bool { // needs to be done safely, so use the rename trick @@ -127,8 +137,7 @@ auto AddToCAS(Storage const& storage, std::string const& data) noexcept // get file CAS instance auto const& cas = storage.CAS(); // write to cas - auto digest = cas.StoreBlob(data); - if (digest) { + if (auto digest = cas.StoreBlob(data)) { return cas.BlobPath(*digest, /*is_executable=*/false); } return std::nullopt; diff --git a/src/buildtool/storage/fs_utils.hpp b/src/buildtool/storage/fs_utils.hpp index 7482457b3..25392b917 100644 --- a/src/buildtool/storage/fs_utils.hpp +++ b/src/buildtool/storage/fs_utils.hpp @@ -20,6 +20,7 @@ #include #include "src/buildtool/common/user_structs.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/symlinks_map/pragma_special.hpp" #include "src/buildtool/storage/config.hpp" #include "src/buildtool/storage/storage.hpp" @@ -73,6 +74,20 @@ namespace StorageUtils { std::size_t generation = 0) noexcept -> std::filesystem::path; +/// \brief Get the 
path to the file storing the corresponding artifact hashed by +/// a different hash function. +/// \param storage_config Storage under which the file is to be found. +/// \param target_hash_type Hash type to identify mapping target. +/// \param hash Hash to identify mapping source. +/// \param from_git Flag to distinguish further mapping source (CAS / GitCAS) +/// \param generation Further specificity in location of the file. +[[nodiscard]] auto GetRehashIDFile(StorageConfig const& storage_config, + HashFunction::Type target_hash_type, + std::string const& hash, + bool from_git, + std::size_t generation = 0) noexcept + -> std::filesystem::path; + /// \brief Write a tree id to file. The parent folder of the file must exist! [[nodiscard]] auto WriteTreeIDFile(std::filesystem::path const& tree_id_file, std::string const& tree_id) noexcept -> bool; diff --git a/src/buildtool/storage/garbage_collector.cpp b/src/buildtool/storage/garbage_collector.cpp index a108ae734..c2d9ecd06 100644 --- a/src/buildtool/storage/garbage_collector.cpp +++ b/src/buildtool/storage/garbage_collector.cpp @@ -20,9 +20,7 @@ #include #include -#include "gsl/gsl" #include "src/buildtool/common/artifact.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/common/message_limits.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" @@ -70,13 +68,13 @@ auto GarbageCollector::LockFilePath( auto GarbageCollector::TriggerGarbageCollection( StorageConfig const& storage_config, bool no_rotation) noexcept -> bool { - auto const kRemoveMe = std::string{"remove-me"}; + std::string const remove_me = "remove-me"; auto pid = CreateProcessUniqueId(); if (not pid) { return false; } - auto remove_me_prefix = kRemoveMe + *pid + std::string{"-"}; + auto remove_me_prefix = remove_me + *pid + std::string{"-"}; std::vector to_remove{}; // With a shared lock, we can remove all directories with the given prefix, 
@@ -91,7 +89,8 @@ auto GarbageCollector::TriggerGarbageCollection( for (auto const& entry : std::filesystem::directory_iterator(storage_config.CacheRoot())) { - if (entry.path().filename().string().find(remove_me_prefix) == 0) { + if (entry.path().filename().string().starts_with( + remove_me_prefix)) { to_remove.emplace_back(entry.path()); } } @@ -122,7 +121,7 @@ auto GarbageCollector::TriggerGarbageCollection( std::vector left_over{}; for (auto const& entry : std::filesystem::directory_iterator(storage_config.CacheRoot())) { - if (entry.path().filename().string().find(kRemoveMe) == 0) { + if (entry.path().filename().string().starts_with(remove_me)) { left_over.emplace_back(entry.path()); } } @@ -214,21 +213,6 @@ auto GarbageCollector::TriggerGarbageCollection( auto GarbageCollector::Compactify(StorageConfig const& storage_config, size_t threshold) noexcept -> bool { - // Return to the initial compatibility mode once done: - auto const guard = gsl::finally([mode = Compatibility::IsCompatible()] { - Compatibility::SetCompatible(mode); - }); - - auto compactify = [threshold](StorageConfig const& config) -> bool { - Compatibility::SetCompatible(config.hash_function.GetType() == - HashFunction::Type::PlainSHA256); - auto const storage = ::Generation::Create(&config); - - return Compactifier::RemoveInvalid(storage.CAS()) and - Compactifier::RemoveSpliced(storage.CAS()) and - Compactifier::SplitLarge(storage.CAS(), threshold); - }; - // Compactification must be done for both native and compatible storages. 
static constexpr std::array kHashes = {HashFunction::Type::GitSHA1, HashFunction::Type::PlainSHA256}; @@ -236,13 +220,20 @@ auto GarbageCollector::Compactify(StorageConfig const& storage_config, .SetBuildRoot(storage_config.build_root) .SetNumGenerations(storage_config.num_generations); - return std::all_of(kHashes.begin(), - kHashes.end(), - [&builder, &compactify](HashFunction::Type hash_type) { - auto const config = - builder.SetHashType(hash_type).Build(); - return config.has_value() and compactify(*config); - }); + return std::all_of( + kHashes.begin(), + kHashes.end(), + [threshold, &builder](HashFunction::Type hash_type) { + auto const config = builder.SetHashType(hash_type).Build(); + if (not config) { + return false; + } + + auto const storage = ::Generation::Create(&*config); + return Compactifier::RemoveInvalid(storage.CAS()) and + Compactifier::RemoveSpliced(storage.CAS()) and + Compactifier::SplitLarge(storage.CAS(), threshold); + }); } #endif // BOOTSTRAP_BUILD_TOOL diff --git a/src/buildtool/storage/large_object_cas.hpp b/src/buildtool/storage/large_object_cas.hpp index 971b943d8..086875dfb 100644 --- a/src/buildtool/storage/large_object_cas.hpp +++ b/src/buildtool/storage/large_object_cas.hpp @@ -15,13 +15,14 @@ #ifndef INCLUDED_SRC_BUILDTOOL_STORAGE_LARGE_OBJECT_CAS_HPP #define INCLUDED_SRC_BUILDTOOL_STORAGE_LARGE_OBJECT_CAS_HPP +#include #include #include #include #include #include -#include "src/buildtool/common/bazel_types.hpp" +#include "src/buildtool/common/artifact_digest.hpp" #include "src/buildtool/file_system/file_storage.hpp" #include "src/buildtool/file_system/object_type.hpp" #include "src/buildtool/storage/config.hpp" @@ -32,7 +33,7 @@ template class LocalCAS; -enum class LargeObjectErrorCode { +enum class LargeObjectErrorCode : std::uint8_t { /// \brief An internal error occured. Internal = 0, @@ -123,16 +124,16 @@ class LargeObjectCAS final { /// \brief Get the path to a large entry in the storage. 
/// \param digest The digest of a large object. /// \returns Path to the large entry if in the storage. - [[nodiscard]] auto GetEntryPath(bazel_re::Digest const& digest) - const noexcept -> std::optional; + [[nodiscard]] auto GetEntryPath(ArtifactDigest const& digest) const noexcept + -> std::optional; /// \brief Split an object from the main CAS into chunks. If the object had /// been split before, it would not get split again. /// \param digest The digest of the object to be split. /// \return A set of chunks the resulting object is composed of /// or an error on failure. - [[nodiscard]] auto Split(bazel_re::Digest const& digest) const noexcept - -> expected, LargeObjectError>; + [[nodiscard]] auto Split(ArtifactDigest const& digest) const noexcept + -> expected, LargeObjectError>; /// \brief Splice an object based on the reconstruction rules from the /// storage. This method doesn't check whether the result of splicing is @@ -140,7 +141,7 @@ class LargeObjectCAS final { /// \param digest The digest of the object to be spliced. /// \return A temporary directory that contains a single file /// "result" on success or an error on failure. - [[nodiscard]] auto TrySplice(bazel_re::Digest const& digest) const noexcept + [[nodiscard]] auto TrySplice(ArtifactDigest const& digest) const noexcept -> expected; /// \brief Splice an object from parts. This method doesn't check whether @@ -149,8 +150,8 @@ class LargeObjectCAS final { /// \param parts Parts to be concatenated. /// \return A temporary directory that contains a single file /// "result" on success or an error on failure. 
- [[nodiscard]] auto Splice(bazel_re::Digest const& digest, - std::vector const& parts) + [[nodiscard]] auto Splice(ArtifactDigest const& digest, + std::vector const& parts) const noexcept -> expected; /// \brief Uplink large entry from this generation to latest LocalCAS @@ -167,7 +168,7 @@ class LargeObjectCAS final { [[nodiscard]] auto LocalUplink( LocalCAS const& latest, LargeObjectCAS const& latest_large, - bazel_re::Digest const& digest) const noexcept -> bool; + ArtifactDigest const& digest) const noexcept -> bool; private: // By default, overwrite existing entries. Unless this is a generation @@ -185,18 +186,19 @@ class LargeObjectCAS final { /// \param digest The digest of a large object. /// \returns Parts the large object is composed of, if present in /// the storage. - [[nodiscard]] auto ReadEntry(bazel_re::Digest const& digest) const noexcept - -> std::optional>; + [[nodiscard]] auto ReadEntry(ArtifactDigest const& digest) const noexcept + -> std::optional>; /// \brief Create a new entry description and add it to the storage. /// \param digest The digest of the result. /// \param parts Parts the resulting object is composed of. /// \returns True if the entry exists afterwards. 
[[nodiscard]] auto WriteEntry( - bazel_re::Digest const& digest, - std::vector const& parts) const noexcept -> bool; + ArtifactDigest const& digest, + std::vector const& parts) const noexcept -> bool; }; +// NOLINTNEXTLINE(misc-header-include-cycle) #include "src/buildtool/storage/large_object_cas.tpp" #endif // INCLUDED_SRC_BUILDTOOL_STORAGE_LARGE_OBJECT_CAS_HPP diff --git a/src/buildtool/storage/large_object_cas.tpp b/src/buildtool/storage/large_object_cas.tpp index 5fbfee07d..bb7794bb2 100644 --- a/src/buildtool/storage/large_object_cas.tpp +++ b/src/buildtool/storage/large_object_cas.tpp @@ -16,14 +16,14 @@ #define INCLUDED_SRC_BUILDTOOL_STORAGE_LARGE_OBJECT_CAS_TPP #include -#include #include #include #include "fmt/core.h" #include "nlohmann/json.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" -#include "src/buildtool/compatibility/native_support.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/protocol_traits.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/storage/file_chunker.hpp" #include "src/buildtool/storage/large_object_cas.hpp" @@ -36,10 +36,9 @@ inline constexpr std::size_t kSizeIndex = 1; template auto LargeObjectCAS::GetEntryPath( - bazel_re::Digest const& digest) const noexcept + ArtifactDigest const& digest) const noexcept -> std::optional { - const std::string hash = NativeSupport::Unprefix(digest.hash()); - const std::filesystem::path file_path = file_store_.GetPath(hash); + std::filesystem::path file_path = file_store_.GetPath(digest.hash()); if (FileSystemManager::IsFile(file_path)) { return file_path; } @@ -47,8 +46,9 @@ auto LargeObjectCAS::GetEntryPath( if constexpr (kDoGlobalUplink) { // To promote parts of the tree properly, regular uplinking logic for // trees is used: - bool uplinked = - IsTreeObject(kType) and not Compatibility::IsCompatible() + auto const hash_type = 
storage_config_.hash_function.GetType(); + bool const uplinked = + IsTreeObject(kType) and not ProtocolTraits::IsTreeAllowed(hash_type) ? uplinker_.UplinkTree(digest) : uplinker_.UplinkLargeBlob(digest); if (uplinked and FileSystemManager::IsFile(file_path)) { @@ -60,25 +60,31 @@ auto LargeObjectCAS::GetEntryPath( template auto LargeObjectCAS::ReadEntry( - bazel_re::Digest const& digest) const noexcept - -> std::optional> { + ArtifactDigest const& digest) const noexcept + -> std::optional> { auto const file_path = GetEntryPath(digest); if (not file_path) { return std::nullopt; } - std::vector parts; + std::vector parts; try { std::ifstream stream(*file_path); nlohmann::json j = nlohmann::json::parse(stream); parts.reserve(j.size()); + auto const hash_type = local_cas_.GetHashFunction().GetType(); for (auto const& j_part : j) { - auto hash = j_part.at(kHashIndex).template get(); - auto size = j_part.at(kSizeIndex).template get(); + auto digest = ArtifactDigestFactory::Create( + hash_type, + j_part.at(kHashIndex).template get(), + j_part.at(kSizeIndex).template get(), + /*is_tree=*/false); + if (not digest) { + return std::nullopt; + } - parts.emplace_back( - ArtifactDigest{std::move(hash), size, /*is_tree=*/false}); + parts.emplace_back(*std::move(digest)); } } catch (...) { return std::nullopt; @@ -88,8 +94,8 @@ auto LargeObjectCAS::ReadEntry( template auto LargeObjectCAS::WriteEntry( - bazel_re::Digest const& digest, - std::vector const& parts) const noexcept -> bool { + ArtifactDigest const& digest, + std::vector const& parts) const noexcept -> bool { if (GetEntryPath(digest)) { return true; } @@ -106,23 +112,18 @@ auto LargeObjectCAS::WriteEntry( try { for (auto const& part : parts) { auto& j_part = j.emplace_back(); - - ArtifactDigest const a_digest(part); - j_part[kHashIndex] = a_digest.hash(); - j_part[kSizeIndex] = a_digest.size(); + j_part[kHashIndex] = part.hash(); + j_part[kSizeIndex] = part.size(); } } catch (...) 
{ return false; } - - const auto hash = NativeSupport::Unprefix(digest.hash()); - return file_store_.AddFromBytes(hash, j.dump()); + return file_store_.AddFromBytes(digest.hash(), j.dump()); } template -auto LargeObjectCAS::Split( - bazel_re::Digest const& digest) const noexcept - -> expected, LargeObjectError> { +auto LargeObjectCAS::Split(ArtifactDigest const& digest) + const noexcept -> expected, LargeObjectError> { if (auto large_entry = ReadEntry(digest)) { return std::move(*large_entry); } @@ -154,7 +155,7 @@ auto LargeObjectCAS::Split( fmt::format("could not split {}", digest.hash())}}; } - std::vector parts; + std::vector parts; try { while (auto chunk = chunker.NextChunk()) { auto part = local_cas_.StoreBlob(*chunk, /*is_executable=*/false); @@ -162,7 +163,7 @@ auto LargeObjectCAS::Split( return unexpected{LargeObjectError{ LargeObjectErrorCode::Internal, "could not store a part."}}; } - parts.push_back(std::move(*part)); + parts.emplace_back(*std::move(part)); } } catch (...) { return unexpected{LargeObjectError{LargeObjectErrorCode::Internal, @@ -180,7 +181,7 @@ auto LargeObjectCAS::Split( template auto LargeObjectCAS::TrySplice( - bazel_re::Digest const& digest) const noexcept + ArtifactDigest const& digest) const noexcept -> expected { auto parts = ReadEntry(digest); if (not parts) { @@ -193,8 +194,8 @@ auto LargeObjectCAS::TrySplice( template auto LargeObjectCAS::Splice( - bazel_re::Digest const& digest, - std::vector const& parts) const noexcept + ArtifactDigest const& digest, + std::vector const& parts) const noexcept -> expected { // Create temporary space for splicing: LargeObject large_object(storage_config_); @@ -236,7 +237,7 @@ auto LargeObjectCAS::Splice( stream.close(); } catch (...) 
{ return unexpected{LargeObjectError{LargeObjectErrorCode::Internal, - "an unknown error occured"}}; + "an unknown error occurred"}}; } return large_object; } @@ -247,7 +248,7 @@ template auto LargeObjectCAS::LocalUplink( LocalCAS const& latest, LargeObjectCAS const& latest_large, - bazel_re::Digest const& digest) const noexcept -> bool { + ArtifactDigest const& digest) const noexcept -> bool { // Check the large entry in the youngest generation: if (latest_large.GetEntryPath(digest)) { return true; @@ -262,10 +263,10 @@ auto LargeObjectCAS::LocalUplink( // Promoting the parts of the large entry: for (auto const& part : *parts) { - static constexpr bool is_executable = false; - static constexpr bool skip_sync = true; + static constexpr bool kIsExecutable = false; + static constexpr bool kSkipSync = true; if (not local_cas_.LocalUplinkBlob( - latest, part, is_executable, skip_sync)) { + latest, part, kIsExecutable, kSkipSync)) { return false; } } @@ -274,9 +275,8 @@ auto LargeObjectCAS::LocalUplink( if (not path) { return false; } - - const auto hash = NativeSupport::Unprefix(digest.hash()); - return latest_large.file_store_.AddFromFile(hash, *path, /*is_owner=*/true); + return latest_large.file_store_.AddFromFile( + digest.hash(), *path, /*is_owner=*/true); } #endif // INCLUDED_SRC_BUILDTOOL_STORAGE_LARGE_OBJECT_CAS_TPP diff --git a/src/buildtool/storage/local_ac.hpp b/src/buildtool/storage/local_ac.hpp index b39c63ddb..7edb5bf33 100644 --- a/src/buildtool/storage/local_ac.hpp +++ b/src/buildtool/storage/local_ac.hpp @@ -32,7 +32,6 @@ // forward declarations namespace build::bazel::remote::execution::v2 { -class Digest; class ActionResult; } // namespace build::bazel::remote::execution::v2 namespace bazel_re = build::bazel::remote::execution::v2; @@ -65,13 +64,13 @@ class LocalAC { /// \param result The action result to store. /// \returns true on success. 
[[nodiscard]] auto StoreResult( - bazel_re::Digest const& action_id, + ArtifactDigest const& action_id, bazel_re::ActionResult const& result) const noexcept -> bool; /// \brief Read cached action result. /// \param action_id The id of the action the result was produced by. /// \returns The action result if found or nullopt otherwise. - [[nodiscard]] auto CachedResult(bazel_re::Digest const& action_id) + [[nodiscard]] auto CachedResult(ArtifactDigest const& action_id) const noexcept -> std::optional; /// \brief Uplink entry from this generation to latest LocalAC generation. @@ -85,7 +84,7 @@ class LocalAC { requires(kIsLocalGeneration) [[nodiscard]] auto LocalUplinkEntry( LocalGenerationAC const& latest, - bazel_re::Digest const& action_id) const noexcept -> bool; + ArtifactDigest const& action_id) const noexcept -> bool; private: // The action cache stores the results of failed actions. For those to be @@ -113,26 +112,27 @@ class LocalAC { /// \param action_id The id of the action that produced the result. /// \return The key of an Action pointing at an ActionResult in the LocalCAS /// on success or an error message on failure. - [[nodiscard]] auto ReadActionKey(bazel_re::Digest const& action_id) - const noexcept -> expected; + [[nodiscard]] auto ReadActionKey(ArtifactDigest const& action_id) + const noexcept -> expected; /// \brief Add an action to the LocalCAS. /// \param action The action result to store. /// \return The key pointing at an ActionResult present in the LocalCAS on /// success or std::nullopt on failure. [[nodiscard]] auto WriteAction(bazel_re::ActionResult const& action) - const noexcept -> std::optional; + const noexcept -> std::optional; /// \brief Get the action specified by a key from the LocalCAS. /// \param cas_key The key pointing at an ActionResult present in the /// LocalCAS. /// \return The ActionResult corresponding to a cas_key on success /// or std::nullopt on failure. 
- [[nodiscard]] auto ReadAction(bazel_re::Digest const& cas_key) - const noexcept -> std::optional; + [[nodiscard]] auto ReadAction(ArtifactDigest const& cas_key) const noexcept + -> std::optional; }; #ifndef BOOTSTRAP_BUILD_TOOL +// NOLINTNEXTLINE(misc-header-include-cycle) #include "src/buildtool/storage/local_ac.tpp" #endif diff --git a/src/buildtool/storage/local_ac.tpp b/src/buildtool/storage/local_ac.tpp index 8ed930c40..d5cbbe5ef 100644 --- a/src/buildtool/storage/local_ac.tpp +++ b/src/buildtool/storage/local_ac.tpp @@ -20,22 +20,21 @@ #include "fmt/core.h" #include "nlohmann/json.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/common/bazel_types.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/storage/local_ac.hpp" template auto LocalAC::StoreResult( - bazel_re::Digest const& action_id, + ArtifactDigest const& action_id, bazel_re::ActionResult const& result) const noexcept -> bool { auto const cas_key = WriteAction(result); - return cas_key.has_value() and - WriteActionKey(static_cast(action_id), - static_cast(*cas_key)); + return cas_key.has_value() and WriteActionKey(action_id, *cas_key); } template -auto LocalAC::CachedResult(bazel_re::Digest const& action_id) +auto LocalAC::CachedResult(ArtifactDigest const& action_id) const noexcept -> std::optional { auto const cas_key = ReadActionKey(action_id); if (not cas_key) { @@ -46,7 +45,7 @@ auto LocalAC::CachedResult(bazel_re::Digest const& action_id) if (not result) { logger_->Emit(LogLevel::Warning, "Parsing action result failed for action {}", - NativeSupport::Unprefix(action_id.hash())); + action_id.hash()); return std::nullopt; } return std::move(result); @@ -57,10 +56,10 @@ template requires(kIsLocalGeneration) auto LocalAC::LocalUplinkEntry( LocalGenerationAC const& latest, - bazel_re::Digest const& action_id) const noexcept -> bool { + ArtifactDigest const& action_id) const noexcept -> bool { // Determine action cache key path in 
latest generation. - auto const key_digest = NativeSupport::Unprefix(action_id.hash()); - if (FileSystemManager::IsFile(latest.file_store_.GetPath(key_digest))) { + if (FileSystemManager::IsFile( + latest.file_store_.GetPath(action_id.hash()))) { return true; } @@ -78,29 +77,41 @@ auto LocalAC::LocalUplinkEntry( // Uplink result content for (auto const& file : result->output_files()) { + auto const digest = ArtifactDigestFactory::FromBazel( + cas_.GetHashFunction().GetType(), file.digest()); + if (not digest) { + return false; + } if (not cas_.LocalUplinkBlob( - latest.cas_, file.digest(), file.is_executable())) { + latest.cas_, *digest, file.is_executable())) { return false; } } for (auto const& link : result->output_file_symlinks()) { - if (not cas_.LocalUplinkBlob(latest.cas_, - ArtifactDigest::Create( - cas_.GetHashFunction(), link.target()), - /*is_executable=*/false)) { + if (not cas_.LocalUplinkBlob( + latest.cas_, + ArtifactDigestFactory::HashDataAs( + cas_.GetHashFunction(), link.target()), + /*is_executable=*/false)) { return false; } } for (auto const& link : result->output_directory_symlinks()) { - if (not cas_.LocalUplinkBlob(latest.cas_, - ArtifactDigest::Create( - cas_.GetHashFunction(), link.target()), - /*is_executable=*/false)) { + if (not cas_.LocalUplinkBlob( + latest.cas_, + ArtifactDigestFactory::HashDataAs( + cas_.GetHashFunction(), link.target()), + /*is_executable=*/false)) { return false; } } for (auto const& directory : result->output_directories()) { - if (not cas_.LocalUplinkTree(latest.cas_, directory.tree_digest())) { + auto const digest = ArtifactDigestFactory::FromBazel( + cas_.GetHashFunction().GetType(), directory.tree_digest()); + if (not digest) { + return false; + } + if (not cas_.LocalUplinkTree(latest.cas_, *digest)) { return false; } } @@ -112,9 +123,9 @@ auto LocalAC::LocalUplinkEntry( return false; } - auto const ac_entry_path = file_store_.GetPath(key_digest); + auto const ac_entry_path = 
file_store_.GetPath(action_id.hash()); // Uplink cache key - return latest.file_store_.AddFromFile(key_digest, + return latest.file_store_.AddFromFile(action_id.hash(), ac_entry_path, /*is_owner=*/true); } @@ -128,10 +139,9 @@ auto LocalAC::WriteActionKey( } template -auto LocalAC::ReadActionKey(bazel_re::Digest const& action_id) - const noexcept -> expected { - auto const key_path = - file_store_.GetPath(NativeSupport::Unprefix(action_id.hash())); +auto LocalAC::ReadActionKey(ArtifactDigest const& action_id) + const noexcept -> expected { + auto const key_path = file_store_.GetPath(action_id.hash()); if constexpr (kDoGlobalUplink) { // Uplink any existing action-cache entries in storage generations @@ -145,29 +155,27 @@ auto LocalAC::ReadActionKey(bazel_re::Digest const& action_id) fmt::format("Cache miss, entry not found {}", key_path.string())}; } - std::optional action_key; try { - nlohmann::json j = nlohmann::json::parse(*key_content); - action_key = ArtifactDigest{j[0].template get(), - j[1].template get(), - /*is_tree=*/false}; + nlohmann::json const j = nlohmann::json::parse(*key_content); + return ArtifactDigestFactory::Create(cas_.GetHashFunction().GetType(), + j[0].template get(), + j[1].template get(), + /*is_tree=*/false); } catch (...) 
{ - return unexpected{ - fmt::format("Parsing cache entry failed for action {}", - NativeSupport::Unprefix(action_id.hash()))}; + return unexpected{fmt::format( + "Parsing cache entry failed for action {}", action_id.hash())}; } - return *std::move(action_key); } template auto LocalAC::WriteAction(bazel_re::ActionResult const& action) - const noexcept -> std::optional { + const noexcept -> std::optional { return cas_.StoreBlob(action.SerializeAsString(), /*is_executable=*/false); } template -auto LocalAC::ReadAction(bazel_re::Digest const& cas_key) +auto LocalAC::ReadAction(ArtifactDigest const& cas_key) const noexcept -> std::optional { auto const action_path = cas_.BlobPath(cas_key, /*is_executable=*/false); diff --git a/src/buildtool/storage/local_cas.hpp b/src/buildtool/storage/local_cas.hpp index 86bfc0e4f..5f3a9db6a 100644 --- a/src/buildtool/storage/local_cas.hpp +++ b/src/buildtool/storage/local_cas.hpp @@ -23,6 +23,7 @@ #include "gsl/gsl" #include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/git_repo.hpp" #include "src/buildtool/file_system/object_cas.hpp" @@ -51,15 +52,15 @@ class LocalCAS { explicit LocalCAS( GenerationConfig const& config, gsl::not_null const*> const& uplinker) - : cas_file_{config.storage_config->hash_function, + : cas_file_{&config.storage_config->hash_function, config.cas_f, - MakeUplinker(uplinker)}, - cas_exec_{config.storage_config->hash_function, + MakeUplinker(config, uplinker)}, + cas_exec_{&config.storage_config->hash_function, config.cas_x, - MakeUplinker(uplinker)}, - cas_tree_{config.storage_config->hash_function, + MakeUplinker(config, uplinker)}, + cas_tree_{&config.storage_config->hash_function, config.cas_t, - MakeUplinker(uplinker)}, + MakeUplinker(config, uplinker)}, cas_file_large_{this, config, uplinker}, cas_tree_large_{this, config, uplinker}, 
hash_function_{config.storage_config->hash_function} {} @@ -95,7 +96,7 @@ class LocalCAS { template [[nodiscard]] auto StoreBlob(std::filesystem::path const& file_path, bool is_executable) const noexcept - -> std::optional { + -> std::optional { return is_executable ? cas_exec_.StoreBlobFromFile(file_path, kOwner) : cas_file_.StoreBlobFromFile(file_path, kOwner); } @@ -106,7 +107,7 @@ class LocalCAS { /// \returns Digest of the stored blob or nullopt otherwise. [[nodiscard]] auto StoreBlob(std::string const& bytes, bool is_executable = false) const noexcept - -> std::optional { + -> std::optional { return is_executable ? cas_exec_.StoreBlobFromBytes(bytes) : cas_file_.StoreBlobFromBytes(bytes); } @@ -117,7 +118,7 @@ class LocalCAS { /// \returns Digest of the stored tree or nullopt otherwise. template [[nodiscard]] auto StoreTree(std::filesystem::path const& file_path) - const noexcept -> std::optional { + const noexcept -> std::optional { return cas_tree_.StoreBlobFromFile(file_path, kOwner); } @@ -125,7 +126,7 @@ class LocalCAS { /// \param bytes The bytes to create the tree from. /// \returns Digest of the stored tree or nullopt otherwise. [[nodiscard]] auto StoreTree(std::string const& bytes) const noexcept - -> std::optional { + -> std::optional { return cas_tree_.StoreBlobFromBytes(bytes); } @@ -134,7 +135,7 @@ class LocalCAS { /// \param digest Digest of the blob to lookup. /// \param is_executable Lookup blob with executable permissions. /// \returns Path to the blob if found or nullopt otherwise. - [[nodiscard]] auto BlobPath(bazel_re::Digest const& digest, + [[nodiscard]] auto BlobPath(ArtifactDigest const& digest, bool is_executable) const noexcept -> std::optional { auto const path = BlobPathNoSync(digest, is_executable); @@ -146,7 +147,7 @@ class LocalCAS { /// \param digest Digest of the blob to lookup. /// \param is_executable Lookup blob with executable permissions. /// \returns Path to the blob if found or nullopt otherwise. 
- [[nodiscard]] auto BlobPathNoSync(bazel_re::Digest const& digest, + [[nodiscard]] auto BlobPathNoSync(ArtifactDigest const& digest, bool is_executable) const noexcept -> std::optional { return is_executable ? cas_exec_.BlobPath(digest) @@ -157,8 +158,8 @@ class LocalCAS { /// \param digest The digest of a blob to be split. /// \returns Digests of the parts of the large object or an /// error code on failure. - [[nodiscard]] auto SplitBlob(bazel_re::Digest const& digest) const noexcept - -> expected, LargeObjectError> { + [[nodiscard]] auto SplitBlob(ArtifactDigest const& digest) const noexcept + -> expected, LargeObjectError> { return cas_file_large_.Split(digest); } @@ -168,10 +169,10 @@ class LocalCAS { /// \param is_executable Splice the blob with executable permissions. /// \return The digest of the result or an error code on /// failure. - [[nodiscard]] auto SpliceBlob(bazel_re::Digest const& digest, - std::vector const& parts, + [[nodiscard]] auto SpliceBlob(ArtifactDigest const& digest, + std::vector const& parts, bool is_executable) const noexcept - -> expected { + -> expected { return is_executable ? Splice(digest, parts) : Splice(digest, parts); } @@ -179,7 +180,7 @@ class LocalCAS { /// \brief Obtain tree path from digest. /// \param digest Digest of the tree to lookup. /// \returns Path to the tree if found or nullopt otherwise. - [[nodiscard]] auto TreePath(bazel_re::Digest const& digest) const noexcept + [[nodiscard]] auto TreePath(ArtifactDigest const& digest) const noexcept -> std::optional { return cas_tree_.BlobPath(digest); } @@ -188,8 +189,8 @@ class LocalCAS { /// \param digest The digest of a tree to be split. /// \returns Digests of the parts of the large object or an /// error code on failure. 
- [[nodiscard]] auto SplitTree(bazel_re::Digest const& digest) const noexcept - -> expected, LargeObjectError> { + [[nodiscard]] auto SplitTree(ArtifactDigest const& digest) const noexcept + -> expected, LargeObjectError> { return cas_tree_large_.Split(digest); } @@ -198,9 +199,9 @@ class LocalCAS { /// \param parts The parts of the large object. /// \return The digest of the result or an error code on /// failure. - [[nodiscard]] auto SpliceTree(bazel_re::Digest const& digest, - std::vector const& parts) - const noexcept -> expected { + [[nodiscard]] auto SpliceTree(ArtifactDigest const& digest, + std::vector const& parts) + const noexcept -> expected { return Splice(digest, parts); } @@ -208,10 +209,18 @@ class LocalCAS { /// \param tree_digest Digest of the tree to be checked. /// \param tree_data Content of the tree. /// \return An error on fail. - [[nodiscard]] auto CheckTreeInvariant(bazel_re::Digest const& tree_digest, + [[nodiscard]] auto CheckTreeInvariant(ArtifactDigest const& tree_digest, std::string const& tree_data) const noexcept -> std::optional; + /// \brief Check whether all parts of the tree are in the storage. + /// \param tree_digest Digest of the tree to be checked. + /// \param file Content of the tree. + /// \return An error on fail. + [[nodiscard]] auto CheckTreeInvariant(ArtifactDigest const& tree_digest, + std::filesystem::path const& file) + const noexcept -> std::optional; + /// \brief Uplink blob from this generation to latest LocalCAS generation. /// Performs a synchronization if requested and if blob is only available /// with inverse x-bit. 
This function is only available for instances that @@ -228,7 +237,7 @@ class LocalCAS { requires(kIsLocalGeneration) [[nodiscard]] auto LocalUplinkBlob( LocalGenerationCAS const& latest, - bazel_re::Digest const& digest, + ArtifactDigest const& digest, bool is_executable, bool skip_sync = false, bool splice_result = false) const noexcept -> bool; @@ -251,7 +260,7 @@ class LocalCAS { requires(kIsLocalGeneration) [[nodiscard]] auto LocalUplinkTree( LocalGenerationCAS const& latest, - bazel_re::Digest const& digest, + ArtifactDigest const& digest, bool splice_result = false) const noexcept -> bool; /// \brief Uplink large entry from this generation to latest LocalCAS @@ -267,7 +276,7 @@ class LocalCAS { requires(kIsLocalGeneration) [[nodiscard]] auto LocalUplinkLargeObject( LocalGenerationCAS const& latest, - bazel_re::Digest const& digest) const noexcept -> bool; + ArtifactDigest const& digest) const noexcept -> bool; private: ObjectCAS cas_file_; @@ -275,18 +284,22 @@ class LocalCAS { ObjectCAS cas_tree_; LargeObjectCAS cas_file_large_; LargeObjectCAS cas_tree_large_; - HashFunction const hash_function_; + HashFunction const& hash_function_; /// \brief Provides uplink via "exists callback" for physical object CAS. template [[nodiscard]] static auto MakeUplinker( + GenerationConfig const& config, gsl::not_null const*> const& uplinker) { if constexpr (kDoGlobalUplink) { - return [uplinker](auto const& digest, auto const& /*path*/) { + bool const native = ProtocolTraits::IsNative( + config.storage_config->hash_function.GetType()); + return [native, uplinker](auto const& digest, + auto const& /*path*/) { if constexpr (IsTreeObject(kType)) { // in non-compatible mode, do explicit deep tree uplink // in compatible mode, treat all trees as blobs - if (not Compatibility::IsCompatible()) { + if (native) { return uplinker->UplinkTree(digest); } } @@ -302,7 +315,7 @@ class LocalCAS { /// \param digest Blob digest. /// \param to_executable Sync direction. 
/// \returns Path to blob in target CAS. - [[nodiscard]] auto TrySyncBlob(bazel_re::Digest const& digest, + [[nodiscard]] auto TrySyncBlob(ArtifactDigest const& digest, bool to_executable) const noexcept -> std::optional { auto const src_blob = BlobPathNoSync(digest, not to_executable); @@ -316,45 +329,53 @@ class LocalCAS { requires(kIsLocalGeneration) [[nodiscard]] auto LocalUplinkGitTree( LocalGenerationCAS const& latest, - bazel_re::Digest const& digest, + ArtifactDigest const& digest, bool splice_result = false) const noexcept -> bool; template requires(kIsLocalGeneration) [[nodiscard]] auto LocalUplinkBazelDirectory( LocalGenerationCAS const& latest, - bazel_re::Digest const& digest, - gsl::not_null*> const& seen, + ArtifactDigest const& digest, + gsl::not_null*> const& seen, bool splice_result = false) const noexcept -> bool; template requires(kIsLocalGeneration) - [[nodiscard]] auto TrySplice(bazel_re::Digest const& digest) const noexcept + [[nodiscard]] auto TrySplice(ArtifactDigest const& digest) const noexcept -> std::optional; template - [[nodiscard]] auto Splice(bazel_re::Digest const& digest, - std::vector const& parts) - const noexcept -> expected; + [[nodiscard]] auto Splice(ArtifactDigest const& digest, + std::vector const& parts) + const noexcept -> expected; }; #ifndef BOOTSTRAP_BUILD_TOOL +// NOLINTNEXTLINE(misc-header-include-cycle) #include "src/buildtool/storage/local_cas.tpp" #else template auto LocalCAS::CheckTreeInvariant( - bazel_re::Digest const& tree_digest, + ArtifactDigest const& tree_digest, std::string const& tree_data) const noexcept -> std::optional { return std::nullopt; } +template +auto LocalCAS::CheckTreeInvariant( + ArtifactDigest const& tree_digest, + std::filesystem::path const& file) const noexcept + -> std::optional { + return std::nullopt; +} + template template -auto LocalCAS::Splice( - bazel_re::Digest const& digest, - std::vector const& parts) const noexcept - -> expected { +auto LocalCAS::Splice(ArtifactDigest const& 
digest, + std::vector const& parts) + const noexcept -> expected { return unexpected{ LargeObjectError{LargeObjectErrorCode::Internal, "not allowed"}}; } diff --git a/src/buildtool/storage/local_cas.tpp b/src/buildtool/storage/local_cas.tpp index 96224c3c1..29bf28f56 100644 --- a/src/buildtool/storage/local_cas.tpp +++ b/src/buildtool/storage/local_cas.tpp @@ -19,32 +19,17 @@ #include // std::move #include "fmt/core.h" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/bazel_types.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/storage/local_cas.hpp" -namespace detail { - -[[nodiscard]] static inline auto CheckDigestConsistency( - bazel_re::Digest const& lhs, - bazel_re::Digest const& rhs) noexcept -> bool { - if (lhs.hash() != rhs.hash()) { - return false; - } - bool const both_known = lhs.size_bytes() != 0 and rhs.size_bytes() != 0; - if (Compatibility::IsCompatible() or both_known) { - return lhs.size_bytes() == rhs.size_bytes(); - } - return true; -} - -} // namespace detail - template template requires(kIsLocalGeneration) auto LocalCAS::LocalUplinkBlob( LocalGenerationCAS const& latest, - bazel_re::Digest const& digest, + ArtifactDigest const& digest, bool is_executable, bool skip_sync, bool splice_result) const noexcept -> bool { @@ -96,10 +81,10 @@ template requires(kIsLocalGeneration) auto LocalCAS::LocalUplinkTree( LocalGenerationCAS const& latest, - bazel_re::Digest const& digest, + ArtifactDigest const& digest, bool splice_result) const noexcept -> bool { - if (Compatibility::IsCompatible()) { - std::unordered_set seen{}; + if (not ProtocolTraits::IsNative(hash_function_.GetType())) { + std::unordered_set seen{}; return LocalUplinkBazelDirectory(latest, digest, &seen, splice_result); } return LocalUplinkGitTree(latest, digest, splice_result); @@ -110,7 +95,7 @@ template requires(kIsLocalGeneration) auto LocalCAS::LocalUplinkGitTree( LocalGenerationCAS const& latest, - bazel_re::Digest 
const& digest, + ArtifactDigest const& digest, bool splice_result) const noexcept -> bool { // Determine tree path in latest generation. auto tree_path_latest = latest.cas_tree_.BlobPath(digest); @@ -131,9 +116,8 @@ auto LocalCAS::LocalUplinkGitTree( // Determine tree entries. auto content = FileSystemManager::ReadFile(*tree_path); - auto id = NativeSupport::Unprefix(digest.hash()); auto check_symlinks = - [this](std::vector const& ids) -> bool { + [this](std::vector const& ids) -> bool { for (auto const& id : ids) { auto link_path = cas_file_.BlobPath(id); std::optional spliced; @@ -154,7 +138,7 @@ auto LocalCAS::LocalUplinkGitTree( return true; }; auto tree_entries = GitRepo::ReadTreeData(*content, - id, + digest.hash(), check_symlinks, /*is_hex_id=*/true); if (not tree_entries) { @@ -166,17 +150,23 @@ auto LocalCAS::LocalUplinkGitTree( // Process only first entry from 'entry_vector' since all // entries represent the same blob, just with different // names. - auto entry = entry_vector.front(); - auto hash = ToHexString(raw_id); - auto digest = ArtifactDigest{hash, 0, IsTreeObject(entry.type)}; - if (entry.type == ObjectType::Tree) { - if (not LocalUplinkGitTree(latest, digest)) { + auto const entry_type = entry_vector.front().type; + auto const digest = + ArtifactDigestFactory::Create(hash_function_.GetType(), + ToHexString(raw_id), + 0, + IsTreeObject(entry_type)); + if (not digest) { + return false; + } + if (digest->IsTree()) { + if (not LocalUplinkGitTree(latest, *digest)) { return false; } } else { if (not LocalUplinkBlob( - latest, digest, IsExecutableObject(entry.type))) { + latest, *digest, IsExecutableObject(entry_type))) { return false; } } @@ -205,8 +195,8 @@ template requires(kIsLocalGeneration) auto LocalCAS::LocalUplinkBazelDirectory( LocalGenerationCAS const& latest, - bazel_re::Digest const& digest, - gsl::not_null*> const& seen, + ArtifactDigest const& digest, + gsl::not_null*> const& seen, bool splice_result) const noexcept -> bool { // Skip 
already uplinked directories if (seen->contains(digest)) { @@ -233,19 +223,28 @@ auto LocalCAS::LocalUplinkBazelDirectory( // Uplink bazel directory entries. for (auto const& file : dir.files()) { - if (not LocalUplinkBlob(latest, file.digest(), file.is_executable())) { + auto const digest = ArtifactDigestFactory::FromBazel( + hash_function_.GetType(), file.digest()); + if (not digest) { + return false; + } + if (not LocalUplinkBlob(latest, *digest, file.is_executable())) { return false; } } for (auto const& directory : dir.directories()) { - if (not LocalUplinkBazelDirectory(latest, directory.digest(), seen)) { + auto const digest = ArtifactDigestFactory::FromBazel( + hash_function_.GetType(), directory.digest()); + if (not digest) { + return false; + } + if (not LocalUplinkBazelDirectory(latest, *digest, seen)) { return false; } } // Determine bazel directory path in latest generation. - auto dir_path_latest = latest.cas_tree_.BlobPath(digest); - + auto const dir_path_latest = latest.cas_tree_.BlobPath(digest); if (spliced) { // Uplink the large entry afterwards: // The result of uplinking of a large object must not affect the @@ -267,6 +266,7 @@ auto LocalCAS::LocalUplinkBazelDirectory( seen->emplace(digest); return true; } catch (...) { + return false; } } return false; @@ -277,7 +277,7 @@ template requires(kIsLocalGeneration) auto LocalCAS::LocalUplinkLargeObject( LocalGenerationCAS const& latest, - bazel_re::Digest const& digest) const noexcept -> bool { + ArtifactDigest const& digest) const noexcept -> bool { if constexpr (IsTreeObject(kType)) { return cas_tree_large_.LocalUplink( latest, latest.cas_tree_large_, digest); @@ -291,8 +291,8 @@ auto LocalCAS::LocalUplinkLargeObject( template template requires(kIsLocalGeneration) -auto LocalCAS::TrySplice(bazel_re::Digest const& digest) - const noexcept -> std::optional { +auto LocalCAS::TrySplice( + ArtifactDigest const& digest) const noexcept -> std::optional { auto spliced = IsTreeObject(kType) ? 
cas_tree_large_.TrySplice(digest) : cas_file_large_.TrySplice(digest); return spliced and spliced->IsValid() ? std::optional{std::move(*spliced)} @@ -301,19 +301,18 @@ auto LocalCAS::TrySplice(bazel_re::Digest const& digest) template auto LocalCAS::CheckTreeInvariant( - bazel_re::Digest const& tree_digest, + ArtifactDigest const& tree_digest, std::string const& tree_data) const noexcept -> std::optional { - if (Compatibility::IsCompatible()) { + if (not ProtocolTraits::IsNative(hash_function_.GetType())) { return std::nullopt; } auto skip_symlinks = [](auto const& /*unused*/) { return true; }; - auto const entries = - GitRepo::ReadTreeData(tree_data, - NativeSupport::Unprefix(tree_digest.hash()), - skip_symlinks, - /*is_hex_id=*/true); + auto const entries = GitRepo::ReadTreeData(tree_data, + tree_digest.hash(), + skip_symlinks, + /*is_hex_id=*/true); if (not entries) { return LargeObjectError{ LargeObjectErrorCode::Internal, @@ -324,36 +323,57 @@ auto LocalCAS::CheckTreeInvariant( // Ensure all entries are in the storage: for (const auto& entry : *entries) { for (auto const& item : entry.second) { - bazel_re::Digest const digest = - ArtifactDigest(ToHexString(entry.first), - /*size_unknown=*/0ULL, - IsTreeObject(item.type)); + auto const digest = + ArtifactDigestFactory::Create(hash_function_.GetType(), + ToHexString(entry.first), + 0, // size unknown + IsTreeObject(item.type)); + if (not digest) { + return LargeObjectError{ + LargeObjectErrorCode::InvalidTree, + fmt::format("tree invariant violated {}:\n {}", + tree_digest.hash(), + digest.error())}; + } // To avoid splicing during search, large CASes are inspected first. bool const entry_exists = IsTreeObject(item.type) - ? cas_tree_large_.GetEntryPath(digest) or TreePath(digest) - : cas_file_large_.GetEntryPath(digest) or - BlobPath(digest, IsExecutableObject(item.type)); + ? 
cas_tree_large_.GetEntryPath(*digest) or TreePath(*digest) + : cas_file_large_.GetEntryPath(*digest) or + BlobPath(*digest, IsExecutableObject(item.type)); if (not entry_exists) { return LargeObjectError{ LargeObjectErrorCode::InvalidTree, fmt::format("tree invariant violated {} : missing part {}", tree_digest.hash(), - digest.hash())}; + digest->hash())}; } } } return std::nullopt; } +template +auto LocalCAS::CheckTreeInvariant( + ArtifactDigest const& tree_digest, + std::filesystem::path const& file) const noexcept + -> std::optional { + auto const tree_data = FileSystemManager::ReadFile(file); + if (not tree_data) { + return LargeObjectError{ + LargeObjectErrorCode::Internal, + fmt::format("could not read tree {}", tree_digest.hash())}; + } + return CheckTreeInvariant(tree_digest, *tree_data); +} + template template -auto LocalCAS::Splice( - bazel_re::Digest const& digest, - std::vector const& parts) const noexcept - -> expected { +auto LocalCAS::Splice(ArtifactDigest const& digest, + std::vector const& parts) + const noexcept -> expected { static constexpr bool kIsTree = IsTreeObject(kType); static constexpr bool kIsExec = IsExecutableObject(kType); @@ -378,13 +398,13 @@ auto LocalCAS::Splice( // calculation is done instead. 
auto const& file_path = large_object.GetPath(); auto spliced_digest = - ArtifactDigest::CreateFromFile(hash_function_, file_path); + ArtifactDigestFactory::HashFileAs(hash_function_, file_path); if (not spliced_digest) { return unexpected{LargeObjectError{LargeObjectErrorCode::Internal, "could not calculate digest"}}; } - if (not detail::CheckDigestConsistency(*spliced_digest, digest)) { + if (*spliced_digest != digest) { return unexpected{LargeObjectError{ LargeObjectErrorCode::InvalidResult, fmt::format("actual result {} differs from the expected one {}", @@ -394,15 +414,8 @@ auto LocalCAS::Splice( // Check tree invariants: if constexpr (kIsTree) { - if (not Compatibility::IsCompatible()) { - // Read tree entries: - auto const tree_data = FileSystemManager::ReadFile(file_path); - if (not tree_data) { - return unexpected{LargeObjectError{ - LargeObjectErrorCode::Internal, - fmt::format("could not read tree {}", digest.hash())}}; - } - if (auto error = CheckTreeInvariant(digest, *tree_data)) { + if (ProtocolTraits::IsNative(hash_function_.GetType())) { + if (auto error = CheckTreeInvariant(digest, file_path)) { return unexpected{std::move(*error)}; } } @@ -411,12 +424,12 @@ auto LocalCAS::Splice( static constexpr bool kOwner = true; auto const stored_digest = kIsTree ? 
StoreTree(file_path) : StoreBlob(file_path, kIsExec); - if (stored_digest) { - return std::move(*stored_digest); + if (not stored_digest) { + return unexpected{LargeObjectError{ + LargeObjectErrorCode::Internal, + fmt::format("could not splice {}", digest.hash())}}; } - return unexpected{ - LargeObjectError{LargeObjectErrorCode::Internal, - fmt::format("could not splice {}", digest.hash())}}; + return *std::move(stored_digest); } #endif // INCLUDED_SRC_BUILDTOOL_STORAGE_LOCAL_CAS_TPP diff --git a/src/buildtool/storage/repository_garbage_collector.cpp b/src/buildtool/storage/repository_garbage_collector.cpp index f1025795e..67a08f9f0 100644 --- a/src/buildtool/storage/repository_garbage_collector.cpp +++ b/src/buildtool/storage/repository_garbage_collector.cpp @@ -36,13 +36,14 @@ auto RepositoryGarbageCollector::LockFilePath( auto RepositoryGarbageCollector::TriggerGarbageCollection( StorageConfig const& storage_config) noexcept -> bool { - auto const kRemoveMe = std::string{"remove-me"}; + auto const remove_me_prefix = std::string{"remove-me"}; auto pid = CreateProcessUniqueId(); if (not pid) { return false; } - auto remove_me = storage_config.RepositoryRoot() / (kRemoveMe + *pid); + auto remove_me = + storage_config.RepositoryRoot() / (remove_me_prefix + *pid); // With a shared lock, we can remove that directory, if it exists, // as we own the process id. 
diff --git a/src/buildtool/storage/target_cache.hpp b/src/buildtool/storage/target_cache.hpp index 6feff6a27..cfd8534ec 100644 --- a/src/buildtool/storage/target_cache.hpp +++ b/src/buildtool/storage/target_cache.hpp @@ -27,6 +27,7 @@ #include "src/buildtool/build_engine/base_maps/entity_name_data.hpp" #include "src/buildtool/build_engine/expression/configuration.hpp" #include "src/buildtool/common/artifact.hpp" +#include "src/buildtool/common/artifact_digest.hpp" #include "src/buildtool/file_system/file_storage.hpp" #include "src/buildtool/file_system/object_type.hpp" #include "src/buildtool/logging/logger.hpp" @@ -92,7 +93,7 @@ class TargetCache { /// Doesn't create a TargetCacheEntry in the TargetCache. /// \return TargetCacheKey on success. [[nodiscard]] auto ComputeKey( - std::string const& repo_key, + ArtifactDigest const& repo_key, BuildMaps::Base::NamedTarget const& target_name, Configuration const& effective_config) const noexcept -> std::optional; @@ -158,6 +159,7 @@ using ActiveTargetCache = TargetCache; using ActiveTargetCache = TargetCache; #endif // BOOTSTRAP_BUILD_TOOL +// NOLINTNEXTLINE(misc-header-include-cycle) #include "src/buildtool/storage/target_cache.tpp" namespace std { diff --git a/src/buildtool/storage/target_cache.tpp b/src/buildtool/storage/target_cache.tpp index 502c346a4..c31fa1cc4 100644 --- a/src/buildtool/storage/target_cache.tpp +++ b/src/buildtool/storage/target_cache.tpp @@ -19,6 +19,7 @@ #include //std::ignore #include "nlohmann/json.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/storage/target_cache.hpp" @@ -31,9 +32,7 @@ auto TargetCache::Store( return false; } if (auto digest = cas_.StoreBlob(value.ToJson().dump(2))) { - auto data = - Artifact::ObjectInfo{ArtifactDigest{*digest}, ObjectType::File} - .ToString(); + auto data = Artifact::ObjectInfo{*digest, ObjectType::File}.ToString(); logger_->Emit(LogLevel::Debug, "Adding entry for key {} as {}", 
key.Id().ToString(), @@ -45,21 +44,20 @@ auto TargetCache::Store( template auto TargetCache::ComputeKey( - std::string const& repo_key, + ArtifactDigest const& repo_key, BuildMaps::Base::NamedTarget const& target_name, Configuration const& effective_config) const noexcept -> std::optional { try { // target's repository is content-fixed, we can compute a cache key auto target_desc = nlohmann::json{ - {"repo_key", repo_key}, + {"repo_key", repo_key.hash()}, {"target_name", nlohmann::json{target_name.module, target_name.name}.dump()}, {"effective_config", effective_config.ToString()}}; if (auto target_key = cas_.StoreBlob(target_desc.dump(2), /*is_executable=*/false)) { - return TargetCacheKey{ - {ArtifactDigest{*target_key}, ObjectType::File}}; + return TargetCacheKey{{*target_key, ObjectType::File}}; } } catch (std::exception const& ex) { logger_->Emit(LogLevel::Error, @@ -89,12 +87,14 @@ auto TargetCache::Read( entry_path.string()); return std::nullopt; } - if (auto info = Artifact::ObjectInfo::FromString(*entry)) { + auto const hash_type = cas_.GetHashFunction().GetType(); + if (auto info = Artifact::ObjectInfo::FromString(hash_type, *entry)) { if (auto path = cas_.BlobPath(info->digest, /*is_executable=*/false)) { if (auto value = FileSystemManager::ReadFile(*path)) { try { return std::make_pair( - TargetCacheEntry{nlohmann::json::parse(*value)}, + TargetCacheEntry{hash_type, + nlohmann::json::parse(*value)}, std::move(*info)); } catch (std::exception const& ex) { logger_->Emit(LogLevel::Warning, @@ -139,7 +139,8 @@ auto TargetCache::LocalUplinkEntry( } // Determine target cache entry location. 
- auto entry_info = Artifact::ObjectInfo::FromString(*raw_key); + auto entry_info = Artifact::ObjectInfo::FromString( + cas_.GetHashFunction().GetType(), *raw_key); if (not entry_info) { return false; } @@ -162,7 +163,8 @@ auto TargetCache::LocalUplinkEntry( } catch (std::exception const& ex) { return false; } - auto entry = TargetCacheEntry::FromJson(json_desc); + auto entry = + TargetCacheEntry::FromJson(cas_.GetHashFunction().GetType(), json_desc); // Uplink the implied export targets first for (auto const& implied_digest : entry.ToImplied()) { diff --git a/src/buildtool/storage/target_cache_entry.cpp b/src/buildtool/storage/target_cache_entry.cpp index becc6a133..263993c85 100644 --- a/src/buildtool/storage/target_cache_entry.cpp +++ b/src/buildtool/storage/target_cache_entry.cpp @@ -20,11 +20,15 @@ #include #include +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" +#include "src/utils/cpp/expected.hpp" #include "src/utils/cpp/gsl.hpp" auto TargetCacheEntry::FromTarget( + HashFunction::Type hash_type, AnalysedTargetPtr const& target, std::unordered_map const& replacements) noexcept -> std::optional { @@ -42,17 +46,18 @@ auto TargetCacheEntry::FromTarget( if (not implied.empty()) { (*desc)["implied export targets"] = implied; } - return TargetCacheEntry{*desc}; + return TargetCacheEntry{hash_type, *desc}; } -auto TargetCacheEntry::FromJson(nlohmann::json desc) noexcept +auto TargetCacheEntry::FromJson(HashFunction::Type hash_type, + nlohmann::json desc) noexcept -> TargetCacheEntry { - return TargetCacheEntry(std::move(desc)); + return TargetCacheEntry(hash_type, std::move(desc)); } auto TargetCacheEntry::ToResult() const noexcept -> std::optional { - return TargetResult::FromJson(desc_); + return TargetResult::FromJson(hash_type_, desc_); } auto TargetCacheEntry::ToImplied() const noexcept -> std::set { @@ 
-78,9 +83,16 @@ auto TargetCacheEntry::ToImpliedIds(std::string const& entry_key_hash) try { for (auto const& x : desc_["implied export targets"]) { if (x != entry_key_hash) { - result.emplace_back(Artifact::ObjectInfo{ - .digest = ArtifactDigest{x, 0, /*is_tree=*/false}, - .type = ObjectType::File}); + auto digest = ArtifactDigestFactory::Create( + hash_type_, x, 0, /*is_tree=*/false); + if (not digest) { + Logger::Log( + LogLevel::Debug, "{}", std::move(digest).error()); + return std::nullopt; + } + result.emplace_back( + Artifact::ObjectInfo{.digest = *std::move(digest), + .type = ObjectType::File}); } } } catch (std::exception const& ex) { @@ -93,9 +105,10 @@ auto TargetCacheEntry::ToImpliedIds(std::string const& entry_key_hash) return result; } -[[nodiscard]] auto ToObjectInfo(nlohmann::json const& json) +[[nodiscard]] static auto ToObjectInfo(HashFunction::Type hash_type, + nlohmann::json const& json) -> Artifact::ObjectInfo { - auto const& desc = ArtifactDescription::FromJson(json); + auto const desc = ArtifactDescription::FromJson(hash_type, json); // The assumption is that all artifacts mentioned in a target cache // entry are KNOWN to the remote side. 
ExpectsAudit(desc and desc->IsKnown()); @@ -104,7 +117,8 @@ auto TargetCacheEntry::ToImpliedIds(std::string const& entry_key_hash) return *info; } -[[nodiscard]] auto ScanArtifactMap( +[[nodiscard]] static auto ScanArtifactMap( + HashFunction::Type hash_type, gsl::not_null*> const& infos, nlohmann::json const& json) -> bool { if (not json.is_object()) { @@ -114,11 +128,14 @@ auto TargetCacheEntry::ToImpliedIds(std::string const& entry_key_hash) std::transform(json.begin(), json.end(), std::back_inserter(*infos), - [](auto const& item) { return ToObjectInfo(item); }); + [hash_type](auto const& item) { + return ToObjectInfo(hash_type, item); + }); return true; } -[[nodiscard]] auto ScanProvidesMap( +[[nodiscard]] static auto ScanProvidesMap( + HashFunction::Type hash_type, gsl::not_null*> const& infos, nlohmann::json const& json) -> bool { if (not json.is_object()) { @@ -127,13 +144,13 @@ auto TargetCacheEntry::ToImpliedIds(std::string const& entry_key_hash) auto const& nodes = json["nodes"]; auto const& provided_artifacts = json["provided_artifacts"]; infos->reserve(infos->size() + provided_artifacts.size()); - std::transform( - provided_artifacts.begin(), - provided_artifacts.end(), - std::back_inserter(*infos), - [&nodes](auto const& item) { - return ToObjectInfo(nodes[item.template get()]); - }); + std::transform(provided_artifacts.begin(), + provided_artifacts.end(), + std::back_inserter(*infos), + [hash_type, &nodes](auto const& item) { + return ToObjectInfo( + hash_type, nodes[item.template get()]); + }); return true; } @@ -141,9 +158,9 @@ auto TargetCacheEntry::ToArtifacts( gsl::not_null*> const& infos) const noexcept -> bool { try { - if (ScanArtifactMap(infos, desc_["artifacts"]) and - ScanArtifactMap(infos, desc_["runfiles"]) and - ScanProvidesMap(infos, desc_["provides"])) { + if (ScanArtifactMap(hash_type_, infos, desc_["artifacts"]) and + ScanArtifactMap(hash_type_, infos, desc_["runfiles"]) and + ScanProvidesMap(hash_type_, infos, desc_["provides"])) { 
return true; } } catch (std::exception const& ex) { diff --git a/src/buildtool/storage/target_cache_entry.hpp b/src/buildtool/storage/target_cache_entry.hpp index b0bd1020b..a1cddf629 100644 --- a/src/buildtool/storage/target_cache_entry.hpp +++ b/src/buildtool/storage/target_cache_entry.hpp @@ -27,21 +27,25 @@ #include "src/buildtool/build_engine/expression/target_result.hpp" #include "src/buildtool/common/artifact.hpp" #include "src/buildtool/common/artifact_description.hpp" +#include "src/buildtool/crypto/hash_function.hpp" // Entry for target cache. Created from target, contains TargetResult. class TargetCacheEntry { public: - explicit TargetCacheEntry(nlohmann::json desc) : desc_(std::move(desc)) {} + explicit TargetCacheEntry(HashFunction::Type hash_type, nlohmann::json desc) + : hash_type_{hash_type}, desc_(std::move(desc)) {} // Create the entry from target with replacement artifacts/infos. // Replacement artifacts must replace all non-known artifacts by known. [[nodiscard]] static auto FromTarget( + HashFunction::Type hash_type, AnalysedTargetPtr const& target, std::unordered_map const& replacements) noexcept -> std::optional; // Create a target-cache entry from a json description. - [[nodiscard]] static auto FromJson(nlohmann::json desc) noexcept + [[nodiscard]] static auto FromJson(HashFunction::Type hash_type, + nlohmann::json desc) noexcept -> TargetCacheEntry; // Obtain TargetResult from cache entry. 
@@ -70,6 +74,7 @@ class TargetCacheEntry { } private: + HashFunction::Type hash_type_; nlohmann::json desc_; }; diff --git a/src/buildtool/storage/uplinker.cpp b/src/buildtool/storage/uplinker.cpp index fee01af49..8b3b8c065 100644 --- a/src/buildtool/storage/uplinker.cpp +++ b/src/buildtool/storage/uplinker.cpp @@ -19,7 +19,6 @@ #include #include -#include "src/buildtool/common/bazel_types.hpp" #include "src/buildtool/file_system/object_type.hpp" #include "src/buildtool/storage/local_ac.hpp" #include "src/buildtool/storage/local_cas.hpp" @@ -46,7 +45,7 @@ GlobalUplinker::GlobalUplinker( : storage_config_{*storage_config}, generations_{CreateGenerations(&storage_config_)} {} -auto GlobalUplinker::UplinkBlob(bazel_re::Digest const& digest, +auto GlobalUplinker::UplinkBlob(ArtifactDigest const& digest, bool is_executable) const noexcept -> bool { // Try to find blob in all generations. auto const& latest = generations_[Generation::kYoungest].CAS(); @@ -62,7 +61,7 @@ auto GlobalUplinker::UplinkBlob(bazel_re::Digest const& digest, }); } -auto GlobalUplinker::UplinkTree(bazel_re::Digest const& digest) const noexcept +auto GlobalUplinker::UplinkTree(ArtifactDigest const& digest) const noexcept -> bool { // Try to find blob in all generations. auto const& latest = generations_[Generation::kYoungest].CAS(); @@ -75,7 +74,7 @@ auto GlobalUplinker::UplinkTree(bazel_re::Digest const& digest) const noexcept } auto GlobalUplinker::UplinkLargeBlob( - bazel_re::Digest const& digest) const noexcept -> bool { + ArtifactDigest const& digest) const noexcept -> bool { // Try to find large entry in all generations. auto const& latest = generations_[Generation::kYoungest].CAS(); return std::any_of( @@ -88,7 +87,7 @@ auto GlobalUplinker::UplinkLargeBlob( } auto GlobalUplinker::UplinkActionCacheEntry( - bazel_re::Digest const& action_id) const noexcept -> bool { + ArtifactDigest const& action_id) const noexcept -> bool { // Try to find action-cache entry in all generations. 
auto const& latest = generations_[Generation::kYoungest].ActionCache(); return std::any_of(generations_.begin(), diff --git a/src/buildtool/storage/uplinker.hpp b/src/buildtool/storage/uplinker.hpp index c713155cb..353caac68 100644 --- a/src/buildtool/storage/uplinker.hpp +++ b/src/buildtool/storage/uplinker.hpp @@ -21,17 +21,13 @@ #include #include "gsl/gsl" +#include "src/buildtool/common/artifact_digest.hpp" #include "src/buildtool/storage/config.hpp" template class LocalStorage; class TargetCacheKey; -namespace build::bazel::remote::execution::v2 { -class Digest; -} -namespace bazel_re = build::bazel::remote::execution::v2; - /// \brief Global uplinker implementation. /// Responsible for uplinking objects across all generations to latest /// generation. @@ -45,7 +41,7 @@ class GlobalUplinker final { /// \param digest Digest of the blob to uplink. /// \param is_executable Indicate that blob is an executable. /// \returns true if blob was found and successfully uplinked. - [[nodiscard]] auto UplinkBlob(bazel_re::Digest const& digest, + [[nodiscard]] auto UplinkBlob(ArtifactDigest const& digest, bool is_executable) const noexcept -> bool; /// \brief Uplink tree across LocalCASes from all generations to latest. @@ -53,7 +49,7 @@ class GlobalUplinker final { /// by this tree will be uplinked before (including sub-trees). /// \param digest Digest of the tree to uplink. /// \returns true if tree was found and successfully uplinked (deep). - [[nodiscard]] auto UplinkTree(bazel_re::Digest const& digest) const noexcept + [[nodiscard]] auto UplinkTree(ArtifactDigest const& digest) const noexcept -> bool; /// \brief Uplink large blob entry across LocalCASes from all generations to @@ -61,14 +57,14 @@ class GlobalUplinker final { /// \param digest Digest of the large blob entry to uplink. /// \returns true if large entry was found and successfully uplinked. 
[[nodiscard]] auto UplinkLargeBlob( - bazel_re::Digest const& digest) const noexcept -> bool; + ArtifactDigest const& digest) const noexcept -> bool; /// \brief Uplink entry from action cache across all generations to latest. /// Note that the entry will be uplinked including all referenced items. /// \param action_id Id of the action to uplink entry for. /// \returns true if cache entry was found and successfully uplinked. [[nodiscard]] auto UplinkActionCacheEntry( - bazel_re::Digest const& action_id) const noexcept -> bool; + ArtifactDigest const& action_id) const noexcept -> bool; /// \brief Uplink entry from target cache across all generations to latest. /// Note that the entry will be uplinked including all referenced items. diff --git a/src/buildtool/system/TARGETS b/src/buildtool/system/TARGETS index 691fbc3b1..72797bbb5 100644 --- a/src/buildtool/system/TARGETS +++ b/src/buildtool/system/TARGETS @@ -18,10 +18,10 @@ , "hdrs": ["system_command.hpp"] , "deps": [ "system" + , ["@", "gsl", "", "gsl"] , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["@", "gsl", "", "gsl"] ] , "stage": ["src", "buildtool", "system"] } diff --git a/src/buildtool/system/system.hpp b/src/buildtool/system/system.hpp index 06b45d792..5077351e2 100644 --- a/src/buildtool/system/system.hpp +++ b/src/buildtool/system/system.hpp @@ -26,19 +26,19 @@ void ExitWithoutCleanup(int exit_code); /// \brief Obtain POSIX epoch time for a given clock. /// Clocks may have different epoch times. To obtain the POSIX epoch time /// (1970-01-01 00:00:00 UTC) for a given clock, it must be converted. -template -static auto GetPosixEpoch() -> std::chrono::time_point { +template +static auto GetPosixEpoch() -> std::chrono::time_point { // Since C++20, the system clock's default value is the POSIX epoch time. 
std::chrono::time_point sys_epoch{}; - if constexpr (std::is_same_v) { + if constexpr (std::is_same_v) { // No conversion necessary for the system clock. return sys_epoch; } - else if constexpr (ClockHasFromSys) { + else if constexpr (ClockHasFromSys) { // The correct C++20 way to perform the time point conversion. - return T_Clock::from_sys(sys_epoch); + return TClock::from_sys(sys_epoch); } - else if constexpr (ClockHasFromTime) { + else if constexpr (ClockHasFromTime) { // Older releases of libcxx did not implement the standard conversion // function from_sys() for std::chrono::file_clock. Instead the // non-standard function file_clock::from_time_t() must be used. Since @@ -46,11 +46,11 @@ static auto GetPosixEpoch() -> std::chrono::time_point { // - https://reviews.llvm.org/D113027 // - https://reviews.llvm.org/D113430 // TODO(modernize): remove this once we require clang version >= 14.0.0 - return T_Clock::from_time_t( + return TClock::from_time_t( std::chrono::system_clock::to_time_t(sys_epoch)); } - static_assert(std::is_same_v or - ClockHasFromSys or ClockHasFromTime, + static_assert(std::is_same_v or + ClockHasFromSys or ClockHasFromTime, "Time point conversion function unavailable."); return {}; } diff --git a/src/buildtool/system/system_command.hpp b/src/buildtool/system/system_command.hpp index 6f1d80ce8..909c047ad 100644 --- a/src/buildtool/system/system_command.hpp +++ b/src/buildtool/system/system_command.hpp @@ -120,7 +120,7 @@ class SystemCommand { if (auto const err = OpenFile(stderr_file)) { if (auto retval = ForkAndExecute( cmd, envp, cwd, fileno(out.get()), fileno(err.get()))) { - return *retval; + return retval; } } else { @@ -201,7 +201,7 @@ class SystemCommand { // wait for child to finish and obtain return value int status{}; std::optional retval{std::nullopt}; - do { + while (not retval) { if (::waitpid(pid, &status, 0) == -1) { // this should never happen logger_.Emit(LogLevel::Error, @@ -221,7 +221,7 @@ class SystemCommand { 
LogLevel::Debug, "Child got killed by signal {}", sig); } // continue waitpid() in case we got STOPSIG from child - } while (not retval); + } return retval; } diff --git a/src/other_tools/git_operations/TARGETS b/src/other_tools/git_operations/TARGETS index 9a6aed5e0..f4e8611f5 100644 --- a/src/other_tools/git_operations/TARGETS +++ b/src/other_tools/git_operations/TARGETS @@ -12,12 +12,12 @@ , "hdrs": ["git_operations.hpp"] , "srcs": ["git_operations.cpp"] , "deps": - [["src/buildtool/multithreading", "async_map_consumer"], "git_ops_types"] + ["git_ops_types", ["src/buildtool/multithreading", "async_map_consumer"]] , "stage": ["src", "other_tools", "git_operations"] , "private-deps": - [ ["@", "fmt", "", "fmt"] + [ "git_repo_remote" + , ["@", "fmt", "", "fmt"] , ["src/buildtool/file_system", "file_system_manager"] - , "git_repo_remote" , ["src/buildtool/logging", "logging"] ] } @@ -32,14 +32,14 @@ ] , "stage": ["src", "other_tools", "git_operations"] , "private-deps": - [ ["src/buildtool/logging", "log_level"] - , ["src/buildtool/logging", "logging"] - , ["src/buildtool/file_system", "git_utils"] + [ "git_config_settings" , ["@", "fmt", "", "fmt"] - , ["", "libgit2"] , ["@", "json", "", "json"] + , ["", "libgit2"] + , ["src/buildtool/file_system", "git_utils"] + , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] , ["src/buildtool/system", "system_command"] - , "git_config_settings" ] } , "git_config_settings": @@ -49,9 +49,9 @@ , "srcs": ["git_config_settings.cpp"] , "stage": ["src", "other_tools", "git_operations"] , "private-deps": - [ ["src/other_tools/utils", "curl_url_handle"] + [ ["@", "fmt", "", "fmt"] , ["", "libgit2"] - , ["@", "fmt", "", "fmt"] + , ["src/other_tools/utils", "curl_url_handle"] ] } } diff --git a/src/other_tools/git_operations/git_config_settings.cpp b/src/other_tools/git_operations/git_config_settings.cpp index c820aa328..816e51a7c 100644 --- a/src/other_tools/git_operations/git_config_settings.cpp +++ 
b/src/other_tools/git_operations/git_config_settings.cpp @@ -30,18 +30,16 @@ void config_iter_closer(gsl::owner iter) { } // callback to enable SSL certificate check for remote fetch -const auto certificate_check_cb = [](git_cert* /*cert*/, - int /*valid*/, - const char* /*host*/, - void* /*payload*/) -> int { return 1; }; +const auto kCertificateCheck = [](git_cert* /*cert*/, + int /*valid*/, + const char* /*host*/, + void* /*payload*/) -> int { return 1; }; // callback to remote fetch without an SSL certificate check -const auto certificate_passthrough_cb = [](git_cert* /*cert*/, - int /*valid*/, - const char* /*host*/, - void* /*payload*/) -> int { - return 0; -}; +const auto kCertificatePassthrough = [](git_cert* /*cert*/, + int /*valid*/, + const char* /*host*/, + void* /*payload*/) -> int { return 0; }; /// \brief Custom comparison of matching degrees. Return true if left argument's /// degree of matching is better that the right argument's. When both are @@ -187,8 +185,8 @@ auto GitConfigSettings::GetSSLCallback(std::shared_ptr const& cfg, } } // set callback: passthrough only if check_cert is false - return (check_cert and not *check_cert) ? certificate_passthrough_cb - : certificate_check_cb; + return (check_cert and not *check_cert) ? 
kCertificatePassthrough + : kCertificateCheck; } catch (std::exception const& ex) { (*logger)( fmt::format("Getting SSL callback failed with:\n{}", ex.what()), @@ -327,7 +325,7 @@ auto GitConfigSettings::GetProxySettings(std::shared_ptr const& cfg, true /*fatal*/); return std::nullopt; } - return proxy_info.value(); + return proxy_info; } } // check the generic "http.proxy" gitconfig entry; @@ -348,7 +346,7 @@ auto GitConfigSettings::GetProxySettings(std::shared_ptr const& cfg, true /*fatal*/); return std::nullopt; } - return proxy_info.value(); + return proxy_info; } // cleanup memory git_buf_dispose(&tmp_buf); @@ -376,7 +374,7 @@ auto GitConfigSettings::GetProxySettings(std::shared_ptr const& cfg, true /*fatal*/); return std::nullopt; } - return proxy_info.value(); + return proxy_info; } // check HTTPS_PROXY envariable if (const char* envar = std::getenv("HTTPS_PROXY")) { @@ -390,7 +388,7 @@ auto GitConfigSettings::GetProxySettings(std::shared_ptr const& cfg, true /*fatal*/); return std::nullopt; } - return proxy_info.value(); + return proxy_info; } } else if (url_scheme.value() == "http") { @@ -406,7 +404,7 @@ auto GitConfigSettings::GetProxySettings(std::shared_ptr const& cfg, true /*fatal*/); return std::nullopt; } - return proxy_info.value(); + return proxy_info; } } // check all_proxy envariable @@ -421,7 +419,7 @@ auto GitConfigSettings::GetProxySettings(std::shared_ptr const& cfg, true /*fatal*/); return std::nullopt; } - return proxy_info.value(); + return proxy_info; } // check ALL_PROXY envariable if (const char* envar = std::getenv("ALL_PROXY")) { @@ -435,7 +433,7 @@ auto GitConfigSettings::GetProxySettings(std::shared_ptr const& cfg, true /*fatal*/); return std::nullopt; } - return proxy_info.value(); + return proxy_info; } } } diff --git a/src/other_tools/git_operations/git_operations.cpp b/src/other_tools/git_operations/git_operations.cpp index 5135743f2..be6d9ce16 100644 --- a/src/other_tools/git_operations/git_operations.cpp +++ 
b/src/other_tools/git_operations/git_operations.cpp @@ -63,7 +63,7 @@ auto CriticalGitOps::GitInitialCommit(GitOpParams const& crit_op_params, return {.git_cas = nullptr, .result = std::nullopt}; } // success - return {.git_cas = git_repo->GetGitCAS(), .result = commit_hash.value()}; + return {.git_cas = git_repo->GetGitCAS(), .result = std::move(commit_hash)}; } auto CriticalGitOps::GitEnsureInit(GitOpParams const& crit_op_params, @@ -132,7 +132,7 @@ auto CriticalGitOps::GitKeepTag(GitOpParams const& crit_op_params, return {.git_cas = nullptr, .result = std::nullopt}; } // success - return {.git_cas = git_repo->GetGitCAS(), .result = *tag_result}; + return {.git_cas = git_repo->GetGitCAS(), .result = std::move(tag_result)}; } auto CriticalGitOps::GitGetHeadId(GitOpParams const& crit_op_params, @@ -165,7 +165,7 @@ auto CriticalGitOps::GitGetHeadId(GitOpParams const& crit_op_params, return {.git_cas = nullptr, .result = std::nullopt}; } // success - return {.git_cas = git_repo->GetGitCAS(), .result = *head_commit}; + return {.git_cas = git_repo->GetGitCAS(), .result = std::move(head_commit)}; } auto CriticalGitOps::GitKeepTree(GitOpParams const& crit_op_params, @@ -208,5 +208,5 @@ auto CriticalGitOps::GitKeepTree(GitOpParams const& crit_op_params, return {.git_cas = nullptr, .result = std::nullopt}; } // success - return {.git_cas = git_repo->GetGitCAS(), .result = *tag_result}; + return {.git_cas = git_repo->GetGitCAS(), .result = std::move(tag_result)}; } diff --git a/src/other_tools/git_operations/git_ops_types.hpp b/src/other_tools/git_operations/git_ops_types.hpp index 2c60b0861..f36c18c36 100644 --- a/src/other_tools/git_operations/git_ops_types.hpp +++ b/src/other_tools/git_operations/git_ops_types.hpp @@ -15,6 +15,7 @@ #ifndef INCLUDED_SRC_OTHER_TOOLS_GIT_OPERATIONS_GIT_OPS_TYPES_HPP #define INCLUDED_SRC_OTHER_TOOLS_GIT_OPERATIONS_GIT_OPS_TYPES_HPP +#include #include #include #include @@ -25,8 +26,8 @@ /// \brief Common parameters for all critical Git 
operations struct GitOpParams { - std::filesystem::path target_path{}; /*key*/ - std::string git_hash{}; /*key*/ + std::filesystem::path target_path; /*key*/ + std::string git_hash; /*key*/ std::optional message{ std::nullopt}; // mandatory for commits and tags std::optional source_path{ @@ -53,7 +54,7 @@ struct GitOpParams { }; /// \brief Defines the type of Git operation -enum class GitOpType { +enum class GitOpType : std::uint8_t { DEFAULT_OP, // default value; does nothing INITIAL_COMMIT, ENSURE_INIT, diff --git a/src/other_tools/git_operations/git_repo_remote.cpp b/src/other_tools/git_operations/git_repo_remote.cpp index 810df8605..b9b9cc933 100644 --- a/src/other_tools/git_operations/git_repo_remote.cpp +++ b/src/other_tools/git_operations/git_repo_remote.cpp @@ -361,20 +361,16 @@ auto GitRepoRemote::FetchFromRemote(std::shared_ptr cfg, fetch_opts.update_fetchhead = 0; // setup fetch refspecs array - git_strarray refspecs_array_obj{}; + GitStrArray refspecs_array_obj; if (branch) { // make sure we check for tags as well - std::string tag = fmt::format("+refs/tags/{}", *branch); - std::string head = fmt::format("+refs/heads/{}", *branch); - PopulateStrarray(&refspecs_array_obj, {tag, head}); + refspecs_array_obj.AddEntry(fmt::format("+refs/tags/{}", *branch)); + refspecs_array_obj.AddEntry(fmt::format("+refs/heads/{}", *branch)); } - auto refspecs_array = - std::unique_ptr( - &refspecs_array_obj, strarray_deleter); + auto const refspecs_array = refspecs_array_obj.Get(); if (git_remote_fetch( - remote.get(), refspecs_array.get(), &fetch_opts, nullptr) != - 0) { + remote.get(), &refspecs_array, &fetch_opts, nullptr) != 0) { (*logger)( fmt::format("Fetching{} in git repository {} failed " "with:\n{}", diff --git a/src/other_tools/just_mr/TARGETS b/src/other_tools/just_mr/TARGETS index 5d03675ae..f0793d5d9 100644 --- a/src/other_tools/just_mr/TARGETS +++ b/src/other_tools/just_mr/TARGETS @@ -4,28 +4,27 @@ , "name": ["just-mr"] , "srcs": ["main.cpp"] , 
"private-deps": - [ ["@", "cli11", "", "cli11"] + [ "cli" + , "exit_codes" + , "fetch" + , "launch" + , "rc" + , "setup" + , "setup_utils" + , "update" + , ["@", "cli11", "", "cli11"] , ["@", "gsl", "", "gsl"] , ["@", "json", "", "json"] , ["src/buildtool/build_engine/expression", "expression"] , ["src/buildtool/common", "retry_cli"] - , ["src/buildtool/compatibility", "compatibility"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/file_system", "git_context"] - , ["src/buildtool/logging", "logging"] , ["src/buildtool/logging", "log_level"] + , ["src/buildtool/logging", "logging"] , ["src/buildtool/main", "version"] , ["src/buildtool/storage", "config"] , ["src/buildtool/storage", "repository_garbage_collector"] , ["src/buildtool/storage", "storage"] - , "cli" - , "exit_codes" - , "fetch" - , "launch" - , "rc" - , "setup" - , "setup_utils" - , "update" - , ["src/buildtool/crypto", "hash_function"] ] , "stage": ["src", "other_tools", "just_mr"] , "private-ldflags": @@ -44,9 +43,9 @@ , "deps": [ ["@", "gsl", "", "gsl"] , ["@", "json", "", "json"] - , ["src/buildtool/storage", "config"] - , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/build_engine/expression", "expression"] + , ["src/buildtool/file_system", "file_system_manager"] + , ["src/buildtool/storage", "config"] ] , "stage": ["src", "other_tools", "just_mr"] , "private-deps": @@ -65,7 +64,9 @@ , "name": ["cli"] , "hdrs": ["cli.hpp"] , "deps": - [ ["@", "cli11", "", "cli11"] + [ "mirrors" + , "utils" + , ["@", "cli11", "", "cli11"] , ["@", "fmt", "", "fmt"] , ["@", "gsl", "", "gsl"] , ["@", "json", "", "json"] @@ -74,8 +75,6 @@ , ["src/buildtool/common", "user_structs"] , ["src/buildtool/execution_api/local", "config"] , ["src/buildtool/logging", "log_level"] - , "mirrors" - , "utils" ] , "stage": ["src", "other_tools", "just_mr"] } @@ -85,21 +84,21 @@ , "hdrs": ["setup_utils.hpp"] , "srcs": ["setup_utils.cpp"] , "deps": - [ ["src/buildtool/auth", "auth"] - , 
["src/buildtool/build_engine/expression", "expression_ptr_interface"] + [ "cli" + , ["src/buildtool/auth", "auth"] , ["src/buildtool/build_engine/expression", "expression"] + , ["src/buildtool/build_engine/expression", "expression_ptr_interface"] , ["src/buildtool/execution_api/local", "config"] + , ["src/buildtool/execution_api/remote", "config"] , ["src/buildtool/serve_api/remote", "config"] - , "cli" ] , "stage": ["src", "other_tools", "just_mr"] , "private-deps": - [ ["@", "json", "", "json"] - , ["src/buildtool/execution_api/remote", "config"] + [ "exit_codes" + , ["@", "json", "", "json"] , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , "exit_codes" ] } , "fetch": @@ -108,23 +107,38 @@ , "hdrs": ["fetch.hpp"] , "srcs": ["fetch.cpp"] , "deps": - [ ["src/buildtool/build_engine/expression", "expression"] - , "cli" + [ "cli" + , ["src/buildtool/build_engine/expression", "expression"] , ["src/buildtool/common", "retry_cli"] , ["src/buildtool/storage", "config"] , ["src/buildtool/storage", "storage"] ] , "stage": ["src", "other_tools", "just_mr"] , "private-deps": - [ ["@", "fmt", "", "fmt"] + [ "exit_codes" + , "setup_utils" + , ["@", "fmt", "", "fmt"] , ["@", "json", "", "json"] , ["src/buildtool/auth", "auth"] , ["src/buildtool/common/remote", "retry_config"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/execution_api/bazel_msg", "bazel_msg"] + , ["src/buildtool/execution_api/common", "api_bundle"] + , ["src/buildtool/execution_api/common", "common"] + , ["src/buildtool/execution_api/local", "config"] + , ["src/buildtool/execution_api/local", "context"] + , ["src/buildtool/execution_api/local", "local"] + , ["src/buildtool/execution_api/remote", "bazel"] + , ["src/buildtool/execution_api/remote", "config"] + , ["src/buildtool/execution_api/remote", "context"] + , ["src/buildtool/execution_api/serve", "mr_local_api"] + , ["src/buildtool/logging", 
"log_level"] , ["src/buildtool/logging", "logging"] , ["src/buildtool/main", "retry"] , ["src/buildtool/multithreading", "async_map_utils"] , ["src/buildtool/multithreading", "task_system"] - , "exit_codes" + , ["src/buildtool/serve_api/remote", "config"] + , ["src/buildtool/serve_api/remote", "serve_api"] , ["src/other_tools/just_mr/progress_reporting", "progress"] , ["src/other_tools/just_mr/progress_reporting", "progress_reporter"] , ["src/other_tools/just_mr/progress_reporting", "statistics"] @@ -134,15 +148,8 @@ , ["src/other_tools/ops_maps", "git_tree_fetch_map"] , ["src/other_tools/ops_maps", "import_to_git_map"] , ["src/other_tools/utils", "parse_archive"] - , "setup_utils" - , ["src/buildtool/execution_api/common", "common"] - , ["src/buildtool/execution_api/common", "api_bundle"] - , ["src/buildtool/execution_api/local", "config"] - , ["src/buildtool/execution_api/local", "context"] - , ["src/buildtool/execution_api/remote", "config"] - , ["src/buildtool/execution_api/remote", "context"] - , ["src/buildtool/serve_api/remote", "config"] - , ["src/buildtool/serve_api/remote", "serve_api"] + , ["src/other_tools/utils", "parse_git_tree"] + , ["src/utils/cpp", "file_locking"] ] } , "update": @@ -151,24 +158,24 @@ , "hdrs": ["update.hpp"] , "srcs": ["update.cpp"] , "deps": - [ ["src/buildtool/build_engine/expression", "expression"] - , "cli" + [ "cli" + , ["src/buildtool/build_engine/expression", "expression"] , ["src/buildtool/storage", "config"] ] , "stage": ["src", "other_tools", "just_mr"] , "private-deps": - [ ["@", "fmt", "", "fmt"] + [ "exit_codes" + , "utils" + , ["@", "fmt", "", "fmt"] , ["@", "json", "", "json"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] , ["src/buildtool/multithreading", "async_map_utils"] , ["src/buildtool/multithreading", "task_system"] - , ["src/buildtool/storage", "config"] , ["src/other_tools/git_operations", "git_repo_remote"] - , "exit_codes" , 
["src/other_tools/just_mr/progress_reporting", "progress"] , ["src/other_tools/just_mr/progress_reporting", "progress_reporter"] , ["src/other_tools/just_mr/progress_reporting", "statistics"] - , "utils" , ["src/other_tools/ops_maps", "git_update_map"] ] } @@ -178,27 +185,43 @@ , "hdrs": ["setup.hpp"] , "srcs": ["setup.cpp"] , "deps": - [ ["src/buildtool/build_engine/expression", "expression"] - , "cli" + [ "cli" + , ["src/buildtool/build_engine/expression", "expression"] , ["src/buildtool/common", "retry_cli"] , ["src/buildtool/storage", "config"] , ["src/buildtool/storage", "storage"] ] , "stage": ["src", "other_tools", "just_mr"] , "private-deps": - [ ["@", "json", "", "json"] + [ "exit_codes" + , "setup_utils" + , "utils" + , ["@", "json", "", "json"] , ["src/buildtool/auth", "auth"] , ["src/buildtool/common/remote", "retry_config"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/execution_api/bazel_msg", "bazel_msg"] + , ["src/buildtool/execution_api/common", "api_bundle"] + , ["src/buildtool/execution_api/common", "common"] + , ["src/buildtool/execution_api/local", "config"] + , ["src/buildtool/execution_api/local", "context"] + , ["src/buildtool/execution_api/local", "local"] + , ["src/buildtool/execution_api/remote", "bazel"] + , ["src/buildtool/execution_api/remote", "config"] + , ["src/buildtool/execution_api/remote", "context"] + , ["src/buildtool/execution_api/serve", "mr_local_api"] + , ["src/buildtool/file_system/symlinks_map", "resolve_symlinks_map"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] , ["src/buildtool/main", "retry"] , ["src/buildtool/multithreading", "async_map_utils"] , ["src/buildtool/multithreading", "task_system"] + , ["src/buildtool/serve_api/remote", "config"] + , ["src/buildtool/serve_api/remote", "serve_api"] , ["src/buildtool/storage", "fs_utils"] - , "exit_codes" , ["src/other_tools/just_mr/progress_reporting", "progress"] , ["src/other_tools/just_mr/progress_reporting", 
"progress_reporter"] , ["src/other_tools/just_mr/progress_reporting", "statistics"] - , "utils" , ["src/other_tools/ops_maps", "content_cas_map"] , ["src/other_tools/ops_maps", "critical_git_op_map"] , ["src/other_tools/ops_maps", "git_tree_fetch_map"] @@ -208,16 +231,7 @@ , ["src/other_tools/root_maps", "distdir_git_map"] , ["src/other_tools/root_maps", "fpath_git_map"] , ["src/other_tools/root_maps", "tree_id_git_map"] - , ["src/buildtool/file_system/symlinks_map", "resolve_symlinks_map"] - , "setup_utils" - , ["src/buildtool/execution_api/common", "common"] - , ["src/buildtool/execution_api/common", "api_bundle"] - , ["src/buildtool/execution_api/local", "config"] - , ["src/buildtool/execution_api/local", "context"] - , ["src/buildtool/execution_api/remote", "config"] - , ["src/buildtool/execution_api/remote", "context"] - , ["src/buildtool/serve_api/remote", "config"] - , ["src/buildtool/serve_api/remote", "serve_api"] + , ["src/utils/cpp", "file_locking"] ] } , "launch": @@ -233,16 +247,16 @@ ] , "stage": ["src", "other_tools", "just_mr"] , "private-deps": - [ ["@", "json", "", "json"] + [ "exit_codes" + , "setup" + , "setup_utils" + , ["@", "json", "", "json"] , ["src/buildtool/build_engine/expression", "expression"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] , ["src/buildtool/multithreading", "task_system"] , ["src/buildtool/storage", "repository_garbage_collector"] , ["src/utils/cpp", "file_locking"] - , "exit_codes" - , "setup" - , "utils" - , "setup_utils" ] } , "mirrors": @@ -262,16 +276,17 @@ , "name": ["rc"] , "hdrs": ["rc.hpp"] , "srcs": ["rc.cpp"] - , "deps": ["cli"] + , "deps": ["cli", ["@", "gsl", "", "gsl"]] , "private-deps": - [ ["@", "json", "", "json"] + [ "exit_codes" + , "rc_merge" + , ["@", "json", "", "json"] , ["src/buildtool/build_engine/expression", "expression"] + , ["src/buildtool/build_engine/expression", "expression_ptr_interface"] , ["src/buildtool/common", "location"] , 
["src/buildtool/file_system", "file_system_manager"] - , ["src/buildtool/logging", "logging"] , ["src/buildtool/logging", "log_level"] - , "exit_codes" - , "rc_merge" + , ["src/buildtool/logging", "logging"] ] , "stage": ["src", "other_tools", "just_mr"] } diff --git a/src/other_tools/just_mr/cli.hpp b/src/other_tools/just_mr/cli.hpp index 9543689eb..7652ce32f 100644 --- a/src/other_tools/just_mr/cli.hpp +++ b/src/other_tools/just_mr/cli.hpp @@ -16,6 +16,7 @@ #define INCLUDED_SRC_OTHER_TOOLS_JUST_MR_CLI_HPP #include +#include #include #include #include @@ -41,7 +42,7 @@ struct MultiRepoCommonArguments { std::optional repository_config{std::nullopt}; std::optional absent_repository_file{std::nullopt}; std::optional checkout_locations_file{std::nullopt}; - std::vector explicit_distdirs{}; + std::vector explicit_distdirs; LocalPathsPtr just_mr_paths = std::make_shared(); MirrorsPtr alternative_mirrors = std::make_shared(); std::optional> local_launcher{std::nullopt}; @@ -53,7 +54,7 @@ struct MultiRepoCommonArguments { std::optional dump_rc{std::nullopt}; bool norc{false}; std::size_t jobs{std::max(1U, std::thread::hardware_concurrency())}; - std::vector defines{}; + std::vector defines; std::optional remote_execution_address; bool compatible{false}; std::optional remote_serve_address; @@ -61,9 +62,9 @@ struct MultiRepoCommonArguments { }; struct MultiRepoLogArguments { - std::vector log_files{}; - std::optional log_limit{}; - std::optional restrict_stderr_log_limit{}; + std::vector log_files; + std::optional log_limit; + std::optional restrict_stderr_log_limit; bool plain_log{false}; bool log_append{false}; }; @@ -79,15 +80,15 @@ struct MultiRepoFetchArguments { }; struct MultiRepoUpdateArguments { - std::vector repos_to_update{}; + std::vector repos_to_update; }; struct MultiRepoJustSubCmdsArguments { std::optional subcmd_name{std::nullopt}; - std::vector additional_just_args{}; - std::unordered_map> just_args{}; - std::optional config{}; - std::optional 
endpoint_configuration{}; + std::vector additional_just_args; + std::unordered_map> just_args; + std::optional config; + std::optional endpoint_configuration; }; // corresponding to the similarly-named arguments in 'just' @@ -98,10 +99,10 @@ struct MultiRepoRemoteAuthArguments { }; struct ForwardOnlyArguments { - std::vector remote_execution_properties{}; + std::vector remote_execution_properties; }; -enum class SubCommand { +enum class SubCommand : std::uint8_t { kUnknown, kMRVersion, kFetch, diff --git a/src/other_tools/just_mr/exit_codes.hpp b/src/other_tools/just_mr/exit_codes.hpp index e4402538b..9c3276199 100644 --- a/src/other_tools/just_mr/exit_codes.hpp +++ b/src/other_tools/just_mr/exit_codes.hpp @@ -15,7 +15,10 @@ #ifndef INCLUDED_SRC_OTHER_TOOLS_JUST_MR_EXIT_CODES_HPP #define INCLUDED_SRC_OTHER_TOOLS_JUST_MR_EXIT_CODES_HPP -enum JustMRExitCodes { +#include + +// NOLINTNEXTLINE(performance-enum-size) +enum JustMRExitCodes : std::uint16_t { kExitSuccess = 0, kExitExecError = 64, // error in execvp kExitGenericFailure = 65, // none of the known errors diff --git a/src/other_tools/just_mr/fetch.cpp b/src/other_tools/just_mr/fetch.cpp index c1d65010e..99f21eb14 100644 --- a/src/other_tools/just_mr/fetch.cpp +++ b/src/other_tools/just_mr/fetch.cpp @@ -22,12 +22,17 @@ #include "nlohmann/json.hpp" #include "src/buildtool/auth/authentication.hpp" #include "src/buildtool/common/remote/retry_config.hpp" +#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/execution_api/bazel_msg/bazel_common.hpp" #include "src/buildtool/execution_api/common/api_bundle.hpp" #include "src/buildtool/execution_api/common/execution_api.hpp" #include "src/buildtool/execution_api/local/config.hpp" #include "src/buildtool/execution_api/local/context.hpp" +#include "src/buildtool/execution_api/local/local_api.hpp" +#include "src/buildtool/execution_api/remote/bazel/bazel_api.hpp" #include "src/buildtool/execution_api/remote/config.hpp" #include 
"src/buildtool/execution_api/remote/context.hpp" +#include "src/buildtool/execution_api/serve/mr_local_api.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" #include "src/buildtool/main/retry.hpp" @@ -35,6 +40,7 @@ #include "src/buildtool/multithreading/task_system.hpp" #include "src/buildtool/serve_api/remote/config.hpp" #include "src/buildtool/serve_api/remote/serve_api.hpp" +#include "src/buildtool/storage/garbage_collector.hpp" #include "src/other_tools/just_mr/exit_codes.hpp" #include "src/other_tools/just_mr/progress_reporting/progress.hpp" #include "src/other_tools/just_mr/progress_reporting/progress_reporter.hpp" @@ -46,6 +52,8 @@ #include "src/other_tools/ops_maps/git_tree_fetch_map.hpp" #include "src/other_tools/ops_maps/import_to_git_map.hpp" #include "src/other_tools/utils/parse_archive.hpp" +#include "src/other_tools/utils/parse_git_tree.hpp" +#include "src/utils/cpp/file_locking.hpp" auto MultiRepoFetch(std::shared_ptr const& config, MultiRepoCommonArguments const& common_args, @@ -53,8 +61,8 @@ auto MultiRepoFetch(std::shared_ptr const& config, MultiRepoFetchArguments const& fetch_args, MultiRepoRemoteAuthArguments const& auth_args, RetryArguments const& retry_args, - StorageConfig const& storage_config, - Storage const& storage, + StorageConfig const& native_storage_config, + Storage const& native_storage, std::string multi_repository_tool_name) -> int { // provide report Logger::Log(LogLevel::Info, "Performing repositories fetch"); @@ -272,107 +280,17 @@ auto MultiRepoFetch(std::shared_ptr const& config, // explicitly told to fetch absent archives if (not pragma_absent_value or common_args.fetch_absent) { // enforce mandatory fields - auto repo_desc_hash = (*resolved_repo_desc)->At("id"); - if (not repo_desc_hash) { + auto tree_info = + ParseGitTree(*resolved_repo_desc, repo_name); + if (not tree_info) { Logger::Log( LogLevel::Error, - "Config: Mandatory field \"id\" is missing"); + fmt::format("Config: 
{}", + std::move(tree_info).error())); return kExitFetchError; } - if (not repo_desc_hash->get()->IsString()) { - Logger::Log( - LogLevel::Error, - fmt::format("Config: Unsupported value {} for " - "mandatory field \"id\"", - repo_desc_hash->get()->ToString())); - return kExitFetchError; - } - auto repo_desc_cmd = (*resolved_repo_desc)->At("cmd"); - if (not repo_desc_cmd) { - Logger::Log( - LogLevel::Error, - "Config: Mandatory field \"cmd\" is missing"); - return kExitFetchError; - } - if (not repo_desc_cmd->get()->IsList()) { - Logger::Log( - LogLevel::Error, - fmt::format("Config: Unsupported value {} for " - "mandatory field \"cmd\"", - repo_desc_cmd->get()->ToString())); - return kExitFetchError; - } - std::vector cmd{}; - for (auto const& token : repo_desc_cmd->get()->List()) { - if (token.IsNotNull() and token->IsString()) { - cmd.emplace_back(token->String()); - } - else { - Logger::Log( - LogLevel::Error, - fmt::format("Config: Unsupported entry {} " - "in mandatory field \"cmd\"", - token->ToString())); - return kExitFetchError; - } - } - std::map env{}; - auto repo_desc_env = - (*resolved_repo_desc) - ->Get("env", Expression::none_t{}); - if (repo_desc_env.IsNotNull() and - repo_desc_env->IsMap()) { - for (auto const& envar : - repo_desc_env->Map().Items()) { - if (envar.second.IsNotNull() and - envar.second->IsString()) { - env.insert( - {envar.first, envar.second->String()}); - } - else { - Logger::Log( - LogLevel::Error, - fmt::format( - "Config: Unsupported value {} for " - "key {} in optional field \"envs\"", - envar.second->ToString(), - nlohmann::json(envar.first) - .dump())); - return kExitFetchError; - } - } - } - std::vector inherit_env{}; - auto repo_desc_inherit_env = - (*resolved_repo_desc) - ->Get("inherit env", Expression::none_t{}); - if (repo_desc_inherit_env.IsNotNull() and - repo_desc_inherit_env->IsList()) { - for (auto const& envvar : - repo_desc_inherit_env->List()) { - if (envvar->IsString()) { - 
inherit_env.emplace_back(envvar->String()); - } - else { - Logger::Log( - LogLevel::Error, - fmt::format("Config: Not a variable " - "name in the specification " - "of \"inherit env\": {}", - envvar->ToString())); - return kExitFetchError; - } - } - } - // populate struct - GitTreeInfo tree_info = { - .hash = repo_desc_hash->get()->String(), - .env_vars = std::move(env), - .inherit_env = std::move(inherit_env), - .command = std::move(cmd), - .origin = repo_name}; // add to list - git_trees_to_fetch.emplace_back(std::move(tree_info)); + git_trees_to_fetch.emplace_back(*std::move(tree_info)); } } break; default: @@ -411,52 +329,116 @@ auto MultiRepoFetch(std::shared_ptr const& config, return kExitConfigError; } - // pack the local context instances to be passed to ApiBundle - LocalContext const local_context{.exec_config = &*local_exec_config, - .storage_config = &storage_config, - .storage = &storage}; + // pack the native local context and create api + LocalContext const native_local_context{ + .exec_config = &*local_exec_config, + .storage_config = &native_storage_config, + .storage = &native_storage}; + IExecutionApi::Ptr const native_local_api = + std::make_shared(&native_local_context); + + // pack the compatible local context, if needed + std::unique_ptr compat_storage_config = nullptr; + std::unique_ptr compat_storage = nullptr; + std::unique_ptr compat_local_context = nullptr; + std::optional compat_lock = std::nullopt; + IExecutionApi::Ptr compat_local_api = nullptr; + if (common_args.compatible) { + auto config = StorageConfig::Builder{} + .SetBuildRoot(native_storage_config.build_root) + .SetHashType(HashFunction::Type::PlainSHA256) + .Build(); + if (not config) { + Logger::Log(LogLevel::Error, config.error()); + return kExitConfigError; + } + compat_storage_config = + std::make_unique(*std::move(config)); + compat_storage = std::make_unique( + Storage::Create(compat_storage_config.get())); + compat_local_context = std::make_unique( + 
LocalContext{.exec_config = &*local_exec_config, + .storage_config = compat_storage_config.get(), + .storage = compat_storage.get()}); + // if a compatible storage is created, one must get a lock for it the + // same way as done for the native one + compat_lock = GarbageCollector::SharedLock(*compat_storage_config); + if (not compat_lock) { + Logger::Log(LogLevel::Error, + "Failed to acquire compatible storage gc lock"); + return kExitConfigError; + } + compat_local_api = std::make_shared(&*compat_local_context); + } + + // setup the overall local api, aware of compatibility + IExecutionApi::Ptr mr_local_api = std::make_shared( + &native_local_context, + &*native_local_api, + common_args.compatible ? &*compat_local_context : nullptr, + common_args.compatible ? &*compat_local_api : nullptr); // setup authentication config - auto auth_config = JustMR::Utils::CreateAuthConfig(auth_args); + auto const auth_config = JustMR::Utils::CreateAuthConfig(auth_args); if (not auth_config) { return kExitConfigError; } // setup the retry config - auto retry_config = CreateRetryConfig(retry_args); + auto const retry_config = CreateRetryConfig(retry_args); if (not retry_config) { return kExitConfigError; } // setup remote execution config - auto remote_exec_config = JustMR::Utils::CreateRemoteExecutionConfig( + auto const remote_exec_config = JustMR::Utils::CreateRemoteExecutionConfig( common_args.remote_execution_address, common_args.remote_serve_address); if (not remote_exec_config) { return kExitConfigError; } - // pack the remote context instances to be passed to ApiBundle + // create the remote api + auto const hash_fct = + compat_local_context != nullptr + ? 
compat_local_context->storage_config->hash_function + : native_local_context.storage_config->hash_function; + IExecutionApi::Ptr remote_api = nullptr; + if (auto const address = remote_exec_config->remote_address) { + ExecutionConfiguration config; + config.skip_cache_lookup = false; + remote_api = std::make_shared("remote-execution", + address->host, + address->port, + &*auth_config, + &*retry_config, + config, + &hash_fct); + } + bool const has_remote_api = remote_api != nullptr; + + // pack the remote context RemoteContext const remote_context{.auth = &*auth_config, .retry_config = &*retry_config, .exec_config = &*remote_exec_config}; - // setup the APIs for archive fetches; only happens if in native mode - auto const apis = ApiBundle::Create(&local_context, - &remote_context, - /*repo_config=*/nullptr); - - bool const has_remote_api = - apis.local != apis.remote and not common_args.compatible; - - // setup the API for serving roots + // setup the api for serving roots auto serve_config = JustMR::Utils::CreateServeConfig(common_args.remote_serve_address); if (not serve_config) { return kExitConfigError; } + auto const apis = + ApiBundle{.hash_function = hash_fct, + .local = mr_local_api, + .remote = has_remote_api ? remote_api : mr_local_api}; + auto serve = ServeApi::Create( + *serve_config, + compat_local_context != nullptr + ? 
&*compat_local_context + : &native_local_context, // defines the client's hash_function + &remote_context, + &apis /*unused*/); - auto serve = - ServeApi::Create(*serve_config, &local_context, &remote_context, &apis); // check configuration of the serve endpoint provided if (serve) { // if we have a remote endpoint explicitly given by the user, it must @@ -491,23 +473,25 @@ auto MultiRepoFetch(std::shared_ptr const& config, auto crit_git_op_ptr = std::make_shared(); auto critical_git_op_map = CreateCriticalGitOpMap(crit_git_op_ptr); - auto content_cas_map = - CreateContentCASMap(common_args.just_mr_paths, - common_args.alternative_mirrors, - common_args.ca_info, - &critical_git_op_map, - serve ? &*serve : nullptr, - &storage_config, - &storage, - &(*apis.local), - has_remote_api ? &*apis.remote : nullptr, - &progress, - common_args.jobs); + auto content_cas_map = CreateContentCASMap( + common_args.just_mr_paths, + common_args.alternative_mirrors, + common_args.ca_info, + &critical_git_op_map, + serve ? &*serve : nullptr, + &native_storage_config, + compat_storage_config != nullptr ? &*compat_storage_config : nullptr, + &native_storage, + compat_storage != nullptr ? &*compat_storage : nullptr, + &(*apis.local), + has_remote_api ? &*apis.remote : nullptr, + &progress, + common_args.jobs); auto archive_fetch_map = CreateArchiveFetchMap( &content_cas_map, *fetch_dir, - &storage, + &native_storage, &(*apis.local), (fetch_args.backup_to_remote and has_remote_api) ? &*apis.remote : nullptr, @@ -518,21 +502,23 @@ auto MultiRepoFetch(std::shared_ptr const& config, CreateImportToGitMap(&critical_git_op_map, common_args.git_path->string(), *common_args.local_launcher, - &storage_config, + &native_storage_config, common_args.jobs); - auto git_tree_fetch_map = - CreateGitTreeFetchMap(&critical_git_op_map, - &import_to_git_map, - common_args.git_path->string(), - *common_args.local_launcher, - serve ? &*serve : nullptr, - &storage_config, - &(*apis.local), - has_remote_api ? 
&*apis.remote : nullptr, - fetch_args.backup_to_remote, - &progress, - common_args.jobs); + auto git_tree_fetch_map = CreateGitTreeFetchMap( + &critical_git_op_map, + &import_to_git_map, + common_args.git_path->string(), + *common_args.local_launcher, + serve ? &*serve : nullptr, + &native_storage_config, + compat_storage_config != nullptr ? &*compat_storage_config : nullptr, + compat_storage != nullptr ? &*compat_storage : nullptr, + &(*apis.local), + has_remote_api ? &*apis.remote : nullptr, + fetch_args.backup_to_remote, + &progress, + common_args.jobs); // set up progress observer std::atomic done{false}; diff --git a/src/other_tools/just_mr/launch.cpp b/src/other_tools/just_mr/launch.cpp index ccebd966a..8430713c0 100644 --- a/src/other_tools/just_mr/launch.cpp +++ b/src/other_tools/just_mr/launch.cpp @@ -44,7 +44,7 @@ auto CallJust(std::optional const& config_file, StorageConfig const& storage_config, Storage const& storage, bool forward_build_root, - std::string multi_repo_tool_name) -> int { + std::string const& multi_repo_tool_name) -> int { // check if subcmd_name can be taken from additional args auto additional_args_offset = 0U; auto subcommand = just_cmd_args.subcmd_name; @@ -89,7 +89,7 @@ auto CallJust(std::optional const& config_file, storage_config, storage, /*interactive=*/false, - std::move(multi_repo_tool_name)); + multi_repo_tool_name); if (not mr_config_path) { Logger::Log(LogLevel::Error, "Failed to setup config for calling \"{} {}\"", @@ -138,13 +138,13 @@ auto CallJust(std::optional const& config_file, if (log_args.log_limit and *log_args.log_limit != kDefaultLogLevel) { cmd.emplace_back("--log-limit"); cmd.emplace_back( - std::to_string(static_cast::type>( + std::to_string(static_cast>( *log_args.log_limit))); } if (log_args.restrict_stderr_log_limit) { cmd.emplace_back("--restrict-stderr-log-limit"); cmd.emplace_back( - std::to_string(static_cast::type>( + std::to_string(static_cast>( *log_args.restrict_stderr_log_limit))); } if 
(log_args.plain_log) { diff --git a/src/other_tools/just_mr/launch.hpp b/src/other_tools/just_mr/launch.hpp index cea43534c..69c1603a9 100644 --- a/src/other_tools/just_mr/launch.hpp +++ b/src/other_tools/just_mr/launch.hpp @@ -36,6 +36,6 @@ StorageConfig const& storage_config, Storage const& storage, bool forward_build_root, - std::string multi_repo_tool_name) -> int; + std::string const& multi_repo_tool_name) -> int; #endif // INCLUDED_SRC_OTHER_TOOLS_JUST_MR_LAUNCH_HPP diff --git a/src/other_tools/just_mr/main.cpp b/src/other_tools/just_mr/main.cpp index 24bf8ffd9..e4eaa18c7 100644 --- a/src/other_tools/just_mr/main.cpp +++ b/src/other_tools/just_mr/main.cpp @@ -27,7 +27,6 @@ #include "nlohmann/json.hpp" #include "src/buildtool/build_engine/expression/configuration.hpp" #include "src/buildtool/common/retry_cli.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/git_context.hpp" #include "src/buildtool/logging/log_config.hpp" @@ -208,14 +207,13 @@ void SetupLogging(MultiRepoLogArguments const& clargs) { } [[nodiscard]] auto CreateStorageConfig(MultiRepoCommonArguments const& args, - bool is_compatible) noexcept + HashFunction::Type hash_type) noexcept -> std::optional { StorageConfig::Builder builder; if (args.just_mr_paths->root.has_value()) { builder.SetBuildRoot(*args.just_mr_paths->root); } - builder.SetHashType(is_compatible ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1); + builder.SetHashType(hash_type); // As just-mr does not require the TargetCache, we do not need to set any of // the remote execution fields for the backend description. @@ -238,6 +236,7 @@ auto main(int argc, char* argv[]) -> int { my_name = std::filesystem::path(argv[0]).filename().string(); } catch (...) 
{ // ignore, as my_name is only used for error messages + my_name.clear(); } } try { @@ -317,10 +316,11 @@ auto main(int argc, char* argv[]) -> int { arguments.common.explicit_distdirs.end()); // Setup LocalStorageConfig to store the local_build_root properly - // and make the cas and git cache roots available - auto storage_config = CreateStorageConfig( - arguments.common, Compatibility::IsCompatible()); - if (not storage_config) { + // and make the cas and git cache roots available. A native storage is + // always instantiated, while a compatible one only if needed. + auto const native_storage_config = + CreateStorageConfig(arguments.common, HashFunction::Type::GitSHA1); + if (not native_storage_config) { Logger::Log(LogLevel::Error, "Failed to configure local build root."); return kExitGenericFailure; @@ -328,12 +328,12 @@ auto main(int argc, char* argv[]) -> int { if (arguments.cmd == SubCommand::kGcRepo) { return RepositoryGarbageCollector::TriggerGarbageCollection( - *storage_config) + *native_storage_config) ? 
kExitSuccess : kExitBuiltinCommandFailure; } - auto const storage = Storage::Create(&*storage_config); + auto const native_storage = Storage::Create(&*native_storage_config); // check for conflicts in main repo name if ((not arguments.setup.sub_all) and arguments.common.main and @@ -365,13 +365,6 @@ auto main(int argc, char* argv[]) -> int { // Run subcommands known to just and `do` if (arguments.cmd == SubCommand::kJustDo or arguments.cmd == SubCommand::kJustSubCmd) { - // check setup configuration arguments for validity - if (arguments.common.compatible and arguments.common.fetch_absent) { - Logger::Log(LogLevel::Error, - "Fetching absent repositories only available in " - "native mode!"); - return kExitConfigError; - } return CallJust(config_file, arguments.common, arguments.setup, @@ -380,17 +373,17 @@ auto main(int argc, char* argv[]) -> int { arguments.auth, arguments.retry, arguments.launch_fwd, - *storage_config, - storage, + *native_storage_config, + native_storage, forward_build_root, my_name); } auto repo_lock = - RepositoryGarbageCollector::SharedLock(*storage_config); + RepositoryGarbageCollector::SharedLock(*native_storage_config); if (not repo_lock) { return kExitGenericFailure; } - auto lock = GarbageCollector::SharedLock(*storage_config); + auto lock = GarbageCollector::SharedLock(*native_storage_config); if (not lock) { return kExitGenericFailure; } @@ -402,13 +395,6 @@ auto main(int argc, char* argv[]) -> int { // Run subcommand `setup` or `setup-env` if (arguments.cmd == SubCommand::kSetup or arguments.cmd == SubCommand::kSetupEnv) { - // check setup configuration arguments for validity - if (arguments.common.compatible and arguments.common.fetch_absent) { - Logger::Log(LogLevel::Error, - "Fetching absent repositories only available in " - "native mode!"); - return kExitConfigError; - } auto mr_config_path = MultiRepoSetup( config, arguments.common, @@ -416,8 +402,8 @@ auto main(int argc, char* argv[]) -> int { arguments.just_cmd, 
arguments.auth, arguments.retry, - *storage_config, - storage, + *native_storage_config, + native_storage, /*interactive=*/(arguments.cmd == SubCommand::kSetupEnv), my_name); // dump resulting config to stdout @@ -436,37 +422,20 @@ auto main(int argc, char* argv[]) -> int { return MultiRepoUpdate(config, arguments.common, arguments.update, - *storage_config, + *native_storage_config, my_name); } // Run subcommand `fetch` if (arguments.cmd == SubCommand::kFetch) { - // check fetch configuration arguments for validity - if (arguments.common.compatible) { - if (arguments.common.remote_execution_address and - arguments.fetch.backup_to_remote) { - Logger::Log( - LogLevel::Error, - "Remote backup for fetched archives only available " - "in native mode!"); - return kExitConfigError; - } - if (arguments.common.fetch_absent) { - Logger::Log(LogLevel::Error, - "Fetching absent repositories only available " - "in native mode!"); - return kExitConfigError; - } - } return MultiRepoFetch(config, arguments.common, arguments.setup, arguments.fetch, arguments.auth, arguments.retry, - *storage_config, - storage, + *native_storage_config, + native_storage, my_name); } diff --git a/src/other_tools/just_mr/mirrors.hpp b/src/other_tools/just_mr/mirrors.hpp index f3fa8b14d..2e512e8ca 100644 --- a/src/other_tools/just_mr/mirrors.hpp +++ b/src/other_tools/just_mr/mirrors.hpp @@ -27,8 +27,8 @@ #include "src/other_tools/utils/curl_url_handle.hpp" struct Mirrors { - nlohmann::json local_mirrors{}; // maps URLs to list of local mirrors - nlohmann::json preferred_hostnames{}; // list of mirror hostnames + nlohmann::json local_mirrors; // maps URLs to list of local mirrors + nlohmann::json preferred_hostnames; // list of mirror hostnames }; using MirrorsPtr = std::shared_ptr; diff --git a/src/other_tools/just_mr/rc.hpp b/src/other_tools/just_mr/rc.hpp index fe8e3d898..a24fdb2e2 100644 --- a/src/other_tools/just_mr/rc.hpp +++ b/src/other_tools/just_mr/rc.hpp @@ -26,4 +26,4 @@ [[nodiscard]] auto 
ReadJustMRRC( gsl::not_null const& clargs) -> std::optional; -#endif +#endif // INCLUDED_SRC_OTHER_TOOLS_JUST_MR_RC_HPP diff --git a/src/other_tools/just_mr/rc_merge.hpp b/src/other_tools/just_mr/rc_merge.hpp index d559ed9da..61901a058 100644 --- a/src/other_tools/just_mr/rc_merge.hpp +++ b/src/other_tools/just_mr/rc_merge.hpp @@ -21,4 +21,4 @@ [[nodiscard]] auto MergeMRRC(const Configuration& base, const Configuration& delta) noexcept -> Configuration; -#endif +#endif // INCLUDED_SRC_OTHER_TOOLS_JUST_MR_RC_MERGE_HPP diff --git a/src/other_tools/just_mr/setup.cpp b/src/other_tools/just_mr/setup.cpp index ba22936de..4ab032de4 100644 --- a/src/other_tools/just_mr/setup.cpp +++ b/src/other_tools/just_mr/setup.cpp @@ -24,12 +24,17 @@ #include "nlohmann/json.hpp" #include "src/buildtool/auth/authentication.hpp" #include "src/buildtool/common/remote/retry_config.hpp" +#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/execution_api/bazel_msg/bazel_common.hpp" #include "src/buildtool/execution_api/common/api_bundle.hpp" #include "src/buildtool/execution_api/common/execution_api.hpp" #include "src/buildtool/execution_api/local/config.hpp" #include "src/buildtool/execution_api/local/context.hpp" +#include "src/buildtool/execution_api/local/local_api.hpp" +#include "src/buildtool/execution_api/remote/bazel/bazel_api.hpp" #include "src/buildtool/execution_api/remote/config.hpp" #include "src/buildtool/execution_api/remote/context.hpp" +#include "src/buildtool/execution_api/serve/mr_local_api.hpp" #include "src/buildtool/file_system/symlinks_map/resolve_symlinks_map.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" @@ -39,6 +44,7 @@ #include "src/buildtool/serve_api/remote/config.hpp" #include "src/buildtool/serve_api/remote/serve_api.hpp" #include "src/buildtool/storage/fs_utils.hpp" +#include "src/buildtool/storage/garbage_collector.hpp" #include "src/other_tools/just_mr/exit_codes.hpp" #include 
"src/other_tools/just_mr/progress_reporting/progress.hpp" #include "src/other_tools/just_mr/progress_reporting/progress_reporter.hpp" @@ -54,6 +60,7 @@ #include "src/other_tools/root_maps/distdir_git_map.hpp" #include "src/other_tools/root_maps/fpath_git_map.hpp" #include "src/other_tools/root_maps/tree_id_git_map.hpp" +#include "src/utils/cpp/file_locking.hpp" auto MultiRepoSetup(std::shared_ptr const& config, MultiRepoCommonArguments const& common_args, @@ -61,10 +68,10 @@ auto MultiRepoSetup(std::shared_ptr const& config, MultiRepoJustSubCmdsArguments const& just_cmd_args, MultiRepoRemoteAuthArguments const& auth_args, RetryArguments const& retry_args, - StorageConfig const& storage_config, - Storage const& storage, + StorageConfig const& native_storage_config, + Storage const& native_storage, bool interactive, - std::string multi_repo_tool_name) + std::string const& multi_repo_tool_name) -> std::optional { // provide report Logger::Log(LogLevel::Info, "Performing repositories setup"); @@ -104,6 +111,7 @@ auto MultiRepoSetup(std::shared_ptr const& config, LogLevel::Error, "Unsupported value {} for field \"main\" in configuration.", main_from_config->ToString()); + return std::nullopt; } } } @@ -127,57 +135,121 @@ auto MultiRepoSetup(std::shared_ptr const& config, setup_repos->to_setup.size()); // setup local execution config - auto local_exec_config = + auto const local_exec_config = JustMR::Utils::CreateLocalExecutionConfig(common_args); if (not local_exec_config) { return std::nullopt; } - // pack the local context instances to be passed to ApiBundle - LocalContext const local_context{.exec_config = &*local_exec_config, - .storage_config = &storage_config, - .storage = &storage}; + // pack the native local context and create api + LocalContext const native_local_context{ + .exec_config = &*local_exec_config, + .storage_config = &native_storage_config, + .storage = &native_storage}; + IExecutionApi::Ptr const native_local_api = + 
std::make_shared(&native_local_context); + + // pack the compatible local context, if needed + std::unique_ptr compat_storage_config = nullptr; + std::unique_ptr compat_storage = nullptr; + std::unique_ptr compat_local_context = nullptr; + std::optional compat_lock = std::nullopt; + IExecutionApi::Ptr compat_local_api = nullptr; + if (common_args.compatible) { + auto config = StorageConfig::Builder{} + .SetBuildRoot(native_storage_config.build_root) + .SetHashType(HashFunction::Type::PlainSHA256) + .Build(); + if (not config) { + Logger::Log(LogLevel::Error, config.error()); + return std::nullopt; + } + compat_storage_config = + std::make_unique(*std::move(config)); + compat_storage = std::make_unique( + Storage::Create(compat_storage_config.get())); + compat_local_context = std::make_unique( + LocalContext{.exec_config = &*local_exec_config, + .storage_config = compat_storage_config.get(), + .storage = compat_storage.get()}); + // if a compatible storage is created, one must get a lock for it the + // same way as done for the native one + compat_lock = GarbageCollector::SharedLock(*compat_storage_config); + if (not compat_lock) { + Logger::Log(LogLevel::Error, + "Failed to acquire compatible storage gc lock"); + return std::nullopt; + } + compat_local_api = std::make_shared(&*compat_local_context); + } + + // setup the overall local api, aware of compatibility + IExecutionApi::Ptr mr_local_api = std::make_shared( + &native_local_context, + &*native_local_api, + common_args.compatible ? &*compat_local_context : nullptr, + common_args.compatible ? 
&*compat_local_api : nullptr); // setup authentication config - auto auth_config = JustMR::Utils::CreateAuthConfig(auth_args); + auto const auth_config = JustMR::Utils::CreateAuthConfig(auth_args); if (not auth_config) { return std::nullopt; } // setup the retry config - auto retry_config = CreateRetryConfig(retry_args); + auto const retry_config = CreateRetryConfig(retry_args); if (not retry_config) { return std::nullopt; } // setup remote execution config - auto remote_exec_config = JustMR::Utils::CreateRemoteExecutionConfig( + auto const remote_exec_config = JustMR::Utils::CreateRemoteExecutionConfig( common_args.remote_execution_address, common_args.remote_serve_address); if (not remote_exec_config) { return std::nullopt; } - // pack the remote context instances to be passed to ApiBundle + // create the remote api + auto const hash_fct = + compat_local_context != nullptr + ? compat_local_context->storage_config->hash_function + : native_local_context.storage_config->hash_function; + IExecutionApi::Ptr remote_api = nullptr; + if (auto const address = remote_exec_config->remote_address) { + ExecutionConfiguration config; + config.skip_cache_lookup = false; + remote_api = std::make_shared("remote-execution", + address->host, + address->port, + &*auth_config, + &*retry_config, + config, + &hash_fct); + } + bool const has_remote_api = remote_api != nullptr; + + // pack the remote context RemoteContext const remote_context{.auth = &*auth_config, .retry_config = &*retry_config, .exec_config = &*remote_exec_config}; - auto const apis = ApiBundle::Create(&local_context, - &remote_context, - /*repo_config=*/nullptr); - - bool const has_remote_api = - apis.local != apis.remote and not common_args.compatible; - - // setup the API for serving roots + // setup the api for serving roots auto serve_config = JustMR::Utils::CreateServeConfig(common_args.remote_serve_address); if (not serve_config) { return std::nullopt; } - - auto serve = - ServeApi::Create(*serve_config, 
&local_context, &remote_context, &apis); + auto const apis = + ApiBundle{.hash_function = hash_fct, + .local = mr_local_api, + .remote = has_remote_api ? remote_api : mr_local_api}; + auto serve = ServeApi::Create( + *serve_config, + compat_local_context != nullptr + ? &*compat_local_context + : &native_local_context, // defines the client's hash_function + &remote_context, + &apis /*unused*/); // check configuration of the serve endpoint provided if (serve) { @@ -213,78 +285,87 @@ auto MultiRepoSetup(std::shared_ptr const& config, auto crit_git_op_ptr = std::make_shared(); auto critical_git_op_map = CreateCriticalGitOpMap(crit_git_op_ptr); - auto content_cas_map = - CreateContentCASMap(common_args.just_mr_paths, - common_args.alternative_mirrors, - common_args.ca_info, - &critical_git_op_map, - serve ? &*serve : nullptr, - &storage_config, - &storage, - &(*apis.local), - has_remote_api ? &*apis.remote : nullptr, - &progress, - common_args.jobs); + auto content_cas_map = CreateContentCASMap( + common_args.just_mr_paths, + common_args.alternative_mirrors, + common_args.ca_info, + &critical_git_op_map, + serve ? &*serve : nullptr, + &native_storage_config, + compat_storage_config != nullptr ? &*compat_storage_config : nullptr, + &native_storage, + compat_storage != nullptr ? &*compat_storage : nullptr, + &(*apis.local), + has_remote_api ? &*apis.remote : nullptr, + &progress, + common_args.jobs); auto import_to_git_map = CreateImportToGitMap(&critical_git_op_map, common_args.git_path->string(), *common_args.local_launcher, - &storage_config, + &native_storage_config, common_args.jobs); - auto git_tree_fetch_map = - CreateGitTreeFetchMap(&critical_git_op_map, - &import_to_git_map, - common_args.git_path->string(), - *common_args.local_launcher, - serve ? &*serve : nullptr, - &storage_config, - &(*apis.local), - has_remote_api ? 
&*apis.remote : nullptr, - false, /* backup_to_remote */ - &progress, - common_args.jobs); + auto git_tree_fetch_map = CreateGitTreeFetchMap( + &critical_git_op_map, + &import_to_git_map, + common_args.git_path->string(), + *common_args.local_launcher, + serve ? &*serve : nullptr, + &native_storage_config, + compat_storage_config != nullptr ? &*compat_storage_config : nullptr, + compat_storage != nullptr ? &*compat_storage : nullptr, + &(*apis.local), + has_remote_api ? &*apis.remote : nullptr, + false, /* backup_to_remote */ + &progress, + common_args.jobs); auto resolve_symlinks_map = CreateResolveSymlinksMap(); - auto commit_git_map = - CreateCommitGitMap(&critical_git_op_map, - &import_to_git_map, - common_args.just_mr_paths, - common_args.alternative_mirrors, - common_args.git_path->string(), - *common_args.local_launcher, - serve ? &*serve : nullptr, - &storage_config, - &(*apis.local), - has_remote_api ? &*apis.remote : nullptr, - common_args.fetch_absent, - &progress, - common_args.jobs); - - auto content_git_map = - CreateContentGitMap(&content_cas_map, - &import_to_git_map, - common_args.just_mr_paths, - common_args.alternative_mirrors, - common_args.ca_info, - &resolve_symlinks_map, - &critical_git_op_map, - serve ? &*serve : nullptr, - &storage_config, - &storage, - has_remote_api ? &*apis.remote : nullptr, - common_args.fetch_absent, - &progress, - common_args.jobs); + auto commit_git_map = CreateCommitGitMap( + &critical_git_op_map, + &import_to_git_map, + common_args.just_mr_paths, + common_args.alternative_mirrors, + common_args.git_path->string(), + *common_args.local_launcher, + serve ? &*serve : nullptr, + &native_storage_config, + compat_storage_config != nullptr ? &*compat_storage_config : nullptr, + compat_storage != nullptr ? &*compat_storage : nullptr, + &(*apis.local), + has_remote_api ? 
&*apis.remote : nullptr, + common_args.fetch_absent, + &progress, + common_args.jobs); + + auto content_git_map = CreateContentGitMap( + &content_cas_map, + &import_to_git_map, + common_args.just_mr_paths, + common_args.alternative_mirrors, + common_args.ca_info, + &resolve_symlinks_map, + &critical_git_op_map, + serve ? &*serve : nullptr, + &native_storage_config, + compat_storage_config != nullptr ? &*compat_storage_config : nullptr, + &native_storage, + compat_storage != nullptr ? &*compat_storage : nullptr, + has_remote_api ? &*apis.local : nullptr, // only needed if remote given + has_remote_api ? &*apis.remote : nullptr, + common_args.fetch_absent, + &progress, + common_args.jobs); auto foreign_file_git_map = CreateForeignFileGitMap(&content_cas_map, &import_to_git_map, serve ? &*serve : nullptr, - &storage_config, - &storage, + &native_storage_config, + &native_storage, common_args.fetch_absent, common_args.jobs); @@ -294,34 +375,41 @@ auto MultiRepoSetup(std::shared_ptr const& config, &import_to_git_map, &resolve_symlinks_map, serve ? &*serve : nullptr, - &storage_config, + &native_storage_config, + compat_storage_config != nullptr ? &*compat_storage_config : nullptr, + compat_storage != nullptr ? &*compat_storage : nullptr, + has_remote_api ? &*apis.local : nullptr, // only needed if remote given has_remote_api ? &*apis.remote : nullptr, common_args.jobs, multi_repo_tool_name, common_args.just_path ? common_args.just_path->string() : kDefaultJustPath); - auto distdir_git_map = - CreateDistdirGitMap(&content_cas_map, - &import_to_git_map, - &critical_git_op_map, - serve ? &*serve : nullptr, - &storage_config, - &storage, - &(*apis.local), - has_remote_api ? &*apis.remote : nullptr, - common_args.jobs); - - auto tree_id_git_map = - CreateTreeIdGitMap(&git_tree_fetch_map, - &critical_git_op_map, - &import_to_git_map, - common_args.fetch_absent, - serve ? &*serve : nullptr, - &storage_config, - &(*apis.local), - has_remote_api ? 
&*apis.remote : nullptr, - common_args.jobs); + auto distdir_git_map = CreateDistdirGitMap( + &content_cas_map, + &import_to_git_map, + &critical_git_op_map, + serve ? &*serve : nullptr, + &native_storage_config, + compat_storage_config != nullptr ? &*compat_storage_config : nullptr, + &native_storage, + compat_storage != nullptr ? &*compat_storage : nullptr, + &(*apis.local), + has_remote_api ? &*apis.remote : nullptr, + common_args.jobs); + + auto tree_id_git_map = CreateTreeIdGitMap( + &git_tree_fetch_map, + &critical_git_op_map, + &import_to_git_map, + common_args.fetch_absent, + serve ? &*serve : nullptr, + &native_storage_config, + compat_storage_config != nullptr ? &*compat_storage_config : nullptr, + compat_storage != nullptr ? &*compat_storage : nullptr, + &(*apis.local), + has_remote_api ? &*apis.remote : nullptr, + common_args.jobs); auto repos_to_setup_map = CreateReposToSetupMap(config, main, @@ -366,11 +454,11 @@ auto MultiRepoSetup(std::shared_ptr const& config, nlohmann::json mr_repos{}; for (auto const& repo : setup_repos->to_setup) { auto i = static_cast( - &repo - &setup_repos->to_setup[0]); // get index + &repo - setup_repos->to_setup.data()); // get index mr_repos[repo] = *values[i]; } // populate ALT_DIRS - constexpr auto err_msg_format = + constexpr auto kErrMsgFormat = "While performing {} {}:\nWhile populating fields for " "repository {}:\nExpected value for key \"{}\" to be a " "string, but found {}"; @@ -386,7 +474,7 @@ auto MultiRepoSetup(std::shared_ptr const& config, // we expect a string if (not val->IsString()) { Logger::Log(LogLevel::Error, - err_msg_format, + kErrMsgFormat, multi_repo_tool_name, interactive ? 
"setup-env" @@ -456,5 +544,5 @@ auto MultiRepoSetup(std::shared_ptr const& config, return std::nullopt; } // if successful, return the output config - return StorageUtils::AddToCAS(storage, mr_config.dump(2)); + return StorageUtils::AddToCAS(native_storage, mr_config.dump(2)); } diff --git a/src/other_tools/just_mr/setup.hpp b/src/other_tools/just_mr/setup.hpp index f9a37c364..28328b4b1 100644 --- a/src/other_tools/just_mr/setup.hpp +++ b/src/other_tools/just_mr/setup.hpp @@ -34,9 +34,10 @@ MultiRepoJustSubCmdsArguments const& just_cmd_args, MultiRepoRemoteAuthArguments const& auth_args, RetryArguments const& retry_args, - StorageConfig const& storage_config, - Storage const& storage, + StorageConfig const& native_storage_config, + Storage const& native_storage, bool interactive, - std::string multi_repo_tool_name) -> std::optional; + std::string const& multi_repo_tool_name) + -> std::optional; #endif // INCLUDED_SRC_OTHER_TOOLS_JUST_MR_SETUP_HPP diff --git a/src/other_tools/just_mr/setup_utils.cpp b/src/other_tools/just_mr/setup_utils.cpp index 4e5e5ad38..777343f58 100644 --- a/src/other_tools/just_mr/setup_utils.cpp +++ b/src/other_tools/just_mr/setup_utils.cpp @@ -14,6 +14,7 @@ #include "src/other_tools/just_mr/setup_utils.hpp" +#include #include #include #include @@ -26,6 +27,31 @@ #include "src/buildtool/logging/logger.hpp" #include "src/other_tools/just_mr/exit_codes.hpp" +namespace { + +void WarnUnknownKeys(std::string const& name, ExpressionPtr const& repo_def) { + if (not repo_def->IsMap()) { + return; + } + for (auto const& [key, value] : repo_def->Map()) { + if (not kRepositoryExpectedFields.contains(key)) { + Logger::Log(std::any_of(kRepositoryPossibleFieldTrunks.begin(), + kRepositoryPossibleFieldTrunks.end(), + [k = key](auto const& trunk) { + return k.find(trunk) != + std::string::npos; + }) + ? 
LogLevel::Debug + : LogLevel::Warning, + "Ignoring unknown field {} in repository {}", + key, + name); + } + } +} + +} // namespace + namespace JustMR::Utils { void ReachableRepositories( @@ -46,6 +72,7 @@ void ReachableRepositories( if (not repos_repo_name.IsNotNull()) { return; } + WarnUnknownKeys(repo_name, repos_repo_name); auto bindings = repos_repo_name->Get("bindings", Expression::none_t{}); if (bindings.IsNotNull() and bindings->IsMap()) { @@ -99,7 +126,7 @@ void DefaultReachableRepositories( auto ReadConfiguration( std::optional const& config_file_opt, - std::optional const& absent_file_opt) noexcept + std::optional const& absent_file_opt) -> std::shared_ptr { if (not config_file_opt) { Logger::Log(LogLevel::Error, "Cannot find repository configuration."); diff --git a/src/other_tools/just_mr/setup_utils.hpp b/src/other_tools/just_mr/setup_utils.hpp index 666914413..c96db5a30 100644 --- a/src/other_tools/just_mr/setup_utils.hpp +++ b/src/other_tools/just_mr/setup_utils.hpp @@ -18,6 +18,7 @@ #include #include #include +#include #include #include @@ -35,6 +36,22 @@ std::vector const kAltDirs = {"target_root", "rule_root", "expression_root"}; +auto const kRepositoryExpectedFields = + std::unordered_set{"bindings", + "expression_file_name", + "expression_root", + "repository", + "rule_file_name", + "rule_root", + "target_file_name", + "target_root"}; + +// Substrings in repository field names that indicate commonly-used +// additional keys not used by just-mr but deliberately added by the +// author of the repository configuration. +auto const kRepositoryPossibleFieldTrunks = + std::vector{"bootstrap", "doc", "extra"}; + namespace JustMR { struct SetupRepos { @@ -60,7 +77,7 @@ void DefaultReachableRepositories( /// \brief Read in a just-mr configuration file. 
[[nodiscard]] auto ReadConfiguration( std::optional const& config_file_opt, - std::optional const& absent_file_opt) noexcept + std::optional const& absent_file_opt) -> std::shared_ptr; [[nodiscard]] auto CreateAuthConfig( diff --git a/src/other_tools/just_mr/update.cpp b/src/other_tools/just_mr/update.cpp index 1c1626ebd..819ccf64e 100644 --- a/src/other_tools/just_mr/update.cpp +++ b/src/other_tools/just_mr/update.cpp @@ -38,8 +38,8 @@ auto MultiRepoUpdate(std::shared_ptr const& config, MultiRepoCommonArguments const& common_args, MultiRepoUpdateArguments const& update_args, - StorageConfig const& storage_config, - std::string multi_repo_tool_name) -> int { + StorageConfig const& native_storage_config, + std::string const& multi_repo_tool_name) -> int { // provide report Logger::Log(LogLevel::Info, "Performing repositories update"); @@ -193,7 +193,7 @@ auto MultiRepoUpdate(std::shared_ptr const& config, } } // Create fake repo for the anonymous remotes - auto tmp_dir = storage_config.CreateTypedTmpDir("update"); + auto tmp_dir = native_storage_config.CreateTypedTmpDir("update"); if (not tmp_dir) { Logger::Log(LogLevel::Error, "Failed to create commit update tmp dir"); return kExitUpdateError; @@ -227,7 +227,7 @@ auto MultiRepoUpdate(std::shared_ptr const& config, auto git_update_map = CreateGitUpdateMap(git_repo->GetGitCAS(), common_args.git_path->string(), *common_args.local_launcher, - &storage_config, + &native_storage_config, &stats, &progress, common_args.jobs); @@ -257,7 +257,7 @@ auto MultiRepoUpdate(std::shared_ptr const& config, for (auto const& repo_name : repos_to_update_names) { auto i = static_cast( &repo_name - - &repos_to_update_names[0]); // get index + repos_to_update_names.data()); // get index // we know "repository" is a map for repo_name, so // field "commit" is here either overwritten or set if // missing; either way, this should always work diff --git a/src/other_tools/just_mr/update.hpp b/src/other_tools/just_mr/update.hpp index 
267299355..3cc4ca116 100644 --- a/src/other_tools/just_mr/update.hpp +++ b/src/other_tools/just_mr/update.hpp @@ -25,7 +25,8 @@ [[nodiscard]] auto MultiRepoUpdate(std::shared_ptr const& config, MultiRepoCommonArguments const& common_args, MultiRepoUpdateArguments const& update_args, - StorageConfig const& storage_config, - std::string multi_repo_tool_name) -> int; + StorageConfig const& native_storage_config, + std::string const& multi_repo_tool_name) + -> int; #endif // INCLUDED_SRC_OTHER_TOOLS_JUST_MR_UPDATE_HPP diff --git a/src/other_tools/just_mr/utils.cpp b/src/other_tools/just_mr/utils.cpp index 2243b799e..e49da9d89 100644 --- a/src/other_tools/just_mr/utils.cpp +++ b/src/other_tools/just_mr/utils.cpp @@ -19,7 +19,6 @@ namespace JustMR::Utils { -// NOLINTNEXTLINE(misc-no-recursion) auto ResolveRepo(ExpressionPtr const& repo_desc, ExpressionPtr const& repos, gsl::not_null*> const& seen) @@ -33,7 +32,7 @@ auto ResolveRepo(ExpressionPtr const& repo_desc, return std::nullopt; } [[maybe_unused]] auto insert_res = seen->insert(desc_str); - auto new_repo_desc = repos[desc_str]; + auto const& new_repo_desc = repos[desc_str]; if (not new_repo_desc->IsMap()) { Logger::Log(LogLevel::Error, "Config: While resolving dependencies:\nDescription of " diff --git a/src/other_tools/ops_maps/TARGETS b/src/other_tools/ops_maps/TARGETS index 548db656e..545de74f1 100644 --- a/src/other_tools/ops_maps/TARGETS +++ b/src/other_tools/ops_maps/TARGETS @@ -19,18 +19,18 @@ , "deps": [ "critical_git_op_map" , ["@", "gsl", "", "gsl"] + , ["src/buildtool/storage", "config"] , ["src/other_tools/git_operations", "git_repo_remote"] , ["src/utils/cpp", "path"] , ["src/utils/cpp", "path_hash"] - , ["src/buildtool/storage", "config"] ] , "stage": ["src", "other_tools", "ops_maps"] , "private-deps": [ ["@", "fmt", "", "fmt"] - , ["src/buildtool/multithreading", "task_system"] - , ["src/buildtool/storage", "fs_utils"] , ["src/buildtool/execution_api/common", "common"] , 
["src/buildtool/execution_api/local", "config"] + , ["src/buildtool/multithreading", "task_system"] + , ["src/buildtool/storage", "fs_utils"] ] } , "git_update_map": @@ -40,12 +40,12 @@ , "srcs": ["git_update_map.cpp"] , "deps": [ ["@", "gsl", "", "gsl"] - , ["src/other_tools/git_operations", "git_repo_remote"] , ["src/buildtool/multithreading", "async_map_consumer"] - , ["src/utils/cpp", "hash_combine"] , ["src/buildtool/storage", "config"] + , ["src/other_tools/git_operations", "git_repo_remote"] , ["src/other_tools/just_mr/progress_reporting", "progress"] , ["src/other_tools/just_mr/progress_reporting", "statistics"] + , ["src/utils/cpp", "hash_combine"] ] , "stage": ["src", "other_tools", "ops_maps"] , "private-deps": @@ -62,24 +62,27 @@ , "deps": [ ["@", "gsl", "", "gsl"] , ["src/buildtool/common", "user_structs"] - , ["src/buildtool/file_system/symlinks_map", "pragma_special"] + , ["src/buildtool/crypto", "hash_info"] , ["src/buildtool/execution_api/common", "common"] + , ["src/buildtool/file_system/symlinks_map", "pragma_special"] , ["src/buildtool/multithreading", "async_map_consumer"] + , ["src/buildtool/serve_api/remote", "serve_api"] + , ["src/buildtool/storage", "config"] + , ["src/buildtool/storage", "storage"] , ["src/other_tools/just_mr", "mirrors"] , ["src/other_tools/just_mr/progress_reporting", "progress"] , ["src/other_tools/ops_maps", "critical_git_op_map"] , ["src/utils/cpp", "hash_combine"] - , ["src/buildtool/serve_api/remote", "serve_api"] - , ["src/buildtool/storage", "config"] - , ["src/buildtool/storage", "storage"] ] , "stage": ["src", "other_tools", "ops_maps"] , "private-deps": [ ["@", "fmt", "", "fmt"] - , ["src/other_tools/utils", "content"] + , ["src/buildtool/execution_api/serve", "utils"] , ["src/buildtool/file_system", "file_storage"] , ["src/buildtool/storage", "fs_utils"] , ["src/other_tools/git_operations", "git_repo_remote"] + , ["src/other_tools/utils", "content"] + , ["src/other_tools/utils", "curl_url_handle"] ] } , 
"archive_fetch_map": @@ -88,8 +91,9 @@ , "hdrs": ["archive_fetch_map.hpp"] , "srcs": ["archive_fetch_map.cpp"] , "deps": - [ ["@", "gsl", "", "gsl"] - , "content_cas_map" + [ "content_cas_map" + , ["@", "gsl", "", "gsl"] + , ["src/buildtool/crypto", "hash_info"] , ["src/buildtool/execution_api/common", "common"] , ["src/buildtool/storage", "storage"] , ["src/other_tools/just_mr/progress_reporting", "statistics"] @@ -97,9 +101,9 @@ , "stage": ["src", "other_tools", "ops_maps"] , "private-deps": [ ["@", "fmt", "", "fmt"] - , ["src/other_tools/just_mr", "utils"] , ["src/buildtool/file_system", "file_storage"] , ["src/buildtool/file_system", "file_system_manager"] + , ["src/other_tools/just_mr", "utils"] ] } , "git_tree_fetch_map": @@ -109,26 +113,27 @@ , "srcs": ["git_tree_fetch_map.cpp"] , "deps": [ ["@", "gsl", "", "gsl"] + , ["src/buildtool/crypto", "hash_info"] , ["src/buildtool/execution_api/common", "common"] + , ["src/buildtool/serve_api/remote", "serve_api"] + , ["src/buildtool/storage", "config"] + , ["src/buildtool/storage", "storage"] , ["src/other_tools/just_mr/progress_reporting", "progress"] , ["src/other_tools/ops_maps", "critical_git_op_map"] , ["src/other_tools/ops_maps", "import_to_git_map"] - , ["src/buildtool/serve_api/remote", "serve_api"] - , ["src/buildtool/storage", "config"] ] , "stage": ["src", "other_tools", "ops_maps"] , "private-deps": - [ ["src/other_tools/ops_maps", "critical_git_op_map"] + [ ["@", "fmt", "", "fmt"] + , ["src/buildtool/common", "common"] , ["src/buildtool/common", "config"] - , ["src/buildtool/compatibility", "compatibility"] + , ["src/buildtool/common", "protocol_traits"] + , ["src/buildtool/execution_api/serve", "mr_git_api"] + , ["src/buildtool/execution_api/serve", "utils"] , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/multithreading", "task_system"] , ["src/buildtool/system", "system_command"] , ["src/other_tools/git_operations", "git_repo_remote"] - , ["src/other_tools/ops_maps", 
"content_cas_map"] - , ["src/other_tools/ops_maps", "import_to_git_map"] - , ["src/buildtool/execution_api/git", "git"] - , ["@", "fmt", "", "fmt"] ] } } diff --git a/src/other_tools/ops_maps/archive_fetch_map.cpp b/src/other_tools/ops_maps/archive_fetch_map.cpp index 17b330924..91d71ce65 100644 --- a/src/other_tools/ops_maps/archive_fetch_map.cpp +++ b/src/other_tools/ops_maps/archive_fetch_map.cpp @@ -27,21 +27,20 @@ void ProcessContent(std::filesystem::path const& content_path, std::filesystem::path const& target_name, gsl::not_null const& local_api, IExecutionApi const* remote_api, - std::string const& content, + ArtifactDigest const& content_digest, gsl::not_null const& stats, ArchiveFetchMap::SetterPtr const& setter, ArchiveFetchMap::LoggerPtr const& logger) { // try to back up to remote CAS if (remote_api != nullptr) { if (not local_api->RetrieveToCas( - {Artifact::ObjectInfo{ - .digest = ArtifactDigest{content, 0, /*is_tree=*/false}, - .type = ObjectType::File}}, + {Artifact::ObjectInfo{.digest = content_digest, + .type = ObjectType::File}}, *remote_api)) { // give a warning (*logger)(fmt::format("Failed to back up content {} from local CAS " "to remote", - content), + content_digest.hash()), /*fatal=*/false); } } @@ -53,7 +52,7 @@ void ProcessContent(std::filesystem::path const& content_path, } if (not FileSystemManager::CopyFile(content_path, target_name)) { (*logger)(fmt::format("Failed to copy content {} from CAS to {}", - content, + content_digest.hash(), target_name.string()), /*fatal=*/true); return; @@ -96,30 +95,30 @@ auto CreateArchiveFetchMap(gsl::not_null const& content_cas_map, storage, local_api, remote_api, - content = key.content, + hash_info = key.content_hash, stats, setter, logger]([[maybe_unused]] auto const& values) { // content is in local CAS now auto const& cas = storage->CAS(); - auto content_path = - cas.BlobPath(ArtifactDigest{content, 0, /*is_tree=*/false}, - /*is_executable=*/false) - .value(); + ArtifactDigest const 
digest{hash_info, 0}; + auto content_path = cas.BlobPath(digest, + /*is_executable=*/false) + .value(); ProcessContent(content_path, target_name, local_api, remote_api, - content, + digest, stats, setter, logger); }, - [logger, content = key.content](auto const& msg, bool fatal) { + [logger, hash = key.content_hash.Hash()](auto const& msg, + bool fatal) { (*logger)( - fmt::format("While ensuring content {} is in CAS:\n{}", - content, - msg), + fmt::format( + "While ensuring content {} is in CAS:\n{}", hash, msg), fatal); }); }; diff --git a/src/other_tools/ops_maps/archive_fetch_map.hpp b/src/other_tools/ops_maps/archive_fetch_map.hpp index 3c4ed82ce..e1c26b451 100644 --- a/src/other_tools/ops_maps/archive_fetch_map.hpp +++ b/src/other_tools/ops_maps/archive_fetch_map.hpp @@ -22,6 +22,7 @@ #include #include "gsl/gsl" +#include "src/buildtool/crypto/hash_info.hpp" #include "src/buildtool/execution_api/common/execution_api.hpp" #include "src/buildtool/storage/storage.hpp" #include "src/other_tools/just_mr/progress_reporting/statistics.hpp" @@ -41,7 +42,8 @@ using ArchiveFetchMap = AsyncMapConsumer; // use explicit cast to std::function to allow template deduction when used static const std::function - kArchiveContentPrinter = - [](ArchiveContent const& x) -> std::string { return x.content; }; + kArchiveContentPrinter = [](ArchiveContent const& x) -> std::string { + return x.content_hash.Hash(); +}; #endif // INCLUDED_SRC_OTHER_TOOLS_OPS_MAPS_ARCHIVE_FETCH_MAP_HPP diff --git a/src/other_tools/ops_maps/content_cas_map.cpp b/src/other_tools/ops_maps/content_cas_map.cpp index 955183264..a8e42adb1 100644 --- a/src/other_tools/ops_maps/content_cas_map.cpp +++ b/src/other_tools/ops_maps/content_cas_map.cpp @@ -17,6 +17,7 @@ #include // std::move #include "fmt/core.h" +#include "src/buildtool/execution_api/serve/utils.hpp" #include "src/buildtool/file_system/file_storage.hpp" #include "src/buildtool/storage/fs_utils.hpp" #include 
"src/other_tools/git_operations/git_repo_remote.hpp" @@ -28,7 +29,7 @@ namespace { void FetchFromNetwork(ArchiveContent const& key, MirrorsPtr const& additional_mirrors, CAInfoPtr const& ca_info, - Storage const& storage, + Storage const& native_storage, gsl::not_null const& progress, ContentCASMap::SetterPtr const& setter, ContentCASMap::LoggerPtr const& logger) { @@ -44,7 +45,7 @@ void FetchFromNetwork(ArchiveContent const& key, if (not data) { (*logger)(fmt::format("Failed to fetch a file with id {} from provided " "remotes:{}", - key.content, + key.content_hash.Hash(), data.error()), /*fatal=*/true); return; @@ -72,9 +73,9 @@ void FetchFromNetwork(ArchiveContent const& key, return; } } - // add the fetched data to CAS - auto path = StorageUtils::AddToCAS(storage, *data); - // check one last time if content is in CAS now + // add the fetched data to native CAS + auto path = StorageUtils::AddToCAS(native_storage, *data); + // check one last time if content is in native CAS now if (not path) { (*logger)(fmt::format("Failed to store fetched content from {}", key.fetch_url), @@ -82,12 +83,12 @@ void FetchFromNetwork(ArchiveContent const& key, return; } // check that the data we stored actually produces the requested digest - auto const& cas = storage.CAS(); - if (not cas.BlobPath(ArtifactDigest{key.content, 0, /*is_tree=*/false}, - /*is_executable=*/false)) { + auto const& native_cas = native_storage.CAS(); + if (not native_cas.BlobPath(ArtifactDigest{key.content_hash, 0}, + /*is_executable=*/false)) { (*logger)( fmt::format("Content {} was not found at given fetch location {}", - key.content, + key.content_hash.Hash(), key.fetch_url), /*fatal=*/true); return; @@ -105,8 +106,10 @@ auto CreateContentCASMap( CAInfoPtr const& ca_info, gsl::not_null const& critical_git_op_map, ServeApi const* serve, - gsl::not_null const& storage_config, - gsl::not_null const& storage, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + 
gsl::not_null const& native_storage, + Storage const* compat_storage, gsl::not_null const& local_api, IExecutionApi const* remote_api, gsl::not_null const& progress, @@ -116,8 +119,10 @@ auto CreateContentCASMap( ca_info, critical_git_op_map, serve, - storage, - storage_config, + native_storage_config, + compat_storage_config, + native_storage, + compat_storage, local_api, remote_api, progress](auto ts, @@ -125,34 +130,37 @@ auto CreateContentCASMap( auto logger, auto /*unused*/, auto const& key) { - auto digest = ArtifactDigest(key.content, 0, false); - // check local CAS - if (local_api->IsAvailable(digest)) { + auto const native_digest = ArtifactDigest{key.content_hash, 0}; + // check native local CAS + if (local_api->IsAvailable(native_digest)) { (*setter)(nullptr); return; } // check if content is in Git cache; // ensure Git cache - GitOpKey op_key = {.params = - { - storage_config->GitRoot(), // target_path - "", // git_hash - std::nullopt, // message - std::nullopt, // source_path - true // init_bare - }, - .op_type = GitOpType::ENSURE_INIT}; + GitOpKey op_key = { + .params = + { + native_storage_config->GitRoot(), // target_path + "", // git_hash + std::nullopt, // message + std::nullopt, // source_path + true // init_bare + }, + .op_type = GitOpType::ENSURE_INIT}; critical_git_op_map->ConsumeAfterKeysReady( ts, {std::move(op_key)}, [key, - digest, + native_digest, just_mr_paths, additional_mirrors, ca_info, serve, - storage, - storage_config, + native_storage_config, + compat_storage_config, + native_storage, + compat_storage, local_api, remote_api, progress, @@ -175,59 +183,61 @@ auto CreateContentCASMap( } // verify if local Git knows content blob auto wrapped_logger = std::make_shared( - [&logger, blob = key.content](auto const& msg, bool fatal) { + [&logger, hash = key.content_hash.Hash()](auto const& msg, + bool fatal) { (*logger)(fmt::format("While verifying presence of " "blob {}:\n{}", - blob, + hash, msg), fatal); }); - auto res = - 
just_git_repo->TryReadBlob(key.content, wrapped_logger); + auto res = just_git_repo->TryReadBlob(key.content_hash.Hash(), + wrapped_logger); if (not res.first) { // blob check failed return; } - auto const& cas = storage->CAS(); + auto const& native_cas = native_storage->CAS(); if (res.second) { - // blob found; add it to CAS - if (not cas.StoreBlob(*res.second, - /*is_executable=*/false)) { + // blob found; add it to native CAS + if (not native_cas.StoreBlob(*res.second, + /*is_executable=*/false)) { (*logger)(fmt::format("Failed to store content {} " - "to local CAS", - key.content), + "to native local CAS", + key.content_hash.Hash()), /*fatal=*/true); return; } - // content stored to CAS + // content stored to native CAS (*setter)(nullptr); return; } // check for blob in older generations for (std::size_t generation = 1; - generation < storage_config->num_generations; + generation < native_storage_config->num_generations; generation++) { - auto old = storage_config->GitGenerationRoot(generation); + auto old = + native_storage_config->GitGenerationRoot(generation); if (FileSystemManager::IsDirectory(old)) { auto old_repo = GitRepo::Open(old); auto no_logging = std::make_shared( [](auto /*unused*/, auto /*unused*/) {}); if (old_repo) { - auto res = - old_repo->TryReadBlob(key.content, no_logging); + auto res = old_repo->TryReadBlob( + key.content_hash.Hash(), no_logging); if (res.first and res.second) { // read blob from older generation - auto const& cas = storage->CAS(); - if (not cas.StoreBlob( + if (not native_cas.StoreBlob( *res.second, /*is_executable=*/false)) { (*logger)(fmt::format( "Failed to store content {} " - "to local CAS", - key.content), + "to native local CAS", + key.content_hash.Hash()), /*fatal=*/true); + return; } - // content stored in CAS + // content stored in native CAS (*setter)(nullptr); return; } @@ -237,37 +247,91 @@ auto CreateContentCASMap( // blob not found in Git cache progress->TaskTracker().Start(key.origin); - // add distfile to 
CAS + // add distfile to native CAS auto repo_distfile = (key.distfile ? key.distfile.value() : std::filesystem::path(key.fetch_url) .filename() .string()); StorageUtils::AddDistfileToCAS( - *storage, repo_distfile, just_mr_paths); - // check if content is in CAS now - if (cas.BlobPath(digest, /*is_executable=*/false)) { + *native_storage, repo_distfile, just_mr_paths); + // check if content is in native CAS now + if (native_cas.BlobPath(native_digest, + /*is_executable=*/false)) { progress->TaskTracker().Stop(key.origin); (*setter)(nullptr); return; } // check if content is known to remote serve service - if (serve != nullptr and remote_api != nullptr and - serve->ContentInRemoteCAS(key.content)) { + if (serve != nullptr and remote_api != nullptr) { + auto const remote_digest = + serve->ContentInRemoteCAS(key.content_hash.Hash()); // try to get content from remote CAS - if (remote_api->RetrieveToCas( - {Artifact::ObjectInfo{.digest = digest, + if (remote_digest and + remote_api->RetrieveToCas( + {Artifact::ObjectInfo{.digest = *remote_digest, .type = ObjectType::File}}, *local_api)) { progress->TaskTracker().Stop(key.origin); + if (remote_digest->hash() == key.content_hash.Hash()) { + // content is in native local CAS, so all done + (*setter)(nullptr); + return; + } + // if content is in compatible local CAS, rehash it + if (compat_storage_config == nullptr or + compat_storage == nullptr) { + // sanity check + (*logger)("No compatible local storage set up!", + /*fatal=*/true); + return; + } + auto const& compat_cas = compat_storage->CAS(); + auto const cas_path = compat_cas.BlobPath( + *remote_digest, /*is_executable=*/false); + if (not cas_path) { + (*logger)(fmt::format("Expected content {} not " + "found in " + "compatible local CAS", + remote_digest->hash()), + /*fatal=*/true); + return; + } + auto rehashed_digest = native_cas.StoreBlob( + *cas_path, /*is_executable=*/false); + if (not rehashed_digest or + rehashed_digest->hash() != + key.content_hash.Hash()) 
{ + (*logger)(fmt::format("Failed to rehash content {} " + "into native local CAS", + remote_digest->hash()), + /*fatal=*/true); + return; + } + // cache association between digests + auto error_msg = MRApiUtils::StoreRehashedDigest( + native_digest, + *rehashed_digest, + ObjectType::File, + *native_storage_config, + *compat_storage_config); + if (error_msg) { + (*logger)(fmt::format("Failed to cache digests " + "mapping with:\n{}", + *error_msg), + /*fatal=*/true); + return; + } + // content is in native local CAS now (*setter)(nullptr); return; } } - // check remote execution endpoint, if given - if (remote_api != nullptr and + // check if content is on remote, if given and native + if (compat_storage_config == nullptr and + remote_api != nullptr and remote_api->RetrieveToCas( - {Artifact::ObjectInfo{.digest = digest, + {Artifact::ObjectInfo{.digest = native_digest, .type = ObjectType::File}}, *local_api)) { progress->TaskTracker().Stop(key.origin); @@ -278,13 +342,13 @@ auto CreateContentCASMap( FetchFromNetwork(key, additional_mirrors, ca_info, - *storage, + *native_storage, progress, setter, logger); }, - [logger, target_path = storage_config->GitRoot()](auto const& msg, - bool fatal) { + [logger, target_path = native_storage_config->GitRoot()]( + auto const& msg, bool fatal) { (*logger)(fmt::format("While running critical Git op " "ENSURE_INIT for target {}:\n{}", target_path.string(), diff --git a/src/other_tools/ops_maps/content_cas_map.hpp b/src/other_tools/ops_maps/content_cas_map.hpp index ccfb6dadd..e161be19d 100644 --- a/src/other_tools/ops_maps/content_cas_map.hpp +++ b/src/other_tools/ops_maps/content_cas_map.hpp @@ -22,6 +22,7 @@ #include "gsl/gsl" #include "src/buildtool/common/user_structs.hpp" +#include "src/buildtool/crypto/hash_info.hpp" #include "src/buildtool/execution_api/common/execution_api.hpp" #include "src/buildtool/file_system/symlinks_map/pragma_special.hpp" #include "src/buildtool/multithreading/async_map_consumer.hpp" @@ -34,25 
+35,25 @@ #include "src/utils/cpp/hash_combine.hpp" struct ArchiveContent { - std::string content{}; /* key */ + HashInfo content_hash; /* key */ std::optional distfile{std::nullopt}; - std::string fetch_url{}; - std::vector mirrors{}; + std::string fetch_url; + std::vector mirrors; std::optional sha256{std::nullopt}; std::optional sha512{std::nullopt}; // name of repository for which work is done; used in progress reporting - std::string origin{}; + std::string origin; [[nodiscard]] auto operator==(const ArchiveContent& other) const -> bool { - return content == other.content; + return content_hash.Hash() == other.content_hash.Hash(); } }; // Used in callers of ContentCASMap which need extra fields struct ArchiveRepoInfo { - ArchiveContent archive{}; /* key */ - std::string repo_type{}; /* key */ - std::string subdir{}; /* key */ + ArchiveContent archive; /* key */ + std::string repo_type; /* key */ + std::string subdir; /* key */ // create root based on "special" pragma value std::optional pragma_special{std::nullopt}; /* key */ // create an absent root @@ -67,10 +68,10 @@ struct ArchiveRepoInfo { }; struct ForeignFileInfo { - ArchiveContent archive{}; /* key */ - std::string name{}; /* key */ - bool executable{}; /* key */ - bool absent{}; /* key */ + ArchiveContent archive; /* key */ + std::string name; /* key */ + bool executable{}; /* key */ + bool absent{}; /* key */ [[nodiscard]] auto operator==(const ForeignFileInfo& other) const -> bool { return archive == other.archive and name == other.name and @@ -88,8 +89,10 @@ using ContentCASMap = AsyncMapConsumer; CAInfoPtr const& ca_info, gsl::not_null const& critical_git_op_map, ServeApi const* serve, - gsl::not_null const& storage_config, - gsl::not_null const& storage, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + gsl::not_null const& native_storage, + Storage const* compat_storage, gsl::not_null const& local_api, IExecutionApi const* remote_api, gsl::not_null 
const& progress, @@ -100,7 +103,7 @@ template <> struct hash { [[nodiscard]] auto operator()(const ArchiveContent& ct) const noexcept -> std::size_t { - return std::hash{}(ct.content); + return std::hash{}(ct.content_hash.Hash()); } }; diff --git a/src/other_tools/ops_maps/critical_git_op_map.cpp b/src/other_tools/ops_maps/critical_git_op_map.cpp index cbab7c42a..b7981db8b 100644 --- a/src/other_tools/ops_maps/critical_git_op_map.cpp +++ b/src/other_tools/ops_maps/critical_git_op_map.cpp @@ -15,7 +15,7 @@ #include "src/other_tools/ops_maps/critical_git_op_map.hpp" // define the mapping to actual operations being called -GitOpKeyMap const GitOpKey::map_ = { +GitOpKeyMap const GitOpKey::kMap = { {GitOpType::INITIAL_COMMIT, CriticalGitOps::GitInitialCommit}, {GitOpType::ENSURE_INIT, CriticalGitOps::GitEnsureInit}, {GitOpType::KEEP_TAG, CriticalGitOps::GitKeepTag}, diff --git a/src/other_tools/ops_maps/critical_git_op_map.hpp b/src/other_tools/ops_maps/critical_git_op_map.hpp index 8c75874c4..5832462ab 100644 --- a/src/other_tools/ops_maps/critical_git_op_map.hpp +++ b/src/other_tools/ops_maps/critical_git_op_map.hpp @@ -40,7 +40,7 @@ struct GitOpKey { [[nodiscard]] auto operation(GitOpParams const& params, AsyncMapConsumerLoggerPtr const& logger) const -> GitOpValue { - return map_.at(op_type)(params, logger); + return kMap.at(op_type)(params, logger); } [[nodiscard]] auto operator==(GitOpKey const& other) const -> bool { @@ -48,7 +48,7 @@ struct GitOpKey { } private: - static GitOpKeyMap const map_; + static GitOpKeyMap const kMap; }; class CriticalGitOpGuard; @@ -84,7 +84,7 @@ class CriticalGitOpGuard { } private: - std::unordered_map curr_critical_key_{}; + std::unordered_map curr_critical_key_; std::mutex critical_key_mutex_; }; diff --git a/src/other_tools/ops_maps/git_tree_fetch_map.cpp b/src/other_tools/ops_maps/git_tree_fetch_map.cpp index 95582556e..0980e8a76 100644 --- a/src/other_tools/ops_maps/git_tree_fetch_map.cpp +++ 
b/src/other_tools/ops_maps/git_tree_fetch_map.cpp @@ -19,10 +19,12 @@ #include // std::move #include "fmt/core.h" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/common/repository_config.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" #include "src/buildtool/execution_api/common/execution_common.hpp" -#include "src/buildtool/execution_api/git/git_api.hpp" +#include "src/buildtool/execution_api/serve/mr_git_api.hpp" +#include "src/buildtool/execution_api/serve/utils.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/multithreading/task_system.hpp" #include "src/buildtool/system/system_command.hpp" @@ -30,52 +32,63 @@ namespace { -void BackupToRemote(std::string const& tree_id, - StorageConfig const& storage_config, +void BackupToRemote(ArtifactDigest const& digest, + StorageConfig const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + gsl::not_null const& local_api, IExecutionApi const& remote_api, GitTreeFetchMap::LoggerPtr const& logger) { // try to back up to remote CAS auto repo = RepositoryConfig{}; - if (repo.SetGitCAS(storage_config.GitRoot())) { - auto git_api = GitApi{&repo}; + if (repo.SetGitCAS(native_storage_config.GitRoot())) { + auto git_api = + MRGitApi{&repo, + &native_storage_config, + compat_storage_config, + compat_storage, + compat_storage_config != nullptr ? 
&*local_api : nullptr}; if (not git_api.RetrieveToCas( - {Artifact::ObjectInfo{ - .digest = ArtifactDigest{tree_id, 0, /*is_tree=*/true}, - .type = ObjectType::Tree}}, + {Artifact::ObjectInfo{.digest = digest, + .type = ObjectType::Tree}}, remote_api)) { // give a warning (*logger)(fmt::format( "Failed to back up tree {} from local CAS to remote", - tree_id), + digest.hash()), /*fatal=*/false); } } else { // give a warning (*logger)(fmt::format("Failed to SetGitCAS at {}", - storage_config.GitRoot().string()), + native_storage_config.GitRoot().string()), /*fatal=*/false); } } /// \brief Moves the root tree from local CAS to the Git cache and sets the /// root. -void MoveCASTreeToGit(std::string const& tree_id, - ArtifactDigest const& digest, - gsl::not_null const& import_to_git_map, - gsl::not_null const& storage_config, - gsl::not_null const& local_api, - IExecutionApi const* remote_api, - bool backup_to_remote, - gsl::not_null const& ts, - GitTreeFetchMap::SetterPtr const& setter, - GitTreeFetchMap::LoggerPtr const& logger) { +void MoveCASTreeToGit( + HashInfo const& tree_hash, + ArtifactDigest const& digest, // native or compatible + gsl::not_null const& import_to_git_map, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + gsl::not_null const& local_api, + IExecutionApi const* remote_api, + bool backup_to_remote, + gsl::not_null const& ts, + GitTreeFetchMap::SetterPtr const& setter, + GitTreeFetchMap::LoggerPtr const& logger) { // Move tree from CAS to local Git storage - auto tmp_dir = storage_config->CreateTypedTmpDir("fetch-remote-git-tree"); + auto tmp_dir = + native_storage_config->CreateTypedTmpDir("fetch-remote-git-tree"); if (not tmp_dir) { (*logger)(fmt::format("Failed to create tmp directory for copying " "git-tree {} from remote CAS", - digest.hash()), + tree_hash.Hash()), true); return; } @@ -83,18 +96,21 @@ void MoveCASTreeToGit(std::string const& tree_id, 
{Artifact::ObjectInfo{.digest = digest, .type = ObjectType::Tree}}, {tmp_dir->GetPath()})) { (*logger)(fmt::format("Failed to copy git-tree {} to {}", - tree_id, + tree_hash.Hash(), tmp_dir->GetPath().string()), true); return; } - CommitInfo c_info{tmp_dir->GetPath(), "tree", tree_id}; + CommitInfo c_info{tmp_dir->GetPath(), "tree", tree_hash.Hash()}; import_to_git_map->ConsumeAfterKeysReady( ts, {std::move(c_info)}, [tmp_dir, // keep tmp_dir alive - tree_id, - storage_config, + tree_hash, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, backup_to_remote, setter, @@ -106,41 +122,60 @@ void MoveCASTreeToGit(std::string const& tree_id, } // backup to remote if needed and in compatibility mode if (backup_to_remote and remote_api != nullptr) { - BackupToRemote(tree_id, *storage_config, *remote_api, logger); + // back up only native digests, as that is what Git stores + auto const native_digest = ArtifactDigest{tree_hash, 0}; + BackupToRemote(native_digest, + *native_storage_config, + compat_storage_config, + compat_storage, + local_api, + *remote_api, + logger); } (*setter)(false /*no cache hit*/); }, - [logger, tmp_dir, tree_id](auto const& msg, bool fatal) { + [logger, tmp_dir, tree_hash](auto const& msg, bool fatal) { (*logger)(fmt::format( "While moving git-tree {} from {} to local git:\n{}", - tree_id, + tree_hash.Hash(), tmp_dir->GetPath().string(), msg), fatal); }); } -void TagAndSetRoot(std::string tree_id, - gsl::not_null const& storage_config, - gsl::not_null const& critical_git_op_map, - IExecutionApi const* remote_api, - bool backup_to_remote, - gsl::not_null const& ts, - GitTreeFetchMap::SetterPtr const& setter, - GitTreeFetchMap::LoggerPtr const& logger) { - auto repo = storage_config->GitRoot(); +void TagAndSetRoot( + ArtifactDigest const& digest, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + gsl::not_null const& 
critical_git_op_map, + gsl::not_null const& local_api, + IExecutionApi const* remote_api, + bool backup_to_remote, + gsl::not_null const& ts, + GitTreeFetchMap::SetterPtr const& setter, + GitTreeFetchMap::LoggerPtr const& logger) { + auto repo = native_storage_config->GitRoot(); GitOpKey op_key = {.params = { repo, // target_path - tree_id, // git_hash + digest.hash(), // git_hash "Keep referenced tree alive" // message }, .op_type = GitOpType::KEEP_TREE}; critical_git_op_map->ConsumeAfterKeysReady( ts, {std::move(op_key)}, - [tree_id, backup_to_remote, storage_config, remote_api, logger, setter]( - auto const& values) { + [digest, + backup_to_remote, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, + remote_api, + logger, + setter](auto const& values) { GitOpValue op_result = *values[0]; if (not op_result.result) { (*logger)("Tree tagging failed", @@ -149,52 +184,64 @@ void TagAndSetRoot(std::string tree_id, } // backup to remote if needed and in compatibility mode if (backup_to_remote and remote_api != nullptr) { - BackupToRemote(tree_id, *storage_config, *remote_api, logger); + BackupToRemote(digest, + *native_storage_config, + compat_storage_config, + compat_storage, + local_api, + *remote_api, + logger); } (*setter)(false /*no cache hit*/); }, - [logger, repo, tree_id](auto const& msg, bool fatal) { - (*logger)( - fmt::format("While tagging tree {} in {} to keep it alive:\n{}", - tree_id, - repo.string(), - msg), - fatal); + [logger, repo, digest](auto const& msg, bool fatal) { + (*logger)(fmt::format("While tagging tree {} in {} to keep it " + "alive:\n{}", + digest.hash(), + repo.string(), + msg), + fatal); }); } void TakeTreeFromOlderGeneration( std::size_t generation, - std::string tree_id, - gsl::not_null const& storage_config, + ArtifactDigest const& digest, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, GitCASPtr const& git_cas, gsl::not_null 
const& critical_git_op_map, + gsl::not_null const& local_api, IExecutionApi const* remote_api, bool backup_to_remote, gsl::not_null const& ts, GitTreeFetchMap::SetterPtr const& setter, GitTreeFetchMap::LoggerPtr const& logger) { - auto source = storage_config->GitGenerationRoot(generation); + auto source = native_storage_config->GitGenerationRoot(generation); GitOpKey op_key = {.params = { source, // target_path - tree_id, // git_hash + digest.hash(), // git_hash "Tag commit for fetching" // message }, .op_type = GitOpType::KEEP_TREE}; critical_git_op_map->ConsumeAfterKeysReady( ts, {std::move(op_key)}, - [tree_id, + [digest, git_cas, critical_git_op_map, + local_api, remote_api, backup_to_remote, ts, setter, logger, source, - storage_config](auto const& values) { + native_storage_config, + compat_storage_config, + compat_storage](auto const& values) { GitOpValue op_result = *values[0]; if (not op_result.result) { (*logger)("Tree tagging failed", /*fatal=*/true); @@ -203,7 +250,8 @@ void TakeTreeFromOlderGeneration( auto tag = *op_result.result; auto git_repo = GitRepoRemote::Open(git_cas); if (not git_repo) { - (*logger)("Could not open main git repository", /*fatal=*/true); + (*logger)("Could not open main git repository", + /*fatal=*/true); return; } auto fetch_logger = std::make_shared( @@ -215,23 +263,26 @@ void TakeTreeFromOlderGeneration( fatal); }); if (not git_repo->LocalFetchViaTmpRepo( - *storage_config, source, tag, fetch_logger)) { + *native_storage_config, source, tag, fetch_logger)) { return; } - TagAndSetRoot(tree_id, - storage_config, + TagAndSetRoot(digest, + native_storage_config, + compat_storage_config, + compat_storage, critical_git_op_map, + local_api, remote_api, backup_to_remote, ts, setter, logger); }, - [logger, source, tree_id](auto const& msg, bool fatal) { + [logger, source, digest](auto const& msg, bool fatal) { (*logger)( fmt::format("While tagging tree {} in {} for fetching:\n{}", source.string(), - tree_id, + digest.hash(), msg), 
fatal); }); @@ -245,7 +296,9 @@ auto CreateGitTreeFetchMap( std::string const& git_bin, std::vector const& launcher, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, gsl::not_null const& local_api, IExecutionApi const* remote_api, bool backup_to_remote, @@ -256,7 +309,9 @@ auto CreateGitTreeFetchMap( git_bin, launcher, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, local_api, remote_api, backup_to_remote, @@ -267,15 +322,16 @@ auto CreateGitTreeFetchMap( auto const& key) { // check whether tree exists already in Git cache; // ensure Git cache exists - GitOpKey op_key = {.params = - { - storage_config->GitRoot(), // target_path - "", // git_hash - std::nullopt, // message - std::nullopt, // source_path - true // init_bare - }, - .op_type = GitOpType::ENSURE_INIT}; + GitOpKey op_key = { + .params = + { + native_storage_config->GitRoot(), // target_path + "", // git_hash + std::nullopt, // message + std::nullopt, // source_path + true // init_bare + }, + .op_type = GitOpType::ENSURE_INIT}; critical_git_op_map->ConsumeAfterKeysReady( ts, {std::move(op_key)}, @@ -284,7 +340,9 @@ auto CreateGitTreeFetchMap( git_bin, launcher, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, local_api, remote_api, backup_to_remote, @@ -304,9 +362,10 @@ auto CreateGitTreeFetchMap( auto git_repo = GitRepoRemote::Open( op_result.git_cas); // link fake repo to odb if (not git_repo) { - (*logger)(fmt::format("Could not open repository {}", - storage_config->GitRoot().string()), - /*fatal=*/true); + (*logger)( + fmt::format("Could not open repository {}", + native_storage_config->GitRoot().string()), + /*fatal=*/true); return; } // setup wrapped logger @@ -318,17 +377,22 @@ auto CreateGitTreeFetchMap( fatal); }); // check if the desired tree ID is in Git cache - auto 
tree_found = - git_repo->CheckTreeExists(key.hash, wrapped_logger); + auto tree_found = git_repo->CheckTreeExists( + key.tree_hash.Hash(), wrapped_logger); if (not tree_found) { // errors encountered return; } if (*tree_found) { - // backup to remote if needed and in native mode + // backup to remote if needed if (backup_to_remote and remote_api != nullptr) { - BackupToRemote( - key.hash, *storage_config, *remote_api, logger); + BackupToRemote(ArtifactDigest{key.tree_hash, 0}, + *native_storage_config, + compat_storage_config, + compat_storage, + local_api, + *remote_api, + logger); } // success (*setter)(true /*cache hit*/); @@ -337,42 +401,49 @@ auto CreateGitTreeFetchMap( // Check older generations for presence of the tree for (std::size_t generation = 1; - generation < storage_config->num_generations; + generation < native_storage_config->num_generations; generation++) { - auto old = storage_config->GitGenerationRoot(generation); + auto old = + native_storage_config->GitGenerationRoot(generation); if (FileSystemManager::IsDirectory(old)) { auto old_repo = GitRepo::Open(old); auto no_logging = std::make_shared( [](auto /*unused*/, auto /*unused*/) {}); if (old_repo) { - auto check_result = - old_repo->CheckTreeExists(key.hash, no_logging); + auto check_result = old_repo->CheckTreeExists( + key.tree_hash.Hash(), no_logging); if (check_result and *check_result) { - TakeTreeFromOlderGeneration(generation, - key.hash, - storage_config, - op_result.git_cas, - critical_git_op_map, - remote_api, - backup_to_remote, - ts, - setter, - logger); + TakeTreeFromOlderGeneration( + generation, + ArtifactDigest{key.tree_hash, 0}, + native_storage_config, + compat_storage_config, + compat_storage, + op_result.git_cas, + critical_git_op_map, + local_api, + remote_api, + backup_to_remote, + ts, + setter, + logger); return; } } } } - // check if tree is known to local CAS - auto digest = ArtifactDigest{key.hash, 0, /*is_tree=*/true}; - if (local_api->IsAvailable(digest)) { + // 
check if tree is known to native local CAS + auto const native_digest = ArtifactDigest{key.tree_hash, 0}; + if (local_api->IsAvailable(native_digest)) { // import tree to Git cache - MoveCASTreeToGit(key.hash, - digest, + MoveCASTreeToGit(key.tree_hash, + native_digest, import_to_git_map, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, local_api, remote_api, backup_to_remote, @@ -384,37 +455,63 @@ auto CreateGitTreeFetchMap( } progress->TaskTracker().Start(key.origin); // check if tree is known to remote serve service and can be - // made available in remote CAS + // provided via the remote CAS if (serve != nullptr and remote_api != nullptr) { - // as we anyway interrogate the remote execution endpoint, - // we're only interested here in the serve endpoint making - // an attempt to upload the tree, if known, to remote CAS - std::ignore = serve->TreeInRemoteCAS(key.hash); + auto const remote_digest = + serve->TreeInRemoteCAS(key.tree_hash.Hash()); + // try to get content from remote CAS into local CAS; + // whether it is retrieved locally in native or + // compatible CAS, it will be imported to Git either way + if (remote_digest and + remote_api->RetrieveToCas( + {Artifact::ObjectInfo{.digest = *remote_digest, + .type = ObjectType::Tree}}, + *local_api)) { + progress->TaskTracker().Stop(key.origin); + MoveCASTreeToGit(key.tree_hash, + *remote_digest, + import_to_git_map, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, + remote_api, + false, // tree already on remote, + // so ignore backing up + ts, + setter, + logger); + // done! 
+ return; + } } - // check if tree is in remote CAS, if a remote is given - if (remote_api != nullptr and + // check if tree is on remote, if given and native + if (compat_storage_config == nullptr and + remote_api != nullptr and remote_api->RetrieveToCas( - {Artifact::ObjectInfo{.digest = digest, + {Artifact::ObjectInfo{.digest = native_digest, .type = ObjectType::Tree}}, *local_api)) { progress->TaskTracker().Stop(key.origin); - MoveCASTreeToGit( - key.hash, - digest, - import_to_git_map, - storage_config, - local_api, - remote_api, - false, // tree already in remote, so ignore backing up - ts, - setter, - logger); + MoveCASTreeToGit(key.tree_hash, + native_digest, + import_to_git_map, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, + remote_api, + false, // tree already on remote, + // so ignore backing up + ts, + setter, + logger); // done! return; } // create temporary location for command execution root auto content_dir = - storage_config->CreateTypedTmpDir("git-tree"); + native_storage_config->CreateTypedTmpDir("git-tree"); if (not content_dir) { (*logger)( "Failed to create execution root tmp directory for " @@ -423,7 +520,8 @@ auto CreateGitTreeFetchMap( return; } // create temporary location for storing command result files - auto out_dir = storage_config->CreateTypedTmpDir("git-tree"); + auto out_dir = + native_storage_config->CreateTypedTmpDir("git-tree"); if (not out_dir) { (*logger)( "Failed to create results tmp directory for tree id " @@ -432,7 +530,7 @@ auto CreateGitTreeFetchMap( return; } // execute command in temporary location - SystemCommand system{key.hash}; + SystemCommand system{key.tree_hash.Hash()}; auto cmdline = launcher; std::copy(key.command.begin(), key.command.end(), @@ -454,7 +552,7 @@ auto CreateGitTreeFetchMap( } // create temporary location for the import repository auto repo_dir = - storage_config->CreateTypedTmpDir("import-repo"); + native_storage_config->CreateTypedTmpDir("import-repo"); if 
(not repo_dir) { (*logger)( "Failed to create tmp directory for import repository", @@ -468,8 +566,8 @@ auto CreateGitTreeFetchMap( repo_dir->GetPath(), // target_path "", // git_hash fmt::format("Content of tree {}", - key.hash), // message - content_dir->GetPath() // source_path + key.tree_hash.Hash()), // message + content_dir->GetPath() // source_path }, .op_type = GitOpType::INITIAL_COMMIT}; critical_git_op_map->ConsumeAfterKeysReady( @@ -484,7 +582,10 @@ auto CreateGitTreeFetchMap( key, git_bin, launcher, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, backup_to_remote, progress, @@ -518,8 +619,8 @@ auto CreateGitTreeFetchMap( fatal); }); // check that the desired tree ID is part of the repo - auto tree_check = - git_repo->CheckTreeExists(key.hash, wrapped_logger); + auto tree_check = git_repo->CheckTreeExists( + key.tree_hash.Hash(), wrapped_logger); if (not tree_check) { // errors encountered return; @@ -548,7 +649,7 @@ auto CreateGitTreeFetchMap( fmt::format("Executing {} did not create " "specified tree {}{}", nlohmann::json(cmdline).dump(), - key.hash, + key.tree_hash.Hash(), output), /*fatal=*/true); return; @@ -558,14 +659,15 @@ auto CreateGitTreeFetchMap( auto just_git_repo = GitRepoRemote::Open(just_git_cas); if (not just_git_repo) { (*logger)( - fmt::format("Could not open Git repository {}", - storage_config->GitRoot().string()), + fmt::format( + "Could not open Git repository {}", + native_storage_config->GitRoot().string()), /*fatal=*/true); return; } // define temp repo path - auto tmp_dir = - storage_config->CreateTypedTmpDir("git-tree"); + auto tmp_dir = native_storage_config->CreateTypedTmpDir( + "git-tree"); ; if (not tmp_dir) { (*logger)(fmt::format("Could not create unique " @@ -586,7 +688,7 @@ auto CreateGitTreeFetchMap( fatal); }); if (not just_git_repo->FetchViaTmpRepo( - *storage_config, + *native_storage_config, target_path.string(), std::nullopt, key.inherit_env, @@ 
-612,7 +714,8 @@ auto CreateGitTreeFetchMap( GitOpKey op_key = { .params = { - storage_config->GitRoot(), // target_path + native_storage_config + ->GitRoot(), // target_path *op_result.result, // git_hash "Keep referenced tree alive" // message }, @@ -621,7 +724,10 @@ auto CreateGitTreeFetchMap( ts, {std::move(op_key)}, [remote_api, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, backup_to_remote, key, progress, @@ -638,17 +744,21 @@ auto CreateGitTreeFetchMap( // backup to remote if needed and in native mode if (backup_to_remote and remote_api != nullptr) { - BackupToRemote(key.hash, - *storage_config, - *remote_api, - logger); + BackupToRemote( + ArtifactDigest{key.tree_hash, 0}, + *native_storage_config, + compat_storage_config, + compat_storage, + local_api, + *remote_api, + logger); } // success (*setter)(false /*no cache hit*/); }, [logger, commit = *op_result.result, - target_path = storage_config->GitRoot()]( + target_path = native_storage_config->GitRoot()]( auto const& msg, bool fatal) { (*logger)( fmt::format("While running critical Git op " @@ -670,8 +780,8 @@ auto CreateGitTreeFetchMap( fatal); }); }, - [logger, target_path = storage_config->GitRoot()](auto const& msg, - bool fatal) { + [logger, target_path = native_storage_config->GitRoot()]( + auto const& msg, bool fatal) { (*logger)(fmt::format("While running critical Git op " "ENSURE_INIT bare for target {}:\n{}", target_path.string(), diff --git a/src/other_tools/ops_maps/git_tree_fetch_map.hpp b/src/other_tools/ops_maps/git_tree_fetch_map.hpp index b7400ae21..7ff6b301f 100644 --- a/src/other_tools/ops_maps/git_tree_fetch_map.hpp +++ b/src/other_tools/ops_maps/git_tree_fetch_map.hpp @@ -23,24 +23,26 @@ #include #include "gsl/gsl" +#include "src/buildtool/crypto/hash_info.hpp" #include "src/buildtool/execution_api/common/execution_api.hpp" #include "src/buildtool/serve_api/remote/serve_api.hpp" #include "src/buildtool/storage/config.hpp" +#include 
"src/buildtool/storage/storage.hpp" #include "src/other_tools/just_mr/progress_reporting/progress.hpp" #include "src/other_tools/ops_maps/critical_git_op_map.hpp" #include "src/other_tools/ops_maps/import_to_git_map.hpp" // Stores all the information needed to make a Git tree available struct GitTreeInfo { - std::string hash{}; /* key */ - std::map env_vars{}; - std::vector inherit_env{}; - std::vector command{}; + HashInfo tree_hash; /* key */ + std::map env_vars; + std::vector inherit_env; + std::vector command; // name of repository for which work is done; used in progress reporting - std::string origin{}; + std::string origin; [[nodiscard]] auto operator==(const GitTreeInfo& other) const -> bool { - return hash == other.hash; + return tree_hash.Hash() == other.tree_hash.Hash(); } }; @@ -49,7 +51,7 @@ template <> struct hash { [[nodiscard]] auto operator()(const GitTreeInfo& gti) const noexcept -> std::size_t { - return std::hash{}(gti.hash); + return std::hash{}(gti.tree_hash.Hash()); } }; } // namespace std @@ -64,7 +66,9 @@ using GitTreeFetchMap = AsyncMapConsumer; std::string const& git_bin, std::vector const& launcher, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, gsl::not_null const& local_api, IExecutionApi const* remote_api, bool backup_to_remote, @@ -74,6 +78,6 @@ using GitTreeFetchMap = AsyncMapConsumer; // use explicit cast to std::function to allow template deduction when used static const std::function kGitTreeInfoPrinter = - [](GitTreeInfo const& x) -> std::string { return x.hash; }; + [](GitTreeInfo const& x) -> std::string { return x.tree_hash.Hash(); }; #endif // INCLUDED_SRC_OTHER_TOOLS_OPS_MAPS_GIT_TREE_FETCH_MAP_HPP diff --git a/src/other_tools/ops_maps/git_update_map.hpp b/src/other_tools/ops_maps/git_update_map.hpp index d30ebe078..913774072 100644 --- a/src/other_tools/ops_maps/git_update_map.hpp +++ 
b/src/other_tools/ops_maps/git_update_map.hpp @@ -30,9 +30,9 @@ #include "src/utils/cpp/hash_combine.hpp" struct RepoDescriptionForUpdating { - std::string repo{}; - std::string branch{}; - std::vector inherit_env{}; /*non-key!*/ + std::string repo; + std::string branch; + std::vector inherit_env; /*non-key!*/ [[nodiscard]] auto operator==(const RepoDescriptionForUpdating& other) const -> bool { @@ -60,7 +60,8 @@ struct hash { GitCASPtr const& git_cas, std::string const& git_bin, std::vector const& launcher, - gsl::not_null const& storage_config, + gsl::not_null const& + storage_config, // native storage config gsl::not_null const& stats, gsl::not_null const& progress, std::size_t jobs) -> GitUpdateMap; diff --git a/src/other_tools/ops_maps/import_to_git_map.hpp b/src/other_tools/ops_maps/import_to_git_map.hpp index cd6fdbef6..d0a59f3ce 100644 --- a/src/other_tools/ops_maps/import_to_git_map.hpp +++ b/src/other_tools/ops_maps/import_to_git_map.hpp @@ -28,9 +28,9 @@ #include "src/utils/cpp/path_hash.hpp" struct CommitInfo { - std::filesystem::path target_path{}; /*key*/ - std::string repo_type{}; - std::string content{}; // hash or path + std::filesystem::path target_path; /*key*/ + std::string repo_type; + std::string content; // hash or path CommitInfo(std::filesystem::path const& target_path_, std::string repo_type_, diff --git a/src/other_tools/repo_map/TARGETS b/src/other_tools/repo_map/TARGETS index 428b89816..d7d2e2afb 100644 --- a/src/other_tools/repo_map/TARGETS +++ b/src/other_tools/repo_map/TARGETS @@ -5,18 +5,20 @@ , "srcs": ["repos_to_setup_map.cpp"] , "deps": [ ["@", "gsl", "", "gsl"] + , ["src/buildtool/build_engine/expression", "expression"] , ["src/other_tools/just_mr/progress_reporting", "statistics"] , ["src/other_tools/root_maps", "commit_git_map"] , ["src/other_tools/root_maps", "content_git_map"] - , ["src/other_tools/root_maps", "foreign_file_git_map"] , ["src/other_tools/root_maps", "distdir_git_map"] + , ["src/other_tools/root_maps", 
"foreign_file_git_map"] , ["src/other_tools/root_maps", "fpath_git_map"] - , ["src/buildtool/build_engine/expression", "expression"] , ["src/other_tools/root_maps", "tree_id_git_map"] ] , "stage": ["src", "other_tools", "repo_map"] , "private-deps": [ ["@", "fmt", "", "fmt"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/crypto", "hash_info"] , ["src/buildtool/file_system", "file_root"] , ["src/buildtool/file_system/symlinks_map", "pragma_special"] , ["src/buildtool/logging", "log_level"] @@ -25,7 +27,7 @@ , ["src/other_tools/ops_maps", "content_cas_map"] , ["src/other_tools/ops_maps", "git_tree_fetch_map"] , ["src/other_tools/utils", "parse_archive"] - , ["src/buildtool/crypto", "hash_function"] + , ["src/other_tools/utils", "parse_git_tree"] ] } } diff --git a/src/other_tools/repo_map/repos_to_setup_map.cpp b/src/other_tools/repo_map/repos_to_setup_map.cpp index 48074fd4d..351c6215d 100644 --- a/src/other_tools/repo_map/repos_to_setup_map.cpp +++ b/src/other_tools/repo_map/repos_to_setup_map.cpp @@ -19,6 +19,7 @@ #include "fmt/core.h" #include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/crypto/hash_info.hpp" #include "src/buildtool/file_system/file_root.hpp" #include "src/buildtool/file_system/symlinks_map/pragma_special.hpp" #include "src/buildtool/logging/log_level.hpp" @@ -27,6 +28,7 @@ #include "src/other_tools/ops_maps/content_cas_map.hpp" #include "src/other_tools/ops_maps/git_tree_fetch_map.hpp" #include "src/other_tools/utils/parse_archive.hpp" +#include "src/other_tools/utils/parse_git_tree.hpp" namespace { @@ -495,106 +497,30 @@ void DistdirCheckout(ExpressionPtr const& repo_desc, } // only do work if repo is archive type if (kCheckoutTypeMap.at(repo_type_str) == CheckoutType::Archive) { - // check mandatory fields - auto repo_desc_content = (*resolved_repo_desc)->At("content"); - if (not repo_desc_content) { - (*logger)(fmt::format( - "DistdirCheckout: Mandatory field \"content\" is " - "missing for repository 
{}", - nlohmann::json(dist_repo_name).dump()), - /*fatal=*/true); - return; - } - if (not repo_desc_content->get()->IsString()) { - (*logger)(fmt::format("DistdirCheckout: Unsupported value {} " - "for mandatory field \"content\" for " - "repository {}", - repo_desc_content->get()->ToString(), - nlohmann::json(dist_repo_name).dump()), - /*fatal=*/true); - return; - } - auto repo_desc_fetch = (*resolved_repo_desc)->At("fetch"); - if (not repo_desc_fetch) { - (*logger)(fmt::format("DistdirCheckout: Mandatory field " - "\"fetch\" is missing for repository {}", - nlohmann::json(dist_repo_name).dump()), - /*fatal=*/true); - return; - } - if (not repo_desc_fetch->get()->IsString()) { - (*logger)(fmt::format( - "DistdirCheckout: Unsupported value {} " - "for mandatory field \"fetch\" for repository {}", - repo_desc_fetch->get()->ToString(), - nlohmann::json(dist_repo_name).dump()), - /*fatal=*/true); - return; - } - auto repo_desc_distfile = - (*resolved_repo_desc)->Get("distfile", Expression::none_t{}); - auto repo_desc_sha256 = - (*resolved_repo_desc)->Get("sha256", Expression::none_t{}); - auto repo_desc_sha512 = - (*resolved_repo_desc)->Get("sha512", Expression::none_t{}); - - // check optional mirrors - auto repo_desc_mirrors = - (*resolved_repo_desc)->Get("mirrors", Expression::list_t{}); - std::vector mirrors{}; - if (repo_desc_mirrors->IsList()) { - mirrors.reserve(repo_desc_mirrors->List().size()); - for (auto const& elem : repo_desc_mirrors->List()) { - if (not elem->IsString()) { - (*logger)(fmt::format( - "DistdirCheckout: Unsupported list entry " - "{} in optional field \"mirrors\" for " - "repository {}", - elem->ToString(), - nlohmann::json(dist_repo_name).dump()), - /*fatal=*/true); - return; - } - mirrors.emplace_back(elem->String()); - } - } - else { - (*logger)(fmt::format("DistdirCheckout: Optional field " - "\"mirrors\" for repository {} should be " - "a list of strings, but found: {}", + auto const archive = + ParseArchiveContent(*resolved_repo_desc, 
dist_repo_name); + if (not archive) { + (*logger)(fmt::format("DistdirCheckout: an error occurred " + "while parsing repository {}\n{}", nlohmann::json(dist_repo_name).dump(), - repo_desc_mirrors->ToString()), + archive.error()), /*fatal=*/true); return; } - ArchiveContent archive = { - .content = repo_desc_content->get()->String(), - .distfile = - repo_desc_distfile->IsString() - ? std::make_optional(repo_desc_distfile->String()) - : std::nullopt, - .fetch_url = repo_desc_fetch->get()->String(), - .mirrors = std::move(mirrors), - .sha256 = repo_desc_sha256->IsString() - ? std::make_optional(repo_desc_sha256->String()) - : std::nullopt, - .sha512 = repo_desc_sha512->IsString() - ? std::make_optional(repo_desc_sha512->String()) - : std::nullopt, - .origin = dist_repo_name}; - // add to distdir content map auto repo_distfile = - (archive.distfile ? archive.distfile.value() - : std::filesystem::path(archive.fetch_url) - .filename() - .string()); + (archive->distfile ? archive->distfile.value() + : std::filesystem::path(archive->fetch_url) + .filename() + .string()); distdir_content_for_id->insert_or_assign( - repo_distfile, std::make_pair(archive.content, false)); - distdir_content->insert_or_assign(repo_distfile, archive.content); + repo_distfile, + std::make_pair(archive->content_hash.Hash(), false)); + distdir_content->insert_or_assign(repo_distfile, + archive->content_hash.Hash()); // add to fetch list - dist_repos_to_fetch->emplace_back(std::move(archive)); + dist_repos_to_fetch->emplace_back(*archive); } } // get hash of distdir content @@ -646,81 +572,13 @@ void GitTreeCheckout(ExpressionPtr const& repo_desc, gsl::not_null const& ts, ReposToSetupMap::SetterPtr const& setter, ReposToSetupMap::LoggerPtr const& logger) { - // enforce mandatory fields - auto repo_desc_hash = repo_desc->At("id"); - if (not repo_desc_hash) { - (*logger)("GitTreeCheckout: Mandatory field \"id\" is missing", - /*fatal=*/true); - return; - } - if (not repo_desc_hash->get()->IsString()) 
{ - (*logger)(fmt::format("GitTreeCheckout: Unsupported value {} for " - "mandatory field \"id\"", - repo_desc_hash->get()->ToString()), - /*fatal=*/true); - return; - } - auto repo_desc_cmd = repo_desc->At("cmd"); - if (not repo_desc_cmd) { - (*logger)("GitTreeCheckout: Mandatory field \"cmd\" is missing", - /*fatal=*/true); - return; - } - if (not repo_desc_cmd->get()->IsList()) { - (*logger)(fmt::format("GitTreeCheckout: Unsupported value {} for " - "mandatory field \"cmd\"", - repo_desc_cmd->get()->ToString()), - /*fatal=*/true); + auto tree_info = ParseGitTree(repo_desc); + if (not tree_info) { + (*logger)( + fmt::format("GitTreeCheckout: {}", std::move(tree_info).error()), + /*fatal=*/true); return; } - std::vector cmd{}; - for (auto const& token : repo_desc_cmd->get()->List()) { - if (token.IsNotNull() and token->IsString()) { - cmd.emplace_back(token->String()); - } - else { - (*logger)(fmt::format("GitTreeCheckout: Unsupported entry {} in " - "mandatory field \"cmd\"", - token->ToString()), - /*fatal=*/true); - return; - } - } - std::map env{}; - auto repo_desc_env = repo_desc->Get("env", Expression::none_t{}); - if (repo_desc_env.IsNotNull() and repo_desc_env->IsMap()) { - for (auto const& envar : repo_desc_env->Map().Items()) { - if (envar.second.IsNotNull() and envar.second->IsString()) { - env.insert({envar.first, envar.second->String()}); - } - else { - (*logger)(fmt::format("GitTreeCheckout: Unsupported value {} " - "for key {} in optional field \"envs\"", - envar.second->ToString(), - nlohmann::json(envar.first).dump()), - /*fatal=*/true); - return; - } - } - } - std::vector inherit_env{}; - auto repo_desc_inherit_env = - repo_desc->Get("inherit env", Expression::none_t{}); - if (repo_desc_inherit_env.IsNotNull() and repo_desc_inherit_env->IsList()) { - for (auto const& envvar : repo_desc_inherit_env->List()) { - if (envvar->IsString()) { - inherit_env.emplace_back(envvar->String()); - } - else { - (*logger)( - fmt::format("GitTreeCheckout: Not a 
variable name in the " - "specification of \"inherit env\": {}", - envvar->ToString()), - /*fatal=*/true); - return; - } - } - } // check "special" pragma auto repo_desc_pragma = repo_desc->At("pragma"); bool const& pragma_is_map = @@ -741,10 +599,7 @@ void GitTreeCheckout(ExpressionPtr const& repo_desc, pragma_absent->get()->Bool(); // populate struct TreeIdInfo tree_id_info = { - .tree_info = GitTreeInfo{.hash = repo_desc_hash->get()->String(), - .env_vars = std::move(env), - .inherit_env = std::move(inherit_env), - .command = std::move(cmd)}, + .tree_info = *std::move(tree_info), .ignore_special = pragma_special_value == PragmaSpecial::Ignore, .absent = not fetch_absent and pragma_absent_value}; // get the WS root as git tree diff --git a/src/other_tools/root_maps/TARGETS b/src/other_tools/root_maps/TARGETS index 49e1ee604..5340ef32d 100644 --- a/src/other_tools/root_maps/TARGETS +++ b/src/other_tools/root_maps/TARGETS @@ -6,28 +6,29 @@ , "deps": [ ["@", "gsl", "", "gsl"] , ["@", "json", "", "json"] - , ["src/other_tools/ops_maps", "import_to_git_map"] , ["src/buildtool/execution_api/common", "common"] - , ["src/other_tools/ops_maps", "content_cas_map"] , ["src/buildtool/serve_api/remote", "serve_api"] , ["src/buildtool/storage", "config"] , ["src/buildtool/storage", "storage"] + , ["src/other_tools/ops_maps", "content_cas_map"] + , ["src/other_tools/ops_maps", "import_to_git_map"] ] , "stage": ["src", "other_tools", "root_maps"] , "private-deps": - [ ["@", "fmt", "", "fmt"] - , "root_utils" + [ "root_utils" + , ["@", "fmt", "", "fmt"] + , ["src/buildtool/common", "artifact_digest_factory"] , ["src/buildtool/common", "common"] - , ["src/buildtool/execution_api/local", "config"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/file_system", "file_root"] , ["src/buildtool/file_system", "file_storage"] , ["src/buildtool/file_system", "git_repo"] , ["src/buildtool/file_system", "object_type"] , ["src/buildtool/multithreading", "task_system"] , 
["src/buildtool/storage", "fs_utils"] - , ["src/other_tools/ops_maps", "critical_git_op_map"] , ["src/other_tools/just_mr/progress_reporting", "progress"] , ["src/other_tools/just_mr/progress_reporting", "statistics"] + , ["src/other_tools/ops_maps", "critical_git_op_map"] , ["src/utils/cpp", "tmp_dir"] ] } @@ -42,25 +43,27 @@ , ["src/buildtool/common", "user_structs"] , ["src/buildtool/execution_api/common", "common"] , ["src/buildtool/serve_api/remote", "serve_api"] + , ["src/buildtool/storage", "config"] + , ["src/buildtool/storage", "storage"] , ["src/other_tools/just_mr", "mirrors"] , ["src/other_tools/just_mr/progress_reporting", "progress"] , ["src/other_tools/ops_maps", "critical_git_op_map"] , ["src/other_tools/ops_maps", "import_to_git_map"] , ["src/utils/cpp", "hash_combine"] - , ["src/utils/cpp", "path"] - , ["src/buildtool/storage", "config"] ] , "stage": ["src", "other_tools", "root_maps"] , "private-deps": - [ ["@", "fmt", "", "fmt"] - , "root_utils" + [ "root_utils" + , ["@", "fmt", "", "fmt"] + , ["src/buildtool/common", "artifact_digest_factory"] + , ["src/buildtool/crypto", "hash_function"] , ["src/buildtool/file_system", "file_root"] , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/multithreading", "task_system"] , ["src/buildtool/storage", "fs_utils"] , ["src/other_tools/git_operations", "git_repo_remote"] , ["src/other_tools/utils", "curl_url_handle"] - , ["src/utils/cpp", "tmp_dir"] + , ["src/utils/cpp", "path"] ] } , "fpath_git_map": @@ -74,17 +77,18 @@ , ["src/buildtool/execution_api/common", "common"] , ["src/buildtool/file_system/symlinks_map", "pragma_special"] , ["src/buildtool/file_system/symlinks_map", "resolve_symlinks_map"] + , ["src/buildtool/serve_api/remote", "serve_api"] + , ["src/buildtool/storage", "config"] + , ["src/buildtool/storage", "storage"] , ["src/other_tools/just_mr", "utils"] , ["src/other_tools/ops_maps", "import_to_git_map"] , ["src/utils/cpp", "hash_combine"] , ["src/utils/cpp", 
"path_hash"] - , ["src/buildtool/serve_api/remote", "serve_api"] - , ["src/buildtool/storage", "config"] ] , "stage": ["src", "other_tools", "root_maps"] , "private-deps": - [ ["@", "fmt", "", "fmt"] - , "root_utils" + [ "root_utils" + , ["@", "fmt", "", "fmt"] , ["src/buildtool/execution_api/local", "config"] , ["src/buildtool/file_system", "file_root"] , ["src/buildtool/file_system", "git_repo"] @@ -105,25 +109,24 @@ , ["src/buildtool/common", "user_structs"] , ["src/buildtool/execution_api/common", "common"] , ["src/buildtool/file_system/symlinks_map", "resolve_symlinks_map"] + , ["src/buildtool/serve_api/remote", "serve_api"] + , ["src/buildtool/storage", "config"] + , ["src/buildtool/storage", "storage"] , ["src/other_tools/just_mr", "mirrors"] , ["src/other_tools/just_mr/progress_reporting", "progress"] , ["src/other_tools/ops_maps", "content_cas_map"] , ["src/other_tools/ops_maps", "import_to_git_map"] - , ["src/buildtool/serve_api/remote", "serve_api"] - , ["src/buildtool/storage", "storage"] - , ["src/buildtool/storage", "config"] ] , "stage": ["src", "other_tools", "root_maps"] , "private-deps": - [ ["@", "fmt", "", "fmt"] - , "root_utils" + [ "root_utils" + , ["@", "fmt", "", "fmt"] + , ["src/buildtool/crypto", "hash_info"] , ["src/buildtool/file_system", "file_root"] , ["src/buildtool/file_system", "file_storage"] - , ["src/buildtool/file_system", "git_repo"] , ["src/buildtool/file_system/symlinks_map", "pragma_special"] , ["src/buildtool/multithreading", "task_system"] , ["src/buildtool/storage", "fs_utils"] - , ["src/other_tools/git_operations", "git_repo_remote"] , ["src/other_tools/utils", "content"] , ["src/utils/archive", "archive_ops"] ] @@ -136,19 +139,21 @@ , "deps": [ ["@", "gsl", "", "gsl"] , ["@", "json", "", "json"] - , ["src/other_tools/ops_maps", "content_cas_map"] - , ["src/other_tools/ops_maps", "import_to_git_map"] , ["src/buildtool/serve_api/remote", "serve_api"] , ["src/buildtool/storage", "config"] , ["src/buildtool/storage", 
"storage"] + , ["src/other_tools/ops_maps", "content_cas_map"] + , ["src/other_tools/ops_maps", "import_to_git_map"] ] , "private-deps": - [ ["@", "fmt", "", "fmt"] + [ "root_utils" + , ["@", "fmt", "", "fmt"] + , ["src/buildtool/crypto", "hash_info"] , ["src/buildtool/file_system", "file_root"] + , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/storage", "fs_utils"] , ["src/utils/cpp", "tmp_dir"] - , "root_utils" ] , "stage": ["src", "other_tools", "root_maps"] } @@ -161,19 +166,22 @@ [ ["@", "gsl", "", "gsl"] , ["@", "json", "", "json"] , ["src/buildtool/execution_api/common", "common"] + , ["src/buildtool/serve_api/remote", "serve_api"] + , ["src/buildtool/storage", "config"] + , ["src/buildtool/storage", "storage"] , ["src/other_tools/ops_maps", "critical_git_op_map"] , ["src/other_tools/ops_maps", "git_tree_fetch_map"] , ["src/other_tools/ops_maps", "import_to_git_map"] , ["src/utils/cpp", "hash_combine"] - , ["src/buildtool/serve_api/remote", "serve_api"] - , ["src/buildtool/storage", "config"] ] , "stage": ["src", "other_tools", "root_maps"] , "private-deps": - [ ["@", "fmt", "", "fmt"] - , "root_utils" + [ "root_utils" + , ["@", "fmt", "", "fmt"] + , ["src/buildtool/common", "common"] , ["src/buildtool/common", "config"] - , ["src/buildtool/execution_api/git", "git"] + , ["src/buildtool/crypto", "hash_info"] + , ["src/buildtool/execution_api/serve", "mr_git_api"] , ["src/buildtool/file_system", "file_root"] ] } @@ -187,13 +195,18 @@ , ["src/buildtool/execution_api/common", "common"] , ["src/buildtool/multithreading", "async_map_consumer"] , ["src/buildtool/serve_api/remote", "serve_api"] + , ["src/buildtool/storage", "config"] + , ["src/buildtool/storage", "storage"] ] , "stage": ["src", "other_tools", "root_maps"] , "private-deps": [ ["@", "fmt", "", "fmt"] + , ["src/buildtool/common", "artifact_digest_factory"] , ["src/buildtool/common", "common"] , ["src/buildtool/common", "config"] - , 
["src/buildtool/execution_api/git", "git"] + , ["src/buildtool/crypto", "hash_function"] + , ["src/buildtool/execution_api/serve", "mr_git_api"] + , ["src/buildtool/execution_api/serve", "utils"] , ["src/buildtool/file_system", "object_type"] ] } diff --git a/src/other_tools/root_maps/commit_git_map.cpp b/src/other_tools/root_maps/commit_git_map.cpp index 2d64d599c..8f96eb824 100644 --- a/src/other_tools/root_maps/commit_git_map.cpp +++ b/src/other_tools/root_maps/commit_git_map.cpp @@ -19,6 +19,8 @@ #include #include "fmt/core.h" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/file_root.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/multithreading/task_system.hpp" @@ -47,23 +49,29 @@ namespace { } [[nodiscard]] auto IsCacheGitRoot( - StorageConfig const& storage_config, + StorageConfig const& native_storage_config, std::filesystem::path const& repo_root) noexcept -> bool { return std::filesystem::absolute(ToNormalPath(repo_root)) == - std::filesystem::absolute(ToNormalPath(storage_config.GitRoot())); + std::filesystem::absolute( + ToNormalPath(native_storage_config.GitRoot())); } /// \brief Helper function for ensuring the serve endpoint, if given, has the /// root if it was marked absent. /// It guarantees the logger is called exactly once with fatal on failure, and /// the setter on success. 
-void EnsureRootAsAbsent(std::string const& tree_id, - std::filesystem::path const& repo_root, - GitRepoInfo const& repo_info, - ServeApi const* serve, - IExecutionApi const* remote_api, - CommitGitMap::SetterPtr const& ws_setter, - CommitGitMap::LoggerPtr const& logger) { +void EnsureRootAsAbsent( + std::string const& tree_id, + std::filesystem::path const& repo_root, + GitRepoInfo const& repo_info, + ServeApi const* serve, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + gsl::not_null const& local_api, + IExecutionApi const* remote_api, + CommitGitMap::SetterPtr const& ws_setter, + CommitGitMap::LoggerPtr const& logger) { // this is an absent root if (serve != nullptr) { // check if the serve endpoint has this root @@ -74,13 +82,13 @@ void EnsureRootAsAbsent(std::string const& tree_id, if (not *has_tree) { // try to see if serve endpoint has the information to prepare the // root itself - auto serve_result = + auto const serve_result = serve->RetrieveTreeFromCommit(repo_info.hash, repo_info.subdir, /*sync_tree = */ false); if (serve_result) { // if serve has set up the tree, it must match what we expect - auto const& served_tree_id = *serve_result; + auto const& served_tree_id = serve_result->tree; if (tree_id != served_tree_id) { (*logger)(fmt::format("Mismatch in served root tree " "id:\nexpected {}, but got {}", @@ -114,6 +122,10 @@ void EnsureRootAsAbsent(std::string const& tree_id, if (not EnsureAbsentRootOnServe(*serve, tree_id, repo_root, + native_storage_config, + compat_storage_config, + compat_storage, + &*local_api, remote_api, logger, true /*no_sync_is_fatal*/)) { @@ -144,7 +156,7 @@ void EnsureRootAsAbsent(std::string const& tree_id, void WriteIdFileAndSetWSRoot(std::string const& root_tree_id, std::string const& subdir, bool ignore_special, - StorageConfig const& storage_config, + StorageConfig const& native_storage_config, GitCASPtr const& git_cas, std::filesystem::path 
const& tree_id_file, CommitGitMap::SetterPtr const& ws_setter, @@ -161,7 +173,7 @@ void WriteIdFileAndSetWSRoot(std::string const& root_tree_id, auto git_repo = GitRepoRemote::Open(git_cas); // link fake repo to odb if (not git_repo) { (*logger)(fmt::format("Could not open cache object database {}", - storage_config.GitRoot().string()), + native_storage_config.GitRoot().string()), /*fatal=*/true); return; } @@ -184,7 +196,7 @@ void WriteIdFileAndSetWSRoot(std::string const& root_tree_id, ? FileRoot::kGitTreeIgnoreSpecialMarker : FileRoot::kGitTreeMarker, *tree_id, - storage_config.GitRoot().string()}), + native_storage_config.GitRoot().string()}), false)); } @@ -267,7 +279,7 @@ void TagAndSetRoot(std::filesystem::path const& repo_root, void TakeCommitFromOlderGeneration( std::filesystem::path const& source, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, std::filesystem::path const& repo_root, GitRepoInfo const& repo_info, GitCASPtr const& git_cas, @@ -289,7 +301,7 @@ void TakeCommitFromOlderGeneration( [logger, git_cas, repo_root, - storage_config, + native_storage_config, source, repo_info, critical_git_op_map, @@ -319,7 +331,7 @@ void TakeCommitFromOlderGeneration( fatal); }); if (not git_repo->LocalFetchViaTmpRepo( - *storage_config, source, tag, fetch_logger)) { + *native_storage_config, source, tag, fetch_logger)) { return; } TagAndSetRoot(repo_root, @@ -347,7 +359,7 @@ void NetworkFetchAndSetPresentRoot( std::string const& fetch_repo, MirrorsPtr const& additional_mirrors, GitCASPtr const& git_cas, - StorageConfig const& storage_config, + StorageConfig const& native_storage_config, gsl::not_null const& critical_git_op_map, std::string const& git_bin, std::vector const& launcher, @@ -408,7 +420,7 @@ void NetworkFetchAndSetPresentRoot( err_messages += fmt::format( "While attempting fetch from URL {}:\n{}\n", mirror, msg); }); - if (git_repo->FetchViaTmpRepo(storage_config, + if 
(git_repo->FetchViaTmpRepo(native_storage_config, mirror, repo_info.branch, repo_info.inherit_env, @@ -454,7 +466,7 @@ void NetworkFetchAndSetPresentRoot( return; } // if witnessing repository is the Git cache, then also tag the commit - if (IsCacheGitRoot(storage_config, repo_root)) { + if (IsCacheGitRoot(native_storage_config, repo_root)) { TagAndSetRoot(repo_root, repo_info, true, @@ -503,24 +515,27 @@ void NetworkFetchAndSetPresentRoot( /// the root. /// It guarantees the logger is called exactly once with fatal on failure, and /// the setter on success. -void EnsureCommit(GitRepoInfo const& repo_info, - std::filesystem::path const& repo_root, - std::string const& fetch_repo, - MirrorsPtr const& additional_mirrors, - GitCASPtr const& git_cas, - gsl::not_null const& critical_git_op_map, - gsl::not_null const& import_to_git_map, - std::string const& git_bin, - std::vector const& launcher, - ServeApi const* serve, - gsl::not_null const& storage_config, - gsl::not_null const& local_api, - IExecutionApi const* remote_api, - bool fetch_absent, - gsl::not_null const& progress, - gsl::not_null const& ts, - CommitGitMap::SetterPtr const& ws_setter, - CommitGitMap::LoggerPtr const& logger) { +void EnsureCommit( + GitRepoInfo const& repo_info, + std::filesystem::path const& repo_root, + std::string const& fetch_repo, + MirrorsPtr const& additional_mirrors, + GitCASPtr const& git_cas, + gsl::not_null const& critical_git_op_map, + gsl::not_null const& import_to_git_map, + std::string const& git_bin, + std::vector const& launcher, + ServeApi const* serve, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + gsl::not_null const& local_api, + IExecutionApi const* remote_api, + bool fetch_absent, + gsl::not_null const& progress, + gsl::not_null const& ts, + CommitGitMap::SetterPtr const& ws_setter, + CommitGitMap::LoggerPtr const& logger) { // link fake repo to odb auto git_repo = 
GitRepoRemote::Open(git_cas); if (not git_repo) { @@ -541,8 +556,8 @@ void EnsureCommit(GitRepoInfo const& repo_info, return; } if (not is_commit_present.value()) { - auto tree_id_file = - StorageUtils::GetCommitTreeIDFile(*storage_config, repo_info.hash); + auto tree_id_file = StorageUtils::GetCommitTreeIDFile( + *native_storage_config, repo_info.hash); // Check if we have stored a file association between commit and tree; // if an association file exists, the respective tree MUST be in the // Git cache @@ -555,18 +570,20 @@ void EnsureCommit(GitRepoInfo const& repo_info, /*fatal=*/true); return; } - auto just_git_cas = GitCAS::Open(storage_config->GitRoot()); + auto just_git_cas = GitCAS::Open(native_storage_config->GitRoot()); if (not just_git_cas) { - (*logger)(fmt::format("Could not open Git cache database {}", - storage_config->GitRoot().string()), - /*fatal=*/true); + (*logger)( + fmt::format("Could not open Git cache database {}", + native_storage_config->GitRoot().string()), + /*fatal=*/true); return; } auto just_git_repo = GitRepo::Open(just_git_cas); if (not just_git_repo) { - (*logger)(fmt::format("Could not open Git cache repository {}", - storage_config->GitRoot().string()), - /*fatal=*/true); + (*logger)( + fmt::format("Could not open Git cache repository {}", + native_storage_config->GitRoot().string()), + /*fatal=*/true); return; } // extract the subdir tree @@ -588,13 +605,18 @@ void EnsureCommit(GitRepoInfo const& repo_info, // set the workspace root if (repo_info.absent and not fetch_absent) { // try by all available means to generate & set the absent root - EnsureRootAsAbsent(*tree_id, - storage_config->GitRoot(), - repo_info, - serve, - remote_api, - ws_setter, - logger); + EnsureRootAsAbsent( + *tree_id, + native_storage_config->GitRoot(), /*repo_root*/ + repo_info, + serve, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, + remote_api, + ws_setter, + logger); } else { // this root is present @@ -604,7 +626,7 
@@ void EnsureCommit(GitRepoInfo const& repo_info, ? FileRoot::kGitTreeIgnoreSpecialMarker : FileRoot::kGitTreeMarker, *tree_id, - storage_config->GitRoot().string()}), + native_storage_config->GitRoot().string()}), /*is_cache_hit=*/false)); } // done! @@ -613,9 +635,9 @@ void EnsureCommit(GitRepoInfo const& repo_info, // Check older generations for presence of the commit for (std::size_t generation = 1; - generation < storage_config->num_generations; + generation < native_storage_config->num_generations; generation++) { - auto old = storage_config->GitGenerationRoot(generation); + auto old = native_storage_config->GitGenerationRoot(generation); if (FileSystemManager::IsDirectory(old)) { auto old_repo = GitRepo::Open(old); auto no_logging = std::make_shared( @@ -625,7 +647,7 @@ void EnsureCommit(GitRepoInfo const& repo_info, old_repo->CheckCommitExists(repo_info.hash, no_logging); if (check_result and *check_result) { TakeCommitFromOlderGeneration(old, - storage_config, + native_storage_config, repo_root, repo_info, git_cas, @@ -649,7 +671,7 @@ void EnsureCommit(GitRepoInfo const& repo_info, if (serve != nullptr) { // if root purely absent, request only the subdir tree if (repo_info.absent and not fetch_absent) { - auto serve_result = + auto const serve_result = serve->RetrieveTreeFromCommit(repo_info.hash, repo_info.subdir, /*sync_tree = */ false); @@ -661,7 +683,7 @@ void EnsureCommit(GitRepoInfo const& repo_info, {repo_info.ignore_special ? 
FileRoot::kGitTreeIgnoreSpecialMarker : FileRoot::kGitTreeMarker, - *std::move(serve_result)}), + serve_result->tree}), /*is_cache_hit=*/false)); return; } @@ -678,33 +700,35 @@ void EnsureCommit(GitRepoInfo const& repo_info, // otherwise, request (and sync) the whole commit tree, to ensure // we maintain the id file association else { - auto serve_result = + auto const serve_result = serve->RetrieveTreeFromCommit(repo_info.hash, /*subdir = */ ".", /*sync_tree = */ true); if (serve_result) { - auto const& root_tree_id = *serve_result; + auto const root_tree_id = serve_result->tree; + auto const remote_digest = serve_result->digest; // verify if we know the tree already in the local Git cache - GitOpKey op_key = { - .params = - { - storage_config->GitRoot(), // target_path - "", // git_hash - std::nullopt, // message - std::nullopt, // source_path - true // init_bare - }, - .op_type = GitOpType::ENSURE_INIT}; + GitOpKey op_key = {.params = + { + native_storage_config + ->GitRoot(), // target_path + "", // git_hash + std::nullopt, // message + std::nullopt, // source_path + true // init_bare + }, + .op_type = GitOpType::ENSURE_INIT}; critical_git_op_map->ConsumeAfterKeysReady( ts, {std::move(op_key)}, [root_tree_id, + remote_digest, tree_id_file, repo_info, repo_root, fetch_repo, additional_mirrors, - storage_config, + native_storage_config, git_cas, critical_git_op_map, import_to_git_map, @@ -728,28 +752,31 @@ void EnsureCommit(GitRepoInfo const& repo_info, GitRepoRemote::Open(op_result.git_cas); if (not just_git_repo) { (*logger)( - fmt::format( - "Could not open Git " - "cache repository " - "{}", - storage_config->GitRoot().string()), + fmt::format("Could not open Git " + "cache repository " + "{}", + native_storage_config->GitRoot() + .string()), /*fatal=*/true); return; } // check tree existence - auto wrapped_logger = std::make_shared< - AsyncMapConsumerLogger>( - [logger, storage_config, tree = root_tree_id]( - auto const& msg, bool fatal) { - (*logger)( - 
fmt::format( - "While verifying presence of " - "tree {} in repository {}:\n{}", - tree, - storage_config->GitRoot().string(), - msg), - fatal); - }); + auto wrapped_logger = + std::make_shared( + [logger, + native_storage_config, + tree = root_tree_id](auto const& msg, + bool fatal) { + (*logger)( + fmt::format( + "While verifying presence of " + "tree {} in repository {}:\n{}", + tree, + native_storage_config->GitRoot() + .string(), + msg), + fatal); + }); auto tree_present = just_git_repo->CheckTreeExists( root_tree_id, wrapped_logger); if (not tree_present) { @@ -763,7 +790,7 @@ void EnsureCommit(GitRepoInfo const& repo_info, root_tree_id, repo_info.subdir, repo_info.ignore_special, - *storage_config, + *native_storage_config, op_result.git_cas, tree_id_file, ws_setter, @@ -774,7 +801,7 @@ void EnsureCommit(GitRepoInfo const& repo_info, // now check if the tree is in the local checkout, // if this checkout is not our Git cache; this can // save an unnecessary remote CAS call - if (not IsCacheGitRoot(*storage_config, + if (not IsCacheGitRoot(*native_storage_config, repo_root)) { auto git_repo = GitRepoRemote::Open(git_cas); if (not git_repo) { @@ -850,19 +877,19 @@ void EnsureCommit(GitRepoInfo const& repo_info, } } - // try to get root tree from remote CAS - auto root_digest = ArtifactDigest{ - root_tree_id, 0, /*is_tree=*/true}; - if (remote_api != nullptr and + // try to get root tree from remote CAS; use the + // digest received from serve; whether native or + // compatible, it will either way be imported to Git + if (remote_api != nullptr and remote_digest and remote_api->RetrieveToCas( {Artifact::ObjectInfo{ - .digest = root_digest, + .digest = *remote_digest, .type = ObjectType::Tree}}, *local_api)) { progress->TaskTracker().Stop(repo_info.origin); // Move tree from local CAS to local Git storage auto tmp_dir = - storage_config->CreateTypedTmpDir( + native_storage_config->CreateTypedTmpDir( "fetch-absent-root"); if (not tmp_dir) { (*logger)( @@ -877,7 
+904,7 @@ void EnsureCommit(GitRepoInfo const& repo_info, } if (not local_api->RetrieveToPaths( {Artifact::ObjectInfo{ - .digest = root_digest, + .digest = *remote_digest, .type = ObjectType::Tree}}, {tmp_dir->GetPath()})) { (*logger)(fmt::format( @@ -895,7 +922,7 @@ void EnsureCommit(GitRepoInfo const& repo_info, {std::move(c_info)}, [tmp_dir, // keep tmp_dir alive root_tree_id, - storage_config, + native_storage_config, subdir = repo_info.subdir, ignore_special = repo_info.ignore_special, just_git_cas = op_result.git_cas, @@ -924,14 +951,15 @@ void EnsureCommit(GitRepoInfo const& repo_info, // write association to id file, get // subdir tree, and set the workspace // root as present - WriteIdFileAndSetWSRoot(root_tree_id, - subdir, - ignore_special, - *storage_config, - just_git_cas, - tree_id_file, - ws_setter, - logger); + WriteIdFileAndSetWSRoot( + root_tree_id, + subdir, + ignore_special, + *native_storage_config, + just_git_cas, + tree_id_file, + ws_setter, + logger); }, [logger, tmp_dir, root_tree_id]( auto const& msg, bool fatal) { @@ -955,22 +983,24 @@ void EnsureCommit(GitRepoInfo const& repo_info, root_tree_id), /*fatal=*/false); - NetworkFetchAndSetPresentRoot(repo_info, - repo_root, - fetch_repo, - additional_mirrors, - git_cas, - *storage_config, - critical_git_op_map, - git_bin, - launcher, - fetch_absent, - progress, - ts, - ws_setter, - logger); + NetworkFetchAndSetPresentRoot( + repo_info, + repo_root, + fetch_repo, + additional_mirrors, + git_cas, + *native_storage_config, + critical_git_op_map, + git_bin, + launcher, + fetch_absent, + progress, + ts, + ws_setter, + logger); }, - [logger, target_path = storage_config->GitRoot()]( + [logger, + target_path = native_storage_config->GitRoot()]( auto const& msg, bool fatal) { (*logger)(fmt::format("While running critical Git " "op ENSURE_INIT bare for " @@ -1001,7 +1031,7 @@ void EnsureCommit(GitRepoInfo const& repo_info, fetch_repo, additional_mirrors, git_cas, - *storage_config, + 
*native_storage_config, critical_git_op_map, git_bin, launcher, @@ -1035,6 +1065,10 @@ void EnsureCommit(GitRepoInfo const& repo_info, repo_root, repo_info, serve, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, ws_setter, logger); @@ -1064,7 +1098,9 @@ auto CreateCommitGitMap( std::string const& git_bin, std::vector const& launcher, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, gsl::not_null const& local_api, IExecutionApi const* remote_api, bool fetch_absent, @@ -1077,7 +1113,9 @@ auto CreateCommitGitMap( git_bin, launcher, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, local_api, remote_api, fetch_absent, @@ -1094,7 +1132,7 @@ auto CreateCommitGitMap( fetch_repo = std::filesystem::absolute(*fetch_repo_path).string(); } std::filesystem::path repo_root = StorageUtils::GetGitRoot( - *storage_config, just_mr_paths, fetch_repo); + *native_storage_config, just_mr_paths, fetch_repo); // ensure git repo // define Git operation to be done GitOpKey op_key = { @@ -1120,7 +1158,9 @@ auto CreateCommitGitMap( git_bin, launcher, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, local_api, remote_api, fetch_absent, @@ -1155,7 +1195,9 @@ auto CreateCommitGitMap( git_bin, launcher, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, local_api, remote_api, fetch_absent, diff --git a/src/other_tools/root_maps/commit_git_map.hpp b/src/other_tools/root_maps/commit_git_map.hpp index 1916ce139..fad046143 100644 --- a/src/other_tools/root_maps/commit_git_map.hpp +++ b/src/other_tools/root_maps/commit_git_map.hpp @@ -27,6 +27,7 @@ #include "src/buildtool/execution_api/common/execution_api.hpp" #include "src/buildtool/serve_api/remote/serve_api.hpp" #include 
"src/buildtool/storage/config.hpp" +#include "src/buildtool/storage/storage.hpp" #include "src/other_tools/just_mr/mirrors.hpp" #include "src/other_tools/just_mr/progress_reporting/progress.hpp" #include "src/other_tools/ops_maps/critical_git_op_map.hpp" @@ -35,14 +36,14 @@ struct GitRepoInfo { // hash can be a commit or tree - std::string hash{}; /* key */ - std::string repo_url{}; - std::string branch{}; - std::string subdir{}; /* key */ - std::vector inherit_env{}; - std::vector mirrors{}; + std::string hash; /* key */ + std::string repo_url; + std::string branch; + std::string subdir; /* key */ + std::vector inherit_env; + std::vector mirrors; // name of repository for which work is done; used in progress reporting - std::string origin{}; + std::string origin; // create root that ignores symlinks bool ignore_special{}; /* key */ // create an absent root @@ -84,7 +85,9 @@ using CommitGitMap = std::string const& git_bin, std::vector const& launcher, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, gsl::not_null const& local_api, IExecutionApi const* remote_api, bool fetch_absent, diff --git a/src/other_tools/root_maps/content_git_map.cpp b/src/other_tools/root_maps/content_git_map.cpp index d39a70e58..2338c066e 100644 --- a/src/other_tools/root_maps/content_git_map.cpp +++ b/src/other_tools/root_maps/content_git_map.cpp @@ -15,6 +15,7 @@ #include "src/other_tools/root_maps/content_git_map.hpp" #include "fmt/core.h" +#include "src/buildtool/crypto/hash_info.hpp" #include "src/buildtool/file_system/file_root.hpp" #include "src/buildtool/file_system/file_storage.hpp" #include "src/buildtool/file_system/symlinks_map/pragma_special.hpp" @@ -51,7 +52,10 @@ void EnsureRootAsAbsent( std::string const& tree_id, ArchiveRepoInfo const& key, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& 
native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + IExecutionApi const* local_api, IExecutionApi const* remote_api, bool is_cache_hit, ContentGitMap::SetterPtr const& ws_setter, @@ -67,21 +71,20 @@ void EnsureRootAsAbsent( // try to see if serve endpoint has the information to prepare the // root itself; this is redundant if root is not already cached if (is_cache_hit) { - auto serve_result = - serve->RetrieveTreeFromArchive(key.archive.content, - key.repo_type, - key.subdir, - key.pragma_special, - /*sync_tree=*/false); + auto const serve_result = serve->RetrieveTreeFromArchive( + key.archive.content_hash.Hash(), + key.repo_type, + key.subdir, + key.pragma_special, + /*sync_tree=*/false); if (serve_result) { // if serve has set up the tree, it must match what we // expect - auto const& served_tree_id = *serve_result; - if (tree_id != served_tree_id) { + if (tree_id != serve_result->tree) { (*logger)(fmt::format("Mismatch in served root tree " "id:\nexpected {}, but got {}", tree_id, - served_tree_id), + serve_result->tree), /*fatal=*/true); return; } @@ -93,7 +96,7 @@ void EnsureRootAsAbsent( (*logger)( fmt::format("Serve endpoint failed to set up " "root from known archive content {}", - key.archive.content), + key.archive.content_hash.Hash()), /*fatal=*/true); return; } @@ -113,7 +116,11 @@ void EnsureRootAsAbsent( if (not EnsureAbsentRootOnServe( *serve, tree_id, - storage_config->GitRoot(), + native_storage_config->GitRoot(), /*repo_path*/ + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, logger, /*no_sync_is_fatal=*/true)) { @@ -136,12 +143,17 @@ void EnsureRootAsAbsent( // the tree is known locally, so we can upload it to remote // CAS for the serve endpoint to retrieve it and set up the // root - if (not EnsureAbsentRootOnServe(*serve, - tree_id, - storage_config->GitRoot(), - remote_api, - logger, - /*no_sync_is_fatal=*/true)) { + if (not 
EnsureAbsentRootOnServe( + *serve, + tree_id, + native_storage_config->GitRoot(), /*repo_root*/ + native_storage_config, + compat_storage_config, + compat_storage, + local_api, + remote_api, + logger, + /*no_sync_is_fatal=*/true)) { return; } } @@ -170,7 +182,10 @@ void ResolveContentTree( bool is_cache_hit, bool is_absent, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + IExecutionApi const* local_api, IExecutionApi const* remote_api, gsl::not_null const& critical_git_op_map, gsl::not_null const& resolve_symlinks_map, @@ -180,7 +195,7 @@ void ResolveContentTree( if (key.pragma_special) { // get the resolved tree auto tree_id_file = StorageUtils::GetResolvedTreeIDFile( - *storage_config, tree_hash, *key.pragma_special); + *native_storage_config, tree_hash, *key.pragma_special); if (FileSystemManager::Exists(tree_id_file)) { // read resolved tree id auto resolved_tree_id = FileSystemManager::ReadFile(tree_id_file); @@ -196,18 +211,22 @@ void ResolveContentTree( EnsureRootAsAbsent(*resolved_tree_id, key, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, is_cache_hit, ws_setter, logger); } else { - (*ws_setter)(std::pair( - nlohmann::json::array({FileRoot::kGitTreeMarker, - *resolved_tree_id, - storage_config->GitRoot().string()}), - /*is_cache_hit=*/is_cache_hit)); + (*ws_setter)( + std::pair(nlohmann::json::array( + {FileRoot::kGitTreeMarker, + *resolved_tree_id, + native_storage_config->GitRoot().string()}), + /*is_cache_hit=*/is_cache_hit)); } } else { @@ -228,7 +247,10 @@ void ResolveContentTree( key, is_absent, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, ts, ws_setter, @@ -238,7 +260,8 @@ void ResolveContentTree( GitOpKey op_key = { .params = { - storage_config->GitRoot(), // 
target_path + native_storage_config + ->GitRoot(), // target_path resolved_tree_id, // git_hash "Keep referenced tree alive" // message }, @@ -251,7 +274,10 @@ void ResolveContentTree( tree_id_file, is_absent, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, is_cache_hit, ws_setter, @@ -278,7 +304,10 @@ void ResolveContentTree( EnsureRootAsAbsent(resolved_tree_id, key, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, is_cache_hit, ws_setter, @@ -289,11 +318,13 @@ void ResolveContentTree( nlohmann::json::array( {FileRoot::kGitTreeMarker, resolved_tree_id, - storage_config->GitRoot().string()}), + native_storage_config->GitRoot() + .string()}), /*is_cache_hit=*/is_cache_hit)); } }, - [logger, target_path = storage_config->GitRoot()]( + [logger, + target_path = native_storage_config->GitRoot()]( auto const& msg, bool fatal) { (*logger)( fmt::format("While running critical Git op " @@ -303,11 +334,11 @@ void ResolveContentTree( fatal); }); }, - [logger, content = key.archive.content](auto const& msg, - bool fatal) { + [logger, hash = key.archive.content_hash.Hash()]( + auto const& msg, bool fatal) { (*logger)(fmt::format("While resolving symlinks for " "content {}:\n{}", - content, + hash, msg), fatal); }); @@ -319,18 +350,22 @@ void ResolveContentTree( EnsureRootAsAbsent(tree_hash, key, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, is_cache_hit, ws_setter, logger); } else { - (*ws_setter)(std::pair( - nlohmann::json::array({FileRoot::kGitTreeMarker, - tree_hash, - storage_config->GitRoot().string()}), - /*is_cache_hit=*/is_cache_hit)); + (*ws_setter)( + std::pair(nlohmann::json::array( + {FileRoot::kGitTreeMarker, + tree_hash, + native_storage_config->GitRoot().string()}), + /*is_cache_hit=*/is_cache_hit)); } } } @@ -345,7 +380,10 @@ void 
WriteIdFileAndSetWSRoot( std::filesystem::path const& archive_tree_id_file, bool is_absent, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + IExecutionApi const* local_api, IExecutionApi const* remote_api, gsl::not_null const& critical_git_op_map, gsl::not_null const& resolve_symlinks_map, @@ -390,7 +428,10 @@ void WriteIdFileAndSetWSRoot( false, /*is_cache_hit*/ is_absent, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, critical_git_op_map, resolve_symlinks_map, @@ -408,7 +449,10 @@ void ExtractAndImportToGit( std::filesystem::path const& archive_tree_id_file, bool is_absent, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + IExecutionApi const* local_api, IExecutionApi const* remote_api, gsl::not_null const& critical_git_op_map, gsl::not_null const& import_to_git_map, @@ -417,11 +461,11 @@ void ExtractAndImportToGit( ContentGitMap::SetterPtr const& setter, ContentGitMap::LoggerPtr const& logger) { // extract archive - auto tmp_dir = storage_config->CreateTypedTmpDir(key.repo_type); + auto tmp_dir = native_storage_config->CreateTypedTmpDir(key.repo_type); if (not tmp_dir) { (*logger)(fmt::format("Failed to create tmp path for {} target {}", key.repo_type, - key.archive.content), + key.archive.content_hash.Hash()), /*fatal=*/true); return; } @@ -436,7 +480,8 @@ void ExtractAndImportToGit( return; } // import to git - CommitInfo c_info{tmp_dir->GetPath(), key.repo_type, key.archive.content}; + CommitInfo c_info{ + tmp_dir->GetPath(), key.repo_type, key.archive.content_hash.Hash()}; import_to_git_map->ConsumeAfterKeysReady( ts, {std::move(c_info)}, @@ -445,7 +490,10 @@ void ExtractAndImportToGit( key, is_absent, serve, - 
storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, critical_git_op_map, resolve_symlinks_map, @@ -467,7 +515,10 @@ void ExtractAndImportToGit( archive_tree_id_file, is_absent, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, critical_git_op_map, resolve_symlinks_map, @@ -485,13 +536,16 @@ void ExtractAndImportToGit( } auto IdFileExistsInOlderGeneration( - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, ArchiveRepoInfo const& key) -> std::optional { for (std::size_t generation = 1; - generation < storage_config->num_generations; + generation < native_storage_config->num_generations; generation++) { - auto archive_tree_id_file = StorageUtils::GetArchiveTreeIDFile( - *storage_config, key.repo_type, key.archive.content, generation); + auto archive_tree_id_file = + StorageUtils::GetArchiveTreeIDFile(*native_storage_config, + key.repo_type, + key.archive.content_hash.Hash(), + generation); if (FileSystemManager::Exists(archive_tree_id_file)) { return generation; } @@ -504,9 +558,12 @@ void HandleLocallyKnownTree( std::filesystem::path const& archive_tree_id_file, bool fetch_absent, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, gsl::not_null const& resolve_symlinks_map, gsl::not_null const& critical_git_op_map, + IExecutionApi const* local_api, IExecutionApi const* remote_api, gsl::not_null const& ts, ContentGitMap::SetterPtr const& setter, @@ -523,11 +580,11 @@ void HandleLocallyKnownTree( // define Git operation to be done GitOpKey op_key = {.params = { - storage_config->GitRoot(), // target_path - "", // git_hash - std::nullopt, // message - std::nullopt, // source_path - true // init_bare + native_storage_config->GitRoot(), // target_path + "", // git_hash + 
std::nullopt, // message + std::nullopt, // source_path + true // init_bare }, .op_type = GitOpType::ENSURE_INIT}; critical_git_op_map->ConsumeAfterKeysReady( @@ -537,7 +594,10 @@ void HandleLocallyKnownTree( key, fetch_absent, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, critical_git_op_map, resolve_symlinks_map, @@ -580,7 +640,10 @@ void HandleLocallyKnownTree( /*is_cache_hit = */ true, /*is_absent = */ (key.absent and not fetch_absent), serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, critical_git_op_map, resolve_symlinks_map, @@ -588,8 +651,8 @@ void HandleLocallyKnownTree( setter, logger); }, - [logger, target_path = storage_config->GitRoot()](auto const& msg, - bool fatal) { + [logger, target_path = native_storage_config->GitRoot()]( + auto const& msg, bool fatal) { (*logger)(fmt::format("While running critical Git " "op ENSURE_INIT for " "target {}:\n{}", @@ -604,9 +667,12 @@ void HandleKnownInOlderGenerationAfterImport( const std::string& tree_id, bool fetch_absent, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, gsl::not_null const& resolve_symlinks_map, gsl::not_null const& critical_git_op_map, + IExecutionApi const* local_api, IExecutionApi const* remote_api, gsl::not_null const& ts, ContentGitMap::SetterPtr const& setter, @@ -614,7 +680,7 @@ void HandleKnownInOlderGenerationAfterImport( // Now that we have the tree persisted in the git repository of the youngest // generation; hence we can write the map-entry. 
auto archive_tree_id_file = StorageUtils::GetArchiveTreeIDFile( - *storage_config, key.repo_type, key.archive.content); + *native_storage_config, key.repo_type, key.archive.content_hash.Hash()); if (not StorageUtils::WriteTreeIDFile(archive_tree_id_file, tree_id)) { (*logger)(fmt::format("Failed to write tree id to file {}", archive_tree_id_file.string()), @@ -627,9 +693,12 @@ void HandleKnownInOlderGenerationAfterImport( archive_tree_id_file, fetch_absent, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, resolve_symlinks_map, critical_git_op_map, + local_api, remote_api, ts, setter, @@ -644,10 +713,12 @@ void HandleKnownInOlderGenerationAfterTaggingAndInit( std::filesystem::path const& source, bool fetch_absent, ServeApi const* serve, - gsl::not_null const& storage_config, - gsl::not_null const& storage, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, gsl::not_null const& resolve_symlinks_map, gsl::not_null const& critical_git_op_map, + IExecutionApi const* local_api, IExecutionApi const* remote_api, gsl::not_null const& ts, ContentGitMap::SetterPtr const& setter, @@ -667,14 +738,14 @@ void HandleKnownInOlderGenerationAfterTaggingAndInit( fatal); }); if (not git_repo->LocalFetchViaTmpRepo( - *storage_config, source, tag, fetch_logger)) { + *native_storage_config, source, tag, fetch_logger)) { return; } GitOpKey op_key = {.params = { - storage_config->GitRoot(), // target_path - tree_id, // git_hash - "Keep referenced tree alive" // message + native_storage_config->GitRoot(), // target_path + tree_id, // git_hash + "Keep referenced tree alive" // message }, .op_type = GitOpType::KEEP_TREE}; critical_git_op_map->ConsumeAfterKeysReady( @@ -685,10 +756,12 @@ void HandleKnownInOlderGenerationAfterTaggingAndInit( git_cas, fetch_absent, serve, - storage_config, - storage, + native_storage_config, + compat_storage_config, + compat_storage, 
resolve_symlinks_map, critical_git_op_map, + local_api, remote_api, ts, setter, @@ -704,9 +777,12 @@ void HandleKnownInOlderGenerationAfterTaggingAndInit( tree_id, fetch_absent, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, resolve_symlinks_map, critical_git_op_map, + local_api, remote_api, ts, setter, @@ -727,21 +803,23 @@ void HandleKnownInOlderGenerationAfterTagging( std::filesystem::path const& source, bool fetch_absent, ServeApi const* serve, - gsl::not_null const& storage_config, - gsl::not_null const& storage, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, gsl::not_null const& resolve_symlinks_map, gsl::not_null const& critical_git_op_map, + IExecutionApi const* local_api, IExecutionApi const* remote_api, gsl::not_null const& ts, ContentGitMap::SetterPtr const& setter, ContentGitMap::LoggerPtr const& logger) { GitOpKey op_key = {.params = { - storage_config->GitRoot(), // target_path - "", // git_hash - std::nullopt, // message - std::nullopt, // source_path - true // init_bare + native_storage_config->GitRoot(), // target_path + "", // git_hash + std::nullopt, // message + std::nullopt, // source_path + true // init_bare }, .op_type = GitOpType::ENSURE_INIT}; critical_git_op_map->ConsumeAfterKeysReady( @@ -753,10 +831,12 @@ void HandleKnownInOlderGenerationAfterTagging( source, fetch_absent, serve, - storage_config, - storage, + native_storage_config, + compat_storage_config, + compat_storage, resolve_symlinks_map, critical_git_op_map, + local_api, remote_api, ts, setter, @@ -775,17 +855,19 @@ void HandleKnownInOlderGenerationAfterTagging( source, fetch_absent, serve, - storage_config, - storage, + native_storage_config, + compat_storage_config, + compat_storage, resolve_symlinks_map, critical_git_op_map, + local_api, remote_api, ts, setter, logger); }, - [logger, target_path = storage_config->GitRoot()](auto const& msg, - bool fatal) 
{ + [logger, target_path = native_storage_config->GitRoot()]( + auto const& msg, bool fatal) { (*logger)(fmt::format("While running critical Git op " "ENSURE_INIT for target {}:\n{}", target_path.string(), @@ -799,16 +881,21 @@ void HandleKnownInOlderGeneration( std::size_t generation, bool fetch_absent, ServeApi const* serve, - gsl::not_null const& storage_config, - gsl::not_null const& storage, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, gsl::not_null const& resolve_symlinks_map, gsl::not_null const& critical_git_op_map, + IExecutionApi const* local_api, IExecutionApi const* remote_api, gsl::not_null const& ts, ContentGitMap::SetterPtr const& setter, ContentGitMap::LoggerPtr const& logger) { - auto archive_tree_id_file = StorageUtils::GetArchiveTreeIDFile( - *storage_config, key.repo_type, key.archive.content, generation); + auto archive_tree_id_file = + StorageUtils::GetArchiveTreeIDFile(*native_storage_config, + key.repo_type, + key.archive.content_hash.Hash(), + generation); auto archive_tree_id = FileSystemManager::ReadFile(archive_tree_id_file); if (not archive_tree_id) { (*logger)(fmt::format("Failed to read tree id from file {}", @@ -816,7 +903,7 @@ void HandleKnownInOlderGeneration( /*fatal=*/true); return; } - auto source = storage_config->GitGenerationRoot(generation); + auto source = native_storage_config->GitGenerationRoot(generation); GitOpKey op_key = {.params = { @@ -833,10 +920,12 @@ void HandleKnownInOlderGeneration( source, fetch_absent, serve, - storage_config, - storage, + native_storage_config, + compat_storage_config, + compat_storage, resolve_symlinks_map, critical_git_op_map, + local_api, remote_api, ts, setter, @@ -853,10 +942,12 @@ void HandleKnownInOlderGeneration( source, fetch_absent, serve, - storage_config, - storage, + native_storage_config, + compat_storage_config, + compat_storage, resolve_symlinks_map, critical_git_op_map, + local_api, remote_api, 
ts, setter, @@ -883,8 +974,11 @@ auto CreateContentGitMap( gsl::not_null const& resolve_symlinks_map, gsl::not_null const& critical_git_op_map, ServeApi const* serve, - gsl::not_null const& storage_config, - gsl::not_null const& storage, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + gsl::not_null const& native_storage, + Storage const* compat_storage, + IExecutionApi const* local_api, IExecutionApi const* remote_api, bool fetch_absent, gsl::not_null const& progress, @@ -897,8 +991,11 @@ auto CreateContentGitMap( additional_mirrors, ca_info, serve, - storage, - storage_config, + native_storage_config, + compat_storage_config, + native_storage, + compat_storage, + local_api, remote_api, fetch_absent, progress](auto ts, @@ -906,31 +1003,38 @@ auto CreateContentGitMap( auto logger, auto /* unused */, auto const& key) { - auto archive_tree_id_file = StorageUtils::GetArchiveTreeIDFile( - *storage_config, key.repo_type, key.archive.content); + auto archive_tree_id_file = + StorageUtils::GetArchiveTreeIDFile(*native_storage_config, + key.repo_type, + key.archive.content_hash.Hash()); if (FileSystemManager::Exists(archive_tree_id_file)) { HandleLocallyKnownTree(key, archive_tree_id_file, fetch_absent, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, resolve_symlinks_map, critical_git_op_map, + local_api, remote_api, ts, setter, logger); } - else if (auto generation = - IdFileExistsInOlderGeneration(storage_config, key)) { + else if (auto generation = IdFileExistsInOlderGeneration( + native_storage_config, key)) { HandleKnownInOlderGeneration(key, *generation, fetch_absent, serve, - storage_config, - storage, + native_storage_config, + compat_storage_config, + compat_storage, resolve_symlinks_map, critical_git_op_map, + local_api, remote_api, ts, setter, @@ -942,18 +1046,18 @@ auto CreateContentGitMap( // request the resolved subdir tree from the serve endpoint, if // given if (serve 
!= nullptr) { - auto serve_result = - serve->RetrieveTreeFromArchive(key.archive.content, - key.repo_type, - key.subdir, - key.pragma_special, - /*sync_tree = */ false); + auto const serve_result = serve->RetrieveTreeFromArchive( + key.archive.content_hash.Hash(), + key.repo_type, + key.subdir, + key.pragma_special, + /*sync_tree = */ false); if (serve_result) { // set the workspace root as absent progress->TaskTracker().Stop(key.archive.origin); (*setter)(std::pair( nlohmann::json::array( - {FileRoot::kGitTreeMarker, *serve_result}), + {FileRoot::kGitTreeMarker, serve_result->tree}), /*is_cache_hit = */ false)); return; } @@ -963,7 +1067,7 @@ auto CreateContentGitMap( (*logger)( fmt::format("Serve endpoint failed to set up root " "from known archive content {}", - key.archive.content), + key.archive.content_hash.Hash()), /*fatal=*/true); return; } @@ -973,16 +1077,19 @@ auto CreateContentGitMap( // a serve endpoint exists we can upload it the root ourselves; // check if content already in CAS - auto const& cas = storage->CAS(); - auto digest = ArtifactDigest(key.archive.content, 0, false); + auto const& native_cas = native_storage->CAS(); + auto const digest = ArtifactDigest{key.archive.content_hash, 0}; if (auto content_cas_path = - cas.BlobPath(digest, /*is_executable=*/false)) { + native_cas.BlobPath(digest, /*is_executable=*/false)) { ExtractAndImportToGit(key, *content_cas_path, archive_tree_id_file, /*is_absent = */ true, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, critical_git_op_map, import_to_git_map, @@ -999,11 +1106,11 @@ auto CreateContentGitMap( GitOpKey op_key = { .params = { - storage_config->GitRoot(), // target_path - "", // git_hash - std::nullopt, // message - std::nullopt, // source_path - true // init_bare + native_storage_config->GitRoot(), // target_path + "", // git_hash + std::nullopt, // message + std::nullopt, // source_path + true // init_bare }, .op_type = 
GitOpType::ENSURE_INIT}; critical_git_op_map->ConsumeAfterKeysReady( @@ -1019,8 +1126,11 @@ auto CreateContentGitMap( additional_mirrors, ca_info, serve, - storage, - storage_config, + native_storage_config, + compat_storage_config, + native_storage, + compat_storage, + local_api, remote_api, progress, ts, @@ -1044,40 +1154,46 @@ auto CreateContentGitMap( // verify if local Git knows content blob auto wrapped_logger = std::make_shared( - [&logger, blob = key.archive.content]( + [&logger, + hash = key.archive.content_hash.Hash()]( auto const& msg, bool fatal) { (*logger)( fmt::format("While verifying presence " "of blob {}:\n{}", - blob, + hash, msg), fatal); }); auto res = just_git_repo->TryReadBlob( - key.archive.content, wrapped_logger); + key.archive.content_hash.Hash(), wrapped_logger); if (not res.first) { // blob check failed return; } - auto const& cas = storage->CAS(); + auto const& native_cas = native_storage->CAS(); if (res.second) { // blob found; add it to CAS - if (not cas.StoreBlob(*res.second, - /*is_executable=*/false)) { - (*logger)(fmt::format("Failed to store content " - "{} to local CAS", - key.archive.content), + if (not native_cas.StoreBlob( + *res.second, + /*is_executable=*/false)) { + (*logger)(fmt::format( + "Failed to store content " + "{} to local CAS", + key.archive.content_hash.Hash()), /*fatal=*/true); return; } - if (auto content_cas_path = cas.BlobPath( + if (auto content_cas_path = native_cas.BlobPath( digest, /*is_executable=*/false)) { ExtractAndImportToGit(key, *content_cas_path, archive_tree_id_file, /*is_absent=*/true, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, critical_git_op_map, import_to_git_map, @@ -1105,17 +1221,20 @@ auto CreateContentGitMap( .filename() .string()); StorageUtils::AddDistfileToCAS( - *storage, repo_distfile, just_mr_paths); + *native_storage, repo_distfile, just_mr_paths); // check if content is in CAS now - if (auto 
content_cas_path = - cas.BlobPath(digest, /*is_executable=*/false)) { + if (auto content_cas_path = native_cas.BlobPath( + digest, /*is_executable=*/false)) { progress->TaskTracker().Stop(key.archive.origin); ExtractAndImportToGit(key, *content_cas_path, archive_tree_id_file, /*is_absent=*/true, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, critical_git_op_map, import_to_git_map, @@ -1129,10 +1248,10 @@ auto CreateContentGitMap( // report not being able to set up this root as absent (*logger)(fmt::format("Cannot create workspace root as " "absent for content {}.", - key.archive.content), + key.archive.content_hash.Hash()), /*fatal=*/true); }, - [logger, target_path = storage_config->GitRoot()]( + [logger, target_path = native_storage_config->GitRoot()]( auto const& msg, bool fatal) { (*logger)(fmt::format("While running critical Git op " "ENSURE_INIT for target {}:\n{}", @@ -1148,8 +1267,8 @@ auto CreateContentGitMap( {key.archive}, [archive_tree_id_file, key, - storage, - storage_config, + native_storage_config, + native_storage, critical_git_op_map, import_to_git_map, resolve_symlinks_map, @@ -1157,11 +1276,12 @@ auto CreateContentGitMap( setter, logger]([[maybe_unused]] auto const& values) { // content is in local CAS now - auto const& cas = storage->CAS(); + auto const& native_cas = native_storage->CAS(); auto content_cas_path = - cas.BlobPath(ArtifactDigest( - key.archive.content, 0, false), - /*is_executable=*/false) + native_cas + .BlobPath( + ArtifactDigest{key.archive.content_hash, 0}, + /*is_executable=*/false) .value(); // root can only be present, so default all arguments // that refer to a serve endpoint @@ -1170,7 +1290,10 @@ auto CreateContentGitMap( archive_tree_id_file, /*is_absent=*/false, /*serve=*/nullptr, - storage_config, + native_storage_config, + /*compat_storage_config=*/nullptr, + /*compat_storage=*/nullptr, + /*local_api=*/nullptr, /*remote_api=*/nullptr, 
critical_git_op_map, import_to_git_map, @@ -1179,11 +1302,11 @@ auto CreateContentGitMap( setter, logger); }, - [logger, content = key.archive.content](auto const& msg, - bool fatal) { + [logger, hash = key.archive.content_hash.Hash()]( + auto const& msg, bool fatal) { (*logger)(fmt::format("While ensuring content {} is in " "CAS:\n{}", - content, + hash, msg), fatal); }); diff --git a/src/other_tools/root_maps/content_git_map.hpp b/src/other_tools/root_maps/content_git_map.hpp index e969f20a2..52e6929e1 100644 --- a/src/other_tools/root_maps/content_git_map.hpp +++ b/src/other_tools/root_maps/content_git_map.hpp @@ -47,8 +47,11 @@ using ContentGitMap = gsl::not_null const& resolve_symlinks_map, gsl::not_null const& critical_git_op_map, ServeApi const* serve, - gsl::not_null const& storage_config, - gsl::not_null const& storage, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + gsl::not_null const& native_storage, + Storage const* compat_storage, + IExecutionApi const* local_api, IExecutionApi const* remote_api, bool fetch_absent, gsl::not_null const& progress, diff --git a/src/other_tools/root_maps/distdir_git_map.cpp b/src/other_tools/root_maps/distdir_git_map.cpp index 4a7e9b51e..ef460949b 100644 --- a/src/other_tools/root_maps/distdir_git_map.cpp +++ b/src/other_tools/root_maps/distdir_git_map.cpp @@ -19,6 +19,8 @@ #include "fmt/core.h" #include "src/buildtool/common/artifact.hpp" #include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/common/execution_common.hpp" #include "src/buildtool/file_system/file_root.hpp" #include "src/buildtool/file_system/file_storage.hpp" @@ -46,9 +48,16 @@ namespace { content_list->begin(), content_list->end(), [&cas, tmp_dir](auto const& kv) { - auto content_path = - cas.BlobPath(ArtifactDigest(kv.second, 0, false), - /*is_executable=*/false); 
+ auto const digest = + ArtifactDigestFactory::Create(cas.GetHashFunction().GetType(), + kv.second, + 0, + /*is_tree=*/false); + if (not digest) { + return false; + } + auto content_path = cas.BlobPath(*digest, + /*is_executable=*/false); if (content_path) { return FileSystemManager::CreateFileHardlink( *content_path, // from: cas_path/content_id @@ -65,15 +74,15 @@ namespace { /// the setter on success. void ImportFromCASAndSetRoot( DistdirInfo const& key, - StorageConfig const& storage_config, - Storage const& storage, + StorageConfig const& native_storage_config, + Storage const& native_storage, std::filesystem::path const& distdir_tree_id_file, gsl::not_null const& import_to_git_map, gsl::not_null const& ts, DistdirGitMap::SetterPtr const& setter, DistdirGitMap::LoggerPtr const& logger) { // create the links to CAS - auto tmp_dir = storage_config.CreateTypedTmpDir("distdir"); + auto tmp_dir = native_storage_config.CreateTypedTmpDir("distdir"); if (not tmp_dir) { (*logger)(fmt::format("Failed to create tmp path for " "distdir target {}", @@ -82,7 +91,7 @@ void ImportFromCASAndSetRoot( return; } // link content from CAS into tmp dir - if (not LinkToCAS(storage, key.content_list, tmp_dir->GetPath())) { + if (not LinkToCAS(native_storage, key.content_list, tmp_dir->GetPath())) { (*logger)(fmt::format("Failed to create links to CAS content!", key.content_id), /*fatal=*/true); @@ -95,7 +104,7 @@ void ImportFromCASAndSetRoot( {std::move(c_info)}, [tmp_dir, // keep tmp_dir alive distdir_tree_id_file, - git_root = storage_config.GitRoot().string(), + git_root = native_storage_config.GitRoot().string(), setter, logger](auto const& values) { // check for errors @@ -136,8 +145,10 @@ auto CreateDistdirGitMap( gsl::not_null const& import_to_git_map, gsl::not_null const& critical_git_op_map, ServeApi const* serve, - gsl::not_null const& storage_config, - gsl::not_null const& storage, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + 
gsl::not_null const& native_storage, + Storage const* compat_storage, gsl::not_null const& local_api, IExecutionApi const* remote_api, std::size_t jobs) -> DistdirGitMap { @@ -145,16 +156,18 @@ auto CreateDistdirGitMap( import_to_git_map, critical_git_op_map, serve, - storage, - storage_config, + native_storage_config, + compat_storage_config, + native_storage, + compat_storage, local_api, remote_api](auto ts, auto setter, auto logger, auto /* unused */, auto const& key) { - auto distdir_tree_id_file = - StorageUtils::GetDistdirTreeIDFile(*storage_config, key.content_id); + auto distdir_tree_id_file = StorageUtils::GetDistdirTreeIDFile( + *native_storage_config, key.content_id); if (FileSystemManager::Exists(distdir_tree_id_file)) { // read distdir_tree_id from file tree_id_file auto distdir_tree_id = @@ -170,11 +183,11 @@ auto CreateDistdirGitMap( GitOpKey op_key = { .params = { - storage_config->GitRoot(), // target_path - "", // git_hash - std::nullopt, // message - std::nullopt, // source_path - true // init_bare + native_storage_config->GitRoot(), // target_path + "", // git_hash + std::nullopt, // message + std::nullopt, // source_path + true // init_bare }, .op_type = GitOpType::ENSURE_INIT}; critical_git_op_map->ConsumeAfterKeysReady( @@ -184,7 +197,10 @@ auto CreateDistdirGitMap( content_id = key.content_id, key, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, setter, logger](auto const& values) { @@ -208,21 +224,20 @@ auto CreateDistdirGitMap( if (not *has_tree) { // try to see if serve endpoint has the // information to prepare the root itself - auto serve_result = + auto const serve_result = serve->RetrieveTreeFromDistdir( key.content_list, /*sync_tree=*/false); if (serve_result) { // if serve has set up the tree, it must // match what we expect - auto const& served_tree_id = *serve_result; - if (distdir_tree_id != served_tree_id) { + if (distdir_tree_id != serve_result->tree) { 
(*logger)( fmt::format( "Mismatch in served root tree " "id:\nexpected {}, but got {}", distdir_tree_id, - served_tree_id), + serve_result->tree), /*fatal=*/true); return; } @@ -259,7 +274,12 @@ auto CreateDistdirGitMap( if (not EnsureAbsentRootOnServe( *serve, distdir_tree_id, - storage_config->GitRoot(), + native_storage_config + ->GitRoot(), /*repo_root*/ + native_storage_config, + compat_storage_config, + compat_storage, + &*local_api, remote_api, logger, true /*no_sync_is_fatal*/)) { @@ -284,15 +304,15 @@ auto CreateDistdirGitMap( } else { // set root as present - (*setter)( - std::pair(nlohmann::json::array( - {FileRoot::kGitTreeMarker, - distdir_tree_id, - storage_config->GitRoot().string()}), - /*is_cache_hit=*/true)); + (*setter)(std::pair( + nlohmann::json::array( + {FileRoot::kGitTreeMarker, + distdir_tree_id, + native_storage_config->GitRoot().string()}), + /*is_cache_hit=*/true)); } }, - [logger, target_path = storage_config->GitRoot()]( + [logger, target_path = native_storage_config->GitRoot()]( auto const& msg, bool fatal) { (*logger)(fmt::format("While running critical Git op " "ENSURE_INIT for target {}:\n{}", @@ -329,9 +349,10 @@ auto CreateDistdirGitMap( return; } // get hash from raw_id - auto tree_id = ToHexString(tree->first); + auto const tree_id = ToHexString(tree->first); // get digest object - auto digest = ArtifactDigest{tree_id, 0, /*is_tree=*/true}; + auto const digest = ArtifactDigestFactory::Create( + HashFunction::Type::GitSHA1, tree_id, 0, /*is_tree=*/true); // use this knowledge of the resulting tree identifier to try to set // up the absent root without actually checking the local status of @@ -354,19 +375,18 @@ auto CreateDistdirGitMap( } // try to see if serve endpoint has the information to // prepare the root itself - auto serve_result = + auto const serve_result = serve->RetrieveTreeFromDistdir(key.content_list, /*sync_tree=*/false); if (serve_result) { // if serve has set up the tree, it must match what we // expect - auto 
const& served_tree_id = *serve_result; - if (tree_id != served_tree_id) { + if (tree_id != serve_result->tree) { (*logger)( fmt::format("Mismatch in served root tree " "id:\nexpected {}, but got {}", tree_id, - served_tree_id), + serve_result->tree), /*fatal=*/true); return; } @@ -398,13 +418,17 @@ auto CreateDistdirGitMap( } // try to supply the serve endpoint with the tree via the // remote CAS - if (remote_api->IsAvailable({digest})) { + if (digest and remote_api->IsAvailable({*digest})) { // tell serve to set up the root from the remote CAS // tree; upload can be skipped if (EnsureAbsentRootOnServe( *serve, tree_id, /*repo_path=*/"", + native_storage_config, + /*compat_storage_config=*/nullptr, + /*compat_storage=*/nullptr, + /*local_api=*/nullptr, /*remote_api=*/nullptr, logger, /*no_sync_is_fatal=*/true)) { @@ -424,10 +448,10 @@ auto CreateDistdirGitMap( } // check if we have the tree in local CAS; if yes, upload it // to remote for the serve endpoint to find it - if (local_api->IsAvailable({digest})) { + if (digest and local_api->IsAvailable({*digest})) { if (not local_api->RetrieveToCas( {Artifact::ObjectInfo{ - .digest = digest, + .digest = *digest, .type = ObjectType::Tree}}, *remote_api)) { (*logger)(fmt::format("Failed to sync tree {} from " @@ -442,6 +466,10 @@ auto CreateDistdirGitMap( *serve, tree_id, /*repo_path=*/"", + native_storage_config, + /*compat_storage_config=*/nullptr, + /*compat_storage=*/nullptr, + /*local_api=*/nullptr, /*remote_api=*/nullptr, logger, /*no_sync_is_fatal=*/true)) { @@ -475,10 +503,10 @@ auto CreateDistdirGitMap( // if the root is not-absent, the order of checks is different; // first, look in the local CAS - if (local_api->IsAvailable({digest})) { + if (digest and local_api->IsAvailable({*digest})) { ImportFromCASAndSetRoot(key, - *storage_config, - *storage, + *native_storage_config, + *native_storage, distdir_tree_id_file, import_to_git_map, ts, @@ -490,18 +518,17 @@ auto CreateDistdirGitMap( // now ask serve endpoint 
if it can set up the root; as this is for // a present root, a corresponding remote endpoint is needed if (serve != nullptr and remote_api != nullptr) { - auto serve_result = + auto const serve_result = serve->RetrieveTreeFromDistdir(key.content_list, /*sync_tree=*/true); if (serve_result) { // if serve has set up the tree, it must match what we // expect - auto const& served_tree_id = *serve_result; - if (tree_id != served_tree_id) { + if (tree_id != serve_result->tree) { (*logger)(fmt::format("Mismatch in served root tree " "id:\nexpected {}, but got {}", tree_id, - served_tree_id), + serve_result->tree), /*fatal=*/true); return; } @@ -531,15 +558,15 @@ auto CreateDistdirGitMap( [distdir_tree_id_file, key, import_to_git_map, + native_storage_config, + native_storage, ts, - storage, - storage_config, setter, logger]([[maybe_unused]] auto const& values) { // archive blobs are in CAS ImportFromCASAndSetRoot(key, - *storage_config, - *storage, + *native_storage_config, + *native_storage, distdir_tree_id_file, import_to_git_map, ts, diff --git a/src/other_tools/root_maps/distdir_git_map.hpp b/src/other_tools/root_maps/distdir_git_map.hpp index 9244f3412..2dc678f8e 100644 --- a/src/other_tools/root_maps/distdir_git_map.hpp +++ b/src/other_tools/root_maps/distdir_git_map.hpp @@ -57,8 +57,10 @@ using DistdirGitMap = gsl::not_null const& import_to_git_map, gsl::not_null const& critical_git_op_map, ServeApi const* serve, - gsl::not_null const& storage_config, - gsl::not_null const& storage, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + gsl::not_null const& native_storage, + Storage const* compat_storage, gsl::not_null const& local_api, IExecutionApi const* remote_api, std::size_t jobs) -> DistdirGitMap; diff --git a/src/other_tools/root_maps/foreign_file_git_map.cpp b/src/other_tools/root_maps/foreign_file_git_map.cpp index 37ab10e12..25c779475 100644 --- a/src/other_tools/root_maps/foreign_file_git_map.cpp +++ 
b/src/other_tools/root_maps/foreign_file_git_map.cpp @@ -15,6 +15,7 @@ #include "src/other_tools/root_maps/foreign_file_git_map.hpp" #include "fmt/core.h" +#include "src/buildtool/crypto/hash_info.hpp" #include "src/buildtool/file_system/file_root.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/logging/log_level.hpp" @@ -33,8 +34,11 @@ void WithRootImportedToGit(ForeignFileInfo const& key, (*logger)("Importing to git failed", /*fatal=*/true); return; } - auto tree_id_file = StorageUtils::GetForeignFileTreeIDFile( - storage_config, key.archive.content, key.name, key.executable); + auto tree_id_file = + StorageUtils::GetForeignFileTreeIDFile(storage_config, + key.archive.content_hash.Hash(), + key.name, + key.executable); auto cache_written = StorageUtils::WriteTreeIDFile(tree_id_file, result.first); if (not cache_written) { @@ -58,12 +62,12 @@ void WithFetchedFile(ForeignFileInfo const& key, ForeignFileGitMap::LoggerPtr const& logger) { auto tmp_dir = storage_config->CreateTypedTmpDir("foreign-file"); auto const& cas = storage.CAS(); - auto digest = ArtifactDigest(key.archive.content, 0, key.executable); + auto digest = ArtifactDigest{key.archive.content_hash, 0}; auto content_cas_path = cas.BlobPath(digest, key.executable); if (not content_cas_path) { (*logger)( fmt::format("Failed to locally find {} after fetching for repo {}", - key.archive.content, + key.archive.content_hash.Hash(), nlohmann::json(key.archive.origin).dump()), true); return; @@ -82,7 +86,7 @@ void WithFetchedFile(ForeignFileInfo const& key, CommitInfo c_info{ tmp_dir->GetPath(), fmt::format("foreign file at {}", nlohmann::json(key.name).dump()), - key.archive.content}; + key.archive.content_hash.Hash()}; import_to_git_map->ConsumeAfterKeysReady( ts, {std::move(c_info)}, @@ -122,10 +126,10 @@ void HandleAbsentForeignFile(ForeignFileInfo const& key, ForeignFileGitMap::LoggerPtr const& logger) { // Compute tree in memory GitRepo::tree_entries_t entries{}; - 
auto raw_id = FromHexString(key.archive.content); + auto raw_id = FromHexString(key.archive.content_hash.Hash()); if (not raw_id) { (*logger)(fmt::format("Failure converting {} to raw id.", - key.archive.content), + key.archive.content_hash.Hash()), true); return; } @@ -135,7 +139,7 @@ void HandleAbsentForeignFile(ForeignFileInfo const& key, if (not tree) { (*logger)(fmt::format("Failure to construct in-memory tree with entry " "{} at place {}", - key.archive.content, + key.archive.content_hash.Hash(), nlohmann::json(key.name).dump()), true); return; @@ -152,17 +156,15 @@ void HandleAbsentForeignFile(ForeignFileInfo const& key, /*is_cache_hit=*/false)); return; } - auto serve_result = serve->RetrieveTreeFromForeignFile( - key.archive.content, key.name, key.executable); + auto const serve_result = serve->RetrieveTreeFromForeignFile( + key.archive.content_hash.Hash(), key.name, key.executable); if (serve_result) { - // if serve has set up the tree, it must match what we - // expect - auto const& served_tree_id = *serve_result; - if (tree_id != served_tree_id) { + // if serve has set up the tree, it must match what we expect + if (tree_id != serve_result->tree) { (*logger)(fmt::format("Mismatch in served root tree " "id: expected {}, but got {}", tree_id, - served_tree_id), + serve_result->tree), /*fatal=*/true); return; } @@ -175,7 +177,7 @@ void HandleAbsentForeignFile(ForeignFileInfo const& key, if (serve_result.error() == GitLookupError::Fatal) { (*logger)(fmt::format("Serve endpoint failed to set up root " "from known foreign-file content {}", - key.archive.content), + key.archive.content_hash.Hash()), /*fatal=*/true); return; } @@ -218,7 +220,10 @@ void HandleAbsentForeignFile(ForeignFileInfo const& key, return; } auto tree_id_file = StorageUtils::GetForeignFileTreeIDFile( - *storage_config, key.archive.content, key.name, key.executable); + *storage_config, + key.archive.content_hash.Hash(), + key.name, + key.executable); if 
(FileSystemManager::Exists(tree_id_file)) { auto tree_id = FileSystemManager::ReadFile(tree_id_file); if (not tree_id) { @@ -248,11 +253,11 @@ void HandleAbsentForeignFile(ForeignFileInfo const& key, setter, logger); }, - [logger, content = key.archive.content](auto const& msg, - bool fatal) { + [logger, hash = key.archive.content_hash.Hash()](auto const& msg, + bool fatal) { (*logger)(fmt::format("While ensuring content {} is in " "CAS:\n{}", - content, + hash, msg), fatal); }); diff --git a/src/other_tools/root_maps/foreign_file_git_map.hpp b/src/other_tools/root_maps/foreign_file_git_map.hpp index 2efd1ae66..e2662c4c7 100644 --- a/src/other_tools/root_maps/foreign_file_git_map.hpp +++ b/src/other_tools/root_maps/foreign_file_git_map.hpp @@ -42,4 +42,4 @@ using ForeignFileGitMap = bool fetch_absent, std::size_t jobs) -> ForeignFileGitMap; -#endif +#endif // INCLUDED_SRC_OTHER_TOOLS_ROOT_MAPS_FOREIGN_FILE_GIT_MAP_HPP diff --git a/src/other_tools/root_maps/fpath_git_map.cpp b/src/other_tools/root_maps/fpath_git_map.cpp index dbda05ce3..ca8bdd52e 100644 --- a/src/other_tools/root_maps/fpath_git_map.cpp +++ b/src/other_tools/root_maps/fpath_git_map.cpp @@ -31,13 +31,18 @@ namespace { /// \brief Does the serve endpoint checks and sets the workspace root. /// It guarantees the logger is called exactly once with fatal on failure, and /// the setter on success. 
-void CheckServeAndSetRoot(std::string const& tree_id, - std::string const& repo_root, - bool absent, - ServeApi const* serve, - IExecutionApi const* remote_api, - FilePathGitMap::SetterPtr const& ws_setter, - FilePathGitMap::LoggerPtr const& logger) { +void CheckServeAndSetRoot( + std::string const& tree_id, + std::string const& repo_root, + bool absent, + ServeApi const* serve, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + IExecutionApi const* local_api, + IExecutionApi const* remote_api, + FilePathGitMap::SetterPtr const& ws_setter, + FilePathGitMap::LoggerPtr const& logger) { // if serve endpoint is given, try to ensure it has this tree available to // be able to build against it. If root is not absent, do not fail if we // don't have a suitable remote endpoint, but warn user nonetheless. @@ -63,6 +68,10 @@ void CheckServeAndSetRoot(std::string const& tree_id, if (not EnsureAbsentRootOnServe(*serve, tree_id, repo_root, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, logger, /*no_sync_is_fatal=*/absent)) { @@ -99,7 +108,10 @@ void ResolveFilePathTree( gsl::not_null const& critical_git_op_map, gsl::not_null const& resolve_symlinks_map, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + IExecutionApi const* local_api, IExecutionApi const* remote_api, gsl::not_null const& ts, FilePathGitMap::SetterPtr const& ws_setter, @@ -107,7 +119,7 @@ void ResolveFilePathTree( if (pragma_special) { // get the resolved tree auto tree_id_file = StorageUtils::GetResolvedTreeIDFile( - *storage_config, tree_hash, *pragma_special); + *native_storage_config, tree_hash, *pragma_special); if (FileSystemManager::Exists(tree_id_file)) { // read resolved tree id auto resolved_tree_id = FileSystemManager::ReadFile(tree_id_file); @@ 
-122,9 +134,13 @@ void ResolveFilePathTree( // available to be able to build against it; the tree is resolved, // so it is in our Git cache CheckServeAndSetRoot(*resolved_tree_id, - storage_config->GitRoot().string(), + native_storage_config->GitRoot().string(), absent, serve, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, ws_setter, logger); @@ -145,7 +161,10 @@ void ResolveFilePathTree( tree_id_file, absent, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, ts, ws_setter, @@ -155,7 +174,8 @@ void ResolveFilePathTree( GitOpKey op_key = { .params = { - storage_config->GitRoot(), // target_path + native_storage_config + ->GitRoot(), // target_path resolved_tree_id, // git_hash "Keep referenced tree alive" // message }, @@ -167,7 +187,10 @@ void ResolveFilePathTree( tree_id_file, absent, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, ws_setter, logger](auto const& values) { @@ -193,14 +216,19 @@ void ResolveFilePathTree( // it; the resolved tree is in the Git cache CheckServeAndSetRoot( resolved_tree_id, - storage_config->GitRoot().string(), + native_storage_config->GitRoot().string(), absent, serve, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, ws_setter, logger); }, - [logger, target_path = storage_config->GitRoot()]( + [logger, + target_path = native_storage_config->GitRoot()]( auto const& msg, bool fatal) { (*logger)( fmt::format("While running critical Git op " @@ -223,8 +251,17 @@ void ResolveFilePathTree( // tree needs no further processing; // if serve endpoint is given, try to ensure it has this tree available // to be able to build against it - CheckServeAndSetRoot( - tree_hash, repo_root, absent, serve, remote_api, ws_setter, logger); + CheckServeAndSetRoot(tree_hash, + repo_root, + absent, + serve, + native_storage_config, + 
compat_storage_config, + compat_storage, + local_api, + remote_api, + ws_setter, + logger); } } @@ -236,17 +273,23 @@ auto CreateFilePathGitMap( gsl::not_null const& import_to_git_map, gsl::not_null const& resolve_symlinks_map, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + IExecutionApi const* local_api, IExecutionApi const* remote_api, std::size_t jobs, - std::string multi_repo_tool_name, - std::string build_tool_name) -> FilePathGitMap { + std::string const& multi_repo_tool_name, + std::string const& build_tool_name) -> FilePathGitMap { auto dir_to_git = [current_subcmd, critical_git_op_map, import_to_git_map, resolve_symlinks_map, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, multi_repo_tool_name, build_tool_name](auto ts, @@ -285,7 +328,10 @@ auto CreateFilePathGitMap( critical_git_op_map, resolve_symlinks_map, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, ts, setter, @@ -324,16 +370,16 @@ auto CreateFilePathGitMap( // resolve tree and set workspace root; tree gets resolved // from source repo into the Git cache, which we first need // to ensure is initialized - GitOpKey op_key = { - .params = - { - storage_config->GitRoot(), // target_path - "", // git_hash - std::nullopt, // message - std::nullopt, // source_path - true // init_bare - }, - .op_type = GitOpType::ENSURE_INIT}; + GitOpKey op_key = {.params = + { + native_storage_config + ->GitRoot(), // target_path + "", // git_hash + std::nullopt, // message + std::nullopt, // source_path + true // init_bare + }, + .op_type = GitOpType::ENSURE_INIT}; critical_git_op_map->ConsumeAfterKeysReady( ts, {std::move(op_key)}, @@ -346,7 +392,10 @@ auto CreateFilePathGitMap( critical_git_op_map, resolve_symlinks_map, serve, - 
storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, ts, setter, @@ -369,13 +418,17 @@ auto CreateFilePathGitMap( critical_git_op_map, resolve_symlinks_map, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, ts, setter, logger); }, - [logger, target_path = storage_config->GitRoot()]( + [logger, + target_path = native_storage_config->GitRoot()]( auto const& msg, bool fatal) { (*logger)( fmt::format("While running critical Git op " @@ -408,7 +461,7 @@ auto CreateFilePathGitMap( /*fatal=*/false); } // it's not a git repo, so import it to git cache - auto tmp_dir = storage_config->CreateTypedTmpDir("file"); + auto tmp_dir = native_storage_config->CreateTypedTmpDir("file"); if (not tmp_dir) { (*logger)("Failed to create import-to-git tmp directory!", /*fatal=*/true); @@ -437,7 +490,10 @@ auto CreateFilePathGitMap( critical_git_op_map, resolve_symlinks_map, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, remote_api, ts, setter, @@ -452,21 +508,25 @@ auto CreateFilePathGitMap( std::string tree = values[0]->first; // resolve tree and set workspace root; // we work on the Git CAS directly - ResolveFilePathTree(storage_config->GitRoot().string(), - fpath.string(), - tree, - pragma_special, - values[0]->second, /*source_cas*/ - values[0]->second, /*target_cas*/ - absent, - critical_git_op_map, - resolve_symlinks_map, - serve, - storage_config, - remote_api, - ts, - setter, - logger); + ResolveFilePathTree( + native_storage_config->GitRoot().string(), + fpath.string(), + tree, + pragma_special, + values[0]->second, /*source_cas*/ + values[0]->second, /*target_cas*/ + absent, + critical_git_op_map, + resolve_symlinks_map, + serve, + native_storage_config, + compat_storage_config, + compat_storage, + local_api, + remote_api, + ts, + setter, + logger); }, [logger, target_path = key.fpath](auto const& 
msg, bool fatal) { (*logger)( diff --git a/src/other_tools/root_maps/fpath_git_map.hpp b/src/other_tools/root_maps/fpath_git_map.hpp index d116496f1..ad2f4c399 100644 --- a/src/other_tools/root_maps/fpath_git_map.hpp +++ b/src/other_tools/root_maps/fpath_git_map.hpp @@ -27,13 +27,14 @@ #include "src/buildtool/file_system/symlinks_map/resolve_symlinks_map.hpp" #include "src/buildtool/serve_api/remote/serve_api.hpp" #include "src/buildtool/storage/config.hpp" +#include "src/buildtool/storage/storage.hpp" #include "src/other_tools/just_mr/utils.hpp" #include "src/other_tools/ops_maps/import_to_git_map.hpp" #include "src/utils/cpp/hash_combine.hpp" #include "src/utils/cpp/path_hash.hpp" struct FpathInfo { - std::filesystem::path fpath{}; /* key */ + std::filesystem::path fpath; /* key */ // create root based on "special" pragma value std::optional pragma_special{std::nullopt}; /* key */ // create an absent root @@ -56,11 +57,14 @@ using FilePathGitMap = AsyncMapConsumer; gsl::not_null const& import_to_git_map, gsl::not_null const& resolve_symlinks_map, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + IExecutionApi const* local_api, IExecutionApi const* remote_api, std::size_t jobs, - std::string multi_repo_tool_name, - std::string build_tool_name) -> FilePathGitMap; + std::string const& multi_repo_tool_name, + std::string const& build_tool_name) -> FilePathGitMap; namespace std { template <> diff --git a/src/other_tools/root_maps/root_utils.cpp b/src/other_tools/root_maps/root_utils.cpp index 6e77cae87..89e716533 100644 --- a/src/other_tools/root_maps/root_utils.cpp +++ b/src/other_tools/root_maps/root_utils.cpp @@ -17,8 +17,11 @@ #include "fmt/core.h" #include "src/buildtool/common/artifact.hpp" #include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include 
"src/buildtool/common/repository_config.hpp" -#include "src/buildtool/execution_api/git/git_api.hpp" +#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/execution_api/serve/mr_git_api.hpp" +#include "src/buildtool/execution_api/serve/utils.hpp" #include "src/buildtool/file_system/object_type.hpp" auto CheckServeHasAbsentRoot(ServeApi const& serve, @@ -26,7 +29,7 @@ auto CheckServeHasAbsentRoot(ServeApi const& serve, AsyncMapConsumerLoggerPtr const& logger) -> std::optional { if (auto has_tree = serve.CheckRootTree(tree_id)) { - return *has_tree; + return has_tree; } (*logger)(fmt::format("Checking that the serve endpoint knows tree " "{} failed.", @@ -35,12 +38,25 @@ auto CheckServeHasAbsentRoot(ServeApi const& serve, return std::nullopt; } -auto EnsureAbsentRootOnServe(ServeApi const& serve, - std::string const& tree_id, - std::filesystem::path const& repo_path, - IExecutionApi const* remote_api, - AsyncMapConsumerLoggerPtr const& logger, - bool no_sync_is_fatal) -> bool { +auto EnsureAbsentRootOnServe( + ServeApi const& serve, + std::string const& tree_id, + std::filesystem::path const& repo_path, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + IExecutionApi const* local_api, + IExecutionApi const* remote_api, + AsyncMapConsumerLoggerPtr const& logger, + bool no_sync_is_fatal) -> bool { + auto const native_digest = ArtifactDigestFactory::Create( + HashFunction::Type::GitSHA1, tree_id, 0, /*is_tree=*/true); + if (not native_digest) { + (*logger)(fmt::format("Failed to create digest for {}", tree_id), + /*fatal=*/true); + return false; + } + // check if upload is required if (remote_api != nullptr) { // upload tree to remote CAS auto repo = RepositoryConfig{}; @@ -50,11 +66,14 @@ auto EnsureAbsentRootOnServe(ServeApi const& serve, /*fatal=*/true); return false; } - auto git_api = GitApi{&repo}; + auto git_api = MRGitApi{&repo, + native_storage_config, + 
compat_storage_config, + compat_storage, + local_api}; if (not git_api.RetrieveToCas( - {Artifact::ObjectInfo{ - .digest = ArtifactDigest{tree_id, 0, /*is_tree=*/true}, - .type = ObjectType::Tree}}, + {Artifact::ObjectInfo{.digest = *native_digest, + .type = ObjectType::Tree}}, *remote_api)) { (*logger)(fmt::format("Failed to sync tree {} from repository {}", tree_id, @@ -63,8 +82,29 @@ auto EnsureAbsentRootOnServe(ServeApi const& serve, return false; } } - // ask serve endpoint to retrieve the uploaded tree - if (not serve.GetTreeFromRemote(tree_id)) { + // ask serve endpoint to retrieve the uploaded tree; this can only happen if + // we have access to a digest that the remote knows + ArtifactDigest remote_digest = *native_digest; + if (compat_storage_config != nullptr) { + // in compatible mode, get compatible digest from mapping, if exists + auto cached_obj = MRApiUtils::ReadRehashedDigest(*native_digest, + *native_storage_config, + *compat_storage_config, + /*from_git=*/true); + if (not cached_obj) { + (*logger)(cached_obj.error(), /*fatal=*/true); + return false; + } + if (not *cached_obj) { + // digest is not known; respond based on no_sync_is_fatal flag + (*logger)(fmt::format("No digest provided to sync root tree {}.", + tree_id), + /*fatal=*/no_sync_is_fatal); + return not no_sync_is_fatal; + } + remote_digest = cached_obj->value().digest; + } + if (not serve.GetTreeFromRemote(remote_digest)) { // respond based on no_sync_is_fatal flag (*logger)( fmt::format("Serve endpoint failed to sync root tree {}.", tree_id), diff --git a/src/other_tools/root_maps/root_utils.hpp b/src/other_tools/root_maps/root_utils.hpp index c055633f0..2d3ec6a75 100644 --- a/src/other_tools/root_maps/root_utils.hpp +++ b/src/other_tools/root_maps/root_utils.hpp @@ -22,6 +22,8 @@ #include "src/buildtool/execution_api/common/execution_api.hpp" #include "src/buildtool/multithreading/async_map_consumer.hpp" #include "src/buildtool/serve_api/remote/serve_api.hpp" +#include 
"src/buildtool/storage/config.hpp" +#include "src/buildtool/storage/storage.hpp" /// \brief Calls the ServeApi to check whether the serve endpoint has the given /// tree available to build against. @@ -43,9 +45,16 @@ /// used by given remote execution endpoint! /// \param tree_id The Git-tree identifier. /// \param repo_path Local witnessing Git repository for the tree. -/// \param remote_api Optional API of the remote-execution endpoint. If nullopt, -/// skip the upload to the remote CAS; this assumes prior knowledge which -/// guarantees the tree given by tree_id exists in the remote CAS for the +/// \param native_storage_config Configuration of the native local storage. +/// \param compat_storage_config Optional configuration of the compatible local +/// storage, if it was set up. +/// \param compat_storage Optional compatible local storage, if it was set up. +/// \param local_api Optional API that knows how to communicate with the +/// remote-execution endpoint specified by parameter remote_api, if given. In +/// particular, it is expected to be provided if the remote is compatible. +/// \param remote_api Optional API of the remote-execution endpoint. +/// If nullopt, skip the upload to the remote CAS; this assumes prior knowledge +/// which guarantees the tree given by tree_id exists in the remote CAS for the /// duration of the subsequent serve API call; this option should be used /// carefully, but does result in less remote communication. /// \param logger An AsyncMapConsumer logger instance. 
@@ -58,6 +67,10 @@ ServeApi const& serve, std::string const& tree_id, std::filesystem::path const& repo_path, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + IExecutionApi const* local_api, IExecutionApi const* remote_api, AsyncMapConsumerLoggerPtr const& logger, bool no_sync_is_fatal) -> bool; diff --git a/src/other_tools/root_maps/tree_id_git_map.cpp b/src/other_tools/root_maps/tree_id_git_map.cpp index 78c1cffd5..7b3099ae2 100644 --- a/src/other_tools/root_maps/tree_id_git_map.cpp +++ b/src/other_tools/root_maps/tree_id_git_map.cpp @@ -15,8 +15,10 @@ #include "src/other_tools/root_maps/tree_id_git_map.hpp" #include "fmt/core.h" +#include "src/buildtool/common/artifact_digest.hpp" #include "src/buildtool/common/repository_config.hpp" -#include "src/buildtool/execution_api/git/git_api.hpp" +#include "src/buildtool/crypto/hash_info.hpp" +#include "src/buildtool/execution_api/serve/mr_git_api.hpp" #include "src/buildtool/file_system/file_root.hpp" #include "src/other_tools/root_maps/root_utils.hpp" @@ -24,18 +26,27 @@ namespace { /// \brief Guarantees it terminates by either calling the setter or calling the /// logger with fatal. 
-void UploadToServeAndSetRoot(ServeApi const& serve, - StorageConfig const& storage_config, - std::string const& tree_id, - ArtifactDigest const& digest, - IExecutionApi const& remote_api, - bool ignore_special, - TreeIdGitMap::SetterPtr const& setter, - TreeIdGitMap::LoggerPtr const& logger) { +void UploadToServeAndSetRoot( + ServeApi const& serve, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, + std::string const& tree_id, + ArtifactDigest const& digest, + gsl::not_null const& local_api, + IExecutionApi const& remote_api, + bool ignore_special, + TreeIdGitMap::SetterPtr const& setter, + TreeIdGitMap::LoggerPtr const& logger) { // upload to remote CAS auto repo_config = RepositoryConfig{}; - if (repo_config.SetGitCAS(storage_config.GitRoot())) { - auto git_api = GitApi{&repo_config}; + if (repo_config.SetGitCAS(native_storage_config->GitRoot())) { + auto git_api = + MRGitApi{&repo_config, + native_storage_config, + compat_storage_config, + compat_storage, + compat_storage_config != nullptr ? &*local_api : nullptr}; if (not git_api.RetrieveToCas( {Artifact::ObjectInfo{.digest = digest, .type = ObjectType::Tree}}, @@ -49,7 +60,7 @@ void UploadToServeAndSetRoot(ServeApi const& serve, } else { (*logger)(fmt::format("Failed to SetGitCAS at {}", - storage_config.GitRoot().string()), + native_storage_config->GitRoot().string()), /*fatal=*/true); return; } @@ -58,6 +69,10 @@ void UploadToServeAndSetRoot(ServeApi const& serve, if (EnsureAbsentRootOnServe(serve, tree_id, /*repo_path=*/"", + native_storage_config, + /*compat_storage_config=*/nullptr, + /*compat_storage=*/nullptr, + /*local_api=*/nullptr, /*remote_api=*/nullptr, logger, /*no_sync_is_fatal=*/true)) { @@ -75,7 +90,9 @@ void UploadToServeAndSetRoot(ServeApi const& serve, /// logger with fatal. 
void MoveCASTreeToGitAndProcess( ServeApi const& serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, std::string const& tree_id, ArtifactDigest const& digest, gsl::not_null const& import_to_git_map, @@ -86,7 +103,8 @@ void MoveCASTreeToGitAndProcess( TreeIdGitMap::SetterPtr const& setter, TreeIdGitMap::LoggerPtr const& logger) { // Move tree from CAS to local Git storage - auto tmp_dir = storage_config->CreateTypedTmpDir("fetch-remote-git-tree"); + auto tmp_dir = + native_storage_config->CreateTypedTmpDir("fetch-remote-git-tree"); if (not tmp_dir) { (*logger)(fmt::format("Failed to create tmp directory for copying " "git-tree {} from remote CAS", @@ -108,10 +126,13 @@ void MoveCASTreeToGitAndProcess( ts, {std::move(c_info)}, [&serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, tmp_dir, // keep tmp_dir alive tree_id, digest, + local_api, remote_api, ignore_special, setter, @@ -124,9 +145,12 @@ void MoveCASTreeToGitAndProcess( // upload tree from Git cache to remote CAS and tell serve to set up // the root from the remote CAS tree; set root as absent on success UploadToServeAndSetRoot(serve, - *storage_config, + native_storage_config, + compat_storage_config, + compat_storage, tree_id, digest, + local_api, *remote_api, ignore_special, setter, @@ -150,7 +174,9 @@ auto CreateTreeIdGitMap( gsl::not_null const& import_to_git_map, bool fetch_absent, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, gsl::not_null const& local_api, IExecutionApi const* remote_api, std::size_t jobs) -> TreeIdGitMap { @@ -159,7 +185,9 @@ auto CreateTreeIdGitMap( import_to_git_map, fetch_absent, serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, local_api, 
remote_api](auto ts, auto setter, @@ -173,8 +201,8 @@ auto CreateTreeIdGitMap( if (key.absent and not fetch_absent) { if (serve != nullptr) { // check serve endpoint - auto has_tree = - CheckServeHasAbsentRoot(*serve, key.tree_info.hash, logger); + auto has_tree = CheckServeHasAbsentRoot( + *serve, key.tree_info.tree_hash.Hash(), logger); if (not has_tree) { return; } @@ -184,7 +212,7 @@ auto CreateTreeIdGitMap( {key.ignore_special ? FileRoot::kGitTreeIgnoreSpecialMarker : FileRoot::kGitTreeMarker, - key.tree_info.hash}); + key.tree_info.tree_hash.Hash()}); (*setter)( std::pair(std::move(root), /*is_cache_hit=*/false)); return; @@ -194,28 +222,32 @@ auto CreateTreeIdGitMap( (*logger)( fmt::format("Cannot create workspace root {} as absent " "for the provided serve endpoint.", - key.tree_info.hash), + key.tree_info.tree_hash.Hash()), /*fatal=*/true); return; } // check if tree in already in remote CAS - auto digest = - ArtifactDigest{key.tree_info.hash, 0, /*is_tree=*/true}; + auto const digest = ArtifactDigest{key.tree_info.tree_hash, 0}; if (remote_api->IsAvailable({digest})) { // tell serve to set up the root from the remote CAS tree; // upload can be skipped - if (EnsureAbsentRootOnServe(*serve, - key.tree_info.hash, - /*repo_path=*/"", - /*remote_api=*/nullptr, - logger, - /*no_sync_is_fatal=*/true)) { + if (EnsureAbsentRootOnServe( + *serve, + key.tree_info.tree_hash.Hash(), + /*repo_path=*/"", + native_storage_config, + /*compat_storage_config=*/nullptr, + /*compat_storage=*/nullptr, + /*local_api=*/nullptr, + /*remote_api=*/nullptr, + logger, + /*no_sync_is_fatal=*/true)) { // set workspace root as absent auto root = nlohmann::json::array( {key.ignore_special ? 
FileRoot::kGitTreeIgnoreSpecialMarker : FileRoot::kGitTreeMarker, - key.tree_info.hash}); + key.tree_info.tree_hash.Hash()}); (*setter)( std::pair(std::move(root), /*is_cache_hit=*/false)); return; @@ -223,7 +255,7 @@ auto CreateTreeIdGitMap( (*logger)( fmt::format("Serve endpoint failed to create workspace " "root {} that locally was marked absent.", - key.tree_info.hash), + key.tree_info.tree_hash.Hash()), /*fatal=*/true); return; } @@ -232,18 +264,20 @@ auto CreateTreeIdGitMap( GitOpKey op_key = { .params = { - storage_config->GitRoot(), // target_path - "", // git_hash - std::nullopt, // message - std::nullopt, // source_path - true // init_bare + native_storage_config->GitRoot(), // target_path + "", // git_hash + std::nullopt, // message + std::nullopt, // source_path + true // init_bare }, .op_type = GitOpType::ENSURE_INIT}; critical_git_op_map->ConsumeAfterKeysReady( ts, {std::move(op_key)}, [serve, - storage_config, + native_storage_config, + compat_storage_config, + compat_storage, digest, import_to_git_map, local_api, @@ -265,8 +299,9 @@ auto CreateTreeIdGitMap( op_result.git_cas); // link fake repo to odb if (not git_repo) { (*logger)( - fmt::format("Could not open repository {}", - storage_config->GitRoot().string()), + fmt::format( + "Could not open repository {}", + native_storage_config->GitRoot().string()), /*fatal=*/true); return; } @@ -282,7 +317,7 @@ auto CreateTreeIdGitMap( }); // check if the desired tree ID is in Git cache auto tree_found = git_repo->CheckTreeExists( - key.tree_info.hash, wrapped_logger); + key.tree_info.tree_hash.Hash(), wrapped_logger); if (not tree_found) { // errors encountered return; @@ -291,14 +326,18 @@ auto CreateTreeIdGitMap( // upload tree from Git cache to remote CAS and tell // serve to set up the root from the remote CAS // tree, then set root as absent - UploadToServeAndSetRoot(*serve, - *storage_config, - key.tree_info.hash, - digest, - *remote_api, - key.ignore_special, - setter, - logger); + 
UploadToServeAndSetRoot( + *serve, + native_storage_config, + compat_storage_config, + compat_storage, + key.tree_info.tree_hash.Hash(), + digest, + local_api, + *remote_api, + key.ignore_special, + setter, + logger); // done! return; } @@ -306,17 +345,20 @@ auto CreateTreeIdGitMap( if (local_api->IsAvailable(digest)) { // Move tree locally from CAS to Git cache, then // continue processing it by UploadToServeAndSetRoot - MoveCASTreeToGitAndProcess(*serve, - storage_config, - key.tree_info.hash, - digest, - import_to_git_map, - local_api, - remote_api, - key.ignore_special, - ts, - setter, - logger); + MoveCASTreeToGitAndProcess( + *serve, + native_storage_config, + compat_storage_config, + compat_storage, + key.tree_info.tree_hash.Hash(), + digest, + import_to_git_map, + local_api, + remote_api, + key.ignore_special, + ts, + setter, + logger); // done! return; } @@ -326,10 +368,10 @@ auto CreateTreeIdGitMap( (*logger)(fmt::format("Cannot create workspace root " "{} as absent for the provided " "serve endpoint.", - key.tree_info.hash), + key.tree_info.tree_hash.Hash()), /*fatal=*/true); }, - [logger, target_path = storage_config->GitRoot()]( + [logger, target_path = native_storage_config->GitRoot()]( auto const& msg, bool fatal) { (*logger)( fmt::format("While running critical Git op " @@ -344,13 +386,13 @@ auto CreateTreeIdGitMap( // give warning that serve endpoint is missing (*logger)(fmt::format("Workspace root {} marked absent but no " "suitable serve endpoint provided.", - key.tree_info.hash), + key.tree_info.tree_hash.Hash()), /*fatal=*/false); // set workspace root as absent auto root = nlohmann::json::array( {key.ignore_special ? 
FileRoot::kGitTreeIgnoreSpecialMarker : FileRoot::kGitTreeMarker, - key.tree_info.hash}); + key.tree_info.tree_hash.Hash()}); (*setter)(std::pair(std::move(root), false)); return; } @@ -360,7 +402,7 @@ auto CreateTreeIdGitMap( git_tree_fetch_map->ConsumeAfterKeysReady( ts, {key.tree_info}, - [storage_config, key, setter](auto const& values) { + [native_storage_config, key, setter](auto const& values) { // tree is now in Git cache; // get cache hit info auto is_cache_hit = *values[0]; @@ -370,15 +412,15 @@ auto CreateTreeIdGitMap( {key.ignore_special ? FileRoot::kGitTreeIgnoreSpecialMarker : FileRoot::kGitTreeMarker, - key.tree_info.hash, - storage_config->GitRoot().string()}), + key.tree_info.tree_hash.Hash(), + native_storage_config->GitRoot().string()}), is_cache_hit)); }, - [logger, tree_id = key.tree_info.hash](auto const& msg, - bool fatal) { + [logger, hash = key.tree_info.tree_hash.Hash()](auto const& msg, + bool fatal) { (*logger)(fmt::format( "While ensuring git-tree {} is in Git cache:\n{}", - tree_id, + hash, msg), fatal); }); diff --git a/src/other_tools/root_maps/tree_id_git_map.hpp b/src/other_tools/root_maps/tree_id_git_map.hpp index 4276d1433..db7dfd68c 100644 --- a/src/other_tools/root_maps/tree_id_git_map.hpp +++ b/src/other_tools/root_maps/tree_id_git_map.hpp @@ -26,13 +26,14 @@ #include "src/buildtool/execution_api/common/execution_api.hpp" #include "src/buildtool/serve_api/remote/serve_api.hpp" #include "src/buildtool/storage/config.hpp" +#include "src/buildtool/storage/storage.hpp" #include "src/other_tools/ops_maps/critical_git_op_map.hpp" #include "src/other_tools/ops_maps/git_tree_fetch_map.hpp" #include "src/other_tools/ops_maps/import_to_git_map.hpp" #include "src/utils/cpp/hash_combine.hpp" struct TreeIdInfo { - GitTreeInfo tree_info{}; /* key */ + GitTreeInfo tree_info; /* key */ // create root that ignores symlinks bool ignore_special{}; /* key */ // create an absent root @@ -71,7 +72,9 @@ using TreeIdGitMap = gsl::not_null const& 
import_to_git_map, bool fetch_absent, ServeApi const* serve, - gsl::not_null const& storage_config, + gsl::not_null const& native_storage_config, + StorageConfig const* compat_storage_config, + Storage const* compat_storage, gsl::not_null const& local_api, IExecutionApi const* remote_api, std::size_t jobs) -> TreeIdGitMap; diff --git a/src/other_tools/utils/TARGETS b/src/other_tools/utils/TARGETS index 27cbb93d9..4d44c9e5a 100644 --- a/src/other_tools/utils/TARGETS +++ b/src/other_tools/utils/TARGETS @@ -5,9 +5,9 @@ , "srcs": ["curl_context.cpp"] , "stage": ["src", "other_tools", "utils"] , "private-deps": - [ ["src/buildtool/logging", "log_level"] + [ ["", "libcurl"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["", "libcurl"] ] } , "curl_easy_handle": @@ -19,9 +19,9 @@ , "stage": ["src", "other_tools", "utils"] , "private-deps": [ ["@", "gsl", "", "gsl"] + , ["", "libcurl"] , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/logging", "logging"] - , ["", "libcurl"] ] } , "curl_url_handle": @@ -32,9 +32,9 @@ , "deps": ["curl_context", ["@", "gsl", "", "gsl"]] , "stage": ["src", "other_tools", "utils"] , "private-deps": - [ ["src/buildtool/logging", "log_level"] + [ ["", "libcurl"] + , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["", "libcurl"] ] } , "content": @@ -60,8 +60,24 @@ , "deps": [ ["src/buildtool/build_engine/expression", "expression"] , ["src/other_tools/ops_maps", "content_cas_map"] + , ["src/utils/cpp", "expected"] + ] + , "private-deps": + [["@", "fmt", "", "fmt"], ["src/buildtool/crypto", "hash_info"]] + , "stage": ["src", "other_tools", "utils"] + } +, "parse_git_tree": + { "type": ["@", "rules", "CC", "library"] + , "name": ["parse_git_tree"] + , "hdrs": ["parse_git_tree.hpp"] + , "srcs": ["parse_git_tree.cpp"] + , "deps": + [ ["src/buildtool/build_engine/expression", "expression"] + , ["src/other_tools/ops_maps", "git_tree_fetch_map"] + , 
["src/utils/cpp", "expected"] ] - , "private-deps": [["@", "fmt", "", "fmt"]] + , "private-deps": + [["@", "fmt", "", "fmt"], ["src/buildtool/crypto", "hash_info"]] , "stage": ["src", "other_tools", "utils"] } } diff --git a/src/other_tools/utils/content.hpp b/src/other_tools/utils/content.hpp index eb6aca76b..15268dafd 100644 --- a/src/other_tools/utils/content.hpp +++ b/src/other_tools/utils/content.hpp @@ -86,10 +86,10 @@ return *data; } -template +template [[nodiscard]] static auto GetContentHash(std::string const& data) noexcept -> std::string { - auto hasher = Hasher::Create(type); + auto hasher = Hasher::Create(kType); hasher->Update(data); auto digest = std::move(*hasher).Finalize(); return digest.HexString(); diff --git a/src/other_tools/utils/curl_context.cpp b/src/other_tools/utils/curl_context.cpp index 44d8048dc..7006d7271 100644 --- a/src/other_tools/utils/curl_context.cpp +++ b/src/other_tools/utils/curl_context.cpp @@ -21,9 +21,9 @@ extern "C" { #include } -CurlContext::CurlContext() noexcept { - // NOLINTNEXTLINE(hicpp-signed-bitwise) - if (not(initialized_ = (curl_global_init(CURL_GLOBAL_DEFAULT) >= 0))) { +CurlContext::CurlContext() noexcept + : initialized_{curl_global_init(CURL_GLOBAL_DEFAULT) >= 0} { + if (not initialized_) { Logger::Log(LogLevel::Error, "initializing libcurl failed"); } } diff --git a/src/other_tools/utils/curl_easy_handle.cpp b/src/other_tools/utils/curl_easy_handle.cpp index 2fcfb877a..43bdfcedf 100644 --- a/src/other_tools/utils/curl_easy_handle.cpp +++ b/src/other_tools/utils/curl_easy_handle.cpp @@ -37,7 +37,11 @@ auto read_stream_data(gsl::not_null const& stream) noexcept // obtain stream size std::fseek(stream, 0, SEEK_END); auto size = std::ftell(stream); - std::rewind(stream); + auto pos = std::fseek(stream, 0, SEEK_SET); + if (pos != 0) { + Logger::Log(LogLevel::Warning, + "Rewinding temporary file for curl log failed."); + } // create string buffer to hold stream content std::string content(static_cast(size), 
'\0'); diff --git a/src/other_tools/utils/curl_easy_handle.hpp b/src/other_tools/utils/curl_easy_handle.hpp index 81bcbed86..a3dd6e06a 100644 --- a/src/other_tools/utils/curl_easy_handle.hpp +++ b/src/other_tools/utils/curl_easy_handle.hpp @@ -72,7 +72,7 @@ class CurlEasyHandle { private: // IMPORTANT: the CurlContext must to be initialized before any curl object! - CurlContext curl_context_{}; + CurlContext curl_context_; std::unique_ptr handle_{ nullptr, curl_easy_closer}; diff --git a/src/other_tools/utils/curl_url_handle.hpp b/src/other_tools/utils/curl_url_handle.hpp index 476277f3f..6a7a7917a 100644 --- a/src/other_tools/utils/curl_url_handle.hpp +++ b/src/other_tools/utils/curl_url_handle.hpp @@ -70,7 +70,7 @@ struct ConfigKeyMatchDegree { struct NoproxyPattern { // stores the substrings of the host portion of the pattern, obtained by // splitting with delimiter '.' - std::vector host_tokens{}; + std::vector host_tokens; // port number as string, or nullopt if port missing std::optional port; }; @@ -163,7 +163,7 @@ class CurlURLHandle { private: // IMPORTANT: the CurlContext must be initialized before any curl // object! 
- CurlContext curl_context_{}; + CurlContext curl_context_; std::unique_ptr handle_{nullptr, curl_url_closer}; diff --git a/src/other_tools/utils/parse_archive.cpp b/src/other_tools/utils/parse_archive.cpp index 1aebe2399..fb03309a9 100644 --- a/src/other_tools/utils/parse_archive.cpp +++ b/src/other_tools/utils/parse_archive.cpp @@ -17,39 +17,42 @@ #include // std::move #include "fmt/core.h" +#include "src/buildtool/crypto/hash_info.hpp" -namespace { auto ParseArchiveContent(ExpressionPtr const& repo_desc, - std::string const& origin, - const AsyncMapConsumerLoggerPtr& logger) - -> std::optional { - + std::string const& origin) + -> expected { // enforce mandatory fields auto repo_desc_content = repo_desc->At("content"); if (not repo_desc_content) { - (*logger)("ArchiveCheckout: Mandatory field \"content\" is missing", - /*fatal=*/true); - return std::nullopt; + return unexpected{ + "Mandatory field \"content\" is missing"}; } if (not repo_desc_content->get()->IsString()) { - (*logger)(fmt::format("ArchiveCheckout: Unsupported value {} for " - "mandatory field \"content\"", - repo_desc_content->get()->ToString()), - /*fatal=*/true); - return std::nullopt; + return unexpected{ + fmt::format("Unsupported value {} for mandatory field \"content\"", + repo_desc_content->get()->ToString())}; + } + + auto const repo_desc_hash_info = + HashInfo::Create(HashFunction::Type::GitSHA1, + repo_desc_content->get()->String(), + /*is_tree=*/false); + if (not repo_desc_hash_info) { + return unexpected{fmt::format( + "Unsupported value {} for mandatory field \"content\"\n{}", + repo_desc_content->get()->ToString(), + repo_desc_hash_info.error())}; } + auto repo_desc_fetch = repo_desc->At("fetch"); if (not repo_desc_fetch) { - (*logger)("ArchiveCheckout: Mandatory field \"fetch\" is missing", - /*fatal=*/true); - return std::nullopt; + return unexpected{"Mandatory field \"fetch\" is missing"}; } if (not repo_desc_fetch->get()->IsString()) { - (*logger)(fmt::format("ArchiveCheckout: 
Unsupported value {} for " - "mandatory field \"fetch\"", - repo_desc_fetch->get()->ToString()), - /*fatal=*/true); - return std::nullopt; + return unexpected{ + fmt::format("Unsupported value {} for mandatory field \"fetch\"", + repo_desc_fetch->get()->ToString())}; } auto repo_desc_distfile = repo_desc->Get("distfile", Expression::none_t{}); auto repo_desc_sha256 = repo_desc->Get("sha256", Expression::none_t{}); @@ -61,25 +64,22 @@ auto ParseArchiveContent(ExpressionPtr const& repo_desc, mirrors.reserve(repo_desc_mirrors->List().size()); for (auto const& elem : repo_desc_mirrors->List()) { if (not elem->IsString()) { - (*logger)(fmt::format("ArchiveCheckout: Unsupported list entry " - "{} in optional field \"mirrors\"", - elem->ToString()), - /*fatal=*/true); - return std::nullopt; + return unexpected{fmt::format( + "Unsupported list entry {} in optional field \"mirrors\"", + elem->ToString())}; } mirrors.emplace_back(elem->String()); } } else { - (*logger)(fmt::format("ArchiveCheckout: Optional field \"mirrors\" " - "should be a list of strings, but found: {}", - repo_desc_mirrors->ToString()), - /*fatal=*/true); - return std::nullopt; + return unexpected{ + fmt::format("Optional field \"mirrors\" should be a list of " + "strings, but found: {}", + repo_desc_mirrors->ToString())}; } return ArchiveContent{ - .content = repo_desc_content->get()->String(), + .content_hash = *repo_desc_hash_info, .distfile = repo_desc_distfile->IsString() ? 
std::make_optional(repo_desc_distfile->String()) : std::nullopt, @@ -104,15 +104,15 @@ auto IsValidFileName(const std::string& s) -> bool { return true; } -} // namespace - auto ParseArchiveDescription(ExpressionPtr const& repo_desc, std::string const& repo_type, std::string const& origin, const AsyncMapConsumerLoggerPtr& logger) -> std::optional { - auto archive_content = ParseArchiveContent(repo_desc, origin, logger); + auto const archive_content = ParseArchiveContent(repo_desc, origin); if (not archive_content) { + (*logger)(fmt::format("ArchiveCheckout: {}", archive_content.error()), + /*fatal=*/true); return std::nullopt; } // additional mandatory fields @@ -151,8 +151,9 @@ auto ParseForeignFileDescription(ExpressionPtr const& repo_desc, std::string const& origin, const AsyncMapConsumerLoggerPtr& logger) -> std::optional { - auto archive_content = ParseArchiveContent(repo_desc, origin, logger); + auto const archive_content = ParseArchiveContent(repo_desc, origin); if (not archive_content) { + (*logger)(archive_content.error(), /*fatal=*/true); return std::nullopt; } auto name = repo_desc->At("name"); diff --git a/src/other_tools/utils/parse_archive.hpp b/src/other_tools/utils/parse_archive.hpp index 6d356dd4b..198bfa48e 100644 --- a/src/other_tools/utils/parse_archive.hpp +++ b/src/other_tools/utils/parse_archive.hpp @@ -20,6 +20,11 @@ #include "src/buildtool/build_engine/expression/expression.hpp" #include "src/other_tools/ops_maps/content_cas_map.hpp" +#include "src/utils/cpp/expected.hpp" + +auto ParseArchiveContent(ExpressionPtr const& repo_desc, + std::string const& origin) + -> expected; // Parse the description of an archive repository; if an error // occurs, call the logger with fatal set to true and return std::nullopt @@ -38,4 +43,4 @@ auto ParseForeignFileDescription(ExpressionPtr const& repo_desc, const AsyncMapConsumerLoggerPtr& logger) -> std::optional; -#endif +#endif // INCLUDED_SRC_OTHER_TOOLS_UTILS_PARSE_ARCHIVE_HPP diff --git 
a/src/other_tools/utils/parse_git_tree.cpp b/src/other_tools/utils/parse_git_tree.cpp new file mode 100644 index 000000000..0e58676db --- /dev/null +++ b/src/other_tools/utils/parse_git_tree.cpp @@ -0,0 +1,113 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "src/other_tools/utils/parse_git_tree.hpp" + +#include +#include // std::move +#include + +#include "fmt/core.h" +#include "src/buildtool/crypto/hash_info.hpp" + +[[nodiscard]] auto ParseGitTree(ExpressionPtr const& repo_desc, + std::optional origin) + -> expected { + auto repo_desc_hash = repo_desc->At("id"); + if (not repo_desc_hash) { + return unexpected{"Mandatory field \"id\" is missing"}; + } + if (not repo_desc_hash->get()->IsString()) { + return unexpected{ + fmt::format("Unsupported value {} for " + "mandatory field \"id\"", + repo_desc_hash->get()->ToString())}; + } + + auto repo_desc_hash_info = HashInfo::Create(HashFunction::Type::GitSHA1, + repo_desc_hash->get()->String(), + /*is_tree=*/true); + if (not repo_desc_hash_info) { + return unexpected{ + fmt::format("Unsupported value {} for " + "mandatory field \"id\"\n{}", + repo_desc_hash->get()->ToString(), + std::move(repo_desc_hash_info).error())}; + } + + auto repo_desc_cmd = repo_desc->At("cmd"); + if (not repo_desc_cmd) { + return unexpected{"Mandatory field \"cmd\" is missing"}; + } + if (not repo_desc_cmd->get()->IsList()) { + return unexpected{ + 
fmt::format("Unsupported value {} for " + "mandatory field \"cmd\"", + repo_desc_cmd->get()->ToString())}; + } + std::vector cmd{}; + for (auto const& token : repo_desc_cmd->get()->List()) { + if (token.IsNotNull() and token->IsString()) { + cmd.emplace_back(token->String()); + } + else { + return unexpected{ + fmt::format("Unsupported entry {} " + "in mandatory field \"cmd\"", + token->ToString())}; + } + } + std::map env{}; + auto repo_desc_env = repo_desc->Get("env", Expression::none_t{}); + if (repo_desc_env.IsNotNull() and repo_desc_env->IsMap()) { + for (auto const& envar : repo_desc_env->Map().Items()) { + if (envar.second.IsNotNull() and envar.second->IsString()) { + env.insert({envar.first, envar.second->String()}); + } + else { + return unexpected{ + fmt::format("Unsupported value {} for " + "key {} in optional field \"envs\"", + envar.second->ToString(), + nlohmann::json(envar.first).dump())}; + } + } + } + std::vector inherit_env{}; + auto repo_desc_inherit_env = + repo_desc->Get("inherit env", Expression::none_t{}); + if (repo_desc_inherit_env.IsNotNull() and repo_desc_inherit_env->IsList()) { + for (auto const& envvar : repo_desc_inherit_env->List()) { + if (envvar->IsString()) { + inherit_env.emplace_back(envvar->String()); + } + else { + return unexpected{ + fmt::format("Not a variable " + "name in the specification " + "of \"inherit env\": {}", + envvar->ToString())}; + } + } + } + // populate struct + auto info = GitTreeInfo{.tree_hash = *std::move(repo_desc_hash_info), + .env_vars = std::move(env), + .inherit_env = std::move(inherit_env), + .command = std::move(cmd)}; + if (origin) { + info.origin = *std::move(origin); + } + return info; +} diff --git a/src/other_tools/utils/parse_git_tree.hpp b/src/other_tools/utils/parse_git_tree.hpp new file mode 100644 index 000000000..5aab1a5b6 --- /dev/null +++ b/src/other_tools/utils/parse_git_tree.hpp @@ -0,0 +1,30 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef INCLUDED_SRC_OTHER_TOOLS_UTILS_PARSE_GIT_TREE_HPP +#define INCLUDED_SRC_OTHER_TOOLS_UTILS_PARSE_GIT_TREE_HPP + +#include +#include + +#include "src/buildtool/build_engine/expression/expression.hpp" +#include "src/other_tools/ops_maps/git_tree_fetch_map.hpp" +#include "src/utils/cpp/expected.hpp" + +[[nodiscard]] auto ParseGitTree( + ExpressionPtr const& repo_desc, + std::optional origin = std::nullopt) + -> expected; + +#endif // INCLUDED_SRC_OTHER_TOOLS_UTILS_PARSE_GIT_TREE_HPP diff --git a/src/utils/archive/TARGETS b/src/utils/archive/TARGETS index bed5d3a0f..25fdf8977 100644 --- a/src/utils/archive/TARGETS +++ b/src/utils/archive/TARGETS @@ -6,10 +6,10 @@ , "deps": [["@", "gsl", "", "gsl"]] , "stage": ["src", "utils", "archive"] , "private-deps": - [ ["src/buildtool/file_system", "file_system_manager"] + [ ["", "libarchive"] + , ["src/buildtool/file_system", "file_system_manager"] , ["src/buildtool/logging", "log_level"] , ["src/buildtool/logging", "logging"] - , ["", "libarchive"] ] } } diff --git a/src/utils/archive/archive_ops.cpp b/src/utils/archive/archive_ops.cpp index 8e70c8144..14bbd648e 100644 --- a/src/utils/archive/archive_ops.cpp +++ b/src/utils/archive/archive_ops.cpp @@ -255,7 +255,7 @@ auto ArchiveOps::CreateArchive(ArchiveType type, // enable the correct format for archive type auto res = EnableWriteFormats(a_out.get(), type); if (res != std::nullopt) { - return *res; + return 
res; } // open archive to write if (not FileSystemManager::CreateDirectory(destDir)) { @@ -307,7 +307,7 @@ auto ArchiveOps::CreateArchive(ArchiveType type, // write entry into archive auto res = WriteEntry(entry.get(), a_out.get()); if (res != std::nullopt) { - return *res; + return res; } } } catch (std::exception const& ex) { @@ -343,7 +343,7 @@ auto ArchiveOps::ExtractArchive(ArchiveType type, // enable support for known formats auto res = EnableReadFormats(a_in.get(), type); if (res != std::nullopt) { - return *res; + return res; } // open archive for reading if (archive_read_open_filename( @@ -392,7 +392,7 @@ auto ArchiveOps::ExtractArchive(ArchiveType type, if (archive_entry_size(entry) > 0) { auto res = CopyData(a_in.get(), disk.get()); if (res != std::nullopt) { - return *res; + return res; } } // finish entry writing diff --git a/src/utils/archive/archive_ops.hpp b/src/utils/archive/archive_ops.hpp index 89151790f..972b65b6a 100644 --- a/src/utils/archive/archive_ops.hpp +++ b/src/utils/archive/archive_ops.hpp @@ -16,6 +16,7 @@ #define INCLUDED_SRC_UTILS_ARCHIVE_ARCHIVE_OPS_HPP #include +#include #include #include @@ -26,7 +27,7 @@ using archive = struct archive; using archive_entry = struct archive_entry; } -enum class ArchiveType : std::size_t { +enum class ArchiveType : std::uint8_t { Zip, _7Zip, ZipAuto, // autodetect zip-like archives diff --git a/src/utils/automata/TARGETS b/src/utils/automata/TARGETS index 29f43a92b..61122df5d 100644 --- a/src/utils/automata/TARGETS +++ b/src/utils/automata/TARGETS @@ -6,8 +6,8 @@ , "deps": [ ["@", "gsl", "", "gsl"] , ["@", "json", "", "json"] - , ["src/utils/cpp", "hex_string"] , ["src/utils/cpp", "hash_combine"] + , ["src/utils/cpp", "hex_string"] ] } } diff --git a/src/utils/automata/dfa_minimizer.hpp b/src/utils/automata/dfa_minimizer.hpp index 950635ac0..69bc0b52c 100644 --- a/src/utils/automata/dfa_minimizer.hpp +++ b/src/utils/automata/dfa_minimizer.hpp @@ -44,14 +44,14 @@ class DFAMinimizer { // Bucket of 
states with equal local properties (content and acceptance) struct Bucket { - std::vector symbols{}; - states_t states{}; + std::vector symbols; + states_t states; }; // Key used for state pairs. Reordering names will result in the same key. class StatePairKey { public: - struct hash_t { + struct Hash { [[nodiscard]] auto operator()(StatePairKey const& p) const -> std::size_t { std::size_t hash{}; @@ -85,13 +85,13 @@ class DFAMinimizer { // Value of state pairs. struct StatePairValue { // Parent pairs depending on this pair's distinguishability - std::vector parents{}; + std::vector parents; // Distinguishability flag (true means distinguishable) bool marked{}; }; using state_pairs_t = - std::unordered_map; + std::unordered_map; public: using bisimulation_t = std::unordered_map; @@ -170,8 +170,8 @@ class DFAMinimizer { } private: - std::unordered_map buckets_{}; - std::unordered_map buckets_by_state_{}; + std::unordered_map buckets_; + std::unordered_map buckets_by_state_; template [[nodiscard]] static auto GetKeys(M const& map) -> std::vector { @@ -196,7 +196,6 @@ class DFAMinimizer { } // Mark pair as distinguishable and recursively mark all parents. 
- // NOLINTNEXTLINE(misc-no-recursion) static void MarkPairValue(gsl::not_null const& data) { data->marked = true; for (auto* parent : data->parents) { diff --git a/src/utils/cpp/TARGETS b/src/utils/cpp/TARGETS index fda8f473d..f32a34b0f 100644 --- a/src/utils/cpp/TARGETS +++ b/src/utils/cpp/TARGETS @@ -18,9 +18,9 @@ , "hdrs": ["json.hpp"] , "deps": [ "gsl" - , ["@", "json", "", "json"] - , ["@", "gsl", "", "gsl"] , ["@", "fmt", "", "fmt"] + , ["@", "gsl", "", "gsl"] + , ["@", "json", "", "json"] ] , "stage": ["src", "utils", "cpp"] } @@ -99,13 +99,6 @@ , "hdrs": ["path_hash.hpp"] , "stage": ["src", "utils", "cpp"] } -, "verify_hash": - { "type": ["@", "rules", "CC", "library"] - , "name": ["verify_hash"] - , "hdrs": ["verify_hash.hpp"] - , "deps": [["@", "fmt", "", "fmt"]] - , "stage": ["src", "utils", "cpp"] - } , "transformed_range": { "type": ["@", "rules", "CC", "library"] , "name": ["transformed_range"] diff --git a/src/utils/cpp/atomic.hpp b/src/utils/cpp/atomic.hpp index 4ef7e40c9..94bda75fb 100644 --- a/src/utils/cpp/atomic.hpp +++ b/src/utils/cpp/atomic.hpp @@ -18,6 +18,7 @@ #include #include #include +#include #include // std::move // Atomic wrapper with notify/wait capabilities. @@ -25,7 +26,7 @@ // libcxx adds support for notify_*() and wait(). 
// [https://libcxx.llvm.org/docs/Cxx2aStatus.html] template -class atomic { +class atomic { // NOLINT(readability-identifier-naming) public: atomic() = default; explicit atomic(T value) : value_{std::move(value)} {} @@ -51,22 +52,26 @@ class atomic { return value_.load(order); } - template >> + template + requires(std::is_integral_v) auto operator++() -> T { std::shared_lock lock(mutex_); return ++value_; } - template >> + template + requires(std::is_integral_v) [[nodiscard]] auto operator++(int) -> T { std::shared_lock lock(mutex_); return value_++; } - template >> + template + requires(std::is_integral_v) auto operator--() -> T { std::shared_lock lock(mutex_); return --value_; } - template >> + template + requires(std::is_integral_v) [[nodiscard]] auto operator--(int) -> T { std::shared_lock lock(mutex_); return value_--; @@ -83,8 +88,8 @@ class atomic { private: std::atomic value_{}; - mutable std::shared_mutex mutex_{}; - mutable std::condition_variable_any cv_{}; + mutable std::shared_mutex mutex_; + mutable std::condition_variable_any cv_; }; // Atomic shared_pointer with notify/wait capabilities. @@ -92,7 +97,7 @@ class atomic { // std::atomic>, once libcxx adds support for it. 
// [https://libcxx.llvm.org/docs/Cxx2aStatus.html] template -class atomic_shared_ptr { +class atomic_shared_ptr { // NOLINT(readability-identifier-naming) using ptr_t = std::shared_ptr; public: @@ -129,8 +134,8 @@ class atomic_shared_ptr { private: ptr_t value_{}; - mutable std::shared_mutex mutex_{}; - mutable std::condition_variable_any cv_{}; + mutable std::shared_mutex mutex_; + mutable std::condition_variable_any cv_; }; #endif // INCLUDED_SRC_UTILS_CPP_ATOMIC_HPP diff --git a/src/utils/cpp/expected.hpp b/src/utils/cpp/expected.hpp index 823c4650d..da3955b72 100644 --- a/src/utils/cpp/expected.hpp +++ b/src/utils/cpp/expected.hpp @@ -20,7 +20,7 @@ // TODO(modernize): replace this by std::unexpected once we switched to C++23 template -class unexpected { +class unexpected { // NOLINT(readability-identifier-naming) public: explicit unexpected(E error) : error_{std::move(error)} {} [[nodiscard]] auto error() && -> E { return std::move(error_); } @@ -31,7 +31,7 @@ class unexpected { // TODO(modernize): replace this by std::expected once we switched to C++23 template -class expected { +class expected { // NOLINT(readability-identifier-naming) public: expected(T value) noexcept // NOLINT : value_{std::in_place_index<0>, std::move(value)} {} diff --git a/src/utils/cpp/file_locking.hpp b/src/utils/cpp/file_locking.hpp index f8ac14438..abcca121b 100644 --- a/src/utils/cpp/file_locking.hpp +++ b/src/utils/cpp/file_locking.hpp @@ -50,7 +50,7 @@ class LockFile { private: gsl::owner file_handle_{nullptr}; - std::filesystem::path lock_file_{}; + std::filesystem::path lock_file_; /// \brief Private ctor. Instances are only created by Acquire method. 
explicit LockFile(gsl::owner file_handle, diff --git a/src/utils/cpp/hash_combine.hpp b/src/utils/cpp/hash_combine.hpp index 78fed4179..5a63abfa2 100644 --- a/src/utils/cpp/hash_combine.hpp +++ b/src/utils/cpp/hash_combine.hpp @@ -28,4 +28,4 @@ inline auto hash_combine(gsl::not_null const& seed, std::hash{}(v) + 0x9e3779b9 + (*seed << 6) + (*seed >> 2); // NOLINT } -#endif +#endif // INCLUDED_SRC_UTILS_CPP_HASH_COMBINE_HPP diff --git a/src/utils/cpp/hex_string.hpp b/src/utils/cpp/hex_string.hpp index 23544d49b..5cca094a9 100644 --- a/src/utils/cpp/hex_string.hpp +++ b/src/utils/cpp/hex_string.hpp @@ -15,6 +15,8 @@ #ifndef INCLUDED_SRC_UTILS_CPP_HEX_STRING_HPP #define INCLUDED_SRC_UTILS_CPP_HEX_STRING_HPP +#include +#include #include #include #include @@ -22,6 +24,12 @@ #include #include +[[nodiscard]] static inline auto IsHexString(std::string const& s) noexcept + -> bool { + return std::all_of( + s.begin(), s.end(), [](unsigned char c) { return std::isxdigit(c); }); +} + [[nodiscard]] static inline auto ToHexString(std::string const& bytes) -> std::string { std::ostringstream ss{}; @@ -36,7 +44,7 @@ [[nodiscard]] static inline auto FromHexString(std::string const& hexstring) -> std::optional { try { - const std::size_t kHexBase = 16; + static constexpr std::size_t kHexBase = 16; std::stringstream ss{}; for (std::size_t i = 0; i < hexstring.length(); i += 2) { unsigned char c = diff --git a/src/utils/cpp/json.hpp b/src/utils/cpp/json.hpp index 1fe4df3fe..589a5884b 100644 --- a/src/utils/cpp/json.hpp +++ b/src/utils/cpp/json.hpp @@ -50,7 +50,6 @@ auto ExtractValueAs( namespace detail { -// NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] static inline auto IndentListsOnlyUntilDepth( nlohmann::json const& json, std::string const& indent, @@ -89,7 +88,6 @@ namespace detail { return json.dump(); } -// NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] static inline auto IndentOnlyUntilDepth( nlohmann::json const& json, std::string const& indent, @@ -164,7 +162,6 @@ 
namespace detail { } // \brief Dump json, replacing subexpressions at the given depths by "*". -// NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] static inline auto TruncateJson(nlohmann::json const& json, std::size_t depth) -> std::string { diff --git a/src/utils/cpp/path.hpp b/src/utils/cpp/path.hpp index f9b937a16..77ea647c5 100644 --- a/src/utils/cpp/path.hpp +++ b/src/utils/cpp/path.hpp @@ -59,4 +59,4 @@ return PathIsNonUpwards(applied_to.parent_path() / path); } -#endif +#endif // INCLUDED_SRC_UTILS_CPP_PATH_HPP diff --git a/src/utils/cpp/path_rebase.hpp b/src/utils/cpp/path_rebase.hpp index 25b997241..e98baf5b2 100644 --- a/src/utils/cpp/path_rebase.hpp +++ b/src/utils/cpp/path_rebase.hpp @@ -36,4 +36,4 @@ return result; } -#endif +#endif // INCLUDED_SRC_UTILS_CPP_PATH_REBASE_HPP diff --git a/src/utils/cpp/prefix.hpp b/src/utils/cpp/prefix.hpp index 604e82262..5574c01a0 100644 --- a/src/utils/cpp/prefix.hpp +++ b/src/utils/cpp/prefix.hpp @@ -30,4 +30,4 @@ return out.str(); } -#endif +#endif // INCLUDED_SRC_UTILS_CPP_PREFIX_HPP diff --git a/src/utils/cpp/tmp_dir.cpp b/src/utils/cpp/tmp_dir.cpp index 11f105594..db41512b9 100644 --- a/src/utils/cpp/tmp_dir.cpp +++ b/src/utils/cpp/tmp_dir.cpp @@ -40,7 +40,7 @@ auto TmpDir::Create(std::filesystem::path const& prefix, // attempt to make the tmp dir // NOLINTNEXTLINE(cppcoreguidelines-pro-type-vararg,hicpp-vararg) - char* tmp_dir = mkdtemp(&c_tmpl[0]); + char* tmp_dir = mkdtemp(c_tmpl.data()); if (tmp_dir == nullptr) { return nullptr; } diff --git a/src/utils/cpp/tmp_dir.hpp b/src/utils/cpp/tmp_dir.hpp index 9188fa601..bff739163 100644 --- a/src/utils/cpp/tmp_dir.hpp +++ b/src/utils/cpp/tmp_dir.hpp @@ -51,7 +51,7 @@ class TmpDir { -> TmpDirPtr; private: - std::filesystem::path tmp_dir_{}; + std::filesystem::path tmp_dir_; }; #endif // INCLUDED_SRC_OTHER_TOOLS_TMP_DIR_HPP diff --git a/src/utils/cpp/transformed_range.hpp b/src/utils/cpp/transformed_range.hpp index 0522313d6..cc3fe5d63 100644 --- 
a/src/utils/cpp/transformed_range.hpp +++ b/src/utils/cpp/transformed_range.hpp @@ -20,6 +20,7 @@ #include #include #include //std::move +#include #include "gsl/gsl" @@ -29,16 +30,16 @@ /// \brief Transform iterable sequence "on the fly" invoking the given /// transformation callback. If the callback throws an exception, /// std::terminate is called. -/// \tparam Iterator Type of the iterator of the sequence to be +/// \tparam TIterator Type of the iterator of the sequence to be /// transformed. /// \tparam Result Type of the transformation result. -template +template class TransformedRange final { public: using converter_t = - std::function; + std::function; - class iterator final { + class Iterator final { public: using value_type = std::remove_reference_t; using pointer = value_type*; @@ -46,9 +47,9 @@ class TransformedRange final { using difference_type = std::ptrdiff_t; using iterator_category = std::forward_iterator_tag; - iterator() noexcept = default; - iterator(Iterator iterator, converter_t c) noexcept - : iterator_(std::move(iterator)), c_(std::move(c)) {} + Iterator() noexcept = default; + Iterator(TIterator Iterator, converter_t c) noexcept + : iterator_(std::move(Iterator)), c_(std::move(c)) {} auto operator*() const noexcept -> decltype(auto) { try { @@ -58,47 +59,59 @@ class TransformedRange final { } } - auto operator++() noexcept -> iterator& { + auto operator++() noexcept -> Iterator& { ++iterator_; return *this; } - [[nodiscard]] friend auto operator==(iterator const& lhs, - iterator const& rhs) noexcept + [[nodiscard]] friend auto operator==(Iterator const& lhs, + Iterator const& rhs) noexcept -> bool { return lhs.iterator_ == rhs.iterator_; } - [[nodiscard]] friend auto operator!=(iterator const& lhs, - iterator const& rhs) noexcept + [[nodiscard]] friend auto operator!=(Iterator const& lhs, + Iterator const& rhs) noexcept -> bool { return not(lhs == rhs); } private: - Iterator iterator_{}; + TIterator iterator_{}; converter_t c_{}; }; - 
TransformedRange(Iterator begin, Iterator end, converter_t c) noexcept + TransformedRange(TIterator begin, TIterator end, converter_t c) noexcept : begin_{std::move(begin), std::move(c)}, end_{std::move(end), nullptr} {} - [[nodiscard]] auto begin() const noexcept -> iterator { return begin_; } - [[nodiscard]] auto end() const noexcept -> iterator { return end_; } + [[nodiscard]] auto begin() const noexcept -> Iterator { return begin_; } + [[nodiscard]] auto end() const noexcept -> Iterator { return end_; } + [[nodiscard]] auto size() const -> typename Iterator::difference_type { + return std::distance(begin_, end_); + } + + [[nodiscard]] auto ToVector() const -> std::vector { + std::vector result; + result.reserve(size()); + for (auto item : *this) { + result.emplace_back(std::move(item)); + } + return result; + } private: - iterator const begin_; - iterator const end_; + Iterator const begin_; + Iterator const end_; }; // User-defined deduction guide to help compiler dealing with generic lambdas // and invokable objects. -template -TransformedRange(Iterator, Iterator, Function) - -> TransformedRange +TransformedRange(TIterator, TIterator, Function) + -> TransformedRange>; #endif // INCLUDED_SRC_OTHER_TOOLS_TRANSFORMED_RANGE_HPP diff --git a/src/utils/cpp/type_safe_arithmetic.hpp b/src/utils/cpp/type_safe_arithmetic.hpp index be2ca4054..2076a5bfc 100644 --- a/src/utils/cpp/type_safe_arithmetic.hpp +++ b/src/utils/cpp/type_safe_arithmetic.hpp @@ -20,17 +20,17 @@ #include "gsl/gsl" -/// \struct type_safe_arithmetic_tag +/// \struct TypeSafeArithmeticTag /// \brief Abstract tag defining types and limits for custom arithmetic types. 
/// Usage example: -/// struct my_type_tag : type_safe_arithmetic_tag {}; -/// using my_type_t = type_safe_arithmetic; +/// struct my_type_tag : TypeSafeArithmeticTag {}; +/// using my_type_t = TypeSafeArithmetic; template ::lowest(), - T MAX_VALUE = std::numeric_limits::max(), - T SMALLEST_VALUE = std::numeric_limits::min()> -struct type_safe_arithmetic_tag { - static_assert(std::is_arithmetic::value, + T kMin = std::numeric_limits::lowest(), + T kMax = std::numeric_limits::max(), + T kSmallest = std::numeric_limits::min()> +struct TypeSafeArithmeticTag { + static_assert(std::is_arithmetic_v, "T must be an arithmetic type (integer or floating-point)"); using value_t = T; @@ -39,17 +39,17 @@ struct type_safe_arithmetic_tag { using pointer_t = T*; using const_pointer_t = T const*; - static constexpr value_t max_value = MAX_VALUE; - static constexpr value_t min_value = MIN_VALUE; - static constexpr value_t smallest_value = SMALLEST_VALUE; + static constexpr value_t kMaxValue = kMax; + static constexpr value_t kMinValue = kMin; + static constexpr value_t kSmallestValue = kSmallest; }; -/// \class type_safe_arithmetic +/// \class TypeSafeArithmetic /// \brief Abstract class for defining custom arithmetic types. 
-/// \tparam TAG The actual \ref type_safe_arithmetic_tag +/// \tparam TAG The actual \ref TypeSafeArithmeticTag template -class type_safe_arithmetic { - typename TAG::value_t m_value{}; +class TypeSafeArithmetic { + typename TAG::value_t value_{}; public: using tag_t = TAG; @@ -59,152 +59,150 @@ class type_safe_arithmetic { using pointer_t = typename tag_t::pointer_t; using const_pointer_t = typename tag_t::const_pointer_t; - static constexpr value_t max_value = tag_t::max_value; - static constexpr value_t min_value = tag_t::min_value; - static constexpr value_t smallest_value = tag_t::smallest_value; + static constexpr value_t kMaxValue = tag_t::kMaxValue; + static constexpr value_t kMinValue = tag_t::kMinValue; + static constexpr value_t kSmallestValue = tag_t::kSmallestValue; - constexpr type_safe_arithmetic() = default; + constexpr TypeSafeArithmetic() = default; // NOLINTNEXTLINE - constexpr /*explicit*/ type_safe_arithmetic(value_t value) { set(value); } + constexpr /*explicit*/ TypeSafeArithmetic(value_t value) { set(value); } - type_safe_arithmetic(type_safe_arithmetic const&) = default; - type_safe_arithmetic(type_safe_arithmetic&&) noexcept = default; - auto operator=(type_safe_arithmetic const&) -> type_safe_arithmetic& = - default; - auto operator=(type_safe_arithmetic&&) noexcept -> type_safe_arithmetic& = - default; - ~type_safe_arithmetic() = default; + TypeSafeArithmetic(TypeSafeArithmetic const&) = default; + TypeSafeArithmetic(TypeSafeArithmetic&&) noexcept = default; + auto operator=(TypeSafeArithmetic const&) -> TypeSafeArithmetic& = default; + auto operator=(TypeSafeArithmetic&&) noexcept -> TypeSafeArithmetic& = + default; + ~TypeSafeArithmetic() = default; - auto operator=(value_t value) -> type_safe_arithmetic& { + auto operator=(value_t value) -> TypeSafeArithmetic& { set(value); return *this; } // NOLINTNEXTLINE - constexpr /*explicit*/ operator value_t() const { return m_value; } + constexpr /*explicit*/ operator value_t() const { return 
value_; } - constexpr auto get() const -> value_t { return m_value; } + constexpr auto get() const -> value_t { return value_; } constexpr void set(value_t value) { - Expects(value >= min_value and value <= max_value and + Expects(value >= kMinValue and value <= kMaxValue and "value output of range"); - m_value = value; + value_ = value; } - auto pointer() const -> const_pointer_t { return &m_value; } + auto pointer() const -> const_pointer_t { return &value_; } }; // template -// bool operator==(type_safe_arithmetic lhs, type_safe_arithmetic rhs) +// bool operator==(TypeSafeArithmetic lhs, TypeSafeArithmetic rhs) // { // return lhs.get() == rhs.get(); // } // // template -// bool operator!=(type_safe_arithmetic lhs, type_safe_arithmetic rhs) +// bool operator!=(TypeSafeArithmetic lhs, TypeSafeArithmetic rhs) // { // return !(lhs == rhs); // } // // template -// bool operator>(type_safe_arithmetic lhs, type_safe_arithmetic rhs) +// bool operator>(TypeSafeArithmetic lhs, TypeSafeArithmetic rhs) // { // return lhs.get() > rhs.get(); // } // // template -// bool operator>=(type_safe_arithmetic lhs, type_safe_arithmetic rhs) +// bool operator>=(TypeSafeArithmetic lhs, TypeSafeArithmetic rhs) // { // return lhs.get() >= rhs.get(); // } // // template -// bool operator<(type_safe_arithmetic lhs, type_safe_arithmetic rhs) +// bool operator<(TypeSafeArithmetic lhs, TypeSafeArithmetic rhs) // { // return lhs.get() < rhs.get(); // } // // template -// bool operator<=(type_safe_arithmetic lhs, type_safe_arithmetic rhs) +// bool operator<=(TypeSafeArithmetic lhs, TypeSafeArithmetic rhs) // { // return lhs.get() <= rhs.get(); // } // // template -// type_safe_arithmetic operator+(type_safe_arithmetic lhs, -// type_safe_arithmetic rhs) { -// return type_safe_arithmetic{lhs.get() + rhs.get()}; +// TypeSafeArithmetic operator+(TypeSafeArithmetic lhs, +// TypeSafeArithmetic rhs) { +// return TypeSafeArithmetic{lhs.get() + rhs.get()}; // } template -auto 
operator+=(type_safe_arithmetic& lhs, - type_safe_arithmetic rhs) -> type_safe_arithmetic& { +auto operator+=(TypeSafeArithmetic& lhs, + TypeSafeArithmetic rhs) -> TypeSafeArithmetic& { lhs.set(lhs.get() + rhs.get()); return lhs; } // template -// type_safe_arithmetic operator-(type_safe_arithmetic lhs, -// type_safe_arithmetic rhs) { -// return type_safe_arithmetic{lhs.get() - rhs.get()}; +// TypeSafeArithmetic operator-(TypeSafeArithmetic lhs, +// TypeSafeArithmetic rhs) { +// return TypeSafeArithmetic{lhs.get() - rhs.get()}; // } // // template -// type_safe_arithmetic& operator-=(type_safe_arithmetic& lhs, -// type_safe_arithmetic rhs) { +// TypeSafeArithmetic& operator-=(TypeSafeArithmetic& lhs, +// TypeSafeArithmetic rhs) { // lhs.set(lhs.get() - rhs.get()); // return lhs; // } // // template -// type_safe_arithmetic operator*(type_safe_arithmetic lhs, +// TypeSafeArithmetic operator*(TypeSafeArithmetic lhs, // typename TAG::value_t rhs) { -// return type_safe_arithmetic{lhs.get() - rhs}; +// return TypeSafeArithmetic{lhs.get() - rhs}; // } // // template -// type_safe_arithmetic& operator*=(type_safe_arithmetic& lhs, +// TypeSafeArithmetic& operator*=(TypeSafeArithmetic& lhs, // typename TAG::value_t rhs) { // lhs.set(lhs.get() * rhs); // return lhs; // } // // template -// type_safe_arithmetic operator/(type_safe_arithmetic lhs, +// TypeSafeArithmetic operator/(TypeSafeArithmetic lhs, // typename TAG::value_t rhs) { -// return type_safe_arithmetic{lhs.get() / rhs}; +// return TypeSafeArithmetic{lhs.get() / rhs}; // } // // template -// type_safe_arithmetic& operator/=(type_safe_arithmetic& lhs, +// TypeSafeArithmetic& operator/=(TypeSafeArithmetic& lhs, // typename TAG::value_t rhs) { // lhs.set(lhs.get() / rhs); // return lhs; // } // // template -// type_safe_arithmetic& operator++(type_safe_arithmetic& a) { -// return a += type_safe_arithmetic{1}; +// TypeSafeArithmetic& operator++(TypeSafeArithmetic& a) { +// return a += TypeSafeArithmetic{1}; // } 
template -auto operator++(type_safe_arithmetic& a, - int) -> type_safe_arithmetic { +auto operator++(TypeSafeArithmetic& a, int) -> TypeSafeArithmetic { auto r = a; - a += type_safe_arithmetic{1}; + a += TypeSafeArithmetic{1}; return r; } // template -// type_safe_arithmetic& operator--(type_safe_arithmetic& a) { -// return a -= type_safe_arithmetic{1}; +// TypeSafeArithmetic& operator--(TypeSafeArithmetic& a) { +// return a -= TypeSafeArithmetic{1}; // } // // template -// type_safe_arithmetic operator--(type_safe_arithmetic& a, int) { +// TypeSafeArithmetic operator--(TypeSafeArithmetic& a, int) { // auto r = a; -// a += type_safe_arithmetic{1}; +// a += TypeSafeArithmetic{1}; // return r; // } diff --git a/src/utils/cpp/vector.hpp b/src/utils/cpp/vector.hpp index ce76f9cfa..e5bcd3da2 100644 --- a/src/utils/cpp/vector.hpp +++ b/src/utils/cpp/vector.hpp @@ -25,4 +25,4 @@ void sort_and_deduplicate(std::vector* x) { auto it = std::unique(x->begin(), x->end()); x->erase(it, x->end()); } -#endif +#endif // INCLUDED_SRC_UTILS_CPP_VECTOR_HPP diff --git a/src/utils/cpp/verify_hash.hpp b/src/utils/cpp/verify_hash.hpp deleted file mode 100644 index f105eb2b7..000000000 --- a/src/utils/cpp/verify_hash.hpp +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2023 Huawei Cloud Computing Technology Co., Ltd. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef INCLUDED_SRC_UTILS_CPP_VERIFY_HASH_HPP -#define INCLUDED_SRC_UTILS_CPP_VERIFY_HASH_HPP -#include -#include -#include -#include - -#include "fmt/core.h" - -/// \brief Check if the passed string \p s is a hash. -/// This function is mainly used to check that the hash of a Digest received -/// over the wire is a real hash, to prevent a malicious attack. -/// \returns Nullopt on success, error message on failure. -[[nodiscard]] static inline auto IsAHash(std::string const& s) noexcept - -> std::optional { - if (not std::all_of(s.begin(), s.end(), [](unsigned char c) { - return std::isxdigit(c); - })) { - return fmt::format("Invalid hash {}", s); - } - return std::nullopt; -} -#endif diff --git a/test/TARGETS b/test/TARGETS index 30878fb0e..3b6602952 100644 --- a/test/TARGETS +++ b/test/TARGETS @@ -37,10 +37,10 @@ , "srcs": ["main.cpp"] , "deps": [ ["@", "catch2", "", "catch2"] - , ["utils", "log_config"] , ["@", "src", "src/buildtool/file_system", "git_context"] , ["@", "src", "src/buildtool/storage", "config"] , ["@", "src", "src/buildtool/storage", "file_chunker"] + , ["utils", "log_config"] , ["utils", "test_env"] ] , "stage": ["test"] diff --git a/test/bootstrap/TARGETS b/test/bootstrap/TARGETS index 2739c40ab..690b20e99 100644 --- a/test/bootstrap/TARGETS +++ b/test/bootstrap/TARGETS @@ -7,9 +7,9 @@ ] , "test": ["test-bootstrap.sh"] , "deps": - [ ["@", "src", "", "bootstrap-src"] + [ "prune-config.py" , ["@", "just-distfiles", "", "distdir"] - , "prune-config.py" + , ["@", "src", "", "bootstrap-src"] ] } , "bundled-test-debug": @@ -60,6 +60,7 @@ , ["@", "protoc", "", "protoc"] , ["@", "protoc", "", "libprotobuf"] , ["@", "grpc", "", "grpc++"] + , ["@", "grpc", "", "grpc"] , ["@", "grpc", "", "grpc_cpp_plugin"] , ["@", "src", "", "libarchive"] ] @@ -107,8 +108,8 @@ [ "bundled-test" , "bundled-test-debug" , "bundled-test-gnu" - , "pkgconfig-test" , "mixed-test" + , "pkgconfig-test" , "symlink-test" ] } diff --git a/test/bootstrap/test-mixed-bootstrap.sh 
b/test/bootstrap/test-mixed-bootstrap.sh index 80a082f32..2eaf37f7c 100755 --- a/test/bootstrap/test-mixed-bootstrap.sh +++ b/test/bootstrap/test-mixed-bootstrap.sh @@ -44,7 +44,7 @@ cp distdir/v4.0.0.tar.gz "${DISTDIR}" # - fmt rm -rf "${LOCALBASE}/include/fmt*" rm -rf "${LOCALBASE}/lib/libfmt*" -cp distdir/fmt-10.2.1.zip "${DISTDIR}" +cp distdir/fmt-11.0.2.zip "${DISTDIR}" # bootstrap command diff --git a/test/buildtool/build_engine/base_maps/TARGETS b/test/buildtool/build_engine/base_maps/TARGETS index 855d6217f..e243290c8 100644 --- a/test/buildtool/build_engine/base_maps/TARGETS +++ b/test/buildtool/build_engine/base_maps/TARGETS @@ -15,8 +15,8 @@ , "srcs": ["entity_name.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/buildtool/build_engine/base_maps", "entity_name"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", "build_engine", "base_maps"] } @@ -28,11 +28,12 @@ , "private-deps": [ "test_repo" , ["@", "catch2", "", "catch2"] - , ["", "catch-main"] - , ["@", "src", "src/buildtool/common", "config"] , ["@", "src", "src/buildtool/build_engine/base_maps", "directory_map"] , ["@", "src", "src/buildtool/build_engine/base_maps", "module_name"] + , ["@", "src", "src/buildtool/common", "config"] , ["@", "src", "src/buildtool/file_system", "file_root"] + , ["@", "src", "src/buildtool/multithreading", "task_system"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", "build_engine", "base_maps"] } @@ -44,11 +45,11 @@ , "private-deps": [ "test_repo" , ["@", "catch2", "", "catch2"] - , ["", "catch-main"] - , ["@", "src", "src/buildtool/common", "config"] , ["@", "src", "src/buildtool/build_engine/base_maps", "json_file_map"] + , ["@", "src", "src/buildtool/common", "config"] , ["@", "src", "src/buildtool/file_system", "file_root"] , ["@", "src", "src/buildtool/multithreading", "task_system"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", "build_engine", "base_maps"] } @@ -60,13 +61,19 @@ , 
"private-deps": [ "test_repo" , ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "json", "", "json"] - , ["@", "src", "src/buildtool/common", "config"] , ["@", "src", "src/buildtool/build_engine/base_maps", "directory_map"] - , ["@", "src", "src/buildtool/build_engine/base_maps", "source_map"] + , ["@", "src", "src/buildtool/build_engine/base_maps", "entity_name"] , ["@", "src", "src/buildtool/build_engine/base_maps", "entity_name_data"] + , ["@", "src", "src/buildtool/build_engine/base_maps", "source_map"] + , ["@", "src", "src/buildtool/common", "config"] + , ["@", "src", "src/buildtool/common", "protocol_traits"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["@", "src", "src/buildtool/file_system", "file_root"] + , ["@", "src", "src/buildtool/multithreading", "async_map_consumer"] + , ["@", "src", "src/buildtool/multithreading", "task_system"] + , ["", "catch-main"] + , ["utils", "test_hash_function_type"] ] , "stage": ["test", "buildtool", "build_engine", "base_maps"] } @@ -78,14 +85,14 @@ , "private-deps": [ "test_repo" , ["@", "catch2", "", "catch2"] - , ["", "catch-main"] - , ["@", "src", "src/buildtool/build_engine/base_maps", "expression_map"] , ["@", "src", "src/buildtool/build_engine/base_maps", "entity_name_data"] + , ["@", "src", "src/buildtool/build_engine/base_maps", "expression_map"] , ["@", "src", "src/buildtool/build_engine/base_maps", "json_file_map"] , ["@", "src", "src/buildtool/build_engine/expression", "expression"] , ["@", "src", "src/buildtool/common", "config"] , ["@", "src", "src/buildtool/file_system", "file_root"] , ["@", "src", "src/buildtool/multithreading", "task_system"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", "build_engine", "base_maps"] } @@ -97,16 +104,15 @@ , "private-deps": [ "test_repo" , ["@", "catch2", "", "catch2"] - , ["", "catch-main"] - , ["@", "src", "src/buildtool/common", "config"] - , ["@", "src", "src/buildtool/build_engine/base_maps", "rule_map"] , ["@", "src", 
"src/buildtool/build_engine/base_maps", "entity_name_data"] , ["@", "src", "src/buildtool/build_engine/base_maps", "expression_map"] , ["@", "src", "src/buildtool/build_engine/base_maps", "json_file_map"] + , ["@", "src", "src/buildtool/build_engine/base_maps", "rule_map"] , ["@", "src", "src/buildtool/build_engine/expression", "expression"] , ["@", "src", "src/buildtool/common", "config"] , ["@", "src", "src/buildtool/file_system", "file_root"] , ["@", "src", "src/buildtool/multithreading", "task_system"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", "build_engine", "base_maps"] } diff --git a/test/buildtool/build_engine/base_maps/json_file_map.test.cpp b/test/buildtool/build_engine/base_maps/json_file_map.test.cpp index be657cff7..4a951012c 100644 --- a/test/buildtool/build_engine/base_maps/json_file_map.test.cpp +++ b/test/buildtool/build_engine/base_maps/json_file_map.test.cpp @@ -146,11 +146,12 @@ TEST_CASE("non existent") { TEST_CASE("Bad syntax") { std::atomic failcont_counter{0}; + auto fail_func = [&failcont_counter]() { ++failcont_counter; }; CHECK_FALSE(ReadJsonFile( "bad.json", {"", "data_json"}, [](auto const& /* unused */) {}, /*use_git=*/false, - [&failcont_counter]() { failcont_counter++; })); + fail_func)); CHECK(failcont_counter == 1); } diff --git a/test/buildtool/build_engine/base_maps/source_map.test.cpp b/test/buildtool/build_engine/base_maps/source_map.test.cpp index 44c6863a5..ab5323e41 100644 --- a/test/buildtool/build_engine/base_maps/source_map.test.cpp +++ b/test/buildtool/build_engine/base_maps/source_map.test.cpp @@ -25,11 +25,14 @@ #include "src/buildtool/build_engine/base_maps/directory_map.hpp" #include "src/buildtool/build_engine/base_maps/entity_name.hpp" #include "src/buildtool/build_engine/base_maps/entity_name_data.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/common/repository_config.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include 
"src/buildtool/file_system/file_root.hpp" #include "src/buildtool/multithreading/async_map_consumer.hpp" #include "src/buildtool/multithreading/task_system.hpp" #include "test/buildtool/build_engine/base_maps/test_repo.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" namespace { @@ -59,13 +62,14 @@ auto SetupConfig(bool use_git) -> RepositoryConfig { auto ReadSourceTarget(EntityName const& id, SourceTargetMap::Consumer consumer, + HashFunction::Type hash_type, bool use_git = false, std::optional fail_func = std::nullopt) -> bool { auto repo_config = SetupConfig(use_git); auto directory_entries = CreateDirectoryEntriesMap(&repo_config); auto source_artifacts = - CreateSourceTargetMap(&directory_entries, &repo_config); + CreateSourceTargetMap(&directory_entries, &repo_config, hash_type); std::string error_msg; bool success{true}; { @@ -86,6 +90,8 @@ auto ReadSourceTarget(EntityName const& id, } // namespace TEST_CASE("from file") { + auto const hash_type = TestHashType::ReadFromEnvironment(); + nlohmann::json artifacts; auto name = EntityName{"", ".", "file"}; auto consumer = [&artifacts](auto values) { @@ -93,21 +99,24 @@ TEST_CASE("from file") { }; SECTION("via file") { - CHECK(ReadSourceTarget(name, consumer, /*use_git=*/false)); + CHECK(ReadSourceTarget(name, consumer, hash_type, /*use_git=*/false)); CHECK(artifacts["file"]["type"] == "LOCAL"); CHECK(artifacts["file"]["data"]["path"] == "file"); } SECTION("via git tree") { - CHECK(ReadSourceTarget(name, consumer, /*use_git=*/true)); + CHECK(ReadSourceTarget(name, consumer, hash_type, /*use_git=*/true)); CHECK(artifacts["file"]["type"] == "KNOWN"); - CHECK(artifacts["file"]["data"]["id"] == - (Compatibility::IsCompatible() ? kEmptySha256 : kEmptySha1)); + CHECK( + artifacts["file"]["data"]["id"] == + (ProtocolTraits::IsNative(hash_type) ? 
kEmptySha1 : kEmptySha256)); CHECK(artifacts["file"]["data"]["size"] == 0); } } TEST_CASE("not present at all") { + auto const hash_type = TestHashType::ReadFromEnvironment(); + bool consumed{false}; bool failure_called{false}; auto name = EntityName{"", ".", "does_not_exist"}; @@ -115,21 +124,23 @@ TEST_CASE("not present at all") { auto fail_func = [&failure_called]() { failure_called = true; }; SECTION("via file") { - CHECK_FALSE( - ReadSourceTarget(name, consumer, /*use_git=*/false, fail_func)); + CHECK_FALSE(ReadSourceTarget( + name, consumer, hash_type, /*use_git=*/false, fail_func)); CHECK_FALSE(consumed); CHECK(failure_called); } SECTION("via git tree") { - CHECK_FALSE( - ReadSourceTarget(name, consumer, /*use_git=*/true, fail_func)); + CHECK_FALSE(ReadSourceTarget( + name, consumer, hash_type, /*use_git=*/true, fail_func)); CHECK_FALSE(consumed); CHECK(failure_called); } } TEST_CASE("malformed entry") { + auto const hash_type = TestHashType::ReadFromEnvironment(); + bool consumed{false}; bool failure_called{false}; auto name = EntityName{"", ".", "bad_entry"}; @@ -137,21 +148,23 @@ TEST_CASE("malformed entry") { auto fail_func = [&failure_called]() { failure_called = true; }; SECTION("via git tree") { - CHECK_FALSE( - ReadSourceTarget(name, consumer, /*use_git=*/false, fail_func)); + CHECK_FALSE(ReadSourceTarget( + name, consumer, hash_type, /*use_git=*/false, fail_func)); CHECK_FALSE(consumed); CHECK(failure_called); } SECTION("via git tree") { - CHECK_FALSE( - ReadSourceTarget(name, consumer, /*use_git=*/true, fail_func)); + CHECK_FALSE(ReadSourceTarget( + name, consumer, hash_type, /*use_git=*/true, fail_func)); CHECK_FALSE(consumed); CHECK(failure_called); } } TEST_CASE("subdir file") { + auto const hash_type = TestHashType::ReadFromEnvironment(); + nlohmann::json artifacts; auto name = EntityName{"", "foo", "bar/file"}; auto consumer = [&artifacts](auto values) { @@ -159,21 +172,24 @@ TEST_CASE("subdir file") { }; SECTION("via file") { - 
CHECK(ReadSourceTarget(name, consumer, /*use_git=*/false)); + CHECK(ReadSourceTarget(name, consumer, hash_type, /*use_git=*/false)); CHECK(artifacts["bar/file"]["type"] == "LOCAL"); CHECK(artifacts["bar/file"]["data"]["path"] == "foo/bar/file"); } SECTION("via git tree") { - CHECK(ReadSourceTarget(name, consumer, /*use_git=*/true)); + CHECK(ReadSourceTarget(name, consumer, hash_type, /*use_git=*/true)); CHECK(artifacts["bar/file"]["type"] == "KNOWN"); - CHECK(artifacts["bar/file"]["data"]["id"] == - (Compatibility::IsCompatible() ? kEmptySha256 : kEmptySha1)); + CHECK( + artifacts["bar/file"]["data"]["id"] == + (ProtocolTraits::IsNative(hash_type) ? kEmptySha1 : kEmptySha256)); CHECK(artifacts["bar/file"]["data"]["size"] == 0); } } TEST_CASE("subdir symlink") { + auto const hash_type = TestHashType::ReadFromEnvironment(); + nlohmann::json artifacts; auto name = EntityName{"", "foo", "link"}; auto consumer = [&artifacts](auto values) { @@ -181,17 +197,17 @@ TEST_CASE("subdir symlink") { }; SECTION("via file") { - CHECK(ReadSourceTarget(name, consumer, /*use_git=*/false)); + CHECK(ReadSourceTarget(name, consumer, hash_type, /*use_git=*/false)); CHECK(artifacts["link"]["type"] == "LOCAL"); CHECK(artifacts["link"]["data"]["path"] == "foo/link"); } SECTION("via git tree") { - CHECK(ReadSourceTarget(name, consumer, /*use_git=*/true)); + CHECK(ReadSourceTarget(name, consumer, hash_type, /*use_git=*/true)); CHECK(artifacts["link"]["type"] == "KNOWN"); - CHECK(artifacts["link"]["data"]["id"] == (Compatibility::IsCompatible() - ? kSrcLinkIdSha256 - : kSrcLinkIdSha1)); + CHECK(artifacts["link"]["data"]["id"] == + (ProtocolTraits::IsNative(hash_type) ? 
kSrcLinkIdSha1 + : kSrcLinkIdSha256)); CHECK(artifacts["link"]["data"]["size"] == 5); // content: dummy } } diff --git a/test/buildtool/build_engine/expression/TARGETS b/test/buildtool/build_engine/expression/TARGETS index 9284aba31..1b994a022 100644 --- a/test/buildtool/build_engine/expression/TARGETS +++ b/test/buildtool/build_engine/expression/TARGETS @@ -4,9 +4,9 @@ , "srcs": ["linked_map.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] + , ["@", "src", "src/buildtool/build_engine/expression", "linked_map"] , ["", "catch-main"] , ["utils", "container_matchers"] - , ["@", "src", "src/buildtool/build_engine/expression", "linked_map"] ] , "stage": ["test", "buildtool", "build_engine", "expression"] } @@ -16,10 +16,16 @@ , "srcs": ["expression.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] - , ["utils", "container_matchers"] , ["@", "src", "src/buildtool/build_engine/expression", "expression"] + , [ "@" + , "src" + , "src/buildtool/build_engine/expression" + , "expression_ptr_interface" + ] + , ["@", "src", "src/buildtool/common", "artifact_description"] , ["@", "src", "src/buildtool/common", "common"] + , ["", "catch-main"] + , ["utils", "container_matchers"] ] , "stage": ["test", "buildtool", "build_engine", "expression"] } @@ -29,9 +35,9 @@ , "srcs": ["configuration.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] + , ["@", "src", "src/buildtool/build_engine/expression", "expression"] , ["", "catch-main"] , ["utils", "container_matchers"] - , ["@", "src", "src/buildtool/build_engine/expression", "expression"] ] , "stage": ["test", "buildtool", "build_engine", "expression"] } diff --git a/test/buildtool/build_engine/expression/expression.test.cpp b/test/buildtool/build_engine/expression/expression.test.cpp index aaca1c713..3a34702d3 100644 --- a/test/buildtool/build_engine/expression/expression.test.cpp +++ b/test/buildtool/build_engine/expression/expression.test.cpp @@ -1042,14 +1042,11 @@ 
TEST_CASE("Expression Evaluation", "[expression]") { // NOLINT REQUIRE(multi->IsString()); CHECK(multi == Expression::FromJson(R"("foo;bar;baz")"_json)); + // only list of strings are allowed expr = Replace(expr, "$1", foo); REQUIRE(expr); - auto string = expr.Evaluate(env, fcts); - REQUIRE(string); - REQUIRE(string->IsString()); - CHECK(string == Expression::FromJson(R"("foo")"_json)); + CHECK_FALSE(expr.Evaluate(env, fcts)); - // only list of strings or string is allowed expr = Replace(expr, "$1", list_t{foo, ExpressionPtr{number_t{}}}); REQUIRE(expr); CHECK_FALSE(expr.Evaluate(env, fcts)); @@ -1070,6 +1067,14 @@ TEST_CASE("Expression Evaluation", "[expression]") { // NOLINT REQUIRE(result->IsString()); CHECK(result == Expression::FromJson(R"("'foo' 'bar'\\''s' 'baz'")"_json)); + + expr = Expression::FromJson(R"( + {"type": "join_cmd" + , "$1": "not a list" + } + )"_json); + REQUIRE(expr); + CHECK_FALSE(expr.Evaluate(env, fcts)); } SECTION("escape_chars expression") { diff --git a/test/buildtool/build_engine/expression/linked_map.test.cpp b/test/buildtool/build_engine/expression/linked_map.test.cpp index 4d0bf9933..1a8ceff59 100644 --- a/test/buildtool/build_engine/expression/linked_map.test.cpp +++ b/test/buildtool/build_engine/expression/linked_map.test.cpp @@ -95,9 +95,8 @@ TEST_CASE("Lookup and iteration", "[linked_map]") { class CopyCounter { public: CopyCounter() : count_{std::make_shared()} {} - CopyCounter(CopyCounter const& other) { + CopyCounter(CopyCounter const& other) : count_{other.count_} { ++(*other.count_); - count_ = other.count_; } CopyCounter(CopyCounter&&) = default; ~CopyCounter() = default; @@ -113,7 +112,7 @@ class CopyCounter { private: // all copies of this object share the same counter - std::shared_ptr count_{}; + std::shared_ptr count_; }; TEST_CASE("Zero copies", "[linked_map]") { @@ -218,7 +217,7 @@ class CustomContainer { [[nodiscard]] auto Map() & noexcept -> linked_map_t& { return map_; } private: - linked_map_t map_{}; + 
linked_map_t map_; }; TEST_CASE("Custom NextPtr", "[linked_map]") { diff --git a/test/buildtool/build_engine/target_map/TARGETS b/test/buildtool/build_engine/target_map/TARGETS index 6db3919e0..f2d50fce7 100644 --- a/test/buildtool/build_engine/target_map/TARGETS +++ b/test/buildtool/build_engine/target_map/TARGETS @@ -4,15 +4,15 @@ , "srcs": ["result_map.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "json", "", "json"] - , ["@", "src", "src/buildtool/build_engine/expression", "expression"] - , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/build_engine/analysed_target", "target"] + , ["@", "src", "src/buildtool/build_engine/expression", "expression"] , ["@", "src", "src/buildtool/build_engine/target_map", "result_map"] - , ["@", "src", "src/buildtool/common", "common"] , ["@", "src", "src/buildtool/common", "action_description"] + , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/progress_reporting", "progress"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", "build_engine", "target_map"] } @@ -23,30 +23,36 @@ , "data": ["test_data"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/buildtool/auth", "auth"] , ["@", "src", "src/buildtool/build_engine/base_maps", "directory_map"] + , ["@", "src", "src/buildtool/build_engine/base_maps", "entity_name"] + , ["@", "src", "src/buildtool/build_engine/base_maps", "expression_map"] , ["@", "src", "src/buildtool/build_engine/base_maps", "rule_map"] , ["@", "src", "src/buildtool/build_engine/base_maps", "source_map"] , ["@", "src", "src/buildtool/build_engine/base_maps", "targets_file_map"] + , ["@", "src", "src/buildtool/build_engine/expression", "expression"] , ["@", "src", "src/buildtool/build_engine/target_map", "target_map"] , ["@", "src", "src/buildtool/common", "common"] + , 
["@", "src", "src/buildtool/common", "config"] , ["@", "src", "src/buildtool/common/remote", "retry_config"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] + , ["@", "src", "src/buildtool/execution_api/common", "api_bundle"] , ["@", "src", "src/buildtool/execution_api/local", "config"] , ["@", "src", "src/buildtool/execution_api/local", "context"] , ["@", "src", "src/buildtool/execution_api/remote", "config"] , ["@", "src", "src/buildtool/execution_api/remote", "context"] , ["@", "src", "src/buildtool/file_system", "file_root"] - , ["@", "src", "src/buildtool/progress_reporting", "progress"] - , ["@", "src", "src/buildtool/storage", "config"] - , ["@", "src", "src/buildtool/storage", "storage"] , ["@", "src", "src/buildtool/main", "analyse_context"] - , ["@", "src", "src/buildtool/common", "config"] + , ["@", "src", "src/buildtool/multithreading", "async_map_consumer"] + , ["@", "src", "src/buildtool/multithreading", "task_system"] + , ["@", "src", "src/buildtool/progress_reporting", "progress"] , ["@", "src", "src/buildtool/serve_api/remote", "config"] , ["@", "src", "src/buildtool/serve_api/remote", "serve_api"] - , ["@", "src", "src/buildtool/crypto", "hash_function"] - , ["utils", "test_storage_config"] + , ["@", "src", "src/buildtool/storage", "config"] + , ["@", "src", "src/buildtool/storage", "storage"] + , ["", "catch-main"] , ["utils", "test_serve_config"] + , ["utils", "test_storage_config"] ] , "stage": ["test", "buildtool", "build_engine", "target_map"] } @@ -56,13 +62,13 @@ , "srcs": ["target_map_internals.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] + , ["@", "src", "src/buildtool/build_engine/expression", "expression"] , [ "@" , "src" , "src/buildtool/build_engine/target_map" , "target_map_testable_internals" ] - , ["@", "src", "src/buildtool/build_engine/expression", "expression"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", "build_engine", "target_map"] } diff --git 
a/test/buildtool/build_engine/target_map/data_targets/simple_targets/TARGETS b/test/buildtool/build_engine/target_map/data_targets/simple_targets/TARGETS index 9eace0f28..52be68c02 100644 --- a/test/buildtool/build_engine/target_map/data_targets/simple_targets/TARGETS +++ b/test/buildtool/build_engine/target_map/data_targets/simple_targets/TARGETS @@ -7,11 +7,11 @@ } , "collect dep artifacts": { "type": ["simple_rules", "collect deps"] - , "deps": ["foo.txt", "bar.txt", "baz.txt", "link"] + , "deps": ["bar.txt", "baz.txt", "foo.txt", "link"] } , "collect as runfiles": { "type": ["simple_rules", "collect deps as runfiles"] - , "deps": ["foo.txt", "bar.txt", "baz.txt", "link"] + , "deps": ["bar.txt", "baz.txt", "foo.txt", "link"] } , "stage blob": { "type": ["simple_rules", "text file"] @@ -72,7 +72,7 @@ } , "install": { "type": "install" - , "deps": ["foo.txt", "bar.txt", "link"] + , "deps": ["bar.txt", "foo.txt", "link"] , "files": { "link_gen": "use generic sym" , "combined.txt": "use generic" diff --git a/test/buildtool/build_engine/target_map/target_map.test.cpp b/test/buildtool/build_engine/target_map/target_map.test.cpp index cb8aa2305..9f66fd923 100644 --- a/test/buildtool/build_engine/target_map/target_map.test.cpp +++ b/test/buildtool/build_engine/target_map/target_map.test.cpp @@ -86,15 +86,17 @@ auto SetupConfig() -> RepositoryConfig { } // namespace -TEST_CASE("simple targets", "[target_map]") { +TEST_CASE("simple targets", "[target_map]") { // NOLINT auto const storage_config = TestStorageConfig::Create(); auto const storage = Storage::Create(&storage_config.Get()); auto repo_config = SetupConfig(); auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap(&repo_config); - auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries, - &repo_config); + auto source = BuildMaps::Base::CreateSourceTargetMap( + &directory_entries, + &repo_config, + storage_config.Get().hash_function.GetType()); auto targets_file_map = 
BuildMaps::Base::CreateTargetsFileMap(&repo_config, 0); auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(&repo_config, 0); @@ -720,8 +722,10 @@ TEST_CASE("configuration deduplication", "[target_map]") { auto repo_config = SetupConfig(); auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap(&repo_config); - auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries, - &repo_config); + auto source = BuildMaps::Base::CreateSourceTargetMap( + &directory_entries, + &repo_config, + storage_config.Get().hash_function.GetType()); auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(&repo_config, 0); auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(&repo_config, 0); @@ -830,8 +834,10 @@ TEST_CASE("generator functions in string arguments", "[target_map]") { auto repo_config = SetupConfig(); auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap(&repo_config); - auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries, - &repo_config); + auto source = BuildMaps::Base::CreateSourceTargetMap( + &directory_entries, + &repo_config, + storage_config.Get().hash_function.GetType()); auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(&repo_config, 0); auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(&repo_config, 0); @@ -952,8 +958,10 @@ TEST_CASE("built-in rules", "[target_map]") { auto repo_config = SetupConfig(); auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap(&repo_config); - auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries, - &repo_config); + auto source = BuildMaps::Base::CreateSourceTargetMap( + &directory_entries, + &repo_config, + storage_config.Get().hash_function.GetType()); auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(&repo_config, 0); auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(&repo_config, 0); @@ -1186,8 +1194,10 @@ TEST_CASE("target reference", "[target_map]") { auto repo_config = SetupConfig(); auto 
directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap(&repo_config); - auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries, - &repo_config); + auto source = BuildMaps::Base::CreateSourceTargetMap( + &directory_entries, + &repo_config, + storage_config.Get().hash_function.GetType()); auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(&repo_config, 0); auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(&repo_config, 0); @@ -1351,8 +1361,10 @@ TEST_CASE("trees", "[target_map]") { auto repo_config = SetupConfig(); auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap(&repo_config); - auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries, - &repo_config); + auto source = BuildMaps::Base::CreateSourceTargetMap( + &directory_entries, + &repo_config, + storage_config.Get().hash_function.GetType()); auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(&repo_config, 0); auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(&repo_config, 0); @@ -1480,8 +1492,10 @@ TEST_CASE("RESULT error reporting", "[target_map]") { auto repo_config = SetupConfig(); auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap(&repo_config); - auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries, - &repo_config); + auto source = BuildMaps::Base::CreateSourceTargetMap( + &directory_entries, + &repo_config, + storage_config.Get().hash_function.GetType()); auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(&repo_config, 0); auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(&repo_config, 0); @@ -1668,8 +1682,10 @@ TEST_CASE("wrong arguments", "[target_map]") { auto repo_config = SetupConfig(); auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap(&repo_config); - auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries, - &repo_config); + auto source = BuildMaps::Base::CreateSourceTargetMap( + &directory_entries, + &repo_config, + 
storage_config.Get().hash_function.GetType()); auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(&repo_config, 0); auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(&repo_config, 0); diff --git a/test/buildtool/common/TARGETS b/test/buildtool/common/TARGETS index 90d3742c7..298f93ebd 100644 --- a/test/buildtool/common/TARGETS +++ b/test/buildtool/common/TARGETS @@ -4,11 +4,14 @@ , "srcs": ["artifact_description.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "json", "", "json"] , ["@", "src", "src/buildtool/common", "artifact_description"] + , ["@", "src", "src/buildtool/common", "artifact_digest_factory"] , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["@", "src", "src/buildtool/file_system", "object_type"] + , ["", "catch-main"] + , ["utils", "test_hash_function_type"] ] , "stage": ["test", "buildtool", "common"] } @@ -18,10 +21,13 @@ , "srcs": ["action_description.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "json", "", "json"] , ["@", "src", "src/buildtool/common", "action_description"] + , ["@", "src", "src/buildtool/common", "artifact_description"] , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] + , ["", "catch-main"] + , ["utils", "test_hash_function_type"] ] , "stage": ["test", "buildtool", "common"] } @@ -31,16 +37,16 @@ , "srcs": ["repository_config.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "json", "", "json"] - , ["@", "src", "src/buildtool/common", "config"] , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/common", "config"] + , ["@", "src", "src/buildtool/execution_api/local", "local"] , ["@", "src", "src/buildtool/file_system", "file_root"] , ["@", "src", "src/buildtool/file_system", "file_system_manager"] - , ["utils", "test_storage_config"] - 
, ["@", "src", "src/buildtool/execution_api/local", "local"] - , ["@", "src", "src/buildtool/storage", "storage"] , ["@", "src", "src/buildtool/storage", "config"] + , ["@", "src", "src/buildtool/storage", "storage"] + , ["", "catch-main"] + , ["utils", "test_storage_config"] ] , "stage": ["test", "buildtool", "common"] } diff --git a/test/buildtool/common/action_description.test.cpp b/test/buildtool/common/action_description.test.cpp index eab2cc48b..65f5d92ec 100644 --- a/test/buildtool/common/action_description.test.cpp +++ b/test/buildtool/common/action_description.test.cpp @@ -20,6 +20,8 @@ #include "nlohmann/json.hpp" #include "src/buildtool/common/action.hpp" #include "src/buildtool/common/artifact_description.hpp" +#include "src/buildtool/crypto/hash_function.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" TEST_CASE("From JSON", "[action_description]") { using path = std::filesystem::path; @@ -37,56 +39,57 @@ TEST_CASE("From JSON", "[action_description]") { desc.Inputs()} .ToJson(); + auto const hash_type = TestHashType::ReadFromEnvironment(); SECTION("Parse full action") { - auto description = ActionDescription::FromJson("id", json); + auto description = ActionDescription::FromJson(hash_type, "id", json); REQUIRE(description); CHECK((*description)->ToJson() == json); } SECTION("Parse action without optional input") { json["input"] = nlohmann::json::object(); - CHECK(ActionDescription::FromJson("id", json)); + CHECK(ActionDescription::FromJson(hash_type, "id", json)); json["input"] = nlohmann::json::array(); - CHECK_FALSE(ActionDescription::FromJson("id", json)); + CHECK_FALSE(ActionDescription::FromJson(hash_type, "id", json)); json.erase("input"); - CHECK(ActionDescription::FromJson("id", json)); + CHECK(ActionDescription::FromJson(hash_type, "id", json)); } SECTION("Parse action without optional env") { json["env"] = nlohmann::json::object(); - CHECK(ActionDescription::FromJson("id", json)); + 
CHECK(ActionDescription::FromJson(hash_type, "id", json)); json["env"] = nlohmann::json::array(); - CHECK_FALSE(ActionDescription::FromJson("id", json)); + CHECK_FALSE(ActionDescription::FromJson(hash_type, "id", json)); json.erase("env"); - CHECK(ActionDescription::FromJson("id", json)); + CHECK(ActionDescription::FromJson(hash_type, "id", json)); } SECTION("Parse action without mandatory outputs") { json["output"] = nlohmann::json::array(); json["output_dirs"] = nlohmann::json::array(); - CHECK_FALSE(ActionDescription::FromJson("id", json)); + CHECK_FALSE(ActionDescription::FromJson(hash_type, "id", json)); json["output"] = nlohmann::json::object(); json["output_dirs"] = nlohmann::json::object(); - CHECK_FALSE(ActionDescription::FromJson("id", json)); + CHECK_FALSE(ActionDescription::FromJson(hash_type, "id", json)); json.erase("output"); json.erase("output_dirs"); - CHECK_FALSE(ActionDescription::FromJson("id", json)); + CHECK_FALSE(ActionDescription::FromJson(hash_type, "id", json)); } SECTION("Parse action without mandatory command") { json["command"] = nlohmann::json::array(); - CHECK_FALSE(ActionDescription::FromJson("id", json)); + CHECK_FALSE(ActionDescription::FromJson(hash_type, "id", json)); json["command"] = nlohmann::json::object(); - CHECK_FALSE(ActionDescription::FromJson("id", json)); + CHECK_FALSE(ActionDescription::FromJson(hash_type, "id", json)); json.erase("command"); - CHECK_FALSE(ActionDescription::FromJson("id", json)); + CHECK_FALSE(ActionDescription::FromJson(hash_type, "id", json)); } } diff --git a/test/buildtool/common/artifact_description.test.cpp b/test/buildtool/common/artifact_description.test.cpp index c6d3a8d58..44acaae6f 100644 --- a/test/buildtool/common/artifact_description.test.cpp +++ b/test/buildtool/common/artifact_description.test.cpp @@ -20,7 +20,17 @@ #include "catch2/catch_test_macros.hpp" #include "nlohmann/json.hpp" #include "src/buildtool/common/artifact.hpp" +#include "src/buildtool/common/artifact_digest.hpp" 
+#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/object_type.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" + +static auto NamedDigest(std::string const& str) -> ArtifactDigest { + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; + return ArtifactDigestFactory::HashDataAs(hash_function, + str); +} [[nodiscard]] auto operator==(Artifact const& lhs, Artifact const& rhs) -> bool { @@ -31,30 +41,31 @@ TEST_CASE("Local artifact", "[artifact_description]") { auto local_desc = ArtifactDescription::CreateLocal( std::filesystem::path{"local_path"}, "repo"); - auto from_json = ArtifactDescription::FromJson(local_desc.ToJson()); + auto from_json = ArtifactDescription::FromJson( + TestHashType::ReadFromEnvironment(), local_desc.ToJson()); CHECK(local_desc == *from_json); } TEST_CASE("Known artifact", "[artifact_description]") { SECTION("File object") { auto known_desc = ArtifactDescription::CreateKnown( - ArtifactDigest{std::string{"f_fake_hash"}, 0, /*is_tree=*/false}, - ObjectType::File); - auto from_json = ArtifactDescription::FromJson(known_desc.ToJson()); + NamedDigest("f_fake_hash"), ObjectType::File); + auto from_json = ArtifactDescription::FromJson( + TestHashType::ReadFromEnvironment(), known_desc.ToJson()); CHECK(known_desc == *from_json); } SECTION("Executable object") { auto known_desc = ArtifactDescription::CreateKnown( - ArtifactDigest{std::string{"x_fake_hash"}, 1, /*is_tree=*/false}, - ObjectType::Executable); - auto from_json = ArtifactDescription::FromJson(known_desc.ToJson()); + NamedDigest("x_fake_hash"), ObjectType::Executable); + auto from_json = ArtifactDescription::FromJson( + TestHashType::ReadFromEnvironment(), known_desc.ToJson()); CHECK(known_desc == *from_json); } SECTION("Symlink object") { auto known_desc = ArtifactDescription::CreateKnown( - ArtifactDigest{std::string{"l_fake_hash"}, 2, /*is_tree=*/false}, 
- ObjectType::Symlink); - auto from_json = ArtifactDescription::FromJson(known_desc.ToJson()); + NamedDigest("l_fake_hash"), ObjectType::Symlink); + auto from_json = ArtifactDescription::FromJson( + TestHashType::ReadFromEnvironment(), known_desc.ToJson()); CHECK(known_desc == *from_json); } } @@ -62,88 +73,89 @@ TEST_CASE("Known artifact", "[artifact_description]") { TEST_CASE("Action artifact", "[artifact_description]") { auto action_desc = ArtifactDescription::CreateAction( "action_id", std::filesystem::path{"out_path"}); - auto from_json = ArtifactDescription::FromJson(action_desc.ToJson()); + auto from_json = ArtifactDescription::FromJson( + TestHashType::ReadFromEnvironment(), action_desc.ToJson()); CHECK(action_desc == *from_json); } TEST_CASE("From JSON", "[artifact_description]") { auto local = ArtifactDescription::CreateLocal("local", "repo").ToJson(); auto known = - ArtifactDescription::CreateKnown( - ArtifactDigest{"hash", 0, /*is_tree=*/false}, ObjectType::File) + ArtifactDescription::CreateKnown(NamedDigest("hash"), ObjectType::File) .ToJson(); auto action = ArtifactDescription::CreateAction("id", "output").ToJson(); + auto const hash_type = TestHashType::ReadFromEnvironment(); SECTION("Parse artifacts") { - CHECK(ArtifactDescription::FromJson(local)); - CHECK(ArtifactDescription::FromJson(known)); - CHECK(ArtifactDescription::FromJson(action)); + CHECK(ArtifactDescription::FromJson(hash_type, local)); + CHECK(ArtifactDescription::FromJson(hash_type, known)); + CHECK(ArtifactDescription::FromJson(hash_type, action)); } SECTION("Parse artifact without mandatory type") { local.erase("type"); known.erase("type"); action.erase("type"); - CHECK_FALSE(ArtifactDescription::FromJson(local)); - CHECK_FALSE(ArtifactDescription::FromJson(known)); - CHECK_FALSE(ArtifactDescription::FromJson(action)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, local)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, known)); + 
CHECK_FALSE(ArtifactDescription::FromJson(hash_type, action)); } SECTION("Parse artifact without mandatory data") { local.erase("data"); known.erase("data"); action.erase("data"); - CHECK_FALSE(ArtifactDescription::FromJson(local)); - CHECK_FALSE(ArtifactDescription::FromJson(known)); - CHECK_FALSE(ArtifactDescription::FromJson(action)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, local)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, known)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, action)); } SECTION("Parse local artifact without mandatory path") { local["data"]["path"] = 0; - CHECK_FALSE(ArtifactDescription::FromJson(local)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, local)); local["data"].erase("path"); - CHECK_FALSE(ArtifactDescription::FromJson(local)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, local)); } SECTION("Parse known artifact") { SECTION("without mandatory id") { known["data"]["id"] = 0; - CHECK_FALSE(ArtifactDescription::FromJson(known)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, known)); known["data"].erase("id"); - CHECK_FALSE(ArtifactDescription::FromJson(known)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, known)); } SECTION("without mandatory size") { known["data"]["size"] = "0"; - CHECK_FALSE(ArtifactDescription::FromJson(known)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, known)); known["data"].erase("size"); - CHECK_FALSE(ArtifactDescription::FromJson(known)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, known)); } SECTION("without mandatory file_type") { known["data"]["file_type"] = "more_than_one_char"; - CHECK_FALSE(ArtifactDescription::FromJson(known)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, known)); known["data"].erase("file_type"); - CHECK_FALSE(ArtifactDescription::FromJson(known)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, known)); } } SECTION("Parse action artifact") { SECTION("without mandatory 
id") { action["data"]["id"] = 0; - CHECK_FALSE(ArtifactDescription::FromJson(action)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, action)); action["data"].erase("id"); - CHECK_FALSE(ArtifactDescription::FromJson(action)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, action)); } SECTION("without mandatory path") { action["data"]["path"] = 0; - CHECK_FALSE(ArtifactDescription::FromJson(action)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, action)); action["data"].erase("path"); - CHECK_FALSE(ArtifactDescription::FromJson(action)); + CHECK_FALSE(ArtifactDescription::FromJson(hash_type, action)); } } } @@ -151,16 +163,19 @@ TEST_CASE("From JSON", "[artifact_description]") { TEST_CASE("Description missing mandatory key/value pair", "[artifact_description]") { nlohmann::json const missing_type = {{"data", {{"path", "some/path"}}}}; - CHECK(not ArtifactDescription::FromJson(missing_type)); + CHECK(not ArtifactDescription::FromJson(TestHashType::ReadFromEnvironment(), + missing_type)); nlohmann::json const missing_data = {{"type", "LOCAL"}}; - CHECK(not ArtifactDescription::FromJson(missing_data)); + CHECK(not ArtifactDescription::FromJson(TestHashType::ReadFromEnvironment(), + missing_data)); } TEST_CASE("Local artifact description contains incorrect value for \"data\"", "[artifact_description]") { nlohmann::json const local_art_missing_path = { {"type", "LOCAL"}, {"data", nlohmann::json::object()}}; - CHECK(not ArtifactDescription::FromJson(local_art_missing_path)); + CHECK(not ArtifactDescription::FromJson(TestHashType::ReadFromEnvironment(), + local_art_missing_path)); } TEST_CASE("Known artifact description contains incorrect value for \"data\"", @@ -171,19 +186,22 @@ TEST_CASE("Known artifact description contains incorrect value for \"data\"", nlohmann::json const known_art_missing_id = { {"type", "KNOWN"}, {"data", {{"size", 15}, {"file_type", file_type}}}}; - CHECK(not ArtifactDescription::FromJson(known_art_missing_id)); + 
CHECK(not ArtifactDescription::FromJson( + TestHashType::ReadFromEnvironment(), known_art_missing_id)); } SECTION("missing \"size\"") { nlohmann::json const known_art_missing_size = { {"type", "KNOWN"}, {"data", {{"id", "known_input"}, {"file_type", file_type}}}}; - CHECK(not ArtifactDescription::FromJson(known_art_missing_size)); + CHECK(not ArtifactDescription::FromJson( + TestHashType::ReadFromEnvironment(), known_art_missing_size)); } SECTION("missing \"file_type\"") { nlohmann::json const known_art_missing_file_type = { {"type", "KNOWN"}, {"data", {{"id", "known_input"}, {"size", 15}}}}; - CHECK(not ArtifactDescription::FromJson(known_art_missing_file_type)); + CHECK(not ArtifactDescription::FromJson( + TestHashType::ReadFromEnvironment(), known_art_missing_file_type)); } } @@ -191,9 +209,11 @@ TEST_CASE("Action artifact description contains incorrect value for \"data\"", "[artifact_description]") { nlohmann::json const action_art_missing_id = { {"type", "ACTION"}, {"data", {{"path", "output/path"}}}}; - CHECK(not ArtifactDescription::FromJson(action_art_missing_id)); + CHECK(not ArtifactDescription::FromJson(TestHashType::ReadFromEnvironment(), + action_art_missing_id)); nlohmann::json const action_art_missing_path = { {"type", "ACTION"}, {"data", {{"id", "action_id"}}}}; - CHECK(not ArtifactDescription::FromJson(action_art_missing_path)); + CHECK(not ArtifactDescription::FromJson(TestHashType::ReadFromEnvironment(), + action_art_missing_path)); } diff --git a/test/buildtool/common/repository_config.test.cpp b/test/buildtool/common/repository_config.test.cpp index 931ead30e..6741b99f1 100644 --- a/test/buildtool/common/repository_config.test.cpp +++ b/test/buildtool/common/repository_config.test.cpp @@ -88,11 +88,10 @@ template // Read graph from CAS [[nodiscard]] auto ReadGraph(Storage const& storage, - std::string const& hash) -> nlohmann::json { + ArtifactDigest const& repo_key) -> nlohmann::json { auto const& cas = storage.CAS(); - auto blob = 
cas.BlobPath( - ArtifactDigest{hash, /*does not matter*/ 0, /*is_tree=*/false}, - /*is_executable=*/false); + auto blob = cas.BlobPath(repo_key, + /*is_executable=*/false); REQUIRE(blob); auto content = FileSystemManager::ReadFile(*blob); REQUIRE(content); diff --git a/test/buildtool/crypto/TARGETS b/test/buildtool/crypto/TARGETS index b05af69b4..409718160 100644 --- a/test/buildtool/crypto/TARGETS +++ b/test/buildtool/crypto/TARGETS @@ -4,8 +4,8 @@ , "srcs": ["hasher.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/buildtool/crypto", "hasher"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", "crypto"] } @@ -15,14 +15,26 @@ , "srcs": ["hash_function.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["", "catch-main"] + ] + , "stage": ["test", "buildtool", "crypto"] + } +, "hash_info": + { "type": ["@", "rules", "CC/test", "test"] + , "name": ["hash_info"] + , "srcs": ["hash_info.test.cpp"] + , "private-deps": + [ ["@", "catch2", "", "catch2"] , ["@", "src", "src/buildtool/crypto", "hash_function"] + , ["@", "src", "src/buildtool/crypto", "hash_info"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", "crypto"] } , "TESTS": { "type": ["@", "rules", "test", "suite"] , "stage": ["crypto"] - , "deps": ["hasher", "hash_function"] + , "deps": ["hash_function", "hash_info", "hasher"] } } diff --git a/test/buildtool/crypto/hash_info.test.cpp b/test/buildtool/crypto/hash_info.test.cpp new file mode 100644 index 000000000..0ef93f30c --- /dev/null +++ b/test/buildtool/crypto/hash_info.test.cpp @@ -0,0 +1,96 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "src/buildtool/crypto/hash_info.hpp" + +#include +#include + +#include "catch2/catch_test_macros.hpp" +#include "src/buildtool/crypto/hash_function.hpp" + +inline constexpr auto kValidGitSHA1 = + "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"; +inline constexpr auto kInvalidGitSHA1 = + "e69de29bb2d1d6434b8b29ae775ad8c2e48c539z"; +inline constexpr auto kValidPlainSHA256 = + "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae"; +inline constexpr auto kInvalidPlainSHA256 = + "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7az"; + +TEST_CASE("Empty HashInfo", "[crypto]") { + HashInfo info; + CHECK_FALSE(info.Hash().empty()); + CHECK(info.HashType() == HashFunction::Type::GitSHA1); + CHECK(info.IsTree() == false); +} + +TEST_CASE("Native HashInfo", "[crypto]") { + SECTION("Valid hash") { + // Valid hex string of valid length + CHECK(HashInfo::Create(HashFunction::Type::GitSHA1, + kValidGitSHA1, + /*is_tree=*/false)); + CHECK(HashInfo::Create(HashFunction::Type::GitSHA1, + kValidGitSHA1, + /*is_tree=*/true)); + } + + SECTION("Invalid hash") { + // Invalid hex string (contains z) + CHECK_FALSE(HashInfo::Create(HashFunction::Type::GitSHA1, + kInvalidGitSHA1, + /*is_tree=*/false)); + CHECK_FALSE(HashInfo::Create(HashFunction::Type::GitSHA1, + kInvalidGitSHA1, + /*is_tree=*/true)); + + // Valid hex string, but wrong length + CHECK_FALSE(HashInfo::Create(HashFunction::Type::GitSHA1, + kValidPlainSHA256, + /*is_tree=*/false)); + CHECK_FALSE(HashInfo::Create(HashFunction::Type::GitSHA1, + kValidPlainSHA256, + 
/*is_tree=*/true)); + } +} + +TEST_CASE("Compatible HashInfo", "[crypto]") { + SECTION("Valid hash") { + // Valid hex string of valid length, not a tree + CHECK(HashInfo::Create(HashFunction::Type::PlainSHA256, + kValidPlainSHA256, + /*is_tree=*/false)); + } + + SECTION("No trees") { + // Valid hex string of valid length, a tree that is not allowed in + // the compatible mode + CHECK_FALSE(HashInfo::Create(HashFunction::Type::PlainSHA256, + kValidPlainSHA256, + /*is_tree=*/true)); + } + + SECTION("Invalid hash") { + // Invalid hex string (contains z) + CHECK_FALSE(HashInfo::Create(HashFunction::Type::PlainSHA256, + kInvalidPlainSHA256, + /*is_tree=*/false)); + + // Valid hex string, but wrong length + CHECK_FALSE(HashInfo::Create(HashFunction::Type::PlainSHA256, + kValidGitSHA1, + /*is_tree=*/false)); + } +} diff --git a/test/buildtool/crypto/hasher.test.cpp b/test/buildtool/crypto/hasher.test.cpp index 8414cc814..8ace48ce1 100644 --- a/test/buildtool/crypto/hasher.test.cpp +++ b/test/buildtool/crypto/hasher.test.cpp @@ -19,9 +19,9 @@ #include "catch2/catch_test_macros.hpp" -template +template void test_increment_hash(std::string const& bytes, std::string const& result) { - auto hasher = Hasher::Create(type); + auto hasher = Hasher::Create(kType); REQUIRE(hasher.has_value()); hasher->Update(bytes.substr(0, bytes.size() / 2)); hasher->Update(bytes.substr(bytes.size() / 2)); diff --git a/test/buildtool/execution_api/TARGETS b/test/buildtool/execution_api/TARGETS index 1368ac084..130769886 100644 --- a/test/buildtool/execution_api/TARGETS +++ b/test/buildtool/execution_api/TARGETS @@ -13,8 +13,9 @@ , "stage": ["execution_api"] , "deps": [ ["./", "bazel", "TESTS"] - , ["./", "local", "TESTS"] + , ["./", "common", "TESTS"] , ["./", "execution_service", "TESTS"] + , ["./", "local", "TESTS"] ] } } diff --git a/test/buildtool/execution_api/bazel/TARGETS b/test/buildtool/execution_api/bazel/TARGETS index 90bddd804..7f0bd860c 100644 --- 
a/test/buildtool/execution_api/bazel/TARGETS +++ b/test/buildtool/execution_api/bazel/TARGETS @@ -5,16 +5,17 @@ , "private-deps": [ ["@", "catch2", "", "catch2"] , ["@", "gsl", "", "gsl"] - , ["utils", "catch-main-remote-execution"] - , ["utils", "test_auth_config"] - , ["utils", "test_remote_config"] - , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/common", "bazel_digest_factory"] , ["@", "src", "src/buildtool/common/remote", "retry_config"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["@", "src", "src/buildtool/execution_api/bazel_msg", "bazel_msg"] , ["@", "src", "src/buildtool/execution_api/remote", "bazel_network"] , ["@", "src", "src/buildtool/execution_api/remote", "config"] , ["@", "src", "src/buildtool/file_system", "object_type"] - , ["@", "src", "src/buildtool/crypto", "hash_function"] + , ["utils", "catch-main-remote-execution"] + , ["utils", "test_auth_config"] + , ["utils", "test_hash_function_type"] + , ["utils", "test_remote_config"] ] , "stage": ["test", "buildtool", "execution_api", "bazel"] } @@ -24,17 +25,17 @@ , "srcs": ["bazel_execution_client.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["utils", "catch-main-remote-execution"] - , ["utils", "execution_bazel"] - , ["utils", "test_auth_config"] - , ["utils", "test_remote_config"] - , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/common", "bazel_digest_factory"] , ["@", "src", "src/buildtool/common/remote", "retry_config"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["@", "src", "src/buildtool/execution_api/remote", "bazel_network"] , ["@", "src", "src/buildtool/execution_api/remote", "config"] , ["@", "src", "src/buildtool/file_system", "object_type"] - , ["@", "src", "src/buildtool/crypto", "hash_function"] - , ["@", "src", "src/buildtool/compatibility", "compatibility"] + , ["utils", "catch-main-remote-execution"] + , ["utils", "execution_bazel"] + , ["utils", 
"test_auth_config"] + , ["utils", "test_hash_function_type"] + , ["utils", "test_remote_config"] ] , "stage": ["test", "buildtool", "execution_api", "bazel"] } @@ -44,17 +45,19 @@ , "srcs": ["bytestream_client.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] + , ["@", "src", "src/buildtool/auth", "auth"] + , ["@", "src", "src/buildtool/common", "bazel_digest_factory"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] + , ["@", "src", "src/buildtool/execution_api/common", "bytestream_utils"] + , ["@", "src", "src/buildtool/execution_api/common", "common"] + , ["@", "src", "src/buildtool/execution_api/remote", "bazel_network"] + , ["@", "src", "src/buildtool/execution_api/remote", "config"] + , ["@", "src", "src/buildtool/file_system", "object_type"] , ["utils", "catch-main-remote-execution"] , ["utils", "execution_bazel"] , ["utils", "test_auth_config"] + , ["utils", "test_hash_function_type"] , ["utils", "test_remote_config"] - , ["@", "src", "src/buildtool/common", "common"] - , ["@", "src", "src/buildtool/execution_api/bazel_msg", "bazel_msg"] - , ["@", "src", "src/buildtool/execution_api/remote", "bazel_network"] - , ["@", "src", "src/buildtool/execution_api/remote", "config"] - , ["@", "src", "src/buildtool/file_system", "object_type"] - , ["@", "src", "src/buildtool/crypto", "hash_function"] - , ["@", "src", "src/buildtool/compatibility", "compatibility"] ] , "stage": ["test", "buildtool", "execution_api", "bazel"] } @@ -64,18 +67,20 @@ , "srcs": ["bazel_network.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["utils", "catch-main-remote-execution"] - , ["utils", "execution_bazel"] - , ["utils", "test_auth_config"] - , ["utils", "test_remote_config"] - , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/auth", "auth"] + , ["@", "src", "src/buildtool/common", "bazel_digest_factory"] + , ["@", "src", "src/buildtool/common", "protocol_traits"] , ["@", "src", "src/buildtool/common/remote", 
"retry_config"] - , ["@", "src", "src/buildtool/compatibility", "compatibility"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["@", "src", "src/buildtool/execution_api/bazel_msg", "bazel_msg"] , ["@", "src", "src/buildtool/execution_api/remote", "bazel_network"] , ["@", "src", "src/buildtool/execution_api/remote", "config"] , ["@", "src", "src/buildtool/file_system", "object_type"] - , ["@", "src", "src/buildtool/crypto", "hash_function"] + , ["utils", "catch-main-remote-execution"] + , ["utils", "execution_bazel"] + , ["utils", "test_auth_config"] + , ["utils", "test_hash_function_type"] + , ["utils", "test_remote_config"] ] , "stage": ["test", "buildtool", "execution_api", "bazel"] } @@ -86,17 +91,25 @@ , "data": [["buildtool/storage", "test_data"]] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/buildtool/common", "artifact_description"] + , ["@", "src", "src/buildtool/common", "artifact_digest_factory"] + , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , [ "@" , "src" , "src/buildtool/execution_api/bazel_msg" , "bazel_msg_factory" ] + , [ "@" + , "src" + , "src/buildtool/execution_api/common" + , "artifact_blob_container" + ] + , ["@", "src", "src/buildtool/execution_api/common", "common"] + , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/file_system", "object_type"] - , ["utils", "blob_creator"] - , ["@", "src", "src/buildtool/crypto", "hash_function"] - , ["@", "src", "src/buildtool/compatibility", "compatibility"] + , ["", "catch-main"] + , ["utils", "test_hash_function_type"] ] , "stage": ["test", "buildtool", "execution_api", "bazel"] } @@ -106,14 +119,15 @@ , "srcs": ["bazel_api.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["utils", "catch-main-remote-execution"] - , ["utils", "test_auth_config"] - , ["utils", "test_remote_config"] , ["@", "src", 
"src/buildtool/common/remote", "retry_config"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["@", "src", "src/buildtool/execution_api/remote", "bazel"] + , ["@", "src", "src/buildtool/execution_api/remote", "config"] , ["buildtool/execution_api/common", "api_test"] - , ["@", "src", "src/buildtool/crypto", "hash_function"] - , ["@", "src", "src/buildtool/compatibility", "compatibility"] + , ["utils", "catch-main-remote-execution"] + , ["utils", "test_auth_config"] + , ["utils", "test_hash_function_type"] + , ["utils", "test_remote_config"] ] , "stage": ["test", "buildtool", "execution_api", "bazel"] } @@ -121,12 +135,12 @@ { "type": ["@", "rules", "test", "suite"] , "stage": ["bazel"] , "deps": - [ "bytestream_client" + [ "bazel_api" + , "bytestream_client" , "cas_client" , "execution_client" , "msg_factory" , "network" - , "bazel_api" ] } } diff --git a/test/buildtool/execution_api/bazel/bazel_api.test.cpp b/test/buildtool/execution_api/bazel/bazel_api.test.cpp index bced4a87a..e201111c6 100644 --- a/test/buildtool/execution_api/bazel/bazel_api.test.cpp +++ b/test/buildtool/execution_api/bazel/bazel_api.test.cpp @@ -19,10 +19,10 @@ #include "catch2/catch_test_macros.hpp" #include "src/buildtool/common/remote/retry_config.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/remote/config.hpp" #include "test/buildtool/execution_api/common/api_test.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" #include "test/utils/remote_execution/test_auth_config.hpp" #include "test/utils/remote_execution/test_remote_config.hpp" @@ -46,7 +46,7 @@ class FactoryApi final { &auth_, &retry_config, {}, - hash_function_}}; + &hash_function_}}; } private: @@ -59,15 +59,13 @@ class FactoryApi final { TEST_CASE("BazelAPI: No input, no output", "[execution_api]") { auto remote_config = TestRemoteConfig::ReadFromEnvironment(); + HashFunction const 
hash_function{TestHashType::ReadFromEnvironment()}; + REQUIRE(remote_config); REQUIRE(remote_config->remote_address); auto auth = TestAuthConfig::ReadFromEnvironment(); REQUIRE(auth); - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; - FactoryApi api_factory{ &*remote_config->remote_address, &*auth, hash_function}; TestNoInputNoOutput(api_factory, remote_config->platform_properties); @@ -75,15 +73,13 @@ TEST_CASE("BazelAPI: No input, no output", "[execution_api]") { TEST_CASE("BazelAPI: No input, create output", "[execution_api]") { auto remote_config = TestRemoteConfig::ReadFromEnvironment(); + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; + REQUIRE(remote_config); REQUIRE(remote_config->remote_address); auto auth = TestAuthConfig::ReadFromEnvironment(); REQUIRE(auth); - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; - FactoryApi api_factory{ &*remote_config->remote_address, &*auth, hash_function}; TestNoInputCreateOutput(api_factory, remote_config->platform_properties); @@ -91,15 +87,13 @@ TEST_CASE("BazelAPI: No input, create output", "[execution_api]") { TEST_CASE("BazelAPI: One input copied to output", "[execution_api]") { auto remote_config = TestRemoteConfig::ReadFromEnvironment(); + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; + REQUIRE(remote_config); REQUIRE(remote_config->remote_address); auto auth = TestAuthConfig::ReadFromEnvironment(); REQUIRE(auth); - HashFunction const hash_function{Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; - FactoryApi api_factory{ &*remote_config->remote_address, &*auth, hash_function}; TestOneInputCopiedToOutput(api_factory, remote_config->platform_properties); @@ -107,15 +101,13 @@ TEST_CASE("BazelAPI: One input copied to output", "[execution_api]") { TEST_CASE("BazelAPI: Non-zero exit code, create output", "[execution_api]") { auto remote_config = TestRemoteConfig::ReadFromEnvironment(); + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; + REQUIRE(remote_config); REQUIRE(remote_config->remote_address); auto auth = TestAuthConfig::ReadFromEnvironment(); REQUIRE(auth); - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; - FactoryApi api_factory{ &*remote_config->remote_address, &*auth, hash_function}; TestNonZeroExitCodeCreateOutput(api_factory, @@ -124,15 +116,13 @@ TEST_CASE("BazelAPI: Non-zero exit code, create output", "[execution_api]") { TEST_CASE("BazelAPI: Retrieve two identical trees to path", "[execution_api]") { auto remote_config = TestRemoteConfig::ReadFromEnvironment(); + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; + REQUIRE(remote_config); REQUIRE(remote_config->remote_address); auto auth = TestAuthConfig::ReadFromEnvironment(); REQUIRE(auth); - HashFunction const hash_function{Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; - FactoryApi api_factory{ &*remote_config->remote_address, &*auth, hash_function}; TestRetrieveTwoIdenticalTreesToPath( @@ -142,15 +132,13 @@ TEST_CASE("BazelAPI: Retrieve two identical trees to path", "[execution_api]") { TEST_CASE("BazelAPI: Retrieve file and symlink with same content to path", "[execution_api]") { auto remote_config = TestRemoteConfig::ReadFromEnvironment(); + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; + REQUIRE(remote_config); REQUIRE(remote_config->remote_address); auto auth = TestAuthConfig::ReadFromEnvironment(); REQUIRE(auth); - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; - FactoryApi api_factory{ &*remote_config->remote_address, &*auth, hash_function}; TestRetrieveFileAndSymlinkWithSameContentToPath( @@ -159,15 +147,13 @@ TEST_CASE("BazelAPI: Retrieve file and symlink with same content to path", TEST_CASE("BazelAPI: Retrieve mixed blobs and trees", "[execution_api]") { auto remote_config = TestRemoteConfig::ReadFromEnvironment(); + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; + REQUIRE(remote_config); REQUIRE(remote_config->remote_address); auto auth = TestAuthConfig::ReadFromEnvironment(); REQUIRE(auth); - HashFunction const hash_function{Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; - FactoryApi api_factory{ &*remote_config->remote_address, &*auth, hash_function}; TestRetrieveMixedBlobsAndTrees( @@ -176,15 +162,13 @@ TEST_CASE("BazelAPI: Retrieve mixed blobs and trees", "[execution_api]") { TEST_CASE("BazelAPI: Create directory prior to execution", "[execution_api]") { auto remote_config = TestRemoteConfig::ReadFromEnvironment(); + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; + REQUIRE(remote_config); REQUIRE(remote_config->remote_address); auto auth = TestAuthConfig::ReadFromEnvironment(); REQUIRE(auth); - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; - FactoryApi api_factory{ &*remote_config->remote_address, &*auth, hash_function}; TestCreateDirPriorToExecution(api_factory, diff --git a/test/buildtool/execution_api/bazel/bazel_cas_client.test.cpp b/test/buildtool/execution_api/bazel/bazel_cas_client.test.cpp index bd005497e..e8234282e 100644 --- a/test/buildtool/execution_api/bazel/bazel_cas_client.test.cpp +++ b/test/buildtool/execution_api/bazel/bazel_cas_client.test.cpp @@ -20,13 +20,14 @@ #include "catch2/catch_test_macros.hpp" #include "gsl/gsl" -#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/bazel_digest_factory.hpp" #include "src/buildtool/common/remote/retry_config.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp" #include "src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp" #include "src/buildtool/execution_api/remote/config.hpp" #include "src/buildtool/file_system/object_type.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" #include "test/utils/remote_execution/test_auth_config.hpp" #include "test/utils/remote_execution/test_remote_config.hpp" @@ -49,11 +50,9 @@ TEST_CASE("Bazel internals: CAS Client", 
"[execution_api]") { SECTION("Valid digest and blob") { // digest of "test" - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; - auto digest = - ArtifactDigest::Create(hash_function, content); + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; + auto digest = BazelDigestFactory::HashDataAs( + hash_function, content); // Valid blob BazelBlob blob{digest, content, /*is_exec=*/false}; diff --git a/test/buildtool/execution_api/bazel/bazel_execution_client.test.cpp b/test/buildtool/execution_api/bazel/bazel_execution_client.test.cpp index 093c4eb13..4594de8ee 100644 --- a/test/buildtool/execution_api/bazel/bazel_execution_client.test.cpp +++ b/test/buildtool/execution_api/bazel/bazel_execution_client.test.cpp @@ -17,12 +17,12 @@ #include #include "catch2/catch_test_macros.hpp" -#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/bazel_digest_factory.hpp" #include "src/buildtool/common/remote/retry_config.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/remote/config.hpp" #include "src/buildtool/file_system/object_type.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" #include "test/utils/remote_execution/bazel_action_creator.hpp" #include "test/utils/remote_execution/test_auth_config.hpp" #include "test/utils/remote_execution/test_remote_config.hpp" @@ -31,12 +31,10 @@ TEST_CASE("Bazel internals: Execution Client", "[execution_api]") { std::string instance_name{"remote-execution"}; std::string content("test"); - HashFunction const hash_function{Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; - auto test_digest = static_cast( - ArtifactDigest::Create(hash_function, content)); + auto test_digest = BazelDigestFactory::HashDataAs( + hash_function, content); auto auth_config = TestAuthConfig::ReadFromEnvironment(); REQUIRE(auth_config); @@ -95,6 +93,9 @@ TEST_CASE("Bazel internals: Execution Client", "[execution_api]") { } SECTION("Non-blocking, obtain result later") { + // note that the boolean false means do not wait for the stream to + // become available, and it has nothing to do with waiting until the + // action completes. This is WaitExecution's job :) auto response = execution_client.Execute( instance_name, *action_delayed, config, false); @@ -115,12 +116,10 @@ TEST_CASE("Bazel internals: Execution Client using env variables", std::string instance_name{"remote-execution"}; std::string content("contents of env variable"); - HashFunction const hash_function{Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; - auto test_digest = static_cast( - ArtifactDigest::Create(hash_function, content)); + auto test_digest = BazelDigestFactory::HashDataAs( + hash_function, content); auto auth_config = TestAuthConfig::ReadFromEnvironment(); REQUIRE(auth_config); diff --git a/test/buildtool/execution_api/bazel/bazel_msg_factory.test.cpp b/test/buildtool/execution_api/bazel/bazel_msg_factory.test.cpp index 2c4035b03..ca9d3ca96 100644 --- a/test/buildtool/execution_api/bazel/bazel_msg_factory.test.cpp +++ b/test/buildtool/execution_api/bazel/bazel_msg_factory.test.cpp @@ -15,16 +15,40 @@ #include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp" #include +#include +#include #include #include "catch2/catch_test_macros.hpp" #include "src/buildtool/common/artifact_description.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/crypto/hash_function.hpp" -#include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp" +#include "src/buildtool/execution_api/common/artifact_blob_container.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/file_system/object_type.hpp" -#include "test/utils/blob_creator.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" + +namespace { +/// \brief Create a blob from the content found in file or symlink pointed to by +/// given path. 
+[[nodiscard]] inline auto CreateBlobFromPath( + std::filesystem::path const& fpath, + HashFunction hash_function) noexcept -> std::optional { + auto const type = FileSystemManager::Type(fpath, /*allow_upwards=*/true); + if (not type) { + return std::nullopt; + } + auto const content = FileSystemManager::ReadContentAtPath(fpath, *type); + if (not content.has_value()) { + return std::nullopt; + } + return ArtifactBlob{ArtifactDigestFactory::HashDataAs( + hash_function, *content), + *content, + IsExecutableObject(*type)}; +} +} // namespace TEST_CASE("Bazel internals: MessageFactory", "[execution_api]") { std::filesystem::path workspace{"test/buildtool/storage/data"}; @@ -38,9 +62,7 @@ TEST_CASE("Bazel internals: MessageFactory", "[execution_api]") { std::filesystem::path link = subdir1 / "link"; REQUIRE(FileSystemManager::CreateSymlink("file1", link)); - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; // create the corresponding blobs auto file1_blob = CreateBlobFromPath(file1, hash_function); @@ -54,24 +76,21 @@ TEST_CASE("Bazel internals: MessageFactory", "[execution_api]") { // both files are the same and should result in identical blobs CHECK(*file1_blob->data == *file2_blob->data); CHECK(file1_blob->digest.hash() == file2_blob->digest.hash()); - CHECK(file1_blob->digest.size_bytes() == file2_blob->digest.size_bytes()); + CHECK(file1_blob->digest.size() == file2_blob->digest.size()); // create known artifacts auto artifact1_opt = - ArtifactDescription::CreateKnown(ArtifactDigest{file1_blob->digest}, - ObjectType::File) + ArtifactDescription::CreateKnown(file1_blob->digest, ObjectType::File) .ToArtifact(); auto artifact1 = DependencyGraph::ArtifactNode{std::move(artifact1_opt)}; auto artifact2_opt = - ArtifactDescription::CreateKnown(ArtifactDigest{file2_blob->digest}, - ObjectType::File) + 
ArtifactDescription::CreateKnown(file2_blob->digest, ObjectType::File) .ToArtifact(); auto artifact2 = DependencyGraph::ArtifactNode{std::move(artifact2_opt)}; auto artifact3_opt = - ArtifactDescription::CreateKnown(ArtifactDigest{link_blob->digest}, - ObjectType::Symlink) + ArtifactDescription::CreateKnown(link_blob->digest, ObjectType::Symlink) .ToArtifact(); auto artifact3 = DependencyGraph::ArtifactNode{std::move(artifact3_opt)}; @@ -84,16 +103,16 @@ TEST_CASE("Bazel internals: MessageFactory", "[execution_api]") { // a mapping between digests and content is needed; usually via a concrete // API one gets this content either locally or from the network - std::unordered_map fake_cas{ + std::unordered_map fake_cas{ {file1_blob->digest, file1}, {file2_blob->digest, file2}, {link_blob->digest, link}}; // create blobs via tree - BazelBlobContainer blobs{}; + ArtifactBlobContainer blobs{}; REQUIRE(BazelMsgFactory::CreateDirectoryDigestFromTree( *tree, - [&fake_cas](std::vector const& digests, + [&fake_cas](std::vector const& digests, std::vector* targets) { targets->reserve(digests.size()); for (auto const& digest : digests) { @@ -112,7 +131,7 @@ TEST_CASE("Bazel internals: MessageFactory", "[execution_api]") { } } }, - [&blobs](BazelBlob&& blob) { + [&blobs](ArtifactBlob&& blob) { blobs.Emplace(std::move(blob)); return true; })); diff --git a/test/buildtool/execution_api/bazel/bazel_network.test.cpp b/test/buildtool/execution_api/bazel/bazel_network.test.cpp index edc35ea73..e5dab8682 100644 --- a/test/buildtool/execution_api/bazel/bazel_network.test.cpp +++ b/test/buildtool/execution_api/bazel/bazel_network.test.cpp @@ -21,14 +21,15 @@ #include "catch2/catch_test_macros.hpp" #include "src/buildtool/auth/authentication.hpp" -#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/bazel_digest_factory.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/common/remote/retry_config.hpp" -#include 
"src/buildtool/compatibility/compatibility.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp" #include "src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp" #include "src/buildtool/execution_api/remote/config.hpp" #include "src/buildtool/file_system/object_type.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" #include "test/utils/remote_execution/test_auth_config.hpp" #include "test/utils/remote_execution/test_remote_config.hpp" @@ -46,9 +47,7 @@ TEST_CASE("Bazel network: write/read blobs", "[execution_api]") { RetryConfig retry_config{}; // default retry config - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; auto network = BazelNetwork{instance_name, remote_config->remote_address->host, @@ -56,24 +55,24 @@ TEST_CASE("Bazel network: write/read blobs", "[execution_api]") { &*auth_config, &retry_config, {}, - hash_function}; + &hash_function}; std::string content_foo("foo"); std::string content_bar("bar"); std::string content_baz(kLargeSize, 'x'); // single larger blob - BazelBlob foo{ - ArtifactDigest::Create(hash_function, content_foo), - content_foo, - /*is_exec=*/false}; - BazelBlob bar{ - ArtifactDigest::Create(hash_function, content_bar), - content_bar, - /*is_exec=*/false}; - BazelBlob baz{ - ArtifactDigest::Create(hash_function, content_baz), - content_baz, - /*is_exec=*/false}; + BazelBlob foo{BazelDigestFactory::HashDataAs( + hash_function, content_foo), + content_foo, + /*is_exec=*/false}; + BazelBlob bar{BazelDigestFactory::HashDataAs( + hash_function, content_bar), + content_bar, + /*is_exec=*/false}; + BazelBlob baz{BazelDigestFactory::HashDataAs( + hash_function, content_baz), + content_baz, + /*is_exec=*/false}; // Search blobs via digest 
REQUIRE(network.UploadBlobs(BazelBlobContainer{{foo, bar, baz}})); @@ -97,7 +96,8 @@ TEST_CASE("Bazel network: write/read blobs", "[execution_api]") { } TEST_CASE("Bazel network: read blobs with unknown size", "[execution_api]") { - if (Compatibility::IsCompatible()) { + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; + if (not ProtocolTraits::IsNative(hash_function.GetType())) { // only supported in native mode return; } @@ -113,29 +113,25 @@ TEST_CASE("Bazel network: read blobs with unknown size", "[execution_api]") { RetryConfig retry_config{}; // default retry config - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; - auto network = BazelNetwork{instance_name, remote_config->remote_address->host, remote_config->remote_address->port, &*auth_config, &retry_config, {}, - hash_function}; + &hash_function}; std::string content_foo("foo"); std::string content_bar(kLargeSize, 'x'); // single larger blob - BazelBlob foo{ - ArtifactDigest::Create(hash_function, content_foo), - content_foo, - /*is_exec=*/false}; - BazelBlob bar{ - ArtifactDigest::Create(hash_function, content_bar), - content_bar, - /*is_exec=*/false}; + BazelBlob foo{BazelDigestFactory::HashDataAs( + hash_function, content_foo), + content_foo, + /*is_exec=*/false}; + BazelBlob bar{BazelDigestFactory::HashDataAs( + hash_function, content_bar), + content_bar, + /*is_exec=*/false}; // Upload blobs REQUIRE(network.UploadBlobs(BazelBlobContainer{{foo, bar}})); diff --git a/test/buildtool/execution_api/bazel/bytestream_client.test.cpp b/test/buildtool/execution_api/bazel/bytestream_client.test.cpp index 10fe0c837..16adb0cdb 100644 --- a/test/buildtool/execution_api/bazel/bytestream_client.test.cpp +++ b/test/buildtool/execution_api/bazel/bytestream_client.test.cpp @@ -20,18 +20,16 @@ #include "catch2/catch_test_macros.hpp" #include "src/buildtool/auth/authentication.hpp" -#include 
"src/buildtool/common/artifact_digest.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" +#include "src/buildtool/common/bazel_digest_factory.hpp" #include "src/buildtool/crypto/hash_function.hpp" -#include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp" +#include "src/buildtool/execution_api/common/bytestream_utils.hpp" #include "src/buildtool/execution_api/common/execution_common.hpp" #include "src/buildtool/execution_api/remote/config.hpp" #include "src/buildtool/file_system/object_type.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" #include "test/utils/remote_execution/test_auth_config.hpp" #include "test/utils/remote_execution/test_remote_config.hpp" -constexpr std::size_t kLargeSize = GRPC_DEFAULT_MAX_RECV_MESSAGE_LENGTH + 1; - TEST_CASE("ByteStream Client: Transfer single blob", "[execution_api]") { auto auth_config = TestAuthConfig::ReadFromEnvironment(); REQUIRE(auth_config); @@ -45,30 +43,23 @@ TEST_CASE("ByteStream Client: Transfer single blob", "[execution_api]") { &*auth_config}; auto uuid = CreateUUIDVersion4(*CreateProcessUniqueId()); - HashFunction const hash_function{Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; SECTION("Upload small blob") { std::string instance_name{"remote-execution"}; std::string content("foobar"); // digest of "foobar" - auto digest = static_cast( - ArtifactDigest::Create(hash_function, content)); + auto digest = BazelDigestFactory::HashDataAs( + hash_function, content); - CHECK(stream.Write(fmt::format("{}/uploads/{}/blobs/{}/{}", - instance_name, - uuid, - digest.hash(), - digest.size_bytes()), - content)); + CHECK(stream.Write( + ByteStreamUtils::WriteRequest{instance_name, uuid, digest}, + content)); SECTION("Download small blob") { - auto data = stream.Read(fmt::format("{}/blobs/{}/{}", - instance_name, - digest.hash(), - digest.size_bytes())); + auto const data = stream.Read( + ByteStreamUtils::ReadRequest{instance_name, digest}); CHECK(data == content); } @@ -80,19 +71,17 @@ TEST_CASE("ByteStream Client: Transfer single blob", "[execution_api]") { std::string other_content("This is a differnt string"); // Valid digest, but for a different string - auto digest = static_cast( - ArtifactDigest::Create(hash_function, - other_content)); + auto digest = BazelDigestFactory::HashDataAs( + hash_function, other_content); - CHECK(not stream.Write(fmt::format("{}/uploads/{}/blobs/{}/{}", - instance_name, - uuid, - digest.hash(), - digest.size_bytes()), - content)); + CHECK(not stream.Write( + ByteStreamUtils::WriteRequest{instance_name, uuid, digest}, + content)); } SECTION("Upload large blob") { + static constexpr std::size_t kLargeSize = + GRPC_DEFAULT_MAX_RECV_MESSAGE_LENGTH + 1; std::string instance_name{"remote-execution"}; std::string content(kLargeSize, '\0'); @@ -101,31 +90,23 @@ TEST_CASE("ByteStream Client: Transfer single blob", "[execution_api]") { } // digest of "instance_nameinstance_nameinstance_..." 
- auto digest = static_cast( - ArtifactDigest::Create(hash_function, content)); + auto digest = BazelDigestFactory::HashDataAs( + hash_function, content); - CHECK(stream.Write(fmt::format("{}/uploads/{}/blobs/{}/{}", - instance_name, - uuid, - digest.hash(), - digest.size_bytes()), - content)); + CHECK(stream.Write( + ByteStreamUtils::WriteRequest{instance_name, uuid, digest}, + content)); SECTION("Download large blob") { - auto data = stream.Read(fmt::format("{}/blobs/{}/{}", - instance_name, - digest.hash(), - digest.size_bytes())); + auto const data = stream.Read( + ByteStreamUtils::ReadRequest{instance_name, digest}); CHECK(data == content); } SECTION("Incrementally download large blob") { - auto reader = - stream.IncrementalRead(fmt::format("{}/blobs/{}/{}", - instance_name, - digest.hash(), - digest.size_bytes())); + auto reader = stream.IncrementalRead( + ByteStreamUtils::ReadRequest{instance_name, digest}); std::string data{}; auto chunk = reader.Next(); @@ -139,124 +120,3 @@ TEST_CASE("ByteStream Client: Transfer single blob", "[execution_api]") { } } } - -TEST_CASE("ByteStream Client: Transfer multiple blobs", "[execution_api]") { - auto auth_config = TestAuthConfig::ReadFromEnvironment(); - REQUIRE(auth_config); - - auto remote_config = TestRemoteConfig::ReadFromEnvironment(); - REQUIRE(remote_config); - REQUIRE(remote_config->remote_address); - - auto stream = ByteStreamClient{remote_config->remote_address->host, - remote_config->remote_address->port, - &*auth_config}; - auto uuid = CreateUUIDVersion4(*CreateProcessUniqueId()); - - HashFunction const hash_function{Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; - - SECTION("Upload small blobs") { - std::string instance_name{"remote-execution"}; - - BazelBlob foo{ - ArtifactDigest::Create(hash_function, "foo"), - "foo", - /*is_exec=*/false}; - BazelBlob bar{ - ArtifactDigest::Create(hash_function, "bar"), - "bar", - /*is_exec=*/false}; - BazelBlob baz{ - ArtifactDigest::Create(hash_function, "baz"), - "baz", - /*is_exec=*/false}; - - CHECK(stream.WriteMany( - {foo, bar, baz}, - [&instance_name, &uuid](auto const& blob) { - return fmt::format("{}/uploads/{}/blobs/{}/{}", - instance_name, - uuid, - blob.digest.hash(), - blob.digest.size_bytes()); - }, - [](auto const& blob) { return *blob.data; })); - - SECTION("Download small blobs") { - std::vector contents{}; - stream.ReadMany( - {foo.digest, bar.digest, baz.digest}, - [&instance_name](auto const& digest) -> std::string { - return fmt::format("{}/blobs/{}/{}", - instance_name, - digest.hash(), - digest.size_bytes()); - }, - [&contents](auto data) { - contents.emplace_back(std::move(data)); - }); - REQUIRE(contents.size() == 3); - CHECK(contents[0] == *foo.data); - CHECK(contents[1] == *bar.data); - CHECK(contents[2] == *baz.data); - } - } - - SECTION("Upload large blobs") { - std::string instance_name{"remote-execution"}; - - std::string content_foo(kLargeSize, '\0'); - std::string content_bar(kLargeSize, '\0'); - std::string content_baz(kLargeSize, '\0'); - for (std::size_t i{}; i < content_foo.size(); ++i) { - content_foo[i] = instance_name[(i + 0) % instance_name.size()]; - content_bar[i] = instance_name[(i + 1) % instance_name.size()]; - content_baz[i] = instance_name[(i + 2) % instance_name.size()]; - } - - BazelBlob foo{ArtifactDigest::Create(hash_function, - content_foo), - content_foo, - /*is_exec=*/false}; - BazelBlob bar{ArtifactDigest::Create(hash_function, - content_bar), - content_bar, - /*is_exec=*/false}; - BazelBlob baz{ArtifactDigest::Create(hash_function, - content_baz), - content_baz, - 
/*is_exec=*/false}; - - CHECK(stream.WriteMany( - {foo, bar, baz}, - [&instance_name, &uuid](auto const& blob) { - return fmt::format("{}/uploads/{}/blobs/{}/{}", - instance_name, - uuid, - blob.digest.hash(), - blob.digest.size_bytes()); - }, - [](auto const& blob) { return *blob.data; })); - - SECTION("Download large blobs") { - std::vector contents{}; - stream.ReadMany( - {foo.digest, bar.digest, baz.digest}, - [&instance_name](auto const& digest) -> std::string { - return fmt::format("{}/blobs/{}/{}", - instance_name, - digest.hash(), - digest.size_bytes()); - }, - [&contents](auto data) { - contents.emplace_back(std::move(data)); - }); - REQUIRE(contents.size() == 3); - CHECK(contents[0] == *foo.data); - CHECK(contents[1] == *bar.data); - CHECK(contents[2] == *baz.data); - } - } -} diff --git a/test/buildtool/execution_api/common/TARGETS b/test/buildtool/execution_api/common/TARGETS index c181447b9..13b393e31 100644 --- a/test/buildtool/execution_api/common/TARGETS +++ b/test/buildtool/execution_api/common/TARGETS @@ -5,14 +5,62 @@ , "deps": [ ["@", "catch2", "", "catch2"] , ["@", "src", "src/buildtool/common", "artifact_description"] + , ["@", "src", "src/buildtool/common", "artifact_digest_factory"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["@", "src", "src/buildtool/execution_api/common", "common"] , ["@", "src", "src/buildtool/execution_api/local", "config"] , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] + , ["utils", "test_hash_function_type"] + ] + , "stage": ["test", "buildtool", "execution_api", "common"] + } +, "bytestream_utils": + { "type": ["@", "rules", "CC/test", "test"] + , "name": ["bytestream_utils"] + , "srcs": ["bytestream_utils.test.cpp"] + , "private-deps": + [ ["@", "catch2", "", "catch2"] + , ["@", "src", "src/buildtool/common", "bazel_digest_factory"] + , ["@", "src", "src/buildtool/common", 
"bazel_types"] , ["@", "src", "src/buildtool/crypto", "hash_function"] - , ["@", "src", "src/buildtool/compatibility", "compatibility"] + , ["@", "src", "src/buildtool/execution_api/common", "bytestream_utils"] + , ["@", "src", "src/buildtool/execution_api/common", "common"] + , ["", "catch-main"] + , ["utils", "test_hash_function_type"] ] , "stage": ["test", "buildtool", "execution_api", "common"] } +, "tree_rehashing": + { "type": ["@", "rules", "CC/test", "test"] + , "name": ["tree_rehashing"] + , "srcs": ["tree_rehashing.test.cpp"] + , "private-deps": + [ ["@", "catch2", "", "catch2"] + , ["@", "src", "src/buildtool/common", "artifact_digest_factory"] + , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/common", "protocol_traits"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] + , [ "@" + , "src" + , "src/buildtool/execution_api/bazel_msg" + , "bazel_msg_factory" + ] + , ["@", "src", "src/buildtool/file_system", "file_system_manager"] + , ["@", "src", "src/buildtool/storage", "config"] + , ["@", "src", "src/buildtool/storage", "storage"] + , ["@", "src", "src/utils/cpp", "expected"] + , ["@", "src", "src/utils/cpp", "tmp_dir"] + , ["", "catch-main"] + , ["utils", "large_object_utils"] + , ["utils", "test_storage_config"] + ] + , "stage": ["test", "buildtool", "execution_api", "common"] + } +, "TESTS": + { "type": ["@", "rules", "test", "suite"] + , "stage": ["common"] + , "deps": ["bytestream_utils", "tree_rehashing"] + } } diff --git a/test/buildtool/execution_api/common/api_test.hpp b/test/buildtool/execution_api/common/api_test.hpp index b93215a5a..d00eec7be 100644 --- a/test/buildtool/execution_api/common/api_test.hpp +++ b/test/buildtool/execution_api/common/api_test.hpp @@ -24,7 +24,7 @@ #include "catch2/catch_test_macros.hpp" #include "src/buildtool/common/artifact_description.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include 
"src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/common/execution_action.hpp" #include "src/buildtool/execution_api/common/execution_api.hpp" @@ -33,6 +33,7 @@ #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" using ApiFactory = std::function; using ExecProps = std::map; @@ -139,12 +140,10 @@ using ExecProps = std::map; bool is_hermetic = false) { std::string test_content("test"); - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; - auto test_digest = - ArtifactDigest::Create(hash_function, test_content); + auto test_digest = ArtifactDigestFactory::HashDataAs( + hash_function, test_content); std::string output_path{"output_file"}; @@ -165,26 +164,28 @@ using ExecProps = std::map; action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput); // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); if (is_hermetic) { CHECK(not response->IsCached()); SECTION("Rerun execution to verify caching") { // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = 
response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); CHECK(response->IsCached()); } } @@ -194,24 +195,25 @@ using ExecProps = std::map; action->SetCacheFlag(IExecutionAction::CacheFlag::DoNotCacheOutput); // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); CHECK(not response->IsCached()); SECTION("Rerun execution to verify caching") { // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); CHECK(not response->IsCached()); } } @@ -223,12 +225,10 @@ using ExecProps = std::map; bool is_hermetic = false) { std::string test_content("test"); - HashFunction const hash_function{Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; - auto test_digest = - ArtifactDigest::Create(hash_function, test_content); + auto test_digest = ArtifactDigestFactory::HashDataAs( + hash_function, test_content); auto input_artifact_opt = ArtifactDescription::CreateKnown(test_digest, ObjectType::File) @@ -257,26 +257,28 @@ using ExecProps = std::map; action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput); // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); if (is_hermetic) { CHECK(not response->IsCached()); SECTION("Rerun execution to verify caching") { // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); CHECK(response->IsCached()); } } @@ -286,24 +288,26 @@ using ExecProps = std::map; action->SetCacheFlag(IExecutionAction::CacheFlag::DoNotCacheOutput); // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto 
const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); CHECK(not response->IsCached()); SECTION("Rerun execution to verify caching") { // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); CHECK(not response->IsCached()); } } @@ -314,12 +318,10 @@ using ExecProps = std::map; ExecProps const& props) { std::string test_content("test"); - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; - auto test_digest = - ArtifactDigest::Create(hash_function, test_content); + auto test_digest = ArtifactDigestFactory::HashDataAs( + hash_function, test_content); std::string output_path{"output_file"}; @@ -340,27 +342,29 @@ using ExecProps = std::map; action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput); // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result CHECK(response->ExitCode() == 1); - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); CHECK(not response->IsCached()); SECTION("Rerun execution 
to verify that non-zero actions are rerun") { // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result CHECK(response->ExitCode() == 1); - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); CHECK(not response->IsCached()); } } @@ -369,14 +373,15 @@ using ExecProps = std::map; action->SetCacheFlag(IExecutionAction::CacheFlag::DoNotCacheOutput); // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result CHECK(response->ExitCode() == 1); - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); CHECK(not response->IsCached()); SECTION("Rerun execution to verify non-zero actions are not cached") { @@ -386,9 +391,10 @@ using ExecProps = std::map; // verify result CHECK(response->ExitCode() == 1); - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); CHECK(not response->IsCached()); } } @@ -432,7 +438,7 @@ using ExecProps = std::map; action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput); // run execution - auto response = action->Execute(); + auto const 
response = action->Execute(); REQUIRE(response); // verify result @@ -442,10 +448,11 @@ using ExecProps = std::map; CHECK_FALSE(response->IsCached()); } - auto artifacts = response->Artifacts(); - REQUIRE_FALSE(artifacts.empty()); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE_FALSE(artifacts.value()->empty()); - auto info = artifacts.begin()->second; + auto info = artifacts.value()->begin()->second; SECTION("retrieve via same API object") { auto out_path = GetTestDir(test_name) / "out1"; @@ -505,7 +512,7 @@ TestRetrieveFileAndSymlinkWithSameContentToPath(ApiFactory const& api_factory, action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput); // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result @@ -515,10 +522,11 @@ TestRetrieveFileAndSymlinkWithSameContentToPath(ApiFactory const& api_factory, CHECK_FALSE(response->IsCached()); } - auto artifacts = response->Artifacts(); - REQUIRE_FALSE(artifacts.empty()); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE_FALSE(artifacts.value()->empty()); - auto info = artifacts.begin()->second; + auto info = artifacts.value()->begin()->second; SECTION("retrieve via same API object") { auto out_path = GetTestDir(test_name) / "out1"; @@ -574,7 +582,7 @@ TestRetrieveFileAndSymlinkWithSameContentToPath(ApiFactory const& api_factory, action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput); // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result @@ -584,16 +592,17 @@ TestRetrieveFileAndSymlinkWithSameContentToPath(ApiFactory const& api_factory, CHECK_FALSE(response->IsCached()); } - auto artifacts = response->Artifacts(); - REQUIRE_FALSE(artifacts.empty()); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE_FALSE(artifacts.value()->empty()); 
std::vector paths{}; std::vector infos{}; SECTION("retrieve via same API object") { auto out_path = GetTestDir(test_name) / "out1"; - std::for_each(artifacts.begin(), - artifacts.end(), + std::for_each(artifacts.value()->begin(), + artifacts.value()->end(), [&out_path, &paths, &infos](auto const& entry) { paths.emplace_back(out_path / entry.first); infos.emplace_back(entry.second); @@ -607,8 +616,8 @@ TestRetrieveFileAndSymlinkWithSameContentToPath(ApiFactory const& api_factory, SECTION("retrieve from new API object but same endpoint") { auto second_api = api_factory(); auto out_path = GetTestDir(test_name) / "out2"; - std::for_each(artifacts.begin(), - artifacts.end(), + std::for_each(artifacts.value()->begin(), + artifacts.value()->end(), [&out_path, &paths, &infos](auto const& entry) { paths.emplace_back(out_path / entry.first); infos.emplace_back(entry.second); @@ -643,26 +652,28 @@ TestRetrieveFileAndSymlinkWithSameContentToPath(ApiFactory const& api_factory, action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput); // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(IsTreeObject(artifacts.at(output_path).type)); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(IsTreeObject(artifacts.value()->at(output_path).type)); if (is_hermetic) { CHECK(not response->IsCached()); SECTION("Rerun execution to verify caching") { // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(IsTreeObject(artifacts.at(output_path).type)); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + 
REQUIRE(artifacts.value()->contains(output_path)); + CHECK(IsTreeObject(artifacts.value()->at(output_path).type)); CHECK(response->IsCached()); } } @@ -672,24 +683,26 @@ TestRetrieveFileAndSymlinkWithSameContentToPath(ApiFactory const& api_factory, action->SetCacheFlag(IExecutionAction::CacheFlag::DoNotCacheOutput); // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(IsTreeObject(artifacts.at(output_path).type)); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(IsTreeObject(artifacts.value()->at(output_path).type)); CHECK(not response->IsCached()); SECTION("Rerun execution to verify caching") { // run execution - auto response = action->Execute(); + auto const response = action->Execute(); REQUIRE(response); // verify result - auto artifacts = response->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(IsTreeObject(artifacts.at(output_path).type)); + auto const artifacts = response->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(IsTreeObject(artifacts.value()->at(output_path).type)); CHECK(not response->IsCached()); } } diff --git a/test/buildtool/execution_api/common/bytestream_utils.test.cpp b/test/buildtool/execution_api/common/bytestream_utils.test.cpp new file mode 100644 index 000000000..a2f139e7a --- /dev/null +++ b/test/buildtool/execution_api/common/bytestream_utils.test.cpp @@ -0,0 +1,60 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "src/buildtool/execution_api/common/bytestream_utils.hpp" + +#include +#include + +#include "catch2/catch_test_macros.hpp" +#include "src/buildtool/common/bazel_digest_factory.hpp" +#include "src/buildtool/common/bazel_types.hpp" +#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/execution_api/common/execution_common.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" + +TEST_CASE("ReadRequest", "[common]") { + static constexpr auto* kInstanceName = "instance_name"; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; + + auto const digest = BazelDigestFactory::HashDataAs( + hash_function, "test_string"); + + std::string const request = + ByteStreamUtils::ReadRequest{kInstanceName, digest}.ToString(); + auto const parsed = ByteStreamUtils::ReadRequest::FromString(request); + REQUIRE(parsed); + CHECK(parsed->GetInstanceName() == kInstanceName); + CHECK(std::equal_to{}(parsed->GetDigest(), digest)); +} + +TEST_CASE("WriteRequest", "[common]") { + static constexpr auto* kInstanceName = "instance_name"; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; + + auto id = CreateProcessUniqueId(); + REQUIRE(id); + std::string const uuid = CreateUUIDVersion4(*id); + + auto const digest = BazelDigestFactory::HashDataAs( + hash_function, "test_string"); + + std::string const request = + ByteStreamUtils::WriteRequest{kInstanceName, uuid, digest}.ToString(); + auto const parsed = ByteStreamUtils::WriteRequest::FromString(request); + REQUIRE(parsed); + 
CHECK(parsed->GetInstanceName() == kInstanceName); + CHECK(parsed->GetUUID() == uuid); + CHECK(std::equal_to{}(parsed->GetDigest(), digest)); +} diff --git a/test/buildtool/execution_api/common/tree_rehashing.test.cpp b/test/buildtool/execution_api/common/tree_rehashing.test.cpp new file mode 100644 index 000000000..bccec3ab4 --- /dev/null +++ b/test/buildtool/execution_api/common/tree_rehashing.test.cpp @@ -0,0 +1,303 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include +#include +#include +#include +#include +#include +#include + +#include "catch2/catch_test_macros.hpp" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/common/protocol_traits.hpp" +#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp" +#include "src/buildtool/file_system/file_system_manager.hpp" +#include "src/buildtool/storage/config.hpp" +#include "src/buildtool/storage/storage.hpp" +#include "src/utils/cpp/expected.hpp" +#include "src/utils/cpp/tmp_dir.hpp" +#include "test/utils/hermeticity/test_storage_config.hpp" +#include "test/utils/large_objects/large_object_utils.hpp" + +namespace { +[[nodiscard]] auto GetTypedStorageConfig(StorageConfig const& config, + HashFunction::Type hash_type) + -> expected; + +[[nodiscard]] auto GenerateTestDirectory() -> std::optional; + +/// \brief Deeply hash a local tree and add it to the storage. +[[nodiscard]] auto StoreHashedTree(Storage const& storage, + std::filesystem::path const& path) noexcept + -> std::optional; + +/// \brief Deeply hash a local tree, doesn't add anything to the +/// storage, just calls for ArtifactDigestFactory. +[[nodiscard]] auto HashTree(HashFunction::Type hash_type, + std::filesystem::path const& path) noexcept + -> std::optional; + +/// \brief Deeply rehash tree that is present in source storage and add it to +/// the target storage. 
+[[nodiscard]] auto StoreRehashedTree(Storage const& source, + Storage const& target, + ArtifactDigest const& tree) + -> std::optional; +} // namespace + +TEST_CASE("Rehash tree", "[common]") { + // Read storage config from the environment: + auto const env_config = TestStorageConfig::Create(); + + // Deploy native storage: + auto const native_config = + GetTypedStorageConfig(env_config.Get(), HashFunction::Type::GitSHA1); + REQUIRE(native_config); + auto const native_storage = Storage::Create(&*native_config); + + // Deploy compatible storage: + auto const compatible_config = GetTypedStorageConfig( + env_config.Get(), HashFunction::Type::PlainSHA256); + REQUIRE(compatible_config); + auto const compatible_storage = Storage::Create(&*compatible_config); + + // Randomize test directory: + auto const test_dir = GenerateTestDirectory(); + REQUIRE(test_dir.has_value()); + auto const& test_dir_path = (*test_dir)->GetPath(); + + auto const check_rehash = [&test_dir_path](Storage const& source, + Storage const& target) -> void { + // Add digest to the source storage: + auto const stored_digest = StoreHashedTree(source, test_dir_path); + REQUIRE(stored_digest.has_value()); + + // calculate the "expected" after rehashing digest: + auto const expected_rehashed = + HashTree(target.GetHashFunction().GetType(), test_dir_path); + REQUIRE(expected_rehashed.has_value()); + + // Rehash source digest present in the source storage and add + // it to the target storage. The resulting digest must be + // equal to expected_rehashed. 
+ auto const rehashed = StoreRehashedTree(source, target, *stored_digest); + REQUIRE(rehashed.has_value()); + CHECK(rehashed->hash() == expected_rehashed->hash()); + }; + + SECTION("GitTree to bazel::Directory") { + check_rehash(native_storage, compatible_storage); + } +} + +namespace { +[[nodiscard]] auto GetTypedStorageConfig(StorageConfig const& config, + HashFunction::Type hash_type) + -> expected { + return StorageConfig::Builder{} + .SetBuildRoot(config.build_root) + .SetHashType(hash_type) + .Build(); +} + +[[nodiscard]] auto GenerateTestDirectory() -> std::optional { + auto const test_dir = FileSystemManager::GetCurrentDirectory() / "tmp"; + auto head_temp_directory = TmpDir::Create(test_dir / "head_dir"); + auto const head_temp_dir_path = head_temp_directory->GetPath(); + // ├── exec_1 + // ├── file_1 + // ├── symlink_to_nested_dir_1_1 -> nested_dir_1 / nested_dir_1_1 + // ├── symlink_to_nested_dir_2_1 -> nested_dir_2 / nested_dir_2_1 + // ├── nested_dir_1 + // │ ├── ... + // │ ├── nested_dir_1_1 + // │ │ └── ... + // │ └── nested_dir_1_2 + // │ └── ... + // └── nested_dir_2 + // ├── ... + // ├── nested_dir_2_1 + // │ └── ... + // └── nested_dir_2_2 + // └── ... 
+ static constexpr std::size_t kFileSize = 128; + auto const file_path = head_temp_dir_path / "file_1"; + if (not LargeObjectUtils::GenerateFile(file_path, kFileSize)) { + return std::nullopt; + } + + auto const exec_path = head_temp_dir_path / "exec_1"; + if (not LargeObjectUtils::GenerateFile(exec_path, + kFileSize, + /*is_executable =*/true)) { + return std::nullopt; + } + + std::array const directories = { + head_temp_dir_path / "nested_dir_1", + head_temp_dir_path / "nested_dir_1" / "nested_dir_1_1", + head_temp_dir_path / "nested_dir_1" / "nested_dir_1_2", + head_temp_dir_path / "nested_dir_2", + head_temp_dir_path / "nested_dir_2" / "nested_dir_2_1", + head_temp_dir_path / "nested_dir_2" / "nested_dir_2_2"}; + + static constexpr std::size_t kDirEntries = 16; + for (auto const& path : directories) { + if (not LargeObjectUtils::GenerateDirectory(path, kDirEntries)) { + return std::nullopt; + } + } + + // Create non-upwards symlinks in the top directory: + if (not FileSystemManager::CreateNonUpwardsSymlink( + std::filesystem::path("nested_dir_1") / "nested_dir_1_1", + head_temp_dir_path / "symlink_to_nested_dir_1_1") or + not FileSystemManager::CreateNonUpwardsSymlink( + std::filesystem::path("nested_dir_2") / "nested_dir_2_1", + head_temp_dir_path / "symlink_to_nested_dir_2_1")) { + return std::nullopt; + } + return head_temp_directory; +} + +[[nodiscard]] auto StoreHashedTree(Storage const& storage, + std::filesystem::path const& path) noexcept + -> std::optional { + auto const& cas = storage.CAS(); + + auto store_blob = [&cas](std::filesystem::path const& path, + auto is_exec) -> std::optional { + return cas.StoreBlob(path, is_exec); + }; + auto store_tree = + [&cas](std::string const& content) -> std::optional { + return cas.StoreTree(content); + }; + auto store_symlink = + [&cas](std::string const& content) -> std::optional { + return cas.StoreBlob(content); + }; + + return ProtocolTraits::IsNative(cas.GetHashFunction().GetType()) + ? 
BazelMsgFactory::CreateGitTreeDigestFromLocalTree( + path, store_blob, store_tree, store_symlink) + : BazelMsgFactory::CreateDirectoryDigestFromLocalTree( + path, store_blob, store_tree, store_symlink); +} + +[[nodiscard]] auto HashTree(HashFunction::Type hash_type, + std::filesystem::path const& path) noexcept + -> std::optional { + HashFunction const hash_function{hash_type}; + auto hash_blob = [hash_function]( + std::filesystem::path const& path, + auto /*is_exec*/) -> std::optional { + return ArtifactDigestFactory::HashFileAs( + hash_function, path); + }; + auto hash_tree = + [hash_function]( + std::string const& content) -> std::optional { + return ArtifactDigestFactory::HashDataAs( + hash_function, content); + }; + auto hash_symlink = + [hash_function]( + std::string const& content) -> std::optional { + return ArtifactDigestFactory::HashDataAs( + hash_function, content); + }; + return ProtocolTraits::IsNative(hash_type) + ? BazelMsgFactory::CreateGitTreeDigestFromLocalTree( + path, hash_blob, hash_tree, hash_symlink) + : BazelMsgFactory::CreateDirectoryDigestFromLocalTree( + path, hash_blob, hash_tree, hash_symlink); +} + +[[nodiscard]] auto StoreRehashedTree(Storage const& source, + Storage const& target, + ArtifactDigest const& tree) + -> std::optional { + BazelMsgFactory::GitReadFunc const read_git = + [&source](ArtifactDigest const& digest, ObjectType type) + -> std::optional> { + return IsTreeObject(type) + ? 
source.CAS().TreePath(digest) + : source.CAS().BlobPath(digest, IsExecutableObject(type)); + }; + + BazelMsgFactory::BlobStoreFunc const store_file = + [&target](std::variant const& data, + bool is_exec) -> std::optional { + if (std::holds_alternative(data)) { + return target.CAS().StoreBlob( + std::get(data), is_exec); + } + if (std::holds_alternative(data)) { + return target.CAS().StoreBlob( + std::get(data), is_exec); + } + return std::nullopt; + }; + + BazelMsgFactory::TreeStoreFunc const store_tree = + [&target](std::string const& content) -> std::optional { + return target.CAS().StoreTree(content); + }; + + BazelMsgFactory::SymlinkStoreFunc const store_symlink = + [&target](std::string const& content) -> std::optional { + return target.CAS().StoreBlob(content); + }; + + // Emulate rehash mapping using a regular std::unordered_map: + std::unordered_map rehash_map; + + BazelMsgFactory::RehashedDigestReadFunc const read_rehashed = + [&rehash_map](ArtifactDigest const& digest) + -> std::optional { + auto const it = rehash_map.find(digest); + if (it == rehash_map.end()) { + return std::nullopt; + } + return it->second; + }; + + BazelMsgFactory::RehashedDigestStoreFunc const store_rehashed = + [&rehash_map](ArtifactDigest const& key, + ArtifactDigest const& value, + ObjectType type) mutable -> std::optional { + rehash_map[key] = Artifact::ObjectInfo{.digest = value, .type = type}; + return std::nullopt; + }; + + REQUIRE(not ProtocolTraits::IsNative(target.GetHashFunction().GetType())); + auto result = + BazelMsgFactory::CreateDirectoryDigestFromGitTree(tree, + read_git, + store_file, + store_tree, + store_symlink, + read_rehashed, + store_rehashed); + if (result) { + return *std::move(result); + } + return std::nullopt; +} +} // namespace diff --git a/test/buildtool/execution_api/execution_service/TARGETS b/test/buildtool/execution_api/execution_service/TARGETS index 0e32c4ad9..5c585c126 100644 --- a/test/buildtool/execution_api/execution_service/TARGETS +++ 
b/test/buildtool/execution_api/execution_service/TARGETS @@ -3,9 +3,11 @@ , "name": ["cas_server"] , "srcs": ["cas_server.test.cpp"] , "private-deps": - [ ["", "catch-main"] - , ["@", "catch2", "", "catch2"] - , ["utils", "test_storage_config"] + [ ["@", "catch2", "", "catch2"] + , ["@", "gsl", "", "gsl"] + , ["@", "src", "src/buildtool/common", "bazel_digest_factory"] + , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/common", "protocol_traits"] , [ "@" , "src" , "src/buildtool/execution_api/execution_service" @@ -15,10 +17,11 @@ , ["@", "src", "src/buildtool/execution_api/local", "context"] , ["@", "src", "src/buildtool/file_system", "git_repo"] , ["@", "src", "src/buildtool/file_system", "object_type"] - , ["@", "src", "src/buildtool/common", "common"] , ["@", "src", "src/buildtool/storage", "config"] , ["@", "src", "src/buildtool/storage", "storage"] - , ["@", "gsl", "", "gsl"] + , ["", "catch-main"] + , ["utils", "test_hash_function_type"] + , ["utils", "test_storage_config"] ] , "stage": ["test", "buildtool", "execution_api", "execution_service"] } diff --git a/test/buildtool/execution_api/execution_service/cas_server.test.cpp b/test/buildtool/execution_api/execution_service/cas_server.test.cpp index 4bc905460..af5a3d584 100644 --- a/test/buildtool/execution_api/execution_service/cas_server.test.cpp +++ b/test/buildtool/execution_api/execution_service/cas_server.test.cpp @@ -19,12 +19,15 @@ #include "catch2/catch_test_macros.hpp" #include "gsl/gsl" #include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/bazel_digest_factory.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/execution_api/local/config.hpp" #include "src/buildtool/execution_api/local/context.hpp" #include "src/buildtool/file_system/git_repo.hpp" #include "src/buildtool/file_system/object_type.hpp" #include "src/buildtool/storage/config.hpp" #include "src/buildtool/storage/storage.hpp" +#include 
"test/utils/hermeticity/test_hash_function_type.hpp" #include "test/utils/hermeticity/test_storage_config.hpp" namespace { @@ -46,7 +49,7 @@ namespace { TEST_CASE("CAS Service: upload incomplete tree", "[execution_service]") { // For compatible mode tree invariants aren't checked. - if (Compatibility::IsCompatible()) { + if (not ProtocolTraits::IsNative(TestHashType::ReadFromEnvironment())) { return; } @@ -66,7 +69,7 @@ TEST_CASE("CAS Service: upload incomplete tree", "[execution_service]") { auto empty_entries = GitRepo::tree_entries_t{}; auto empty_tree = GitRepo::CreateShallowTree(empty_entries); REQUIRE(empty_tree); - auto empty_tree_digest = ArtifactDigest::Create( + auto empty_tree_digest = BazelDigestFactory::HashDataAs( storage_config.Get().hash_function, empty_tree->second); // Create a tree containing the empty tree. @@ -74,7 +77,7 @@ TEST_CASE("CAS Service: upload incomplete tree", "[execution_service]") { entries[empty_tree->first].emplace_back("empty_tree", ObjectType::Tree); auto tree = GitRepo::CreateShallowTree(entries); REQUIRE(tree); - auto tree_digest = ArtifactDigest::Create( + auto tree_digest = BazelDigestFactory::HashDataAs( storage_config.Get().hash_function, tree->second); // Upload tree. 
The tree invariant is violated, thus, a negative answer is diff --git a/test/buildtool/execution_api/local/TARGETS b/test/buildtool/execution_api/local/TARGETS index f88a8ff01..bbf83da9f 100644 --- a/test/buildtool/execution_api/local/TARGETS +++ b/test/buildtool/execution_api/local/TARGETS @@ -4,8 +4,8 @@ , "srcs": ["local_execution.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/buildtool/common", "artifact_description"] + , ["@", "src", "src/buildtool/common", "artifact_digest_factory"] , ["@", "src", "src/buildtool/common", "config"] , ["@", "src", "src/buildtool/execution_api/local", "config"] , ["@", "src", "src/buildtool/execution_api/local", "context"] @@ -13,9 +13,10 @@ , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] - , ["utils", "test_storage_config"] - , ["@", "src", "src/buildtool/storage", "storage"] , ["@", "src", "src/buildtool/storage", "config"] + , ["@", "src", "src/buildtool/storage", "storage"] + , ["", "catch-main"] + , ["utils", "test_storage_config"] ] , "stage": ["test", "buildtool", "execution_api", "local"] } @@ -25,14 +26,14 @@ , "srcs": ["local_api.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/buildtool/execution_api/local", "config"] , ["@", "src", "src/buildtool/execution_api/local", "context"] , ["@", "src", "src/buildtool/execution_api/local", "local"] + , ["@", "src", "src/buildtool/storage", "config"] + , ["@", "src", "src/buildtool/storage", "storage"] + , ["", "catch-main"] , ["buildtool/execution_api/common", "api_test"] , ["utils", "test_storage_config"] - , ["@", "src", "src/buildtool/storage", "storage"] - , ["@", "src", "src/buildtool/storage", "config"] ] , "stage": ["test", "buildtool", "execution_api", "local"] } diff --git a/test/buildtool/execution_api/local/local_execution.test.cpp 
b/test/buildtool/execution_api/local/local_execution.test.cpp index 491410769..28fd03c66 100644 --- a/test/buildtool/execution_api/local/local_execution.test.cpp +++ b/test/buildtool/execution_api/local/local_execution.test.cpp @@ -20,6 +20,7 @@ #include "catch2/catch_test_macros.hpp" #include "src/buildtool/common/artifact_description.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/common/repository_config.hpp" #include "src/buildtool/execution_api/local/config.hpp" #include "src/buildtool/execution_api/local/context.hpp" @@ -189,7 +190,7 @@ TEST_CASE("LocalExecution: No input, create output", "[execution_api]") { auto api = LocalApi(&local_context, &repo_config); std::string test_content("test"); - auto test_digest = ArtifactDigest::Create( + auto test_digest = ArtifactDigestFactory::HashDataAs( storage_config.Get().hash_function, test_content); std::string output_path{"output_file"}; @@ -210,9 +211,10 @@ TEST_CASE("LocalExecution: No input, create output", "[execution_api]") { // verify result CHECK_FALSE(output->IsCached()); - auto artifacts = output->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = output->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); // ensure result IS in cache output = action->Execute(nullptr); @@ -228,9 +230,10 @@ TEST_CASE("LocalExecution: No input, create output", "[execution_api]") { // verify result CHECK_FALSE(output->IsCached()); - auto artifacts = output->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = output->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); // ensure result IS 
STILL NOT in cache output = action->Execute(nullptr); @@ -254,7 +257,7 @@ TEST_CASE("LocalExecution: One input copied to output", "[execution_api]") { auto api = LocalApi(&local_context, &repo_config); std::string test_content("test"); - auto test_digest = ArtifactDigest::Create( + auto test_digest = ArtifactDigestFactory::HashDataAs( storage_config.Get().hash_function, test_content); REQUIRE(api.Upload(ArtifactBlobContainer{{ArtifactBlob{ test_digest, test_content, /*is_exec=*/false}}}, @@ -289,9 +292,10 @@ TEST_CASE("LocalExecution: One input copied to output", "[execution_api]") { // verify result CHECK_FALSE(output->IsCached()); - auto artifacts = output->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = output->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); // ensure result IS in cache output = action->Execute(nullptr); @@ -307,9 +311,10 @@ TEST_CASE("LocalExecution: One input copied to output", "[execution_api]") { // verify result CHECK_FALSE(output->IsCached()); - auto artifacts = output->Artifacts(); - REQUIRE(artifacts.contains(output_path)); - CHECK(artifacts.at(output_path).digest == test_digest); + auto const artifacts = output->Artifacts(); + REQUIRE(artifacts.has_value()); + REQUIRE(artifacts.value()->contains(output_path)); + CHECK(artifacts.value()->at(output_path).digest == test_digest); // ensure result IS STILL NOT in cache output = action->Execute(nullptr); diff --git a/test/buildtool/execution_engine/dag/TARGETS b/test/buildtool/execution_engine/dag/TARGETS index 1baf9f0e4..566943d5c 100644 --- a/test/buildtool/execution_engine/dag/TARGETS +++ b/test/buildtool/execution_engine/dag/TARGETS @@ -4,13 +4,13 @@ , "srcs": ["dag.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "gsl", "", "gsl"] - , 
["utils", "container_matchers"] , ["@", "src", "src/buildtool/common", "action_description"] , ["@", "src", "src/buildtool/common", "artifact_description"] , ["@", "src", "src/buildtool/common", "common"] , ["@", "src", "src/buildtool/execution_engine/dag", "dag"] + , ["", "catch-main"] + , ["utils", "container_matchers"] ] , "stage": ["test", "buildtool", "execution_engine", "dag"] } diff --git a/test/buildtool/execution_engine/dag/dag.test.cpp b/test/buildtool/execution_engine/dag/dag.test.cpp index 201a537fc..521e650ee 100644 --- a/test/buildtool/execution_engine/dag/dag.test.cpp +++ b/test/buildtool/execution_engine/dag/dag.test.cpp @@ -353,7 +353,6 @@ namespace { } template -// NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] auto IsValidNode( TNode const& node, gsl::not_null*> const& seen) -> bool { diff --git a/test/buildtool/execution_engine/executor/TARGETS b/test/buildtool/execution_engine/executor/TARGETS index bbdccc7c5..cc3e94d76 100644 --- a/test/buildtool/execution_engine/executor/TARGETS +++ b/test/buildtool/execution_engine/executor/TARGETS @@ -3,9 +3,15 @@ , "name": ["executor_api_tests"] , "hdrs": ["executor_api.test.hpp"] , "deps": - [ ["@", "src", "src/buildtool/auth", "auth"] + [ ["@", "catch2", "", "catch2"] + , ["@", "gsl", "", "gsl"] + , ["@", "src", "src/buildtool/auth", "auth"] , ["@", "src", "src/buildtool/common", "artifact_description"] + , ["@", "src", "src/buildtool/common", "artifact_digest_factory"] , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/common", "config"] + , ["@", "src", "src/buildtool/common/remote", "retry_config"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["@", "src", "src/buildtool/execution_api/common", "common"] , ["@", "src", "src/buildtool/execution_api/remote", "config"] , ["@", "src", "src/buildtool/execution_api/remote", "context"] @@ -14,11 +20,8 @@ , ["@", "src", "src/buildtool/execution_engine/executor", "executor"] , ["@", "src", 
"src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/progress_reporting", "progress"] - , ["@", "src", "src/buildtool/crypto", "hash_function"] - , ["@", "src", "src/buildtool/compatibility", "compatibility"] - , ["@", "catch2", "", "catch2"] - , ["@", "gsl", "", "gsl"] , ["utils", "test_api_bundle"] + , ["utils", "test_hash_function_type"] , ["utils", "test_remote_config"] ] , "stage": ["test", "buildtool", "execution_engine", "executor"] @@ -28,24 +31,27 @@ , "name": ["executor"] , "srcs": ["executor.test.cpp"] , "private-deps": - [ ["@", "src", "src/buildtool/auth", "auth"] + [ ["@", "catch2", "", "catch2"] + , ["@", "gsl", "", "gsl"] + , ["@", "src", "src/buildtool/auth", "auth"] , ["@", "src", "src/buildtool/common", "artifact_description"] + , ["@", "src", "src/buildtool/common", "artifact_digest_factory"] , ["@", "src", "src/buildtool/common", "common"] , ["@", "src", "src/buildtool/common", "config"] , ["@", "src", "src/buildtool/common/remote", "retry_config"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["@", "src", "src/buildtool/execution_api/common", "common"] , ["@", "src", "src/buildtool/execution_api/remote", "config"] , ["@", "src", "src/buildtool/execution_api/remote", "context"] , ["@", "src", "src/buildtool/execution_engine/dag", "dag"] , ["@", "src", "src/buildtool/execution_engine/executor", "context"] , ["@", "src", "src/buildtool/execution_engine/executor", "executor"] + , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/progress_reporting", "progress"] - , ["@", "src", "src/buildtool/crypto", "hash_function"] - , ["@", "src", "src/buildtool/compatibility", "compatibility"] + , ["@", "src", "src/utils/cpp", "expected"] , ["", "catch-main"] - , ["@", "catch2", "", "catch2"] - , ["@", "gsl", "", "gsl"] , ["utils", "test_api_bundle"] + , ["utils", "test_hash_function_type"] ] , "stage": ["test", "buildtool", "execution_engine", "executor"] } @@ -56,6 +62,7 
@@ , "data": ["test_data"] , "private-deps": [ "executor_api_tests" + , ["@", "catch2", "", "catch2"] , ["@", "src", "src/buildtool/common", "common"] , ["@", "src", "src/buildtool/common", "config"] , ["@", "src", "src/buildtool/execution_api/local", "config"] @@ -64,12 +71,11 @@ , ["@", "src", "src/buildtool/execution_api/remote", "config"] , ["@", "src", "src/buildtool/execution_engine/executor", "executor"] , ["@", "src", "src/buildtool/progress_reporting", "progress"] + , ["@", "src", "src/buildtool/storage", "config"] + , ["@", "src", "src/buildtool/storage", "storage"] , ["utils", "catch-main-remote-execution"] - , ["utils", "test_storage_config"] , ["utils", "test_auth_config"] - , ["@", "catch2", "", "catch2"] - , ["@", "src", "src/buildtool/storage", "storage"] - , ["@", "src", "src/buildtool/storage", "config"] + , ["utils", "test_storage_config"] ] , "stage": ["test", "buildtool", "execution_engine", "executor"] } @@ -80,19 +86,19 @@ , "data": ["test_data"] , "private-deps": [ "executor_api_tests" + , ["@", "catch2", "", "catch2"] , ["@", "src", "src/buildtool/common", "common"] , ["@", "src", "src/buildtool/common", "config"] , ["@", "src", "src/buildtool/common/remote", "retry_config"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["@", "src", "src/buildtool/execution_api/remote", "bazel"] , ["@", "src", "src/buildtool/execution_api/remote", "config"] , ["@", "src", "src/buildtool/execution_engine/executor", "executor"] , ["@", "src", "src/buildtool/progress_reporting", "progress"] - , ["@", "src", "src/buildtool/crypto", "hash_function"] - , ["@", "src", "src/buildtool/compatibility", "compatibility"] , ["utils", "catch-main-remote-execution"] , ["utils", "test_auth_config"] + , ["utils", "test_hash_function_type"] , ["utils", "test_remote_config"] - , ["@", "catch2", "", "catch2"] ] , "stage": ["test", "buildtool", "execution_engine", "executor"] } diff --git a/test/buildtool/execution_engine/executor/executor.test.cpp 
b/test/buildtool/execution_engine/executor/executor.test.cpp index 22cd2cc4c..59b06648f 100644 --- a/test/buildtool/execution_engine/executor/executor.test.cpp +++ b/test/buildtool/execution_engine/executor/executor.test.cpp @@ -17,6 +17,7 @@ #include #include #include +#include #include #include #include @@ -27,9 +28,10 @@ #include "gsl/gsl" #include "src/buildtool/auth/authentication.hpp" #include "src/buildtool/common/artifact_description.hpp" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/common/repository_config.hpp" #include "src/buildtool/common/statistics.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/common/execution_api.hpp" #include "src/buildtool/execution_api/remote/config.hpp" @@ -37,7 +39,9 @@ #include "src/buildtool/execution_engine/executor/context.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/progress_reporting/progress.hpp" +#include "src/utils/cpp/expected.hpp" #include "test/utils/executor/test_api_bundle.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" /// \brief Mockup API test config. 
struct TestApiConfig { @@ -48,7 +52,7 @@ struct TestApiConfig { struct TestExecutionConfig { bool failed{}; - std::vector outputs{}; + std::vector outputs; }; struct TestResponseConfig { @@ -56,11 +60,17 @@ struct TestApiConfig { int exit_code{}; }; - std::unordered_map artifacts{}; + std::unordered_map artifacts; TestExecutionConfig execution; TestResponseConfig response; }; +static auto NamedDigest(std::string const& str) -> ArtifactDigest { + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; + return ArtifactDigestFactory::HashDataAs(hash_function, + str); +} + // forward declarations class TestApi; class TestAction; @@ -68,9 +78,10 @@ class TestResponse; /// \brief Mockup Response, stores only config and action result class TestResponse : public IExecutionResponse { - friend class TestAction; - public: + explicit TestResponse(TestApiConfig config) noexcept + : config_{std::move(config)} {} + [[nodiscard]] auto Status() const noexcept -> StatusCode final { return StatusCode::Success; } @@ -89,16 +100,17 @@ class TestResponse : public IExecutionResponse { static const std::string kEmptyHash; return kEmptyHash; } - [[nodiscard]] auto Artifacts() noexcept -> ArtifactInfos const& final { + [[nodiscard]] auto Artifacts() noexcept + -> expected, std::string> final { if (not populated_) { Populate(); } - return artifacts_; + return gsl::not_null(&artifacts_); } [[nodiscard]] auto DirectorySymlinks() noexcept - -> DirSymlinks const& final { + -> expected, std::string> final { static const DirSymlinks kEmptySymlinks{}; - return kEmptySymlinks; + return gsl::not_null(&kEmptySymlinks); } private: @@ -106,9 +118,6 @@ class TestResponse : public IExecutionResponse { ArtifactInfos artifacts_; bool populated_ = false; - explicit TestResponse(TestApiConfig config) noexcept - : config_{std::move(config)} {} - void Populate() noexcept { if (populated_) { return; @@ -123,9 +132,8 @@ class TestResponse : public IExecutionResponse { try { artifacts.emplace( 
path, - Artifact::ObjectInfo{ - .digest = ArtifactDigest{path, 0, /*is_tree=*/false}, - .type = ObjectType::File}); + Artifact::ObjectInfo{.digest = NamedDigest(path), + .type = ObjectType::File}); } catch (...) { return; } @@ -136,23 +144,22 @@ class TestResponse : public IExecutionResponse { /// \brief Mockup Action, stores only config class TestAction : public IExecutionAction { - friend class TestApi; - public: + explicit TestAction(TestApiConfig config) noexcept + : config_{std::move(config)} {} + auto Execute(Logger const* /*unused*/) noexcept -> IExecutionResponse::Ptr final { if (config_.execution.failed) { return nullptr; } - return IExecutionResponse::Ptr{new TestResponse{config_}}; + return std::make_unique(config_); } void SetCacheFlag(CacheFlag /*unused*/) noexcept final {} void SetTimeout(std::chrono::milliseconds /*unused*/) noexcept final {} private: TestApiConfig config_{}; - explicit TestAction(TestApiConfig config) noexcept - : config_{std::move(config)} {} }; /// \brief Mockup Api, use config to create action and handle artifact upload @@ -170,7 +177,7 @@ class TestApi : public IExecutionApi { std::map const& /*unused*/, std::map const& /*unused*/) const noexcept -> IExecutionAction::Ptr final { - return IExecutionAction::Ptr{new TestAction(config_)}; + return std::make_unique(config_); } [[nodiscard]] auto RetrieveToPaths( std::vector const& /*unused*/, @@ -261,24 +268,25 @@ class TestApi : public IExecutionApi { auto const local_cpp_desc = ArtifactDescription::CreateLocal(path{"local.cpp"}, ""); - auto const known_cpp_desc = ArtifactDescription::CreateKnown( - ArtifactDigest{"known.cpp", 0, /*is_tree=*/false}, ObjectType::File); + auto const known_digest = NamedDigest("known.cpp"); + auto const known_cpp_desc = + ArtifactDescription::CreateKnown(known_digest, ObjectType::File); auto const test_action_desc = ActionDescription{ {"output1.exe", "output2.exe"}, {}, Action{"test_action", {"cmd", "line"}, {}}, - {{"local.cpp", local_cpp_desc}, 
{"known.cpp", known_cpp_desc}}}; + {{"local.cpp", local_cpp_desc}, {known_digest.hash(), known_cpp_desc}}}; CHECK(g->AddAction(test_action_desc)); CHECK(FileSystemManager::WriteFile("local.cpp", ws / "local.cpp")); TestApiConfig config{}; - config.artifacts["local.cpp"].uploads = true; - config.artifacts["known.cpp"].available = true; - config.artifacts["output1.exe"].available = true; - config.artifacts["output2.exe"].available = true; + config.artifacts[NamedDigest("local.cpp").hash()].uploads = true; + config.artifacts[NamedDigest("known.cpp").hash()].available = true; + config.artifacts[NamedDigest("output1.exe").hash()].available = true; + config.artifacts[NamedDigest("output2.exe").hash()].available = true; config.execution.failed = false; config.execution.outputs = {"output1.exe", "output2.exe"}; @@ -295,17 +303,14 @@ TEST_CASE("Executor: Process artifact", "[executor]") { DependencyGraph g; auto [config, repo_config] = CreateTest(&g, workspace_path); - HashFunction const hash_function{Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; auto const local_cpp_id = ArtifactDescription::CreateLocal("local.cpp", "").Id(); - auto const known_cpp_id = - ArtifactDescription::CreateKnown( - ArtifactDigest{"known.cpp", 0, /*is_tree=*/false}, ObjectType::File) - .Id(); + auto const known_cpp_id = ArtifactDescription::CreateKnown( + NamedDigest("known.cpp"), ObjectType::File) + .Id(); Auth auth{}; RetryConfig retry_config{}; // default retry config @@ -315,10 +320,10 @@ TEST_CASE("Executor: Process artifact", "[executor]") { .exec_config = &remote_config}; SECTION("Processing succeeds for valid config") { - auto api = TestApi::Ptr{new TestApi{config}}; + auto api = std::make_shared(config); Statistics stats{}; Progress progress{}; - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = &repo_config, .apis = &apis, .remote_context = &remote_context, @@ -331,12 +336,12 @@ TEST_CASE("Executor: Process artifact", "[executor]") { } SECTION("Processing fails if uploading local artifact failed") { - config.artifacts["local.cpp"].uploads = false; + config.artifacts[NamedDigest("local.cpp").hash()].uploads = false; - auto api = TestApi::Ptr{new TestApi{config}}; + auto api = std::make_shared(config); Statistics stats{}; Progress progress{}; - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = &repo_config, .apis = &apis, .remote_context = &remote_context, @@ -349,12 +354,12 @@ TEST_CASE("Executor: Process artifact", "[executor]") { } SECTION("Processing fails if known artifact is not available") { - config.artifacts["known.cpp"].available = false; + config.artifacts[NamedDigest("known.cpp").hash()].available = false; - auto api = TestApi::Ptr{new 
TestApi{config}}; + auto api = std::make_shared(config); Statistics stats{}; Progress progress{}; - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = &repo_config, .apis = &apis, .remote_context = &remote_context, @@ -374,17 +379,14 @@ TEST_CASE("Executor: Process action", "[executor]") { DependencyGraph g; auto [config, repo_config] = CreateTest(&g, workspace_path); - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; auto const local_cpp_id = ArtifactDescription::CreateLocal("local.cpp", "").Id(); - auto const known_cpp_id = - ArtifactDescription::CreateKnown( - ArtifactDigest{"known.cpp", 0, /*is_tree=*/false}, ObjectType::File) - .Id(); + auto const known_cpp_id = ArtifactDescription::CreateKnown( + NamedDigest("known.cpp"), ObjectType::File) + .Id(); ActionIdentifier const action_id{"test_action"}; auto const output1_id = @@ -401,10 +403,10 @@ TEST_CASE("Executor: Process action", "[executor]") { .exec_config = &remote_config}; SECTION("Processing succeeds for valid config") { - auto api = TestApi::Ptr{new TestApi{config}}; + auto api = std::make_shared(config); Statistics stats{}; Progress progress{}; - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = &repo_config, .apis = &apis, .remote_context = &remote_context, @@ -422,10 +424,10 @@ TEST_CASE("Executor: Process action", "[executor]") { SECTION("Processing succeeds even if result was is not cached") { config.response.cached = false; - auto api = TestApi::Ptr{new TestApi{config}}; + auto api = std::make_shared(config); Statistics stats{}; Progress progress{}; - auto const apis = CreateTestApiBundle(hash_function, 
api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = &repo_config, .apis = &apis, .remote_context = &remote_context, @@ -441,12 +443,12 @@ TEST_CASE("Executor: Process action", "[executor]") { } SECTION("Processing succeeds even if output is not available in CAS") { - config.artifacts["output2.exe"].available = false; + config.artifacts[NamedDigest("output2.exe").hash()].available = false; - auto api = TestApi::Ptr{new TestApi{config}}; + auto api = std::make_shared(config); Statistics stats{}; Progress progress{}; - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = &repo_config, .apis = &apis, .remote_context = &remote_context, @@ -467,10 +469,10 @@ TEST_CASE("Executor: Process action", "[executor]") { SECTION("Processing fails if execution failed") { config.execution.failed = true; - auto api = TestApi::Ptr{new TestApi{config}}; + auto api = std::make_shared(config); Statistics stats{}; Progress progress{}; - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = &repo_config, .apis = &apis, .remote_context = &remote_context, @@ -488,10 +490,10 @@ TEST_CASE("Executor: Process action", "[executor]") { SECTION("Processing fails if exit code is non-zero") { config.response.exit_code = 1; - auto api = TestApi::Ptr{new TestApi{config}}; + auto api = std::make_shared(config); Statistics stats{}; Progress progress{}; - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = &repo_config, .apis = &apis, .remote_context = &remote_context, @@ -512,10 +514,10 @@ TEST_CASE("Executor: Process action", "[executor]") { SECTION("Processing fails if any output is 
missing") { config.execution.outputs = {"output1.exe" /*, "output2.exe"*/}; - auto api = TestApi::Ptr{new TestApi{config}}; + auto api = std::make_shared(config); Statistics stats{}; Progress progress{}; - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = &repo_config, .apis = &apis, .remote_context = &remote_context, diff --git a/test/buildtool/execution_engine/executor/executor_api.test.hpp b/test/buildtool/execution_engine/executor/executor_api.test.hpp index 896279ef3..826435761 100644 --- a/test/buildtool/execution_engine/executor/executor_api.test.hpp +++ b/test/buildtool/execution_engine/executor/executor_api.test.hpp @@ -26,10 +26,10 @@ #include "gsl/gsl" #include "src/buildtool/common/artifact.hpp" #include "src/buildtool/common/artifact_description.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/common/remote/retry_config.hpp" #include "src/buildtool/common/repository_config.hpp" #include "src/buildtool/common/statistics.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/common/execution_api.hpp" #include "src/buildtool/execution_api/remote/config.hpp" @@ -40,6 +40,7 @@ #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/progress_reporting/progress.hpp" #include "test/utils/executor/test_api_bundle.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" #include "test/utils/remote_execution/test_remote_config.hpp" using ApiFactory = std::function; @@ -55,17 +56,14 @@ static inline void RunBlobUpload(RepositoryConfig* repo_config, ApiFactory const& factory) { SetupConfig(repo_config); auto api = factory(); - HashFunction const hash_function{Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; std::string const blob = "test"; - CHECK(api->Upload(ArtifactBlobContainer{{ArtifactBlob{ - ArtifactDigest{hash_function.HashBlobData(blob).HexString(), - blob.size(), - /*is_tree=*/false}, - blob, - /*is_exec=*/false}}})); + CHECK(api->Upload(ArtifactBlobContainer{ + {ArtifactBlob{ArtifactDigestFactory::HashDataAs( + hash_function, blob), + blob, + /*is_exec=*/false}}})); } [[nodiscard]] static inline auto GetTestDir() -> std::filesystem::path { @@ -144,12 +142,10 @@ static inline void RunHelloWorldCompilation( .retry_config = &retry_config, .exec_config = &*remote_config}; - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; auto api = factory(); - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = repo_config, .apis = &apis, @@ -279,12 +275,10 @@ static inline void RunGreeterCompilation( .retry_config = &retry_config, .exec_config = &*remote_config}; - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; auto api = factory(); - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = repo_config, .apis = &apis, @@ -420,20 +414,14 @@ static inline void TestUploadAndDownloadTrees( env.emplace("PATH", "/bin:/usr/bin"); } - HashFunction const hash_function{Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; auto foo = std::string{"foo"}; auto bar = std::string{"bar"}; - auto foo_digest = - ArtifactDigest{hash_function.HashBlobData(foo).HexString(), - foo.size(), - /*is_tree=*/false}; - auto bar_digest = - ArtifactDigest{hash_function.HashBlobData(bar).HexString(), - bar.size(), - /*is_tree=*/false}; + auto const foo_digest = + ArtifactDigestFactory::HashDataAs(hash_function, foo); + auto const bar_digest = + ArtifactDigestFactory::HashDataAs(hash_function, bar); // upload blobs auto api = factory(); @@ -459,7 +447,7 @@ static inline void TestUploadAndDownloadTrees( .retry_config = &retry_config, .exec_config = &*remote_config}; - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = repo_config, .apis = &apis, @@ -579,11 +567,8 @@ static inline void TestRetrieveOutputDirectories( int /*expected_queued*/ = 0, int /*expected_cached*/ = 0) { SetupConfig(repo_config); - auto tmpdir = GetTestDir(); - HashFunction const hash_function{Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; auto const make_tree_id = std::string{"make_tree"}; auto const* make_tree_cmd = @@ -636,7 +621,7 @@ static inline void TestRetrieveOutputDirectories( // run action auto api = factory(); - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = repo_config, .apis = &apis, @@ -690,7 +675,7 @@ static inline void TestRetrieveOutputDirectories( // run action auto api = factory(); - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = repo_config, .apis = &apis, @@ -761,7 +746,7 @@ static inline void TestRetrieveOutputDirectories( // run action auto api = factory(); - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{.repo_config = repo_config, .apis = &apis, @@ -834,7 +819,7 @@ static inline void TestRetrieveOutputDirectories( // run action auto api = factory(); - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{ .repo_config = repo_config, @@ -861,7 +846,7 @@ static inline void TestRetrieveOutputDirectories( // run action auto api = factory(); - auto const apis = CreateTestApiBundle(hash_function, api); + auto const apis = CreateTestApiBundle(&hash_function, api); ExecutionContext const exec_context{ .repo_config = repo_config, diff --git a/test/buildtool/execution_engine/executor/executor_api_remote_bazel.test.cpp b/test/buildtool/execution_engine/executor/executor_api_remote_bazel.test.cpp index 0e068ea96..7fc5018f0 100644 --- 
a/test/buildtool/execution_engine/executor/executor_api_remote_bazel.test.cpp +++ b/test/buildtool/execution_engine/executor/executor_api_remote_bazel.test.cpp @@ -18,13 +18,13 @@ #include "src/buildtool/common/remote/retry_config.hpp" #include "src/buildtool/common/repository_config.hpp" #include "src/buildtool/common/statistics.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" #include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_api/remote/bazel/bazel_api.hpp" #include "src/buildtool/execution_api/remote/config.hpp" #include "src/buildtool/execution_engine/executor/executor.hpp" #include "src/buildtool/progress_reporting/progress.hpp" #include "test/buildtool/execution_engine/executor/executor_api.test.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" #include "test/utils/remote_execution/test_auth_config.hpp" #include "test/utils/remote_execution/test_remote_config.hpp" @@ -41,9 +41,7 @@ TEST_CASE("Executor: Upload blob", "[executor]") { RetryConfig retry_config{}; // default retry config - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; TestBlobUpload(&repo_config, [&] { return BazelApi::Ptr{new BazelApi{"remote-execution", @@ -52,7 +50,7 @@ TEST_CASE("Executor: Upload blob", "[executor]") { &*auth_config, &retry_config, config, - hash_function}}; + &hash_function}}; }); } @@ -72,9 +70,7 @@ TEST_CASE("Executor: Compile hello world", "[executor]") { RetryConfig retry_config{}; // default retry config - HashFunction const hash_function{Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; TestHelloWorldCompilation( &repo_config, @@ -88,7 +84,7 @@ TEST_CASE("Executor: Compile hello world", "[executor]") { &*auth_config, &retry_config, config, - hash_function}}; + &hash_function}}; }, &*auth_config, false /* not hermetic */); @@ -110,9 +106,7 @@ TEST_CASE("Executor: Compile greeter", "[executor]") { RetryConfig retry_config{}; // default retry config - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; TestGreeterCompilation( &repo_config, @@ -126,7 +120,7 @@ TEST_CASE("Executor: Compile greeter", "[executor]") { &*auth_config, &retry_config, config, - hash_function}}; + &hash_function}}; }, &*auth_config, false /* not hermetic */); @@ -148,9 +142,7 @@ TEST_CASE("Executor: Upload and download trees", "[executor]") { RetryConfig retry_config{}; // default retry config - HashFunction const hash_function{Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; TestUploadAndDownloadTrees( &repo_config, @@ -164,7 +156,7 @@ TEST_CASE("Executor: Upload and download trees", "[executor]") { &*auth_config, &retry_config, config, - hash_function}}; + &hash_function}}; }, &*auth_config, false /* not hermetic */); @@ -186,9 +178,7 @@ TEST_CASE("Executor: Retrieve output directories", "[executor]") { RetryConfig retry_config{}; // default retry config - HashFunction const hash_function{Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1}; + HashFunction const hash_function{TestHashType::ReadFromEnvironment()}; TestRetrieveOutputDirectories( &repo_config, @@ -202,7 +192,7 @@ TEST_CASE("Executor: Retrieve output directories", "[executor]") { &*auth_config, &retry_config, config, - hash_function}}; + &hash_function}}; }, &*auth_config, false /* not hermetic */); diff --git a/test/buildtool/execution_engine/traverser/TARGETS b/test/buildtool/execution_engine/traverser/TARGETS index 821c4bc89..ba159245d 100644 --- a/test/buildtool/execution_engine/traverser/TARGETS +++ b/test/buildtool/execution_engine/traverser/TARGETS @@ -4,12 +4,14 @@ , "srcs": ["traverser.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] - , ["utils", "container_matchers"] - , ["@", "src", "src/buildtool/common", "common"] , ["@", "src", "src/buildtool/common", "artifact_description"] + , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["@", "src", "src/buildtool/execution_engine/dag", "dag"] , ["@", "src", "src/buildtool/execution_engine/traverser", "traverser"] + , ["", "catch-main"] + , ["utils", "container_matchers"] + , ["utils", "test_hash_function_type"] ] , "stage": ["test", "buildtool", "execution_engine", "traverser"] } diff --git a/test/buildtool/execution_engine/traverser/traverser.test.cpp b/test/buildtool/execution_engine/traverser/traverser.test.cpp index 97e2095e7..6bbafc03f 100644 --- a/test/buildtool/execution_engine/traverser/traverser.test.cpp +++ b/test/buildtool/execution_engine/traverser/traverser.test.cpp @@ -24,8 +24,10 @@ #include "catch2/catch_test_macros.hpp" #include "src/buildtool/common/artifact_description.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/execution_engine/dag/dag.hpp" #include "test/utils/container_matchers.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" namespace { @@ -93,11 
+95,11 @@ class TestBuildInfo { } private: - std::unordered_set correctly_built_{}; - std::unordered_set incorrectly_built_{}; - std::unordered_set artifacts_uploaded_{}; - std::unordered_set uploaded_more_than_once_{}; - std::string name_{}; + std::unordered_set correctly_built_; + std::unordered_set incorrectly_built_; + std::unordered_set artifacts_uploaded_; + std::unordered_set uploaded_more_than_once_; + std::string name_; std::mutex mutex_; }; @@ -132,9 +134,10 @@ class TestExecutor { [[maybe_unused]] auto was_it_added = build_info_->InsertIncorrectlyBuilt(node->Content().Id()); } + return false; } catch (...) { + return false; } - return false; } [[nodiscard]] auto Process( @@ -153,6 +156,7 @@ class TestExecutor { TestBuildInfo* build_info_; template + // NOLINTNEXTLINE(cppcoreguidelines-missing-std-forward) [[nodiscard]] auto AllAvailable(Container&& c) const noexcept -> bool { return std::all_of(std::begin(c), std::end(c), [](auto node) { return node->TraversalState()->IsAvailable(); @@ -182,8 +186,10 @@ class TestProject { auto inputs_desc = ActionDescription::inputs_t{}; if (not inputs.empty()) { command.emplace_back("FROM"); + auto const hash_type = TestHashType::ReadFromEnvironment(); for (auto const& input_desc : inputs) { - auto artifact = ArtifactDescription::FromJson(input_desc); + auto artifact = + ArtifactDescription::FromJson(hash_type, input_desc); REQUIRE(artifact); auto const input_id = artifact->Id(); command.push_back(input_id); @@ -213,9 +219,9 @@ class TestProject { } private: - std::vector graph_full_description_{}; - std::unordered_set artifacts_to_be_built_{}; - std::unordered_set local_artifacts_{}; + std::vector graph_full_description_; + std::unordered_set artifacts_to_be_built_; + std::unordered_set local_artifacts_; }; } // namespace diff --git a/test/buildtool/file_system/TARGETS b/test/buildtool/file_system/TARGETS index 8f474e678..4210a13ad 100644 --- a/test/buildtool/file_system/TARGETS +++ 
b/test/buildtool/file_system/TARGETS @@ -5,11 +5,11 @@ , "data": ["test_data"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/file_system", "object_type"] , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", "file_system"] } @@ -19,13 +19,13 @@ , "srcs": ["object_cas.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] + , ["@", "src", "src/buildtool/common", "artifact_digest_factory"] + , ["@", "src", "src/buildtool/common", "common"] , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/file_system", "object_cas"] , ["@", "src", "src/buildtool/file_system", "object_type"] - , ["@", "src", "src/buildtool/common", "bazel_types"] - , ["@", "src", "src/buildtool/common", "common"] , ["@", "src", "src/buildtool/storage", "config"] + , ["", "catch-main"] , ["utils", "test_storage_config"] ] , "stage": ["test", "buildtool", "file_system"] @@ -37,16 +37,15 @@ , "data": ["test_data"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "fmt", "", "fmt"] - , ["utils", "container_matchers"] + , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/file_system", "git_cas"] , ["@", "src", "src/buildtool/file_system", "git_repo"] , ["@", "src", "src/buildtool/file_system", "git_tree"] - , ["@", "src", "src/buildtool/file_system", "file_system_manager"] - , ["@", "src", "src/buildtool/common", "bazel_types"] - , ["@", "src", "src/buildtool/common", "common"] , ["@", "src", "src/utils/cpp", "hex_string"] + , ["", "catch-main"] + , ["utils", "container_matchers"] , ["utils", "shell_quoting"] ] , "stage": ["test", "buildtool", "file_system"] @@ -58,12 +57,16 @@ , 
"data": ["test_data"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] - , ["utils", "container_matchers"] , ["@", "src", "src/buildtool/common", "artifact_description"] + , ["@", "src", "src/buildtool/common", "artifact_digest_factory"] + , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["@", "src", "src/buildtool/file_system", "file_root"] , ["@", "src", "src/buildtool/file_system", "file_system_manager"] + , ["", "catch-main"] + , ["utils", "container_matchers"] , ["utils", "shell_quoting"] + , ["utils", "test_hash_function_type"] ] , "stage": ["test", "buildtool", "file_system"] } @@ -74,12 +77,12 @@ , "data": ["test_data"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "fmt", "", "fmt"] - , ["utils", "container_matchers"] , ["@", "src", "src/buildtool/common", "artifact_description"] , ["@", "src", "src/buildtool/file_system", "file_root"] , ["@", "src", "src/buildtool/file_system", "file_system_manager"] + , ["", "catch-main"] + , ["utils", "container_matchers"] , ["utils", "shell_quoting"] ] , "stage": ["test", "buildtool", "file_system"] @@ -114,7 +117,6 @@ , "data": ["test_data"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "fmt", "", "fmt"] , ["@", "json", "", "json"] , ["@", "src", "src/buildtool/file_system", "file_system_manager"] @@ -122,9 +124,10 @@ , ["@", "src", "src/buildtool/file_system", "git_repo"] , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] + , ["@", "src", "src/buildtool/storage", "config"] , ["@", "src", "src/utils/cpp", "atomic"] , ["@", "src", "src/utils/cpp", "hex_string"] - , ["@", "src", "src/buildtool/storage", "config"] + , ["", "catch-main"] , ["utils", "shell_quoting"] , ["utils", "test_storage_config"] ] @@ -137,7 +140,6 @@ , "data": ["test_data"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , 
["@", "fmt", "", "fmt"] , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/file_system", "git_cas"] @@ -150,6 +152,7 @@ , "resolve_symlinks_map" ] , ["@", "src", "src/buildtool/multithreading", "task_system"] + , ["", "catch-main"] , ["utils", "shell_quoting"] ] , "stage": ["test", "buildtool", "file_system"] @@ -158,12 +161,12 @@ { "type": ["@", "rules", "test", "suite"] , "stage": ["file_system"] , "deps": - [ "file_root" + [ "directory_entries" + , "file_root" , "file_system_manager" - , "object_cas" - , "git_tree" - , "directory_entries" , "git_repo" + , "git_tree" + , "object_cas" , "resolve_symlinks_map" ] } diff --git a/test/buildtool/file_system/file_root.test.cpp b/test/buildtool/file_system/file_root.test.cpp index 67fd77f3f..94a2a552b 100644 --- a/test/buildtool/file_system/file_root.test.cpp +++ b/test/buildtool/file_system/file_root.test.cpp @@ -23,8 +23,12 @@ #include "catch2/catch_test_macros.hpp" #include "src/buildtool/common/artifact_description.hpp" +#include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "test/utils/container_matchers.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" #include "test/utils/shell_quoting.hpp" namespace { @@ -370,123 +374,92 @@ TEST_CASE("Reading blob type", "[file_root]") { } } -TEST_CASE("Creating artifact descriptions", "[file_root]") { - SECTION("local root") { - auto root_path = CreateTestRepoSymlinks(true); - REQUIRE(root_path); - auto root = FileRoot{*root_path}; +static void CheckLocalRoot(HashFunction::Type hash_type, + bool ignore_special) noexcept; +static void CheckGitRoot(HashFunction::Type hash_type, + bool ignore_special) noexcept; - auto desc = root.ToArtifactDescription("baz/foo", "repo"); - REQUIRE(desc); - CHECK(*desc == ArtifactDescription::CreateLocal( - 
std::filesystem::path{"baz/foo"}, "repo")); +TEST_CASE("Creating artifact descriptions", "[file_root]") { + auto const hash_type = TestHashType::ReadFromEnvironment(); - CHECK(root.ToArtifactDescription("does_not_exist", "repo")); + SECTION("local root") { + CheckLocalRoot(hash_type, /*ignore_special=*/false); } - SECTION("git root") { - auto repo_path = CreateTestRepoSymlinks(false); - REQUIRE(repo_path); - auto root = FileRoot::FromGit(*repo_path, kTreeSymId); - REQUIRE(root); - - auto foo = root->ToArtifactDescription("baz/foo", "repo"); - REQUIRE(foo); - if (Compatibility::IsCompatible()) { - CHECK(*foo == - ArtifactDescription::CreateKnown( - ArtifactDigest{ - kFooIdSha256, kFooContentLength, /*is_tree=*/false}, - ObjectType::File)); - } - else { - CHECK(*foo == - ArtifactDescription::CreateKnown( - ArtifactDigest{ - kFooIdGitSha1, kFooContentLength, /*is_tree=*/false}, - ObjectType::File, - "repo")); - } - - auto bar = root->ToArtifactDescription("baz/bar", "repo"); - REQUIRE(bar); - if (Compatibility::IsCompatible()) { - CHECK(*bar == - ArtifactDescription::CreateKnown( - ArtifactDigest{ - kBarIdSha256, kBarContentLength, /*is_tree=*/false}, - ObjectType::Executable)); - } - else { - CHECK(*bar == - ArtifactDescription::CreateKnown( - ArtifactDigest{ - kBarIdGitSha1, kBarContentLength, /*is_tree=*/false}, - ObjectType::Executable, - "repo")); - } - - CHECK_FALSE(root->ToArtifactDescription("baz", "repo")); - CHECK_FALSE(root->ToArtifactDescription("does_not_exist", "repo")); + CheckGitRoot(hash_type, /*ignore_special=*/false); } - SECTION("local root ignore-special") { - auto root_path = CreateTestRepoSymlinks(true); - REQUIRE(root_path); - auto root = FileRoot{*root_path, /*ignore_special=*/true}; + CheckLocalRoot(hash_type, /*ignore_special=*/true); + } + SECTION("git root ignore-special") { + CheckGitRoot(hash_type, /*ignore_special=*/true); + } +} - auto desc = root.ToArtifactDescription("baz/foo", "repo"); - REQUIRE(desc); - CHECK(*desc == 
ArtifactDescription::CreateLocal( - std::filesystem::path{"baz/foo"}, "repo")); +static void CheckLocalRoot(HashFunction::Type hash_type, + bool ignore_special) noexcept { + auto const root_path = CreateTestRepoSymlinks(true); + REQUIRE(root_path); + auto const root = FileRoot{*root_path, ignore_special}; - CHECK(root.ToArtifactDescription("does_not_exist", "repo")); - } + auto const desc = root.ToArtifactDescription(hash_type, "baz/foo", "repo"); + REQUIRE(desc); + CHECK(*desc == ArtifactDescription::CreateLocal( + std::filesystem::path{"baz/foo"}, "repo")); - SECTION("git root ignore-special") { - auto repo_path = CreateTestRepoSymlinks(false); - REQUIRE(repo_path); - auto root = - FileRoot::FromGit(*repo_path, kTreeSymId, /*ignore_special=*/true); - REQUIRE(root); + CHECK(root.ToArtifactDescription(hash_type, "does_not_exist", "repo")); +} - auto foo = root->ToArtifactDescription("baz/foo", "repo"); - REQUIRE(foo); - if (Compatibility::IsCompatible()) { - CHECK(*foo == - ArtifactDescription::CreateKnown( - ArtifactDigest{ - kFooIdSha256, kFooContentLength, /*is_tree=*/false}, - ObjectType::File)); - } - else { - CHECK(*foo == - ArtifactDescription::CreateKnown( - ArtifactDigest{ - kFooIdGitSha1, kFooContentLength, /*is_tree=*/false}, - ObjectType::File, - "repo")); - } - - auto bar = root->ToArtifactDescription("baz/bar", "repo"); - REQUIRE(bar); - if (Compatibility::IsCompatible()) { - CHECK(*bar == - ArtifactDescription::CreateKnown( - ArtifactDigest{ - kBarIdSha256, kBarContentLength, /*is_tree=*/false}, - ObjectType::Executable)); - } - else { - CHECK(*bar == - ArtifactDescription::CreateKnown( - ArtifactDigest{ - kBarIdGitSha1, kBarContentLength, /*is_tree=*/false}, - ObjectType::Executable, - "repo")); - } - - CHECK_FALSE(root->ToArtifactDescription("baz", "repo")); - CHECK_FALSE(root->ToArtifactDescription("does_not_exist", "repo")); +static void CheckGitRoot(HashFunction::Type hash_type, + bool ignore_special) noexcept { + auto const repo_path = 
CreateTestRepoSymlinks(false); + REQUIRE(repo_path); + auto const root = FileRoot::FromGit(*repo_path, kTreeSymId, ignore_special); + REQUIRE(root); + + auto const foo = root->ToArtifactDescription(hash_type, "baz/foo", "repo"); + REQUIRE(foo); + if (not ProtocolTraits::IsNative(hash_type)) { + auto const digest = ArtifactDigestFactory::Create(hash_type, + kFooIdSha256, + kFooContentLength, + /*is_tree=*/false); + REQUIRE(digest); + CHECK(*foo == + ArtifactDescription::CreateKnown(*digest, ObjectType::File)); + } + else { + auto const digest = ArtifactDigestFactory::Create(hash_type, + kFooIdGitSha1, + kFooContentLength, + /*is_tree=*/false); + REQUIRE(digest); + CHECK(*foo == ArtifactDescription::CreateKnown( + *digest, ObjectType::File, "repo")); } + + auto const bar = root->ToArtifactDescription(hash_type, "baz/bar", "repo"); + REQUIRE(bar); + if (not ProtocolTraits::IsNative(hash_type)) { + auto const digest = ArtifactDigestFactory::Create(hash_type, + kBarIdSha256, + kBarContentLength, + /*is_tree=*/false); + REQUIRE(digest); + CHECK(*bar == ArtifactDescription::CreateKnown(*digest, + ObjectType::Executable)); + } + else { + auto const digest = ArtifactDigestFactory::Create(hash_type, + kBarIdGitSha1, + kBarContentLength, + /*is_tree=*/false); + REQUIRE(digest); + CHECK(*bar == ArtifactDescription::CreateKnown( + *digest, ObjectType::Executable, "repo")); + } + + CHECK_FALSE(root->ToArtifactDescription(hash_type, "baz", "repo")); + CHECK_FALSE( + root->ToArtifactDescription(hash_type, "does_not_exist", "repo")); } diff --git a/test/buildtool/file_system/git_repo.test.cpp b/test/buildtool/file_system/git_repo.test.cpp index 2663e9bd0..4fb5c0cb5 100644 --- a/test/buildtool/file_system/git_repo.test.cpp +++ b/test/buildtool/file_system/git_repo.test.cpp @@ -78,7 +78,7 @@ class TestUtils { : (*repo_path / ".git").string()), QuoteForShell(repo_path->string())); if (std::system(cmd.c_str()) == 0) { - return *repo_path; + return repo_path; } return std::nullopt; } @@ 
-214,12 +214,12 @@ TEST_CASE("Single-threaded real repository local operations", "[git_repo]") { } SECTION("Get head commit") { - auto repo_wHead_path = TestUtils::CreateTestRepoWithCheckout(); - REQUIRE(repo_wHead_path); - auto repo_wHead = GitRepo::Open(*repo_wHead_path); - REQUIRE(repo_wHead); + auto repo_head_path = TestUtils::CreateTestRepoWithCheckout(); + REQUIRE(repo_head_path); + auto repo_head = GitRepo::Open(*repo_head_path); + REQUIRE(repo_head); - auto head_commit = repo_wHead->GetHeadCommit(logger); + auto head_commit = repo_head->GetHeadCommit(logger); REQUIRE(head_commit); CHECK(*head_commit == kRootCommit); } @@ -263,9 +263,9 @@ TEST_CASE("Single-threaded real repository local operations", "[git_repo]") { // tag uncommitted tree auto foo_bar = GitRepo::tree_entries_t{ {FromHexString(kFooId).value_or({}), - {GitRepo::tree_entry_t{"foo", ObjectType::File}}}, + {GitRepo::TreeEntry{"foo", ObjectType::File}}}, {FromHexString(kBarId).value_or({}), - {GitRepo::tree_entry_t{"bar", ObjectType::Executable}}}}; + {GitRepo::TreeEntry{"bar", ObjectType::Executable}}}}; auto foo_bar_id = repo_tag->CreateTree(foo_bar); REQUIRE(foo_bar_id); auto tree_id = ToHexString(*foo_bar_id); @@ -611,7 +611,7 @@ TEST_CASE("Multi-threaded fake repository operations", "[git_repo]") { threads.reserve(kNumThreads); SECTION("Lookups in the same ODB") { - constexpr int NUM_CASES = 10; + constexpr int kNumCases = 10; for (int id{}; id < kNumThreads; ++id) { threads.emplace_back( [&storage_config, @@ -621,7 +621,7 @@ TEST_CASE("Multi-threaded fake repository operations", "[git_repo]") { &starting_signal](int tid) { starting_signal.wait(false); // cases based on thread number - switch (tid % NUM_CASES) { + switch (tid % kNumCases) { case 0: { auto remote_repo = GitRepo::Open(remote_cas); REQUIRE(remote_repo); @@ -743,6 +743,8 @@ TEST_CASE("Multi-threaded fake repository operations", "[git_repo]") { std::nullopt, logger)); } break; + default: + REQUIRE(false); } }, id); diff --git 
a/test/buildtool/file_system/git_tree.test.cpp b/test/buildtool/file_system/git_tree.test.cpp index 2cef0e000..aeaa1a80d 100644 --- a/test/buildtool/file_system/git_tree.test.cpp +++ b/test/buildtool/file_system/git_tree.test.cpp @@ -14,18 +14,19 @@ #include "src/buildtool/file_system/git_tree.hpp" +#include #include #include #include #include #include #include +#include #include #include "catch2/catch_test_macros.hpp" #include "fmt/core.h" #include "src/buildtool/common/artifact_digest.hpp" -#include "src/buildtool/common/bazel_types.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/file_system/git_cas.hpp" #include "src/buildtool/file_system/git_repo.hpp" @@ -98,6 +99,24 @@ auto const kFooLinkId = std::string{"b24736f10d3c60015386047ebc98b4ab63056041"}; return std::nullopt; } +class SymlinksChecker final { + public: + explicit SymlinksChecker(gsl::not_null const& cas) noexcept + : cas_{*cas} {} + + [[nodiscard]] auto operator()( + std::vector const& ids) const noexcept -> bool { + return std::all_of( + ids.begin(), ids.end(), [&cas = cas_](ArtifactDigest const& id) { + auto content = cas.ReadObject(id.hash(), /*is_hex_id=*/true); + return content.has_value() and PathIsNonUpwards(*content); + }); + }; + + private: + GitCAS const& cas_; +}; + } // namespace TEST_CASE("Open Git CAS", "[git_cas]") { @@ -207,16 +226,7 @@ TEST_CASE("Read Git Trees", "[git_cas]") { REQUIRE(repo); // create symlinks checker - auto check_symlinks = [&cas](std::vector const& ids) { - for (auto const& id : ids) { - auto content = - cas->ReadObject(ArtifactDigest(id).hash(), /*is_hex_id=*/true); - if (not content or not PathIsNonUpwards(*content)) { - return false; - } - } - return true; - }; + auto const check_symlinks = SymlinksChecker{cas}; SECTION("invalid trees") { CHECK_FALSE(repo->ReadTree("", check_symlinks, /*is_hex_id=*/true)); @@ -265,16 +275,7 @@ TEST_CASE("Read Git Trees with symlinks -- ignore special", "[git_cas]") { REQUIRE(repo); // 
create symlinks checker - auto check_symlinks = [&cas](std::vector const& ids) { - for (auto const& id : ids) { - auto content = - cas->ReadObject(ArtifactDigest(id).hash(), /*is_hex_id=*/true); - if (not content or not PathIsNonUpwards(*content)) { - return false; - } - } - return true; - }; + auto const check_symlinks = SymlinksChecker{cas}; SECTION("invalid trees") { CHECK_FALSE(repo->ReadTree( @@ -349,16 +350,7 @@ TEST_CASE("Read Git Trees with symlinks -- allow non-upwards", "[git_cas]") { REQUIRE(repo); // create symlinks checker - auto check_symlinks = [&cas](std::vector const& ids) { - for (auto const& id : ids) { - auto content = - cas->ReadObject(ArtifactDigest(id).hash(), /*is_hex_id=*/true); - if (not content or not PathIsNonUpwards(*content)) { - return false; - } - } - return true; - }; + auto const check_symlinks = SymlinksChecker{cas}; SECTION("invalid trees") { CHECK_FALSE(repo->ReadTree("", check_symlinks, /*is_hex_id=*/true)); @@ -407,16 +399,7 @@ TEST_CASE("Create Git Trees", "[git_cas]") { REQUIRE(repo); // create symlinks checker - auto check_symlinks = [&cas](std::vector const& ids) { - for (auto const& id : ids) { - auto content = - cas->ReadObject(ArtifactDigest(id).hash(), /*is_hex_id=*/true); - if (not content or not PathIsNonUpwards(*content)) { - return false; - } - } - return true; - }; + auto const check_symlinks = SymlinksChecker{cas}; SECTION("empty tree") { auto tree_id = repo->CreateTree({}); @@ -438,15 +421,15 @@ TEST_CASE("Create Git Trees", "[git_cas]") { SECTION("entry order") { auto foo_bar = GitRepo::tree_entries_t{ {HexToRaw(kFooId), - {GitRepo::tree_entry_t{"foo", ObjectType::File}, - GitRepo::tree_entry_t{"bar", ObjectType::Executable}}}}; + {GitRepo::TreeEntry{"foo", ObjectType::File}, + GitRepo::TreeEntry{"bar", ObjectType::Executable}}}}; auto foo_bar_id = repo->CreateTree(foo_bar); REQUIRE(foo_bar_id); auto bar_foo = GitRepo::tree_entries_t{ {HexToRaw(kFooId), - {GitRepo::tree_entry_t{"bar", ObjectType::Executable}, - 
GitRepo::tree_entry_t{"foo", ObjectType::File}}}}; + {GitRepo::TreeEntry{"bar", ObjectType::Executable}, + GitRepo::TreeEntry{"foo", ObjectType::File}}}}; auto bar_foo_id = repo->CreateTree(bar_foo); REQUIRE(bar_foo_id); @@ -463,16 +446,7 @@ TEST_CASE("Create Git Trees with symlinks", "[git_cas]") { REQUIRE(repo); // create symlinks checker - auto check_symlinks = [&cas](std::vector const& ids) { - for (auto const& id : ids) { - auto content = - cas->ReadObject(ArtifactDigest(id).hash(), /*is_hex_id=*/true); - if (not content or not PathIsNonUpwards(*content)) { - return false; - } - } - return true; - }; + auto const check_symlinks = SymlinksChecker{cas}; SECTION("existing tree with symlinks -- ignore special") { auto entries = repo->ReadTree(kTreeSymId, @@ -510,16 +484,7 @@ TEST_CASE("Read Git Tree Data", "[git_cas]") { REQUIRE(repo); // create symlinks checker - auto check_symlinks = [&cas](std::vector const& ids) { - for (auto const& id : ids) { - auto content = - cas->ReadObject(ArtifactDigest(id).hash(), /*is_hex_id=*/true); - if (not content or not PathIsNonUpwards(*content)) { - return false; - } - } - return true; - }; + auto const check_symlinks = SymlinksChecker{cas}; SECTION("empty tree") { auto entries = @@ -555,16 +520,7 @@ TEST_CASE("Read Git Tree Data with non-upwards symlinks", "[git_cas]") { REQUIRE(repo); // create symlinks checker - auto check_symlinks = [&cas](std::vector const& ids) { - for (auto const& id : ids) { - auto content = - cas->ReadObject(ArtifactDigest(id).hash(), /*is_hex_id=*/true); - if (not content or not PathIsNonUpwards(*content)) { - return false; - } - } - return true; - }; + auto const check_symlinks = SymlinksChecker{cas}; SECTION("empty tree") { auto entries = @@ -600,16 +556,7 @@ TEST_CASE("Create Shallow Git Trees", "[git_cas]") { REQUIRE(repo); // create symlinks checker - auto check_symlinks = [&cas](std::vector const& ids) { - for (auto const& id : ids) { - auto content = - cas->ReadObject(ArtifactDigest(id).hash(), 
/*is_hex_id=*/true); - if (not content or not PathIsNonUpwards(*content)) { - return false; - } - } - return true; - }; + auto const check_symlinks = SymlinksChecker{cas}; SECTION("empty tree") { auto tree = GitRepo::CreateShallowTree({}); @@ -1000,16 +947,7 @@ TEST_CASE("Thread-safety", "[git_tree]") { REQUIRE(cas); // create symlinks checker - auto check_symlinks = [&cas](std::vector const& ids) { - for (auto const& id : ids) { - auto content = cas->ReadObject(ArtifactDigest(id).hash(), - /*is_hex_id=*/true); - if (not content or not PathIsNonUpwards(*content)) { - return false; - } - } - return true; - }; + auto const check_symlinks = SymlinksChecker{cas}; for (int id{}; id < kNumThreads; ++id) { threads.emplace_back([&cas, &starting_signal, check_symlinks]() { diff --git a/test/buildtool/file_system/object_cas.test.cpp b/test/buildtool/file_system/object_cas.test.cpp index e9ebf5805..1872f25bb 100644 --- a/test/buildtool/file_system/object_cas.test.cpp +++ b/test/buildtool/file_system/object_cas.test.cpp @@ -14,12 +14,12 @@ #include "src/buildtool/file_system/object_cas.hpp" -#include // std::equal_to +#include // has_value() #include #include "catch2/catch_test_macros.hpp" #include "src/buildtool/common/artifact_digest.hpp" -#include "src/buildtool/common/bazel_types.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/file_system/object_type.hpp" #include "src/buildtool/storage/config.hpp" @@ -30,11 +30,11 @@ TEST_CASE("ObjectCAS", "[file_system]") { auto gen_config = storage_config.Get().CreateGenerationConfig(0); std::string test_content{"test"}; - auto test_digest = ArtifactDigest::Create( + auto test_digest = ArtifactDigestFactory::HashDataAs( storage_config.Get().hash_function, test_content); SECTION("CAS for files") { - ObjectCAS cas{storage_config.Get().hash_function, + ObjectCAS cas{&storage_config.Get().hash_function, gen_config.cas_f}; CHECK(not 
cas.BlobPath(test_digest)); @@ -42,7 +42,7 @@ TEST_CASE("ObjectCAS", "[file_system]") { // add blob auto cas_digest = cas.StoreBlobFromBytes(test_content); CHECK(cas_digest); - CHECK(std::equal_to{}(*cas_digest, test_digest)); + CHECK(*cas_digest == test_digest); // verify blob auto blob_path = cas.BlobPath(*cas_digest); @@ -60,7 +60,7 @@ TEST_CASE("ObjectCAS", "[file_system]") { // add blob auto cas_digest = cas.StoreBlobFromFile("tmp/test"); CHECK(cas_digest); - CHECK(std::equal_to{}(*cas_digest, test_digest)); + CHECK(*cas_digest == test_digest); // verify blob auto blob_path = cas.BlobPath(*cas_digest); @@ -74,14 +74,14 @@ TEST_CASE("ObjectCAS", "[file_system]") { SECTION("CAS for executables") { ObjectCAS cas{ - storage_config.Get().hash_function, gen_config.cas_x}; + &storage_config.Get().hash_function, gen_config.cas_x}; CHECK(not cas.BlobPath(test_digest)); SECTION("Add blob from bytes and verify") { // add blob auto cas_digest = cas.StoreBlobFromBytes(test_content); CHECK(cas_digest); - CHECK(std::equal_to{}(*cas_digest, test_digest)); + CHECK(*cas_digest == test_digest); // verify blob auto blob_path = cas.BlobPath(*cas_digest); @@ -99,7 +99,7 @@ TEST_CASE("ObjectCAS", "[file_system]") { // add blob auto cas_digest = cas.StoreBlobFromFile("tmp/test"); CHECK(cas_digest); - CHECK(std::equal_to{}(*cas_digest, test_digest)); + CHECK(*cas_digest == test_digest); // verify blob auto blob_path = cas.BlobPath(*cas_digest); diff --git a/test/buildtool/file_system/resolve_symlinks_map.test.cpp b/test/buildtool/file_system/resolve_symlinks_map.test.cpp index d1cd7692d..5831aee14 100644 --- a/test/buildtool/file_system/resolve_symlinks_map.test.cpp +++ b/test/buildtool/file_system/resolve_symlinks_map.test.cpp @@ -103,7 +103,7 @@ auto const kBazId = std::string{"27b32561185c2825150893774953906c6daa6798"}; : (*repo_path / ".git").string()), QuoteForShell(repo_path->string())); if (std::system(cmd.c_str()) == 0) { - return *repo_path; + return repo_path; } return 
std::nullopt; } @@ -120,7 +120,7 @@ TEST_CASE("Resolve symlinks", "[resolve_symlinks_map]") { auto resolve_symlinks_map = CreateResolveSymlinksMap(); SECTION("Source repo is target repo") { - constexpr auto NUM_CASES = 3; + constexpr auto kNumCases = 3; std::vector expected = { {kFooId, ObjectType::File, "baz/foo"}, {kBazBarLinkId, ObjectType::Symlink, "bar_l"}, @@ -151,7 +151,7 @@ TEST_CASE("Resolve symlinks", "[resolve_symlinks_map]") { source_cas, source_cas)}, [&expected, &source_cas](auto const& values) { - for (auto i = 0; i < NUM_CASES; ++i) { + for (auto i = 0; i < kNumCases; ++i) { auto const& res = ResolvedGitObject{*values[i]}; CHECK(res.id == expected[i].id); CHECK(res.type == expected[i].type); @@ -176,7 +176,7 @@ TEST_CASE("Resolve symlinks", "[resolve_symlinks_map]") { auto target_cas = GitCAS::Open(*target_repo_path); REQUIRE(target_cas); - constexpr auto NUM_CASES = 3; + constexpr auto kNumCases = 3; std::vector expected = { {kFooId, ObjectType::File, "baz/foo"}, {kBazBarLinkId, ObjectType::Symlink, "bar_l"}, @@ -207,7 +207,7 @@ TEST_CASE("Resolve symlinks", "[resolve_symlinks_map]") { source_cas, target_cas)}, [&expected, &target_cas](auto const& values) { - for (auto i = 0; i < NUM_CASES; ++i) { + for (auto i = 0; i < kNumCases; ++i) { auto const& res = ResolvedGitObject{*values[i]}; CHECK(res.id == expected[i].id); CHECK(res.type == expected[i].type); diff --git a/test/buildtool/graph_traverser/TARGETS b/test/buildtool/graph_traverser/TARGETS index fcf2da20c..5e5633d50 100644 --- a/test/buildtool/graph_traverser/TARGETS +++ b/test/buildtool/graph_traverser/TARGETS @@ -4,10 +4,13 @@ , "hdrs": ["graph_traverser.test.hpp"] , "deps": [ ["@", "catch2", "", "catch2"] + , ["@", "gsl", "", "gsl"] , ["@", "json", "", "json"] , ["@", "src", "src/buildtool/auth", "auth"] , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/common", "protocol_traits"] , ["@", "src", "src/buildtool/common/remote", "retry_config"] + , ["@", "src", 
"src/buildtool/execution_api/common", "api_bundle"] , ["@", "src", "src/buildtool/execution_api/local", "config"] , ["@", "src", "src/buildtool/execution_api/local", "context"] , ["@", "src", "src/buildtool/execution_api/remote", "config"] @@ -19,9 +22,10 @@ , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] , ["@", "src", "src/buildtool/progress_reporting", "progress"] - , ["@", "src", "src/buildtool/execution_api/common", "api_bundle"] - , ["@", "src", "src/buildtool/storage", "storage"] , ["@", "src", "src/buildtool/storage", "config"] + , ["@", "src", "src/buildtool/storage", "storage"] + , ["@", "src", "src/utils/cpp", "json"] + , ["utils", "test_hash_function_type"] ] , "stage": ["test", "buildtool", "graph_traverser"] } @@ -33,12 +37,14 @@ , "private-deps": [ "graph_traverser_tests" , ["@", "catch2", "", "catch2"] - , ["", "catch-main"] - , ["utils", "test_auth_config"] - , ["utils", "test_storage_config"] + , ["@", "src", "src/buildtool/auth", "auth"] + , ["@", "src", "src/buildtool/common", "protocol_traits"] , ["@", "src", "src/buildtool/execution_api/remote", "config"] , ["@", "src", "src/buildtool/storage", "config"] , ["@", "src", "src/buildtool/storage", "storage"] + , ["", "catch-main"] + , ["utils", "test_auth_config"] + , ["utils", "test_storage_config"] ] , "stage": ["test", "buildtool", "graph_traverser"] } @@ -50,12 +56,14 @@ , "private-deps": [ "graph_traverser_tests" , ["@", "catch2", "", "catch2"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] + , ["@", "src", "src/buildtool/execution_api/remote", "config"] + , ["@", "src", "src/buildtool/storage", "config"] + , ["@", "src", "src/buildtool/storage", "storage"] , ["utils", "catch-main-remote-execution"] , ["utils", "test_auth_config"] + , ["utils", "test_hash_function_type"] , ["utils", "test_remote_config"] - , ["@", "src", "src/buildtool/storage", "storage"] - , ["@", "src", "src/buildtool/storage", "config"] - , ["@", "src", 
"src/buildtool/crypto", "hash_function"] ] , "stage": ["test", "buildtool", "graph_traverser"] } diff --git a/test/buildtool/graph_traverser/graph_traverser.test.hpp b/test/buildtool/graph_traverser/graph_traverser.test.hpp index 28ec44e02..a98396385 100644 --- a/test/buildtool/graph_traverser/graph_traverser.test.hpp +++ b/test/buildtool/graph_traverser/graph_traverser.test.hpp @@ -29,6 +29,7 @@ #include "gsl/gsl" #include "nlohmann/json.hpp" #include "src/buildtool/auth/authentication.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/common/remote/retry_config.hpp" #include "src/buildtool/common/statistics.hpp" #include "src/buildtool/execution_api/common/api_bundle.hpp" @@ -46,6 +47,7 @@ #include "src/buildtool/storage/config.hpp" #include "src/buildtool/storage/storage.hpp" #include "src/utils/cpp/json.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" // NOLINTNEXTLINE(google-build-namespaces) namespace { @@ -54,8 +56,8 @@ class TestProject { public: struct CommandLineArguments { GraphTraverser::CommandLineArguments gtargs; - nlohmann::json artifacts{}; - std::filesystem::path graph_description{}; + nlohmann::json artifacts; + std::filesystem::path graph_description; explicit CommandLineArguments( GraphTraverser::CommandLineArguments gtargs) @@ -109,8 +111,8 @@ class TestProject { "test/buildtool/graph_traverser/data/"; static inline std::string const kDefaultEntryPointsFileName = "_entry_points"; - std::string example_name_{}; - std::filesystem::path root_dir_{}; + std::string example_name_; + std::filesystem::path root_dir_; RepositoryConfig repo_config_{}; void SetupConfig() { @@ -127,7 +129,8 @@ class TestProject { CommandLineArguments clargs{gtargs}; clargs.artifacts = entry_points; auto const comp_graph = root_dir_ / "graph_description_compatible"; - if (Compatibility::IsCompatible() and + if (not ProtocolTraits::IsNative( + TestHashType::ReadFromEnvironment()) and FileSystemManager::Exists(comp_graph)) { 
clargs.graph_description = comp_graph; } diff --git a/test/buildtool/graph_traverser/graph_traverser_remote.test.cpp b/test/buildtool/graph_traverser/graph_traverser_remote.test.cpp index d92b9ef96..a7888ac47 100644 --- a/test/buildtool/graph_traverser/graph_traverser_remote.test.cpp +++ b/test/buildtool/graph_traverser/graph_traverser_remote.test.cpp @@ -18,6 +18,7 @@ #include "src/buildtool/storage/config.hpp" #include "src/buildtool/storage/storage.hpp" #include "test/buildtool/graph_traverser/graph_traverser.test.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" #include "test/utils/remote_execution/test_auth_config.hpp" #include "test/utils/remote_execution/test_remote_config.hpp" @@ -34,9 +35,7 @@ StorageConfig::Builder builder; auto config = builder.SetBuildRoot(cache_dir) - .SetHashType(Compatibility::IsCompatible() - ? HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1) + .SetHashType(TestHashType::ReadFromEnvironment()) .SetRemoteExecutionArgs(remote_config.remote_address, remote_config.platform_properties, remote_config.dispatch) diff --git a/test/buildtool/logging/TARGETS b/test/buildtool/logging/TARGETS index bbff8a411..2baed6b88 100644 --- a/test/buildtool/logging/TARGETS +++ b/test/buildtool/logging/TARGETS @@ -4,9 +4,9 @@ , "srcs": ["logger.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", "logging"] } @@ -16,10 +16,10 @@ , "srcs": ["log_sink_file.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] + , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] - , ["@", "src", "src/buildtool/file_system", "file_system_manager"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", 
"logging"] , "private-ldflags": ["-pthread"] diff --git a/test/buildtool/logging/logger.test.cpp b/test/buildtool/logging/logger.test.cpp index 474a656f9..deee74774 100644 --- a/test/buildtool/logging/logger.test.cpp +++ b/test/buildtool/logging/logger.test.cpp @@ -27,8 +27,8 @@ // Stores prints from test sink instances class TestPrints { struct PrintData { - std::atomic counter{}; - std::unordered_map> prints{}; + std::atomic counter; + std::unordered_map> prints; }; public: @@ -61,7 +61,7 @@ class LogSinkTest : public ILogSink { return [] { return std::make_shared(); }; } - LogSinkTest() noexcept { id_ = TestPrints::GetId(); } + LogSinkTest() noexcept : id_{TestPrints::GetId()} {} void Emit(Logger const* logger, LogLevel level, diff --git a/test/buildtool/main/TARGETS b/test/buildtool/main/TARGETS index e0871cf1f..128870bab 100644 --- a/test/buildtool/main/TARGETS +++ b/test/buildtool/main/TARGETS @@ -4,9 +4,10 @@ , "srcs": ["install_cas.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["@", "src", "src/buildtool/main", "install_cas"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", "main"] } diff --git a/test/buildtool/main/install_cas.test.cpp b/test/buildtool/main/install_cas.test.cpp index 9c9ef8e07..b74ef143d 100644 --- a/test/buildtool/main/install_cas.test.cpp +++ b/test/buildtool/main/install_cas.test.cpp @@ -16,54 +16,69 @@ #include "catch2/catch_test_macros.hpp" #include "src/buildtool/common/artifact.hpp" +#include "src/buildtool/crypto/hash_function.hpp" TEST_CASE("ObjectInfoFromLiberalString", "[artifact]") { auto expected = *Artifact::ObjectInfo::FromString( + HashFunction::Type::GitSHA1, "[5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689:11:f]"); auto expected_as_tree = *Artifact::ObjectInfo::FromString( + HashFunction::Type::GitSHA1, "[5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689:0:t]"); // Check (default) 
file hashes CHECK(ObjectInfoFromLiberalString( + HashFunction::Type::GitSHA1, "[5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689:11:f]", /*has_remote=*/false) == expected); CHECK(ObjectInfoFromLiberalString( + HashFunction::Type::GitSHA1, "5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689:11:f]", /*has_remote=*/false) == expected); CHECK(ObjectInfoFromLiberalString( + HashFunction::Type::GitSHA1, "[5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689:11:f", /*has_remote=*/false) == expected); CHECK(ObjectInfoFromLiberalString( + HashFunction::Type::GitSHA1, "5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689:11:f", /*has_remote=*/false) == expected); CHECK(ObjectInfoFromLiberalString( + HashFunction::Type::GitSHA1, "5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689:11:file", /*has_remote=*/false) == expected); CHECK(ObjectInfoFromLiberalString( + HashFunction::Type::GitSHA1, "5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689:11:notavalidletter", /*has_remote=*/false) == expected); // Without size, which is not honored in equality CHECK( - ObjectInfoFromLiberalString("5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689", + ObjectInfoFromLiberalString(HashFunction::Type::GitSHA1, + "5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689", /*has_remote=*/false) == expected); CHECK( - ObjectInfoFromLiberalString("5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689:", + ObjectInfoFromLiberalString(HashFunction::Type::GitSHA1, + "5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689:", /*has_remote=*/false) == expected); // Syntactically invalid size should be ignored CHECK(ObjectInfoFromLiberalString( + HashFunction::Type::GitSHA1, "5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689:xyz", /*has_remote=*/false) == expected); // Check tree hashes CHECK(ObjectInfoFromLiberalString( + HashFunction::Type::GitSHA1, "5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689::t", /*has_remote=*/false) == expected_as_tree); CHECK(ObjectInfoFromLiberalString( + HashFunction::Type::GitSHA1, "5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689::tree", /*has_remote=*/false) == expected_as_tree); 
CHECK(ObjectInfoFromLiberalString( + HashFunction::Type::GitSHA1, "5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689:xyz:t", /*has_remote=*/false) == expected_as_tree); } diff --git a/test/buildtool/multithreading/TARGETS b/test/buildtool/multithreading/TARGETS index bf4e5e739..bd501759b 100644 --- a/test/buildtool/multithreading/TARGETS +++ b/test/buildtool/multithreading/TARGETS @@ -4,8 +4,9 @@ , "srcs": ["task.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] + , ["@", "src", "src/buildtool/multithreading", "task"] , ["@", "src", "src/buildtool/multithreading", "task_system"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", "multithreading"] } @@ -15,9 +16,9 @@ , "srcs": ["task_system.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] + , ["@", "src", "src/buildtool/multithreading", "task_system"] , ["", "catch-main"] , ["utils", "container_matchers"] - , ["@", "src", "src/buildtool/multithreading", "task_system"] ] , "stage": ["test", "buildtool", "multithreading"] } @@ -27,10 +28,10 @@ , "srcs": ["async_map_node.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] - , ["utils", "container_matchers"] , ["@", "src", "src/buildtool/multithreading", "async_map_node"] , ["@", "src", "src/buildtool/multithreading", "task_system"] + , ["", "catch-main"] + , ["utils", "container_matchers"] ] , "stage": ["test", "buildtool", "multithreading"] } @@ -40,11 +41,11 @@ , "srcs": ["async_map.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] - , ["utils", "container_matchers"] , ["@", "src", "src/buildtool/multithreading", "async_map"] , ["@", "src", "src/buildtool/multithreading", "async_map_node"] , ["@", "src", "src/buildtool/multithreading", "task_system"] + , ["", "catch-main"] + , ["utils", "container_matchers"] ] , "stage": ["test", "buildtool", "multithreading"] } @@ -54,11 +55,11 @@ , "srcs": ["async_map_consumer.test.cpp"] , "private-deps": [ ["@", "catch2", 
"", "catch2"] - , ["", "catch-main"] - , ["utils", "container_matchers"] - , ["@", "src", "src/buildtool/multithreading", "async_map_consumer"] , ["@", "src", "src/buildtool/multithreading", "async_map"] + , ["@", "src", "src/buildtool/multithreading", "async_map_consumer"] , ["@", "src", "src/buildtool/multithreading", "task_system"] + , ["", "catch-main"] + , ["utils", "container_matchers"] ] , "stage": ["test", "buildtool", "multithreading"] } diff --git a/test/buildtool/multithreading/async_map_consumer.test.cpp b/test/buildtool/multithreading/async_map_consumer.test.cpp index 3ba3eebf7..4f7d54d82 100644 --- a/test/buildtool/multithreading/async_map_consumer.test.cpp +++ b/test/buildtool/multithreading/async_map_consumer.test.cpp @@ -248,7 +248,7 @@ TEST_CASE("Failure detection", "[async_map_consumer]") { bool failed{}; SECTION("Unfinished pending keys") { - int const kStep{3}; + static constexpr int kStep = 3; REQUIRE(std::lcm(kMaxVal, kStep) > kMaxVal); auto map = CountToMaxConsumer(kMaxVal, kStep); { diff --git a/test/buildtool/multithreading/task_system.test.cpp b/test/buildtool/multithreading/task_system.test.cpp index 8e9f5c777..36b1cc14c 100644 --- a/test/buildtool/multithreading/task_system.test.cpp +++ b/test/buildtool/multithreading/task_system.test.cpp @@ -18,6 +18,7 @@ #include #include #include +#include #include #include #include // std::iota @@ -33,7 +34,7 @@ namespace { -enum class CallStatus { kNotExecuted, kExecuted }; +enum class CallStatus : std::uint8_t { kNotExecuted, kExecuted }; } // namespace @@ -48,7 +49,7 @@ TEST_CASE("Basic", "[task_system]") { } SECTION("1-argument constructor") { std::size_t const desired_number_of_threads_in_ts = - GENERATE(1u, 2u, 5u, 10u, std::thread::hardware_concurrency()); + GENERATE(1U, 2U, 5U, 10U, std::thread::hardware_concurrency()); TaskSystem ts(desired_number_of_threads_in_ts); CHECK(ts.NumberOfThreads() == desired_number_of_threads_in_ts); } @@ -91,10 +92,10 @@ TEST_CASE("Side effects of tasks are 
reflected out of ts", "[task_system]") { SECTION("Lambda capturing `this` inside struct") { std::string ext_name{}; struct Wrapper { - std::string name{}; + std::string name; // ts must be second, otherwise name will get destroyed before the // task system is finished. - TaskSystem ts{}; + TaskSystem ts; explicit Wrapper(std::string n) : name{std::move(n)} {} diff --git a/test/buildtool/serve_api/TARGETS b/test/buildtool/serve_api/TARGETS index 5c5accdf0..89e2379cd 100644 --- a/test/buildtool/serve_api/TARGETS +++ b/test/buildtool/serve_api/TARGETS @@ -5,14 +5,17 @@ , "data": [["buildtool/file_system", "test_data"]] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["utils", "catch-main-serve"] , ["@", "src", "src/buildtool/auth", "auth"] + , ["@", "src", "src/buildtool/common", "protocol_traits"] , ["@", "src", "src/buildtool/common/remote", "retry_config"] + , ["@", "src", "src/buildtool/crypto", "hash_function"] , ["@", "src", "src/buildtool/execution_api/remote", "config"] , ["@", "src", "src/buildtool/execution_api/remote", "context"] + , ["@", "src", "src/buildtool/serve_api/remote", "config"] , ["@", "src", "src/buildtool/serve_api/remote", "source_tree_client"] + , ["utils", "catch-main-serve"] + , ["utils", "test_hash_function_type"] , ["utils", "test_serve_config"] - , ["@", "src", "src/buildtool/serve_api/remote", "config"] ] , "stage": ["test", "buildtool", "serve_api"] } diff --git a/test/buildtool/serve_api/source_tree_client.test.cpp b/test/buildtool/serve_api/source_tree_client.test.cpp index 7c9634df1..8ada13170 100644 --- a/test/buildtool/serve_api/source_tree_client.test.cpp +++ b/test/buildtool/serve_api/source_tree_client.test.cpp @@ -19,10 +19,13 @@ #include "catch2/catch_test_macros.hpp" #include "src/buildtool/auth/authentication.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/common/remote/retry_config.hpp" +#include "src/buildtool/crypto/hash_function.hpp" #include 
"src/buildtool/execution_api/remote/config.hpp" #include "src/buildtool/execution_api/remote/context.hpp" #include "src/buildtool/serve_api/remote/config.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" #include "test/utils/serve_service/test_serve_config.hpp" auto const kRootCommit = @@ -36,9 +39,11 @@ auto const kRootSymId = std::string{"18770dacfe14c15d88450c21c16668e13ab0e7f9"}; auto const kBazSymId = std::string{"1868f82682c290f0b1db3cacd092727eef1fa57f"}; TEST_CASE("Serve service client: tree-of-commit request", "[serve_api]") { - auto config = TestServeConfig::ReadFromEnvironment(); + auto const config = TestServeConfig::ReadFromEnvironment(); REQUIRE(config); REQUIRE(config->remote_address); + auto const hash_function = + HashFunction{TestHashType::ReadFromEnvironment()}; // Create TLC client Auth auth{}; @@ -48,26 +53,39 @@ TEST_CASE("Serve service client: tree-of-commit request", "[serve_api]") { .retry_config = &retry_config, .exec_config = &exec_config}; - SourceTreeClient st_client(*config->remote_address, &remote_context); + SourceTreeClient st_client( + *config->remote_address, &hash_function, &remote_context); SECTION("Commit in bare checkout") { auto root_id = st_client.ServeCommitTree(kRootCommit, ".", false); REQUIRE(root_id); - CHECK(*root_id == kRootId); + CHECK_FALSE(root_id->digest); // digest is not provided if not syncing + if (ProtocolTraits::IsNative(hash_function.GetType())) { + CHECK(root_id->tree == kRootId); + } auto baz_id = st_client.ServeCommitTree(kRootCommit, "baz", false); REQUIRE(baz_id); - CHECK(*baz_id == kBazId); + CHECK_FALSE(baz_id->digest); // digest is not provided if not syncing + if (ProtocolTraits::IsNative(hash_function.GetType())) { + CHECK(baz_id->tree == kBazId); + } } SECTION("Commit in non-bare checkout") { auto root_id = st_client.ServeCommitTree(kRootSymCommit, ".", false); REQUIRE(root_id); - CHECK(*root_id == kRootSymId); + CHECK_FALSE(root_id->digest); // digest is not provided if not 
syncing + if (ProtocolTraits::IsNative(hash_function.GetType())) { + CHECK(root_id->tree == kRootSymId); + } auto baz_id = st_client.ServeCommitTree(kRootSymCommit, "baz", false); REQUIRE(baz_id); - CHECK(*baz_id == kBazSymId); + CHECK_FALSE(baz_id->digest); // digest is not provided if not syncing + if (ProtocolTraits::IsNative(hash_function.GetType())) { + CHECK(baz_id->tree == kBazSymId); + } } SECTION("Subdir not found") { @@ -81,7 +99,7 @@ TEST_CASE("Serve service client: tree-of-commit request", "[serve_api]") { auto root_id = st_client.ServeCommitTree( "0123456789abcdef0123456789abcdef01234567", ".", false); REQUIRE_FALSE(root_id); - CHECK_FALSE(root_id.error() == - GitLookupError::Fatal); // non-fatal failure + CHECK(root_id.error() == + GitLookupError::NotFound); // non-fatal failure } } diff --git a/test/buildtool/storage/TARGETS b/test/buildtool/storage/TARGETS index 69d131373..d3cc1c4c0 100644 --- a/test/buildtool/storage/TARGETS +++ b/test/buildtool/storage/TARGETS @@ -15,15 +15,15 @@ , "data": [["buildtool/storage", "test_data"]] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] - , ["utils", "test_storage_config"] + , ["@", "src", "src/buildtool/common", "artifact_digest_factory"] , ["@", "src", "src/buildtool/common", "common"] + , ["@", "src", "src/buildtool/execution_api/bazel_msg", "bazel_msg"] , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/file_system", "object_type"] - , ["@", "src", "src/buildtool/execution_api/bazel_msg", "bazel_msg"] - , ["utils", "blob_creator"] - , ["@", "src", "src/buildtool/storage", "storage"] , ["@", "src", "src/buildtool/storage", "config"] + , ["@", "src", "src/buildtool/storage", "storage"] + , ["", "catch-main"] + , ["utils", "test_storage_config"] ] , "stage": ["test", "buildtool", "storage"] } @@ -33,13 +33,15 @@ , "srcs": ["local_ac.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] + , ["@", "gsl", "", 
"gsl"] + , ["@", "src", "src/buildtool/common", "artifact_digest_factory"] , ["@", "src", "src/buildtool/common", "bazel_types"] , ["@", "src", "src/buildtool/common", "common"] , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/file_system", "object_type"] - , ["@", "src", "src/buildtool/storage", "storage"] , ["@", "src", "src/buildtool/storage", "config"] + , ["@", "src", "src/buildtool/storage", "storage"] + , ["", "catch-main"] , ["utils", "test_storage_config"] ] , "stage": ["test", "buildtool", "storage"] @@ -50,27 +52,27 @@ , "srcs": ["large_object_cas.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] - , ["@", "src", "src/buildtool/file_system", "file_system_manager"] - , ["@", "src", "src/buildtool/file_system", "object_type"] - , ["@", "src", "src/buildtool/storage", "storage"] - , ["@", "src", "src/buildtool/storage", "config"] - , ["utils", "test_storage_config"] , ["@", "src", "src/buildtool/common", "bazel_types"] - , ["utils", "large_object_utils"] - , ["@", "src", "src/utils/cpp", "tmp_dir"] + , ["@", "src", "src/buildtool/common", "protocol_traits"] , [ "@" , "src" , "src/buildtool/execution_api/bazel_msg" , "bazel_msg_factory" ] - , ["@", "src", "src/buildtool/compatibility", "compatibility"] + , ["@", "src", "src/buildtool/file_system", "file_system_manager"] + , ["@", "src", "src/buildtool/file_system", "object_type"] + , ["@", "src", "src/buildtool/storage", "config"] + , ["@", "src", "src/buildtool/storage", "storage"] + , ["@", "src", "src/utils/cpp", "tmp_dir"] + , ["", "catch-main"] + , ["utils", "large_object_utils"] + , ["utils", "test_storage_config"] ] , "stage": ["test", "buildtool", "storage"] } , "TESTS": { "type": ["@", "rules", "test", "suite"] , "stage": ["storage"] - , "deps": ["local_cas", "local_ac", "large_object_cas"] + , "deps": ["large_object_cas", "local_ac", "local_cas"] } } diff --git a/test/buildtool/storage/large_object_cas.test.cpp 
b/test/buildtool/storage/large_object_cas.test.cpp index 7d191629b..bd8fa98e6 100644 --- a/test/buildtool/storage/large_object_cas.test.cpp +++ b/test/buildtool/storage/large_object_cas.test.cpp @@ -28,7 +28,7 @@ #include "catch2/catch_test_macros.hpp" #include "src/buildtool/common/bazel_types.hpp" -#include "src/buildtool/compatibility/native_support.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/file_system/object_type.hpp" @@ -42,23 +42,23 @@ namespace { namespace LargeTestUtils { -template +template class Blob final { public: - static constexpr auto kLargeId = std::string_view("bl_8Mb"); - static constexpr auto kLargeSize = std::uintmax_t(8 * 1024 * 1024); + static constexpr auto kLargeId = "bl_8Mb"; + static constexpr std::uintmax_t kLargeSize = 8UL * 1024 * 1024; - static constexpr auto kSmallId = std::string_view("bl_1kB"); - static constexpr auto kSmallSize = std::uintmax_t(1024); + static constexpr auto kSmallId = "bl_1kB"; + static constexpr std::uintmax_t kSmallSize = 1024; - static constexpr auto kEmptyId = std::string_view("bl_0"); - static constexpr auto kEmptySize = std::uintmax_t(0); + static constexpr auto kEmptyId = "bl_0"; + static constexpr std::uintmax_t kEmptySize = 0; [[nodiscard]] static auto Create( LocalCAS const& cas, std::string const& id, std::uintmax_t size) noexcept - -> std::optional>; + -> std::optional>; [[nodiscard]] static auto Generate(std::string const& id, std::uintmax_t size) noexcept @@ -69,20 +69,20 @@ using File = Blob; class Tree final { public: - static constexpr auto kLargeId = std::string_view("tree_4096"); - static constexpr auto kLargeSize = std::uintmax_t(4096); + static constexpr auto kLargeId = "tree_4096"; + static constexpr std::uintmax_t kLargeSize = 4096; - static constexpr auto kSmallId = std::string_view("tree_1"); - static constexpr auto 
kSmallSize = std::uintmax_t(1); + static constexpr auto kSmallId = "tree_1"; + static constexpr std::uintmax_t kSmallSize = 1; - static constexpr auto kEmptyId = std::string_view("tree_0"); - static constexpr auto kEmptySize = std::uintmax_t(0); + static constexpr auto kEmptyId = "tree_0"; + static constexpr std::uintmax_t kEmptySize = 0; [[nodiscard]] static auto Create( LocalCAS const& cas, std::string const& id, std::uintmax_t entries_count) noexcept - -> std::optional>; + -> std::optional>; [[nodiscard]] static auto Generate(std::string const& id, std::uintmax_t entries_count) noexcept @@ -91,7 +91,7 @@ class Tree final { [[nodiscard]] static auto StoreRaw( LocalCAS const& cas, std::filesystem::path const& directory) noexcept - -> std::optional; + -> std::optional; }; } // namespace LargeTestUtils @@ -105,8 +105,7 @@ TEST_CASE("LargeObjectCAS: split a small tree", "[storage]") { // Create a small tree: using LargeTestUtils::Tree; - auto small = - Tree::Create(cas, std::string(Tree::kSmallId), Tree::kSmallSize); + auto small = Tree::Create(cas, Tree::kSmallId, Tree::kSmallSize); REQUIRE(small); auto const& [digest, path] = *small; @@ -116,7 +115,7 @@ TEST_CASE("LargeObjectCAS: split a small tree", "[storage]") { // The result must contain one blob digest: CHECK(split_pack->size() == 1); - CHECK_FALSE(NativeSupport::IsTree(split_pack->front().hash())); + CHECK_FALSE(split_pack->front().IsTree()); } // Test splitting of a large object. 
The split must be successful and the entry @@ -137,8 +136,8 @@ static void TestLarge(StorageConfig const& storage_config, auto const& cas = storage.CAS(); // Create a large object: - auto object = TestType::Create( - cas, std::string(TestType::kLargeId), TestType::kLargeSize); + auto object = + TestType::Create(cas, TestType::kLargeId, TestType::kLargeSize); CHECK(object); auto const& [digest, path] = *object; @@ -224,8 +223,8 @@ static void TestSmall(Storage const& storage) noexcept { auto const& cas = storage.CAS(); // Create a small object: - auto object = TestType::Create( - cas, std::string(TestType::kSmallId), TestType::kSmallSize); + auto object = + TestType::Create(cas, TestType::kSmallId, TestType::kSmallSize); CHECK(object); auto const& [digest, path] = *object; @@ -233,7 +232,7 @@ static void TestSmall(Storage const& storage) noexcept { auto pack_1 = kIsTree ? cas.SplitTree(digest) : cas.SplitBlob(digest); CHECK(pack_1); CHECK(pack_1->size() == 1); - CHECK_FALSE(NativeSupport::IsTree(pack_1->front().hash())); + CHECK_FALSE(pack_1->front().IsTree()); // Test that there is no large entry in the storage: // To ensure there is no split of the initial object, it is removed: @@ -279,7 +278,7 @@ static void TestEmpty(Storage const& storage) noexcept { // Create an empty file: auto temp_path = LargeTestUtils::Blob::Generate( - std::string(TestType::kEmptyId), TestType::kEmptySize); + TestType::kEmptyId, TestType::kEmptySize); REQUIRE(temp_path); auto const& cas = storage.CAS(); @@ -336,8 +335,8 @@ static void TestExternal(StorageConfig const& storage_config, auto const& cas = storage.CAS(); // Create a large object: - auto object = TestType::Create( - cas, std::string(TestType::kLargeId), TestType::kLargeSize); + auto object = + TestType::Create(cas, TestType::kLargeId, TestType::kLargeSize); CHECK(object); auto const& [digest, path] = *object; @@ -351,8 +350,8 @@ static void TestExternal(StorageConfig const& storage_config, // generation: 
REQUIRE(GarbageCollector::TriggerGarbageCollection(storage_config)); for (auto const& part : *pack_1) { - static constexpr bool is_executable = false; - REQUIRE(cas.BlobPath(part, is_executable)); + static constexpr bool kIsExecutable = false; + REQUIRE(cas.BlobPath(part, kIsExecutable)); } auto const youngest = ::Generation::Create(&storage_config); @@ -383,8 +382,8 @@ static void TestExternal(StorageConfig const& storage_config, REQUIRE(*implicit_splice == path); // Randomize one more object to simulate invalidation: - auto small = TestType::Create( - cas, std::string(TestType::kSmallId), TestType::kSmallSize); + auto small = + TestType::Create(cas, TestType::kSmallId, TestType::kSmallSize); REQUIRE(small); auto const& [small_digest, small_path] = *small; @@ -405,7 +404,8 @@ static void TestExternal(StorageConfig const& storage_config, REQUIRE(FileSystemManager::IsFile(path)); } - if (kIsTree and not Compatibility::IsCompatible()) { + if (kIsTree and ProtocolTraits::IsTreeAllowed( + storage_config.hash_function.GetType())) { // Tree invariants check is omitted in compatible mode. SECTION("Tree invariants check fails") { // Check splice fails due to the tree invariants check. @@ -436,8 +436,8 @@ static void TestCompactification(StorageConfig const& storage_config, auto const& cas = storage.CAS(); // Create a large object and split it: - auto object = TestType::Create( - cas, std::string(TestType::kLargeId), TestType::kLargeSize); + auto object = + TestType::Create(cas, TestType::kLargeId, TestType::kLargeSize); REQUIRE(object); auto& [digest, path] = *object; auto result = kIsTree ? cas.SplitTree(digest) : cas.SplitBlob(digest); @@ -445,12 +445,12 @@ static void TestCompactification(StorageConfig const& storage_config, // For trees the size must be increased to exceed the internal // compactification threshold: - static constexpr auto ExceedThresholdSize = + static constexpr auto kExceedThresholdSize = kIsTree ? 
TestType::kLargeSize * 8 : TestType::kLargeSize; // Create a large object that is to be split during compactification: auto object_2 = TestType::Create( - cas, std::string(TestType::kLargeId) + "_2", ExceedThresholdSize); + cas, std::string(TestType::kLargeId) + "_2", kExceedThresholdSize); REQUIRE(object_2); auto& [digest_2, path_2] = *object_2; @@ -458,7 +458,7 @@ static void TestCompactification(StorageConfig const& storage_config, // may be present in the storage. To ensure compactification deals with // them properly, a "unique" file is created: auto invalid_object = TestType::Create( - cas, std::string(TestType::kLargeId) + "_3", ExceedThresholdSize); + cas, std::string(TestType::kLargeId) + "_3", kExceedThresholdSize); REQUIRE(invalid_object); auto& [invalid_digest, invalid_path] = *invalid_object; @@ -467,7 +467,7 @@ static void TestCompactification(StorageConfig const& storage_config, REQUIRE(FileSystemManager::Rename(invalid_path, *unique_path)); // Ensure all entries are in the storage: - auto get_path = [](auto const& cas, bazel_re::Digest const& digest) { + auto get_path = [](auto const& cas, ArtifactDigest const& digest) { return kIsTree ? 
cas.TreePath(digest) : cas.BlobPath(digest, kIsExec); }; @@ -542,7 +542,7 @@ TEST_CASE("LargeObjectCAS: uplink nested large objects", "[storage]") { // Randomize a large directory: auto tree_path = LargeTestUtils::Tree::Generate( - std::string("nested_tree"), LargeTestUtils::Tree::kLargeSize); + "nested_tree", LargeTestUtils::Tree::kLargeSize); REQUIRE(tree_path); // Randomize a large nested tree: @@ -606,7 +606,8 @@ TEST_CASE("LargeObjectCAS: uplink nested large objects", "[storage]") { // However, in native mode they might be reconstructed on request because // their entries are in the latest generation: - if (not Compatibility::IsCompatible()) { + if (ProtocolTraits::IsNative( + storage_config.Get().hash_function.GetType())) { auto split_nested_tree_2 = latest.CAS().SplitTree(*nested_tree_digest); REQUIRE(split_nested_tree_2); @@ -647,24 +648,24 @@ class TestFilesDirectory final { }; namespace LargeTestUtils { -template -auto Blob::Create(LocalCAS const& cas, - std::string const& id, - std::uintmax_t size) noexcept - -> std::optional> { +template +auto Blob::Create(LocalCAS const& cas, + std::string const& id, + std::uintmax_t size) noexcept + -> std::optional> { auto path = Generate(id, size); - auto digest = path ? cas.StoreBlob(*path, IsExecutable) : std::nullopt; + auto digest = path ? cas.StoreBlob(*path, kIsExecutable) : std::nullopt; auto blob_path = - digest ? cas.BlobPath(*digest, IsExecutable) : std::nullopt; + digest ? 
cas.BlobPath(*digest, kIsExecutable) : std::nullopt; if (digest and blob_path) { return std::make_pair(std::move(*digest), std::move(*blob_path)); } return std::nullopt; } -template -auto Blob::Generate(std::string const& id, - std::uintmax_t size) noexcept +template +auto Blob::Generate(std::string const& id, + std::uintmax_t size) noexcept -> std::optional { std::string const path_id = "blob" + id; auto path = TestFilesDirectory::Instance().GetPath() / path_id; @@ -678,7 +679,7 @@ auto Blob::Generate(std::string const& id, auto Tree::Create(LocalCAS const& cas, std::string const& id, std::uintmax_t entries_count) noexcept - -> std::optional> { + -> std::optional> { auto path = Generate(id, entries_count); auto digest = path ? StoreRaw(cas, *path) : std::nullopt; auto cas_path = digest ? cas.TreePath(*digest) : std::nullopt; @@ -702,28 +703,28 @@ auto Tree::Generate(std::string const& id, auto Tree::StoreRaw(LocalCAS const& cas, std::filesystem::path const& directory) noexcept - -> std::optional { + -> std::optional { if (not FileSystemManager::IsDirectory(directory)) { return std::nullopt; } auto store_blob = [&cas](std::filesystem::path const& path, - auto is_exec) -> std::optional { + auto is_exec) -> std::optional { return cas.StoreBlob(path, is_exec); }; auto store_tree = - [&cas](std::string const& content) -> std::optional { + [&cas](std::string const& content) -> std::optional { return cas.StoreTree(content); }; auto store_symlink = - [&cas](std::string const& content) -> std::optional { + [&cas](std::string const& content) -> std::optional { return cas.StoreBlob(content); }; - return Compatibility::IsCompatible() - ? BazelMsgFactory::CreateDirectoryDigestFromLocalTree( + return ProtocolTraits::IsNative(cas.GetHashFunction().GetType()) + ? 
BazelMsgFactory::CreateGitTreeDigestFromLocalTree( directory, store_blob, store_tree, store_symlink) - : BazelMsgFactory::CreateGitTreeDigestFromLocalTree( + : BazelMsgFactory::CreateDirectoryDigestFromLocalTree( directory, store_blob, store_tree, store_symlink); } } // namespace LargeTestUtils diff --git a/test/buildtool/storage/local_ac.test.cpp b/test/buildtool/storage/local_ac.test.cpp index c692a75cd..73320ce44 100644 --- a/test/buildtool/storage/local_ac.test.cpp +++ b/test/buildtool/storage/local_ac.test.cpp @@ -17,6 +17,7 @@ #include "catch2/catch_test_macros.hpp" #include "gsl/gsl" #include "src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/common/bazel_types.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/file_system/object_type.hpp" @@ -27,7 +28,7 @@ [[nodiscard]] static auto RunDummyExecution( gsl::not_null const*> const& ac, gsl::not_null const*> const& cas_, - bazel_re::Digest const& action_id, + ArtifactDigest const& action_id, std::string const& seed) -> bool; TEST_CASE("LocalAC: Single action, single result", "[storage]") { @@ -36,7 +37,7 @@ TEST_CASE("LocalAC: Single action, single result", "[storage]") { auto const& ac = storage.ActionCache(); auto const& cas = storage.CAS(); - auto action_id = ArtifactDigest::Create( + auto action_id = ArtifactDigestFactory::HashDataAs( storage_config.Get().hash_function, "action"); CHECK(not ac.CachedResult(action_id)); CHECK(RunDummyExecution(&ac, &cas, action_id, "result")); @@ -51,9 +52,9 @@ TEST_CASE("LocalAC: Two different actions, two different results", auto const& ac = storage.ActionCache(); auto const& cas = storage.CAS(); - auto action_id1 = ArtifactDigest::Create( + auto action_id1 = ArtifactDigestFactory::HashDataAs( storage_config.Get().hash_function, "action1"); - auto action_id2 = ArtifactDigest::Create( + auto action_id2 = ArtifactDigestFactory::HashDataAs( 
storage_config.Get().hash_function, "action2"); CHECK(not ac.CachedResult(action_id1)); CHECK(not ac.CachedResult(action_id2)); @@ -82,9 +83,9 @@ TEST_CASE("LocalAC: Two different actions, same two results", "[storage]") { auto const& ac = storage.ActionCache(); auto const& cas = storage.CAS(); - auto action_id1 = ArtifactDigest::Create( + auto action_id1 = ArtifactDigestFactory::HashDataAs( storage_config.Get().hash_function, "action1"); - auto action_id2 = ArtifactDigest::Create( + auto action_id2 = ArtifactDigestFactory::HashDataAs( storage_config.Get().hash_function, "action2"); CHECK(not ac.CachedResult(action_id1)); CHECK(not ac.CachedResult(action_id2)); @@ -113,7 +114,7 @@ TEST_CASE("LocalAC: Same two actions, two different results", "[storage]") { auto const& ac = storage.ActionCache(); auto const& cas = storage.CAS(); - auto action_id = ArtifactDigest::Create( + auto action_id = ArtifactDigestFactory::HashDataAs( storage_config.Get().hash_function, "same action"); CHECK(not ac.CachedResult(action_id)); @@ -136,15 +137,14 @@ TEST_CASE("LocalAC: Same two actions, two different results", "[storage]") { auto RunDummyExecution(gsl::not_null const*> const& ac, gsl::not_null const*> const& cas_, - bazel_re::Digest const& action_id, + ArtifactDigest const& action_id, std::string const& seed) -> bool { bazel_re::ActionResult result{}; *result.add_output_files() = [&]() { bazel_re::OutputFile out{}; out.set_path(seed); auto digest = cas_->StoreBlob(""); - out.set_allocated_digest( - gsl::owner{new bazel_re::Digest{*digest}}); + *out.mutable_digest() = ArtifactDigestFactory::ToBazel(*digest); out.set_is_executable(false); return out; }(); diff --git a/test/buildtool/storage/local_cas.test.cpp b/test/buildtool/storage/local_cas.test.cpp index b4ede8caa..c7b7f720c 100644 --- a/test/buildtool/storage/local_cas.test.cpp +++ b/test/buildtool/storage/local_cas.test.cpp @@ -17,12 +17,12 @@ #include "catch2/catch_test_macros.hpp" #include 
"src/buildtool/common/artifact_digest.hpp" +#include "src/buildtool/common/artifact_digest_factory.hpp" #include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/file_system/object_type.hpp" #include "src/buildtool/storage/config.hpp" #include "src/buildtool/storage/storage.hpp" -#include "test/utils/blob_creator.hpp" #include "test/utils/hermeticity/test_storage_config.hpp" TEST_CASE("LocalCAS: Add blob to storage from bytes", "[storage]") { @@ -32,7 +32,7 @@ TEST_CASE("LocalCAS: Add blob to storage from bytes", "[storage]") { std::string test_bytes("test"); - auto test_digest = ArtifactDigest::Create( + auto test_digest = ArtifactDigestFactory::HashDataAs( storage_config.Get().hash_function, test_bytes); // check blob not in storage @@ -83,27 +83,27 @@ TEST_CASE("LocalCAS: Add blob to storage from non-executable file", std::filesystem::path non_exec_file{ "test/buildtool/storage/data/non_executable_file"}; - auto test_blob = - CreateBlobFromPath(non_exec_file, storage_config.Get().hash_function); + auto test_blob = ArtifactDigestFactory::HashFileAs( + storage_config.Get().hash_function, non_exec_file); REQUIRE(test_blob); // check blob not in storage - CHECK(not cas.BlobPath(test_blob->digest, true)); - CHECK(not cas.BlobPath(test_blob->digest, false)); + CHECK(not cas.BlobPath(*test_blob, true)); + CHECK(not cas.BlobPath(*test_blob, false)); // ensure previous calls did not accidentially create the blob - CHECK(not cas.BlobPath(test_blob->digest, true)); - CHECK(not cas.BlobPath(test_blob->digest, false)); + CHECK(not cas.BlobPath(*test_blob, true)); + CHECK(not cas.BlobPath(*test_blob, false)); SECTION("Add non-executable blob to storage") { CHECK(cas.StoreBlob(non_exec_file, false)); - auto file_path = cas.BlobPath(test_blob->digest, false); + auto file_path = cas.BlobPath(*test_blob, false); REQUIRE(file_path); CHECK(FileSystemManager::IsFile(*file_path)); 
CHECK(not FileSystemManager::IsExecutable(*file_path)); - auto exe_path = cas.BlobPath(test_blob->digest, true); + auto exe_path = cas.BlobPath(*test_blob, true); REQUIRE(exe_path); CHECK(FileSystemManager::IsFile(*exe_path)); CHECK(FileSystemManager::IsExecutable(*exe_path)); @@ -113,12 +113,12 @@ TEST_CASE("LocalCAS: Add blob to storage from non-executable file", SECTION("Add executable blob to storage") { CHECK(cas.StoreBlob(non_exec_file, true)); - auto file_path = cas.BlobPath(test_blob->digest, false); + auto file_path = cas.BlobPath(*test_blob, false); REQUIRE(file_path); CHECK(FileSystemManager::IsFile(*file_path)); CHECK(not FileSystemManager::IsExecutable(*file_path)); - auto exe_path = cas.BlobPath(test_blob->digest, true); + auto exe_path = cas.BlobPath(*test_blob, true); REQUIRE(exe_path); CHECK(FileSystemManager::IsFile(*exe_path)); CHECK(FileSystemManager::IsExecutable(*exe_path)); @@ -134,27 +134,27 @@ TEST_CASE("LocalCAS: Add blob to storage from executable file", "[storage]") { std::filesystem::path exec_file{ "test/buildtool/storage/data/executable_file"}; - auto test_blob = - CreateBlobFromPath(exec_file, storage_config.Get().hash_function); + auto test_blob = ArtifactDigestFactory::HashFileAs( + storage_config.Get().hash_function, exec_file); REQUIRE(test_blob); // check blob not in storage - CHECK(not cas.BlobPath(test_blob->digest, true)); - CHECK(not cas.BlobPath(test_blob->digest, false)); + CHECK(not cas.BlobPath(*test_blob, true)); + CHECK(not cas.BlobPath(*test_blob, false)); // ensure previous calls did not accidentially create the blob - CHECK(not cas.BlobPath(test_blob->digest, true)); - CHECK(not cas.BlobPath(test_blob->digest, false)); + CHECK(not cas.BlobPath(*test_blob, true)); + CHECK(not cas.BlobPath(*test_blob, false)); SECTION("Add non-executable blob to storage") { CHECK(cas.StoreBlob(exec_file, false)); - auto file_path = cas.BlobPath(test_blob->digest, false); + auto file_path = cas.BlobPath(*test_blob, false); 
REQUIRE(file_path); CHECK(FileSystemManager::IsFile(*file_path)); CHECK(not FileSystemManager::IsExecutable(*file_path)); - auto exe_path = cas.BlobPath(test_blob->digest, true); + auto exe_path = cas.BlobPath(*test_blob, true); REQUIRE(exe_path); CHECK(FileSystemManager::IsFile(*exe_path)); CHECK(FileSystemManager::IsExecutable(*exe_path)); @@ -164,12 +164,12 @@ TEST_CASE("LocalCAS: Add blob to storage from executable file", "[storage]") { SECTION("Add executable blob to storage") { CHECK(cas.StoreBlob(exec_file, true)); - auto file_path = cas.BlobPath(test_blob->digest, false); + auto file_path = cas.BlobPath(*test_blob, false); REQUIRE(file_path); CHECK(FileSystemManager::IsFile(*file_path)); CHECK(not FileSystemManager::IsExecutable(*file_path)); - auto exe_path = cas.BlobPath(test_blob->digest, true); + auto exe_path = cas.BlobPath(*test_blob, true); REQUIRE(exe_path); CHECK(FileSystemManager::IsFile(*exe_path)); CHECK(FileSystemManager::IsExecutable(*exe_path)); diff --git a/test/buildtool/system/TARGETS b/test/buildtool/system/TARGETS index d04bb71fa..1f951b96b 100644 --- a/test/buildtool/system/TARGETS +++ b/test/buildtool/system/TARGETS @@ -4,9 +4,9 @@ , "srcs": ["system_command.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/system", "system_command"] + , ["", "catch-main"] ] , "stage": ["test", "buildtool", "system"] } diff --git a/test/end-to-end/TARGETS b/test/end-to-end/TARGETS index a1b50e468..81ac180ea 100644 --- a/test/end-to-end/TARGETS +++ b/test/end-to-end/TARGETS @@ -10,28 +10,20 @@ ["@", "src", "", "bin/just-deduplicate-repos.py"] } } -, "TESTS": +, "remote tests (unconfigured)": { "type": ["@", "rules", "test", "suite"] - , "arguments_config": ["TEST_BOOTSTRAP_JUST_MR"] - , "stage": ["end-to-end"] + , "arguments_config": ["TEST_COMPATIBLE_REMOTE", "TEST_BOOTSTRAP_JUST_MR"] + , "stage": + [ { "type": "if" + , 
"cond": {"type": "var", "name": "TEST_COMPATIBLE_REMOTE"} + , "then": "compatible" + , "else": "native" + } + ] , "deps": { "type": "++" , "$1": - [ [ ["./", "actions", "TESTS"] - , ["./", "cli", "TESTS"] - , ["./", "generated-binary", "TESTS"] - , ["./", "target-tests", "TESTS"] - , ["./", "user-errors", "TESTS"] - , ["./", "built-in-rules", "TESTS"] - , ["./", "build-fails", "TESTS"] - , ["./", "remote-execution", "TESTS"] - , ["./", "target-cache", "TESTS"] - , ["./", "just-mr", "TESTS"] - , ["./", "git-import", "TESTS"] - , ["./", "gc", "TESTS"] - , ["./", "execution-service", "TESTS"] - , ["./", "symlinks", "TESTS"] - ] + [ [["./", "remote-execution", "TESTS"], ["./", "just-mr", "TESTS"]] , { "type": "if" , "cond": {"type": "var", "name": "TEST_BOOTSTRAP_JUST_MR"} , "then": [] @@ -40,4 +32,43 @@ ] } } +, "remote tests, compatible": + { "type": "configure" + , "tainted": ["test"] + , "target": "remote tests (unconfigured)" + , "config": + {"type": "singleton_map", "key": "TEST_COMPATIBLE_REMOTE", "value": true} + } +, "remote tests, native": + { "type": "configure" + , "tainted": ["test"] + , "target": "remote tests (unconfigured)" + , "config": + {"type": "singleton_map", "key": "TEST_COMPATIBLE_REMOTE", "value": false} + } +, "remote tests": + { "type": ["@", "rules", "test", "suite"] + , "stage": ["using-remote"] + , "deps": ["remote tests, compatible", "remote tests, native"] + } +, "TESTS": + { "type": ["@", "rules", "test", "suite"] + , "arguments_config": [] + , "stage": ["end-to-end"] + , "deps": + [ "remote tests" + , ["./", "actions", "TESTS"] + , ["./", "build-fails", "TESTS"] + , ["./", "built-in-rules", "TESTS"] + , ["./", "cli", "TESTS"] + , ["./", "execution-service", "TESTS"] + , ["./", "gc", "TESTS"] + , ["./", "generated-binary", "TESTS"] + , ["./", "git-import", "TESTS"] + , ["./", "symlinks", "TESTS"] + , ["./", "target-cache", "TESTS"] + , ["./", "target-tests", "TESTS"] + , ["./", "user-errors", "TESTS"] + ] + } } diff --git 
a/test/end-to-end/built-in-rules/TARGETS b/test/end-to-end/built-in-rules/TARGETS index 8708a3794..c645ecf7e 100644 --- a/test/end-to-end/built-in-rules/TARGETS +++ b/test/end-to-end/built-in-rules/TARGETS @@ -39,12 +39,12 @@ { "type": ["@", "rules", "test", "suite"] , "stage": ["built-in-rules"] , "deps": - [ "generic_out_dirs" - , "generic_sh" + [ "export_counting" , "filegen_config" - , "tree" + , "generic_out_dirs" + , "generic_sh" , "symlink_config" - , "export_counting" + , "tree" ] } } diff --git a/test/end-to-end/cli/TARGETS b/test/end-to-end/cli/TARGETS index c449a2d4a..c9a20eef2 100644 --- a/test/end-to-end/cli/TARGETS +++ b/test/end-to-end/cli/TARGETS @@ -32,27 +32,27 @@ { "type": ["@", "rules", "shell/test", "script"] , "name": ["git-cas-P"] , "test": ["git-cas-p.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] } , "just-mr reporting": { "type": ["@", "rules", "shell/test", "script"] , "name": ["just-mr-reporting"] , "test": ["just-mr-reporting.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] , "keep": ["log/warning.txt"] } , "install --archive": { "type": ["@", "rules", "shell/test", "script"] , "name": ["install-archive"] , "test": ["install-archive.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] , "keep": ["src.tar", "reconstructed.tar", "fromstdout.tar"] } , "install archived repo": { "type": ["@", "rules", "shell/test", "script"] , "name": ["install-archived-repo"] , "test": ["install-archived-repo.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] , "keep": ["src.tar", "reconstructed.tar", "fromstdout.tar"] } , "conflict report": @@ -65,7 +65,7 @@ { "type": ["@", "rules", "shell/test", "script"] , 
"name": ["log-limit"] , "test": ["log-limit.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] } , "describe": { "type": ["@", "rules", "shell/test", "script"] @@ -77,7 +77,7 @@ { "type": ["@", "rules", "shell/test", "script"] , "name": ["output"] , "test": ["output.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] , "keep": ["log"] } , "TESTS": diff --git a/test/end-to-end/gc/TARGETS b/test/end-to-end/gc/TARGETS index 5d3cb38ba..5e4bf7226 100644 --- a/test/end-to-end/gc/TARGETS +++ b/test/end-to-end/gc/TARGETS @@ -8,25 +8,25 @@ { "type": ["@", "rules", "shell/test", "script"] , "name": ["compactification"] , "test": ["compactification.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] } , "export": { "type": ["@", "rules", "shell/test", "script"] , "name": ["export"] , "test": ["export.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] } , "tc-deps": { "type": ["@", "rules", "shell/test", "script"] , "name": ["tc-deps"] , "test": ["tc-deps.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] } , "reconstruct-executable": { "type": ["@", "rules", "shell/test", "script"] , "name": ["reconstruct-executable"] , "test": ["reconstruct-executable.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] } , "TESTS": { "type": ["@", "rules", "test", "suite"] @@ -35,8 +35,8 @@ [ "basic" , "compactification" , "export" - , "tc-deps" , "reconstruct-executable" + , "tc-deps" ] } } diff --git a/test/end-to-end/generated-binary/TARGETS b/test/end-to-end/generated-binary/TARGETS 
index ef876c373..a50e5ddc5 100644 --- a/test/end-to-end/generated-binary/TARGETS +++ b/test/end-to-end/generated-binary/TARGETS @@ -12,11 +12,11 @@ , "name": ["compiled"] , "test": ["compiled.sh"] , "keep": ["graph.json", "out/out.txt"] - , "deps": [["", "tool-under-test"], "compile rules"] + , "deps": ["compile rules", ["", "tool-under-test"]] } , "TESTS": { "type": ["@", "rules", "test", "suite"] , "stage": ["generated-binary"] - , "deps": ["shell", "compiled"] + , "deps": ["compiled", "shell"] } } diff --git a/test/end-to-end/git-import/TARGETS b/test/end-to-end/git-import/TARGETS index 6ffe15df5..afd79db2a 100644 --- a/test/end-to-end/git-import/TARGETS +++ b/test/end-to-end/git-import/TARGETS @@ -3,26 +3,20 @@ , "name": ["chained-import"] , "test": ["chained-import.sh"] , "deps": - [ ["end-to-end", "git-import-under-test"] - , ["", "mr-tool-under-test"] + [ ["", "mr-tool-under-test"] , ["", "tool-under-test"] + , ["end-to-end", "git-import-under-test"] ] } -, "check-action-equality": - { "type": "install" - , "tainted": ["test"] - , "files": {"bin/actions-graph-equal": "check-action-equality.py"} - } , "deduplicate": { "type": ["@", "rules", "shell/test", "script"] , "name": ["deduplicate"] , "test": ["deduplicate.sh"] , "deps": - [ ["end-to-end", "git-import-under-test"] - , ["end-to-end", "deduplicate-tool-under-test"] - , ["", "mr-tool-under-test"] + [ ["", "mr-tool-under-test"] , ["", "tool-under-test"] - , "check-action-equality" + , ["end-to-end", "deduplicate-tool-under-test"] + , ["end-to-end", "git-import-under-test"] ] , "keep": ["repos-full.json", "actions-full.json", "repos.json", "actions.json"] @@ -32,9 +26,9 @@ , "name": ["annotations"] , "test": ["annotations.sh"] , "deps": - [ ["end-to-end", "git-import-under-test"] - , ["", "mr-tool-under-test"] + [ ["", "mr-tool-under-test"] , ["", "tool-under-test"] + , ["end-to-end", "git-import-under-test"] ] , "keep": ["repos.json"] } @@ -43,8 +37,8 @@ , "name": ["absent"] , "test": ["absent.sh"] , 
"deps": - [ ["end-to-end", "git-import-under-test"] - , ["end-to-end", "deduplicate-tool-under-test"] + [ ["end-to-end", "deduplicate-tool-under-test"] + , ["end-to-end", "git-import-under-test"] ] , "keep": ["repos.json", "deduplicated.json"] } diff --git a/test/end-to-end/git-import/check-action-equality.py b/test/end-to-end/git-import/check-action-equality.py deleted file mode 100755 index ab82798e2..000000000 --- a/test/end-to-end/git-import/check-action-equality.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Huawei Cloud Computing Technology Co., Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import sys - -from typing import Any - -Json = Any - -def normalize(a: Json): - for n in a["actions"].keys(): - del a["actions"][n]["origins"] - -if __name__ == "__main__": - with open(sys.argv[1]) as f: - a: Json = json.load(f) - with open(sys.argv[2]) as f: - b: Json = json.load(f) - normalize(a) - normalize(b) - if a != b: - print("The action graphs in %s and %s differ!" 
- % (sys.argv[1], sys.argv[2])) - sys.exit(1) diff --git a/test/end-to-end/git-import/deduplicate.sh b/test/end-to-end/git-import/deduplicate.sh index dce77fbec..ec7e614c3 100755 --- a/test/end-to-end/git-import/deduplicate.sh +++ b/test/end-to-end/git-import/deduplicate.sh @@ -20,7 +20,6 @@ readonly DEDUPLICATE="${PWD}/bin/deduplicate-tool-under-test" readonly GIT_IMPORT="${PWD}/bin/git-import-under-test" readonly JUST="${PWD}/bin/tool-under-test" readonly JUST_MR="${PWD}/bin/mr-tool-under-test" -readonly ACTIONS_EQUAL="${PWD}/bin/actions-graph-equal" readonly LBR="${TEST_TMPDIR}/local-build-root" readonly OUT="${TEST_TMPDIR}/build-output" readonly REPO_DIRS="${TEST_TMPDIR}/repos" @@ -102,7 +101,7 @@ cat repos-full.json echo "${JUST_MR}" -C repos-full.json --norc --just "${JUST}" \ --local-build-root "${LBR}" analyse \ - --dump-graph actions-full.json 2>&1 + --dump-plain-graph actions-full.json 2>&1 echo cat repos-full.json | "${DEDUPLICATE}" > repos.json cat repos.json @@ -110,11 +109,11 @@ echo "${JUST_MR}" -C repos.json --norc --just "${JUST}" \ --local-build-root "${LBR}" analyse \ - --dump-graph actions.json 2>&1 + --dump-plain-graph actions.json 2>&1 # Verify that we reduced the number of repositories, but did # not change the action graph (except for the origins of the actions). 
[ $(jq -aM '.repositories | length' repos.json) -lt $(jq -aM '.repositories | length' repos-full.json) ] -"${ACTIONS_EQUAL}" actions-full.json actions.json +cmp actions-full.json actions.json echo "OK" diff --git a/test/end-to-end/just-mr/TARGETS b/test/end-to-end/just-mr/TARGETS index 15959fb7d..636e5383e 100644 --- a/test/end-to-end/just-mr/TARGETS +++ b/test/end-to-end/just-mr/TARGETS @@ -38,8 +38,8 @@ , "test": ["just-mr.test.sh"] , "deps": [ "create_test_archives" - , ["utils", "test_utils_install"] , ["", "mr-tool-under-test"] + , ["utils", "test_utils_install"] ] } , "just_mr_mirrors": @@ -48,9 +48,9 @@ , "test": ["just-mr-mirrors.test.sh"] , "deps": [ "create_test_archives" - , ["utils", "test_utils_install"] - , ["utils", "null server"] , ["", "mr-tool-under-test"] + , ["utils", "null server"] + , ["utils", "test_utils_install"] ] } , "git-tree-verbosity": @@ -122,9 +122,9 @@ , "name": ["fetch-absent-archives"] , "test": ["fetch-absent-archives.sh"] , "deps": - [ ["", "mr-tool-under-test"] + [ "fetch-absent (data)" + , ["", "mr-tool-under-test"] , ["", "tool-under-test"] - , "fetch-absent (data)" ] , "repos": ["fetch-absent (data)"] } @@ -133,9 +133,9 @@ , "name": ["fetch-absent-archives-symlinks"] , "test": ["fetch-absent-archives-symlinks.sh"] , "deps": - [ ["", "mr-tool-under-test"] + [ "fetch-absent-with-symlinks (data)" + , ["", "mr-tool-under-test"] , ["", "tool-under-test"] - , "fetch-absent-with-symlinks (data)" ] , "repos": ["fetch-absent-with-symlinks (data)"] } @@ -144,9 +144,9 @@ , "name": ["fetch-absent-distdir-archive"] , "test": ["fetch-absent-distdir-archive.sh"] , "deps": - [ ["", "mr-tool-under-test"] + [ "fetch-absent (data)" + , ["", "mr-tool-under-test"] , ["", "tool-under-test"] - , "fetch-absent (data)" ] , "repos": ["fetch-absent (data)"] } @@ -191,9 +191,9 @@ , "name": ["reporting-verbosity"] , "test": ["reporting-verbosity.sh"] , "deps": - [ ["", "tool-under-test"] + [ "fetch-absent (data)" , ["", "mr-tool-under-test"] - , 
"fetch-absent (data)" + , ["", "tool-under-test"] ] } , "stay-local": @@ -201,9 +201,9 @@ , "name": ["stay-local"] , "test": ["stay-local.sh"] , "deps": - [ ["", "tool-under-test"] + [ "fetch-absent (data)" , ["", "mr-tool-under-test"] - , "fetch-absent (data)" + , ["", "tool-under-test"] , ["utils", "null server"] ] } @@ -243,18 +243,15 @@ , { "type": "if" , "cond": {"type": "var", "name": "TEST_COMPATIBLE_REMOTE"} , "then": [] - , "else": - [ "fetch-remote" - , "fetch-remote-git-tree" - , "fetch-absent" - , "fetch-absent-git-tree" - , "absent-config" - , "fetch-absent-archives" - , "fetch-absent-archives-symlinks" - , "fetch-absent-distdir-archive" - , "stay-local" - ] + , "else": ["fetch-remote", "fetch-remote-git-tree", "stay-local"] } + , [ "fetch-absent" + , "fetch-absent-git-tree" + , "absent-config" + , "fetch-absent-archives" + , "fetch-absent-archives-symlinks" + , "fetch-absent-distdir-archive" + ] ] } } diff --git a/test/end-to-end/just-mr/absent-config.sh b/test/end-to-end/just-mr/absent-config.sh index 236325c80..9bfd41a68 100644 --- a/test/end-to-end/just-mr/absent-config.sh +++ b/test/end-to-end/just-mr/absent-config.sh @@ -26,6 +26,11 @@ readonly RCFILE="${TEST_TMPDIR}/mrrc.json" readonly OUT="${TEST_TMPDIR}/out" readonly LOCAL_REPO="${TEST_TMPDIR}/local-repository" +COMPAT="" +if [ "${COMPATIBLE:-}" = "YES" ]; then + COMPAT="--compatible" +fi + mkdir -p "${LOCAL_REPO}" cd "${LOCAL_REPO}" mkdir src @@ -116,12 +121,12 @@ echo CONF=$("${JUST_MR}" --local-build-root "${LBR}" \ --rc "${RCFILE}" \ --remote-serve-address ${SERVE} \ - -r ${REMOTE_EXECUTION_ADDRESS} \ + -r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT} \ --fetch-absent setup) cat $CONF echo "${JUST}" install --local-build-root "${LBR}" -C "${CONF}" \ - -r "${REMOTE_EXECUTION_ADDRESS}" -o "${OUT}" 2>&1 + -r "${REMOTE_EXECUTION_ADDRESS}" ${COMPAT} -o "${OUT}" 2>&1 grep 42 "${OUT}/out.txt" grep eg "${OUT}/out.txt" diff --git a/test/end-to-end/just-mr/fetch-absent-archives-symlinks.sh 
b/test/end-to-end/just-mr/fetch-absent-archives-symlinks.sh index 48effd5ca..182aa35ff 100644 --- a/test/end-to-end/just-mr/fetch-absent-archives-symlinks.sh +++ b/test/end-to-end/just-mr/fetch-absent-archives-symlinks.sh @@ -25,6 +25,11 @@ readonly OUT="${TEST_TMPDIR}/out" readonly OUT2="${TEST_TMPDIR}/out2" readonly OUT3="${TEST_TMPDIR}/out3" +COMPAT="" +if [ "${COMPATIBLE:-}" = "YES" ]; then + COMPAT="--compatible" +fi + ARCHIVE_CONTENT=$(git hash-object src/data.tar) echo "Archive has content $ARCHIVE_CONTENT" @@ -70,13 +75,13 @@ echo CONF=$("${JUST_MR}" --norc --local-build-root "${LBR}" \ -L '["env", "PATH='"${PATH}"'"]' \ --remote-serve-address ${SERVE} \ - -r ${REMOTE_EXECUTION_ADDRESS} \ + -r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT} \ --fetch-absent setup) cat $CONF echo "${JUST}" install --local-build-root "${LBR}" -C "${CONF}" \ -L '["env", "PATH='"${PATH}"'"]' \ - -r "${REMOTE_EXECUTION_ADDRESS}" -o "${OUT}" 2>&1 + -r "${REMOTE_EXECUTION_ADDRESS}" ${COMPAT} -o "${OUT}" 2>&1 grep x "${OUT}/out.txt" # As the last call of just-mr had --fetch-absent, all relevent information @@ -112,7 +117,7 @@ echo "${JUST_MR}" --norc --local-build-root "${LBR}" \ -L '["env", "PATH='"${PATH}"'"]' \ --remote-serve-address ${SERVE} \ - -r ${REMOTE_EXECUTION_ADDRESS} \ + -r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT} \ --just "${JUST}" \ --fetch-absent install -o "${OUT3}" 2>&1 grep xx "${OUT3}/out.txt" diff --git a/test/end-to-end/just-mr/fetch-absent-archives.sh b/test/end-to-end/just-mr/fetch-absent-archives.sh index a85c95ee8..4f4f67048 100644 --- a/test/end-to-end/just-mr/fetch-absent-archives.sh +++ b/test/end-to-end/just-mr/fetch-absent-archives.sh @@ -29,6 +29,11 @@ readonly OUT3="${TEST_TMPDIR}/out3" readonly OUT_NON_ABSENT="${TEST_TMPDIR}/out4" readonly OUT_DISTDIR="${TEST_TMPDIR}/out4" +COMPAT="" +if [ "${COMPATIBLE:-}" = "YES" ]; then + COMPAT="--compatible" +fi + ARCHIVE_CONTENT=$(git hash-object src/data.tar) echo "Archive has content $ARCHIVE_CONTENT" @@ -68,13 +73,13 
@@ echo CONF=$("${JUST_MR}" --norc --local-build-root "${LBR}" \ -L '["env", "PATH='"${PATH}"'"]' \ --remote-serve-address ${SERVE} \ - -r ${REMOTE_EXECUTION_ADDRESS} \ + -r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT} \ --fetch-absent setup) cat $CONF echo "${JUST}" install --local-build-root "${LBR}" -C "${CONF}" \ -L '["env", "PATH='"${PATH}"'"]' \ - -r "${REMOTE_EXECUTION_ADDRESS}" -o "${OUT}" 2>&1 + -r "${REMOTE_EXECUTION_ADDRESS}" ${COMPAT} -o "${OUT}" 2>&1 grep 42 "${OUT}/out.txt" # As the last call of just-mr had --fetch-absent, all relevent information @@ -114,7 +119,7 @@ EOF "${JUST_MR}" --norc --local-build-root "${LBR}" \ -L '["env", "PATH='"${PATH}"'"]' \ --remote-serve-address ${SERVE} \ - -r ${REMOTE_EXECUTION_ADDRESS} \ + -r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT} \ --just "${JUST}" \ --fetch-absent install -o "${OUT3}" 2>&1 grep 42 "${OUT3}/out.txt" @@ -153,7 +158,7 @@ echo "${JUST_MR}" --norc --local-build-root "${LBR_NON_ABSENT}" \ -L '["env", "PATH='"${PATH}"'"]' \ --remote-serve-address ${SERVE} \ - -r ${REMOTE_EXECUTION_ADDRESS} \ + -r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT} \ --just "${JUST}" \ install -o "${OUT_NON_ABSENT}" 2>&1 grep 42 "${OUT_NON_ABSENT}/out.txt" @@ -164,7 +169,7 @@ echo mkdir -p "${OUT_DISTDIR}" "${JUST_MR}" --norc --local-build-root "${LBR_FOR_FETCH}" \ --remote-serve-address ${SERVE} \ - -r ${REMOTE_EXECUTION_ADDRESS} \ + -r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT} \ fetch -o "${OUT_DISTDIR}" 2>&1 FETCHED_CONTENT=$(git hash-object "${OUT_DISTDIR}"/data.tar) echo diff --git a/test/end-to-end/just-mr/fetch-absent-distdir-archive.sh b/test/end-to-end/just-mr/fetch-absent-distdir-archive.sh index ade94031b..d7cf70eff 100644 --- a/test/end-to-end/just-mr/fetch-absent-distdir-archive.sh +++ b/test/end-to-end/just-mr/fetch-absent-distdir-archive.sh @@ -26,6 +26,11 @@ readonly OUT="${TEST_TMPDIR}/out" readonly OUT2="${TEST_TMPDIR}/out2" readonly OUT_NON_ABSENT="${TEST_TMPDIR}/out3" +COMPAT="" +if [ "${COMPATIBLE:-}" = "YES" ]; then + 
COMPAT="--compatible" +fi + ARCHIVE_CONTENT=$(git hash-object src/data.tar) echo "Archive has content $ARCHIVE_CONTENT" @@ -74,13 +79,13 @@ echo CONF=$("${JUST_MR}" --norc --local-build-root "${LBR}" \ -L '["env", "PATH='"${PATH}"'"]' \ --remote-serve-address ${SERVE} \ - -r ${REMOTE_EXECUTION_ADDRESS} \ + -r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT} \ --fetch-absent setup) cat $CONF echo "${JUST}" install --local-build-root "${LBR}" -C "${CONF}" \ -L '["env", "PATH='"${PATH}"'"]' \ - -r "${REMOTE_EXECUTION_ADDRESS}" -o "${OUT}" 2>&1 + -r "${REMOTE_EXECUTION_ADDRESS}" ${COMPAT} -o "${OUT}" 2>&1 grep 42 "${OUT}/out.txt" # As the last call of just-mr had --fetch-absent, all relevent information @@ -121,7 +126,7 @@ echo "${JUST_MR}" --norc --local-build-root "${LBR_NON_ABSENT}" \ -L '["env", "PATH='"${PATH}"'"]' \ --remote-serve-address ${SERVE} \ - -r ${REMOTE_EXECUTION_ADDRESS} \ + -r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT} \ --just "${JUST}" \ install -o "${OUT_NON_ABSENT}" 2>&1 grep 42 "${OUT_NON_ABSENT}/out.txt" diff --git a/test/end-to-end/just-mr/fetch-absent-git-tree.sh b/test/end-to-end/just-mr/fetch-absent-git-tree.sh index fb5d516a8..b0669b15c 100644 --- a/test/end-to-end/just-mr/fetch-absent-git-tree.sh +++ b/test/end-to-end/just-mr/fetch-absent-git-tree.sh @@ -26,6 +26,11 @@ readonly OUT="${TEST_TMPDIR}/out" readonly OUT2="${TEST_TMPDIR}/out2" readonly OUT_NON_ABSENT="${TEST_TMPDIR}/out4" +COMPAT="" +if [ "${COMPATIBLE:-}" = "YES" ]; then + COMPAT="--compatible" +fi + mkdir work cd work touch ROOT @@ -61,12 +66,12 @@ echo CONF=$("${JUST_MR}" --norc --local-build-root "${LBR}" \ -L '["env", "PATH='"${PATH}"'"]' \ --remote-serve-address ${SERVE} \ - -r ${REMOTE_EXECUTION_ADDRESS} \ + -r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT} \ --fetch-absent setup) cat $CONF echo "${JUST}" install --local-build-root "${LBR}" -C "${CONF}" \ - -r "${REMOTE_EXECUTION_ADDRESS}" -o "${OUT}" 2>&1 + -r "${REMOTE_EXECUTION_ADDRESS}" ${COMPAT} -o "${OUT}" 2>&1 grep 42 "${OUT}/out.txt" @@ 
-103,7 +108,7 @@ echo "${JUST_MR}" --norc --local-build-root "${LBR_NON_ABSENT}" \ -L '["env", "PATH='"${PATH}"'"]' \ --remote-serve-address ${SERVE} \ - -r ${REMOTE_EXECUTION_ADDRESS} \ + -r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT} \ --just "${JUST}" \ install -o "${OUT_NON_ABSENT}" 2>&1 grep 42 "${OUT_NON_ABSENT}/out.txt" diff --git a/test/end-to-end/just-mr/fetch-absent.sh b/test/end-to-end/just-mr/fetch-absent.sh index 302679984..41cde7a88 100644 --- a/test/end-to-end/just-mr/fetch-absent.sh +++ b/test/end-to-end/just-mr/fetch-absent.sh @@ -27,6 +27,11 @@ readonly OUT2="${TEST_TMPDIR}/out2" readonly OUT3="${TEST_TMPDIR}/out3" readonly OUT_NON_ABSENT="${TEST_TMPDIR}/out4" +COMPAT="" +if [ "${COMPATIBLE:-}" = "YES" ]; then + COMPAT="--compatible" +fi + mkdir work cd work touch ROOT @@ -64,13 +69,13 @@ echo CONF=$("${JUST_MR}" --norc --local-build-root "${LBR}" \ -L '["env", "PATH='"${PATH}"'"]' \ --remote-serve-address ${SERVE} \ - -r ${REMOTE_EXECUTION_ADDRESS} \ + -r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT} \ --fetch-absent setup) cat $CONF echo "${JUST}" install --local-build-root "${LBR}" -C "${CONF}" \ -L '["env", "PATH='"${PATH}"'"]' \ - -r "${REMOTE_EXECUTION_ADDRESS}" -o "${OUT}" 2>&1 + -r "${REMOTE_EXECUTION_ADDRESS}" ${COMPAT} -o "${OUT}" 2>&1 grep 42 "${OUT}/out.txt" # As the last call of just-mr had --fetch-absent, all relevent information @@ -110,7 +115,7 @@ cat > targets/TARGETS <<'EOF' EOF "${JUST_MR}" --norc --local-build-root "${LBR}" \ --remote-serve-address ${SERVE} \ - -r ${REMOTE_EXECUTION_ADDRESS} \ + -r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT} \ -L '["env", "PATH='"${PATH}"'"]' \ --just "${JUST}" \ --fetch-absent install -o "${OUT3}" 2>&1 @@ -150,7 +155,7 @@ cat repos.json echo "${JUST_MR}" --norc --local-build-root "${LBR_NON_ABSENT}" \ --remote-serve-address ${SERVE} \ - -r ${REMOTE_EXECUTION_ADDRESS} \ + -r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT} \ -L '["env", "PATH='"${PATH}"'"]' \ --just "${JUST}" \ install -o "${OUT_NON_ABSENT}" 2>&1 diff --git 
a/test/end-to-end/just-mr/stay-local.sh b/test/end-to-end/just-mr/stay-local.sh index 1e4117808..531630dba 100644 --- a/test/end-to-end/just-mr/stay-local.sh +++ b/test/end-to-end/just-mr/stay-local.sh @@ -29,11 +29,6 @@ readonly EMPTY="${TEST_TMPDIR}/empty-directory" readonly SERVER="${PWD}/utils/null-server" readonly SERVER_STATE="${TEST_TMPDIR}/server" -COMPAT="" -if [ "${COMPATIBLE:-}" = "YES" ]; then - COMPAT="--compatible" -fi - ARCHIVE_CONTENT=$(git hash-object src/data.tar) echo "Archive has content $ARCHIVE_CONTENT" @@ -84,7 +79,7 @@ cat repos.json echo mkdir -p "${LOG}" "${JUST_MR}" --norc --local-build-root "${LBR}" \ - -r "127.0.0.1:${port}" ${COMPAT} \ + -r "127.0.0.1:${port}" \ --log-limit 5 -f "${LOG}/log" \ --distdir ../src \ setup > conf.json @@ -96,14 +91,14 @@ echo cat $(cat conf.json) echo # As a distdir (local directory!) was provided with all needed files, -# no attempty should be made to contact the remote-execution endpoint +# no attempt should be made to contact the remote-execution endpoint echo [ -f "${SERVER_STATE}/access" ] && cat "${SERVER_STATE}/access" && exit 1 || : # The obtained configuraiton should be suitable for building, also remotely "${JUST}" install -C "$(cat conf.json)" -o "${OUT}" \ --local-build-root "${LBR}" \ - -r "${REMOTE_EXECUTION_ADDRESS}" $COMPAT 2>&1 + -r "${REMOTE_EXECUTION_ADDRESS}" 2>&1 echo cat "${OUT}/archive_id" [ $(cat "${OUT}/archive_id") = "${ARCHIVE_CONTENT}" ] @@ -114,7 +109,7 @@ echo # distdir is empty mkdir -p "${EMPTY}" "${JUST_MR}" --norc --just "${JUST}" --local-build-root "${LBR2}" \ - -r "${REMOTE_EXECUTION_ADDRESS}" $COMPAT \ + -r "${REMOTE_EXECUTION_ADDRESS}" \ --distdir ${EMPTY} \ install -o "${OUT2}" 2>&1 cat "${OUT2}/archive_id" diff --git a/test/end-to-end/remote-execution/TARGETS b/test/end-to-end/remote-execution/TARGETS index 8ab1259ed..fbe3d4670 100644 --- a/test/end-to-end/remote-execution/TARGETS +++ b/test/end-to-end/remote-execution/TARGETS @@ -2,7 +2,7 @@ { "type": ["end-to-end", 
"with remote"] , "name": ["native-protocol"] , "test": ["native-protocol.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] } , "large-blobs": { "type": ["end-to-end", "with remote"] @@ -70,7 +70,7 @@ { "type": ["end-to-end", "with remote"] , "name": ["add-to-cas"] , "test": ["add-to-cas.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] } , "TESTS": { "type": ["@", "rules", "test", "suite"] diff --git a/test/end-to-end/serve-service/TARGETS b/test/end-to-end/serve-service/TARGETS index 23c0525a7..f91cbd8ec 100644 --- a/test/end-to-end/serve-service/TARGETS +++ b/test/end-to-end/serve-service/TARGETS @@ -2,7 +2,7 @@ { "type": ["end-to-end", "with serve"] , "name": ["serve-target-remote-build"] , "test": ["serve_target_remote_build.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] , "repos": [ "serve-target-remote-build (repo)" , "serve-target-remote-build (rule)" @@ -14,12 +14,12 @@ , "name": ["serve-target-failed-build"] , "test": ["serve_target_failed_build.sh"] , "deps": - [ ["", "tool-under-test"] - , ["", "mr-tool-under-test"] - , "serve-tree (archive)" - , "data/targets/TARGETS" - , "data/rules/RULES" + [ "data/rules/RULES" , "data/rules/RULES.dummy" + , "data/targets/TARGETS" + , "serve-tree (archive)" + , ["", "mr-tool-under-test"] + , ["", "tool-under-test"] ] , "repos": [ "serve-target-remote-build (repo)" @@ -88,10 +88,10 @@ , "name": ["serve-tree"] , "test": ["serve-tree.sh"] , "deps": - [ ["", "mr-tool-under-test"] - , ["", "tool-under-test"] + [ "data/targets/TARGETS.tree" , "serve-tree (archive)" - , "data/targets/TARGETS.tree" + , ["", "mr-tool-under-test"] + , ["", "tool-under-test"] ] , "repos": ["serve-tree (archive)"] } @@ -104,7 +104,7 @@ { "type": ["end-to-end", "with serve"] , "name": 
["serve-target-remote-build-dispatch"] , "test": ["serve_target_remote_build_dispatch.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] , "repos": ["serve-target-remote-build-dispatch (data)"] } , "serve-query-target-cache-value": @@ -124,9 +124,9 @@ , "name": ["describe"] , "test": ["describe.sh"] , "deps": - [ ["", "mr-tool-under-test"] + [ "describe (locally installed)" + , ["", "mr-tool-under-test"] , ["", "tool-under-test"] - , "describe (locally installed)" ] , "repos": ["describe (data)"] , "keep": ["out/describe.orig", "out/describe"] @@ -140,9 +140,9 @@ , "name": ["failure-report"] , "test": ["failure-report.sh"] , "deps": - [ ["", "mr-tool-under-test"] + [ "describe (locally installed)" + , ["", "mr-tool-under-test"] , ["", "tool-under-test"] - , "describe (locally installed)" ] , "repos": ["failure-report (data)"] , "keep": ["out/log", "out/serve.log", "out/failure.log"] @@ -160,7 +160,7 @@ { "type": ["end-to-end", "with serve"] , "name": ["serve-many-targets"] , "test": ["serve_many_targets.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] , "repos": ["serve-many-targets (data)", "serve-many-targets (data in module)"] } @@ -168,12 +168,12 @@ { "type": ["end-to-end", "with serve"] , "name": ["deduplication-of-serve-requests"] , "test": ["deduplication_of_serve_requests.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] , "repos": ["serve-many-targets (data)"] } , "TESTS (unconfigured)": { "type": ["@", "rules", "test", "suite"] - , "arguments_config": ["TEST_COMPATIBLE_REMOTE", "TEST_STANDALONE_SERVE"] + , "arguments_config": ["TEST_STANDALONE_SERVE"] , "stage": [ { "type": "if" , "cond": {"type": "var", "name": "TEST_STANDALONE_SERVE"} @@ -196,12 +196,8 @@ , "describe" , 
"deduplication-of-serve-requests" , "failure-report" + , "serve-tree" ] - , { "type": "if" - , "cond": {"type": "var", "name": "TEST_COMPATIBLE_REMOTE"} - , "then": [] - , "else": ["serve-tree"] - } , [ ["./", "serve-archive-root", "TESTS"] , ["./", "serve-git-root", "TESTS"] , ["./", "serve-file-root", "TESTS"] diff --git a/test/end-to-end/serve-service/data/targets/TARGETS.dispatch b/test/end-to-end/serve-service/data/targets/TARGETS.dispatch index 24180bd74..915684344 100644 --- a/test/end-to-end/serve-service/data/targets/TARGETS.dispatch +++ b/test/end-to-end/serve-service/data/targets/TARGETS.dispatch @@ -19,12 +19,12 @@ , "outs": ["out.txt"] , "execution properties": {"type": "singleton_map", "key": "server", "value": "special"} - , "deps": ["payload", "drop"] + , "deps": ["drop", "payload"] } , "internal": { "type": "generic" , "cmds": ["cat out.txt post.txt > final.txt"] - , "deps": ["special-dispatch", "post"] + , "deps": ["post", "special-dispatch"] , "outs": ["final.txt"] } , "": diff --git a/test/end-to-end/serve-service/serve-archive-root/TARGETS b/test/end-to-end/serve-service/serve-archive-root/TARGETS index af54b68b9..2da72a666 100644 --- a/test/end-to-end/serve-service/serve-archive-root/TARGETS +++ b/test/end-to-end/serve-service/serve-archive-root/TARGETS @@ -60,9 +60,9 @@ , "name": ["resolved-present"] , "test": ["resolved-present.sh"] , "deps": - [ ["", "mr-tool-under-test"] + [ "serve-tree-syms (archive)" + , ["", "mr-tool-under-test"] , ["", "tool-under-test"] - , "serve-tree-syms (archive)" ] , "repos": ["serve-tree-syms (archive)"] } @@ -71,9 +71,9 @@ , "name": ["resolved-absent"] , "test": ["resolved-absent.sh"] , "deps": - [ ["", "mr-tool-under-test"] + [ "serve-tree-syms (archive)" + , ["", "mr-tool-under-test"] , ["", "tool-under-test"] - , "serve-tree-syms (archive)" ] , "repos": ["serve-tree-syms (archive)"] } @@ -82,9 +82,9 @@ , "name": ["resolved-absent-known"] , "test": ["resolved-absent-known.sh"] , "deps": - [ ["", 
"mr-tool-under-test"] + [ "serve-tree-syms (archive)" + , ["", "mr-tool-under-test"] , ["", "tool-under-test"] - , "serve-tree-syms (archive)" ] , "repos": ["serve-tree-syms (archive)"] } @@ -93,23 +93,23 @@ , "name": ["resolved-absent-known-upload"] , "test": ["resolved-absent-known-upload.sh"] , "deps": - [ ["", "mr-tool-under-test"] + [ "serve-tree-syms (archive)" + , ["", "mr-tool-under-test"] , ["", "tool-under-test"] - , "serve-tree-syms (archive)" ] } , "TESTS": { "type": ["@", "rules", "test", "suite"] , "stage": ["serve-archive-root"] , "deps": - [ "unresolved-present" + [ "resolved-absent" + , "resolved-absent-known" + , "resolved-absent-known-upload" + , "resolved-present" , "unresolved-absent" , "unresolved-absent-known" , "unresolved-absent-known-upload" - , "resolved-present" - , "resolved-absent" - , "resolved-absent-known" - , "resolved-absent-known-upload" + , "unresolved-present" ] } } diff --git a/test/end-to-end/serve-service/serve-archive-root/resolved-absent-known-upload.sh b/test/end-to-end/serve-service/serve-archive-root/resolved-absent-known-upload.sh index 502aaecbf..5b8404ab0 100644 --- a/test/end-to-end/serve-service/serve-archive-root/resolved-absent-known-upload.sh +++ b/test/end-to-end/serve-service/serve-archive-root/resolved-absent-known-upload.sh @@ -18,7 +18,7 @@ # This test checks that an absent root can successfully be made in the presence # of the serve endpoint in the situation where we already have the file # association (i.e., we know the unresolved root tree) and the serve endpoint -# does not know the archive. The upload can only happen in native mode. +# does not know the archive. # # The test archive contains symlinks to be resolved, which tests also the # resolved tree file association. @@ -98,35 +98,17 @@ rm -rf "${DISTDIR}" # While keeping the file association, ask serve endpoint to provide the root as # absent. 
For a serve endpoint that does not have the archive blob available, # this will require uploading the locally-known root tree to remote CAS, from -# where the serve endpoint will pick it up. This can only happen in native mode. -if [ -z "${COMPAT}" ]; then - - ${JUST} gc --local-build-root ${LBR} 2>&1 - ${JUST} gc --local-build-root ${LBR} 2>&1 - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup absent) - cat "${CONF}" - echo - test $(jq -r '.repositories.absent.workspace_root[1]' "${CONF}") = "${TREE}" - -else - - echo --- - echo Checking expected failures - - ${JUST} gc --local-build-root ${LBR} 2>&1 - ${JUST} gc --local-build-root ${LBR} 2>&1 - - "${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup absent 2>&1 && exit 1 || : - echo Failed as expected -fi +# where the serve endpoint will pick it up. +${JUST} gc --local-build-root ${LBR} 2>&1 +${JUST} gc --local-build-root ${LBR} 2>&1 + +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + ${ENDPOINT_ARGS} setup absent) +cat "${CONF}" +echo +test $(jq -r '.repositories.absent.workspace_root[1]' "${CONF}") = "${TREE}" echo OK diff --git a/test/end-to-end/serve-service/serve-archive-root/resolved-absent-known.sh b/test/end-to-end/serve-service/serve-archive-root/resolved-absent-known.sh index b882bc2df..4d20deb48 100644 --- a/test/end-to-end/serve-service/serve-archive-root/resolved-absent-known.sh +++ b/test/end-to-end/serve-service/serve-archive-root/resolved-absent-known.sh @@ -96,8 +96,8 @@ TREE=$(jq -r '.repositories.present.workspace_root[1]' "${CONF}") rm -rf "${DISTDIR}" # While keeping the file association, ask serve endpoint to provide the root as -# absent. This serve endpoint known already the archive, so it should be able to -# set it up even if in compatible mode. 
+# absent. This serve endpoint knows already the archive, so it should be able to +# set it up. ${JUST} gc --local-build-root ${LBR} 2>&1 ${JUST} gc --local-build-root ${LBR} 2>&1 diff --git a/test/end-to-end/serve-service/serve-archive-root/resolved-absent.sh b/test/end-to-end/serve-service/serve-archive-root/resolved-absent.sh index 075683903..fb31b9594 100644 --- a/test/end-to-end/serve-service/serve-archive-root/resolved-absent.sh +++ b/test/end-to-end/serve-service/serve-archive-root/resolved-absent.sh @@ -106,32 +106,15 @@ cat "${CONF}" echo test $(jq -r '.repositories.absent.workspace_root[1]' "${CONF}") = "${TREE}" -# Check that serve can provide this tree as present in a clean build root. This -# can happen however only in native mode. -if [ -z "${COMPAT}" ]; then - - rm -rf "${LBR}" - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup present) - cat "${CONF}" - echo - test $(jq -r '.repositories.present.workspace_root[1]' "${CONF}") = "${TREE}" - -else - - echo --- - echo Checking expected failures - - rm -rf "${LBR}" - "${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup present 2>&1 && exit 1 || : - echo Failed as expected -fi +# Check that serve can provide this tree as present in a clean build root. 
+rm -rf "${LBR}" +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + ${ENDPOINT_ARGS} setup present) +cat "${CONF}" +echo +test $(jq -r '.repositories.present.workspace_root[1]' "${CONF}") = "${TREE}" echo OK diff --git a/test/end-to-end/serve-service/serve-archive-root/resolved-present.sh b/test/end-to-end/serve-service/serve-archive-root/resolved-present.sh index d2ecf13cc..bd9b17c9c 100644 --- a/test/end-to-end/serve-service/serve-archive-root/resolved-present.sh +++ b/test/end-to-end/serve-service/serve-archive-root/resolved-present.sh @@ -15,8 +15,9 @@ ### -# This test checks 3 of the options to make a present root for an archive, where: -# - archive is in local distfile; +# This test checks 3 of the options to make a present root for an archive, +# where: +# - archive is in a local distfile; # - there is already a file association to the unresolved root tree; # - we receive the archive content from serve endpoint via the remote CAS. # @@ -99,48 +100,31 @@ cat "${CONF}" echo test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" -# We now test if the serve endpoint can provide us the root. This can only -# happen in we're in native mode. 
-if [ -z "${COMPAT}" ]; then - - # In a clean build root, ask serve to set up the root for us, from scratch - rm -rf "${LBR}" - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup main) - cat "${CONF}" - echo - test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" - - # Double-check the file association was created and root remains available - # without the remote endpoints - ${JUST} gc --local-build-root ${LBR} 2>&1 - ${JUST} gc --local-build-root ${LBR} 2>&1 - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - setup main) - cat "${CONF}" - echo - test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" - -else - - echo --- - echo Checking expected failures - - rm -rf "${LBR}" - "${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup main 2>&1 && exit 1 || : - echo Failed as expected -fi +# We now test if the serve endpoint can provide us the root. 
+# In a clean build root, ask serve to set up the root for us, from scratch +rm -rf "${LBR}" + +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + ${ENDPOINT_ARGS} setup main) +cat "${CONF}" +echo +test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" + +# Double-check the file association was created and root remains available +# without the remote endpoints +${JUST} gc --local-build-root ${LBR} 2>&1 +${JUST} gc --local-build-root ${LBR} 2>&1 + +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + setup main) +cat "${CONF}" +echo +test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" echo OK diff --git a/test/end-to-end/serve-service/serve-archive-root/unresolved-absent-known-upload.sh b/test/end-to-end/serve-service/serve-archive-root/unresolved-absent-known-upload.sh index c27c8066d..f087b3d62 100644 --- a/test/end-to-end/serve-service/serve-archive-root/unresolved-absent-known-upload.sh +++ b/test/end-to-end/serve-service/serve-archive-root/unresolved-absent-known-upload.sh @@ -18,7 +18,7 @@ # This test checks that an absent root can successfully be made in the presence # of the serve endpoint in the situation where we already have the file # association (i.e., we know the unresolved root tree) and the serve endpoint -# does not know the archive. The upload can only happen in native mode. +# does not know the archive. # # The test archive does not contain symlinks. ## @@ -96,35 +96,17 @@ rm -rf "${DISTDIR}" # While keeping the file association, ask serve endpoint to provide the root as # absent. For a serve endpoint that does not have the archive blob available, # this will require uploading the locally-known root tree to remote CAS, from -# where the serve endpoint will pick it up. This can only happen in native mode. 
-if [ -z "${COMPAT}" ]; then - - ${JUST} gc --local-build-root ${LBR} 2>&1 - ${JUST} gc --local-build-root ${LBR} 2>&1 - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup absent) - cat "${CONF}" - echo - test $(jq -r '.repositories.absent.workspace_root[1]' "${CONF}") = "${TREE}" - -else - - echo --- - echo Checking expected failures - - ${JUST} gc --local-build-root ${LBR} 2>&1 - ${JUST} gc --local-build-root ${LBR} 2>&1 - - "${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup absent 2>&1 && exit 1 || : - echo Failed as expected -fi +# where the serve endpoint will pick it up. +${JUST} gc --local-build-root ${LBR} 2>&1 +${JUST} gc --local-build-root ${LBR} 2>&1 + +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + ${ENDPOINT_ARGS} setup absent) +cat "${CONF}" +echo +test $(jq -r '.repositories.absent.workspace_root[1]' "${CONF}") = "${TREE}" echo OK diff --git a/test/end-to-end/serve-service/serve-archive-root/unresolved-absent-known.sh b/test/end-to-end/serve-service/serve-archive-root/unresolved-absent-known.sh index 8be8aa0ff..3f1fd7e26 100644 --- a/test/end-to-end/serve-service/serve-archive-root/unresolved-absent-known.sh +++ b/test/end-to-end/serve-service/serve-archive-root/unresolved-absent-known.sh @@ -94,8 +94,8 @@ TREE=$(jq -r '.repositories.present.workspace_root[1]' "${CONF}") rm -rf "${DISTDIR}" # While keeping the file association, ask serve endpoint to provide the root as -# absent. This serve endpoint known already the archive, so it should be able to -# set it up even if in compatible mode. +# absent. This serve endpoint knows already the archive, so it should be able to +# set it up. 
${JUST} gc --local-build-root ${LBR} 2>&1 ${JUST} gc --local-build-root ${LBR} 2>&1 diff --git a/test/end-to-end/serve-service/serve-archive-root/unresolved-absent.sh b/test/end-to-end/serve-service/serve-archive-root/unresolved-absent.sh index e304c9f27..46ed6c37b 100644 --- a/test/end-to-end/serve-service/serve-archive-root/unresolved-absent.sh +++ b/test/end-to-end/serve-service/serve-archive-root/unresolved-absent.sh @@ -104,32 +104,15 @@ cat "${CONF}" echo test $(jq -r '.repositories.absent.workspace_root[1]' "${CONF}") = "${TREE}" -# Check that serve can provide this tree as present in a clean build root. This -# can happen however only in native mode. -if [ -z "${COMPAT}" ]; then - - rm -rf "${LBR}" - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup present) - cat "${CONF}" - echo - test $(jq -r '.repositories.present.workspace_root[1]' "${CONF}") = "${TREE}" - -else - - echo --- - echo Checking expected failures - - rm -rf "${LBR}" - "${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup present 2>&1 && exit 1 || : - echo Failed as expected -fi +# Check that serve can provide this tree as present in a clean build root. 
+rm -rf "${LBR}" +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + ${ENDPOINT_ARGS} setup present) +cat "${CONF}" +echo +test $(jq -r '.repositories.present.workspace_root[1]' "${CONF}") = "${TREE}" echo OK diff --git a/test/end-to-end/serve-service/serve-archive-root/unresolved-present.sh b/test/end-to-end/serve-service/serve-archive-root/unresolved-present.sh index e25a72bf4..ee1e1dc2a 100644 --- a/test/end-to-end/serve-service/serve-archive-root/unresolved-present.sh +++ b/test/end-to-end/serve-service/serve-archive-root/unresolved-present.sh @@ -15,8 +15,9 @@ ### -# This test checks 3 of the options to make a present root for an archive, where: -# - archive is in local distfile; +# This test checks 3 of the options to make a present root for an archive, +# where: +# - archive is in a local distfile; # - there is already a file association to the unresolved root tree; # - we receive the archive content from serve endpoint via the remote CAS. # @@ -97,48 +98,31 @@ cat "${CONF}" echo test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" -# We now test if the serve endpoint can provide us the root. This can only -# happen in we're in native mode. 
-if [ -z "${COMPAT}" ]; then - - # In a clean build root, ask serve to set up the root for us, from scratch - rm -rf "${LBR}" - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup main) - cat "${CONF}" - echo - test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" - - # Double-check the file association was created and root remains available - # without the remote endpoints - ${JUST} gc --local-build-root ${LBR} 2>&1 - ${JUST} gc --local-build-root ${LBR} 2>&1 - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - setup main) - cat "${CONF}" - echo - test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" - -else - - echo --- - echo Checking expected failures - - rm -rf "${LBR}" - "${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup main 2>&1 && exit 1 || : - echo Failed as expected -fi +# We now test if the serve endpoint can provide us the root. 
+# In a clean build root, ask serve to set up the root for us, from scratch +rm -rf "${LBR}" + +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + ${ENDPOINT_ARGS} setup main) +cat "${CONF}" +echo +test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" + +# Double-check the file association was created and root remains available +# without the remote endpoints +${JUST} gc --local-build-root ${LBR} 2>&1 +${JUST} gc --local-build-root ${LBR} 2>&1 + +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + setup main) +cat "${CONF}" +echo +test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" echo OK diff --git a/test/end-to-end/serve-service/serve-distdir-root/TARGETS b/test/end-to-end/serve-service/serve-distdir-root/TARGETS index 156806d89..d8b55ee37 100644 --- a/test/end-to-end/serve-service/serve-distdir-root/TARGETS +++ b/test/end-to-end/serve-service/serve-distdir-root/TARGETS @@ -16,9 +16,9 @@ , "name": ["present"] , "test": ["present.sh"] , "deps": - [ ["", "mr-tool-under-test"] + [ "serve-distdir (data)" + , ["", "mr-tool-under-test"] , ["", "tool-under-test"] - , "serve-distdir (data)" ] , "repos": ["serve-distdir (data)"] } @@ -27,9 +27,9 @@ , "name": ["absent"] , "test": ["absent.sh"] , "deps": - [ ["", "mr-tool-under-test"] + [ "serve-distdir (data)" + , ["", "mr-tool-under-test"] , ["", "tool-under-test"] - , "serve-distdir (data)" ] , "repos": ["serve-distdir (data)"] } @@ -38,9 +38,9 @@ , "name": ["upload"] , "test": ["upload.sh"] , "deps": - [ ["", "mr-tool-under-test"] + [ "serve-distdir (data)" + , ["", "mr-tool-under-test"] , ["", "tool-under-test"] - , "serve-distdir (data)" ] } , "foreign-file": @@ -57,6 +57,6 @@ , "TESTS": { "type": ["@", "rules", "test", "suite"] , "stage": ["serve-distdir-root"] - , "deps": ["present", "absent", "upload", "foreign-file"] + , "deps": ["absent", 
"foreign-file", "present", "upload"] } } diff --git a/test/end-to-end/serve-service/serve-distdir-root/absent.sh b/test/end-to-end/serve-service/serve-distdir-root/absent.sh index 241297c02..e44d10ea3 100644 --- a/test/end-to-end/serve-service/serve-distdir-root/absent.sh +++ b/test/end-to-end/serve-service/serve-distdir-root/absent.sh @@ -119,32 +119,15 @@ echo test $(jq -r '.repositories.absent.workspace_root[1]' "${CONF}") = "${TREE}" # Check that serve can provide now this tree as present in a clean build root. -# This can happen only in native mode. -if [ -z "${COMPAT}" ]; then - - rm -rf "${LBR}" - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup present) - cat "${CONF}" - echo - test $(jq -r '.repositories.present.workspace_root[1]' "${CONF}") = "${TREE}" - -else - - echo --- - echo Checking expected failures - - rm -rf "${LBR}" - "${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup present 2>&1 && exit 1 || : - echo Failed as expected -fi +rm -rf "${LBR}" + +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + ${ENDPOINT_ARGS} setup present) +cat "${CONF}" +echo +test $(jq -r '.repositories.present.workspace_root[1]' "${CONF}") = "${TREE}" echo OK diff --git a/test/end-to-end/serve-service/serve-distdir-root/foreign-file.sh b/test/end-to-end/serve-service/serve-distdir-root/foreign-file.sh index 18dff9e09..8696659bb 100644 --- a/test/end-to-end/serve-service/serve-distdir-root/foreign-file.sh +++ b/test/end-to-end/serve-service/serve-distdir-root/foreign-file.sh @@ -78,7 +78,7 @@ mkdir -p "${OUT}" grep 'HELLO WORLD' "${OUT}/out.txt" -# also verify that the repo config has the repository abent +# also verify that the repo config has the repository absent CONF=$("${JUST_MR}" --norc --local-build-root "${LBR}" ${ENDPOINT_ARGS} 
setup) echo diff --git a/test/end-to-end/serve-service/serve-distdir-root/present.sh b/test/end-to-end/serve-service/serve-distdir-root/present.sh index 8a2051823..b0bf77273 100644 --- a/test/end-to-end/serve-service/serve-distdir-root/present.sh +++ b/test/end-to-end/serve-service/serve-distdir-root/present.sh @@ -17,7 +17,7 @@ ### # This test checks 3 of the options to make a present root for a distidr # repository, where: -# - archives are in local distfile; +# - archives are in a local distfile; # - there is already a file association to the distdir root tree; # - we receive the distdir root from serve endpoint via the remote CAS. ## @@ -113,48 +113,31 @@ cat "${CONF}" echo test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" -# We now test if the serve endpoint can provide us the present root. This can -# only happen in we're in native mode. -if [ -z "${COMPAT}" ]; then - - # In a clean build root, ask serve to set up the root for us, from scratch - rm -rf "${LBR}" - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup main) - cat "${CONF}" - echo - test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" - - # Double-check the file association was created and root remains available - # without the remote endpoints - ${JUST} gc --local-build-root ${LBR} 2>&1 - ${JUST} gc --local-build-root ${LBR} 2>&1 - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - setup main) - cat "${CONF}" - echo - test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" - -else - - echo --- - echo Checking expected failures - - rm -rf "${LBR}" - "${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup main 2>&1 && exit 1 || : - echo Failed as expected -fi +# We now test if the serve endpoint can 
provide us the present root. +# In a clean build root, ask serve to set up the root for us, from scratch +rm -rf "${LBR}" + +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + ${ENDPOINT_ARGS} setup main) +cat "${CONF}" +echo +test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" + +# Double-check the file association was created and root remains available +# without the remote endpoints +${JUST} gc --local-build-root ${LBR} 2>&1 +${JUST} gc --local-build-root ${LBR} 2>&1 + +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + setup main) +cat "${CONF}" +echo +test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE}" echo OK diff --git a/test/end-to-end/serve-service/serve-distdir-root/upload.sh b/test/end-to-end/serve-service/serve-distdir-root/upload.sh index ff33599db..841a51db8 100644 --- a/test/end-to-end/serve-service/serve-distdir-root/upload.sh +++ b/test/end-to-end/serve-service/serve-distdir-root/upload.sh @@ -17,7 +17,6 @@ ### # This test checks that an absent distdir root can be successfully computed # locally and then uploaded to a serve endpoint that does not know the root. -# The upload can only happen in native mode. ## set -eu @@ -108,35 +107,17 @@ rm -rf "${DISTDIR}" # While keeping the file association, ask serve endpoint to provide the root as # absent. For a serve endpoint that does not have the archive blob available, # this will require uploading the locally-known root tree to remote CAS, from -# where the serve endpoint will pick it up. This can only happen in native mode. 
-if [ -z "${COMPAT}" ]; then - - ${JUST} gc --local-build-root ${LBR} 2>&1 - ${JUST} gc --local-build-root ${LBR} 2>&1 - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup absent) - cat "${CONF}" - echo - test $(jq -r '.repositories.absent.workspace_root[1]' "${CONF}") = "${TREE}" - -else - - echo --- - echo Checking expected failures - - ${JUST} gc --local-build-root ${LBR} 2>&1 - ${JUST} gc --local-build-root ${LBR} 2>&1 - - "${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup absent 2>&1 && exit 1 || : - echo Failed as expected -fi +# where the serve endpoint will pick it up. +${JUST} gc --local-build-root ${LBR} 2>&1 +${JUST} gc --local-build-root ${LBR} 2>&1 + +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + ${ENDPOINT_ARGS} setup absent) +cat "${CONF}" +echo +test $(jq -r '.repositories.absent.workspace_root[1]' "${CONF}") = "${TREE}" echo OK diff --git a/test/end-to-end/serve-service/serve-file-root/upload.sh b/test/end-to-end/serve-service/serve-file-root/upload.sh index 3362aa0d4..01e35ab9d 100644 --- a/test/end-to-end/serve-service/serve-file-root/upload.sh +++ b/test/end-to-end/serve-service/serve-file-root/upload.sh @@ -86,8 +86,6 @@ EOF # Setup an absent root from local path. Even if root is present, if a serve # endpoint is given then we try to set it up there as well. As this serve # endpoint does not know the tree, it will try to upload through the remote CAS. -# The upload succeeds if remote in native mode, but fails (non-fatally) in -# compatible mode. 
CONF=$("${JUST_MR}" --norc -C repos.json \ --just "${JUST}" \ --local-build-root "${LBR}" \ @@ -98,31 +96,15 @@ echo test $(jq -r '.repositories.present_file.workspace_root[1]' "${CONF}") = "${TREE}" # Check in a clean local build root that the serve endpoint now has the root -# tree. This can only work in native mode, where the root was actually uploaded. -if [ -z "${COMPAT}" ]; then - - rm -rf "${LBR}" - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup absent_git_tree) - cat "${CONF}" - echo - -else - - echo --- - echo Checking expected failures - - rm -rf "${LBR}" - "${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup absent_git_tree 2>&1 && exit 1 || : - echo Failed as expected -fi +# tree. +rm -rf "${LBR}" + +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + ${ENDPOINT_ARGS} setup absent_git_tree) +cat "${CONF}" +echo echo OK diff --git a/test/end-to-end/serve-service/serve-git-root/TARGETS b/test/end-to-end/serve-service/serve-git-root/TARGETS index 29ccee004..e73b9d0ce 100644 --- a/test/end-to-end/serve-service/serve-git-root/TARGETS +++ b/test/end-to-end/serve-service/serve-git-root/TARGETS @@ -25,6 +25,6 @@ , "TESTS": { "type": ["@", "rules", "test", "suite"] , "stage": ["serve-git-root"] - , "deps": ["present", "absent", "absent-upload"] + , "deps": ["absent", "absent-upload", "present"] } } diff --git a/test/end-to-end/serve-service/serve-git-root/absent-upload.sh b/test/end-to-end/serve-service/serve-git-root/absent-upload.sh index 2fa54512e..9ec592d0f 100644 --- a/test/end-to-end/serve-service/serve-git-root/absent-upload.sh +++ b/test/end-to-end/serve-service/serve-git-root/absent-upload.sh @@ -16,8 +16,7 @@ ### # This test checks that an absent root known in a local checkout can be -# successfully uploaded to a serve 
endpoint. This can only succeed in native -# mode. +# successfully uploaded to a serve endpoint. ## set -eu @@ -79,30 +78,14 @@ EOF # Setup an absent root from a local checkout. For a serve endpoint that does # not have the commit available, this will upload the locally-known root tree -# to remote CAS, from where the serve endpoint will pick it up. This requires -# that the remotes are in native mode. -if [ -z "${COMPAT}" ]; then - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup main) - cat "${CONF}" - echo - test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${SUBTREE}" - -else - - echo --- - echo Checking expected failures - - "${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup main 2>&1 && exit 1 || : - echo Failed as expected -fi +# to remote CAS, from where the serve endpoint will pick it up. +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + ${ENDPOINT_ARGS} setup main) +cat "${CONF}" +echo +test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${SUBTREE}" echo OK diff --git a/test/end-to-end/serve-service/serve-git-root/absent.sh b/test/end-to-end/serve-service/serve-git-root/absent.sh index ab808d536..97d2f7ea9 100644 --- a/test/end-to-end/serve-service/serve-git-root/absent.sh +++ b/test/end-to-end/serve-service/serve-git-root/absent.sh @@ -71,8 +71,7 @@ EOF # Run the checks ## -# Compute absent root by asking serve to set it up from scratch. This works also -# in compatible mode. +# Compute absent root by asking serve to set it up from scratch. 
rm -rf "${LBR}" CONF=$("${JUST_MR}" --norc -C repos.json \ @@ -85,31 +84,15 @@ echo test $(jq -r '.repositories.absent.workspace_root[1]' "${CONF}") = "${TREE_0}" # Check that serve can provide also a subtree of this tree as present in a clean -# build root. This can only happen if remotes are in native mode. -if [ -z "${COMPAT}" ]; then - - rm -rf "${LBR}" - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup present) - cat "${CONF}" - echo - -else - - echo --- - echo Checking expected failures - - rm -rf "${LBR}" - "${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup present 2>&1 && exit 1 || : - echo Failed as expected -fi +# build root. +rm -rf "${LBR}" + +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + ${ENDPOINT_ARGS} setup present) +cat "${CONF}" +echo echo OK diff --git a/test/end-to-end/serve-service/serve-git-root/present.sh b/test/end-to-end/serve-service/serve-git-root/present.sh index 51723be27..0c359ec87 100644 --- a/test/end-to-end/serve-service/serve-git-root/present.sh +++ b/test/end-to-end/serve-service/serve-git-root/present.sh @@ -61,70 +61,54 @@ EOF # Run the checks ## -# Compute present root by asking the serve endpoint to set it up for us. This -# requires remotes in native mode. 
-if [ -z "${COMPAT}" ]; then - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup main) - cat "${CONF}" - echo - test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE_0}" - - # Compute present root locally from now populated Git cache - ${JUST} gc --local-build-root ${LBR} 2>&1 - ${JUST} gc --local-build-root ${LBR} 2>&1 - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - setup main) - cat "${CONF}" - echo - test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE_0}" - - # Check that the subdir is also working correctly - ${JUST} gc --local-build-root ${LBR} 2>&1 - ${JUST} gc --local-build-root ${LBR} 2>&1 - - cat > repos.json <&1 +${JUST} gc --local-build-root ${LBR} 2>&1 + +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + setup main) +cat "${CONF}" +echo +test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE_0}" + +# Check that the subdir is also working correctly +${JUST} gc --local-build-root ${LBR} 2>&1 +${JUST} gc --local-build-root ${LBR} 2>&1 + +cat > repos.json <&1 && exit 1 || : - echo Failed as expected -fi +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + setup main) +cat "${CONF}" +echo echo OK diff --git a/test/end-to-end/serve-service/serve-git-tree-root/TARGETS b/test/end-to-end/serve-service/serve-git-tree-root/TARGETS index ffcd5bd47..31b2f7adb 100644 --- a/test/end-to-end/serve-service/serve-git-tree-root/TARGETS +++ b/test/end-to-end/serve-service/serve-git-tree-root/TARGETS @@ -15,6 +15,6 @@ , "TESTS": { "type": ["@", "rules", "test", "suite"] , "stage": ["serve-git-tree-root"] - , "deps": ["present", "absent"] + , "deps": ["absent", "present"] } } diff --git 
a/test/end-to-end/serve-service/serve-git-tree-root/absent.sh b/test/end-to-end/serve-service/serve-git-tree-root/absent.sh index 1e4b5f052..ddfe33d81 100644 --- a/test/end-to-end/serve-service/serve-git-tree-root/absent.sh +++ b/test/end-to-end/serve-service/serve-git-tree-root/absent.sh @@ -60,8 +60,7 @@ EOF # Run the checks ## -# Compute absent root by asking serve to set it up from scratch. This works also -# in compatible mode. +# Compute absent root by asking serve to set it up from scratch. rm -rf "${LBR}" CONF=$("${JUST_MR}" --norc -C repos.json \ diff --git a/test/end-to-end/serve-service/serve-git-tree-root/present.sh b/test/end-to-end/serve-service/serve-git-tree-root/present.sh index 2a704b52c..4b05f9b59 100644 --- a/test/end-to-end/serve-service/serve-git-tree-root/present.sh +++ b/test/end-to-end/serve-service/serve-git-tree-root/present.sh @@ -16,7 +16,7 @@ ### # This test checks if we can make a present root for a Git-tree repository -# using the serve endpoint. This can only succeed in native mode. +# using the serve endpoint. ## set -eu @@ -59,43 +59,27 @@ EOF # Run the checks ## -# Compute present root by asking the serve endpoint to set it up for us. This -# requires remotes in native mode. 
-if [ -z "${COMPAT}" ]; then - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup main) - cat "${CONF}" - echo - test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE_0}" - - # Compute present root locally from now populated Git cache - ${JUST} gc --local-build-root ${LBR} 2>&1 - ${JUST} gc --local-build-root ${LBR} 2>&1 - - CONF=$("${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - setup main) - cat "${CONF}" - echo - test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE_0}" - -else - - echo --- - echo Checking expected failures - - "${JUST_MR}" --norc -C repos.json \ - --just "${JUST}" \ - --local-build-root "${LBR}" \ - --log-limit 6 \ - ${ENDPOINT_ARGS} setup main 2>&1 && exit 1 || : - echo Failed as expected -fi +# Compute present root by asking the serve endpoint to set it up for us. +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + ${ENDPOINT_ARGS} setup main) +cat "${CONF}" +echo +test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE_0}" + +# Compute present root locally from now populated Git cache +${JUST} gc --local-build-root ${LBR} 2>&1 +${JUST} gc --local-build-root ${LBR} 2>&1 + +CONF=$("${JUST_MR}" --norc -C repos.json \ + --just "${JUST}" \ + --local-build-root "${LBR}" \ + --log-limit 6 \ + setup main) +cat "${CONF}" +echo +test $(jq -r '.repositories.main.workspace_root[1]' "${CONF}") = "${TREE_0}" echo OK diff --git a/test/end-to-end/serve-service/serve-tree.sh b/test/end-to-end/serve-service/serve-tree.sh index 755739642..d95a890b7 100755 --- a/test/end-to-end/serve-service/serve-tree.sh +++ b/test/end-to-end/serve-service/serve-tree.sh @@ -32,7 +32,12 @@ mkdir -p "${DISTDIR}" cp src.tar "${DISTDIR}" HASH=$(git hash-object src.tar) -REMOTE="-r ${REMOTE_EXECUTION_ADDRESS}" 
+COMPAT="" +if [ "${COMPATIBLE:-}" = "YES" ]; then + COMPAT="--compatible" +fi + +REMOTE="-r ${REMOTE_EXECUTION_ADDRESS} ${COMPAT}" mkdir work cd work @@ -70,7 +75,7 @@ echo echo echo Local build "${JUST_MR}" --norc --local-build-root "${LBR_A}" --just "${JUST}" \ - --distdir "${DISTDIR}" build \ + --distdir "${DISTDIR}" ${COMPAT} build \ --log-limit 4 \ --dump-artifacts local.json 2>&1 diff --git a/test/end-to-end/symlinks/stage-links.sh b/test/end-to-end/symlinks/stage-links.sh index 25c9d15e3..b81731059 100644 --- a/test/end-to-end/symlinks/stage-links.sh +++ b/test/end-to-end/symlinks/stage-links.sh @@ -16,15 +16,17 @@ set -eu readonly JUST="${PWD}/bin/tool-under-test" +readonly OUT="${TMPDIR}/out" +mkdir -p "${OUT}" touch ROOT cat > TARGETS < TARGETS <&1 +${JUST} install -L '["env", "PATH='"${PATH}"'"]' ${ARGS} input-non-upwards \ + -o "${OUT}" 2>&1 && ls -alR "${OUT}" && rm -rf "${OUT}/*" || FAILED=YES + +echo +echo "test input non-upwards remotely" +${JUST} build -L '["env", "PATH='"${PATH}"'"]' ${ARGS} ${REMOTE_EXECUTION_ARGS} input-non-upwards 2>&1 +${JUST} install -L '["env", "PATH='"${PATH}"'"]' ${ARGS} ${REMOTE_EXECUTION_ARGS} input-non-upwards \ + -o "${OUT}" 2>&1 && ls -alR "${OUT}" && rm -rf "${OUT}/*" || FAILED=YES + +echo +echo "test staging non-upwards locally" +${JUST} build -L '["env", "PATH='"${PATH}"'"]' ${ARGS} stage-non-upwards-links 2>&1 || FAILED=YES + +echo +echo "test staging non-upwards remotely" +${JUST} build -L '["env", "PATH='"${PATH}"'"]' ${ARGS} ${REMOTE_EXECUTION_ARGS} stage-non-upwards-links 2>&1 || FAILED=YES + +# Check that actions with non-contained upwards symlinks fail +echo +echo "test input non-contained locally" +${JUST} build -L '["env", "PATH='"${PATH}"'"]' ${ARGS} input-non-contained 2>&1 \ + && echo "this should have failed" && FAILED=YES +${JUST} install -L '["env", "PATH='"${PATH}"'"]' ${ARGS} input-non-contained -o "${OUT}" 2>&1 \ + && echo "this should have failed" && FAILED=YES \ + && ls -alR "${OUT}" && rm -rf 
"${OUT}/*" || echo "failed as expected" + +echo +echo "test input non-contained remotely" +${JUST} build -L '["env", "PATH='"${PATH}"'"]' ${ARGS} ${REMOTE_EXECUTION_ARGS} input-non-contained 2>&1 \ + && echo "this should have failed" && FAILED=YES || echo "failed as expected" +${JUST} install -L '["env", "PATH='"${PATH}"'"]' ${ARGS} ${REMOTE_EXECUTION_ARGS} input-non-contained -o "${OUT}" 2>&1 \ + && echo "this should have failed" && FAILED=YES \ + && ls -alR "${OUT}" && rm -rf "${OUT}/*" || echo "failed as expected" + +echo +echo "test staging non-contained locally" +${JUST} build -L '["env", "PATH='"${PATH}"'"]' ${ARGS} stage-non-contained-links 2>&1 \ + && echo "this should have failed" && FAILED=YES || echo "failed as expected" + +echo +echo "test staging non-contained remotely" +${JUST} build -L '["env", "PATH='"${PATH}"'"]' ${ARGS} ${REMOTE_EXECUTION_ARGS} stage-non-contained-links 2>&1 \ + && echo "this should have failed" && FAILED=YES || echo "failed as expected" + +if [ ! -z "${FAILED}" ]; then + exit 1 +fi -echo "test staging remotely" -${JUST} build -L '["env", "PATH='"${PATH}"'"]' ${ARGS} ${REMOTE_EXECUTION_ARGS} stage-links +echo +echo OK diff --git a/test/end-to-end/target-cache/TARGETS b/test/end-to-end/target-cache/TARGETS index 75c0d3856..43b8f76d0 100644 --- a/test/end-to-end/target-cache/TARGETS +++ b/test/end-to-end/target-cache/TARGETS @@ -2,18 +2,18 @@ { "type": ["end-to-end", "with remote"] , "name": ["target-cache-hit"] , "test": ["target-cache-hit.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] } , "artifacts-sync": { "type": ["@", "rules", "shell/test", "script"] , "name": ["artifacts-sync"] , "test": ["artifacts-sync.sh"] , "deps": - [ ["", "tool-under-test"] + [ "bootstrap-src-staged" , ["", "mr-tool-under-test"] + , ["", "tool-under-test"] , ["./", "data", "greetlib"] , ["./", "data", "pydicts"] - , "bootstrap-src-staged" ] } , "serve-sync": @@ 
-21,10 +21,10 @@ , "name": ["serve-sync"] , "test": ["serve-sync.sh"] , "deps": - [ ["", "tool-under-test"] + [ "bootstrap-src-staged" , ["", "mr-tool-under-test"] + , ["", "tool-under-test"] , ["./", "data", "lib with generated hdr"] - , "bootstrap-src-staged" ] , "repos": [["./", "data", "lib with generated hdr"]] } @@ -32,7 +32,7 @@ { "type": ["@", "rules", "shell/test", "script"] , "name": ["export-extern"] , "test": ["export-extern.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] } , "bootstrap-src-staged": {"type": "install", "dirs": [[["@", "src", "", "bootstrap-src"], "src"]]} @@ -40,7 +40,7 @@ { "type": ["@", "rules", "shell/test", "script"] , "name": ["check-sharding"] , "test": ["check-sharding.sh"] - , "deps": [["", "tool-under-test"], ["", "mr-tool-under-test"]] + , "deps": [["", "mr-tool-under-test"], ["", "tool-under-test"]] } , "TESTS": { "type": ["@", "rules", "test", "suite"] diff --git a/test/end-to-end/target-tests/TARGETS b/test/end-to-end/target-tests/TARGETS index f49d62026..5fc0d6b73 100644 --- a/test/end-to-end/target-tests/TARGETS +++ b/test/end-to-end/target-tests/TARGETS @@ -44,13 +44,13 @@ { "type": ["@", "rules", "test", "suite"] , "stage": ["target-tests"] , "deps": - [ "upwards reference" + [ "configure target name" + , "configure variables" + , "glob expansion" , "repository naming" , "resolution of built-in rules" - , "glob expansion" - , "configure target name" - , "configure variables" , "tree inputs" + , "upwards reference" ] } } diff --git a/test/main.cpp b/test/main.cpp index f73045dcf..54afa2dcc 100644 --- a/test/main.cpp +++ b/test/main.cpp @@ -20,12 +20,9 @@ #include "src/buildtool/file_system/git_context.hpp" #include "src/buildtool/storage/file_chunker.hpp" #include "test/utils/logging/log_config.hpp" -#include "test/utils/test_env.hpp" auto main(int argc, char* argv[]) -> int { ConfigureLogging(); - ReadCompatibilityFromEnv(); - /** * 
The current implementation of libgit2 uses pthread_key_t incorrectly * on POSIX systems to handle thread-specific data, which requires us to diff --git a/test/other_tools/TARGETS b/test/other_tools/TARGETS index 5b92abf72..d12a9ad1c 100644 --- a/test/other_tools/TARGETS +++ b/test/other_tools/TARGETS @@ -3,8 +3,8 @@ , "stage": ["other_tools"] , "deps": [ ["./", "git_operations", "TESTS"] - , ["./", "utils", "TESTS"] , ["./", "just_mr", "TESTS"] + , ["./", "utils", "TESTS"] ] } } diff --git a/test/other_tools/git_operations/TARGETS b/test/other_tools/git_operations/TARGETS index 381b97c97..f63258b59 100644 --- a/test/other_tools/git_operations/TARGETS +++ b/test/other_tools/git_operations/TARGETS @@ -5,12 +5,12 @@ , "srcs": ["critical_git_ops.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "fmt", "", "fmt"] , ["@", "json", "", "json"] + , ["@", "src", "src/buildtool/execution_api/common", "common"] , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/other_tools/ops_maps", "critical_git_op_map"] - , ["@", "src", "src/buildtool/execution_api/common", "common"] + , ["", "catch-main"] , ["utils", "shell_quoting"] ] , "stage": ["test", "other_tools", "git_operations"] @@ -20,7 +20,7 @@ , "name": ["critical_git_ops_mp"] , "test": ["critical_git_ops_mp.sh"] , "deps": - [["buildtool/file_system", "test_data"], "critical_git_ops_test_install"] + ["critical_git_ops_test_install", ["buildtool/file_system", "test_data"]] } , "git_repo_remote": { "type": ["@", "rules", "CC/test", "test"] @@ -29,15 +29,15 @@ , "data": [["buildtool/file_system", "test_data"]] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "fmt", "", "fmt"] , ["@", "json", "", "json"] , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] - , ["@", "src", "src/other_tools/git_operations", 
"git_repo_remote"] , ["@", "src", "src/buildtool/storage", "config"] + , ["@", "src", "src/other_tools/git_operations", "git_repo_remote"] , ["@", "src", "src/utils/cpp", "atomic"] + , ["", "catch-main"] , ["utils", "shell_quoting"] , ["utils", "test_storage_config"] ] @@ -50,11 +50,11 @@ , "srcs": ["git_config_run.test.cpp"] , "private-deps": [ ["@", "src", "", "libgit2"] - , ["@", "src", "src/other_tools/git_operations", "git_config_settings"] - , ["@", "src", "src/buildtool/file_system", "git_utils"] , ["@", "src", "src/buildtool/file_system", "git_context"] + , ["@", "src", "src/buildtool/file_system", "git_utils"] , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] + , ["@", "src", "src/other_tools/git_operations", "git_config_settings"] , ["utils", "log_config"] ] , "stage": ["src"] @@ -78,9 +78,9 @@ , "stage": ["git_operations"] , "deps": [ "critical_git_ops_mp" - , "git_repo_remote" - , "git_config_ssl" , "git_config_proxy" + , "git_config_ssl" + , "git_repo_remote" ] } } diff --git a/test/other_tools/git_operations/critical_git_ops.test.cpp b/test/other_tools/git_operations/critical_git_ops.test.cpp index c1494bcfc..aa93b3911 100644 --- a/test/other_tools/git_operations/critical_git_ops.test.cpp +++ b/test/other_tools/git_operations/critical_git_ops.test.cpp @@ -80,7 +80,7 @@ class TestUtilsMP { : (*repo_path / ".git").string()), QuoteForShell(repo_path->string())); if (std::system(cmd.c_str()) == 0) { - return *repo_path; + return repo_path; } return std::nullopt; } @@ -152,21 +152,21 @@ TEST_CASE("Critical git operations", "[critical_git_op_map]") { // Add ops to the map. None should throw, as repeating the same operation // should retrieve the value from the map, not call the operation again. 
// helper lists - constexpr auto NUM_METHODS = 6; - std::vector ops_all(NUM_METHODS); // indices of all ops tested + constexpr auto kNumMethods = 6; + std::vector ops_all(kNumMethods); // indices of all ops tested std::iota(ops_all.begin(), ops_all.end(), 0); const std::vector ops_with_result{ 0, 4}; // indices of ops that return a non-empty string // Add to the map all ops multiple times - constexpr auto REPEATS = 3; - for ([[maybe_unused]] auto k = REPEATS; k > 0; --k) { + constexpr auto kRepeats = 3; + for ([[maybe_unused]] auto k = kRepeats; k > 0; --k) { auto error = false; auto error_msg = std::string("NONE"); { TaskSystem ts; - for ([[maybe_unused]] auto j = REPEATS; j > 0; --j) { + for ([[maybe_unused]] auto j = kRepeats; j > 0; --j) { crit_op_map.ConsumeAfterKeysReady( &ts, {GitOpKey{.params = diff --git a/test/other_tools/git_operations/git_config_run.test.cpp b/test/other_tools/git_operations/git_config_run.test.cpp index 1e72c7da6..573802804 100644 --- a/test/other_tools/git_operations/git_config_run.test.cpp +++ b/test/other_tools/git_operations/git_config_run.test.cpp @@ -60,7 +60,7 @@ auto main(int argc, char* argv[]) -> int { argc); return 1; } - auto args = std::span(argv, size_t(argc)); + auto args = std::span(argv, static_cast(argc)); std::string test_type{args[1]}; // type of test std::string test_url{args[2]}; // remote URL to test diff --git a/test/other_tools/git_operations/git_repo_remote.test.cpp b/test/other_tools/git_operations/git_repo_remote.test.cpp index 69cfce343..39f0bd8ad 100644 --- a/test/other_tools/git_operations/git_repo_remote.test.cpp +++ b/test/other_tools/git_operations/git_repo_remote.test.cpp @@ -69,7 +69,7 @@ class TestUtils { : (*repo_path / ".git").string()), QuoteForShell(repo_path->string())); if (std::system(cmd.c_str()) == 0) { - return *repo_path; + return repo_path; } return std::nullopt; } @@ -269,26 +269,26 @@ TEST_CASE("Single-threaded fake repository operations", "[git_repo_remote]") { SECTION("Fetch with 
refspec into repository") { // set repo to fetch into - auto path_fetch_wRefspec = TestUtils::GetRepoPath(); - auto repo_fetch_wRefspec = GitRepoRemote::InitAndOpen( - path_fetch_wRefspec, /*is_bare=*/true); - REQUIRE(repo_fetch_wRefspec); + auto path_fetch_refspec = TestUtils::GetRepoPath(); + auto repo_fetch_refspec = GitRepoRemote::InitAndOpen( + path_fetch_refspec, /*is_bare=*/true); + REQUIRE(repo_fetch_refspec); // check commit is not there before fetch CHECK_FALSE( - *repo_fetch_wRefspec->CheckCommitExists(kRootCommit, logger)); + *repo_fetch_refspec->CheckCommitExists(kRootCommit, logger)); // fetch all - REQUIRE(repo_fetch_wRefspec->FetchViaTmpRepo(storage_config.Get(), - *repo_path, - "master", - {}, - "git", - {}, - logger)); + REQUIRE(repo_fetch_refspec->FetchViaTmpRepo(storage_config.Get(), + *repo_path, + "master", + {}, + "git", + {}, + logger)); // check commit is there after fetch - CHECK(*repo_fetch_wRefspec->CheckCommitExists(kRootCommit, logger)); + CHECK(*repo_fetch_refspec->CheckCommitExists(kRootCommit, logger)); } } @@ -345,7 +345,7 @@ TEST_CASE("Multi-threaded fake repository operations", "[git_repo_remote]") { REQUIRE(target_repo); SECTION("Fetching into same repository from remote") { - constexpr int NUM_CASES = 4; + constexpr int kNumCases = 4; for (int id{}; id < kNumThreads; ++id) { threads.emplace_back( [&storage_config, @@ -355,7 +355,7 @@ TEST_CASE("Multi-threaded fake repository operations", "[git_repo_remote]") { &starting_signal](int tid) { starting_signal.wait(false); // cases based on thread number - switch (tid % NUM_CASES) { + switch (tid % kNumCases) { case 0: { auto result_containing = target_repo->CheckCommitExists(kRootCommit, @@ -400,6 +400,8 @@ TEST_CASE("Multi-threaded fake repository operations", "[git_repo_remote]") { REQUIRE(fetched_commit); CHECK(*fetched_commit == kRootCommit); } break; + default: + REQUIRE(false); } }, id); diff --git a/test/other_tools/just_mr/TARGETS b/test/other_tools/just_mr/TARGETS index 
d2397e7f1..a74921aca 100644 --- a/test/other_tools/just_mr/TARGETS +++ b/test/other_tools/just_mr/TARGETS @@ -4,9 +4,9 @@ , "srcs": ["rc_merge.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/buildtool/build_engine/expression", "expression"] , ["@", "src", "src/other_tools/just_mr", "rc_merge"] + , ["", "catch-main"] ] , "stage": ["test", "other_tools", "just_mr"] } @@ -16,14 +16,14 @@ , "srcs": ["mirrors.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/other_tools/just_mr", "mirrors"] + , ["", "catch-main"] ] , "stage": ["test", "other_tools", "just_mr"] } , "TESTS": { "type": ["@", "rules", "test", "suite"] , "stage": ["just_mr"] - , "deps": ["rc_merge", "mirrors"] + , "deps": ["mirrors", "rc_merge"] } } diff --git a/test/other_tools/utils/TARGETS b/test/other_tools/utils/TARGETS index 6506de8fc..6d7ed56b9 100644 --- a/test/other_tools/utils/TARGETS +++ b/test/other_tools/utils/TARGETS @@ -5,10 +5,10 @@ , "srcs": ["curl_usage.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] + , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/other_tools/utils", "curl_context"] , ["@", "src", "src/other_tools/utils", "curl_easy_handle"] - , ["@", "src", "src/buildtool/file_system", "file_system_manager"] + , ["", "catch-main"] ] , "stage": ["test", "other_tools", "utils"] } @@ -16,7 +16,7 @@ { "type": ["@", "rules", "shell/test", "script"] , "name": ["curl_usage"] , "test": ["curl_usage_test.sh"] - , "deps": [["utils", "test_utils_install"], "curl_usage_install"] + , "deps": ["curl_usage_install", ["utils", "test_utils_install"]] } , "curl_url": { "type": ["@", "rules", "CC/test", "test"] @@ -24,14 +24,14 @@ , "srcs": ["curl_url.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/other_tools/utils", "curl_url_handle"] + , ["", "catch-main"] ] , 
"stage": ["test", "other_tools", "utils"] } , "TESTS": { "type": ["@", "rules", "test", "suite"] , "stage": ["utils"] - , "deps": ["curl_usage", "curl_url"] + , "deps": ["curl_url", "curl_usage"] } } diff --git a/test/other_tools/utils/curl_usage.test.cpp b/test/other_tools/utils/curl_usage.test.cpp index d3b3fd492..980793bf5 100644 --- a/test/other_tools/utils/curl_usage.test.cpp +++ b/test/other_tools/utils/curl_usage.test.cpp @@ -38,27 +38,27 @@ TEST_CASE("Curl context", "[curl_context]") { } TEST_CASE("Curl easy handle", "[curl_easy_handle]") { - auto kServerUrl = std::string("http://127.0.0.1:") + getPort() + - std::string("/test_file.txt"); - auto kTargetDir = + auto const serve_url = std::string("http://127.0.0.1:") + getPort() + + std::string("/test_file.txt"); + auto const target_dir = std::filesystem::path(std::getenv("TEST_TMPDIR")) / "target_dir"; // make target dir - CHECK(FileSystemManager::CreateDirectory(kTargetDir)); + CHECK(FileSystemManager::CreateDirectory(target_dir)); // create handle auto curl_handle = CurlEasyHandle::Create(); REQUIRE(curl_handle); SECTION("Curl download to file") { // download test file from local HTTP server into new location - auto file_path = kTargetDir / "test_file.txt"; - REQUIRE(curl_handle->DownloadToFile(kServerUrl, file_path) == 0); + auto file_path = target_dir / "test_file.txt"; + REQUIRE(curl_handle->DownloadToFile(serve_url, file_path) == 0); REQUIRE(FileSystemManager::IsFile(file_path)); } SECTION("Curl download to string") { // download test file from local HTTP server into string - auto content = curl_handle->DownloadToString(kServerUrl); + auto content = curl_handle->DownloadToString(serve_url); REQUIRE(content); REQUIRE(*content == "test\n"); } diff --git a/test/utils/TARGETS b/test/utils/TARGETS index 619e41344..7d8f71481 100644 --- a/test/utils/TARGETS +++ b/test/utils/TARGETS @@ -10,14 +10,17 @@ , "name": ["execution_bazel"] , "hdrs": ["remote_execution/bazel_action_creator.hpp"] , "deps": - [ ["@", 
"gsl", "", "gsl"] - , ["@", "src", "src/buildtool/execution_api/remote", "bazel_network"] - , ["@", "src", "src/buildtool/common", "bazel_types"] - , ["@", "src", "src/buildtool/common/remote", "retry_config"] + [ "test_auth_config" , "test_env" - , "test_auth_config" , "test_remote_config" + , ["@", "gsl", "", "gsl"] + , ["@", "src", "src/buildtool/auth", "auth"] + , ["@", "src", "src/buildtool/common", "bazel_digest_factory"] + , ["@", "src", "src/buildtool/common", "bazel_types"] + , ["@", "src", "src/buildtool/common/remote", "retry_config"] , ["@", "src", "src/buildtool/crypto", "hash_function"] + , ["@", "src", "src/buildtool/execution_api/remote", "bazel_network"] + , ["@", "src", "src/buildtool/execution_api/remote", "config"] ] , "stage": ["test", "utils"] } @@ -37,8 +40,8 @@ , "hdrs": ["test_env.hpp"] , "deps": [ "log_config" - , ["@", "src", "src/buildtool/compatibility", "compatibility"] , ["@", "src", "src/buildtool/auth", "auth"] + , ["@", "src", "src/buildtool/common", "protocol_traits"] ] , "stage": ["test", "utils"] } @@ -47,12 +50,25 @@ , "name": ["test_storage_config"] , "hdrs": ["hermeticity/test_storage_config.hpp"] , "deps": - [ ["@", "gsl", "", "gsl"] + [ "test_hash_function_type" + , ["@", "gsl", "", "gsl"] + , ["@", "src", "src/buildtool/common", "protocol_traits"] , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] , ["@", "src", "src/buildtool/storage", "config"] , ["@", "src", "src/utils/cpp", "tmp_dir"] + ] + , "stage": ["test", "utils"] + } +, "test_hash_function_type": + { "type": ["@", "rules", "CC", "library"] + , "name": ["test_hash_function_type"] + , "hdrs": ["hermeticity/test_hash_function_type.hpp"] + , "deps": + [ "test_env" , ["@", "src", "src/buildtool/crypto", "hash_function"] + , ["@", "src", "src/buildtool/logging", "log_level"] + , ["@", "src", "src/buildtool/logging", "logging"] ] , "stage": ["test", "utils"] } @@ -72,16 +88,16 @@ , "name": 
["catch-main-remote-execution"] , "srcs": ["remote_execution/main-remote-execution.cpp"] , "deps": - [ ["@", "catch2", "", "catch2"] + [ "log_config" + , "test_auth_config" + , "test_env" + , "test_remote_config" + , ["@", "catch2", "", "catch2"] , ["@", "src", "src/buildtool/execution_api/remote", "config"] + , ["@", "src", "src/buildtool/file_system", "file_system_manager"] + , ["@", "src", "src/buildtool/file_system", "git_context"] , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] - , ["@", "src", "src/buildtool/file_system", "git_context"] - , ["@", "src", "src/buildtool/file_system", "file_system_manager"] - , "log_config" - , "test_env" - , "test_auth_config" - , "test_remote_config" ] , "stage": ["test", "utils"] } @@ -90,10 +106,10 @@ , "name": ["test_serve_config"] , "hdrs": ["serve_service/test_serve_config.hpp"] , "deps": - [ ["@", "src", "src/buildtool/serve_api/remote", "config"] + [ "test_env" , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] - , "test_env" + , ["@", "src", "src/buildtool/serve_api/remote", "config"] ] , "stage": ["test", "utils"] } @@ -102,10 +118,10 @@ , "name": ["test_auth_config"] , "hdrs": ["remote_execution/test_auth_config.hpp"] , "deps": - [ ["@", "src", "src/buildtool/auth", "auth"] + [ "test_env" + , ["@", "src", "src/buildtool/auth", "auth"] , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] - , "test_env" ] , "stage": ["test", "utils"] } @@ -114,10 +130,10 @@ , "name": ["test_remote_config"] , "hdrs": ["remote_execution/test_remote_config.hpp"] , "deps": - [ ["@", "src", "src/buildtool/execution_api/remote", "config"] + [ "test_env" + , ["@", "src", "src/buildtool/execution_api/remote", "config"] , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] - , "test_env" ] , "stage": ["test", "utils"] } @@ -126,18 +142,18 
@@ , "name": ["catch-main-serve"] , "srcs": ["serve_service/main-serve.cpp"] , "deps": - [ ["@", "catch2", "", "catch2"] + [ "log_config" + , "shell_quoting" + , "test_env" + , "test_serve_config" + , ["@", "catch2", "", "catch2"] + , ["@", "src", "src/buildtool/common", "protocol_traits"] , ["@", "src", "src/buildtool/execution_api/remote", "config"] - , ["@", "src", "src/buildtool/serve_api/remote", "config"] + , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/buildtool/logging", "log_level"] , ["@", "src", "src/buildtool/logging", "logging"] + , ["@", "src", "src/buildtool/serve_api/remote", "config"] , ["@", "src", "src/buildtool/storage", "storage"] - , ["@", "src", "src/buildtool/file_system", "file_system_manager"] - , ["@", "src", "src/buildtool/compatibility", "compatibility"] - , "shell_quoting" - , "log_config" - , "test_env" - , "test_serve_config" ] , "stage": ["test", "utils"] } @@ -157,19 +173,6 @@ , "hdrs": ["shell_quoting.hpp"] , "stage": ["test", "utils"] } -, "blob_creator": - { "type": ["@", "rules", "CC", "library"] - , "name": ["blob_creator"] - , "hdrs": ["blob_creator.hpp"] - , "stage": ["test", "utils"] - , "deps": - [ ["@", "src", "src/buildtool/common", "common"] - , ["@", "src", "src/buildtool/file_system", "object_type"] - , ["@", "src", "src/buildtool/file_system", "file_system_manager"] - , ["@", "src", "src/buildtool/execution_api/bazel_msg", "bazel_msg"] - , ["@", "src", "src/buildtool/crypto", "hash_function"] - ] - } , "test_api_bundle": { "type": ["@", "rules", "CC", "library"] , "name": ["test_api_bundle"] @@ -185,6 +188,6 @@ , "TESTS": { "type": ["@", "rules", "test", "suite"] , "stage": ["utils"] - , "deps": [["./", "cpp", "TESTS"], ["./", "archive", "TESTS"]] + , "deps": [["./", "archive", "TESTS"], ["./", "cpp", "TESTS"]] } } diff --git a/test/utils/archive/TARGETS b/test/utils/archive/TARGETS index a1aa02c91..41aec44c3 100644 --- a/test/utils/archive/TARGETS +++ 
b/test/utils/archive/TARGETS @@ -4,10 +4,10 @@ , "srcs": ["archive_usage.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] - , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "", "libarchive"] + , ["@", "src", "src/buildtool/file_system", "file_system_manager"] , ["@", "src", "src/utils/archive", "archive_ops"] + , ["", "catch-main"] ] , "stage": ["test", "utils", "archive"] } diff --git a/test/utils/archive/archive_usage.test.cpp b/test/utils/archive/archive_usage.test.cpp index fd33f7e0d..233136209 100644 --- a/test/utils/archive/archive_usage.test.cpp +++ b/test/utils/archive/archive_usage.test.cpp @@ -50,7 +50,7 @@ auto const kExpected = filetree_t{{"foo", {"foo", AE_IFREG}}, {"bar/", {"", AE_IFDIR}}, {"bar/baz", {"baz", AE_IFREG}}}; -struct archive_test_info_t { +struct ArchiveTestInfo { std::string test_name; ArchiveType type; std::string test_dir; @@ -59,7 +59,7 @@ struct archive_test_info_t { std::string cmd; }; -std::vector const kTestScenarios = { +std::vector const kTestScenarios = { {.test_name = "tar", .type = ArchiveType::Tar, .test_dir = "test_tar", diff --git a/test/utils/blob_creator.hpp b/test/utils/blob_creator.hpp deleted file mode 100644 index 944fb9d28..000000000 --- a/test/utils/blob_creator.hpp +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef INCLUDED_SRC_TEST_UTILS_BLOB_CREATOR_HPP -#define INCLUDED_SRC_TEST_UTILS_BLOB_CREATOR_HPP - -#include -#include -#include - -#include "src/buildtool/common/artifact_digest.hpp" -#include "src/buildtool/crypto/hash_function.hpp" -#include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp" -#include "src/buildtool/file_system/file_system_manager.hpp" -#include "src/buildtool/file_system/object_type.hpp" - -/// \brief Create a blob from the content found in file or symlink pointed to by -/// given path. -[[nodiscard]] static inline auto CreateBlobFromPath( - std::filesystem::path const& fpath, - HashFunction hash_function) noexcept -> std::optional { - auto const type = FileSystemManager::Type(fpath, /*allow_upwards=*/true); - if (not type) { - return std::nullopt; - } - auto const content = FileSystemManager::ReadContentAtPath(fpath, *type); - if (not content.has_value()) { - return std::nullopt; - } - return BazelBlob{ - ArtifactDigest::Create(hash_function, *content), - *content, - IsExecutableObject(*type)}; -} - -#endif // INCLUDED_SRC_TEST_UTILS_BLOB_CREATOR_HPP diff --git a/test/utils/container_matchers.hpp b/test/utils/container_matchers.hpp index 35a334e38..7def6d571 100644 --- a/test/utils/container_matchers.hpp +++ b/test/utils/container_matchers.hpp @@ -43,7 +43,7 @@ class UniqueElementsUnorderedMatcher using value_type = typename LeftContainer::value_type; using T = value_type; static_assert( - std::is_constructible::value, + std::is_constructible_v, "Value type of container in the left hand side must be constructible " "from that of the right hand side."); diff --git a/test/utils/cpp/TARGETS b/test/utils/cpp/TARGETS index c7a7e4f82..9a809b475 100644 --- a/test/utils/cpp/TARGETS +++ b/test/utils/cpp/TARGETS @@ -4,8 +4,8 @@ , "srcs": ["path.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/utils/cpp", "path"] + , ["", "catch-main"] ] , "stage": ["test", "utils", "cpp"] } @@ 
-15,8 +15,8 @@ , "srcs": ["path_rebase.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/utils/cpp", "path_rebase"] + , ["", "catch-main"] ] , "stage": ["test", "utils", "cpp"] } @@ -26,10 +26,10 @@ , "srcs": ["file_locking.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/buildtool/file_system", "file_system_manager"] - , ["@", "src", "src/utils/cpp", "file_locking"] , ["@", "src", "src/utils/cpp", "atomic"] + , ["@", "src", "src/utils/cpp", "file_locking"] + , ["", "catch-main"] ] , "stage": ["test", "utils", "cpp"] , "private-ldflags": ["-pthread"] @@ -40,14 +40,14 @@ , "srcs": ["prefix.test.cpp"] , "private-deps": [ ["@", "catch2", "", "catch2"] - , ["", "catch-main"] , ["@", "src", "src/utils/cpp", "prefix"] + , ["", "catch-main"] ] , "stage": ["test", "utils", "cpp"] } , "TESTS": { "type": ["@", "rules", "test", "suite"] , "stage": ["cpp"] - , "deps": ["path", "path_rebase", "file_locking", "prefix"] + , "deps": ["file_locking", "path", "path_rebase", "prefix"] } } diff --git a/test/utils/executor/test_api_bundle.hpp b/test/utils/executor/test_api_bundle.hpp index 1b582ce68..088c43233 100644 --- a/test/utils/executor/test_api_bundle.hpp +++ b/test/utils/executor/test_api_bundle.hpp @@ -26,10 +26,10 @@ /// implementation. As only the hash_function field is actually needed, the /// remote_context and repo_config are not needed to be provided. 
[[nodiscard]] static auto CreateTestApiBundle( - HashFunction hash_function, + gsl::not_null const& hash_function, gsl::not_null const& api) noexcept -> ApiBundle { return ApiBundle{ - .hash_function = hash_function, .local = api, .remote = api}; + .hash_function = *hash_function, .local = api, .remote = api}; } #endif // INCLUDED_SRC_TEST_UTILS_EXECUTOR_TEST_API_BUNDLE_HPP diff --git a/test/utils/hermeticity/test_hash_function_type.hpp b/test/utils/hermeticity/test_hash_function_type.hpp new file mode 100644 index 000000000..ab46f7913 --- /dev/null +++ b/test/utils/hermeticity/test_hash_function_type.hpp @@ -0,0 +1,41 @@ +// Copyright 2024 Huawei Cloud Computing Technology Co., Ltd. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef INCLUDED_SRC_TEST_UTILS_HERMETICITY_TEST_HASH_FUNCTION_TYPE_HPP +#define INCLUDED_SRC_TEST_UTILS_HERMETICITY_TEST_HASH_FUNCTION_TYPE_HPP + +#include //std::exit +#include + +#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/logging/log_level.hpp" +#include "src/buildtool/logging/logger.hpp" +#include "test/utils/test_env.hpp" + +class TestHashType final { + public: + [[nodiscard]] static auto ReadFromEnvironment() noexcept + -> HashFunction::Type { + auto const compatible = ReadCompatibilityFromEnv(); + if (not compatible) { + Logger::Log(LogLevel::Error, + "Failed to read COMPATIBLE from environment"); + std::exit(EXIT_FAILURE); + } + return *compatible ? 
HashFunction::Type::PlainSHA256 + : HashFunction::Type::GitSHA1; + } +}; + +#endif // INCLUDED_SRC_TEST_UTILS_HERMETICITY_TEST_HASH_FUNCTION_TYPE_HPP diff --git a/test/utils/hermeticity/test_storage_config.hpp b/test/utils/hermeticity/test_storage_config.hpp index 4bcbc1bef..4335592cf 100644 --- a/test/utils/hermeticity/test_storage_config.hpp +++ b/test/utils/hermeticity/test_storage_config.hpp @@ -21,11 +21,12 @@ #include //std::move #include "gsl/gsl" -#include "src/buildtool/crypto/hash_function.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/logging/log_level.hpp" #include "src/buildtool/logging/logger.hpp" #include "src/buildtool/storage/config.hpp" #include "src/utils/cpp/tmp_dir.hpp" +#include "test/utils/hermeticity/test_hash_function_type.hpp" class TestStorageConfig final { public: @@ -40,9 +41,13 @@ class TestStorageConfig final { * there. Hence we set the storage root to a fixed location under * TEST_TMPDIR which is set by the test launcher. */ + char const* const env_tmpdir = std::getenv("TEST_TMPDIR"); + if (env_tmpdir == nullptr) { + Logger::Log(LogLevel::Debug, "missing TEST_TMPDIR env variable"); + std::exit(EXIT_FAILURE); + } auto const test_tempdir = - std::filesystem::path{std::string{std::getenv("TEST_TMPDIR")}} / - ".test_build_root"; + std::filesystem::path{std::string{env_tmpdir}} / ".test_build_root"; auto temp_dir = TmpDir::Create(test_tempdir); if (temp_dir == nullptr) { @@ -53,9 +58,7 @@ class TestStorageConfig final { StorageConfig::Builder builder; auto config = builder.SetBuildRoot(temp_dir->GetPath()) - .SetHashType(Compatibility::IsCompatible() - ? 
HashFunction::Type::PlainSHA256 - : HashFunction::Type::GitSHA1) + .SetHashType(TestHashType::ReadFromEnvironment()) .Build(); if (not config) { Logger::Log(LogLevel::Error, config.error()); diff --git a/test/utils/large_objects/large_object_utils.cpp b/test/utils/large_objects/large_object_utils.cpp index a30a9276b..0428b506d 100644 --- a/test/utils/large_objects/large_object_utils.cpp +++ b/test/utils/large_objects/large_object_utils.cpp @@ -30,7 +30,7 @@ class Randomizer final { Randomizer(std::uint64_t min, std::uint64_t max) noexcept : range_(std::random_device{}()), distribution_(min, max) {} - [[nodiscard]] inline auto Get() noexcept -> std::uint64_t { + [[nodiscard]] auto Get() noexcept -> std::uint64_t { return distribution_(range_); } @@ -75,7 +75,8 @@ class ChunkPool final { } // namespace auto LargeObjectUtils::GenerateFile(std::filesystem::path const& path, - std::uintmax_t size) noexcept -> bool { + std::uintmax_t size, + bool is_executable) noexcept -> bool { // Remove the file, if exists: if (not FileSystemManager::RemoveFile(path)) { return false; @@ -112,6 +113,17 @@ auto LargeObjectUtils::GenerateFile(std::filesystem::path const& path, } catch (...) { return false; } + + if (is_executable) { + using perms = std::filesystem::perms; + perms p = perms::owner_exec | perms::group_exec | perms::others_exec; + try { + std::filesystem::permissions( + path, p, std::filesystem::perm_options::add); + } catch (...) { + return false; + } + } return true; } diff --git a/test/utils/large_objects/large_object_utils.hpp b/test/utils/large_objects/large_object_utils.hpp index 966195182..d4ad3947b 100644 --- a/test/utils/large_objects/large_object_utils.hpp +++ b/test/utils/large_objects/large_object_utils.hpp @@ -30,9 +30,11 @@ class LargeObjectUtils { /// randomizations, a pool of pre-generated chunks is used. /// \param path Output path. /// \param size Size of the resulting file in bytes. 
+ /// \param is_executable Set executable permissions /// \return True if the file is generated properly. [[nodiscard]] static auto GenerateFile(std::filesystem::path const& path, - std::uintmax_t size) noexcept + std::uintmax_t size, + bool is_executable = false) noexcept -> bool; /// \brief Generate a directory in the specified location and fill it with a diff --git a/test/utils/logging/log_config.hpp b/test/utils/logging/log_config.hpp index 816e73b9e..3b920917c 100644 --- a/test/utils/logging/log_config.hpp +++ b/test/utils/logging/log_config.hpp @@ -24,8 +24,8 @@ #include "src/buildtool/logging/log_sink_cmdline.hpp" static auto ReadLogLevelFromEnv() -> LogLevel { - LogLevel const kDefaultTestLogLevel{LogLevel::Error}; - LogLevel const kMaximumTestLogLevel{LogLevel::Trace}; + static constexpr LogLevel kDefaultTestLogLevel = LogLevel::Error; + static constexpr LogLevel kMaximumTestLogLevel = LogLevel::Trace; auto log_level{kDefaultTestLogLevel}; diff --git a/test/utils/remote_execution/bazel_action_creator.hpp b/test/utils/remote_execution/bazel_action_creator.hpp index 7b92428d8..b71851211 100644 --- a/test/utils/remote_execution/bazel_action_creator.hpp +++ b/test/utils/remote_execution/bazel_action_creator.hpp @@ -24,6 +24,7 @@ #include "gsl/gsl" #include "src/buildtool/auth/authentication.hpp" +#include "src/buildtool/common/bazel_digest_factory.hpp" #include "src/buildtool/common/bazel_types.hpp" #include "src/buildtool/common/remote/retry_config.hpp" #include "src/buildtool/crypto/hash_function.hpp" @@ -64,26 +65,24 @@ }); auto cmd_data = cmd.SerializeAsString(); - auto cmd_id = - ArtifactDigest::Create(hash_function, cmd_data); + auto cmd_id = BazelDigestFactory::HashDataAs( + hash_function, cmd_data); blobs.emplace_back(cmd_id, cmd_data, /*is_exec=*/false); bazel_re::Directory empty_dir; auto dir_data = empty_dir.SerializeAsString(); - auto dir_id = - ArtifactDigest::Create(hash_function, dir_data); + auto dir_id = BazelDigestFactory::HashDataAs( + 
hash_function, dir_data); blobs.emplace_back(dir_id, dir_data, /*is_exec=*/false); bazel_re::Action action; - action.set_allocated_command_digest( - gsl::owner{new bazel_re::Digest{cmd_id}}); + (*action.mutable_command_digest()) = cmd_id; action.set_do_not_cache(false); - action.set_allocated_input_root_digest( - gsl::owner{new bazel_re::Digest{dir_id}}); + (*action.mutable_input_root_digest()) = dir_id; auto action_data = action.SerializeAsString(); - auto action_id = - ArtifactDigest::Create(hash_function, action_data); + auto action_id = BazelDigestFactory::HashDataAs( + hash_function, action_data); blobs.emplace_back(action_id, action_data, /*is_exec=*/false); auto auth_config = TestAuthConfig::ReadFromEnvironment(); diff --git a/test/utils/remote_execution/main-remote-execution.cpp b/test/utils/remote_execution/main-remote-execution.cpp index 45021c61c..f7bbc4fc9 100644 --- a/test/utils/remote_execution/main-remote-execution.cpp +++ b/test/utils/remote_execution/main-remote-execution.cpp @@ -29,7 +29,6 @@ #include "test/utils/logging/log_config.hpp" #include "test/utils/remote_execution/test_auth_config.hpp" #include "test/utils/remote_execution/test_remote_config.hpp" -#include "test/utils/test_env.hpp" namespace { @@ -42,8 +41,6 @@ void wait_for_grpc_to_shutdown() { /// environment variable is malformed, we write a message and stop execution. /// \returns true If remote execution was successfully configured. 
void ConfigureRemoteExecution() { - ReadCompatibilityFromEnv(); - // Ensure authentication config is available if (not TestAuthConfig::ReadFromEnvironment()) { std::exit(EXIT_FAILURE); diff --git a/test/utils/serve_service/main-serve.cpp b/test/utils/serve_service/main-serve.cpp index 5a08eb89a..1f40de089 100644 --- a/test/utils/serve_service/main-serve.cpp +++ b/test/utils/serve_service/main-serve.cpp @@ -23,7 +23,7 @@ #include "catch2/catch_session.hpp" #include "catch2/catch_test_macros.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "src/buildtool/execution_api/remote/config.hpp" #include "src/buildtool/file_system/file_system_manager.hpp" #include "src/buildtool/logging/log_level.hpp" @@ -33,7 +33,6 @@ #include "test/utils/logging/log_config.hpp" #include "test/utils/serve_service/test_serve_config.hpp" #include "test/utils/shell_quoting.hpp" -#include "test/utils/test_env.hpp" namespace { @@ -110,8 +109,6 @@ void wait_for_grpc_to_shutdown() { auto main(int argc, char* argv[]) -> int { ConfigureLogging(); - ReadCompatibilityFromEnv(); - // Setup of serve service, including known repositories. 
if (not ConfigureServeService()) { return EXIT_FAILURE; diff --git a/test/utils/test_env.hpp b/test/utils/test_env.hpp index 30eba26ce..ec03a9eea 100644 --- a/test/utils/test_env.hpp +++ b/test/utils/test_env.hpp @@ -23,7 +23,7 @@ #include #include "src/buildtool/auth/authentication.hpp" -#include "src/buildtool/compatibility/compatibility.hpp" +#include "src/buildtool/common/protocol_traits.hpp" #include "test/utils/logging/log_config.hpp" [[nodiscard]] static inline auto ReadPlatformPropertiesFromEnv() @@ -40,10 +40,12 @@ return properties; } -static inline void ReadCompatibilityFromEnv() { - auto* compatible = std::getenv("COMPATIBLE"); - if (compatible != nullptr) { - Compatibility::SetCompatible(); +[[nodiscard]] static inline auto ReadCompatibilityFromEnv() noexcept + -> std::optional { + try { + return std::getenv("COMPATIBLE") != nullptr; + } catch (...) { + return std::nullopt; } }