diff --git a/WORKSPACE b/WORKSPACE index b8b61b3ed..cb24cfb8a 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -23,3 +23,25 @@ rules_proto_toolchains() load("@build_bazel_apple_support//crosstool:setup.bzl", "apple_cc_configure") apple_cc_configure() + +# Define LLVM toolchain used for extracting C++ API documentation information +load("@toolchains_llvm//toolchain:rules.bzl", "llvm_toolchain") + +llvm_toolchain( + name = "llvm_toolchain", + # https://github.com/bazel-contrib/toolchains_llvm/blob/master/toolchain/internal/llvm_distributions.bzl + llvm_versions = { + # Note: Older versions are built against older glibc, which is needed + # for compatibility with manylinux containers. + "": "15.0.6", + "darwin-aarch64": "15.0.7", + "darwin-x86_64": "15.0.7", + }, + extra_target_compatible_with = { + "": ["@//docs:docs_toolchain_value"], + }, +) + +load("@llvm_toolchain//:toolchains.bzl", "llvm_register_toolchains") + +llvm_register_toolchains() diff --git a/docs/BUILD b/docs/BUILD index d67d51fb4..fbcf37ead 100644 --- a/docs/BUILD +++ b/docs/BUILD @@ -1,11 +1,35 @@ +load("@bazel_skylib//rules:common_settings.bzl", "bool_flag") load("//bazel:pytest.bzl", "tensorstore_pytest_test") load("//bazel:pytype.bzl", "pytype_strict_binary", "pytype_strict_test") +load("//bazel:tensorstore.bzl", "tensorstore_cc_library") +load("//docs:defs.bzl", "cc_preprocessed_output") load("//docs:doctest.bzl", "doctest_test") package(default_visibility = ["//visibility:public"]) licenses(["notice"]) +# To exclude the Python API documentation from the generation documentation, +# specify: +# +# bazel build --//docs:exclude_python_api +# +# This significantly speeds up the documentation build since the Python +# extension module (which has a large dependency tree) does not have to be +# built. 
+bool_flag( + name = "exclude_python_api", + build_setting_default = False, +) + +config_setting( + name = "exclude_python_api_setting", + flag_values = { + ":exclude_python_api": "True", + }, + visibility = ["//visibility:private"], +) + filegroup( name = "doc_sources", srcs = [ @@ -50,12 +74,17 @@ pytype_strict_test( # Keep going after the first warning. "--keep-going", ], - data = [":doc_sources"] + glob( + data = [ + "cpp_api.json", + ":doc_sources", + "@pypa_clang_format//:clang-format_binary", + ] + glob( ["cached_external_resources/**"], allow_empty = True, ), env = { "TENSORSTORE_SPECIAL_CPU_USER_LIMITS": "forge-00=4", + "SPHINX_CLANG_FORMAT": "$(location @pypa_clang_format//:clang-format_binary)", }, python_version = "PY3", tags = [ @@ -66,11 +95,14 @@ pytype_strict_test( ], deps = [ "//docs/tensorstore_sphinx_ext:doctest", - "//python/tensorstore", + "@pypa_libclang//:libclang", # buildcleaner: keep "@pypa_pyyaml//:pyyaml", # buildcleaner: keep "@pypa_sphinx//:sphinx", "@pypa_sphinx_immaterial//:sphinx_immaterial", - ], + ] + select({ + ":exclude_python_api_setting": [], + "//conditions:default": ["//python/tensorstore:core"], + }), ) pytype_strict_binary( @@ -113,3 +145,131 @@ doctest_test( name = "doctest_test", srcs = glob(["python/**/*.rst"]), ) + +tensorstore_cc_library( + name = "cpp_api_include", + testonly = True, + srcs = ["cpp_api_include.cc"], + copts = [ + # Generated preprocessed output rather than object file. + "-E", + # Retain comments. + "-C", + # GCC/clang flag to output macro definitions. 
+ "-dD", + ], + features = [ + "-use_header_modules", + "-layering_check", + ], + linkstatic = True, + local_defines = [ + "TENSORSTORE_CPP_DOC_GENERATION", + ], + tags = ["manual"], + deps = [ + "//tensorstore", + "//tensorstore:array", + "//tensorstore:cast", + "//tensorstore:data_type", + "//tensorstore:downsample", + "//tensorstore:open", + "//tensorstore:rank", + "//tensorstore/index_space:alignment", + "//tensorstore/index_space:dim_expression", + "//tensorstore/index_space:index_transform", + "//tensorstore/index_space:transformed_array", + "//tensorstore/util:byte_strided_pointer", + "//tensorstore/util:element_pointer", + "//tensorstore/util:element_traits", + "//tensorstore/util:future", + "//tensorstore/util:result", + "//tensorstore/util:status_testutil", + ], +) + +# Define a special constraint_setting that will be matched in order to select a +# hermetic clang toolchain for preprocessing the C++ API headers used for the +# API documentation. This is used when the default toolchain is not clang. +# +# The C++ API documentation is extracted using libclang. libstdc++ (typically +# used on Linux) is compatible with clang but only when preprocessed with clang. +# If it is first preprocessed by GCC, then the resultant output contains +# GCC-specific builtins, etc. and is not compatible with libclang. 
+constraint_setting( + name = "docs_toolchain_setting", + visibility = ["//visibility:public"], +) + +constraint_value( + name = "docs_toolchain_value", + constraint_setting = ":docs_toolchain_setting", + visibility = ["//visibility:public"], +) + +platform( + name = "docs_toolchain_platform", + constraint_values = [":docs_toolchain_value"], + parents = ["@platforms//host"], + visibility = ["//visibility:public"], +) + +cc_preprocessed_output( + name = "cpp_api_preprocessed.cc", + testonly = True, + cpp_compiler_constraint = select({ + "//:compiler_clang": "@platforms//host", + "//conditions:default": ":docs_toolchain_platform", + }), + flags_output = "compiler_flags.json", + tags = ["manual"], + target = ":cpp_api_include", +) + +pytype_strict_binary( + name = "generate_cpp_api", + srcs = ["generate_cpp_api.py"], + deps = [ + "@pypa_libclang//:libclang", # buildcleaner: keep + "@pypa_sphinx_immaterial//:sphinx_immaterial", + ], +) + +pytype_strict_binary( + name = "cpp_api_shell", + testonly = True, + srcs = ["generate_cpp_api.py"], + args = [ + "--source=$(location :cpp_api_preprocessed.cc)", + "--flags-file=$(location :compiler_flags.json)", + "--interactive", + ], + data = [ + ":compiler_flags.json", + ":cpp_api_preprocessed.cc", + ], + main = "generate_cpp_api.py", + tags = ["manual"], + deps = [ + "@pypa_libclang//:libclang", # buildcleaner: keep + "@pypa_sphinx_immaterial//:sphinx_immaterial", + ], +) + +genrule( + name = "genrule_cpp_api.json", + testonly = True, + srcs = [ + ":cpp_api_preprocessed.cc", + ":compiler_flags.json", + ], + outs = ["cpp_api.json"], + cmd = ("$(location :generate_cpp_api) " + + "--source=$(location :cpp_api_preprocessed.cc) " + + "--flags-file=$(location :compiler_flags.json) " + + "--output=$@"), + tags = ["manual"], + tools = [ + ":generate_cpp_api", + ], +) diff --git a/docs/build_docs.py b/docs/build_docs.py index 90363d8d5..0b763b593 100644 --- a/docs/build_docs.py +++ b/docs/build_docs.py @@ -21,6 +21,7 @@ import pathlib 
import platform import re +import shutil import sys import tempfile from typing import List @@ -182,7 +183,7 @@ def _write_third_party_libraries_summary(runfiles_dir: str, output_path: str): @contextlib.contextmanager -def _prepare_source_tree(runfiles_dir: str): +def _prepare_source_tree(runfiles_dir: str, excluded: List[str]): with tempfile.TemporaryDirectory() as temp_src_dir: _write_third_party_libraries_summary( runfiles_dir=runfiles_dir, @@ -238,6 +239,18 @@ def create_symlinks(source_dir, target_dir): zipfile.ZipFile(cache_zip).extractall(zip_path) os.environ[cache_env_key] = zip_path + for excluded_glob in excluded: + if excluded_glob.startswith('/'): + excluded_glob = excluded_glob[1:] + matching_paths = glob.glob( + os.path.join(temp_src_dir, excluded_glob), recursive=True + ) + matching_paths.reverse() + for matching_path in matching_paths: + if os.path.islink(matching_path): + os.remove(matching_path) + else: + shutil.rmtree(matching_path) yield temp_src_dir @@ -294,7 +307,9 @@ def run(args: argparse.Namespace, unknown: List[str]): os.getenv('BUILD_WORKING_DIRECTORY', os.getcwd()), args.output ) os.makedirs(output_dir, exist_ok=True) - with _prepare_source_tree(runfiles_dir) as temp_src_dir: + with _prepare_source_tree( + runfiles_dir, excluded=args.exclude + ) as temp_src_dir: # Use a separate temporary directory for the doctrees, since we don't want # them mixed into the output directory. with tempfile.TemporaryDirectory() as doctree_dir: diff --git a/docs/conf.py b/docs/conf.py index c57575b81..e3cf95c05 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -13,12 +13,16 @@ # limitations under the License. 
"""Sphinx configuration for TensorStore.""" +import importlib +import os +import sys from typing import NamedTuple, Optional import docutils.nodes import sphinx.addnodes import sphinx.domains.python import sphinx.environment +import sphinx.util.parallel project = 'TensorStore' copyright = '2020 The TensorStore Authors' # pylint: disable=redefined-builtin @@ -54,6 +58,7 @@ 'sphinx_immaterial.apidoc.cpp.cppreference', 'sphinx_immaterial.apidoc.json.domain', 'sphinx_immaterial.apidoc.python.apigen', + 'sphinx_immaterial.apidoc.cpp.apigen', ] exclude_patterns = [ @@ -149,6 +154,10 @@ :language: python :class: highlight +.. role:: cpp(code) + :language: cpp + :class: highlight + .. role:: json(code) :language: json :class: highlight @@ -157,6 +166,24 @@ # Warn about missing references nitpicky = True +# The Sphinx C++ domain generates bogus undefined reference warnings for every +# C++ namespace that is mentioned in the documentation. All such namespaces need +# to be listed here in order to silence the warnings. 
+nitpick_ignore = [ + ('cpp:identifier', 'tensorstore'), + ('cpp:identifier', '::tensorstore'), + ('cpp:identifier', 'tensorstore::kvstore'), + ('cpp:identifier', 'tensorstore::dtypes'), + ('cpp:identifier', 'kvstore'), + ('cpp:identifier', 'absl'), + ('cpp:identifier', 'std'), + ('cpp:identifier', '::std'), + ('cpp:identifier', 'nlohmann'), + ('cpp:identifier', '::nlohmann'), + ('cpp:identifier', 'half_float'), + ('cpp:identifier', '::half_float'), +] + default_role = 'any' # Extension options @@ -186,6 +213,8 @@ napoleon_use_admonition_for_examples = True napoleon_use_admonition_for_notes = True +object_description_options = [] + json_schemas = [ '*schema.yml', '**/*schema.yml', @@ -268,30 +297,258 @@ class TypeXrefTarget(NamedTuple): ), } -_orig_python_type_to_xref = sphinx.domains.python.type_to_xref - - -def _python_type_to_xref( - target: str, - env: Optional[sphinx.environment.BuildEnvironment] = None, - suppress_prefix: bool = False, -) -> sphinx.addnodes.pending_xref: - xref_info = python_type_to_xref_mappings.get(target) - if xref_info is not None: - return sphinx.addnodes.pending_xref( - '', - docutils.nodes.Text(xref_info.title), - refdomain=xref_info.domain, - reftype=xref_info.reftype, - reftarget=xref_info.target, - refspecific=False, - refexplicit=True, - refwarn=True, - ) - return _orig_python_type_to_xref(target, env, suppress_prefix) - - -sphinx.domains.python.type_to_xref = _python_type_to_xref + +def _monkey_patch_type_to_xref(): + _orig_python_type_to_xref = sphinx.domains.python.type_to_xref + + def _python_type_to_xref( + target: str, + env: Optional[sphinx.environment.BuildEnvironment] = None, + *args, + **kwargs, + ) -> sphinx.addnodes.pending_xref: + xref_info = python_type_to_xref_mappings.get(target) + if xref_info is not None: + return sphinx.addnodes.pending_xref( + '', + docutils.nodes.Text(xref_info.title), + refdomain=xref_info.domain, + reftype=xref_info.reftype, + reftarget=xref_info.target, + refspecific=False, + 
refexplicit=True, + refwarn=True, + ) + return _orig_python_type_to_xref(target, env, *args, **kwargs) + + for modname in [ + 'sphinx.domains.python', + # In newer sphinx versions, `type_to_xref` is actually defined in + # `sphinx.domains.python._annotations`, and must be overridden there as + # well. + 'sphinx.domains.python._annotations', + ]: + module = sys.modules.get(modname) + if module is None: + continue + if getattr(module, 'type_to_xref', None) is _orig_python_type_to_xref: + setattr(module, 'type_to_xref', _python_type_to_xref) + + +_monkey_patch_type_to_xref() + +external_cpp_references = { + 'nlohmann::json': { + 'url': 'https://json.nlohmann.me/api/json/', + 'object_type': 'type alias', + 'desc': 'C++ type alias', + }, + 'nlohmann::basic_json': { + 'url': 'https://json.nlohmann.me/api/basic_json/', + 'object_type': 'class', + 'desc': 'C++ class', + }, + 'half_float::half': { + 'url': 'http://half.sourceforge.net/classhalf__float_1_1half.html', + 'object_type': 'class', + 'desc': 'C++ class', + }, + 'absl::Status': { + 'url': 'https://abseil.io/docs/cpp/guides/status', + 'object_type': 'class', + 'desc': 'C++ class', + }, + 'absl::StatusOr': { + 'url': 'https://abseil.io/docs/cpp/guides/statuss#returning-a-status-or-a-value', + 'object_type': 'class', + 'desc': 'C++ class', + }, + 'absl::OkStatus': { + 'url': 'https://abseil.io/docs/cpp/guides/status', + 'object_type': 'function', + 'desc': 'C++ function', + }, + 'absl::StatusCode': { + 'url': 'https://abseil.io/docs/cpp/guides/status-codes', + 'object_type': 'enum', + 'desc': 'C++ enumeration', + }, + 'absl::Time': { + 'url': 'https://abseil.io/docs/cpp/guides/time#absolute-times-with-absltime', + 'object_type': 'class', + 'desc': 'C++ class', + }, + 'absl::InfiniteFuture': { + 'url': 'https://abseil.io/docs/cpp/guides/time#absolute-times-with-absltime', + 'object_type': 'function', + 'desc': 'C++ function', + }, + 'absl::InfinitePast': { + 'url': 
'https://abseil.io/docs/cpp/guides/time#absolute-times-with-absltime', + 'object_type': 'function', + 'desc': 'C++ function', + }, + 'absl::Now': { + 'url': 'https://abseil.io/docs/cpp/guides/time#absolute-times-with-absltime', + 'object_type': 'function', + 'desc': 'C++ function', + }, + 'absl::Duration': { + 'url': 'https://abseil.io/docs/cpp/guides/time#time-durations', + 'object_type': 'class', + 'desc': 'C++ class', + }, + 'absl::Cord': { + 'url': 'https://github.com/abseil/abseil-cpp/blob/master/absl/strings/cord.h', + 'object_type': 'class', + 'desc': 'C++ class', + }, + 'absl::AnyInvocable': { + 'url': 'https://github.com/abseil/abseil-cpp/blob/master/absl/functional/any_invocable.h', + 'object_type': 'class', + 'desc': 'C++ class', + }, +} + +for code in [ + 'kOk', + 'kCancelled', + 'kUnknown', + 'kInvalidArgument', + 'kNotFound', + 'kAlreadyExists', + 'kPermissionDenied', + 'kResourceExhausted', + 'kFailedPrecondition', + 'kAborted', + 'kOutOfRange', + 'kUnimplemented', + 'kInternal', + 'kUnavailable', + 'kDataLoss', + 'kUnauthenticated', +]: + external_cpp_references[f'absl::StatusCode::{code}'] = { + 'url': 'https://abseil.io/docs/cpp/guides/status-codes', + 'object_type': 'enumerator', + 'desc': 'C++ enumerator', + } + +html_wrap_signatures_with_css = ['py'] + +object_description_options.append(( + '(cpp|c):.*', + dict( + clang_format_style={ + 'BasedOnStyle': 'Google', + 'AlignAfterOpenBracket': 'Align', + 'AlignOperands': 'AlignAfterOperator', + 'AllowAllArgumentsOnNextLine': 'true', + 'AllowAllParametersOfDeclarationOnNextLine': 'false', + 'AlwaysBreakTemplateDeclarations': 'Yes', + 'BinPackArguments': 'true', + 'BinPackParameters': 'false', + 'BreakInheritanceList': 'BeforeColon', + 'ColumnLimit': '70', + 'ContinuationIndentWidth': '4', + 'Cpp11BracedListStyle': 'true', + 'DerivePointerAlignment': 'false', + 'IndentRequiresClause': 'true', + 'IndentWidth': '2', + 'IndentWrappedFunctionNames': 'false', + 'InsertBraces': 'false', + 
'InsertTrailingCommas': 'None', + 'PointerAlignment': 'Left', + 'QualifierAlignment': 'Leave', + 'ReferenceAlignment': 'Pointer', + 'RemoveBracesLLVM': 'false', + 'RequiresClausePosition': 'OwnLine', + 'Standard': 'c++20', + 'PenaltyReturnTypeOnItsOwnLine': '1', + 'PenaltyBreakBeforeFirstCallParameter': '2', + 'PenaltyBreakAssignment': '3', + 'SpaceBeforeParens': 'Custom', + 'SpaceBeforeParensOptions': { + 'AfterRequiresInClause': 'true', + }, + } + ), +)) + +clang_format_command = os.environ.get('SPHINX_CLANG_FORMAT', 'clang-format') + +cpp_strip_namespaces_from_signatures = ['tensorstore'] + +cpp_apigen_configs = [ + { + 'document_prefix': 'cpp/api/', + # Generated by generate_cpp_api.py + 'api_data': 'cpp_api.json', + }, +] + +cpp_apigen_rst_prolog = """ +.. default-role:: cpp:expr + +.. default-literal-role:: cpp + +.. highlight:: cpp + +""" + + +# Workaround for Sphinx parallel build inefficiency with large number of +# documents. +# +# This ensures that there is one worker per batch, and all workers are forked +# immediately at the start of reading/writing documents, such that the forked +# BuildEnvironment in each worker does not contain any partial results from +# previously-finished batches. 
+# +# https://github.com/sphinx-doc/sphinx/issues/10967 +def _monkey_patch_parallel_maxbatch(): + orig_make_chunks = sphinx.util.parallel.make_chunks + + orig_add_task = sphinx.util.parallel.ParallelTasks.add_task + orig_join_one = sphinx.util.parallel.ParallelTasks._join_one + orig_join = sphinx.util.parallel.ParallelTasks.join + + def add_task(self, *args, **kwargs): + try: + self._in_add_task = True + return orig_add_task(self, *args, **kwargs) + finally: + self._in_add_task = False + + sphinx.util.parallel.ParallelTasks.add_task = add_task + + def _join_one(self) -> bool: + if getattr(self, '_in_add_task', False): + return False + return orig_join_one(self) + + def join(self): + orig_join_one(self) + return orig_join(self) + + sphinx.util.parallel.ParallelTasks.join = join + + sphinx.util.parallel.ParallelTasks._join_one = _join_one + + def make_chunks(arguments, nproc: int, maxbatch: int = 10000000): + chunks = orig_make_chunks(arguments, nproc - 1, maxbatch) + return chunks + + for modname in [ + 'sphinx.util.parallel', + 'sphinx.builders', + ]: + module = importlib.import_module(modname) + if getattr(module, 'make_chunks', None) is orig_make_chunks: + setattr(module, 'make_chunks', make_chunks) + + +_monkey_patch_parallel_maxbatch() def setup(app): diff --git a/docs/cpp/api/index.rst b/docs/cpp/api/index.rst new file mode 100644 index 000000000..2d7553d88 --- /dev/null +++ b/docs/cpp/api/index.rst @@ -0,0 +1,114 @@ +.. _cpp-api: + +API reference +============= + +.. _cpp-api-core: + +Core +---- + +.. cpp-apigen-group:: core + +.. _cpp-api-indexing: + +Indexing +-------- + +.. cpp-apigen-group:: indexing + +.. _cpp-api-index-domain-alignment: + +Alignment +^^^^^^^^^ + +.. cpp-apigen-group:: index-domain-alignment + +.. _cpp-api-data-types: + +Data types +---------- + +.. cpp-apigen-group:: data-types + +.. _cpp-api-array: + +Array +----- + +.. cpp-apigen-group:: array + +.. _cpp-api-array-transformation: + +Transformation +^^^^^^^^^^^^^^ + +.. 
cpp-apigen-group:: array-transformation + +Iteration +^^^^^^^^^ + +.. cpp-apigen-group:: array-iteration + +.. _cpp-api-kvstore: + +Key-value storage +----------------- + +.. cpp-apigen-group:: kvstore + +.. _cpp-api-downsample: + +Downsampling +------------ + +.. cpp-apigen-group:: downsample + +.. _cpp-api-utilities: + +Utilities +--------- + +.. cpp-apigen-group:: utilities + +.. _cpp-api-error-handling: + +Error handling +^^^^^^^^^^^^^^ + +.. cpp-apigen-group:: error-handling + +.. _cpp-api-async: + +Asynchronous support +^^^^^^^^^^^^^^^^^^^^ + +.. cpp-apigen-group:: async + +.. _cpp-api-json: + +JSON serialization +^^^^^^^^^^^^^^^^^^ + +.. cpp-apigen-group:: json + +.. _cpp-api-compile-time-constraints: + +Compile-time data type/rank/mode constraints +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. cpp-apigen-group:: compile-time-constraints + +.. _cpp-api-string-utilities: + +String Utilities +^^^^^^^^^^^^^^^^ + +.. cpp-apigen-group:: string-utilities + +.. _cpp-api-index-vectors: + +Index vectors +^^^^^^^^^^^^^ + +.. cpp-apigen-group:: index-vectors diff --git a/docs/cpp_api_include.cc b/docs/cpp_api_include.cc new file mode 100644 index 000000000..c2615acab --- /dev/null +++ b/docs/cpp_api_include.cc @@ -0,0 +1,40 @@ +// Copyright 2022 The TensorStore Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "tensorstore/array.h" +#include "tensorstore/box.h" +#include "tensorstore/cast.h" +#include "tensorstore/context.h" +#include "tensorstore/data_type.h" +#include "tensorstore/downsample.h" +#include "tensorstore/index_interval.h" +#include "tensorstore/index_space/alignment.h" +#include "tensorstore/index_space/dim_expression.h" +#include "tensorstore/index_space/index_domain.h" +#include "tensorstore/index_space/index_transform.h" +#include "tensorstore/index_space/transformed_array.h" +#include "tensorstore/kvstore/kvstore.h" +#include "tensorstore/kvstore/operations.h" +#include "tensorstore/kvstore/spec.h" +#include "tensorstore/open.h" +#include "tensorstore/rank.h" +#include "tensorstore/strided_layout.h" +#include "tensorstore/tensorstore.h" +#include "tensorstore/util/byte_strided_pointer.h" +#include "tensorstore/util/element_pointer.h" +#include "tensorstore/util/element_traits.h" +#include "tensorstore/util/future.h" +#include "tensorstore/util/result.h" +#include "tensorstore/util/span.h" +#include "tensorstore/util/status_testutil.h" diff --git a/docs/defs.bzl b/docs/defs.bzl new file mode 100644 index 000000000..cf00095cd --- /dev/null +++ b/docs/defs.bzl @@ -0,0 +1,100 @@ +load("@bazel_tools//tools/build_defs/cc:action_names.bzl", "CPP_COMPILE_ACTION_NAME") +load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain") + +# Aspect for extracting the C++ compiler flags that apply to a target. 
+CompilationAspect = provider() + +def _compilation_flags_aspect_impl(target, ctx): + cc_toolchain = find_cpp_toolchain(ctx) + feature_configuration = cc_common.configure_features( + ctx = ctx, + cc_toolchain = cc_toolchain, + requested_features = ctx.features, + unsupported_features = ctx.disabled_features, + ) + compile_variables = cc_common.create_compile_variables( + feature_configuration = feature_configuration, + cc_toolchain = cc_toolchain, + user_compile_flags = ctx.fragments.cpp.cxxopts + + ctx.fragments.cpp.copts, + add_legacy_cxx_options = True, + ) + compiler_options = cc_common.get_memory_inefficient_command_line( + feature_configuration = feature_configuration, + action_name = CPP_COMPILE_ACTION_NAME, + variables = compile_variables, + ) + return [CompilationAspect(compiler_options = compiler_options)] + +compilation_flags_aspect = aspect( + attrs = { + "_cc_toolchain": attr.label( + default = Label("@bazel_tools//tools/cpp:current_cc_toolchain"), + ), + "_xcode_config": attr.label(default = Label("@bazel_tools//tools/osx:current_xcode_config")), + }, + fragments = ["cpp"], + provides = [CompilationAspect], + toolchains = ["@bazel_tools//tools/cpp:toolchain_type"], + implementation = _compilation_flags_aspect_impl, +) + +# Rule for transitioning platform in order to switch to a hermetic clang toolchain. +def _compiler_transition_impl(settings, attr): + cpp_compiler_constraint = attr.cpp_compiler_constraint + if cpp_compiler_constraint == None: + return {} + return {"//command_line_option:platforms": str(cpp_compiler_constraint)} + +compiler_transition = transition( + implementation = _compiler_transition_impl, + inputs = [], + outputs = ["//command_line_option:platforms"], +) + +def _cc_preprocessed_output_impl(ctx): + target = ctx.attr.target + + # When a transition is specified for an attribute, the attribute always + # becomes a list. 
+ # https://bazel.build/extending/config#accessing-attributes-with-transitions + if type(target) == type([]): + target = target[0] + + compilation_outputs = target.output_groups.compilation_outputs.to_list() + if len(compilation_outputs) != 1: + fail("More than one compilation output: ", compilation_outputs) + out = ctx.actions.declare_file(ctx.label.name) + ctx.actions.symlink(output = out, target_file = compilation_outputs[0]) + ctx.actions.write(output = ctx.outputs.flags_output, content = json.encode(target[CompilationAspect].compiler_options)) + return [DefaultInfo( + files = depset([out]), + runfiles = ctx.runfiles(files = [out]), + )] + +# Collects the preprocessed output of a C++ source file and the compilation +# flags in JSON format. +# +# The `target` attribute must refer to a `cc_library` label that specifies +# `copts=["-E"]` (among others) to ensure preprocssed output rather than object +# file output. +# +# The default output of the new target defined by this rule will be a symlink to +# the preprocessed output of `target`. The additional output file named by the +# `flags_output` attribute will be a JSON file containing a single array of +# strings specifying the compilation flags that would be used to build `target`. 
+cc_preprocessed_output = rule( + implementation = _cc_preprocessed_output_impl, + attrs = { + "target": attr.label( + aspects = [compilation_flags_aspect], + cfg = compiler_transition, + ), + "flags_output": attr.output(doc = "Name of the json file to which the compiler flags are written."), + "cpp_compiler_constraint": attr.label(), + "_allowlist_function_transition": attr.label( + default = "@bazel_tools//tools/allowlists/function_transition_allowlist", + ), + }, + provides = [DefaultInfo], +) diff --git a/docs/generate_cpp_api.py b/docs/generate_cpp_api.py new file mode 100644 index 000000000..a33b7c36e --- /dev/null +++ b/docs/generate_cpp_api.py @@ -0,0 +1,178 @@ +# Copyright 2022 The TensorStore Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates the JSON description of the C++ API from the preprocesed headers. + +This takes as input a single preprocessed C++ header file and uses the +sphinx-immaterial C++ API parser to generate a JSON API description, that is +then consumed during the documentation build by the sphinx-immaterial cpp apigen +extension. + +The input is typically the `:cpp_api_preprocessed.cc` Bazel target, which is +generated from the `cpp_api_include.cc` header file. + +To add additional entities to the API documentation, ensure that they are +transitively included by `cpp_api_include.cc` and not excluded by any the +configuration options below. + +Entities are filtered by original source path and namespace. 
+ +While the sphinx-immaterial cpp apigen extension supports generating the JSON +API description during the documentation build, this separate script is used in +order to improve incremental build efficiency (the JSON API description does not +need to be rebuilt in some cases). + +API changes or additions can often result in undefined reference warnings from +the Sphinx C++ domain. There are 3 types of warnings: + +- References to namespaces: these are always bogus and just due to a limitation + in the Sphinx C++ domain. They should be listed in `nitpick_ignore` in + `conf.py`. + +- References to internal names (such as private class members), commonly via a + return type or SFINAE condition, that should not be exposed in the + documentation. The internal name should be renamed to match one of the + exclusions specified below. + +- References to names that should be exposed but cannot (currently) be resolved + by the Sphinx C++ domain, e.g. nested type aliases like + `std::remove_cvref_t::X`. These warnings should be silenced by adding a: + + // NONITPICK: std::remove_cvref_t::X + + comment in the source code. Due to how such comments are currently extracted, + the comment must appear between the first and last token of the entity to + which it applies. A common location is immediately after the template + parameters. 
+""" + +import argparse +import code +import json +import pathlib + +from sphinx_immaterial.apidoc.cpp import api_parser + + +def main(): + ap = argparse.ArgumentParser() + ap.add_argument("--source", required=True) + ap.add_argument("--flags-file", required=True) + ap.add_argument("--output") + ap.add_argument("--verbose", action="store_true") + ap.add_argument("--interactive", action="store_true") + + args = ap.parse_args() + + if args.interactive: + args.verbose = True + + config = api_parser.Config( # type: ignore[wrong-arg-types] + input_path=args.source, + compiler_flags=json.loads( + pathlib.Path(args.flags_file).read_text(encoding="utf-8") + ) + + [ + # Due to https://github.com/bazelbuild/bazel/issues/14764, this is not + # picked up from .bazelrc. + "-std=c++17" + ], + include_directory_map={"./": ""}, + # Only entities whose original source path matches any of these regular + # expressions are included in the API documentation. + allow_paths=[ + "^tensorstore/.*", + ], + # Entities whose original source path matches any of these regular + # expressions are excluded from the API documentation. This takes + # precedence over `allow_paths`. + disallow_paths=[ + "/internal/", + "^tensorstore/util/execution/", + r"^tensorstore/util/division\.h$", + r"^tensorstore/util/constant_vector\.h$", + r"^tensorstore/util/garbage_collection/", + r"^tensorstore/serialization/", + r"^tensorstore/util/apply_members/", + ], + # Namespace names (not fully qualified) matching any of these regular + # expressions are excluded from the API documentation. + disallow_namespaces=[ + "^internal($|_)", + "^execution", + ], + # Macros matching any of these regular expressions are excluded from the + # API documentation. + disallow_macros=[ + "^TENSORSTORE_INTERNAL_", + ], + ignore_diagnostics=[ + "__builtin_", + ], + # Initializers of variables and variable templates that match any of these + # regular expressions will be elided. 
+ hide_initializers=[ + r"^=\s*(?:(true|false)\s*$|\[)", + ], + # Return types, SFINAE terms, and initializer expressions that match any + # of these regular expressions will be elided. Return types will be shown + # as `auto`. + hide_types=[ + r"(\b|_)internal(\b|_)", + r"\bdecltype\b", + r"\bpoly::", + r"\bStaticCastTraitsType\b", + r"\bDataTypeConversionTraits\b", + r"Impl\b", + ], + # Specifies type substitutions. + # + # The key specifies the type as it appears in the source code, not + # necessarily fully qualified. The value specifies the substitution to + # display in the documentation. + type_replacements={ + "absl::remove_cvref_t": "std::remove_cvref_t", + "tensorstore::internal::type_identity_t": "std::type_identity_t", + "internal::type_identity_t": "std::type_identity_t", + "SourceLocation": "std::source_location", + "tensorstore::SourceLocation": "std::source_location", + "absl::weak_ordering": "std::weak_ordering", + }, + verbose=args.verbose, + ) + + if args.interactive: + + extractor = api_parser.Extractor(config) + generator = api_parser.JsonApiGenerator(extractor) + + ns = { + "config": config, + "extractor": extractor, + "generator": generator, + "api_parser": api_parser, + } + code.interact(local=ns) + return + + output_json = api_parser.generate_output(config) + + if args.output: + pathlib.Path(args.output).write_text( + json.dumps(output_json), encoding="utf-8" + ) + + +if __name__ == "__main__": + main() diff --git a/docs/index.rst b/docs/index.rst index 9aca38d0f..404ec7582 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -11,6 +11,12 @@ TensorStore python/indexing python/api/index +.. toctree:: + :hidden: + :caption: C++ API + + cpp/api/index + .. 
toctree:: :hidden: :caption: Setup diff --git a/third_party/pypa/create_init_files.py b/third_party/pypa/create_init_files.py deleted file mode 100644 index 41f4f9339..000000000 --- a/third_party/pypa/create_init_files.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2023 The TensorStore Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Script for creating __init__.py files. - -This is invoked by `//third_party/repo.bzl:third_party_python_package`. - -To support namespace packages such as sphinxcontrib, __init__.py files must be -written as well, as in: -https://github.com/bazelbuild/rules_python/commit/5f78b4a04a50d660ec346df1a1ab76b02130c304 -""" - -import os -import pathlib -import re - -module_pattern = r"(\.py|\.so|\.pyd)$" - -all_paths = set(str(x) for x in pathlib.Path(".").glob("**/*")) - -init_paths = set() - -for name in all_paths: - if not re.search(module_pattern, name): - continue - while os.path.sep in name: - name = os.path.dirname(name) - init_py = os.path.join(name, "__init__.py") - if init_py not in all_paths: - init_paths.add(init_py) - -INITPY_CONTENTS = ''' -try: - import pkg_resources - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - __path__ = pkgutil.extend_path(__path__, __name__) -''' - -for init_path in init_paths: - pathlib.Path(init_path).write_text(INITPY_CONTENTS) diff --git a/third_party/pypa/docs_requirements.txt b/third_party/pypa/docs_requirements.txt index 0072fd58b..67d0e192e 100644 --- 
a/third_party/pypa/docs_requirements.txt +++ b/third_party/pypa/docs_requirements.txt @@ -1,7 +1,10 @@ # Packages required to build the documentation. -# Exclude sphinx 6.1 due to https://github.com/sphinx-doc/sphinx/issues/11163 -sphinx<6.1 -sphinx-immaterial +# Limit sphinx version due to https://github.com/jbms/sphinx-immaterial/pull/395 +sphinx<7.4 +sphinx-immaterial>=0.12.4 +libclang +clang-format +black jsonschema pyyaml markupsafe diff --git a/third_party/pypa/docs_requirements_frozen.txt b/third_party/pypa/docs_requirements_frozen.txt index 36dc0f7b1..3e073303e 100644 --- a/third_party/pypa/docs_requirements_frozen.txt +++ b/third_party/pypa/docs_requirements_frozen.txt @@ -1,6 +1,9 @@ # DO NOT EDIT: Generated from docs_requirements.txt by pypi_solver -sphinx==6.0.1 +sphinx==7.3.7 sphinx-immaterial==0.12.4 +libclang==18.1.1 +clang-format==19.1.2 +black==24.10.0 jsonschema==4.23.0 pyyaml==6.0.2 markupsafe==3.0.2 diff --git a/third_party/pypa/postinstall_fix.py b/third_party/pypa/postinstall_fix.py new file mode 100644 index 000000000..3e5bd9d61 --- /dev/null +++ b/third_party/pypa/postinstall_fix.py @@ -0,0 +1,88 @@ +# Copyright 2023 The TensorStore Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Script for creating __init__.py files and entry point scripts. + +This is invoked by `//third_party/repo.bzl:third_party_python_package`. 
+ +To support namespace packages such as sphinxcontrib, __init__.py files must be +written as well, as in: +https://github.com/bazelbuild/rules_python/commit/5f78b4a04a50d660ec346df1a1ab76b02130c304 + +This re-creates the entry point scripts as Python scripts on all platforms, +since `pip install` creates them as executables on Windows. +""" + +import configparser +import os +import pathlib +import re + + +def create_init_files(): + + module_pattern = r"(\.py|\.so|\.pyd)$" + + all_paths = set(str(x) for x in pathlib.Path(".").glob("**/*")) + + init_paths = set() + + for name in all_paths: + if not re.search(module_pattern, name): + continue + while os.path.sep in name: + name = os.path.dirname(name) + init_py = os.path.join(name, "__init__.py") + if init_py not in all_paths: + init_paths.add(init_py) + + INITPY_CONTENTS = """ +import pkgutil +__path__ = pkgutil.extend_path(__path__, __name__) +""" + + for init_path in init_paths: + pathlib.Path(init_path).write_text(INITPY_CONTENTS) + + +# See https://packaging.python.org/en/latest/specifications/entry-points/ +class CaseSensitiveConfigParser(configparser.ConfigParser): + optionxform = staticmethod(str) + + +def create_entrypoint_scripts(): + config = CaseSensitiveConfigParser() + config.read( + pathlib.Path(".").glob("*.dist-info/entry_points.txt"), encoding="utf-8" + ) + if "console_scripts" not in config: + return + dirname = "console_scripts_for_bazel" + os.makedirs(dirname, exist_ok=True) + for key, value in config["console_scripts"].items(): + if key.endswith(".py"): + key = key[:-3] + module_name, func_name = value.split(":", 2) + (pathlib.Path(dirname) / (key + ".py")).write_text( + f"""# -*- coding: utf-8 -*- +import sys +import {module_name} as _mod +sys.exit(_mod.{func_name}()) +""" + ) + + +if __name__ == "__main__": + create_init_files() + create_entrypoint_scripts() diff --git a/third_party/pypa/workspace.bzl b/third_party/pypa/workspace.bzl index a89b098c7..d9dfed701 100644 ---
a/third_party/pypa/workspace.bzl +++ b/third_party/pypa/workspace.bzl @@ -18,6 +18,7 @@ def repo(): repo_pypa_aws_sam_translator() repo_pypa_aws_xray_sdk() repo_pypa_babel() + repo_pypa_black() repo_pypa_blinker() repo_pypa_boto3() repo_pypa_botocore() @@ -25,6 +26,7 @@ def repo(): repo_pypa_cffi() repo_pypa_cfn_lint() repo_pypa_charset_normalizer() + repo_pypa_clang_format() repo_pypa_click() repo_pypa_cloudpickle() repo_pypa_colorama() @@ -58,11 +60,13 @@ def repo(): repo_pypa_jsonschema_path() repo_pypa_jsonschema_specifications() repo_pypa_lazy_object_proxy() + repo_pypa_libclang() repo_pypa_markupsafe() repo_pypa_matplotlib_inline() repo_pypa_ml_dtypes() repo_pypa_moto() repo_pypa_mpmath() + repo_pypa_mypy_extensions() repo_pypa_networkx() repo_pypa_numpy() repo_pypa_openapi_schema_validator() @@ -70,6 +74,7 @@ def repo(): repo_pypa_packaging() repo_pypa_parso() repo_pypa_pathable() + repo_pypa_pathspec() repo_pypa_pexpect() repo_pypa_platformdirs() repo_pypa_pluggy() @@ -222,6 +227,30 @@ def repo_pypa_babel(): requirement = "babel==2.16.0", ) +def repo_pypa_black(): + repo_pypa_click() + repo_pypa_mypy_extensions() + repo_pypa_packaging() + repo_pypa_pathspec() + repo_pypa_platformdirs() + repo_pypa_tomli() + repo_pypa_typing_extensions() + maybe( + third_party_python_package, + name = "pypa_black", + target = "black", + requirement = "black==24.10.0", + deps = [ + "@pypa_click//:click", + "@pypa_mypy_extensions//:mypy_extensions", + "@pypa_packaging//:packaging", + "@pypa_pathspec//:pathspec", + "@pypa_platformdirs//:platformdirs", + "@pypa_tomli//:tomli", + "@pypa_typing_extensions//:typing_extensions", + ], + ) + def repo_pypa_blinker(): maybe( third_party_python_package, @@ -314,6 +343,14 @@ def repo_pypa_charset_normalizer(): requirement = "charset-normalizer==3.4.0", ) +def repo_pypa_clang_format(): + maybe( + third_party_python_package, + name = "pypa_clang_format", + target = "clang_format", + requirement = "clang-format==19.1.2", + ) + def 
repo_pypa_click(): maybe( third_party_python_package, @@ -389,7 +426,7 @@ def repo_pypa_docutils(): third_party_python_package, name = "pypa_docutils", target = "docutils", - requirement = "docutils==0.19", + requirement = "docutils==0.21.2", ) def repo_pypa_exceptiongroup(): @@ -694,6 +731,14 @@ def repo_pypa_lazy_object_proxy(): requirement = "lazy-object-proxy==1.10.0", ) +def repo_pypa_libclang(): + maybe( + third_party_python_package, + name = "pypa_libclang", + target = "libclang", + requirement = "libclang==18.1.1", + ) + def repo_pypa_markupsafe(): maybe( third_party_python_package, @@ -792,6 +837,14 @@ def repo_pypa_mpmath(): requirement = "mpmath==1.3.0", ) +def repo_pypa_mypy_extensions(): + maybe( + third_party_python_package, + name = "pypa_mypy_extensions", + target = "mypy_extensions", + requirement = "mypy-extensions==1.0.0", + ) + def repo_pypa_networkx(): maybe( third_party_python_package, @@ -866,6 +919,14 @@ def repo_pypa_pathable(): requirement = "pathable==0.4.3", ) +def repo_pypa_pathspec(): + maybe( + third_party_python_package, + name = "pypa_pathspec", + target = "pathspec", + requirement = "pathspec==0.12.1", + ) + def repo_pypa_pexpect(): repo_pypa_ptyprocess() maybe( @@ -1063,7 +1124,7 @@ def repo_pypa_pyyaml(): third_party_python_package, name = "pypa_pyyaml", target = "pyyaml", - requirement = "pyyaml==6.0.2", + requirement = "PyYAML==6.0.2", ) def repo_pypa_referencing(): @@ -1216,11 +1277,12 @@ def repo_pypa_sphinx(): repo_pypa_sphinxcontrib_jsmath() repo_pypa_sphinxcontrib_qthelp() repo_pypa_sphinxcontrib_serializinghtml() + repo_pypa_tomli() maybe( third_party_python_package, name = "pypa_sphinx", target = "sphinx", - requirement = "sphinx==6.0.1", + requirement = "sphinx==7.3.7", deps = [ "@pypa_alabaster//:alabaster", "@pypa_babel//:babel", @@ -1238,6 +1300,7 @@ def repo_pypa_sphinx(): "@pypa_sphinxcontrib_jsmath//:sphinxcontrib_jsmath", "@pypa_sphinxcontrib_qthelp//:sphinxcontrib_qthelp", 
"@pypa_sphinxcontrib_serializinghtml//:sphinxcontrib_serializinghtml", + "@pypa_tomli//:tomli", ], win32 = [ "@pypa_colorama//:colorama", @@ -1390,7 +1453,7 @@ def repo_pypa_werkzeug(): third_party_python_package, name = "pypa_werkzeug", target = "werkzeug", - requirement = "Werkzeug==3.0.4", + requirement = "werkzeug==3.0.4", deps = [ "@pypa_markupsafe//:markupsafe", ], diff --git a/third_party/repo.bzl b/third_party/repo.bzl index d636bc9ac..0169afd31 100644 --- a/third_party/repo.bzl +++ b/third_party/repo.bzl @@ -202,7 +202,7 @@ cc_library( # packages, such as `sphinxcontrib`. result = ctx.execute([ get_python_bin(ctx), - ctx.path(ctx.attr._create_init_files).realpath, + ctx.path(ctx.attr._postinstall_fix).realpath, ]) if result.return_code != 0: fail("Failed to install create init files for: %s\n%s%s" % ( @@ -229,6 +229,19 @@ py_library( }), visibility = ["//visibility:public"], ) + +SCRIPT_PREFIX = "console_scripts_for_bazel/" +SCRIPT_SUFFIX = ".py" + +[py_binary( + name = bin[len(SCRIPT_PREFIX):-len(SCRIPT_SUFFIX)] + "_binary", + srcs = [bin], + main = bin, + deps = [":""" + ctx.attr.target + """"], + visibility = ["//visibility:public"], + ) + for bin in glob([SCRIPT_PREFIX + "*" + SCRIPT_SUFFIX]) +] """) if is_numpy: build_file_content += """ @@ -257,8 +270,8 @@ _third_party_python_package_attrs = { "not_win32": attr.string_list(), "darwin": attr.string_list(), "not_darwin": attr.string_list(), - "_create_init_files": attr.label( - default = Label("//third_party:pypa/create_init_files.py"), + "_postinstall_fix": attr.label( + default = Label("//third_party:pypa/postinstall_fix.py"), ), } diff --git a/third_party/third_party.bzl b/third_party/third_party.bzl index e0529de85..63f7df66e 100644 --- a/third_party/third_party.bzl +++ b/third_party/third_party.bzl @@ -60,6 +60,7 @@ load("//third_party:rules_pkg/workspace.bzl", repo_rules_pkg = "repo") load("//third_party:rules_proto/workspace.bzl", repo_rules_proto = "repo") 
load("//third_party:se_curl/workspace.bzl", repo_se_curl = "repo") load("//third_party:tinyxml2/workspace.bzl", repo_tinyxml2 = "repo") +load("//third_party:toolchains_llvm/workspace.bzl", repo_toolchains_llvm = "repo") def third_party_dependencies(): repo_aws_c_auth() @@ -124,3 +125,4 @@ def third_party_dependencies(): repo_rules_proto() repo_se_curl() repo_tinyxml2() + repo_toolchains_llvm() diff --git a/third_party/toolchains_llvm/workspace.bzl b/third_party/toolchains_llvm/workspace.bzl new file mode 100644 index 000000000..13aa2351d --- /dev/null +++ b/third_party/toolchains_llvm/workspace.bzl @@ -0,0 +1,29 @@ +# Copyright 2023 The TensorStore Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# buildifier: disable=module-docstring + +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") +load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") + +def repo(): + maybe( + http_archive, + name = "toolchains_llvm", + urls = [ + "https://storage.googleapis.com/tensorstore-bazel-mirror/github.com/bazel-contrib/toolchains_llvm/releases/download/v1.2.0/toolchains_llvm-v1.2.0.tar.gz", + ], + strip_prefix = "toolchains_llvm-v1.2.0", + sha256 = "e3fb6dc6b77eaf167cb2b0c410df95d09127cbe20547e5a329c771808a816ab4", + ) diff --git a/tools/bazel_platforms/BUILD.bazel b/tools/bazel_platforms/BUILD.bazel index e70a3dc95..5b56d2759 100644 --- a/tools/bazel_platforms/BUILD.bazel +++ b/tools/bazel_platforms/BUILD.bazel @@ -3,7 +3,7 @@ platform( name = "windows_x86_64_mingw", constraint_values = [ - "//tools/cpp:mingw", + "@bazel_tools//tools/cpp:mingw", "@platforms//os:windows", "@platforms//cpu:x86_64", ],