diff --git a/conda_forge_tick/auto_tick.py b/conda_forge_tick/auto_tick.py index 27189e327..aff385395 100644 --- a/conda_forge_tick/auto_tick.py +++ b/conda_forge_tick/auto_tick.py @@ -87,6 +87,7 @@ PipWheelMigrator, QtQtMainMigrator, Replacement, + StdlibMigrator, UpdateCMakeArgsMigrator, UpdateConfigSubGuessMigrator, Version, @@ -688,6 +689,10 @@ def add_rebuild_migration_yaml( piggy_back_migrations.append(JpegTurboMigrator()) if migration_name == "boost_cpp_to_libboost": piggy_back_migrations.append(LibboostMigrator()) + if migration_name == "boost1840": + # testing phase: only a single migration + # TODO: piggyback for all migrations + piggy_back_migrations.append(StdlibMigrator()) cycles = list(nx.simple_cycles(total_graph)) migrator = MigrationYaml( migration_yaml, diff --git a/conda_forge_tick/make_graph.py b/conda_forge_tick/make_graph.py index f2101d8eb..231c6bb20 100644 --- a/conda_forge_tick/make_graph.py +++ b/conda_forge_tick/make_graph.py @@ -42,6 +42,7 @@ # appear here COMPILER_STUBS_WITH_STRONG_EXPORTS = [ "c_compiler_stub", + "c_stdlib_stub", "cxx_compiler_stub", "fortran_compiler_stub", "cuda_compiler_stub", diff --git a/conda_forge_tick/migrators/__init__.py b/conda_forge_tick/migrators/__init__.py index ea98f1b93..81963dc72 100644 --- a/conda_forge_tick/migrators/__init__.py +++ b/conda_forge_tick/migrators/__init__.py @@ -14,6 +14,7 @@ UpdateCMakeArgsMigrator, UpdateConfigSubGuessMigrator, ) +from .cstdlib import StdlibMigrator from .dep_updates import DependencyUpdateMigrator from .duplicate_lines import DuplicateLinesCleanup from .extra_jinj2a_keys_cleanup import ExtraJinja2KeysCleanup diff --git a/conda_forge_tick/migrators/cstdlib.py b/conda_forge_tick/migrators/cstdlib.py new file mode 100644 index 000000000..98353b41d --- /dev/null +++ b/conda_forge_tick/migrators/cstdlib.py @@ -0,0 +1,236 @@ +import os +import re + +from conda_forge_tick.migrators.core import MiniMigrator +from conda_forge_tick.migrators.libboost import _replacer, 
_slice_into_output_sections
+
+pat_stub = re.compile(r"(c|cxx|fortran)_compiler_stub")
+rgx_idt = r"(?P<indent>\s*)-\s*"
+rgx_pre = r"(?P<compiler>\{\{\s*compiler\([\"\']"
+rgx_post = r"[\"\']\)\s*\}\})"
+rgx_sel = r"(?P<selector>\s*\#\s+\[[\w\s()<>!=.,\-\'\"]+\])?"
+
+pat_compiler_c = re.compile("".join([rgx_idt, rgx_pre, "c", rgx_post, rgx_sel]))
+pat_compiler_m2c = re.compile("".join([rgx_idt, rgx_pre, "m2w64_c", rgx_post, rgx_sel]))
+pat_compiler_other = re.compile(
+    "".join([rgx_idt, rgx_pre, "(m2w64_)?(cxx|fortran)", rgx_post, rgx_sel])
+)
+pat_compiler = re.compile(
+    "".join([rgx_idt, rgx_pre, "(m2w64_)?(c|cxx|fortran)", rgx_post, rgx_sel])
+)
+pat_stdlib = re.compile(r".*\{\{\s*stdlib\([\"\']c[\"\']\)\s*\}\}.*")
+# no version other than 2.17 currently available (except 2.12 as default on linux-64)
+pat_sysroot_217 = re.compile(r"- sysroot_linux-64\s*=?=?2\.17")
+
+
+def _process_section(name, attrs, lines):
+    """
+    Migrate requirements per section.
+
+    We want to migrate as follows:
+    - if there's _any_ `{{ stdlib("c") }}` in the recipe, abort (consider it migrated)
+    - if there's `{{ compiler("c") }}` in build, add `{{ stdlib("c") }}` in host
+    - where there's no host-section, add it
+
+    If we find `sysroot_linux-64 2.17`, remove those lines and write the spec to CBC.
+ """ + write_stdlib_to_cbc = False + # remove occurrences of __osx due to MACOSX_DEPLOYMENT_TARGET (see migrate() below) + lines = _replacer(lines, "- __osx", "") + + outputs = attrs["meta_yaml"].get("outputs", []) + global_reqs = attrs["meta_yaml"].get("requirements", {}) + if name == "global": + reqs = global_reqs + else: + filtered = [o for o in outputs if o["name"] == name] + if len(filtered) == 0: + raise RuntimeError(f"Could not find output {name}!") + reqs = filtered[0].get("requirements", {}) + + build_reqs = reqs.get("build", set()) or set() + global_build_reqs = global_reqs.get("build", set()) or set() + + # either there's a compiler in the output we're processing, or the + # current output has no build-section but relies on the global one + needs_stdlib = any(pat_stub.search(x or "") for x in build_reqs) + needs_stdlib |= not bool(build_reqs) and any( + pat_stub.search(x or "") for x in global_build_reqs + ) + # see more computation further down depending on dependencies + # ignored due to selectors, where we need the line numbers below. + + line_build = line_compiler_c = line_compiler_m2c = line_compiler_other = 0 + line_host = line_run = line_constrain = line_test = 0 + indent_c = indent_m2c = indent_other = "" + selector_c = selector_m2c = selector_other = "" + last_line_was_build = False + for i, line in enumerate(lines): + if last_line_was_build: + # process this separately from the if-else-chain below + keys_after_nonreq_build = [ + "binary_relocation", + "force_ignore_keys", + "ignore_run_exports(_from)?", + "missing_dso_whitelist", + "noarch", + "number", + "run_exports", + "script", + "skip", + ] + if re.match(rf"^\s*({'|'.join(keys_after_nonreq_build)}):.*", line): + # last match was spurious, reset line_build + line_build = 0 + last_line_was_build = False + + if re.match(r"^\s*build:.*", line): + # we need to avoid build.{number,...}, but cannot use multiline + # regexes here. 
So leave a marker that we can skip on + last_line_was_build = True + line_build = i + elif pat_compiler_c.search(line): + line_compiler_c = i + indent_c = pat_compiler_c.match(line).group("indent") + selector_c = pat_compiler_c.match(line).group("selector") or "" + elif pat_compiler_m2c.search(line): + line_compiler_m2c = i + indent_m2c = pat_compiler_m2c.match(line).group("indent") + selector_m2c = pat_compiler_m2c.match(line).group("selector") or "" + elif pat_compiler_other.search(line): + line_compiler_other = i + indent_other = pat_compiler_other.match(line).group("indent") + selector_other = pat_compiler_other.match(line).group("selector") or "" + elif re.match(r"^\s*host:.*", line): + line_host = i + elif re.match(r"^\s*run:.*", line): + line_run = i + elif re.match(r"^\s*run_constrained:.*", line): + line_constrain = i + elif re.match(r"^\s*test:.*", line): + line_test = i + # ensure we don't read past test section (may contain unrelated deps) + break + + if line_build: + # double-check whether there are compilers in the build section + # that may have gotten ignored by selectors; we explicitly only + # want to match with compilers in build, not host or run + build_reqs = lines[ + line_build : (line_host or line_run or line_constrain or line_test or -1) + ] + needs_stdlib |= any(pat_compiler.search(line) for line in build_reqs) + + if not needs_stdlib: + if any(pat_sysroot_217.search(line) for line in lines): + # if there are no compilers, but we still find sysroot_linux-64, + # replace it; remove potential selectors, as only that package is + # linux-only, not the requirement for a c-stdlib + from_this, to_that = "sysroot_linux-64.*", '{{ stdlib("c") }}' + lines = _replacer(lines, from_this, to_that, max_times=1) + lines = _replacer(lines, "sysroot_linux-64.*", "") + write_stdlib_to_cbc = True + # otherwise, no change + return lines, write_stdlib_to_cbc + + # in case of several compilers, prefer line, indent & selector of c compiler + line_compiler = 
line_compiler_c or line_compiler_m2c or line_compiler_other + indent = indent_c or indent_m2c or indent_other + selector = selector_c or selector_m2c or selector_other + if indent == "": + # no compiler in current output; take first line of section as reference (without last \n); + # ensure it works for both global build section as well as for `- name: `. + indent = ( + re.sub(r"^([\s\-]*).*", r"\1", lines[0][:-1]).replace("-", " ") + " " * 4 + ) + + # align selectors between {{ compiler(...) }} with {{ stdlib(...) }} + selector = " " + selector if selector else "" + to_insert = indent + '- {{ stdlib("c") }}' + selector + "\n" + if line_build == 0: + # no build section, need to add it + to_insert = indent[:-2] + "build:\n" + to_insert + + # if there's no build section, try to insert (in order of preference) + # before the sections for host, run, run_constrained, test + line_insert = line_host or line_run or line_constrain or line_test + if not line_insert: + raise RuntimeError("Don't know where to insert build section!") + if line_compiler: + # by default, we insert directly after the compiler + line_insert = line_compiler + 1 + + lines = lines[:line_insert] + [to_insert] + lines[line_insert:] + if line_compiler_c and line_compiler_m2c: + # we have both compiler("c") and compiler("m2w64_c"), likely with complementary + # selectors; add a second stdlib line after m2w64_c with respective selector + selector_m2c = " " * 8 + selector_m2c if selector_m2c else "" + to_insert = indent + '- {{ stdlib("c") }}' + selector_m2c + "\n" + line_insert = line_compiler_m2c + 1 + (line_compiler_c < line_compiler_m2c) + lines = lines[:line_insert] + [to_insert] + lines[line_insert:] + + # check if someone specified a newer sysroot in recipe already, + # leave indicator to migrate() function that we need to write spec to CBC + if any(pat_sysroot_217.search(line) for line in lines): + write_stdlib_to_cbc = True + # as we've already inserted a stdlib-jinja next to the compiler, + # 
simply remove any remaining occurrences of sysroot_linux-64
+        lines = _replacer(lines, "sysroot_linux-64.*", "")
+    return lines, write_stdlib_to_cbc
+
+
+class StdlibMigrator(MiniMigrator):
+    def filter(self, attrs, not_bad_str_start=""):
+        lines = attrs["raw_meta_yaml"].splitlines()
+        already_migrated = any(pat_stdlib.search(line) for line in lines)
+        has_compiler = any(pat_compiler.search(line) for line in lines)
+        has_sysroot = any(pat_sysroot_217.search(line) for line in lines)
+        # filter() returns True if we _don't_ want to migrate
+        return already_migrated or not (has_compiler or has_sysroot)
+
+    def migrate(self, recipe_dir, attrs, **kwargs):
+        outputs = attrs["meta_yaml"].get("outputs", [])
+
+        new_lines = []
+        write_stdlib_to_cbc = False
+        fname = os.path.join(recipe_dir, "meta.yaml")
+        if os.path.exists(fname):
+            with open(fname) as fp:
+                lines = fp.readlines()
+
+            sections = _slice_into_output_sections(lines, attrs)
+            for name, section in sections.items():
+                # _process_section returns list of lines already
+                chunk, cbc = _process_section(name, attrs, section)
+                new_lines += chunk
+                write_stdlib_to_cbc |= cbc
+
+            with open(fname, "w") as fp:
+                fp.write("".join(new_lines))
+
+        fname = os.path.join(recipe_dir, "conda_build_config.yaml")
+        if write_stdlib_to_cbc:
+            with open(fname, "a") as fp:
+                # append ("a") to existing CBC (or create it if it doesn't exist yet);
+                # no need to differentiate as no-one is using c_stdlib_version yet;
+                # selector can just be linux as that matches default on aarch/ppc
+                fp.write(
+                    '\nc_stdlib_version: # [linux]\n - "2.17" # [linux]\n'
+                )
+
+        if os.path.exists(fname):
+            with open(fname) as fp:
+                cbc_lines = fp.readlines()
+            # in a well-formed recipe, all deviations from the baseline
+            # MACOSX_DEPLOYMENT_TARGET come with a constraint on `__osx` in meta.yaml.
+ # Since the c_stdlib_version (together with the macosx_deployment_target + # metapackage) satisfies exactly that role, we can unconditionally replace + # that in the conda_build_config, and remove all `__osx` constraints in + # the meta.yaml (see further up). + # this line almost always has a selector, keep the alignment + cbc_lines = _replacer( + cbc_lines, r"^MACOSX_DEPLOYMENT_TARGET:", "c_stdlib_version: " + ) + + with open(fname, "w") as fp: + fp.write("".join(cbc_lines)) diff --git a/conda_forge_tick/utils.py b/conda_forge_tick/utils.py index 9b6479a31..5e38db40a 100644 --- a/conda_forge_tick/utils.py +++ b/conda_forge_tick/utils.py @@ -34,6 +34,7 @@ PACKAGE_STUBS = [ "_compiler_stub", + "_stdlib_stub", "subpackage_stub", "compatible_pin_stub", "cdt_stub", @@ -43,6 +44,7 @@ os=os, environ=defaultdict(str), compiler=lambda x: x + "_compiler_stub", + stdlib=lambda x: x + "_stdlib_stub", pin_subpackage=lambda *args, **kwargs: args[0], pin_compatible=lambda *args, **kwargs: args[0], cdt=lambda *args, **kwargs: "cdt_stub", @@ -61,6 +63,7 @@ def _munge_dict_repr(dct: Dict[Any, Any]) -> str: os=os, environ=defaultdict(str), compiler=lambda x: x + "_compiler_stub", + stdlib=lambda x: x + "_stdlib_stub", # The `max_pin, ` stub is so we know when people used the functions # to create the pins pin_subpackage=lambda *args, **kwargs: _munge_dict_repr( diff --git a/tests/test_stdlib.py b/tests/test_stdlib.py new file mode 100644 index 000000000..1201fc495 --- /dev/null +++ b/tests/test_stdlib.py @@ -0,0 +1,69 @@ +import os +import re + +import pytest +from flaky import flaky +from test_migrators import run_test_migration + +from conda_forge_tick.migrators import StdlibMigrator, Version + +TEST_YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml") + + +STDLIB = StdlibMigrator() +VERSION_WITH_STDLIB = Version( + set(), + piggy_back_migrations=[STDLIB], +) + + +@pytest.mark.parametrize( + "feedstock,new_ver,expect_cbc", + [ + # package with many outputs, includes 
inheritance from global build env + ("arrow", "1.10.0", False), + # package without c compiler, but with selectors + ("daal4py", "1.10.0", False), + # package involving selectors and m2w64_c compilers, and compilers in + # unusual places (e.g. in host & run sections) + ("go", "1.10.0", True), + # package with rust compilers + ("polars", "1.10.0", False), + # package without compilers, but with sysroot_linux-64 + ("sinabs", "1.10.0", True), + # test that we skip recipes that already contain a {{ stdlib("c") }} + ("skip_migration", "1.10.0", False), + ], +) +def test_stdlib(feedstock, new_ver, expect_cbc, tmpdir): + before = f"stdlib_{feedstock}_before_meta.yaml" + with open(os.path.join(TEST_YAML_PATH, before)) as fp: + in_yaml = fp.read() + + after = f"stdlib_{feedstock}_after_meta.yaml" + with open(os.path.join(TEST_YAML_PATH, after)) as fp: + out_yaml = fp.read() + + recipe_dir = os.path.join(tmpdir, f"{feedstock}-feedstock") + os.makedirs(recipe_dir, exist_ok=True) + + run_test_migration( + m=VERSION_WITH_STDLIB, + inp=in_yaml, + output=out_yaml, + kwargs={"new_version": new_ver}, + prb="Dependencies have been updated if changed", + mr_out={ + "migrator_name": "Version", + "migrator_version": VERSION_WITH_STDLIB.migrator_version, + "version": new_ver, + }, + tmpdir=recipe_dir, + should_filter=False, + ) + + cbc_pth = os.path.join(recipe_dir, "conda_build_config.yaml") + if expect_cbc: + with open(cbc_pth) as fp: + lines = fp.readlines() + assert any(re.match(r"c_stdlib_version:\s+\#\s\[linux\]", x) for x in lines) diff --git a/tests/test_yaml/stdlib_arrow_after_meta.yaml b/tests/test_yaml/stdlib_arrow_after_meta.yaml new file mode 100644 index 000000000..cda3226e1 --- /dev/null +++ b/tests/test_yaml/stdlib_arrow_after_meta.yaml @@ -0,0 +1,873 @@ +{% set version = "1.10.0" %} +{% set cuda_enabled = cuda_compiler_version != "None" %} +{% set build_ext_version = "5.0.0" %} +{% set build_ext = "cuda" if cuda_enabled else "cpu" %} +{% set proc_build_number = "0" %} 
+{% set llvm_version = "15" %} + +# see https://github.com/apache/arrow/blob/apache-arrow-10.0.1/cpp/CMakeLists.txt#L88-L90 +{% set so_version = (version.split(".")[0] | int * 100 + version.split(".")[1] | int) ~ "." ~ version.split(".")[2] ~ ".0" %} + +package: + name: apache-arrow + version: {{ version }} + +source: + # fake source url to get version migrator to pass + url: https://github.com/scipy/scipy/archive/refs/tags/v{{ version }}.tar.gz + sha256: 3f9e587a96844a9b4ee7f998cfe4dc3964dc95c4ca94d7de6a77bffb99f873da + # # arrow has the unfortunate habit of changing tags of X.0.0 in the + # # lead-up until release -> don't use github sources on main + # # - url: https://github.com/apache/arrow/archive/refs/tags/apache-arrow-{{ version }}.tar.gz + # - url: https://www.apache.org/dyn/closer.lua/arrow/arrow-{{ version }}/apache-arrow-{{ version }}.tar.gz?action=download + # fn: apache-arrow-{{ version }}.tar.gz + # sha256: 01dd3f70e85d9b5b933ec92c0db8a4ef504a5105f78d2d8622e84279fb45c25d + # patches: + # # workaround for https://github.com/apache/arrow/issues/37692 + # - patches/0001-fixture-teardown-should-not-fail-test.patch + # testing-submodule not part of release tarball + # - git_url: https://github.com/apache/arrow-testing.git + # git_rev: ad82a736c170e97b7c8c035ebd8a801c17eec170 + # folder: testing + +build: + number: 0 + # for cuda support, building with one version is enough to be compatible with + # all later versions, since arrow is only using libcuda, and not libcudart. 
+ skip: true # [cuda_compiler_version not in ("None", cuda_compiler_version_min)] + # arrow promises API- & ABI-compatibility along SemVer, see #1096 + +requirements: + build: + - {{ compiler("c") }} + - {{ stdlib("c") }} + - {{ compiler("cxx") }} + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + # needs to run protoc & grpc_cpp_plugin + - libgrpc # [build_platform != target_platform] + - libprotobuf # [build_platform != target_platform] + # needed for gandiva + - clangdev {{ llvm_version }} # [build_platform != target_platform] + - llvmdev {{ llvm_version }} # [build_platform != target_platform] + - gnuconfig # [build_platform != target_platform] + - cmake + - ninja + # necessary for vendored jemalloc + - autoconf # [linux] + - make # [linux] + host: + # for required dependencies, see + # https://github.com/apache/arrow/blob/apache-arrow-11.0.0/cpp/cmake_modules/ThirdpartyToolchain.cmake#L46-L75 + - clangdev {{ llvm_version }} + - llvmdev {{ llvm_version }} + - aws-crt-cpp + - aws-sdk-cpp + - brotli + - bzip2 + # not yet: https://github.com/conda-forge/cpp-opentelemetry-sdk-feedstock/issues/38 + # - cpp-opentelemetry-sdk + # - proto-opentelemetry-proto =={{ cpp_opentelemetry_sdk }} + - gflags + - glog + - google-cloud-cpp + # arrow uses a customized jemalloc, see #944 + # - jemalloc + - libabseil + - libboost-headers + - libgrpc + - libutf8proc + - lz4-c + - nlohmann_json + - orc + - rapidjson + - re2 + - snappy + - thrift-cpp + - ucx # [linux] + - xsimd + - zlib + - zstd + +outputs: + - name: apache-arrow-proc + version: {{ build_ext_version }} + build: + number: {{ proc_build_number }} + string: {{ build_ext }} + requirements: + build: + - {{ stdlib("c") }} + run_constrained: + # avoid installation with old naming of proc package + - arrow-cpp-proc <0.0a0 + test: + commands: + - exit 0 + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: A meta-package to select Arrow build variant + + - 
name: libarrow-all + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow", max_pin="x") }} + - {{ pin_subpackage("libarrow-acero", max_pin="x") }} + - {{ pin_subpackage("libarrow-dataset", max_pin="x") }} + - {{ pin_subpackage("libarrow-flight", max_pin="x") }} + - {{ pin_subpackage("libarrow-flight-sql", max_pin="x") }} + - {{ pin_subpackage("libarrow-gandiva", max_pin="x") }} + - {{ pin_subpackage("libarrow-substrait", max_pin="x") }} + - {{ pin_subpackage("libparquet", max_pin="x") }} + requirements: + build: + - cmake + - ninja + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + - {{ pin_subpackage("libarrow-flight", exact=True) }} + - {{ pin_subpackage("libarrow-flight-sql", exact=True) }} + - {{ pin_subpackage("libarrow-gandiva", exact=True) }} + - {{ pin_subpackage("libarrow-substrait", exact=True) }} + - {{ pin_subpackage("libparquet", exact=True) }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + - {{ pin_subpackage("libarrow-flight", exact=True) }} + - {{ pin_subpackage("libarrow-flight-sql", exact=True) }} + - {{ pin_subpackage("libarrow-gandiva", exact=True) }} + - {{ pin_subpackage("libarrow-substrait", exact=True) }} + - {{ pin_subpackage("libparquet", exact=True) }} + test: + commands: + - echo "tested in other outputs" + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow + + - name: libarrow + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext 
}} + run_exports: + - {{ pin_subpackage("libarrow", max_pin="x") }} + ignore_run_exports_from: + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + # arrow only uses headers, apparently + - gflags + # shared lib linked on unix, not on win + - glog # [win] + ignore_run_exports: + # we don't need all of brotli's run-exports + - libbrotlicommon + track_features: {{ "[arrow-cuda]" if cuda_enabled else "" }} + missing_dso_whitelist: + - '*/libcuda.so.*' # [linux] + - '*/nvcuda.dll' # [win] + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ stdlib("c") }} + - {{ compiler("cxx") }} + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + host: + - aws-crt-cpp + - aws-sdk-cpp + - brotli + - bzip2 + # not yet: https://github.com/conda-forge/cpp-opentelemetry-sdk-feedstock/issues/38 + # - cpp-opentelemetry-sdk + # - proto-opentelemetry-proto =={{ cpp_opentelemetry_sdk }} + - gflags + - glog + - google-cloud-cpp + # arrow uses a customized jemalloc, see #944 + # - jemalloc + - libabseil + - libutf8proc + - lz4-c + - openssl # [win] + - orc + - re2 + - snappy + - zlib + - zstd + - __cuda >={{ cuda_compiler_version_min }} # [cuda_compiler_version != "None"] + # since libgoogle-cloud is static on windows, see + # https://github.com/conda-forge/google-cloud-cpp-feedstock/pull/108, + # its host deps (which aren't yet covered above) leak into the build here + - libcrc32c # [win] + - libcurl # [win] + run_constrained: + - apache-arrow-proc =*={{ build_ext }} + # avoid installation with old naming of lib package + - arrow-cpp <0.0a0 + # old parquet lib output, now part of this feedstock + - parquet-cpp <0.0a0 + # since all the other libarrow-* variants in this recipe depend exactly on libarrow, + # this avoids that libarrow-X & -Y get installed with different builds or versions. 
+ + test: + commands: + # headers + - test -f $PREFIX/include/arrow/api.h # [unix] + - if not exist %LIBRARY_INC%\arrow\api.h exit 1 # [win] + + {% set libs = ["arrow"] + (cuda_compiler_version != "None") * ["arrow_cuda"] %} + {% for each_lib in libs %} + # shared + - test -f $PREFIX/lib/lib{{ each_lib }}.so # [linux] + - test -f $PREFIX/lib/lib{{ each_lib }}.dylib # [osx] + - if not exist %LIBRARY_BIN%\{{ each_lib }}.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\{{ each_lib }}.lib exit 1 # [win] + + # absence of static libraries + - test ! -f $PREFIX/lib/lib{{ each_lib }}.a # [unix] + - if exist %LIBRARY_LIB%\{{ each_lib }}_static.lib exit 1 # [win] + {% endfor %} + + # absence of arrow_cuda for CPU builds + - test ! -f $PREFIX/lib/libarrow_cuda.so # [(cuda_compiler_version == "None") and linux] + - test ! -f $PREFIX/lib/libarrow_cuda.a # [(cuda_compiler_version == "None") and linux] + - if exist %LIBRARY_BIN%\arrow_cuda.dll exit 1 # [(cuda_compiler_version == "None") and win] + - if exist %LIBRARY_LIB%\arrow_cuda.lib exit 1 # [(cuda_compiler_version == "None") and win] + - if exist %LIBRARY_LIB%\arrow_cuda_static.lib exit 1 # [(cuda_compiler_version == "None") and win] + + # gdb-wrapper (paths are stacked intentionally) + - test -f $PREFIX/share/gdb/auto-load/$PREFIX/lib/libarrow.so.{{ so_version }}-gdb.py # [linux] + - test -f $PREFIX/share/gdb/auto-load/$PREFIX/lib/libarrow.{{ so_version }}.dylib-gdb.py # [osx] + + {% set libs = [ + "arrow_acero", "arrow_dataset", "arrow_flight", + "arrow_flight_sql", "arrow_substrait", "gandiva", "parquet" + ] %} + {% for each_lib in libs %} + # absence of libraries that belong in other outputs + - test ! -f $PREFIX/lib/lib{{ each_lib }}.so # [linux] + - test ! 
-f $PREFIX/lib/lib{{ each_lib }}.dylib # [osx] + - if exist %LIBRARY_BIN%\{{ each_lib }}.dll exit 1 # [win] + - if exist %LIBRARY_LIB%\{{ each_lib }}.lib exit 1 # [win] + {% endfor %} + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow core + + - name: libarrow-acero + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow-acero", max_pin="x") }} + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ stdlib("c") }} + - {{ compiler("cxx") }} + host: + - {{ pin_subpackage("libarrow", exact=True) }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + # run-constraints handled by libarrow, since we depend on it with exact=True + + test: + commands: + # headers + - test -f $PREFIX/include/arrow/acero/api.h # [unix] + - if not exist %LIBRARY_INC%\arrow\acero\api.h exit 1 # [win] + + # shared libraries + - test -f $PREFIX/lib/libarrow_acero.so # [linux] + - test -f $PREFIX/lib/libarrow_acero.dylib # [osx] + - if not exist %LIBRARY_BIN%\arrow_acero.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\arrow_acero.lib exit 1 # [win] + + # absence of static libraries + - test ! 
-f $PREFIX/lib/libarrow_acero.a # [unix] + - if exist %LIBRARY_LIB%\arrow_acero_static.lib exit 1 # [win] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow Acero + + - name: libarrow-dataset + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow-dataset", max_pin="x") }} + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ stdlib("c") }} + - {{ compiler("cxx") }} + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libparquet", exact=True) }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libparquet", exact=True) }} + # run-constraints handled by libarrow, since we depend on it with exact=True + + test: + commands: + # headers + - test -f $PREFIX/include/arrow/dataset/api.h # [unix] + - if not exist %LIBRARY_INC%\arrow\dataset\api.h exit 1 # [win] + + # shared libraries + - test -f $PREFIX/lib/libarrow_dataset.so # [linux] + - test -f $PREFIX/lib/libarrow_dataset.dylib # [osx] + - if not exist %LIBRARY_BIN%\arrow_dataset.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\arrow_dataset.lib exit 1 # [win] + + # absence of static libraries + - test ! 
-f $PREFIX/lib/libarrow_dataset.a # [unix] + - if exist %LIBRARY_LIB%\arrow_dataset_static.lib exit 1 # [win] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow Dataset + + - name: libarrow-flight + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow-flight", max_pin="x") }} + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ stdlib("c") }} + - {{ compiler("cxx") }} + # needs to run protoc & grpc_cpp_plugin + - libgrpc # [build_platform != target_platform] + - libprotobuf # [build_platform != target_platform] + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - libabseil + - libgrpc + - libprotobuf + - ucx # [linux] + run: + - {{ pin_subpackage("libarrow", exact=True) }} + # run-constraints handled by libarrow, since we depend on it with exact=True + + test: + commands: + # headers + - test -f $PREFIX/include/arrow/flight/types.h # [unix] + - if not exist %LIBRARY_INC%\arrow\flight\types.h exit 1 # [win] + + # shared libraries + - test -f $PREFIX/lib/libarrow_flight.so # [linux] + - test -f $PREFIX/lib/libarrow_flight.dylib # [osx] + - if not exist %LIBRARY_BIN%\arrow_flight.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\arrow_flight.lib exit 1 # [win] + + # absence of static libraries + - test ! 
-f $PREFIX/lib/libarrow_flight.a # [unix] + - if exist %LIBRARY_LIB%\arrow_flight_static.lib exit 1 # [win] + + # Only check UCX on Linux + - test -f $PREFIX/lib/libarrow_flight_transport_ucx.so # [linux] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow Flight + + - name: libarrow-flight-sql + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow-flight-sql", max_pin="x") }} + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ stdlib("c") }} + - {{ compiler("cxx") }} + # needs to run protoc & grpc_cpp_plugin + - libgrpc # [build_platform != target_platform] + - libprotobuf # [build_platform != target_platform] + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-flight", exact=True) }} + - libprotobuf + run: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-flight", exact=True) }} + # run-constraints handled by libarrow, since we depend on it with exact=True + + test: + commands: + # headers + - test -f $PREFIX/include/arrow/flight/sql/api.h # [unix] + - if not exist %LIBRARY_INC%\arrow\flight\sql\api.h exit 1 # [win] + + # shared libraries + - test -f $PREFIX/lib/libarrow_flight_sql.so # [linux] + - test -f $PREFIX/lib/libarrow_flight_sql.dylib # [osx] + - if not exist %LIBRARY_BIN%\arrow_flight_sql.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\arrow_flight_sql.lib exit 1 # [win] + + # absence of static libraries + - test ! 
-f $PREFIX/lib/libarrow_flight_sql.a # [unix] + - if exist %LIBRARY_LIB%\arrow_flight_sql_static.lib exit 1 # [win] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow Flight SQL + + - name: libarrow-gandiva + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow-gandiva", max_pin="x") }} + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ stdlib("c") }} + - {{ compiler("cxx") }} + host: + - {{ pin_subpackage("libarrow", max_pin="x") }} + - libutf8proc + # gandiva requires shared libllvm; needs to match version used at build time + - llvm {{ llvm_version }} # [unix] + - openssl + - re2 + - zlib # [win] + run: + - {{ pin_subpackage("libarrow", exact=True) }} + # run-constraints handled by libarrow, since we depend on it with exact=True + + test: + commands: + # headers + - test -f $PREFIX/include/gandiva/engine.h # [unix] + - if not exist %LIBRARY_INC%\gandiva\engine.h exit 1 # [win] + + # shared libraries + - test -f $PREFIX/lib/libgandiva.so # [linux] + - test -f $PREFIX/lib/libgandiva.dylib # [osx] + - if not exist %LIBRARY_BIN%\gandiva.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\gandiva.lib exit 1 # [win] + + # absence of static libraries + - test ! 
-f $PREFIX/lib/libgandiva.a # [unix] + - if exist %LIBRARY_LIB%\gandiva_static.lib exit 1 # [win] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow Gandiva + + - name: libarrow-substrait + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow-substrait", max_pin="x") }} + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ compiler("cxx") }} + - libprotobuf # [build_platform != target_platform] + # for strong run-exports + - {{ compiler("c") }} + - {{ stdlib("c") }} + - {{ compiler("cxx") }} + - cmake + - ninja + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + - libabseil # [win] + - libprotobuf + run: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + # run-constraints handled by libarrow, since we depend on it with exact=True + + test: + commands: + # headers + - test -f $PREFIX/include/arrow/engine/substrait/api.h # [unix] + - if not exist %LIBRARY_INC%\arrow\engine\substrait\api.h exit 1 # [win] + + # shared libraries + - test -f $PREFIX/lib/libarrow_substrait.so # [linux] + - test -f $PREFIX/lib/libarrow_substrait.dylib # [osx] + - if not exist %LIBRARY_BIN%\arrow_substrait.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\arrow_substrait.lib exit 1 # [win] + + # absence of static libraries + - test ! 
-f $PREFIX/lib/libarrow_substrait.a # [unix] + - if exist %LIBRARY_LIB%\arrow_substrait_static.lib exit 1 # [win] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow Substrait + + - name: libparquet + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libparquet", max_pin="x") }} + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ stdlib("c") }} + - {{ compiler("cxx") }} + host: + - {{ pin_subpackage("libarrow", max_pin="x") }} + - openssl + - thrift-cpp + run: + - {{ pin_subpackage("libarrow", exact=True) }} + # run-constraints handled by libarrow, since we depend on it with exact=True + + test: + commands: + # headers + - test -f $PREFIX/include/parquet/api/reader.h # [unix] + - if not exist %LIBRARY_INC%\parquet\api\reader.h exit 1 # [win] + + # shared libraries + - test -f $PREFIX/lib/libparquet.so # [linux] + - test -f $PREFIX/lib/libparquet.dylib # [osx] + - if not exist %LIBRARY_BIN%\parquet.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\parquet.lib exit 1 # [win] + + # absence of static libraries + - test ! 
-f $PREFIX/lib/libparquet.a # [unix] + - if exist %LIBRARY_LIB%\parquet_static.lib exit 1 # [win] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Parquet + + - name: pyarrow + script: build-pyarrow.sh # [unix] + script: build-pyarrow.bat # [win] + version: {{ version }} + build: + string: py{{ CONDA_PY }}h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + ignore_run_exports_from: + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + track_features: {{ "[arrow-cuda]" if cuda_enabled else "" }} + rpaths: + - lib/ + - {{ SP_DIR }}/pyarrow + missing_dso_whitelist: + # not actually missing, but installed into SP_DIR, see tests + - '*/arrow_python.dll' # [win] + - '*/arrow_python_flight.dll' # [win] + requirements: + build: + - {{ compiler("c") }} + - {{ stdlib("c") }} + - {{ compiler("cxx") }} + # pyarrow does not require nvcc but it needs to link against libraries in libarrow=*=*cuda + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + - python # [build_platform != target_platform] + - cross-python_{{ target_platform }} # [build_platform != target_platform] + - cython # [build_platform != target_platform] + - numpy # [build_platform != target_platform] + - cmake + - ninja + host: + - {{ pin_subpackage("libarrow-all", exact=True) }} + - clangdev {{ llvm_version }} + - llvmdev {{ llvm_version }} + - cython + - numpy + - python + - setuptools + # see https://github.com/apache/arrow/issues/37931 + - setuptools_scm <8 + run: + # full set of libs because run-exports from libarrow-all aren't picked up + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + - {{ pin_subpackage("libarrow-flight", exact=True) }} + - {{ pin_subpackage("libarrow-flight-sql", exact=True) }} + - {{ pin_subpackage("libarrow-gandiva", exact=True) }} + - {{ 
pin_subpackage("libarrow-substrait", exact=True) }} + - {{ pin_subpackage("libparquet", exact=True) }} + - {{ pin_compatible('numpy') }} + - python + run_constrained: + - apache-arrow-proc =*={{ build_ext }} + + test: + files: + - test_read_parquet.py + imports: + - pyarrow + - pyarrow.dataset + - pyarrow.compute + - pyarrow.flight + - pyarrow.gandiva + - pyarrow.orc # [unix] + - pyarrow.parquet + - pyarrow.fs + - pyarrow._s3fs + - pyarrow._hdfs + # We can only test importing cuda package but cannot run when a + # CUDA device is not available, for instance, when building from CI. + # On Windows, we cannot even do that due to `nvcuda.dll` not being found, see + # https://conda-forge.org/docs/maintainer/knowledge_base.html#nvcuda-dll-cannot-be-found-on-windows + # However, we check below for (at least) the presence of a correctly-compiled module + - pyarrow.cuda # [cuda_compiler_version != "None" and not win] + commands: + # libraries that depend on python (and hence aren't in libarrow itself) + - test -f ${SP_DIR}/pyarrow/libarrow_python.so # [linux] + - test -f ${SP_DIR}/pyarrow/libarrow_python_flight.so # [linux] + - test -f ${SP_DIR}/pyarrow/libarrow_python.dylib # [osx] + - test -f ${SP_DIR}/pyarrow/libarrow_python_flight.dylib # [osx] + - if not exist %SP_DIR%\pyarrow\arrow_python.dll exit 1 # [win] + - if not exist %SP_DIR%\pyarrow\arrow_python_flight.dll exit 1 # [win] + + - test -f ${SP_DIR}/pyarrow/include/arrow/python/pyarrow.h # [unix] + - if not exist %SP_DIR%\pyarrow\include\arrow\python\pyarrow.h exit 1 # [win] + + - test ! 
-f ${SP_DIR}/pyarrow/tests/test_array.py # [unix] + - if exist %SP_DIR%/pyarrow/tests/test_array.py exit 1 # [win] + # Need to remove dot from PY_VER; %MYVAR:x=y% replaces "x" in %MYVAR% with "y" + - if not exist %SP_DIR%/pyarrow/_cuda.cp%PY_VER:.=%-win_amd64.pyd exit 1 # [win and cuda_compiler_version != "None"] + - python test_read_parquet.py + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: Python libraries for Apache Arrow + + - name: pyarrow-tests + script: build-pyarrow.sh # [unix] + script: build-pyarrow.bat # [win] + version: {{ version }} + build: + string: py{{ CONDA_PY }}h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + ignore_run_exports_from: + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + track_features: {{ "[arrow-cuda]" if cuda_enabled else "" }} + requirements: + build: + - {{ compiler("c") }} + - {{ stdlib("c") }} + - {{ compiler("cxx") }} + # pyarrow does not require nvcc but it needs to link against libraries in libarrow=*=*cuda + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + - python # [build_platform != target_platform] + - cross-python_{{ target_platform }} # [build_platform != target_platform] + - cython # [build_platform != target_platform] + - numpy # [build_platform != target_platform] + - cmake + - ninja + host: + - {{ pin_subpackage("libarrow-all", exact=True) }} + - {{ pin_subpackage('pyarrow', exact=True) }} + - clangdev {{ llvm_version }} + - llvmdev {{ llvm_version }} + - cython + - numpy + - python + - setuptools + # see https://github.com/apache/arrow/issues/37931 + - setuptools_scm <8 + run: + - {{ pin_subpackage('pyarrow', exact=True) }} + - python + run_constrained: + - apache-arrow-proc =*={{ build_ext }} + + {% if not (aarch64 or ppc64le) or py == 311 %} + # only run the full test suite for one python version when in emulation + # (each run can take up to ~45min); there's essentially zero divergence + # in behaviour across 
python versions anyway + test: + requires: + # test_cpp_extension_in_python requires a compiler + - {{ compiler("cxx") }} # [linux] + # pytest-lazy-fixture fails with pytest 8 + # See issue on Arrow repo: https://github.com/apache/arrow/issues/39849 + - pytest <8 + - pytest-lazy-fixture + - backports.zoneinfo # [py<39] + - boto3 + - cffi + - cloudpickle + - cython + - fastparquet + - fsspec + - hypothesis + - minio-server + # Pandas pin required due to: https://github.com/apache/arrow/issues/39732 + - pandas <2.2 + - s3fs >=2023 + - scipy + # these are generally (far) behind on migrating abseil/grpc/protobuf, + # and using them as test dependencies blocks the migrator unnecessarily + # - pytorch + # - tensorflow + # we're not building java bindings + # - jpype1 + # doesn't get picked up correctly + # - libhdfs3 + # causes segfaults + # - sparse + source_files: + - testing/data + commands: + - cd ${SP_DIR} # [unix] + - cd %SP_DIR% # [win] + - export ARROW_TEST_DATA="${SRC_DIR}/testing/data" # [unix] + - set "ARROW_TEST_DATA=%SRC_DIR%\testing\data" # [win] + + {% set tests_to_skip = "_not_a_real_test" %} + # we do not have GPUs in CI --> cannot test cuda + {% set tests_to_skip = tests_to_skip + " or test_cuda" + " or test_dlpack_cuda_not_supported"%} + # skip tests that raise SIGINT and crash the test suite + {% set tests_to_skip = tests_to_skip + " or (test_csv and test_cancellation)" %} # [linux] + {% set tests_to_skip = tests_to_skip + " or (test_flight and test_interrupt)" %} # [linux] + # skip tests that make invalid(-for-conda) assumptions about the compilers setup + {% set tests_to_skip = tests_to_skip + " or test_cython_api" %} # [unix] + {% set tests_to_skip = tests_to_skip + " or test_visit_strings" %} # [unix] + # skip tests that cannot succeed in emulation + {% set tests_to_skip = tests_to_skip + " or test_debug_memory_pool_disabled" %} # [aarch64 or ppc64le] + {% set tests_to_skip = tests_to_skip + " or test_env_var_io_thread_count" %} # [aarch64 or 
ppc64le] + # vvvvvvv TESTS THAT SHOULDN'T HAVE TO BE SKIPPED vvvvvvv + # problems with minio + {% set tests_to_skip = tests_to_skip + " or (test_delete_dir and S3FileSystem)" %} + {% set tests_to_skip = tests_to_skip + " or (test_get_file_info and S3FileSystem)" %} + {% set tests_to_skip = tests_to_skip + " or (test_move_directory and S3FileSystem)" %} + # XMinioInvalidObjectName on win: "Object name contains unsupported characters" + {% set tests_to_skip = tests_to_skip + " or test_write_to_dataset_with_partitions_s3fs" %} # [win] + # gandiva tests are segfaulting on ppc + {% set tests_to_skip = tests_to_skip + " or test_gandiva" %} # [ppc64le] + # test failures on ppc (both failing with: Float value was truncated converting to int32) + {% set tests_to_skip = tests_to_skip + " or test_safe_cast_from_float_with_nans_to_int" %} # [ppc64le] + {% set tests_to_skip = tests_to_skip + " or test_float_with_null_as_integer" %} # [ppc64le] + # ^^^^^^^ TESTS THAT SHOULDN'T HAVE TO BE SKIPPED ^^^^^^^ + - pytest pyarrow/ -rfEs -k "not ({{ tests_to_skip }})" + {% endif %} + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: Python test files for Apache Arrow + +about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ and Python libraries for Apache Arrow + +extra: + recipe-maintainers: + - wesm + - xhochy + - leifwalsh + - jreback + - cpcloud + - pcmoritz + - robertnishihara + - siddharthteotia + - kou + - kszucs + - pitrou + - pearu + - nealrichardson + - jakirkham + - h-vetinari + - raulcd + feedstock-name: arrow-cpp diff --git a/tests/test_yaml/stdlib_arrow_before_meta.yaml b/tests/test_yaml/stdlib_arrow_before_meta.yaml new file mode 100644 index 000000000..85771bad5 --- /dev/null +++ b/tests/test_yaml/stdlib_arrow_before_meta.yaml @@ -0,0 +1,860 @@ +{% set version = "1.9.0" %} +{% set cuda_enabled = cuda_compiler_version != "None" %} +{% set 
build_ext_version = "5.0.0" %} +{% set build_ext = "cuda" if cuda_enabled else "cpu" %} +{% set proc_build_number = "0" %} +{% set llvm_version = "15" %} + +# see https://github.com/apache/arrow/blob/apache-arrow-10.0.1/cpp/CMakeLists.txt#L88-L90 +{% set so_version = (version.split(".")[0] | int * 100 + version.split(".")[1] | int) ~ "." ~ version.split(".")[2] ~ ".0" %} + +package: + name: apache-arrow + version: {{ version }} + +source: + # fake source url to get version migrator to pass + url: https://github.com/scipy/scipy/archive/refs/tags/v{{ version }}.tar.gz + sha256: b6d893dc7dcd4138b9e9df59a13c59695e50e80dc5c2cacee0674670693951a1 + # # arrow has the unfortunate habit of changing tags of X.0.0 in the + # # lead-up until release -> don't use github sources on main + # # - url: https://github.com/apache/arrow/archive/refs/tags/apache-arrow-{{ version }}.tar.gz + # - url: https://www.apache.org/dyn/closer.lua/arrow/arrow-{{ version }}/apache-arrow-{{ version }}.tar.gz?action=download + # fn: apache-arrow-{{ version }}.tar.gz + # sha256: 01dd3f70e85d9b5b933ec92c0db8a4ef504a5105f78d2d8622e84279fb45c25d + # patches: + # # workaround for https://github.com/apache/arrow/issues/37692 + # - patches/0001-fixture-teardown-should-not-fail-test.patch + # testing-submodule not part of release tarball + # - git_url: https://github.com/apache/arrow-testing.git + # git_rev: ad82a736c170e97b7c8c035ebd8a801c17eec170 + # folder: testing + +build: + number: 0 + # for cuda support, building with one version is enough to be compatible with + # all later versions, since arrow is only using libcuda, and not libcudart. 
+ skip: true # [cuda_compiler_version not in ("None", cuda_compiler_version_min)] + # arrow promises API- & ABI-compatibility along SemVer, see #1096 + +requirements: + build: + - {{ compiler("c") }} + - {{ compiler("cxx") }} + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + # needs to run protoc & grpc_cpp_plugin + - libgrpc # [build_platform != target_platform] + - libprotobuf # [build_platform != target_platform] + # needed for gandiva + - clangdev {{ llvm_version }} # [build_platform != target_platform] + - llvmdev {{ llvm_version }} # [build_platform != target_platform] + - gnuconfig # [build_platform != target_platform] + - cmake + - ninja + # necessary for vendored jemalloc + - autoconf # [linux] + - make # [linux] + host: + # for required dependencies, see + # https://github.com/apache/arrow/blob/apache-arrow-11.0.0/cpp/cmake_modules/ThirdpartyToolchain.cmake#L46-L75 + - clangdev {{ llvm_version }} + - llvmdev {{ llvm_version }} + - aws-crt-cpp + - aws-sdk-cpp + - brotli + - bzip2 + # not yet: https://github.com/conda-forge/cpp-opentelemetry-sdk-feedstock/issues/38 + # - cpp-opentelemetry-sdk + # - proto-opentelemetry-proto =={{ cpp_opentelemetry_sdk }} + - gflags + - glog + - google-cloud-cpp + # arrow uses a customized jemalloc, see #944 + # - jemalloc + - libabseil + - libboost-headers + - libgrpc + - libutf8proc + - lz4-c + - nlohmann_json + - orc + - rapidjson + - re2 + - snappy + - thrift-cpp + - ucx # [linux] + - xsimd + - zlib + - zstd + +outputs: + - name: apache-arrow-proc + version: {{ build_ext_version }} + build: + number: {{ proc_build_number }} + string: {{ build_ext }} + requirements: + run_constrained: + # avoid installation with old naming of proc package + - arrow-cpp-proc <0.0a0 + test: + commands: + - exit 0 + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: A meta-package to select Arrow build variant + + - name: libarrow-all + script: install-libarrow.sh # 
[unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow", max_pin="x") }} + - {{ pin_subpackage("libarrow-acero", max_pin="x") }} + - {{ pin_subpackage("libarrow-dataset", max_pin="x") }} + - {{ pin_subpackage("libarrow-flight", max_pin="x") }} + - {{ pin_subpackage("libarrow-flight-sql", max_pin="x") }} + - {{ pin_subpackage("libarrow-gandiva", max_pin="x") }} + - {{ pin_subpackage("libarrow-substrait", max_pin="x") }} + - {{ pin_subpackage("libparquet", max_pin="x") }} + requirements: + build: + - cmake + - ninja + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + - {{ pin_subpackage("libarrow-flight", exact=True) }} + - {{ pin_subpackage("libarrow-flight-sql", exact=True) }} + - {{ pin_subpackage("libarrow-gandiva", exact=True) }} + - {{ pin_subpackage("libarrow-substrait", exact=True) }} + - {{ pin_subpackage("libparquet", exact=True) }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + - {{ pin_subpackage("libarrow-flight", exact=True) }} + - {{ pin_subpackage("libarrow-flight-sql", exact=True) }} + - {{ pin_subpackage("libarrow-gandiva", exact=True) }} + - {{ pin_subpackage("libarrow-substrait", exact=True) }} + - {{ pin_subpackage("libparquet", exact=True) }} + test: + commands: + - echo "tested in other outputs" + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow + + - name: libarrow + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow", 
max_pin="x") }} + ignore_run_exports_from: + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + # arrow only uses headers, apparently + - gflags + # shared lib linked on unix, not on win + - glog # [win] + ignore_run_exports: + # we don't need all of brotli's run-exports + - libbrotlicommon + track_features: {{ "[arrow-cuda]" if cuda_enabled else "" }} + missing_dso_whitelist: + - '*/libcuda.so.*' # [linux] + - '*/nvcuda.dll' # [win] + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ compiler("cxx") }} + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + host: + - aws-crt-cpp + - aws-sdk-cpp + - brotli + - bzip2 + # not yet: https://github.com/conda-forge/cpp-opentelemetry-sdk-feedstock/issues/38 + # - cpp-opentelemetry-sdk + # - proto-opentelemetry-proto =={{ cpp_opentelemetry_sdk }} + - gflags + - glog + - google-cloud-cpp + # arrow uses a customized jemalloc, see #944 + # - jemalloc + - libabseil + - libutf8proc + - lz4-c + - openssl # [win] + - orc + - re2 + - snappy + - zlib + - zstd + - __cuda >={{ cuda_compiler_version_min }} # [cuda_compiler_version != "None"] + # since libgoogle-cloud is static on windows, see + # https://github.com/conda-forge/google-cloud-cpp-feedstock/pull/108, + # its host deps (which aren't yet covered above) leak into the build here + - libcrc32c # [win] + - libcurl # [win] + run_constrained: + - apache-arrow-proc =*={{ build_ext }} + # avoid installation with old naming of lib package + - arrow-cpp <0.0a0 + # old parquet lib output, now part of this feedstock + - parquet-cpp <0.0a0 + # since all the other libarrow-* variants in this recipe depend exactly on libarrow, + # this avoids that libarrow-X & -Y get installed with different builds or versions. 
+ + test: + commands: + # headers + - test -f $PREFIX/include/arrow/api.h # [unix] + - if not exist %LIBRARY_INC%\arrow\api.h exit 1 # [win] + + {% set libs = ["arrow"] + (cuda_compiler_version != "None") * ["arrow_cuda"] %} + {% for each_lib in libs %} + # shared + - test -f $PREFIX/lib/lib{{ each_lib }}.so # [linux] + - test -f $PREFIX/lib/lib{{ each_lib }}.dylib # [osx] + - if not exist %LIBRARY_BIN%\{{ each_lib }}.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\{{ each_lib }}.lib exit 1 # [win] + + # absence of static libraries + - test ! -f $PREFIX/lib/lib{{ each_lib }}.a # [unix] + - if exist %LIBRARY_LIB%\{{ each_lib }}_static.lib exit 1 # [win] + {% endfor %} + + # absence of arrow_cuda for CPU builds + - test ! -f $PREFIX/lib/libarrow_cuda.so # [(cuda_compiler_version == "None") and linux] + - test ! -f $PREFIX/lib/libarrow_cuda.a # [(cuda_compiler_version == "None") and linux] + - if exist %LIBRARY_BIN%\arrow_cuda.dll exit 1 # [(cuda_compiler_version == "None") and win] + - if exist %LIBRARY_LIB%\arrow_cuda.lib exit 1 # [(cuda_compiler_version == "None") and win] + - if exist %LIBRARY_LIB%\arrow_cuda_static.lib exit 1 # [(cuda_compiler_version == "None") and win] + + # gdb-wrapper (paths are stacked intentionally) + - test -f $PREFIX/share/gdb/auto-load/$PREFIX/lib/libarrow.so.{{ so_version }}-gdb.py # [linux] + - test -f $PREFIX/share/gdb/auto-load/$PREFIX/lib/libarrow.{{ so_version }}.dylib-gdb.py # [osx] + + {% set libs = [ + "arrow_acero", "arrow_dataset", "arrow_flight", + "arrow_flight_sql", "arrow_substrait", "gandiva", "parquet" + ] %} + {% for each_lib in libs %} + # absence of libraries that belong in other outputs + - test ! -f $PREFIX/lib/lib{{ each_lib }}.so # [linux] + - test ! 
-f $PREFIX/lib/lib{{ each_lib }}.dylib # [osx] + - if exist %LIBRARY_BIN%\{{ each_lib }}.dll exit 1 # [win] + - if exist %LIBRARY_LIB%\{{ each_lib }}.lib exit 1 # [win] + {% endfor %} + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow core + + - name: libarrow-acero + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow-acero", max_pin="x") }} + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ compiler("cxx") }} + host: + - {{ pin_subpackage("libarrow", exact=True) }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + # run-constraints handled by libarrow, since we depend on it with exact=True + + test: + commands: + # headers + - test -f $PREFIX/include/arrow/acero/api.h # [unix] + - if not exist %LIBRARY_INC%\arrow\acero\api.h exit 1 # [win] + + # shared libraries + - test -f $PREFIX/lib/libarrow_acero.so # [linux] + - test -f $PREFIX/lib/libarrow_acero.dylib # [osx] + - if not exist %LIBRARY_BIN%\arrow_acero.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\arrow_acero.lib exit 1 # [win] + + # absence of static libraries + - test ! 
-f $PREFIX/lib/libarrow_acero.a # [unix] + - if exist %LIBRARY_LIB%\arrow_acero_static.lib exit 1 # [win] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow Acero + + - name: libarrow-dataset + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow-dataset", max_pin="x") }} + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ compiler("cxx") }} + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libparquet", exact=True) }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libparquet", exact=True) }} + # run-constraints handled by libarrow, since we depend on it with exact=True + + test: + commands: + # headers + - test -f $PREFIX/include/arrow/dataset/api.h # [unix] + - if not exist %LIBRARY_INC%\arrow\dataset\api.h exit 1 # [win] + + # shared libraries + - test -f $PREFIX/lib/libarrow_dataset.so # [linux] + - test -f $PREFIX/lib/libarrow_dataset.dylib # [osx] + - if not exist %LIBRARY_BIN%\arrow_dataset.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\arrow_dataset.lib exit 1 # [win] + + # absence of static libraries + - test ! 
-f $PREFIX/lib/libarrow_dataset.a # [unix] + - if exist %LIBRARY_LIB%\arrow_dataset_static.lib exit 1 # [win] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow Dataset + + - name: libarrow-flight + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow-flight", max_pin="x") }} + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ compiler("cxx") }} + # needs to run protoc & grpc_cpp_plugin + - libgrpc # [build_platform != target_platform] + - libprotobuf # [build_platform != target_platform] + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - libabseil + - libgrpc + - libprotobuf + - ucx # [linux] + run: + - {{ pin_subpackage("libarrow", exact=True) }} + # run-constraints handled by libarrow, since we depend on it with exact=True + + test: + commands: + # headers + - test -f $PREFIX/include/arrow/flight/types.h # [unix] + - if not exist %LIBRARY_INC%\arrow\flight\types.h exit 1 # [win] + + # shared libraries + - test -f $PREFIX/lib/libarrow_flight.so # [linux] + - test -f $PREFIX/lib/libarrow_flight.dylib # [osx] + - if not exist %LIBRARY_BIN%\arrow_flight.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\arrow_flight.lib exit 1 # [win] + + # absence of static libraries + - test ! 
-f $PREFIX/lib/libarrow_flight.a # [unix] + - if exist %LIBRARY_LIB%\arrow_flight_static.lib exit 1 # [win] + + # Only check UCX on Linux + - test -f $PREFIX/lib/libarrow_flight_transport_ucx.so # [linux] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow Flight + + - name: libarrow-flight-sql + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow-flight-sql", max_pin="x") }} + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ compiler("cxx") }} + # needs to run protoc & grpc_cpp_plugin + - libgrpc # [build_platform != target_platform] + - libprotobuf # [build_platform != target_platform] + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-flight", exact=True) }} + - libprotobuf + run: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-flight", exact=True) }} + # run-constraints handled by libarrow, since we depend on it with exact=True + + test: + commands: + # headers + - test -f $PREFIX/include/arrow/flight/sql/api.h # [unix] + - if not exist %LIBRARY_INC%\arrow\flight\sql\api.h exit 1 # [win] + + # shared libraries + - test -f $PREFIX/lib/libarrow_flight_sql.so # [linux] + - test -f $PREFIX/lib/libarrow_flight_sql.dylib # [osx] + - if not exist %LIBRARY_BIN%\arrow_flight_sql.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\arrow_flight_sql.lib exit 1 # [win] + + # absence of static libraries + - test ! 
-f $PREFIX/lib/libarrow_flight_sql.a # [unix] + - if exist %LIBRARY_LIB%\arrow_flight_sql_static.lib exit 1 # [win] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow Flight SQL + + - name: libarrow-gandiva + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow-gandiva", max_pin="x") }} + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ compiler("cxx") }} + host: + - {{ pin_subpackage("libarrow", max_pin="x") }} + - libutf8proc + # gandiva requires shared libllvm; needs to match version used at build time + - llvm {{ llvm_version }} # [unix] + - openssl + - re2 + - zlib # [win] + run: + - {{ pin_subpackage("libarrow", exact=True) }} + # run-constraints handled by libarrow, since we depend on it with exact=True + + test: + commands: + # headers + - test -f $PREFIX/include/gandiva/engine.h # [unix] + - if not exist %LIBRARY_INC%\gandiva\engine.h exit 1 # [win] + + # shared libraries + - test -f $PREFIX/lib/libgandiva.so # [linux] + - test -f $PREFIX/lib/libgandiva.dylib # [osx] + - if not exist %LIBRARY_BIN%\gandiva.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\gandiva.lib exit 1 # [win] + + # absence of static libraries + - test ! 
-f $PREFIX/lib/libgandiva.a # [unix] + - if exist %LIBRARY_LIB%\gandiva_static.lib exit 1 # [win] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow Gandiva + + - name: libarrow-substrait + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libarrow-substrait", max_pin="x") }} + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ compiler("cxx") }} + - libprotobuf # [build_platform != target_platform] + # for strong run-exports + - {{ compiler("c") }} + - {{ compiler("cxx") }} + - cmake + - ninja + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + - libabseil # [win] + - libprotobuf + run: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + # run-constraints handled by libarrow, since we depend on it with exact=True + + test: + commands: + # headers + - test -f $PREFIX/include/arrow/engine/substrait/api.h # [unix] + - if not exist %LIBRARY_INC%\arrow\engine\substrait\api.h exit 1 # [win] + + # shared libraries + - test -f $PREFIX/lib/libarrow_substrait.so # [linux] + - test -f $PREFIX/lib/libarrow_substrait.dylib # [osx] + - if not exist %LIBRARY_BIN%\arrow_substrait.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\arrow_substrait.lib exit 1 # [win] + + # absence of static libraries + - test ! 
-f $PREFIX/lib/libarrow_substrait.a # [unix] + - if exist %LIBRARY_LIB%\arrow_substrait_static.lib exit 1 # [win] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Arrow Substrait + + - name: libparquet + script: install-libarrow.sh # [unix] + script: install-libarrow.bat # [win] + version: {{ version }} + build: + string: h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + run_exports: + - {{ pin_subpackage("libparquet", max_pin="x") }} + requirements: + build: + - cmake + - ninja + # for strong run-exports + - {{ compiler("c") }} + - {{ compiler("cxx") }} + host: + - {{ pin_subpackage("libarrow", max_pin="x") }} + - openssl + - thrift-cpp + run: + - {{ pin_subpackage("libarrow", exact=True) }} + # run-constraints handled by libarrow, since we depend on it with exact=True + + test: + commands: + # headers + - test -f $PREFIX/include/parquet/api/reader.h # [unix] + - if not exist %LIBRARY_INC%\parquet\api\reader.h exit 1 # [win] + + # shared libraries + - test -f $PREFIX/lib/libparquet.so # [linux] + - test -f $PREFIX/lib/libparquet.dylib # [osx] + - if not exist %LIBRARY_BIN%\parquet.dll exit 1 # [win] + - if not exist %LIBRARY_LIB%\parquet.lib exit 1 # [win] + + # absence of static libraries + - test ! 
-f $PREFIX/lib/libparquet.a # [unix] + - if exist %LIBRARY_LIB%\parquet_static.lib exit 1 # [win] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ libraries for Apache Parquet + + - name: pyarrow + script: build-pyarrow.sh # [unix] + script: build-pyarrow.bat # [win] + version: {{ version }} + build: + string: py{{ CONDA_PY }}h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + ignore_run_exports_from: + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + track_features: {{ "[arrow-cuda]" if cuda_enabled else "" }} + rpaths: + - lib/ + - {{ SP_DIR }}/pyarrow + missing_dso_whitelist: + # not actually missing, but installed into SP_DIR, see tests + - '*/arrow_python.dll' # [win] + - '*/arrow_python_flight.dll' # [win] + requirements: + build: + - {{ compiler("c") }} + - {{ compiler("cxx") }} + # pyarrow does not require nvcc but it needs to link against libraries in libarrow=*=*cuda + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + - python # [build_platform != target_platform] + - cross-python_{{ target_platform }} # [build_platform != target_platform] + - cython # [build_platform != target_platform] + - numpy # [build_platform != target_platform] + - cmake + - ninja + host: + - {{ pin_subpackage("libarrow-all", exact=True) }} + - clangdev {{ llvm_version }} + - llvmdev {{ llvm_version }} + - cython + - numpy + - python + - setuptools + # see https://github.com/apache/arrow/issues/37931 + - setuptools_scm <8 + run: + # full set of libs because run-exports from libarrow-all aren't picked up + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + - {{ pin_subpackage("libarrow-flight", exact=True) }} + - {{ pin_subpackage("libarrow-flight-sql", exact=True) }} + - {{ pin_subpackage("libarrow-gandiva", exact=True) }} + - {{ pin_subpackage("libarrow-substrait", exact=True) 
}} + - {{ pin_subpackage("libparquet", exact=True) }} + - {{ pin_compatible('numpy') }} + - python + run_constrained: + - apache-arrow-proc =*={{ build_ext }} + + test: + files: + - test_read_parquet.py + imports: + - pyarrow + - pyarrow.dataset + - pyarrow.compute + - pyarrow.flight + - pyarrow.gandiva + - pyarrow.orc # [unix] + - pyarrow.parquet + - pyarrow.fs + - pyarrow._s3fs + - pyarrow._hdfs + # We can only test importing cuda package but cannot run when a + # CUDA device is not available, for instance, when building from CI. + # On Windows, we cannot even do that due to `nvcuda.dll` not being found, see + # https://conda-forge.org/docs/maintainer/knowledge_base.html#nvcuda-dll-cannot-be-found-on-windows + # However, we check below for (at least) the presence of a correctly-compiled module + - pyarrow.cuda # [cuda_compiler_version != "None" and not win] + commands: + # libraries that depend on python (and hence aren't in libarrow itself) + - test -f ${SP_DIR}/pyarrow/libarrow_python.so # [linux] + - test -f ${SP_DIR}/pyarrow/libarrow_python_flight.so # [linux] + - test -f ${SP_DIR}/pyarrow/libarrow_python.dylib # [osx] + - test -f ${SP_DIR}/pyarrow/libarrow_python_flight.dylib # [osx] + - if not exist %SP_DIR%\pyarrow\arrow_python.dll exit 1 # [win] + - if not exist %SP_DIR%\pyarrow\arrow_python_flight.dll exit 1 # [win] + + - test -f ${SP_DIR}/pyarrow/include/arrow/python/pyarrow.h # [unix] + - if not exist %SP_DIR%\pyarrow\include\arrow\python\pyarrow.h exit 1 # [win] + + - test ! 
-f ${SP_DIR}/pyarrow/tests/test_array.py # [unix] + - if exist %SP_DIR%/pyarrow/tests/test_array.py exit 1 # [win] + # Need to remove dot from PY_VER; %MYVAR:x=y% replaces "x" in %MYVAR% with "y" + - if not exist %SP_DIR%/pyarrow/_cuda.cp%PY_VER:.=%-win_amd64.pyd exit 1 # [win and cuda_compiler_version != "None"] + - python test_read_parquet.py + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: Python libraries for Apache Arrow + + - name: pyarrow-tests + script: build-pyarrow.sh # [unix] + script: build-pyarrow.bat # [win] + version: {{ version }} + build: + string: py{{ CONDA_PY }}h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + ignore_run_exports_from: + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + track_features: {{ "[arrow-cuda]" if cuda_enabled else "" }} + requirements: + build: + - {{ compiler("c") }} + - {{ compiler("cxx") }} + # pyarrow does not require nvcc but it needs to link against libraries in libarrow=*=*cuda + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + - python # [build_platform != target_platform] + - cross-python_{{ target_platform }} # [build_platform != target_platform] + - cython # [build_platform != target_platform] + - numpy # [build_platform != target_platform] + - cmake + - ninja + host: + - {{ pin_subpackage("libarrow-all", exact=True) }} + - {{ pin_subpackage('pyarrow', exact=True) }} + - clangdev {{ llvm_version }} + - llvmdev {{ llvm_version }} + - cython + - numpy + - python + - setuptools + # see https://github.com/apache/arrow/issues/37931 + - setuptools_scm <8 + run: + - {{ pin_subpackage('pyarrow', exact=True) }} + - python + run_constrained: + - apache-arrow-proc =*={{ build_ext }} + + {% if not (aarch64 or ppc64le) or py == 311 %} + # only run the full test suite for one python version when in emulation + # (each run can take up to ~45min); there's essentially zero divergence + # in behaviour across python versions anyway + 
test: + requires: + # test_cpp_extension_in_python requires a compiler + - {{ compiler("cxx") }} # [linux] + # pytest-lazy-fixture fails with pytest 8 + # See issue on Arrow repo: https://github.com/apache/arrow/issues/39849 + - pytest <8 + - pytest-lazy-fixture + - backports.zoneinfo # [py<39] + - boto3 + - cffi + - cloudpickle + - cython + - fastparquet + - fsspec + - hypothesis + - minio-server + # Pandas pin required due to: https://github.com/apache/arrow/issues/39732 + - pandas <2.2 + - s3fs >=2023 + - scipy + # these are generally (far) behind on migrating abseil/grpc/protobuf, + # and using them as test dependencies blocks the migrator unnecessarily + # - pytorch + # - tensorflow + # we're not building java bindings + # - jpype1 + # doesn't get picked up correctly + # - libhdfs3 + # causes segfaults + # - sparse + source_files: + - testing/data + commands: + - cd ${SP_DIR} # [unix] + - cd %SP_DIR% # [win] + - export ARROW_TEST_DATA="${SRC_DIR}/testing/data" # [unix] + - set "ARROW_TEST_DATA=%SRC_DIR%\testing\data" # [win] + + {% set tests_to_skip = "_not_a_real_test" %} + # we do not have GPUs in CI --> cannot test cuda + {% set tests_to_skip = tests_to_skip + " or test_cuda" + " or test_dlpack_cuda_not_supported"%} + # skip tests that raise SIGINT and crash the test suite + {% set tests_to_skip = tests_to_skip + " or (test_csv and test_cancellation)" %} # [linux] + {% set tests_to_skip = tests_to_skip + " or (test_flight and test_interrupt)" %} # [linux] + # skip tests that make invalid(-for-conda) assumptions about the compilers setup + {% set tests_to_skip = tests_to_skip + " or test_cython_api" %} # [unix] + {% set tests_to_skip = tests_to_skip + " or test_visit_strings" %} # [unix] + # skip tests that cannot succeed in emulation + {% set tests_to_skip = tests_to_skip + " or test_debug_memory_pool_disabled" %} # [aarch64 or ppc64le] + {% set tests_to_skip = tests_to_skip + " or test_env_var_io_thread_count" %} # [aarch64 or ppc64le] + # vvvvvvv TESTS 
THAT SHOULDN'T HAVE TO BE SKIPPED vvvvvvv + # problems with minio + {% set tests_to_skip = tests_to_skip + " or (test_delete_dir and S3FileSystem)" %} + {% set tests_to_skip = tests_to_skip + " or (test_get_file_info and S3FileSystem)" %} + {% set tests_to_skip = tests_to_skip + " or (test_move_directory and S3FileSystem)" %} + # XMinioInvalidObjectName on win: "Object name contains unsupported characters" + {% set tests_to_skip = tests_to_skip + " or test_write_to_dataset_with_partitions_s3fs" %} # [win] + # gandiva tests are segfaulting on ppc + {% set tests_to_skip = tests_to_skip + " or test_gandiva" %} # [ppc64le] + # test failures on ppc (both failing with: Float value was truncated converting to int32) + {% set tests_to_skip = tests_to_skip + " or test_safe_cast_from_float_with_nans_to_int" %} # [ppc64le] + {% set tests_to_skip = tests_to_skip + " or test_float_with_null_as_integer" %} # [ppc64le] + # ^^^^^^^ TESTS THAT SHOULDN'T HAVE TO BE SKIPPED ^^^^^^^ + - pytest pyarrow/ -rfEs -k "not ({{ tests_to_skip }})" + {% endif %} + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: Python test files for Apache Arrow + +about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: C++ and Python libraries for Apache Arrow + +extra: + recipe-maintainers: + - wesm + - xhochy + - leifwalsh + - jreback + - cpcloud + - pcmoritz + - robertnishihara + - siddharthteotia + - kou + - kszucs + - pitrou + - pearu + - nealrichardson + - jakirkham + - h-vetinari + - raulcd + feedstock-name: arrow-cpp diff --git a/tests/test_yaml/stdlib_daal4py_after_meta.yaml b/tests/test_yaml/stdlib_daal4py_after_meta.yaml new file mode 100644 index 000000000..7fd0cb43e --- /dev/null +++ b/tests/test_yaml/stdlib_daal4py_after_meta.yaml @@ -0,0 +1,88 @@ +{% set version = "1.10.0" %} +{% set buildnumber = 0 %} + +package: + name: daal4py + version: {{ version }} + +source: + # fake 
source url to get version migrator to pass + url: https://github.com/scipy/scipy/archive/refs/tags/v{{ version }}.tar.gz + sha256: 3f9e587a96844a9b4ee7f998cfe4dc3964dc95c4ca94d7de6a77bffb99f873da + # url: https://github.com/intel/scikit-learn-intelex/archive/{{ version }}.tar.gz + # sha256: 169c62cbe231eb0b65a4009f035c4114a895a42ae6b004e5895b3aab1c750762 + +build: + skip: true # [not (linux64 or win)] + number: {{ buildnumber }} + include_recipe: false + +requirements: + build: + - make # [linux] + - {{ compiler('cxx') }} # [linux64 or win] + - {{ stdlib("c") }} # [linux64 or win] + host: + - python + - setuptools + - numpy + - dal-devel ==2024.1.0 + - cython + - jinja2 + - mpich # [not win] + - clang-format + - cmake + - pybind11 + run: + - python + - dal ==2024.1.0 + - {{ pin_compatible('numpy') }} + +test: + requires: + - pandas ==1.2.5 # [python_impl == 'pypy'] + - pandas # [python_impl != 'pypy'] + - scipy + - scikit-learn + - xgboost + - lightgbm + - pytest + - mpich # [not win] + source_files: + - examples + - tests + - daal4py + - onedal + commands: + - cd tests + - python -c "import daal4py" + - python -m unittest discover -v -p "test*[!ex].py" + - pytest --verbose --pyargs ../daal4py/sklearn + - pytest --verbose --pyargs ../onedal + #- python run_examples.py temp_removal + +about: + home: https://intelpython.github.io/daal4py/ + license: Apache-2.0 + license_file: + - LICENSE + - doc/daal4py/third-party-programs.txt + summary: A convenient Python API to Intel (R) oneAPI Data Analytics Library + description: | + LEGAL NOTICE: Use of this software package is subject to the + software license agreement (as set forth above, in the license section of + the installed Conda package and/or the README file) and all notices, + disclaimers or license terms for third party or open source software + included in or with the software. +

+ EULA: Apache-2.0 +

+ dev_url: https://github.com/intel/scikit-learn-intelex + doc_url: https://intelpython.github.io/daal4py + +extra: + recipe-maintainers: + # GitHub IDs for maintainers of the recipe. + - napetrov + - Alexsandruss + - maria-Petrova diff --git a/tests/test_yaml/stdlib_daal4py_before_meta.yaml b/tests/test_yaml/stdlib_daal4py_before_meta.yaml new file mode 100644 index 000000000..0b5fc1814 --- /dev/null +++ b/tests/test_yaml/stdlib_daal4py_before_meta.yaml @@ -0,0 +1,87 @@ +{% set version = "1.9.0" %} +{% set buildnumber = 0 %} + +package: + name: daal4py + version: {{ version }} + +source: + # fake source url to get version migrator to pass + url: https://github.com/scipy/scipy/archive/refs/tags/v{{ version }}.tar.gz + sha256: b6d893dc7dcd4138b9e9df59a13c59695e50e80dc5c2cacee0674670693951a1 + # url: https://github.com/intel/scikit-learn-intelex/archive/{{ version }}.tar.gz + # sha256: 169c62cbe231eb0b65a4009f035c4114a895a42ae6b004e5895b3aab1c750762 + +build: + skip: true # [not (linux64 or win)] + number: {{ buildnumber }} + include_recipe: false + +requirements: + build: + - make # [linux] + - {{ compiler('cxx') }} # [linux64 or win] + host: + - python + - setuptools + - numpy + - dal-devel ==2024.1.0 + - cython + - jinja2 + - mpich # [not win] + - clang-format + - cmake + - pybind11 + run: + - python + - dal ==2024.1.0 + - {{ pin_compatible('numpy') }} + +test: + requires: + - pandas ==1.2.5 # [python_impl == 'pypy'] + - pandas # [python_impl != 'pypy'] + - scipy + - scikit-learn + - xgboost + - lightgbm + - pytest + - mpich # [not win] + source_files: + - examples + - tests + - daal4py + - onedal + commands: + - cd tests + - python -c "import daal4py" + - python -m unittest discover -v -p "test*[!ex].py" + - pytest --verbose --pyargs ../daal4py/sklearn + - pytest --verbose --pyargs ../onedal + #- python run_examples.py temp_removal + +about: + home: https://intelpython.github.io/daal4py/ + license: Apache-2.0 + license_file: + - LICENSE + - 
doc/daal4py/third-party-programs.txt + summary: A convenient Python API to Intel (R) oneAPI Data Analytics Library + description: | + LEGAL NOTICE: Use of this software package is subject to the + software license agreement (as set forth above, in the license section of + the installed Conda package and/or the README file) and all notices, + disclaimers or license terms for third party or open source software + included in or with the software. +

+ EULA: Apache-2.0 +

+ dev_url: https://github.com/intel/scikit-learn-intelex + doc_url: https://intelpython.github.io/daal4py + +extra: + recipe-maintainers: + # GitHub IDs for maintainers of the recipe. + - napetrov + - Alexsandruss + - maria-Petrova diff --git a/tests/test_yaml/stdlib_go_after_meta.yaml b/tests/test_yaml/stdlib_go_after_meta.yaml new file mode 100644 index 000000000..ce8c7d42b --- /dev/null +++ b/tests/test_yaml/stdlib_go_after_meta.yaml @@ -0,0 +1,121 @@ +{% set name = "go" %} +{% set version = "1.10.0" %} + +package: + name: {{ name }} + version: {{ version }} + +source: + # fake source url to get version migrator to pass + - url: https://github.com/scipy/scipy/archive/refs/tags/v{{ version }}.tar.gz + sha256: 3f9e587a96844a9b4ee7f998cfe4dc3964dc95c4ca94d7de6a77bffb99f873da + # - folder: go + # url: https://dl.google.com/{{ name }}/go{{ version }}.src.tar.gz + # sha256: 4d196c3d41a0d6c1dfc64d04e3cc1f608b0c436bd87b7060ce3e23234e1f4d5c + # patches: + # # [...snip...] + +build: + binary_relocation: false + detect_binary_files_with_prefix: false + force_ignore_keys: # [win] + - c_compiler # [win] + - cxx_compiler # [win] + - fortran_compiler # [win] + # test data links to these DSOs + missing_dso_whitelist: + - $RPATH/libc.so.6 # [linux and not cgo] + - /usr/lib/libSystem.B.dylib # [osx] + - $SYSROOT\System32\winmm.dll # [win] + number: 0 + skip: true # [linux and s390x] + +requirements: + run: + - {{ pin_subpackage(name, exact=true) }} + - {{ compiler('c') }} # [unix and cgo] + - {{ compiler('m2w64_c') }} # [win and cgo] + +test: + commands: + - go help + +outputs: + - name: go + script: cgo/build.sh # [unix] + script: cgo/build.bat # [win] + + build: # [linux64 and cgo] + binary_relocation: false # [linux64 and cgo] + detect_binary_files_with_prefix: false # [linux64 and cgo] + run_exports: # [linux64 and cgo] + strong: # [linux64 and cgo] + - __glibc >=2.17 # [linux64 and cgo] + requirements: + build: + - {{ compiler('c') }} # [unix and cgo] + - {{ stdlib("c") }} # 
[unix and cgo] + - {{ compiler('cxx') }} # [unix and cgo] + - {{ compiler('fortran') }} # [unix and cgo] + + - {{ compiler('m2w64_c') }} # [win and cgo] + - {{ stdlib("c") }} # [win and cgo] + - {{ compiler('m2w64_cxx') }} # [win and cgo] + - {{ compiler('m2w64_fortran') }} # [win and cgo] + run: + - _go_select ==2.3.0=cgo + run_constrained: + # TODO: Move to run section once conda/conda#9845 is fixed + + - {{ pin_compatible(compiler('c')) }} # [unix and cgo] + - {{ pin_compatible(compiler('cxx')) }} # [unix and cgo] + - {{ pin_compatible(compiler('fortran')) }} # [unix and cgo] + + - {{ pin_compatible(compiler('m2w64_c')) }} # [win and cgo] + - {{ pin_compatible(compiler('m2w64_cxx')) }} # [win and cgo] + - {{ pin_compatible(compiler('m2w64_fortran')) }} # [win and cgo] + test: + requires: + - {{ compiler('c') }} # [unix and cgo] + - {{ compiler('m2w64_c') }} # [win and cgo] + - git # [linux] + - perl + files: + - cgo + commands: + - chmod +x cgo/test.sh # [unix] + - ./cgo/test.sh # [unix] + - cgo/test.bat # [win] + about: + home: https://go.dev/ + license: BSD-3-Clause + license_family: BSD + license_file: go/LICENSE + summary: The Go Programming Language + +about: + home: https://go.dev/ + license: BSD-3-Clause + license_family: BSD + license_file: go/LICENSE + summary: The Go Programming Language (cgo) + description: | + Go is expressive, concise, clean, and efficient. Its concurrency mechanisms + make it easy to write programs that get the most out of multicore and + networked machines, while its novel type system enables flexible and + modular program construction. Go compiles quickly to machine code yet has + the convenience of garbage collection and the power of run-time reflection. + It's a fast, statically typed, compiled language that feels like a + dynamically typed, interpreted language. 
+ doc_url: https://go.dev/doc + dev_url: https://github.com/golang/ + +extra: + feedstock-name: go + recipe-maintainers: + - nehaljwani + - scopatz + - sodre + - stuarteberg + - xhochy + - hmaarrfk diff --git a/tests/test_yaml/stdlib_go_before_meta.yaml b/tests/test_yaml/stdlib_go_before_meta.yaml new file mode 100644 index 000000000..e67d31403 --- /dev/null +++ b/tests/test_yaml/stdlib_go_before_meta.yaml @@ -0,0 +1,122 @@ +{% set name = "go" %} +{% set version = "1.9.0" %} + +package: + name: {{ name }} + version: {{ version }} + +source: + # fake source url to get version migrator to pass + - url: https://github.com/scipy/scipy/archive/refs/tags/v{{ version }}.tar.gz + sha256: b6d893dc7dcd4138b9e9df59a13c59695e50e80dc5c2cacee0674670693951a1 + # - folder: go + # url: https://dl.google.com/{{ name }}/go{{ version }}.src.tar.gz + # sha256: 4d196c3d41a0d6c1dfc64d04e3cc1f608b0c436bd87b7060ce3e23234e1f4d5c + # patches: + # # [...snip...] + +build: + binary_relocation: false + detect_binary_files_with_prefix: false + force_ignore_keys: # [win] + - c_compiler # [win] + - cxx_compiler # [win] + - fortran_compiler # [win] + # test data links to these DSOs + missing_dso_whitelist: + - $RPATH/libc.so.6 # [linux and not cgo] + - /usr/lib/libSystem.B.dylib # [osx] + - $SYSROOT\System32\winmm.dll # [win] + number: 0 + skip: true # [linux and s390x] + +requirements: + run: + - {{ pin_subpackage(name, exact=true) }} + - {{ compiler('c') }} # [unix and cgo] + - {{ compiler('m2w64_c') }} # [win and cgo] + +test: + commands: + - go help + +outputs: + - name: go + script: cgo/build.sh # [unix] + script: cgo/build.bat # [win] + + build: # [linux64 and cgo] + binary_relocation: false # [linux64 and cgo] + detect_binary_files_with_prefix: false # [linux64 and cgo] + run_exports: # [linux64 and cgo] + strong: # [linux64 and cgo] + - __glibc >=2.17 # [linux64 and cgo] + requirements: + build: + - {{ compiler('c') }} # [unix and cgo] + - sysroot_linux-64 2.17 # [linux64 and cgo] + - {{ 
compiler('cxx') }} # [unix and cgo] + - {{ compiler('fortran') }} # [unix and cgo] + + - {{ compiler('m2w64_c') }} # [win and cgo] + - {{ compiler('m2w64_cxx') }} # [win and cgo] + - {{ compiler('m2w64_fortran') }} # [win and cgo] + run: + - _go_select ==2.3.0=cgo + - sysroot_linux-64 2.17 # [linux64 and cgo] + run_constrained: + # TODO: Move to run section once conda/conda#9845 is fixed + - __osx >={{ MACOSX_DEPLOYMENT_TARGET }} # [osx] + + - {{ pin_compatible(compiler('c')) }} # [unix and cgo] + - {{ pin_compatible(compiler('cxx')) }} # [unix and cgo] + - {{ pin_compatible(compiler('fortran')) }} # [unix and cgo] + + - {{ pin_compatible(compiler('m2w64_c')) }} # [win and cgo] + - {{ pin_compatible(compiler('m2w64_cxx')) }} # [win and cgo] + - {{ pin_compatible(compiler('m2w64_fortran')) }} # [win and cgo] + test: + requires: + - {{ compiler('c') }} # [unix and cgo] + - {{ compiler('m2w64_c') }} # [win and cgo] + - git # [linux] + - perl + files: + - cgo + commands: + - chmod +x cgo/test.sh # [unix] + - ./cgo/test.sh # [unix] + - cgo/test.bat # [win] + about: + home: https://go.dev/ + license: BSD-3-Clause + license_family: BSD + license_file: go/LICENSE + summary: The Go Programming Language + +about: + home: https://go.dev/ + license: BSD-3-Clause + license_family: BSD + license_file: go/LICENSE + summary: The Go Programming Language (cgo) + description: | + Go is expressive, concise, clean, and efficient. Its concurrency mechanisms + make it easy to write programs that get the most out of multicore and + networked machines, while its novel type system enables flexible and + modular program construction. Go compiles quickly to machine code yet has + the convenience of garbage collection and the power of run-time reflection. + It's a fast, statically typed, compiled language that feels like a + dynamically typed, interpreted language. 
+ doc_url: https://go.dev/doc + dev_url: https://github.com/golang/ + +extra: + feedstock-name: go + recipe-maintainers: + - nehaljwani + - scopatz + - sodre + - stuarteberg + - xhochy + - hmaarrfk diff --git a/tests/test_yaml/stdlib_polars_after_meta.yaml b/tests/test_yaml/stdlib_polars_after_meta.yaml new file mode 100644 index 000000000..216a894b4 --- /dev/null +++ b/tests/test_yaml/stdlib_polars_after_meta.yaml @@ -0,0 +1,75 @@ +{% set name = "polars" %} +{% set version = "1.10.0" %} + +package: + name: {{ name|lower }} + version: {{ version }} + +source: + # fake source url to get version migrator to pass + - url: https://github.com/scipy/scipy/archive/refs/tags/v{{ version }}.tar.gz + sha256: 3f9e587a96844a9b4ee7f998cfe4dc3964dc95c4ca94d7de6a77bffb99f873da + # - url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/polars-{{ version }}.tar.gz + # sha256: ec742fdf41e16ff699c043259ba94a11bbc2f7dcb978d768495db1ff2b3c5c20 + +build: + number: 0 + skip: true # [win and python_impl=="pypy"] + +requirements: + build: + - python # [build_platform != target_platform] + # there is no cross-python for linux-64 -> win-64 + - cross-python_{{ target_platform }} # [build_platform != target_platform and not target_platform == "win-64"] + - crossenv # [build_platform != target_platform] + - maturin >=1.2.1,<2 # [build_platform != target_platform] + - {{ compiler('c') }} + - {{ stdlib("c") }} + - {{ compiler('rust') }} + - posix # [build_platform == "win-64"] + - cmake + - make # [unix] + - cargo-bundle-licenses + host: + # this is a hacky way to do cross-compilation from linux to windows + - python # [not (build_platform == "linux-64" and target_platform == "win-64")] + - pip # [not (build_platform == "linux-64" and target_platform == "win-64")] + - maturin >=1.2.1,<2 # [not (build_platform == "linux-64" and target_platform == "win-64")] + run: + - python + - numpy >=1.16.0 + - backports.zoneinfo # [py<39] + - typing_extensions >=4.0.0 # [py<311] + - packaging # 
[py>=310] + +test: + imports: + - polars + commands: + - pip check + - python -c "from polars import DataFrame" + requires: + - pip + +about: + home: https://github.com/pola-rs/polars + license: MIT + license_family: MIT + license_file: + - LICENSE + - THIRDPARTY.yml + summary: Polars is a blazingly fast DataFrames library implemented in Rust using Apache Arrow(2) as memory model. + doc_url: https://pola-rs.github.io/polars-book/user-guide/index.html + dev_url: https://github.com/pola-rs/polars + +extra: + recipe-maintainers: + - borchero + - Maxyme + - timkpaine + - ritchie46 + - sugatoray + - xhochy + - dhirschfeld + - pavelzw + - '0xbe7a' diff --git a/tests/test_yaml/stdlib_polars_before_meta.yaml b/tests/test_yaml/stdlib_polars_before_meta.yaml new file mode 100644 index 000000000..4221271f5 --- /dev/null +++ b/tests/test_yaml/stdlib_polars_before_meta.yaml @@ -0,0 +1,74 @@ +{% set name = "polars" %} +{% set version = "1.9.0" %} + +package: + name: {{ name|lower }} + version: {{ version }} + +source: + # fake source url to get version migrator to pass + - url: https://github.com/scipy/scipy/archive/refs/tags/v{{ version }}.tar.gz + sha256: b6d893dc7dcd4138b9e9df59a13c59695e50e80dc5c2cacee0674670693951a1 + # - url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/polars-{{ version }}.tar.gz + # sha256: ec742fdf41e16ff699c043259ba94a11bbc2f7dcb978d768495db1ff2b3c5c20 + +build: + number: 0 + skip: true # [win and python_impl=="pypy"] + +requirements: + build: + - python # [build_platform != target_platform] + # there is no cross-python for linux-64 -> win-64 + - cross-python_{{ target_platform }} # [build_platform != target_platform and not target_platform == "win-64"] + - crossenv # [build_platform != target_platform] + - maturin >=1.2.1,<2 # [build_platform != target_platform] + - {{ compiler('c') }} + - {{ compiler('rust') }} + - posix # [build_platform == "win-64"] + - cmake + - make # [unix] + - cargo-bundle-licenses + host: + # this is a hacky way to 
do cross-compilation from linux to windows + - python # [not (build_platform == "linux-64" and target_platform == "win-64")] + - pip # [not (build_platform == "linux-64" and target_platform == "win-64")] + - maturin >=1.2.1,<2 # [not (build_platform == "linux-64" and target_platform == "win-64")] + run: + - python + - numpy >=1.16.0 + - backports.zoneinfo # [py<39] + - typing_extensions >=4.0.0 # [py<311] + - packaging # [py>=310] + +test: + imports: + - polars + commands: + - pip check + - python -c "from polars import DataFrame" + requires: + - pip + +about: + home: https://github.com/pola-rs/polars + license: MIT + license_family: MIT + license_file: + - LICENSE + - THIRDPARTY.yml + summary: Polars is a blazingly fast DataFrames library implemented in Rust using Apache Arrow(2) as memory model. + doc_url: https://pola-rs.github.io/polars-book/user-guide/index.html + dev_url: https://github.com/pola-rs/polars + +extra: + recipe-maintainers: + - borchero + - Maxyme + - timkpaine + - ritchie46 + - sugatoray + - xhochy + - dhirschfeld + - pavelzw + - '0xbe7a' diff --git a/tests/test_yaml/stdlib_sinabs_after_meta.yaml b/tests/test_yaml/stdlib_sinabs_after_meta.yaml new file mode 100644 index 000000000..4a419bcd6 --- /dev/null +++ b/tests/test_yaml/stdlib_sinabs_after_meta.yaml @@ -0,0 +1,52 @@ +{% set name = "sinabs" %} +{% set version = "1.10.0" %} + +package: + name: {{ name|lower }} + version: {{ version }} + +source: + # fake source url to get version migrator to pass + url: https://github.com/scipy/scipy/archive/refs/tags/v{{ version }}.tar.gz + sha256: 3f9e587a96844a9b4ee7f998cfe4dc3964dc95c4ca94d7de6a77bffb99f873da + # url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/sinabs-{{ version }}.tar.gz + # sha256: 0277cba4dbdac68f0d71c9ee7df40283059a03372e026c534d712d16db3be1ee + +build: + number: 0 + noarch: python + script: {{ PYTHON }} -m pip install . 
-vv + +requirements: + build: + - {{ stdlib("c") }} + host: + - pbr + - pip + - python >=3.6 + run: + - numpy + - pbr + - python >=3.6 + - pytorch >=1.9.0 + - onnx + - nir + - nirtorch + +test: + imports: + - sinabs + commands: + - pip check + requires: + - pip + +about: + home: https://pypi.org/project/sinabs/ + summary: SynSense Spiking Neural Network simulator for deep neural networks (DNNs). + license: AGPL-3.0-only + license_file: LICENSE + +extra: + recipe-maintainers: + - Tobias-Fischer diff --git a/tests/test_yaml/stdlib_sinabs_before_meta.yaml b/tests/test_yaml/stdlib_sinabs_before_meta.yaml new file mode 100644 index 000000000..dd290ea17 --- /dev/null +++ b/tests/test_yaml/stdlib_sinabs_before_meta.yaml @@ -0,0 +1,52 @@ +{% set name = "sinabs" %} +{% set version = "1.9.0" %} + +package: + name: {{ name|lower }} + version: {{ version }} + +source: + # fake source url to get version migrator to pass + url: https://github.com/scipy/scipy/archive/refs/tags/v{{ version }}.tar.gz + sha256: b6d893dc7dcd4138b9e9df59a13c59695e50e80dc5c2cacee0674670693951a1 + # url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/sinabs-{{ version }}.tar.gz + # sha256: 0277cba4dbdac68f0d71c9ee7df40283059a03372e026c534d712d16db3be1ee + +build: + number: 0 + noarch: python + script: {{ PYTHON }} -m pip install . -vv + +requirements: + build: + - sysroot_linux-64 ==2.17 # [linux64] + host: + - pbr + - pip + - python >=3.6 + run: + - numpy + - pbr + - python >=3.6 + - pytorch >=1.9.0 + - onnx + - nir + - nirtorch + +test: + imports: + - sinabs + commands: + - pip check + requires: + - pip + +about: + home: https://pypi.org/project/sinabs/ + summary: SynSense Spiking Neural Network simulator for deep neural networks (DNNs). 
+ license: AGPL-3.0-only + license_file: LICENSE + +extra: + recipe-maintainers: + - Tobias-Fischer diff --git a/tests/test_yaml/stdlib_skip_migration_after_meta.yaml b/tests/test_yaml/stdlib_skip_migration_after_meta.yaml new file mode 100644 index 000000000..ae72755ed --- /dev/null +++ b/tests/test_yaml/stdlib_skip_migration_after_meta.yaml @@ -0,0 +1,75 @@ +{% set name = "polars" %} +{% set version = "1.10.0" %} + +package: + name: {{ name|lower }} + version: {{ version }} + +source: + # fake source url to get version migrator to pass + - url: https://github.com/scipy/scipy/archive/refs/tags/v{{ version }}.tar.gz + sha256: 3f9e587a96844a9b4ee7f998cfe4dc3964dc95c4ca94d7de6a77bffb99f873da + # - url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/polars-{{ version }}.tar.gz + # sha256: ec742fdf41e16ff699c043259ba94a11bbc2f7dcb978d768495db1ff2b3c5c20 + +build: + number: 0 + skip: true # [win and python_impl=="pypy"] + +requirements: + build: + - python # [build_platform != target_platform] + # there is no cross-python for linux-64 -> win-64 + - cross-python_{{ target_platform }} # [build_platform != target_platform and not target_platform == "win-64"] + - crossenv # [build_platform != target_platform] + - maturin >=1.2.1,<2 # [build_platform != target_platform] + - {{ compiler('c') }} + - {{ compiler('rust') }} + - posix # [build_platform == "win-64"] + - cmake + - make # [unix] + - cargo-bundle-licenses + host: + - {{ stdlib("c") }} + # this is a hacky way to do cross-compilation from linux to windows + - python # [not (build_platform == "linux-64" and target_platform == "win-64")] + - pip # [not (build_platform == "linux-64" and target_platform == "win-64")] + - maturin >=1.2.1,<2 # [not (build_platform == "linux-64" and target_platform == "win-64")] + run: + - python + - numpy >=1.16.0 + - backports.zoneinfo # [py<39] + - typing_extensions >=4.0.0 # [py<311] + - packaging # [py>=310] + +test: + imports: + - polars + commands: + - pip check + - python -c 
"from polars import DataFrame" + requires: + - pip + +about: + home: https://github.com/pola-rs/polars + license: MIT + license_family: MIT + license_file: + - LICENSE + - THIRDPARTY.yml + summary: Polars is a blazingly fast DataFrames library implemented in Rust using Apache Arrow(2) as memory model. + doc_url: https://pola-rs.github.io/polars-book/user-guide/index.html + dev_url: https://github.com/pola-rs/polars + +extra: + recipe-maintainers: + - borchero + - Maxyme + - timkpaine + - ritchie46 + - sugatoray + - xhochy + - dhirschfeld + - pavelzw + - '0xbe7a' diff --git a/tests/test_yaml/stdlib_skip_migration_before_meta.yaml b/tests/test_yaml/stdlib_skip_migration_before_meta.yaml new file mode 100644 index 000000000..ed074ee75 --- /dev/null +++ b/tests/test_yaml/stdlib_skip_migration_before_meta.yaml @@ -0,0 +1,75 @@ +{% set name = "polars" %} +{% set version = "1.9.0" %} + +package: + name: {{ name|lower }} + version: {{ version }} + +source: + # fake source url to get version migrator to pass + - url: https://github.com/scipy/scipy/archive/refs/tags/v{{ version }}.tar.gz + sha256: b6d893dc7dcd4138b9e9df59a13c59695e50e80dc5c2cacee0674670693951a1 + # - url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/polars-{{ version }}.tar.gz + # sha256: ec742fdf41e16ff699c043259ba94a11bbc2f7dcb978d768495db1ff2b3c5c20 + +build: + number: 0 + skip: true # [win and python_impl=="pypy"] + +requirements: + build: + - python # [build_platform != target_platform] + # there is no cross-python for linux-64 -> win-64 + - cross-python_{{ target_platform }} # [build_platform != target_platform and not target_platform == "win-64"] + - crossenv # [build_platform != target_platform] + - maturin >=1.2.1,<2 # [build_platform != target_platform] + - {{ compiler('c') }} + - {{ compiler('rust') }} + - posix # [build_platform == "win-64"] + - cmake + - make # [unix] + - cargo-bundle-licenses + host: + - {{ stdlib("c") }} + # this is a hacky way to do cross-compilation from linux to 
windows + - python # [not (build_platform == "linux-64" and target_platform == "win-64")] + - pip # [not (build_platform == "linux-64" and target_platform == "win-64")] + - maturin >=1.2.1,<2 # [not (build_platform == "linux-64" and target_platform == "win-64")] + run: + - python + - numpy >=1.16.0 + - backports.zoneinfo # [py<39] + - typing_extensions >=4.0.0 # [py<311] + - packaging # [py>=310] + +test: + imports: + - polars + commands: + - pip check + - python -c "from polars import DataFrame" + requires: + - pip + +about: + home: https://github.com/pola-rs/polars + license: MIT + license_family: MIT + license_file: + - LICENSE + - THIRDPARTY.yml + summary: Polars is a blazingly fast DataFrames library implemented in Rust using Apache Arrow(2) as memory model. + doc_url: https://pola-rs.github.io/polars-book/user-guide/index.html + dev_url: https://github.com/pola-rs/polars + +extra: + recipe-maintainers: + - borchero + - Maxyme + - timkpaine + - ritchie46 + - sugatoray + - xhochy + - dhirschfeld + - pavelzw + - '0xbe7a'