diff --git a/.changes/next-release/feature-SourceDistribution-54150.json b/.changes/next-release/feature-SourceDistribution-54150.json new file mode 100644 index 000000000000..f778a48d531c --- /dev/null +++ b/.changes/next-release/feature-SourceDistribution-54150.json @@ -0,0 +1,5 @@ +{ + "type": "feature", + "category": "Source Distribution", + "description": "Add supported autotools interface for building from source." +} diff --git a/.github/workflows/source-dist-tests.yml b/.github/workflows/source-dist-tests.yml new file mode 100644 index 000000000000..1c777d409bb7 --- /dev/null +++ b/.github/workflows/source-dist-tests.yml @@ -0,0 +1,33 @@ + +name: Run source distribution tests + +on: + push: + pull_request: + branches-ignore: [ master ] + +jobs: + build: + + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: ["3.8", "3.9", "3.10"] + os: [ubuntu-latest, macOS-latest, windows-latest] + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python scripts/ci/install + python scripts/ci/install-build-system + python -m pip freeze --all + - name: Run build-system tests + run: | + pip uninstall -y awscli + python scripts/ci/run-build-system-tests \ No newline at end of file diff --git a/.gitignore b/.gitignore index 883d71645934..ba43271401dc 100644 --- a/.gitignore +++ b/.gitignore @@ -54,3 +54,27 @@ dist/ # autocomplete index awscli/data/ac.index + +# Build system files +Makefile +config.log +config.status + +# autoconf +autom4te.cache +/autoscan.log +/autoscan-*.log +/aclocal.m4 +/compile +/config.cache +/config.guess +/config.h.in +/config.log +/config.status +/config.sub +/configure +/configure.scan +/depcomp +/install-sh +/missing +/stamp-h1 \ No newline at end of file diff --git a/Makefile.in b/Makefile.in new file mode 100644 index 000000000000..e667896b880d 
# Makefile.in -- template processed by ./configure into the Makefile.
# All targets delegate to the Python build backend under backends/build_system.

# Installation prefixes, substituted by autoconf at configure time.
prefix = @prefix@
exec_prefix = @exec_prefix@
bindir = @bindir@
libdir = @libdir@
# Scratch directory where the build backend stages its artifacts.
aws_cli_builddir = $(builddir)/build
# Entry point of the Python build system driven by these targets.
build_backend = $(srcdir)/backends/build_system

builddir = @builddir@
srcdir = @srcdir@
VPATH = @srcdir@

# Python interpreter detected by configure.
PYTHON = @PYTHON@

# INSTALL_TYPE is "portable-exe" or "system-sandbox"; DOWNLOAD_DEPS_FLAG is
# either empty or "--download-deps", both chosen at configure time.
INSTALL_TYPE = @INSTALL_TYPE@
DOWNLOAD_DEPS_FLAG = @DOWNLOAD_DEPS_FLAG@

all: build

# Build the requested artifact into $(aws_cli_builddir).
build:
	"$(PYTHON)" "$(build_backend)" \
		build \
		--artifact "$(INSTALL_TYPE)" \
		--build-dir "$(aws_cli_builddir)" $(DOWNLOAD_DEPS_FLAG)

clean:
	rm -rf "$(aws_cli_builddir)"

# DESTDIR supports staged installs (e.g. distro packaging).
install:
	"$(PYTHON)" "$(build_backend)" \
		install \
		--build-dir "$(aws_cli_builddir)" \
		--lib-dir "$(DESTDIR)$(libdir)" \
		--bin-dir "$(DESTDIR)$(bindir)"

uninstall:
	"$(PYTHON)" "$(build_backend)" \
		uninstall \
		--lib-dir "$(DESTDIR)$(libdir)" \
		--bin-dir "$(DESTDIR)$(bindir)"

.PHONY: all build install uninstall
def create_exe(aws_venv, build_dir):
    """Build the PyInstaller-based portable exe from the bootstrapped venv.

    The exe workspace is recreated from scratch so stale artifacts from a
    previous build can never leak into the output.
    """
    exe_workspace = os.path.join(build_dir, "exe")
    if os.path.exists(exe_workspace):
        shutil.rmtree(exe_workspace)
    builder = ExeBuilder(exe_workspace, aws_venv)
    builder.build()


def build(parsed_args):
    """Handler for the ``build`` subcommand: bootstrap a venv and, for the
    portable-exe artifact, produce the frozen executable as well."""
    aws_venv = _bootstrap_venv(
        parsed_args.build_dir,
        parsed_args.artifact,
        parsed_args.download_deps,
    )
    if parsed_args.artifact == ArtifactType.PORTABLE_EXE.value:
        create_exe(aws_venv, parsed_args.build_dir)


def validate(parsed_args):
    """Handler for ``validate-env``: verify build dependencies are present."""
    validate_env(parsed_args.artifact)


def install(parsed_args):
    """Handler for ``install``: copy the built artifact into the lib dir and
    expose the CLI entry points in the bin dir."""
    build_dir = parsed_args.build_dir
    install_dir = os.path.join(parsed_args.lib_dir, INSTALL_DIRNAME)
    bin_dir = parsed_args.bin_dir
    installer = Installer(build_dir)
    installer.install(install_dir, bin_dir)


def uninstall(parsed_args):
    """Handler for ``uninstall``: remove the installed tree and entry points."""
    install_dir = os.path.join(parsed_args.lib_dir, INSTALL_DIRNAME)
    bin_dir = parsed_args.bin_dir
    uninstaller = Uninstaller()
    uninstaller.uninstall(install_dir, bin_dir)


def _bootstrap_venv(
    build_dir: str, artifact_type: str, download_deps: bool
) -> "AwsCliVenv":
    """Create a fresh virtualenv under ``build_dir`` and bootstrap it with the
    CLI plus the dependencies appropriate for ``artifact_type``.

    NOTE: renamed from the misspelled ``_bootstap_venv``; this is a private
    helper used only by ``build`` above.
    """
    venv_path = os.path.join(build_dir, "venv")
    # Always start from a clean venv so repeated builds are reproducible.
    if os.path.exists(venv_path):
        shutil.rmtree(venv_path)
    os.makedirs(venv_path)
    aws_venv = AwsCliVenv(venv_path)
    aws_venv.create()
    aws_venv.bootstrap(artifact_type, download_deps)
    return aws_venv


def main():
    """Parse the subcommand and dispatch to its handler."""
    parser = argparse.ArgumentParser()

    # required=True gives a clean usage error instead of an AttributeError
    # on parsed_args.func when no subcommand is supplied.
    subparser = parser.add_subparsers(dest="command", required=True)

    validate_env_parser = subparser.add_parser("validate-env")
    validate_env_parser.add_argument(
        "--artifact", choices=[e.value for e in ArtifactType], required=True
    )
    validate_env_parser.set_defaults(func=validate)

    build_parser = subparser.add_parser("build")
    build_parser.add_argument(
        "--artifact", choices=[e.value for e in ArtifactType], required=True
    )
    build_parser.add_argument(
        "--build-dir", default=BUILD_DIR, type=os.path.abspath
    )
    build_parser.add_argument("--download-deps", action="store_true")
    build_parser.set_defaults(func=build)

    install_parser = subparser.add_parser("install")
    install_parser.add_argument(
        "--build-dir", default=BUILD_DIR, type=os.path.abspath
    )
    install_parser.add_argument(
        "--lib-dir", required=True, type=os.path.abspath
    )
    install_parser.add_argument(
        "--bin-dir", required=True, type=os.path.abspath
    )
    install_parser.set_defaults(func=install)

    uninstall_parser = subparser.add_parser("uninstall")
    uninstall_parser.add_argument(
        "--lib-dir", required=True, type=os.path.abspath
    )
    uninstall_parser.add_argument(
        "--bin-dir", required=True, type=os.path.abspath
    )
    uninstall_parser.set_defaults(func=uninstall)

    parsed_args = parser.parse_args()
    parsed_args.func(parsed_args)


if __name__ == "__main__":
    main()
class AwsCliVenv:
    """A virtualenv that gets bootstrapped with the AWS CLI and its deps.

    Two bootstrap modes exist: download dependencies from a lock file, or
    copy the parent interpreter's already-installed site-packages in.
    """

    # Console scripts copied from the parent environment when bootstrapping
    # without downloading dependencies (needed to build the portable exe).
    _PARENT_SCRIPTS_TO_COPY = [
        "pyinstaller",
        "pyinstaller.exe",
    ]

    def __init__(self, venv_dir: str, utils: Utils = None):
        self._venv_dir = venv_dir
        if utils is None:
            utils = Utils()
        self._utils = utils

    def create(self):
        """Create the virtualenv on disk (with pip available)."""
        self._utils.create_venv(self._venv_dir, with_pip=True)

    def bootstrap(
        self, artifact_type: ArtifactType, download_deps: bool = False
    ):
        """Install the CLI and its dependencies into this venv.

        When ``download_deps`` is True, dependencies come from the pinned
        lock files (portable-exe vs system-sandbox); otherwise they are
        copied from the parent interpreter's environment.
        """
        if download_deps:
            self._install_requirements(DOWNLOAD_DEPS_BOOTSTRAP_LOCK)
            if artifact_type == ArtifactType.PORTABLE_EXE.value:
                self._install_requirements(PORTABLE_EXE_REQUIREMENTS_LOCK)
            else:
                self._install_requirements(SYSTEM_SANDBOX_REQUIREMENTS_LOCK)
        else:
            self._copy_parent_packages()
        self._install_awscli()
        self._update_metadata()
        self._update_windows_script_header()

    def _copy_parent_packages(self):
        """Copy the parent env's site-packages and needed console scripts
        into this venv instead of downloading anything."""
        for site_package in site.getsitepackages():
            self._utils.copy_directory_contents_into(
                site_package, self._site_packages()
            )
        parent_scripts = pathlib.Path(sys.executable).parents[0]
        for script in self._PARENT_SCRIPTS_TO_COPY:
            source = os.path.join(parent_scripts, script)
            if self._utils.path_exists(source):
                self._utils.copy_file(
                    source, os.path.join(self.bin_dir, script)
                )

    def _install_requirements(self, requirements_file, cwd=None):
        """pip-install a requirements file into this venv."""
        self._pip_install(
            ["--no-build-isolation", "-r", requirements_file],
            cwd=cwd,
        )

    def _install_awscli(self):
        """Install the CLI itself from the repo checkout, offline
        (--no-index) against the already-installed dependencies."""
        self._pip_install(
            [
                ROOT_DIR,
                "--no-build-isolation",
                "--no-cache-dir",
                "--no-index",
            ]
        )

    def _update_windows_script_header(self):
        # When installing to a venv pip will rewrite shebang lines
        # to reference the relevant virtualenv directly. This is not
        # the case for aws.cmd which is our own windows cmd file
        # and does not have a shebang that is re-writable.
        # We need to manually overwrite the header line in this script
        # to reference the current virtualenv.
        # If we are not on Windows then this is not relevant.
        if not IS_WINDOWS:
            return
        python_exe_path = os.path.join(self.bin_dir, "python.exe")
        exe_path = os.path.join(self.bin_dir, "aws.cmd")
        lines = self._utils.read_file_lines(exe_path)
        lines[0] = self._utils.get_script_header(python_exe_path)
        self._utils.write_file(exe_path, "".join(lines))

    def _update_metadata(self):
        """Record the distribution source in the installed CLI metadata."""
        self._utils.update_metadata(
            self._site_packages(),
            distribution_source=DISTRIBUTION_SOURCE_SANDBOX,
        )

    @property
    def bin_dir(self):
        # "Scripts" on Windows, "bin" elsewhere.
        return os.path.join(self._venv_dir, BIN_DIRNAME)

    @property
    def python_exe(self):
        return os.path.join(self.bin_dir, PYTHON_EXE_NAME)

    def _pip_install(self, args, cwd=None):
        """Run ``python -m pip install`` with this venv's interpreter."""
        args = [self.python_exe, "-m", "pip", "install"] + args
        run_kwargs = {"check": True}
        if IS_WINDOWS:
            args = " ".join([str(a) for a in args])
            # The tests on windows will fail when executed with
            # the wrapper test runner script in scripts/ci if this
            # is not executed from shell.
            run_kwargs["shell"] = True
        if cwd is not None:
            run_kwargs["cwd"] = cwd
        self._utils.run(args, **run_kwargs)

    def _site_packages(self) -> str:
        """Return this venv's site-packages dir, asking the venv's own
        interpreter so the answer matches its runtime layout."""
        # On windows the getsitepackages can return the root venv dir.
        # So instead of just taking the first entry, we need to take the
        # first entry that contains the string "site-packages" in the path.
        site_path = [path for path in json.loads(
            subprocess.check_output(
                [
                    self.python_exe,
                    "-c",
                    "import site, json; print(json.dumps(site.getsitepackages()))",
                ]
            )
            .decode()
            .strip()
        ) if "site-packages" in path][0]
        return site_path
import sys
from enum import Enum
from pathlib import Path


# Repository root: two levels above backends/build_system/.
ROOT_DIR = Path(__file__).parents[2]
# Default scratch directory for build artifacts.
BUILD_DIR = ROOT_DIR / "build"

# Assets used only when building the portable (PyInstaller) exe.
EXE_DIR = ROOT_DIR / "exe"
EXE_ASSETS_DIR = EXE_DIR / "assets"
PYINSTALLER_DIR = EXE_DIR / "pyinstaller"

# Platform specific values
IS_WINDOWS = sys.platform == "win32"
BIN_DIRNAME = "Scripts" if IS_WINDOWS else "bin"
PYTHON_EXE_NAME = "python.exe" if IS_WINDOWS else "python"
PYINSTALLER_EXE_NAME = "pyinstaller.exe" if IS_WINDOWS else "pyinstaller"
CLI_SCRIPTS = ["aws.cmd"] if IS_WINDOWS else ["aws", "aws_completer"]
# Lock files are maintained separately per platform.
LOCK_SUFFIX = "win-lock.txt" if IS_WINDOWS else "lock.txt"

# Requirements files
REQUIREMENTS_DIR = ROOT_DIR / "requirements"
BOOTSTRAP_REQUIREMENTS = REQUIREMENTS_DIR / "bootstrap.txt"
DOWNLOAD_DEPS_BOOTSTRAP = REQUIREMENTS_DIR / "download-deps" / "bootstrap.txt"
DOWNLOAD_DEPS_BOOTSTRAP_LOCK = REQUIREMENTS_DIR / "download-deps" / f"bootstrap-{LOCK_SUFFIX}"
PORTABLE_EXE_REQUIREMENTS = REQUIREMENTS_DIR / "portable-exe-extras.txt"
PORTABLE_EXE_REQUIREMENTS_LOCK = REQUIREMENTS_DIR / "download-deps" / f"portable-exe-{LOCK_SUFFIX}"
SYSTEM_SANDBOX_REQUIREMENTS_LOCK = REQUIREMENTS_DIR / "download-deps" / f"system-sandbox-{LOCK_SUFFIX}"

# Auto-complete index
AC_INDEX = ROOT_DIR / "awscli" / "data" / "ac.index"

# Directory name created under the target lib dir at install time.
INSTALL_DIRNAME = "aws-cli"


# Values written into the installed CLI's metadata.json to record how
# this copy of the CLI was distributed.
DISTRIBUTION_SOURCE_EXE = "source-exe"
DISTRIBUTION_SOURCE_SANDBOX = "source-sandbox"


class ArtifactType(Enum):
    """The two artifacts the build system can produce."""

    PORTABLE_EXE = "portable-exe"
    SYSTEM_SANDBOX = "system-sandbox"
@dataclass
class ExeBuilder:
    """Builds the standalone ``aws``/``aws_completer`` executables with
    PyInstaller from an already-bootstrapped :class:`AwsCliVenv`.

    The finished exe tree ends up in ``<workspace>/aws``.
    """

    # Workspace directory the exe is assembled in.
    workspace: str
    # Bootstrapped venv providing python, pyinstaller and the CLI.
    venv: AwsCliVenv

    # Derived paths, computed in __post_init__ from `workspace`.
    _exe_dir: str = field(init=False)
    _final_dist_dir: str = field(init=False)
    _dist_dir: str = field(init=False)
    _build_dir: str = field(init=False)

    _utils: Utils = field(default_factory=lambda: Utils())

    def __post_init__(self):
        self._exe_dir = os.path.join(self.workspace, "aws")
        self._final_dist_dir = os.path.join(self._exe_dir, "dist")
        # Intermediate pyinstaller output/work dirs; removed by _cleanup.
        self._dist_dir = os.path.join(self.workspace, "dist")
        self._build_dir = os.path.join(self.workspace, "build")

    def build(self, cleanup=True):
        """Run both PyInstaller builds, add static assets, stamp metadata,
        and (optionally) delete the intermediate directories."""
        self._ensure_no_existing_build_dir()
        self._build_aws()
        self._build_aws_completer()
        self._utils.copy_directory_contents_into(EXE_ASSETS_DIR, self._exe_dir)
        self._update_metadata()
        if cleanup:
            self._cleanup()
        print(f"Built exe at {self._exe_dir}")

    def _update_metadata(self):
        # Record that this copy of the CLI was distributed as a source-built exe.
        self._utils.update_metadata(
            self._final_dist_dir,
            distribution_source=DISTRIBUTION_SOURCE_EXE,
        )

    def _ensure_no_existing_build_dir(self):
        # Stale pyinstaller output would otherwise be merged into this build.
        if self._utils.isdir(self._dist_dir):
            self._utils.rmtree(self._dist_dir)

    def _build_aws(self):
        # The main `aws` build creates the dist dir; the completer build
        # below is merged into it.
        aws_exe_build_dir = self._run_pyinstaller("aws.spec")
        self._utils.copy_directory(aws_exe_build_dir, self._final_dist_dir)

    def _build_aws_completer(self):
        aws_completer_exe_build_dir = self._run_pyinstaller(
            "aws_completer.spec"
        )
        self._utils.copy_directory_contents_into(
            aws_completer_exe_build_dir, self._final_dist_dir
        )

    def _run_pyinstaller(self, specfile: str):
        """Run PyInstaller (from the venv) on ``specfile`` and return the
        directory containing that spec's output."""
        aws_spec_path = os.path.join(PYINSTALLER_DIR, specfile)
        self._utils.run(
            [
                self.venv.python_exe,
                os.path.join(self.venv.bin_dir, PYINSTALLER_EXE_NAME),
                aws_spec_path,
                "--distpath",
                self._dist_dir,
                "--workpath",
                self._build_dir,
            ],
            cwd=PYINSTALLER_DIR,
            check=True,
        )
        # PyInstaller names the output dir after the spec file.
        return os.path.join(
            self.workspace, "dist", os.path.splitext(specfile)[0]
        )

    def _cleanup(self):
        """Delete the intermediate PyInstaller work/dist directories."""
        locations = [
            self._build_dir,
            self._dist_dir,
        ]
        for location in locations:
            self._utils.rmtree(location)
            print("Deleted build directory: %s" % location)
# Header written for aws.cmd when installing the portable exe on Windows
# (there is no symlink support to rely on there).
WINDOWS_CMD_TEMPLATE = """@echo off
{path} %*
"""


class Uninstaller:
    """Removes an installed AWS CLI: the install tree and its launchers."""

    def __init__(self, utils: Utils = None):
        self._utils = utils if utils is not None else Utils()

    def uninstall(self, install_dir: str, bin_dir: str):
        """Delete ``install_dir`` (if present) and every CLI entry point
        (symlink, script, or cmd shim) under ``bin_dir``."""
        if self._utils.isdir(install_dir):
            self._utils.rmtree(install_dir)
        for script in CLI_SCRIPTS:
            launcher = os.path.join(bin_dir, script)
            # islink() is checked separately because a dangling symlink
            # does not satisfy path_exists().
            if self._utils.islink(launcher) or self._utils.path_exists(
                launcher
            ):
                self._utils.remove(launcher)


class Installer:
    """Copies a built artifact out of the build dir into its final home and
    wires up the ``aws``/``aws_completer`` entry points."""

    def __init__(self, build_dir: str, utils: Utils = None):
        self._build_dir = build_dir
        self._utils = utils if utils is not None else Utils()

    def install(self, install_dir: str, bin_dir: str):
        """Install the built artifact to ``install_dir`` and expose its
        executables in ``bin_dir``."""
        self._copy_to_install_dir(install_dir)
        self._install_executables(install_dir, bin_dir)

    @functools.cached_property
    def artifact_type(self):
        # Infer what was built by inspecting the build dir: an "exe"
        # subdirectory means the portable exe was produced.
        exe_dir = os.path.join(self._build_dir, "exe")
        if self._utils.isdir(exe_dir):
            return ArtifactType.PORTABLE_EXE
        return ArtifactType.SYSTEM_SANDBOX

    def _copy_to_install_dir(self, install_dir):
        source = self._get_build_lib()
        # Replace any previous installation wholesale.
        if self._utils.isdir(install_dir):
            self._utils.rmtree(install_dir)
        self._utils.copy_directory(source, install_dir)
        if self.artifact_type == ArtifactType.SYSTEM_SANDBOX:
            # Scripts still point at the build-dir venv python; repoint them.
            self._update_script_header(install_dir)

    def _get_build_lib(self):
        """Directory inside the build dir holding the artifact to copy."""
        if self.artifact_type == ArtifactType.PORTABLE_EXE:
            return os.path.join(self._build_dir, "exe", "aws", "dist")
        return os.path.join(self._build_dir, "venv")

    def _install_executables(self, install_dir, bin_dir):
        # Symlinks work everywhere except for the portable exe on Windows,
        # which gets a generated cmd shim instead.
        if IS_WINDOWS and self.artifact_type == ArtifactType.PORTABLE_EXE:
            self._install_executables_on_windows(install_dir, bin_dir)
        else:
            self._symlink_executables(install_dir, bin_dir)

    def _install_executables_on_windows(self, install_dir, bin_dir):
        shim_path = os.path.join(bin_dir, "aws.cmd")
        shim_body = WINDOWS_CMD_TEMPLATE.format(
            path=os.path.join(install_dir, "aws.exe")
        )
        self._utils.write_file(shim_path, shim_body)

    def _symlink_executables(self, install_dir, bin_dir):
        if not self._utils.path_exists(bin_dir):
            self._utils.makedirs(bin_dir)
        for script in CLI_SCRIPTS:
            link_path = os.path.join(bin_dir, script)
            # Re-point an existing link rather than failing on it.
            if self._utils.islink(link_path):
                self._utils.remove(link_path)
            self._utils.symlink(
                self._get_install_bin_exe(install_dir, script), link_path
            )

    def _get_install_bin_exe(self, install_dir, exe):
        """Path of ``exe`` inside the installed tree (the sandbox keeps its
        executables under the venv's bin/Scripts subdirectory)."""
        if self.artifact_type == ArtifactType.SYSTEM_SANDBOX:
            return os.path.join(install_dir, BIN_DIRNAME, exe)
        return os.path.join(install_dir, exe)

    def _update_script_header(self, install_dir):
        """Rewrite each CLI script's first line to reference the installed
        sandbox's python interpreter."""
        python_exe_path = self._get_install_bin_exe(
            install_dir, PYTHON_EXE_NAME
        )
        for script in CLI_SCRIPTS:
            script_path = self._get_install_bin_exe(install_dir, script)
            contents = self._utils.read_file_lines(script_path)
            contents[0] = self._utils.get_script_header(python_exe_path)
            self._utils.write_file(script_path, "".join(contents))
# NOTE: the two regexes below had their named groups garbled in transit
# ("(?P[A-Za-z]..." is invalid regex syntax); the names are restored from
# the match.group('name', 'rest') / group('comparison', 'version') calls
# that consume them.
PACKAGE_NAME = re.compile(r"(?P<name>[A-Za-z][A-Za-z0-9_\.\-]+)(?P<rest>.+)")
CONSTRAINT = re.compile(r"(?P<comparison>[=\<\>]+)(?P<version>.+)")
# Maps a PEP 440-style comparison operator onto a comparison of two
# normalized (zero-padded) lists of version components.
COMPARISONS: Dict[str, Callable[[List[int], List[int]], bool]] = {
    '==': lambda a, b: a == b,
    '>': lambda a, b: a > b,
    '>=': lambda a, b: a >= b,
    '<': lambda a, b: a < b,
    '<=': lambda a, b: a <= b,
}


class UnmetDependenciesException(Exception):
    """Raised when the environment is missing (or has wrong versions of)
    the Python packages required for the requested build.

    ``unmet_deps`` is a list of ``(package, installed_version_or_None,
    Requirement)`` tuples; ``in_venv`` toggles the extra advice shown when
    the user is building in their global Python environment.
    """

    def __init__(self, unmet_deps, in_venv, reason=None):
        pip_install_command_args = ["-m", "pip", "install", "--prefer-binary"]
        msg = "Environment requires following Python dependencies:\n\n"
        for package, actual_version, required in unmet_deps:
            msg += (
                f"{package} (required: {required.constraints}) "
                f"(version installed: {actual_version})\n"
            )
            pip_install_command_args.append(
                f'{package}{required.string_constraints()}'
            )

        if reason:
            msg += f"\n{reason}\n"

        msg += (
            "\n"
            "We recommend using --with-download-deps flag to automatically create a "
            "virtualenv and download the dependencies.\n\n"
            "If you want to manage the dependencies yourself instead, run the following "
            "pip command:\n"
        )
        msg += f"{sys.executable} {shlex.join(pip_install_command_args)}\n"

        if not in_venv:
            msg += (
                "\nWe noticed you are not in a virtualenv.\nIf not using --with-download-deps "
                "we highly recommend using a virtualenv to prevent dependencies "
                "from being installed into your global "
                "Python environment.\n"
            )
        super().__init__(msg)


@contextlib.contextmanager
def cd(dirname):
    """Context manager: chdir into ``dirname`` and always chdir back."""
    original = os.getcwd()
    os.chdir(dirname)
    try:
        yield
    finally:
        os.chdir(original)


class Requirement:
    """A package name plus zero or more version constraints (e.g.
    ``Requirement('colorama', '>=0.2.5', '<0.4.4')``)."""

    def __init__(self, name: str, *constraints):
        self.name = name
        self.constraints = constraints

    def is_in_range(self, version: str) -> bool:
        """True if ``version`` satisfies every constraint."""
        return self._meets_constraints(version)

    def _meets_constraints(self, version):
        return all(
            self._meets_constraint(version, constraint)
            for constraint in self.constraints
        )

    def _meets_constraint(self, version, constraint) -> bool:
        match = CONSTRAINT.match(constraint)
        if not match:
            raise RuntimeError(f"Unknown version specifier {constraint}")
        comparison, constraint_version = match.group('comparison', 'version')
        version, constraint_version = self._normalize(
            version, constraint_version
        )

        compare_fn = COMPARISONS.get(comparison)
        if not compare_fn:
            raise RuntimeError(f"Unknown version range specifier {comparison}")
        return compare_fn(version, constraint_version)

    def _normalize(self, v1: str, v2: str):
        """Split dotted versions into int lists, zero-padding the shorter
        one so e.g. '1' compares equal to '1.0'."""
        v1_parts = [int(v) for v in v1.split(".")]
        v2_parts = [int(v) for v in v2.split(".")]
        while (pad := len(v1_parts) - len(v2_parts)) != 0:
            if pad > 0:
                v2_parts.append(0)
            if pad < 0:
                v1_parts.append(0)
        return v1_parts, v2_parts

    def __eq__(self, other):
        if other is None:
            return False
        return (
            self.name == other.name and self.constraints == other.constraints
        )

    def string_constraints(self):
        """Constraints joined back into pip syntax, e.g. '>=1.0,<2.0'."""
        return ','.join(self.constraints)


class ParseError(Exception):
    pass


def parse_requirements(lines_list):
    """Yield a :class:`Requirement` per requirement line in ``lines_list``.

    Full-line and trailing comments are stripped, a single trailing-
    backslash continuation is joined with the next line, and environment
    markers (``;``) are rejected with :class:`ParseError`.
    """
    lines = iter(lines_list)
    for line in lines:
        # Handle comments before the env-marker check so a ';' inside a
        # comment (previously a spurious ParseError) is simply ignored.
        if line.startswith('#'):
            continue
        if ' #' in line:
            line = line[:line.find(' #')]
        if ';' in line:
            raise ParseError('Parser does not support env markers')
        if line.endswith('\\'):
            # Drop only the continuation backslash; slicing two characters
            # (as before) ate a content character when no space preceded it.
            line = line[:-1].strip()
            try:
                line += next(lines)
            except StopIteration:
                return
        yield _parse_req_line(line)


def _parse_req_line(line: str):
    """Split one requirement line into a :class:`Requirement`."""
    match = PACKAGE_NAME.search(line)
    if not match:
        raise RuntimeError(f"Unrecognized dependency {line}")

    name, rest = match.group('name', 'rest')
    return Requirement(name, *rest.split(','))


def get_install_requires():
    """Return the CLI's runtime dependency strings from pyproject.toml.

    Requires flit_core (the build backend) to be importable; raises
    UnmetDependenciesException with guidance otherwise.
    """
    try:
        import flit_core.buildapi
    except ImportError:
        flit_core_exception = get_flit_core_unmet_exception()
        raise flit_core_exception

    with cd(ROOT_DIR):
        requires = flit_core.buildapi.get_requires_for_build_wheel()
    # Generation of the auto-complete index requires importing from the
    # awscli package and iterating over the commands from the clidriver. In
    # order to be able to do this, it requires all of the CLI's runtime
    # dependencies to be present to avoid import errors.
    dependency_block_re = re.compile(
        r"dependencies = \[([\s\S]+?)\]", re.MULTILINE
    )
    extract_dependencies_re = re.compile(r'"(.+)"')
    with open(ROOT_DIR / "pyproject.toml", "r") as f:
        data = f.read()
    raw_dependencies = dependency_block_re.findall(data)[0]
    dependencies = extract_dependencies_re.findall(raw_dependencies)
    return dependencies


def get_flit_core_unmet_exception():
    """Build the UnmetDependenciesException explaining that flit_core is
    a bootstrap-time requirement, pinned per requirements/bootstrap.txt."""
    in_venv = sys.prefix != sys.base_prefix
    with open(BOOTSTRAP_REQUIREMENTS, 'r') as f:
        flit_core_req = [
            l for l in f.read().split('\n')
            if 'flit_core' in l
        ]
    return UnmetDependenciesException(
        [('flit_core', None, list(parse_requirements(flit_core_req))[0])],
        in_venv,
        reason=(
            'flit_core is needed ahead of time in order to parse the '
            'rest of the requirements.'
        )
    )


class Utils:
    """Thin wrappers over os/shutil/subprocess so callers can inject a
    fake for testing."""

    def isdir(self, path: str) -> bool:
        return os.path.isdir(path)

    def islink(self, path: str) -> bool:
        return os.path.islink(path)

    def remove(self, path: str):
        os.remove(path)

    def makedirs(self, path: str):
        os.makedirs(path)

    def symlink(self, src: str, dst: str):
        os.symlink(src, dst)

    def read_file_lines(self, path: str) -> List[str]:
        # Use a context manager so the handle is closed deterministically
        # (the previous bare open() leaked it).
        with open(path, "r") as f:
            return f.readlines()

    def write_file(self, path: str, content: str):
        with open(path, "w") as f:
            f.write(content)

    def path_exists(self, path: str) -> bool:
        return os.path.exists(path)

    def rmtree(self, path: str) -> None:
        shutil.rmtree(path)

    def run(self, args: List[str], **kwargs: Dict[str, Any]):
        return subprocess.run(args, **kwargs)

    def copy_file(self, src: str, dst: str):
        print("Copying file %s -> %s" % (src, dst))
        shutil.copy2(src, dst)

    def copy_directory_contents_into(self, src: str, dst: str):
        print("Copying contents of %s into %s" % (src, dst))
        shutil.copytree(src, dst, dirs_exist_ok=True)

    def copy_directory(self, src: str, dst: str):
        print("Copying %s -> %s" % (src, dst))
        shutil.copytree(src, dst)

    def update_metadata(self, dirname, **kwargs):
        """Merge ``kwargs`` into awscli/data/metadata.json under ``dirname``."""
        print("Update metadata values %s" % kwargs)
        metadata_file = os.path.join(
            dirname, "awscli", "data", "metadata.json"
        )
        with open(metadata_file) as f:
            metadata = json.load(f)
        for key, value in kwargs.items():
            metadata[key] = value
        with open(metadata_file, "w") as f:
            json.dump(metadata, f)

    def create_venv(self, name: str, with_pip: bool = True):
        venv.create(name, with_pip=with_pip)

    def get_script_header(self, python_exe_path: str) -> str:
        """First line for a CLI launcher script pointing at the given
        interpreter (cmd-compatible polyglot header on Windows)."""
        if IS_WINDOWS:
            return f'@echo off & "{python_exe_path}" -x "%~f0" %* & goto :eof\n'
        return f"#!{python_exe_path}\n"
# Repository root and the project metadata file the build reads from.
ROOT = Path(__file__).parents[2]
PYPROJECT = ROOT / "pyproject.toml"
# Extracts the [build-system] "requires" list body from pyproject.toml,
# then the quoted specifier strings inside it.
BUILD_REQS_RE = re.compile(
    r"requires = \[([\s\S]+?)\]\s", re.MULTILINE
)
EXTRACT_DEPENDENCIES_RE = re.compile(r'"(.+)"')


def validate_env(target_artifact):
    """Verify the current Python environment can build ``target_artifact``.

    Raises UnmetDependenciesException (with remediation advice) when any
    build-time, bootstrap, runtime, or — for the portable exe — packaging
    dependency is missing or at an unsupported version.
    """
    unmet = _get_unmet_dependencies(_get_requires_list(target_artifact))
    if not unmet:
        return
    raise UnmetDependenciesException(unmet, sys.prefix != sys.base_prefix)


def _get_requires_list(target_artifact):
    """Collect every requirement relevant to this artifact as a generator
    of Requirement objects."""
    specs = _parse_pyproject_requirements()
    specs += _parse_requirements(BOOTSTRAP_REQUIREMENTS)
    specs += get_install_requires()
    if target_artifact == "portable-exe":
        specs += _parse_requirements(PORTABLE_EXE_REQUIREMENTS)
    return parse_requirements(specs)


def _parse_pyproject_requirements():
    """Build-backend requirements declared in pyproject.toml."""
    data = PYPROJECT.read_text()
    raw = BUILD_REQS_RE.findall(data)[0]
    return list(EXTRACT_DEPENDENCIES_RE.findall(raw))


def _parse_requirements(requirements_file):
    """Requirement lines from a pip requirements file, skipping comments
    and ``-r`` includes."""
    with open(requirements_file, "r") as f:
        return [
            line.strip()
            for line in f.readlines()
            if not line.startswith(("-r", "#"))
        ]


def _get_unmet_dependencies(requirements):
    """Return ``(name, installed_version_or_None, requirement)`` for every
    requirement not satisfied by the current environment; duplicates are
    checked only once."""
    unmet = []
    seen = set()
    for requirement in requirements:
        name = requirement.name
        if name in seen:
            continue
        seen.add(name)
        try:
            installed = importlib.metadata.version(name)
        except importlib.metadata.PackageNotFoundError:
            unmet.append((name, None, requirement))
        else:
            if not requirement.is_in_range(installed):
                unmet.append((name, installed, requirement))
    return unmet
a/bin/aws.cmd b/bin/aws.cmd index 3f8caa78af44..6ea7a4fc6889 100644 --- a/bin/aws.cmd +++ b/bin/aws.cmd @@ -1,38 +1,7 @@ -@echo OFF -REM=""" -setlocal -set PythonExe="" -set PythonExeFlags= - -for %%i in (cmd bat exe) do ( - for %%j in (python.%%i) do ( - call :SetPythonExe "%%~$PATH:j" - ) -) -for /f "tokens=2 delims==" %%i in ('assoc .py') do ( - for /f "tokens=2 delims==" %%j in ('ftype %%i') do ( - for /f "tokens=1" %%k in ("%%j") do ( - call :SetPythonExe %%k - ) - ) -) -%PythonExe% -x %PythonExeFlags% "%~f0" %* -exit /B %ERRORLEVEL% -goto :EOF - -:SetPythonExe -if not ["%~1"]==[""] ( - if [%PythonExe%]==[""] ( - set PythonExe="%~1" - ) -) -goto :EOF -""" - +@echo off & python -x "%~f0" %* & goto :eof # =================================================== # Python script starts here # =================================================== - #!/usr/bin/env python # Copyright 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved. @@ -46,7 +15,6 @@ goto :EOF # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. - import awscli.clidriver import sys diff --git a/configure b/configure new file mode 100755 index 000000000000..25b31307fdb8 --- /dev/null +++ b/configure @@ -0,0 +1,3251 @@ +#! /bin/sh +# Guess values for system-dependent variables and create Makefiles. +# Generated by GNU Autoconf 2.71 for awscli 2.9.23. +# +# +# Copyright (C) 1992-1996, 1998-2017, 2020-2021 Free Software Foundation, +# Inc. +# +# +# This configure script is free software; the Free Software Foundation +# gives unlimited permission to copy, distribute and modify it. +## -------------------- ## +## M4sh Initialization. 
## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +as_nop=: +if test ${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 +then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else $as_nop + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi + + + +# Reset variables that may have inherited troublesome values from +# the environment. + +# IFS needs to be set, to space, tab, and newline, in precisely that order. +# (If _AS_PATH_WALK were called with IFS unset, it would have the +# side effect of setting IFS to empty, thus disabling word splitting.) +# Quoting is to prevent editors from complaining about space-tab. +as_nl=' +' +export as_nl +IFS=" "" $as_nl" + +PS1='$ ' +PS2='> ' +PS4='+ ' + +# Ensure predictable behavior from utilities with locale-dependent output. +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# We cannot yet rely on "unset" to work, but we need these variables +# to be unset--not just set to an empty or harmless value--now, to +# avoid bugs in old shells (e.g. pre-3.0 UWIN ksh). This construct +# also avoids known problems related to "unset" and subshell syntax +# in other old shells (e.g. bash 2.01 and pdksh 5.2.14). +for as_var in BASH_ENV ENV MAIL MAILPATH CDPATH +do eval test \${$as_var+y} \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done + +# Ensure that fds 0, 1, and 2 are open. +if (exec 3>&0) 2>/dev/null; then :; else exec 0&1) 2>/dev/null; then :; else exec 1>/dev/null; fi +if (exec 3>&2) ; then :; else exec 2>/dev/null; fi + +# The user is always right. 
+if ${PATH_SEPARATOR+false} :; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# Find who we are. Look in the path if we contain no directory separator. +as_myself= +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + test -r "$as_dir$0" && as_myself=$as_dir$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as `sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! -f "$as_myself"; then + printf "%s\n" "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + + +# Use a proper internal environment variable to ensure we don't fall + # into an infinite loop, continuously re-executing ourselves. + if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then + _as_can_reexec=no; export _as_can_reexec; + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. +BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +printf "%s\n" "$0: could not re-execute with $CONFIG_SHELL" >&2 +exit 255 + fi + # We don't want this to propagate to other subprocesses. 
+ { _as_can_reexec=; unset _as_can_reexec;} +if test "x$CONFIG_SHELL" = x; then + as_bourne_compatible="as_nop=: +if test \${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 +then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which + # is contrary to our usage. Disable this feature. + alias -g '\${1+\"\$@\"}'='\"\$@\"' + setopt NO_GLOB_SUBST +else \$as_nop + case \`(set -o) 2>/dev/null\` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi +" + as_required="as_fn_return () { (exit \$1); } +as_fn_success () { as_fn_return 0; } +as_fn_failure () { as_fn_return 1; } +as_fn_ret_success () { return 0; } +as_fn_ret_failure () { return 1; } + +exitcode=0 +as_fn_success || { exitcode=1; echo as_fn_success failed.; } +as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } +as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } +as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } +if ( set x; as_fn_ret_success y && test x = \"\$1\" ) +then : + +else \$as_nop + exitcode=1; echo positional parameters were not saved. 
+fi +test x\$exitcode = x0 || exit 1 +blah=\$(echo \$(echo blah)) +test x\"\$blah\" = xblah || exit 1 +test -x / || exit 1" + as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO + as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO + eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && + test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1" + if (eval "$as_required") 2>/dev/null +then : + as_have_required=yes +else $as_nop + as_have_required=no +fi + if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null +then : + +else $as_nop + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +as_found=false +for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + as_found=: + case $as_dir in #( + /*) + for as_base in sh bash ksh sh5; do + # Try only shells that exist, to save several forks. + as_shell=$as_dir$as_base + if { test -f "$as_shell" || test -f "$as_shell.exe"; } && + as_run=a "$as_shell" -c "$as_bourne_compatible""$as_required" 2>/dev/null +then : + CONFIG_SHELL=$as_shell as_have_required=yes + if as_run=a "$as_shell" -c "$as_bourne_compatible""$as_suggested" 2>/dev/null +then : + break 2 +fi +fi + done;; + esac + as_found=false +done +IFS=$as_save_IFS +if $as_found +then : + +else $as_nop + if { test -f "$SHELL" || test -f "$SHELL.exe"; } && + as_run=a "$SHELL" -c "$as_bourne_compatible""$as_required" 2>/dev/null +then : + CONFIG_SHELL=$SHELL as_have_required=yes +fi +fi + + + if test "x$CONFIG_SHELL" != x +then : + export CONFIG_SHELL + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. 
+BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +printf "%s\n" "$0: could not re-execute with $CONFIG_SHELL" >&2 +exit 255 +fi + + if test x$as_have_required = xno +then : + printf "%s\n" "$0: This script requires a shell more modern than all" + printf "%s\n" "$0: the shells that I found on your system." + if test ${ZSH_VERSION+y} ; then + printf "%s\n" "$0: In particular, zsh $ZSH_VERSION has bugs and should" + printf "%s\n" "$0: be upgraded to zsh 4.3.4 or later." + else + printf "%s\n" "$0: Please tell bug-autoconf@gnu.org about your system, +$0: including any error possibly output before this +$0: message. Then install a modern shell, or manually run +$0: the script under such a shell if you do have one." + fi + exit 1 +fi +fi +fi +SHELL=${CONFIG_SHELL-/bin/sh} +export SHELL +# Unset more variables known to interfere with behavior of common tools. +CLICOLOR_FORCE= GREP_OPTIONS= +unset CLICOLOR_FORCE GREP_OPTIONS + +## --------------------- ## +## M4sh Shell Functions. ## +## --------------------- ## +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset + + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit +# as_fn_nop +# --------- +# Do nothing but, unlike ":", preserve the value of $?. +as_fn_nop () +{ + return $? 
+} +as_nop=as_fn_nop + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`printf "%s\n" "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null +then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else $as_nop + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. 
+if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null +then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else $as_nop + as_fn_arith () + { + as_val=`expr "$@" || test $? -eq 1` + } +fi # as_fn_arith + +# as_fn_nop +# --------- +# Do nothing but, unlike ":", preserve the value of $?. +as_fn_nop () +{ + return $? +} +as_nop=as_fn_nop + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. +as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + printf "%s\n" "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. 
+as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + + + as_lineno_1=$LINENO as_lineno_1a=$LINENO + as_lineno_2=$LINENO as_lineno_2a=$LINENO + eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && + test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { + # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) + sed -n ' + p + /[$]LINENO/= + ' <$as_myself | + sed ' + s/[$]LINENO.*/&-/ + t lineno + b + :lineno + N + :loop + s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ + t loop + s/-\n.*// + ' >$as_me.lineno && + chmod +x "$as_me.lineno" || + { printf "%s\n" "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } + + # If we had to re-execute with $CONFIG_SHELL, we're ensured to have + # already done that, so ensure we don't try to do so again and fall + # in an infinite loop. This has already happened in practice. + _as_can_reexec=no; export _as_can_reexec + # Don't try to exec as it changes $[0], causing all sort of problems + # (the dirname of $[0] is not the place where we might find the + # original and so on. Autoconf is especially sensitive to this). + . "./$as_me.lineno" + # Exit status is that of the last command. + exit +} + + +# Determine whether it's possible to make 'echo' print without a newline. +# These variables are no longer used directly by Autoconf, but are AC_SUBSTed +# for compatibility with existing Makefiles. +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +# For backward compatibility with old third-party macros, we provide +# the shell variables $as_echo and $as_echo_n. 
New code should use +# AS_ECHO(["message"]) and AS_ECHO_N(["message"]), respectively. +as_echo='printf %s\n' +as_echo_n='printf %s' + + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + +if mkdir -p . 2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + +as_test_x='test -x' +as_executable_p=as_fn_executable_p + +# Sed expression to map a string onto a valid CPP name. +as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" + +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" + + +test -n "$DJDIR" || exec 7<&0 &1 + +# Name of the host. +# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, +# so uname gets run too. +ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` + +# +# Initializations. +# +ac_default_prefix=/usr/local +ac_clean_files= +ac_config_libobj_dir=. +LIBOBJS= +cross_compiling=no +subdirs= +MFLAGS= +MAKEFLAGS= + +# Identity of this package. 
+PACKAGE_NAME='awscli' +PACKAGE_TARNAME='awscli' +PACKAGE_VERSION='2.9.23' +PACKAGE_STRING='awscli 2.9.23' +PACKAGE_BUGREPORT='' +PACKAGE_URL='' + +ac_unique_file="bin/aws" +ac_subst_vars='LTLIBOBJS +LIBOBJS +DOWNLOAD_DEPS_FLAG +INSTALL_TYPE +pkgpyexecdir +pyexecdir +pkgpythondir +pythondir +PYTHON_EXEC_PREFIX +PYTHON_PREFIX +PYTHON_PLATFORM +PYTHON_VERSION +PYTHON +target_alias +host_alias +build_alias +LIBS +ECHO_T +ECHO_N +ECHO_C +DEFS +mandir +localedir +libdir +psdir +pdfdir +dvidir +htmldir +infodir +docdir +oldincludedir +includedir +runstatedir +localstatedir +sharedstatedir +sysconfdir +datadir +datarootdir +libexecdir +sbindir +bindir +program_transform_name +prefix +exec_prefix +PACKAGE_URL +PACKAGE_BUGREPORT +PACKAGE_STRING +PACKAGE_VERSION +PACKAGE_TARNAME +PACKAGE_NAME +PATH_SEPARATOR +SHELL' +ac_subst_files='' +ac_user_opts=' +enable_option_checking +with_python_sys_prefix +with_python_prefix +with_python_exec_prefix +with_install_type +with_download_deps +' + ac_precious_vars='build_alias +host_alias +target_alias +PYTHON' + + +# Initialize some variables set by options. +ac_init_help= +ac_init_version=false +ac_unrecognized_opts= +ac_unrecognized_sep= +# The variables have the same names as the options, with +# dashes changed to underlines. +cache_file=/dev/null +exec_prefix=NONE +no_create= +no_recursion= +prefix=NONE +program_prefix=NONE +program_suffix=NONE +program_transform_name=s,x,x, +silent= +site= +srcdir= +verbose= +x_includes=NONE +x_libraries=NONE + +# Installation directory options. +# These are left unexpanded so users can "make install exec_prefix=/foo" +# and all the variables that are supposed to be based on exec_prefix +# by default will actually change. +# Use braces instead of parens because sh, perl, etc. also accept them. +# (The list follows the same order as the GNU Coding Standards.) 
+bindir='${exec_prefix}/bin' +sbindir='${exec_prefix}/sbin' +libexecdir='${exec_prefix}/libexec' +datarootdir='${prefix}/share' +datadir='${datarootdir}' +sysconfdir='${prefix}/etc' +sharedstatedir='${prefix}/com' +localstatedir='${prefix}/var' +runstatedir='${localstatedir}/run' +includedir='${prefix}/include' +oldincludedir='/usr/include' +docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' +infodir='${datarootdir}/info' +htmldir='${docdir}' +dvidir='${docdir}' +pdfdir='${docdir}' +psdir='${docdir}' +libdir='${exec_prefix}/lib' +localedir='${datarootdir}/locale' +mandir='${datarootdir}/man' + +ac_prev= +ac_dashdash= +for ac_option +do + # If the previous option needs an argument, assign it. + if test -n "$ac_prev"; then + eval $ac_prev=\$ac_option + ac_prev= + continue + fi + + case $ac_option in + *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; + *=) ac_optarg= ;; + *) ac_optarg=yes ;; + esac + + case $ac_dashdash$ac_option in + --) + ac_dashdash=yes ;; + + -bindir | --bindir | --bindi | --bind | --bin | --bi) + ac_prev=bindir ;; + -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) + bindir=$ac_optarg ;; + + -build | --build | --buil | --bui | --bu) + ac_prev=build_alias ;; + -build=* | --build=* | --buil=* | --bui=* | --bu=*) + build_alias=$ac_optarg ;; + + -cache-file | --cache-file | --cache-fil | --cache-fi \ + | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) + ac_prev=cache_file ;; + -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ + | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) + cache_file=$ac_optarg ;; + + --config-cache | -C) + cache_file=config.cache ;; + + -datadir | --datadir | --datadi | --datad) + ac_prev=datadir ;; + -datadir=* | --datadir=* | --datadi=* | --datad=*) + datadir=$ac_optarg ;; + + -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ + | --dataroo | --dataro | --datar) + ac_prev=datarootdir ;; + -datarootdir=* | --datarootdir=* | 
--datarootdi=* | --datarootd=* \ + | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) + datarootdir=$ac_optarg ;; + + -disable-* | --disable-*) + ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid feature name: \`$ac_useropt'" + ac_useropt_orig=$ac_useropt + ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=no ;; + + -docdir | --docdir | --docdi | --doc | --do) + ac_prev=docdir ;; + -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) + docdir=$ac_optarg ;; + + -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) + ac_prev=dvidir ;; + -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) + dvidir=$ac_optarg ;; + + -enable-* | --enable-*) + ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? 
"invalid feature name: \`$ac_useropt'" + ac_useropt_orig=$ac_useropt + ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=\$ac_optarg ;; + + -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ + | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ + | --exec | --exe | --ex) + ac_prev=exec_prefix ;; + -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ + | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ + | --exec=* | --exe=* | --ex=*) + exec_prefix=$ac_optarg ;; + + -gas | --gas | --ga | --g) + # Obsolete; use --with-gas. + with_gas=yes ;; + + -help | --help | --hel | --he | -h) + ac_init_help=long ;; + -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) + ac_init_help=recursive ;; + -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) + ac_init_help=short ;; + + -host | --host | --hos | --ho) + ac_prev=host_alias ;; + -host=* | --host=* | --hos=* | --ho=*) + host_alias=$ac_optarg ;; + + -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) + ac_prev=htmldir ;; + -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ + | --ht=*) + htmldir=$ac_optarg ;; + + -includedir | --includedir | --includedi | --included | --include \ + | --includ | --inclu | --incl | --inc) + ac_prev=includedir ;; + -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ + | --includ=* | --inclu=* | --incl=* | --inc=*) + includedir=$ac_optarg ;; + + -infodir | --infodir | --infodi | --infod | --info | --inf) + ac_prev=infodir ;; + -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) + infodir=$ac_optarg ;; + + -libdir | --libdir | --libdi | --libd) + ac_prev=libdir ;; + -libdir=* | --libdir=* | --libdi=* | --libd=*) + libdir=$ac_optarg ;; + + 
-libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ + | --libexe | --libex | --libe) + ac_prev=libexecdir ;; + -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ + | --libexe=* | --libex=* | --libe=*) + libexecdir=$ac_optarg ;; + + -localedir | --localedir | --localedi | --localed | --locale) + ac_prev=localedir ;; + -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) + localedir=$ac_optarg ;; + + -localstatedir | --localstatedir | --localstatedi | --localstated \ + | --localstate | --localstat | --localsta | --localst | --locals) + ac_prev=localstatedir ;; + -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ + | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) + localstatedir=$ac_optarg ;; + + -mandir | --mandir | --mandi | --mand | --man | --ma | --m) + ac_prev=mandir ;; + -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) + mandir=$ac_optarg ;; + + -nfp | --nfp | --nf) + # Obsolete; use --without-fp. 
+ with_fp=no ;; + + -no-create | --no-create | --no-creat | --no-crea | --no-cre \ + | --no-cr | --no-c | -n) + no_create=yes ;; + + -no-recursion | --no-recursion | --no-recursio | --no-recursi \ + | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) + no_recursion=yes ;; + + -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ + | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ + | --oldin | --oldi | --old | --ol | --o) + ac_prev=oldincludedir ;; + -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ + | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ + | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) + oldincludedir=$ac_optarg ;; + + -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) + ac_prev=prefix ;; + -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) + prefix=$ac_optarg ;; + + -program-prefix | --program-prefix | --program-prefi | --program-pref \ + | --program-pre | --program-pr | --program-p) + ac_prev=program_prefix ;; + -program-prefix=* | --program-prefix=* | --program-prefi=* \ + | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) + program_prefix=$ac_optarg ;; + + -program-suffix | --program-suffix | --program-suffi | --program-suff \ + | --program-suf | --program-su | --program-s) + ac_prev=program_suffix ;; + -program-suffix=* | --program-suffix=* | --program-suffi=* \ + | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) + program_suffix=$ac_optarg ;; + + -program-transform-name | --program-transform-name \ + | --program-transform-nam | --program-transform-na \ + | --program-transform-n | --program-transform- \ + | --program-transform | --program-transfor \ + | --program-transfo | --program-transf \ + | --program-trans | --program-tran \ + | --progr-tra | --program-tr | --program-t) + ac_prev=program_transform_name ;; + -program-transform-name=* | --program-transform-name=* \ + | 
--program-transform-nam=* | --program-transform-na=* \ + | --program-transform-n=* | --program-transform-=* \ + | --program-transform=* | --program-transfor=* \ + | --program-transfo=* | --program-transf=* \ + | --program-trans=* | --program-tran=* \ + | --progr-tra=* | --program-tr=* | --program-t=*) + program_transform_name=$ac_optarg ;; + + -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) + ac_prev=pdfdir ;; + -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) + pdfdir=$ac_optarg ;; + + -psdir | --psdir | --psdi | --psd | --ps) + ac_prev=psdir ;; + -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) + psdir=$ac_optarg ;; + + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + silent=yes ;; + + -runstatedir | --runstatedir | --runstatedi | --runstated \ + | --runstate | --runstat | --runsta | --runst | --runs \ + | --run | --ru | --r) + ac_prev=runstatedir ;; + -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ + | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ + | --run=* | --ru=* | --r=*) + runstatedir=$ac_optarg ;; + + -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) + ac_prev=sbindir ;; + -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ + | --sbi=* | --sb=*) + sbindir=$ac_optarg ;; + + -sharedstatedir | --sharedstatedir | --sharedstatedi \ + | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ + | --sharedst | --shareds | --shared | --share | --shar \ + | --sha | --sh) + ac_prev=sharedstatedir ;; + -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ + | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ + | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ + | --sha=* | --sh=*) + sharedstatedir=$ac_optarg ;; + + -site | --site | --sit) + ac_prev=site ;; + -site=* | --site=* | --sit=*) + site=$ac_optarg ;; + + -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) + 
ac_prev=srcdir ;; + -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) + srcdir=$ac_optarg ;; + + -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ + | --syscon | --sysco | --sysc | --sys | --sy) + ac_prev=sysconfdir ;; + -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ + | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) + sysconfdir=$ac_optarg ;; + + -target | --target | --targe | --targ | --tar | --ta | --t) + ac_prev=target_alias ;; + -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) + target_alias=$ac_optarg ;; + + -v | -verbose | --verbose | --verbos | --verbo | --verb) + verbose=yes ;; + + -version | --version | --versio | --versi | --vers | -V) + ac_init_version=: ;; + + -with-* | --with-*) + ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid package name: \`$ac_useropt'" + ac_useropt_orig=$ac_useropt + ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=\$ac_optarg ;; + + -without-* | --without-*) + ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid package name: \`$ac_useropt'" + ac_useropt_orig=$ac_useropt + ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=no ;; + + --x) + # Obsolete; use --with-x. 
+ with_x=yes ;; + + -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ + | --x-incl | --x-inc | --x-in | --x-i) + ac_prev=x_includes ;; + -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ + | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) + x_includes=$ac_optarg ;; + + -x-libraries | --x-libraries | --x-librarie | --x-librari \ + | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) + ac_prev=x_libraries ;; + -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ + | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) + x_libraries=$ac_optarg ;; + + -*) as_fn_error $? "unrecognized option: \`$ac_option' +Try \`$0 --help' for more information" + ;; + + *=*) + ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` + # Reject names that are not valid shell variable names. + case $ac_envvar in #( + '' | [0-9]* | *[!_$as_cr_alnum]* ) + as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; + esac + eval $ac_envvar=\$ac_optarg + export $ac_envvar ;; + + *) + # FIXME: should be removed in autoconf 3.0. + printf "%s\n" "$as_me: WARNING: you should use --build, --host, --target" >&2 + expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && + printf "%s\n" "$as_me: WARNING: invalid host type: $ac_option" >&2 + : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" + ;; + + esac +done + +if test -n "$ac_prev"; then + ac_option=--`echo $ac_prev | sed 's/_/-/g'` + as_fn_error $? "missing argument to $ac_option" +fi + +if test -n "$ac_unrecognized_opts"; then + case $enable_option_checking in + no) ;; + fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; + *) printf "%s\n" "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; + esac +fi + +# Check all directory arguments for consistency. 
+for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ + datadir sysconfdir sharedstatedir localstatedir includedir \ + oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ + libdir localedir mandir runstatedir +do + eval ac_val=\$$ac_var + # Remove trailing slashes. + case $ac_val in + */ ) + ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` + eval $ac_var=\$ac_val;; + esac + # Be sure to have absolute directory names. + case $ac_val in + [\\/$]* | ?:[\\/]* ) continue;; + NONE | '' ) case $ac_var in *prefix ) continue;; esac;; + esac + as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" +done + +# There might be people who depend on the old broken behavior: `$host' +# used to hold the argument of --host etc. +# FIXME: To remove some day. +build=$build_alias +host=$host_alias +target=$target_alias + +# FIXME: To remove some day. +if test "x$host_alias" != x; then + if test "x$build_alias" = x; then + cross_compiling=maybe + elif test "x$build_alias" != "x$host_alias"; then + cross_compiling=yes + fi +fi + +ac_tool_prefix= +test -n "$host_alias" && ac_tool_prefix=$host_alias- + +test "$silent" = yes && exec 6>/dev/null + + +ac_pwd=`pwd` && test -n "$ac_pwd" && +ac_ls_di=`ls -di .` && +ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || + as_fn_error $? "working directory cannot be determined" +test "X$ac_ls_di" = "X$ac_pwd_ls_di" || + as_fn_error $? "pwd does not report name of working directory" + + +# Find the source files, if location was not specified. +if test -z "$srcdir"; then + ac_srcdir_defaulted=yes + # Try the directory containing this script, then the parent directory. + ac_confdir=`$as_dirname -- "$as_myself" || +$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_myself" : 'X\(//\)[^/]' \| \ + X"$as_myself" : 'X\(//\)$' \| \ + X"$as_myself" : 'X\(/\)' \| . 
2>/dev/null || +printf "%s\n" X"$as_myself" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + srcdir=$ac_confdir + if test ! -r "$srcdir/$ac_unique_file"; then + srcdir=.. + fi +else + ac_srcdir_defaulted=no +fi +if test ! -r "$srcdir/$ac_unique_file"; then + test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." + as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" +fi +ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" +ac_abs_confdir=`( + cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" + pwd)` +# When building in place, set srcdir=. +if test "$ac_abs_confdir" = "$ac_pwd"; then + srcdir=. +fi +# Remove unnecessary trailing slashes from srcdir. +# Double slashes in file names in object file debugging info +# mess up M-x gdb in Emacs. +case $srcdir in +*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; +esac +for ac_var in $ac_precious_vars; do + eval ac_env_${ac_var}_set=\${${ac_var}+set} + eval ac_env_${ac_var}_value=\$${ac_var} + eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} + eval ac_cv_env_${ac_var}_value=\$${ac_var} +done +if test -n "$ac_init_help"; then + cat <<_ACEOF +Configures builds and installs of the AWS CLI + +Usage: ./configure [OPTION]... [ENV_VAR=VALUE]... + +Help options: + -h, --help Display help + -V, --version Display version + +Installation directories: + --prefix=PREFIX Set installation prefix. By default, this value is + "$ac_default_prefix". + --libdir=LIBDIR Set parent directory for AWS CLI installation. The + full path to the AWS CLI installation is "LIBDIR/aws-cli". + The default value for "LIBDIR" is "PREFIX/lib" + (i.e., "$ac_default_prefix/lib" if "--prefix" is not set). + --bindir=BINDIR Set install directory for AWS CLI executables. 
The + default value for "BINDIR" is "PREFIX/bin" + (i.e., "$ac_default_prefix/bin" if "--prefix" is not set). +Optional arguments: + --with-install-type=system-sandbox|portable-exe + Specify type of AWS CLI installation. Options are: + "portable-exe", "system-sandbox" (default is + "system-sandbox") + --with-download-deps Download all dependencies and use those when + building the AWS CLI. Note that this is best effort + and has no backwards compatibility guarantee. If not + specified, the dependencies (including all python + packages) must be installed on your system +Some influential environment variables: + PYTHON the Python interpreter +_ACEOF + exit 0 +fi +if $ac_init_version; then + cat <<\_ACEOF +awscli configure 2.9.23 +generated by GNU Autoconf 2.71 + +Copyright (C) 2021 Free Software Foundation, Inc. +This configure script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it. +_ACEOF + exit +fi + +## ------------------------ ## +## Autoconf initialization. ## +## ------------------------ ## +ac_configure_args_raw= +for ac_arg +do + case $ac_arg in + *\'*) + ac_arg=`printf "%s\n" "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + as_fn_append ac_configure_args_raw " '$ac_arg'" +done + +case $ac_configure_args_raw in + *$as_nl*) + ac_safe_unquote= ;; + *) + ac_unsafe_z='|&;<>()$`\\"*?[ '' ' # This string ends in space, tab. + ac_unsafe_a="$ac_unsafe_z#~" + ac_safe_unquote="s/ '\\([^$ac_unsafe_a][^$ac_unsafe_z]*\\)'/ \\1/g" + ac_configure_args_raw=` printf "%s\n" "$ac_configure_args_raw" | sed "$ac_safe_unquote"`;; +esac + +cat >config.log <<_ACEOF +This file contains any messages produced by compilers while +running configure, to aid debugging if configure makes a mistake. + +It was created by awscli $as_me 2.9.23, which was +generated by GNU Autoconf 2.71. Invocation command line was + + $ $0$ac_configure_args_raw + +_ACEOF +exec 5>>config.log +{ +cat <<_ASUNAME +## --------- ## +## Platform. 
## +## --------- ## + +hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` +uname -m = `(uname -m) 2>/dev/null || echo unknown` +uname -r = `(uname -r) 2>/dev/null || echo unknown` +uname -s = `(uname -s) 2>/dev/null || echo unknown` +uname -v = `(uname -v) 2>/dev/null || echo unknown` + +/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` +/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` + +/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` +/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` +/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` +/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` +/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` +/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` +/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` + +_ASUNAME + +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + printf "%s\n" "PATH: $as_dir" + done +IFS=$as_save_IFS + +} >&5 + +cat >&5 <<_ACEOF + + +## ----------- ## +## Core tests. ## +## ----------- ## + +_ACEOF + + +# Keep a trace of the command line. +# Strip out --no-create and --no-recursion so they do not pile up. +# Strip out --silent because we don't want to record it for future runs. +# Also quote any args containing shell meta-characters. +# Make two passes to allow for proper duplicate-argument suppression. 
+ac_configure_args= +ac_configure_args0= +ac_configure_args1= +ac_must_keep_next=false +for ac_pass in 1 2 +do + for ac_arg + do + case $ac_arg in + -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + continue ;; + *\'*) + ac_arg=`printf "%s\n" "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + case $ac_pass in + 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; + 2) + as_fn_append ac_configure_args1 " '$ac_arg'" + if test $ac_must_keep_next = true; then + ac_must_keep_next=false # Got value, back to normal. + else + case $ac_arg in + *=* | --config-cache | -C | -disable-* | --disable-* \ + | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ + | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ + | -with-* | --with-* | -without-* | --without-* | --x) + case "$ac_configure_args0 " in + "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; + esac + ;; + -* ) ac_must_keep_next=true ;; + esac + fi + as_fn_append ac_configure_args " '$ac_arg'" + ;; + esac + done +done +{ ac_configure_args0=; unset ac_configure_args0;} +{ ac_configure_args1=; unset ac_configure_args1;} + +# When interrupted or exit'd, cleanup temporary files, and complete +# config.log. We remove comments because anyway the quotes in there +# would cause problems or look ugly. +# WARNING: Use '\'' to represent an apostrophe within the trap. +# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. +trap 'exit_status=$? + # Sanitize IFS. + IFS=" "" $as_nl" + # Save into config.log some information that might help in debugging. + { + echo + + printf "%s\n" "## ---------------- ## +## Cache variables. 
## +## ---------------- ##" + echo + # The following way of writing the cache mishandles newlines in values, +( + for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + (set) 2>&1 | + case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + sed -n \ + "s/'\''/'\''\\\\'\'''\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" + ;; #( + *) + sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) + echo + + printf "%s\n" "## ----------------- ## +## Output variables. ## +## ----------------- ##" + echo + for ac_var in $ac_subst_vars + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`printf "%s\n" "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + printf "%s\n" "$ac_var='\''$ac_val'\''" + done | sort + echo + + if test -n "$ac_subst_files"; then + printf "%s\n" "## ------------------- ## +## File substitutions. ## +## ------------------- ##" + echo + for ac_var in $ac_subst_files + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`printf "%s\n" "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + printf "%s\n" "$ac_var='\''$ac_val'\''" + done | sort + echo + fi + + if test -s confdefs.h; then + printf "%s\n" "## ----------- ## +## confdefs.h. 
## +## ----------- ##" + echo + cat confdefs.h + echo + fi + test "$ac_signal" != 0 && + printf "%s\n" "$as_me: caught signal $ac_signal" + printf "%s\n" "$as_me: exit $exit_status" + } >&5 + rm -f core *.core core.conftest.* && + rm -f -r conftest* confdefs* conf$$* $ac_clean_files && + exit $exit_status +' 0 +for ac_signal in 1 2 13 15; do + trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal +done +ac_signal=0 + +# confdefs.h avoids OS command line length limits that DEFS can exceed. +rm -f -r conftest* confdefs.h + +printf "%s\n" "/* confdefs.h */" > confdefs.h + +# Predefined preprocessor variables. + +printf "%s\n" "#define PACKAGE_NAME \"$PACKAGE_NAME\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_TARNAME \"$PACKAGE_TARNAME\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_VERSION \"$PACKAGE_VERSION\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_STRING \"$PACKAGE_STRING\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_BUGREPORT \"$PACKAGE_BUGREPORT\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_URL \"$PACKAGE_URL\"" >>confdefs.h + + +# Let the site file select an alternate cache file if it wants to. +# Prefer an explicitly selected file to automatically selected ones. +if test -n "$CONFIG_SITE"; then + ac_site_files="$CONFIG_SITE" +elif test "x$prefix" != xNONE; then + ac_site_files="$prefix/share/config.site $prefix/etc/config.site" +else + ac_site_files="$ac_default_prefix/share/config.site $ac_default_prefix/etc/config.site" +fi + +for ac_site_file in $ac_site_files +do + case $ac_site_file in #( + */*) : + ;; #( + *) : + ac_site_file=./$ac_site_file ;; +esac + if test -f "$ac_site_file" && test -r "$ac_site_file"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 +printf "%s\n" "$as_me: loading site script $ac_site_file" >&6;} + sed 's/^/| /' "$ac_site_file" >&5 + . 
"$ac_site_file" \ + || { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +printf "%s\n" "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "failed to load site script $ac_site_file +See \`config.log' for more details" "$LINENO" 5; } + fi +done + +if test -r "$cache_file"; then + # Some versions of bash will fail to source /dev/null (special files + # actually), so we avoid doing that. DJGPP emulates it as a regular file. + if test /dev/null != "$cache_file" && test -f "$cache_file"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 +printf "%s\n" "$as_me: loading cache $cache_file" >&6;} + case $cache_file in + [\\/]* | ?:[\\/]* ) . "$cache_file";; + *) . "./$cache_file";; + esac + fi +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 +printf "%s\n" "$as_me: creating cache $cache_file" >&6;} + >$cache_file +fi + +# Check that the precious variables saved in the cache have kept the same +# value. +ac_cache_corrupted=false +for ac_var in $ac_precious_vars; do + eval ac_old_set=\$ac_cv_env_${ac_var}_set + eval ac_new_set=\$ac_env_${ac_var}_set + eval ac_old_val=\$ac_cv_env_${ac_var}_value + eval ac_new_val=\$ac_env_${ac_var}_value + case $ac_old_set,$ac_new_set in + set,) + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 +printf "%s\n" "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,set) + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 +printf "%s\n" "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,);; + *) + if test "x$ac_old_val" != "x$ac_new_val"; then + # differences in whitespace do not lead to failure. 
+ ac_old_val_w=`echo x $ac_old_val` + ac_new_val_w=`echo x $ac_new_val` + if test "$ac_old_val_w" != "$ac_new_val_w"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 +printf "%s\n" "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} + ac_cache_corrupted=: + else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 +printf "%s\n" "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} + eval $ac_var=\$ac_old_val + fi + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 +printf "%s\n" "$as_me: former value: \`$ac_old_val'" >&2;} + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 +printf "%s\n" "$as_me: current value: \`$ac_new_val'" >&2;} + fi;; + esac + # Pass precious variables to config.status. + if test "$ac_new_set" = set; then + case $ac_new_val in + *\'*) ac_arg=$ac_var=`printf "%s\n" "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; + *) ac_arg=$ac_var=$ac_new_val ;; + esac + case " $ac_configure_args " in + *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. + *) as_fn_append ac_configure_args " '$ac_arg'" ;; + esac + fi +done +if $ac_cache_corrupted; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +printf "%s\n" "$as_me: error: in \`$ac_pwd':" >&2;} + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 +printf "%s\n" "$as_me: error: changes in the environment can compromise the build" >&2;} + as_fn_error $? "run \`${MAKE-make} distclean' and/or \`rm $cache_file' + and start over" "$LINENO" 5 +fi +## -------------------- ## +## Main body of script. 
## +## -------------------- ## + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + + + + + + + + if test -n "$PYTHON"; then + # If the user set $PYTHON, use it and don't search something else. + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether $PYTHON version is >= 3.8" >&5 +printf %s "checking whether $PYTHON version is >= 3.8... " >&6; } + prog="import sys +# split strings by '.' and convert to numeric. Append some zeros +# because we need at least 4 digits for the hex conversion. +# map returns an iterator in Python 3.0 and a list in 2.x +minver = list(map(int, '3.8'.split('.'))) + [0, 0, 0] +minverhex = 0 +# xrange is not present in Python 3.0 and range returns an iterator +for i in list(range(0, 4)): minverhex = (minverhex << 8) + minver[i] +sys.exit(sys.hexversion < minverhex)" + if { echo "$as_me:$LINENO: $PYTHON -c "$prog"" >&5 + ($PYTHON -c "$prog") >&5 2>&5 + ac_status=$? + echo "$as_me:$LINENO: \$? = $ac_status" >&5 + (exit $ac_status); } +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } +else $as_nop + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + as_fn_error $? "Python interpreter is too old" "$LINENO" 5 +fi + am_display_PYTHON=$PYTHON + else + # Otherwise, try each interpreter until we find one that satisfies + # VERSION. + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for a Python interpreter with version >= 3.8" >&5 +printf %s "checking for a Python interpreter with version >= 3.8... 
" >&6; } +if test ${am_cv_pathless_PYTHON+y} +then : + printf %s "(cached) " >&6 +else $as_nop + + for am_cv_pathless_PYTHON in python python2 python3 python3.9 python3.8 python3.7 python3.6 python3.5 python3.4 python3.3 python3.2 python3.1 python3.0 python2.7 python2.6 python2.5 python2.4 python2.3 python2.2 python2.1 python2.0 none; do + test "$am_cv_pathless_PYTHON" = none && break + prog="import sys +# split strings by '.' and convert to numeric. Append some zeros +# because we need at least 4 digits for the hex conversion. +# map returns an iterator in Python 3.0 and a list in 2.x +minver = list(map(int, '3.8'.split('.'))) + [0, 0, 0] +minverhex = 0 +# xrange is not present in Python 3.0 and range returns an iterator +for i in list(range(0, 4)): minverhex = (minverhex << 8) + minver[i] +sys.exit(sys.hexversion < minverhex)" + if { echo "$as_me:$LINENO: $am_cv_pathless_PYTHON -c "$prog"" >&5 + ($am_cv_pathless_PYTHON -c "$prog") >&5 2>&5 + ac_status=$? + echo "$as_me:$LINENO: \$? = $ac_status" >&5 + (exit $ac_status); } +then : + break +fi + done +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $am_cv_pathless_PYTHON" >&5 +printf "%s\n" "$am_cv_pathless_PYTHON" >&6; } + # Set $PYTHON to the absolute path of $am_cv_pathless_PYTHON. + if test "$am_cv_pathless_PYTHON" = none; then + PYTHON=: + else + # Extract the first word of "$am_cv_pathless_PYTHON", so it can be a program name with args. +set dummy $am_cv_pathless_PYTHON; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_path_PYTHON+y} +then : + printf %s "(cached) " >&6 +else $as_nop + case $PYTHON in + [\\/]* | ?:[\\/]*) + ac_cv_path_PYTHON="$PYTHON" # Let the user override the test with a path. 
+ ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_path_PYTHON="$as_dir$ac_word$ac_exec_ext" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + ;; +esac +fi +PYTHON=$ac_cv_path_PYTHON +if test -n "$PYTHON"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $PYTHON" >&5 +printf "%s\n" "$PYTHON" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + + fi + am_display_PYTHON=$am_cv_pathless_PYTHON + fi + + + if test "$PYTHON" = :; then + as_fn_error $? "no suitable Python interpreter found" "$LINENO" 5 + else + + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $am_display_PYTHON version" >&5 +printf %s "checking for $am_display_PYTHON version... " >&6; } +if test ${am_cv_python_version+y} +then : + printf %s "(cached) " >&6 +else $as_nop + am_cv_python_version=`$PYTHON -c "import sys; print ('%u.%u' % sys.version_info[:2])"` +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $am_cv_python_version" >&5 +printf "%s\n" "$am_cv_python_version" >&6; } + PYTHON_VERSION=$am_cv_python_version + + + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $am_display_PYTHON platform" >&5 +printf %s "checking for $am_display_PYTHON platform... 
" >&6; } +if test ${am_cv_python_platform+y} +then : + printf %s "(cached) " >&6 +else $as_nop + am_cv_python_platform=`$PYTHON -c "import sys; sys.stdout.write(sys.platform)"` +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $am_cv_python_platform" >&5 +printf "%s\n" "$am_cv_python_platform" >&6; } + PYTHON_PLATFORM=$am_cv_python_platform + + + if test "x$prefix" = xNONE; then + am__usable_prefix=$ac_default_prefix + else + am__usable_prefix=$prefix + fi + + # Allow user to request using sys.* values from Python, + # instead of the GNU $prefix values. + +# Check whether --with-python-sys-prefix was given. +if test ${with_python_sys_prefix+y} +then : + withval=$with_python_sys_prefix; am_use_python_sys=: +else $as_nop + am_use_python_sys=false +fi + + + # Allow user to override whatever the default Python prefix is. + +# Check whether --with-python_prefix was given. +if test ${with_python_prefix+y} +then : + withval=$with_python_prefix; am_python_prefix_subst=$withval + am_cv_python_prefix=$withval + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for explicit $am_display_PYTHON prefix" >&5 +printf %s "checking for explicit $am_display_PYTHON prefix... " >&6; } + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $am_cv_python_prefix" >&5 +printf "%s\n" "$am_cv_python_prefix" >&6; } +else $as_nop + + if $am_use_python_sys; then + # using python sys.prefix value, not GNU + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for python default $am_display_PYTHON prefix" >&5 +printf %s "checking for python default $am_display_PYTHON prefix... 
" >&6; } +if test ${am_cv_python_prefix+y} +then : + printf %s "(cached) " >&6 +else $as_nop + am_cv_python_prefix=`$PYTHON -c "import sys; sys.stdout.write(sys.prefix)"` +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $am_cv_python_prefix" >&5 +printf "%s\n" "$am_cv_python_prefix" >&6; } + + case $am_cv_python_prefix in + $am__usable_prefix*) + am__strip_prefix=`echo "$am__usable_prefix" | sed 's|.|.|g'` + am_python_prefix_subst=`echo "$am_cv_python_prefix" | sed "s,^$am__strip_prefix,\\${prefix},"` + ;; + *) + am_python_prefix_subst=$am_cv_python_prefix + ;; + esac + else # using GNU prefix value, not python sys.prefix + am_python_prefix_subst='${prefix}' + am_python_prefix=$am_python_prefix_subst + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for GNU default $am_display_PYTHON prefix" >&5 +printf %s "checking for GNU default $am_display_PYTHON prefix... " >&6; } + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $am_python_prefix" >&5 +printf "%s\n" "$am_python_prefix" >&6; } + fi +fi + + # Substituting python_prefix_subst value. + PYTHON_PREFIX=$am_python_prefix_subst + + + # emacs-page Now do it all over again for Python exec_prefix, but with yet + # another conditional: fall back to regular prefix if that was specified. + +# Check whether --with-python_exec_prefix was given. +if test ${with_python_exec_prefix+y} +then : + withval=$with_python_exec_prefix; am_python_exec_prefix_subst=$withval + am_cv_python_exec_prefix=$withval + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for explicit $am_display_PYTHON exec_prefix" >&5 +printf %s "checking for explicit $am_display_PYTHON exec_prefix... " >&6; } + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $am_cv_python_exec_prefix" >&5 +printf "%s\n" "$am_cv_python_exec_prefix" >&6; } +else $as_nop + + # no explicit --with-python_exec_prefix, but if + # --with-python_prefix was given, use its value for python_exec_prefix too. 
+ if test -n "$with_python_prefix" +then : + am_python_exec_prefix_subst=$with_python_prefix + am_cv_python_exec_prefix=$with_python_prefix + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for python_prefix-given $am_display_PYTHON exec_prefix" >&5 +printf %s "checking for python_prefix-given $am_display_PYTHON exec_prefix... " >&6; } + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $am_cv_python_exec_prefix" >&5 +printf "%s\n" "$am_cv_python_exec_prefix" >&6; } +else $as_nop + + # Set am__usable_exec_prefix whether using GNU or Python values, + # since we use that variable for pyexecdir. + if test "x$exec_prefix" = xNONE; then + am__usable_exec_prefix=$am__usable_prefix + else + am__usable_exec_prefix=$exec_prefix + fi + # + if $am_use_python_sys; then # using python sys.exec_prefix, not GNU + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for python default $am_display_PYTHON exec_prefix" >&5 +printf %s "checking for python default $am_display_PYTHON exec_prefix... " >&6; } +if test ${am_cv_python_exec_prefix+y} +then : + printf %s "(cached) " >&6 +else $as_nop + am_cv_python_exec_prefix=`$PYTHON -c "import sys; sys.stdout.write(sys.exec_prefix)"` +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $am_cv_python_exec_prefix" >&5 +printf "%s\n" "$am_cv_python_exec_prefix" >&6; } + case $am_cv_python_exec_prefix in + $am__usable_exec_prefix*) + am__strip_prefix=`echo "$am__usable_exec_prefix" | sed 's|.|.|g'` + am_python_exec_prefix_subst=`echo "$am_cv_python_exec_prefix" | sed "s,^$am__strip_prefix,\\${exec_prefix},"` + ;; + *) + am_python_exec_prefix_subst=$am_cv_python_exec_prefix + ;; + esac + else # using GNU $exec_prefix, not python sys.exec_prefix + am_python_exec_prefix_subst='${exec_prefix}' + am_python_exec_prefix=$am_python_exec_prefix_subst + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for GNU default $am_display_PYTHON exec_prefix" >&5 +printf %s "checking for GNU default $am_display_PYTHON exec_prefix... 
" >&6; } + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $am_python_exec_prefix" >&5 +printf "%s\n" "$am_python_exec_prefix" >&6; } + fi +fi +fi + + # Substituting python_exec_prefix_subst. + PYTHON_EXEC_PREFIX=$am_python_exec_prefix_subst + + + # Factor out some code duplication into this shell variable. + am_python_setup_sysconfig="\ +import sys +# Prefer sysconfig over distutils.sysconfig, for better compatibility +# with python 3.x. See automake bug#10227. +try: + import sysconfig +except ImportError: + can_use_sysconfig = 0 +else: + can_use_sysconfig = 1 +# Can't use sysconfig in CPython 2.7, since it's broken in virtualenvs: +# +try: + from platform import python_implementation + if python_implementation() == 'CPython' and sys.version[:3] == '2.7': + can_use_sysconfig = 0 +except ImportError: + pass" + + + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $am_display_PYTHON script directory (pythondir)" >&5 +printf %s "checking for $am_display_PYTHON script directory (pythondir)... 
" >&6; } +if test ${am_cv_python_pythondir+y} +then : + printf %s "(cached) " >&6 +else $as_nop + if test "x$am_cv_python_prefix" = x; then + am_py_prefix=$am__usable_prefix + else + am_py_prefix=$am_cv_python_prefix + fi + am_cv_python_pythondir=`$PYTHON -c " +$am_python_setup_sysconfig +if can_use_sysconfig: + sitedir = sysconfig.get_path('purelib', vars={'base':'$am_py_prefix'}) +else: + from distutils import sysconfig + sitedir = sysconfig.get_python_lib(0, 0, prefix='$am_py_prefix') +sys.stdout.write(sitedir)"` + # + case $am_cv_python_pythondir in + $am_py_prefix*) + am__strip_prefix=`echo "$am_py_prefix" | sed 's|.|.|g'` + am_cv_python_pythondir=`echo "$am_cv_python_pythondir" | sed "s,^$am__strip_prefix,\\${PYTHON_PREFIX},"` + ;; + *) + case $am_py_prefix in + /usr|/System*) ;; + *) am_cv_python_pythondir="\${PYTHON_PREFIX}/lib/python$PYTHON_VERSION/site-packages" + ;; + esac + ;; + esac + +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $am_cv_python_pythondir" >&5 +printf "%s\n" "$am_cv_python_pythondir" >&6; } + pythondir=$am_cv_python_pythondir + + + pkgpythondir=\${pythondir}/$PACKAGE + + + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $am_display_PYTHON extension module directory (pyexecdir)" >&5 +printf %s "checking for $am_display_PYTHON extension module directory (pyexecdir)... 
" >&6; } +if test ${am_cv_python_pyexecdir+y} +then : + printf %s "(cached) " >&6 +else $as_nop + if test "x$am_cv_python_exec_prefix" = x; then + am_py_exec_prefix=$am__usable_exec_prefix + else + am_py_exec_prefix=$am_cv_python_exec_prefix + fi + am_cv_python_pyexecdir=`$PYTHON -c " +$am_python_setup_sysconfig +if can_use_sysconfig: + sitedir = sysconfig.get_path('platlib', vars={'platbase':'$am_py_exec_prefix'}) +else: + from distutils import sysconfig + sitedir = sysconfig.get_python_lib(1, 0, prefix='$am_py_exec_prefix') +sys.stdout.write(sitedir)"` + # + case $am_cv_python_pyexecdir in + $am_py_exec_prefix*) + am__strip_prefix=`echo "$am_py_exec_prefix" | sed 's|.|.|g'` + am_cv_python_pyexecdir=`echo "$am_cv_python_pyexecdir" | sed "s,^$am__strip_prefix,\\${PYTHON_EXEC_PREFIX},"` + ;; + *) + case $am_py_exec_prefix in + /usr|/System*) ;; + *) am_cv_python_pyexecdir="\${PYTHON_EXEC_PREFIX}/lib/python$PYTHON_VERSION/site-packages" + ;; + esac + ;; + esac + +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $am_cv_python_pyexecdir" >&5 +printf "%s\n" "$am_cv_python_pyexecdir" >&6; } + pyexecdir=$am_cv_python_pyexecdir + + + pkgpyexecdir=\${pyexecdir}/$PACKAGE + + + + fi + + +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for sqlite3" >&5 +printf %s "checking for sqlite3... " >&6; } +read -r -d '' SQLITE_TEST_PROG <<-EOF +import sys +try: + import sqlite3 +except ImportError: + sys.exit(1) +EOF +$PYTHON -c "${SQLITE_TEST_PROG}" +if [ $? -eq 0 ] +then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } +else + as_fn_error 1 "AWS CLI requires a Python interpreter compiled with sqlite3. $PYTHON does not have sqlite3." "$LINENO" 5 +fi + +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-install-type" >&5 +printf %s "checking for --with-install-type... " >&6; } + +# Check whether --with-install_type was given. 
+if test ${with_install_type+y} +then : + withval=$with_install_type; +else $as_nop + with_install_type=system-sandbox +fi + +case $with_install_type in #( + portable-exe) : + INSTALL_TYPE=portable-exe ;; #( + system-sandbox) : + INSTALL_TYPE=system-sandbox ;; #( + *) : + as_fn_error $? "--with-install-type=portable-exe|system-sandbox" "$LINENO" 5 ;; +esac + +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $with_install_type" >&5 +printf "%s\n" "$with_install_type" >&6; } + +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-download-deps" >&5 +printf %s "checking for --with-download-deps... " >&6; } + +# Check whether --with-download_deps was given. +if test ${with_download_deps+y} +then : + withval=$with_download_deps; +else $as_nop + with_download_deps=no + +fi + +if test "$with_download_deps" = no; then + ${PYTHON} ${srcdir}/backends/build_system validate-env --artifact $with_install_type || as_fn_error $? "\"Python dependencies not met.\"" "$LINENO" 5 + DOWNLOAD_DEPS_FLAG="" +else + DOWNLOAD_DEPS_FLAG=--download-deps +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $with_download_deps" >&5 +printf "%s\n" "$with_download_deps" >&6; } + + +ac_config_files="$ac_config_files Makefile" + +cat >confcache <<\_ACEOF +# This file is a shell script that caches the results of configure +# tests run on this system so they can be shared between configure +# scripts and configure runs, see configure's option --config-cache. +# It is not useful on other systems. If it contains results you don't +# want to keep, you may remove or edit it. +# +# config.status only pays attention to the cache file if you give it +# the --recheck option to rerun configure. +# +# `ac_cv_env_foo' variables (set or unset) will be overridden when +# loading this file, other *unset* `ac_cv_foo' will be assigned the +# following values. 
+ +_ACEOF + +# The following way of writing the cache mishandles newlines in values, +# but we know of no workaround that is simple, portable, and efficient. +# So, we kill variables containing newlines. +# Ultrix sh set writes to stderr and can't be redirected directly, +# and sets the high bit in the cache file unless we assign to the vars. +( + for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + + (set) 2>&1 | + case $as_nl`(ac_space=' '; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + # `set' does not quote correctly, so add quotes: double-quote + # substitution turns \\\\ into \\, and sed turns \\ into \. + sed -n \ + "s/'/'\\\\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" + ;; #( + *) + # `set' quotes correctly as required by POSIX, so do not add quotes. + sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) | + sed ' + /^ac_cv_env_/b end + t clear + :clear + s/^\([^=]*\)=\(.*[{}].*\)$/test ${\1+y} || &/ + t end + s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ + :end' >>confcache +if diff "$cache_file" confcache >/dev/null 2>&1; then :; else + if test -w "$cache_file"; then + if test "x$cache_file" != "x/dev/null"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 +printf "%s\n" "$as_me: updating cache $cache_file" >&6;} + if test ! 
-f "$cache_file" || test -h "$cache_file"; then + cat confcache >"$cache_file" + else + case $cache_file in #( + */* | ?:*) + mv -f confcache "$cache_file"$$ && + mv -f "$cache_file"$$ "$cache_file" ;; #( + *) + mv -f confcache "$cache_file" ;; + esac + fi + fi + else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 +printf "%s\n" "$as_me: not updating unwritable cache $cache_file" >&6;} + fi +fi +rm -f confcache + +test "x$prefix" = xNONE && prefix=$ac_default_prefix +# Let make expand exec_prefix. +test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' + +# Transform confdefs.h into DEFS. +# Protect against shell expansion while executing Makefile rules. +# Protect against Makefile macro expansion. +# +# If the first sed substitution is executed (which looks for macros that +# take arguments), then branch to the quote section. Otherwise, +# look for a macro that doesn't take arguments. +ac_script=' +:mline +/\\$/{ + N + s,\\\n,, + b mline +} +t clear +:clear +s/^[ ]*#[ ]*define[ ][ ]*\([^ (][^ (]*([^)]*)\)[ ]*\(.*\)/-D\1=\2/g +t quote +s/^[ ]*#[ ]*define[ ][ ]*\([^ ][^ ]*\)[ ]*\(.*\)/-D\1=\2/g +t quote +b any +:quote +s/[ `~#$^&*(){}\\|;'\''"<>?]/\\&/g +s/\[/\\&/g +s/\]/\\&/g +s/\$/$$/g +H +:any +${ + g + s/^\n// + s/\n/ /g + p +} +' +DEFS=`sed -n "$ac_script" confdefs.h` + + +ac_libobjs= +ac_ltlibobjs= +U= +for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue + # 1. Remove the extension, and $U if already installed. + ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' + ac_i=`printf "%s\n" "$ac_i" | sed "$ac_script"` + # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR + # will be set to the directory where LIBOBJS objects are built. 
+ as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" + as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' +done +LIBOBJS=$ac_libobjs + +LTLIBOBJS=$ac_ltlibobjs + + + +: "${CONFIG_STATUS=./config.status}" +ac_write_fail=0 +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files $CONFIG_STATUS" +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 +printf "%s\n" "$as_me: creating $CONFIG_STATUS" >&6;} +as_write_fail=0 +cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 +#! $SHELL +# Generated by $as_me. +# Run this file to recreate the current configuration. +# Compiler output produced by configure, useful for debugging +# configure, is in config.log if it exists. + +debug=false +ac_cs_recheck=false +ac_cs_silent=false + +SHELL=\${CONFIG_SHELL-$SHELL} +export SHELL +_ASEOF +cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 +## -------------------- ## +## M4sh Initialization. ## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +as_nop=: +if test ${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 +then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else $as_nop + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi + + + +# Reset variables that may have inherited troublesome values from +# the environment. + +# IFS needs to be set, to space, tab, and newline, in precisely that order. +# (If _AS_PATH_WALK were called with IFS unset, it would have the +# side effect of setting IFS to empty, thus disabling word splitting.) +# Quoting is to prevent editors from complaining about space-tab. +as_nl=' +' +export as_nl +IFS=" "" $as_nl" + +PS1='$ ' +PS2='> ' +PS4='+ ' + +# Ensure predictable behavior from utilities with locale-dependent output. 
+LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# We cannot yet rely on "unset" to work, but we need these variables +# to be unset--not just set to an empty or harmless value--now, to +# avoid bugs in old shells (e.g. pre-3.0 UWIN ksh). This construct +# also avoids known problems related to "unset" and subshell syntax +# in other old shells (e.g. bash 2.01 and pdksh 5.2.14). +for as_var in BASH_ENV ENV MAIL MAILPATH CDPATH +do eval test \${$as_var+y} \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done + +# Ensure that fds 0, 1, and 2 are open. +if (exec 3>&0) 2>/dev/null; then :; else exec 0&1) 2>/dev/null; then :; else exec 1>/dev/null; fi +if (exec 3>&2) ; then :; else exec 2>/dev/null; fi + +# The user is always right. +if ${PATH_SEPARATOR+false} :; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# Find who we are. Look in the path if we contain no directory separator. +as_myself= +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + test -r "$as_dir$0" && as_myself=$as_dir$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as `sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! -f "$as_myself"; then + printf "%s\n" "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + + + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. 
+as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + printf "%s\n" "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + + + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset + +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null +then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else $as_nop + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null +then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else $as_nop + as_fn_arith () + { + as_val=`expr "$@" || test $? 
-eq 1` + } +fi # as_fn_arith + + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + + +# Determine whether it's possible to make 'echo' print without a newline. +# These variables are no longer used directly by Autoconf, but are AC_SUBSTed +# for compatibility with existing Makefiles. +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +# For backward compatibility with old third-party macros, we provide +# the shell variables $as_echo and $as_echo_n. New code should use +# AS_ECHO(["message"]) and AS_ECHO_N(["message"]), respectively. +as_echo='printf %s\n' +as_echo_n='printf %s' + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... 
but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`printf "%s\n" "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p +if mkdir -p . 2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +as_test_x='test -x' +as_executable_p=as_fn_executable_p + +# Sed expression to map a string onto a valid CPP name. 
+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" + +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" + + +exec 6>&1 +## ----------------------------------- ## +## Main body of $CONFIG_STATUS script. ## +## ----------------------------------- ## +_ASEOF +test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# Save the log message, to keep $0 and so on meaningful, and to +# report actual input values of CONFIG_FILES etc. instead of their +# values after options handling. +ac_log=" +This file was extended by awscli $as_me 2.9.23, which was +generated by GNU Autoconf 2.71. Invocation command line was + + CONFIG_FILES = $CONFIG_FILES + CONFIG_HEADERS = $CONFIG_HEADERS + CONFIG_LINKS = $CONFIG_LINKS + CONFIG_COMMANDS = $CONFIG_COMMANDS + $ $0 $@ + +on `(hostname || uname -n) 2>/dev/null | sed 1q` +" + +_ACEOF + +case $ac_config_files in *" +"*) set x $ac_config_files; shift; ac_config_files=$*;; +esac + + + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +# Files that config.status was made for. +config_files="$ac_config_files" + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +ac_cs_usage="\ +\`$as_me' instantiates files and other configuration actions +from templates according to the current configuration. Unless the files +and actions are specified as TAGs, all are instantiated by default. + +Usage: $0 [OPTION]... [TAG]... + + -h, --help print this help, then exit + -V, --version print version number and configuration settings, then exit + --config print configuration, then exit + -q, --quiet, --silent + do not print progress messages + -d, --debug don't remove temporary files + --recheck update $as_me by reconfiguring in the same conditions + --file=FILE[:TEMPLATE] + instantiate the configuration file FILE + +Configuration files: +$config_files + +Report bugs to the package provider." 
+ +_ACEOF +ac_cs_config=`printf "%s\n" "$ac_configure_args" | sed "$ac_safe_unquote"` +ac_cs_config_escaped=`printf "%s\n" "$ac_cs_config" | sed "s/^ //; s/'/'\\\\\\\\''/g"` +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_cs_config='$ac_cs_config_escaped' +ac_cs_version="\\ +awscli config.status 2.9.23 +configured by $0, generated by GNU Autoconf 2.71, + with options \\"\$ac_cs_config\\" + +Copyright (C) 2021 Free Software Foundation, Inc. +This config.status script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it." + +ac_pwd='$ac_pwd' +srcdir='$srcdir' +test -n "\$AWK" || AWK=awk +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# The default lists apply if the user does not specify any file. +ac_need_defaults=: +while test $# != 0 +do + case $1 in + --*=?*) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` + ac_shift=: + ;; + --*=) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg= + ac_shift=: + ;; + *) + ac_option=$1 + ac_optarg=$2 + ac_shift=shift + ;; + esac + + case $ac_option in + # Handling of the options. + -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) + ac_cs_recheck=: ;; + --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) + printf "%s\n" "$ac_cs_version"; exit ;; + --config | --confi | --conf | --con | --co | --c ) + printf "%s\n" "$ac_cs_config"; exit ;; + --debug | --debu | --deb | --de | --d | -d ) + debug=: ;; + --file | --fil | --fi | --f ) + $ac_shift + case $ac_optarg in + *\'*) ac_optarg=`printf "%s\n" "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; + '') as_fn_error $? 
"missing file argument" ;; + esac + as_fn_append CONFIG_FILES " '$ac_optarg'" + ac_need_defaults=false;; + --he | --h | --help | --hel | -h ) + printf "%s\n" "$ac_cs_usage"; exit ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil | --si | --s) + ac_cs_silent=: ;; + + # This is an error. + -*) as_fn_error $? "unrecognized option: \`$1' +Try \`$0 --help' for more information." ;; + + *) as_fn_append ac_config_targets " $1" + ac_need_defaults=false ;; + + esac + shift +done + +ac_configure_extra_args= + +if $ac_cs_silent; then + exec 6>/dev/null + ac_configure_extra_args="$ac_configure_extra_args --silent" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +if \$ac_cs_recheck; then + set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion + shift + \printf "%s\n" "running CONFIG_SHELL=$SHELL \$*" >&6 + CONFIG_SHELL='$SHELL' + export CONFIG_SHELL + exec "\$@" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +exec 5>>config.log +{ + echo + sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX +## Running $as_me. ## +_ASBOX + printf "%s\n" "$ac_log" +} >&5 + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 + +# Handling of arguments. +for ac_config_target in $ac_config_targets +do + case $ac_config_target in + "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; + + *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; + esac +done + + +# If the user did not use the arguments to specify the items to instantiate, +# then the envvar interface is used. Set only those that are not. +# We use the long form for the default assignment because of an extremely +# bizarre bug on SunOS 4.1.3. +if $ac_need_defaults; then + test ${CONFIG_FILES+y} || CONFIG_FILES=$config_files +fi + +# Have a temporary directory for convenience. 
Make it in the build tree +# simply because there is no reason against having it here, and in addition, +# creating and moving files from /tmp can sometimes cause problems. +# Hook for its removal unless debugging. +# Note that there is a small window in which the directory will not be cleaned: +# after its creation but before its name has been assigned to `$tmp'. +$debug || +{ + tmp= ac_tmp= + trap 'exit_status=$? + : "${ac_tmp:=$tmp}" + { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status +' 0 + trap 'as_fn_exit 1' 1 2 13 15 +} +# Create a (secure) tmp directory for tmp files. + +{ + tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && + test -d "$tmp" +} || +{ + tmp=./conf$$-$RANDOM + (umask 077 && mkdir "$tmp") +} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 +ac_tmp=$tmp + +# Set up the scripts for CONFIG_FILES section. +# No need to generate them if there are no CONFIG_FILES. +# This happens for instance with `./config.status config.h'. +if test -n "$CONFIG_FILES"; then + + +ac_cr=`echo X | tr X '\015'` +# On cygwin, bash can eat \r inside `` if the user requested igncr. +# But we know of no other shell where ac_cr would be empty at this +# point, so we can use a bashism as a fallback. +if test "x$ac_cr" = x; then + eval ac_cr=\$\'\\r\' +fi +ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` +if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then + ac_cs_awk_cr='\\r' +else + ac_cs_awk_cr=$ac_cr +fi + +echo 'BEGIN {' >"$ac_tmp/subs1.awk" && +_ACEOF + + +{ + echo "cat >conf$$subs.awk <<_ACEOF" && + echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && + echo "_ACEOF" +} >conf$$subs.sh || + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 +ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` +ac_delim='%!_!# ' +for ac_last_try in false false false false false :; do + . ./conf$$subs.sh || + as_fn_error $? 
"could not make $CONFIG_STATUS" "$LINENO" 5 + + ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` + if test $ac_delim_n = $ac_delim_num; then + break + elif $ac_last_try; then + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 + else + ac_delim="$ac_delim!$ac_delim _$ac_delim!! " + fi +done +rm -f conf$$subs.sh + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && +_ACEOF +sed -n ' +h +s/^/S["/; s/!.*/"]=/ +p +g +s/^[^!]*!// +:repl +t repl +s/'"$ac_delim"'$// +t delim +:nl +h +s/\(.\{148\}\)..*/\1/ +t more1 +s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ +p +n +b repl +:more1 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t nl +:delim +h +s/\(.\{148\}\)..*/\1/ +t more2 +s/["\\]/\\&/g; s/^/"/; s/$/"/ +p +b +:more2 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t delim +' >$CONFIG_STATUS || ac_write_fail=1 +rm -f conf$$subs.awk +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +_ACAWK +cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && + for (key in S) S_is_set[key] = 1 + FS = "" + +} +{ + line = $ 0 + nfields = split(line, field, "@") + substed = 0 + len = length(field[1]) + for (i = 2; i < nfields; i++) { + key = field[i] + keylen = length(key) + if (S_is_set[key]) { + value = S[key] + line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) + len += length(value) + length(field[++i]) + substed = 1 + } else + len += 1 + keylen + } + + print line +} + +_ACAWK +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then + sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" +else + cat +fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ + || as_fn_error $? 
"could not setup config files machinery" "$LINENO" 5 +_ACEOF + +# VPATH may cause trouble with some makes, so we remove sole $(srcdir), +# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and +# trailing colons and then remove the whole line if VPATH becomes empty +# (actually we leave an empty line to preserve line numbers). +if test "x$srcdir" = x.; then + ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ +h +s/// +s/^/:/ +s/[ ]*$/:/ +s/:\$(srcdir):/:/g +s/:\${srcdir}:/:/g +s/:@srcdir@:/:/g +s/^:*// +s/:*$// +x +s/\(=[ ]*\).*/\1/ +G +s/\n// +s/^[^=]*=[ ]*$// +}' +fi + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +fi # test -n "$CONFIG_FILES" + + +eval set X " :F $CONFIG_FILES " +shift +for ac_tag +do + case $ac_tag in + :[FHLC]) ac_mode=$ac_tag; continue;; + esac + case $ac_mode$ac_tag in + :[FHL]*:*);; + :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;; + :[FH]-) ac_tag=-:-;; + :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; + esac + ac_save_IFS=$IFS + IFS=: + set x $ac_tag + IFS=$ac_save_IFS + shift + ac_file=$1 + shift + + case $ac_mode in + :L) ac_source=$1;; + :[FH]) + ac_file_inputs= + for ac_f + do + case $ac_f in + -) ac_f="$ac_tmp/stdin";; + *) # Look for the file first in the build tree, then in the source tree + # (if the path is not absolute). The absolute path cannot be DOS-style, + # because $ac_f cannot contain `:'. + test -f "$ac_f" || + case $ac_f in + [\\/$]*) false;; + *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; + esac || + as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; + esac + case $ac_f in *\'*) ac_f=`printf "%s\n" "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac + as_fn_append ac_file_inputs " '$ac_f'" + done + + # Let's still pretend it is `configure' which instantiates (i.e., don't + # use $as_me), people would be surprised to read: + # /* config.h. Generated by config.status. */ + configure_input='Generated from '` + printf "%s\n" "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' + `' by configure.' 
+ if test x"$ac_file" != x-; then + configure_input="$ac_file. $configure_input" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 +printf "%s\n" "$as_me: creating $ac_file" >&6;} + fi + # Neutralize special characters interpreted by sed in replacement strings. + case $configure_input in #( + *\&* | *\|* | *\\* ) + ac_sed_conf_input=`printf "%s\n" "$configure_input" | + sed 's/[\\\\&|]/\\\\&/g'`;; #( + *) ac_sed_conf_input=$configure_input;; + esac + + case $ac_tag in + *:-:* | *:-) cat >"$ac_tmp/stdin" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; + esac + ;; + esac + + ac_dir=`$as_dirname -- "$ac_file" || +$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$ac_file" : 'X\(//\)[^/]' \| \ + X"$ac_file" : 'X\(//\)$' \| \ + X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X"$ac_file" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + as_dir="$ac_dir"; as_fn_mkdir_p + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`printf "%s\n" "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. + ac_top_builddir_sub=`printf "%s\n" "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. 
+ ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + + case $ac_mode in + :F) + # + # CONFIG_FILE + # + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# If the template does not know about datarootdir, expand it. +# FIXME: This hack should be removed a few years after 2.60. +ac_datarootdir_hack=; ac_datarootdir_seen= +ac_sed_dataroot=' +/datarootdir/ { + p + q +} +/@datadir@/p +/@docdir@/p +/@infodir@/p +/@localedir@/p +/@mandir@/p' +case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in +*datarootdir*) ac_datarootdir_seen=yes;; +*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 +printf "%s\n" "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 + ac_datarootdir_hack=' + s&@datadir@&$datadir&g + s&@docdir@&$docdir&g + s&@infodir@&$infodir&g + s&@localedir@&$localedir&g + s&@mandir@&$mandir&g + s&\\\${datarootdir}&$datarootdir&g' ;; +esac +_ACEOF + +# Neutralize VPATH when `$srcdir' = `.'. +# Shell code in configure.ac might set extrasub. +# FIXME: do we really want to maintain this feature? 
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_sed_extra="$ac_vpsub +$extrasub +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +:t +/@[a-zA-Z_][a-zA-Z_0-9]*@/!b +s|@configure_input@|$ac_sed_conf_input|;t t +s&@top_builddir@&$ac_top_builddir_sub&;t t +s&@top_build_prefix@&$ac_top_build_prefix&;t t +s&@srcdir@&$ac_srcdir&;t t +s&@abs_srcdir@&$ac_abs_srcdir&;t t +s&@top_srcdir@&$ac_top_srcdir&;t t +s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t +s&@builddir@&$ac_builddir&;t t +s&@abs_builddir@&$ac_abs_builddir&;t t +s&@abs_top_builddir@&$ac_abs_top_builddir&;t t +$ac_datarootdir_hack +" +eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ + >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + +test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && + { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && + { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ + "$ac_tmp/out"`; test -z "$ac_out"; } && + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined" >&5 +printf "%s\n" "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined" >&2;} + + rm -f "$ac_tmp/stdin" + case $ac_file in + -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; + *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; + esac \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + ;; + + + + esac + +done # for ac_tag + + +as_fn_exit 0 +_ACEOF +ac_clean_files=$ac_clean_files_save + +test $ac_write_fail = 0 || + as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 + + +# configure is writing to config.log, and then calls config.status. +# config.status does its own redirection, appending to config.log. 
+# Unfortunately, on DOS this fails, as config.log is still kept open +# by configure, so config.status won't be able to write to it; its +# output is simply discarded. So we exec the FD to /dev/null, +# effectively closing config.log, so it can be properly (re)opened and +# appended to by config.status. When coming back to configure, we +# need to make the FD available again. +if test "$no_create" != yes; then + ac_cs_success=: + ac_config_status_args= + test "$silent" = yes && + ac_config_status_args="$ac_config_status_args --quiet" + exec 5>/dev/null + $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false + exec 5>>config.log + # Use ||, not &&, to avoid exiting from the if with $? = 1, which + # would make configure fail if this is the last instruction. + $ac_cs_success || as_fn_exit 1 +fi +if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 +printf "%s\n" "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} +fi + + + + + diff --git a/configure.ac b/configure.ac new file mode 100644 index 000000000000..4cda8de10509 --- /dev/null +++ b/configure.ac @@ -0,0 +1,58 @@ +AC_CONFIG_MACRO_DIRS([m4]) +AC_INIT([awscli], [2.9.23]) +AC_CONFIG_SRCDIR([bin/aws]) +AM_PATH_PYTHON([3.8]) + +AC_MSG_CHECKING(for sqlite3) +read -r -d '' SQLITE_TEST_PROG <<-EOF +import sys +try: + import sqlite3 +except ImportError: + sys.exit(1) +EOF +$PYTHON -c "${SQLITE_TEST_PROG}" +if [[ $? -eq 0 ]] +then + AC_MSG_RESULT(yes) +else + AC_MSG_ERROR(AWS CLI requires a Python interpreter compiled with sqlite3. $PYTHON does not have sqlite3., 1) +fi + +AC_MSG_CHECKING(for --with-install-type) +AC_ARG_WITH(install_type, + AS_HELP_STRING([--with-install-type=@<:@system-sandbox|portable-exe@:>@], + [Specify type of AWS CLI installation. 
Options are: + "portable-exe", "system-sandbox" (default is "system-sandbox")]), +[], +[with_install_type=system-sandbox]) +AS_CASE($with_install_type, + [portable-exe],[INSTALL_TYPE=portable-exe], + [system-sandbox],[INSTALL_TYPE=system-sandbox], + [AC_MSG_ERROR([--with-install-type=portable-exe|system-sandbox])]) +AC_SUBST(INSTALL_TYPE) +AC_MSG_RESULT($with_install_type) + +AC_MSG_CHECKING(for --with-download-deps) +AC_ARG_WITH(download_deps, + AS_HELP_STRING([--with-download-deps], + [Download all dependencies and use those when + building the AWS CLI. Note that this is best effort + and has no backwards compatibility guarantee. If not + specified, the dependencies (including all python + packages) must be installed on your system]), +[], +[with_download_deps=no] +) +if test "$with_download_deps" = no; then + ${PYTHON} ${srcdir}/backends/build_system validate-env --artifact $with_install_type || AC_MSG_ERROR("Python dependencies not met.") + DOWNLOAD_DEPS_FLAG="" +else + DOWNLOAD_DEPS_FLAG=--download-deps +fi +AC_MSG_RESULT($with_download_deps) +AC_SUBST(DOWNLOAD_DEPS_FLAG) + +AC_CONFIG_FILES([Makefile]) +AC_OUTPUT +OVERRIDE_HELP diff --git a/m4/awscli_help.m4 b/m4/awscli_help.m4 new file mode 100644 index 000000000000..cb1e12c1c7a3 --- /dev/null +++ b/m4/awscli_help.m4 @@ -0,0 +1,50 @@ +AC_DEFUN([OVERRIDE_HELP], +[dnl# Clear the default help message. +m4_cleardivert([HELP_BEGIN])dnl +m4_cleardivert([HELP_ENABLE])dnl +m4_cleardivert([HELP_WITH])dnl +m4_cleardivert([HELP_VAR])dnl +m4_cleardivert([HELP_VAR_END])dnl +m4_cleardivert([HELP_END])dnl + +m4_divert_push([HELP_BEGIN])dnl +if test -n "$ac_init_help"; then + cat <<_ACEOF +Configures builds and installs of the AWS CLI + +Usage: ./configure [[OPTION]]... [[ENV_VAR=VALUE]]... + +Help options: + -h, --help Display help + -V, --version Display version + +Installation directories: + --prefix=PREFIX Set installation prefix. By default, this value is + "$ac_default_prefix". 
+ --libdir=LIBDIR Set parent directory for AWS CLI installation. The + full path to the AWS CLI installation is "LIBDIR/aws-cli". + The default value for "LIBDIR" is "PREFIX/lib" + (i.e., "$ac_default_prefix/lib" if "--prefix" is not set). + --bindir=BINDIR Set install directory for AWS CLI executables. The + default value for "BINDIR" is "PREFIX/bin" + (i.e., "$ac_default_prefix/bin" if "--prefix" is not set). +Optional arguments: + --with-install-type=[system-sandbox|portable-exe] + Specify type of AWS CLI installation. Options are: + "portable-exe", "system-sandbox" (default is + "system-sandbox") + --with-download-deps Download all dependencies and use those when + building the AWS CLI. Note that this is best effort + and has no backwards compatibility guarantee. If not + specified, the dependencies (including all python + packages) must be installed on your system +Some influential environment variables: + PYTHON the Python interpreter +m4_divert_pop([HELP_BEGIN])dnl + +m4_divert_push([HELP_END])dnl +_ACEOF + exit 0 +fi +m4_divert_pop([HELP_END])dnl +]) diff --git a/proposals/source-install.md b/proposals/source-install.md index 25b9b06e6295..fa51fc1238aa 100644 --- a/proposals/source-install.md +++ b/proposals/source-install.md @@ -3,7 +3,7 @@ Proposal | Metadata ---------------- | ------------- **Author** | Kyle Knapp -**Status** | Accepted +**Status** | Finalized **Created** | 24-August-2021 ## Abstract @@ -21,13 +21,13 @@ most platforms and environments, but they do not satisfy all use cases: * The desired platform (e.g, [ARM 32-bit](https://github.com/aws/aws-cli/issues/5426)) is not supported by any of the pre-built executables. - + * The environment lacks system dependencies that are required for the pre-built executable. For example, Alpine Linux uses [musl](https://musl.libc.org/), but the current executables require glibc. 
This causes the pre-built executables to not work out of the box for [Alpine Linux](https://github.com/aws/aws-cli/issues/4685). - + * The environment restricts access to resources only needed by the pre-built executable. For example, on security hardened systems, it does not give permission to shared memory @@ -61,7 +61,7 @@ following goals: 1. The source installation process maximizes the number of environments the AWS CLI v2 can be installed on. - + 2. The source installation process is straightforward and intuitive. It minimizes the number of cycles required to figure out how to install the AWS CLI from source. @@ -110,10 +110,10 @@ These commands are specific to Autotools where: detected and explicitly specified configurations. See the [Configuration section](#configuration) for the available configuration options. - + * `make` - Builds the AWS CLI v2. For details on the build mechanics, see the [Build/install mechanics section](#buildinstall-mechanics). - + * `make install` - Installs the built AWS CLI v2 to the configured location on the system. For details on the install mechanics, see the [Build/install mechanics section](#buildinstall-mechanics). @@ -159,6 +159,7 @@ the `configure` script with the `--help` option: ``` This is a sample output from the help page: ``` +✗ ./configure -h Configures builds and installs of the AWS CLI Usage: ./configure [OPTION]... [ENV_VAR=VALUE]... @@ -177,11 +178,8 @@ Installation directories: --bindir=BINDIR Set install directory for AWS CLI executables. The default value for "BINDIR" is "PREFIX/bin" (i.e., "/usr/local/bin" if "--prefix" is not set). - -Optional Packages: - --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] - --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) - --with-install-type=[portable-exe|system-sandbox] +Optional arguments: + --with-install-type=system-sandbox|portable-exe Specify type of AWS CLI installation. 
Options are: "portable-exe", "system-sandbox" (default is "system-sandbox") @@ -189,12 +187,11 @@ Optional Packages: building the AWS CLI. If not specified, the dependencies (including all python packages) must be installed on your system - Some influential environment variables: PYTHON the Python interpreter ``` -The sections below describes the most pertinent options. +The sections below describes the most pertinent options. ##### Install location @@ -206,7 +203,7 @@ install the AWS CLI v2: path to the AWS CLI v2 installation is `/aws-cli`. The default `libdir` value is `/usr/local/lib` making the default installation directory `/usr/local/lib/aws-cli` - + * `bindir` - Directory where the AWS CLI v2 executables (e.g., `aws`, `aws_completer`) will be installed. The default location is `/usr/local/bin`. @@ -278,7 +275,7 @@ that are Python libraries. All dependencies are checked when the `configure` script is run, and if the system is missing any Python dependencies, the `configure` script errors out. For example: ``` -$ ./configure +$ ./configure checking for a Python interpreter with version >= 3.8... python3.8 checking for python3.8... /usr/local/bin/python3.8 checking for python3.8 version... 3.8 @@ -315,8 +312,8 @@ In specifying this flag, the build process: * Skips the configuration check to make sure all Python library dependencies are installed on the system. - -* During the `make` step, downloads **all** required dependencies and uses + +* During the `make` step, downloads **all** required dependencies and uses **only** the downloaded dependencies to build the AWS CLI v2. Currently, this flag only downloads Python packages, but may be expanded in @@ -330,12 +327,12 @@ The source install process supports two different installation types: * `system-sandbox` - (Default) Creates an isolated Python virtual environment, installs the AWS CLI v2 into the virtual environment, and symlinks to the `aws` and `aws_completer` executable in the virtual environment. 
- + * `portable-exe` - Freezes the AWS CLI v2 into a standalone executable that can be distributed to environments of similar architectures. This is the same process used to generate the official pre-built executables of the AWS CLI v2. - + The primary difference between the two installation types is that the `portable-exe` freezes in a copy of the Python interpreter chosen in the `configure` step to use for the runtime of the AWS CLI v2. This allows it to @@ -357,7 +354,7 @@ For more information on reasons to use one installation type over the over, see #### Supported make targets and usage - + Once the `Makefile` is generated from the `configure` script, users can run a variety of targets to manage the building and installing the AWS CLI v2 via the command pattern: @@ -412,11 +409,11 @@ from source that are backwards compatible: AWS CLI v2. For example, the commands to install/upgrade the AWS CLI v2 will always be: `./configure`, `make`, and `make install`, and the command to uninstall the AWS CLI v2 will always be `make uninstall`. - + * Documented `configure` options. -* Documented `make` targets. - +* Documented `make` targets. + * Usage of `libdir` and `bindir` in the installation process. All bits related to the AWS CLI v2 installation will be located in the `libdir` and all publicly accessible executables (e.g., `aws` and `aws_completer`) will @@ -427,11 +424,11 @@ These are the aspects that have **no** backwards compatibility guarantees: * Dependencies. The AWS CLI v2 will add new dependencies in the future. This means users must install any new dependencies to their system in order to install the AWS CLI v2 from source. This includes anything from: - + * Increasing minimum required Python version * Pulling in a new Python library dependency * Requiring a new system dependency - + Furthermore, the `--with-download-deps` option does not guarantee that all possible new dependencies in the future will be accounted for by the flag. 
For example, a new programming language may be required in the future @@ -467,24 +464,23 @@ When running `make`, the following steps are run: This includes: `wheel`, `setuptools`, all CLI runtime dependencies, and `pyinstaller` (if building the `portable-exe`). These requirements are all specified in lock files generated from [`pip-compile`](https://github.com/jazzband/pip-tools). - + If `--with-download-deps` was not specified in the `configure` command, it copies all Python libraries from the Python interpreter's site package plus any scripts (e.g., `pyinstaller`) into the virtual environment being used for the build. - + 3. Run `pip install` directly on the AWS CLI v2 codebase to do an offline, in-tree build and install of the AWS CLI v2 into the build virtual environment. This is done by including the follow pip flags: * [`--no-build-isolation`](https://pip.pypa.io/en/stable/cli/pip_install/#cmdoption-no-build-isolation) - * [`--use-feature=in-tree-build`](https://pip.pypa.io/en/stable/cli/pip_install/#local-project-installs) * [`--no-cache-dir`](https://pip.pypa.io/en/stable/cli/pip_install/#caching) * [`--no-index`](https://pip.pypa.io/en/stable/cli/pip_install/#cmdoption-no-index) - + 4. If the `--install-type` was set to `portable-exe` in the `configure` command, run [`pyinstaller`](https://www.pyinstaller.org/) to build a standalone executable. - + #### Install steps @@ -493,7 +489,7 @@ When running `make install`, the following steps are run: 1. Move the built virtual environment (if the install type is `system-sandbox`) or standalone executable (if the install type is a `portable-exe`) to the configured install directory (i.e., `/awscli`). - + 2. Create symlinks for both the `aws` and `aws_completer` in the configured bin directory (i.e., at `/aws` and `/aws_completer`). @@ -524,16 +520,16 @@ CLI is written in Python. 
To make the AWS CLI v2 codebase compliant, it requires the following changes: -* Port all information from the `setup.py` to the `setup.cfg` that can be - programmatically parsed for runtime dependencies. - * Introduce a `pyproject.toml` file (see [PEP 518](https://www.python.org/dev/peps/pep-0518/)) and in-tree build backend that builds the autocomplete index when building both the sdist and wheel (see [PEP 517](https://www.python.org/dev/peps/pep-0517/#in-tree-build-backends)). - + +* Port all information from the `setup.py` and `setup.cfg` to `pyproject.toml + that can be programmatically parsed for runtime dependencies. + * Pull in and maintain the unreleased `botocore` `v2` branch along with - `s3transfer` into the AWS CLI v2 codebase. + `s3transfer` into the AWS CLI v2 codebase. ### Hosted source distribution @@ -595,16 +591,16 @@ Specifically: environments the AWS CLI v2 can be installed on. For systems that do not have a POSIX-compatible shell (e.g., Windows), there is software available to install to help run the scripts (e.g., MSYS2). - + * Autotools is one of the most common build systems. This improves familiarity for any users looking to install the AWS CLI v2 from source as Autotools enabled projects follow the same `configure`, `make`, `make install` command flow and share similar configuration flags. - + * Autotools is language agnostic. It shields users from having to understand Python build tool usage and details and provides stability if we needed to change the underlying programming language and corresponding build tools. - + #### Q. What alternatives to Autotools were considered? @@ -618,15 +614,15 @@ notes on what this interface would look like. **Pros** -* It is language agnostic (e.g., can abstract over the underlying +* It is language agnostic (e.g., can abstract over the underlying programming language). - + * It is a commonly used build tool. - + * It supports a wide range of build systems. 
Unlike Autotools that can only generate GNU Makefiles, CMake can target GNU Makefiles as well as common Windows build systems such as NMake and Visual Studio. - + * The CMake CLI has built-in commands that allows you to build and install the project (e.g., `cmake --build` and `cmake --install`) without having to directly use the underlying build system (e.g., `make`). Therefore, you could @@ -641,7 +637,7 @@ notes on what this interface would look like. Furthermore, for Windows, users would still need to install Microsoft Visual Studio in order to access build systems that `cmake` would target (e.g., `nmake`). - + * It introduces a new tool that users will have to know how to use in order to build and install the AWS CLI. While we can provide quick getting started instructions, there is still the possibility that users will have to learn @@ -649,7 +645,7 @@ notes on what this interface would look like. With Autotools, it is scoped to just running a single `configure` shell script followed by `make` commands. From the end user's perspective, there is no additional concepts/knowledge needed past that single usage pattern. - + * If we want to take advantage of new features in CMake, we will have to force users to upgrade to new versions of CMake. This is different from Autotools where the authors generate the `configure` script so only the authors need @@ -668,7 +664,7 @@ leveraged. For the purpose of being a thin, familiar interface, Autotools is advantageous over CMake because: * Minimal dependencies required to use the build/install interface. - + * The end user interface is more minimal as it is scoped to running a single `configure` script followed by `make` commands. Users do not have to potentially learn the usage and concepts for a new tool (e.g., `cmake`). @@ -677,7 +673,7 @@ In addition, one important note is that the decision between Autotools and CMake is **not** a one-way door. 
It is possible for a project to allow users to build and install it using either Autotools or CMake. So, CMake support can always be added in the future if needed. - + ##### Custom build/install script Building and installing would be exposed through a custom shell script or @@ -687,7 +683,7 @@ Makefile. * It is language agnostic (e.g., can abstract over the underlying programming language). - + * Can have greater control over build and install interface instead of trying to fit the interface into the patterns of an established build tool. @@ -696,14 +692,14 @@ Makefile. * Does not bring the same potential familiarity as an Autotools/CMake project for users new to the project. Users will have to learn a new usage pattern. - + * Anything custom-built would not be able to match the maintainability and portability of a more mature build system (e.g., Autotools and CMake) - + * Easier to deviate from conventions established by other build systems, which could make it more difficult to understand how to customize the build and install of the AWS CLI. - + **Verdict:** While this option gives similar benefits to Autotools and CMake, it still does @@ -719,17 +715,17 @@ tools to build and then install the AWS CLI v2. * It reduces complexity of the project (e.g., we would not have to add another layer of abstraction over the Python build logic) - + * It introduces no new dependencies. Python is already required to build the AWS CLI and it comes with `pip` which is PEP 517 compliant. Also, the Python build tools generally have cross-platform support. - + **Cons:** * Requires users to be familiar with Python build and install tooling and be familiar how to use them safely (e.g., not install the AWS CLI into the global site packages). - + * If we were to change programming languages or add new build steps, it will require users to rewrite any build logic. @@ -757,7 +753,7 @@ Linux pre-built ZIP). 
These types of builds are useful because: * Users can ensure their installation of AWS CLI v2 is not coupled to their system installation of Python. - + * Users can distribute their build to other similar systems that may not have Python installed. @@ -766,7 +762,7 @@ instead of one of the official artifacts because: * They want to customize the build (e.g., hand select what dependencies are bundled into the executable). - + * They have compliance/security reasons to not rely on executables built by third-parties and want to be able to build it themselves. @@ -788,25 +784,25 @@ PyInstaller install/build does not work whether: * There is not a pre-compiled PyInstaller bootloader available compatible for their environment and have to [build the bootloader themselves](https://pyinstaller.readthedocs.io/en/stable/bootloader-building.html). - + * The Python interpreter may need to be recompiled to enable it as a shared library (e.g., compile with `--enable-shared` on Linux or `--enable-framework` for Mac). - + In general, the benefit of having the installation not requiring a system Python is not worth the potential problems users will have to work through when building the exe when: * The source install already requires Python to be on the system to build the AWS CLI v2. - + * One of the main reasons users will be building from source is because there is not an official pre-built artifact available for their environment. However, these types of environments will likely be the ones where users will have to build the PyInstaller bootloader themselves or not be even [fully tested](https://github.com/pyinstaller/pyinstaller#untested-platforms), and be more prone to running into the issues. - + #### Q. Why have `--with-download-deps` flag? 
@@ -816,7 +812,7 @@ users would have to learn how to install the appropriate Python libraries and may do so in a way that can be detrimental to their environment setup such as install packages into their global site packages directory, which can break other system tools. - + #### Q. Why is there provisional support for PEP 517? @@ -833,7 +829,7 @@ provide a sharp, no-frills escape hatch for installing the AWS CLI v2 whether: * The environment is a non-POSIX compliant system, and the user does not want or is unable to use additional software (e.g., MSYS2) to be able to run the Autotools workflow in order to install the AWS CLI v2. - + * The user actually wants the AWS CLI v2 installed directly as part of the global or user site-packages directory, which would help minimize the number of copies of third-party Python packages managed on the system. @@ -877,7 +873,7 @@ v2 codebase for the following reasons: * It simplifies the building and installing of the AWS CLI v2 as it removes the need to download, checkout, and install a specific commit of the botocore source in order to install the AWS CLI v2. - + * For users that leverage provisional support for PEP 517 to install the AWS CLI v2, they do not need to be concerned about the unreleased backwards incompatible version of botocore breaking their installation of other Python @@ -896,7 +892,7 @@ v2 codebase for the following reasons: future official major version bump of botocore. It is also worth noting that in the future, the team may also be able to stop maintaining its fork of botocore in favor of an officially released major version of botocore. - + The reason s3transfer is being maintained as part of the AWS CLI v2 codebase because it has a direct dependency on botocore. This is specifically @@ -905,8 +901,8 @@ problematic because: * If s3transfer was not pulled in, it would automatically pull in the official version of botocore, which would unnecessarily bloat the size of the dependency closure. 
- -* It ensures that any changes that are made to the AWS CLI v2 maintained + +* It ensures that any changes that are made to the AWS CLI v2 maintained version of botocore is compatible with s3transfer interfaces. @@ -925,10 +921,10 @@ for better managing source installs such as: * `dist` - Generates a source distribution that could be distributed similar to the official hosted source distributions. - + * `check` - Runs smoke tests on the built AWS CLI v2 to make sure the AWS CLI v2 is working correctly before actually installing it to the system. - + * `html`/`install-html` - Generates the HTML references and installs them to the configured location on the system. @@ -940,7 +936,7 @@ some optional dependencies used in running the CLI. For example: * Pager (e.g., `less`) if not available on the system for redirecting command output - + * Any standalone executable plugins required for customized commands such as the [Session Manager plugin](https://docs.aws.amazon.com/systems-manager/latest/userguide/session-manager-working-with-install-plugin.html) to use the `ssm start-session` command. @@ -952,7 +948,7 @@ These could be either exposed via: * Adding more opt-in values options to the `--with-download-deps` flag (e.g., `--with-download-deps=pager`). - + * Adding completely separate flags for downloading these optional dependencies that is completely separate from `-with-download-deps` (e.g., `--with-download-pager`, `--with-download-plugins`). 
@@ -990,7 +986,7 @@ Below is an example Dockerfile that can be used to get a working installation of the AWS CLI v2 in an Alpine Linux container as an [alternative to pre-built binaries for Alpine](https://github.com/aws/aws-cli/issues/4685): ```dockerfile -FROM python:3.8-alpine +FROM python:3.8-alpine AS builder ENV AWSCLI_VERSION=2.2.1 @@ -999,6 +995,7 @@ RUN apk add --no-cache \ make \ cmake \ gcc \ + g++ \ libc-dev \ libffi-dev \ openssl-dev \ diff --git a/pyproject.toml b/pyproject.toml index 5192a858942c..dd2494d05c91 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [build-system] requires = [ -"flit_core>=3.7.1,<3.7.2", +"flit_core>=3.7.1,<3.8.1", ] build-backend = "pep517" backend-path = ["backends"] @@ -49,6 +49,11 @@ include = [ "backends/**/*.py", "bin/*", "CHANGELOG.rst", + "tests/**/*", + "requirements/**/*.txt", + "configure", + "Makefile.in", + "exe/**/*", ] # end of cli sdist tool section diff --git a/requirements-base.txt b/requirements-base.txt index c05cc87f9c73..3986857daec5 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -1,2 +1,3 @@ + wheel==0.37.1 flit_core==3.8.0 diff --git a/requirements-test.txt b/requirements-test.txt index ce9671e3c111..fc5c62f5a093 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -4,3 +4,4 @@ pytest==7.2.0 coverage==7.0.1 pytest-cov==4.0.0 pytest-xdist==3.1.0 +pip-tools==6.9.0 diff --git a/requirements/bootstrap.txt b/requirements/bootstrap.txt new file mode 100644 index 000000000000..e0641675344b --- /dev/null +++ b/requirements/bootstrap.txt @@ -0,0 +1,2 @@ +pip>=22.0.0,<23.0.0 +flit_core>=3.7.1,<3.8.1 diff --git a/requirements/download-deps/bootstrap-lock.txt b/requirements/download-deps/bootstrap-lock.txt new file mode 100644 index 000000000000..0937ec37ffad --- /dev/null +++ b/requirements/download-deps/bootstrap-lock.txt @@ -0,0 +1,24 @@ +# +# This file is autogenerated by pip-compile with python 3.9 +# To update, run: +# +# pip-compile --allow-unsafe 
--generate-hashes --output-file=requirements/download-deps/bootstrap-lock.txt --unsafe-package=flit-core --unsafe-package=pip --unsafe-package=setuptools --unsafe-package=wheel requirements/download-deps/bootstrap.txt +# +flit-core==3.8.0 \ + --hash=sha256:64a29ec845164a6abe1136bf4bc5ae012bdfe758ed42fc7571a9059a7c80bd83 \ + --hash=sha256:b305b30c99526df5e63d6022dd2310a0a941a187bd3884f4c8ef0418df6c39f3 + # via -r requirements/download-deps/../bootstrap.txt +wheel==0.37.1 \ + --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ + --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 + # via -r requirements/download-deps/bootstrap.txt + +# The following packages are considered to be unsafe in a requirements file: +pip==22.3.1 \ + --hash=sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38 \ + --hash=sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077 + # via -r requirements/download-deps/../bootstrap.txt +setuptools==65.3.0 \ + --hash=sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82 \ + --hash=sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57 + # via -r requirements/download-deps/bootstrap.txt diff --git a/requirements/download-deps/bootstrap-win-lock.txt b/requirements/download-deps/bootstrap-win-lock.txt new file mode 100644 index 000000000000..8633e11adb59 --- /dev/null +++ b/requirements/download-deps/bootstrap-win-lock.txt @@ -0,0 +1,24 @@ +# +# This file is autogenerated by pip-compile with python 3.9 +# To update, run: +# +# pip-compile --allow-unsafe --generate-hashes --output-file='requirements\download-deps\bootstrap-win-lock.txt' --unsafe-package=flit-core --unsafe-package=pip --unsafe-package=setuptools --unsafe-package=wheel 'requirements\download-deps\bootstrap.txt' +# +flit-core==3.8.0 \ + --hash=sha256:64a29ec845164a6abe1136bf4bc5ae012bdfe758ed42fc7571a9059a7c80bd83 \ + 
--hash=sha256:b305b30c99526df5e63d6022dd2310a0a941a187bd3884f4c8ef0418df6c39f3 + # via -r requirements\download-deps\../bootstrap.txt +wheel==0.37.1 \ + --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ + --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 + # via -r requirements\download-deps\bootstrap.txt + +# The following packages are considered to be unsafe in a requirements file: +pip==22.3.1 \ + --hash=sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38 \ + --hash=sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077 + # via -r requirements\download-deps\../bootstrap.txt +setuptools==65.3.0 \ + --hash=sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82 \ + --hash=sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57 + # via -r requirements\download-deps\bootstrap.txt diff --git a/requirements/download-deps/bootstrap.txt b/requirements/download-deps/bootstrap.txt new file mode 100644 index 000000000000..93658ee615f7 --- /dev/null +++ b/requirements/download-deps/bootstrap.txt @@ -0,0 +1,3 @@ +setuptools==65.3.0 +wheel==0.37.1 +-r ../bootstrap.txt diff --git a/requirements/download-deps/portable-exe-lock.txt b/requirements/download-deps/portable-exe-lock.txt new file mode 100644 index 000000000000..f097dea2e67a --- /dev/null +++ b/requirements/download-deps/portable-exe-lock.txt @@ -0,0 +1,245 @@ +# +# This file is autogenerated by pip-compile with python 3.9 +# To update, run: +# +# pip-compile --generate-hashes --output-file=requirements/download-deps/portable-exe-lock.txt --unsafe-package=flit-core --unsafe-package=pip --unsafe-package=setuptools --unsafe-package=wheel pyproject.toml requirements/portable-exe-extras.txt +# +altgraph==0.17.3 \ + --hash=sha256:ad33358114df7c9416cdb8fa1eaa5852166c505118717021c6a8c7c7abbd03dd \ + --hash=sha256:c8ac1ca6772207179ed8003ce7687757c04b0b71536f81e2ac5755c6226458fe + # via + # macholib + 
# pyinstaller +awscrt==0.16.10 \ + --hash=sha256:0a19c49442d8c5265ebdfc7ab50a1b00d13209742796a5700d43671dc69864bd \ + --hash=sha256:20844f811edb1babe867688f7528b9173a002b3aca851aea6b5659ae73c90fed \ + --hash=sha256:2f89df5ccbf84bffed3aa1de8c4b01a7405f961873279ab9cfc18f94c6ed6566 \ + --hash=sha256:3027db877ede48a4cd7b9a3e9e2c288c0c2ca6483da634d2c3fa00ebecf9cfb2 \ + --hash=sha256:3101499b479b82f0403f615bf520c1f8b9506818d275e3e28ade25cc6bcdb92b \ + --hash=sha256:3544d3c0dee23c4e3c2d8575523615a329d1aeb7a2dd0c7eccb42c329f9cbb41 \ + --hash=sha256:3811240c485ad7621ddfa912d08928a9ea639a5875e11439193b7f4f4508ac9a \ + --hash=sha256:48cd45afd66cbd0cc9a1fdf78669fbc630a0882993547250a3e08477f75dfe25 \ + --hash=sha256:4f6e6dab64a5a3955f9c7961b7cf64c8db05c79f7d78df5dd72c556d2f12d2d6 \ + --hash=sha256:562b15030307894458b750aaa6a5ef57d854e96a34666d18e32d036d274cec39 \ + --hash=sha256:644fbf3a9f6ab38f19d8a9f62903bdc1e05e75bbc6ac9a00c70eb1401394928f \ + --hash=sha256:6732eedf2638287eecddbced53889ac681a6efc8c028de3511c2707d33c70d88 \ + --hash=sha256:74664388bf47aaa41bf290639dfe69446833a17f2377fa9b8099f3bfa1221ea1 \ + --hash=sha256:74690f01b4ef534d06005a8ece46cee79337f5f93678e317c67c029300bd1bc2 \ + --hash=sha256:777913b731b784f6e62e13abde840669de06875c3c2fa717d5fd88b134eebfec \ + --hash=sha256:833f01cbad3328ad848f6354dd8a03260bef3422c3647b03354b303c6b8fa37c \ + --hash=sha256:9c33d58785651ef62e0e5d8ff0ba71d4835357c7fdef8dc3d2a01e80a6aa0500 \ + --hash=sha256:a05bcafd3ae22d9854a7d835d13aef89a28c00801336bde544c027e4dee61281 \ + --hash=sha256:a84e6e88ff192d4747382c5d6867a5a1f099aecbb8ae60cce7fd03610fb7439b \ + --hash=sha256:acf871eb39c790af597cbfb027f80c086aa95cf363ee957cbceede9614874c32 \ + --hash=sha256:ae2d6b93313a6134ed04850038f50fa29b2015d2928c725ec66361504fc898f5 \ + --hash=sha256:be0649f075b3a3c861ad47e0df199dfe6f46475eb674c9db473d8dec87a69646 \ + --hash=sha256:c41dc07ffcdaef74cc8bc5680c0257e906f3de87e0bc73d367d68a28a80690e7 \ + 
--hash=sha256:cda32c1b2045c96772a5f54e5b0cd5dd112ba7f0aaa4dc59bf0c1eae9f2aa476 \ + --hash=sha256:d2190af8785e9cc2de86c652a403e6a2ed99107ff8d9efad41a042687128273c \ + --hash=sha256:dcce455b7aa42e8bd868ec0dd50f5a7707e87acc4ead529fb356065cc139fe42 \ + --hash=sha256:e7103baa733c8ca056c64c1cf0e880d53a345c7339d5e79bb71f1b27a2d89e80 \ + --hash=sha256:ebc9666dc15f37e9fd47295a777109a4eddd644c6a817016b4d91f5fc9747b1b \ + --hash=sha256:f31bf234fb879c0bc7bd9bb46e6217e9d0e8939b31e8427ff555b30c15fe4f76 \ + --hash=sha256:f67c3cf0559254b1349d32e2ad002d794df7e90fad085ba918e9a34ec371f74e \ + --hash=sha256:fbe9d9870a26c9cc43878f7db828dfd697e10760a1ab47ee14363e1bb03903ad \ + --hash=sha256:fd7a468fff961c6a19da67fd128396084c064b8ca09fbbc9567680d447718620 + # via awscli (pyproject.toml) +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + 
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + 
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + 
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via cryptography +colorama==0.4.6 \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + # via awscli (pyproject.toml) +cryptography==38.0.4 \ + --hash=sha256:0e70da4bdff7601b0ef48e6348339e490ebfb0cbe638e083c9c41fb49f00c8bd \ + --hash=sha256:10652dd7282de17990b88679cb82f832752c4e8237f0c714be518044269415db \ + --hash=sha256:175c1a818b87c9ac80bb7377f5520b7f31b3ef2a0004e2420319beadedb67290 \ + --hash=sha256:1d7e632804a248103b60b16fb145e8df0bc60eed790ece0d12efe8cd3f3e7744 \ + --hash=sha256:1f13ddda26a04c06eb57119caf27a524ccae20533729f4b1e4a69b54e07035eb \ + --hash=sha256:2ec2a8714dd005949d4019195d72abed84198d877112abb5a27740e217e0ea8d \ + --hash=sha256:2fa36a7b2cc0998a3a4d5af26ccb6273f3df133d61da2ba13b3286261e7efb70 \ + --hash=sha256:2fb481682873035600b5502f0015b664abc26466153fab5c6bc92c1ea69d478b \ + --hash=sha256:3178d46f363d4549b9a76264f41c6948752183b3f587666aff0555ac50fd7876 \ + --hash=sha256:4367da5705922cf7070462e964f66e4ac24162e22ab0a2e9d31f1b270dd78083 \ + --hash=sha256:4eb85075437f0b1fd8cd66c688469a0c4119e0ba855e3fef86691971b887caf6 \ + --hash=sha256:50a1494ed0c3f5b4d07650a68cd6ca62efe8b596ce743a5c94403e6f11bf06c1 \ + --hash=sha256:53049f3379ef05182864d13bb9686657659407148f901f3f1eee57a733fb4b00 \ + --hash=sha256:6391e59ebe7c62d9902c24a4d8bcbc79a68e7c4ab65863536127c8a9cd94043b \ + --hash=sha256:67461b5ebca2e4c2ab991733f8ab637a7265bb582f07c7c88914b5afb88cb95b \ + --hash=sha256:78e47e28ddc4ace41dd38c42e6feecfdadf9c3be2af389abbfeef1ff06822285 \ + --hash=sha256:80ca53981ceeb3241998443c4964a387771588c4e4a5d92735a493af868294f9 \ + --hash=sha256:8a4b2bdb68a447fadebfd7d24855758fe2d6fecc7fed0b78d190b1af39a8e3b0 \ + --hash=sha256:8e45653fb97eb2f20b8c96f9cd2b3a0654d742b47d638cf2897afbd97f80fa6d \ 
+ --hash=sha256:998cd19189d8a747b226d24c0207fdaa1e6658a1d3f2494541cb9dfbf7dcb6d2 \ + --hash=sha256:a10498349d4c8eab7357a8f9aa3463791292845b79597ad1b98a543686fb1ec8 \ + --hash=sha256:b4cad0cea995af760f82820ab4ca54e5471fc782f70a007f31531957f43e9dee \ + --hash=sha256:bfe6472507986613dc6cc00b3d492b2f7564b02b3b3682d25ca7f40fa3fd321b \ + --hash=sha256:c9e0d79ee4c56d841bd4ac6e7697c8ff3c8d6da67379057f29e66acffcd1e9a7 \ + --hash=sha256:ca57eb3ddaccd1112c18fc80abe41db443cc2e9dcb1917078e02dfa010a4f353 \ + --hash=sha256:ce127dd0a6a0811c251a6cddd014d292728484e530d80e872ad9806cfb1c5b3c + # via awscli (pyproject.toml) +distro==1.5.0 \ + --hash=sha256:0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92 \ + --hash=sha256:df74eed763e18d10d0da624258524ae80486432cd17392d9c3d96f5e83cd2799 + # via awscli (pyproject.toml) +docutils==0.19 \ + --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ + --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc + # via awscli (pyproject.toml) +jmespath==1.0.1 \ + --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \ + --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe + # via awscli (pyproject.toml) +macholib==1.16.2 \ + --hash=sha256:44c40f2cd7d6726af8fa6fe22549178d3a4dfecc35a9cd15ea916d9c83a688e0 \ + --hash=sha256:557bbfa1bb255c20e9abafe7ed6cd8046b48d9525db2f9b77d3122a63a2a8bf8 + # via pyinstaller +prompt-toolkit==3.0.28 \ + --hash=sha256:30129d870dcb0b3b6a53efdc9d0a83ea96162ffd28ffe077e94215b233dc670c \ + --hash=sha256:9f1cd16b1e86c2968f2519d7fb31dd9d669916f515612c269d14e9ed52b51650 + # via awscli (pyproject.toml) +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pyinstaller==5.3 \ + --hash=sha256:066b83a0eae89ad418749e9e29429c152f1ff096230df11a093bbded8344ade0 \ + 
--hash=sha256:4c658a762cbbee5c5997c364578804d4c1e91d688de8ed018710c2705bf1474b \ + --hash=sha256:7591a9e1e2a481f99eb99036d6786e20717bc10f8f0a8ef519958cb3172fac7a \ + --hash=sha256:794e8e143ae73d1acdd2cbc52f02dd34cdfbd954ede34c7067ce68a268d8b7c2 \ + --hash=sha256:9efbad718fe29d425336f289871c67bfc6a1876013037fee2ef1f7613fd675a2 \ + --hash=sha256:a0e7a80fe04204add3f743101958a3cf62b79e7ccda838388784b1a35bb5b27f \ + --hash=sha256:aa9d1b8639d2402438c179ae1c8acfd41b65366c803a5a6484a5bb7586e88647 \ + --hash=sha256:b38505b445cdd64279f04650e0ddfe5ac6cef61996b14f06e3c99da8aac3cfbe \ + --hash=sha256:cae43e01e04f37185d23202aba8cf2837fa24ec3d0aa5ebc42e26f404e6eba95 \ + --hash=sha256:d4123992556951ed24e11cf2eec9a4e18e94ee8bd63ca49d9b7fc37387097eb9 \ + --hash=sha256:de71d4669806e4d54b23b477cc077e2e8fe9c4d57e79ed32d22b7585137fd7b7 + # via -r requirements/portable-exe-extras.txt +pyinstaller-hooks-contrib==2022.15 \ + --hash=sha256:55c1def8066d0279d06cd67eea30c12ffcdb961a5edeeaf361adac0164baef30 \ + --hash=sha256:73fd4051dc1620f3ae9643291cd9e2f47bfed582ade2eb05e3247ecab4a4f5f3 + # via pyinstaller +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via awscli (pyproject.toml) +ruamel-yaml==0.17.21 \ + --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ + --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af + # via awscli (pyproject.toml) +ruamel-yaml-clib==0.2.7 \ + --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ + --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ + --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ + --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ + --hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ + 
--hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ + --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ + --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ + --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ + --hash=sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94 \ + --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ + --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ + --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ + --hash=sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5 \ + --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ + --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ + --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ + --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ + --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ + --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ + --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ + --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ + --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ + --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ + --hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ + --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ + --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ + --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ + --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ + 
--hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ + --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ + --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ + --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ + --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 + # via ruamel-yaml +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via python-dateutil +urllib3==1.26.14 \ + --hash=sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72 \ + --hash=sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1 + # via awscli (pyproject.toml) +wcwidth==0.2.6 \ + --hash=sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e \ + --hash=sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0 + # via prompt-toolkit + +# WARNING: The following packages were not pinned, but pip requires them to be +# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. 
+# setuptools diff --git a/requirements/download-deps/portable-exe-win-lock.txt b/requirements/download-deps/portable-exe-win-lock.txt new file mode 100644 index 000000000000..5d0ded6d7b3a --- /dev/null +++ b/requirements/download-deps/portable-exe-win-lock.txt @@ -0,0 +1,247 @@ +# +# This file is autogenerated by pip-compile with python 3.9 +# To update, run: +# +# pip-compile --generate-hashes --output-file='requirements\download-deps\portable-exe-win-lock.txt' --unsafe-package=flit-core --unsafe-package=pip --unsafe-package=setuptools --unsafe-package=wheel 'pyproject.toml' 'requirements\portable-exe-extras.txt' +# +altgraph==0.17.3 \ + --hash=sha256:ad33358114df7c9416cdb8fa1eaa5852166c505118717021c6a8c7c7abbd03dd \ + --hash=sha256:c8ac1ca6772207179ed8003ce7687757c04b0b71536f81e2ac5755c6226458fe + # via pyinstaller +awscrt==0.16.10 \ + --hash=sha256:0a19c49442d8c5265ebdfc7ab50a1b00d13209742796a5700d43671dc69864bd \ + --hash=sha256:20844f811edb1babe867688f7528b9173a002b3aca851aea6b5659ae73c90fed \ + --hash=sha256:2f89df5ccbf84bffed3aa1de8c4b01a7405f961873279ab9cfc18f94c6ed6566 \ + --hash=sha256:3027db877ede48a4cd7b9a3e9e2c288c0c2ca6483da634d2c3fa00ebecf9cfb2 \ + --hash=sha256:3101499b479b82f0403f615bf520c1f8b9506818d275e3e28ade25cc6bcdb92b \ + --hash=sha256:3544d3c0dee23c4e3c2d8575523615a329d1aeb7a2dd0c7eccb42c329f9cbb41 \ + --hash=sha256:3811240c485ad7621ddfa912d08928a9ea639a5875e11439193b7f4f4508ac9a \ + --hash=sha256:48cd45afd66cbd0cc9a1fdf78669fbc630a0882993547250a3e08477f75dfe25 \ + --hash=sha256:4f6e6dab64a5a3955f9c7961b7cf64c8db05c79f7d78df5dd72c556d2f12d2d6 \ + --hash=sha256:562b15030307894458b750aaa6a5ef57d854e96a34666d18e32d036d274cec39 \ + --hash=sha256:644fbf3a9f6ab38f19d8a9f62903bdc1e05e75bbc6ac9a00c70eb1401394928f \ + --hash=sha256:6732eedf2638287eecddbced53889ac681a6efc8c028de3511c2707d33c70d88 \ + --hash=sha256:74664388bf47aaa41bf290639dfe69446833a17f2377fa9b8099f3bfa1221ea1 \ + 
--hash=sha256:74690f01b4ef534d06005a8ece46cee79337f5f93678e317c67c029300bd1bc2 \ + --hash=sha256:777913b731b784f6e62e13abde840669de06875c3c2fa717d5fd88b134eebfec \ + --hash=sha256:833f01cbad3328ad848f6354dd8a03260bef3422c3647b03354b303c6b8fa37c \ + --hash=sha256:9c33d58785651ef62e0e5d8ff0ba71d4835357c7fdef8dc3d2a01e80a6aa0500 \ + --hash=sha256:a05bcafd3ae22d9854a7d835d13aef89a28c00801336bde544c027e4dee61281 \ + --hash=sha256:a84e6e88ff192d4747382c5d6867a5a1f099aecbb8ae60cce7fd03610fb7439b \ + --hash=sha256:acf871eb39c790af597cbfb027f80c086aa95cf363ee957cbceede9614874c32 \ + --hash=sha256:ae2d6b93313a6134ed04850038f50fa29b2015d2928c725ec66361504fc898f5 \ + --hash=sha256:be0649f075b3a3c861ad47e0df199dfe6f46475eb674c9db473d8dec87a69646 \ + --hash=sha256:c41dc07ffcdaef74cc8bc5680c0257e906f3de87e0bc73d367d68a28a80690e7 \ + --hash=sha256:cda32c1b2045c96772a5f54e5b0cd5dd112ba7f0aaa4dc59bf0c1eae9f2aa476 \ + --hash=sha256:d2190af8785e9cc2de86c652a403e6a2ed99107ff8d9efad41a042687128273c \ + --hash=sha256:dcce455b7aa42e8bd868ec0dd50f5a7707e87acc4ead529fb356065cc139fe42 \ + --hash=sha256:e7103baa733c8ca056c64c1cf0e880d53a345c7339d5e79bb71f1b27a2d89e80 \ + --hash=sha256:ebc9666dc15f37e9fd47295a777109a4eddd644c6a817016b4d91f5fc9747b1b \ + --hash=sha256:f31bf234fb879c0bc7bd9bb46e6217e9d0e8939b31e8427ff555b30c15fe4f76 \ + --hash=sha256:f67c3cf0559254b1349d32e2ad002d794df7e90fad085ba918e9a34ec371f74e \ + --hash=sha256:fbe9d9870a26c9cc43878f7db828dfd697e10760a1ab47ee14363e1bb03903ad \ + --hash=sha256:fd7a468fff961c6a19da67fd128396084c064b8ca09fbbc9567680d447718620 + # via awscli (pyproject.toml) +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + 
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + 
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + 
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via cryptography +colorama==0.4.6 \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + # via awscli (pyproject.toml) +cryptography==38.0.4 \ + --hash=sha256:0e70da4bdff7601b0ef48e6348339e490ebfb0cbe638e083c9c41fb49f00c8bd \ + --hash=sha256:10652dd7282de17990b88679cb82f832752c4e8237f0c714be518044269415db \ + --hash=sha256:175c1a818b87c9ac80bb7377f5520b7f31b3ef2a0004e2420319beadedb67290 \ + --hash=sha256:1d7e632804a248103b60b16fb145e8df0bc60eed790ece0d12efe8cd3f3e7744 \ + --hash=sha256:1f13ddda26a04c06eb57119caf27a524ccae20533729f4b1e4a69b54e07035eb \ + --hash=sha256:2ec2a8714dd005949d4019195d72abed84198d877112abb5a27740e217e0ea8d \ + --hash=sha256:2fa36a7b2cc0998a3a4d5af26ccb6273f3df133d61da2ba13b3286261e7efb70 \ + --hash=sha256:2fb481682873035600b5502f0015b664abc26466153fab5c6bc92c1ea69d478b \ + --hash=sha256:3178d46f363d4549b9a76264f41c6948752183b3f587666aff0555ac50fd7876 \ 
+ --hash=sha256:4367da5705922cf7070462e964f66e4ac24162e22ab0a2e9d31f1b270dd78083 \ + --hash=sha256:4eb85075437f0b1fd8cd66c688469a0c4119e0ba855e3fef86691971b887caf6 \ + --hash=sha256:50a1494ed0c3f5b4d07650a68cd6ca62efe8b596ce743a5c94403e6f11bf06c1 \ + --hash=sha256:53049f3379ef05182864d13bb9686657659407148f901f3f1eee57a733fb4b00 \ + --hash=sha256:6391e59ebe7c62d9902c24a4d8bcbc79a68e7c4ab65863536127c8a9cd94043b \ + --hash=sha256:67461b5ebca2e4c2ab991733f8ab637a7265bb582f07c7c88914b5afb88cb95b \ + --hash=sha256:78e47e28ddc4ace41dd38c42e6feecfdadf9c3be2af389abbfeef1ff06822285 \ + --hash=sha256:80ca53981ceeb3241998443c4964a387771588c4e4a5d92735a493af868294f9 \ + --hash=sha256:8a4b2bdb68a447fadebfd7d24855758fe2d6fecc7fed0b78d190b1af39a8e3b0 \ + --hash=sha256:8e45653fb97eb2f20b8c96f9cd2b3a0654d742b47d638cf2897afbd97f80fa6d \ + --hash=sha256:998cd19189d8a747b226d24c0207fdaa1e6658a1d3f2494541cb9dfbf7dcb6d2 \ + --hash=sha256:a10498349d4c8eab7357a8f9aa3463791292845b79597ad1b98a543686fb1ec8 \ + --hash=sha256:b4cad0cea995af760f82820ab4ca54e5471fc782f70a007f31531957f43e9dee \ + --hash=sha256:bfe6472507986613dc6cc00b3d492b2f7564b02b3b3682d25ca7f40fa3fd321b \ + --hash=sha256:c9e0d79ee4c56d841bd4ac6e7697c8ff3c8d6da67379057f29e66acffcd1e9a7 \ + --hash=sha256:ca57eb3ddaccd1112c18fc80abe41db443cc2e9dcb1917078e02dfa010a4f353 \ + --hash=sha256:ce127dd0a6a0811c251a6cddd014d292728484e530d80e872ad9806cfb1c5b3c + # via awscli (pyproject.toml) +distro==1.5.0 \ + --hash=sha256:0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92 \ + --hash=sha256:df74eed763e18d10d0da624258524ae80486432cd17392d9c3d96f5e83cd2799 + # via awscli (pyproject.toml) +docutils==0.19 \ + --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ + --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc + # via awscli (pyproject.toml) +jmespath==1.0.1 \ + --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \ + 
--hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe + # via awscli (pyproject.toml) +pefile==2023.2.7 \ + --hash=sha256:82e6114004b3d6911c77c3953e3838654b04511b8b66e8583db70c65998017dc \ + --hash=sha256:da185cd2af68c08a6cd4481f7325ed600a88f6a813bad9dea07ab3ef73d8d8d6 + # via pyinstaller +prompt-toolkit==3.0.28 \ + --hash=sha256:30129d870dcb0b3b6a53efdc9d0a83ea96162ffd28ffe077e94215b233dc670c \ + --hash=sha256:9f1cd16b1e86c2968f2519d7fb31dd9d669916f515612c269d14e9ed52b51650 + # via awscli (pyproject.toml) +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pyinstaller==5.3 \ + --hash=sha256:066b83a0eae89ad418749e9e29429c152f1ff096230df11a093bbded8344ade0 \ + --hash=sha256:4c658a762cbbee5c5997c364578804d4c1e91d688de8ed018710c2705bf1474b \ + --hash=sha256:7591a9e1e2a481f99eb99036d6786e20717bc10f8f0a8ef519958cb3172fac7a \ + --hash=sha256:794e8e143ae73d1acdd2cbc52f02dd34cdfbd954ede34c7067ce68a268d8b7c2 \ + --hash=sha256:9efbad718fe29d425336f289871c67bfc6a1876013037fee2ef1f7613fd675a2 \ + --hash=sha256:a0e7a80fe04204add3f743101958a3cf62b79e7ccda838388784b1a35bb5b27f \ + --hash=sha256:aa9d1b8639d2402438c179ae1c8acfd41b65366c803a5a6484a5bb7586e88647 \ + --hash=sha256:b38505b445cdd64279f04650e0ddfe5ac6cef61996b14f06e3c99da8aac3cfbe \ + --hash=sha256:cae43e01e04f37185d23202aba8cf2837fa24ec3d0aa5ebc42e26f404e6eba95 \ + --hash=sha256:d4123992556951ed24e11cf2eec9a4e18e94ee8bd63ca49d9b7fc37387097eb9 \ + --hash=sha256:de71d4669806e4d54b23b477cc077e2e8fe9c4d57e79ed32d22b7585137fd7b7 + # via -r requirements\portable-exe-extras.txt +pyinstaller-hooks-contrib==2022.15 \ + --hash=sha256:55c1def8066d0279d06cd67eea30c12ffcdb961a5edeeaf361adac0164baef30 \ + --hash=sha256:73fd4051dc1620f3ae9643291cd9e2f47bfed582ade2eb05e3247ecab4a4f5f3 + # via pyinstaller +python-dateutil==2.8.2 \ + 
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via awscli (pyproject.toml) +pywin32-ctypes==0.2.0 \ + --hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \ + --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98 + # via pyinstaller +ruamel-yaml==0.17.21 \ + --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ + --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af + # via awscli (pyproject.toml) +ruamel-yaml-clib==0.2.7 \ + --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ + --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ + --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ + --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ + --hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ + --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ + --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ + --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ + --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ + --hash=sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94 \ + --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ + --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ + --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ + --hash=sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5 \ + --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ + --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ + 
--hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ + --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ + --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ + --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ + --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ + --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ + --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ + --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ + --hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ + --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ + --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ + --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ + --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ + --hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ + --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ + --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ + --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ + --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 + # via ruamel-yaml +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via python-dateutil +urllib3==1.26.14 \ + --hash=sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72 \ + --hash=sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1 + # via awscli (pyproject.toml) +wcwidth==0.2.6 \ + 
--hash=sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e \ + --hash=sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0 + # via prompt-toolkit + +# WARNING: The following packages were not pinned, but pip requires them to be +# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. +# setuptools diff --git a/requirements/download-deps/system-sandbox-lock.txt b/requirements/download-deps/system-sandbox-lock.txt new file mode 100644 index 000000000000..aae1679a2765 --- /dev/null +++ b/requirements/download-deps/system-sandbox-lock.txt @@ -0,0 +1,214 @@ +# +# This file is autogenerated by pip-compile with python 3.9 +# To update, run: +# +# pip-compile --generate-hashes --output-file=requirements/download-deps/system-sandbox-lock.txt --unsafe-package=flit-core --unsafe-package=pip --unsafe-package=setuptools --unsafe-package=wheel pyproject.toml +# +awscrt==0.16.10 \ + --hash=sha256:0a19c49442d8c5265ebdfc7ab50a1b00d13209742796a5700d43671dc69864bd \ + --hash=sha256:20844f811edb1babe867688f7528b9173a002b3aca851aea6b5659ae73c90fed \ + --hash=sha256:2f89df5ccbf84bffed3aa1de8c4b01a7405f961873279ab9cfc18f94c6ed6566 \ + --hash=sha256:3027db877ede48a4cd7b9a3e9e2c288c0c2ca6483da634d2c3fa00ebecf9cfb2 \ + --hash=sha256:3101499b479b82f0403f615bf520c1f8b9506818d275e3e28ade25cc6bcdb92b \ + --hash=sha256:3544d3c0dee23c4e3c2d8575523615a329d1aeb7a2dd0c7eccb42c329f9cbb41 \ + --hash=sha256:3811240c485ad7621ddfa912d08928a9ea639a5875e11439193b7f4f4508ac9a \ + --hash=sha256:48cd45afd66cbd0cc9a1fdf78669fbc630a0882993547250a3e08477f75dfe25 \ + --hash=sha256:4f6e6dab64a5a3955f9c7961b7cf64c8db05c79f7d78df5dd72c556d2f12d2d6 \ + --hash=sha256:562b15030307894458b750aaa6a5ef57d854e96a34666d18e32d036d274cec39 \ + --hash=sha256:644fbf3a9f6ab38f19d8a9f62903bdc1e05e75bbc6ac9a00c70eb1401394928f \ + --hash=sha256:6732eedf2638287eecddbced53889ac681a6efc8c028de3511c2707d33c70d88 \ + 
--hash=sha256:74664388bf47aaa41bf290639dfe69446833a17f2377fa9b8099f3bfa1221ea1 \ + --hash=sha256:74690f01b4ef534d06005a8ece46cee79337f5f93678e317c67c029300bd1bc2 \ + --hash=sha256:777913b731b784f6e62e13abde840669de06875c3c2fa717d5fd88b134eebfec \ + --hash=sha256:833f01cbad3328ad848f6354dd8a03260bef3422c3647b03354b303c6b8fa37c \ + --hash=sha256:9c33d58785651ef62e0e5d8ff0ba71d4835357c7fdef8dc3d2a01e80a6aa0500 \ + --hash=sha256:a05bcafd3ae22d9854a7d835d13aef89a28c00801336bde544c027e4dee61281 \ + --hash=sha256:a84e6e88ff192d4747382c5d6867a5a1f099aecbb8ae60cce7fd03610fb7439b \ + --hash=sha256:acf871eb39c790af597cbfb027f80c086aa95cf363ee957cbceede9614874c32 \ + --hash=sha256:ae2d6b93313a6134ed04850038f50fa29b2015d2928c725ec66361504fc898f5 \ + --hash=sha256:be0649f075b3a3c861ad47e0df199dfe6f46475eb674c9db473d8dec87a69646 \ + --hash=sha256:c41dc07ffcdaef74cc8bc5680c0257e906f3de87e0bc73d367d68a28a80690e7 \ + --hash=sha256:cda32c1b2045c96772a5f54e5b0cd5dd112ba7f0aaa4dc59bf0c1eae9f2aa476 \ + --hash=sha256:d2190af8785e9cc2de86c652a403e6a2ed99107ff8d9efad41a042687128273c \ + --hash=sha256:dcce455b7aa42e8bd868ec0dd50f5a7707e87acc4ead529fb356065cc139fe42 \ + --hash=sha256:e7103baa733c8ca056c64c1cf0e880d53a345c7339d5e79bb71f1b27a2d89e80 \ + --hash=sha256:ebc9666dc15f37e9fd47295a777109a4eddd644c6a817016b4d91f5fc9747b1b \ + --hash=sha256:f31bf234fb879c0bc7bd9bb46e6217e9d0e8939b31e8427ff555b30c15fe4f76 \ + --hash=sha256:f67c3cf0559254b1349d32e2ad002d794df7e90fad085ba918e9a34ec371f74e \ + --hash=sha256:fbe9d9870a26c9cc43878f7db828dfd697e10760a1ab47ee14363e1bb03903ad \ + --hash=sha256:fd7a468fff961c6a19da67fd128396084c064b8ca09fbbc9567680d447718620 + # via awscli (pyproject.toml) +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + 
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + 
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + 
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via cryptography +colorama==0.4.6 \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + # via awscli (pyproject.toml) +cryptography==38.0.4 \ + --hash=sha256:0e70da4bdff7601b0ef48e6348339e490ebfb0cbe638e083c9c41fb49f00c8bd \ + --hash=sha256:10652dd7282de17990b88679cb82f832752c4e8237f0c714be518044269415db \ + --hash=sha256:175c1a818b87c9ac80bb7377f5520b7f31b3ef2a0004e2420319beadedb67290 \ + --hash=sha256:1d7e632804a248103b60b16fb145e8df0bc60eed790ece0d12efe8cd3f3e7744 \ + --hash=sha256:1f13ddda26a04c06eb57119caf27a524ccae20533729f4b1e4a69b54e07035eb \ + --hash=sha256:2ec2a8714dd005949d4019195d72abed84198d877112abb5a27740e217e0ea8d \ + --hash=sha256:2fa36a7b2cc0998a3a4d5af26ccb6273f3df133d61da2ba13b3286261e7efb70 \ + --hash=sha256:2fb481682873035600b5502f0015b664abc26466153fab5c6bc92c1ea69d478b \ 
+ --hash=sha256:3178d46f363d4549b9a76264f41c6948752183b3f587666aff0555ac50fd7876 \ + --hash=sha256:4367da5705922cf7070462e964f66e4ac24162e22ab0a2e9d31f1b270dd78083 \ + --hash=sha256:4eb85075437f0b1fd8cd66c688469a0c4119e0ba855e3fef86691971b887caf6 \ + --hash=sha256:50a1494ed0c3f5b4d07650a68cd6ca62efe8b596ce743a5c94403e6f11bf06c1 \ + --hash=sha256:53049f3379ef05182864d13bb9686657659407148f901f3f1eee57a733fb4b00 \ + --hash=sha256:6391e59ebe7c62d9902c24a4d8bcbc79a68e7c4ab65863536127c8a9cd94043b \ + --hash=sha256:67461b5ebca2e4c2ab991733f8ab637a7265bb582f07c7c88914b5afb88cb95b \ + --hash=sha256:78e47e28ddc4ace41dd38c42e6feecfdadf9c3be2af389abbfeef1ff06822285 \ + --hash=sha256:80ca53981ceeb3241998443c4964a387771588c4e4a5d92735a493af868294f9 \ + --hash=sha256:8a4b2bdb68a447fadebfd7d24855758fe2d6fecc7fed0b78d190b1af39a8e3b0 \ + --hash=sha256:8e45653fb97eb2f20b8c96f9cd2b3a0654d742b47d638cf2897afbd97f80fa6d \ + --hash=sha256:998cd19189d8a747b226d24c0207fdaa1e6658a1d3f2494541cb9dfbf7dcb6d2 \ + --hash=sha256:a10498349d4c8eab7357a8f9aa3463791292845b79597ad1b98a543686fb1ec8 \ + --hash=sha256:b4cad0cea995af760f82820ab4ca54e5471fc782f70a007f31531957f43e9dee \ + --hash=sha256:bfe6472507986613dc6cc00b3d492b2f7564b02b3b3682d25ca7f40fa3fd321b \ + --hash=sha256:c9e0d79ee4c56d841bd4ac6e7697c8ff3c8d6da67379057f29e66acffcd1e9a7 \ + --hash=sha256:ca57eb3ddaccd1112c18fc80abe41db443cc2e9dcb1917078e02dfa010a4f353 \ + --hash=sha256:ce127dd0a6a0811c251a6cddd014d292728484e530d80e872ad9806cfb1c5b3c + # via awscli (pyproject.toml) +distro==1.5.0 \ + --hash=sha256:0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92 \ + --hash=sha256:df74eed763e18d10d0da624258524ae80486432cd17392d9c3d96f5e83cd2799 + # via awscli (pyproject.toml) +docutils==0.19 \ + --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ + --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc + # via awscli (pyproject.toml) +jmespath==1.0.1 \ + 
--hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \ + --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe + # via awscli (pyproject.toml) +prompt-toolkit==3.0.28 \ + --hash=sha256:30129d870dcb0b3b6a53efdc9d0a83ea96162ffd28ffe077e94215b233dc670c \ + --hash=sha256:9f1cd16b1e86c2968f2519d7fb31dd9d669916f515612c269d14e9ed52b51650 + # via awscli (pyproject.toml) +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via awscli (pyproject.toml) +ruamel-yaml==0.17.21 \ + --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ + --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af + # via awscli (pyproject.toml) +ruamel-yaml-clib==0.2.7 \ + --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ + --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ + --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ + --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ + --hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ + --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ + --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ + --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ + --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ + --hash=sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94 \ + --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ + 
--hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ + --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ + --hash=sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5 \ + --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ + --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ + --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ + --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ + --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ + --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ + --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ + --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ + --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ + --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ + --hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ + --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ + --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ + --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ + --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ + --hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ + --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ + --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ + --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ + --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 + # via ruamel-yaml +six==1.16.0 \ + 
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via python-dateutil +urllib3==1.26.14 \ + --hash=sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72 \ + --hash=sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1 + # via awscli (pyproject.toml) +wcwidth==0.2.6 \ + --hash=sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e \ + --hash=sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0 + # via prompt-toolkit diff --git a/requirements/download-deps/system-sandbox-win-lock.txt b/requirements/download-deps/system-sandbox-win-lock.txt new file mode 100644 index 000000000000..0e75d764e09b --- /dev/null +++ b/requirements/download-deps/system-sandbox-win-lock.txt @@ -0,0 +1,214 @@ +# +# This file is autogenerated by pip-compile with python 3.9 +# To update, run: +# +# pip-compile --generate-hashes --output-file='requirements\download-deps\system-sandbox-win-lock.txt' --unsafe-package=flit-core --unsafe-package=pip --unsafe-package=setuptools --unsafe-package=wheel 'pyproject.toml' +# +awscrt==0.16.10 \ + --hash=sha256:0a19c49442d8c5265ebdfc7ab50a1b00d13209742796a5700d43671dc69864bd \ + --hash=sha256:20844f811edb1babe867688f7528b9173a002b3aca851aea6b5659ae73c90fed \ + --hash=sha256:2f89df5ccbf84bffed3aa1de8c4b01a7405f961873279ab9cfc18f94c6ed6566 \ + --hash=sha256:3027db877ede48a4cd7b9a3e9e2c288c0c2ca6483da634d2c3fa00ebecf9cfb2 \ + --hash=sha256:3101499b479b82f0403f615bf520c1f8b9506818d275e3e28ade25cc6bcdb92b \ + --hash=sha256:3544d3c0dee23c4e3c2d8575523615a329d1aeb7a2dd0c7eccb42c329f9cbb41 \ + --hash=sha256:3811240c485ad7621ddfa912d08928a9ea639a5875e11439193b7f4f4508ac9a \ + --hash=sha256:48cd45afd66cbd0cc9a1fdf78669fbc630a0882993547250a3e08477f75dfe25 \ + --hash=sha256:4f6e6dab64a5a3955f9c7961b7cf64c8db05c79f7d78df5dd72c556d2f12d2d6 \ + 
--hash=sha256:562b15030307894458b750aaa6a5ef57d854e96a34666d18e32d036d274cec39 \ + --hash=sha256:644fbf3a9f6ab38f19d8a9f62903bdc1e05e75bbc6ac9a00c70eb1401394928f \ + --hash=sha256:6732eedf2638287eecddbced53889ac681a6efc8c028de3511c2707d33c70d88 \ + --hash=sha256:74664388bf47aaa41bf290639dfe69446833a17f2377fa9b8099f3bfa1221ea1 \ + --hash=sha256:74690f01b4ef534d06005a8ece46cee79337f5f93678e317c67c029300bd1bc2 \ + --hash=sha256:777913b731b784f6e62e13abde840669de06875c3c2fa717d5fd88b134eebfec \ + --hash=sha256:833f01cbad3328ad848f6354dd8a03260bef3422c3647b03354b303c6b8fa37c \ + --hash=sha256:9c33d58785651ef62e0e5d8ff0ba71d4835357c7fdef8dc3d2a01e80a6aa0500 \ + --hash=sha256:a05bcafd3ae22d9854a7d835d13aef89a28c00801336bde544c027e4dee61281 \ + --hash=sha256:a84e6e88ff192d4747382c5d6867a5a1f099aecbb8ae60cce7fd03610fb7439b \ + --hash=sha256:acf871eb39c790af597cbfb027f80c086aa95cf363ee957cbceede9614874c32 \ + --hash=sha256:ae2d6b93313a6134ed04850038f50fa29b2015d2928c725ec66361504fc898f5 \ + --hash=sha256:be0649f075b3a3c861ad47e0df199dfe6f46475eb674c9db473d8dec87a69646 \ + --hash=sha256:c41dc07ffcdaef74cc8bc5680c0257e906f3de87e0bc73d367d68a28a80690e7 \ + --hash=sha256:cda32c1b2045c96772a5f54e5b0cd5dd112ba7f0aaa4dc59bf0c1eae9f2aa476 \ + --hash=sha256:d2190af8785e9cc2de86c652a403e6a2ed99107ff8d9efad41a042687128273c \ + --hash=sha256:dcce455b7aa42e8bd868ec0dd50f5a7707e87acc4ead529fb356065cc139fe42 \ + --hash=sha256:e7103baa733c8ca056c64c1cf0e880d53a345c7339d5e79bb71f1b27a2d89e80 \ + --hash=sha256:ebc9666dc15f37e9fd47295a777109a4eddd644c6a817016b4d91f5fc9747b1b \ + --hash=sha256:f31bf234fb879c0bc7bd9bb46e6217e9d0e8939b31e8427ff555b30c15fe4f76 \ + --hash=sha256:f67c3cf0559254b1349d32e2ad002d794df7e90fad085ba918e9a34ec371f74e \ + --hash=sha256:fbe9d9870a26c9cc43878f7db828dfd697e10760a1ab47ee14363e1bb03903ad \ + --hash=sha256:fd7a468fff961c6a19da67fd128396084c064b8ca09fbbc9567680d447718620 + # via awscli (pyproject.toml) +cffi==1.15.1 \ + 
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + 
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + 
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via cryptography +colorama==0.4.6 \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + # via awscli (pyproject.toml) +cryptography==38.0.4 \ + --hash=sha256:0e70da4bdff7601b0ef48e6348339e490ebfb0cbe638e083c9c41fb49f00c8bd \ + --hash=sha256:10652dd7282de17990b88679cb82f832752c4e8237f0c714be518044269415db \ + --hash=sha256:175c1a818b87c9ac80bb7377f5520b7f31b3ef2a0004e2420319beadedb67290 \ + --hash=sha256:1d7e632804a248103b60b16fb145e8df0bc60eed790ece0d12efe8cd3f3e7744 \ + --hash=sha256:1f13ddda26a04c06eb57119caf27a524ccae20533729f4b1e4a69b54e07035eb \ 
+ --hash=sha256:2ec2a8714dd005949d4019195d72abed84198d877112abb5a27740e217e0ea8d \ + --hash=sha256:2fa36a7b2cc0998a3a4d5af26ccb6273f3df133d61da2ba13b3286261e7efb70 \ + --hash=sha256:2fb481682873035600b5502f0015b664abc26466153fab5c6bc92c1ea69d478b \ + --hash=sha256:3178d46f363d4549b9a76264f41c6948752183b3f587666aff0555ac50fd7876 \ + --hash=sha256:4367da5705922cf7070462e964f66e4ac24162e22ab0a2e9d31f1b270dd78083 \ + --hash=sha256:4eb85075437f0b1fd8cd66c688469a0c4119e0ba855e3fef86691971b887caf6 \ + --hash=sha256:50a1494ed0c3f5b4d07650a68cd6ca62efe8b596ce743a5c94403e6f11bf06c1 \ + --hash=sha256:53049f3379ef05182864d13bb9686657659407148f901f3f1eee57a733fb4b00 \ + --hash=sha256:6391e59ebe7c62d9902c24a4d8bcbc79a68e7c4ab65863536127c8a9cd94043b \ + --hash=sha256:67461b5ebca2e4c2ab991733f8ab637a7265bb582f07c7c88914b5afb88cb95b \ + --hash=sha256:78e47e28ddc4ace41dd38c42e6feecfdadf9c3be2af389abbfeef1ff06822285 \ + --hash=sha256:80ca53981ceeb3241998443c4964a387771588c4e4a5d92735a493af868294f9 \ + --hash=sha256:8a4b2bdb68a447fadebfd7d24855758fe2d6fecc7fed0b78d190b1af39a8e3b0 \ + --hash=sha256:8e45653fb97eb2f20b8c96f9cd2b3a0654d742b47d638cf2897afbd97f80fa6d \ + --hash=sha256:998cd19189d8a747b226d24c0207fdaa1e6658a1d3f2494541cb9dfbf7dcb6d2 \ + --hash=sha256:a10498349d4c8eab7357a8f9aa3463791292845b79597ad1b98a543686fb1ec8 \ + --hash=sha256:b4cad0cea995af760f82820ab4ca54e5471fc782f70a007f31531957f43e9dee \ + --hash=sha256:bfe6472507986613dc6cc00b3d492b2f7564b02b3b3682d25ca7f40fa3fd321b \ + --hash=sha256:c9e0d79ee4c56d841bd4ac6e7697c8ff3c8d6da67379057f29e66acffcd1e9a7 \ + --hash=sha256:ca57eb3ddaccd1112c18fc80abe41db443cc2e9dcb1917078e02dfa010a4f353 \ + --hash=sha256:ce127dd0a6a0811c251a6cddd014d292728484e530d80e872ad9806cfb1c5b3c + # via awscli (pyproject.toml) +distro==1.5.0 \ + --hash=sha256:0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92 \ + --hash=sha256:df74eed763e18d10d0da624258524ae80486432cd17392d9c3d96f5e83cd2799 + # via awscli (pyproject.toml) 
+docutils==0.19 \ + --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ + --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc + # via awscli (pyproject.toml) +jmespath==1.0.1 \ + --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \ + --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe + # via awscli (pyproject.toml) +prompt-toolkit==3.0.28 \ + --hash=sha256:30129d870dcb0b3b6a53efdc9d0a83ea96162ffd28ffe077e94215b233dc670c \ + --hash=sha256:9f1cd16b1e86c2968f2519d7fb31dd9d669916f515612c269d14e9ed52b51650 + # via awscli (pyproject.toml) +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via awscli (pyproject.toml) +ruamel-yaml==0.17.21 \ + --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ + --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af + # via awscli (pyproject.toml) +ruamel-yaml-clib==0.2.7 \ + --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ + --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ + --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ + --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ + --hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ + --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ + --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ + --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ + 
--hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ + --hash=sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94 \ + --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ + --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ + --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ + --hash=sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5 \ + --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ + --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ + --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ + --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ + --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ + --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ + --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ + --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ + --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ + --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ + --hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ + --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ + --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ + --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ + --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ + --hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ + --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ + --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ + 
--hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ + --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 + # via ruamel-yaml +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via python-dateutil +urllib3==1.26.14 \ + --hash=sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72 \ + --hash=sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1 + # via awscli (pyproject.toml) +wcwidth==0.2.6 \ + --hash=sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e \ + --hash=sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0 + # via prompt-toolkit diff --git a/requirements/portable-exe-extras.txt b/requirements/portable-exe-extras.txt new file mode 100644 index 000000000000..edb226469edc --- /dev/null +++ b/requirements/portable-exe-extras.txt @@ -0,0 +1 @@ +pyinstaller==5.3 diff --git a/scripts/ci/install b/scripts/ci/install index 58889d047e70..8bad7f65e089 100755 --- a/scripts/ci/install +++ b/scripts/ci/install @@ -14,10 +14,13 @@ def run(command): return check_call(command, shell=True) -run('pip install --no-build-isolation -r requirements-base.txt') -run('pip install --no-build-isolation -r requirements.txt') -if os.path.isdir('dist') and os.listdir('dist'): - shutil.rmtree('dist') -run('python -m build') -wheel_dist = glob.glob(os.path.join('dist', '*.whl'))[0] -run('pip install %s' % wheel_dist) +run("pip install --no-build-isolation -r requirements-base.txt") +run("pip install --no-build-isolation -r requirements.txt") +run( + "python -m pip install --no-build-isolation -r requirements/download-deps/bootstrap-lock.txt" +) +if os.path.isdir("dist") and os.listdir("dist"): + shutil.rmtree("dist") +run("python -m build") +wheel_dist = glob.glob(os.path.join("dist", "*.whl"))[0] +run("pip install %s" % 
wheel_dist) diff --git a/scripts/ci/install-build-system b/scripts/ci/install-build-system new file mode 100755 index 000000000000..094ae212a432 --- /dev/null +++ b/scripts/ci/install-build-system @@ -0,0 +1,61 @@ +#!/usr/bin/env python3 +import argparse +import tarfile +import tempfile +import os +import glob +import shutil + +from pathlib import Path +from subprocess import check_call + +REPO_ROOT = Path(__file__).parents[2] +os.chdir(REPO_ROOT) + + +def run(command, cwd=None): + print(f"Running command: {command}") + return check_call(command, shell=True, cwd=cwd) + + +def main(sdist_path=None): + run("pip install --no-build-isolation -r requirements-base.txt") + run("pip install --no-build-isolation -r requirements.txt") + run("pip install --no-build-isolation -r requirements-test.txt") + run( + "pip install --no-build-isolation -r requirements/download-deps/bootstrap-lock.txt" + ) + if sdist_path is None: + wheel_dist = _build_sdist_and_wheel() + else: + wheel_dist = _build_wheel(sdist_path) + run("pip install %s" % wheel_dist) + + +def _build_wheel(sdist_path): + build_dir = REPO_ROOT / "dist" + with tempfile.TemporaryDirectory() as tempdir: + with tarfile.open(sdist_path, "r:gz") as tar: + tar.extractall(tempdir) + unpacked_sdist = os.path.join(tempdir, os.listdir(tempdir)[0]) + run(f"python -m build -w -o {build_dir}", cwd=unpacked_sdist) + return _find_wheel_file() + return wheel_dist + + +def _build_sdist_and_wheel(): + if os.path.isdir("dist") and os.listdir("dist"): + shutil.rmtree("dist") + run("python -m build") + return _find_wheel_file() + + +def _find_wheel_file(): + return glob.glob(os.path.join("dist", "*.whl"))[0] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--use-sdist", default=None, type=os.path.abspath) + args = parser.parse_args() + main(args.use_sdist) diff --git a/scripts/ci/run-backend-tests b/scripts/ci/run-backend-tests index ad135171a9e4..e8c8a9ba98ef 100755 --- 
a/scripts/ci/run-backend-tests +++ b/scripts/ci/run-backend-tests @@ -29,5 +29,5 @@ if __name__ == "__main__": with cd(os.path.join(REPO_ROOT, "tests")): run( f"{sys.executable} {RUN_TESTS_SCRIPTS} " - f"backends --allow-repo-root-on-path" + f"backends --ignore backends/build_system --allow-repo-root-on-path" ) diff --git a/scripts/ci/run-build-system-tests b/scripts/ci/run-build-system-tests new file mode 100755 index 000000000000..3fae903657e0 --- /dev/null +++ b/scripts/ci/run-build-system-tests @@ -0,0 +1,44 @@ +#!/usr/bin/env python3 +import argparse +import os +import sys + +from contextlib import contextmanager +from pathlib import Path +from subprocess import check_call + +REPO_ROOT = Path(__file__).parents[2].absolute() +RUN_TESTS_SCRIPTS = os.path.join(REPO_ROOT, "scripts", "ci", "run-tests") + + +@contextmanager +def cd(path): + """Change directory while inside context manager.""" + cwd = os.getcwd() + try: + os.chdir(path) + yield + finally: + os.chdir(cwd) + + +def run(command): + return check_call(command, shell=True) + + +def main(tests_path=None): + if tests_path is None: + tests_path = REPO_ROOT / "tests" + with cd(tests_path): + run( + f"{sys.executable} {RUN_TESTS_SCRIPTS} " + f"--tests-path {tests_path} " + f"backends/build_system/ --allow-repo-root-on-path" + ) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--tests-path", default=None, type=os.path.abspath) + args = parser.parse_args() + main(args.tests_path) diff --git a/scripts/ci/run-tests b/scripts/ci/run-tests index 9f04c7ffe3dd..663b7f1a506e 100755 --- a/scripts/ci/run-tests +++ b/scripts/ci/run-tests @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Don't run tests from the root repo dir. # We want to ensure we're importing from the installed # binary package not from the CWD. 
@@ -34,6 +34,10 @@ def run(command, allow_repo_root_on_path=False): def process_args(args): runner = args.test_runner test_args = "" + if args.ignore: + test_args += " ".join(f"--ignore {ignore}" for ignore in args.ignore) + test_args += " " + if args.with_cov: test_args += ( # Even though botocore and s3transfer reside in the awscli package, @@ -82,10 +86,31 @@ if __name__ == "__main__": "distribution." ) ) + parser.add_argument( + "--ignore", + nargs='+', + default=[], + help=( + "Ignore a test subdirectory. Can be specified multiple times." + ) + ) + parser.add_argument( + "--tests-path", + default=None, + type=os.path.abspath, + help=( + "Optional path to an alternate test directory to use." + ) + ) raw_args = parser.parse_args() test_runner, test_args, test_dirs = process_args(raw_args) + tests_path = raw_args.tests_path + if tests_path is None: + tests_path = os.path.join(REPO_ROOT, "tests") + cmd = f"{test_runner} {test_args}{test_dirs}" + print(f"CDing to {tests_path}") print(f"Running {cmd}...") - with cd(os.path.join(REPO_ROOT, "tests")): + with cd(tests_path): run(cmd, allow_repo_root_on_path=raw_args.allow_repo_root_on_path) diff --git a/scripts/regenerate-configure/Dockerfile b/scripts/regenerate-configure/Dockerfile new file mode 100644 index 000000000000..be4d15e3cc6b --- /dev/null +++ b/scripts/regenerate-configure/Dockerfile @@ -0,0 +1,26 @@ +FROM public.ecr.aws/amazonlinux/amazonlinux:2 +ENV AUTOCONF_VERSION=2.71 +ENV AUTOMAKE_VERSION=1.16.5 + +WORKDIR /build +COPY ./ /build +RUN yum -y update +RUN yum -y install curl xz gzip m4 perl perl-Data-Dumper make tar help2man cpanminus +RUN cpanm Thread::Queue + +WORKDIR /autoreconf +RUN curl https://ftp.gnu.org/gnu/autoconf/autoconf-${AUTOCONF_VERSION}.tar.gz | tar -xz +WORKDIR /autoreconf/autoconf-${AUTOCONF_VERSION} +RUN ./configure +RUN make +RUN make install + +WORKDIR /automake +RUN curl https://ftp.gnu.org/gnu/automake/automake-${AUTOMAKE_VERSION}.tar.xz | tar -xJ +WORKDIR 
#!/usr/bin/env python3
"""Regenerate the autotools ``configure`` script.

Builds a Docker image with pinned autoconf/automake versions, runs it
(``autoreconf`` runs during the build), and copies the generated
``configure`` file out of the container into the repository root.
"""
import argparse
import re
from pathlib import Path
from subprocess import run


ROOT = Path(__file__).parents[2]
DOCKERFILE_PATH = ROOT / "scripts" / "regenerate-configure" / "Dockerfile"
# Extracts the image digest from `docker build --quiet` output, which looks
# like "sha256:<digest>\n".
# Fixed: the original pattern "(?P.*?)" is an invalid (mangled) named group
# and raises re.error at import time.
IMAGE_RE = re.compile(r"sha256:(?P<image>.*?)\s")


def main(cleanup):
    image = _build_image()
    container_id = _start_image(image)
    _extract_configure_file(container_id)
    if cleanup:
        _cleanup_image_and_container(image, container_id)


def _build_image():
    """Build the Docker image and return its digest."""
    print(f"Building docker image from: {DOCKERFILE_PATH}")
    result = _docker(
        ["build", "-f", str(DOCKERFILE_PATH), ".", "--quiet"],
        cwd=ROOT,
    )
    _assert_success(result)
    match = IMAGE_RE.search(result.stdout.decode())
    # Fixed: the original `match.groups("image")[0]` treats "image" as the
    # *default* for non-participating groups; group("image") is the correct
    # named-group lookup.
    image = match.group("image")
    print(f"Image created: {image}")
    return image


def _start_image(image_id):
    """Run the image to completion and return the container's id."""
    print("Starting image")
    run_result = _docker(["run", image_id])
    _assert_success(run_result)

    # `ps --latest` selects the most recently created container, which by
    # now has exited.
    ps_result = _docker(["ps", "--latest", "-q"])
    _assert_success(ps_result)

    container_id = ps_result.stdout.decode().strip()
    print(f"ContainerID: {container_id}")
    return container_id


def _extract_configure_file(container_id):
    """Copy /build/configure out of the container over the repo's copy."""
    src = f"{container_id}:/build/configure"
    dst = f"{ROOT}/configure"
    result = _docker(["cp", src, dst])
    _assert_success(result)
    print("Replaced configure file")


def _cleanup_image_and_container(image, container_id):
    """Remove the temporary container and image (best-effort, not asserted)."""
    _docker(["container", "stop", container_id])
    _docker(["container", "rm", container_id])
    _docker(["image", "rm", image])
    print(f"Deleted image: {image} container: {container_id}")


def _docker(command, cwd=None):
    """Run a docker subcommand, capturing stdout/stderr."""
    full_command = ["docker"] + command
    return run(full_command, capture_output=True, cwd=cwd)


def _assert_success(result):
    """Raise AssertionError with the captured output on a non-zero exit."""
    assert result.returncode == 0, (
        result.stdout.decode() + result.stderr.decode()
    )


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--no-cleanup",
        action="store_true",
        default=False,
        help="Do not clean up docker image and container. Useful for debugging.",
    )
    args = parser.parse_args()
    main(not args.no_cleanup)
In order to run it you need to have pip-tools installed into the +curerntly active virtual environment.""" +import argparse +import sys +import os +from typing import List, Optional, ClassVar +from pathlib import Path +from dataclasses import dataclass + +from utils import run, BadRCError + + +ROOT = Path(__file__).parents[1] +IS_WINDOWS = sys.platform == "win32" +LOCK_SUFFIX = "win-lock.txt" if IS_WINDOWS else "lock.txt" + + +@dataclass +class LockFileBuilder: + _UNSAFE_PACKAGES: ClassVar[List[str]] = [ + "flit-core", + "setuptools", + "pip", + "wheel", + ] + + source_directory: Path + build_directory: Path + + def raise_if_no_pip_compile(self): + try: + self._pip_compile(["-h"]) + except BadRCError: + raise RuntimeError( + "Must have pip-tools installed to run this script, run the following:\npip install pip-tools" + ) + + def build_lock_file( + self, sources: List[Path], output: Path, allow_unsafe=False + ): + output_path = self._full_output_path(output) + self._delete_file(output_path) + args = self._pip_compile_args(sources, output_path) + result = self._pip_compile(args, allow_unsafe) + self._overwrite_paths(output_path) + + def _full_output_path(self, output: Path) -> Path: + lock_path = self.build_directory / f"{output}-{LOCK_SUFFIX}" + return lock_path + + def _delete_file(self, path: Path): + try: + os.remove(path) + print(f"Removed existing file: {path}") + except FileNotFoundError: + pass + + def _pip_compile_args(self, sources: List[str], lock_path: Path): + args = [f"--output-file={lock_path}"] + for source in sources: + args.append(self.source_directory / source) + return args + + def _pip_compile(self, args: List[str], allow_unsafe: bool = False): + command = [ + sys.executable, + "-m", + "piptools", + "compile", + "--generate-hashes", + ] + for unsafe in self._UNSAFE_PACKAGES: + command.append("--unsafe-package") + command.append(unsafe) + if allow_unsafe: + command += ["--allow-unsafe"] + command += args + return run(command, 
cwd=self.build_directory) + + def _overwrite_paths(self, output_path: Path): + rel_output_path = os.path.relpath(output_path, self.build_directory) + with open(output_path, "r") as f: + content = f.read() + # Overwrite absolute path in --output-file argument. + content = content.replace(str(output_path), str(rel_output_path)) + # Overwrite absolute paths in the source arguments. + content = content.replace(f"{self.source_directory}{os.sep}", "") + with open(output_path, "w") as f: + f.write(content) + + +def main(build_directory: Path): + builder = LockFileBuilder( + source_directory=ROOT, + build_directory=build_directory, + ) + builder.raise_if_no_pip_compile() + + builder.build_lock_file( + sources=[Path("requirements/download-deps/bootstrap.txt")], + output=Path("requirements/download-deps/bootstrap"), + allow_unsafe=True, + ) + builder.build_lock_file( + sources=[ + Path("requirements/portable-exe-extras.txt"), + "pyproject.toml", + ], + output=Path("requirements", "download-deps", "portable-exe"), + ) + builder.build_lock_file( + sources=["pyproject.toml"], + output=Path("requirements/download-deps/system-sandbox"), + ) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + "--output-directory", + default=ROOT, + type=Path, + help=("Default base directory where output lock files to be written."), + ) + args = parser.parse_args() + main(args.output_directory) diff --git a/tests/__init__.py b/tests/__init__.py index 1df09e60f7a4..8c5626151047 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -27,7 +27,7 @@ from awscli.clidriver import create_clidriver, AWSCLIEntryPoint from awscli.compat import collections_abc, six from awscli.testutils import ( - unittest, mock, capture_output, skip_if_windows, create_bucket, + unittest, mock, capture_output, if_windows, skip_if_windows, create_bucket, FileCreator, ConsistencyWaiter ) diff --git a/tests/backends/build_system/__init__.py b/tests/backends/build_system/__init__.py 
new file mode 100644 index 000000000000..92338204f7fa --- /dev/null +++ b/tests/backends/build_system/__init__.py @@ -0,0 +1,12 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. diff --git a/tests/backends/build_system/conftest.py b/tests/backends/build_system/conftest.py new file mode 100644 index 000000000000..34dc7e7b9dd4 --- /dev/null +++ b/tests/backends/build_system/conftest.py @@ -0,0 +1,23 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
import sys
from pathlib import Path


ROOT = Path(__file__).parents[3]
BACKENDS = ROOT / "backends"
BUILD_SYSTEM = ROOT / "backends" / "build_system"

# Put the build-system sources on sys.path so the tests can import them
# both as the `build_system` package and as top-level modules.
for _extra in (BACKENDS, BUILD_SYSTEM):
    sys.path.append(str(_extra))
+import contextlib +import json +import re +import os +import sys +import pathlib +import subprocess + +import pytest +import flit_core.buildapi + +from build_system.awscli_venv import AwsCliVenv +from build_system.constants import ArtifactType + +from awscli.testutils import skip_if_windows +from awscli.testutils import if_windows +from backends.build_system.constants import BIN_DIRNAME, PYTHON_EXE_NAME + +ROOT_DIR = pathlib.Path(__file__).parents[4] + + +@contextlib.contextmanager +def cd(dirname): + original = os.getcwd() + os.chdir(dirname) + try: + yield + finally: + os.chdir(original) + + +@pytest.fixture(scope="session") +def venv_path(tmp_path_factory): + return tmp_path_factory.mktemp("venv") + + +@pytest.fixture(scope="session") +def cli_venv(venv_path): + venv = AwsCliVenv(venv_path) + venv.create() + return venv + + +class TestAwsCliVenv: + def _normalize_dependency_entry(self, dep: str) -> str: + dep = re.split("[<=>]", dep)[0] + dep = dep.rstrip("<=>") + dep = dep.lower() + dep = dep.replace("-", "_") + return dep + + def _normalize_dist_info_name(self, name: str) -> str: + name = name.split("-")[0] + name = name.lower() + return name + + def _get_install_requires(self): + with cd(ROOT_DIR): + requires = flit_core.buildapi.get_requires_for_build_wheel() + # Generation of the auto-complete index requires importing from the + # awscli package and iterating over the commands from the clidriver. In + # order to be able to do this, it requires all of the CLI's runtime + # dependencies to be present to avoid import errors. 
+ dependency_block_re = re.compile( + r"dependencies = \[([\s\S]+?)\]", re.MULTILINE + ) + extract_dependencies_re = re.compile(r'"(.+)"') + with open(ROOT_DIR / "pyproject.toml", "r") as f: + data = f.read() + raw_dependencies = dependency_block_re.findall(data)[0] + dependencies = extract_dependencies_re.findall(raw_dependencies) + return dependencies + requires + + def _python_version(self) -> str: + info = sys.version_info + return f"python{info[0]}.{info[1]}" + + def _site_packages_dir(self, venv_path: pathlib.PurePath) -> str: + site_path = [path for path in json.loads( + subprocess.check_output( + [ + venv_path / BIN_DIRNAME / PYTHON_EXE_NAME, + "-c", + "import site, json; print(json.dumps(site.getsitepackages()))", + ] + ) + .decode() + .strip() + ) if "site-packages" in path][0] + return site_path + + @skip_if_windows("Posix virtualenv") + def test_create(self, tmp_path_factory): + path = tmp_path_factory.mktemp("test_create") + venv = AwsCliVenv(path) + venv.create() + + venv_dirs = set(os.listdir(path)) + required_files = [ + "bin", + "include", + "pyvenv.cfg", + ] + for required_file in required_files: + assert required_file in venv_dirs + + @if_windows("Windows virtualenv") + def test_create_windows(self, tmp_path_factory): + path = tmp_path_factory.mktemp("test_create") + venv = AwsCliVenv(path) + venv.create() + + venv_dirs = set(os.listdir(path)) + required_files = [ + "Scripts", + "Include", + "Lib", + "pyvenv.cfg", + ] + for required_file in required_files: + assert required_file in venv_dirs + + @skip_if_windows("Posix bootstrap") + def test_bootstrap(self, cli_venv, venv_path): + site_package_path = self._site_packages_dir(venv_path) + + prior_site_dir = set(os.listdir(site_package_path)) + cli_venv.bootstrap( + ArtifactType.SYSTEM_SANDBOX.value, download_deps=False + ) + post_site_dir = set(os.listdir(site_package_path)) + + # Check that parent packages were installed + added_packages = { + self._normalize_dist_info_name(p) + for p in 
post_site_dir - prior_site_dir + if "dist-info" in p or "egg-info" in p + } + + expected_packages = { + self._normalize_dependency_entry(r) + for r in self._get_install_requires() + } + missing_packages = expected_packages - added_packages + + assert missing_packages == set() + + # Check that the CLI is installed + cli_path = ( + venv_path + / "lib" + / self._python_version() + / "site-packages" + / "awscli" + ) + assert os.path.isdir(cli_path) is True + + # Make sure the ac.index got generated and injected correctly. + ac_index_path = cli_path / "data" / "ac.index" + assert os.path.exists(ac_index_path) is True + + @if_windows("Windows bootstrap") + def test_bootstrap_windows(self, cli_venv, venv_path): + site_package_path = self._site_packages_dir(venv_path) + + prior_site_dir = set(os.listdir(site_package_path)) + cli_venv.bootstrap( + ArtifactType.SYSTEM_SANDBOX.value, download_deps=False + ) + post_site_dir = set(os.listdir(site_package_path)) + + # Check that parent packages were installed + added_packages = { + self._normalize_dist_info_name(p) + for p in post_site_dir - prior_site_dir + if "dist-info" in p + } + + expected_packages = { + self._normalize_dependency_entry(r) + for r in self._get_install_requires() + } + missing_packages = expected_packages - added_packages + + assert missing_packages == set() + + # Check that the CLI is installed + cli_path = venv_path / "Lib" / "site-packages" / "awscli" + assert os.path.isdir(cli_path) is True + + # Make sure the ac.index got generated and injected correctly. 
+ ac_index_path = cli_path / "data" / "ac.index" + assert os.path.exists(ac_index_path) is True + + @skip_if_windows("No bin dir on windows") + def test_bin_dir(self, cli_venv, venv_path): + assert cli_venv.bin_dir == os.path.join(venv_path, "bin") + + @if_windows("Scripts dir is only on windows") + def test_scripts_dir(self, cli_venv, venv_path): + assert cli_venv.bin_dir == os.path.join(venv_path, "Scripts") + + @skip_if_windows("Python binary location on posix") + def test_python_exe(self, cli_venv, venv_path): + assert cli_venv.python_exe == os.path.join(venv_path, "bin", "python") + + @if_windows("Python binary location on win") + def test_python_exe_windows(self, cli_venv, venv_path): + assert cli_venv.python_exe == os.path.join( + venv_path, "Scripts", "python.exe" + ) diff --git a/tests/backends/build_system/functional/test_lock_files.py b/tests/backends/build_system/functional/test_lock_files.py new file mode 100644 index 000000000000..33ee028928ad --- /dev/null +++ b/tests/backends/build_system/functional/test_lock_files.py @@ -0,0 +1,168 @@ +import re +import sys +import os +from subprocess import run +from pathlib import Path + +import pytest + + +ROOT = Path(__file__).parents[4] +REQUIREMENTS_PATH = ROOT / "requirements" +REGENERATE_LOCK_FILE_SCRIPT_PATH = ROOT / "scripts" / "regenerate-lock-files" + +# Lockfiles are generated on the CANNONICAL_PYTHON_VERSION +# These tests will be skipped on any other version of python. 
# (Renamed from the misspelled CANNONICAL_* — both names are private to
# this module.)
CANONICAL_PYTHON_VERSION = "3.9"
IS_CANONICAL_PYTHON_VERSION = sys.version_info[0:2] == tuple(
    map(int, CANONICAL_PYTHON_VERSION.split("."))
)
SKIP_REASON = f"Lock files are generated on Python {CANONICAL_PYTHON_VERSION}"

IS_LINUX = sys.platform == "linux"


def should_read_line(line):
    """Return True for lines that are not comment lines."""
    return not line.strip().startswith("#")


def read_lock_file(path):
    """Return only the directly-requested entries of a lock file.

    Transitive pins are dropped so that two regenerations can be compared
    on just the dependencies the requirements files explicitly name.
    """
    with open(path, 'r') as f:
        lines = f.read().split('\n')

    # The pip-compile banner names the requirement files that were compiled.
    source_files = get_source_files(lines)
    dependencies = []
    for source_file in source_files:
        dependencies.extend(get_dependency_names(source_file))

    # Filter out comments.
    lines = [line for line in lines if should_read_line(line)]

    # Filter out transitive dependencies; we only care about the ones
    # explicitly mentioned in the requirements files. Continuation lines
    # (hashes) are indented, so they inherit their package's reading state.
    reading = False
    relevant_lines = []
    for line in lines:
        if line and line[0] != ' ':
            reading = get_name_component(line) in dependencies
        if reading:
            relevant_lines.append(line)
    return "\n".join(relevant_lines)


def get_name_component(dependency):
    """Strip any version specifier, leaving just the package name."""
    return re.split(r'(>|<|>=|<=|==)', dependency)[0]


def get_requires_from_pyproject():
    """Extract the dependencies list from pyproject.toml with regexes."""
    dependency_block_re = re.compile(
        r"dependencies = \[([\s\S]+?)\]\s", re.MULTILINE
    )
    extract_dependencies_re = re.compile(r'"(.+)"')
    with open(ROOT / "pyproject.toml", "r") as f:
        data = f.read()
    raw_dependencies = dependency_block_re.findall(data)[0]
    return extract_dependencies_re.findall(raw_dependencies)


def get_dependency_names(filename):
    """Return the package names a requirements file (or pyproject) declares.

    '-r other.txt' include lines are followed recursively.
    """
    if not filename.endswith('.txt'):
        # Non-.txt source means the pyproject.toml dependencies block.
        return [
            get_name_component(dep) for dep in get_requires_from_pyproject()
        ]

    dependencies = []
    filepath = ROOT / filename
    with open(filepath) as f:
        lines = f.read().split('\n')
    for line in lines:
        if not line:
            continue
        if line.startswith('-r'):
            directory = os.path.dirname(filepath)
            new_filename = line.split(' ')[1]
            new_full_filepath = os.path.abspath(
                os.path.join(directory, new_filename)
            )
            dependencies.extend(get_dependency_names(new_full_filepath))
        else:
            dependencies.append(get_name_component(line))
    return dependencies


def get_source_files(lines):
    """Return the source file names recorded in the pip-compile banner line.

    Fixed: the original condition was `'pip-compile' and 'generate-hashes'
    in line`, which only ever tested the second operand ('pip-compile' is a
    truthy constant).
    """
    for line in lines:
        if 'pip-compile' in line and 'generate-hashes' in line:
            # Drop the leading comment character before splitting.
            line = line[1:]
            return [
                part for part in line.split(' ')
                if part and not part.startswith('--') and part != 'pip-compile'
            ]
    # No banner found: treat the file as declaring no sources rather than
    # returning None (which would crash the caller's iteration).
    return []


def is_lockfile(path: str) -> bool:
    return path.endswith("-lock.txt")


def lockfile_paths(root):
    """Yield every lock file path under *root*, recursively."""
    for base, _, filenames in os.walk(root):
        for filename in filenames:
            path = os.path.join(base, filename)
            if is_lockfile(path):
                yield path


@pytest.mark.skipif(
    not IS_CANONICAL_PYTHON_VERSION,
    reason=SKIP_REASON,
)
def test_all_lock_files_are_generated_by_expected_python_version():
    for path in lockfile_paths(REQUIREMENTS_PATH):
        with open(path, "r") as f:
            content = f.read()
        assert f"python {CANONICAL_PYTHON_VERSION}" in content


@pytest.mark.skipif(
    not IS_CANONICAL_PYTHON_VERSION,
    reason=SKIP_REASON,
)
@pytest.mark.skipif(
    IS_LINUX,
    reason=(
        "The linux lock files are generated on mac. So the only "
        "platforms we need to test are mac and Windows."
    ),
)
def test_lock_files_are_up_to_date(tmpdir):
    # Regenerate the lock files into a scratch directory and compare their
    # direct dependencies against the checked-in files.
    reqs_dir = tmpdir / "requirements"
    reqs_dir.mkdir()
    download_deps_dir = reqs_dir / "download-deps"
    download_deps_dir.mkdir()
    command = [
        sys.executable,
        REGENERATE_LOCK_FILE_SCRIPT_PATH,
        "--output-directory",
        tmpdir,
    ]
    result = run(command, cwd=ROOT)
    assert result.returncode == 0

    lockfile_mapping = {
        path: path.replace(str(tmpdir), str(ROOT))
        for path in lockfile_paths(tmpdir)
    }

    for regenerated_file, original_file in lockfile_mapping.items():
        assert read_lock_file(regenerated_file) == read_lock_file(
            original_file
        )
+import os +import sys +import json +import platform +from typing import List + +import pytest + +from build_system.utils import Utils +from build_system.utils import parse_requirements +from build_system.utils import ParseError +from build_system.utils import Requirement +from build_system.utils import UnmetDependenciesException + +from tests.backends.build_system.markers import skip_if_windows, if_windows + + +@pytest.fixture +def utils(): + return Utils() + + +@pytest.mark.parametrize( + "lines,expected", + [ + ( + "flit_core>=3.7.1,<3.7.2", + Requirement("flit_core", ">=3.7.1", "<3.7.2"), + ), + ( + "colorama>=0.2.5,<0.4.4", + Requirement("colorama", ">=0.2.5", "<0.4.4"), + ), + ("docutils>=0.10,<0.16", Requirement("docutils", ">=0.10", "<0.16")), + ( + "cryptography>=3.3.2,<37.0.0", + Requirement("cryptography", ">=3.3.2", "<37.0.0"), + ), + ( + "ruamel.yaml>=0.15.0,<=0.17.21", + Requirement("ruamel.yaml", ">=0.15.0", "<=0.17.21"), + ), + ("wcwidth<0.2.0", Requirement("wcwidth", "<0.2.0")), + ( + "prompt-toolkit>=3.0.24,<3.0.29", + Requirement("prompt-toolkit", ">=3.0.24", "<3.0.29"), + ), + ("distro>=1.5.0,<1.6.0", Requirement("distro", ">=1.5.0", "<1.6.0")), + ( + "awscrt>=0.12.4,<=0.14.0", + Requirement("awscrt", ">=0.12.4", "<=0.14.0"), + ), + ( + "python-dateutil>=2.1,<3.0.0", + Requirement("python-dateutil", ">=2.1", "<3.0.0"), + ), + ( + "jmespath>=0.7.1,<1.1.0", + Requirement("jmespath", ">=0.7.1", "<1.1.0"), + ), + ("urllib3>=1.25.4,<1.27", Requirement("urllib3", ">=1.25.4", "<1.27")), + (["urllib3>=1.\\\\", "25.4,<1.27"], Requirement("urllib3", ">=1.25.4", "<1.27")), + ("#urllib3>=1.25.4,<1.27", None), + ("urllib3>=1.25.4,<1.27 #foobarbaz", Requirement("urllib3", ">=1.25.4", "<1.27")), + ("urllib3>=1.25.4,<1.27 ; python_version == 3.6", ParseError), + ], +) +def test_parse_requirements(lines, expected): + if isinstance(lines, str): + lines = [lines] + if isinstance(expected, Requirement): + req = list(parse_requirements(lines))[0] + assert req == 
expected + elif expected is None: + assert len(list(parse_requirements(lines))) == 0 + else: + with pytest.raises(expected): + list(parse_requirements(lines)) + + +@pytest.mark.parametrize( + "req,version,expected", + [ + (Requirement("foo", "==1.0"), "1.0", True), + (Requirement("foo", "==1.0"), "1.0.0", True), + (Requirement("foo", "==1.0.0"), "1.0", True), + (Requirement("foo", "==1.0"), "2.0", False), + (Requirement("foo", ">=1.0", "<2.0"), "1.1", True), + (Requirement("foo", ">=3.7.1", "<3.7.2"), "3.7.1", True), + (Requirement("foo", ">=3.7.1", "<3.7.2"), "3.7.2", False), + (Requirement("foo", ">=3.7.1", "<3.7.2"), "1.1.1", False), + (Requirement("foo", ">=3.7.1", "<3.7.2"), "1.1.1", False), + (Requirement("foo", ">=0.15.0", "<=0.17.21"), "0.14.0", False), + (Requirement("foo", ">=0.15.0", "<=0.17.21"), "0.14.99999", False), + (Requirement("foo", ">=0.15.0", "<=0.17.21"), "0.14.99999.123", False), + (Requirement("foo", ">=0.15.0", "<=0.17.21"), "0.15.0", True), + (Requirement("foo", ">=0.15.0", "<=0.17.21"), "0.16.0", True), + (Requirement("foo", ">=0.15.0", "<=0.17.21"), "0.17.0", True), + (Requirement("foo", ">=0.15.0", "<=0.17.21"), "0.17.21", True), + (Requirement("foo", ">=0.15.0", "<=0.17.21"), "0.17.22", False), + (Requirement("foo", ">=0.15.0", "<=0.17.21"), "0.18.0", False), + ], +) +def test_requirement_ranges(req, version, expected): + assert req.is_in_range(version) == expected + + +class TestUtils: + def test_isdir(self, utils: Utils, tmp_path): + file_path = tmp_path / "filename.txt" + file_path.write_text("foo") + + assert utils.isdir(tmp_path) is True + assert utils.isdir(file_path) is False + + def test_path_exists(self, utils: Utils, tmp_path): + file_path = tmp_path / "filename.txt" + file_path.write_text("foo") + fake_path = tmp_path / "fake" + + assert utils.path_exists(file_path) is True + assert utils.path_exists(fake_path) is False + + def test_rmtree(self, utils: Utils, tmp_path): + file_path = tmp_path / "root.txt" + 
file_path.write_text("foo") + subdir_path = tmp_path / "dir" + subdir_path.mkdir() + subfile_path = subdir_path / "subfile.txt" + subfile_path.write_text("bar") + + utils.rmtree(tmp_path) + assert os.path.exists(tmp_path) is False + + @skip_if_windows + def test_run(self, utils: Utils): + assert utils.run(["test", "1", "==", "2"]).returncode == 1 + assert utils.run(["test", "1", "==", "1"]).returncode == 0 + + @if_windows + def test_run_windows(self, utils: Utils): + assert utils.run(["exit", "1"], shell=True).returncode == 1 + assert utils.run(["exit"], shell=True).returncode == 0 + + def test_copy_file(self, utils: Utils, tmp_path): + src_path = tmp_path / "source.txt" + src_path.write_text("foo") + dst_path = tmp_path / "destination.txt" + + utils.copy_file(src_path, dst_path) + with open(src_path, "r") as f: + src = f.read() + with open(dst_path, "r") as f: + dst = f.read() + + assert src == dst + + def test_copy_directory_contents_into(self, utils: Utils, tmp_path): + src_path = tmp_path / "src" + src_path.mkdir() + (src_path / "file").write_text("foo") + dst_path = tmp_path / "dst" + dst_path.mkdir() + + utils.copy_directory_contents_into(str(src_path), str(dst_path)) + + assert open((dst_path / "file"), "r").read() == "foo" + + def test_copy_directory(self, utils: Utils, tmp_path): + src_path = tmp_path / "src" + src_path.mkdir() + (src_path / "file").write_text("foo") + dst_path = tmp_path / "dst" + + utils.copy_directory(src_path, dst_path) + + assert os.path.exists(dst_path) + assert open((dst_path / "file"), "r").read() == "foo" + + def test_update_metadata(self, utils: Utils, tmp_path): + data_dir = tmp_path / "awscli" / "data" + data_dir.mkdir(parents=True) + metadata_path = data_dir / "metadata.json" + metadata_path.write_text("{}") + + utils.update_metadata(tmp_path, key="value") + + with open(metadata_path, "r") as f: + data = json.load(f) + assert data == {"key": "value"} + + @skip_if_windows + def test_create_venv(self, utils: Utils, tmp_path): + 
utils.create_venv(tmp_path) + + self.assert_dir_has_content( + tmp_path, + [ + "bin", + "include", + "lib", + ], + ) + self.assert_dir_has_content( + tmp_path / "bin", + [ + "python", + "pip", + ], + ) + + @if_windows + def test_create_venv_windows(self, utils: Utils, tmp_path): + utils.create_venv(tmp_path) + + self.assert_dir_has_content( + tmp_path, + [ + "Scripts", + "Include", + "Lib", + "pyvenv.cfg", + ], + ) + self.assert_dir_has_content( + tmp_path / "Scripts", + [ + "python.exe", + "pip.exe", + ], + ) + + def assert_dir_has_content(self, path: str, expected_files: List[str]): + assert set(l.lower() for l in expected_files).issubset( + set(l.lower() for l in os.listdir(path)) + ) + + +@pytest.fixture +def unmet_error(request): + error = UnmetDependenciesException([ + ('colorama', '1.0', Requirement('colorama', '>=2.0', '<3.0')), + ], **request.param) + return str(error) + + +class TestUnmetDependencies: + @pytest.mark.parametrize('unmet_error', [{'in_venv': False}], indirect=True) + def test_in_error_message(self, unmet_error): + assert ( + "colorama (required: ('>=2.0', '<3.0')) (version installed: 1.0)" + ) in unmet_error + assert ( + f"{sys.executable} -m pip install --prefer-binary 'colorama>=2.0,<3.0'" + ) in unmet_error + + @pytest.mark.parametrize('unmet_error', [{'in_venv': False}], indirect=True) + def test_not_in_venv(self, unmet_error): + assert 'We noticed you are not in a virtualenv.' in unmet_error + + @pytest.mark.parametrize('unmet_error', [{'in_venv': True}], indirect=True) + def test_in_venv(self, unmet_error): + assert 'We noticed you are not in a virtualenv.' 
not in unmet_error + + @pytest.mark.parametrize( + 'unmet_error', + [{'in_venv': False, 'reason': "custom reason message"}], + indirect=True, + ) + def test_custom_reason(self, unmet_error): + assert 'custom reason message' in unmet_error diff --git a/tests/backends/build_system/integration/__init__.py b/tests/backends/build_system/integration/__init__.py new file mode 100644 index 000000000000..e99e9a2823e1 --- /dev/null +++ b/tests/backends/build_system/integration/__init__.py @@ -0,0 +1,222 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+import re +import sys +import shutil +import venv +import subprocess +import os + +from pathlib import Path +from typing import Dict + + +IS_WINDOWS = sys.platform == "win32" +BIN_DIRNAME = "Scripts" if IS_WINDOWS else "bin" +PYTHON_EXE_NAME = "python.exe" if IS_WINDOWS else "python" +CLI_SCRIPT_NAME = "aws.cmd" if IS_WINDOWS else "aws" +LOCK_SUFFIX = "win-lock.txt" if IS_WINDOWS else "lock.txt" + +ROOT = Path(__file__).parents[4] +BOOTSTRAP_REQUIREMENTS = ( + ROOT / "requirements" / "download-deps" / f"bootstrap-{LOCK_SUFFIX}" +) +PORTABLE_EXE_REQUIREMENTS = ( + ROOT / "requirements" / "download-deps" / f"portable-exe-{LOCK_SUFFIX}" +) +SYSTEM_SANDBOX_REQIREMENTS = ( + ROOT / "requirements" / "download-deps" / f"system-sandbox-{LOCK_SUFFIX}" +) + + +class BaseArtifactTest: + def expected_cli_version(self): + init_file_path = ROOT / "awscli" / "__init__.py" + version_file = open(init_file_path, "r").read() + version_match = re.search( + r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M + ) + if version_match: + return version_match.group(1) + raise RuntimeError("Unable to find version string.") + + def expected_python_version(self): + python_info = sys.version_info + return f"{python_info.major}.{python_info.minor}.{python_info.micro}" + + def assert_version_string_is_correct(self, exe_path, dist_type): + version_string = subprocess.check_output( + [str(exe_path), "--version"] + ).decode() + + assert f"aws-cli/{self.expected_cli_version()}" in version_string + assert f"Python/{self.expected_python_version()}" in version_string + assert f"source-{dist_type}" in version_string + + def assert_built_venv_is_correct(self, venv_dir): + self.assert_venv_is_correct(venv_dir) + files = os.listdir(venv_dir) + + # The venv after building also includes the binary + assert BIN_DIRNAME in files + aws_exe = venv_dir / BIN_DIRNAME / CLI_SCRIPT_NAME + self.assert_version_string_is_correct(aws_exe, "sandbox") + + def assert_venv_is_correct(self, venv_dir): + files = 
os.listdir(venv_dir) + assert "Include" in files or "include" in files + assert "Lib" in files or "lib" in files or "lib64" in files + assert "pyvenv.cfg" in files + + def assert_built_exe_is_correct(self, root_dir): + aws_dir = root_dir / "build" / "exe" / "aws" + files = os.listdir(aws_dir) + + assert set(files) == { + "README.md", + "THIRD_PARTY_LICENSES", + "dist", + "install", + } + + aws_exe = aws_dir / "dist" / "aws" + self.assert_version_string_is_correct(aws_exe, "exe") + + def assert_installed_exe_is_correct(self, exe_dir): + self.assert_version_string_is_correct(exe_dir / CLI_SCRIPT_NAME, "exe") + + def assert_installed_venv_is_correct(self, exe_dir, lib_dir): + self.assert_version_string_is_correct(exe_dir / CLI_SCRIPT_NAME, "sandbox") + self.assert_venv_is_correct(lib_dir / "aws-cli") + + +class VEnvWorkspace: + def __init__(self, path): + self.path = path + self.cli_path = path / "aws" + self.venv_path = path / "venv" + self.install_path = path / "install" + self.bin_path = path / "install" / "bin" + self.lib_path = path / "install" / "lib" + + self._init_cli_directory() + self._init_venv_directory() + self._init_install_path() + + def _init_cli_directory(self): + shutil.copytree( + ROOT, + self.cli_path, + ignore=shutil.ignore_patterns(".git", "build", ".tox"), + ) + + def _init_venv_directory(self): + venv.create(self.venv_path, with_pip=True) + + def _init_install_path(self): + self.install_path.mkdir() + self.bin_path.mkdir() + self.lib_path.mkdir() + + def install_bootstrap_dependencies(self): + self._install_requirements_file(BOOTSTRAP_REQUIREMENTS) + + def install_dependencies(self): + self.install_bootstrap_dependencies() + self._install_requirements_file(SYSTEM_SANDBOX_REQIREMENTS) + + def install_pyinstaller(self): + self._install_requirements_file(PORTABLE_EXE_REQUIREMENTS) + + def _install_requirements_file(self, path: Path): + subprocess.check_call( + [self.python_exe(), "-m", "pip", "install", "-r", path] + ) + + @property + def 
build_path(self): + return self.cli_path / "build" / "venv" + + def python_exe(self): + return self.venv_path / BIN_DIRNAME / PYTHON_EXE_NAME + + def env(self, overrides: Dict[str, str] = None): + env = os.environ.copy() + if overrides: + env.update(overrides) + env["PYTHON"] = str(self.python_exe()) + return env + + def subprocess(self, args, env=None): + return subprocess.check_output( + args, + stderr=subprocess.STDOUT, + cwd=self.cli_path, + env=self.env(env), + ) + + def configure(self, install_type: str, download_deps: bool = False): + configure_path = self.cli_path / "configure" + args = [ + configure_path, + f"--with-install-type={install_type}", + ] + if download_deps: + args.append("--with-download-deps") + self.subprocess(args) + + def make(self, args=None, env=None): + cmd = ["make"] + if args: + cmd += args + return self.subprocess(cmd, env=env) + + def call_build_system(self, artifact_type: str, download_deps: bool): + args = [ + self.python_exe(), + os.path.join("backends", "build_system"), + "build", + "--artifact", + artifact_type, + "--build-dir", + "build", + ] + if download_deps: + args.append("--download-deps") + return self.subprocess(args) + + def call_install(self, bin_path: str, lib_path: str): + args = [ + self.python_exe(), + os.path.join("backends", "build_system"), + "install", + "--bin-dir", + bin_path, + "--lib-dir", + lib_path, + "--build-dir", + "build", + ] + return self.subprocess(args) + + def call_uninstall(self, bin_path: str, lib_path: str): + args = [ + self.python_exe(), + os.path.join("backends", "build_system"), + "uninstall", + "--bin-dir", + bin_path, + "--lib-dir", + lib_path, + ] + return self.subprocess(args) diff --git a/tests/backends/build_system/integration/conftest.py b/tests/backends/build_system/integration/conftest.py new file mode 100644 index 000000000000..f97cae954fb7 --- /dev/null +++ b/tests/backends/build_system/integration/conftest.py @@ -0,0 +1,8 @@ +import pytest + +from 
tests.backends.build_system.integration import VEnvWorkspace + + +@pytest.fixture +def workspace(tmp_path) -> VEnvWorkspace: + return VEnvWorkspace(tmp_path) diff --git a/tests/backends/build_system/integration/test_build_system.py b/tests/backends/build_system/integration/test_build_system.py new file mode 100644 index 000000000000..b4d35ecf15ca --- /dev/null +++ b/tests/backends/build_system/integration/test_build_system.py @@ -0,0 +1,115 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+import os +import subprocess + +import pytest + +from tests.backends.build_system.integration import BaseArtifactTest +from tests.backends.build_system.integration import VEnvWorkspace + + +class TestBuildBackend(BaseArtifactTest): + def test_exe_with_deps(self, workspace: VEnvWorkspace): + workspace.call_build_system("portable-exe", download_deps=True) + + self.assert_built_exe_is_correct(workspace.cli_path) + + def test_exe_without_deps(self, workspace: VEnvWorkspace): + workspace.install_dependencies() + workspace.install_pyinstaller() + workspace.call_build_system("portable-exe", download_deps=False) + + self.assert_built_exe_is_correct(workspace.cli_path) + + def test_venv_with_deps(self, workspace: VEnvWorkspace): + workspace.call_build_system("system-sandbox", download_deps=True) + + self.assert_built_venv_is_correct(workspace.build_path) + + def test_venv_without_deps(self, workspace: VEnvWorkspace): + workspace.install_dependencies() + workspace.call_build_system("system-sandbox", download_deps=False) + + self.assert_built_venv_is_correct(workspace.build_path) + + +class TestBuildBackendFailureCases: + def test_errors_building_exe_without_pyinstaller(self, workspace, capsys): + workspace.install_dependencies() + with pytest.raises(subprocess.CalledProcessError) as e: + workspace.call_build_system("portable-exe", download_deps=False) + error_text = e.value.stdout.decode() + assert "pyinstaller" in error_text + assert "No such file or directory" in error_text + + def test_errors_building_venv_without_runtime_deps(self, workspace): + with pytest.raises(subprocess.CalledProcessError) as e: + workspace.call_build_system("system-sandbox", download_deps=False) + error_text = e.value.stdout.decode() + assert "No module named 'flit_core'" in error_text + + +class TestInstall(BaseArtifactTest): + def test_install_exe(self, workspace: VEnvWorkspace): + workspace.call_build_system("portable-exe", download_deps=True) + workspace.call_install( + 
bin_path=workspace.bin_path, + lib_path=workspace.lib_path, + ) + + self.assert_installed_exe_is_correct(workspace.bin_path) + + def test_install_venv(self, workspace: VEnvWorkspace): + workspace.call_build_system("system-sandbox", download_deps=True) + workspace.call_install( + bin_path=workspace.bin_path, + lib_path=workspace.lib_path, + ) + + self.assert_installed_venv_is_correct( + workspace.bin_path, + workspace.lib_path, + ) + + +class TestUninstall(BaseArtifactTest): + def test_uninstall_exe(self, workspace: VEnvWorkspace): + workspace.call_build_system("portable-exe", download_deps=True) + workspace.call_install( + bin_path=workspace.bin_path, + lib_path=workspace.lib_path, + ) + + workspace.call_uninstall( + bin_path=workspace.bin_path, + lib_path=workspace.lib_path, + ) + + assert os.listdir(workspace.bin_path) == [] + assert os.listdir(workspace.lib_path) == [] + + def test_uninstall_venv(self, workspace: VEnvWorkspace): + workspace.call_build_system("system-sandbox", download_deps=True) + workspace.call_install( + bin_path=workspace.bin_path, + lib_path=workspace.lib_path, + ) + + workspace.call_uninstall( + bin_path=workspace.bin_path, + lib_path=workspace.lib_path, + ) + + assert os.listdir(workspace.bin_path) == [] + assert os.listdir(workspace.lib_path) == [] diff --git a/tests/backends/build_system/integration/test_makefile.py b/tests/backends/build_system/integration/test_makefile.py new file mode 100644 index 000000000000..7f0b2b0b2e68 --- /dev/null +++ b/tests/backends/build_system/integration/test_makefile.py @@ -0,0 +1,155 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. 
This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import os + +from awscli.testutils import skip_if_windows +from tests.backends.build_system.integration import BaseArtifactTest +from tests.backends.build_system.integration import VEnvWorkspace + + +WINDOWS_SKIP_REASON = "./configure tests do not run nativly on windows" + + +class TestMakeInstall(BaseArtifactTest): + def assert_venv_installed_correctly(self, bin_path, lib_path): + venv_dir = lib_path / "aws-cli" + self.assert_built_venv_is_correct(venv_dir) + + bins = set(os.listdir(bin_path)) + assert bins == {"aws", "aws_completer"} + + aws_exe = bin_path / "aws" + self.assert_version_string_is_correct(aws_exe, "sandbox") + + @skip_if_windows(WINDOWS_SKIP_REASON) + def test_install(self, workspace: VEnvWorkspace): + workspace.configure( + install_type="system-sandbox", + download_deps=True, + ) + workspace.make() + workspace.make( + [ + "install", + f"libdir={workspace.lib_path}", + f"bindir={workspace.bin_path}", + ] + ) + self.assert_venv_installed_correctly( + workspace.bin_path, workspace.lib_path + ) + + @skip_if_windows(WINDOWS_SKIP_REASON) + def test_install_prefix(self, workspace): + workspace.configure( + install_type="system-sandbox", + download_deps=True, + ) + + workspace.make() + workspace.make( + [ + "install", + f"prefix={workspace.install_path}", + ] + ) + + self.assert_venv_installed_correctly( + workspace.bin_path, workspace.lib_path + ) + + @skip_if_windows(WINDOWS_SKIP_REASON) + def test_install_destdir(self, workspace): + workspace.configure( + install_type="system-sandbox", + download_deps=True, + ) + + workspace.make() + workspace.make( + [ + "install", + f"prefix=/install", + ], + env={"DESTDIR": str(workspace.path)}, + ) + + self.assert_venv_installed_correctly( + workspace.bin_path, workspace.lib_path + ) + + 
@skip_if_windows(WINDOWS_SKIP_REASON) + def test_uninstall(self, workspace: VEnvWorkspace): + workspace.configure( + install_type="system-sandbox", + download_deps=True, + ) + workspace.make() + workspace.make( + [ + "install", + f"libdir={workspace.lib_path}", + f"bindir={workspace.bin_path}", + ] + ) + workspace.make( + [ + "uninstall", + f"libdir={workspace.lib_path}", + f"bindir={workspace.bin_path}", + ] + ) + + assert os.listdir(workspace.bin_path) == [] + assert os.listdir(workspace.lib_path) == [] + + +class TestMake(BaseArtifactTest): + @skip_if_windows(WINDOWS_SKIP_REASON) + def test_exe_with_deps(self, workspace: VEnvWorkspace): + workspace.configure( + install_type="portable-exe", + download_deps=True, + ) + workspace.make() + + self.assert_built_exe_is_correct(workspace.cli_path) + + @skip_if_windows(WINDOWS_SKIP_REASON) + def test_exe_without_deps(self, workspace: VEnvWorkspace): + workspace.install_dependencies() + workspace.install_pyinstaller() + workspace.configure(install_type="portable-exe") + workspace.make() + + self.assert_built_exe_is_correct(workspace.cli_path) + + @skip_if_windows(WINDOWS_SKIP_REASON) + def test_venv_with_deps(self, workspace: VEnvWorkspace): + workspace.configure( + install_type="system-sandbox", + download_deps=True, + ) + workspace.make() + + self.assert_built_venv_is_correct(workspace.build_path) + + @skip_if_windows(WINDOWS_SKIP_REASON) + def test_venv_without_deps(self, workspace: VEnvWorkspace): + workspace.install_dependencies() + workspace.configure( + install_type="system-sandbox", + ) + workspace.make() + + self.assert_built_venv_is_correct(workspace.build_path) diff --git a/tests/backends/build_system/markers.py b/tests/backends/build_system/markers.py new file mode 100644 index 000000000000..26a5010711c8 --- /dev/null +++ b/tests/backends/build_system/markers.py @@ -0,0 +1,9 @@ +import platform + +import pytest + + +skip_if_windows = pytest.mark.skipif(platform.system() not in ['Darwin', 'Linux'], + 
reason="This test does not run on windows.") +if_windows = pytest.mark.skipif(platform.system() in ['Darwin', 'Linux'], + reason="This test only runs on windows.") diff --git a/tests/backends/build_system/unit/__init__.py b/tests/backends/build_system/unit/__init__.py new file mode 100644 index 000000000000..92338204f7fa --- /dev/null +++ b/tests/backends/build_system/unit/__init__.py @@ -0,0 +1,12 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. diff --git a/tests/backends/build_system/unit/test_exe.py b/tests/backends/build_system/unit/test_exe.py new file mode 100644 index 000000000000..94547bf3da85 --- /dev/null +++ b/tests/backends/build_system/unit/test_exe.py @@ -0,0 +1,149 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+import os + +import pytest + +from build_system.constants import PYINSTALLER_DIR +from build_system.constants import EXE_ASSETS_DIR +from build_system.constants import BIN_DIRNAME +from build_system.constants import PYINSTALLER_EXE_NAME +from build_system.exe import ExeBuilder + + +class FakeUtils: + def __init__(self, prior_build_dir=False): + self._prior_build_dir = prior_build_dir + self.calls = [] + + def isdir(self, path: str) -> bool: + self.calls.append(("isdir", path)) + return self._prior_build_dir + + def run(self, command, **kwargs): + self.calls.append(("run", command, kwargs)) + + def copy_directory(self, src, dst): + self.calls.append(("copy_directory", src, dst)) + + def copy_directory_contents_into(self, src, dst): + self.calls.append(("copy_directory_contents_into", src, dst)) + + def update_metadata(self, dirname, **kwargs): + self.calls.append(("update_metadata", dirname, kwargs)) + + def rmtree(self, path): + self.calls.append(("rmtree", path)) + + +class FakeAwsCliVenv: + @property + def bin_dir(self) -> str: + return BIN_DIRNAME + + @property + def python_exe(self) -> str: + return "python" + + +@pytest.fixture +def fake_aws_cli_venv(): + return FakeAwsCliVenv() + + +class TestExe: + def _expected_build_tasks(self): + return [ + # Check if build dir is present + ("isdir", os.path.join("workspace", "dist")), + # Build aws and copy into final directory + ( + "run", + [ + "python", + os.path.join(BIN_DIRNAME, PYINSTALLER_EXE_NAME), + os.path.join(PYINSTALLER_DIR, "aws.spec"), + "--distpath", + os.path.join("workspace", "dist"), + "--workpath", + os.path.join("workspace", "build"), + ], + {"cwd": PYINSTALLER_DIR, "check": True}, + ), + ( + "copy_directory", + os.path.join("workspace", "dist", "aws"), + os.path.join("workspace", "aws", "dist"), + ), + # build aws_completer and copy into final directory + ( + "run", + [ + "python", + os.path.join(BIN_DIRNAME, PYINSTALLER_EXE_NAME), + os.path.join(PYINSTALLER_DIR, "aws_completer.spec"), + 
"--distpath", + os.path.join("workspace", "dist"), + "--workpath", + os.path.join("workspace", "build"), + ], + {"cwd": PYINSTALLER_DIR, "check": True}, + ), + ( + "copy_directory_contents_into", + os.path.join("workspace", "dist", "aws_completer"), + os.path.join("workspace", "aws", "dist"), + ), + # Copy exe assets + ( + "copy_directory_contents_into", + EXE_ASSETS_DIR, + os.path.join("workspace", "aws"), + ), + # Update metadata + ( + "update_metadata", + os.path.join("workspace", "aws", "dist"), + {"distribution_source": "source-exe"}, + ), + + ] + + def test_build(self, fake_aws_cli_venv): + fake_utils = FakeUtils() + builder = ExeBuilder("workspace", fake_aws_cli_venv, _utils=fake_utils) + builder.build() + + assert fake_utils.calls == self._expected_build_tasks() + [ + # Cleanup + ("rmtree", os.path.join("workspace", "build")), + ("rmtree", os.path.join("workspace", "dist")), + ] + + def test_build_no_cleanup(self, fake_aws_cli_venv): + fake_utils = FakeUtils() + builder = ExeBuilder("workspace", fake_aws_cli_venv, _utils=fake_utils) + builder.build(cleanup=False) + + assert fake_utils.calls == self._expected_build_tasks() + + def test_build_does_delete_prior_workspace(self, fake_aws_cli_venv): + fake_utils = FakeUtils(prior_build_dir=True) + + builder = ExeBuilder("workspace", fake_aws_cli_venv, _utils=fake_utils) + builder.build() + + assert fake_utils.calls[0:2] == [ + ("isdir", os.path.join("workspace", "dist")), + ("rmtree", os.path.join("workspace", "dist")), + ] diff --git a/tests/backends/build_system/unit/test_install.py b/tests/backends/build_system/unit/test_install.py new file mode 100644 index 000000000000..9c0025050fff --- /dev/null +++ b/tests/backends/build_system/unit/test_install.py @@ -0,0 +1,191 @@ +import os +import platform +from typing import List, Any + +import pytest + + +from build_system.install import Installer +from build_system.install import Uninstaller +from backends.build_system.utils import Utils +from 
tests.backends.build_system.markers import skip_if_windows, if_windows + + +class FakeUtils(Utils): + def __init__(self, is_exe: bool, responses: bool = False): + self._is_exe = is_exe + self._responses = responses + self.calls: List[Any] = [] + + def isdir(self, path: str) -> bool: + self.calls.append(("isdir", path)) + if path == os.path.join("build_dir", "exe"): + return self._is_exe + else: + return self._responses + + def run(self, command, **kwargs): + self.calls.append(("run", command, kwargs)) + + def copy_directory(self, src, dst): + self.calls.append(("copy_directory", src, dst)) + + def copy_directory_contents_into(self, src, dst): + self.calls.append(("copy_directory_contents_into", src, dst)) + + def update_metadata(self, dirname, **kwargs): + self.calls.append(("update_metadata", dirname, kwargs)) + + def rmtree(self, path): + self.calls.append(("rmtree", path)) + + def read_file_lines(self, path: str) -> List[str]: + self.calls.append(("read_file_lines", path)) + return ["file"] + + def write_file(self, path: str, content: str): + self.calls.append(("write_file", path, content)) + + def path_exists(self, path: str): + self.calls.append(("path_exists", path)) + return self._responses + + def makedirs(self, path: str): + self.calls.append(("makedirs", path)) + + def islink(self, path: str) -> bool: + self.calls.append(("islink", path)) + return self._responses + + def symlink(self, src: str, dst: str): + self.calls.append(("symlink", src, dst)) + + def remove(self, path: str): + self.calls.append(("remove", path)) + + +class TestInstaller: + @skip_if_windows + def test_install_exe(self): + utils = FakeUtils(is_exe=True) + installer = Installer("build_dir", utils=utils) + installer.install("lib_dir", "bin_dir") + + assert utils.calls == [ + ("isdir", os.path.join("build_dir", "exe")), + ("isdir", "lib_dir"), + ( + "copy_directory", + os.path.join("build_dir", "exe", "aws", "dist"), + "lib_dir", + ), + ("path_exists", "bin_dir"), + ("makedirs", 
"bin_dir"), + ("islink", os.path.join("bin_dir", "aws")), + ( + "symlink", + os.path.join("lib_dir", "aws"), + os.path.join("bin_dir", "aws"), + ), + ("islink", os.path.join("bin_dir", "aws_completer")), + ( + "symlink", + os.path.join("lib_dir", "aws_completer"), + os.path.join("bin_dir", "aws_completer"), + ), + ] + + @if_windows + def test_install_exe_windows(self): + utils = FakeUtils(is_exe=True) + installer = Installer("build_dir", utils=utils) + installer.install("lib_dir", "bin_dir") + + assert utils.calls == [ + ("isdir", os.path.join("build_dir", "exe")), + ("isdir", "lib_dir"), + ( + "copy_directory", + os.path.join("build_dir", "exe", "aws", "dist"), + "lib_dir", + ), + ("write_file", "bin_dir\\aws.cmd", "@echo off\nlib_dir\\aws.exe %*\n"), + ] + + @skip_if_windows + def test_install_venv(self): + utils = FakeUtils(is_exe=False) + installer = Installer("build_dir", utils=utils) + installer.install("lib_dir", "bin_dir") + + assert utils.calls == [ + ("isdir", "build_dir/exe"), + ("isdir", "lib_dir"), + ("copy_directory", "build_dir/venv", "lib_dir"), + ("read_file_lines", "lib_dir/bin/aws"), + ("write_file", "lib_dir/bin/aws", "#!lib_dir/bin/python\n"), + ("read_file_lines", "lib_dir/bin/aws_completer"), + ( + "write_file", + "lib_dir/bin/aws_completer", + "#!lib_dir/bin/python\n", + ), + ("path_exists", "bin_dir"), + ("makedirs", "bin_dir"), + ("islink", "bin_dir/aws"), + ("symlink", "lib_dir/bin/aws", "bin_dir/aws"), + ("islink", "bin_dir/aws_completer"), + ("symlink", "lib_dir/bin/aws_completer", "bin_dir/aws_completer"), + ] + + @if_windows + def test_install_venv_windows(self): + utils = FakeUtils(is_exe=False) + installer = Installer("build_dir", utils=utils) + installer.install("lib_dir", "bin_dir") + + assert utils.calls == [ + ("isdir", "build_dir\\exe"), + ("isdir", "lib_dir"), + ("copy_directory", "build_dir\\venv", "lib_dir"), + ("read_file_lines", "lib_dir\\Scripts\\aws.cmd"), + ( + "write_file", + "lib_dir\\Scripts\\aws.cmd", + '@echo off & 
"lib_dir\\Scripts\\python.exe" -x "%~f0" %* & goto :eof\n', + ), + ("path_exists", "bin_dir"), + ("makedirs", "bin_dir"), + ("islink", "bin_dir\\aws.cmd"), + ("symlink", "lib_dir\\Scripts\\aws.cmd", "bin_dir\\aws.cmd"), + ] + + +class TestUninstaller: + @skip_if_windows + def test_uninstall(self): + utils = FakeUtils(is_exe=True, responses=True) + uninstaller = Uninstaller(utils=utils) + uninstaller.uninstall("lib_dir", "bin_dir") + + assert utils.calls == [ + ("isdir", "lib_dir"), + ("rmtree", "lib_dir"), + ("islink", "bin_dir/aws"), + ("remove", "bin_dir/aws"), + ("islink", "bin_dir/aws_completer"), + ("remove", "bin_dir/aws_completer"), + ] + + @if_windows + def test_uninstall_windows(self): + utils = FakeUtils(is_exe=True, responses=True) + uninstaller = Uninstaller(utils=utils) + uninstaller.uninstall("lib_dir", "bin_dir") + + assert utils.calls == [ + ("isdir", "lib_dir"), + ("rmtree", "lib_dir"), + ("islink", "bin_dir\\aws.cmd"), + ("remove", "bin_dir\\aws.cmd"), + ] diff --git a/tests/backends/build_system/unit/test_validate_env.py b/tests/backends/build_system/unit/test_validate_env.py new file mode 100644 index 000000000000..c58694903378 --- /dev/null +++ b/tests/backends/build_system/unit/test_validate_env.py @@ -0,0 +1,5 @@ +import sys + +import pytest + +from backends.build_system.utils import Requirement diff --git a/tests/backends/test_pep517.py b/tests/backends/test_pep517.py index 5c1303fe6810..9488180be389 100644 --- a/tests/backends/test_pep517.py +++ b/tests/backends/test_pep517.py @@ -110,6 +110,25 @@ def test_build_sdist(tmpdir, config_settings): # Make sure the bin directory is included. assert unpacked_sdist.join("bin", "aws").check() + # Make sure the tests directory is included. 
+ assert unpacked_sdist.join("tests", "__init__.py").check() + assert unpacked_sdist.join("tests", "unit", "__init__.py").check() + assert unpacked_sdist.join( + "tests", "backends", "build_system", "unit", "__init__.py").check() + assert unpacked_sdist.join("tests", "__init__.py").check() + + # Make sure sdist will be buildable + assert unpacked_sdist.join("configure").check() + assert unpacked_sdist.join("Makefile.in").check() + assert unpacked_sdist.join("requirements", "bootstrap.txt").check() + assert unpacked_sdist.join( + "requirements", "download-deps", "bootstrap.txt").check() + + # Make sure exe build files are added to the sdist + assert unpacked_sdist.join("exe", "pyinstaller", "aws.spec") + assert unpacked_sdist.join("exe", "asssets", "install") + assert unpacked_sdist.join("exe", "tests", "README.md") + # We do not build the ac.index in building the sdist. So we want to make # sure it is not being included. assert not unpacked_sdist.join("awscli", "data", "ac.index").check() @@ -244,6 +263,11 @@ def test_read_sdist_extras(): "backends/**/*.py", "bin/*", "CHANGELOG.rst", + "tests/**/*", + "requirements/**/*.txt", + "configure", + "Makefile.in", + "exe/**/*", } extras = set(backends.pep517.read_sdist_extras()) diff --git a/tests/conftest.py b/tests/conftest.py index 4bd573144c79..39c376b31191 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,9 +10,8 @@ # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. -import contextlib import logging -import threading +import platform from prompt_toolkit.application import create_app_session from prompt_toolkit.input import create_pipe_input