From c045ec2da514532772b0618e4fc856e16e4f54f1 Mon Sep 17 00:00:00 2001
From: Troy Dai
Date: Thu, 30 Nov 2017 10:38:43 -0800
Subject: [PATCH] Whitelist the modules to run through the style check

---
 scripts/ci/test_static.sh                     | 61 ++++++++++-
 tools/.vscode/settings.json                   |  2 +
 tools/automation/__main__.py                  |  4 +-
 tools/automation/clibuild/__init__.py         | 77 +++++++------
 tools/automation/commandlint/run.py           |  6 +-
 tools/automation/style/__init__.py            | 103 ++++++++++++++++++
 .../automation/style/pylint_disable_check.py  |  3 +-
 tools/automation/style/run.py                 | 96 ----------------
 tools/automation/tests/run.py                 |  1 +
 tools/automation/tests/verify_dependencies.py |  8 +-
 .../tests/verify_package_versions.py          |  8 +-
 tools/automation/tests/verify_packages.py     | 10 +-
 tools/automation/utilities/pypi.py            |  1 +
 tools/automation/verify/__init__.py           |  7 +-
 tools/setup.py                                |  6 +-
 15 files changed, 238 insertions(+), 155 deletions(-)
 create mode 100644 tools/.vscode/settings.json
 delete mode 100644 tools/automation/style/run.py

diff --git a/scripts/ci/test_static.sh b/scripts/ci/test_static.sh
index 29a5b5fa026..fda9a22cd6c 100755
--- a/scripts/ci/test_static.sh
+++ b/scripts/ci/test_static.sh
@@ -1,7 +1,5 @@
 #!/usr/bin/env bash
 
-set -e
-
 . $(cd $(dirname $0); pwd)/artifacts.sh
 
 ls -la $share_folder/build
@@ -9,7 +7,7 @@ ls -la $share_folder/build
 ALL_MODULES=`find $share_folder/build/ -name "*.whl"`
 
 [ -d privates ] && pip install privates/*.whl
-pip install pylint flake8
+pip install pylint
 pip install $ALL_MODULES
 
 echo '=== List installed packages'
@@ -20,5 +18,60 @@ echo '=== Begin testing'
 proc_number=`python -c 'import multiprocessing; print(multiprocessing.cpu_count())'`
 
 echo "Run pylint with $proc_number proc."
-pylint azure.cli --rcfile=./pylintrc -j $proc_number
+# Uncomment after all conversions are done
+# pylint azure.cli --rcfile=./pylintrc -j $proc_number
+
+proc_number=`python -c 'import multiprocessing; print(multiprocessing.cpu_count())'`
+exit_code=0
+
+run_style() {
+    pylint $1 --rcfile=./pylintrc -j $proc_number
+    let exit_code=$exit_code+$?
+} + + +set +e + +run_style azure.cli.core +#run_style azure.cli.command_modules.acr +#run_style azure.cli.command_modules.acs +#run_style azure.cli.command_modules.advisor +#run_style azure.cli.command_modules.appservice +#run_style azure.cli.command_modules.backup +#run_style azure.cli.command_modules.batch +#run_style azure.cli.command_modules.batchai +run_style azure.cli.command_modules.billing +run_style azure.cli.command_modules.cdn +run_style azure.cli.command_modules.cloud +#run_style azure.cli.command_modules.cognitiveservices +run_style azure.cli.command_modules.configure +#run_style azure.cli.command_modules.consumption +#run_style azure.cli.command_modules.container +#run_style azure.cli.command_modules.cosmosdb +#run_style azure.cli.command_modules.dla +#run_style azure.cli.command_modules.dls +#run_style azure.cli.command_modules.eventgrid +run_style azure.cli.command_modules.extension +run_style azure.cli.command_modules.feedback +run_style azure.cli.command_modules.find +run_style azure.cli.command_modules.interactive +#run_style azure.cli.command_modules.iot +#run_style azure.cli.command_modules.keyvault +#run_style azure.cli.command_modules.lab +#run_style azure.cli.command_modules.monitor +run_style azure.cli.command_modules.network +#run_style azure.cli.command_modules.nspkg +#run_style azure.cli.command_modules.profile +#run_style azure.cli.command_modules.rdbms +run_style azure.cli.command_modules.redis +#run_style azure.cli.command_modules.reservations +run_style azure.cli.command_modules.resource +run_style azure.cli.command_modules.role +#run_style azure.cli.command_modules.servicefabric +#run_style azure.cli.command_modules.sql +#run_style azure.cli.command_modules.storage +#run_style azure.cli.command_modules.testsdk +#run_style azure.cli.command_modules.vm + +exit $exit_code diff --git a/tools/.vscode/settings.json b/tools/.vscode/settings.json new file mode 100644 index 00000000000..7a73a41bfdf --- /dev/null +++ b/tools/.vscode/settings.json @@ -0,0 +1,2 @@ +{ +} \ No newline at end of file diff --git a/tools/automation/__main__.py b/tools/automation/__main__.py index a7601306bfb..2016bf30d96 100644 --- a/tools/automation/__main__.py +++ b/tools/automation/__main__.py @@ -7,6 +7,8 @@ import sys import automation.verify import automation.clibuild +import automation.style + def main(): parser = argparse.ArgumentParser(prog='Azure CLI build tools') @@ -14,6 +16,7 @@ def main(): sub_parser = parser.add_subparsers(title='azure cli tools sub commands') automation.verify.init_args(sub_parser) automation.clibuild.init_args(sub_parser) + automation.style.init_args(sub_parser) if sys.argv[1:]: args = parser.parse_args() @@ -24,4 +27,3 @@ def main(): if __name__ == '__main__': main() - diff --git a/tools/automation/clibuild/__init__.py b/tools/automation/clibuild/__init__.py index 175df442f0d..92ed6b1e33c 100644 --- a/tools/automation/clibuild/__init__.py +++ b/tools/automation/clibuild/__init__.py @@ -20,11 +20,10 @@ def build_debian(git_url, git_branch, cli_version, artifact_dir, arg_ns=None): - cmd = ['docker', 'run', '-d', '-e', 'CLI_VERSION='+cli_version, '-e', 'BUILD_ARTIFACT_DIR=/artifacts', - '-v', artifact_dir+':/artifacts', 'ubuntu:14.04', '/bin/bash', '-cx', - 'apt-get update && apt-get install -y git wget && ' \ - 'git clone --progress --verbose {} --branch {} /repo_clone && cd /repo_clone ' \ - '&& build_scripts/debian/build.sh /repo_clone'.format(git_url, git_branch)] + cmd = ['docker', 'run', '-d', '-e', 'CLI_VERSION=' + cli_version, '-e', 
'BUILD_ARTIFACT_DIR=/artifacts', + '-v', artifact_dir + ':/artifacts', 'ubuntu:14.04', '/bin/bash', '-cx', + 'apt-get update && apt-get install -y git wget && git clone --progress --verbose {} --branch {} /repo_clone ' + '&& cd /repo_clone && build_scripts/debian/build.sh /repo_clone'.format(git_url, git_branch)] container_id = check_output(cmd, universal_newlines=True).strip() print('Debian build running. Use `docker logs -f {}`'.format(container_id)) exit_code = check_output(['docker', 'wait', container_id], universal_newlines=True).strip() @@ -32,7 +31,8 @@ def build_debian(git_url, git_branch, cli_version, artifact_dir, arg_ns=None): def build_docker(git_url, git_branch, cli_version, artifact_dir, arg_ns=None): - cmd = ['docker', 'build', '--no-cache', '--quiet', '--build-arg', 'BUILD_DATE="`date -u +"%Y-%m-%dT%H:%M:%SZ"`"', '--build-arg', 'CLI_VERSION='+cli_version, get_repo_root()] + cmd = ['docker', 'build', '--no-cache', '--quiet', '--build-arg', 'BUILD_DATE="`date -u +"%Y-%m-%dT%H:%M:%SZ"`"', + '--build-arg', 'CLI_VERSION=' + cli_version, get_repo_root()] print('Docker build started.') image_id = check_output(cmd, universal_newlines=True).strip() image_id = image_id.split(':')[1] @@ -43,13 +43,12 @@ def build_docker(git_url, git_branch, cli_version, artifact_dir, arg_ns=None): def build_rpm(git_url, git_branch, cli_version, artifact_dir, arg_ns=None): - cmd = ['docker', 'run', '-d', '-e', 'CLI_VERSION='+cli_version, '-e', 'REPO_PATH=/repo_clone', - '-v', artifact_dir+':/artifacts', 'centos:7', '/bin/bash', '-cx', - 'yum check-update; yum install -y gcc git rpm-build rpm-devel rpmlint make bash coreutils diffutils ' \ - 'patch rpmdevtools python libffi-devel python-devel openssl-devel wget && ' \ - 'git clone --progress --verbose {} --branch {} /repo_clone && cd /repo_clone ' \ - '&& rpmbuild -v -bb --clean build_scripts/rpm/azure-cli.spec ' \ - '&& cp /root/rpmbuild/RPMS/x86_64/* /artifacts/'.format(git_url, git_branch)] + cmd = ['docker', 'run', '-d', '-e', 'CLI_VERSION=' + cli_version, '-e', 'REPO_PATH=/repo_clone', + '-v', artifact_dir + ':/artifacts', 'centos:7', '/bin/bash', '-cx', + 'yum check-update; yum install -y gcc git rpm-build rpm-devel rpmlint make bash coreutils diffutils patch ' + 'rpmdevtools python libffi-devel python-devel openssl-devel wget && git clone --progress --verbose {} ' + '--branch {} /repo_clone && cd /repo_clone && rpmbuild -v -bb --clean build_scripts/rpm/azure-cli.spec && ' + 'cp /root/rpmbuild/RPMS/x86_64/* /artifacts/'.format(git_url, git_branch)] container_id = check_output(cmd, universal_newlines=True).strip() print('RPM build running. 
Use `docker logs -f {}`'.format(container_id)) exit_code = check_output(['docker', 'wait', container_id], universal_newlines=True).strip() @@ -57,10 +56,9 @@ def build_rpm(git_url, git_branch, cli_version, artifact_dir, arg_ns=None): def build_pypi(git_url, git_branch, _, artifact_dir, arg_ns=None): - cmd = ['docker', 'run', '-d', '-v', artifact_dir+':/artifacts', 'python:3.6', '/bin/bash', '-cx', - 'mkdir /artifacts/pypi ' \ - '&& git clone --progress --verbose {} --branch {} /repo_clone && cd /repo_clone ' \ - '&& python build_scripts/pypi/build.py /artifacts/pypi /repo_clone'.format(git_url, git_branch)] + cmd = ['docker', 'run', '-d', '-v', artifact_dir + ':/artifacts', 'python:3.6', '/bin/bash', '-cx', + 'mkdir /artifacts/pypi && git clone --progress --verbose {} --branch {} /repo_clone && cd /repo_clone && ' + 'python build_scripts/pypi/build.py /artifacts/pypi /repo_clone'.format(git_url, git_branch)] container_id = check_output(cmd, universal_newlines=True).strip() print('Python pypi build message: The version numbers of packages will be as defined in source code.') print('Python pypi build running. Use `docker logs -f {}`'.format(container_id)) @@ -69,7 +67,7 @@ def build_pypi(git_url, git_branch, _, artifact_dir, arg_ns=None): def build_msi(git_url, git_branch, cli_version, artifact_dir, arg_ns=None): - #TODO + # TODO print('SKIPPED MSI build. Not Yet Implemented. Please build manually.') @@ -78,13 +76,16 @@ def build_homebrew(git_url, git_branch, cli_version, artifact_dir, arg_ns=None): print('Homebrew message : The Homebrew formula requires CLI packages to be available on public PyPI. ' 'Version {} of the CLI does not appear to be on PyPI. ' 'If it was just updated, this message can be safely ignored.'.format(cli_version)) - upstream_url = arg_ns.homebrew_upstream_url or 'https://github.com/Azure/azure-cli/archive/azure-cli-{cli_version}.tar.gz'.format(cli_version=cli_version) - print('Homebrew message: The generated formula uses the latest public packages that are available on PyPI, not the code in your Git repo.') - cmd = ['docker', 'run', '-d', '-e', 'CLI_VERSION='+cli_version, '-e', 'BUILD_ARTIFACT_DIR=/artifacts', - '-e', 'UPSTREAM_URL='+upstream_url, - '-v', artifact_dir+':/artifacts', 'python:3.6', '/bin/bash', '-cx', - 'pip install sh && git clone --progress --verbose {} --branch {} /repo_clone && cd /repo_clone ' \ - '&& python build_scripts/homebrew/formula-generate.py'.format(git_url, git_branch)] + + upstream_url = arg_ns.homebrew_upstream_url or 'https://github.com/Azure/azure-cli/archive/azure-cli-{cli_version}.tar.gz'.format( + cli_version=cli_version) + print('Homebrew message: The generated formula uses the latest public packages that are available on PyPI, ' + 'not the code in your Git repo.') + cmd = ['docker', 'run', '-d', '-e', 'CLI_VERSION=' + cli_version, '-e', 'BUILD_ARTIFACT_DIR=/artifacts', + '-e', 'UPSTREAM_URL=' + upstream_url, + '-v', artifact_dir + ':/artifacts', 'python:3.6', '/bin/bash', '-cx', + 'pip install sh && git clone --progress --verbose {} --branch {} /repo_clone && cd /repo_clone && ' + 'python build_scripts/homebrew/formula-generate.py'.format(git_url, git_branch)] container_id = check_output(cmd, universal_newlines=True).strip() print('Homebrew formula generation running. 
Use `docker logs -f {}`'.format(container_id)) exit_code = check_output(['docker', 'wait', container_id], universal_newlines=True).strip() @@ -112,15 +113,18 @@ def cli_build(args): git_url = args.git_clone_url git_branch = args.git_clone_branch cli_version = args.cli_version - artifact_dir = tempfile.mkdtemp(prefix='cli-build-{}-'.format(datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')), dir=os.getcwd()) + artifact_dir = tempfile.mkdtemp( + prefix='cli-build-{}-'.format(datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')), dir=os.getcwd()) if len(build_types) == 1 and build_types[0] == '*': build_types = BUILD_TYPES print_heading('Building for {} from branch {} of {} ' 'and version number will be {}\n' - 'Build artifacts will be in {}'.format(', '.join(build_types), git_branch, git_url, cli_version, artifact_dir)) + 'Build artifacts will be in {}'.format(', '.join(build_types), git_branch, git_url, cli_version, + artifact_dir)) from concurrent.futures import ThreadPoolExecutor, as_completed with ThreadPoolExecutor(max_workers=len(build_types)) as executor: - tasks = {executor.submit(build_dispatch, bt, git_url, git_branch, cli_version, artifact_dir, arg_ns=args) for bt in build_types} + tasks = {executor.submit(build_dispatch, bt, git_url, git_branch, cli_version, artifact_dir, arg_ns=args) for bt + in build_types} for t in as_completed(tasks): t.result() print('Done.') @@ -130,9 +134,16 @@ def init_args(root): cli_build_parser = root.add_parser('build', help='Build the CLI. Docker is required.') cli_build_parser.set_defaults(func=cli_build) git_args = cli_build_parser.add_argument_group('Git Clone Arguments') - git_args.add_argument('-b', '--git-clone-branch', dest='git_clone_branch', help='Branch name that should be checked out. (default: %(default)s)', default='master') - git_args.add_argument('-u', '--git-clone-url', dest='git_clone_url', help='The url to clone. This will be passed to `git clone`. (default: %(default)s)', default='https://github.com/Azure/azure-cli.git') - cli_build_parser.add_argument('-t', '--type', dest='build_types', required=True, nargs='+', choices=BUILD_TYPES+['*'], help="Space separated list of the artifacts to build. Use '*' for all.") - cli_build_parser.add_argument('-c', '--cli-version', dest='cli_version', required=True, help="The version of the build. (ignored for 'pypi' type)") + git_args.add_argument('-b', '--git-clone-branch', dest='git_clone_branch', + help='Branch name that should be checked out. (default: %(default)s)', default='master') + git_args.add_argument('-u', '--git-clone-url', dest='git_clone_url', + help='The url to clone. This will be passed to `git clone`. (default: %(default)s)', + default='https://github.com/Azure/azure-cli.git') + cli_build_parser.add_argument('-t', '--type', dest='build_types', required=True, nargs='+', + choices=BUILD_TYPES + ['*'], + help="Space separated list of the artifacts to build. Use '*' for all.") + cli_build_parser.add_argument('-c', '--cli-version', dest='cli_version', required=True, + help="The version of the build. 
(ignored for 'pypi' type)") homebrew_args = cli_build_parser.add_argument_group('Homebrew Specific Arguments') - homebrew_args.add_argument('--homebrew-upstream-url', dest='homebrew_upstream_url', help='The upstream URL to specify in the formula.') + homebrew_args.add_argument('--homebrew-upstream-url', dest='homebrew_upstream_url', + help='The upstream URL to specify in the formula.') diff --git a/tools/automation/commandlint/run.py b/tools/automation/commandlint/run.py index 3f1241bf41e..08b300ec1a7 100644 --- a/tools/automation/commandlint/run.py +++ b/tools/automation/commandlint/run.py @@ -38,7 +38,7 @@ def dump_no_help(modules): try: import_module('azure.cli.command_modules.' + mod).load_params(mod) except Exception as ex: - print("EXCEPTION: " + str(mod)) + print("EXCEPTION: {} for module {}".format(ex, str(mod))) _update_command_definitions(cmd_table) add_id_parameters(cmd_table) @@ -66,8 +66,8 @@ def dump_no_help(modules): param_list = set() for key in cmd_table[cmd].arguments: name = cmd_table[cmd].arguments[key].name - if not cmd_table[cmd].arguments[key].type.settings.get('help') and \ - name not in white_list_parameters.get(cmd, []): + if not cmd_table[cmd].arguments[key].type.settings.get('help') and name not in white_list_parameters.get( + cmd, []): exit_val = 1 param_list.add(name) if param_list: diff --git a/tools/automation/style/__init__.py b/tools/automation/style/__init__.py index 34913fb394d..52932960dce 100644 --- a/tools/automation/style/__init__.py +++ b/tools/automation/style/__init__.py @@ -2,3 +2,106 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- + + +import argparse +import os +import multiprocessing +import sys +from subprocess import call + +import automation.utilities.path as automation_path + + +def run_pylint(modules): + print('\n\nRun pylint') + print('Modules: {}'.format(', '.join([name for name, _ in modules if not name.endswith('-nspkg')]))) + + modules_list = ' '.join( + [os.path.join(path, 'azure') for name, path in modules if not name.endswith('-nspkg')]) + arguments = '{} --rcfile={} -j {}'.format( + modules_list, + os.path.join(automation_path.get_repo_root(), 'pylintrc'), + multiprocessing.cpu_count()) + + return_code = call(('python -m pylint ' + arguments).split()) + + if return_code: + print('Pylint failed') + else: + print('Pylint passed') + + return return_code + + +def run_pep8(modules): + print('\n\nRun flake8 for PEP8 compliance') + print('Modules: {}'.format(', '.join(name for name, _ in modules))) + + command = 'flake8 --statistics --exclude=azure_bdist_wheel.py --append-config={} {}'.format( + os.path.join(automation_path.get_repo_root(), '.flake8'), + ' '.join(path for _, path in modules)) + + return_code = call(command.split()) + if return_code: + print('Flake8 failed') + else: + print('Flake8 passed') + + return return_code + + +def define_arguments(parser): + parser.add_argument('--ci', action='store_true', help='Run in CI mode') + parser.add_argument('--pep8', dest='suites', action='append_const', const='pep8', + help='Run flake8 to check PEP8') + parser.add_argument('--pylint', dest='suites', action='append_const', const='pylint', + help='Run pylint') + parser.add_argument('--module', dest='modules', action='append', + help='The modules on which the style check should run. 
Accept short names, '
+                             'except azure-cli, azure-cli-core and azure-cli-nspkg')
+
+
+def main_style(args):
+    if args.ci:
+        # When the command is run in CI mode all the other parameters are ignored
+        selected_modules = automation_path.filter_user_selected_modules(None)
+
+        # Run pylint on all modules
+        return_code_sum = run_pylint(selected_modules)
+
+        # Run flake8 on modules
+        return_code_sum += run_pep8(selected_modules)
+
+        sys.exit(return_code_sum)
+
+    selected_modules = automation_path.filter_user_selected_modules(args.modules)
+    if not selected_modules:
+        sys.stderr.write('No module is selected.\n')
+        sys.exit(1)
+
+    if not args.suites:
+        return_code_sum = run_pylint(selected_modules) + run_pep8(selected_modules)
+    else:
+        return_code_sum = 0
+        if 'pep8' in args.suites:
+            return_code_sum += run_pep8(selected_modules)
+
+        if 'pylint' in args.suites:
+            return_code_sum += run_pylint(selected_modules)
+
+    sys.exit(return_code_sum)
+
+
+def init_args(root):
+    parser = root.add_parser('style', help="Code style check.")
+    define_arguments(parser)
+    parser.set_defaults(func=main_style)
+
+
+def legacy_entry():
+    sys.stderr.write("The check_style command is going to be replaced by the 'azdev style' command.\n\n")
+    parser = argparse.ArgumentParser('Code style check.')
+    define_arguments(parser)
+    args = parser.parse_args()
+    main_style(args)
diff --git a/tools/automation/style/pylint_disable_check.py b/tools/automation/style/pylint_disable_check.py
index c9b5913e9f5..fd020b28610 100644
--- a/tools/automation/style/pylint_disable_check.py
+++ b/tools/automation/style/pylint_disable_check.py
@@ -59,7 +59,6 @@ def main():
     src_folder = os.path.join(get_repo_root(), 'src')
 
     all_rules = [e for e in get_all_rules(src_folder)]
-
     with open('pylint_report.txt', 'w') as f:
         f.write('GROUP BY RULES\n')
         f.writelines(tabulate.tabulate(sorted(group_by_rules(all_rules), key=lambda each: each[1], reverse=True),
@@ -75,4 +74,4 @@ def main():
 
 
 if __name__ == '__main__':
-    main()
\ No newline at end of file
+    main()
diff --git a/tools/automation/style/run.py b/tools/automation/style/run.py
deleted file mode 100644
index 48416a9915e..00000000000
--- a/tools/automation/style/run.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# -------------------------------------------------------------------------------------------- - -import argparse -import multiprocessing -import os.path -import sys -from subprocess import call - -import automation.utilities.path as automation_path - - -def run_pylint(modules): - print('\n\nRun pylint') - print( - 'Modules: {}'.format(', '.join([name for name, _ in modules if not name.endswith('-nspkg')]))) - - modules_list = ' '.join( - [os.path.join(path, 'azure') for name, path in modules if not name.endswith('-nspkg')]) - arguments = '{} --rcfile={} -j {}'.format( - modules_list, - os.path.join(automation_path.get_repo_root(), 'pylintrc'), - multiprocessing.cpu_count()) - - return_code = call(('python -m pylint ' + arguments).split()) - - if return_code: - print('Pylint failed') - else: - print('Pylint passed') - - return return_code - - -def run_pep8(modules): - print('\n\nRun flake8 for PEP8 compliance') - print('Modules: {}'.format(', '.join(name for name, _ in modules))) - - command = 'flake8 --statistics --exclude=azure_bdist_wheel.py --append-config={} {}'.format( - os.path.join(automation_path.get_repo_root(), '.flake8'), - ' '.join(path for _, path in modules)) - - return_code = call(command.split()) - if return_code: - print('Flake8 failed') - else: - print('Flake8 passed') - - return return_code - - -def main(): - parser = argparse.ArgumentParser('Code style tools') - parser.add_argument('--ci', action='store_true', help='Run in CI mode') - parser.add_argument('--pep8', dest='suites', action='append_const', const='pep8', - help='Run flake8 to check PEP8') - parser.add_argument('--pylint', dest='suites', action='append_const', const='pylint', - help='Run pylint') - parser.add_argument('--module', dest='modules', action='append', - help='The modules on which the style check should run. 
Accept short names, ' - 'except azure-cli, azure-cli-core and azure-cli-nspkg') - args = parser.parse_args() - - if args.ci: - # When the command is run in CI mode all the other parameters are ignored - selected_modules = automation_path.filter_user_selected_modules(None) - - # Run pylint on all modules - return_code_sum = run_pylint(selected_modules) - - # Run flake8 on modules - return_code_sum += run_pep8(selected_modules) - - sys.exit(return_code_sum) - - selected_modules = automation_path.filter_user_selected_modules(args.modules) - if not selected_modules: - parser.print_help() - sys.exit(1) - - if not args.suites: - return_code_sum = run_pylint(selected_modules) + run_pep8(selected_modules) - else: - return_code_sum = 0 - if 'pep8' in args.suites: - return_code_sum += run_pep8(selected_modules) - - if 'pylint' in args.suites: - return_code_sum += run_pylint(selected_modules) - - sys.exit(return_code_sum) - -if __name__ == '__main__': - main() diff --git a/tools/automation/tests/run.py b/tools/automation/tests/run.py index eef29b68961..fbb10c902dd 100644 --- a/tools/automation/tests/run.py +++ b/tools/automation/tests/run.py @@ -100,5 +100,6 @@ def main(): sys.exit(0 if success else 1) + if __name__ == '__main__': main() diff --git a/tools/automation/tests/verify_dependencies.py b/tools/automation/tests/verify_dependencies.py index 3767ada905b..9631a79e23d 100644 --- a/tools/automation/tests/verify_dependencies.py +++ b/tools/automation/tests/verify_dependencies.py @@ -10,8 +10,8 @@ import subprocess import sys -ALLOWED_ERRORS = [ -] +ALLOWED_ERRORS = [] + def verify_dependencies(): try: @@ -27,9 +27,11 @@ def verify_dependencies(): print('\n'.join(errors), file=sys.stderr) sys.exit(1) else: - print("'pip check' returned exit code {} but the errors are allowable.".format(err.returncode), file=sys.stderr) + print("'pip check' returned exit code {} but the errors are allowable.".format(err.returncode), + file=sys.stderr) print("Full output from pip follows:", file=sys.stderr) print(err.output, file=sys.stderr) + if __name__ == '__main__': verify_dependencies() diff --git a/tools/automation/tests/verify_package_versions.py b/tools/automation/tests/verify_package_versions.py index 507979a9b51..bcf94b41539 100644 --- a/tools/automation/tests/verify_package_versions.py +++ b/tools/automation/tests/verify_package_versions.py @@ -30,13 +30,15 @@ def contains_no_plus_dev(mod_version): return False return True + def changes_require_version_bump(mod_name, mod_version, mod_path): revision_range = os.environ.get('TRAVIS_COMMIT_RANGE', None) if revision_range: cmd = ["git", "log", "--pretty=format:* %s", revision_range, "--", mod_path, ":(exclude)*/tests/*"] changes = subprocess.check_output(cmd, cwd=mod_path, universal_newlines=True).strip() if changes and is_available_on_pypi(mod_name, mod_version): - print("There are changes to {} and the current version {} is already available on PyPI! Bump the version.".format(mod_name, mod_version)) + print("There are changes to {} and the current version {} is already available on PyPI! 
" + "Bump the version.".format(mod_name, mod_version)) print("Changes are as follows:") print(changes) return False @@ -46,8 +48,10 @@ def changes_require_version_bump(mod_name, mod_version, mod_path): # There's no revision range so we'll ignore this check return True + def check_package_version(mod_name, mod_path): - mod_version = subprocess.check_output('python setup.py --version'.split(), cwd=mod_path, universal_newlines=True).strip() + mod_version = subprocess.check_output('python setup.py --version'.split(), cwd=mod_path, + universal_newlines=True).strip() checks = [] if mod_name in ['azure-cli', 'azure-cli-core']: checks.append(is_unreleased_version(mod_name, mod_version)) diff --git a/tools/automation/tests/verify_packages.py b/tools/automation/tests/verify_packages.py index 2fe9059a098..49e19b18929 100644 --- a/tools/automation/tests/verify_packages.py +++ b/tools/automation/tests/verify_packages.py @@ -61,7 +61,7 @@ def _valid_wheel(wheel_path): # these files shouldn't exist in the wheel print('Verifying {}'.format(wheel_path)) bad_files = ['azure/__init__.py', 'azure/cli/__init__.py', 'azure/cli/command_modules/__init__.py'] - wheel_zip=zipfile.ZipFile(wheel_path) + wheel_zip = zipfile.ZipFile(wheel_path) whl_file_list = wheel_zip.namelist() if any(f in whl_file_list for f in bad_files): return False @@ -89,7 +89,8 @@ def verify_packages(built_packages_dir): all_modules = automation_path.get_all_module_paths() all_command_modules = automation_path.get_command_modules_paths(include_prefix=True) - modules_missing_manifest_in = [name for name, path in all_modules if not os.path.isfile(os.path.join(path, 'MANIFEST.in'))] + modules_missing_manifest_in = [name for name, path in all_modules if + not os.path.isfile(os.path.join(path, 'MANIFEST.in'))] if modules_missing_manifest_in: print_heading('Error: The following modules are missing the MANIFEST.in file.') print(modules_missing_manifest_in) @@ -138,8 +139,8 @@ def verify_packages(built_packages_dir): print('Installed command modules', installed_command_modules) - missing_modules = set([name for name, fullpath in all_command_modules]) - set(installed_command_modules) - \ - EXCLUDE_MODULES + missing_modules = set([name for name, fullpath in all_command_modules]) - set( + installed_command_modules) - EXCLUDE_MODULES if missing_modules: print_heading('Error: The following modules were not installed successfully', f=sys.stderr) @@ -165,6 +166,7 @@ def verify_packages(built_packages_dir): if __name__ == '__main__': import argparse + parser = argparse.ArgumentParser() parser.add_argument('build_folder', help='The path to the folder contains all wheel files.') diff --git a/tools/automation/utilities/pypi.py b/tools/automation/utilities/pypi.py index 24ac09500f3..fcd943395c7 100644 --- a/tools/automation/utilities/pypi.py +++ b/tools/automation/utilities/pypi.py @@ -8,6 +8,7 @@ except ImportError: import xmlrpc.client as xmlrpclib # pylint: disable=import-error + def is_available_on_pypi(module_name, module_version): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') available_versions = client.package_releases(module_name, True) diff --git a/tools/automation/verify/__init__.py b/tools/automation/verify/__init__.py index 28291fdd92f..b5620fb7804 100644 --- a/tools/automation/verify/__init__.py +++ b/tools/automation/verify/__init__.py @@ -13,8 +13,7 @@ def verify_license(args): import os from automation.utilities.path import get_repo_root - license_header = \ -"""# 
-------------------------------------------------------------------------------------------- + license_header = """# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- @@ -39,13 +38,13 @@ def verify_license(args): sys.stderr.write("Error: The following files don't have the required license headers: \n{}".format( '\n'.join(files_without_header))) - sys.exit(1) + sys.exit(1) def init_args(root): parser = root.add_parser('verify') sub_parser = parser.add_subparsers() - + license_verify = sub_parser.add_parser('license', help='Verify license headers.') license_verify.set_defaults(func=verify_license) diff --git a/tools/setup.py b/tools/setup.py index 5cee81dfe8f..c16b2f5fcb2 100644 --- a/tools/setup.py +++ b/tools/setup.py @@ -49,12 +49,12 @@ 'automation.setup', 'automation.coverage' ], - entry_points = { + entry_points={ 'console_scripts': [ 'azdev=automation.__main__:main', - 'check_style=automation.style.run:main', + 'check_style=automation.style:legacy_entry', 'run_tests=automation.tests.run:main' - ] + ] }, install_requires=DEPENDENCIES )
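
With this patch the per-module style check is exposed through the azdev console script declared in tools/setup.py, while the old check_style entry point keeps working via legacy_entry. A minimal sketch of local usage, assuming the tools package is installed into the active environment (for example with an editable install) and that short module names such as 'network' resolve through automation_path.filter_user_selected_modules:

    pip install -e ./tools                    # assumed: editable install of the automation tools
    azdev style --pylint --module network     # pylint only, for one module
    azdev style --ci                          # run pylint and flake8 across all modules, as CI would

Note that the --pep8 and --ci suites still invoke flake8, which must be installed separately now that scripts/ci/test_static.sh installs only pylint.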