Skip to content

Commit

Permalink
Merge branch 'staging' into miguelgfierro-patch-1
Browse files Browse the repository at this point in the history
  • Loading branch information
miguelgfierro authored Jun 8, 2023
2 parents 655709c + 58753b6 commit a2b76de
Show file tree
Hide file tree
Showing 3 changed files with 27 additions and 29 deletions.
9 changes: 3 additions & 6 deletions .github/actions/azureml-test/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -82,9 +82,6 @@ runs:
- name: Install wheel package
shell: bash
run: pip install --quiet wheel
- name: Create wheel from setup.py
shell: bash
run: python setup.py --quiet bdist_wheel
- name: Submit CPU tests to AzureML
shell: bash
if: contains(inputs.TEST_GROUP, 'cpu')
Expand All @@ -94,7 +91,7 @@ runs:
--rg ${{inputs.RG}} --wsname ${{inputs.WS}} --expname ${{inputs.EXP_NAME}}_${{inputs.TEST_GROUP}}
--testlogs ${{inputs.TEST_LOGS_PATH}} --testkind ${{inputs.TEST_KIND}}
--conda_pkg_python ${{inputs.PYTHON_VERSION}} --testgroup ${{inputs.TEST_GROUP}}
--disable-warnings
--disable-warnings --sha "${GITHUB_SHA}"
- name: Submit GPU tests to AzureML
shell: bash
if: contains(inputs.TEST_GROUP, 'gpu')
Expand All @@ -104,7 +101,7 @@ runs:
--rg ${{inputs.RG}} --wsname ${{inputs.WS}} --expname ${{inputs.EXP_NAME}}_${{inputs.TEST_GROUP}}
--testlogs ${{inputs.TEST_LOGS_PATH}} --add_gpu_dependencies --testkind ${{inputs.TEST_KIND}}
--conda_pkg_python ${{inputs.PYTHON_VERSION}} --testgroup ${{inputs.TEST_GROUP}}
--disable-warnings
--disable-warnings --sha "${GITHUB_SHA}"
- name: Submit PySpark tests to AzureML
shell: bash
if: contains(inputs.TEST_GROUP, 'spark')
Expand All @@ -114,7 +111,7 @@ runs:
--rg ${{inputs.RG}} --wsname ${{inputs.WS}} --expname ${{inputs.EXP_NAME}}_${{inputs.TEST_GROUP}}
--testlogs ${{inputs.TEST_LOGS_PATH}} --add_spark_dependencies --testkind ${{inputs.TEST_KIND}}
--conda_pkg_python ${{inputs.PYTHON_VERSION}} --testgroup ${{inputs.TEST_GROUP}}
--disable-warnings
--disable-warnings --sha "${GITHUB_SHA}"
- name: Print test logs
shell: bash
run: cat ${{inputs.TEST_LOGS_PATH}}
Expand Down
10 changes: 7 additions & 3 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,8 @@
"jinja2>=2,<3.1",
"pyyaml>=5.4.1,<6",
"requests>=2.0.0,<3",
"cornac>=1.1.2,<2",
"cornac>=1.1.2,<1.15.2;python_version<='3.7'",
"cornac>=1.15.2,<2;python_version>='3.8'", # After 1.15.2, Cornac requires python 3.8
"retrying>=1.3.3",
"pandera[strategies]>=0.6.5", # For generating fake datasets
"scikit-surprise>=1.0.6",
Expand Down Expand Up @@ -136,6 +137,9 @@
install_requires=install_requires,
package_dir={"recommenders": "recommenders"},
python_requires=">=3.6, <3.10",
packages=find_packages(where=".", exclude=["contrib", "docs", "examples", "scenarios", "tests", "tools"]),
setup_requires=["numpy>=1.19"]
packages=find_packages(
where=".",
exclude=["contrib", "docs", "examples", "scenarios", "tests", "tools"],
),
setup_requires=["numpy>=1.19"],
)
37 changes: 17 additions & 20 deletions tests/ci/azureml_tests/submit_groupwise_azureml_pytest.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,7 +152,7 @@ def create_run_config(
add_spark_dependencies,
conda_pkg_jdk,
conda_pkg_python,
reco_wheel_path,
commit_sha,
):
"""
AzureML requires the run environment to be setup prior to submission.
Expand All @@ -172,6 +172,7 @@ def create_run_config(
added to the conda environment, else False
add_spark_dependencies (bool) : True if PySpark packages should be
added to the conda environment, else False
        commit_sha (str) : the SHA of the commit that triggered the workflow
Return:
run_azuremlcompute : AzureML run config
Expand All @@ -188,32 +189,28 @@ def create_run_config(
# True means the user will manually configure the environment
run_azuremlcompute.environment.python.user_managed_dependencies = False

# install local version of recommenders on AzureML compute using .whl file
whl_url = run_azuremlcompute.environment.add_private_pip_wheel(
workspace=workspace,
file_path=reco_wheel_path,
exist_ok=True,
)
conda_dep = CondaDependencies()
conda_dep.add_conda_package(conda_pkg_python)
conda_dep.add_pip_package(whl_url)
conda_dep.add_pip_package(
"pymanopt@https://github.com/pymanopt/pymanopt/archive/fb36a272cdeecb21992cfd9271eb82baafeb316d.zip"
)

# install extra dependencies
# install recommenders
reco_extras = "dev,examples"
if add_gpu_dependencies and add_spark_dependencies:
conda_dep.add_channel("conda-forge")
conda_dep.add_conda_package(conda_pkg_jdk)
conda_dep.add_pip_package("recommenders[dev,examples,spark,gpu]")
reco_extras = reco_extras + ",spark,gpu"
elif add_gpu_dependencies:
conda_dep.add_pip_package("recommenders[dev,examples,gpu]")
reco_extras = reco_extras + ",gpu"
elif add_spark_dependencies:
conda_dep.add_channel("conda-forge")
conda_dep.add_conda_package(conda_pkg_jdk)
conda_dep.add_pip_package("recommenders[dev,examples,spark]")
else:
conda_dep.add_pip_package("recommenders[dev,examples]")
reco_extras = reco_extras + ",spark"

conda_dep.add_pip_package(
f"recommenders[{reco_extras}]@git+https://github.com/microsoft/recommenders.git@{commit_sha}"
)

run_azuremlcompute.environment.python.conda_dependencies = conda_dep
return run_azuremlcompute
Expand Down Expand Up @@ -286,6 +283,11 @@ def create_arg_parser():
"""

parser = argparse.ArgumentParser(description="Process some inputs")
parser.add_argument(
"--sha",
action="store",
help="the commit that triggers the workflow",
)
# script to run pytest
parser.add_argument(
"--test",
Expand Down Expand Up @@ -448,11 +450,6 @@ def create_arg_parser():
max_nodes=args.maxnodes,
)

wheel_list = glob.glob("./dist/*.whl")
if not wheel_list:
logger.error("Wheel not found!")
logger.info("Found wheel at " + wheel_list[0])

run_config = create_run_config(
cpu_cluster=cpu_cluster,
docker_proc_type=docker_proc_type,
Expand All @@ -461,7 +458,7 @@ def create_arg_parser():
add_spark_dependencies=args.add_spark_dependencies,
conda_pkg_jdk=args.conda_pkg_jdk,
conda_pkg_python=args.conda_pkg_python,
reco_wheel_path=wheel_list[0],
commit_sha=args.sha,
)

logger.info("exp: In Azure, look for experiment named {}".format(args.expname))
Expand Down

0 comments on commit a2b76de

Please sign in to comment.