Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 0 additions & 20 deletions .github/workflows/jira.pr.yml

This file was deleted.

27 changes: 27 additions & 0 deletions .github/workflows/pythonapp.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# GitHub Actions workflow that runs MLflow's "small" Python test suite.
# NOTE(review): indentation restored to conventional YAML nesting — the
# original scrape had leading whitespace stripped; structure inferred from
# the GitHub Actions workflow schema (jobs -> steps).
name: MLflow tests

# Trigger on pushes to master and on pull requests targeting master.
on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  python-small:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.6
        uses: actions/setup-python@v1
        with:
          python-version: 3.6
      - name: Install dependencies
        run: |
          # GITHUB_WORKFLOW gates Travis-only steps (e.g. `sudo chown travis`,
          # boto.cfg cleanup) inside the shared install script.
          export GITHUB_WORKFLOW=1
          # Installs miniconda plus the "small" Python test requirements.
          INSTALL_SMALL_PYTHON_DEPS=true source ./travis/install-common-deps.sh
      - name: Run tests
        run: |
          export GITHUB_WORKFLOW=1
          # Conda was installed to $HOME/miniconda by the previous step;
          # activate its test environment before running the suite.
          export PATH="$HOME/miniconda/bin:$PATH"
          source activate test-environment
          ./travis/run-small-python-tests.sh
4 changes: 4 additions & 0 deletions tests/models/test_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,7 @@ def test_model_with_no_deployable_flavors_fails_pollitely():
assert "No suitable flavor backend was found for the model." in stderr


@pytest.mark.large
def test_serve_gunicorn_opts(iris_data, sk_model):
if sys.platform == "win32":
pytest.skip("This test requires gunicorn which is not available on windows.")
Expand Down Expand Up @@ -159,6 +160,7 @@ def test_serve_gunicorn_opts(iris_data, sk_model):
assert expected_command_pattern.search(stdout) is not None


@pytest.mark.large
def test_predict(iris_data, sk_model):
with TempDir(chdr=True) as tmp:
with mlflow.start_run() as active_run:
Expand Down Expand Up @@ -245,6 +247,7 @@ def test_predict(iris_data, sk_model):
assert all(expected == actual)


@pytest.mark.large
def test_prepare_env_passes(sk_model):
if no_conda:
pytest.skip("This test requires conda.")
Expand All @@ -270,6 +273,7 @@ def test_prepare_env_passes(sk_model):
assert p.wait() == 0


@pytest.mark.large
def test_prepare_env_fails(sk_model):
if no_conda:
pytest.skip("This test requires conda.")
Expand Down
2 changes: 2 additions & 0 deletions tests/projects/test_docker_projects.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ def _build_uri(base_uri, subdirectory):


@pytest.mark.parametrize("use_start_run", map(str, [0, 1]))
@pytest.mark.large
def test_docker_project_execution(
use_start_run,
tmpdir, docker_example_base_image): # pylint: disable=unused-argument
Expand Down Expand Up @@ -68,6 +69,7 @@ def test_docker_project_execution(
("databricks://some-profile", "-e MLFLOW_TRACKING_URI=databricks ")
])
@mock.patch('databricks_cli.configure.provider.ProfileConfigProvider')
@pytest.mark.large
def test_docker_project_tracking_uri_propagation(
ProfileConfigProvider, tmpdir, tracking_uri,
expected_command_segment, docker_example_base_image): # pylint: disable=unused-argument
Expand Down
2 changes: 2 additions & 0 deletions tests/projects/test_projects.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,7 @@ def test_invalid_run_mode():
mlflow.projects.run(uri=TEST_PROJECT_DIR, backend="some unsupported mode")


@pytest.mark.large
def test_use_conda():
""" Verify that we correctly handle the `use_conda` argument."""
# Verify we throw an exception when conda is unavailable
Expand All @@ -207,6 +208,7 @@ def test_use_conda():
os.environ["CONDA_EXE"] = conda_exe_path


@pytest.mark.large
def test_expected_tags_logged_when_using_conda():
with mock.patch.object(mlflow.tracking.MlflowClient, "set_tag") as tag_mock:
try:
Expand Down
26 changes: 13 additions & 13 deletions travis/install-common-deps.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,17 +2,15 @@

set -ex
sudo mkdir -p /travis-install
sudo chown travis /travis-install
if [[ -z $GITHUB_WORKFLOW ]]; then
sudo chown travis /travis-install
fi
# (The conda installation steps below are taken from http://conda.pydata.org/docs/travis.html)
# We do this conditionally because it saves us some downloading if the
# version is the same.
if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
wget https://repo.anaconda.com/miniconda/Miniconda2-latest-Linux-x86_64.sh -O /travis-install/miniconda.sh;
else
wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O /travis-install/miniconda.sh;
fi
wget -q https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh

bash /travis-install/miniconda.sh -b -p $HOME/miniconda
bash miniconda.sh -b -p $HOME/miniconda
export PATH="$HOME/miniconda/bin:$PATH"
hash -r
conda config --set always_yes yes --set changeps1 no
Expand All @@ -28,21 +26,23 @@ python --version
pip install --upgrade pip==19.3.1
# Install Python test dependencies only if we're running Python tests
if [[ "$INSTALL_SMALL_PYTHON_DEPS" == "true" ]]; then
pip install -r ./travis/small-requirements.txt
pip install --quiet -r ./travis/small-requirements.txt
fi
if [[ "$INSTALL_LARGE_PYTHON_DEPS" == "true" ]]; then
pip install -r ./travis/large-requirements.txt
pip install --quiet -r ./travis/large-requirements.txt
# Hack: make sure all spark-* scripts are executable.
# Conda installs 2 version spark-* scripts and makes the ones spark
# uses not executable. This is a temporary fix to unblock the tests.
ls -lha `find /home/travis/miniconda/envs/test-environment/ -path "*bin/spark-*"`
chmod 777 `find /home/travis/miniconda/envs/test-environment/ -path "*bin/spark-*"`
ls -lha `find /home/travis/miniconda/envs/test-environment/ -path "*bin/spark-*"`
ls -lha $(find $HOME/miniconda/envs/test-environment/ -path "*bin/spark-*")
chmod 777 $(find $HOME/miniconda/envs/test-environment/ -path "*bin/spark-*")
ls -lha $(find $HOME/miniconda/envs/test-environment/ -path "*bin/spark-*")
fi
pip install .
export MLFLOW_HOME=$(pwd)
# Remove boto config present in Travis VMs (https://github.com/travis-ci/travis-ci/issues/7940)
sudo rm -f /etc/boto.cfg
if [[ -z $GITHUB_WORKFLOW ]]; then
sudo rm -f /etc/boto.cfg
fi
# Print current environment info
pip list
which mlflow
Expand Down