-
Notifications
You must be signed in to change notification settings - Fork 30
124 lines (105 loc) · 3.77 KB
/
ci.yml
File metadata and controls
124 lines (105 loc) · 3.77 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
# CI workflow for morecantile: test matrix, benchmarks, and PyPI publishing.
name: CI

# Run on pushes to main, on any tag push, and on all pull requests.
on:
  push:
    branches:
      - main
    tags:
      - '*'
  pull_request:

env:
  # Newest Python in the test matrix; used to gate the once-per-run steps
  # (lint, type-check, coverage upload) and as the interpreter for the
  # benchmark and publish jobs.
  LATEST_PY_VERSION: '3.14'
jobs:
  # Run the full test suite against every supported Python version.
  tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.11', '3.12', '3.13', '3.14']
    steps:
      - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
      - name: Install uv
        uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7
        with:
          version: "0.9.*"
          enable-cache: true
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: uv sync --extra rasterio
      # Lint and type-check only once per run, on the newest Python.
      - name: Run pre-commit
        if: ${{ matrix.python-version == env.LATEST_PY_VERSION }}
        run: |
          uv run pre-commit run --all-files
          uv run --with mypy mypy -p morecantile --ignore-missing-imports
      - name: Run tests
        run: uv run pytest --cov morecantile --cov-report term-missing --cov-report xml
      # Upload coverage once (newest Python only) to avoid duplicate reports.
      - name: Upload Results
        if: ${{ matrix.python-version == env.LATEST_PY_VERSION }}
        uses: codecov/codecov-action@75cd11691c0faa626561e295848008c8a7dddffe # v5
        with:
          files: ./coverage.xml
          flags: unittests
          fail_ci_if_error: false
          token: ${{ secrets.CODECOV_TOKEN }}
  # Run pytest-benchmark and record results; on pushes to main the results
  # are committed to the 'gh-benchmarks' branch under dev/benchmarks.
  benchmark:
    needs: [tests]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
      - name: Install uv
        uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7
        with:
          version: "0.9.*"
          enable-cache: true
          python-version: ${{ env.LATEST_PY_VERSION }}
      - name: Install dependencies
        run: |
          uv sync --group benchmark
      - name: Run Benchmark
        run: |
          uv run pytest tests/benchmarks.py --benchmark-only --benchmark-columns 'min, max, mean, median' --benchmark-sort 'min' --benchmark-json output.json
      - name: Store and Compare benchmark result
        uses: benchmark-action/github-action-benchmark@a60cea5bc7b49e15c1f58f411161f99e0df48372 # v1
        with:
          name: morecantile Benchmarks
          tool: 'pytest'
          output-file-path: output.json
          # Comment on the commit if a benchmark regresses past 150% of
          # the stored baseline, but do not fail the job.
          alert-threshold: '150%'
          comment-on-alert: true
          fail-on-alert: false
          # GitHub API token to make a commit comment
          github-token: ${{ secrets.GITHUB_TOKEN }}
          gh-pages-branch: 'gh-benchmarks'
          # Make a commit on `gh-pages` only if main
          auto-push: ${{ github.ref == 'refs/heads/main' }}
          benchmark-data-dir-path: dev/benchmarks
publish:
needs: [tests]
runs-on: ubuntu-latest
if: startsWith(github.event.ref, 'refs/tags') || github.event_name == 'release'
steps:
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
- name: Install uv
uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7
with:
version: "0.9.*"
enable-cache: true
python-version: ${{ env.LATEST_PY_VERSION }}
- name: Install dependencies
run: |
uv sync --group deploy
- name: Set tag version
id: tag
run: |
echo "version=${GITHUB_REF#refs/*/}"
echo "version=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT
- name: Set module version
id: module
run: |
echo "version=$(uv run hatch --quiet version)" >> $GITHUB_OUTPUT
- name: Build and publish
if: ${{ steps.tag.outputs.version }} == ${{ steps.module.outputs.version}}
env:
HATCH_INDEX_USER: ${{ secrets.PYPI_USERNAME }}
HATCH_INDEX_AUTH: ${{ secrets.PYPI_PASSWORD }}
run: |
uv run hatch build
uv run hatch publish