
Commit cf1617b

ci: Clean up of Makefile and Gitpod setup (#464)
* switch to Dockerfile, same configuration as the kedro branch
* simplify setup
* update makefile
* simplified install in background while not slowing down startup
* fix command
* fix setup
* combined makefile

Signed-off-by: Nok <[email protected]>
1 parent 1eb2bf7 commit cf1617b

File tree

2 files changed: +36 −60 lines changed

.gitpod.yml

+10 −18

@@ -1,6 +1,4 @@
-# Learn more from ready-to-use templates: https://www.gitpod.io/docs/introduction/getting-started/quickstart
-image: gitpod/workspace-python-3.10:2023-04-20-16-32-37
-
+image: gitpod/workspace-python-3.11
 
 tasks:
   # We want packages installed during the pre-build init steps to go to /workspace
@@ -12,22 +10,16 @@ tasks:
       echo PIP_USER=no >> ~/.bashrc && export PIP_USER=no
     init: |
       make sign-off
+      pip install uv
+      uv venv
+      echo source .venv/bin/activate >> ~/.bashrc
+      source ~/.bashrc
+      make install-test-requirements plugin=kedro-datasets
     command: |
       pre-commit install --install-hooks
       clear
 
-
-github:
-  prebuilds:
-    # enable for the master/default branch (defaults to true)
-    master: true
-    # enable for all branches in this repo (defaults to false)
-    branches: true
-    # enable for pull requests coming from this repo (defaults to true)
-    pullRequests: true
-    # enable for pull requests coming from forks (defaults to false)
-    pullRequestsFromForks: true
-    # add a "Review in Gitpod" button as a comment to pull requests (defaults to true)
-    addComment: false
-    # add a "Review in Gitpod" button to pull requests (defaults to false)
-    addBadge: true
+  - name: system
+    init: |
+      sudo apt-get update && sudo apt-get install -y --no-install-recommends libgl1 make
+      sudo apt-get install -y --no-install-recommends libatk-bridge2.0-0 libcups2 ca-certificates fonts-liberation libasound2 libatk-bridge2.0-0 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgbm1 libgcc1 libglib2.0-0 libgtk-3-0 libnspr4 libnss3 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 lsb-release wget xdg-utils
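Note: the net effect of the first task's init block is to bootstrap a uv-managed virtual environment during the Gitpod pre-build. A minimal local equivalent, assuming bash and the plugin name used in the diff (an illustrative sketch, not part of the commit):

    pip install uv                                         # bootstrap uv
    uv venv                                                # create a virtual environment at ./.venv (uv's default)
    echo source .venv/bin/activate >> ~/.bashrc            # auto-activate it in future shells
    source ~/.bashrc                                       # pick up the activation in the current shell
    make install-test-requirements plugin=kedro-datasets   # install test dependencies via uv (see Makefile below)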

Makefile

+26 −42

@@ -5,13 +5,6 @@ package:
 	rm -Rf dist;\
 	python -m build
 
-pypi:
-	python -m pip install twine -U
-	python -m twine upload $(plugin)/dist/*
-
-install: package
-	cd $(plugin) && pip install -U dist/*.whl
-
 install-pip-setuptools:
 	python -m pip install -U pip setuptools wheel
 
@@ -25,46 +18,14 @@ mypy:
 test:
 	cd $(plugin) && pytest tests --cov-config pyproject.toml --numprocesses 4 --dist loadfile
 
-# Run test_tensorflow_model_dataset separately, because these tests are flaky when run as part of the full test-suite
-dataset-tests: dataset-doctests
-	cd kedro-datasets && pytest tests --cov-config pyproject.toml --numprocesses 4 --dist loadfile --ignore tests/tensorflow
-	cd kedro-datasets && pytest tests/tensorflow/test_tensorflow_model_dataset.py --no-cov
-
-extra_pytest_args-no-spark=--ignore kedro_datasets/databricks --ignore kedro_datasets/spark
-extra_pytest_args=
-dataset-doctest%:
-	if [ "${*}" != 's-no-spark' ] && [ "${*}" != 's' ]; then \
-		echo "make: *** No rule to make target \`${@}\`. Stop."; \
-		exit 2; \
-	fi; \
-	\
-	# The ignored datasets below require complicated setup with cloud/database clients which is overkill for the doctest examples.
-	cd kedro-datasets && pytest kedro_datasets --doctest-modules --doctest-continue-on-failure --no-cov \
-		--ignore kedro_datasets/pandas/gbq_dataset.py \
-		--ignore kedro_datasets/partitions/partitioned_dataset.py \
-		--ignore kedro_datasets/redis/redis_dataset.py \
-		--ignore kedro_datasets/snowflake/snowpark_dataset.py \
-		--ignore kedro_datasets/spark/spark_hive_dataset.py \
-		--ignore kedro_datasets/spark/spark_jdbc_dataset.py \
-		$(extra_pytest_arg${*})
-
-test-sequential:
-	cd $(plugin) && pytest tests --cov-config pyproject.toml
-
 e2e-tests:
 	cd $(plugin) && behave
 
 secret-scan:
 	trufflehog --max_depth 1 --exclude_paths trufflehog-ignore.txt .
 
-clean:
-	cd $(plugin);\
-	rm -rf build dist pip-wheel-metadata .pytest_cache;\
-	find . -regex ".*/__pycache__" -exec rm -rf {} +;\
-	find . -regex ".*\.egg-info" -exec rm -rf {} +;\
-
 install-test-requirements:
-	cd $(plugin) && pip install ".[test]"
+	cd $(plugin) && uv pip install ".[test]"
 
 install-pre-commit:
 	pre-commit install --install-hooks
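Note: because install-test-requirements now delegates to uv pip, it assumes uv is installed and a virtual environment is active — which the Gitpod init task above guarantees. A usage sketch, using the same plugin argument as the diff:

    # inside the activated .venv created by `uv venv`
    make install-test-requirements plugin=kedro-datasets
    # → runs: cd kedro-datasets && uv pip install ".[test]"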
@@ -79,12 +40,12 @@ sign-off:
 	echo '--in-place "$$1"' >> .git/hooks/commit-msg
 	chmod +x .git/hooks/commit-msg
 
+## kedro-datasets specific
+
 # kedro-datasets related only
 test-no-spark: dataset-doctests-no-spark
 	cd kedro-datasets && pytest tests --no-cov --ignore tests/spark --ignore tests/databricks --numprocesses 4 --dist loadfile
 
-test-no-spark-sequential: dataset-doctests-no-spark
-	cd kedro-datasets && pytest tests --no-cov --ignore tests/spark --ignore tests/databricks
 
 # kedro-datasets/snowflake tests skipped from default scope
 test-snowflake-only:
@@ -93,3 +54,26 @@ test-snowflake-only:
 
 check-datasets-docs:
 	cd kedro-datasets && python -m sphinx -WETan -j auto -D language=en -b linkcheck -d _build/doctrees docs/source _build/linkcheck
+
+# Run test_tensorflow_model_dataset separately, because these tests are flaky when run as part of the full test-suite
+dataset-tests: dataset-doctests
+	cd kedro-datasets && pytest tests --cov-config pyproject.toml --numprocesses 4 --dist loadfile --ignore tests/tensorflow
+	cd kedro-datasets && pytest tests/tensorflow/test_tensorflow_model_dataset.py --no-cov
+
+extra_pytest_args-no-spark=--ignore kedro_datasets/databricks --ignore kedro_datasets/spark
+extra_pytest_args=
+dataset-doctest%:
+	if [ "${*}" != 's-no-spark' ] && [ "${*}" != 's' ]; then \
+		echo "make: *** No rule to make target \`${@}\`. Stop."; \
+		exit 2; \
+	fi; \
+	\
+	# The ignored datasets below require complicated setup with cloud/database clients which is overkill for the doctest examples.
+	cd kedro-datasets && pytest kedro_datasets --doctest-modules --doctest-continue-on-failure --no-cov \
+		--ignore kedro_datasets/pandas/gbq_dataset.py \
+		--ignore kedro_datasets/partitions/partitioned_dataset.py \
+		--ignore kedro_datasets/redis/redis_dataset.py \
+		--ignore kedro_datasets/snowflake/snowpark_dataset.py \
+		--ignore kedro_datasets/spark/spark_hive_dataset.py \
+		--ignore kedro_datasets/spark/spark_jdbc_dataset.py \
+		$(extra_pytest_arg${*})
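Note: the relocated dataset-doctest% target is a Make pattern rule. The % stem (available as $* in the recipe) must be either 's' or 's-no-spark', and it is spliced into the variable reference $(extra_pytest_arg${*}) to select between the empty extra_pytest_args and the Spark-excluding extra_pytest_args-no-spark. A sketch of the invocations (the last target name is hypothetical, to show the guard firing):

    make dataset-doctests            # stem $* = 's'          → $(extra_pytest_args) is empty, all doctests run
    make dataset-doctests-no-spark   # stem $* = 's-no-spark' → $(extra_pytest_args-no-spark) adds
                                     #   --ignore kedro_datasets/databricks --ignore kedro_datasets/spark
    make dataset-doctest-typo        # any other stem hits the guard and exits 2 with a "No rule" message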
