Skip to content

Commit 745208d

Browse files
Vibhu Jawa (VibhuJawa)
and
Vibhu Jawa
authored
Fix CI Build Issues (#110)
* Fix crossfit build
  Signed-off-by: Vibhu Jawa <[email protected]>
* Fix crossfit build
  Signed-off-by: Vibhu Jawa <[email protected]>
* Test on 3.10+ only
  Signed-off-by: Vibhu Jawa <[email protected]>
* Test on 3.10+ only
  Signed-off-by: Vibhu Jawa <[email protected]>
* Update rapids to 25.02
* Try a different docker image
  Signed-off-by: Vibhu Jawa <[email protected]>
* Switch dask min dependency to 2024.12.1
  Signed-off-by: Vibhu Jawa <[email protected]>

---------

Signed-off-by: Vibhu Jawa <[email protected]>
Signed-off-by: Vibhu Jawa <[email protected]>
Co-authored-by: Vibhu Jawa <[email protected]>
1 parent 654e1dc commit 745208d

File tree

7 files changed

+15
-69
lines changed

7 files changed

+15
-69
lines changed

.github/workflows/base.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ jobs:
1414
runs-on: ubuntu-latest
1515
strategy:
1616
matrix:
17-
python-version: ["3.8", "3.9", "3.10"]
17+
python-version: ["3.10", "3.11", "3.12"]
1818
steps:
1919
- uses: actions/checkout@v3
2020
- name: Set up Python ${{ matrix.python-version }}

.github/workflows/cf_backends.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ jobs:
1414
runs-on: ${{ matrix.os }}
1515
strategy:
1616
matrix:
17-
python-version: [3.8]
17+
python-version: ["3.10"]
1818
os: [ubuntu-latest]
1919
torch-version: ["~=1.11.0", "~=1.12.0", "~=1.13.0"]
2020

.github/workflows/pr.yaml

+3-3
Original file line numberDiff line numberDiff line change
@@ -13,16 +13,16 @@ jobs:
1313
needs:
1414
- python-gpu-tests
1515
secrets: inherit
16-
uses: rapidsai/shared-workflows/.github/workflows/pr-builder.yaml@branch-24.08
16+
uses: rapidsai/shared-workflows/.github/workflows/pr-builder.yaml@branch-25.02
1717

1818
python-gpu-tests:
1919
secrets: inherit
20-
uses: rapidsai/shared-workflows/.github/workflows/custom-job.yaml@branch-24.08
20+
uses: rapidsai/shared-workflows/.github/workflows/custom-job.yaml@branch-25.02
2121
with:
2222
build_type: pull-request
2323
node_type: "gpu-v100-latest-1"
2424
arch: "amd64"
25-
container_image: "rapidsai/base:24.08-cuda12.2-py3.11"
25+
container_image: "rapidsai/base:25.02-cuda12.8-py3.11"
2626
run_script: "ci/test_gpu.sh"
2727

2828
# benchmark:

conda/environments/cuda_dev.yaml

+6-6
Original file line numberDiff line numberDiff line change
@@ -7,17 +7,17 @@ dependencies:
77
- bandit
88
- black
99
- cuda-version=12.0
10-
- cudf>=24.12
11-
- cuml>=24.12
10+
- cudf>=25.02
11+
- cuml>=25.02
1212
- cupy>=12.0.0
13-
- cuvs>=24.12
14-
- dask-cuda>=24.12
15-
- dask-cudf>=24.12
13+
- cuvs>=25.02
14+
- dask-cuda>=25.02
15+
- dask-cudf>=25.02
1616
- flake8
1717
- isort
1818
- pip
1919
- pre_commit
20-
- pylibraft>=24.12
20+
- pylibraft>=25.02
2121
- pytest
2222
- pytest-benchmark
2323
- pytest-cov>=2

crossfit/backend/dask/cluster.py

-55
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020

2121
import dask
2222
import distributed
23-
from dask.dataframe.optimize import optimize as dd_optimize
2423
from dask.distributed import Client, get_client
2524

2625
from crossfit.backend.gpu import HAS_GPU
@@ -93,60 +92,6 @@ def increase_gc_threshold():
9392
gc.set_threshold(g0 * 3, g1 * 3, g2 * 3)
9493

9594

96-
def ensure_optimize_dataframe_graph(ddf=None, dsk=None, keys=None):
97-
"""Perform HLG DataFrame optimizations
98-
99-
If `ddf` is specified, an optimized Dataframe
100-
collection will be returned. If `dsk` and `keys`
101-
are specified, an optimized graph will be returned.
102-
103-
These optimizations are performed automatically
104-
when a DataFrame collection is computed/persisted,
105-
but they are NOT always performed when statistics
106-
are computed. The purpose of this utility is to
107-
ensure that the Dataframe-based optimizations are
108-
always applied.
109-
110-
Parameters
111-
----------
112-
ddf : dask_cudf.DataFrame, optional
113-
The dataframe to optimize, by default None
114-
dsk : dask.highlevelgraph.HighLevelGraph, optional
115-
Dask high level graph, by default None
116-
keys : List[str], optional
117-
The keys to optimize, by default None
118-
119-
Returns
120-
-------
121-
Union[dask_cudf.DataFrame, dask.highlevelgraph.HighLevelGraph]
122-
A dask_cudf DataFrame or dask HighLevelGraph depending
123-
on the parameters provided.
124-
125-
Raises
126-
------
127-
ValueError
128-
If ddf is not provided and one of dsk or keys are None.
129-
"""
130-
131-
if ddf is None:
132-
if dsk is None or keys is None:
133-
raise ValueError("Must specify both `dsk` and `keys` if `ddf` is not supplied.")
134-
dsk = ddf.dask if dsk is None else dsk
135-
keys = ddf.__dask_keys__() if keys is None else keys
136-
137-
if isinstance(dsk, dask.highlevelgraph.HighLevelGraph):
138-
with dask.config.set({"optimization.fuse.active": False}):
139-
dsk = dd_optimize(dsk, keys=keys)
140-
141-
if ddf is None:
142-
# Return optimized graph
143-
return dsk
144-
145-
# Return optimized ddf
146-
ddf.dask = dsk
147-
return ddf
148-
149-
15095
class Distributed:
15196
"""Distributed-Execution Context Manager
15297

requirements/base.txt

+3-2
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,10 @@
11
astunparse
22
pandas
3+
pyarrow
34
numpy
45
numba
5-
dask
6-
distributed>=2022.11.1
6+
dask[dataframe]>=2024.12.1
7+
distributed>=2024.12.1
78
scikit-learn>=1.2.0
89
fsspec>=2022.7.1
910
tensorflow_metadata

setup.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -78,6 +78,6 @@ def read_requirements(filename):
7878
**dev_requirements,
7979
"all": list(itertools.chain(*list(requirements.values()))),
8080
},
81-
python_requires=">=3.8, <3.13",
81+
python_requires=">=3.10, <3.13",
8282
test_suite="tests",
8383
)

0 commit comments

Comments (0)